def distance(lat1, lon1, lat2, lon2, in_km=False):
    '''
    Return the distance between two geographical points.

    Arguments:
    lat1 - geographical latitude of point A
    lon1 - geographical longitude of point A
    lat2 - geographical latitude of point B
    lon2 - geographical longitude of point B

    Keyword Arguments:
    in_km - Default: False. If in_km is a value which evaluates to True,
    the distance between point A and point B is returned in kilometers.

    Returns:
    Returns the distance between point A and point B. By default, distance
    is returned in degrees.

    Example:
    In [1]: import antpy
    In [2]: antpy.distance(45.45, -75.7, 32.7, -117.17)
    Out[2]: 34.17313568649101
    In [3]: antpy.distance(45.45, -75.7, 32.7, -117.17, in_km=True)
    Out[3]: 3804.1522020402367
    '''
    # Delegate the spherical geometry to the Datascope expression calculator
    if not in_km:
        return Dbptr().ex_eval('distance(%f ,%f ,%f, %f)'
                               % (lat1, lon1, lat2, lon2))
    deg = Dbptr().ex_eval('distance(%f, %f, %f, %f)'
                          % (lat1, lon1, lat2, lon2))
    return Dbptr().ex_eval('deg2km(%f)' % deg)
def _ptrNULL(self):
    """
    Return a pointer aimed at the NULL record of this table.
    """
    null_rec = Dbptr(self._ptr)
    null_rec.record = dbNULL
    return null_rec
def test_connection_context(self):
    """Test for Connection context manager methods"""
    # Inside the context the underlying db must be open...
    with connect(self.dsn) as conn:
        dbptr = Dbptr(getattr(conn, '_dbptr'))
        self.assertNotEqual(dbptr.query('dbDATABASE_COUNT'), 0)
    # ...and __exit__ must have closed it
    self.assertEqual(dbptr.query('dbDATABASE_COUNT'), 0)
def test_cursor_close(self):
    """Test Cursor close"""
    # Open a cursor and verify the db behind it is connected
    curs = Connection(self.dsn).cursor()
    dbptr = Dbptr(getattr(curs, '_dbptr'))
    self.assertNotEqual(dbptr.query('dbDATABASE_COUNT'), 0)
    # Closing the cursor must close the underlying db
    curs.close()
    self.assertEqual(dbptr.query('dbDATABASE_COUNT'), 0)
def __str__(self):
    """
    Prints out record content as a string.

    SHOULD be the same as if you cat'ted a line from the table text file
    """
    db = Dbptr(self._ptr)
    # NOTE: the comprehension's loop variable is 'db.field' -- each pass
    # points the Dbptr at the next field index so dbFIELD_FORMAT returns
    # that field's printf-style format code. Joining them yields one
    # format string for the whole row.
    formatting = ' '.join([db.query('dbFIELD_FORMAT')
                           for db.field in range(len(self.TABLE_FIELDS))])
    # Fetch each field's value through our own attribute lookup
    fields = tuple([self.__getattr__(f) for f in self.TABLE_FIELDS])
    return formatting % fields
def __init__(self, db=None):
    """
    Testing object relational mapper-type thing...
    """
    # Guard clause: an empty constructor is not supported (yet)
    if not db:
        self._ptr = Dbptr()
        raise NotImplementedError("No empty contructor allowed here yet...")
    # Must point at exactly one record, not the whole table
    if db.record == dbALL:
        raise ValueError("Rec # is 'dbALL', one record only, please.")
    self._ptr = Dbptr(db)
def __init__(self, dbv=None):
    """
    Creates from a pointer

    :type dbv: antelope.datascope.Dbptr
    :param dbv: Open pointer to an Antelope database view or table
    """
    super(DbrecordPtrList, self).__init__()
    if isinstance(dbv, Dbptr):
        # One DbrecordPtr per record; 'ptr.record' is the loop variable,
        # stepping the pointer through the view
        ptr = Dbptr(dbv)
        self.extend([DbrecordPtr(ptr) for ptr.record in range(ptr.nrecs())])
    elif isinstance(dbv, list):
        # Keep only items that already are DbrecordPtr's
        self.extend([item for item in dbv if isinstance(item, DbrecordPtr)])
def __init__(self, database=None, **kwargs):
    """
    Sets the pointer.

    :type dbv: antelope.datascope.Dbptr
    :param dbv: Open pointer to an Antelope database view or table
    """
    super(AttribDbptr, self).__init__()
    if isinstance(database, Dbptr):
        # Copy the caller's pointer; we did not open it
        self.Ptr = Dbptr(database)
    elif isinstance(database, str):
        # Open by name and remember that we own the handle
        self.Ptr = dbopen(database, 'r')
        self._opened = True
    else:
        raise TypeError("Input pointer or string of valid database")
    # Any extra keyword args are passed straight to dblookup
    if kwargs:
        self.Ptr = dblookup(self.Ptr, **kwargs)
class RowPointerDict(dict):
    """
    Dict-like view of a single record of an open Datascope pointer.

    Stores only the pointer; field access goes through getv/putv so the
    database connection must stay open for the lifetime of this object.
    """

    _dbptr = None  # the wrapped Dbptr

    def __init__(self, dbptr=None, record=None):
        ptr = Dbptr(dbptr)
        if record is not None:
            ptr.record = record
        # An unset/negative record index (e.g. dbALL) falls back to row 0
        if ptr.record < 0:
            ptr.record = 0
        self._dbptr = ptr

    def __getitem__(self, key):
        # getv returns a tuple; unwrap the single value
        return self._dbptr.getv(key)[0]

    def __setitem__(self, key, value):
        self._dbptr.putv(key, value)

    def __len__(self):
        # Number of records in the underlying view, not number of fields
        return self._dbptr.nrecs()
def __getitem__(self, index):
    """
    Build a pointer to an individual record.
    Also supports negative indexing.
    """
    # Slices are expanded into a plain list of records
    if isinstance(index, slice):
        return [self[i] for i in xrange(*index.indices(len(self)))]
    if not isinstance(index, int):
        raise TypeError("Use an int or a slice to get records")
    n = len(self)
    # Normalize a valid negative index to its positive equivalent
    if -n <= index < 0:
        index += n
    if not 0 <= index < n:
        raise ValueError("Index out of range")
    rec_ptr = Dbptr(self._ptr)
    rec_ptr[3] = index  # element 3 of a Dbptr is the record number
    return Dbtuple(rec_ptr)
def test_connection(self):
    """Agile-type Connection Constructor test"""
    conn = Connection(self.dsn)
    # Test object was built to spec
    for attr in ('_dbptr', 'CONVERT_NULL', 'cursor_factory', 'row_factory',
                 '__init__', '__enter__', '__exit__', 'close', 'cursor'):
        self.assertTrue(hasattr(conn, attr))
    # Test we are connected to the DB
    dbptr = Dbptr(getattr(conn, '_dbptr'))
    self.assertNotEqual(dbptr.query('dbDATABASE_COUNT'), 0)
    # ...and that close() really closes it
    conn.close()
    self.assertEqual(dbptr.query('dbDATABASE_COUNT'), 0)
def _open(database, perm='r', **kwargs):
    """
    Return a pointer to an open database from a string or Dbptr.
    Any keyword arguments not for dbopen are passed to dblookup
    """
    if isinstance(database, Dbptr):
        ptr = Dbptr(database)
    elif isinstance(database, str):
        ptr = dbopen(database, perm=perm)
    else:
        raise TypeError("Input pointer or string of valid database")
    # Remaining kwargs select a table/view via dblookup
    return dblookup(ptr, **kwargs) if kwargs else ptr
def open_db_or_string(database, perm='r'):
    '''
    Check if a variable is a valid db or a string

    Returns a (pointer, opened) tuple: a pointer to an open db, and a flag
    that is True only if this function opened the database itself (so the
    caller knows whether it is responsible for closing it).
    Raises TypeError for any other input.
    '''
    # BUG FIX: 'opened' was only assigned in the string branch, so passing
    # a Dbptr raised NameError at the return. Initialize it up front.
    opened = False
    if isinstance(database, Dbptr):
        ptr = Dbptr(database)
    elif isinstance(database, str):
        ptr = dbopen(database, perm)
        opened = True
    else:
        raise TypeError(
            "Input must be a Dbptr or string of a valid database path")
    return ptr, opened
def dbloc_source_db(db, pointer=True):
    """
    Checks if you are in a dbloc2 'trial' db and returns the source one if
    you are, otherwise returns the same Dbptr. This is for running
    interactive scripts lauched from dbloc2 and writing to a non-volitile
    original db.

    INPUT: Dbptr of current temp database in dbloc2
    OUTPUT: Dbptr to database that dbloc2 is using (or its name string,
            if pointer=False)
    """
    try:
        from antelope.stock import pfget
    except ImportError:
        # newer Antelope releases renamed pfget -> pfread
        from antelope.stock import pfread as pfget
    db = Dbptr(db, perm='r+')
    dbname = db.query('dbDATABASE_NAME')
    # Read dbloc2's parameter file to learn its temp-db naming scheme
    pf_settings = pfget('dbloc2')
    pfdef = pf_settings['Define']
    tempdb = pfdef['Temporary_db']
    workdir = pfdef['Work_dir']
    dblocdb = os.path.join(workdir, tempdb)  # NOTE(review): computed but unused
    if dbname.endswith(tempdb):
        # path of trial db from dbloc2
        dbcwd = os.path.dirname(dbname)
        # relative name of 1st db in 'trial' database decriptor file
        # (py2 str.translate: second arg is a set of chars to delete)
        dbpath0 = db.query('dbDBPATH').split(':')[0].translate(None, '{}')
        # full absolute path database name to source
        dbname = os.path.abspath(os.path.join(dbcwd, dbpath0))
        db.close()
        db = Dbptr(dbname, perm='r+')
    if pointer:
        return db
    else:
        # caller wants a name, not an open handle -- close what we opened
        db.close()
        return dbname
def get_first_motions(dbname, orid=None):
    """
    Port of Gabe/Mark dbprocess for getting info to pass to an FM calulator

    Right now, gets origin, arrival info, and joins wfdisc for filenames to
    the waveform

    :param dbname: database name string or Dbptr
    :param orid: origin id to select
    :returns: Dbptr view of P arrivals with first-motion picks, sorted by
              arrival time
    """
    db = Dbptr(dbname)
    # Build the view: origin -> origerr/assoc/arrival joins, keep only
    # P-phase arrivals that carry a first-motion code, then join wfdisc
    # so each row references its waveform file
    db = dbprocess(db, ['dbopen origin',
                        'dbsubset orid=={0}'.format(orid),
                        'dbjoin origerr',
                        'dbjoin assoc',
                        'dbjoin arrival',
                        'dbsubset iphase =~ /.*[Pp].*/',
                        'dbsubset fm =~ /.*[UuCcDdRr.].*/',
                        'dbjoin wfdisc',
                        'dbsubset chan==wfdisc.chan',
                        'dbsort arrival.time'])
    # Previously-used steps kept for reference:
    #'dbjoin -o affiliation', 'dbjoin -o site',
    #
    #'dbsubset (ondate <= time)',
    #'dbsubset (time <= offdate) || (offdate == -1)']
    #)
    return db
def azimuth(lat1, lon1, lat2, lon2):
    '''
    Returns the azimuth between two geographical points.

    Arguments:
    lat1 - geographical latitude of point A
    lon1 - geographical longitude of point A
    lat2 - geographical latitude of point B
    lon2 - geographical longitude of point B

    Returns:
    Returns the azimuth between point A and point B in degrees.

    Example:
    In [1]: import antpy
    In [2]: antpy.azimuth(45.45, -75.7, 32.7, -117.17)
    Out[2]: 262.80443927342213
    '''
    # Hand the geometry off to the Datascope expression calculator
    expr = 'azimuth(%f, %f, %f, %f)' % (lat1, lon1, lat2, lon2)
    return Dbptr().ex_eval(expr)
def dbloc_source_db(db, pointer=True):
    """
    Checks if you are in a dbloc2 'trial' db and returns the source one if
    you are, otherwise returns the same Dbptr. This is for running
    interactive scripts lauched from dbloc2 and writing to a non-volitile
    original db.

    INPUT: Dbptr of current temp database in dbloc2
    OUTPUT: Dbptr to database that dbloc2 is using (or its name string,
            if pointer=False)
    """
    try:
        from antelope.stock import pfget
    except ImportError:
        # newer Antelope releases renamed pfget -> pfread
        from antelope.stock import pfread as pfget
    db = Dbptr(db, perm="r+")
    dbname = db.query("dbDATABASE_NAME")
    # Read dbloc2's parameter file to learn its temp-db naming scheme
    pf_settings = pfget("dbloc2")
    pfdef = pf_settings["Define"]
    tempdb = pfdef["Temporary_db"]
    workdir = pfdef["Work_dir"]
    dblocdb = os.path.join(workdir, tempdb)  # NOTE(review): computed but unused
    if dbname.endswith(tempdb):
        # path of trial db from dbloc2
        dbcwd = os.path.dirname(dbname)
        # relative name of 1st db in 'trial' database decriptor file
        # (py2 str.translate: second arg is a set of chars to delete)
        dbpath0 = db.query("dbDBPATH").split(":")[0].translate(None, "{}")
        # full absolute path database name to source
        dbname = os.path.abspath(os.path.join(dbcwd, dbpath0))
        db.close()
        db = Dbptr(dbname, perm="r+")
    if pointer:
        return db
    else:
        # caller wants a name, not an open handle -- close what we opened
        db.close()
        return dbname
def readANTELOPE(database, station=None, channel=None, starttime=None, endtime=None):
    """
    Reads a portion of a Antelope wfdisc table to a Stream.

    Attempts to return one Trace per line of the 'wfdisc' view passed.
    Additionally, will filter and cut with respect to any of the fields
    in the primary key IF specified. (sta chan time::endtime)

    NOTE: Currently MUST have both times (start/end) or neither.
    the returned Traces will have a new attribute, 'db'

    :type database: string or antelope.datascope.Dbptr
    :param database: Antelope database name or pointer
    :type station: string
    :param station: Station expression to subset
    :type channel: string
    :param channel: Channel expression to subset
    :type starttime: :class: `~obspy.core.utcdatetime.UTCDateTime`
    :param starttime: Desired start time
    :type endtime: :class: `~obspy.core.utcdatetime.UTCDateTime`
    :param endtime: Desired end time
    :rtype: :class: `~obspy.core.stream.Stream'
    :return: Stream with one Trace for each row of the database view

    .. rubric:: Example

    >>> st = readANTELOPE('/opt/antelope/example/db', station='TOLO',
    ...                   channel='LH.',
    ...                   starttime=UTCDateTime(2008,6,13),
    ...                   endtime=UTCDateTime(2008,6,14))
    >>> print(st)
    6 Trace(s) in Stream:
    XA.TOL0..LHE | 2008-06-12T23:59:59.640000Z - 2008-06-13T00:04:11.640000Z | 1.0 Hz, 253 samples
    ...
    """
    from obspy.core import read, Stream, UTCDateTime
    if isinstance(database, Dbptr):
        db = Dbptr(database)
        db = db.lookup(table="wfdisc")
    else:
        raise TypeError("Must input a string or pointer to a valid database")
    # Optional primary-key subsets
    if station is not None:
        db = db.subset("sta=~/{0}/".format(station))
    if channel is not None:
        db = db.subset("chan=~/{0}/".format(channel))
    if starttime is not None and endtime is not None:
        ts = starttime.timestamp
        te = endtime.timestamp
        # keep any row that overlaps [ts, te]
        db = db.subset("endtime > {0} && time < {1}".format(ts, te))
    else:
        # NOTE(review): with only one (or neither) time given, ts/te may be
        # None and the comparisons below rely on py2's permissive ordering
        ts = starttime
        te = endtime
    # BUG FIX: was 'is not 0' -- an identity test on an int, which is not
    # a reliable value comparison. Use != to check the record count.
    assert db.nrecs() != 0, "No records for given time period"
    st = Stream()
    for db.record in range(db.nrecs()):
        fname = db.filename()
        dbr = RowPointerDict(db)
        t0 = UTCDateTime(dbr["time"])
        t1 = UTCDateTime(dbr["endtime"])
        # Trim each trace to the requested window
        if dbr["time"] < ts:
            t0 = starttime
        if dbr["endtime"] > te:
            t1 = endtime
        if os.path.exists(fname):
            _st = read(fname, starttime=t0, endtime=t1)  # add format?
            _st = _st.select(station=dbr["sta"], channel=dbr["chan"])  # not location aware
            # _st[0].db = dbr
            # Negative calib flips polarity
            if dbr["calib"] < 0:
                _st[0].data *= -1
            st += _st
    # Close what we opened, BUT garbage collection may take care of this:
    # if you have an open pointer but pass db name as a string, global
    # use of your pointer won't work if this is uncommented:
    #
    # if isinstance(database,str):
    #     db.close()
    return st
def __init__(self, dbptr=None, record=None):
    ptr = Dbptr(dbptr)
    if record is not None:
        ptr.record = record
    # A negative record index (e.g. dbALL) defaults to the first row
    if ptr.record < 0:
        ptr.record = 0
    self._dbptr = ptr
class Relation(list):
    """
    A pointer to a DB view, that acts like a Python list of Dbtuple's.

    This is a very basic object-relational-mapper for an Antelope Datascope
    database using the existing Dbptr class.

    No data (not even individual record pointers) are stored. The object
    acts like a list but the entire contents are just a pointer to an open
    db. A request for one list item returns a Dbtuple, and a slice returns
    a list of Dbtuple's. This is essentially the exact behavior of a Python
    list. You're welcome.

    Methods
    ----------
    append(**kwargs)
        Add row to relation, populate with values from keyword arguments
    column(field)
        Get a list of values for each record for a field name
    get_column(field)
        A version of the 'column' method with Datascope NULL awareness,
        returns a python None where a value is NULL for its field

    .. rubric:: Example

    >>> db = dbopen('/opt/antelope/data/db/demo/demo')
    >>> db.lookup(table='site')
    >>> sites = Relation(db)
    >>> len(sites)
    13
    >>> print sites[0].sta, sites[0].lat, sites[0].lon
    HIA 49.2667 119.7417
    >>> print sites[10].sta, sites[10].lat, sites[10].lon
    TKM 42.8601 75.3184
    """
    # the only data stored locally
    _ptr = Dbptr()

    def __init__(self, database=None, **kwargs):
        """
        Sets the pointer. Must be a pointer to a table or view.

        :type dbv: antelope.datascope.Dbptr
        :param dbv: Open pointer to an Antelope database view or table
        """
        super(Relation, self).__init__()
        self._ptr = _open(database)
        # A pointer whose table field is dbALL is not a usable table/view
        if self._ptr.table == dbALL:
            raise ValueError("Dbptr doesn't contain table or view")

    def __getitem__(self, index):
        """
        Build a pointer to an individual record.
        Also supports negative indexing.
        """
        if isinstance(index, int):
            if 0 <= index < len(self):
                dbp = Dbptr(self._ptr)
                dbp[3] = index  # element 3 of a Dbptr is the record number
                return Dbtuple(dbp)
            elif -len(self) <= index < 0:
                # negative index counts back from the end
                dbp = Dbptr(self._ptr)
                dbp[3] = len(self) + index
                return Dbtuple(dbp)
            else:
                raise ValueError("Index out of range")
        elif isinstance(index, slice):
            return [self[x] for x in xrange(*index.indices(len(self)))]
        else:
            raise TypeError("Use an int or a slice to get records")

    def __getslice__(self, i, j):
        """Override builtin list slicing"""
        return self.__getitem__(slice(i, j))

    def __len__(self):
        """Return number of items in the view"""
        return self._ptr.nrecs()

    def __iter__(self):
        """
        Produce a generator which gives the next item in the list when
        called.
        """
        # Allows class to act like a list iterator in a for loop,
        # for example, even though it is empty.
        for index in xrange(len(self)):
            yield self.__getitem__(index)

    def __str__(self):
        """
        Print out Antelope compatible text for a table/view
        """
        return '\n'.join([str(dbr) for dbr in self])

    def _new(self):
        """
        Add null row to a table

        Returns Dbtuple of the new record row
        """
        record = self._ptr.addnull()
        return self[record]

    def append(self, **kwargs):
        """
        Add a new null row and populate it with keyword arguements
        """
        dbt = self._new()
        for key, value in kwargs.iteritems():
            dbt.set(key, value)

    def index(self, query_string):
        """
        Return index of a tuple which matches an item in the list.

        USE AT YOUR OWN RISK!
        This passes the query to dbfind, so dbfind rules apply:
        - Datascope db syntax
        - Returns first record matching query

        For now, acts like list 'index' and throws error if no match.
        * Could return None in future
        """
        _index = self._ptr.find(query_string)
        if 0 <= _index < len(self):
            return _index
        else:
            raise ValueError("No items found in list")

    def column(self, field):
        """
        A column of the same field from each Dbtuple record
        """
        # silently skips records whose table lacks the field
        return [dbr[field] for dbr in self if field in dbr.TABLE_FIELDS]

    def get_column(self, field):
        """
        A column of the same field from each Dbtuple using 'get'.

        Returns a python None if field is NULL value
        """
        return [dbr.get(field) for dbr in self if field in dbr.TABLE_FIELDS]
def DBPTR(self):
    """Copy of the current Dbptr"""
    ptr_copy = Dbptr(self._ptr)
    return ptr_copy
class AttribDbptr(list):
    """
    A pointer to a DB view, that acts like a Python list of DbrecordPtr's.

    This is a very basic object-relational-mapper for an Antelope Datascope
    database using the existing Dbptr class.

    No data (not even individual record pointers) are stored. The object
    acts like a list (similar to Dbview and DbviewPtr) but the entire
    contents are just a pointer to an open db. When accessing items, will
    return a DbrecordPtr, by building a pointer, rather than actually
    storing them in the list. A request for one list item returns a
    DbrecordPtr, and a slice returns a list of DbrecordPtrs. This is
    essentially the exact behavior of a Python list. You're welcome.

    Good for large datasets that would take up a lot of memory to load the
    whole table or even millions of DbrecordPtr's (which are holding one
    Dbptr each) into RAM.

    Attributes
    ----------
    Ptr - the actual Dbptr

    .. rubric:: Example

    >>> db = dbopen('/opt/antelope/data/db/demo/demo')
    >>> db.lookup(table='site')
    >>> dbptr = AttribDbptr(db)
    >>> len(dbptr)
    13
    >>> print dbptr[0].sta, dbptr[0].lat, dbptr[0].lon
    HIA 49.2667 119.7417
    >>> print dbptr[10].sta, dbptr[10].lat, dbptr[10].lon
    TKM 42.8601 75.3184
    """
    Ptr = Dbptr()    # the only data stored locally
    _opened = False  # True if db was opened by __init__()

    def __init__(self, database=None, **kwargs):
        """
        Sets the pointer.

        :type dbv: antelope.datascope.Dbptr
        :param dbv: Open pointer to an Antelope database view or table
        """
        super(AttribDbptr, self).__init__()
        if isinstance(database, Dbptr):
            self.Ptr = Dbptr(database)
        elif isinstance(database, str):
            db = dbopen(database, 'r')
            self.Ptr = db
            self._opened = True
        else:
            raise TypeError("Input pointer or string of valid database")
        if kwargs:
            self.Ptr = dblookup(self.Ptr, **kwargs)
        # otherwise returns empty list

    def __getitem__(self, index):
        """
        Build a pointer to an individual record.
        Also supports negative indexing.
        """
        if isinstance(index, int):
            if 0 <= index < len(self):
                dbp = Dbptr(self.Ptr)
                dbp[3] = index  # element 3 of a Dbptr is the record number
                return DbrecordPtr(dbp)
            elif -len(self) <= index < 0:
                # negative index counts back from the end
                dbp = Dbptr(self.Ptr)
                dbp[3] = len(self) + index
                return DbrecordPtr(dbp)
            else:
                raise ValueError("Index out of range")
        elif isinstance(index, slice):
            #raise NotImplementedError("You just passed a slice")
            return [self[x] for x in xrange(*index.indices(len(self)))]
        else:
            raise TypeError("Use an int or a slice to get records")

    def __getslice__(self, i, j):
        """Override builtin list slicing"""
        return self.__getitem__(slice(i, j))

    def __len__(self):
        """Number of items in the view"""
        return self.Ptr.nrecs()

    def __iter__(self):
        """
        Produces a generator which gives the next item in the list when
        called.

        Allows class to act like a list iterator in a for loop,
        for example, even though it is empty.
        """
        for index in xrange(len(self)):
            yield self.__getitem__(index)

    # Convenience methods
    def col(self, field):
        """A column of the same field from each Dbrecord"""
        # silently skips records whose table lacks the field
        return [dbr[field] for dbr in self if field in dbr.Fields]

    def acol(self, field):
        """A numpy array of the same field from each Dbrecord"""
        return array(self.col(field))
def test_rowcount(self):
    """Cursor rowcount must match the view's record count"""
    self.curs.execute('lookup', {'table': 'origin'})
    dbptr = Dbptr(self.curs._dbptr)
    self.assertEqual(dbptr.query('dbRECORD_COUNT'), self.curs.rowcount)
def eventfocalmech2db(event=None, database=None):
    """
    Write the preferred HASH solution to Datascope database.

    Writes to 'fplane', 'predmech' and 'predarr' tables

    :param event: obspy-style Event with a preferred focal mechanism
    :param database: database name (or pointer) to write to
    """
    focm = event.preferred_focal_mechanism()
    o = focm.triggering_origin_id.getReferredObject()
    plane1 = focm.nodal_planes.nodal_plane_1
    plane2 = focm.nodal_planes.nodal_plane_2
    T = focm.principal_axes.t_axis
    P = focm.principal_axes.p_axis
    orid = int(o.creation_info.version)
    db = Dbptr(database, perm='r+')
    try:
        # Use the original db if in a dbloc2 'tmp/trial' db
        #db = dbloc_source_db(db)
        # save solution as a new mechid
        mechid = db.nextid('mechid')
        # in fplane...
        dbfpln = dblookup(db, table='fplane')
        dbfpln.record = dbfpln.addnull()
        dbfpln.putv('orid', orid,
                    'str1', round(plane1.strike, 1),
                    'dip1', round(plane1.dip, 1),
                    'rake1', round(plane1.rake, 1),
                    'str2', round(plane2.strike, 1),
                    'dip2', round(plane2.dip, 1),
                    'rake2', round(plane2.rake, 1),
                    'taxazm', round(T.azimuth, 1),
                    'taxplg', round(T.plunge, 1),
                    'paxazm', round(P.azimuth, 1),
                    'paxplg', round(P.plunge, 1),
                    'algorithm', focm.method_id.resource_id,
                    'auth', focm.creation_info.author,
                    'mechid', mechid,
                    )
        dbpmec = dblookup(db, table='predmech')
        dbparr = dblookup(db, table='predarr')
        for av in o.arrivals:
            pk = av.pick_id.getReferredObject()
            # BUG FIX: was "pk.polarity is 'positive'" -- identity comparison
            # of strings is not guaranteed to hold; compare by value.
            if pk.polarity == 'positive':
                fm = 'U'
            elif pk.polarity == 'negative':
                fm = 'D'
            else:
                continue
            arid = int(av.creation_info.version)
            # ..and predmech
            dbpmec.record = dbpmec.addnull()
            dbpmec.putv('arid', arid,
                        'orid', orid,
                        'mechid', mechid,
                        'fm', fm,
                        )
            # if there are entries for this arrival already, write over it...
            dbparr.record = dbparr.find('arid=={0} && orid=={1}'.format(arid, orid))
            if dbparr.record < 0:
                dbparr.record = dbparr.addnull()
            dbparr.putv('arid', arid,
                        'orid', orid,
                        'esaz', av.azimuth,
                        'dip', av.takeoff_angle,
                        )
    except Exception as e:
        raise e
    finally:
        # always release the db handle, even on error
        db.close()
def input(hp, dbname, evid=None, orid=None):
    '''Input HASH data from Antelope database

    This will accept a database name OR Antelope Dbptr, and either an ORID,
    or an EVID, in which case the 'prefor' ORID is looked up and used.

    Inputs
    ------
    dbname : str or antelope.datascope.Dbptr
    orid : int of ORID
    evid : int of EVID
    '''
    db = Dbptr(dbname)
    if orid is None:
        # Resolve the preferred origin from the event table
        dbv = dbprocess(db, ['dbopen event', 'dbsubset evid == ' + str(evid)])
        orid = RowPointerDict(dbv)['prefor']
    # One row per P arrival at a station active at origin time
    db = dbprocess(db, ['dbopen origin',
                        'dbsubset orid == ' + str(orid),
                        'dbjoin origerr',
                        'dbjoin assoc',
                        'dbjoin arrival',
                        'dbjoin affiliation',
                        'dbjoin site',
                        'dbsubset iphase =~ /.*[Pp].*/',
                        'dbsubset (ondate <= time)',
                        'dbsubset (time <= offdate) || (offdate == -1)']
                   )
    ph = RowPointerDict(db, record=0)
    hp.nrecs = len(ph)
    if len(ph) <= 0:
        raise ValueError("No picks for this ORID: {0}".format(orid))
    # Event-level fields come from the first row (identical on every row)
    hp.tstamp = ph['origin.time']
    hp.qlat = ph['origin.lat']
    hp.qlon = ph['origin.lon']
    hp.qdep = ph['origin.depth']
    hp.qmag = ph['origin.ml']
    hp.icusp = ph['origin.orid']
    hp.seh = ph['origerr.smajax']
    hp.sez = ph['origerr.sdepth']
    # flat-earth longitude correction at this latitude
    aspect = np.cos(hp.qlat / degrad)  # convert using python later.

    # The index 'k' is deliberately non-Pythonic to deal with the fortran
    # subroutines which need to be called and the structure of the original
    # HASH code. May be able to update with a rewrite... YMMV
    k = 0
    for n in range(len(ph)):
        ph = RowPointerDict(db, record=n)
        hp.sname[k] = ph['sta']
        hp.snet[k] = ph['net']
        hp.scomp[k] = ph['chan']
        hp.pickonset[k] = 'I'   # onset hard-coded to impulsive here
        hp.pickpol[k] = ph['fm']
        hp.arid[k] = ph['arid']
        flat, flon, felv = ph['site.lat'], ph['site.lon'], ph['site.elev']
        hp.esaz[k] = ph['esaz']
        # dist @ azi, get from db OR obspy or another python mod (antelope)
        # could do this on WGS84
        dx = (flon - hp.qlon) * 111.2 * aspect
        dy = (flat - hp.qlat) * 111.2
        dist = np.sqrt(dx**2 + dy**2)
        qazi = 90. - np.arctan2(dy, dx) * degrad
        if (qazi < 0.):
            qazi = qazi + 360.
        if (dist > hp.delmax):
            continue
        # Map first-motion codes to +/-1 polarity; skip anything else
        if (hp.pickpol[k] in 'CcUu'):
            hp.p_pol[k] = 1
        elif (hp.pickpol[k] in 'RrDd'):
            hp.p_pol[k] = -1
        else:
            continue
        # save them for other functions -MCW
        hp.dist[k] = dist
        hp.qazi[k] = qazi
        hp.flat[k] = flat
        hp.flon[k] = flon
        hp.felv[k] = felv
        # quality: 0 = impulsive onset, 1 = anything else
        if (hp.pickonset[k] in 'Ii'):
            hp.p_qual[k] = 0
        else:
            hp.p_qual[k] = 1
        # polarity check in original code... doesn't work here
        #hp.p_pol[k] = hp.p_pol[k] * hp.spol
        k += 1
    #npol = k - 1
    hp.npol = k  # k is zero indexed in THIS loop
    db.close()
def main(): from argparse import ArgumentParser # Get command line args parser = ArgumentParser() parser.add_argument("dbin", help="Input database") parser.add_argument("dbout", help="Output database", nargs='?') parser.add_argument("-p", "--plot", help="Plot result", action='store_true') parser.add_argument("-l", "--loc", help="dbloc2 mode", action='store_true') parser.add_argument("-i", "--image", help="Save image with db", action='store_true') parser.add_argument("--pf", help="Parameter file") group = parser.add_mutually_exclusive_group() #required=True) group.add_argument("--evid", help="Event ID", type=int) group.add_argument("--orid", help="Origin ID", type=int) args = parser.parse_args() # Special 'dbloc2' settings if args.loc: from antelope.datascope import Dbptr # alter args b/c dbloc2 passes a db and a row number args.dbin = args.dbin.rstrip('.origin') db = Dbptr(args.dbin) db = db.lookup(table='origin') db.record = int(args.dbout) args.orid = db.getv('orid')[0] args.dbout = dbloc_source_db(args.dbin, pointer=False) args.plot = True # force plot args.image = True # force saving image to db folder # Now that we have a save location from command line args, # make a function to save to that database. 
The plotter is I/O # agnostic, it will accept a function to save anything anyhow anywhichway # def save_plot_to_db(fmplotter, dbname=args.dbout, dump_bitmap=args.image): focal_mech = fmplotter.event.focal_mechanisms[fmplotter._fm_index] if focal_mech is not fmplotter.event.preferred_focal_mechanism(): fmplotter.event.preferred_focal_mechanism_id = focal_mech.resource_id.resource_id # Save to db eventfocalmech2db(event=fmplotter.event, database=dbname) if dump_bitmap: vers = fmplotter.event.preferred_origin().creation_info.version dbdir = os.path.dirname(dbname) _dump_bitmap(figure=fmplotter.fig, directory=dbdir, uid=vers) # Run HASH hp = dbhash_run(args.dbin, orid=args.orid, pf=args.pf) # Launch plotter or spit out solution if args.plot: from hashpy.plotting.focalmechplotter import FocalMechPlotter ev = hp.output(format="OBSPY") p = FocalMechPlotter(ev, save=save_plot_to_db) else: # quick orid/strike/dip/rake line print hp.output() p = 0 if args.dbout: db = hp.output(format="ANTELOPE", dbout=args.dbout) # Done, return HashPype and/or FocalMechPlotter for debugging return hp, p
def input(hp, dbname, evid=None, orid=None):
    """Input HASH data from Antelope database

    This will accept a database name OR Antelope Dbptr, and either an ORID,
    or an EVID, in which case the 'prefor' ORID is looked up and used.

    Inputs
    ------
    dbname : str or antelope.datascope.Dbptr
    orid : int of ORID
    evid : int of EVID
    """
    db = Dbptr(dbname)
    if orid is None:
        # Resolve the preferred origin from the event table
        dbv = dbprocess(db, ["dbopen event", "dbsubset evid == " + str(evid)])
        orid = RowPointerDict(dbv)["prefor"]
    # One row per P arrival at a station active at origin time
    db = dbprocess(
        db,
        [
            "dbopen origin",
            "dbsubset orid == " + str(orid),
            "dbjoin origerr",
            "dbjoin assoc",
            "dbjoin arrival",
            "dbjoin affiliation",
            "dbjoin site",
            "dbsubset iphase =~ /.*[Pp].*/",
            "dbsubset (ondate <= time)",
            "dbsubset (time <= offdate) || (offdate == -1)",
        ],
    )
    ph = RowPointerDict(db, record=0)
    hp.nrecs = len(ph)
    if len(ph) <= 0:
        raise ValueError("No picks for this ORID: {0}".format(orid))
    # Event-level fields come from the first row (identical on every row)
    hp.tstamp = ph["origin.time"]
    hp.qlat = ph["origin.lat"]
    hp.qlon = ph["origin.lon"]
    hp.qdep = ph["origin.depth"]
    hp.qmag = ph["origin.ml"]
    hp.icusp = ph["origin.orid"]
    hp.seh = ph["origerr.smajax"]
    hp.sez = ph["origerr.sdepth"]
    # flat-earth longitude correction at this latitude
    aspect = np.cos(hp.qlat / degrad)  # convert using python later.

    # The index 'k' is deliberately non-Pythonic to deal with the fortran
    # subroutines which need to be called and the structure of the original
    # HASH code. May be able to update with a rewrite... YMMV
    k = 0
    for n in range(len(ph)):
        # Extract pick data from the db
        ph = RowPointerDict(db, record=n)
        hp.sname[k] = ph["sta"]
        hp.snet[k] = ph["net"]
        hp.scomp[k] = ph["chan"]
        hp.pickonset[k] = ph["qual"].strip(".")
        hp.pickpol[k] = ph["fm"]
        hp.arid[k] = ph["arid"]
        flat, flon, felv = ph["site.lat"], ph["site.lon"], ph["site.elev"]
        hp.esaz[k] = ph["esaz"]

        # Distance and Azimuth filtering
        dx = (flon - hp.qlon) * 111.2 * aspect
        dy = (flat - hp.qlat) * 111.2
        dist = np.sqrt(dx ** 2 + dy ** 2)
        qazi = 90.0 - np.arctan2(dy, dx) * degrad
        if qazi < 0.0:
            qazi = qazi + 360.0
        if dist > hp.delmax:
            continue

        # Try to get an up/down polarity (empty string -> skip)
        if not hp.pickpol[k].lower():
            continue
        if hp.pickpol[k].lower() in "cu":
            hp.p_pol[k] = 1
        elif hp.pickpol[k].lower() in "dr":
            hp.p_pol[k] = -1
        else:
            continue

        # Save them for other functions
        hp.dist[k] = dist
        hp.qazi[k] = qazi
        hp.flat[k] = flat
        hp.flon[k] = flon
        hp.felv[k] = felv

        # Try to get the onset, impulsive if none
        # (0 = impulsive, 1 = emergent/weak)
        if hp.pickonset[k].lower() == "i":
            hp.p_qual[k] = 0
        elif hp.pickonset[k].lower() == "e":
            hp.p_qual[k] = 1
        elif hp.pickonset[k].lower() == "w":
            hp.p_qual[k] = 1
        else:
            hp.p_qual[k] = 0

        # polarity check in original code... doesn't work here
        # hp.p_pol[k] = hp.p_pol[k] * hp.spol
        k += 1
    hp.npol = k  # k is zero indexed in THIS loop
    db.close()
def eventfocalmech2db(event=None, database=None):
    """
    Write the preferred HASH solution to Datascope database.

    Writes to 'fplane', 'predmech' and 'predarr' tables

    :param event: obspy-style Event with a preferred focal mechanism
    :param database: database name (or pointer) to write to
    """
    focm = event.preferred_focal_mechanism()
    o = focm.triggering_origin_id.getReferredObject()
    plane1 = focm.nodal_planes.nodal_plane_1
    plane2 = focm.nodal_planes.nodal_plane_2
    T = focm.principal_axes.t_axis
    P = focm.principal_axes.p_axis
    orid = int(o.creation_info.version)

    db = Dbptr(database, perm="r+")

    try:
        # Use the original db if in a dbloc2 'tmp/trial' db
        # db = dbloc_source_db(db)
        # save solution as a new mechid
        mechid = db.nextid("mechid")
        # in fplane...
        dbfpln = dblookup(db, table="fplane")
        dbfpln.record = dbfpln.addnull()
        dbfpln.putv(
            "orid", orid,
            "str1", round(plane1.strike, 1),
            "dip1", round(plane1.dip, 1),
            "rake1", round(plane1.rake, 1),
            "str2", round(plane2.strike, 1),
            "dip2", round(plane2.dip, 1),
            "rake2", round(plane2.rake, 1),
            "taxazm", round(T.azimuth, 1),
            "taxplg", round(T.plunge, 1),
            "paxazm", round(P.azimuth, 1),
            "paxplg", round(P.plunge, 1),
            "algorithm", focm.method_id.resource_id,
            "auth", focm.creation_info.author,
            "mechid", mechid,
        )
        dbpmec = dblookup(db, table="predmech")
        dbparr = dblookup(db, table="predarr")
        for av in o.arrivals:
            pk = av.pick_id.getReferredObject()
            # BUG FIX: was 'pk.polarity is "positive"' -- identity comparison
            # of strings is not guaranteed to hold; compare by value.
            if pk.polarity == "positive":
                fm = "U"
            elif pk.polarity == "negative":
                fm = "D"
            else:
                continue
            arid = int(av.creation_info.version)
            # ..and predmech
            dbpmec.record = dbpmec.addnull()
            dbpmec.putv("arid", arid, "orid", orid, "mechid", mechid, "fm", fm)
            # if there are entries for this arrival already, write over it...
            dbparr.record = dbparr.find("arid=={0} && orid=={1}".format(arid, orid))
            if dbparr.record < 0:
                dbparr.record = dbparr.addnull()
            dbparr.putv("arid", arid, "orid", orid, "esaz", av.azimuth, "dip", av.takeoff_angle)
    except Exception as e:
        raise e
    finally:
        # always release the db handle, even on error
        db.close()
class Dbtuple(dict, object):
    """
    Lazy, pointer-backed view of one Datascope db record.

    Holds the pointer to a db record, NOT the data; accessed the same way
    as Dbrecord, but the pointer must remain open. Useful for large
    datasets that may have trouble fitting in memory: only the pointer is
    stored, and every attribute/item access queries the open db through it.
    """
    # Only holds one thing in the Python namespace: the Dbptr object.
    # NOTE(review): this is a class-level default, replaced per-instance
    # in __init__; it exists so __setattr__/__getattr__ always find _ptr.
    _ptr = Dbptr()

    # Built-in queries for useful info.
    @property
    def TABLE_NAME(self):
        return self._ptr.query('dbTABLE_NAME')  # string of what table record came from

    @property
    def PRIMARY_KEY(self):
        return self._ptr.query('dbPRIMARY_KEY')  # tuple of strings of fields in primary key

    @property
    def TABLE_FIELDS(self):  # tuple of fields from database record
        return self._ptr.query('dbTABLE_FIELDS')

    @property
    def Fields(self):  # May go away in future
        # Sorted copy of TABLE_FIELDS.
        flist = list(self.TABLE_FIELDS)
        flist.sort()
        return flist

    @property
    def _ptrNULL(self):
        """
        Return a pointer positioned at the NULL record of this table.

        Used by _null() to look up per-field NULL sentinel values.
        """
        nullptr = Dbptr(self._ptr)
        nullptr.record = dbNULL
        return nullptr

    def __init__(self, db=None):
        """
        Build from an open Dbptr positioned at a single record.

        Raises ValueError if the pointer's record is dbALL (one record
        only), and NotImplementedError when no pointer is given.
        """
        if db:
            if db.record == dbALL:
                raise ValueError("Rec # is 'dbALL', one record only, please.")
            self._ptr = Dbptr(db)
        else:
            self._ptr = Dbptr()
            raise NotImplementedError("No empty constructor allowed here yet...")

    def __getattr__(self, field):
        """
        Look up an attribute as a field of the db record via the pointer.
        """
        return self._ptr.getv(field)[0]

    def __setattr__(self, field, value):
        """Try to set a db field to the given value.

        You must have opened your db with r+ permissions!
        """
        # Special case: assigning '_ptr' itself must go through the normal
        # object machinery; everything else is written to the db record.
        if field == '_ptr':
            super(Dbtuple,self).__setattr__(field, value)
        else:
            # Could try to catch an ElogComplain in else, but the same
            # error comes up for read-only or a wrong field.
            if self._ptr.query('dbDATABASE_IS_WRITABLE'):
                self._ptr.putv(field, value)
            else:
                raise IOError("Database not opened with write permission!")

    # Dictionary powers activate: item access maps onto the same db
    # field lookups/writes as attribute access.
    __getitem__ = __getattr__
    __setitem__ = __setattr__

    def _null(self, field):
        """
        Return the NULL sentinel value for a given field.
        """
        return self._ptrNULL.getv(field)[0]

    def get(self, field):
        """Get a database value from the given field (NULL supported).

        If the value is the NULL value for that field, return a python None.
        """
        value = self.__getattr__(field)
        if value == self._null(field):
            value = None
        return value

    def set(self, field, value):
        """Set a database field to the given value (NULL supported).

        Setting a field to 'None' puts a NULL value in for that record field.
        """
        if value is None:
            value = self._null(field)
        self.__setattr__(field, value)

    def __repr__(self):
        """
        Useful representation - shows the table and primary key of the record.
        """
        start = "{0}('{1}' -> ".format(self.__class__.__name__, self.TABLE_NAME)
        # Build up a list containing the fields of the primary key.
        # Annoyingly, times have a '::' between them, so deal with that...
        mids = []
        for k in self.PRIMARY_KEY:
            if '::' in k:
                keyf = '::'.join([str(self.__getattr__(_k)) for _k in k.split('::')])
            else:
                keyf = str(self.__getattr__(k))
            mids.append(keyf)
        middle = ' '.join(mids)
        end = ")"
        return start+middle+end

    def __str__(self):
        """
        Print out record content as a string.

        SHOULD be the same as if you cat'ted a line from the table text file
        (each field rendered with its dbFIELD_FORMAT, space-separated).
        """
        db = Dbptr(self._ptr)
        formatting = ' '.join([db.query('dbFIELD_FORMAT') for db.field in range(len(self.TABLE_FIELDS))])
        fields = tuple([self.__getattr__(f) for f in self.TABLE_FIELDS])
        return formatting % fields
def output(hp, dbout=None, solution=0, schema="css3.0"):
    """Write the preferred HASH solution to a Datascope database.

    Writes the strike/dip/rake pair to 'fplane', the arids used for the
    mechanism to 'predmech', and the takeoff angles to 'predarr'.

    Input
    -----
    dbout : str or antelope.datascope.Dbptr to database
    solution : <STUB> int of desired solution.
    """
    from hashpy.doublecouple import DoubleCouple

    sol = solution
    couple = DoubleCouple([hp.str_avg[sol], hp.dip_avg[sol], hp.rak_avg[sol]])
    s1, d1, r1 = couple.plane1
    s2, d2, r2 = couple.plane2
    ax = couple.axis

    # Write into the caller's db when given, else a scratch db.
    db = Dbptr(dbout, perm="r+") if dbout is not None else dbtmp(schema)

    mechid = db.nextid("mechid")

    fplane = dblookup(db, table="fplane")
    fplane.record = fplane.addnull()
    fplane.putv(
        "orid", hp.icusp,
        "str1", round(s1, 1),
        "dip1", round(d1, 1),
        "rake1", round(r1, 1),
        "algorithm", "HASH",
        "mechid", mechid,
        "auth", "hashpy:" + hp.author,
        "str2", round(s2, 1),
        "dip2", round(d2, 1),
        "rake2", round(r2, 1),
        "taxazm", round(ax["T"]["azimuth"], 1),
        "taxplg", round(ax["T"]["dip"], 1),
        "paxazm", round(ax["P"]["azimuth"], 1),
        "paxplg", round(ax["P"]["dip"], 1),
    )

    predmech = dblookup(db, table="predmech")
    predarr = dblookup(db, table="predarr")
    for j in range(hp.npol):
        # Positive polarity -> up ('U'), anything else -> down ('D').
        fm = "U" if hp.p_pol[j] > 0 else "D"
        predmech.record = predmech.addnull()
        predmech.putv("arid", int(hp.arid[j]), "orid", hp.icusp,
                      "mechid", mechid, "fm", fm)
        predarr.record = predarr.addnull()
        predarr.putv("arid", int(hp.arid[j]), "orid", hp.icusp,
                     "esaz", hp.qazi[j], "dip", hp.p_the_mc[j, 0])
    return db
def readANTELOPE(database, station=None, channel=None, starttime=None, endtime=None):
    """
    Reads a portion of an Antelope wfdisc table to a Stream.

    Attempts to return one Trace per line of the 'wfdisc' view passed.
    Additionally, will filter and cut with respect to any of the fields
    in the primary key IF specified. (sta chan time::endtime)

    NOTE: Currently MUST have both times (start/end) or neither.

    :type database: string or antelope.datascope.Dbptr
    :param database: Antelope database name or pointer
    :type station: string
    :param station: Station expression to subset
    :type channel: string
    :param channel: Channel expression to subset
    :type starttime: :class: `~obspy.core.utcdatetime.UTCDateTime`
    :param starttime: Desired start time
    :type endtime: :class: `~obspy.core.utcdatetime.UTCDateTime`
    :param endtime: Desired end time
    :rtype: :class: `~obspy.core.stream.Stream'
    :return: Stream with one Trace for each row of the database view

    .. rubric:: Example

    >>> st = readANTELOPE('/opt/antelope/example/db', station='TOLO', channel='LH.',
                          starttime=UTCDateTime(2008,6,13), endtime=UTCDateTime(2008,6,14))
    >>> print(st)
    6 Trace(s) in Stream:
    XA.TOL0..LHE | 2008-06-12T23:59:59.640000Z - 2008-06-13T00:04:11.640000Z | 1.0 Hz, 253 samples
    XA.TOL0..LHE | 2008-06-13T00:04:12.640000Z - 2008-06-13T23:59:59.640000Z | 1.0 Hz, 86148 samples
    XA.TOL0..LHN | 2008-06-12T23:59:59.640000Z - 2008-06-13T00:04:11.640000Z | 1.0 Hz, 253 samples
    XA.TOL0..LHN | 2008-06-13T00:04:12.640000Z - 2008-06-13T23:59:59.640000Z | 1.0 Hz, 86148 samples
    XA.TOL0..LHZ | 2008-06-12T23:59:59.640000Z - 2008-06-13T00:04:21.640000Z | 1.0 Hz, 263 samples
    XA.TOL0..LHZ | 2008-06-13T00:04:22.640000Z - 2008-06-13T23:59:59.640000Z | 1.0 Hz, 86138 samples
    """
    from obspy.core import read, Stream, UTCDateTime

    if isinstance(database, Dbptr):
        db = Dbptr(database)
        db = db.lookup(table='wfdisc')
    else:
        # NOTE(review): the docstring advertises a database-name string as
        # well, but no string branch is present here -- TODO confirm whether
        # a dbopen() path was lost and restore it.
        raise TypeError("Must input a string or pointer to a valid database")

    if station is not None:
        db = db.subset('sta=~/{0}/'.format(station))
    if channel is not None:
        db = db.subset('chan=~/{0}/'.format(channel))
    if starttime is not None and endtime is not None:
        ts = starttime.timestamp
        te = endtime.timestamp
        db = db.subset('endtime > {0} && time < {1}'.format(ts, te))
    else:
        ts = starttime
        te = endtime
    # BUG FIX: 'is not 0' was an identity comparison with an int literal
    # (CPython-interning accident, SyntaxWarning on 3.8+); use '!=' instead.
    assert db.nrecs() != 0, "No records for given time period"

    st = Stream()
    for db.record in range(db.nrecs()):
        fname = db.filename()
        dbr = RowPointerDict(db)
        t0 = UTCDateTime(dbr['time'])
        t1 = UTCDateTime(dbr['endtime'])
        # BUG FIX: only clamp to the requested window when times were given;
        # previously ts/te could be None here and the '<' comparison raised
        # TypeError on Python 3.
        if ts is not None and dbr['time'] < ts:
            t0 = starttime
        if te is not None and dbr['endtime'] > te:
            t1 = endtime
        if os.path.exists(fname):
            _st = read(fname, starttime=t0, endtime=t1)  # add format?
            _st = _st.select(station=dbr['sta'], channel=dbr['chan'])  # not location aware
            #_st[0].db = dbr
            # Negative calib means reversed polarity: flip the samples.
            if dbr['calib'] < 0:
                _st[0].data *= -1
            st += _st
    # Close what we opened, BUT garbage collection may take care of this:
    # if you have an open pointer but pass db name as a string, global
    # use of your pointer won't work if this is uncommented:
    #
    #if isinstance(database,str):
    #    db.close()
    return st
def output(hp, dbout=None, solution=0, schema="css3.0"):
    '''Write the preferred HASH solution to Datascope database.

    This writes the strike, dip, rakes to 'fplane', arids used for a
    given mech in 'predmech' and the takeoffs in 'predarr'.

    Input
    -----
    dbout : str or antelope.datascope.Dbptr to database
    solution : <STUB> int of desired solution.
    '''
    from hashpy.doublecouple import DoubleCouple

    idx = solution
    dc = DoubleCouple([hp.str_avg[idx], hp.dip_avg[idx], hp.rak_avg[idx]])
    str1, dip1, rak1 = dc.plane1
    str2, dip2, rak2 = dc.plane2
    axes = dc.axis

    if dbout is None:
        # No target given: write into a scratch db with the requested schema.
        db = dbtmp(schema)
    else:
        db = Dbptr(dbout, perm='r+')

    mechid = db.nextid('mechid')

    fpln = dblookup(db, table='fplane')
    fpln.record = fpln.addnull()
    # Assemble the field/value pairs once, then write them in one call.
    fplane_fields = [
        'orid', hp.icusp,
        'str1', round(str1, 1),
        'dip1', round(dip1, 1),
        'rake1', round(rak1, 1),
        'algorithm', "HASH",
        'mechid', mechid,
        'auth', 'hashpy:' + hp.author,
        'str2', round(str2, 1),
        'dip2', round(dip2, 1),
        'rake2', round(rak2, 1),
        'taxazm', round(axes['T']['azimuth'], 1),
        'taxplg', round(axes['T']['dip'], 1),
        'paxazm', round(axes['P']['azimuth'], 1),
        'paxplg', round(axes['P']['dip'], 1),
    ]
    fpln.putv(*fplane_fields)

    pmec = dblookup(db, table='predmech')
    parr = dblookup(db, table='predarr')
    for i in range(hp.npol):
        if hp.p_pol[i] > 0:
            polarity = 'U'
        else:
            polarity = 'D'
        pmec.record = pmec.addnull()
        pmec.putv('arid', int(hp.arid[i]),
                  'orid', hp.icusp,
                  'mechid', mechid,
                  'fm', polarity,
                  )
        parr.record = parr.addnull()
        parr.putv('arid', int(hp.arid[i]),
                  'orid', hp.icusp,
                  'esaz', hp.qazi[i],
                  'dip', hp.p_the_mc[i, 0],
                  )
    return db