Example #1
0
def get_first_motions(dbname, orid=None):
    """Fetch first-motion data for one origin from an Antelope database.

    Port of Gabe/Mark dbprocess for getting info to pass to an FM
    calculator: opens the origin table, subsets to the requested ORID,
    joins the origin-error/arrival tables, keeps P-type phases carrying a
    usable first-motion flag, and joins wfdisc so waveform filenames are
    available.

    Parameters
    ----------
    dbname : str or antelope.datascope.Dbptr
        Database name or an already-open pointer.
    orid : int, optional
        ORID to subset on.

    Returns
    -------
    Dbptr on the processed view, sorted by arrival time.
    """
    steps = ["dbopen origin"]
    steps.append("dbsubset orid=={0}".format(orid))
    steps += ["dbjoin origerr", "dbjoin assoc", "dbjoin arrival"]
    # Keep only P-type phases that have a first-motion pick recorded.
    steps.append("dbsubset iphase =~ /.*[Pp].*/")
    steps.append("dbsubset fm =~ /.*[UuCcDdRr.].*/")
    # Waveform filenames come from wfdisc; restrict to matching channels.
    steps += ["dbjoin wfdisc", "dbsubset chan==wfdisc.chan"]
    steps.append("dbsort arrival.time")
    # NOTE: affiliation/site joins and ondate/offdate subsets were present
    # but disabled in the original implementation.
    ptr = Dbptr(dbname)
    return dbprocess(ptr, steps)
Example #2
0
def get_first_motions(dbname, orid=None):
    """Fetch first-motion data for one origin from an Antelope database.

    Port of Gabe/Mark dbprocess for getting info to pass to an FM
    calculator. Gets origin and arrival info, and joins wfdisc so the
    waveform filenames are available.

    Parameters
    ----------
    dbname : str or antelope.datascope.Dbptr
    orid : int, optional
        ORID to subset on.

    Returns
    -------
    Dbptr on the processed view, sorted by arrival time.
    """
    commands = [
        'dbopen origin',
        'dbsubset orid=={0}'.format(orid),
        'dbjoin origerr',
        'dbjoin assoc',
        'dbjoin arrival',
        # P-type phases with a recorded first-motion flag only.
        'dbsubset iphase =~ /.*[Pp].*/',
        'dbsubset fm =~ /.*[UuCcDdRr.].*/',
        'dbjoin wfdisc',
        'dbsubset chan==wfdisc.chan',
        'dbsort arrival.time',
    ]
    # NOTE: affiliation/site joins and ondate/offdate subsets were present
    # but disabled in the original implementation.
    return dbprocess(Dbptr(dbname), commands)
def generate_inframet_locations(db, mtype, deploytype, year, month, imap=False, verbose=False, debug=False):
    """Build a Datascope view of inframet (infrasound) channel locations
    for a specific period in time, for writing out to xy files suitable
    for GMT.

    Joins sitechan -> deployment -> site, subsets channels to those in
    the channel map, and drops stations deployed after the end of the
    requested month. For 'rolling' maps, decommissioned stations are
    dropped as well; any mtype other than 'rolling'/'cumulative' aborts.

    NOTE(review): the default imap=False would raise AttributeError at
    imap.iteritems() below — callers apparently always pass a dict of
    channel-pattern lists. TODO confirm against callers.
    NOTE(review): Python 2 code (print statement, iteritems,
    'except Exception, e').
    """
    # Build the Datascope query str. 
    # For some reason this list comprehensions 
    # has to be at the top of a function?
    # Cannot reproduce in independent tests?

    # Flatten all channel patterns into one regex alternation, e.g. "A|B|C".
    qstr = '|'.join([ '|'.join(v) for k,v in imap.iteritems()])
    start_time, end_time = generate_times(year, month)

    if verbose or debug:
        print "  - generate_inframet_locations(): Infrasound: Searching sitechan table for chans that match: %s" % qstr

    infraptr = antdb.dbopen(db, 'r')

    process_list = [
        'dbopen sitechan',
        'dbjoin deployment',
        'dbjoin site',
        'dbsubset chan=~/(%s)/' % qstr,
        'dbsubset ondate <= %s' % end_time # Remove future deployed stations
    ]

    if mtype == 'rolling':
        process_list.append('dbsubset endtime >= %s' % start_time) # No decommissioned stations for rolling plot
    elif mtype != 'cumulative':
        print "generate_inframet_locations(): Inframet Error: Map type ('%s') is not recognized" % mtype
        exit()

    process_list.append('dbsort sta ondate chan time')

    try:
        infraptr = antdb.dbprocess(infraptr, process_list)
    except Exception,e:
        # NOTE(review): failure is only printed, not re-raised, and the
        # function body appears truncated here (no return / file output
        # visible) — verify against the full original source.
        print "  - generate_inframet_locations(): Dbprocessing failed with exception: %s" % e
def generate_sta_locations(db, mtype, deploytype, year, month, verbose=False, debug=False):
    """Build Datascope views of station locations for a specific period
    in time, for writing out to xy files suitable for GMT.

    First collects the unique network codes (snet) from the database,
    then builds a site/snetsta/deployment view restricted to stations
    deployed before the end of the requested month ('rolling' maps also
    drop decommissioned stations; other mtypes besides 'cumulative'
    abort). Finally opens per-network temp .xy files.

    NOTE(review): Python 2 code (print statement, 'except Exception, e').
    NOTE(review): the function body appears truncated at the end of this
    excerpt (the per-snet loop only creates a temp file) — verify against
    the full original source.
    """
    start_time, end_time = generate_times(year, month)

    # Get the networks
    snetptr = antdb.dbopen(db, 'r')
    snetptr = antdb.dbprocess(snetptr,
                            ['dbopen site',
                             'dbjoin snetsta',
                             'dbjoin deployment'])
    snetptr = antdb.dbsort(snetptr,'snet', unique=True)
    usnets = []
    try:
        # Collect the unique network codes row by row.
        for i in range(antdb.dbquery(snetptr, antdb.dbRECORD_COUNT )):
            snetptr['record'] = i
            mysnet = antdb.dbgetv(snetptr,'snet')[0]
            usnets.append(mysnet)
        antdb.dbclose(snetptr)
    except Exception, e:
        # NOTE(review): error is printed and swallowed; usnets may be
        # partially filled afterwards.
        print "generate_sta_locations(): Exception occurred: %s" % e
    # Define dbops
    process_list = [
        'dbopen site', 
        'dbjoin snetsta', 
        'dbjoin deployment', 
        'dbsubset time <= %s' % end_time
    ]
    dbptr = antdb.dbopen(db, 'r')
    if mtype == 'rolling':
        process_list.append('dbsubset endtime >= %s' % start_time)
    elif mtype != 'cumulative':
        print "generate_sta_locations(): Map type ('%s') is not recognized" % mtype
        exit()
    process_list.append('dbsort snet sta')
    dbptr = antdb.dbprocess(dbptr,process_list)

    file_list = {}
    counter = {}

    # Temp file for decommissioned stations; mkstemp returns (fd, path).
    dfile = tempfile.mkstemp(suffix='.xy', prefix='deployment_list_DECOM_')
    decom_ptr = dfile[0]
    decom_name = dfile[1]

    # NOTE(review): this_decom_counter is only initialized for
    # 'cumulative' maps — confirm 'rolling' code paths below (not visible
    # here) do not read it.
    if mtype == 'cumulative':
        this_decom_counter = 0

    # Loop over unqiue snets
    for s in usnets:
        stmp = tempfile.mkstemp(suffix='.xy',
                                prefix='deployment_list_%s_' % s)
Example #6
0
def input(hp, dbname, evid=None, orid=None):
    """Input HASH data from Antelope database

    This will accept a database name OR Antelope Dbptr, and either
    an ORID, or an EVID, in which case the 'prefor' ORID is looked
    up and used.

    Inputs
    ------
    dbname  :   str or antelope.datascope.Dbptr
    orid    :   int of ORID
    evid    :   int of EVID
    """
    db = Dbptr(dbname)

    # Resolve the preferred origin from the event table when only an
    # EVID was supplied.
    if orid is None:
        event_view = dbprocess(db, ["dbopen event", "dbsubset evid == " + str(evid)])
        orid = RowPointerDict(event_view)["prefor"]

    steps = [
        "dbopen origin",
        "dbsubset orid == " + str(orid),
        "dbjoin origerr",
        "dbjoin assoc",
        "dbjoin arrival",
        "dbjoin affiliation",
        "dbjoin site",
        "dbsubset iphase =~ /.*[Pp].*/",
        "dbsubset (ondate <= time)",
        "dbsubset (time <= offdate) || (offdate == -1)",
    ]
    db = dbprocess(db, steps)

    row = RowPointerDict(db, record=0)

    hp.nrecs = len(row)
    if len(row) <= 0:
        raise ValueError("No picks for this ORID: {0}".format(orid))

    # Hypocenter and origin-error summary values from the first record.
    hp.tstamp = row["origin.time"]
    hp.qlat = row["origin.lat"]
    hp.qlon = row["origin.lon"]
    hp.qdep = row["origin.depth"]
    hp.qmag = row["origin.ml"]
    hp.icusp = row["origin.orid"]
    hp.seh = row["origerr.smajax"]
    hp.sez = row["origerr.sdepth"]

    aspect = np.cos(hp.qlat / degrad)  # convert using python later.

    # The index 'k' is deliberately non-Pythonic to deal with the fortran
    # subroutines which need to be called and the structure of the original
    # HASH code; only accepted picks advance it.
    k = 0
    for rec in range(len(row)):
        # Extract pick data for this record from the db
        row = RowPointerDict(db, record=rec)

        hp.sname[k] = row["sta"]
        hp.snet[k] = row["net"]
        hp.scomp[k] = row["chan"]
        hp.pickonset[k] = row["qual"].strip(".")
        hp.pickpol[k] = row["fm"]
        hp.arid[k] = row["arid"]

        sta_lat, sta_lon, sta_elev = row["site.lat"], row["site.lon"], row["site.elev"]
        hp.esaz[k] = row["esaz"]

        # Epicentral distance (km) and azimuth via a flat-earth
        # approximation; used for distance filtering below.
        east = (sta_lon - hp.qlon) * 111.2 * aspect
        north = (sta_lat - hp.qlat) * 111.2
        dist = np.sqrt(east ** 2 + north ** 2)
        qazi = 90.0 - np.arctan2(north, east) * degrad
        if qazi < 0.0:
            qazi += 360.0

        # Drop stations beyond the configured distance cutoff.
        if dist > hp.delmax:
            continue

        # Map the first-motion character to an up/down polarity; picks
        # with no usable polarity are skipped.
        pol = hp.pickpol[k].lower()
        if not pol:
            continue
        if pol in "cu":
            hp.p_pol[k] = 1
        elif pol in "dr":
            hp.p_pol[k] = -1
        else:
            continue

        # Save them for other functions
        hp.dist[k] = dist
        hp.qazi[k] = qazi
        hp.flat[k] = sta_lat
        hp.flon[k] = sta_lon
        hp.felv[k] = sta_elev

        # Onset quality: impulsive ('i') is 0, emergent ('e') or weak
        # ('w') is 1, anything else defaults to impulsive (0).
        hp.p_qual[k] = 1 if hp.pickonset[k].lower() in ("e", "w") else 0

        # polarity check in original code... doesn't work here
        # hp.p_pol[k] = hp.p_pol[k] * hp.spol
        k += 1
    hp.npol = k  # k is zero indexed in THIS loop
    db.close()
Example #7
0
def input(hp, dbname, evid=None, orid=None):
    '''Input HASH data from Antelope database

    This will accept a database name OR Antelope Dbptr, and either
    an ORID, or an EVID, in which case the 'prefor' ORID is looked
    up and used.

    Inputs
    ------
    dbname  :   str or antelope.datascope.Dbptr
    orid    :   int of ORID
    evid    :   int of EVID
    '''
    db = Dbptr(dbname)

    # Look up the preferred origin when only an EVID was given.
    if orid is None:
        event_view = dbprocess(db, ['dbopen event', 'dbsubset evid == ' + str(evid)])
        orid = RowPointerDict(event_view)['prefor']

    db = dbprocess(db, [
        'dbopen origin',
        'dbsubset orid == ' + str(orid),
        'dbjoin origerr',
        'dbjoin assoc',
        'dbjoin arrival',
        'dbjoin affiliation',
        'dbjoin site',
        'dbsubset iphase =~ /.*[Pp].*/',
        'dbsubset (ondate <= time)',
        'dbsubset (time <= offdate) || (offdate == -1)',
    ])

    row = RowPointerDict(db, record=0)

    hp.nrecs = len(row)
    if len(row) <= 0:
        raise ValueError("No picks for this ORID: {0}".format(orid))

    # Hypocenter and origin-error summary values from the first record.
    hp.tstamp = row['origin.time']
    hp.qlat = row['origin.lat']
    hp.qlon = row['origin.lon']
    hp.qdep = row['origin.depth']
    hp.qmag = row['origin.ml']
    hp.icusp = row['origin.orid']
    hp.seh = row['origerr.smajax']
    hp.sez = row['origerr.sdepth']

    aspect = np.cos(hp.qlat / degrad)  # convert using python later.

    # The index 'k' is deliberately non-Pythonic to deal with the fortran
    # subroutines which need to be called and the structure of the original
    # HASH code; only accepted picks advance it.
    k = 0
    for rec in range(len(row)):
        row = RowPointerDict(db, record=rec)

        hp.sname[k] = row['sta']
        hp.snet[k] = row['net']
        hp.scomp[k] = row['chan']
        hp.pickonset[k] = 'I'  # onset is hard-wired to impulsive here
        hp.pickpol[k] = row['fm']
        hp.arid[k] = row['arid']

        sta_lat, sta_lon, sta_elev = row['site.lat'], row['site.lon'], row['site.elev']
        hp.esaz[k] = row['esaz']

        # dist & azi, get from db OR obspy or another python mod
        # (antelope) could do this on WGS84; flat-earth approximation.
        east = (sta_lon - hp.qlon) * 111.2 * aspect
        north = (sta_lat - hp.qlat) * 111.2
        dist = np.sqrt(east**2 + north**2)
        qazi = 90. - np.arctan2(north, east) * degrad

        if qazi < 0.:
            qazi += 360.
        # Drop stations beyond the configured distance cutoff.
        if dist > hp.delmax:
            continue
        # First-motion character to up/down polarity; unrecognized
        # characters skip the pick.
        if hp.pickpol[k] in 'CcUu':
            hp.p_pol[k] = 1
        elif hp.pickpol[k] in 'RrDd':
            hp.p_pol[k] = -1
        else:
            continue

        # save them for other functions -MCW
        hp.dist[k] = dist
        hp.qazi[k] = qazi
        hp.flat[k] = sta_lat
        hp.flon[k] = sta_lon
        hp.felv[k] = sta_elev

        # Impulsive onset maps to quality 0, everything else to 1.
        if hp.pickonset[k] in 'Ii':
            hp.p_qual[k] = 0
        else:
            hp.p_qual[k] = 1

        # polarity check in original code... doesn't work here
        # hp.p_pol[k] = hp.p_pol[k] * hp.spol
        k += 1
    hp.npol = k  # k is zero indexed in THIS loop
    db.close()