Example #1
    def get_view(self, steps, key=None):
        """
        Open view, run commands and get entries.
        """

        self.logging.debug(', '.join(steps))

        if not self.db:
            self.logging.warning('Problems with database pointer')
            return

        with datascope.freeing(self.db.process(steps)) as dbview:

            # Get NULL values
            dbview.record = datascope.dbNULL
            nulls = get_all_fields(dbview)

            for r in dbview.iter_record():

                self.logging.debug('New document')
                temp = get_all_fields(r, nulls)

                if key:
                    self.documents[temp[key]] = Document(temp)
                else:
                    self.documents[len(self.documents)] = Document(temp)
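
A minimal usage sketch, assuming a hypothetical wrapper instance dbc of the class above (with db, logging and documents already initialized) and 'origin.orid' as the unique key:

# Hedged sketch only; 'dbc' and the key name are assumptions, and the
# steps list is a standard Datascope dbprocess recipe.
steps = ['dbopen origin', 'dbsubset orid != NULL', 'dbsort time']
dbc.get_view(steps, key='origin.orid')
for orid, doc in dbc.documents.items():
    print(orid, doc)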
def extract_all_events(ev, qml, db_path, output_dir):
    with ds.closing(ds.dbopen(db_path, 'r')) as db:
        with ds.freeing(db.process(['dbopen event', 'dbsort evid'])) as view:
            for row in view.iter_record():
                log.info('Processing event ' +
                         ' '.join([str(row.getv(x)[0]) for x in EVENT_FIELDS]))
                event_id = row.getv(EVENT_FIELDS[0])[0]
                event_xml(event_id=event_id,
                          event=ev,
                          quakeml=qml,
                          output_file=os.path.join(output_dir, str(event_id)))
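
A hedged driver for the function above; 'ev' (event converter) and 'qml' (QuakeML serializer) are objects built elsewhere in the same script, and the paths are placeholders:

# Sketch only; EVENT_FIELDS and event_xml() come from the surrounding module.
extract_all_events(ev, qml, '/path/to/db', '/tmp/quakeml')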
Example #4
    def get_seismic_station_metadata(self, start_time=0, end_time=None):
        """Retrieve station locations from db and yield as a generator.

        Args:
            start_time (numeric): unix timestamp. Stations with endtimes before this time will be excluded.
            end_time (numeric): unix timestamp. Stations with "time" after this time will be excluded.

        Yields:
            SeismicStationMetadata tuple: includes snet as one of the key/value pairs. Records are returned
            sorted by snet then by sta.
        """

        stats = {"numsta": 0}

        # Construct a dbsubset format string
        dbsubset_cmd = self._onoff_subset(start_time, end_time)

        if len(dbsubset_cmd) > 0:
            dbsubset_cmd = "dbsubset " + dbsubset_cmd
        else:
            dbsubset_cmd = None

        # Define dbops
        process_list = ["dbopen site"]

        if dbsubset_cmd is not None:
            process_list.append(dbsubset_cmd)

        process_list += [
            "dbjoin snetsta",
            "dbsort snet sta",
        ]

        self.logger.debug("Seismic process_list:\n%s", pformat(process_list))
        with datascope.freeing(self.get_pointer()) as dbptr:
            dbptr = dbptr.process(process_list)

            for record in dbptr.iter_record():
                stats["numsta"] += 1
                vals = record.getv("snet", "sta", "lat", "lon", "ondate",
                                   "offdate")
                sta_data = SeismicStationMetadata(
                    snet=vals[0],
                    sta=vals[1],
                    lat=vals[2],
                    lon=vals[3],
                    time=stock.epoch(vals[4]),
                    endtime=stock.epoch(vals[5]),
                    extra_channels=None,
                )
                yield sta_data

        LOGGER.debug("Processed %d stations.", stats["numsta"])
Example #5
def store(net, sta, ondate, lon, lat, elev):
    lddate = datetime.now()
    row = list(zip(
        fields,
        [
            sta,
            ondate.strftime("%Y%j"), lat, lon, elev / 1000.0,
            convtime(lddate)
        ],
    ))
    db = dbopen(dbpath, "r+")
    with closing(db):
        snetsta_table = db.lookup(table="snetsta")
        snetsta_view = snetsta_table.subset("sta == '{}'".format(sta))
        log.debug("snetsta_view %s", snetsta_view)
        with freeing(snetsta_view):
            try:
                rowptr = next(snetsta_view.iter_record())
            except StopIteration:
                snetsta_table.addv(
                    *zip(snetsta_fields,
                         [net, sta, sta, convtime(lddate)]))
                log.info("added snetsta record")

        site_table = db.lookup(table="site")
        site_view = site_table.subset("sta == '{}'".format(sta))
        log.debug("site_view %s", site_view)
        with freeing(site_view):
            try:
                rowptr = next(site_view.iter_record())
            except StopIteration:
                site_table.addv(*row)
                log.info("added record %s", row)
            else:
                log.debug("rowptr %s", rowptr)
                old_row = dict(zip(fields, rowptr.getv(*fields)))
                if float(convtime(lddate)) > float(old_row["lddate"]):
                    rowptr.putv(*row)
                    log.info("updated record %s %s", old_row, row)
                return old_row
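
A hedged example call; fields, snetsta_fields, dbpath, convtime and log are module-level names in the original script, and the station values below are placeholders:

from datetime import datetime

# store() upserts one snetsta/site row per station (placeholder values).
store('AK', 'XYZ', datetime(2020, 1, 1), -147.5, 64.8, 120.0)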
def extract_event(db_path, ev_file):
    """
    :param db_path: database location 
    :param ev_file: events file name, csv file
    
    """
    ev_file = ev_file if ev_file else DEFAULT_EVENT_FILE
    with ds.closing(ds.dbopen(db_path, 'r')) as db:
        with ds.freeing(db.process(['dbopen event', 'dbsort evid'])) as view:
            with open(ev_file, 'w') as csv_file:
                writer = csv.writer(csv_file, delimiter=',')
                csv_file.write(','.join(EVENT_FIELDS) + '\n')
                for row in view.iter_record():
                    writer.writerow([str(row.getv(x)[0]) for x in
                                     EVENT_FIELDS])
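
Calling the exporter is a one-liner; the database path is a placeholder:

# Sketch only; EVENT_FIELDS and DEFAULT_EVENT_FILE come from the module.
extract_event('/path/to/db', 'events.csv')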
Example #7
    def get_view(self, steps=None, key=None):
        """
        Open view, run commands and get entries.

        If 'key' is given, use it as the key for the final
        dictionary. It should be unique in the table view,
        otherwise some rows will be lost. It has the form
        'origin.orid'. If 'key' is not set, a pseudo-list is
        built instead: sequential integer keys that represent
        the row numbers of the original view.
        """

        from anfdb import get_all_fields

        steps = steps if steps is not None else []

        self.logging.debug(', '.join(steps))

        if not self.db:
            self.logging.warning('Problems with database pointer')
            return

        with datascope.freeing(self.db.process(steps)) as dbview:

            self.logging.debug('%s total values in view' % dbview.record_count)

            if not dbview.record_count:
                self.logging.warning('No values left in view')
                return

            # Get NULL values
            self.logging.debug('Extract NULL values')
            dbview.record = datascope.dbNULL
            nulls = get_all_fields(dbview)

            self.logging.debug('Extract data values')
            for r in dbview.iter_record():

                self.logging.debug('New document')
                temp = get_all_fields(r, nulls)

                if key:
                    self.documents[temp[key]] = Document(temp)
                else:
                    self.documents[len(self.documents)] = Document(temp)
Example #9
    def get_view(self, steps, key=None):
        """
        Extract data for each row and all attributes in database view.
        """
        for step in steps:
            if 'dbopen' in step or 'dbjoin' in step:
                table = next(item for item in step.split()
                             if item not in ['dbopen', 'dbjoin', '-o'])
                if not table_present(self.db, table):
                    self.logger.error('Table does not exist: %s' % table)
                    return

        try:
            with datascope.freeing(self.db.process(steps)) as dbview:

                if dbview.record_count == 0:
                    self.logger.debug('Process returned empty view: ' +
                                      ', '.join(steps))
                    return
                else:
                    self.logger.debug('Processing: ' + ', '.join(steps))

                dbview.record = datascope.dbNULL
                nulls = get_all_fields(dbview)

                for i, row in enumerate(dbview.iter_record()):

                    data = get_all_fields(row, nulls)

                    if key is not None:
                        if key not in data:
                            self.logger.debug(
                                'Key "%s" not found in row %d of view. '
                                'Skipping.' % (key, i))
                            continue
                        self.documents[data[key]] = Document(data)
                    else:
                        self.documents[len(self.documents)] = Document(data)

        except datascope.DbprocessError as ex:
            self.logger.error('Processing: ' + ', '.join(steps))
            self.logger.error(repr(ex))
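
The method above relies on a table_present() helper imported elsewhere in the project. A minimal sketch of such a check, assuming the stock Antelope Python API and its dbTABLE_PRESENT query code (the real helper may differ):

from antelope import datascope

def table_present(db, table):
    # Hedged sketch: look the table up and ask Datascope whether it is
    # present on disk; treat lookup errors as "not present".
    try:
        return bool(db.lookup(table=table).query(datascope.dbTABLE_PRESENT))
    except datascope.DatascopeException:
        return False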
Example #10
def main():

    ##########
    # Extract station list from master_stations
    ##########

    stations = []
    with ds.closing(ds.dbopen("/aerun/sum/db/dbsum/dbsum", "r")) as db:
        steps = ["dbopen affiliation", "dbjoin site", "dbsubset net=~/AK/ && offdate==NULL", "dbsort sta"]
        with ds.freeing(db.process(steps)) as dbview:
            for record in dbview.iter_record():
                stations.append(record.getv('sta')[0])
    ##########
    # Extract waveform data into trace objects
    ##########
    
    twin = 600
    tcurrent = float(stock.str2epoch('now'))
    tend = tcurrent - 60
    tstart = tend - twin
    
    f = '%Y_%m_%d'
    date = stock.epoch2str(tstart, f)
    
    with ds.closing(ds.dbopen("/aerun/op/run/db/archive_%s" % date, "r")) as db:
        for sta in stations:
            data = {}
            samplerate = {}
            dbsta = db.lookup(table='wfdisc')
            dbsta = dbsta.subset('sta=~/%s/' % sta)
            bband = dbsta.subset('chan=~/BH./')
            
            #######
            # Extract Broadband seismic data
            #######
            if bband.query('dbRECORD_COUNT') > 0:
#                print (tstart, tcurrent, "%s" % sta, "BHE")
                tr_bhe = dbsta.trloadchan(tstart, tend, "%s" % sta, "BHE")
                tr_bhe.trapply_calib()
                with ds.trfreeing(tr_bhe):
                    if tr_bhe.query("dbRECORD_COUNT") > 0:
                        tr_bhe.record = 0
                        data['BHE'] = tr_bhe.trdata()
                        samplerate['BHE'] = tr_bhe.getv("samprate")
                        
                tr_bhn = dbsta.trloadchan(tstart, tend, "%s" % sta, "BHN")
                tr_bhn.trapply_calib()
                with ds.trfreeing(tr_bhn):
                    if tr_bhn.query("dbRECORD_COUNT") > 0:
                        tr_bhn.record = 0
                        data['BHN'] = tr_bhn.trdata()
                        samplerate['BHN'] = tr_bhn.getv("samprate")
                        
                tr_bhz = dbsta.trloadchan(tstart, tend, "%s" % sta, "BHZ")
                tr_bhz.trapply_calib()
                with ds.trfreeing(tr_bhz):
                    if tr_bhz.query("dbRECORD_COUNT") > 0:
                        tr_bhz.record = 0
                        data['BHZ'] = tr_bhz.trdata()
                        samplerate['BHZ'] = tr_bhz.getv("samprate")
            #######
            # Extract moderate sample rate strong motion data
            #######
            smot_b = dbsta.subset('chan=~/BN./')
            if smot_b.query('dbRECORD_COUNT') > 0:
                tr_bne = dbsta.trloadchan(tstart, tend, "%s" % sta, "BNE")
                tr_bne.trapply_calib()
                with ds.trfreeing(tr_bne):
                    if tr_bne.query("dbRECORD_COUNT") > 0:
                        tr_bne.record = 0
                        data['BNE'] = tr_bne.trdata()
                        samplerate['BNE'] = tr_bne.getv("samprate")
                        
                tr_bnn = dbsta.trloadchan(tstart, tend, "%s" % sta, "BNN")
                tr_bnn.trapply_calib()
                with ds.trfreeing(tr_bnn):
                    if tr_bnn.query("dbRECORD_COUNT") > 0:
                        tr_bnn.record = 0
                        data['BNN'] = tr_bnn.trdata()
                        samplerate['BNN'] = tr_bnn.getv("samprate")
                        
                tr_bnz = dbsta.trloadchan(tstart, tend, "%s" % sta, "BNZ")
                tr_bnz.trapply_calib()
                with ds.trfreeing(tr_bnz):
                    if tr_bnz.query("dbRECORD_COUNT") > 0:
                        tr_bnz.record = 0
                        data['BNZ'] = tr_bnz.trdata()
                        samplerate['BNZ'] = tr_bnz.getv("samprate")
            
            #######
            # Extract high sample rate strong motion data
            #######
            smot_h = dbsta.subset('chan=~/HN./')
            if smot_h.query('dbRECORD_COUNT') > 0:
                tr_hne = dbsta.trloadchan(tstart, tend, "%s" % sta, "HNE")
                tr_hne.trapply_calib()
                with ds.trfreeing(tr_hne):
                    if tr_hne.query("dbRECORD_COUNT") > 0:
                        tr_hne.record = 0
                        data['HNE'] = tr_hne.trdata()
                        samplerate['HNE'] = tr_hne.getv("samprate")
                        
                tr_hnn = dbsta.trloadchan(tstart, tend, "%s" % sta, "HNN")
                tr_hnn.trapply_calib()
                with ds.trfreeing(tr_hnn):
                    if tr_hnn.query("dbRECORD_COUNT") > 0:
                        tr_hnn.record = 0
                        data['HNN'] = tr_hnn.trdata()
                        samplerate['HNN'] = tr_hnn.getv("samprate")
                        
                tr_hnz = dbsta.trloadchan(tstart, tend, "%s" % sta, "HNZ")
                tr_hnz.trapply_calib()
                with ds.trfreeing(tr_hnz):
                    if tr_hnz.query("dbRECORD_COUNT") > 0:
                        tr_hnz.record = 0
                        data['HNZ'] = tr_hnz.trdata()
                        samplerate['HNZ'] = tr_hnz.getv("samprate")
                        
#            if sta=="MLY":
#                plot_traces(sta, data, tstart, tend, samplerate)


#            shortperz = sbsta.subset('chan=~/EHZ/')
#            if smot.query('dbRECORD_COUNT') > 0:
#                tr_ehz = dbsta.trloadchan(tstart, tcurrent, "%s" % sta, "EHZ")
                
            print(sta)
            plot_traces(sta, data, tstart, tend, samplerate)
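
The script above is presumably run directly; a standard entry-point guard would invoke it:

if __name__ == '__main__':
    main()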
Example #11
    def get_extra_sensor_metadata(self, start_time=0, end_time=None):
        """Retrieve extra sensor locations as a generator.

        Args:
            start_time, end_time: bounds for station endtime and time
        """
        assert start_time >= 0
        assert end_time is None or end_time >= 0

        # Build the Datascope query str.
        # qstr = "|".join(["|".join(v) for k, v in infrasound_mapping.items()])
        qstr = "|".join(self._extra_sensor_channels)

        self.logger.info(
            "Infrasound: Searching sitechan table for chans that match: " +
            qstr)

        # Construct a dbsubset format string
        dbsubset_cmd = self._timeendtime_subset(start_time, end_time)
        if len(dbsubset_cmd) > 0:
            dbsubset_cmd = "dbsubset " + dbsubset_cmd
        else:
            dbsubset_cmd = None

        # Compile our dbprocess list of commands
        process_list = ["dbopen sensor"]

        if dbsubset_cmd:
            process_list.append(dbsubset_cmd)

        process_list += [
            "dbsubset chan=~/({!s})/".format(qstr),
            "dbjoin snetsta",
            "dbjoin site",
            "dbsort sta ondate chan time",
        ]
        self.logger.debug(pformat(process_list))

        # Track our stats here
        stats = {
            "numsta": 0,
            "numrec": 0,
        }

        with datascope.freeing(self.get_pointer()) as infraptr:
            try:
                infraptr = infraptr.process(process_list)
            except Exception as e:
                self.logger.exception("Dbprocessing failed.")
                raise e

            try:
                infraptr_grp = infraptr.group("sta")
            except Exception as e:
                self.logger.exception("Dbgroup failed")
                raise e
            with datascope.freeing(infraptr_grp):
                # Get values into an easily digestible dict
                for sta_record in infraptr_grp.iter_record():
                    sta, [db, view, end_rec,
                          start_rec] = sta_record.getv("sta", "bundle")
                    stats["numsta"] += 1
                    sta_data = {
                        "sta": sta,
                    }
                    extra_channels = []
                    for stachan_record in range(start_rec, end_rec):
                        infraptr.record = stachan_record
                        stats["numrec"] += 1

                        try:
                            (
                                sta_data["snet"],
                                sta_data["sta"],
                                chan,
                                sta_data["lat"],
                                sta_data["lon"],
                                time,
                                endtime,
                            ) = infraptr.getv("snet", "sta", "chan", "lat",
                                              "lon", "time", "endtime")
                        except TypeError:
                            self.logger.exception(
                                "infraptr.getv failed with dbprocess commands:\n%s",
                                pformat(process_list),
                            )
                            raise

                        # Append the channel name to the current extra_channels list.
                        extra_channels.append(chan)
                        """Due to query sort order, we end up with only the
                        most recent lat/lon for the station."""
                        """We keep the oldest time and the newest endtime."""
                        try:
                            sta_data["time"] = min(sta_data["time"], time)
                        except KeyError:
                            sta_data["time"] = time

                        try:
                            sta_data["endtime"] = max(sta_data["endtime"],
                                                      endtime)
                        except KeyError:
                            sta_data["endtime"] = endtime

                    # eliminate duplicate channels with set
                    sta_data["extra_channels"] = set(extra_channels)

                    # Create a new SeismicStationMetadata object from sta_data
                    # as keyword pairs.
                    metadata = SeismicStationMetadata(**sta_data)

                    # Set the extra_sensor_mapping attribute so that sensor
                    # mapping function works.
                    metadata.extra_sensor_mapping = self.extra_sensor_mapping

                    # Print progress stats after every 100 db rows.
                    if stats["numrec"] % 100 == 0:
                        LOGGER.debug(
                            "Progress: Retrieved %d stations with %d records.",
                            stats["numsta"],
                            stats["numrec"],
                        )

                    yield metadata
        LOGGER.debug("Retrieved %d stations with %d records.", stats["numsta"],
                     stats["numrec"])
Example #12
    def get_event(self, evid):
        """
        Open event database and get values.
        """

        self.evid = evid
        self.valid = False

        self.logging.info('Get evid %s from %s' % (self.evid, self.database))

        steps = ['dbopen event']
        steps.append('dbsubset evid==%s' % self.evid)
        if self.event_auth_select:
            for x in self.event_auth_select:
                steps.append('dbsubset auth =~ /%s/' % x)
        if self.event_auth_reject:
            for x in self.event_auth_reject:
                steps.append('dbsubset auth !~ /%s/' % x)

        self.logging.debug('Database query for event info:')
        self.logging.debug(', '.join(steps))

        with datascope.freeing(self.db.process(steps)) as dbview:
            self.logging.info('Found [%s] event with evid:%s' % (dbview.record_count, self.evid))
            if not dbview.record_count:
                # This failed.
                self.logging.warning('No event after subset')
                return

            if int(dbview.record_count) > 1:
                # This failed.
                self.logging.error('More than one evid [%s] in %s' % (self.evid, self.database))

            # Get NULL values
            dbview.record = datascope.dbNULL
            nulls = get_all_fields(dbview)

            # We should only have one record here.
            for temp in dbview.iter_record():

                self.logging.debug('Extracting info for event from db')

                self.event_data = get_all_fields(temp, nulls)
                self.valid = True

        self._get_origins()

        # Verify that we have the prefor in the origin list...
        if not self.origins.exists(self.event_data['event.prefor']):
            self.logging.warning('Missing prefor [%s] for evid:[%s]' %
                    (self.event_data['event.prefor'], self.evid))
            try:
                last_origin = self.origins.values(sort='origin.lddate', reverse=True)[0]
            except Exception:
                last_origin = None

            if not last_origin:
                self.logging.warning('No valid origin for this event evid:[%s]' % self.evid)
            else:
                self.logging.warning('Set [%s] as prefor for evid:[%s]' % (last_origin['origin.orid'], self.evid))
                self.event_data['event.prefor'] = last_origin['origin.orid']

        self._get_arrivals()
        self._get_stamag()
        self._get_netmag()
        self._get_fplane()
        self._get_mts()
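
A hedged sketch of loading one event with the method above; the 'event' instance and evid value are assumptions:

event.get_event(1234)
if event.valid:
    print(event.event_data['event.prefor'])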
Example #13
    def phases(self, min, max):
        """Retrieve all arrival phases for an event."""

        self.logger.debug("Events():phases(%s,%s) " % (min, max))

        phases = defaultdict(lambda: defaultdict(dict))

        assoc = False

        dbname = self.dbcentral(min)

        self.logger.debug("Events():phases(%s,%s) db:(%s)" %
                          (min, max, dbname))

        if not dbname:
            return phases

        open_dbviews = []
        with datascope.closing(datascope.dbopen(dbname, "r")) as db:
            with datascope.freeing(db.lookup(table="arrival")) as db_arrivals:
                try:
                    db_arrival_assoc = db_arrivals.join("assoc")
                    open_dbviews.append(db_arrival_assoc)
                    dbv = db_arrival_assoc
                    assoc = True
                except datascope.DatascopeException:
                    dbv = db_arrivals

                # This "try/finally" block is to emulate a context manager for a successful join with the assoc table.
                try:
                    nrecs = dbv.query(datascope.dbRECORD_COUNT)

                    if not nrecs:
                        return dict(phases)

                    try:
                        dbv = dbv.subset("%s <= time && time <= %s" %
                                         (float(min), float(max)))
                        nrecs = dbv.query(datascope.dbRECORD_COUNT)
                    except datascope.DatascopeException:
                        nrecs = 0

                    if not nrecs:
                        return dict(phases)

                    for p in range(nrecs):
                        dbv.record = p

                        if assoc:
                            phase_field = "phase"
                        else:
                            phase_field = "iphase"

                        Sta, Chan, ArrTime, Phase = dbv.getv(
                            "sta", "chan", "time", phase_field)
                        StaChan = Sta + "_" + Chan
                        phases[StaChan][ArrTime] = Phase

                        self.logger.debug("Phases(%s):%s" % (StaChan, Phase))
                finally:
                    for view in open_dbviews:
                        view.close()

        self.logger.debug("Events: phases(): t1=%s t2=%s [%s]" %
                          (min, max, phases))

        return dict(phases)
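
A sketch of pulling picks for the last ten minutes, assuming an 'events' instance of the class above and Antelope's stock module:

from antelope import stock

t2 = float(stock.str2epoch('now'))
for stachan, picks in events.phases(t2 - 600, t2).items():
    print(stachan, picks)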
Example #14
    def get_event(self, evid):
        """
        Open event database and get values.
        """

        self.evid = evid
        self.valid = False

        self.logging.info("Get evid %s from %s" % (self.evid, self.database))

        steps = ["dbopen event"]
        steps.extend(["dbsubset evid==%s" % self.evid])
        if self.event_auth_select:
            for x in self.event_auth_select:
                steps.append("dbsubset auth =~ /%s/" % x)
        if self.event_auth_reject:
            for x in self.event_auth_reject:
                steps.append("dbsubset auth !~ /%s/" % x)

        self.logging.debug("Database query for event info:")
        self.logging.debug(", ".join(steps))

        with datascope.freeing(self.db.process(steps)) as dbview:
            self.logging.info("Found [%s] event with evid:%s" % (dbview.record_count, self.evid))
            if not dbview.record_count:
                # This failed.
                self.logging.warning("No event after subset ")
                return

            if int(dbview.record_count) > 1:
                # This failed.
                self.logging.error("More than one evid [%s] in %s" % (self.evid, self.database))

            # Get NULL values
            dbview.record = datascope.dbNULL
            nulls = get_all_fields(dbview)

            # we should only have 1 here
            for temp in dbview.iter_record():

                self.logging.debug("Extracting info for event from db")

                self.event_data = get_all_fields(temp, nulls)
                self.valid = True

        self._get_origins()

        # Verify that we have the prefor in origin list...
        if not self.origins.exists(self.event_data["event.prefor"]):
            self.logging.warning("Missing prefor [%s] for evid:[%s]" % (self.event_data["event.prefor"], self.evid))
            try:
                last_origin = self.origins.values(sort="origin.lddate", reverse=True)[0]
            except Exception:
                last_origin = None

            if not last_origin:
                self.logging.warning("No valid origin for this event evid:[%s]" % self.evid)
            else:
                self.logging.warning("Set [%s] as prefor for evid:[%s]" % (last_origin["origin.orid"], self.evid))
                self.event_data["event.prefor"] = last_origin["origin.orid"]

        self._get_arrivals()
        self._get_stamag()
        self._get_netmag()
        self._get_fplane()
        self._get_mts()
Example #15
    def get_stations(self, regex, time, reference=False, event_data=None):
        """Get site info for each station."""
        yearday = stock.epoch2str(time, "%Y%j")

        steps = [
            "dbsubset ondate <= %s && (offdate >= %s || offdate == NULL)" %
            (yearday, yearday)
        ]

        steps.extend(["dbsort sta"])
        steps.extend(["dbsubset %s" % regex])

        self.logger.info("Database query for stations:")
        self.logger.info(", ".join(steps))

        with datascope.freeing(self.table.process(steps)) as dbview:
            self.logger.info("Extracting sites for origin from db")

            strings = []
            for temp in dbview.iter_record():
                (sta, lat, lon, chan) = temp.getv("sta", "lat", "lon", "chan")

                if len(chan) > 3:
                    chan_code = chan[:2] + "._."
                else:
                    chan_code = chan[:2]

                string = sta + chan_code

                if string not in strings:
                    strings.append(string)
                    try:
                        self.stations[sta].append_chan(chan_code)
                    except KeyError:
                        self.stations[sta] = Records(sta, lat, lon)
                        self.stations[sta].append_chan(chan_code)
                        if reference and sta != reference:
                            ssaz = "%0.2f" % temp.ex_eval(
                                "azimuth(%s,%s,%s,%s)" % (
                                    self.stations[reference].lat,
                                    self.stations[reference].lon,
                                    lat,
                                    lon,
                                ))
                            ssdelta = "%0.4f" % temp.ex_eval(
                                "distance(%s,%s,%s,%s)" % (
                                    self.stations[reference].lat,
                                    self.stations[reference].lon,
                                    lat,
                                    lon,
                                ))
                            ssdistance = round(
                                temp.ex_eval("deg2km(%s)" % ssdelta), 2)

                            self.stations[sta].set_ss(ssaz, ssdelta,
                                                      ssdistance)

                        if event_data:
                            seaz = "%0.2f" % temp.ex_eval(
                                "azimuth(%s,%s,%s,%s)" %
                                (lat, lon, event_data.lat, event_data.lon))
                            esaz = "%0.2f" % temp.ex_eval(
                                "azimuth(%s,%s,%s,%s)" %
                                (event_data.lat, event_data.lon, lat, lon))
                            delta = "%0.4f" % temp.ex_eval(
                                "distance(%s,%s,%s,%s)" %
                                (event_data.lat, event_data.lon, lat, lon))
                            realdistance = temp.ex_eval("deg2km(%s)" % delta)

                            pdelay = int(
                                temp.ex_eval("pphasetime(%s,%s)" %
                                             (delta, event_data.depth)))

                            if pdelay > 0:
                                pdelay -= 1
                            else:
                                pdelay = 0

                            ptime = time + pdelay

                            self.stations[sta].set_es(seaz, esaz, delta,
                                                      realdistance, pdelay,
                                                      ptime)

        return self.stations
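
A final usage sketch: site lookup for broadband channels at the current epoch time; site_db stands in for an instance of the class above:

from antelope import stock

now = float(stock.str2epoch('now'))
stations = site_db.get_stations('chan =~ /BH./', now)
for sta, rec in stations.items():
    print(sta, rec)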