Example #1
def test_table(dbname, tbl, verbose=False):
    """
    Verify that we can work with table.
    Returns path if valid and we see data.
    """

    logging = getLogger()

    path = False

    try:
        with datascope.closing(datascope.dbopen(dbname, "r")) as db:
            db = db.lookup(table=tbl)

            if not db.query(datascope.dbTABLE_PRESENT):
                logging.warning("No dbTABLE_PRESENT on %s" % dbname)
                return False

            if not db.record_count:
                logging.warning("No %s.record_count" % dbname)

            path = db.query("dbTABLE_FILENAME")

    except Exception as e:
        logging.warning("Problems with db[%s]: %s" % (dbname, e))
        return False

    return path
Example #2
def _main():
    import ant_tools
    from core_tools import Locator, parse_cfg
    from antelope.datascope import closing, dbopen

    args = _parse_command_line()
    if args.pf:
        ant_tools.pf_2_cfg(args.pf, "pyloceq")
    else:
        ant_tools.pf_2_cfg("pyloceq", "pyloceq")
    cfg_dict = parse_cfg("pyloceq.cfg")
    locator = Locator(cfg_dict)
    with closing(dbopen(args.db, "r+")) as db:
        tbl_event = db.schema_tables["event"]
        if args.subset:
            view = tbl_event.join("origin")
            view = view.subset(args.subset)
            tbl_event = view.separate("event")
        for record in tbl_event.iter_record():
            evid = record.getv("evid")[0]
            view = tbl_event.subset("evid == %d" % evid)
            event_list = ant_tools.create_event_list(view)
            for event in event_list:
                origin = event.preferred_origin
                origin = locator.locate_eq(origin)
                if origin is None:
                    continue
                origin.update_predarr_times(cfg_dict)
                ant_tools.write_origin(origin, db)
    return 0
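Example #2 assumes a _parse_command_line helper that is not shown. A minimal argparse-based sketch of what it likely provides (the db, pf, and subset attributes the code reads); the option names are assumptions:

import argparse

def _parse_command_line():
    # Hypothetical reconstruction: only the attributes used above
    # (args.db, args.pf, args.subset) are known from the example.
    parser = argparse.ArgumentParser(description="Relocate events in a CSS3.0 database.")
    parser.add_argument("db", help="path to the Datascope database")
    parser.add_argument("-p", "--pf", help="parameter file to convert to pyloceq.cfg")
    parser.add_argument("-s", "--subset", help="dbsubset expression applied to the event/origin view")
    return parser.parse_args()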
Example #3
def test_table(dbname, tbl, verbose=False):
    """Verify that we can work with table.

    Args:
        dbname (string): name of the Datascope database.
        tbl (string): name of the database table.
        verbose (bool): be more verbose in output.

    Returns:
        string: path if valid and we see data.
        False: if table is invalid for any reason.
    """

    path = False

    try:
        with datascope.closing(datascope.dbopen(dbname, "r")) as db:
            db = db.lookup(table=tbl)

            if not db.query(datascope.dbTABLE_PRESENT):
                logger.warning("No dbTABLE_PRESENT on %s" % dbname)
                return False

            if not db.record_count:
                logger.warning("No %s.record_count" % dbname)

            path = db.query("dbTABLE_FILENAME")

    except Exception as e:
        logger.warning("Prolembs with db[%s]: %s" % (dbname, e))
        return False

    return path
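A short usage sketch for test_table; the logging setup and the database path are placeholders:

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

# Hypothetical database path; any CSS3.0 database and table name works.
path = test_table("/opt/antelope/data/db/demo/demo", "wfdisc")
if path:
    print("table file: %s" % path)
else:
    print("table missing, empty, or unreadable")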
Example #4
def _main():
    import ant_tools
    from core_tools import Locator, parse_cfg
    from antelope.datascope import closing, dbopen
    args = _parse_command_line()
    if args.pf: ant_tools.pf_2_cfg(args.pf, 'pyloceq')
    else: ant_tools.pf_2_cfg('pyloceq', 'pyloceq')
    cfg_dict = parse_cfg('pyloceq.cfg')
    locator = Locator(cfg_dict)
    with closing(dbopen(args.db, 'r+')) as db:
        tbl_event = db.schema_tables['event']
        if args.subset:
            view = tbl_event.join('origin')
            view = view.subset(args.subset)
            tbl_event = view.separate('event')
        for record in tbl_event.iter_record():
            evid = record.getv('evid')[0]
            view = tbl_event.subset('evid == %d' % evid)
            event_list = ant_tools.create_event_list(view)
            for event in event_list:
                origin = event.preferred_origin
                origin = locator.locate_eq(origin)
                if origin is None:
                    continue
                origin.update_predarr_times(cfg_dict)
                ant_tools.write_origin(origin, db)
    return 0
Example #5
    def _get_events(self):
        """Update all orids/evids from the database."""
        self.cache = []

        # Test if we have event table
        with datascope.closing(datascope.dbopen(self.db, "r")) as db:
            dbtable = db.lookup(table="event")
            if dbtable.query(datascope.dbTABLE_PRESENT):
                steps = ["dbopen event"]
                steps.extend(["dbjoin origin"])
                steps.extend(["dbsubset origin.orid != NULL"])
                steps.extend(["dbsubset origin.orid == prefor"])
                fields = ["evid"]
            else:
                steps = ["dbopen origin"]
                steps.extend(["dbsubset orid != NULL"])
                fields = []

        fields.extend([
            "orid", "time", "lat", "lon", "depth", "auth", "nass", "ndef",
            "review"
        ])

        for v in extract_from_db(self.db, steps, fields, self.db_subset):
            if "evid" not in v:
                v["evid"] = v["orid"]

            self.logger.debug("Events(): new event #%s" % v["evid"])

            v["allmags"] = []
            v["magnitude"] = "-"
            v["maglddate"] = 0
            v["srname"] = "-"
            v["grname"] = "-"
            v["time"] = parse_sta_time(v["time"])
            v["strtime"] = readable_time(v["time"], self.timeformat,
                                         self.timezone)

            try:
                v["srname"] = stock.srname(v["lat"], v["lon"])
            except Exception as e:
                self.logger.warning("Problems with srname for orid %s: %s" %
                                    (v["orid"], v["lat"], v["lon"], e))

            try:
                v["grname"] = stock.grname(v["lat"], v["lon"])
            except Exception as e:
                self.logger.warning("Problems with grname for orid %s: %s" %
                                    (v["orid"], v["lat"], v["lon"], e))

            orid = v["orid"]
            if orid in self.mags:
                for o in self.mags[orid]:
                    v["allmags"].append(self.mags[orid][o])
                    if self.mags[orid][o]["lddate"] > v["maglddate"]:
                        v["magnitude"] = self.mags[orid][o]["strmag"]
                        v["maglddate"] = self.mags[orid][o]["lddate"]

            self.cache.append(v)
Example #6
def make_instrument(inv,dbdir,dbnam,respdir):
    flds = ('inid','insname','instype','samprate','dir','dfile')
    
    datemin = UTCDateTime(2011,1,1)
    
    #  What if instrument table already exists?
    instabfile = dbdir+dbnam+".instrument"
    if os.path.isfile(instabfile):
        kok = 0
        while kok == 0:
            yn = raw_input("Instrument table already seems to exist. Overwrite/append/cancel? [o/a/c] ")
            if yn == "o":
                print("Ok, deleting and overwriting...")
                os.remove(instabfile)
                kok = 1
            elif yn == "a":
                print("Ok, appending...")
                kok = 1
            elif yn == "c":
                raise SystemExit("Ok, not deleting... aborting")
    
    with closing(dbopen(dbdir+dbnam,'r+')) as db:
        dbins = db.lookup(table='instrument')
        inid = dbins.record_count
        insnams_all = []
        for inn in range(0,len(inv.networks)):
            network = inv[inn]
            for iss in range(0, len(network.stations)):
                station = inv[inn][iss]
                for icc in range(0,len(station.channels)):
                    channel = inv[inn][iss][icc]
                    if (channel.code[1]=='H' or channel.code[1:3]=='DH') and channel.start_date>datemin:
                        insnam = channel.sensor.description
                        if not list(set(insnams_all) & set([insnam])):
                            inid = inid+1
                            insnams_all.append(insnam)
                            respfile = find_respfile(respdir,station.code,channel.code)
                            # if respfile is not found, write our own with write_sac_response_file
                            if not respfile:
                                respfile = write_sac_response_file(channel,
                                    "junk_respfile",network.code,station.code)

                            cprespfile = "SAC_PZs__CAdb_instr"+str(inid).zfill(2)
                            shutil.copy(respdir+respfile,respdir+cprespfile)

                            if network.code == '7D':
                                instype = 'O_'+channel.code[0:2]+"x"
                            else:
                                instype = 'L_'+channel.code[0:2]+"x"

                            print("Adding instrument "+str(inid)+", "+insnam)
                            vals = (inid,insnam,instype,channel.sample_rate,respdir,cprespfile)
                            fldvals = zip(flds, vals)
                            dbaddrec = dbins.addv(*fldvals)
Example #7
def extract_all_events(ev, qml, db_path, output_dir):
    with ds.closing(ds.dbopen(db_path, 'r')) as db:
        with ds.freeing(db.process(['dbopen event', 'dbsort evid'])) as view:
            for row in view.iter_record():
                log.info('Processing event ' +
                         ' '.join([str(row.getv(x)[0]) for x in EVENT_FIELDS]))
                event_id = row.getv(EVENT_FIELDS[0])[0]
                event_xml(event_id=event_id,
                          event=ev,
                          quakeml=qml,
                          output_file=os.path.join(output_dir, str(event_id)))
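extract_all_events here and extract_event in Example #13 both rely on EVENT_FIELDS and a log object defined elsewhere; only the fact that EVENT_FIELDS[0] is the evid is visible. A plausible sketch, with the remaining column names assumed from the CSS3.0 event table:

import logging

log = logging.getLogger(__name__)

# Assumption: evid must come first (the code indexes EVENT_FIELDS[0]);
# the other column names are guesses based on the CSS3.0 event table.
EVENT_FIELDS = ('evid', 'evname', 'prefor', 'auth', 'commid', 'lddate')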
Example #8
def _generate_tt_maps(db, write_binary=True):
    logger = logging.getLogger(sys.argv[0])
    logger.debug('Begin travel-time map generation.')
    with closing(dbopen(db, 'r')) as db:
        tbl_site = db.schema_tables['site']
        for record in tbl_site.iter_record():
            sta, lat, lon, elev = record.getv('sta', 'lat', 'lon', 'elev')
            logger.debug('Begin travel-time map generation for station %s' %
                         sta)
            _write_sources_file(0.10, lat, lon)
            os.system(tt_calculator)
            logger.debug('End travel-time map generation for station %s' % sta)
Example #9
    def _get_main_list(self):

        self.logging.debug("_get_main_list()")

        # Default is with no snetsta
        steps = ["dbopen site", "dbsort sta"]
        fields = ["sta", "ondate", "offdate", "lat", "lon", "elev", "staname", "statype", "dnorth", "deast"]

        # Test if we have snetsta table
        with datascope.closing(datascope.dbopen(self.db, "r")) as db:
            dbtable = db.lookup(table="snetsta")
            if dbtable.query(datascope.dbTABLE_PRESENT):
                steps = ["dbopen site", "dbjoin -o snetsta", "dbsort sta"]
                fields = [
                    "snet",
                    "sta",
                    "ondate",
                    "offdate",
                    "lat",
                    "lon",
                    "elev",
                    "staname",
                    "statype",
                    "dnorth",
                    "deast",
                ]

        for v in extract_from_db(self.db, steps, fields, self.db_subset):
            sta = v["sta"]
            if "snet" in v:
                snet = v["snet"]
            else:
                snet = "-"

            self.logging.debug("_get_main_list(%s_%s)" % (snet, sta))

            # Fix values of time and endtime
            v["time"] = parse_sta_date(v["ondate"], epoch=True)
            v["endtime"] = parse_sta_date(v["offdate"], epoch=True)

            # Readable times
            v["strtime"] = readable_time(v["time"], self.timeformat, self.timezone)
            v["strendtime"] = readable_time(v["endtime"], self.timeformat, self.timezone)

            # Need lat and lon with 2 decimals only
            v["latlat"] = v["lat"]
            v["lonlon"] = v["lon"]
            v["lat"] = round(v["lat"], 2)
            v["lon"] = round(v["lon"], 2)

            self._verify_cache(snet, sta, primary=True)

            self.cache[snet][sta] = v
Example #10
    def _init_db(self, db):
        """
        Initialize station list using a CSS3.0 database as input.
        """
        with closing(dbopen(db, 'r')) as db:
            tbl_site = db.schema_tables['site']
            # The following line will be taken out
            tbl_site = tbl_site.subset('lon >= -117.80 && lat >= 32.5 && lon <= -115.4456 && lat <= 34.5475')
            tbl_site = tbl_site.sort('sta', unique=True)
            for record in tbl_site.iter_record():
                sta, lat, lon, elev = record.getv('sta', 'lat', 'lon', 'elev')
                self.append(Station(sta, lat, lon, elev))
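Station is imported from elsewhere in the original project; a minimal sketch of a compatible container (an assumption):

from collections import namedtuple

# Hypothetical: anything accepting (sta, lat, lon, elev) positionally works here.
Station = namedtuple('Station', ['sta', 'lat', 'lon', 'elev'])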
Example #11
def _generate_tt_maps(db, write_binary=True):
    logger = logging.getLogger(sys.argv[0])
    logger.debug('Begin travel-time map generation.')
    with closing(dbopen(db, 'r')) as db:
        tbl_site = db.schema_tables['site']
        for record in tbl_site.iter_record():
            sta, lat, lon, elev = record.getv('sta', 'lat', 'lon', 'elev')
            logger.debug('Begin travel-time map generation for station %s'
                % sta)
            _write_sources_file(0.10, lat, lon)
            os.system(tt_calculator)
            logger.debug('End travel-time map generation for station %s'
                % sta)
Example #12
    def _get_events(self):
        """
        Read all orids/evids from the database and update
        local dict with the info.
        """
        self.cache = []

        # Test if we have event table
        with datascope.closing(datascope.dbopen(self.db, 'r')) as db:
            dbtable = db.lookup(table='event')
            if dbtable.query(datascope.dbTABLE_PRESENT):
                steps = ['dbopen event']
                steps.extend(['dbjoin origin'])
                steps.extend(['dbsubset origin.orid != NULL'])
                steps.extend(['dbsubset origin.orid == prefor'])
                fields = ['evid']
            else:
                steps = ['dbopen origin']
                steps.extend(['dbsubset orid != NULL'])
                fields = []

        fields.extend(['orid','time','lat','lon','depth','auth','nass',
                'ndef','review'])

        for v in extract_from_db(self.db, steps, fields, self.db_subset):
            if 'evid' not in v:
                v['evid'] = v['orid']

            self.logging.debug( "Events(): new event #%s" % v['evid'] )

            v['allmags'] = []
            v['magnitude'] = '-'
            v['maglddate'] = 0
            v['srname'] = '-'
            v['grname'] = '-'
            v['time'] = parse_sta_time(v['time'])
            v['strtime'] = readable_time(v['time'], self.timeformat, self.timezone)

            try:
                v['srname'] = stock.srname(v['lat'],v['lon'])
            except Exception as e:
                self.logging.warning('Problems with srname for orid %s (%s, %s): %s' % (v['orid'],
                        v['lat'], v['lon'], e))

            try:
                v['grname'] = stock.grname(v['lat'],v['lon'])
            except Exception as e:
                self.logging.warning('Problems with grname for orid %s (%s, %s): %s' % (v['orid'],
                        v['lat'], v['lon'], e))
Example #13
def extract_event(db_path, ev_file):
    """
    :param db_path: database location 
    :param ev_file: events file name, csv file
    
    """
    ev_file = ev_file if ev_file else DEFAULT_EVENT_FILE
    with ds.closing(ds.dbopen(db_path, 'r')) as db:
        with ds.freeing(db.process(['dbopen event', 'dbsort evid'])) as view:
            with open(ev_file, 'w') as csv_file:
                writer = csv.writer(csv_file, delimiter=',')
                csv_file.write(','.join(EVENT_FIELDS) + '\n')
                for row in view.iter_record():
                    writer.writerow([str(row.getv(x)[0]) for x in
                                     EVENT_FIELDS])
Example #14
    def _init_from_db(self, db, evid):
        """
        Initialize Event object using a CSS3.0 database as input.
        """
        if evid is None:
            raise Exception("No 'evid' supplied. Could not "
                            "initialize Event object from CSS3.0 database.")
        with closing(dbopen(db, 'r')) as db:
            view = db.schema_tables['event']
            view = view.join('origin')
            view = view.subset('evid == %s' % evid)
            view = view.subset('orid == prefor')
            # If for some reason this subset is empty, just take the first
            # solution as preferred, e.g. when the prefor field is uninitialized.
            if view.record_count == 0:
                view = db.schema_tables['event']
                view = view.join('origin')
                view = view.subset('evid == %s' % evid)
            view = view.join('netmag', outer=True)
            view.record = 0
            evid, time, lat, lon, depth, mag, magtype = view.getv('evid',
                'time', 'lat', 'lon', 'depth', 'magnitude', 'magtype')
            self.evid       = evid
            self.time       = time
            self.lat        = lat
            self.lon        = lon
            self.depth      = depth
            self.mag        = mag
            self.magtype    = magtype
            self.year       = int(epoch2str(time, '%Y'))
            self.month      = int(epoch2str(time, '%m'))
            self.day        = int(epoch2str(time, '%d'))
            self.hour       = int(epoch2str(time, '%H'))
            self.minute     = int(epoch2str(time, '%M'))
            self.second     = float(epoch2str(time, '%S.%s'))
            view = view.join('assoc')
            view = view.join('arrival')
            arrivals = [record.getv('sta', 'arrival.time', 'phase') + (None,)
                        for record in view.iter_record()]
            self.arrivals = [Phase(sta, time, phase, qual)
                             for sta, time, phase, qual in arrivals]
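The Phase objects built at the end are defined elsewhere; a minimal compatible sketch (field names assumed from the call site):

class Phase(object):
    """Hypothetical stand-in for the Phase container used above."""
    def __init__(self, sta, time, phase, qual):
        self.sta, self.time, self.phase, self.qual = sta, time, phase, qual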
Example #15
    def _get_main_list(self):

        self.logging.debug( "_get_main_list()" )

        # Default is with no snetsta
        steps = [ 'dbopen site', 'dbsort sta']
        fields = ['sta','ondate','offdate','lat','lon','elev','staname','statype',
                'dnorth','deast']

        # Test if we have snetsta table
        with datascope.closing(datascope.dbopen(self.db, 'r')) as db:
            dbtable = db.lookup(table='snetsta')
            if dbtable.query(datascope.dbTABLE_PRESENT):
                steps = [ 'dbopen site', 'dbjoin -o snetsta', 'dbsort sta']
                fields = ['snet','sta','ondate','offdate','lat','lon','elev','staname','statype',
                        'dnorth','deast']

        for v in extract_from_db(self.db, steps, fields, self.db_subset):
            sta = v['sta']
            if 'snet' in v:
                snet = v['snet']
            else:
                snet = '-'

            self.logging.debug( "_get_main_list(%s_%s)" % (snet,sta) )

            # Fix values of time and endtime
            v['time'] = parse_sta_date( v['ondate'],epoch=True )
            v['endtime'] = parse_sta_date( v['offdate'],epoch=True )

            # Readable times
            v['strtime'] = readable_time(v['time'], self.timeformat, self.timezone)
            v['strendtime'] = readable_time(v['endtime'], self.timeformat, self.timezone)


            # Need lat and lon with 2 decimals only
            v['latlat'] = v['lat']
            v['lonlon'] = v['lon']
            v['lat'] = round(v['lat'],2)
            v['lon'] = round(v['lon'],2)


            self._verify_cache(snet,sta,primary=True)

            self.cache[snet][sta] = v
Example #16
def store(net, sta, ondate, lon, lat, elev):
    lddate = datetime.now()
    row = zip(
        fields,
        [
            sta,
            ondate.strftime("%Y%j"), lat, lon, elev / 1000.0,
            convtime(lddate)
        ],
    )
    db = dbopen(dbpath, "r+")
    with closing(db):
        snetsta_table = db.lookup(table="snetsta")
        snetsta_view = snetsta_table.subset("sta == '{}'".format(sta))
        log.debug("snetsta_view %s", snetsta_view)
        with freeing(snetsta_view):
            try:
                rowptr = next(snetsta_view.iter_record())
            except StopIteration:
                snetsta_table.addv(
                    *zip(snetsta_fields,
                         [net, sta, sta, convtime(lddate)]))
                log.info("added snetsta record")

        site_table = db.lookup(table="site")
        site_view = site_table.subset("sta == '{}'".format(sta))
        log.debug("site_view %s", site_view)
        with freeing(site_view):
            try:
                rowptr = next(site_view.iter_record())
            except StopIteration:
                site_table.addv(*row)
                log.info("added record %s", row)
            else:
                log.debug("rowptr %s", rowptr)
                old_row = dict(zip(fields, rowptr.getv(*fields)))
                if float(convtime(lddate)) > float(old_row["lddate"]):
                    rowptr.putv(*row)
                log.info("updated record %s %s", old_row, row)
                return old_row
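store() depends on module-level names (fields, snetsta_fields, dbpath, convtime, log) outside this snippet. A hedged usage sketch with made-up values:

from datetime import datetime

# Placeholder network/station codes, coordinates, and elevation in meters.
old = store("AK", "TEST", datetime(2020, 1, 1), -149.9, 61.2, 100.0)
if old:
    print("replaced older site row:", old)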
Example #17
def extract_from_db(db, steps, fields, subset=""):
    logging = getLogger()

    if subset:
        steps.extend(["dbsubset %s" % subset])

    logging.debug("Extract from db: " + ", ".join(steps))

    results = []

    with datascope.closing(datascope.dbopen(db, "r")) as dbview:
        dbview = dbview.process(steps)
        logging.debug("Records in new view: %s" % dbview.record_count)

        if not dbview.record_count:
            logging.warning("No records after deployment-site join %s" % dbview.query(datascope.dbDATABASE_NAME))
            return None

        for temp in dbview.iter_record():
            results.append(dict(zip(fields, temp.getv(*fields))))

    return results
Example #18
def verify_db(db):
    """Verify a Datascope database can be opened."""

    logger.debug("Verify database: [%s]" % (db))

    name = False

    if isinstance(db, str):
        with datascope.closing(datascope.dbopen(db, "r")) as pointer:

            if pointer.query(datascope.dbDATABASE_COUNT):
                logger.debug(pointer.query(datascope.dbDATABASE_NAME))
                name = pointer.query(datascope.dbDATABASE_NAME)
                logger.info("%s => valid" % name)

            else:
                logger.warning("PROBLEMS OPENING DB: %s" % db)

    else:
        logger.error("Not a valid parameter for db: [%s]" % db)

    return name
Example #19
def verify_db(db):
    logging = getLogger()

    logging.debug("Verify database: [%s]" % (db))

    name = False

    if isinstance(db, str):
        with datascope.closing(datascope.dbopen(db, "r")) as pointer:

            if pointer.query(datascope.dbDATABASE_COUNT):
                logging.debug(pointer.query(datascope.dbDATABASE_NAME))
                name = pointer.query(datascope.dbDATABASE_NAME)
                logging.info("%s => valid" % name)

            else:
                logging.warning("PROBLEMS OPENING DB: %s" % db)

    else:
        logging.error("Not a valid parameter for db: [%s]" % db)

    return name
Example #20
def extract_from_db(db, steps, fields, subset=""):
    """Retrieve data from a datascope database."""
    logger = getLogger()

    if subset:
        steps.extend(["dbsubset %s" % subset])

    logger.debug("Extract from db: " + ", ".join(steps))

    results = []

    with datascope.closing(datascope.dbopen(db, "r")) as dbview:
        dbview = dbview.process(steps)
        logger.debug("Records in new view: %s" % dbview.record_count)

        if not dbview.record_count:
            logger.warning("No records after deployment-site join %s" %
                           dbview.query(datascope.dbDATABASE_NAME))
            return None

        for temp in dbview.iter_record():
            results.append(dict(zip(fields, temp.getv(*fields))))

    return results
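A usage sketch for extract_from_db, with steps and fields of the same shape as the station examples; the database path and subset expression are placeholders:

steps = ["dbopen site", "dbsort sta"]
fields = ["sta", "ondate", "offdate", "lat", "lon", "elev"]

# Hypothetical database path and subset expression.
rows = extract_from_db("/path/to/stations/db", steps, fields, subset="lat > 60")
for row in rows or []:
    print(row["sta"], row["lat"], row["lon"])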
Example #21
def make_sitechan_sensor(invfile):
	"""This subroutine reads an obspy inventory and creates sitechan and sensor tables."""
	
	dbdir = "/Users/zeilon/Work/CASCADIA/CAdb/" # needs final slash
	dbnam= "cascattendb"
	respdir = "/Users/zeilon/Work/CASCADIA/CAdb/response/" # needs final slash
	datemin = UTCDateTime(2011,1,1)

	print("Reading inventory file %s") % invfile
	from obspy import read_inventory
	inv = read_inventory(invfile)

	print "Writing .sitechan + .sensor tables"
	flds_sch = ('sta','chan','ondate','offdate','chanid','hang','vang','descrip')
	flds_sen = ('sta','chan','time','endtime','inid','chanid')

	# What if sitechan table already exists?
	sitechantabfile = dbdir+dbnam+".sitechan"
	if os.path.isfile(sitechantabfile):
		kok = 0
		while kok == 0:
			yn = raw_input("sitechan table already seems to exist. Overwrite/append/cancel? [o/a/c] ")
			if yn == "o":
				print("Ok, deleting and overwriting...")
				os.remove(sitechantabfile)
				kok = 1
			elif yn == "a":
				print("Ok, appending...")
				kok = 1
			elif yn == "c":
				raise SystemExit("Ok, not deleting... aborting")
	# What if sensor table already exists?
	sensortabfile = dbdir+dbnam+".sensor"
	if os.path.isfile(sensortabfile):
		kok = 0
		while kok == 0:
			yn = raw_input("sensor table already seems to exist. Overwrite/append/cancel? [o/a/c] ")
			if yn == "o":
				print("Ok, deleting and overwriting...")
				os.remove(sensortabfile)
				kok = 1
			elif yn == "a":
				print("Ok, appending...")
				kok = 1
			elif yn == "c":
				raise SystemExit("Ok, not deleting... aborting")

	with closing(dbopen(dbdir+dbnam,'r+')) as db:
		dbsch = db.lookup(table = 'sitechan')
		dbsen = db.lookup(table = 'sensor')
		dbins = db.lookup(table = 'instrument')

		for inn in range(0,len(inv.networks)):
			network = inv[inn]
			for iss in range(0, len(network.stations)):
				station = inv[inn][iss]
				for icc in range(0,len(station.channels)):
					channel = inv[inn][iss][icc]
					if (channel.code[1]=='H' or channel.code[1:3]=='DH') and channel.start_date>datemin:
						insnam = channel.sensor.description
						try:
							dbins.record = dbins.find('insname == "%s"' %insnam)
							inid = dbins.getv('inid')
						except DbfindEnd:
							print("No such instrument, sorry =======================================")
							inid = []
						sens_on_date = utc2epoch(station.start_date)
						sens_off_date = utc2epoch(station.end_date)
				
						chan_on_date = channel.start_date.year*1000 + channel.start_date.julday
						chan_off_date = channel.end_date.year*1000 + channel.end_date.julday
						chanid = dbsch.nextid('chanid')

						# flds_sch = ('sta','chan','ondate','offdate','chanid','hang','vang','descrip')
						# flds_sen = ('sta','chan','time','endtime','inid','chanid')
						vals_sch = (station.code,channel.code,chan_on_date,chan_off_date,chanid,\
									float(channel.azimuth),float(channel.dip)+90,\
									find_respfile(respdir,station.code,channel.code))
						vals_sen = (station.code,channel.code,sens_on_date,sens_off_date,inid[0],chanid)
						fldvals_sch = zip(flds_sch, vals_sch)
						fldvals_sen = zip(flds_sen, vals_sen)

						try:
							print("chanid=%.0f, inid=%.0f, %s, %s, %s, %s" %\
									  (chanid,inid[0],station.code,channel.code,chan_on_date,chan_off_date))
							dbaddrec = dbsch.addv(*fldvals_sch)
						except DbaddvError:
							chanid = chanid - 1
							print "Error with adding this row to sitechan..."
							try:
								ifrep = dbsch.find('sta=="%s" && chan=="%s"' % (station.code,channel.code))
								print "Skipping repeated station+chan %s, %s" % (station.code,channel.code)
							except DbfindEnd:
								pdb.set_trace()
								raise SystemExit("Something wrong - won't add sta,\
														but not already in there")
						try:
							dbaddrec = dbsen.addv(*fldvals_sen)
						except DbaddvError:
							chanid = chanid - 1
							print "Error with adding this row to sensor..."
							try:
								ifrep = dbsen.find('sta=="%s" && chan=="%s"' % (station.code,channel.code))
								print "Skipping repeated station+chan %s, %s" % (station.code,channel.code)
							except DbfindEnd:
								pdb.set_trace()
								raise SystemExit("Something wrong - won't add sta,\
														but not already in there")
	return
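Examples #21, #26, and #28 all call a utc2epoch helper that never appears. obspy's UTCDateTime already exposes epoch seconds as a float, so a one-line implementation is plausible (an assumption about the original helper):

def utc2epoch(utc):
    # UTCDateTime.timestamp holds seconds since the Unix epoch as a float.
    return utc.timestamp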
Example #22
    def _get_main_list(self):

        self.logger.debug("_get_main_list()")

        # Default is with no snetsta
        steps = ["dbopen site", "dbsort sta"]
        fields = [
            "sta",
            "ondate",
            "offdate",
            "lat",
            "lon",
            "elev",
            "staname",
            "statype",
            "dnorth",
            "deast",
        ]

        # Test if we have snetsta table
        with datascope.closing(datascope.dbopen(self.db, "r")) as db:
            dbtable = db.lookup(table="snetsta")
            if dbtable.query(datascope.dbTABLE_PRESENT):
                steps = ["dbopen site", "dbjoin -o snetsta", "dbsort sta"]
                fields = [
                    "snet",
                    "sta",
                    "ondate",
                    "offdate",
                    "lat",
                    "lon",
                    "elev",
                    "staname",
                    "statype",
                    "dnorth",
                    "deast",
                ]

        for v in extract_from_db(self.db, steps, fields, self.db_subset):
            sta = v["sta"]
            if "snet" in v:
                snet = v["snet"]
            else:
                snet = "-"

            self.logger.debug("_get_main_list(%s_%s)" % (snet, sta))

            # Fix values of time and endtime
            v["time"] = parse_sta_date(v["ondate"], epoch=True)
            v["endtime"] = parse_sta_date(v["offdate"], epoch=True)

            # Readable times
            v["strtime"] = readable_time(v["time"], self.timeformat, self.timezone)
            v["strendtime"] = readable_time(
                v["endtime"], self.timeformat, self.timezone
            )

            # Need lat and lon with 2 decimals only
            v["latlat"] = v["lat"]
            v["lonlon"] = v["lon"]
            v["lat"] = round(v["lat"], 2)
            v["lon"] = round(v["lon"], 2)

            self._verify_cache(snet, sta, primary=True)

            self.cache[snet][sta] = v
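Examples #9, #15, and #22 call self._verify_cache before assigning into the nested cache; the helper is not shown. A plausible sketch (purely an assumption):

    def _verify_cache(self, snet, sta, primary=False):
        # Hypothetical: make sure the nested dict for this network exists
        # before self.cache[snet][sta] is assigned.
        if snet not in self.cache:
            self.cache[snet] = {}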
    """
    Verify that we can work with table.
    Returns path if valid and we see data.
    """

    try:
        import antelope.elog as elog
        import antelope.stock as stock
        import antelope.datascope as datascope
    except Exception as e:
        raise sta2jsonException( 'Problems loading Antelope libs: %s' % e )

    path = False

    try:
        with datascope.closing(datascope.dbopen( dbname , 'r' )) as db:
            db = db.lookup( table=tbl )

            if not db.query(datascope.dbTABLE_PRESENT):
                if verbose: elog.complain( 'No dbTABLE_PRESENT on %s' % dbname )
                return False

            if not db.record_count:
                if verbose: elog.complain( 'No %s.record_count' % dbname )
                return False

            path = db.query('dbTABLE_FILENAME')
    except Exception as e:
        elog.complain("Problems with db[%s]: %s" % (dbname, e))
        return False

    return path
Example #24
def main():

    ##########
    # Extract station list from master_stations
    ##########

    stations = []
    with ds.closing(ds.dbopen("/aerun/sum/db/dbsum/dbsum", "r")) as db:
        steps = ["dbopen affiliation", "dbjoin site", "dbsubset net=~/AK/ && offdate==NULL", "dbsort sta"]
        with ds.freeing(db.process(steps)) as dbview:
            for record in dbview.iter_record():
                stations.append(record.getv('sta')[0])
    
 
    
    ##########
    # Extract waveform data into trace objects
    ##########
    
    twin = 600
    tcurrent = float(stock.str2epoch('now'))
    tend = tcurrent - 60
    tstart = tend - twin
    
    f = '%Y_%m_%d'
    date = stock.epoch2str(tstart,f)
    
    with ds.closing(ds.dbopen("/aerun/op/run/db/archive_%s" % date, "r")) as db:
        for sta in stations:
            data = {}
            samplerate = {}
            dbsta = db.lookup(table = 'wfdisc')
            dbsta = dbsta.subset('sta=~/%s/' % sta )
            #######
            # Extract broadband (BH), moderate sample rate strong motion (BN),
            # and high sample rate strong motion (HN) data for this station
            #######
            for prefix in ('BH', 'BN', 'HN'):
                group = dbsta.subset('chan=~/%s./' % prefix)
                if group.query('dbRECORD_COUNT') > 0:
                    for comp in ('E', 'N', 'Z'):
                        chan = prefix + comp
                        tr = dbsta.trloadchan(tstart, tend, "%s" % sta, chan)
                        tr.trapply_calib()
                        with ds.trfreeing(tr):
                            if tr.query("dbRECORD_COUNT") > 0:
                                tr.record = 0
                                data[chan] = tr.trdata()
                                samplerate[chan] = tr.getv("samprate")
                        
#            if sta=="MLY":
#                plot_traces(sta, data, tstart, tend, samplerate)


#            shortperz = sbsta.subset('chan=~/EHZ/')
#            if smot.query('dbRECORD_COUNT') > 0:
#                tr_ehz = dbsta.trloadchan(tstart, tcurrent, "%s" % sta, "EHZ")
                
            print(sta)
            plot_traces(sta, data, tstart, tend, samplerate)
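plot_traces is undefined in Example #24. A hedged matplotlib sketch of a compatible plotter; note that the samplerate values stored above are 1-tuples because getv always returns a tuple:

import numpy as np
import matplotlib.pyplot as plt

def plot_traces(sta, data, tstart, tend, samplerate):
    # Hypothetical plotter for the data/samplerate dicts built above.
    fig, axes = plt.subplots(max(len(data), 1), 1, sharex=True, squeeze=False)
    for ax, chan in zip(axes[:, 0], sorted(data)):
        sps = samplerate[chan][0]  # getv() returns a tuple
        samples = np.asarray(data[chan])
        t = tstart + np.arange(len(samples)) / float(sps)
        ax.plot(t, samples, linewidth=0.5)
        ax.set_ylabel(chan)
    axes[-1, 0].set_xlabel("epoch time (s)")
    fig.suptitle(sta)
    plt.show()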
Example #25
import sys

import misc_tools
from mtools import *
from numpy import arange,asarray
from antelope.stock import pfread, pfin
from antelope.datascope import closing, dbopen
params=pfin('eqloc3d.pf')
loc_params = params['location_parameters']
#These should go in parameter file.
#nr = int(loc_params['nr'])
#nlat = int(loc_params['nlat'])
#nlon = int(loc_params['nlon'])
#nx, ny, nz = nlon, nlat, nr
earth_rad=6371

#Load events
print 'Reading db'
with closing(dbopen('/Users/mcwhite/staging/dbs/anza_sub/anza')) as db:
    tbl_event = db.schema_tables['event']
    tbl_event = tbl_event.join('origin')
    tbl_event = tbl_event.subset('time >= _2013319 00:00:00_')
    tbl_event = tbl_event.separate('event')
    event_list = misc_tools.create_event_list(tbl_event, 'CSS3.0')
print 'Done reading db'

for ev in event_list:
    origin = ev.preferred_origin
    if origin.lon<-117 or origin.lon>-116 or origin.lat<33.0 or origin.lat>34.0:
        continue
    misc_tools.locate_eq(origin)
    sys.exit()
Example #26
def make_origin():
	"""This subroutine contains the workflow to select a bunch of event parameters and then use the IRIS request tools to build a catalogue of events that satisfy the parameters. Then the catalogue is exported to the Antelope origin table format."""
	print "\n\n THE EVENT PARAMETERS ARE DEFINED IN THE MAKE_ORIGIN SUBROUTINE. EDIT THERE\n\n"
# 	DEFINE EVENT PARAMETERS (or leave empty for unconstrained)
	dbdir = "/Users/zeilon/Work/CASCADIA/CAdb/" # needs final slash
	dbnam= "cascattendb"

	deplims = [    0 ,1000 ] # [mindep, maxdep] ... in km

	maglims = [  6.5 , 10 ]  # [minmag, maxmag]

	starttime = "2006-01-01" # "yyyy-mm-dd"
	endtime   = "2010-12-31" # "yyyy-mm-dd"

	# geographic search params - leave either blank to ignore. Will fail if both filled in.

	# 						SEARCH IN BOX
	latlims = [   ] # [minlat, maxlat]
	lonlims = [   ] # [minlon, maxlon]
	# 	   --------------------------OR--------------------------
	# 					SEARCH BY DISTANCE
	refpt  = [45, -120] # [ref_lat, ref_lon] 
	gcarclims = [ 30, 135 ]  # [min_gcarc, max_gcarc] ... in degrees
#  ================== NO NEED TO ALTER ANYTHING BELOW THIS LINE ================== # 
			
# 	parse conditions to right format
	starttime =  UTCDateTime(starttime)
	endtime = UTCDateTime(endtime)
	
	magmin = np.float(maglims[0])
	magmax = np.float(maglims[1])
	depmin = np.float(deplims[0])
	depmax = np.float(deplims[1])

	# if distance search prefs empty, must be a BOX search
	if (not refpt) or (not gcarclims):
		print "No distance limits"
		refla = None
		reflo = None
		gcmin = None
		gcmax = None
	else:    
		refla = np.float(refpt[0])
		reflo = np.float(refpt[1])
		gcmin = np.float(gcarclims[0])
		gcmax = np.float(gcarclims[1])

	# if box search prefs empty, must be a DISTANCE search
	if (not latlims) or (not lonlims):
		print "No box limits"
		latmin = None
		latmax = None
		lonmin = None
		lonmax = None
	else:    
		latmin = np.float(latlims[0])
		latmax = np.float(latlims[1])
		lonmin = np.float(lonlims[0])
		lonmax = np.float(lonlims[1])

	# Get the catalogue
	cat = client.get_events(starttime=starttime,endtime=endtime,\
		minmagnitude=magmin,maxmagnitude=magmax,\
		mindepth=depmin,maxdepth=depmax,\
		minlatitude=latmin,maxlatitude=latmax,\
		minlongitude=lonmin,maxlongitude=lonmax,\
		latitude=refla,longitude=reflo,minradius=gcmin,maxradius=gcmax,\
		orderby="time-asc",catalog="GCMT")
	print(cat) 

	nevts = len(cat)
	
# 	REMOVE ANY OVERLAPPING EVENTS
# 	Remove any events from the catalog that are < 2000s after or 1000s before another event

	evtimes = np.zeros([nevts,1])

	for ie in range(0,nevts):
		evt =  cat.events[ie]
		elat = evt.origins[0].latitude
		elon = evt.origins[0].longitude
		edep = evt.origins[0].depth/1000
		evtime = evt.origins[0].time
		emag = evt.magnitudes[0].mag
		emagt = evt.magnitudes[0].magnitude_type
		print "%s=%.2f event at [%6.2f, %7.2f] and %5.1f km depth on %s " % \
			(emagt, emag, elat, elon, edep, evtime)
		evtimes[ie] = evtime
	kill = []
	for ie in range(0,nevts):
		if ( ( (evtimes[ie]-evtimes)<2000 ) & ( (evtimes[ie]-evtimes)>0 ) ).any():
			print("One recently before:")
			print(UTCDateTime(evtimes[ie]))
			print(UTCDateTime(evtimes[ie+1]))
			kill.append(ie)
		elif ( ( (evtimes[ie]-evtimes)>-1000 ) & ( (evtimes[ie]-evtimes)<0 ) ).any():
			print("One soon after:")
			print(UTCDateTime(evtimes[ie]))
			print(UTCDateTime(evtimes[ie-1]))
			kill.append(ie)
		
	evts = cat.events[:]
	for ik in range(0,len(kill)):
		evt_to_kill = evts[kill[ik]]
		print "Removing event %s" % (UTCDateTime(evt_to_kill.origins[0].time))
		cat.events.remove(evt_to_kill)

	print "%.0f events removed becauss of overlapping data... plotting" % nevts-len(cat)
	nevts = len(cat)
# 	cat.plot();
	
	ifwrite = raw_input("Write %.0f events to origin table? [y/n]" % nevts)
	if ifwrite=="y":
	# 	What if origin table already exists
		origintabfile = dbdir+dbnam+".origin"
		if os.path.isfile(origintabfile):
			kok = 0
			while kok == 0:
				yn = raw_input("Origin table already seems to exist. Overwrite/append/cancel? [o/a/c] ")
				if yn == "o":
					print("Ok, deleting and overwriting...")
					os.remove(origintabfile)
					kok = 1
				elif yn == "a":
					print("Ok, appending...")
					kok = 1
				elif yn == "c":
					raise SystemExit("Ok, not deleting... aborting")

	# 	Add rows to origin table
		with closing(dbopen(dbdir+dbnam,'r+')) as db:
			dbor = db.lookup(table = 'origin')
			nrecs = dbor.record_count
			dbor.record=dbor.record_count # set current record to last row to append
			for ie in range(0,nevts):
				evt =  cat.events[ie]
				elat = evt.origins[0].latitude
				elon = evt.origins[0].longitude
				edep = evt.origins[0].depth/1000
				evtime=evt.origins[0].time
				emag = evt.magnitudes[0].mag
				emagt = evt.magnitudes[0].magnitude_type
				print "Event",nrecs+ie+1,"  ",elat,elon,edep,emag,utc2epoch(evtime)
				flds = ('orid','lat','lon','depth','time','ms','nass','ndef' )
				vals = ( nrecs+ie+1,elat,elon,edep,utc2epoch(evtime),emag,0,0)
				fldvals = zip(flds, vals)
				try:
					dbaddrec = dbor.addv(*fldvals)		
				except DbaddvError:
					print "Error with adding this row..."
					try:
						ifrep = dbor.find('orid==%d' % (nrecs+ie+1))
						print "Skipping repeated orid %d" % (nrecs+ie+1)
					except DbfindEnd:
						pdb.set_trace()
						raise SystemExit("Something wrong - won't add orid,but not already in there")
		
	return 
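make_origin uses a module-level client that the example never constructs; with obspy this is typically an FDSN web service client (an assumption, since the GCMT catalog is requested through catalog=):

from obspy.clients.fdsn import Client

# Assumed module-level client consumed by make_origin().
client = Client("IRIS")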
Example #27
    def phases(self, min, max):
        """Retrieve all arrival phases for an event."""

        self.logger.debug("Events():phases(%s,%s) " % (min, max))

        phases = defaultdict(lambda: defaultdict(dict))

        assoc = False

        dbname = self.dbcentral(min)

        self.logger.debug("Events():phases(%s,%s) db:(%s)" %
                          (min, max, dbname))

        if not dbname:
            return phases

        open_dbviews = []
        with datascope.closing(datascope.dbcreate(dbname, "r")) as db:
            with datascope.freeing(db.lookup(table="arrival")) as db_arrivals:
                try:
                    db_arrival_assoc = db_arrivals.join("assoc")
                    open_dbviews.append(db_arrival_assoc)
                    dbv = db_arrival_assoc
                    assoc = True
                except datascope.DatascopeException:
                    dbv = db_arrivals

                # This "try/finally" block is to emulate a context manager for a successful join with the assoc table.
                try:
                    nrecs = dbv.query(datascope.dbRECORD_COUNT)

                    if not nrecs:
                        return dict(phases)

                    try:
                        dbv = dbv.subset("%s <= time && time <= %s" %
                                         (float(min), float(max)))
                        nrecs = dbv.query(datascope.dbRECORD_COUNT)
                    except datascope.DatascopeException:
                        nrecs = 0

                    if not nrecs:
                        return dict(phases)

                    for p in range(nrecs):
                        dbv.record = p

                        if assoc:
                            phase_field = "phase"
                        else:
                            phase_field = "iphase"

                        Sta, Chan, ArrTime, Phase = dbv.getv(
                            "sta", "chan", "time", phase_field)
                        StaChan = Sta + "_" + Chan
                        phases[StaChan][ArrTime] = Phase

                        self.logger.debug("Phases(%s):%s" % (StaChan, Phase))
                finally:
                    for view in open_dbviews:
                        view.close()

        self.logger.debug("Events: phases(): t1=%s t2=%s [%s]" %
                          (min, max, phases))

        return dict(phases)
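A usage sketch for phases(); the instance name and epoch window are placeholders:

t0, t1 = 1262304000.0, 1262304600.0  # placeholder ten-minute epoch window
picks = events.phases(t0, t1)        # 'events' is an instance of this class
for stachan, arrivals in picks.items():
    for arrtime, phase in arrivals.items():
        print(stachan, arrtime, phase)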
Example #28
def make_sitechan_sensor_old(invfile):
	"""This subroutine reads an obspy inventory and creates sitechan and sensor tables."""
	
	dbdir = "/Users/zeilon/Work/CASCADIA/CAdb/" # needs final slash
	dbnam= "cascattendb"
	respdir = "/Users/zeilon/Work/CASCADIA/CAdb/response/" # needs final slash

	print("Reading inventory file %s") % invfile
	from obspy import read_inventory
	inv = read_inventory(invfile)

	#Check if instrument table exists
	with closing(dbopen(dbdir+dbnam,'r')) as db:
		dbins = db.lookup(table='instrument')
		insrecs = dbins.record_count
	if not (insrecs > 0):
		print("No instrument table yet... writing one")
		make_instrument(inv,dbdir,dbnam,respdir)
	
	print "Writing .sitechan + .sensor tables"
	flds_sitechan = ('sta','chan','ondate','offdate','chanid','hang','vang','description')
	flds_sensor = ('sta','chan','time','endtime','inid','chanid')
			
	ifwrite = raw_input("Write %.0f stations to origin table? [y/n] " % count_stas(inv))
	if ifwrite=="y":
		# 	What if site table already exists
		sitetabfile = dbdir+dbnam+".site"
		if os.path.isfile(sitetabfile):
			kok = 0
			while kok == 0:
				yn = raw_input("Site table already seems to exist. Overwrite/append/cancel? [o/a/c] ")
				if yn == "o":
					print("Ok, deleting and overwriting...")
					os.remove(sitetabfile)
					kok = 1
				elif yn == "a":
					print("Ok, appending...")
					kok = 1
				elif yn == "c":
					raise SystemExit("Ok, not deleting... aborting")

	
		with closing(dbopen(dbdir+dbnam,'r+')) as db:
			dbsi = db.lookup(table = 'site')
	
			ista = 0
			for inn in range(0,len(inv.networks)):
				network = inv.networks[inn].code.encode()
				print "\n ====Network %s" % network
				for iss in range(0,len(inv.networks[inn].stations)):
					station = inv.networks[inn].stations[iss]
					sta = station.code.encode()
					on_date = station.start_date.year*1000 + station.start_date.julday
					off_date = station.end_date.year*1000 + station.end_date.julday
					vals_site = (sta,on_date,off_date,\
							station.latitude,station.longitude,station.elevation)
					fldvals_site = zip(flds_site, vals_site)
					print "--------Station %5s %5.2f %7.2f "\
									% (sta,station.latitude,station.longitude)
					try:
						dbaddrec = dbsi.addv(*fldvals_site)		
					except DbaddvError:
						print "Error with adding this row..."
						try:
							ifrep = dbsi.find('sta=="'+sta+'"')
							print "Skipping repeated station %s" % sta
						except DbfindEnd:
							pdb.set_trace()
							raise SystemExit("Something wrong - won't add sta,\
							 						but not already in there")
			
	return
Example #29
import os
import sys

# add the antelope python package to the pythonpath
sys.path.append( os.environ['ANTELOPE'] + '/data/python' )


import antelope.datascope as datascope
from antelope.datascope import dbopen
from antelope.datascope import closing
from antelope.datascope import freeing
import pickle
import csv

os.chdir('/auto/proj/Cascadia/MERGED') #location of the antelope database

database_name='cascadia' #antelope database name

with closing(dbopen(database_name,'r')) as db:

    # open the origin table 
    db_origin_raw=db.lookup(table = 'origin') #has null records (-1)
    db_origin=db_origin_raw.subset('orid > -1') #no longer has null records; should be able to crunch but can't

    # open the event table
    db_event_raw=db.lookup(table = 'event') #has null records
    db_event=db_event_raw.subset('evid > -1') #no longer has null records


    print('fields in db_event are: ' + str(db_event_raw.query(datascope.dbTABLE_FIELDS)))
    print('fields in db_origin are: ' + str(db_origin_raw.query(datascope.dbTABLE_FIELDS)))
    
    #join event and origin
    db_origin_event=db_origin.join(db_event)
Example #30
def make_site(invfile):
	"""This subroutine reads an obspy inventory and creates site table."""
	
	dbdir = "/Users/zeilon/Work/CASCADIA/CAdb/" # needs final slash
	dbnam= "cascattendb"

	
	print("Reading inventory file %s") % invfile
	from obspy import read_inventory
	inv = read_inventory(invfile)
	
	print "Writing .site table"
	flds_site = ('sta','ondate','offdate','lat','lon','elev','staname','statype','refsta')
	# will store network in the 'refsta' field.	 
			
	ifwrite = raw_input("Write %.0f stations to site table? [y/n] " % count_stas(inv))
	if ifwrite=="y":
		# 	What if site table already exists
		sitetabfile = dbdir+dbnam+".site"
		if os.path.isfile(sitetabfile):
			kok = 0
			while kok == 0:
				yn = raw_input("Site table already seems to exist. Overwrite/append/cancel? [o/a/c] ")
				if yn == "o":
					print("Ok, deleting and overwriting...")
					os.remove(sitetabfile)
					kok = 1
				elif yn == "a":
					print("Ok, appending...")
					kok = 1
				elif yn == "c":
					raise SystemExit("Ok, not deleting... aborting")

	
		with closing(dbopen(dbdir+dbnam,'r+')) as db:
			dbsi = db.lookup(table = 'site')
	
			ista = 0
			for inn in range(0,len(inv.networks)):
				network = inv.networks[inn].code.encode()
				print "\n ====Network %s" % network
				for iss in range(0,len(inv.networks[inn].stations)):
					
					station = inv.networks[inn].stations[iss]
					sta = station.code.encode()
					on_date = station.start_date.year*1000 + station.start_date.julday
					off_date = station.end_date.year*1000 + station.end_date.julday
					
					if network == '7D':
						statype = 'OBS'
					else:
						statype = 'LAND'
					
					vals_site = (sta,on_date,off_date,\
							station.latitude,station.longitude,station.elevation,\
							station.site.name,statype,network)
					fldvals_site = zip(flds_site, vals_site)
					print "--------Station %5s %5.2f %7.2f  --  %s"\
									% (sta,station.latitude,station.longitude, station.site.name)
					try:
						dbaddrec = dbsi.addv(*fldvals_site)		
					except DbaddvError:
						print "Error with adding this row..."
						try:
							print "Maybe OBS/LAND station repeat?"
							ifrep = dbsi.find('sta=="'+sta+'"')
							dbsi.record=ifrep
							statype_existing = tup2str(dbsi.getv('statype'))
							if statype_existing!=statype: # same staname, different statype
								print("Different statypes: ammending old sta %s to %s"\
										 % (sta,sta+statype_existing[0]))
								# amend old row
								dbsi.putv(('sta',sta+statype_existing[0]))
								# add new row with amended sta name
								print("Ammending new sta %s to %s"\
										 % (sta,sta+statype[0]))
								vals_site = (sta+statype[0],on_date,off_date,\
									station.latitude,station.longitude,\
									station.elevation,station.site.name,statype,network)
								fldvals_site = zip(flds_site, vals_site)
								try:
									dbaddrec = dbsi.addv(*fldvals_site)	
								except DbaddvError:
									print("Some other error")
									pdb.set_trace()
									raise SystemExit("Something wrong - won't add sta,\
							 						but not already in there")
							else: # same staname and statype: skip
								print "Skipping repeated station %s" % sta
						except DbfindEnd:
							pdb.set_trace()
							raise SystemExit("Something wrong - won't add sta,\
							 						but not already in there")
							 
					
					
	return
Example #31
def make_instrument_old(inv):
    dbdir = "/Users/zeilon/Work/CASCADIA/CAdb/" # needs final slash
    dbnam = "cascattendb"
    
    respdir = "/Users/zeilon/Work/CASCADIA/CAdb/response/" # needs final slash
    
    flds = ('inid','insname','instype','samprate','dir','dfile')
    
    ignorechans = ['HKO']
    datemin = UTCDateTime(2011,1,1)
    
    #  What if site table already exists

    instabfile = dbdir+dbnam+".instrument"
    if os.path.isfile(instabfile):
        kok = 0
        while kok == 0:
            yn = raw_input("Instrument table already seems to exist. Overwrite/append/cancel? [o/a/c] ")
            if yn == "o":
                print("Ok, deleting and overwriting...")
                os.remove(instabfile)
                kok = 1
            elif yn == "a":
                print("Ok, appending...")
                kok = 1
            elif yn == "c":
                raise SystemExit("Ok, not deleting... aborting")
    
    with closing(dbopen(dbdir+dbnam,'r+')) as db:
        dbins = db.lookup(table='instrument')
        
        inid = dbins.record_count

        for inn in range(0,len(inv.networks)):
            network = inv[inn]
            for iss in range(0, len(network.stations)):
                station = inv[inn][iss]
                for icc in range(0,len(station.channels)):
                    channel = inv[inn][iss][icc]
#                     if list(set(ignorechans) & set([channel.code])) \
#                                         or channel.code[1]=='X' \
#                                         or channel.code[1]=='Y' \
#                                         or channel.code[1]=='L' \
#                                         or channel.code[1]=='N' \
#                                         or channel.code[1]=='B':
#                         print("Ignoring channel %s..." % channel.code)
#                     else:
                    if (channel.code[1]=='H' or channel.code[1:3]=='DH') and channel.start_date>datemin:
                        if network.code == '7D':
                            instype = 'O_'+channel.code
                        else:
                            instype = 'L_'+channel.code

                        insnam = channel.sensor.description
                        inid=inid+1
                        respfile = find_respfile(respdir,station.code,channel.code)

                        print("Adding instrument "+str(inid)+", " +insnam+" for channel "+channel.code)
                        vals = (inid,insnam,instype,channel.sample_rate,respdir,respfile)
                        fldvals = zip(flds, vals)
                        dbaddrec = dbins.addv(*fldvals)
Example #32
import sys

import misc_tools
from mtools import *
from numpy import arange, asarray
from antelope.stock import pfread, pfin
from antelope.datascope import closing, dbopen
params = pfin('eqloc3d.pf')
loc_params = params['location_parameters']
#These should go in parameter file.
#nr = int(loc_params['nr'])
#nlat = int(loc_params['nlat'])
#nlon = int(loc_params['nlon'])
#nx, ny, nz = nlon, nlat, nr
earth_rad = 6371

#Load events
print 'Reading db'
with closing(dbopen('/Users/mcwhite/staging/dbs/anza_sub/anza')) as db:
    tbl_event = db.schema_tables['event']
    tbl_event = tbl_event.join('origin')
    tbl_event = tbl_event.subset('time >= _2013319 00:00:00_')
    tbl_event = tbl_event.separate('event')
    event_list = misc_tools.create_event_list(tbl_event, 'CSS3.0')
print 'Done reading db'

for ev in event_list:
    origin = ev.preferred_origin
    if origin.lon < -117 or origin.lon > -116 or origin.lat < 33.0 or origin.lat > 34.0:
        continue
    misc_tools.locate_eq(origin)
    sys.exit()
    """
    Verify that we can work with table.
    Returns path if valid and we see data.
    """

    try:
        import antelope.elog as elog
        import antelope.stock as stock
        import antelope.datascope as datascope
    except Exception as e:
        raise sta2jsonException('Problems loading Antelope libs: %s' % e)

    path = False

    try:
        with datascope.closing(datascope.dbopen(dbname, 'r')) as db:
            db = db.lookup(table=tbl)

            if not db.query(datascope.dbTABLE_PRESENT):
                if verbose: elog.complain('No dbTABLE_PRESENT on %s' % dbname)
                return False

            if not db.record_count:
                if verbose: elog.complain('No %s.record_count' % dbname)
                return False

            path = db.query('dbTABLE_FILENAME')
    except Exception as e:
        elog.complain("Problems with db[%s]: %s" % (dbname, e))
        return False

    return path