Example #1
import numpy as np
from obspy.xseed import Parser


def read_paz(files):
    """
    Read dataless SEED files and extract poles and zeros per channel.
    """
    paz = {}

    for file in files:
        p = Parser(file)
        blk = p.blockettes

        for j in range(len(blk[50])):

            # one blockette 58 per response stage; use integer division so
            # the result can serve as an index multiplier below
            mult = len(blk[58]) // len(blk[52])

            sta = blk[50][j].station_call_letters
            paz[sta] = {}

            for i in range(j * 3, len(blk[52])):
                channel = blk[52][i].channel_identifier
                paz[sta][channel] = {}
                paz[sta][channel]['poles'] = np.array(
                    blk[53][i].real_pole) + 1j * np.array(
                        blk[53][i].imaginary_pole)
                paz[sta][channel]['zeros'] = np.array(
                    blk[53][i].real_zero) + 1j * np.array(
                        blk[53][i].imaginary_zero)
                paz[sta][channel]['gain'] = blk[53][i].A0_normalization_factor
                paz[sta][channel]['sensitivity'] = blk[58][(i + 1) * mult -
                                                           1].sensitivity_gain

    return paz
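
A hedged usage sketch (the dataless file names are placeholders):

# Hypothetical call: collect poles and zeros for every station and
# channel in two dataless volumes, then look a single channel up.
paz = read_paz(["dataless.seed.BW_KW1", "dataless.seed.BW_BGLD"])
print(paz["KW1"]["EHZ"]["poles"])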
Example #2
    def test_evalresp_with_output_from_seed(self):
        """
        The StationXML file has been converted to SEED with the help of a tool
        provided by IRIS:

        https://seiscode.iris.washington.edu/projects/stationxml-converter
        """
        t_samp = 0.05
        nfft = 16384

        # Test for different output units.
        units = ["DISP", "VEL", "ACC"]
        filenames = ["IRIS_single_channel_with_response", "XM.05", "AU.MEEK"]

        for filename in filenames:
            xml_filename = os.path.join(self.data_dir,
                                        filename + os.path.extsep + "xml")
            seed_filename = os.path.join(self.data_dir,
                                         filename + os.path.extsep + "seed")

            p = Parser(seed_filename)

            # older systems don't like an end date in the year 2599
            t_ = UTCDateTime(2030, 1, 1)
            if p.blockettes[50][0].end_effective_date > t_:
                p.blockettes[50][0].end_effective_date = None
            if p.blockettes[52][0].end_date > t_:
                p.blockettes[52][0].end_date = None

            resp_filename = p.getRESP()[0][-1]

            inv = read_inventory(xml_filename)

            network = inv[0].code
            station = inv[0][0].code
            location = inv[0][0][0].location_code
            channel = inv[0][0][0].code
            date = inv[0][0][0].start_date

            for unit in units:
                resp_filename.seek(0, 0)

                seed_response, seed_freq = evalresp(t_samp,
                                                    nfft,
                                                    resp_filename,
                                                    date=date,
                                                    station=station,
                                                    channel=channel,
                                                    network=network,
                                                    locid=location,
                                                    units=unit,
                                                    freq=True)

                xml_response, xml_freq = \
                    inv[0][0][0].response.get_evalresp_response(t_samp, nfft,
                                                                output=unit)

                self.assertTrue(np.allclose(seed_freq, xml_freq, rtol=1E-5))
                self.assertTrue(
                    np.allclose(seed_response, xml_response, rtol=1E-5))
Example #3
    def import_dataless(self):
        """
        Read dataless SEED metadata from an absolute or relative file
        path and return the resulting Parser object.
        """
        metadata_path = self.metadata_path

        return Parser(metadata_path)
Example #4
 def test_channel_in_parser(self):
     """
     Tests if a given channel is part of a Parser object.
     """
     starttime = UTCDateTime(2007, 2, 12, 10, 30, 28, 197700)
     endtime = UTCDateTime(2007, 2, 12, 11, 35, 28, 197700)
     channel_id = "ES.ECAL..HHE"
     # An empty file should of course not contain much.
     parser_object = Parser(
         os.path.join(self.data_dir, "channelless_datalessSEED"))
     self.assertFalse(
         utils.channel_in_parser(parser_object, channel_id, starttime,
                                 endtime))
     # Now read a file that actually contains data.
     channel_id = "IU.PAB.00.BHE"
     starttime = UTCDateTime(1999, 2, 18, 10, 0)
     endtime = UTCDateTime(2009, 8, 13, 19, 0)
     parser_object = Parser(os.path.join(self.data_dir, "dataless.IU_PAB"))
     # This is an exact fit of the start and end times in this file.
     self.assertTrue(
         utils.channel_in_parser(parser_object, channel_id, starttime,
                                 endtime))
     # Now try some others that do not fit.
     self.assertFalse(
         utils.channel_in_parser(parser_object, channel_id, starttime - 1,
                                 endtime))
     self.assertFalse(
         utils.channel_in_parser(parser_object, channel_id, starttime,
                                 endtime + 1))
     self.assertFalse(
         utils.channel_in_parser(parser_object, channel_id + "x", starttime,
                                 endtime))
     self.assertFalse(
         utils.channel_in_parser(parser_object, channel_id, starttime - 200,
                                 starttime - 100))
     self.assertFalse(
         utils.channel_in_parser(parser_object, channel_id, endtime + 100,
                                 endtime + 200))
     # And some that do fit.
     self.assertTrue(
         utils.channel_in_parser(parser_object, channel_id, starttime,
                                 starttime + 10))
     self.assertTrue(
         utils.channel_in_parser(parser_object, channel_id, endtime - 100,
                                 endtime))
Example #5
    def stats_from_dataless(self, metadata_path=None):
        """
        Return a 1-D array of the station names contained in a dataless
        SEED file.
        """
        if metadata_path is None:
            metadata_path = self.metadata_path

        sp = Parser(metadata_path)

        metadata = sp.getInventory()
        stats = np.asarray(
            [stat['station_id'] for stat in metadata['stations']])
        return stats
Example #6
File: client.py Project: msimon00/obspy
    def getPAZ(self, seed_id, datetime):
        """
        Get PAZ for a station at given time span. Gain is the A0 normalization
        constant for the poles and zeros.

        :type seed_id: str
        :param seed_id: SEED or channel id, e.g. ``"BW.RJOB..EHZ"`` or
            ``"EHE"``.
        :type datetime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param datetime: Time for which the PAZ is requested,
            e.g. ``'2010-01-01 12:00:00'``.
        :rtype: dict
        :return: Dictionary containing zeros, poles, gain and sensitivity.

        .. rubric:: Example

        >>> c = Client()
        >>> paz = c.station.getPAZ('BW.MANZ..EHZ', '20090707')
        >>> paz['zeros']
        [0j, 0j]
        >>> len(paz['poles'])
        5
        >>> print(paz['poles'][0])
        (-0.037004+0.037016j)
        >>> paz['gain']
        60077000.0
        >>> paz['sensitivity']
        2516800000.0
        """
        network, station, location, channel = seed_id.split(".")
        # request station information
        station_list = self.getList(network=network, station=station,
                                    datetime=datetime)
        if not station_list:
            return {}
        # don't allow wild cards
        for wildcard in ['*', '?']:
            if wildcard in seed_id:
                msg = "Wildcards in seed_id are not allowed."
                raise ValueError(msg)

        if len(station_list) > 1:
            warnings.warn("Received more than one XSEED file. Using first.")

        xml_doc = station_list[0]
        res = self.client.station.getResource(xml_doc['resource_name'])
        parser = Parser(res)
        paz = parser.getPAZ(seed_id=seed_id, datetime=UTCDateTime(datetime))
        return paz
Example #7
    def dSEED_XML(self, metadata_path=None):
        """
        Convert a dataless SEED file, given by an absolute or relative
        path, to an XSEED (station XML) file: x.dataless is converted
        to x.xml, written to the current working directory.
        """
        if metadata_path is None:
            metadata_path = self.metadata_path

        dataless_basename = os.path.basename(metadata_path)
        xml_name = os.path.splitext(dataless_basename)[0]
        xml_path = '{}.xml'.format(xml_name)
        sp = Parser(metadata_path)
        sp.writeXSEED(xml_path)
Example #8
 def _parse_seed(self, station_item, all_stations):
     """
     Helper function to parse SEED and XSEED files.
     """
     parser = Parser(station_item)
     for station in parser.stations:
         network_code = None
         station_code = None
         latitude = None
         longitude = None
         elevation = None
         local_depth = None
         for blockette in station:
             if blockette.id not in [50, 52]:
                 continue
             elif blockette.id == 50:
                 network_code = str(blockette.network_code)
                 station_code = str(blockette.station_call_letters)
                 continue
             elif blockette.id == 52:
                 latitude = blockette.latitude
                 longitude = blockette.longitude
                 elevation = blockette.elevation
                 local_depth = blockette.local_depth
                 break
         if None in [
                 network_code, station_code, latitude, longitude, elevation,
                 local_depth
         ]:
             msg = "Could not parse %s" % station_item
             raise ValueError(msg)
         stat = {
             "id": "%s.%s" % (network_code, station_code),
             "latitude": latitude,
             "longitude": longitude,
             "elevation_in_m": elevation,
             "local_depth_in_m": local_depth
         }
         if stat["id"] in all_stations:
             all_stations[stat["id"]].update(stat)
         else:
             all_stations[stat["id"]] = stat
Example #9
def getStationDataless(netsta):
	# Return the dataless blockettes for a given station.
	net = netsta[:2].upper()
	sta = netsta[2:].upper()
	netsta = '_'.join([net,sta])
	if os.path.exists(staDatalessPath + 'DATALESS.' + netsta + '.seed'):
		station = []
		parsedDataless = aslParser(staDatalessPath + 'DATALESS.' + netsta + '.seed')
		for blockette in parsedDataless.stations:
			station.extend(blockette)
		return station
	else:
		parsedDataless = Parser(netDatalessPath + net + '.dataless')
		if len(netsta) > 2:
			sta = netsta[2:].upper()
			for station in parsedDataless.stations:
				for blockette in station:
					if blockette.id == 50:
						if blockette.station_call_letters == sta:
							return station
Example #10
    def _extract_index_values_seed(self, filename):
        """
        Reads SEED files and extracts some keys per channel.
        """
        try:
            p = Parser(filename)
        except:
            msg = "Could not read SEED file '%s'." % filename
            raise ValueError(msg)
        channels = p.getInventory()["channels"]

        channels = [[
            _i["channel_id"],
            int(_i["start_date"].timestamp),
            int(_i["end_date"].timestamp) if _i["end_date"] else None,
            _i["latitude"], _i["longitude"], _i["elevation_in_m"],
            _i["local_depth_in_m"]
        ] for _i in channels]

        return channels
Example #11
    def locs_from_dataless(self, metadata_path=None):
        """
        Return an (N, 3) numpy array of longitude, latitude (in decimal
        degrees) and elevation (in m) for each channel in a dataless
        SEED file.
        """
        if metadata_path is None:
            metadata_path = self.metadata_path

        sp = Parser(metadata_path)

        metadata = sp.getInventory()

        lats = np.asarray([float(i['latitude']) for i in metadata['channels']])

        lons = np.asarray(
            [float(i['longitude']) for i in metadata['channels']])

        elev = np.asarray(
            [float(i['elevation_in_m']) for i in metadata['channels']])

        return np.column_stack((lons, lats, elev))
Example #12
 def load_dataless(self, parser_data):
    '''User input: ask for the location of one or more dataless files
    and save a Parser object for each one into the parser_data
    dictionary, keyed by the part of the file path before the first
    dot.'''

    dir_dl = askopenfilenames()

    for dir in dir_dl:
       key_name = ""  # dictionary key: path up to the first dot
       for c in dir:
          if c != '.':
             key_name += c
          else:
             break

       parser = Parser(dir)
       parser_data[key_name] = parser

    return parser_data
Example #13
 # skip directories
 if not os.path.isfile(file):
     continue
 # create folder from filename
 seedfile = os.path.basename(file)
 resp_path = os.path.join(path, seedfile)
 # skip existing directories
 if os.path.isdir(resp_path):
     print "Skipping", os.path.join(relpath, seedfile)
     continue
 else:
     os.mkdir(resp_path)
     print "Parsing %s\t\t" % os.path.join(relpath, seedfile)
 # Create the RESP file.
 try:
     sp = Parser(file)
     sp.writeRESP(folder=resp_path)
     sp.writeRESP(folder=resp_path, zipped=True)
     # Compare with RESP files generated with rdseed from IRIS if existing
     for resp_file in glob.iglob(resp_path + os.sep + '*'):
         print '  ' + os.path.basename(resp_file)
         org_resp_file = resp_file.replace('output' + os.sep,
                                           'data' + os.sep)
         if os.path.exists(org_resp_file):
             _compareRESPFiles(org_resp_file, resp_file)
 except Exception, e:
     # remove all related files
     if os.path.isdir(resp_path):
         for f in glob.glob(os.path.join(resp_path, '*')):
             os.remove(f)
         os.removedirs(resp_path)
Example #14
from obspy.core import read, UTCDateTime
from obspy.core.util.geodetics import gps2DistAzimuth
from obspy.xseed import Parser
from math import log10

st = read("../data/LKBD.MSEED")

paz_wa = {
    'sensitivity': 2800,
    'zeros': [0j],
    'gain': 1,
    'poles': [-6.2832 - 4.7124j, -6.2832 + 4.7124j]
}

parser = Parser("../data/LKBD.dataless")
paz_le3d5s = parser.getPAZ("CH.LKBD..EHZ")

st.simulate(paz_remove=paz_le3d5s, paz_simulate=paz_wa, water_level=10)

t = UTCDateTime("2012-04-03T02:45:03")
st.trim(t, t + 50)

tr_n = st.select(component="N")[0]
ampl_n = max(abs(tr_n.data))
tr_e = st.select(component="E")[0]
ampl_e = max(abs(tr_e.data))
ampl = max(ampl_n, ampl_e)

sta_lat = 46.38703
sta_lon = 7.62714
event_lat = 46.218
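
# The snippet breaks off here. A hedged sketch of how the ObsPy
# local-magnitude tutorial that this script appears to follow
# continues; event_lon and the attenuation terms are assumptions
# taken from that tutorial, not from the original file.
event_lon = 7.70  # hypothetical event longitude
epi_dist, az, baz = gps2DistAzimuth(event_lat, event_lon,
                                    sta_lat, sta_lon)
epi_dist = epi_dist / 1000.0  # metres -> kilometres
if epi_dist < 60:  # Swiss ML attenuation terms
    a, b = 0.018, 2.17
else:
    a, b = 0.0038, 3.02
ml = log10(ampl * 1000.0) + a * epi_dist + b  # amplitude in mm
print(ml)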
Example #15
    print parseresult.date
    print "Here is the year " + parseresult.date.split()[0]
    print "Here is the day " + parseresult.date.split()[1]
try:
    epochtime = UTCDateTime(parseresult.date.split()[0] + "-" +
                            parseresult.date.split()[1] + "T00:00:00.0")
except:
    print "Problem reading epoch time"
    sys.exit(0)

#Read in the dataless
if verbose:
    print "Reading in the dataless"
#try:

sp = Parser(parseresult.dataless)
#except:
#	print "Not able to read dataless"
#	sys.exit(0)

if parseresult.station:
    if verbose:
        print "Making a station list"
    stations = getstalist(sp, epochtime)
    for sta in stations:
        print sta

if parseresult.stationlist:
    if verbose:
        print "Making a station list"
    stations = getstalistlocation(sp, epochtime)
Example #16
File: util.py Project: 717524640/obspyck
def fetch_waveforms_with_metadata(options, args, config):
    """
    Sets up obspy clients and fetches waveforms and metadata according to
    command line options.
    Now also fetches data via arclink if --arclink-ids is used.
    Args are tried as local waveform files or metadata files.

    XXX Notes: XXX
     - there is a problem in the arclink client with duplicate traces in
       fetched streams. therefore at the moment it might be necessary to use
       "-m overwrite" option.

    :returns: (dictionary with clients,
               list(:class:`obspy.core.stream.Stream`s))
    """
    if options.station_combination is None:
        station_combination = config.get("base", "station_combination")
    else:
        station_combination = options.station_combination

    no_metadata = config.getboolean("base", "no_metadata")

    if options.time is None:
        time_ = UTCDateTime(config.get("base", "time"))
    else:
        time_ = UTCDateTime(options.time)
    t1 = time_ + config.getfloat("base", "starttime_offset")
    if options.duration is None:
        t2 = t1 + config.getfloat("base", "duration")
    else:
        t2 = t1 + options.duration

    seed_ids_to_fetch = {}
    seed_ids = config.get("station_combinations", station_combination)
    seed_id_lookup_keys = config.options("seed_id_lookup")
    for seed_id in seed_ids.split(","):
        netstaloc = seed_id.rsplit(".", 1)[0]
        netsta = seed_id.rsplit(".", 2)[0]
        net = seed_id.rsplit(".", 3)[0]
        # look up by exact SEED ID:
        if seed_id in seed_id_lookup_keys:
            seed_ids_to_fetch[seed_id] = config.get("seed_id_lookup", seed_id)
        # look up by SEED ID down to location code
        elif netstaloc in seed_id_lookup_keys:
            seed_ids_to_fetch[seed_id] = config.get("seed_id_lookup",
                                                    netstaloc)
        # look up by SEED ID down to station code
        elif netsta in seed_id_lookup_keys:
            seed_ids_to_fetch[seed_id] = config.get("seed_id_lookup", netsta)
        # look up by network code
        elif net in seed_id_lookup_keys:
            seed_ids_to_fetch[seed_id] = config.get("seed_id_lookup", net)

    clients = {}

    streams = []
    sta_fetched = set()
    # Local files:
    parsers = []
    inventories = []
    if args:
        print "=" * 80
        print "Reading local files:"
        print "-" * 80
        stream_tmp = Stream()
        for file in args:
            # try to read as metadata
            try:
                p = Parser(file)
            except:
                pass
            else:
                print "%s: Metadata" % file
                parsers.append(p)
                continue
            # try to read as StationXML inventory
            try:
                inv = read_inventory(file)
            except:
                pass
            else:
                print "%s: Metadata" % file
                inventories.append(inv)
                continue
            # try to read as waveforms
            try:
                st = read(file,
                          starttime=t1,
                          endtime=t2,
                          verify_chksum=not config.getboolean(
                              "base", "ignore_gse2_chksum_error"))
            except TypeError:
                print "File %s not recognized as dataless or waveform file. Skipped." % file
                continue
            msg = "%s: Waveforms" % file
            if not st:
                msg += " (not matching requested time window)"
            print msg
            stream_tmp += st
        if len(parsers + inventories) == 0:
            msg = "No station metadata for waveforms from local files."
            raise Exception(msg)
        for tr in stream_tmp:
            if not no_metadata:
                has_metadata = False
                for inv in inventories:
                    try:
                        tr.attach_response(inv)
                        tr.stats.coordinates = inv.get_coordinates(
                            tr.id, tr.stats.starttime)
                        tr.stats.orientation = get_orientation(
                            inv, tr.id, tr.stats.starttime)
                        has_metadata = True
                        break
                    except:
                        continue
                if not has_metadata:
                    for parser in parsers:
                        try:
                            tr.stats.paz = parser.getPAZ(
                                tr.id, tr.stats.starttime)
                            tr.stats.coordinates = parser.getCoordinates(
                                tr.id, tr.stats.starttime)
                            tr.stats.orientation = get_orientation_from_parser(
                                parser, tr.id, tr.stats.starttime)
                            has_metadata = True
                            break
                        except:
                            continue
                if not has_metadata:
                    print "found no metadata for %s!!!" % file
            if tr.stats._format == 'GSE2':
                apply_gse2_calib(tr)
        ids = set([(tr.stats.network, tr.stats.station, tr.stats.location)
                   for tr in stream_tmp])
        for net, sta, loc in ids:
            stream_tmp_ = stream_tmp.select(network=net,
                                            station=sta,
                                            location=loc)
            # check whether to attempt rotation
            if config.has_section("rotate_channels"):
                net_sta_loc = ".".join((net, sta, loc))
                if net_sta_loc in config.options("rotate_channels"):
                    rotate_channels(stream_tmp_, net, sta, loc, config)
            streams.append(stream_tmp_)

    print "=" * 80
    print "Fetching waveforms and metadata from servers:"
    print "-" * 80
    for seed_id, server in sorted(seed_ids_to_fetch.items()):
        server_type = config.get(server, "type")
        if server_type not in ("seishub", "fdsn", "jane", "arclink"):
            msg = ("Unknown server type '{}' in server definition section "
                   "'{}' in config file.").format(server_type, server)
            raise NotImplementedError(msg)
        client = connect_to_server(server, config, clients)
        net, sta, loc, cha = seed_id.split(".")
        net_sta_loc = "%s.%s.%s" % (net, sta, loc)
        # make sure we don't fetch a single station of
        # one network twice (could happen with wildcards)
        if any([char in net_sta_loc for char in '?*[]']):
            msg = ("Wildcards in SEED IDs to fetch are only allowed in "
                   "channel part: {}").format(seed_id)
            raise NotImplementedError(msg)
        if net_sta_loc in sta_fetched:
            print "%s (%s: %s) skipped! (Was already retrieved)" % (
                seed_id.ljust(15), server_type, server)
            continue
        try:
            sys.stdout.write("\r%s (%s: %s) ..." %
                             (seed_id.ljust(15), server_type, server))
            sys.stdout.flush()
            # SeisHub
            if server_type == "seishub":
                st = client.waveform.getWaveform(net,
                                                 sta,
                                                 loc,
                                                 cha,
                                                 t1,
                                                 t2,
                                                 apply_filter=True)
                if not no_metadata:
                    data = client.station.getList(network=net,
                                                  station=sta,
                                                  datetime=t1)
                    if len(data) == 0:
                        msg = "No station metadata on server."
                        raise Exception(msg)
                    parsers = [
                        Parser(client.station.getResource(d['resource_name']))
                        for d in data
                    ]
                    for tr in st:
                        orientation = [
                            get_orientation_from_parser(p_, tr.id, datetime=t1)
                            for p_ in parsers
                        ]
                        coordinates = [
                            p_.getCoordinates(tr.id, datetime=t1)
                            for p_ in parsers
                        ]
                        paz = [p_.getPAZ(tr.id, datetime=t1) for p_ in parsers]
                        # check for clashing multiple station metadata
                        for list_ in (orientation, coordinates, paz):
                            for i in range(1, len(list_))[::-1]:
                                if list_[i] == list_[0]:
                                    list_.pop(i)
                        for list_, name in zip(
                            (orientation, coordinates, paz),
                            ("orientation", "coordinates", "paz")):
                            if len(list_) > 1:
                                msg = ("Multiple matching station metadata "
                                       "({}) on server: {}.").format(
                                           name, list_)
                                raise Exception(msg)
                        tr.stats.orientation = orientation[0]
                        tr.stats.coordinates = coordinates[0]
                        tr.stats.paz = paz[0]
            # ArcLink
            elif server_type == "arclink":
                st = client.getWaveform(network=net,
                                        station=sta,
                                        location=loc,
                                        channel=cha,
                                        starttime=t1,
                                        endtime=t2)
                if not no_metadata:
                    parsers = {}
                    for net_, sta_, loc_, cha_ in set(
                        [tuple(tr.id.split(".")) for tr in st]):
                        sio = StringIO()
                        client.saveResponse(sio, net_, sta_, loc_, cha_,
                                            t1 - 10, t2 + 10)
                        sio.seek(0)
                        id_ = ".".join((net_, sta_, loc_, cha_))
                        parsers[id_] = Parser(sio)
                    for tr in st:
                        p_ = parsers[tr.id]
                        tr.stats.orientation = \
                            get_orientation_from_parser(p_, tr.id, datetime=t1)
                        tr.stats.coordinates = \
                            p_.getCoordinates(tr.id, datetime=t1)
                        tr.stats.paz = p_.getPAZ(tr.id, datetime=t1)
            # FDSN (or JANE)
            elif server_type in ("fdsn", "jane"):
                st = client.get_waveforms(network=net,
                                          station=sta,
                                          location=loc,
                                          channel=cha,
                                          starttime=t1,
                                          endtime=t2)
                if not no_metadata:
                    inventory = client.get_stations(network=net,
                                                    station=sta,
                                                    location=loc,
                                                    level="response")
                    failed = st.attach_response(inventory)
                    if failed:
                        msg = ("Failed to get response for {}!").format(failed)
                        raise Exception(msg)
                    for tr in st:
                        tr.stats.coordinates = inventory.get_coordinates(
                            tr.id, tr.stats.starttime)
                        tr.stats.orientation = get_orientation(
                            inventory, tr.id, tr.stats.starttime)
            sta_fetched.add(net_sta_loc)
            sys.stdout.write("\r%s (%s: %s) fetched.\n" %
                             (seed_id.ljust(15), server_type, server))
            sys.stdout.flush()
        except Exception, e:
            sys.stdout.write("\r%s (%s: %s) skipped! (Exception: %s)\n" %
                             (seed_id.ljust(15), server_type, server, e))
            sys.stdout.flush()
            continue
        # check whether to attempt rotation
        if not no_metadata:
            if config.has_section("rotate_channels"):
                if net_sta_loc in config.options("rotate_channels"):
                    rotate_channels(st, net, sta, loc, config)
        # SeisHub
        if server_type == "seishub":
            for tr in st:
                if tr.stats._format == 'GSE2':
                    apply_gse2_calib(tr)
                tr.stats['_format'] = "SeisHub"
        # ArcLink
        elif server_type == "arclink":
            for tr in st:
                tr.stats['_format'] = "ArcLink"
        # FDSN (or JANE)
        elif server_type in ("fdsn", "jane"):
            for tr in st:
                tr.stats['_format'] = "FDSN"
        streams.append(st)
Example #17
        # SEED Handling.
        if seed_file in faulty_seed_files:
            continue

        #from obspy import UTCDateTime
        #if channel.start_date != UTCDateTime(1999, 12, 28, 22, 24, 39):
        #continue

        print chan + ": ",

        unit_known_to_evalresp = True

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            try:
                p = Parser(seed_file)
            except:
                faulty_seed_files.append(seed_file)
                counter["random_error"] += 1
                print_warning("Failed to read SEED file!")
                continue

        try:
            all_resps = p.getRESP()
        except:
            counter["random_error"] += 1
            print_warning("getRESP() failed. Very likely a faulty SEED file")
            continue
        resp_string = "RESP.%s.%s.%s.%s" % (net_id, stat_id, loc_id, chan_id)
        all_resps = [_i for _i in all_resps if _i[0] == resp_string]
Example #18
#!/usr/bin/env python
# test_dataless.py
#
# Checks whether it is possible to read poles and zeroes from a dataless file
# Prints the errors to stdout
#
# 2012-02-15 - Claudio Satriano <*****@*****.**>
from obspy.xseed import Parser
from datetime import timedelta
from glob import glob

for dlessfile in glob('dataless.*'):
    try:
        sp = Parser(dlessfile)
    except IOError:
        print 'Error reading file:', dlessfile
        continue

    blk = sp.blockettes
    net = blk[50][0].network_code
    sta = blk[50][0].station_call_letters

    startdates = [b.start_date for b in blk[52]]
    #enddates = [b.end_date for b in blk[52]]
    chans = [b.channel_identifier for b in blk[52]]
    locs = [b.location_identifier for b in blk[52]]

    for i in range(0, len(startdates)):
        channel_id = net + '.' + sta + '.' + locs[i] + '.' + chans[i]
        # If we do not add at least 7 minutes to the start_time, ObsPy says:
        # "None or more than one channel with the given description"
Example #19
 def run(self):
     while True:
         try:
             channel = self.queue.get(False)
         except Queue.Empty:
             break
         network = channel["network"]
         station = channel["station"]
         location = channel["location"]
         chan = channel["channel"]
         starttime = channel["starttime"]
         endtime = channel["endtime"]
         channel_id = "%s.%s.%s.%s" % (network, station, location, chan)
         time.sleep(0.5)
         if logger:
             logger.debug("Starting ArcLink download for %s..." %
                          channel_id)
         # Telnet sometimes has issues...
         success = False
         for _i in xrange(3):
             try:
                 arc_client = obspy.arclink.Client(user=arclink_user,
                                                   timeout=30)
                 success = True
                 break
             except:
                 time.sleep(0.3)
         if success is False:
             msg = (" A problem occured initializing ArcLink. Try "
                    "again later")
             logger.error(msg)
             failed_downloads.put(channel)
             continue
         try:
             memfile = StringIO.StringIO()
             arc_client.saveResponse(memfile,
                                     channel["network"],
                                     channel["station"],
                                     channel["location"],
                                     channel["channel"],
                                     starttime=channel["starttime"],
                                     endtime=channel["endtime"],
                                     format="SEED")
         except Exception as e:
             msg = "While downloading %s [%s to %s]: %s" % (
                 channel_id, channel["starttime"], channel["endtime"],
                 str(e))
             logger.error(msg)
             failed_downloads.put(channel)
             continue
         memfile.seek(0, 0)
         # Read the file again and perform a sanity check.
         try:
             parser = Parser(memfile)
         except:
             msg = ("Arclink did not return a valid dataless SEED file "
                    "for channel %s [%s-%s]") % (channel_id, starttime,
                                                 endtime)
             logger.error(msg)
             failed_downloads.put(channel)
             continue
         if not utils.channel_in_parser(parser, channel_id, starttime,
                                        endtime):
             msg = ("Arclink returned a valid dataless SEED file "
                 "for channel %s [%s to %s], but it does not actually "
                 " contain data for the requested channel and time "
                 "frame.") % \
                 (channel_id, starttime, endtime)
             logger.error(msg)
             failed_downloads.put(channel)
             continue
         memfile.seek(0, 0)
         save_station_fct(memfile,
                          channel["network"],
                          channel["station"],
                          channel["location"],
                          channel["channel"],
                          format="datalessSEED")
         successful_downloads.put(channel)
         if logger:
             logger.info("Successfully downloaded dataless SEED for "
                         "channel %s.%s.%s.%s from ArcLink." %
                         (channel["network"], channel["station"],
                          channel["location"], channel["channel"]))
Example #20
                                           os.path.basename(seedfile))
     print msg,
 # fetch original SEED file
 fp = open(file, 'r')
 org_seed = fp.read()
 fp.close()
 # set compact date flag
 compact = False
 if os.path.basename(file) in compact_date_files:
     compact = True
 # start parsing
 try:
     print "rS",
     sys.stdout.flush()
     # parse SEED
     sp = Parser(org_seed)
     print "wS",
     sys.stdout.flush()
     # write SEED to compare to original SEED.
     f1 = open(seedfile, 'w')
     seed = sp.getSEED(compact=compact)
     f1.write(seed)
     f1.close()
     print "cS",
     sys.stdout.flush()
     # Compare to original SEED.
     utils.compareSEED(org_seed, seed)
     print "wX",
     sys.stdout.flush()
     # generate XSEED versions 1.0 and 1.1
     f1 = open(xseedfile_10, 'w')
Example #21
File: project.py Project: msimon00/LASIF
            def get_value(self):
                station_id, coordinates = self.items[self.current_index]

                data = Stream()
                # Now get the actual waveform files. Also find the
                # corresponding station file and check the coordinates.
                this_waveforms = {
                    _i["channel_id"]: _i
                    for _i in waveforms
                    if _i["channel_id"].startswith(station_id + ".")
                }
                marked_for_deletion = []
                for key, value in this_waveforms.iteritems():
                    value["trace"] = read(value["filename"])[0]
                    data += value["trace"]
                    value["station_file"] = \
                        station_cache.get_station_filename(
                            value["channel_id"],
                            UTCDateTime(value["starttime_timestamp"]))
                    if value["station_file"] is None:
                        marked_for_deletion.append(key)
                        msg = ("Warning: Data and station information for '%s'"
                               " is available, but the station information "
                               "only for the wrong timestamp. You should try "
                               "and retrieve the correct station file.")
                        warnings.warn(msg % value["channel_id"])
                        continue
                    data[-1].stats.station_file = value["station_file"]
                for key in marked_for_deletion:
                    del this_waveforms[key]
                if not this_waveforms:
                    msg = "Could not retrieve data for station '%s'." % \
                        station_id
                    warnings.warn(msg)
                    return None
                # Now attempt to get the synthetics.
                synthetics_filenames = []
                for name, path in synthetic_files.iteritems():
                    if (station_id + ".") in name:
                        synthetics_filenames.append(path)

                if len(synthetics_filenames) != 3:
                    msg = "Found %i not 3 synthetics for station '%s'." % (
                        len(synthetics_filenames), station_id)
                    warnings.warn(msg)
                    return None

                synthetics = Stream()
                # Read all synthetics.
                for filename in synthetics_filenames:
                    synthetics += read(filename)
                for synth in synthetics:
                    if synth.stats.channel in ["X", "Z"]:
                        synth.data *= -1.0
                    synth.stats.channel = SYNTH_MAPPING[synth.stats.channel]
                    synth.stats.starttime = event_info["origin_time"]

                # Process the data.
                len_synth = synthetics[0].stats.endtime - \
                    synthetics[0].stats.starttime
                data.trim(synthetics[0].stats.starttime - len_synth * 0.05,
                          synthetics[0].stats.endtime + len_synth * 0.05)
                if data:
                    max_length = max([tr.stats.npts for tr in data])
                else:
                    max_length = 0
                if max_length == 0:
                    msg = (
                        "Warning: After trimming the waveform data to "
                        "the time window of the synthetics, no more data is "
                        "left. The reference time is the one given in the "
                        "QuakeML file. Make sure it is correct and that "
                        "the waveform data actually contains data in that "
                        "time span.")
                    warnings.warn(msg)
                data.detrend("linear")
                data.taper()

                new_time_array = np.linspace(
                    synthetics[0].stats.starttime.timestamp,
                    synthetics[0].stats.endtime.timestamp,
                    synthetics[0].stats.npts)

                # Simulate the traces.
                for trace in data:
                    # Decimate in case there is a large difference between
                    # synthetic sampling rate and sampling_rate of the data.
                    # XXX: Ugly filter, change!
                    if trace.stats.sampling_rate > (6 *
                                                    synth.stats.sampling_rate):
                        new_nyquist = trace.stats.sampling_rate / 2.0 / 5.0
                        trace.filter("lowpass",
                                     freq=new_nyquist,
                                     corners=4,
                                     zerophase=True)
                        trace.decimate(factor=5, no_filter=True)

                    station_file = trace.stats.station_file
                    if "/SEED/" in station_file:
                        paz = Parser(station_file).getPAZ(
                            trace.id, trace.stats.starttime)
                        trace.simulate(paz_remove=paz)
                    elif "/RESP/" in station_file:
                        trace.simulate(
                            seedresp={
                                "filename": station_file,
                                "units": "VEL",
                                "date": trace.stats.starttime
                            })
                    else:
                        raise NotImplementedError

                    # Make sure that the data array is at least as long as the
                    # synthetics array. Also add some buffer sample for the
                    # spline interpolation to work in any case.
                    buf = synth.stats.delta * 5
                    if synth.stats.starttime < (trace.stats.starttime + buf):
                        trace.trim(starttime=synth.stats.starttime - buf,
                                   pad=True,
                                   fill_value=0.0)
                    if synth.stats.endtime > (trace.stats.endtime - buf):
                        trace.trim(endtime=synth.stats.endtime + buf,
                                   pad=True,
                                   fill_value=0.0)

                    old_time_array = np.linspace(
                        trace.stats.starttime.timestamp,
                        trace.stats.endtime.timestamp, trace.stats.npts)

                    # Interpolation.
                    trace.data = interp1d(old_time_array, trace.data,
                                          kind=1)(new_time_array)
                    trace.stats.starttime = synthetics[0].stats.starttime
                    trace.stats.sampling_rate = \
                        synthetics[0].stats.sampling_rate

                data.filter("bandpass", freqmin=lowpass, freqmax=highpass)
                synthetics.filter("bandpass",
                                  freqmin=lowpass,
                                  freqmax=highpass)

                # Rotate the synthetics if necessary.
                if self.rot_angle:
                    # First rotate the station back to see, where it was
                    # recorded.
                    lat, lng = rotations.rotate_lat_lon(
                        coordinates["latitude"], coordinates["longitude"],
                        self.rot_axis, -self.rot_angle)
                    # Rotate the data.
                    n_trace = synthetics.select(component="N")[0]
                    e_trace = synthetics.select(component="E")[0]
                    z_trace = synthetics.select(component="Z")[0]
                    n, e, z = rotations.rotate_data(n_trace.data, e_trace.data,
                                                    z_trace.data, lat, lng,
                                                    self.rot_axis,
                                                    self.rot_angle)
                    n_trace.data = n
                    e_trace.data = e
                    z_trace.data = z

                return {
                    "data": data,
                    "synthetics": synthetics,
                    "coordinates": coordinates
                }
Example #22
    plt.ylabel("Source Radius [m]", fontsize="x-large")
    plt.grid()
    plt.savefig("/Users/lion/Desktop/SourceRadius.pdf")


if __name__ == "__main__":
    # Read all instrument responses.
    widgets = ['Parsing instrument responses...', progressbar.Percentage(),
        ' ', progressbar.Bar()]
    pbar = progressbar.ProgressBar(widgets=widgets,
        maxval=len(STATION_FILES)).start()
    parsers = {}
    # Read all waveform files.
    for _i, xseed in enumerate(STATION_FILES):
        pbar.update(_i)
        parser = Parser(xseed)
        channels = [c['channel_id'] for c in parser.getInventory()['channels']]
        parsers_ = dict.fromkeys(channels, parser)
        if any([k in parsers for k in parsers_.keys()]):
            msg = "Channel(s) defined in more than one metadata file."
            warnings.warn(msg)
        parsers.update(parsers_)
    pbar.finish()

    # Parse all waveform files.
    widgets = ['Indexing waveform files...     ', progressbar.Percentage(),
        ' ', progressbar.Bar()]
    pbar = progressbar.ProgressBar(widgets=widgets,
        maxval=len(WAVEFORM_FILES)).start()
    waveform_index = {}
    # Read all waveform files.
Example #23
MIN_STATIONS = 3  # minimum of coincident stations for alert
SUMMARY = "/scratch/uh_trigger.txt"

mseed_files = []
parsers = []
for station in STATIONS:
    # waveforms
    dir = os.path.join(BASEDIR, str(TIME.year), NET, station, CHANNEL)
    # XXX maybe read the day before/after to make sure we don't miss data
    # around 00:00
    files = glob.glob("%s*/*.%s" % (dir, TIME.julday))
    mseed_files.extend(files)
    # metadata
    files = glob.glob("%s/dataless*%s" % (BASEDIR_DATALESS, station))
    for file in files:
        parsers.append(Parser(file))

if not mseed_files:
    pass  # XXX print/mail warning

inst = cornFreq2Paz(1.0)
nfft = 4194304  # next nfft of 5h
last_endtime = 0
last_id = "--"

trigger_list = []
summary = []
summary.append("#" * 79)
for file in mseed_files:
    summary.append(file)
    try:
Example #24
from obspy.signal.psd import PPSD
from cpsd import PPSD
from obspy.core import *
from obspy.xseed import Parser

st = read("BW.KW1..EHZ.D.2011.090")
tr = st[0]
p = Parser("dataless.seed.BW_KW1")
paz = p.getPAZ("BW.KW1..EHZ")
ppsd = PPSD(tr.stats, paz)
#ppsd.add(tr)
#ppsd.add(st[1])
#ppsd.plot()
#ppsd.save("/tmp/ppsd")
Example #25
            respfiles = glob.glob(os.path.join(args.inputFolder, '*.resp'))
            if not len(seedfiles):
                if not len(respfiles):
                    print 'A dataless SEED file (ending in .seed) or a RESP file (ending in .resp) must be supplied with input SAC files. Exiting.'
                    sys.exit(1)
                else:
                    seedresp = {
                        'filename': respfiles[0],  # RESP filename
                        # when using Trace/Stream.simulate() the "date" parameter can
                        # also be omitted, and the starttime of the trace is then used.
                        'date': obspy.UTCDateTime(etime),
                        # Units to return response in ('DIS', 'VEL' or ACC)
                        'units': 'ACC'
                    }
            else:
                parser = Parser(seedfiles[0])
        elif args.source == 'unam':
            tdatafiles = glob.glob(os.path.join(args.inputFolder,
                                                '*'))  #grab everything
            datafiles = []
            for dfile in tdatafiles:
                fname, fext = os.path.splitext(dfile)
                if re.match(r'\d', fext[1:]) is not None:
                    datafiles.append(dfile)
        else:
            print 'Data source %s not supported.' % args.source
            sys.exit(1)

    traces = []
    for dfile in datafiles:
        if args.source == 'knet':
Example #26
def obspy_fullresp_RESP(input_dics,
                        trace,
                        resp_file,
                        Address,
                        unit='DIS',
                        BP_filter=(0.008, 0.012, 3.0, 4.0),
                        inform='N/N'):
    """
    Instrument correction using a dataless SEED file, equivalent to the
    full-response-file workflow: detrend, demean, taper, filter,
    deconvolve.
    :param input_dics:
    :param trace:
    :param resp_file:
    :param Address:
    :param unit:
    :param BP_filter:
    :param inform:
    :return:
    """
    dataless_parser = Parser(resp_file)
    seedresp = {'filename': dataless_parser, 'units': unit}

    try:
        trace.detrend('linear')
        trace.simulate(seedresp=seedresp,
                       paz_remove=None,
                       paz_simulate=None,
                       remove_sensitivity=True,
                       simulate_sensitivity=False,
                       water_level=input_dics['water_level'],
                       zero_mean=True,
                       taper=True,
                       taper_fraction=0.05,
                       pre_filt=eval(BP_filter) if isinstance(
                           BP_filter, basestring) else BP_filter,
                       pitsasim=False,
                       sacsim=True)
        # The following line is left out since we want to keep the units
        # as they are in the StationXML:
        # trace.data *= 1.e9
        trace_identity = '%s.%s.%s.%s' % (
            trace.stats['network'], trace.stats['station'],
            trace.stats['location'], trace.stats['channel'])
        if input_dics['mseed'] == 'N':
            trace.write(os.path.join(Address,
                                     '%s.%s' % (unit.lower(), trace_identity)),
                        format='SAC')
        else:
            trace.write(os.path.join(Address,
                                     '%s.%s' % (unit.lower(), trace_identity)),
                        format='MSEED')

        if unit.lower() == 'dis':
            unit_print = 'displacement'
        elif unit.lower() == 'vel':
            unit_print = 'velocity'
        elif unit.lower() == 'acc':
            unit_print = 'acceleration'
        else:
            unit_print = 'UNKNOWN'
        print '%s -- instrument correction to %s for: %s' \
              % (inform, unit_print, trace_identity)

    except Exception as e:
        print '%s -- %s' % (inform, e)
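
A hedged usage sketch; the file names and dictionary values below are
placeholders, and only the 'water_level' and 'mseed' keys of input_dics
are actually read by the function above. BP_filter is passed as a string
because the function applies eval() to it:

from obspy.core import read

tr = read('example.mseed')[0]  # placeholder waveform file
input_dics = {'water_level': 600.0, 'mseed': 'N'}  # assumed values
obspy_fullresp_RESP(input_dics, tr, 'dataless.example.seed', Address='.',
                    unit='VEL', BP_filter='(0.008, 0.012, 3.0, 4.0)',
                    inform='1/1')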
Example #27
from obspy import read
from obspy.xseed import Parser

st = read("http://examples.obspy.org/BW.BGLD..EH.D.2010.037")
parser = Parser("http://examples.obspy.org/dataless.seed.BW_BGLD")
st.simulate(seedresp={'filename': parser, 'units': "DIS"})
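
For comparison, the same parser can also supply plain poles and zeros
instead of the full response; a hedged variant (the channel code is an
assumption):

from obspy.core import read
from obspy.xseed import Parser

st = read("http://examples.obspy.org/BW.BGLD..EH.D.2010.037")
parser = Parser("http://examples.obspy.org/dataless.seed.BW_BGLD")
paz = parser.getPAZ("BW.BGLD..EHZ")  # assumed channel code
st.simulate(paz_remove=paz)  # PAZ-only deconvolution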
Example #28
    def add_stations(self, stations):
        """
        Add the desired output stations to the input file generator.

        Can currently deal with SEED/XML-SEED files and dictionaries of the
        following form:

            {"latitude": 123.4,
             "longitude": 123.4,
             "elevation_in_m": 123.4,
             "local_depth_in_m": 123.4,
             "id": "network_code.station_code"}

        `local_depth_in_m` is optional and will be assumed to be zero if not
        present. It denotes the burial of the sensor beneath the surface.

        If it is a SEED/XML-SEED files, all stations in it will be added.

        :type stations: List of filenames, list of dictionaries or a single
            filename, single dictionary.
        :param stations: The stations for which output files should be
            generated.
        """
        # Try to interpret it as JSON. If it works and results in a list or
        # dictionary, use it!
        try:
            json_s = json.loads(stations)
        except:
            pass
        else:
            # A simple string is also a valid JSON document.
            if isinstance(json_s, list) or isinstance(json_s, dict):
                stations = json_s

        # Thin wrapper to enable single element treatment.
        if isinstance(stations, dict) or not hasattr(stations, "__iter__") or \
                (hasattr(stations, "read") and
                 hasattr(stations.read, "__call__")):
            stations = [
                stations,
            ]

        all_stations = {}

        for station_item in stations:
            # Store the original pointer position to be able to restore it.
            original_position = None
            try:
                original_position = station_item.tell()
                station_item.seek(original_position, 0)
            except:
                pass

            # Download it if it is some kind of URL.
            if isinstance(station_item, basestring) and "://" in station_item:
                station_item = io.BytesIO(urllib2.urlopen(station_item).read())

            # If it is a dict do some checks and add it.
            if isinstance(station_item, dict):
                if "latitude" not in station_item or \
                        "longitude" not in station_item or \
                        "elevation_in_m" not in station_item or \
                        "id" not in station_item:
                    msg = (
                        "Each station dictionary needs to at least have "
                        "'latitude', 'longitude', 'elevation_in_m', and 'id' "
                        "keys.")
                    raise ValueError(msg)
                # Create new dict to not carry around any additional keys.
                stat = {
                    "latitude": float(station_item["latitude"]),
                    "longitude": float(station_item["longitude"]),
                    "elevation_in_m": float(station_item["elevation_in_m"]),
                    "id": str(station_item["id"])
                }
                try:
                    stat["local_depth_in_m"] = \
                        float(station_item["local_depth_in_m"])
                except:
                    pass
                all_stations[stat["id"]] = stat
                continue

            # Also accepts SAC files.
            if isSAC(station_item):
                st = read(station_item)
                for tr in st:
                    stat = {}
                    stat["id"] = "%s.%s" % (tr.stats.network, tr.stats.station)
                    stat["latitude"] = float(tr.stats.sac.stla)
                    stat["longitude"] = float(tr.stats.sac.stlo)
                    stat["elevation_in_m"] = float(tr.stats.sac.stel)
                    stat["local_depth_in_m"] = float(tr.stats.sac.stdp)
                    # lat/lng/ele must be given.
                    if stat["latitude"] == -12345.0 or \
                            stat["longitude"] == -12345.0 or \
                            stat["elevation_in_m"] == -12345.0:
                        warnings.warn("No coordinates for channel '%s'." %
                                      str(tr))
                        continue
                    # Local depth may be neglected.
                    if stat["local_depth_in_m"] == -12345.0:
                        del stat["local_depth_in_m"]
                    all_stations[stat["id"]] = stat
                    continue
                continue

            # Reset pointer.
            if original_position is not None:
                station_item.seek(original_position, 0)

            # SEED / XML-SEED
            try:
                Parser(station_item)
                is_seed = True
            except:
                is_seed = False
            # Reset.
            if original_position is not None:
                station_item.seek(original_position, 0)
            if is_seed is True:
                self._parse_seed(station_item, all_stations)
                continue

            # StationXML
            try:
                stations = extract_coordinates_from_StationXML(station_item)
            except:
                pass
            else:
                for station in stations:
                    all_stations[station["id"]] = station
                continue

            msg = "Could not read %s." % station_item
            raise ValueError(msg)

        self.__add_stations(all_stations.values())
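
A hedged usage sketch (the generator object `gen` and the file name are
placeholders, not from the original):

# Hypothetical usage: mix a plain station dictionary with a dataless
# SEED file; every station found in the file is added as well.
gen.add_stations([
    {"id": "BW.FURT", "latitude": 48.16, "longitude": 11.28,
     "elevation_in_m": 565.0},
    "dataless.seed.BW_FURT",
])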
Example #29
from obspy.xseed import Parser
sp = Parser("dataless.seed")
sp.writeXSEED("dataless.seed")
Example #30
from os import path, makedirs
from obspy.core import utcdatetime, event
from obspy.core.event import Catalog, Event, Magnitude, Origin, StationMagnitude
import sys
import numpy as np
from obspy.neic.client import Client
#from obspy.clients.neic.client import Client
from obspy.xseed import Parser
#from obspy.io.xseed import Parser
from obspy.xseed.utils import SEEDParserException
#from obspy.io.xseed.utils import SEEDParserException
from multiprocessing import Pool, Process, Queue, cpu_count
sys.path.append('../../')
from catalogue.parsers import parse_ggcat

# we will use dataless seed from IRIS to get station information
parser = Parser("../../data/AU.seed")


def sind(x):
    return np.sin(x / 180. * np.pi)


def cosd(x):
    return np.cos(x / 180. * np.pi)


def tand(x):
    return np.tan(x / 180. * np.pi)


def arcsind(x):