Code Example #1
File: create_staxml.py Project: crotwell/StationXML
from obspy import UTCDateTime
from obspy.clients.nrl import NRL
from obspy.core.inventory import Inventory, Network, Station, Channel, Site


def do_xml():
    nrl = NRL('http://ds.iris.edu/NRL/')
    datalogger_keys = ['REF TEK', 'RT 130 & 130-SMA', '1', '40']
    sensor_keys = ['Streckeisen', 'STS-2', '1500', '3 - installed 04/97 to present']

    response = nrl.get_response(sensor_keys=sensor_keys, datalogger_keys=datalogger_keys)

    channel = Channel(code='BHZ',
                      location_code='10',      # required
                      latitude=0,      # required
                      longitude=0,   # required
                      elevation=0.0,        # required
                      depth=0.,                # required
                      )

    channel.response = response
    station = Station(code='ABCD',
                      latitude=0,
                      longitude=0,
                      elevation=0.0,
                      creation_date=UTCDateTime(1970, 1, 1),          # required
                      site=Site(name='Fake Site'),  # required
                      channels=[channel],
                      )

    network = Network(code='XX',
                     stations=[station])
    inventory = Inventory(networks=[network], source="demo")

    inventory.write("Test.xml", format="stationxml", validate=True)
Code Example #2
import obspy
import gemlog
from obspy.clients.nrl import NRL


def test_demo():
    ## following is drawn as directly as possible from demo/README.md
    gemlog.gem2ms.main(['-i', '../demo/demo/raw/'])
    ##########################
    coords = gemlog.summarize_gps('gps', output_file = 'project_coords.csv', station_info = '../demo/demo/station_info.txt')
    gemlog.rename_files('mseed/*', station_info = '../demo/demo/station_info.txt', output_dir = 'renamed_mseed')
    nrl = NRL()
    response = nrl.get_response(sensor_keys = ['Gem', 'Gem Infrasound Sensor v1.0'],
                                datalogger_keys = ['Gem', 'Gem Infrasound Logger v1.0',
                                                   '0 - 128000 counts/V']) # may cause warning--ok to ignore

    ## create an inventory of all sensors used in this project--may cause warnings
    inv = gemlog.make_gem_inventory('../demo/demo/station_info.txt', coords, response)
    inv.write('NM_inventory.xml', format='STATIONXML')
    
    ## read the data
    data = obspy.read('renamed_mseed/*')
    print(data)

    ## combine traces so that each station has one trace
    data.merge()
    print(data)

    ## deconvolve the instrument responses using the inventory already created
    inv = obspy.read_inventory('NM_inventory.xml')
    data.remove_response(inv) # may cause warnings--ok to ignore
    
    ## filter data above 1 Hz (lower frequencies are often wind noise)
    data.filter("highpass", freq=1.0)
    
    ## trim the data around a known event
    t1 = obspy.UTCDateTime('2020-05-10T12:14:00')
    t2 = obspy.UTCDateTime('2020-05-10T12:15:00')
    data.trim(t1, t2)
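    ## optional follow-up (a sketch, not part of the original demo): preview the
    ## processed traces and save them; the output file name is an assumption
    data.plot()
    data.split().write('demo_event.mseed', format='MSEED')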
Code Example #3
File: test_obspy.py Project: junqfisica/SDP
    def test_response(self):
        nrl = NRL()
        description = None
        try:
            if description:
                sensor_keys = ["Lennartz", "LE-3D/5s", description]
            else:
                sensor_keys = ["Lennartz", "LE-3D/5s"]
            datalogger_keys = ["DiGOS/Omnirecs", "DATACUBE", "1", "400"]
            response = nrl.get_response(sensor_keys=sensor_keys,
                                        datalogger_keys=datalogger_keys)
            print(response)
        except KeyError:
            print(None)
Code Example #4
from obspy.clients.nrl import NRL


def do_plot():

    nrl = NRL('http://ds.iris.edu/NRL/')
    datalogger_keys = ['REF TEK', 'RT 130 & 130-SMA', '1', '40']
    sensor_keys = [
        'Streckeisen', 'STS-2', '1500', '3 - installed 04/97 to present'
    ]

    response = nrl.get_response(sensor_keys=sensor_keys,
                                datalogger_keys=datalogger_keys)

    response.plot(min_freq=.001, outfile="sts2-rt130.png")

    for stage in response.response_stages:
        print(stage)
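    # Optional follow-up (a sketch, not in the original example): the same
    # Response object can also be evaluated numerically; the frequency range
    # below is illustrative.
    import numpy as np
    freqs = np.logspace(-3, 2, 100)  # 0.001 Hz to 100 Hz
    cresp = response.get_evalresp_response_for_frequencies(freqs, output="VEL")
    print(abs(cresp[:5]))            # amplitude at the first few frequencies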
Code Example #5
File: test_obspy.py Project: junqfisica/SDP
    def test_data_logger(self):
        nrl = NRL()
        data_logger = nrl.dataloggers
        manufacture_names = [data_logger[key] for key in data_logger.keys()]
        datalogger_names = []
        for mfk in manufacture_names:
            datalogger_names.extend(list(mfk.keys()))

        print(datalogger_names)
Code Example #6
File: test_obspy.py Project: junqfisica/SDP
    def test_data_sensor(self):
        nrl = NRL()
        sensor = nrl.sensors
        manufacture_names = [sensor[key] for key in sensor.keys()]
        sensor_names = []
        for mfk in manufacture_names:
            sensor_names.extend(list(mfk.keys()))

        print(sensor_names)
Code Example #7
File: equipments_model.py Project: junqfisica/SDP
    def get_gain_from_nrl(manufactory, instrument):
        """
        Uses Obspy NRL library to get the gain.

        :return: A list of gain.
        """
        nrl = NRL()
        try:
            gains = nrl.dataloggers[manufactory][instrument]
            gains = list(gains)
        except KeyError:
            gains = []

        return gains
Code Example #8
File: equipments_model.py Project: junqfisica/SDP
    def get_sample_rate_from_nrl(manufactory, instrument, gain):
        """
        Uses Obspy NRL library to get the sample rate from the instrument.

        :return: A list of sample rate.
        """
        nrl = NRL()
        try:
            sample_rates = nrl.dataloggers[manufactory][instrument][str(gain)]
            sample_rates = list(sample_rates)
        except KeyError:
            sample_rates = []

        return sample_rates
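A hypothetical usage sketch of the two helpers above (assuming they are static methods of the EquipmentModel class referenced later in this file; the manufacturer and model strings are taken from the NRL examples earlier in this section):

gains = EquipmentModel.get_gain_from_nrl("REF TEK", "RT 130 & 130-SMA")
if gains:
    rates = EquipmentModel.get_sample_rate_from_nrl("REF TEK", "RT 130 & 130-SMA",
                                                    gains[0])
    print(gains, rates)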
Code Example #9
File: equipments_model.py Project: junqfisica/SDP
    def get_all_sensors_from_nrl():
        """
        Uses Obspy NRL library to get all sensor's names available.

        :return: A list of sensor names.
        """
        nrl = NRL()
        manufacture_names = list(nrl.sensors)
        sensor_names = []
        for mfn in manufacture_names:
            sensor_names.extend(
                EquipmentModel.__get_instruments_from_nrl(mfn, True))

        return sensor_names
Code Example #10
File: equipments_model.py Project: junqfisica/SDP
    def get_all_dataloggers_from_nrl():
        """
        Uses Obspy NRL library to get all data loggers names available.

        :return: A list of data loggers names.
        """
        nrl = NRL()
        manufacture_names = list(nrl.dataloggers)
        datalogger_names = []
        for mfn in manufacture_names:
            datalogger_names.extend(
                EquipmentModel.__get_instruments_from_nrl(mfn, False))

        return datalogger_names
Code Example #11
File: equipments_model.py Project: junqfisica/SDP
    def get_sensor_extra_information(manufactory, instrument):
        """
        Uses Obspy NRL library to get the sensor extra information.

        :return: A list of extra information.
        """
        nrl = NRL()
        try:
            extra_info = nrl.sensors[manufactory][instrument]
            extra_info = list(extra_info)
            for info in extra_info:
                if info.startswith("http:") or info.startswith("https:"):
                    return []
        except KeyError:
            extra_info = []

        return extra_info
Code Example #12
File: equipments_model.py Project: junqfisica/SDP
    def get_all_manufactures_nrl(instrument_type: str):
        """
        Uses Obspy NRL library to get all manufactures of sensors/datalogger.

        :param instrument_type: The instrument type, ie: Datalogger or Sensor.

        :return: A list of manufactures.
        """
        nrl = NRL()
        if instrument_type == "Datalogger":
            manufacture_names = list(nrl.dataloggers)
        elif instrument_type == "Sensor":
            manufacture_names = list(nrl.sensors)
        else:
            raise InvalidInstrumentType(
                "The instrument type {} is not valid.".format(instrument_type))

        return manufacture_names
Code Example #13
File: equipments_model.py Project: junqfisica/SDP
    def __get_instruments_from_nrl(manufactory: str, is_sensor: bool):
        """
        Uses Obspy NRL library to get instruments from the given manufactory.

        :param is_sensor: If true get sensors, otherwise get dataloggers.

        :param manufactory: The manufactory of sensors/datalogger.

        :return: A list of sensors/dataloggers names.
        """
        nrl = NRL()
        try:
            if is_sensor:
                instrument = nrl.sensors[manufactory]
            else:
                instrument = nrl.dataloggers[manufactory]
            return list(instrument)
        except KeyError:
            return []
Code Example #14
    def nrl_client(self):
        """Initiate a nominal response library object."""
        from obspy.clients.nrl import NRL

        return NRL()
Code Example #15
from obspy.clients.nrl import NRL
from obspy.core.inventory.response import Response
from obspy.io.xseed.parser import Parser
from obspy.clients.nrl.client import NRLDict
from obspy.core.inventory.response import PolesZerosResponseStage
nrl = NRL()

# nrl.sensors and nrl.dataloggers are already NRLDict instances, so no extra
# NRLDict wrapping of the client object is needed here.


def node_search(node):
    # Follow the first branch of an NRL tree node, collecting keys until a
    # leaf (a tuple describing a concrete response) is reached.
    root = []
    for elem in node:
        root.append(elem)
        if isinstance(node[elem], tuple):
            return root
        root.extend(node_search(node[elem]))
        return root
    return root
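A minimal usage sketch of node_search, assuming the corrected version above (the manufacturer key is only an illustration):

first_path = node_search(nrl.sensors['Guralp'])
print(first_path)  # keys leading down the first branch to a concrete response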


if __name__ == "__main__":
    # for man in nrl.sensors:
    #     if man == "Guralp":
    #         print(man)
    #         print("->")
    #         t = NRLDict(nrl.sensors[man])
    #         for product in nrl.sensors[man]:
    #             print("  "+product)
    #             print("   ->")
    #             if nrl.sensors[man][product].__class__ == NRLDict:
    #                 for frequence in nrl.sensors[man][product]:
Code Example #16
def main():
    # Note: defaults such as netc, scode, geolat, geolon, geoelev, date,
    # longname, sps, azims, dips and locs are module-level values defined
    # elsewhere in the original script (not shown in this snippet).
    chans = "EHZ,EHN,EHE"
    # Get StationXML file
    print(f"Interactive StaXML builder")
    print(f"Work in progress...some things hardwired\n\n")
    inv_name = input(f"Enter StationXML file name: ")
    if (os.path.isfile(inv_name)):
        inv = read_inventory(inv_name)
    else:
        print(f"Making new inventory: {inv_name}\n")
        inv = Inventory(networks=[], source="Weston")

    # Net code
    ques = f"Enter Network Code ({str(netc)}) :"
    net_code = str(input(ques) or netc)
    net = Network(code=net_code, stations=[])
    print(f"\n")

    # connect to NRL
    nrl = NRL()

    # Datalogger info
    ret = 0
    digi = f"REF TEK|RT 130S & 130-SMHR|1|200"
    print(f"Input NRL Digi info ( | separated, careful with spaces)....")
    print(f"E.g manufacturer| model| gain| sps\n")
    while ret == 0:
        ques = f"Enter DIGI info ({digi}) :"
        digi = str(input(ques) or digi)
        print(f"\n")
        try:
            nrl.get_datalogger_response(digi.split('|'))
            ret = 1
            print("!!!!! DATA LOGGER SUCCESS!!!\n")
        except Exception as e:
            print(f"Try again ... {e}")

    # Sensor info
    ret = 0
    sensor = f"Streckeisen,STS-1,360 seconds"
    print(f"Input NRL Sensor info ....\n")
    print(f"E.g Manufact|model|Sensitivy\n")
    print(f"Guralp|CMG-40T,30s - 100Hz|800")
    print(f"Sercel/Mark Products|L-22D|5470 Ohms|20000 Ohms")
    print(f"Streckeisen|STS-1|360 seconds")
    print(f"Nanometrics|Trillium Compact 120 (Vault, Posthole, OBS)|754 V/m/s")
    while ret == 0:
        ques = f"Enter sensor info {str(sensor)} :"
        sensor = str(input(ques) or sensor)
        try:
            nrl.get_sensor_response(sensor.split('|'))
            ret = 1
            inst_info = f"{sensor.split('|')[0]} {sensor.split('|')[1]}"
            print("Sensor success!!!!")
        except Exception as e:
            print(f"Try again ... {e}")

    print("Getting full response...")
    try:
        response = nrl.get_response(sensor_keys=sensor.split('|'),
                                    datalogger_keys=digi.split('|'))
        print("Full response success \n\n")
    except Exception as e:
        print(f"Oops .. {e}")
    #
    nstas = int(
        input(
            "Enter number of stations to add with same sensor/digitizer (default 1):"
        ) or 1)
    for i in range(0, nstas):
        ques = "Station code (" + str(scode) + ") :"
        sta_code = str(input(ques) or scode)

        ques = "Station latitude (" + str(geolat) + ") :"
        sta_lat = float(input(ques) or geolat)

        ques = "Station longitude (" + str(geolon) + ") :"
        sta_lon = float(input(ques) or geolon)

        ques = "Station elev(" + str(geoelev) + ") :"
        sta_elev = float(input(ques) or geoelev)

        ques = "Station ondate (" + str(date) + ") :"
        sta_ondate = str(input(ques) or date)

        ques = "Station offdate (" + str(date) + ") :"
        sta_offdate = str(input(ques) or date)

        ques = "Station long name (" + str(longname) + ") :"
        sta_sitename = str(input(ques) or longname)

        sta = Station(code=sta_code,
                      latitude=sta_lat,
                      longitude=sta_lon,
                      elevation=sta_elev,
                      creation_date=UTCDateTime(sta_ondate),
                      site=Site(name=sta_sitename))
        # add station to network
        net.stations.append(sta)

        # Default chan info
        coords = {
            'latitude': sta_lat,
            'longitude': sta_lon,
            'elevation': sta_elev,
            'depth': 0.0,
            'sample_rate': sps
        }

        n = -1
        ques = f"Enter channel names, comma separated ({chans}) :"
        chans = str(input(ques) or chans)
        for j in chans.split(','):
            n += 1
            chantmp = j
            print("Doing channel ", chantmp)
            aztmp = azims[n]
            diptmp = dips[n]
            loc = locs[n]
            for k in coords.keys():
                ques = str(chantmp) + " enter " + k + "(" + str(
                    coords[k]) + "):"
                coords[k] = float(input(ques) or coords[k])

            chan = Channel(code=chantmp,
                           location_code=loc,
                           latitude=coords['latitude'],
                           longitude=coords['longitude'],
                           elevation=coords['elevation'],
                           depth=coords['depth'],
                           azimuth=aztmp,
                           dip=diptmp,
                           sample_rate=coords['sample_rate'],
                           sensor=Equipment(description=inst_info))
            chan.response = response
            sta.channels.append(chan)

    inv.networks.append(net)
    inv.write(inv_name, format="STATIONXML")
Code Example #17
import obspy
from obspy.clients.nrl import NRL
from obspy.core.inventory import Inventory, Network, Station, Channel, Site


def create_new_skeleton_inventory_file(path2xmlfile):
    """
    write a NEW skeleton inventory xml file
    :param path2xmlfile: path to a new xml file.
    :return:
    """
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the id of whoever created the file.
        source="ObsPy-Tutorial")

    net = Network(
        # This is the network code according to the SEED standard.
        code="XX",
        # A list of stations. We'll add one later.
        stations=[],
        description="A test stations.",
        # Start-and end dates are optional.
        start_date=obspy.UTCDateTime(2016, 1, 2))

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABC",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))

    cha = Channel(
        # This is the channel code according to the SEED standard.
        code="HHZ",
        # This is the location code according to the SEED standard.
        location_code="",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=200)

    # By default this accesses the NRL online. Offline copies of the NRL can
    # also be used instead
    nrl = NRL()
    # The contents of the NRL can be explored interactively in a Python prompt,
    # see API documentation of NRL submodule:
    # http://docs.obspy.org/packages/obspy.clients.nrl.html
    # Here we assume that the end points for the data logger and sensor are
    # already known:
    response = nrl.get_response(  # doctest: +SKIP
        sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
        datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])

    # Now tie it all together.
    cha.response = response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)

    # And finally write it to a StationXML file. We also force a validation against
    # the StationXML schema to ensure it produces a valid StationXML file.
    #
    # Note that it is also possible to serialize to any of the other inventory
    # output formats ObsPy supports.
    inv.write(path2xmlfile, format="stationxml", validate=True)
Code Example #18
def beacon(network_code="2P", level="station", comp_list=["N", "E", "Z"]):
    """
    Create Beacon network data from scratch.
    Station information taken from the Site and Sensor field deployment notes
    kept on a shared Google Drive with Yoshi, Jonathan and myself.

    Updated Jun 23, 2022

    .. note::
        Start and end times are based on the MiniSEED data files

    :type network_code: str
    :param network_code: chosen two value code used for the network
    :type level: str
    :param level: level to propagate network creation
    :type comp_list: list of str
    :param comp_list: components to create channels for
    :rtype: obspy.core.inventory.network.Network
    :return: obspy Network object with information propagated to chosen level
    """
    # Station name, Abbreviation, Code, Lat, Lon, Start, End, Instrument type
    station_info = np.array([
        [
            "Pori Rd", "PORI", "RD01", "-40.55475083", "175.9710354",
            "2017-07-18T02:46:10.400000Z", "2019-01-19T04:34:31.600000Z", "60s"
        ],
        [
            "Angora Rd", "ANGR", "RD02", "-40.45974293", "176.4750588",
            "2017-07-18T00:06:19.090000Z", "2019-01-19T13:36:37.970000Z", "60s"
        ],
        [
            "Te Uri Rd", "TURI", "RD03", "-40.2656269", "176.3828498",
            "2017-07-18T00:10:35.140000Z", "2019-02-22T05:19:15.270000Z", "30s"
        ],
        [
            "Porangahau", "PORA", "RD04", "-40.2667317", "176.6344719",
            "2017-07-18T00:18:51.410000Z", "2019-02-05T03:12:25.860000Z", "60s"
        ],
        [
            "Manuhara Rd", "MNHR", "RD05", "-40.4689786", "176.2231874",
            "2017-07-18T04:08:06.500000Z", "2019-02-22T02:45:06.830000Z", "30s"
        ],
        [
            "Dannevirke", "DNVK", "RD06", "-40.2971794", "176.1663731",
            "2017-07-18T02:45:58.130000Z", "2019-03-08T13:03:23.340000Z", "30s"
        ],
        [
            "Waipawa", "WPAW", "RD07", "-39.9017124", "176.5370861",
            "2017-07-18T00:01:13.990000Z", "2019-02-28T08:49:42.780000Z", "60s"
        ],
        [
            "Raukawa", "RAKW", "RD08", "-39.7460611", "176.6205577",
            "2017-07-21T04:54:37.466100Z", "2019-02-06T17:43:41.150000Z", "60s"
        ],
        [
            "McNeill Hill", "MCNL", "RD09", "-39.4447675", "176.6974385",
            "2017-07-21T03:51:49.360000Z", "2019-02-11T13:46:27.440000Z", "60s"
        ],
        [
            "Cape Kidnappers", "CPKN", "RD10", "-39.64661592", "177.0765055",
            "2017-07-23T01:15:24.490000Z", "2018-03-04T14:37:40.050000Z", "60s"
        ],
        [
            "Kahuranaki", "KAHU", "RD11", "-39.78731589", "176.8624521",
            "2017-07-21T04:12:48.360000Z", "2018-03-06T05:22:32.170000Z", "60s"
        ],
        [
            "Kaweka Forest", "KWKA", "RD12", "-39.425214", "176.4228",
            "2017-07-21T04:22:08.830000Z", "2019-02-04T18:04:08.470000Z", "30s"
        ],
        [
            "Kereru", "KERE", "RD13", "-39.643259", "176.3768865",
            "2017-07-21T05:43:56.610000Z", "2019-03-09T00:14:02.930000Z", "60s"
        ],
        [
            "Pukenui", "PNUI", "RD14", "-39.9129963", "176.2001869",
            "2017-07-23T00:16:18.150000Z", "2018-06-13T18:27:00.980000Z", "60s"
        ],
        [
            "Waipukarau", "WPUK", "RD15", "-40.0627107", "176.4391311",
            "2017-07-23T00:10:44.120000Z", "2019-02-11T00:09:50.690000Z", "60s"
        ],
        [
            "Omakere", "OROA", "RD16", "-40.105341", "176.6804449",
            "2017-07-23T00:08:12.220000Z", "2019-02-05T22:59:39.980000Z", "60s"
        ],
        [
            "Te Apiti Rd", "TEAC", "RD17", "-39.90868978", "176.9561896",
            "2017-09-25T02:10:21.585100Z", "2018-03-02T05:51:56.420000Z", "30s"
        ],  # no sensor number, no instr type
        [
            "River Rd", "RANC", "RD18", "-39.929775", "176.7039773",
            "2017-09-25T04:55:05.610000Z", "2019-01-18T20:12:29.850000Z", "30s"
        ],
        [
            "Matapiro Rd", "MATT", "RD19", "-39.5796128", "176.6449024",
            "2018-03-13T20:31:38.610000Z", "2018-06-23T16:11:59.100000Z", "30s"
        ],  # same instr. as RD10
        [
            "Kahuranaki", "KAHU2", "RD20", "-39.79385769", "176.8758813",
            "2018-03-13T04:39:43.610000Z", "2018-08-26T19:00:36.390000Z", "30s"
        ],  # same instr. as RD11
        [
            "Te Apiti Rd", "TEAC2", "RD21", "-39.913152", "176.946881",
            "2018-03-10T09:22:33.610000Z", "2019-01-22T22:57:49.770000Z", "30s"
        ],  # same instr. as RD17
        [
            "Castlepoint", "CAPT", "RD22", "-40.910278", "176.199167",
            "2019-01-01T00:00:00.000000Z", "2019-01-09T00:41:31.120000Z", "60s"
        ],  # unknown sensor number
    ])

    # For setting the network timing
    starttimes = station_info[:, 5]
    endtimes = station_info[:, 6]

    unique_starts = [UTCDateTime(str(_)) for _ in np.unique(starttimes)]
    unique_ends = [UTCDateTime(str(_)) for _ in np.unique(endtimes)]

    min_starttime = min(unique_starts)
    max_endtime = max(unique_ends)

    # Elevations are not known
    default_elevation = 0.0
    default_depth = 0.0

    # Response is the same for all stations. Response information was provided
    # through personal correspondence with GeoNet site selection scientist
    # Jonathan Hanson, but could also be ascertained from the instrument type
    # and datalogger type
    if level == "channel":
        nrl = NRL()
        responses = {
            "30s":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-40T", "30s - 50 Hz", "800"],
                datalogger_keys=[
                    "Nanometrics", "Taurus", "16 Vpp (1)", "Low (default)",
                    "Off", "100"
                ]),
            "60s":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-40T", "60s - 50Hz", "800"],
                datalogger_keys=[
                    "Nanometrics", "Taurus", "16 Vpp (1)", "Low (default)",
                    "Off", "100"
                ])
        }
        sensors = {
            "30s":
            Equipment(type="sensor",
                      manufacturer="Guralp",
                      model="CMG-40T",
                      description=f"30s (Broadband) 30s-50Hz 800 V/m/s"),
            "60s":
            Equipment(type="sensor",
                      manufacturer="Guralp",
                      model="CMG-40T",
                      description=f"60s (Broadband) 60s-50Hz 800 V/m/s"),
        }
        data_logger = Equipment(type="data_logger",
                                manufacturer="Nanometrics",
                                model=f"Taurus",
                                description="16 Vpp (gain 1), 100 sps, low "
                                "impedance, DC removal filter off")

    # Add stations to objects
    stations = []
    for stalist in station_info:
        # Parse the station information
        name = stalist[0]  # e.g. Castlepoint
        nickname = stalist[1]  # e.g. CAPT
        code = stalist[2]  # e.g. RD22
        latitude = float(stalist[3])
        longitude = float(stalist[4])
        start_date = UTCDateTime(stalist[5])
        end_date = UTCDateTime(stalist[6])
        sensor_type = stalist[7]

        # Create channel level objects if required
        if level == "channel":
            channels = []
            for comp in comp_list:
                cha = Channel(code=f"HH{comp}",
                              location_code="10",
                              start_date=start_date,
                              end_date=end_date,
                              latitude=latitude,
                              longitude=longitude,
                              elevation=default_elevation,
                              depth=default_depth,
                              azimuth=0.0,
                              dip=-90.0,
                              sample_rate=100,
                              sensor=sensors[sensor_type],
                              data_logger=data_logger)
                # Attach the response
                cha.response = responses[sensor_type]
                channels.append(cha)
        else:
            channels = None

        # Create the site object to provide information on the site location
        site = Site(name=nickname, description=name)

        # Create the station object
        station = Station(code=code,
                          latitude=latitude,
                          longitude=longitude,
                          elevation=default_elevation,
                          start_date=start_date,
                          end_date=end_date,
                          site=site,
                          creation_date=UTCDateTime(),
                          channels=channels)
        stations.append(station)

    # Create the network object
    network = Network(code=network_code,
                      start_date=min_starttime,
                      end_date=max_endtime,
                      description="Broadband East Coast Network",
                      stations=stations)
    return network
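A hedged usage sketch of the function above, wrapping the returned Network in an Inventory and writing it out (the import, file name, and source string are assumptions, not part of the original module):

from obspy.core.inventory import Inventory

if __name__ == "__main__":
    # write the channel-level network to a StationXML file (names are illustrative)
    net = beacon(level="channel")
    inv = Inventory(networks=[net], source="beacon-example")
    inv.write("beacon_inventory.xml", format="STATIONXML")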
Code Example #19
def sahke(network_code="X2", level="station", comp_list=["N", "E", "Z"]):
    """
    SAHKE transect broadband stations aren't fetchable through FDSN web services,
    so build a network object from the available information that was collected
    from the SAHKE final report provided by Martha Savage.

    I'm not sure if the CMG's are 30s or 60s instruments or how much that 
    actually matters

    Notes from GNS SAHKE Report:

        Instruments and dataloggers:
        CMG3ESP: T004, LTN6
        CMG40T: LE4, T007, T010, T014, T016, T018, T020
        Dataloggers: Reftek-130s 

        3-2-2010 (2010-061): LE4 sampling rate changed from 40Hz to 100Hz
        This isn't relevant for the data that I have...

    NRL variations:
        3ESP:
            Natural Period: "100 s - 50 Hz", "120 s - 50 Hz", "30 s - 50 Hz", 
                            "60 s - 50 Hz"
            Sensitivity: "1500", "2000", "20000"
        40T:
            Natural Period: "100s - 50Hz", "10s - 100Hz", "1s - 100Hz",
                            "20s - 50Hz", "2s - 100Hz", "30s - 100Hz",
                            "30s - 50 Hz", "40s - 100Hz", "5s - 100Hz",
                            "60s - 100Hz", "60s - 50Hz"
            Sensitivity:   "1600", "2000", "20000", "800"
        RT130S:
            Gain: "1", "32"



    :type network_code: str
    :param network_code: chosen two value code used for the network
    :type level: str
    :param level: level to propagate network creation
    :type comp_list: list of str
    :param comp_list: components to create channels for
    :rtype: obspy.core.inventory.network.Network
    :return: obspy Network object with information propagated to chosen level
    """
    # station, location, start, stop, lat, lon, instr type
    station_info = np.array(
        [["LE4", "", "2010-136", "2010-331", -41.3579, 175.6919, "40t"],
         ["LTN6", "LT", "2010-193", "2010-349", -41.1033, 175.3238, "3esp"],
         ["T004", "", "2010-088", "2010-255", -41.3403, 175.6688, "3esp"],
         ["T007", "", "2010-041", "2010-123", -41.3041, 175.6513, "40t"],
         ["T010", "T0", "2010-135", "2010-348", -41.2520, 175.5825, "40t"],
         ["T014", "", "2010-034", "2010-350", -41.2075, 175.5063, "40t"],
         ["T016", "", "2010-088", "2010-322", -41.1893, 175.4737, "40t"],
         ["T018", "", "2010-055", "2010-349", -41.1715, 175.3850, "40t"],
         ["T020", "", "2010-089", "2010-261", -41.1251, 175.3497, "40t"]])

    # For setting the network timing
    starttimes = station_info[:, 2]
    endtimes = station_info[:, 3]

    unique_starts = [UTCDateTime(str(_)) for _ in np.unique(starttimes)]
    unique_ends = [UTCDateTime(str(_)) for _ in np.unique(endtimes)]

    min_starttime = min(unique_starts)
    max_endtime = max(unique_ends)

    # Elevations are not known
    default_elevation = 0.0
    default_depth = 0.0
    default_site = Site(name="SAHKE")

    # Create response information
    if level == "channel":
        nrl = NRL()
        responses = {
            "40t":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-40T", "60s - 50Hz", "2000"],
                datalogger_keys=["REF TEK", "RT 130S & 130-SMHR", "1", "100"]),
            "3esp":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-3ESP", "60 s - 50 Hz", "2000"],
                datalogger_keys=["REF TEK", "RT 130S & 130-SMHR", "1", "100"]),
        }

    # Add stations to Station objects
    stations = []
    for stalist in station_info:
        # Parse the list to avoid confusion with indices
        code = stalist[0]
        location = stalist[1]
        start_date = UTCDateTime(stalist[2])
        end_date = UTCDateTime(stalist[3])
        latitude = float(stalist[4])
        longitude = float(stalist[5])

        # Create channel level objects if required
        if level == "channel":
            channels = []
            for comp in comp_list:
                cha = Channel(code=f"HH{comp}",
                              location_code=location,
                              start_date=start_date,
                              end_date=end_date,
                              latitude=latitude,
                              longitude=longitude,
                              elevation=default_elevation,
                              depth=default_depth,
                              azimuth=0.0,
                              dip=-90.0,
                              sample_rate=100)
                cha.response = responses[stalist[-1]]
                channels.append(cha)
        else:
            channels = None

        # Create the Station object
        station = Station(code=code,
                          start_date=start_date,
                          end_date=end_date,
                          latitude=latitude,
                          longitude=longitude,
                          elevation=default_elevation,
                          site=default_site,
                          creation_date=UTCDateTime(),
                          channels=channels)

        stations.append(station)

    # Build the network and place a description
    network = Network(code=network_code,
                      start_date=min_starttime,
                      end_date=max_endtime,
                      description="SAHKE BBTRANSECT",
                      stations=stations)

    return network
Code Example #20
def beacon(network_code="XX", level="station", comp_list=["N", "E", "Z"]):
    """
    Create Beacon network data from scratch.
    Station information taken from the Site and Sensor field deployment notes
    kept on a shared Google Drive with Yoshi, Jonathan and myself.

    Updated 1.5.2020

    :type network_code: str
    :param network_code: chosen two value code used for the network
    :type level: str
    :param level: level to propagate network creation
    :type comp_list: list of str
    :param comp_list: components to create channels for
    :rtype: obspy.core.inventory.network.Network
    :return: obspy Network object with information propagated to chosen level
    """
    # Station name, Abbreviation, Code, Lat, Lon, Start, End, Instrument type
    station_info = np.array([
        [
            "Pori Rd", "PORI", "RD01", "-40.55475083", "175.9710354",
            "2017-07-19", "2019-04-04", "60s"
        ],
        [
            "Angora Rd", "ANGR", "RD02", "-40.45974293", "176.4750588",
            "2017-07-19", "2019-04-04", "60s"
        ],
        [
            "Te Uri Rd", "TURI", "RD03", "-40.2656269", "176.3828498",
            "2017-07-20", "2019-04-04", "30s"
        ],
        [
            "Porangahau", "PORA", "RD04", "-40.2667317", "176.6344719",
            "2017-07-20", "2019-04-04", "60s"
        ],
        [
            "Manuhara Rd", "MNHR", "RD05", "-40.4689786", "176.2231874",
            "2017-07-20", "2019-04-05", "30s"
        ],
        [
            "Dannevirke", "DNVK", "RD06", "-40.2971794", "176.1663731",
            "2017-07-24", "2019-04-02", "30s"
        ],
        [
            "Waipawa", "WPAW", "RD07", "-39.9017124", "176.5370861",
            "2017-07-24", "2019-04-02", "60s"
        ],
        [
            "Raukawa", "RAKW", "RD08", "-39.7460611", "176.6205577",
            "2017-07-24", "2019-04-02", "60s"
        ],
        [
            "McNeill Hill", "MCNL", "RD09", "-39.4447675", "176.6974385",
            "2017-07-25", "2019-04-03", "60s"
        ],
        [
            "Cape Kidnappers", "CPKN", "RD10", "-39.64661592", "177.0765055",
            "2017-07-25", "2018-03-13", "60s"
        ],
        [
            "Kahuranaki", "KAHU", "RD11", "-39.78731589", "176.8624521",
            "2017-07-25", "2018-03-13", "60s"
        ],
        [
            "Kaweka Forest", "KWKA", "RD12", "-39.425214", "176.4228",
            "2017-07-26", "2019-05-03", "30s"
        ],
        [
            "Kereru", "KERE", "RD13", "-39.643259", "176.3768865",
            "2017-07-26", "2019-04-03", "60s"
        ],
        [
            "Pukenui", "PNUI", "RD14", "-39.9129963", "176.2001869",
            "2017-07-26", "2018-09-08", "60s"
        ],
        [
            "Waipukarau", "WPUK", "RD15", "-40.0627107", "176.4391311",
            "2017-07-27", "2019-04-02", "60s"
        ],
        [
            "Omakere", "OROA", "RD16", "-40.105341", "176.6804449",
            "2017-07-27", "2019-04-04", "60s"
        ],
        [
            "Te Apiti Rd", "TEAC", "RD17", "-39.90868978", "176.9561896",
            "2017-09-25", "2018-03-14", "30s"
        ],  # no sensor number, no instr type
        [
            "River Rd", "RANC", "RD18", "-39.929775", "176.7039773",
            "2017-09-25", "2019-04-03", "30s"
        ],
        [
            "Matapiro Rd", "MATT", "RD19", "-39.5796128", "176.6449024",
            "2018-03-14", "2018-06-25", "30s"
        ],  # same instr. as RD10
        [
            "Kahuranaki", "KAHU2", "RD20", "-39.79385769", "176.8758813",
            "2018-03-13", "2018-09-03", "30s"
        ],  # same instr. as RD11
        [
            "Te Apiti Rd", "TEAC2", "RD21", "-39.913152", "176.946881",
            "2018-03-14", "2019-04-03", "30s"
        ],  # same instr. as RD17
        [
            "Castlepoint", "CAPT", "RD22", "-40.910278", "176.199167",
            "2018-07-20", "2019-05-05", "60s"
        ],  # unknown sensor number
    ])

    # For setting the network timing
    starttimes = station_info[:, 5]
    endtimes = station_info[:, 6]

    unique_starts = [UTCDateTime(str(_)) for _ in np.unique(starttimes)]
    unique_ends = [UTCDateTime(str(_)) for _ in np.unique(endtimes)]

    min_starttime = min(unique_starts)
    max_endtime = max(unique_ends)

    # Elevations are not known
    default_elevation = 0.0
    default_depth = 0.0

    # Response is the same for all stations. Response information was provided
    # through personal correspondence with GeoNet site selection scientist
    # Jonathan Hanson, but could also be ascertained from the instrument type
    # and datalogger type
    if level == "channel":
        nrl = NRL()
        responses = {
            "30s":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-40T", "30s - 50 Hz", "800"],
                datalogger_keys=[
                    "Nanometrics", "Taurus", "16 Vpp (1)", "Low (default)",
                    "Off", "100"
                ]),
            "60s":
            nrl.get_response(
                sensor_keys=["Guralp", "CMG-40T", "60s - 50Hz", "800"],
                datalogger_keys=[
                    "Nanometrics", "Taurus", "16 Vpp (1)", "Low (default)",
                    "Off", "100"
                ])
        }

    # Add stations to objects
    stations = []
    for stalist in station_info:
        # Parse the station information
        name = stalist[0]  # e.g. Castlepoint
        nickname = stalist[1]  # e.g. CAPT
        code = stalist[2]  # e.g. RD22
        latitude = float(stalist[3])
        longitude = float(stalist[4])
        start_date = UTCDateTime(stalist[5])
        end_date = UTCDateTime(stalist[6])

        # Create channel level objects if required
        if level == "channel":
            channels = []
            for comp in comp_list:
                cha = Channel(code=f"HH{comp}",
                              location_code="10",
                              start_date=start_date,
                              end_date=end_date,
                              latitude=latitude,
                              longitude=longitude,
                              elevation=default_elevation,
                              depth=default_depth,
                              azimuth=0.0,
                              dip=-90.0,
                              sample_rate=100)
                # Attach the response
                cha.response = responses[stalist[7]]
                channels.append(cha)
        else:
            channels = None

        # Create the site object to provide information on the site location
        site = Site(name=nickname, description=name)

        # Create the station object
        station = Station(code=code,
                          latitude=latitude,
                          longitude=longitude,
                          elevation=default_elevation,
                          start_date=start_date,
                          end_date=end_date,
                          site=site,
                          creation_date=UTCDateTime(),
                          channels=channels)
        stations.append(station)

    # Create the network object
    network = Network(code=network_code,
                      start_date=min_starttime,
                      end_date=max_endtime,
                      description="Broadband East Coast Network",
                      stations=stations)
    return network
Code Example #21
from obspy.clients.nrl import NRL
from obspy.core.inventory.response import Response
from obspy.io.xseed.parser import Parser
from obspy.clients.nrl.client import NRLDict
from obspy.core.inventory.response import PolesZerosResponseStage
nrl = NRL()

# nrl.sensors and nrl.dataloggers are already NRLDict instances, so no extra
# NRLDict wrapping of the client object is needed here.
for man in nrl.sensors:
    print(man)
    print("->")
    for product in nrl.sensors[man]:
        print(product)
        print("->")
        period_freq = nrl.sensors[man][product]

sensor = nrl.get_sensor_response(
    sensor_keys=['Guralp', 'CMG-40T', '40s - 100Hz', '800'])
print(sensor._get_overall_sensitivity_and_gain())
for stage in sensor.response_stages:
    if isinstance(stage, PolesZerosResponseStage):
        print('Input ' + stage.input_units)
        print('Output ' + stage.output_units)
        print(' Transfer Function Type ' + stage.pz_transfer_function_type)
        print(' Normalization Factor : {norm_factor:g}'.format(
            norm_factor=stage.normalization_factor))
        print(' Normalization Frequency : {norm_frequency:.2f}'.format(
            norm_frequency=stage.normalization_frequency))
        print(' Gain : {gain:.5f}'.format(gain=stage.stage_gain))
        print(stage.zeros)
        print(stage.poles)