예제 #1
0
def do_xml():
    """Build a minimal one-channel inventory with an NRL response and write it to StationXML."""
    # Fetch the nominal response for an STS-2 sensor paired with a REF TEK
    # RT 130 datalogger from the IRIS Nominal Response Library.
    nrl_client = NRL('http://ds.iris.edu/NRL/')
    resp = nrl_client.get_response(
        sensor_keys=['Streckeisen', 'STS-2', '1500', '3 - installed 04/97 to present'],
        datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '40'])

    # Channel with the minimal set of required fields.
    chan = Channel(code='BHZ',
                   location_code='10',
                   latitude=0,
                   longitude=0,
                   elevation=0.0,
                   depth=0.)
    chan.response = resp

    sta = Station(code='ABCD',
                  latitude=0,
                  longitude=0,
                  elevation=0.0,
                  creation_date=UTCDateTime(1970, 1, 1),
                  site=Site(name='Fake Site'),
                  channels=[chan])

    net = Network(code='XX', stations=[sta])
    inventory = Inventory(networks=[net], source="demo")

    # Validate against the StationXML schema on write.
    inventory.write("Test.xml", format="stationxml", validate=True)
예제 #2
0
def dataframe_to_fdsn_station_xml(inventory_df, nominal_instruments, filename, show_progress=True):
    """Export dataframe of station metadata to FDSN station xml file

    :param inventory_df: Dataframe containing all the station records to export.
    :type inventory_df: pandas.DataFrame conforming to table_format.TABLE_SCHEMA
    :param nominal_instruments: Dictionary mapping from channel code to nominal instrument
        characterization
    :type nominal_instruments: {str: Instrument(obspy.core.inventory.util.Equipment,
        obspy.core.inventory.response.Response) }
    :param filename: Output filename
    :type filename: str or path
    """
    pbar = None
    progressor = None
    if show_progress:
        import tqdm
        pbar = tqdm.tqdm(total=len(inventory_df), ascii=True)
        progressor = pbar.update

    # One Network per unique network code, all collected under one Inventory.
    global_inventory = Inventory(networks=[], source='EHB')
    for netcode, data in inventory_df.groupby('NetworkCode'):
        network = dataframe_to_network(netcode, data, nominal_instruments,
                                       progressor=progressor)
        global_inventory.networks.append(network)
    if pbar is not None:
        pbar.close()

    # Serialize the whole inventory in FDSN StationXML format.
    global_inventory.write(filename, format="stationxml")
예제 #3
0
    def write_new_version_inventory(self, new_xml_file=None):
        """
        Re-write the input station XML into a fresh Inventory built with the
        current obspy version, so the output carries updated <Module> and
        <Source> tags.

        :param new_xml_file: Target path for the rewritten XML; when None a
            "new_version_"-prefixed file is created in self.output_dir.
        :return: path of the new xml file
        """
        if new_xml_file is not None:
            new_version_xml = new_xml_file
        else:
            base_name = "new_version_" + os.path.basename(self.input_xml)
            new_version_xml = os.path.join(self.output_dir, base_name)

        print("Target New XML file = ", new_version_xml)

        # Fresh inventory; the source identifies whoever created the file.
        inv2 = Inventory(networks=[],
                         source="Geoscience Australia EFTF AusArray")

        # Copy every network of the original inventory across.
        for network in self.inv_obj.networks:
            inv2.networks.append(network)
            print("The network %s has %s stations." %
                  (network.code, len(network.stations)))

        inv2.write(new_version_xml, format="stationxml",
                   validate=True)  # every Station got equipment

        return new_version_xml
예제 #4
0
def exportStationXml(df, nominal_instruments, output_folder, filename_base):
    """
    Export the dataset in df to FDSN Station XML format.

    For each network in the dataframe, build an obspy Inventory and write a
    per-network StationXML file; afterwards emit a single stationtxt listing
    of the combined inventory.

    :param df: Dataframe containing all the station records to export.
    :type df: pandas.DataFrame conforming to table_format.TABLE_SCHEMA
    :param nominal_instruments: Dictionary mapping from channel code to nominal instrument
        characterization
    :type nominal_instruments: {str: Instrument(obspy.core.inventory.util.Equipment,
        obspy.core.inventory.response.Response) }
    :param output_folder: Name of output folder in which to place the exported XML files
    :type output_folder: str or pathlib.Path
    :param filename_base: Base name of each output file. Exported filename is appended with the
        network code, plus .xml extension.
    :type filename_base: str
    """
    from obspy.core.inventory import Inventory

    pathlib.Path(output_folder).mkdir(exist_ok=True)
    print("Exporting stations to folder {0}".format(output_folder))

    # NOTE(review): show_progress, tqdm, pd2Network and os are assumed to be
    # module-level names -- they are not defined in this function.
    if show_progress:
        pbar = tqdm.tqdm(total=len(df), ascii=True)
        progressor = pbar.update
        std_print = pbar.write
    else:
        progressor = None
        std_print = print

    global_inventory = Inventory(networks=[], source='EHB')
    for netcode, data in df.groupby('NetworkCode'):
        network = pd2Network(netcode, data, nominal_instruments,
                             progressor=progressor)
        global_inventory.networks.append(network)
        # Write this network to its own validated StationXML file; a failure
        # is reported but does not abort the remaining networks.
        per_net_inv = Inventory(networks=[network],
                                source=global_inventory.source)
        fname = "{0}{1}.xml".format(filename_base, netcode)
        try:
            per_net_inv.write(os.path.join(output_folder, fname),
                              format="stationxml",
                              validate=True)
        except Exception as e:
            std_print(e)
            std_print(
                "FAILED writing file {0} for network {1}, continuing".format(
                    fname, netcode))
            continue
    if show_progress:
        pbar.close()

    # Write global inventory text file in FDSN stationtxt inventory format.
    global_inventory.write("station.txt", format="stationtxt")
예제 #5
0
File: io.py  Project: weijias-fork/SeisNN
def read_hyp_inventory(hyp, network, kml_output_dir=None):
    """Parse a hypocenter-program station file (.hyp) into an obspy Inventory.

    The station section is assumed to start after the first blank line and end
    at the second blank line; each station line appears to be fixed-width with
    code, latitude, longitude and elevation fields -- TODO confirm against the
    writer of these files.

    :param hyp: Path to the .hyp file.
    :param network: Network code assigned to every parsed station.
    :param kml_output_dir: If given, also write <network>.kml into this
        directory (created if missing).
    :return: obspy Inventory holding one Network with the parsed stations.
    """
    inventory = Inventory(networks=[], source="")
    net = Network(code=network, stations=[], description="")

    with open(hyp, 'r') as file:
        blank_line = 0
        while True:
            raw = file.readline()
            if not raw:
                # EOF: readline() returns ''. The previous version treated
                # EOF like a blank line and looped forever when the second
                # blank line was missing; break out explicitly instead.
                break
            line = raw.rstrip()

            if not len(line):
                blank_line += 1
                continue

            if blank_line > 1:
                break

            elif blank_line == 1:
                # Fixed-width fields: [1:6] station code, [6:14] latitude,
                # [14:23] longitude, [23:] elevation.
                lat = line[6:14]
                lon = line[14:23]
                elev = float(line[23:])
                station = line[1:6]

                # Hemisphere suffixes make south/west negative.
                if lat[-1] == 'S':
                    NS = -1
                else:
                    NS = 1

                if lon[-1] == 'W':
                    EW = -1
                else:
                    EW = 1

                # Degrees + decimal minutes -> signed decimal degrees.
                lat = (int(lat[0:2]) + float(lat[2:-1]) / 60) * NS
                lat = Latitude(lat)

                lon = (int(lon[0:3]) + float(lon[3:-1]) / 60) * EW
                lon = Longitude(lon)

                sta = Station(code=station,
                              latitude=lat,
                              longitude=lon,
                              elevation=elev)

                net.stations.append(sta)

    inventory.networks.append(net)

    if kml_output_dir:
        os.makedirs(kml_output_dir, exist_ok=True)
        inventory.write(kml_output_dir + "/" + network + ".kml", format="KML")

    return inventory
예제 #6
0
def save_station_local_plots(df,
                             plot_folder,
                             progressor=None,
                             include_stations_list=True):
    """
    Save visual map plot per station, saved to file netcode.stationcode.png.

    :param df: Dataframe of station records to save.
    :type df: pandas.DataFrame conforming to table_format.TABLE_SCHEMA
    :param plot_folder: Name of output folder
    :type plot_folder: str
    :param progressor: Callable receiving incremental progress updates, defaults to None
    :param include_stations_list: If True, also export a stationtxt file
        alongside each png file, defaults to True
    :type include_stations_list: bool, optional
    """
    # NOTE(review): dataframe_to_network, no_instruments and plt are assumed
    # to be module-level names; they are not defined in this function.
    dest_path = os.path.join(plot_folder, "stations")
    pathlib.Path(dest_path).mkdir(parents=True, exist_ok=True)

    failed = []
    grouped = df.groupby(['NetworkCode', 'StationCode'])
    for (netcode, statcode), records in grouped:
        network = dataframe_to_network(netcode, records, no_instruments)
        station_name = ".".join([netcode, statcode])
        png_path = os.path.join(dest_path, station_name + ".png")
        try:
            # Local-projection basemap plot written straight to file.
            fig = network.plot(projection="local",
                               resolution="l",
                               outfile=png_path,
                               continent_fill_color="#e0e0e0",
                               water_fill_color="#d0d0ff",
                               color="#c08080")
            plt.close(fig)
        except Exception:
            failed.append(station_name)

        if include_stations_list:
            # Companion stationtxt listing next to the png.
            station_inv = Inventory(networks=[network], source='EHB')
            txt_path = os.path.join(dest_path, station_name + ".txt")
            station_inv.write(txt_path, format="stationtxt")

        if progressor:
            progressor(len(records))

    if failed:
        print("FAILED plotting on the following stations:")
        print("\n".join(failed))
    else:
        print("SUCCESS!")
예제 #7
0
    def test_inventory_can_be_initialized_with_no_arguments(self):
        """
        Source and networks need not be specified.
        """
        inv = Inventory()
        self.assertEqual(inv.networks, [])
        self.assertEqual(inv.source, "ObsPy %s" % obspy.__version__)

        # Round-trip the (technically schema-invalid, but still useful)
        # empty inventory through an in-memory StationXML document.
        with io.BytesIO() as buf:
            inv.write(buf, format="stationxml")
            buf.seek(0, 0)
            roundtripped = read_inventory(buf)

        self.assertEqual(inv, roundtripped)
예제 #8
0
    def test_inventory_can_be_initialized_with_no_arguments(self):
        """
        Source and networks need not be specified.
        """
        empty_inv = Inventory()
        self.assertEqual(empty_inv.networks, [])
        expected_source = "ObsPy %s" % obspy.__version__
        self.assertEqual(empty_inv.source, expected_source)

        # Serialize to an in-memory buffer and read it back; the result
        # would not be valid StationXML on disk but must still round-trip.
        with io.BytesIO() as buf:
            empty_inv.write(buf, format="stationxml")
            buf.seek(0, 0)
            reread = read_inventory(buf)

        self.assertEqual(empty_inv, reread)
예제 #9
0
    def test_writing_module_tags(self):
        """
        Tests the writing of ObsPy related tags.
        """
        inv = Inventory(networks=[Network(code="UL")], source="BLU")

        buf = io.BytesIO()
        inv.write(buf, format="StationXML", validate=True)
        buf.seek(0, 0)
        stripped = [line.strip()
                    for line in buf.read().decode().splitlines()]

        # First <Module> element should carry the ObsPy version string.
        module_line = [line for line in stripped
                       if line.startswith("<Module>")][0]
        self.assertTrue(fnmatch.fnmatch(module_line,
                                        "<Module>ObsPy *</Module>"))

        # And the module URI should point at the ObsPy home page.
        module_uri_line = [line for line in stripped
                           if line.startswith("<ModuleURI>")][0]
        self.assertEqual(module_uri_line,
                         "<ModuleURI>https://www.obspy.org</ModuleURI>")
0
    def test_writing_module_tags(self):
        """
        Tests the writing of ObsPy related tags.
        """
        network = Network(code="UL")
        inventory = Inventory(networks=[network], source="BLU")

        xml_buf = io.BytesIO()
        inventory.write(xml_buf, format="StationXML", validate=True)
        xml_buf.seek(0, 0)
        document_lines = xml_buf.read().decode().splitlines()

        def first_tag_line(tag):
            # First whitespace-stripped line starting with the given tag.
            return [ln.strip() for ln in document_lines
                    if ln.strip().startswith(tag)][0]

        self.assertTrue(fnmatch.fnmatch(first_tag_line("<Module>"),
                                        "<Module>ObsPy *</Module>"))
        self.assertEqual(first_tag_line("<ModuleURI>"),
                         "<ModuleURI>https://www.obspy.org</ModuleURI>")
예제 #11
0
    def test_empty_network_code(self):
        """
        Tests that an empty string is acceptable.
        """
        # An empty string is allowed.
        n = Network(code="")
        assert n.code == ""

        # But None is not allowed.
        with pytest.raises(ValueError, match='A code is required'):
            Network(code=None)

        # Should still serialize to something.
        inv = Inventory(networks=[n])
        with io.BytesIO() as buf:
            inv.write(buf, format="stationxml", validate=True)
            buf.seek(0, 0)
            inv2 = read_inventory(buf)

        assert inv == inv2
예제 #12
0
def split_inventory_by_network(obspy_inv, output_folder, validate=False):
    """Export a station XML file per network for each network in given obspy Inventory.

    :param obspy_inv: Obspy Inventory containing the networks to export to file.
    :type obspy_inv: obspy.core.inventory.inventory.Inventory
    :param output_folder: Folder in which to output the per-network XML files. Will be created if doesn't yet exist.
    :type output_folder: str or pathlib.Path
    :param validate: Whether to validate the station data on write, defaults to False
    :type validate: bool, optional
    """
    # NOTE(review): show_progress and tqdm are assumed to be module-level names.
    pathlib.Path(output_folder).mkdir(exist_ok=True)

    pbar = None
    if show_progress:
        pbar = tqdm.tqdm(total=len(obspy_inv.networks), ascii=True)
        std_print = pbar.write
    else:
        std_print = print

    # Duplicate network codes can occur; suffix each output file with a
    # per-code occurrence counter so file names stay unique.
    network_count = defaultdict(int)
    for network in obspy_inv:
        if show_progress:
            pbar.update()
            pbar.set_description("Network {}".format(network.code))
        occurrence = network_count[network.code]
        network_count[network.code] += 1
        fname = "network_{}_{}.xml".format(network.code, occurrence)
        single_net_inv = Inventory(networks=[network], source=obspy_inv.source)
        try:
            single_net_inv.write(os.path.join(output_folder, fname),
                                 format="stationxml",
                                 validate=validate)
        except Exception as e:
            # Report the failure but keep exporting the remaining networks.
            std_print(str(e))
            std_print(
                "FAILED writing file {0} for network {1}, continuing".format(
                    fname, network.code))
            continue
    if show_progress:
        pbar.close()
예제 #13
0
    def test_empty_network_code(self):
        """
        Tests that an empty string is acceptable.
        """
        # An empty string is allowed.
        n = Network(code="")
        self.assertEqual(n.code, "")

        # But None is not allowed.
        with self.assertRaises(ValueError) as e:
            Network(code=None)
        self.assertEqual(e.exception.args[0], "A code is required")

        # Should still serialize to something.
        inv = Inventory(networks=[n])
        with io.BytesIO() as buf:
            inv.write(buf, format="stationxml", validate=True)
            buf.seek(0, 0)
            inv2 = read_inventory(buf)

        self.assertEqual(inv, inv2)
예제 #14
0
    def test_empty_network_code(self):
        """
        Tests that an empty string is acceptable.
        """
        # An empty string is allowed.
        n = Network(code="")
        self.assertEqual(n.code, "")

        # But None is not allowed.
        with self.assertRaises(ValueError) as e:
            Network(code=None)
        self.assertEqual(e.exception.args[0], "A code is required")

        # Should still serialize to something.
        inv = Inventory(networks=[n])
        with io.BytesIO() as buf:
            inv.write(buf, format="stationxml", validate=True)
            buf.seek(0, 0)
            inv2 = read_inventory(buf)

        self.assertEqual(inv, inv2)
예제 #15
0
def dataless2stationXml(datalessFileName, xmlFileName):
    """Convert a dataless SEED volume into a validated StationXML file."""
    # Parse the dataless seed volume.
    parser = Parser(datalessFileName)

    # Build a lookup of unit names keyed by unit lookup code.
    units = {}
    for abbrev in parser.abbreviations:
        if abbrev.name == 'Units Abbreviations':
            units[abbrev.unit_lookup_code] = abbrev.unit_name

    # Map SEED transfer-function type codes to their descriptions.
    transFuncs = {
        'A': 'LAPLACE (RADIANS/SECOND)',
        'B': 'ANALOG (HERTZ)',
        'C': 'COMPOSITE',
        'D': 'DIGITAL (Z-TRANSFORM)',
    }

    # Extract one Station object (plus its network code) per station block.
    stations = []
    staNetCodes = []
    for stationBlock in parser.stations:
        station, staNetCode = getStation(stationBlock, units, transFuncs)
        stations.append(station)
        staNetCodes.append(staNetCode)

    # Group the stations under one Network per unique network code.
    staNetCodes = np.array(staNetCodes)
    networks = []
    for aNet in np.unique(staNetCodes):
        members = [stations[idx]
                   for idx in np.where(staNetCodes == aNet)[0]]
        networks.append(Network(aNet, stations=members))

    # Wrap everything in an Inventory and write it out with schema validation.
    inv = Inventory(networks, 'Lazylyst')
    inv.write(xmlFileName, format='stationxml', validate=True)
예제 #16
0
def staCsv2Xml(staCsvPath, staXmlPath, source='Lazylyst'):
    """Convert a station CSV file to a validated StationXML file.

    Assumed column layout: 0=code, 1=latitude, 2=longitude, 3=elevation,
    5=network code (column 4 unused here) -- TODO confirm against the writer.
    """
    # Load the csv file as strings.
    info = np.genfromtxt(staCsvPath, delimiter=',', dtype=str)

    # Build one Network per unique network code found in column 5.
    networks = []
    for net in np.unique(info[:, 5]):
        rows = info[np.where(info[:, 5] == net)]
        stations = [Station(row[0],
                            row[1],
                            row[2],
                            row[3],
                            site=Site(''),
                            creation_date=UTCDateTime(1970, 1, 1))
                    for row in rows]
        networks.append(Network(net, stations=stations))

    # Generate the inventory object, and save it as a station XML.
    inv = Inventory(networks=networks, source=source)
    inv.write(staXmlPath, format='stationxml', validate=True)
예제 #17
0
    def modify_invenory(self,
                        gps_clock_corr_csv=None,
                        orient_corr_json=None,
                        equipment_csv=None):
        """
        Modify the existing station XML files to include new metadata:
        - add equipment sensor digitizer
        - add extra metadata: GPS correction
        - add extra metadata: Orientation correction

        Args:
            gps_clock_corr_csv: path to a CSV file of GPS clock corrections, optional
            orient_corr_json: path to a JSON file of orientation corrections, optional
            equipment_csv: path to a CSV file describing sensors/digitizers, optional

        Returns: the final station_xml file modified with new metadata: inv2_xml_file

        NOTE(review): the method name is misspelled ("invenory") but is kept
        as-is because external callers may depend on it.  Also note the
        `return` at the end of the loop over networks -- see comment there.
        """

        # Construct a new inventory object of networks.
        # This will use new obspy version and new attributes:
        inv2 = Inventory(
            # We'll add networks later.
            networks=[],
            # The source should be the id whoever create the file.
            source="Geoscience Australia EFTF AusArray PST")

        # output dir for modified station inventory xml files
        out_dir = self.output_dir  # "/home/fzhang/tmpdir"

        # Load the three correction/equipment inputs up front; each helper is
        # defined elsewhere in this module.
        net, sta, csv_data = get_csv_correction_data(gps_clock_corr_csv)
        net_sta, oricorr_json_data = get_orientation_corr(orient_corr_json)
        my_equip_obj = EquipmentExtractor(csvfile=equipment_csv)

        big_inv = self.inv_obj

        for a_net in big_inv.networks:

            print("The number of station-nodes in the network =",
                  len(a_net.stations))

            for a_sta in a_net.stations:
                # print(a_net.code, a_sta.code)  # this contains 328 pairs, but they are NOT unique, station code may repeat.

                # Sub-inventory holding just this (network, station) pair.
                a_inv = big_inv.select(
                    network=a_net.code,
                    station=a_sta.code)  # .copy appears to have no effect here

                # print (a_sta.code, " stations has %s channels"%len(a_sta))

                # Look up the recorded sensor; fall back to a nominal default
                # when the equipment CSV has no entry for this station.
                _sensors = my_equip_obj.get_sensors(a_net.code, a_sta.code)
                if len(_sensors) > 0:
                    sensor_desc = _sensors[0].get("Description")
                    sensor_sernumb = _sensors[0].get("SerNumber")
                else:
                    print("%s %s  No sensors !" % (a_net.code, a_sta.code))
                    # sensor_desc = "NA Sensor for (%s,%s)" % (a_net.code, a_sta.code)
                    sensor_desc = "Nanometrics Trillium Compact 120s"
                    sensor_sernumb = "N/A"

                # Same fallback scheme for the digitizer.
                _digitizers = my_equip_obj.get_digitizer(
                    a_net.code, a_sta.code)
                if len(_digitizers) > 0:
                    dig_desc = _digitizers[0].get("Description")
                    dig_sernumb = _digitizers[0].get("SerNumber")
                else:
                    print("%s %s  No digitizers !" % (a_net.code, a_sta.code))
                    #dig_desc = "NA Digitizer for (%s,%s)" % (a_net.code, a_sta.code)
                    dig_desc = "Guralp Minimus"
                    dig_sernumb = "N/A"

                # modify station metadata
                my_sensor = obspy.core.inventory.util.Equipment(
                    type="Sensor",
                    description=sensor_desc,
                    serial_number=sensor_sernumb)

                # my_digitizer = obspy.core.inventory.util.Equipment(type="Digitizer", description="Guralp Minimus",serial_number="MIN-A456")
                my_digitizer = obspy.core.inventory.util.Equipment(
                    type="Digitizer",
                    description=dig_desc,
                    serial_number=dig_sernumb)

                # Attach the equipment to the station (mutates big_inv too,
                # since a_sta is shared with the original inventory).
                a_sta.equipments = [my_sensor, my_digitizer]

                # get station start_ end_date and split csv_data
                start_dt = a_sta.start_date
                end_dt = a_sta.end_date

                # Collect the extra (non-StationXML) metadata for this station.
                ajson = StationMetadataExtra(a_net.code,
                                             a_sta.code,
                                             start_datetime=start_dt,
                                             end_datetime=end_dt)

                # generate/format extra metadata from inputs
                mpdf = ajson.add_gps_correction_from_csv(csv_data)

                # updated the ajson object with more metadata, such as orientation corr
                ajson.add_orientation_correction(oricorr_json_data)

                # Persist the per-station extra metadata as a JSON sidecar file.
                ajson.write_metadata2json(
                    os.path.join(
                        out_dir, "%s.%s_%s_extra_metadata.json" %
                        (a_net.code, a_sta.code, str(start_dt))))

                # Now, ready to write the ajson obj into new xml file
                mformat = "JSON"

                # Embed the JSON string into the station's <extra> namespace tag.
                my_tag = AttribDict()
                my_tag.namespace = GA_NameSpace

                my_tag.value = ajson.make_json_string(
                )  # store all the extra metadata into a json string.

                a_sta.extra = AttribDict()
                a_sta.extra.GAMetadata = my_tag

                # prepare to write out a modified xml file
                stationxml_with_extra = '%s.%s_station_metadata_%s.xml' % (
                    a_net.code, a_sta.code, mformat)

                if out_dir is not None and os.path.isdir(out_dir):
                    stationxml_with_extra = os.path.join(
                        out_dir, stationxml_with_extra)

                # Per-station XML with the GA namespace mapping.
                a_inv.write(stationxml_with_extra,
                            format='STATIONXML',
                            nsmap={'GeoscienceAustralia': GA_NameSpace})

            # Problem:
            # sta_file_name2 = "%s_%s_station2.xml"%(a_net.code, a_sta.code)
            # # OA_CE28 was written 3-times!!!!!! due to multiple (OA,CE28)-station-nodes
            # There will be 119 xml files written in this loop of 328 items. However, the final results missed 119 equipments!!
            # outxml2 = os.path.join(OUTPUT_DIR, sta_file_name2)
            #
            # inv2.networks = a_inv.networks
            #
            # inv2.write(outxml2,format="stationxml", validate=True) # nsmap={'GeoscienceAustralia': GA_NameSpace})

            # After the modification of ALL the station objects,
            # write the big inventory in new object inv2
            inv2.networks = []
            inv2.networks.append(a_net)
            inv2_xml_file = os.path.join(out_dir,
                                         a_net.code + "_stations2.xml")
            inv2.write(inv2_xml_file,
                       format="stationxml",
                       nsmap={'GeoscienceAustralia': GA_NameSpace},
                       validate=True)  # every Station got equipment

            # Add responses:
            resp_obj = read_response()
            self.add_response_into_stationxml(inv2, resp_obj)

            # and the original write out again to check what has been modified?
            post_orig = os.path.join(out_dir,
                                     a_net.code + "_stations_post_orig.xml")
            big_inv.write(post_orig,
                          format="stationxml",
                          nsmap={'GeoscienceAustralia': GA_NameSpace},
                          validate=True)  # also has the Sensors etc

            # NOTE(review): this return executes at the end of the FIRST
            # iteration over big_inv.networks, so any further networks are
            # never processed -- confirm whether that is intentional.
            return inv2_xml_file
예제 #18
0
def main(argv):
    '''@package isc2stnxml
       It gathers station information from all STN files provided in ISC and Engdahl catalogues assigning correct network code.
       When proper network code can not be identified the program just guess it, sorry...
    '''
    # NOTE(review): legacy Python 2 code (print statements, xrange) -- it will
    # not run under Python 3 without porting.
    inv = read_inventory("IRIS-ALL.xml")

    # unknown stations in Indonesia are usually installed by Potsdam and we assume they have network name GE
    default_net = 'GE'
    # Merge the two Engdahl and the two ISC station lists, de-duplicating rows.
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    our_xml = Inventory(networks=[], source='EHB')

    # For every Engdahl station, find the closest matching ISC record by
    # great-circle distance, then see if the IRIS XML inventory has a better match.
    for i in xrange(ehb.shape[0]):
        filed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                # Multiple ISC candidates with this code: keep the nearest one.
                for j in xrange(stn_found.shape[0]):
                    dist = locations2degrees(np.float(stn_found[j, 2]),
                                             np.float(stn_found[j, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(np.float(stn_found[0, 2]),
                                             np.float(stn_found[0, 3]),
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                record = stn_found[0, :]

#                Now we try to find the same station in XML file
#                if min_dist > 1. or stn_found.shape[0]==0:

        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # we filed to find station anywhere and assign dummy values
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            min_dist = 0.
            filed = True
        else:
            # if station is found somehwere we try to iterate and see if XML has data giving it preference through adding extra value to min_dist found in ISC
            if len(xstn_found) > 0:
                #                        print "----------",len(xstn_found)
                #                        print xstn_found[0][0].latitude
                min_dist = min_dist + 0.1
                for j in xrange(len(xstn_found)):
                    dist = locations2degrees(xstn_found[j][0].latitude,
                                             xstn_found[j][0].longitude,
                                             np.float(ehb[i, 1]),
                                             np.float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        #                                print record
                        xml = True

# last defence if stations have been done but distance between declared and found locations are more than 1 degree
        if min_dist > 1:
            record = [
                ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3], 'Z',
                '1964-1-1 00:00:00', '2599-12-31 23:59:59'
            ]
            filed = True
        if xml:
            #our_xml.networks.append(record)
            xml = False

        else:
            if filed:

                # Pad a missing/short termination date before cataloguing.
                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)

            else:

                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]

                for k in xrange(stn_found.shape[0]):
                    net = Network(code=stn_found[k, 1],
                                  stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(
            stn_found[stn_found == ' ']) > 0:
        print "Some elements are empty, check the list"

    # we composed our inventory. However some stations from ISC list can be left behind. We check if some stations in ISC are forgotten
    lost = []
    for j in xrange(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    # Build the output inventory: one single-station Network per catalogue row,
    # each with one vertical channel at the station coordinates.
    for k in xrange(stn_found.shape[0]):

        net = Network(code=stn_found[k, 1], stations=[], description=' ')
        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta=Station(code=stn_found[k,0],creation_date=utcdatetime.UTCDateTime(stn_found[k,6]), \
        termination_date=utcdatetime.UTCDateTime(stn_found[k,7]), \
        site=Site(name=' '), \
        latitude=np.float(stn_found[k,2]), \
        longitude=np.float(stn_found[k,3]), \
        elevation=np.float(stn_found[k,4]))

        cha=Channel(code=stn_found[k,5], \
        depth=0., \
        azimuth=0., \
        dip=-90., \
        location_code='', \
        latitude=np.float(stn_found[k,2]), \
        longitude=np.float(stn_found[k,3]), \
        elevation=np.float(stn_found[k,4]))

        sta.channels.append(cha)
        net.stations.append(sta)
        our_xml.networks.append(net)


#             print 'np',stn_found[k,:]

    # Write the composed inventory in both StationXML and stationtxt formats.
    our_xml.write("station.xml", format="stationxml", validate=True)
    our_xml.write("station.txt", format="stationtxt")
예제 #19
0
                cha.channel_number = station_df['{0}_num'.format(comp)]
                cha.sensor = Equipment(
                    serial_number=station_df['{0}_id'.format(comp)])
            elif 'e' in comp:
                cha = Channel(code=comp.upper(),
                              location_code="",
                              latitude=station_df['lat'],
                              longitude=station_df['lon'],
                              elevation=station_df['nm_elev'],
                              depth=0,
                              azimuth=station_df['{0}_azm'.format(comp)],
                              dip=0,
                              sample_rate=station_df['sampling_rate'])
                cha.extra = {'dipole_length': {'value': 10, 'namespace': 'MT'}}
#                cha.comments = Comment(['Dipole Length (m) = {0:.1f}'.format(station_df['{0}_len'.format(comp)])])

            sta.channels.append(cha)
    # Now tie it all together.
    #cha.response = response

    net.stations.append(sta)

# And finally write it to a StationXML file. We also force a validation against
# the StationXML schema to ensure it produces a valid StationXML file.
#
# Note that it is also possible to serialize to any of the other inventory
# output formats ObsPy supports.
inv.write("station.xml",
          format="STATIONXML",
          validate=True,
          nsmap={'MT_ns': 'MT'})
예제 #20
0
def main(argv):
    """Merge EHB and ISC station lists with the IRIS inventory and write
    per-network StationXML files plus a station.txt summary.

    Stations are matched by code (closest record wins when a code is
    duplicated), XML metadata is preferred over ISC coordinates, and
    stations found nowhere receive dummy records on the default network.
    Channels are only emitted for codes with a known instrument response.

    :param argv: command-line arguments (currently unused).
    """
    inv = read_inventory("IRIS-ALL.xml")
    sensorDict, responseDict = extract_unique_sensors_responses(inv)
    print('\nFound {0} response objects with keys: {1}'.format(
        len(responseDict.keys()), responseDict.keys()))

    # Unknown stations in Indonesia are usually installed by Potsdam and we
    # assume they have network name GE.
    default_net = 'GE'
    ehb1 = read_eng('BMG.STN')
    ehb2 = read_eng('ISC.STN')
    ehb = np.unique(np.vstack((ehb1, ehb2)), axis=0)

    isc1 = read_isc('ehb.stn')
    isc2 = read_isc('iscehb.stn')
    isc = np.unique(np.vstack((isc1, isc2)), axis=0)

    catalogue = []
    for i in range(ehb.shape[0]):
        filed = False
        xml = False
        stn_found = isc[isc[:, 0] == ehb[i, 0], :]
        min_dist = 10e10
        if stn_found.shape[0] > 0:
            if stn_found.shape[0] > 1:
                # Several ISC rows share this code: keep the closest one.
                for j in range(stn_found.shape[0]):
                    dist = locations2degrees(
                        float(stn_found[j, 2]), float(stn_found[j, 3]),
                        float(ehb[i, 1]), float(ehb[i, 2]))
                    if dist < min_dist:
                        min_dist = dist
                        record = stn_found[j, :]
            else:
                min_dist = locations2degrees(
                    float(stn_found[0, 2]), float(stn_found[0, 3]),
                    float(ehb[i, 1]), float(ehb[i, 2]))
                record = stn_found[0, :]

        # Now we try to find the same station in the XML inventory.
        xstn_found = inv.select(station=ehb[i, 0], channel="*HZ")

        if len(stn_found) == 0 and len(xstn_found) == 0:
            # We failed to find the station anywhere; assign dummy values.
            record = [ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3],
                      'SHZ', '1964-1-1 00:00:00', '2599-12-31 23:59:59']
            min_dist = 0.
            filed = True
        else:
            # If the station was found somewhere, iterate over the XML hits
            # and give them preference by padding the ISC min_dist slightly.
            if len(xstn_found) > 0:
                min_dist = min_dist + 0.1
                for j in range(len(xstn_found)):
                    dist = locations2degrees(
                        xstn_found[j][0].latitude, xstn_found[j][0].longitude,
                        float(ehb[i, 1]), float(ehb[i, 2]))
                    if min_dist > dist:
                        min_dist = dist
                        record = xstn_found[j]
                        xml = True

        # Last defence: if declared and found locations disagree by more than
        # one degree, fall back to a dummy record.
        if min_dist > 1:
            record = [ehb[i, 0], default_net, ehb[i, 1], ehb[i, 2], ehb[i, 3],
                      'SHZ', '1964-1-1 00:00:00', '2599-12-31 23:59:59']
            filed = True
        if xml:
            # XML records are taken later from the inventory itself.
            xml = False
        else:
            if filed:
                # Patch an empty/short end date before cataloguing.
                if len(record[7]) < 5:
                    record[7] = '2599-12-31 23:59:59'
                catalogue.append(record)
            else:
                stn_found = isc[(isc[:, 0] == record[0]) &
                                (isc[:, 1] == record[1]), :]
                for k in range(stn_found.shape[0]):
                    # NOTE(review): this Network object is never used here;
                    # kept for fidelity with the original control flow.
                    net = Network(code=stn_found[k, 1], stations=[],
                                  description=' ')
                    if len(stn_found[k, 7]) < 5:
                        stn_found[k, 7] = '2599-12-31 23:59:59'
                    catalogue.append(stn_found[k, :])

    stn_found = np.unique(np.array(catalogue), axis=0)
    if len(stn_found[stn_found == '']) > 0 or len(stn_found[stn_found == ' ']) > 0:
        # Fixed: the original had a bare Python 2 ``print`` statement split
        # over two lines, so this warning was never actually printed.
        print("Some elements are empty, check the list")

    # We composed our inventory. However some stations from the ISC list can
    # be left behind; collect the forgotten ones.
    lost = []
    for j in range(isc.shape[0]):
        # is there any common station name?
        common_st = stn_found[isc[j, 0] == stn_found[:, 0]]
        if common_st.shape[0] > 0:
            # is the network code the same?
            common_net = common_st[common_st[:, 1] == isc[j, 1]]
            if common_net.shape[0] < 1:
                # ok we found a forgotten one, check the XML
                if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                    # Bingo...
                    lost.append(isc[j, :])
        else:
            if len(inv.select(station=isc[j, 0], network=isc[j, 1])) <= 0:
                # Bingo...
                lost.append(isc[j, :])

    stn_found = np.vstack((stn_found, np.array(lost)))

    netDict = defaultdict(list)
    for k in range(stn_found.shape[0]):
        result = inv.select(network=stn_found[k, 1])
        if (len(result.networks)):
            # Reuse the existing network header, but rebuild its stations.
            net = result.networks[0]
            net.stations = []
        else:
            net = Network(code=stn_found[k, 1], stations=[], description=' ')

        if len(stn_found[k, 7]) < 5:
            stn_found[k, 7] = '2599-12-31 23:59:59'
        catalogue.append(stn_found[k, :])
        sta = Station(code=stn_found[k, 0],
                      creation_date=utcdatetime.UTCDateTime(stn_found[k, 6]),
                      termination_date=utcdatetime.UTCDateTime(stn_found[k, 7]),
                      site=Site(name=' '),
                      latitude=float(stn_found[k, 2]),
                      longitude=float(stn_found[k, 3]),
                      elevation=float(stn_found[k, 4]))

        # Only emit a channel when its code has a known instrument response.
        if (stn_found[k, 5] in responseDict.keys()):
            r = responseDict[stn_found[k, 5]]

            cha = Channel(code=stn_found[k, 5],
                          depth=0.,
                          azimuth=0.,
                          dip=-90.,
                          location_code='',
                          latitude=float(stn_found[k, 2]),
                          longitude=float(stn_found[k, 3]),
                          elevation=float(stn_found[k, 4]),
                          response=r)

            sta.channels.append(cha)

            if (type(netDict[stn_found[k, 1]]) == Network):
                netDict[stn_found[k, 1]].stations.append(sta)
            else:
                net.stations.append(sta)
                netDict[stn_found[k, 1]] = net

    # list() needed on Python 3: dict_values is a view, not a sequence.
    our_xml = Inventory(networks=list(netDict.values()), source='EHB')

    print('Writing output files..')
    for inet, net in enumerate(our_xml.networks):
        currInv = Inventory(networks=[net], source='EHB')
        currInv.write("output/station.%d.xml" % (inet),
                      format="stationxml", validate=True)

    our_xml.write("station.txt", format="stationtxt")
            elif "e" in comp:
                cha = Channel(
                    code=comp.upper(),
                    location_code="",
                    latitude=station_df["lat"],
                    longitude=station_df["lon"],
                    elevation=station_df["nm_elev"],
                    depth=0,
                    azimuth=station_df["{0}_azm".format(comp)],
                    dip=0,
                    sample_rate=station_df["sampling_rate"],
                )
                cha.extra = {"dipole_length": {"value": 10, "namespace": "MT"}}
            #                cha.comments = Comment(['Dipole Length (m) = {0:.1f}'.format(station_df['{0}_len'.format(comp)])])

            sta.channels.append(cha)
    # Now tie it all together.
    # cha.response = response

    net.stations.append(sta)

# And finally write it to a StationXML file. We also force a validation against
# the StationXML schema to ensure it produces a valid StationXML file.
#
# Note that it is also possible to serialize to any of the other inventory
# output formats ObsPy supports.
inv.write("station.xml",
          format="STATIONXML",
          validate=True,
          nsmap={"MT_ns": "MT"})
예제 #22
0
def load_metadata(
        client: Optional[Client],
        eqinfo: Event,
        dist_range,
        networks,
        inventory=None,
        save_path=None,
        t_before_origin=3600.,
        t_after_origin=3600.):

    """
    Load station metadata for an event, either from a provided inventory or
    by querying *client*, falling back to progressively finer-grained
    requests (per network, then station, then channel) when a bulk request
    fails.

    :param client: FDSN web-service client used when *inventory* is absent.
    :param eqinfo: Event providing the anchor latitude, longitude and time.
    :param dist_range: (minradius, maxradius) pair, degrees around the event.
    :param networks: Network code selector; the literal string 'ALL' lifts
        the network restriction.
    :param inventory: Optional pre-loaded inventory object, or path to a
        StationXML file; when given, no server request is made.
    :param save_path: Optional path; when given, the resulting inventory is
        written there as StationXML.
    :param float t_before_origin: Start of the time window around
        *eqinfo['time']* (in seconds) in which the station must exist.
    :param float t_after_origin: The end of the time window around
        *eqinfo['time']* (in seconds) in which the station must exist. This
        should not be confused with the parameter *t_afterWP* used elsewhere,
        which is used in a more complicated calculation which requires the
        location of the station, which we get in this function. This is only
        used to filter the request for the inventory and hence only need be
        very rough (within the week would probably be equally sensible).
    """
    if inventory:
        if isinstance(inventory, str):
            inventory = obspy.read_inventory(inventory)
        logger.info('Loaded provided inventory')
        inv = inventory
    else:
        def caller_maker(depth=0, **kwargs):
            """Build a station-query callable pre-seeded with *kwargs*.

            The returned callable accepts up to three positional codes that
            are mapped, in order, onto the 'network', 'station' and
            'channel' query parameters (*depth* documents how many codes
            the caller intends to pass).

            Fixed: the original read the loop variables net/sta/cha from
            the enclosing scope when caller_maker() ran (unbound on the
            first call), and its inner function accepted keyword arguments
            only while the fallbacks passed codes positionally — so the
            per-network/station/channel retries always raised.
            """
            if 'network' in kwargs and kwargs['network'].upper() == 'ALL':
                kwargs.pop('network')

            base_call = {
                "level"    : 'response',
                "channel"  : 'BH?',
                "latitude" : eqinfo.latitude,
                "longitude": eqinfo.longitude,
                "minradius": dist_range[0],
                "maxradius": dist_range[1],
                "starttime": eqinfo.time - t_before_origin,
                "endtime"  : eqinfo.time + t_after_origin}

            base_call.update(kwargs)

            def make_call(*codes):
                args = base_call.copy()
                # Map positional codes onto the query hierarchy.
                args.update(zip(("network", "station", "channel"), codes))
                logger.info('Retrieving metadata from server %s',
                            client.base_url)
                return client.get_stations(**args)

            return make_call

        try:
            # first, try and get everything in a single request
            inv = caller_maker(network=networks)()

        except Exception:
            # ... that didn't work: fetch the network list and descend
            nets = caller_maker(network=networks, level='network')()
            inv = Inventory([], None)

            # try by network
            call1 = caller_maker(1)
            for net in nets:
                try:
                    inv += call1(net.code)
                except Exception:
                    # ... by station
                    stas = caller_maker(network=net.code, level='station')()
                    call2 = caller_maker(2)
                    for sta in stas[0]:
                        try:
                            inv += call2(net.code, sta.code)
                        except Exception:
                            # ... by channel
                            chans = caller_maker(network=net.code,
                                                 station=sta.code,
                                                 level='channel')()
                            call3 = caller_maker(3)
                            for chan in chans[0][0]:
                                try:
                                    inv += call3(net.code, sta.code,
                                                 chan.code)
                                except Exception:
                                    # ... skip the channel
                                    # TODO: log that this has happened
                                    pass

    if save_path:
        logger.info("Saving inventory in %s", save_path)
        inv.write(save_path, format='STATIONXML')
    return build_metadata_dict(inv)
예제 #23
0
# Now add the network start/end date taken from the data span.
net_inv.start_date = UTCDateTime(network_start_end[0])
net_inv.end_date = UTCDateTime(network_start_end[1])

# Add the network inventory to the complete and updated inventory.
new_inv.networks.append(net_inv)

XML_file = join(XML_path_out, FDSNnetwork + '_updated.xml')

# Remove any previous output so the write below starts clean.
if exists(XML_file):
    remove(XML_file)

# Write the inventory into the default path, validating against the
# StationXML schema.
new_inv.write(path_or_file_object=XML_file, format='STATIONXML', validate=True)

# Add it to the ASDF file as well.
ds.add_stationxml(new_inv)

# Dump the collected per-station info as JSON.
big_dictionary = dict(zip(keys_list, info_list))

with open(JSON_out, 'w') as fp:
    json.dump(big_dictionary, fp)

del ds
# Fixed: was a Python 2 ``print '\n'`` statement (a syntax error on Python 3).
print('\n')

exec_time = time.time() - code_start_time

exec_str = "--- Execution time: %s seconds ---" % exec_time
예제 #24
0
def main():
    """Interactively build or extend a StationXML inventory.

    Prompts for a network code, NRL datalogger/sensor keys, then one or
    more stations with their channels; attaches the resolved NRL response
    to every channel and writes the result to the chosen StationXML file.

    Relies on module-level defaults defined elsewhere in this file:
    ``netc``, ``scode``, ``geolat``, ``geolon``, ``geoelev``, ``date``,
    ``longname``, ``sps``, ``azims``, ``dips`` and ``locs``.
    """
    chans = "EHZ,EHN,EHE"
    # Get StationXML file: reuse an existing one, or start a new inventory.
    print("Interactive StaXML builder")
    print("Work in progress...some things hardwired\n\n")
    inv_name = input("Enter StationXML file name: ")
    if os.path.isfile(inv_name):
        inv = read_inventory(inv_name)
    else:
        print(f"Making new inventory: {inv_name}\n")
        inv = Inventory(networks=[], source="Weston")

    # Net code
    ques = f"Enter Network Code ({str(netc)}) :"
    net_code = str(input(ques) or netc)
    net = Network(code=net_code, stations=[])
    print("\n")

    # connect to NRL
    nrl = NRL()

    # Datalogger info: retry until the NRL lookup succeeds.
    ret = 0
    digi = "REF TEK|RT 130S & 130-SMHR|1|200"
    print("Input NRL Digi info ( | separated, careful with spaces)....")
    print("E.g manufacturer| model| gain| sps\n")
    while ret == 0:
        ques = f"Enter DIGI info ({digi}) :"
        digi = str(input(ques) or digi)
        print("\n")
        try:
            nrl.get_datalogger_response(digi.split('|'))
            ret = 1
            print("!!!!! DATA LOGGER SUCCESS!!!\n")
        except Exception as e:
            print(f"Try again ... {e}")

    # Sensor info: retry until the NRL lookup succeeds.
    # Fixed: the default was comma-separated ("Streckeisen,STS-1,...") but
    # the code splits on '|', so the default could never resolve in the NRL.
    ret = 0
    sensor = "Streckeisen|STS-1|360 seconds"
    print("Input NRL Sensor info ....\n")
    print("E.g Manufact|model|Sensitivy\n")
    print("Guralp|CMG-40T,30s - 100Hz|800")
    print("Sercel/Mark Products|L-22D|5470 Ohms|20000 Ohms")
    print("Streckeisen|STS-1|360 seconds")
    print("Nanometrics|Trillium Compact 120 (Vault, Posthole, OBS)|754 V/m/s")
    while ret == 0:
        ques = f"Enter sensor info {str(sensor)} :"
        sensor = str(input(ques) or sensor)
        try:
            nrl.get_sensor_response(sensor.split('|'))
            ret = 1
            inst_info = f"{sensor.split('|')[0]} {sensor.split('|')[1]}"
            print("Sensor success!!!!")
        except Exception as e:
            print(f"Try again ... {e}")

    # Combine sensor + datalogger into a full response.
    print("Getting full response...")
    try:
        response = nrl.get_response(sensor_keys=sensor.split('|'),
                                    datalogger_keys=digi.split('|'))
        print("Full response success \n\n")
    except Exception as e:
        print(f"Oops .. {e}")

    nstas = int(
        input(
            "Enter number of stations to add with same sensor/digitizer (default 1):"
        ) or 1)
    for i in range(0, nstas):
        ques = "Station code (" + str(scode) + ") :"
        sta_code = str(input(ques) or scode)

        ques = "Station latitude (" + str(geolat) + ") :"
        sta_lat = float(input(ques) or geolat)

        # Fixed: the fallback previously used geolat (copy-paste bug), so an
        # empty answer silently set the longitude to the default latitude.
        ques = "Station longitude (" + str(geolon) + ") :"
        sta_lon = float(input(ques) or geolon)

        ques = "Station elev(" + str(geoelev) + ") :"
        sta_elev = float(input(ques) or geoelev)

        ques = "Station ondate (" + str(date) + ") :"
        sta_ondate = str(input(ques) or date)

        ques = "Station offdate (" + str(date) + ") :"
        sta_offdate = str(input(ques) or date)

        ques = "Station long name (" + str(longname) + ") :"
        sta_sitename = str(input(ques) or longname)

        sta = Station(code=sta_code,
                      latitude=sta_lat,
                      longitude=sta_lon,
                      elevation=sta_elev,
                      creation_date=UTCDateTime(sta_ondate),
                      site=Site(name=sta_sitename))
        # add station to network
        net.stations.append(sta)

        # Default channel info, individually confirmable per channel below.
        coords = {
            'latitude': sta_lat,
            'longitude': sta_lon,
            'elevation': sta_elev,
            'depth': 0.0,
            'sample_rate': sps
        }

        n = -1
        ques = f"Enter channel names, comma separated ({chans}) :"
        chans = str(input(ques) or chans)
        for j in chans.split(','):
            n += 1
            chantmp = j
            print("Doing channel ", chantmp)
            # Orientation and location defaults come from module-level lists.
            aztmp = azims[n]
            diptmp = dips[n]
            loc = locs[n]
            for k in coords.keys():
                ques = str(chantmp) + " enter " + k + "(" + str(
                    coords[k]) + "):"
                coords[k] = float(input(ques) or coords[k])

            chan = Channel(code=chantmp,
                           location_code=loc,
                           latitude=coords['latitude'],
                           longitude=coords['longitude'],
                           elevation=coords['elevation'],
                           depth=coords['depth'],
                           azimuth=aztmp,
                           dip=diptmp,
                           sample_rate=coords['sample_rate'],
                           sensor=Equipment(description=inst_info))
            chan.response = response
            sta.channels.append(chan)

    inv.networks.append(net)
    inv.write(inv_name, format="STATIONXML")
예제 #25
0
    temp_network = [station_template.copy()]
    #Put into new inventory object
    temp_inv = Inventory(temp_network, 'VUW')
    temp_sta = temp_inv[0].stations[0]
    temp_sta.code = unicode(new_sta_dict['code'], 'utf-8')
    temp_sta.start_date = new_sta_dict['start']
    temp_sta.creation_date = new_sta_dict['start']
    temp_sta.site.name = new_sta_dict['name']
    temp_sta.latitude = new_sta_dict['lat']
    temp_sta.longitude = new_sta_dict['lon']
    temp_sta.elevation = new_sta_dict['elev']
    #Loop through channel and response info to change minor naming issues
    for chan in temp_sta.channels:
        chan.start_date = new_sta_dict['start']
        chan.latitude = new_sta_dict['lat']
        chan.longitude = new_sta_dict['lon']
        chan.elevation = new_sta_dict['elev']
        #Roundabout replacing of station name in descriptions
        dl_desc_split = chan.data_logger.description.split('.')
        dl_desc_split[0] = new_sta_dict['code']
        chan.data_logger.description = '.'.join(dl_desc_split)
        #Do the same replacement as above for all response stages
        for stage in chan.response.response_stages:
            if stage.name:
                tmp_name = stage.name.split('.')
                tmp_name[0] = new_sta_dict['code']
                stage.name = '.'.join(tmp_name)
    temp_inv.write('/home/chet/data/GeoNet_catalog/stations/station_xml/' +
                   new_sta_dict['code'] + '_STATIONXML.xml',
                   format='STATIONXML')
예제 #26
0
    def test_write_stationtxt(self):
        """
        Test writing stationtxt at channel, station and network level.

        Builds a two-network inventory by hand, serializes it with
        ``format="STATIONTXT"`` at each level, and checks that every
        expected pipe-separated line appears in the output.
        """
        # Manually create a test Inventory object.
        resp_1 = Response(
            instrument_sensitivity=InstrumentSensitivity(
                frequency=0.02, input_units="M/S", output_units=None,
                value=8.48507E8))
        resp_2 = Response(
            instrument_sensitivity=InstrumentSensitivity(
                frequency=1.0, input_units="M/S**2", output_units=None,
                value=53435.4))
        resp_3 = Response(
            instrument_sensitivity=InstrumentSensitivity(
                frequency=0.03, input_units="M/S", output_units=None,
                value=6.27252E8))
        test_inv = Inventory(
            source=None,
            networks=[
                Network(
                    code="IU",
                    start_date=obspy.UTCDateTime("1988-01-01T00:00:00"),
                    end_date=obspy.UTCDateTime("2500-12-31T23:59:59"),
                    total_number_of_stations=1,
                    description="Global Seismograph Network (GSN - IRIS/USGS)",
                    stations=[
                        Station(code="ANMO",
                                latitude=34.9459,
                                longitude=-106.4572,
                                elevation=1850.0,
                                channels=[
                                    Channel(code="BCI",
                                            location_code="",
                                            latitude=34.9459,
                                            longitude=-106.4572,
                                            elevation=1850.0,
                                            depth=100.0,
                                            azimuth=0.0,
                                            dip=0.0,
                                            sample_rate=0.0,
                                            sensor=Equipment(
                                                description=
                                                "Geotech KS-36000-I Borehole "
                                                "Seismometer"),
                                            start_date=obspy.UTCDateTime(
                                                "1989-08-29T00:00:00"),
                                            end_date=obspy.UTCDateTime(
                                                "1995-02-01T00:00:00"),
                                            response=resp_1),
                                    # Open-ended epoch: no end_date on purpose,
                                    # so the serialized line ends with '|'.
                                    Channel(
                                        code="LNZ",
                                        location_code="20",
                                        latitude=34.9459,
                                        longitude=-106.4572,
                                        elevation=1820.7,
                                        depth=0.0,
                                        azimuth=0.0,
                                        dip=-90.0,
                                        sample_rate=0.0,
                                        sensor=Equipment(
                                            description="Titan Accelerometer"),
                                        start_date=obspy.UTCDateTime(
                                            "2013-06-20T16:30:00"),
                                        response=resp_2),
                                ]),
                    ]),
                Network(
                    code="6E",
                    start_date=obspy.UTCDateTime("2013-01-01T00:00:00"),
                    end_date=obspy.UTCDateTime("2016-12-31T23:59:59"),
                    total_number_of_stations=1,
                    description="Wabash Valley Seismic Zone",
                    stations=[
                        Station(
                            code="SH01",
                            latitude=37.7457,
                            longitude=-88.1368,
                            elevation=126.0,
                            channels=[
                                Channel(
                                    code="LOG",
                                    location_code="",
                                    latitude=37.7457,
                                    longitude=-88.1368,
                                    elevation=126.0,
                                    depth=0.0,
                                    azimuth=0.0,
                                    dip=0.0,
                                    sample_rate=0.0,
                                    sensor=Equipment(
                                        description="Reftek 130 Datalogger"),
                                    start_date=obspy.UTCDateTime(
                                        "2013-11-23T00:00:00"),
                                    end_date=obspy.UTCDateTime(
                                        "2016-12-31T23:59:59"),
                                    response=resp_3)
                            ]),
                    ])
            ])

        # NOTE: the original also kept a num_lines_written counter and
        # asserted it equalled len(expected); since it was incremented once
        # per expected line by construction, that assertion was a tautology
        # and has been removed.  assertIn per line is the whole check.

        # CHANNEL level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="CHANNEL")
        # check contents
        content = stio.getvalue()
        expected = [
            ("Network|Station|Location|Channel|Latitude|Longitude|"
             "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
             "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime"),
            ("IU|ANMO||BCI|34.9459|-106.4572|1850.0|100.0|0.0|"
             "0.0|Geotech KS-36000-I Borehole Seismometer|"
             "848507000.0|0.02|M/S|0.0|1989-08-29T00:00:00|"
             "1995-02-01T00:00:00"),
            ("IU|ANMO|20|LNZ|34.9459|-106.4572|1820.7|0.0|0.0|"
             "-90.0|Titan Accelerometer|53435.4|1.0|M/S**2|0.0|"
             "2013-06-20T16:30:00|"),
            ("6E|SH01||LOG|37.7457|-88.1368|126.0|0.0|0.0|0.0|"
             "Reftek 130 Datalogger|627252000.0|0.03|M/S|0.0|"
             "2013-11-23T00:00:00|2016-12-31T23:59:59"),
        ]
        for line in expected:
            self.assertIn(line, content)

        # STATION level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="STATION")
        # check contents
        content = stio.getvalue()
        expected = [
            ("Network|Station|Latitude|Longitude|"
             "Elevation|SiteName|StartTime|EndTime"),
            ("IU|ANMO|34.9459|-106.4572|1850.0||"),
            ("6E|SH01|37.7457|-88.1368|126.0||"),
        ]
        for line in expected:
            self.assertIn(line, content)

        # NETWORK level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="NETWORK")
        # check contents
        content = stio.getvalue()
        expected = [
            ("Network|Description|StartTime|EndTime|TotalStations"),
            ("IU|Global Seismograph Network (GSN - IRIS/USGS)|"
             "1988-01-01T00:00:00|2500-12-31T23:59:59|1"),
            ("6E|Wabash Valley Seismic Zone|"
             "2013-01-01T00:00:00|2016-12-31T23:59:59|1"),
        ]
        for line in expected:
            self.assertIn(line, content)
예제 #27
0
def create_new_skeleton_inventory_file(path2xmlfile):
    """
    write a NEW skeleton inventory xml file
    :param path2xmlfile: path to a new xml file.
    :return:
    """
    # Assemble the StationXML hierarchy bottom-up: one channel on one
    # station, on one network, wrapped in an Inventory at the end.
    channel = Channel(
        # Channel code according to the SEED standard.
        code="HHZ",
        # Location code according to the SEED standard.
        location_code="",
        # Channel coordinates may differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=200)

    # By default this accesses the NRL online; offline copies of the NRL can
    # also be used instead.  The NRL contents can be explored interactively
    # in a Python prompt, see the API documentation of the NRL submodule:
    # http://docs.obspy.org/packages/obspy.clients.nrl.html
    # Here we assume that the end points for data logger and sensor are
    # already known:
    nrl = NRL()
    channel.response = nrl.get_response(  # doctest: +SKIP
        sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
        datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])

    station = Station(
        # Station code according to the SEED standard.
        code="ABC",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=obspy.UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))
    station.channels.append(channel)

    network = Network(
        # Network code according to the SEED standard.
        code="XX",
        stations=[station],
        description="A test stations.",
        # Start and end dates are optional.
        start_date=obspy.UTCDateTime(2016, 1, 2))

    inventory = Inventory(
        networks=[network],
        # The source should be the id of whoever created the file.
        source="ObsPy-Tutorial")

    # Finally write it to a StationXML file, forcing a validation against
    # the StationXML schema to ensure the output is valid.  Any of the other
    # inventory output formats ObsPy supports could be used here as well.
    inventory.write(path2xmlfile, format="stationxml", validate=True)
예제 #28
0
                                    'Device URL.')
                            ])
                        if _channel.calibration_units == "COUNTS" or _channel.calibration_units == "COUNT":
                            _channel.calibration_units = _channel.calibration_units.lower(
                            )
                        _station.channels.append(_channel)

                        print(
                            "Channel {}.{} appended successfully to Inventory."
                            .format(_channel_code,
                                    Channels[channel]['_location_code']))
                except Exception as e:
                    print(
                        "Check that metadata assignments are correct for station: {}; {}"
                        .format(station, e))
        except:
            print("No epochs assigned to station: {}".format(station))

print("\n\n###\nInventory: \n", inv)

# Write the Inventory to StationXML
print("""\n\nWriting StationXML file to "{}".""".format(os.getcwd()))
inv.write("NV_ONC.xml", format="stationxml", validate=True)

from obspy.io.stationxml.core import validate_stationxml
print("\n\nStationXML is valid? {}.".format(
    validate_stationxml('NV_ONC.xml')[0]))
if validate_stationxml('NV_ONC.xml')[1] == ():
    print("\t - No errors were found.")
else:
    print("Errors found: {}".format(validate_stationxml('NV_ONC.xml')[1]))
예제 #29
0
def stats2inv(stats, resp=None, filexml=None, locs=None):
    """Build a single-channel ObsPy Inventory from trace ``stats``.

    :param stats: obspy ``Stats``-like object; must expose ``network``,
        ``station``, ``channel``, ``location``, ``starttime``,
        ``sampling_rate`` and (when ``locs`` is None) a ``sac`` header dict.
    :param resp: response information (currently unused beyond a notice).
    :param filexml: if given, the inventory is also written to this
        StationXML path with schema validation enabled.
    :param locs: optional pandas DataFrame of station coordinates, indexed
        by a 'station' column; overrides the SAC-header coordinates.
    :return: the assembled ``Inventory``.
    """
    inv = Inventory(networks=[], source="japan_from_resp")

    # Resolve coordinates and codes from either the SAC header or the
    # station table, then build the hierarchy once below.
    if locs is None:
        net_code = stats.network
        sta_code = stats.station
        lat = stats.sac["stla"]
        lon = stats.sac["stlo"]
        elev = stats.sac["stel"]
        azimuth = stats.sac["cmpaz"]
        dip = stats.sac["cmpinc"]
    else:
        ista = locs[locs['station'] == stats.station].index.values.astype(
            'int64')[0]
        row = locs.iloc[ista]
        net_code = row["network"]
        sta_code = row["station"]
        lat = row["latitude"]
        lon = row["longitude"]
        elev = row["elevation"]
        azimuth = 0
        dip = 0

    net = Network(
        # Network code according to the SEED standard.
        code=net_code,
        stations=[],
        description="Marine created from SAC and resp files",
        # Start-and end dates are optional.
        start_date=stats.starttime)

    sta = Station(
        # Station code according to the SEED standard.
        code=sta_code,
        latitude=lat,
        longitude=lon,
        elevation=elev,
        creation_date=stats.starttime,
        site=Site(name="First station"))

    cha = Channel(
        # Channel and location codes according to the SEED standard.
        code=stats.channel,
        location_code=stats.location,
        # Channel coordinates may differ from the station's; here they match.
        latitude=lat,
        longitude=lon,
        elevation=elev,
        depth=-elev,
        azimuth=azimuth,
        dip=dip,
        sample_rate=stats.sampling_rate)

    # Placeholder response; a real one could be fetched from the NRL, e.g.:
    #   nrl = NRL()
    #   response = nrl.get_response(
    #       sensor_keys=['Streckeisen', 'STS-1', '360 seconds'],
    #       datalogger_keys=['REF TEK', 'RT 130 & 130-SMA', '1', '200'])
    # See http://docs.obspy.org/packages/obspy.clients.nrl.html
    response = obspy.core.inventory.response.Response()
    if resp is not None:
        print('i dont have the response')

    # Tie the hierarchy together.
    cha.response = response
    sta.channels.append(cha)
    net.stations.append(sta)
    inv.networks.append(net)

    # Optionally serialize to StationXML, validating against the schema.
    if filexml is not None:
        inv.write(filexml, format="stationxml", validate=True)

    return inv
예제 #30
0
    def test_write_stationtxt(self):
        """
        Test writing stationtxt at channel, station and network levels.

        Builds a two-network Inventory by hand (IU/ANMO with two channels,
        6E/SH01 with one) and checks that each expected pipe-delimited line
        appears in the serialized STATIONTXT output at every level.
        """
        # Manually create a test Inventory object.
        # Sensitivity-only responses: the Scale/ScaleFreq/ScaleUnits columns
        # in the expected channel-level output come from these values.
        resp_1 = Response(
            instrument_sensitivity=InstrumentSensitivity(
                frequency=0.02, input_units="M/S", output_units=None,
                value=8.48507E8))
        resp_2 = Response(
            instrument_sensitivity=InstrumentSensitivity(
                frequency=1.0, input_units="M/S**2",
                output_units=None, value=53435.4))
        resp_3 = Response(
            instrument_sensitivity=InstrumentSensitivity(
                frequency=0.03, input_units="M/S",
                output_units=None, value=6.27252E8))
        test_inv = Inventory(source=None, networks=[
            Network(
                code="IU",
                start_date=obspy.UTCDateTime("1988-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2500-12-31T23:59:59"),
                total_number_of_stations=1,
                description="Global Seismograph Network (GSN - IRIS/USGS)",
                stations=[
                    Station(
                        code="ANMO",
                        latitude=34.9459,
                        longitude=-106.4572,
                        elevation=1850.0,
                        channels=[
                            Channel(
                                code="BCI", location_code="",
                                latitude=34.9459,
                                longitude=-106.4572,
                                elevation=1850.0,
                                depth=100.0,
                                azimuth=0.0,
                                dip=0.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    type="Geotech KS-36000-I Borehole "
                                         "Seismometer"),
                                start_date=obspy.UTCDateTime(
                                    "1989-08-29T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "1995-02-01T00:00:00"),
                                response=resp_1),
                            # No end_date here: exercises the trailing empty
                            # EndTime field in the channel-level output.
                            Channel(
                                code="LNZ", location_code="20",
                                latitude=34.9459,
                                longitude=-106.4572,
                                elevation=1820.7,
                                depth=0.0,
                                azimuth=0.0,
                                dip=-90.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    type="Titan Accelerometer"),
                                start_date=obspy.UTCDateTime(
                                    "2013-06-20T16:30:00"),
                                response=resp_2),
                        ]),
                ]),
            Network(
                code="6E",
                start_date=obspy.UTCDateTime("2013-01-01T00:00:00"),
                end_date=obspy.UTCDateTime("2016-12-31T23:59:59"),
                total_number_of_stations=1,
                description="Wabash Valley Seismic Zone",
                stations=[
                    Station(
                        code="SH01",
                        latitude=37.7457,
                        longitude=-88.1368,
                        elevation=126.0,
                        channels=[
                            Channel(
                                code="LOG", location_code="",
                                latitude=37.7457,
                                longitude=-88.1368,
                                elevation=126.0,
                                depth=0.0,
                                azimuth=0.0,
                                dip=0.0,
                                sample_rate=0.0,
                                sensor=Equipment(
                                    type="Reftek 130 Datalogger"),
                                start_date=obspy.UTCDateTime(
                                    "2013-11-23T00:00:00"),
                                end_date=obspy.UTCDateTime(
                                    "2016-12-31T23:59:59"),
                                response=resp_3)
                        ]),
                ])
        ])

        # CHANNEL level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="CHANNEL")
        # check contents
        content = stio.getvalue()
        expected = [("Network|Station|Location|Channel|Latitude|Longitude|"
                     "Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|"
                     "ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime"),
                    ("IU|ANMO||BCI|34.9459|-106.4572|1850.0|100.0|0.0|"
                     "0.0|Geotech KS-36000-I Borehole Seismometer|"
                     "848507000.0|0.02|M/S|0.0|1989-08-29T00:00:00|"
                     "1995-02-01T00:00:00"),
                    ("IU|ANMO|20|LNZ|34.9459|-106.4572|1820.7|0.0|0.0|"
                     "-90.0|Titan Accelerometer|53435.4|1.0|M/S**2|0.0|"
                     "2013-06-20T16:30:00|"),
                    ("6E|SH01||LOG|37.7457|-88.1368|126.0|0.0|0.0|0.0|"
                     "Reftek 130 Datalogger|627252000.0|0.03|M/S|0.0|"
                     "2013-11-23T00:00:00|2016-12-31T23:59:59"),
                    ]
        num_lines_written = 0
        for line in expected:
            self.assertIn(line, content)
            num_lines_written = num_lines_written + 1
        # assert that the number of lines written equals
        # the number of lines expected
        # NOTE(review): the counter increments once per item of `expected`,
        # so this equality is tautological; it does not verify how many
        # lines the writer actually produced in `content`.
        self.assertEqual(num_lines_written, len(expected))

        # STATION level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="STATION")
        # check contents
        content = stio.getvalue()
        expected = [("Network|Station|Latitude|Longitude|"
                     "Elevation|SiteName|StartTime|EndTime"),
                    ("IU|ANMO|34.9459|-106.4572|1850.0||"),
                    ("6E|SH01|37.7457|-88.1368|126.0||"),
                    ]
        num_lines_written = 0
        for line in expected:
            self.assertIn(line, content)
            num_lines_written = num_lines_written + 1
        # assert that the number of lines written equals
        # the number of lines expected
        # NOTE(review): tautological for the same reason as above.
        self.assertEqual(num_lines_written, len(expected))

        # NETWORK level test
        stio = io.StringIO()
        test_inv.write(stio, format="STATIONTXT", level="NETWORK")
        # check contents
        content = stio.getvalue()
        expected = [("Network|Description|StartTime|EndTime|TotalStations"),
                    ("IU|Global Seismograph Network (GSN - IRIS/USGS)|"
                     "1988-01-01T00:00:00|2500-12-31T23:59:59|1"),
                    ("6E|Wabash Valley Seismic Zone|"
                     "2013-01-01T00:00:00|2016-12-31T23:59:59|1"),
                    ]
        num_lines_written = 0
        for line in expected:
            self.assertIn(line, content)
            num_lines_written = num_lines_written + 1
        # assert that the number of lines written equals
        # the number of lines expected
        # NOTE(review): tautological for the same reason as above.
        self.assertEqual(num_lines_written, len(expected))
예제 #31
0
        needs_correction = str(row.split()[2])
        latitude = str(row.split()[3])
        longitude = str(row.split()[4])
        elevation = str(row.split()[5])
        if elevation == '-':
            elevation = 0
        sensor_type = str(row.split()[6])
        
        sta = Station(
                      # This is the station code according to the SEED standard.
                      code=sensor_code,
                      latitude=latitude,
                      longitude=longitude,
                      elevation=elevation)
        sta.sensor_type = sensor_type
        sta.needs_correction = needs_correction
        net.stations.append(sta)
# Attach the network (populated by the loop above) to the inventory and
# show a summary.
inv.networks.append(net)
print(inv)



# And finally write it to a StationXML file. We also force a validation against
# the StationXML schema to ensure it produces a valid StationXML file.
#
# Note that it is also possible to serialize to any of the other inventory
# output formats ObsPy supports.
# NOTE(review): the plot happens before the write; if the plot window blocks,
# the XML file is only written once it is closed.
inv.plot(projection='local')
#%%
inv.write("inventory.xml", format="stationxml", validate=True)
예제 #32
0
        l3.code = 'LHN'
        l3.azimuth = Azimuth(0.0)
        sta.channels.append(l2)
        sta.channels.append(l3)
    YJ_net.stations.append(sta)
inv_all.networks.append(YJ_net)

# EN: assemble the ENAP network and merge it into the combined inventory.
E_net = Network(code='EN', stations=[],
                description='ENAP from Rodrigo',
                start_date=UTCDateTime(2019, 3, 1))

# Fold the single station from each ENAP StationXML file into the network,
# rewriting the channel band code to 'L' first.
E_file = glob('seed/dataless/ENAP*.xml')
for xml_path in E_file:
    parsed = read_inventory(xml_path)
    station = parsed.networks[0].stations[0]
    for channel in station.channels:
        channel.code = 'L' + channel.code[1:]
    E_net.stations.append(station)
inv_all.networks.append(E_net)

# The ANMA station lives in its own file; treat it the same way and attach
# it to the network appended just above.
esta = read_inventory('seed/dataless/ENAP-ANMA.EN.xml')
anma_station = esta.networks[0].stations[0]
for channel in anma_station.channels:
    channel.code = 'L' + channel.code[1:]
inv_all.networks[-1].stations.append(anma_station)

# write everything!
inv_all.write('seed/dataless/combined.xml', format='stationxml')

os.system(
    'java -jar ../other_code/stationxml-seed-converter-2.0.10-SNAPSHOT.jar --input seed/dataless/combined.xml --output seed/dataless/combined.dataless'
)