Example #1
 def test_setitem(self):
     """
     Tests __setitem__ method of AttribDict class.
     """
     # 1
     ad = AttribDict()
     ad['test'] = 'NEW'
     self.assertEqual(ad['test'], 'NEW')
     self.assertEqual(ad.test, 'NEW')
     self.assertEqual(ad.get('test'), 'NEW')
     self.assertEqual(ad.__getattr__('test'), 'NEW')
     self.assertEqual(ad.__getitem__('test'), 'NEW')
     self.assertEqual(ad.__dict__['test'], 'NEW')
     self.assertEqual(ad.__dict__.get('test'), 'NEW')
     self.assertTrue('test' in ad)
     self.assertTrue('test' in ad.__dict__)
     # 2
     ad = AttribDict()
     ad.__setitem__('test', 'NEW')
     self.assertEqual(ad['test'], 'NEW')
     self.assertEqual(ad.test, 'NEW')
     self.assertEqual(ad.get('test'), 'NEW')
     self.assertEqual(ad.__getattr__('test'), 'NEW')
     self.assertEqual(ad.__getitem__('test'), 'NEW')
     self.assertEqual(ad.__dict__['test'], 'NEW')
     self.assertEqual(ad.__dict__.get('test'), 'NEW')
     self.assertTrue('test' in ad)
     self.assertTrue('test' in ad.__dict__)
Example #2
 def test_get_stations(self):
     """
     """
     # initialize client
     client = Client(user='******')
     # example 1
     start = UTCDateTime(2008, 1, 1)
     end = start + 1
     result = client.get_stations(start, end, 'BW')
     self.assertTrue(
         AttribDict({'remark': '', 'code': 'RWMO', 'elevation': 763.0,
                     'description': 'Wildenmoos, Bavaria, BW-Net',
                     'start': UTCDateTime(2006, 7, 4, 0, 0),
                     'restricted': False, 'archive_net': '',
                     'longitude': 12.729887, 'affiliation': 'BayernNetz',
                     'depth': None, 'place': 'Wildenmoos',
                     'country': 'BW-Net', 'latitude': 47.744171,
                     'end': None}) in result)
     # example 2
     expected = AttribDict(
         {'code': 'WDD', 'description': 'Wied Dalam',
          'affiliation': '', 'country': '', 'place': '', 'remark': '',
          'restricted': False, 'archive_net': '',
          'latitude': 35.8373, 'longitude': 14.5242,
          'elevation': 44.0, 'depth': None,
          'start': UTCDateTime(1995, 7, 6, 0, 0), 'end': None})
     # routing default
     result = client.get_stations(start, end, 'MN')
     self.assertTrue(expected in result)
     # w/o routing
     result = client.get_stations(start, end, 'MN', route=False)
     self.assertTrue(expected in result)
     # w/ routing
     result = client.get_stations(start, end, 'MN', route=True)
     self.assertTrue(expected in result)
Example #3
def _get_beamforming_example_stream():
    # Load data
    from obspy import read
    from obspy.core.util import AttribDict
    from obspy.signal.invsim import corn_freq_2_paz
    st = read("https://examples.obspy.org/agfa.mseed")
    # Set PAZ and coordinates for all 5 channels
    st[0].stats.paz = AttribDict({
        'poles': [(-0.03736 - 0.03617j), (-0.03736 + 0.03617j)],
        'zeros': [0j, 0j],
        'sensitivity': 205479446.68601453,
        'gain': 1.0})
    st[0].stats.coordinates = AttribDict({
        'latitude': 48.108589,
        'elevation': 0.450000,
        'longitude': 11.582967})
    st[1].stats.paz = AttribDict({
        'poles': [(-0.03736 - 0.03617j), (-0.03736 + 0.03617j)],
        'zeros': [0j, 0j],
        'sensitivity': 205479446.68601453,
        'gain': 1.0})
    st[1].stats.coordinates = AttribDict({
        'latitude': 48.108192,
        'elevation': 0.450000,
        'longitude': 11.583120})
    st[2].stats.paz = AttribDict({
        'poles': [(-0.03736 - 0.03617j), (-0.03736 + 0.03617j)],
        'zeros': [0j, 0j],
        'sensitivity': 250000000.0,
        'gain': 1.0})
    st[2].stats.coordinates = AttribDict({
        'latitude': 48.108692,
        'elevation': 0.450000,
        'longitude': 11.583414})
    st[3].stats.paz = AttribDict({
        'poles': [(-4.39823 + 4.48709j), (-4.39823 - 4.48709j)],
        'zeros': [0j, 0j],
        'sensitivity': 222222228.10910088,
        'gain': 1.0})
    st[3].stats.coordinates = AttribDict({
        'latitude': 48.108456,
        'elevation': 0.450000,
        'longitude': 11.583049})
    st[4].stats.paz = AttribDict({
        'poles': [(-4.39823 + 4.48709j), (-4.39823 - 4.48709j), (-2.105 + 0j)],
        'zeros': [0j, 0j, 0j],
        'sensitivity': 222222228.10910088,
        'gain': 1.0})
    st[4].stats.coordinates = AttribDict({
        'latitude': 48.108730,
        'elevation': 0.450000,
        'longitude': 11.583157})
    # Instrument correction to 1Hz corner frequency
    paz1hz = corn_freq_2_paz(1.0, damp=0.707)
    st.simulate(paz_remove='self', paz_simulate=paz1hz)
    return st
Example #4
def _get_plot_starttime(event: Event, st: Stream) -> UTCDateTime:
    """Get starttime of a plot given an event and a stream."""
    try:
        attribute_with_time = event.preferred_origin() or event.origins[0]
    except (AttributeError, IndexError):
        try:
            attribute_with_time = AttribDict(
                {"time": min([p.time for p in event.picks]) - 5})
        except ValueError:
            attribute_with_time = AttribDict(
                {"time": min([tr.stats.starttime for tr in st])})
    return attribute_with_time.time
Example #5
 def test_deepcopy(self):
     """
     Tests deepcopy method of Stats object.
     """
     stats = Stats()
     stats.network = 'BW'
     stats['station'] = 'ROTZ'
     stats['other1'] = {'test1': '1'}
     stats['other2'] = AttribDict({'test2': '2'})
     stats['other3'] = 'test3'
     stats2 = copy.deepcopy(stats)
     stats.network = 'CZ'
     stats.station = 'RJOB'
     assert stats2.__class__ == Stats
     assert stats2.network == 'BW'
     assert stats2.station == 'ROTZ'
     assert stats2.other1.test1 == '1'
     assert stats2.other1.__class__ == AttribDict
     assert len(stats2.other1) == 1
     assert stats2.other2.test2 == '2'
     assert stats2.other2.__class__ == AttribDict
     assert len(stats2.other2) == 1
     assert stats2.other3 == 'test3'
     assert stats.network == 'CZ'
     assert stats.station == 'RJOB'
Example #6
 def test_nested_stats(self):
     """
     Various setter and getter tests.
     """
     # 1
     stats = Stats()
     stats.test = dict()
     stats.test['test2'] = 'muh'
     assert stats.test.test2 == 'muh'
     assert stats.test['test2'] == 'muh'
     assert stats['test'].test2 == 'muh'
     assert stats['test']['test2'] == 'muh'
     stats.test['test2'] = 'maeh'
     assert stats.test.test2 == 'maeh'
     assert stats.test['test2'] == 'maeh'
     assert stats['test'].test2 == 'maeh'
     assert stats['test']['test2'] == 'maeh'
     # 2 - multiple initialization
     stats = Stats({'muh': 'meah'})
     stats2 = Stats(Stats(Stats(stats)))
     assert stats2.muh == 'meah'
     # 3 - check conversion to AttribDict
     stats = Stats()
     stats.sub1 = {'muh': 'meah'}
     stats.sub2 = AttribDict({'muh2': 'meah2'})
     stats2 = Stats(stats)
     assert isinstance(stats.sub1, AttribDict)
     assert isinstance(stats.sub2, AttribDict)
     assert stats2.sub1.muh == 'meah'
     assert stats2.sub2.muh2 == 'meah2'
Example #7
 def test_writing_text_and_binary_textual_file_headers(self):
     """
     Make sure the textual file header can be written if it has been passed
     either as text or as a bytestring.
     """
     # Loop over bytes/text and the textual header encoding.
     for textual_file_header in [b"12345", "12345"]:
         for encoding in ["ASCII", "EBCDIC"]:
             st = read()
             for tr in st:
                 tr.data = np.require(tr.data, dtype=np.float32)
             st.stats = AttribDict()
             st.stats.textual_file_header = textual_file_header
             with io.BytesIO() as buf:
                 # Warning raised to create a complete header.
                 with pytest.warns(UserWarning):
                     st.write(buf,
                              format="SEGY",
                              data_encoding=5,
                              textual_header_encoding=encoding)
                 buf.seek(0, 0)
                 # Read with SEG-Y to preserve the textual file header.
                 st2 = _read_segy(buf)
             self.assertEqual(
                 # Ignore the auto-generated parts of the header.
                 st2.stats.textual_file_header.decode().split()[0],
                 "12345")
Example #8
def get_data(station, location, component,
             tstart, tend, new=False,
             decimate=False):
    """
    Download the data and insert station location coordinates.
    """
    scl = '.'.join((station, location, component))
    fout = os.path.join('/tmp', '_'.join((scl, str(tstart), str(tend))))
    if os.path.isfile(fout) and new is False:
        with open(fout, 'rb') as fh:
            tr = pickle.load(fh)
            return tr
    else:
        try:
            st, inv = GeoNetFDSNrequest(tstart, tend, 'NZ', station,
                                        location, component)
        except FDSNException:
            return None
        st.remove_sensitivity()
        st.merge(method=1, fill_value=0.)
        tr = st[0]
        tr.trim(tstart, tend)
        tr.data -= tr.data.mean()
        if decimate:
            if int(round(tr.stats.sampling_rate, 0)) == 100:
                tr.data -= tr.data.mean()
                tr.taper(0.05)
                tr.decimate(10)
                tr.decimate(10)
        _s = inv[0][0]
        tr.stats.coordinates = AttribDict({'latitude': _s.latitude,
                                           'longitude': _s.longitude})
        with open(fout, 'wb') as fh:
            pickle.dump(tr, fh)
        return tr 
Example #9
 def test_get_stations_inconsistency(self):
     """
     """
     # initialize client
     client = Client(user='******')
     # example 1
     start = UTCDateTime(2008, 1, 1)
     end = start + 1
     result_origin = AttribDict({
         'remark': '',
         'code': 'RWMO',
         'elevation': 763.0,
         'description': 'Wildenmoos, Bavaria, BW-Net',
         'start': UTCDateTime(2006, 7, 4, 0, 0),
         'restricted': False,
         'archive_net': '',
         'longitude': 12.729887,
         'affiliation': 'BayernNetz',
         'depth': None,
         'place': 'Wildenmoos',
         'country': ' BW-Net',
         'latitude': 47.744171,
         'end': None
     })
     # OK: from origin node
     result = client.get_stations(start, end, 'BW', route=True)
     self.assertTrue(result_origin in result)
     # BUT: this one from a different node was modified and fails
     result = client.get_stations(start, end, 'BW')
     self.assertTrue(result_origin in result)
Example #10
 def test_nestedStats(self):
     """
     Various setter and getter tests.
     """
     #1
     stats = Stats()
     stats.test = dict()
     stats.test['test2'] = 'muh'
     self.assertEqual(stats.test.test2, 'muh')
     self.assertEqual(stats.test['test2'], 'muh')
     self.assertEqual(stats['test'].test2, 'muh')
     self.assertEqual(stats['test']['test2'], 'muh')
     stats.test['test2'] = 'maeh'
     self.assertEqual(stats.test.test2, 'maeh')
     self.assertEqual(stats.test['test2'], 'maeh')
     self.assertEqual(stats['test'].test2, 'maeh')
     self.assertEqual(stats['test']['test2'], 'maeh')
     #2 - multiple initialization
     stats = Stats({'muh': 'meah'})
     stats2 = Stats(Stats(Stats(stats)))
     self.assertEqual(stats2.muh, 'meah')
     #3 - check conversion to AttribDict
     stats = Stats()
     stats.sub1 = {'muh': 'meah'}
     stats.sub2 = AttribDict({'muh2': 'meah2'})
     stats2 = Stats(stats)
     self.assertTrue(isinstance(stats.sub1, AttribDict))
     self.assertTrue(isinstance(stats.sub2, AttribDict))
     self.assertEqual(stats2.sub1.muh, 'meah')
     self.assertEqual(stats2.sub2.muh2, 'meah2')
Example #11
 def test_get_stations(self):
     """
     """
     # initialize client
     client = Client(user='******')
     # example 1
     start = UTCDateTime(2008, 1, 1)
     end = start + 1
     result = client.get_stations(start, end, 'BW')
     self.assertTrue(
         AttribDict({
             'remark': '',
             'code': 'RWMO',
             'elevation': 763.0,
             'description': 'Wildenmoos, Bavaria, BW-Net',
             'start': UTCDateTime(2006, 7, 4, 0, 0),
             'restricted': False,
             'archive_net': '',
             'longitude': 12.729887,
             'affiliation': 'BayernNetz',
             'depth': None,
             'place': 'Wildenmoos',
             'country': ' BW-Net',
             'latitude': 47.744171,
             'end': None
         }) in result)
Example #12
    def create_obs_station(self, station_list, sta_code, array_name,
                           start_date, end_date, sta_longitude, sta_latitude,
                           sta_elevation, deployment):

        obs_station = obspy.core.inventory.Station(sta_code,
                                                   latitude=sta_latitude,
                                                   longitude=sta_longitude,
                                                   start_date=start_date,
                                                   end_date=end_date,
                                                   elevation=sta_elevation)

        obs_station.creation_date = UTCDateTime(
            station_list[deployment][0]['deploy_time/epoch_l'])
        obs_station.termination_date = UTCDateTime(
            station_list[deployment][0]['pickup_time/epoch_l'])

        extra = AttribDict({
            'PH5Array': {
                'value': str(array_name)[8:],
                'namespace': self.manager.iris_custom_ns,
                'type': 'attribute'
            }
        })
        obs_station.extra = extra
        obs_station.site = obspy.core.inventory.Site(
            name=station_list[deployment][0]['location/description_s'])
        return obs_station
Example #13
 def create_obs_network(self):
     obs_stations = self.read_stations()
     for errmsg, logtype in sorted(list(self.unique_errors)):
         if logtype == 'error':
             LOGGER.error(errmsg)
         else:
             LOGGER.warning(errmsg)
     if obs_stations:
         obs_network = inventory.Network(self.experiment_t[0]['net_code_s'])
         obs_network.description = self.experiment_t[0]['longname_s']
         start_time, end_time = self.get_network_date()
         obs_network.start_date = UTCDateTime(start_time)
         obs_network.end_date = UTCDateTime(end_time)
         obs_network.total_number_of_stations = self.total_number_stations
         extra = AttribDict({
             'PH5ReportNum': {
                 'value': self.experiment_t[0]['experiment_id_s'],
                 'namespace': self.manager.iris_custom_ns,
                 'type': 'attribute'
             }
         })
         obs_network.extra = extra
         obs_network.stations = obs_stations
         return obs_network
     else:
         return
Example #14
def _parse_hypo2000_file(hypo_file):
    hypo = AttribDict()
    picks = list()
    hypo_line = False
    station_line = False
    oldpick = None
    for line in open(hypo_file):
        word = line.split()
        if not word:
            continue
        if hypo_line:
            hypo = _parse_hypo2000_hypo_line(line)
            evid = os.path.basename(hypo_file)
            evid = evid.replace('.txt', '')
            hypo.evid = evid
        if station_line:
            try:
                pick = _parse_hypo2000_station_line(
                    line, oldpick, hypo.orig_time)
                oldpick = pick
                picks.append(pick)
            except Exception:
                continue
        if word[0] == 'YEAR':
            hypo_line = True
            continue
        hypo_line = False
        if word[0] == 'STA':
            station_line = True
    if not hypo:
        raise TypeError('Could not find hypocenter data.')
    return hypo, picks
Example #15
def _parse_hypo71_hypocenter(hypo_file):
    with open(hypo_file) as fp:
        line = fp.readline()
        # Skip the first line if it contains
        # characters in the first 10 digits:
        if any(c.isalpha() for c in line[0:10]):
            line = fp.readline()
    hypo = AttribDict()
    timestr = line[0:17]
    # There are two possible formats for the timestring.
    # We try both of them
    try:
        dt = datetime.strptime(timestr, '%y%m%d %H %M%S.%f')
    except Exception:
        dt = datetime.strptime(timestr, '%y%m%d %H%M %S.%f')
    hypo.origin_time = UTCDateTime(dt)
    lat = float(line[17:20])
    lat_deg = float(line[21:26])
    hypo.latitude = lat + lat_deg/60
    lon = float(line[26:30])
    lon_deg = float(line[31:36])
    hypo.longitude = lon + lon_deg/60
    hypo.depth = float(line[36:42])
    evid = os.path.basename(hypo_file)
    evid = evid.replace('.phs', '').replace('.h', '').replace('.hyp', '')
    hypo.evid = evid
    return hypo
Example #16
 def test_deepcopy(self):
     """
     Tests deepcopy method of Stats object.
     """
     stats = Stats()
     stats.network = 'BW'
     stats['station'] = 'ROTZ'
     stats['other1'] = {'test1': '1'}
     stats['other2'] = AttribDict({'test2': '2'})
     stats['other3'] = 'test3'
     stats2 = copy.deepcopy(stats)
     stats.network = 'CZ'
     stats.station = 'RJOB'
     self.assertEqual(stats2.__class__, Stats)
     self.assertEqual(stats2.network, 'BW')
     self.assertEqual(stats2.station, 'ROTZ')
     self.assertEqual(stats2.other1.test1, '1')
     self.assertEqual(stats2.other1.__class__, AttribDict)
     self.assertEqual(len(stats2.other1), 1)
     self.assertEqual(stats2.other2.test2, '2')
     self.assertEqual(stats2.other2.__class__, AttribDict)
     self.assertEqual(len(stats2.other2), 1)
     self.assertEqual(stats2.other3, 'test3')
     self.assertEqual(stats.network, 'CZ')
     self.assertEqual(stats.station, 'RJOB')
Example #17
def add_gpscorrection_into_stationxml(csv_file, input_xml, out_xml=None):
    """
    Read in the correction CSV data from a file, get the station metadata node from input_xml file,
    then add the CSV data into the station xml node to write into out_xml

    :param csv_file: input csv file with correction data
    :param input_xml: input original stationXML file which contains the metadata for the network and station of csv_file
    :param out_xml:  Directory of the output xml file
    :return: full path of the output xml file
    """

    ns = "https://github.com/GeoscienceAustralia/hiperseis/xmlns/1.0"

    (net, sta, csv_data) = get_csv_correction_data(csv_file)

    # path2_myxml = "/home/feizhang/Githubz/hiperseis/tests/testdata/7D_2012_2013.xml"
    my_inv = read_inventory(input_xml, format='STATIONXML')

    # https://docs.obspy.org/packages/autogen/obspy.core.inventory.inventory.Inventory.select.html#obspy.core.inventory.inventory.Inventory.select

    selected_inv = my_inv.select(network=net, station=sta)

    # print(selected_inv)

    my_tag = AttribDict()
    my_tag.namespace = ns
    my_tag.value = csv_data

    selected_inv.networks[0].stations[0].extra = AttribDict()
    selected_inv.networks[0].stations[0].extra.gpsclockcorrection = my_tag

    stationxml_with_csv = '%s.%s_station_inv_modified.xml' % (net, sta)

    if out_xml is not None and os.path.isdir(out_xml):
        stationxml_with_csv = os.path.join(out_xml, stationxml_with_csv)

    selected_inv.write(
        stationxml_with_csv,
        format='STATIONXML',
        nsmap={
            'GeoscienceAustralia':
            'https://github.com/GeoscienceAustralia/hiperseis/xmlns/1.0'
        })

    # my_inv.write('modified_inventory.xml', format='STATIONXML')

    return stationxml_with_csv
Example #18
def _parse_hypocenter_from_event(ev):
    hypo = AttribDict()
    hypo.latitude = ev.latitude
    hypo.longitude = ev.longitude
    hypo.depth = ev.depth
    hypo.origin_time = ev.utcdate
    hypo.evid = ev.event_id
    return hypo
Example #19
 def offsets(self):
     """
     Returns a dictionary with element code and x, y offsets from
     array center.
     """
     offsets = {}
     for ele in self.elements:
         offsets[ele.code] = {'x': ele.x, 'y': ele.y}
     return AttribDict(offsets)
Example #20
 def state(self):
     """Return test state."""
     out = AttribDict()
     out.path = os.path.join(os.path.dirname(__file__), 'data')
     out.path_images = os.path.join(os.path.dirname(__file__), 'images')
     out.a = np.sin(np.linspace(0, 10, 101))
     out.b = 5 * np.roll(out.a, 5)
     out.c = 5 * np.roll(out.a[:81], 5)
     return out
Example #21
    def FK(self, st, inv, stime, etime, fmin, fmax, slim, sres, win_len,
           win_frac):

        n = len(st)
        for i in range(n):
            coords = inv.get_coordinates(st[i].id)
            st[i].stats.coordinates = AttribDict({
                'latitude': coords['latitude'],
                'elevation': coords['elevation'],
                'longitude': coords['longitude']})

        kwargs = dict(
            # slowness grid: X min, X max, Y min, Y max, Slow Step
            sll_x=-1 * slim,
            slm_x=slim,
            sll_y=-1 * slim,
            slm_y=slim,
            sl_s=sres,
            # sliding window properties
            win_len=win_len,
            win_frac=win_frac,
            # frequency properties
            frqlow=fmin,
            frqhigh=fmax,
            prewhiten=0,
            # restrict output
            semb_thres=-1e9,
            vel_thres=-1e9,
            timestamp='mlabday',
            stime=stime + 0.1,
            etime=etime - 0.1)

        try:
            out = array_processing(st, **kwargs)

            T = out[:, 0]
            relpower = out[:, 1]
            abspower = out[:, 2]
            AZ = out[:, 3]
            AZ[AZ < 0.0] += 360
            Slowness = out[:, 4]

        except Exception:
            print("Check Parameters and Starttime/Endtime")

            relpower = []
            abspower = []
            AZ = []
            Slowness = []
            T = []

        return relpower, abspower, AZ, Slowness, T
Example #22
 def state(self):
     # directory where the test files are located
     out = AttribDict()
     out.path = PATH
     out.path_images = os.path.join(PATH, os.pardir, "images")
     # some pre-computed ppsd used for plotting tests:
     # (ppsd._psd_periods was downcast to np.float16 to save space)
     out.example_ppsd_npz = os.path.join(PATH, "ppsd_kw1_ehz.npz")
     # ignore some "RuntimeWarning: underflow encountered in multiply"
     return out
Example #23
 def test_setdefault(self):
     """
     Tests setdefault method of AttribDict class.
     """
     ad = AttribDict()
     # 1
     default = ad.setdefault('test', 'NEW')
     self.assertEqual(default, 'NEW')
     self.assertEqual(ad['test'], 'NEW')
     self.assertEqual(ad.test, 'NEW')
     self.assertEqual(ad.get('test'), 'NEW')
     self.assertEqual(ad.__getattr__('test'), 'NEW')
     self.assertEqual(ad.__getitem__('test'), 'NEW')
     self.assertEqual(ad.__dict__['test'], 'NEW')
     self.assertEqual(ad.__dict__.get('test'), 'NEW')
     self.assertTrue('test' in ad)
     self.assertTrue('test' in ad.__dict__)
     # 2 - existing key should not be overwritten
     default = ad.setdefault('test', 'SOMETHINGDIFFERENT')
     self.assertEqual(default, 'NEW')
     self.assertEqual(ad['test'], 'NEW')
     self.assertEqual(ad.test, 'NEW')
     self.assertEqual(ad.get('test'), 'NEW')
     self.assertEqual(ad.__getattr__('test'), 'NEW')
     self.assertEqual(ad.__getitem__('test'), 'NEW')
     self.assertEqual(ad.__dict__['test'], 'NEW')
     self.assertEqual(ad.__dict__.get('test'), 'NEW')
     self.assertTrue('test' in ad)
     self.assertTrue('test' in ad.__dict__)
     # 3 - default value is None
     ad = AttribDict()
     default = ad.setdefault('test')
     self.assertEqual(default, None)
     self.assertEqual(ad['test'], None)
     self.assertEqual(ad.test, None)
     self.assertEqual(ad.get('test'), None)
     self.assertEqual(ad.__getattr__('test'), None)
     self.assertEqual(ad.__getitem__('test'), None)
     self.assertEqual(ad.__dict__['test'], None)
     self.assertEqual(ad.__dict__.get('test'), None)
     self.assertTrue('test' in ad)
     self.assertTrue('test' in ad.__dict__)
Example #24
def _parse_hypocenter(hypo_file):
    if hypo_file is None:
        return None

    hypo = AttribDict()
    hypo.latitude = None
    hypo.longitude = None
    hypo.depth = None
    hypo.origin_time = None
    hypo.evid = None

    if isinstance(hypo_file, str):
        try:
            with open(hypo_file) as fp:
                # Corinth hypocenter file format:
                # TODO: check file format
                line = fp.readline()
                # Skip the first line if it contains
                # characters in the first 10 digits:
                if any(c.isalpha() for c in line[0:10]):
                    line = fp.readline()
        except IOError as err:
            logger.error(err)
            ssp_exit(1)

        timestr = line[0:17]
        # There are two possible formats for the timestring.
        # We try both of them
        try:
            dt = datetime.strptime(timestr, '%y%m%d %H %M%S.%f')
        except ValueError:
            dt = datetime.strptime(timestr, '%y%m%d %H%M %S.%f')
        hypo.origin_time = UTCDateTime(dt)

        lat = float(line[17:20])
        lat_deg = float(line[21:26])
        hypo.latitude = lat + lat_deg / 60
        lon = float(line[26:30])
        lon_deg = float(line[31:36])
        hypo.longitude = lon + lon_deg / 60
        hypo.depth = float(line[36:42])
        evid = os.path.basename(hypo_file)
        evid = evid.replace('.phs', '').replace('.h', '').replace('.hyp', '')
        hypo.evid = evid

    else:  # FIXME: put a condition here!
        ev = hypo_file  # FIXME: improve this!
        hypo.latitude = ev.latitude
        hypo.longitude = ev.longitude
        hypo.depth = ev.depth
        hypo.origin_time = ev.utcdate
        hypo.evid = ev.event_id

    return hypo
Example #25
 def __init__(self, filename):
     if type(filename) is str:
         input_ = AttribDict()
         with open(filename) as fh:
             for line in fh:
                 # get rid of comments
                 line = line.split("#")[0].strip()
                 if not line:
                     continue
                 line = line.split()
                 input_category = input_.setdefault(line.pop(0), [])
                 input_item = AttribDict()
                 for item in line:
                     key, value = item.split("=", 1)
                     input_item[key] = _decode_string_value(value)
                 input_category.append(input_item)
     else:
         input_ = filename
     super(Inputfile, self).__init__(input_)
     self.get_Proj4()
Example #26
File: events.py Project: cjhopp/scripts
def ORNL_events_to_cat(ornl_file):
    """Make Catalog from ORNL locations"""
    cat = Catalog()
    loc_df = pd.read_csv(ornl_file, infer_datetime_format=True)
    loc_df = loc_df.set_index('event_datetime')
    eid = 0
    for dt, row in loc_df.iterrows():
        ot = UTCDateTime(dt)
        hmc_east = row['x(m)']
        hmc_north = row['y(m)']
        hmc_elev = row['z(m)']
        errX = row['error_x (m)']
        errY = row['error_y (m)']
        errZ = row['error_z (m)']
        rms = row['rms (millisecond)']
        converter = SURF_converter()
        lon, lat, elev = converter.to_lonlat((hmc_east, hmc_north,
                                              hmc_elev))
        o = Origin(time=ot, latitude=lat, longitude=lon, depth=130 - elev)
        o.origin_uncertainty = OriginUncertainty()
        o.quality = OriginQuality()
        ou = o.origin_uncertainty
        oq = o.quality
        ou.max_horizontal_uncertainty = np.max([errX, errY])
        ou.min_horizontal_uncertainty = np.min([errX, errY])
        o.depth_errors.uncertainty = errZ
        oq.standard_error = rms * 1e-3  # convert rms from milliseconds to seconds
        extra = AttribDict({
            'hmc_east': {
                'value': hmc_east,
                'namespace': 'smi:local/hmc'
            },
            'hmc_north': {
                'value': hmc_north,
                'namespace': 'smi:local/hmc'
            },
            'hmc_elev': {
                'value': hmc_elev,
                'namespace': 'smi:local/hmc'
            },
            'hmc_eid': {
                'value': eid,
                'namespace': 'smi:local/hmc'
            }
        })
        o.extra = extra
        rid = ResourceIdentifier(id=ot.strftime('%Y%m%d%H%M%S%f'))
        # Dummy magnitude of 1. for all events until further notice
        mag = Magnitude(mag=1., mag_errors=QuantityError(uncertainty=1.))
        ev = Event(origins=[o], magnitudes=[mag], resource_id=rid)
        ev.preferred_origin_id = o.resource_id.id
        cat.events.append(ev)
        eid += 1
    return cat
Example #27
File: tensor.py Project: gomexnez/tdmtpy
    def __init__(self, m=None, **kwargs):
        self._set_m(m)
        # Optional keywords default to None
        inverted = AttribDict()

        keys = ["depth", "ts", "weights", "station_VR", "total_VR", "dd", "ss"]
        # Update given key by their given value
        for key, value in kwargs.items():
            if key in keys:
                inverted[key] = value
        self.inverted = inverted
Example #28
def _add_hypocenter(trace, hypo):
    if hypo is None:
        # Try to get hypocenter information from the SAC header
        try:
            evla = trace.stats.sac.evla
            evlo = trace.stats.sac.evlo
            evdp = trace.stats.sac.evdp
            begin = trace.stats.sac.b
        except AttributeError:
            return

        try:
            tori = trace.stats.sac.o
            origin_time = trace.stats.starttime + tori - begin
        except AttributeError:
            origin_time = None

        if origin_time is not None:
            # make a copy of origin_time and round it to the nearest second
            _second = origin_time.second
            if origin_time.microsecond >= 500000:
                _second += 1
            _microsecond = 0
            _evid_time = origin_time.replace(
                second=_second, microsecond=_microsecond)
        else:
            # make a copy of starttime and round it to the nearest minute
            _starttime = trace.stats.starttime
            _minute = _starttime.minute
            if _starttime.second >= 30:
                _minute += 1
            _second = 0
            _microsecond = 0
            _evid_time = _starttime.replace(
                minute=_minute, second=_second, microsecond=_microsecond)

        hypo = AttribDict()
        hypo.origin_time = origin_time
        try:
            kevnm = trace.stats.sac.kevnm
            # if string is empty, raise Exception
            if not kevnm:
                raise Exception
            # if string has spaces, then kevnm is not a code,
            # so raise Exception
            if ' ' in kevnm:
                raise Exception
            hypo.evid = kevnm
        except Exception:
            hypo.evid = _evid_time.strftime('%Y%m%d_%H%M%S')
        hypo.latitude = evla
        hypo.longitude = evlo
        hypo.depth = evdp
    trace.stats.hypo = hypo
Example #29
def get_defaults():
    df = pd.read_csv(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                  "default.csv"),
                     delimiter=",",
                     error_bad_lines=False,
                     warn_bad_lines=True,
                     encoding="latin-1",
                     index_col=0)
    df["type"] = [pydoc.locate(t) for t in df["type"].values]
    df = df.fillna("")

    default = AttribDict()
    for id, row in df.iterrows():
        if len(row.possible_values):
            df.loc[id].definition += " " + row.possible_values.replace(
                row.default, "[%s]" % row.default)
        # elif len(row.default):
        #     df.loc[id].definition += " " + "[%s]" % row.default
        default[id] = AttribDict(row.to_dict())
    return default
Example #30
 def test_init(self):
     """
     Tests initialization of AttribDict class.
     """
     ad = AttribDict({'test': 'NEW'})
     self.assertEqual(ad['test'], 'NEW')
     self.assertEqual(ad.test, 'NEW')
     self.assertEqual(ad.get('test'), 'NEW')
     self.assertEqual(ad.__getattr__('test'), 'NEW')
     self.assertEqual(ad.__getitem__('test'), 'NEW')
     self.assertEqual(ad.__dict__['test'], 'NEW')
     self.assertEqual(ad.__dict__.get('test'), 'NEW')
     self.assertTrue('test' in ad)
     self.assertTrue('test' in ad.__dict__)