Example #1
def test_with_quakeml():
    np1 = NodalPlane(strike=259, dip=74, rake=10)
    np2 = NodalPlane(strike=166, dip=80, rake=164)
    nodal_planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    taxis = Axis(plunge=40, azimuth=70)
    naxis = Axis(plunge=50, azimuth=80)
    paxis = Axis(plunge=60, azimuth=90)
    paxes = PrincipalAxes(t_axis=taxis,
                          n_axis=naxis,
                          p_axis=paxis)
    focal = FocalMechanism(nodal_planes=nodal_planes,
                           principal_axes=paxes)
    event = Event(focal_mechanisms=[focal])
    catalog = Catalog(events=[event])
    event_text = '''<shakemap-data code_version="4.0" map_version="1">
<earthquake id="us2000cmy3" lat="56.046" lon="-149.073" mag="7.9"
time="2018-01-23T09:31:42Z"
depth="25.00" locstring="280km SE of Kodiak, Alaska" netid="us" network=""/>
</shakemap-data>'''
    tempdir = tempfile.mkdtemp()
    try:
        xmlfile = os.path.join(tempdir, 'quakeml.xml')
        catalog.write(xmlfile, format="QUAKEML")
        eventfile = os.path.join(tempdir, 'event.xml')
        with open(eventfile, 'wt') as f:
            f.write(event_text)
        params = read_moment_quakeml(xmlfile)
        assert params['moment']['NP1']['strike'] == 259.0
        assert params['moment']['NP1']['dip'] == 74.0
        assert params['moment']['NP1']['rake'] == 10.0
        assert params['moment']['NP2']['strike'] == 166.0
        assert params['moment']['NP2']['dip'] == 80.0
        assert params['moment']['NP2']['rake'] == 164.0
        origin = Origin.fromFile(eventfile, momentfile=xmlfile)
        assert origin.mag == 7.9
        assert origin.lat == 56.046
        assert origin.lon == -149.073
        assert origin.id == 'us2000cmy3'
    finally:
        shutil.rmtree(tempdir)
Example #2
def read_regex(event_file, regex=regex_GEOFON, creation_info='GEOFON'):
    """
    Read events from event_file with the help of given regular expression.
    """
    with open(event_file, 'r') as f:
        filedata = f.read()
    event_matches = re.finditer(regex, filedata, re.VERBOSE | re.MULTILINE)
    list_ = [i.groupdict() for i in event_matches]
    events = []
    for event in list_:
        # convert numeric strings to int and float types
        for key, item in event.items():
            if util.isint(item):
                event[key] = int(item)
            elif util.isfloat(item):
                event[key] = float(item)
            else:
                event[key] = item.strip()
        if 'latitude_sign' in event and event['latitude_sign'] == 'S':
            event['latitude'] = -event['latitude']
        if 'longitude_sign' in event and event['longitude_sign'] == 'W':
            event['longitude'] = -event['longitude']
        if 'AM' in event:
            ci = creation_info + (' automatic'
                                  if event['AM'] == 'A' else ' manual')
        else:
            ci = creation_info
        ev = Event(
            event_type='earthquake',
            creation_info=ci,
            origins=[
                Origin(time=UTC(event['time']),
                       latitude=event['latitude'],
                       longitude=event['longitude'],
                       depth=event['depth'])
            ],
            magnitudes=[Magnitude(mag=event['magnitude'], magnitude_type='M')],
            event_descriptions=[
                EventDescription(event['flinn'], 'flinn-engdahl region')
            ] if 'flinn' in event else None)
        events.append(ev)
    events.sort(key=lambda x: x.origins[0].time)
    return Catalog(events)
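
A minimal usage sketch for read_regex; the bulletin filename below is a placeholder, and regex_GEOFON is assumed to be the module-level default used above:

# 'geofon_bulletin.txt' is a hypothetical file in the GEOFON plain-text format.
catalog = read_regex('geofon_bulletin.txt')
print(len(catalog))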
Example #3
 def test_write_pha_minimal(self):
     ori = Origin(time=UTCDateTime(0),
                  latitude=42,
                  longitude=43,
                  depth=10000)
     pick = Pick(time=UTCDateTime(10),
                 phase_hint='S',
                 waveform_id=WaveformStreamID(station_code='STA'))
     del ori.latitude_errors
     del ori.longitude_errors
     del ori.depth_errors
     cat = Catalog([Event(origins=[ori], picks=[pick])])
     with NamedTemporaryFile() as tf:
         tempfile = tf.name
         with self.assertWarnsRegex(UserWarning, 'Missing mag'):
             cat.write(tempfile, 'HYPODDPHA')
         cat2 = read_events(tempfile)
     self.assertEqual(len(cat2), 1)
     self.assertEqual(len(cat2[0].picks), 1)
Example #4
 def test_append(self):
     """
     Tests the append method of the Catalog object.
     """
     # 1 - create catalog and add a few events
     catalog = Catalog()
     event1 = Event()
     event2 = Event()
     self.assertEqual(len(catalog), 0)
     catalog.append(event1)
     self.assertEqual(len(catalog), 1)
     self.assertEqual(catalog.events, [event1])
     catalog.append(event2)
     self.assertEqual(len(catalog), 2)
     self.assertEqual(catalog.events, [event1, event2])
      # 2 - appending objects other than Event should fail
     self.assertRaises(TypeError, catalog.append, str)
     self.assertRaises(TypeError, catalog.append, Catalog)
     self.assertRaises(TypeError, catalog.append, [event1])
Example #5
    def make_test_catalog(self):
        """
        Make a test catalog with fixed resource IDs, some of which reference
        other objects belonging to the event (e.g. arrivals -> picks).
        """
        pick_rid = ResourceIdentifier(id='obspy.org/tests/test_pick')
        origin_rid = ResourceIdentifier(id='obspy.org/tests/test_origin')
        arrival_rid = ResourceIdentifier(id='obspy.org/tests/test_arrival')
        ar_pick_rid = ResourceIdentifier(id='obspy.org/tests/test_pick')
        catalog_rid = ResourceIdentifier(id='obspy.org/tests/test_catalog')

        picks = [Pick(time=UTCDateTime(), resource_id=pick_rid)]
        arrivals = [Arrival(resource_id=arrival_rid, pick_id=ar_pick_rid)]
        origins = [Origin(arrivals=arrivals, resource_id=origin_rid)]
        events = [Event(picks=picks, origins=origins)]
        events[0].preferred_origin_id = str(origin_rid.id)
        catalog = Catalog(events=events, resource_id=catalog_rid)
        # next bind all unbound resource_ids to the current event scope
        catalog.resource_id.bind_resource_ids()
        return catalog
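
A short sketch of what the binding gives you, assuming the test-class context above; get_referred_object is ObsPy's standard ResourceIdentifier lookup:

    cat = self.make_test_catalog()
    # After bind_resource_ids(), the arrival's pick_id resolves to the pick
    # object within the same event scope.
    arrival = cat[0].origins[0].arrivals[0]
    assert arrival.pick_id.get_referred_object() is cat[0].picks[0]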
Example #6
    def make_xml(self):
        client = Client("IRIS")
        cat = Catalog()  # empty earthquake catalogue
        print('')

        # Retrieve events from IRIS by event ID and append them to the catalogue
        for event_id in self.ieb_events['IRIS_ID']:
            try:
                print('Requesting Information for event: ' + str(event_id))
                IRIS_event = client.get_events(eventid=int(event_id))[0]
                cat.append(IRIS_event)
            except FDSNException:
                print('')
                print('Error!!: No Event Information for ' + str(event_id))

        print('')
        print("Resulting Earthquake Catalogue:")
        print(cat)
        new_filename = os.path.splitext(self.ieb_filename)[0] + '.xml'
        cat.write(filename=new_filename, format="QUAKEML")
Example #7
 def test_issue_2339(self):
     """
     Make sure an empty EventDescription object does not prevent a catalog
     from being saved to disk and re-read, while still being equal.
     """
      # create a catalog with an empty event description
     empty_description = EventDescription()
     cat1 = Catalog(events=[read_events()[0]])
     cat1[0].event_descriptions.append(empty_description)
     # serialize the catalog using quakeml and re-read
     bio = io.BytesIO()
     cat1.write(bio, 'quakeml')
     bio.seek(0)
     cat2 = read_events(bio)
     # the text of the empty EventDescription instances should be equal
     text1 = cat1[0].event_descriptions[-1].text
     text2 = cat2[0].event_descriptions[-1].text
     self.assertEqual(text1, text2)
     # the two catalogs should be equal
     self.assertEqual(cat1, cat2)
Example #8
 def export_picks(self, filename, trace_list=None, format="NLLOC_OBS", debug=False, **kwargs):
     """
     """
     trace_list = self.traces if trace_list is None else trace_list
     event_list = []
     for trace in trace_list:
         event_list.extend([Event(picks=[pick]) for pick in trace.events])
     # Export to desired format
     if format == 'NLLOC_OBS':
         basename, ext = os.path.splitext(filename)
         for event in event_list:
             ts = event.picks[0].time.strftime("%Y%m%d%H%M%S%f")
             event_filename = "%s_%s%s" % (basename, ts, ext)
             if debug:
                 print "Generating event file {}".format(event_filename)
             event.write(event_filename, format=format)
     else:
         event_catalog = Catalog(event_list)
         if debug:
             print "Generating event file {}".format(filename)
         event_catalog.write(filename, format=format, **kwargs)
Example #9
def sfiles_to_event(sfile_list):
    """
    Write an event.dat file from a list of Seisan events

    :type sfile_list: list
    :param sfile_list: List of s-files to sort and put into the database

    :returns: List of tuples of event ID (int) and Sfile name
    """
    event_list = []
    sort_list = [(readheader(sfile).origins[0].time, sfile)
                 for sfile in sfile_list]
    sort_list.sort(key=lambda tup: tup[0])
    sfile_list = [sfile[1] for sfile in sort_list]
    catalog = Catalog()
    for i, sfile in enumerate(sfile_list):
        event_list.append((i, sfile))
        catalog.append(readheader(sfile))
    # Hand off to sister function
    write_event(catalog)
    return event_list
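
A usage sketch, assuming two Seisan s-files exist on disk (the names are placeholders):

# Hypothetical s-file paths; readheader and write_event come from the module above.
event_list = sfiles_to_event(['01-0411-15L.S201309', '02-0313-08L.S201309'])
for event_id, sfile in event_list:
    print(event_id, sfile)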
Example #10
def _inner_get_event(publicid, client):
    """
    Inner loop for parallel processing

    :type publicid: str
    :param publicid: GeoNet public ID
    :type client: obspy.clients.fdsn.Client
    :param client: Client to use for the download
    :return: catalog
    """
    import warnings
    from obspy.clients.fdsn.header import FDSNException
    from obspy import read_events, Catalog
    catalog = Catalog()
    try:
        data_stream = client._download('http://quakeml.geonet.org.nz/' +
                                       'quakeml/1.2/' + publicid)
        data_stream.seek(0, 0)
        catalog += read_events(data_stream, format="quakeml")
        data_stream.close()
    except FDSNException:
        warnings.warn('Unable to download event: ' + publicid)
    return catalog
Example #11
def _read_pha(filename, inventory=None, id_map=None, id_default='.{}..{}',
              ph2comp={'P': 'Z', 'S': 'N'}, encoding='utf-8'):
    """
    Read a HypoDD PHA file and return an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.event.read_events` function, call this
        instead.

    The optional parameters all deal with the problem, that the PHA format
    only stores station names for the picks, but the Pick object expects
    a SEED id.

    :param str filename: File or file-like object in text mode.
    :type inventory: :class:`~obspy.core.inventory.inventory.Inventory`
    :param inventory: Inventory used to retrieve network code, location code
        and channel code of stations (SEED id).
    :param dict id_map: If channel information was not found in inventory,
        it will be looked up in this dictionary
        (example: `id_map={'MOX': 'GR.{}..HH{}'}`).
        The values must contain three dots and two `{}` which are
        substituted by station code and component.
    :param str id_default: Default SEED id expression.
        The value must contain three dots and two `{}` which are
        substituted by station code and component.
    :param dict ph2comp: mapping of phases to components
        (default: {'P': 'Z', 'S': 'N'})
    :param str encoding: encoding used (default: utf-8)

    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.
    """
    seed_map = _seed_id_map(inventory, id_map)
    with io.open(filename, 'r', encoding=encoding) as f:
        text = f.read()
    events = [_block2event(block, seed_map, id_default, ph2comp)
              for block in text.split('#')[1:]]
    return Catalog(events)
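
As the docstring warns, this reader is meant to be used through obspy.read_events rather than called directly; a minimal sketch with a placeholder filename:

from obspy import read_events
cat = read_events('example.pha')  # 'example.pha' is a placeholder
print(cat)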
Example #12
def get_nearby_events(event: Event, catalog: Catalog,
                      radius: float) -> Catalog:
    """
    Get a catalog of events close to another event.

    Parameters
    ----------
    event:
        Central event to calculate distance relative to
    catalog:
        Catalog to extract events from
    radius:
        Radius around `event` in km

    Returns
    -------
    Catalog of events close to `event`
    """
    sub_catalog = Catalog([
        e for e in catalog.events if inter_event_distance(event, e) <= radius
    ])
    return sub_catalog
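
A usage sketch; inter_event_distance is assumed to be the module's own helper returning kilometres:

# Events within 50 km of the first event in an existing catalog (sketch).
nearby = get_nearby_events(event=catalog[0], catalog=catalog, radius=50.0)
print(len(nearby))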
Example #13
 def test_append(self):
     """
     Tests the append method of the Catalog object.
     """
     # 1 - create catalog and add a few events
     catalog = Catalog()
     event1 = Event()
     event2 = Event()
     assert len(catalog) == 0
     catalog.append(event1)
     assert len(catalog) == 1
     assert catalog.events == [event1]
     catalog.append(event2)
     assert len(catalog) == 2
     assert catalog.events == [event1, event2]
      # 2 - appending objects other than Event should fail
     with pytest.raises(TypeError):
         catalog.append(str)
     with pytest.raises(TypeError):
         catalog.append(Catalog)
     with pytest.raises(TypeError):
         catalog.append([event1])
Example #14
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

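    # Note the component swap below: m_tt is filled from "Mpp" and m_pp
    # from "Mtt".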
    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
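
A hypothetical usage sketch for the function above; the dictionary keys mirror the ones the function reads, but every value is made up for illustration:

from obspy import UTCDateTime
# All values below are placeholders.
event = {"identifier": "0001", "time": UTCDateTime(2012, 1, 1),
         "longitude": -20.0, "latitude": 10.0, "depth_in_km": 12.0,
         "Mw": 5.5, "Mrr": 1.0e17, "Mtt": 2.0e17, "Mpp": 3.0e17,
         "Mrt": 0.0, "Mrp": 0.0, "Mtp": 0.0}
event_to_quakeml(event, "inversion_event.xml")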
Example #15
 def test_catalog_grouping(self):
     testing_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                 'test_data', 'REA', 'TEST_', '*.S??????')
     catalog = Catalog()
     sfiles = glob.glob(testing_path)
     for sfile in sfiles:
         catalog += read_events(sfile)
     for process_len, pads in [(60, [5]), (300, [5, 60]),
                               (3600, [5, 60, 300]), (86400, [5, 60, 300])]:
         for data_pad in pads:
             sub_catalogs = _group_events(catalog=catalog,
                                          process_len=process_len,
                                          data_pad=data_pad)
             k_events = 0
             for sub_catalog in sub_catalogs:
                 min_time = min(
                     [event.origins[0].time for event in sub_catalog])
                 min_time -= data_pad
                 for event in sub_catalog:
                     self.assertTrue((event.origins[0].time + data_pad) -
                                     min_time < process_len)
                     k_events += 1
             self.assertEqual(k_events, len(catalog))
Example #16
    def _on_file_save(self):
        """
        Creates a new obspy.core.event.Magnitude object and writes the moment
        magnitude to it.
        """
        # Get the save filename.
        filename = QtGui.QFileDialog.getSaveFileName(caption="Save as...")
        filename = os.path.abspath(str(filename))
        mag = Magnitude()
        mag.mag = self.final_result["moment_magnitude"]
        mag.magnitude_type = "Mw"
        mag.station_count = self.final_result["station_count"]
        mag.evaluation_mode = "manual"
        # Link to the used origin.
        mag.origin_id = self.current_state["event"].origins[0].resource_id
        mag.method_id = "Magnitude picker Krischer"
        # XXX: Potentially change once this program gets more stable.
        mag.evaluation_status = "preliminary"
        # Write the other results as Comments.
        mag.comments.append(Comment(
            "Seismic moment in Nm: %g" % self.final_result["seismic_moment"]))
        mag.comments.append(Comment(
            "Circular source radius in m: %.2f" %
            self.final_result["source_radius"]))
        mag.comments.append(Comment(
            "Stress drop in Pa: %.2f" % self.final_result["stress_drop"]))
        mag.comments.append(Comment(
            "Very rough Q estimation: %.1f" %
            self.final_result["quality_factor"]))

        event = copy.deepcopy(self.current_state["event"])
        event.magnitudes.append(mag)
        cat = Catalog()
        cat.events.append(event)
        cat.write(filename, format="quakeml")
Example #17
 def test_download_various_methods(self):
     """
     Will download data from server and store in various databases,
     then create templates using the various methods.
     """
     client = Client('GEONET')
     # get the events
     catalog = client.get_events(eventid='2016p008194')
     # Select 3 channels to use and download
     sta_chans = [(pick.waveform_id.station_code,
                   pick.waveform_id.channel_code)
                  for pick in catalog[0].picks[0:3]]
     t1 = UTCDateTime(catalog[0].origins[0].time.date)
     t2 = t1 + 86400
     bulk = [('NZ', sta_chan[0], '*', sta_chan[1], t1, t2)
             for sta_chan in sta_chans]
     continuous_st = client.get_waveforms_bulk(bulk)
     continuous_st.merge(fill_value=0)
     # Test multi_template_gen
     templates = multi_template_gen(catalog, continuous_st, length=3)
     self.assertEqual(len(templates), 1)
     # Test without an event
     templates = multi_template_gen(Catalog(), continuous_st, length=3)
     self.assertEqual(len(templates), 0)
Example #18
def main(argv):

    # Instantiate catalogue object
    catalogue = Catalog()

    # Build Quakeml Event object

    ##########################################################################
    # get GG cat data
    ##########################################################################

    cat = parse_ggcat('../../catalogue/data/test.csv')

    ##########################################################################
    # loop thru events and get data
    ##########################################################################

    for evnum, ev in enumerate(cat):
        # only look for post 1990 data
        if ev['datetime'] >= datetime.datetime(1990, 1, 1, 0, 0):

            # print(evnum, ev['datetime'])

            get_cwb_data(ev)
Example #19
def read_phase(ph_file):
    """
    Read hypoDD phase files into Obspy catalog class.

    :type ph_file: str
    :param ph_file: Phase file to read event info from.

    :returns: Catalog of events from file.
    :rtype: :class:`obspy.core.event.Catalog`

    >>> from obspy.core.event.catalog import Catalog
    >>> # Get the path to the test data
    >>> import eqcorrscan
    >>> import os
    >>> TEST_PATH = os.path.dirname(eqcorrscan.__file__) + '/tests/test_data'
    >>> catalog = read_phase(TEST_PATH + '/tunnel.phase')
    >>> isinstance(catalog, Catalog)
    True
    """
    ph_catalog = Catalog()
    event_text = None
    # The top line of each event is marked by '#' in position 0
    with open(ph_file, 'r') as f:
        for line in f:
            if line[0] == '#':
                if event_text is not None:
                    ph_catalog.append(_phase_to_event(event_text))
                event_text = {'header': line.rstrip(), 'picks': []}
            else:
                event_text['picks'].append(line.rstrip())
    ph_catalog.append(_phase_to_event(event_text))
    return ph_catalog
Example #20
def _get_geonet_pubids(publicids, parallel=True):
    """
    Get GeoNet events while they haven't included get_events in fdsn.

    :type publicids: list
    :param publicids: List of public id numbers for events wanted.

    :returns: Catalog of events
    :rtype: obspy.core.event.Catalog
    """
    import obspy
    if int(obspy.__version__.split('.')[0]) > 0:
        from obspy.clients.fdsn import Client
    else:
        from obspy.fdsn import Client
    from obspy.core.event import Catalog
    from multiprocessing import Pool, cpu_count

    client = Client('GEONET')
    catalog = Catalog()
    # Multi-process this bad-boy
    if not parallel:
        for publicid in publicids:
            catalog += _inner_get_event(publicid=publicid, client=client)
    else:
        pool = Pool(processes=cpu_count())
        results = [
            pool.apply_async(_inner_get_event, args=(publicid, client))
            for publicid in publicids
        ]
        pool.close()
        cat_list = [p.get() for p in results]
        pool.join()
        for ev in cat_list:
            catalog += ev
    return catalog
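
A usage sketch reusing the GeoNet public ID that appears in Example #17; parallel=False keeps everything in a single process:

# '2016p008194' is taken from Example #17; any valid GeoNet public ID works.
catalog = _get_geonet_pubids(['2016p008194'], parallel=False)
print(catalog)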
Example #21
    def test_creating_minimal_quakeml_with_mt(self):
        """
        Tests the creation of a minimal QuakeML containing origin, magnitude
        and moment tensor.
        """
        # Set up the test values
        lat, lon, depth, org_time = 10.0, -20.0, 12000, UTCDateTime(2012, 1, 1)
        mrr, mtt, mpp, mtr, mpr, mtp = 1E18, 2E18, 3E18, 3E18, 2E18, 1E18
        scalar_moment = math.sqrt(
            mrr ** 2 + mtt ** 2 + mpp ** 2 + mtr ** 2 + mpr ** 2 + mtp ** 2)
        moment_magnitude = 0.667 * (math.log10(scalar_moment) - 9.1)

        # Initialise event
        ev = Event(event_type="earthquake")

        ev_origin = Origin(time=org_time, latitude=lat, longitude=lon,
                           depth=depth, resource_id=ResourceIdentifier())
        ev.origins.append(ev_origin)

        # populate event moment tensor
        ev_tensor = Tensor(m_rr=mrr, m_tt=mtt, m_pp=mpp, m_rt=mtr, m_rp=mpr,
                           m_tp=mtp)

        ev_momenttensor = MomentTensor(tensor=ev_tensor)
        ev_momenttensor.scalar_moment = scalar_moment
        ev_momenttensor.derived_origin_id = ev_origin.resource_id

        ev_focalmechanism = FocalMechanism(moment_tensor=ev_momenttensor)
        ev.focal_mechanisms.append(ev_focalmechanism)

        # populate event magnitude
        ev_magnitude = Magnitude()
        ev_magnitude.mag = moment_magnitude
        ev_magnitude.magnitude_type = 'Mw'
        ev_magnitude.evaluation_mode = 'automatic'
        ev.magnitudes.append(ev_magnitude)

        # write QuakeML file
        cat = Catalog(events=[ev])
        memfile = io.BytesIO()
        cat.write(memfile, format="quakeml", validate=IS_RECENT_LXML)

        memfile.seek(0, 0)
        new_cat = _read_quakeml(memfile)
        self.assertEqual(len(new_cat), 1)
        event = new_cat[0]
        self.assertEqual(len(event.origins), 1)
        self.assertEqual(len(event.magnitudes), 1)
        self.assertEqual(len(event.focal_mechanisms), 1)
        org = event.origins[0]
        mag = event.magnitudes[0]
        fm = event.focal_mechanisms[0]
        self.assertEqual(org.latitude, lat)
        self.assertEqual(org.longitude, lon)
        self.assertEqual(org.depth, depth)
        self.assertEqual(org.time, org_time)
        # Moment tensor.
        mt = fm.moment_tensor.tensor
        self.assertTrue(abs(fm.moment_tensor.scalar_moment - scalar_moment) /
                        scalar_moment < 1E-10)
        self.assertEqual(mt.m_rr, mrr)
        self.assertEqual(mt.m_pp, mpp)
        self.assertEqual(mt.m_tt, mtt)
        self.assertEqual(mt.m_rt, mtr)
        self.assertEqual(mt.m_rp, mpr)
        self.assertEqual(mt.m_tp, mtp)
        # Mag
        self.assertAlmostEqual(mag.mag, moment_magnitude)
        self.assertEqual(mag.magnitude_type, "Mw")
        self.assertEqual(mag.evaluation_mode, "automatic")
Example #22
# -------------------------------------------------------------------------------
# Loop over networks
for ii in range(0,len(inventory)):
    ev_info.network = inventory[ii].code
    # Loop over stations
    for jj in range(0,len(inventory[ii])):
        ev_info.station = inventory[ii][jj].code
        ev_info.channel = channel # same as above

        ev_info.rlat = inventory[ii][jj].latitude
        ev_info.rlon = inventory[ii][jj].longitude
        ev_info.rtime

        # -------------------------------------------------------------------------------
        # subset_events: find events common to the Alaska centroid ring (cat_0) and the station ring (cat_ij)
        cat_subset = Catalog()
        for kk in range(len(cat)):
            dist = obspy.geodetics.base.locations2degrees(
                ev_info.rlat, ev_info.rlon,
                cat[kk].origins[0].latitude, cat[kk].origins[0].longitude)
            if st_minradius <= dist <= st_maxradius:
                cat_subset.append(cat[kk])
        print(cat_subset)
        # -------------------------------------------------------------------------------

        # Create station directory
        sta_dir = ev_info.network + '_' + ev_info.station
        odir = out_dir + sta_dir
        if not os.path.exists(odir):
            os.makedirs(odir)

        # save catalog subset for this station
        fname = odir + '_event_subset'
Example #23
def brightness(stations,
               nodes,
               lags,
               stream,
               threshold,
               thresh_type,
               template_length,
               template_saveloc,
               coherence_thresh,
               coherence_stations=['all'],
               coherence_clip=False,
               gap=2.0,
               clip_level=100,
               instance=0,
               pre_pick=0.2,
               plotsave=True,
               cores=1):
    r"""Function to calculate the brightness function in terms of energy for \
    a day of data over the entire network for a given grid of nodes.

    Note data in stream must be all of the same length and have the same
    sampling rates.

    :type stations: list
    :param stations: List of station names in the form where stations[i] \
        refers to nodes[i][:] and lags[i][:]
    :type nodes: list, tuple
    :param nodes: List of node points where nodes[i] refers to stations[i] \
        and nodes[:][:][0] is latitude in degrees, nodes[:][:][1] is \
        longitude in degrees, nodes[:][:][2] is depth in km.
    :type lags: :class: 'numpy.array'
    :param lags: Array of arrays where lags[i][:] refers to stations[i]. \
        lags[i][j] should be the delay to the nodes[i][j] for stations[i] in \
        seconds.
    :type stream: :class: `obspy.Stream`
    :param stream: Data through which to look for detections.
    :type threshold: float
    :param threshold: Threshold value for detection of template within the \
        brightness function
    :type thresh_type: str
    :param thresh_type: Either MAD or abs where MAD is the Median Absolute \
        Deviation and abs is an absolute brightness.
    :type template_length: float
    :param template_length: Length of template to extract in seconds
    :type template_saveloc: str
    :param template_saveloc: Path of where to save the templates.
    :type coherence_thresh: tuple of floats
    :param coherence_thresh: Threshold for removing incoherent peaks in the \
            network response, those below this will not be used as templates. \
            Must be in the form of (a,b) where the coherence is given by: \
            a-kchan/b where kchan is the number of channels used to compute \
            the coherence
    :type coherence_stations: list
    :param coherence_stations: List of stations to use in the coherence \
            thresholding - defaults to 'all' which uses all the stations.
    :type coherence_clip: tuple
    :param coherence_clip: Start and end in seconds of data to window around, \
            defaults to False, which uses all the data given.
    :type pre_pick: float
    :param pre_pick: Seconds before the detection time to include in template
    :type plotsave: bool
    :param plotsave: Save or show plots, if False will try and show the plots \
            on screen - as this is designed for bulk use this is set to \
            True to save any plots rather than show them if you create \
            them - changes the backend of matplotlib, so if it is set to \
            False you will see NO PLOTS!
    :type cores: int
    :param cores: Number of cores to use, defaults to 1.
    :type clip_level: float
    :param clip_level: Multiplier applied to the mean deviation of the energy \
                    as an upper limit, used to remove spikes (earthquakes, \
                    lightning, electrical spikes) from the energy stack.
    :type gap: float
    :param gap: Minimum inter-event time in seconds for detections

    :return: list of templates as :class: `obspy.Stream` objects
    """
    from eqcorrscan.core.template_gen import _template_gen
    if plotsave:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        plt.ioff()
    # from joblib import Parallel, delayed
    from multiprocessing import Pool, cpu_count
    from copy import deepcopy
    from obspy import read as obsread
    from obspy.core.event import Catalog, Event, Pick, WaveformStreamID, Origin
    from obspy.core.event import EventDescription, CreationInfo, Comment
    import obspy
    from eqcorrscan.utils import EQcorrscan_plotting as plotting
    # Check that we actually have the correct stations
    realstations = []
    for station in stations:
        st = stream.select(station=station)
        if st:
            realstations.append(station)
    del st
    stream_copy = stream.copy()
    # Force convert to int16
    for tr in stream_copy:
        # int16 max range is +/- 32767
        if max(abs(tr.data)) > 32767:
            tr.data = 32767 * (tr.data / max(abs(tr.data)))
            # Make sure that the data aren't clipped if they are high gain -
            # scale the data
        tr.data = tr.data.astype(np.int16)
    # The internal _node_loop converts energy to int16 too to conserve memory,
    # to do this it forces the maximum of a single energy trace to be 500 and
    # normalises to this level - this only works for fewer than 65 channels of
    # data
    if len(stream_copy) > 130:
        raise OverflowError('Too many streams; either re-code to cope with '
                            'more memory usage or less precision, or reduce '
                            'the data volume')
    detections = []
    detect_lags = []
    parallel = True
    plotvar = True
    mem_issue = False
    # Loop through each node in the input
    # Linear run
    print('Computing the energy stacks')
    if not parallel:
        for i in range(0, len(nodes)):
            print(i)
            if not mem_issue:
                j, a = _node_loop(stations, lags[:, i], stream, plot=True)
                if 'energy' not in locals():
                    energy = a
                else:
                    energy = np.concatenate((energy, a), axis=0)
                print('energy: ' + str(np.shape(energy)))
            else:
                j, filename = _node_loop(stations, lags[:, i], stream, i,
                                         mem_issue)
        energy = np.array(energy)
        print(np.shape(energy))
    else:
        # Parallel run
        num_cores = cores
        if num_cores > len(nodes):
            num_cores = len(nodes)
        if num_cores > cpu_count():
            num_cores = cpu_count()
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_node_loop,
                             args=(stations, lags[:, i], stream, i, clip_level,
                                   mem_issue, instance))
            for i in range(len(nodes))
        ]
        pool.close()
        if not mem_issue:
            print('Computing the cumulative network response from memory')
            energy = [p.get() for p in results]
            pool.join()
            energy.sort(key=lambda tup: tup[0])
            energy = [node[1] for node in energy]
            energy = np.concatenate(energy, axis=0)
            print(energy.shape)
        else:
            pool.join()
    # Now compute the cumulative network response and then detect possible
    # events
    if not mem_issue:
        print(energy.shape)
        indeces = np.argmax(energy, axis=0)  # Indices of maximum energy
        print(indeces.shape)
        cum_net_resp = np.array([np.nan] * len(indeces))
        cum_net_resp[0] = energy[indeces[0]][0]
        peak_nodes = [nodes[indeces[0]]]
        for i in range(1, len(indeces)):
            cum_net_resp[i] = energy[indeces[i]][i]
            peak_nodes.append(nodes[indeces[i]])
        del energy, indeces
    else:
        print('Reading the temp files and computing network response')
        node_splits = len(nodes) // num_cores
        indeces = [range(node_splits)]
        for i in range(1, num_cores - 1):
            indeces.append(range(node_splits * i, node_splits * (i + 1)))
        indeces.append(range(node_splits * (i + 1), len(nodes)))
        pool = Pool(processes=num_cores)
        results = [
            pool.apply_async(_cum_net_resp, args=(indeces[i], instance))
            for i in range(num_cores)
        ]
        pool.close()
        results = [p.get() for p in results]
        pool.join()
        responses = [result[0] for result in results]
        print(np.shape(responses))
        node_indeces = [result[1] for result in results]
        cum_net_resp = np.array(responses)
        indeces = np.argmax(cum_net_resp, axis=0)
        print(indeces.shape)
        print(cum_net_resp.shape)
        cum_net_resp = np.array(
            [cum_net_resp[indeces[i]][i] for i in range(len(indeces))])
        peak_nodes = [
            nodes[node_indeces[indeces[i]][i]] for i in range(len(indeces))
        ]
        del indeces, node_indeces
    if plotvar:
        cum_net_trace = deepcopy(stream[0])
        cum_net_trace.data = cum_net_resp
        cum_net_trace.stats.station = 'NR'
        cum_net_trace.stats.channel = ''
        cum_net_trace.stats.network = 'Z'
        cum_net_trace.stats.location = ''
        cum_net_trace.stats.starttime = stream[0].stats.starttime
        cum_net_trace = obspy.Stream(cum_net_trace)
        cum_net_trace += stream.select(channel='*N')
        cum_net_trace += stream.select(channel='*1')
        cum_net_trace.sort(['network', 'station', 'channel'])
        # np.save('cum_net_resp.npy',cum_net_resp)
        #     cum_net_trace.plot(size=(800,600), equal_scale=False,\
        #                        outfile='NR_timeseries.eps')

    # Find detection within this network response
    print('Finding detections in the cumulative network response')
    detections = _find_detections(cum_net_resp, peak_nodes, threshold,
                                  thresh_type, stream[0].stats.sampling_rate,
                                  realstations, gap)
    del cum_net_resp
    templates = []
    nodesout = []
    good_detections = []
    if detections:
        print('Converting detections in to templates')
        # Generate a catalog of detections
        detections_cat = Catalog()
        for j, detection in enumerate(detections):
            print('Converting for detection ' + str(j) + ' of ' +
                  str(len(detections)))
            # Create an event for each detection
            event = Event()
            # Set up some header info for the event
            event.event_descriptions.append(EventDescription())
            event.event_descriptions[0].text = 'Brightness detection'
            event.creation_info = CreationInfo(agency_id='EQcorrscan')
            copy_of_stream = deepcopy(stream_copy)
            # Convert detections to obspy.core.event type -
            # name of detection template is the node.
            node = (detection.template_name.split('_')[0],
                    detection.template_name.split('_')[1],
                    detection.template_name.split('_')[2])
            print(node)
            # Look up node in nodes and find the associated lags
            index = nodes.index(node)
            detect_lags = lags[:, index]
            ksta = Comment(text='Number of stations=' + str(len(detect_lags)))
            event.origins.append(Origin())
            event.origins[0].comments.append(ksta)
            event.origins[0].time = copy_of_stream[0].stats.starttime +\
                detect_lags[0] + detection.detect_time
            event.origins[0].latitude = node[0]
            event.origins[0].longitude = node[1]
            event.origins[0].depth = node[2]
            for i, detect_lag in enumerate(detect_lags):
                station = stations[i]
                st = copy_of_stream.select(station=station)
                if len(st) != 0:
                    for tr in st:
                        _waveform_id = WaveformStreamID(
                            station_code=tr.stats.station,
                            channel_code=tr.stats.channel,
                            network_code='NA')
                        event.picks.append(
                            Pick(waveform_id=_waveform_id,
                                 time=tr.stats.starttime + detect_lag +
                                 detection.detect_time + pre_pick,
                                 onset='emergent',
                                 evaluation_mode='automatic'))
            print('Generating template for detection: ' + str(j))
            template = (_template_gen(event.picks, copy_of_stream,
                                      template_length, 'all'))
            template_name = template_saveloc + '/' +\
                str(template[0].stats.starttime) + '.ms'
            # In the interests of RAM conservation we write then read
            # Check coherency here!
            temp_coher, kchan = coherence(template, coherence_stations,
                                          coherence_clip)
            coh_thresh = float(coherence_thresh[0]) - kchan / \
                float(coherence_thresh[1])
            if temp_coher > coh_thresh:
                template.write(template_name, format="MSEED")
                print('Written template as: ' + template_name)
                print('---------------------------------coherence LEVEL: ' +
                      str(temp_coher))
                coherent = True
            else:
                print('Template was incoherent, coherence level: ' +
                      str(temp_coher))
                coherent = False
            del copy_of_stream, tr, template
            if coherent:
                templates.append(obsread(template_name))
                nodesout += [node]
                good_detections.append(detection)
            else:
                print('No template for you')
    if plotvar:
        all_detections = [(cum_net_trace[-1].stats.starttime +
                           detection.detect_time).datetime
                          for detection in detections]
        good_detections = [(cum_net_trace[-1].stats.starttime +
                            detection.detect_time).datetime
                           for detection in good_detections]
        if not plotsave:
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             title='Network response')
            # cum_net_trace.plot(size=(800,600), equal_scale=False)
        else:
            savefile = 'plots/' +\
                cum_net_trace[0].stats.starttime.datetime.strftime('%Y%m%d') +\
                '_NR_timeseries.pdf'
            plotting.NR_plot(cum_net_trace[0:-1],
                             obspy.Stream(cum_net_trace[-1]),
                             detections=good_detections,
                             size=(18.5, 10),
                             save=savefile,
                             title='Network response')
    nodesout = list(set(nodesout))
    return templates, nodesout
Example #24
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude",
                                           tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"], "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
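
Like the PHA reader above, this parser registers with obspy.read_events; a minimal sketch with a placeholder filename for a GCMT NDK file:

from obspy import read_events
cat = read_events('gcmt_events.ndk')  # placeholder filename
print(len(cat))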
Example #25
out_3 = []  # list of lists of pick information

for ev in events_clust: 
    for i, events in enumerate(out):
        out_et = out[i][1::11]  # time is the 1st entry; a step of 11 picks out all times
        out_lat = out[i][6::11]  # all latitudes
        out_long = out[i][7::11]  # all longitudes
        if len(out_et) > 1:
            et = UTCDateTime(out_et[0])
            lats = out_lat[0]
            lons = out_long[0]
            if str(out[i][0]) == str(ev):
                out_3.append(out[i])


db_catalog = Catalog()  # start with an empty catalog
# Append picks and events from the database to the catalog
for i, events in enumerate(out_3):
    event = Event()
    picks = Pick()
    origin = Origin()
    arrival = Arrival()
    e_time = out_3[i][1::33]
    sta = out_3[i][2::33]
    cha = out_3[i][3::33]
    phase = out_3[i][4::33]
    pick_time = out_3[i][5::33]  # phase pick time
    lat = out_3[i][6::33]
    lon = out_3[i][7::33]
    orid = out_3[i][8::33]
    dep = out_3[i][9::33]
Example #26
    def test_read_write(self):
        """
        Function to test the read and write capabilities of sfile_util.
        """
        import os
        from obspy.core.event import Catalog
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.core.event import read_events
        else:
            from obspy.core.event import readEvents as read_events

        # Set-up a test event
        test_event = basic_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Write the catalog
        test_cat.write("Test_catalog.xml", format='QUAKEML')
        # Read and check
        read_cat = read_events("Test_catalog.xml")
        os.remove("Test_catalog.xml")
        self.assertEqual(read_cat[0].resource_id, test_cat[0].resource_id)
        self.assertEqual(read_cat[0].picks, test_cat[0].picks)
        self.assertEqual(read_cat[0].origins[0].resource_id,
                         test_cat[0].origins[0].resource_id)
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residuel_RMS is not part of the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes, test_cat[0].magnitudes)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        self.assertEqual(read_cat[0].amplitudes[0].resource_id,
                         test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].unit,
                         test_cat[0].amplitudes[0].unit)
        self.assertEqual(read_cat[0].amplitudes[0].generic_amplitude,
                         test_cat[0].amplitudes[0].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[0].pick_id,
                         test_cat[0].amplitudes[0].pick_id)
        self.assertEqual(read_cat[0].amplitudes[0].waveform_id,
                         test_cat[0].amplitudes[0].waveform_id)

        # Check the read-write s-file functionality
        sfile = eventtosfile(test_cat[0],
                             userID='TEST',
                             evtype='L',
                             outdir='.',
                             wavefiles='test',
                             explosion=True,
                             overwrite=True)
        del read_cat
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        read_cat += readpicks(sfile)
        os.remove(sfile)
        self.assertEqual(read_cat[0].picks[0].time, test_cat[0].picks[0].time)
        self.assertEqual(read_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(read_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(read_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(read_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(read_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(read_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # assert read_cat[0].origins[0].resource_id ==\
        #     test_cat[0].origins[0].resource_id
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residuel_RMS is not part of the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(read_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(read_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(read_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(read_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(read_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(read_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # assert read_cat[0].amplitudes[0].resource_id ==\
        #     test_cat[0].amplitudes[0].resource_id
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
        del read_cat
        # assert read_cat[0].amplitudes[0].pick_id ==\
        #     test_cat[0].amplitudes[0].pick_id
        # assert read_cat[0].amplitudes[0].waveform_id ==\
        #     test_cat[0].amplitudes[0].waveform_id

        # Test the wrappers for PICK and EVENTINFO classes
        picks, evinfo = eventtopick(test_cat)
        # Test the conversion back
        conv_cat = Catalog()
        conv_cat.append(picktoevent(evinfo, picks))
        self.assertEqual(conv_cat[0].picks[0].time, test_cat[0].picks[0].time)
        self.assertEqual(conv_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(conv_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(conv_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(conv_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # self.assertEqual(read_cat[0].origins[0].resource_id,
        #                  test_cat[0].origins[0].resource_id)
        self.assertEqual(conv_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not part of the QuakeML format
        self.assertEqual(conv_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(conv_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(conv_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(conv_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(conv_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(conv_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(conv_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(conv_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # self.assertEqual(read_cat[0].amplitudes[0].resource_id,
        #                  test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(conv_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(conv_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
Example #27
from obspy import Catalog, read_events


def recursive_read_events(xmls):
    """Read every event file in xmls and return the combined list of events."""
    cat = Catalog()
    for x in xmls:
        cat.events += read_events(x).events
    return cat.events
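
A minimal usage sketch (the file names are hypothetical): merge several QuakeML files into a single Catalog.

# Hypothetical usage: combine two QuakeML files into one Catalog
from obspy import Catalog
merged = Catalog(events=recursive_read_events(['cat_2019.xml', 'cat_2020.xml']))
print(len(merged))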
Example #28
import os
import datetime

from obspy.core.event import (Axis, Catalog, CreationInfo, DataUsed, Event,
                              EventDescription, FocalMechanism, Magnitude,
                              MomentTensor, NodalPlane, NodalPlanes, Origin,
                              PrincipalAxes, ResourceIdentifier, Tensor)

# getFreq, PrincipalAxis and stressRegime are project-local helpers from
# pytdmt and are assumed to be available in this module.


def makeCatalog(StazList, mt, scale, args):

    epi = args.epi.rsplit()
    model = args.model.split(os.sep)
    NrSt = len(StazList)
    NrCo = NrSt * 3
    (Fmin, Fmax) = getFreq(args)
    Tmin = ('%.0f' % (1 / Fmax))
    Tmax = ('%.0f' % (1 / Fmin))
    mo = ('%.3e' % (mt[0]))
    mw = ('%.2f' % (mt[1]))
    Pdc = ('%.2f' % (float(mt[2]) / 100))
    Pclvd = ('%.2f' % (float(mt[3]) / 100))

    Tval = ('%10.3e' % (mt[22]))
    Tplg = ('%4.1f' % (mt[23]))
    Tazi = ('%5.1f' % (mt[24]))
    Nval = ('%10.3e' % (mt[25]))
    Nplg = ('%4.1f' % (mt[26]))
    Nazi = ('%5.1f' % (mt[27]))
    Pval = ('%10.3e' % (mt[28]))
    Pplg = ('%4.1f' % (mt[29]))
    Pazi = ('%5.1f' % (mt[30]))

    STp1 = ('%5.1f' % (mt[31]))
    DPp1 = ('%4.1f' % (mt[32]))
    RAp1 = ('%6.1f' % (mt[33]))
    STp2 = ('%5.1f' % (mt[34]))
    DPp2 = ('%4.1f' % (mt[35]))
    RAp2 = ('%6.1f' % (mt[36]))
    var = ('%.2f' % (mt[37]))
    qua = ('%d' % (mt[38]))
    mij = [mt[4], mt[5], mt[6], mt[7], mt[8], mt[9]]

    mm0 = str('%10.3e' % (mij[0]))
    mm1 = str('%10.3e' % (mij[1]))
    mm2 = str('%10.3e' % (mij[2]))
    mm3 = str('%10.3e' % (mij[3]))
    mm4 = str('%10.3e' % (mij[4]))
    mm5 = str('%10.3e' % (mij[5]))
    # Aki convention
    Mrr = mm5
    Mtt = mm0
    Mff = mm1
    Mrt = mm3
    Mrf = mm4
    Mtf = mm2

    # stress regime
    A1 = PrincipalAxis(val=mt[22], dip=mt[23], strike=mt[24])
    A2 = PrincipalAxis(val=mt[25], dip=mt[26], strike=mt[27])
    A3 = PrincipalAxis(val=mt[28], dip=mt[29], strike=mt[30])

    (regime, sh) = stressRegime(A1, A2, A3)
    sh = ('%5.1f' % (sh))

    #### Build classes #################################
    #
    # The resource id is, by definition, the event origin time.

    res_id = ResourceIdentifier(args.ori)
    nowUTC = datetime.datetime.utcnow()
    info = CreationInfo(author="pytdmt", version="2.4", creation_time=nowUTC)
    evOrigin = Origin(resource_id=res_id,
                      time=args.ori,
                      latitude=epi[0],
                      longitude=epi[1],
                      depth=epi[2],
                      earth_model_id=model[-1],
                      creation_info=info)
    # Magnitudes
    magnitude = Magnitude(mag=mw, magnitude_type="Mw")
    # Nodal Planes
    np1 = NodalPlane(strike=STp1, dip=DPp1, rake=RAp1)
    np2 = NodalPlane(strike=STp2, dip=DPp2, rake=RAp2)
    planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    # Principal axes
    Taxe = Axis(azimuth=Tazi, plunge=Tplg, length=Tval)
    Naxe = Axis(azimuth=Nazi, plunge=Nplg, length=Nval)
    Paxe = Axis(azimuth=Pazi, plunge=Pplg, length=Pval)
    axes = PrincipalAxes(t_axis=Taxe, p_axis=Paxe, n_axis=Naxe)
    # MT elements
    MT = Tensor(m_rr=Mrr, m_tt=Mtt, m_pp=Mff, m_rt=Mrt, m_rp=Mrf, m_tp=Mtf)
    # Stress regime
    regStr = 'Stress regime: ' + regime + ' -  SH = ' + sh
    strDes = EventDescription(regStr)
    # MT dataset
    dataInfo = DataUsed(wave_type="combined",
                        station_count=NrSt,
                        component_count=NrCo,
                        shortest_period=Tmin,
                        longest_period=Tmax)
    source = MomentTensor(data_used=dataInfo,
                          scalar_moment=mo,
                          tensor=MT,
                          variance_reduction=var,
                          double_couple=Pdc,
                          clvd=Pclvd,
                          iso=0)
    focMec = FocalMechanism(moment_tensor=source,
                            nodal_planes=planes,
                            principal_axes=axes,
                            azimuthal_gap=-1)

    # Initialize the event catalog
    mtSolution = Event(creation_info=info)
    mtSolution.origins.append(evOrigin)
    mtSolution.magnitudes.append(magnitude)
    mtSolution.focal_mechanisms.append(focMec)
    mtSolution.event_descriptions.append(strDes)

    cat = Catalog()
    cat.append(mtSolution)

    return cat
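
A hedged follow-up sketch, assuming StazList, mt, scale and args come from the surrounding pytdmt inversion: the returned Catalog can be serialized straight to QuakeML.

# Hypothetical follow-up: write the assembled moment-tensor solution to disk
cat = makeCatalog(StazList, mt, scale, args)
cat.write('mt_solution.xml', format='QUAKEML')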
Example #29
import warnings
from collections import Counter

from obspy import Catalog


def filter_picks(catalog,
                 stations=None,
                 channels=None,
                 networks=None,
                 locations=None,
                 top_n_picks=None,
                 evaluation_mode='all'):
    """
    Filter events in the catalog based on a number of parameters.

    :param catalog: Catalog to filter.
    :type catalog: obspy.core.event.Catalog
    :param stations: List for stations to keep picks from.
    :type stations: list
    :param channels: List of channels to keep picks from.
    :type channels: list
    :param networks: List of networks to keep picks from.
    :type networks: list
    :param locations: List of location codes to keep picks from.
    :type locations: list
    :param top_n_picks: Filter only the top N most used station-channel pairs.
    :type top_n_picks: int
    :param evaluation_mode:
        Select only 'manual' or 'automatic' picks, or use 'all' to keep
        both (default).
    :type evaluation_mode: str


    :return:
        Filtered Catalog - if events are left with no picks, they are removed
        from the catalog.
    :rtype: obspy.core.event.Catalog

    .. note::
        Filters first by station, then by channel, then by network. If
        top_n_picks is used, that filter is applied last, after all the
        other filters.

    .. note::
        Does not work in place on the catalog; your input catalog is safe
        unless you overwrite it.

    .. note:: Doesn't expand wildcard characters.

    .. rubric:: Example

    >>> from obspy.clients.fdsn import Client
    >>> from eqcorrscan.utils.catalog_utils import filter_picks
    >>> from obspy import UTCDateTime
    >>> client = Client('NCEDC')
    >>> t1 = UTCDateTime(2004, 9, 28)
    >>> t2 = t1 + 86400
    >>> catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=3,
    ...                             minlatitude=35.7, maxlatitude=36.1,
    ...                             minlongitude=-120.6, maxlongitude=-120.2,
    ...                             includearrivals=True)
    >>> print(len(catalog))
    12
    >>> filtered_catalog = filter_picks(catalog, stations=['BMS', 'BAP',
    ...                                                    'PAG', 'PAN',
    ...                                                    'PBI', 'PKY',
    ...                                                    'YEG', 'WOF'])
    >>> print(len(filtered_catalog))
    12
    >>> stations = []
    >>> for event in filtered_catalog:
    ...     for pick in event.picks:
    ...         stations.append(pick.waveform_id.station_code)
    >>> print(sorted(list(set(stations))))
    ['BAP', 'BMS', 'PAG', 'PAN', 'PBI', 'PKY', 'WOF', 'YEG']
    """
    # Don't work in place on the catalog
    filtered_catalog = catalog.copy()

    if stations:
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if pick.waveform_id.station_code in stations
            ]
    if channels:
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if pick.waveform_id.channel_code in channels
            ]
    if networks:
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if pick.waveform_id.network_code in networks
            ]
    if locations:
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if pick.waveform_id.location_code in locations
            ]
    if evaluation_mode == 'manual':
        for event in filtered_catalog:
            event.picks = [
                pick for pick in event.picks
                if pick.evaluation_mode == 'manual'
            ]
    elif evaluation_mode == 'automatic':
        for event in filtered_catalog:
            event.picks = [
                pick for pick in event.picks
                if pick.evaluation_mode == 'automatic'
            ]
    elif evaluation_mode != 'all':
        warnings.warn('Unrecognised evaluation_mode: %s, using all picks' %
                      evaluation_mode)
    if top_n_picks:
        all_picks = []
        for event in filtered_catalog:
            all_picks += [(pick.waveform_id.station_code,
                           pick.waveform_id.channel_code)
                          for pick in event.picks]
        counted = Counter(all_picks).most_common()
        all_picks = []
        # Work around Counter ordering differences between Python 2 and 3:
        # walk down from the highest count, alphabetical within ties
        for i in range(counted[0][1]):
            highest = [
                item[0] for item in counted if item[1] >= counted[0][1] - i
            ]
            # Sort them by alphabetical order in station
            highest = sorted(highest, key=lambda tup: tup[0])
            for stachan in highest:
                if stachan not in all_picks:
                    all_picks.append(stachan)
            if len(all_picks) > top_n_picks:
                all_picks = all_picks[0:top_n_picks]
                break
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if (pick.waveform_id.station_code,
                    pick.waveform_id.channel_code) in all_picks
            ]
    # Remove events without picks
    tmp_catalog = Catalog()
    for event in filtered_catalog:
        if len(event.picks) > 0:
            tmp_catalog.append(event)

    return tmp_catalog
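
An offline usage sketch, assuming cat is any obspy Catalog that already contains picks (unlike the doctest above, this needs no network access):

# Hypothetical offline usage: keep only manual picks from two stations
trimmed = filter_picks(cat, stations=['BMS', 'BAP'], evaluation_mode='manual')
print(len(trimmed))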
Example #30
import os
from datetime import datetime
from urllib.request import urlopen

from obspy import Catalog, UTCDateTime, read_events
from obspy.geodetics import locations2degrees


def gcmt_catalog(
        t_start,
        t_end,
        min_latitude,
        max_latitude,
        min_longitude,
        max_longitude,
        latitude,
        longitude,
        radius_min,
        radius_max,
        d_min,
        d_max,
        mag_min,
        mag_max,
        link_gcmt='http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog'):
    """
    Function for downloading data from GCMT
    :param t_start:
    :param t_end:
    :param min_latitude:
    :param max_latitude:
    :param min_longitude:
    :param max_longitude:
    :param latitude:
    :param longitude:
    :param radius_min:
    :param radius_max:
    :param d_min:
    :param d_max:
    :param mag_min:
    :param mag_max:
    :param link_gcmt:
    :return:
    """
    # for the time record
    tic = datetime.now()

    try:
        import obspyDMT
        dmt_path = obspyDMT.__path__[0]
    except Exception as error:
        print("WARNING: %s" % error)
        dmt_path = '.'
    gcmt_cat_path = os.path.join(dmt_path, 'gcmt_catalog')
    if not os.path.exists(gcmt_cat_path):
        os.mkdir(gcmt_cat_path)
        os.mkdir(os.path.join(gcmt_cat_path, 'NEW_MONTHLY'))
        os.mkdir(os.path.join(gcmt_cat_path, 'COMBO'))

    # creating a time list
    t_list = []
    # floor division so that range() below receives an int under Python 3
    delta_t = int(UTCDateTime(t_end) - UTCDateTime(t_start) + 1) // 86400

    yymm = []
    for i in range(delta_t + 1):
        t_list.append(
            (UTCDateTime(t_start) + i * 60 * 60 * 24).strftime('%Y/%m/%d'))
        yy_tmp, mm_tmp, dd_tmp = t_list[i].split('/')
        yymm.append('%s%s' % (yy_tmp, mm_tmp))
    yymmset = set(yymm)
    yymmls = list(yymmset)
    yymmls.sort()

    # starting to search for all events in the time window given by the user:
    cat = Catalog()
    yy_ret = []
    mm_ret = []
    remotefile_add = False

    for i in range(len(yymmls)):
        try:
            yy = yymmls[i][0:4]
            mm = yymmls[i][4:6]
            if int(yy) < 2006:
                month_year = [
                    'jan', 'feb', 'mar', 'apr', 'may', 'june', 'july', 'aug',
                    'sept', 'oct', 'nov', 'dec'
                ]
            else:
                month_year = [
                    'jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug',
                    'sep', 'oct', 'nov', 'dec'
                ]
            if int(yy) >= 2005:
                new_monthly = 'NEW_MONTHLY'
                file_to_open = os.path.join(
                    gcmt_cat_path, new_monthly,
                    '%s%s.ndk' % (month_year[int(mm) - 1], yy[-2:]))
                remotefile_add = '%s/%s/%s/%s%s.ndk' \
                                 % (link_gcmt, new_monthly, yy,
                                    month_year[int(mm)-1], yy[-2:])
            else:
                new_monthly = 'COMBO'
                if yy in yy_ret:
                    continue
                file_to_open = os.path.join(gcmt_cat_path, new_monthly,
                                            '%s.qml' % yy)
            if not os.path.exists(file_to_open) and new_monthly != 'COMBO':
                print('Reading the data from the GCMT webpage: %s' % yymmls[i])
                remotefile = urlopen(remotefile_add)
                # urlopen returns bytes, so write the local copy in binary mode
                with open(file_to_open, 'wb') as search_fio:
                    search_fio.writelines(remotefile.readlines())
            print('Reading the data from local gcmt_catalog: %s' % yymmls[i])
            cat.extend(read_events(file_to_open))
            yy_ret.append(yy)
            mm_ret.append(mm)
        except Exception as error:
            print("ERROR: %s" % error)

    print('Done reading the data from the GCMT webpage.')
    toc = datetime.now()
    print('%s sec to retrieve the event info from GCMT.'
          % (toc - tic).total_seconds())

    filt1 = 'time >= %s' % t_start
    filt2 = 'time <= %s' % t_end
    cat = cat.filter(filt1, filt2)

    filt1 = 'magnitude >= %s' % mag_min
    filt2 = 'magnitude <= %s' % mag_max
    cat = cat.filter(filt1, filt2)

    filt1 = 'depth >= %s' % (float(d_min) * 1000.)
    filt2 = 'depth <= %s' % (float(d_max) * 1000.)
    cat = cat.filter(filt1, filt2)

    if None not in [min_latitude, max_latitude, min_longitude, max_longitude]:
        filt1 = 'latitude >= %s' % min_latitude
        filt2 = 'latitude <= %s' % max_latitude
        cat = cat.filter(filt1, filt2)

        filt1 = 'longitude >= %s' % min_longitude
        filt2 = 'longitude <= %s' % max_longitude
        cat = cat.filter(filt1, filt2)

    # final filtering for the remaining requests
    if None not in [latitude, longitude, radius_min, radius_max]:
        index_rm = []
        for i in range(len(cat)):
            e_lat = cat.events[i].preferred_origin().latitude or \
                    cat.events[i].origins[0].latitude
            e_lon = cat.events[i].preferred_origin().longitude or \
                    cat.events[i].origins[0].longitude
            dist = locations2degrees(latitude, longitude, e_lat, e_lon)
            if not radius_min <= dist <= radius_max:
                index_rm.append(i)
        index_rm.sort()
        index_rm.reverse()
        for i in range(len(index_rm)):
            del cat[index_rm[i]]

    return cat
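
A hedged example call (dates and limits are hypothetical): fetch all global GCMT solutions with Mw >= 6.0 for January 2015, leaving the rectangular and circular region filters disabled by passing None.

# Hypothetical call: global catalog, January 2015, Mw 6.0-10.0, 0-700 km depth
cat = gcmt_catalog(t_start='2015-01-01', t_end='2015-02-01',
                   min_latitude=None, max_latitude=None,
                   min_longitude=None, max_longitude=None,
                   latitude=None, longitude=None,
                   radius_min=None, radius_max=None,
                   d_min=0, d_max=700, mag_min=6.0, mag_max=10.0)
print(cat)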