Example #1
def test_with_quakeml():
    np1 = NodalPlane(strike=259, dip=74, rake=10)
    np2 = NodalPlane(strike=166, dip=80, rake=164)
    nodal_planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    focal = FocalMechanism(nodal_planes=nodal_planes)
    event = Event(focal_mechanisms=[focal])
    catalog = Catalog(events=[event])
    event_text = '''<shakemap-data code_version="4.0" map_version="1">
<earthquake id="us2000cmy3" lat="56.046" lon="-149.073" mag="7.9"
time="2018-01-23T09:31:42Z"
depth="25.00" locstring="280km SE of Kodiak, Alaska" netid="us" network=""/>
</shakemap-data>'''
    tempdir = tempfile.mkdtemp()
    try:
        xmlfile = os.path.join(tempdir, 'quakeml.xml')
        catalog.write(xmlfile, format="QUAKEML")
        eventfile = os.path.join(tempdir, 'event.xml')
        with open(eventfile, 'wt') as f:
            f.write(event_text)
        params = read_moment_quakeml(xmlfile)
        origin = Origin.fromFile(eventfile, momentfile=xmlfile)
    except Exception as e:
        assert False, str(e)
    finally:
        shutil.rmtree(tempdir)
Example #2
 def test_avoid_empty_stub_elements(self):
     """
     Test for a bug in reading QuakeML. Makes sure that some subelements do
     not get assigned stub elements, but rather stay None.
     """
     # Test 1: Test subelements of moment_tensor
     memfile = io.BytesIO()
     # create virtually empty FocalMechanism
     mt = MomentTensor(derived_origin_id='smi:local/abc')
     fm = FocalMechanism(moment_tensor=mt)
     event = Event(focal_mechanisms=[fm])
     cat = Catalog(events=[event])
     cat.write(memfile, format="QUAKEML", validate=True)
     # now read again, and make sure there are no stub subelements on
     # MomentTensor, but rather `None`
     memfile.seek(0)
     cat = read_events(memfile, format="QUAKEML")
     self.assertEqual(cat[0].focal_mechanisms[0].moment_tensor.tensor, None)
     self.assertEqual(
         cat[0].focal_mechanisms[0].moment_tensor.source_time_function,
         None)
     # Test 2: Test subelements of focal_mechanism
     memfile = io.BytesIO()
     # create virtually empty FocalMechanism
     fm = FocalMechanism()
     event = Event(focal_mechanisms=[fm])
     cat = Catalog(events=[event])
     cat.write(memfile, format="QUAKEML", validate=True)
     # now read again, and make sure there's no stub MomentTensor, but
     # rather `None`
     memfile.seek(0)
     cat = read_events(memfile, format="QUAKEML")
     self.assertEqual(cat[0].focal_mechanisms[0].nodal_planes, None)
     self.assertEqual(cat[0].focal_mechanisms[0].principal_axes, None)
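
The in-memory write/seek/read round trip used in this test is a reusable pattern. A minimal standalone sketch (assuming only that ObsPy is installed):

import io
from obspy import read_events
from obspy.core.event import Catalog, Event

buf = io.BytesIO()
# write an essentially empty event and read it straight back
Catalog(events=[Event()]).write(buf, format="QUAKEML", validate=True)
buf.seek(0)
cat = read_events(buf, format="QUAKEML")
assert cat[0].focal_mechanisms == []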
Example #3
 def export_picks(self,
                  filename,
                  start_trace=None,
                  end_trace=None,
                  format="NLLOC_OBS",
                  debug=False,
                  **kwargs):
     """
     """
     event_list = []
     for trace in self.traces[start_trace:end_trace]:
         event_list.extend([Event(picks=[pick]) for pick in trace.events])
     # Export to desired format
     if format == 'NLLOC_OBS':
         basename, ext = os.path.splitext(filename)
         for event in event_list:
             ts = event.picks[0].time.strftime("%Y%m%d%H%M%S%f")
             event_filename = "%s_%s%s" % (basename, ts, ext)
             if debug:
                 print "Generating event file {}".format(event_filename)
             event.write(event_filename, format=format)
     else:
         event_catalog = Catalog(event_list)
         if debug:
             print "Generating event file {}".format(filename)
         event_catalog.write(filename, format=format, **kwargs)
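
A hypothetical usage sketch for the method above; `picker` stands in for whatever object carries the `traces` attribute, since its class is not shown in the snippet:

# NLLOC_OBS: one file per picked event, named e.g. picks_20180123093142000000.obs
picker.export_picks('picks.obs', start_trace=0, end_trace=10, debug=True)

# any other ObsPy event format: all picks go into a single catalog file
picker.export_picks('picks.xml', format='QUAKEML')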
Example #4
def parse_files(fnames):
    """Parses all given files for seiscomp xml"""
    j = 0
    out = Catalog()
    for i, fname in enumerate(fnames):
        print('read ' + fname)
        out += readSeisComPEventXML0_6(fname)
        if (i + 1) % 100 == 0 or i == len(fnames) - 1:
            out_fname = str(j) + '.xml'
            print('write %d events to %s\n' % (len(out), out_fname))
            out.write(out_fname, 'QUAKEML')
            out = Catalog()
            j += 1
Example #5
def parse_files(fnames):
    """Parses all given files for seiscomp xml"""
    j = 0
    out = Catalog()
    for i, fname in enumerate(fnames):
        print('read ' + fname)
        out += readSeisComPEventXML0_6(fname)
        if (i + 1) % 100 == 0 or i == len(fnames) - 1:
            out_fname = str(j) + '.xml'
            print('write %d events to %s\n' % (len(out), out_fname))
            out.write(out_fname, 'QUAKEML')
            out = Catalog()
            j += 1
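
A usage sketch covering the two identical variants above; `readSeisComPEventXML0_6` is assumed importable from the same project, and the input directory is hypothetical:

import glob

# batches of 100 input files are merged and written as 0.xml, 1.xml, ...
parse_files(sorted(glob.glob('seiscomp_events/*.xml')))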
Example #6
 def test_write_pha_minimal(self):
     ori = Origin(time=UTC(0), latitude=42, longitude=43, depth=10000)
     pick = Pick(time=UTC(10),
                 phase_hint='S',
                 waveform_id=WaveformStreamID(station_code='STA'))
     del ori.latitude_errors
     del ori.longitude_errors
     del ori.depth_errors
     cat = Catalog([Event(origins=[ori], picks=[pick])])
     with NamedTemporaryFile() as tf:
         tempfile = tf.name
         with self.assertWarnsRegex(UserWarning, 'Missing mag'):
             cat.write(tempfile, 'HYPODDPHA')
         cat2 = read_events(tempfile)
     self.assertEqual(len(cat2), 1)
     self.assertEqual(len(cat2[0].picks), 1)
Example #7
 def test_focal_mechanism_write_read(self):
     """
     Test for a bug in reading a FocalMechanism without MomentTensor from a
     QuakeML file. Makes sure that FocalMechanism.moment_tensor stays None
     if no MomentTensor is in the file.
     """
     memfile = io.BytesIO()
     # create virtually empty FocalMechanism
     fm = FocalMechanism()
     event = Event(focal_mechanisms=[fm])
     cat = Catalog(events=[event])
     cat.write(memfile, format="QUAKEML", validate=True)
     # now read again, and make sure there's no stub MomentTensor, but
     # rather `None`
     memfile.seek(0)
     cat = read_events(memfile, format="QUAKEML")
     self.assertEqual(cat[0].focal_mechanisms[0].moment_tensor, None)
Example #8
def test_with_quakeml():
    np1 = NodalPlane(strike=259, dip=74, rake=10)
    np2 = NodalPlane(strike=166, dip=80, rake=164)
    nodal_planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    taxis = Axis(plunge=40, azimuth=70)
    naxis = Axis(plunge=50, azimuth=80)
    paxis = Axis(plunge=60, azimuth=90)
    paxes = PrincipalAxes(t_axis=taxis,
                          n_axis=naxis,
                          p_axis=paxis)
    focal = FocalMechanism(nodal_planes=nodal_planes,
                           principal_axes=paxes)
    event = Event(focal_mechanisms=[focal])
    catalog = Catalog(events=[event])
    event_text = '''<shakemap-data code_version="4.0" map_version="1">
<earthquake id="us2000cmy3" lat="56.046" lon="-149.073" mag="7.9"
time="2018-01-23T09:31:42Z"
depth="25.00" locstring="280km SE of Kodiak, Alaska" netid="us" network=""/>
</shakemap-data>'''
    tempdir = tempfile.mkdtemp()
    try:
        xmlfile = os.path.join(tempdir, 'quakeml.xml')
        catalog.write(xmlfile, format="QUAKEML")
        eventfile = os.path.join(tempdir, 'event.xml')
        with open(eventfile, 'wt') as f:
            f.write(event_text)
        params = read_moment_quakeml(xmlfile)
        assert params['moment']['NP1']['strike'] == 259.0
        assert params['moment']['NP1']['dip'] == 74.0
        assert params['moment']['NP1']['rake'] == 10.0
        assert params['moment']['NP2']['strike'] == 166.0
        assert params['moment']['NP2']['dip'] == 80.0
        assert params['moment']['NP2']['rake'] == 164.0
        origin = Origin.fromFile(eventfile, momentfile=xmlfile)
        assert origin.mag == 7.9
        assert origin.lat == 56.046
        assert origin.lon == -149.073
        assert origin.id == 'us2000cmy3'
    except Exception as e:
        assert False, str(e)
    finally:
        shutil.rmtree(tempdir)
Example #9
    def make_xml(self):
        client = Client("IRIS")
        cat = Catalog()  # empty earthquake catalogue
        print('')

        # Method to retrieve events from IRIS based on event ID and create an xml file
        for event_id in self.ieb_events['IRIS_ID']:
            try:
                print('Requesting Information for event: ' + str(event_id))
                IRIS_event = client.get_events(eventid=int(event_id))[0]
                cat.append(IRIS_event)
            except FDSNException:
                print('')
                print('Error!!: No Event Information for ' + str(event_id))

        print('')
        print("Resulting Earthquake Catalogue:")
        print(cat)
        new_filename = os.path.splitext(self.ieb_filename)[0] + '.xml'
        cat.write(filename=new_filename, format="QUAKEML")
Example #10
 def test_issue_2339(self):
     """
     Make sure an empty EventDescription object does not prevent a catalog
     from being saved to disk and re-read, while still being equal.
     """
     # create a catalog with an empty event description
     empty_description = EventDescription()
     cat1 = Catalog(events=[read_events()[0]])
     cat1[0].event_descriptions.append(empty_description)
     # serialize the catalog using quakeml and re-read
     bio = io.BytesIO()
     cat1.write(bio, 'quakeml')
     bio.seek(0)
     cat2 = read_events(bio)
     # the text of the empty EventDescription instances should be equal
     text1 = cat1[0].event_descriptions[-1].text
     text2 = cat2[0].event_descriptions[-1].text
     self.assertEqual(text1, text2)
     # the two catalogs should be equal
     self.assertEqual(cat1, cat2)
Example #11
 def export_picks(self, filename, trace_list=None, format="NLLOC_OBS", debug=False, **kwargs):
     """
     """
     trace_list = self.traces if trace_list is None else trace_list
     event_list = []
     for trace in trace_list:
         event_list.extend([Event(picks=[pick]) for pick in trace.events])
     # Export to desired format
     if format == 'NLLOC_OBS':
         basename, ext = os.path.splitext(filename)
         for event in event_list:
             ts = event.picks[0].time.strftime("%Y%m%d%H%M%S%f")
             event_filename = "%s_%s%s" % (basename, ts, ext)
             if debug:
                 print "Generating event file {}".format(event_filename)
             event.write(event_filename, format=format)
     else:
         event_catalog = Catalog(event_list)
         if debug:
             print "Generating event file {}".format(filename)
         event_catalog.write(filename, format=format, **kwargs)
Example #12
File: io.py Project: preinh/RF
def convert_dmteventfile():
    eventsfile1 = os.path.join(conf.dmt_path, 'EVENT', 'event_list')
    eventsfile2 = os.path.join(conf.dmt_path, 'EVENT', 'events.xml')
    with open(eventsfile1, 'rb') as f:
        events1 = pickle.load(f)
    events2 = Catalog()
    for ev in events1:
        orkw = {'time': ev['datetime'],
                'latitude': ev['latitude'],
                'longitude': ev['longitude'],
                'depth': ev['depth']}
        magkw = {'mag': ev['magnitude'],
                 'magnitude_type': ev['magnitude_type']}
        evdesargs = (ev['flynn_region'], 'Flinn-Engdahl region')
        evkw = {'resource_id': ev['event_id'],
                'event_type': 'earthquake',
                'creation_info': CreationInfo(author=ev['author']),
                'event_descriptions': [EventDescription(*evdesargs)],
                'origins': [Origin(**orkw)],
                'magnitudes': [Magnitude(**magkw)]}
        events2.append(Event(**evkw))
    events2.write(eventsfile2, 'QUAKEML')
Example #13
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
Example #14
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
Example #15
    def _on_file_save(self):
        """
        Creates a new obspy.core.event.Magnitude object and writes the moment
        magnitude to it.
        """
        # Get the save filename.
        filename = QtGui.QFileDialog.getSaveFileName(caption="Save as...")
        filename = os.path.abspath(str(filename))
        mag = Magnitude()
        mag.mag = self.final_result["moment_magnitude"]
        mag.magnitude_type = "Mw"
        mag.station_count = self.final_result["station_count"]
        mag.evaluation_mode = "manual"
        # Link to the used origin.
        mag.origin_id = self.current_state["event"].origins[0].resource_id
        mag.method_id = "Magnitude picker Krischer"
        # XXX: Potentially change once this program gets more stable.
        mag.evaluation_status = "preliminary"
        # Write the other results as Comments.
        mag.comments.append(Comment(
            "Seismic moment in Nm: %g" % self.final_result["seismic_moment"]))
        mag.comments.append(Comment(
            "Circular source radius in m: %.2f" %
            self.final_result["source_radius"]))
        mag.comments.append(Comment(
            "Stress drop in Pa: %.2f" % self.final_result["stress_drop"]))
        mag.comments.append(Comment(
            "Very rough Q estimation: %.1f" %
            self.final_result["quality_factor"]))

        event = copy.deepcopy(self.current_state["event"])
        event.magnitudes.append(mag)
        cat = Catalog()
        cat.events.append(event)
        cat.write(filename, format="quakeml")
Example #16
    def _on_file_save(self):
        """
        Creates a new obspy.core.event.Magnitude object and writes the moment
        magnitude to it.
        """
        # Get the save filename.
        filename = QtGui.QFileDialog.getSaveFileName(caption="Save as...")
        filename = os.path.abspath(str(filename))
        mag = Magnitude()
        mag.mag = self.final_result["moment_magnitude"]
        mag.magnitude_type = "Mw"
        mag.station_count = self.final_result["station_count"]
        mag.evaluation_mode = "manual"
        # Link to the used origin.
        mag.origin_id = self.current_state["event"].origins[0].resource_id
        mag.method_id = "Magnitude picker Krischer"
        # XXX: Potentially change once this program gets more stable.
        mag.evaluation_status = "preliminary"
        # Write the other results as Comments.
        mag.comments.append(Comment(
            "Seismic moment in Nm: %g" % self.final_result["seismic_moment"]))
        mag.comments.append(Comment(
            "Circular source radius in m: %.2f" %
            self.final_result["source_radius"]))
        mag.comments.append(Comment(
            "Stress drop in Pa: %.2f" % self.final_result["stress_drop"]))
        mag.comments.append(Comment(
            "Very rough Q estimation: %.1f" %
            self.final_result["quality_factor"]))

        event = copy.deepcopy(self.current_state["event"])
        event.magnitudes.append(mag)
        cat = Catalog()
        cat.events.append(event)
        cat.write(filename, format="quakeml")
Example #17
    def _load_events(self):
        self._load_events_helper()
        cache = {}
        notFound = defaultdict(int)
        oEvents = []
        missingStations = defaultdict(int)
        for e in self.eventList:
            if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
                cullList = []
                for a in e.preferred_origin.arrival_list:
                    if (len(a.net)): continue

                    seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                    newCode = None
                    if (seedid not in cache):
                        sc = a.sta
                        lonlat = self.isc_coords_dict[sc]
                        if (len(lonlat) == 0):
                            cullList.append(a)
                            continue
                        # end if

                        r = self.fdsn_inventory.getClosestStations(lonlat[0],
                                                                   lonlat[1],
                                                                   maxdist=1e3)
                        #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                        if (not r):
                            notFound[sc] += 1
                        else:
                            for cr in r[0]:
                                c = cr.split('.')[0]
                                newCode = c
                            # end for
                        # end if

                        if (newCode):
                            cache[seedid] = newCode
                        # end if
                    else:
                        newCode = cache[seedid]
                    # end if

                    if (newCode):
                        #print a.net, newCode
                        a.net = newCode

                        sc = self.fdsn_inventory.t[a.net][a.sta]
                        if (type(sc) == defaultdict):
                            cullList.append(a)
                            continue
                        # end if
                        da = gps2dist_azimuth(e.preferred_origin.lat,
                                              e.preferred_origin.lon, sc[1],
                                              sc[0])
                        dist = kilometers2degrees(da[0] / 1e3)
                        if (np.fabs(a.distance - dist) > 0.5):
                            cullList.append(a)
                        # end if
                    # end if
                # end for
                for c in cullList:
                    e.preferred_origin.arrival_list.remove(c)
            # end if

            # Create obspy event object
            ci = OCreationInfo(author='GA',
                               creation_time=UTCDateTime(),
                               agency_id='GA-iteration-1')
            oid = self.get_id()
            origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                             time=UTCDateTime(e.preferred_origin.utctime),
                             longitude=e.preferred_origin.lon,
                             latitude=e.preferred_origin.lat,
                             depth=e.preferred_origin.depthkm * 1e3,
                             method_id=OResourceIdentifier(id='unknown'),
                             earth_model_id=OResourceIdentifier(id='iasp91'),
                             evaluation_mode='automatic',
                             creation_info=ci)
            magnitude = OMagnitude(
                resource_id=OResourceIdentifier(id=self.get_id()),
                mag=e.preferred_magnitude.magnitude_value,
                magnitude_type=e.preferred_magnitude.magnitude_type,
                origin_id=OResourceIdentifier(id=oid),
                creation_info=ci)
            event = OEvent(resource_id=OResourceIdentifier(id=self.get_id()),
                           creation_info=ci,
                           event_type='earthquake')
            event.origins = [origin]
            event.magnitudes = [magnitude]
            event.preferred_magnitude_id = magnitude.resource_id
            event.preferred_origin_id = origin.resource_id

            # Insert old picks
            for a in e.preferred_origin.arrival_list:
                if (type(self.fdsn_inventory.t[a.net][a.sta]) == defaultdict):
                    missingStations[a.net + '.' + a.sta] += 1
                    continue
                # end if
                oldPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(a.utctime),
                    waveform_id=OWaveformStreamID(network_code=a.net,
                                                  station_code=a.sta,
                                                  channel_code=a.cha),
                    methodID=OResourceIdentifier('unknown'),
                    phase_hint=a.phase,
                    evaluation_mode='automatic',
                    creation_info=ci)

                oldArr = OArrival(resource_id=OResourceIdentifier(
                    id=oldPick.resource_id.id + "#"),
                                  pick_id=oldPick.resource_id,
                                  phase=oldPick.phase_hint,
                                  distance=a.distance,
                                  earth_model_id=OResourceIdentifier(
                                      'quakeml:ga.gov.au/earthmodel/iasp91'),
                                  creation_info=ci)

                event.picks.append(oldPick)
                event.preferred_origin().arrivals.append(oldArr)
            # end for

            # Insert our picks
            opList = self.our_picks.picks[e.public_id]
            if (len(opList)):
                for op in opList:
                    if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                            defaultdict):
                        missingStations[op[1] + '.' + op[2]] += 1
                        continue
                    # end if
                    newPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(op[0]),
                        waveform_id=OWaveformStreamID(network_code=op[1],
                                                      station_code=op[2],
                                                      channel_code=op[3]),
                        methodID=OResourceIdentifier('phasepapy/aicd'),
                        backazimuth=op[-1],
                        phase_hint=op[4],
                        evaluation_mode='automatic',
                        comments=op[6],
                        creation_info=ci)

                    newArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=newPick.resource_id.id + "#"),
                        pick_id=newPick.resource_id,
                        phase=newPick.phase_hint,
                        azimuth=op[-2],
                        distance=op[-3],
                        time_residual=op[5],
                        time_weight=1.,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)
                    event.picks.append(newPick)
                    event.preferred_origin().arrivals.append(newArr)
                # end for
            # end if

            quality = OOriginQuality(
                associated_phase_count=len(e.preferred_origin.arrival_list) +
                len(self.our_picks.picks[e.public_id]),
                used_phase_count=len(e.preferred_origin.arrival_list) +
                len(self.our_picks.picks[e.public_id]))
            event.preferred_origin().quality = quality
            oEvents.append(event)
        # end for // loop over e

        #print notFound
        print(self.rank, missingStations)

        cat = OCatalog(events=oEvents)
        ofn = self.output_path + '/%d.xml' % (self.rank)
        cat.write(ofn, format='SC3ML')
Example #18
def read_files(fnames):
    out = Catalog()
    for fname in fnames:
        out += read_regex(fname)
    out_fname = 'output_geofon.xml'
    out.write(out_fname, 'QUAKEML')
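
A usage sketch, assuming `read_regex` is defined in the same script; the input file names are placeholders:

import glob
from obspy import read_events

read_files(sorted(glob.glob('geofon_bulletins/*.txt')))
cat = read_events('output_geofon.xml')  # read the merged result back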
Example #19
def par2quakeml(Par_filename,
                QuakeML_filename,
                rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5,
                origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

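    # skip the first four lines of the Par file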
    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr**2 + Mtt**2 + Mpp**2 + Mtr**2 +
                                            Mpr**2 + Mtp**2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes().setdefault(0, 0)

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
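
A hypothetical call mirroring the signature defaults; the file names are placeholders:

par2quakeml('event.par', 'event_quake.xml',
            rotation_axis=[0.0, 1.0, 0.0], rotation_angle=-57.5,
            origin_time='2011-03-11 05:46:24.0', event_type='earthquake')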
Example #20
    def _load_events(self):
        self._load_events_helper()
        cache = {}
        notFound = defaultdict(int)
        oEvents = []
        missingStations = defaultdict(int)
        lines = []
        for e in tqdm(self.eventList, desc='Rank %d' % (self.rank)):
            if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
                cullList = []
                for a in e.preferred_origin.arrival_list:
                    if (len(a.net)): continue

                    seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                    newCode = None
                    if (seedid not in cache):
                        sc = a.sta
                        lonlat = self.isc_coords_dict[sc]
                        if (len(lonlat) == 0):
                            cullList.append(a)
                            continue
                        # end if

                        r = self.fdsn_inventory.getClosestStation(
                            lonlat[0], lonlat[1], maxdist=1e3)  # 1km
                        #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                        if (not r):
                            notFound[sc] += 1
                        else:
                            c = r[0].split('.')[0]
                            newCode = c
                        # end if

                        if (newCode):
                            cache[seedid] = newCode
                        # end if
                    else:
                        newCode = cache[seedid]
                    # end if

                    if (newCode):
                        #print a.net, newCode
                        a.net = newCode

                        sc = self.fdsn_inventory.t[a.net][a.sta]
                        if (type(sc) == defaultdict):
                            cullList.append(a)
                            continue
                        # end if
                        da = gps2dist_azimuth(e.preferred_origin.lat,
                                              e.preferred_origin.lon, sc[1],
                                              sc[0])
                        dist = kilometers2degrees(da[0] / 1e3)

                        if (np.fabs(a.distance - dist) > 0.5):
                            #print ([e.preferred_origin.lon, e.preferred_origin.lat, sc[0], sc[1]])
                            cullList.append(a)
                        # end if
                    # end if
                # end for
                for c in cullList:
                    e.preferred_origin.arrival_list.remove(c)
            # end if

            # Create obspy event object
            ci = OCreationInfo(author='GA',
                               creation_time=UTCDateTime(),
                               agency_id='GA-iteration-1')
            oid = self.get_id()
            origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                             time=UTCDateTime(e.preferred_origin.utctime),
                             longitude=e.preferred_origin.lon,
                             latitude=e.preferred_origin.lat,
                             depth=e.preferred_origin.depthkm * 1e3,
                             method_id=OResourceIdentifier(id='unknown'),
                             earth_model_id=OResourceIdentifier(id='iasp91'),
                             evaluation_mode='automatic',
                             creation_info=ci)
            magnitude = OMagnitude(
                resource_id=OResourceIdentifier(id=self.get_id()),
                mag=e.preferred_magnitude.magnitude_value,
                magnitude_type=e.preferred_magnitude.magnitude_type,
                origin_id=OResourceIdentifier(id=oid),
                creation_info=ci)
            event = OEvent(
                resource_id=OResourceIdentifier(id=str(e.public_id)),
                creation_info=ci,
                event_type='earthquake')
            event.origins = [origin]
            event.magnitudes = [magnitude]
            event.preferred_magnitude_id = magnitude.resource_id
            event.preferred_origin_id = origin.resource_id

            # Insert old picks
            if (not self.discard_old_picks):
                for a in e.preferred_origin.arrival_list:
                    if (type(self.fdsn_inventory.t[a.net][a.sta]) ==
                            defaultdict):
                        missingStations[a.net + '.' + a.sta] += 1
                        continue
                    # end if
                    oldPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(a.utctime),
                        waveform_id=OWaveformStreamID(network_code=a.net,
                                                      station_code=a.sta,
                                                      channel_code=a.cha),
                        methodID=OResourceIdentifier('unknown'),
                        phase_hint=a.phase,
                        evaluation_mode='automatic',
                        creation_info=ci)

                    oldArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=oldPick.resource_id.id + "#"),
                        pick_id=oldPick.resource_id,
                        phase=oldPick.phase_hint,
                        distance=a.distance,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)

                    event.picks.append(oldPick)
                    event.preferred_origin().arrivals.append(oldArr)

                    # populate list for text output
                    line = [
                        str(e.public_id), '{:<25s}',
                        e.preferred_origin.utctime.timestamp, '{:f}',
                        e.preferred_magnitude.magnitude_value, '{:f}',
                        e.preferred_origin.lon, '{:f}', e.preferred_origin.lat,
                        '{:f}', e.preferred_origin.depthkm, '{:f}', a.net,
                        '{:<5s}', a.sta, '{:<5s}', a.cha, '{:<5s}',
                        a.utctime.timestamp, '{:f}', a.phase, '{:<5s}',
                        self.fdsn_inventory.t[a.net][a.sta][0], '{:f}',
                        self.fdsn_inventory.t[a.net][a.sta][1], '{:f}', -999,
                        '{:f}', -999, '{:f}', a.distance, '{:f}', -999, '{:f}',
                        -999, '{:f}', -999, '{:f}', -999, '{:f}', -999, '{:f}',
                        -999, '{:d}', -999, '{:d}'
                    ]
                    lines.append(line)
                # end for
            # end if

            # Insert our picks
            opList = self.our_picks.picks[e.public_id]
            if (len(opList)):
                for op in opList:
                    if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                            defaultdict):
                        missingStations[op[1] + '.' + op[2]] += 1
                        continue
                    # end if
                    newPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(op[0]),
                        waveform_id=OWaveformStreamID(network_code=op[1],
                                                      station_code=op[2],
                                                      channel_code=op[3]),
                        methodID=OResourceIdentifier('phasepapy/aicd'),
                        backazimuth=op[-1],
                        phase_hint=op[4],
                        evaluation_mode='automatic',
                        comments=[
                            OComment(
                                text='phasepapy_snr = ' + str(op[6][0]) +
                                ', quality_measure_cwt = ' + str(op[6][1]) +
                                ', dom_freq = ' + str(op[6][2]) +
                                ', quality_measure_slope = ' + str(op[6][3]) +
                                ', band_index = ' + str(op[6][4]) +
                                ', nsigma = ' + str(op[6][5]),
                                force_resource_id=False)
                        ],
                        creation_info=ci)

                    newArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=newPick.resource_id.id + "#"),
                        pick_id=newPick.resource_id,
                        phase=newPick.phase_hint,
                        azimuth=op[-2],
                        distance=op[-3],
                        time_residual=op[5],
                        time_weight=1.,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)
                    event.picks.append(newPick)
                    event.preferred_origin().arrivals.append(newArr)

                    # populate list for text output
                    line = [
                        str(e.public_id), '{:<25s}',
                        e.preferred_origin.utctime.timestamp, '{:f}',
                        e.preferred_magnitude.magnitude_value, '{:f}',
                        e.preferred_origin.lon, '{:f}', e.preferred_origin.lat,
                        '{:f}', e.preferred_origin.depthkm, '{:f}', op[1],
                        '{:<5s}', op[2], '{:<5s}', op[3], '{:<5s}',
                        UTCDateTime(op[0]).timestamp, '{:f}', op[4], '{:<5s}',
                        op[10], '{:f}', op[9], '{:f}', op[12], '{:f}', op[13],
                        '{:f}', op[11], '{:f}', op[5], '{:f}', op[6][0],
                        '{:f}', op[6][1], '{:f}', op[6][2], '{:f}', op[6][3],
                        '{:f}',
                        int(op[6][4]), '{:d}',
                        int(op[6][5]), '{:d}'
                    ]
                    lines.append(line)
                # end for
            # end if

            quality = OOriginQuality(
                associated_phase_count=len(e.preferred_origin.arrival_list) *
                int(self.discard_old_picks) +
                len(self.our_picks.picks[e.public_id]),
                used_phase_count=len(e.preferred_origin.arrival_list) *
                int(self.discard_old_picks) +
                len(self.our_picks.picks[e.public_id]))
            event.preferred_origin().quality = quality

            if (len(self.our_picks.picks[e.public_id]) == 0
                    and self.discard_old_picks):
                continue
            # end if

            oEvents.append(event)
        # end for // loop over e

        if (len(missingStations)):
            for k, v in missingStations.items():
                self.logger.warning('Missing station %s: %d picks' % (k, v))
            # end for
        # end if

        # write xml output
        if (len(oEvents)):
            cat = OCatalog(events=oEvents)
            ofn = self.output_path + '/%d.xml' % (self.rank)
            cat.write(ofn, format='SC3ML')
        # end if

        # write text output
        procfile = open('%s/proc.%d.txt' % (self.output_path, self.rank), 'w+')
        for line in lines:
            lineout = ' '.join(line[1::2]).format(*line[::2])
            procfile.write(lineout + '\n')
        # end for
        procfile.close()

        # combine text output
        header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp phase stationLon stationLat az baz distance ttResidual snr qualityMeasureCWT domFreq qualityMeasureSlope bandIndex nSigma\n'
        self.comm.barrier()
        if (self.rank == 0):
            of = open('%s/ensemble.txt' % (self.output_path), 'w+')
            of.write(header)

            for i in range(self.nproc):
                fn = '%s/proc.%d.txt' % (self.output_path, i)

                lines = open(fn, 'r').readlines()
                for line in lines:
                    of.write(line)
                # end for

                if (os.path.exists(fn)): os.remove(fn)
            # end for
            of.close()
Example #21
    cat = client.get_events(starttime=starttime, endtime=endtime,
                            includearrivals=False, latitude=lat_center,
                            longitude=lon_center, maxradius=radius,
                            minmagnitude=2)
    cat.write(u'/Users/dmelgar/Amatrice2016/afters/INGV_nopicks.xml',
              format='QUAKEML')

else:  # read it
    cat = read_events(u'/Users/dmelgar/Amatrice2016/afters/INGV_nopicks.xml')

# loop over all events and build new catalogue, this time with picks
Nevents = len(cat)
cat2 = Catalog()
for kevent in range(Nevents):
    print('Working on event %d of %d' % (kevent, Nevents))
    ev = cat[kevent]
    evID = int(ev.resource_id.id.split('=')[-1])
    try:
        cat_temp = client.get_events(eventid=evID, includearrivals=True)
        ev = cat_temp[0]
        cat2 += ev
    except Exception:
        print('... Could not get event')

cat2.write(u'/Users/dmelgar/Amatrice2016/afters/INGV_withpicks.xml',
           format='QUAKEML')

if get_stations:
    inventory = client.get_stations(network="IV", starttime=starttime,
                                    endtime=endtime)
    inventory += client.get_stations(network="BA", starttime=starttime,
                                     endtime=endtime)
    inventory += client.get_stations(network="MN", starttime=starttime,
                                     endtime=endtime)
    inventory += client.get_stations(network="TV", starttime=starttime,
                                     endtime=endtime)
    inventory += client.get_stations(network="XO", starttime=starttime,
                                     endtime=endtime)
Example #22
        for kk in range(0, len(cat)):
            dist = obspy.geodetics.base.locations2degrees(
                ev_info.rlat, ev_info.rlon,
                cat[kk].origins[0].latitude, cat[kk].origins[0].longitude)
            if st_minradius <= dist <= st_maxradius:
                cat_subset.append(cat[kk])
        print(cat_subset)
        # -------------------------------------------------------------------------------

        # Create station directory
        sta_dir = ev_info.network + '_' + ev_info.station
        odir = out_dir + sta_dir
        if not os.path.exists(odir):
            os.makedirs(odir)

        # save catalog subset for this station
        fname = odir + '_event_subset'
        cat_subset.write(fname,'cnv')
        fname = odir + '_event_subset.eps'
        cat_subset.plot(outfile = fname)

        # Event selection plot
        #plotme(clat,clon,ev_info.rlat,ev_info.rlon,cat,cat_subset,st_minradius,st_maxradius,sta_dir)

        # -------------------------------------------------------------------------------
        # Loop over subset_events
        for kk in range(0, len(cat_subset)):
            ev_info.otime = cat_subset[kk].origins[0].time
            ev_info.elat = cat_subset[kk].origins[0].latitude
            ev_info.elon = cat_subset[kk].origins[0].longitude
            ev_info.edep = cat_subset[kk].origins[0].depth
            ev_info.emag = cat_subset[kk].magnitudes[0].mag
            ev_info.rtime = cat_subset[kk].origins[0].time
Example #23
    def test_read_write(self):
        """
        Function to test the read and write capabilities of sfile_util.
        """
        import os
        from obspy.core.event import Catalog
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.core.event import read_events
        else:
            from obspy.core.event import readEvents as read_events

        # Set-up a test event
        test_event = basic_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Write the catalog
        test_cat.write("Test_catalog.xml", format='QUAKEML')
        # Read and check
        read_cat = read_events("Test_catalog.xml")
        os.remove("Test_catalog.xml")
        self.assertEqual(read_cat[0].resource_id, test_cat[0].resource_id)
        self.assertEqual(read_cat[0].picks, test_cat[0].picks)
        self.assertEqual(read_cat[0].origins[0].resource_id,
                         test_cat[0].origins[0].resource_id)
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not in the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes, test_cat[0].magnitudes)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        self.assertEqual(read_cat[0].amplitudes[0].resource_id,
                         test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].unit,
                         test_cat[0].amplitudes[0].unit)
        self.assertEqual(read_cat[0].amplitudes[0].generic_amplitude,
                         test_cat[0].amplitudes[0].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[0].pick_id,
                         test_cat[0].amplitudes[0].pick_id)
        self.assertEqual(read_cat[0].amplitudes[0].waveform_id,
                         test_cat[0].amplitudes[0].waveform_id)

        # Check the read-write s-file functionality
        sfile = eventtosfile(test_cat[0],
                             userID='TEST',
                             evtype='L',
                             outdir='.',
                             wavefiles='test',
                             explosion=True,
                             overwrite=True)
        del read_cat
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        read_cat += readpicks(sfile)
        os.remove(sfile)
        self.assertEqual(read_cat[0].picks[0].time, test_cat[0].picks[0].time)
        self.assertEqual(read_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(read_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(read_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(read_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(read_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(read_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # assert read_cat[0].origins[0].resource_id ==\
        #     test_cat[0].origins[0].resource_id
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not in the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(read_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(read_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(read_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(read_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(read_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(read_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # assert read_cat[0].amplitudes[0].resource_id ==\
        #     test_cat[0].amplitudes[0].resource_id
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
        del read_cat
        # assert read_cat[0].amplitudes[0].pick_id ==\
        #     test_cat[0].amplitudes[0].pick_id
        # assert read_cat[0].amplitudes[0].waveform_id ==\
        #     test_cat[0].amplitudes[0].waveform_id

        # Test the wrappers for PICK and EVENTINFO classes
        picks, evinfo = eventtopick(test_cat)
        # Test the conversion back
        conv_cat = Catalog()
        conv_cat.append(picktoevent(evinfo, picks))
        self.assertEqual(conv_cat[0].picks[0].time, test_cat[0].picks[0].time)
        self.assertEqual(conv_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(conv_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(conv_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(conv_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # self.assertEqual(read_cat[0].origins[0].resource_id,
        #                  test_cat[0].origins[0].resource_id)
        self.assertEqual(conv_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not in the QuakeML format
        self.assertEqual(conv_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(conv_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(conv_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(conv_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(conv_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(conv_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(conv_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(conv_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # self.assertEqual(read_cat[0].amplitudes[0].resource_id,
        #                  test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(conv_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(conv_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
Example #24
def read_files(fnames):
    out = Catalog()
    for fname in fnames:
        out += read_regex(fname)
    out_fname = 'output_geofon.xml'
    out.write(out_fname, 'QUAKEML')
Example #25
    def test_read_write(self):
        """
        Function to test the read and write capabilities of sfile_util.
        """
        import os
        from obspy.core.event import Catalog
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.core.event import read_events
        else:
            from obspy.core.event import readEvents as read_events

        # Set-up a test event
        test_event = full_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Write the catalog
        test_cat.write("Test_catalog.xml", format='QUAKEML')
        # Read and check
        read_cat = read_events("Test_catalog.xml")
        os.remove("Test_catalog.xml")
        self.assertEqual(read_cat[0].resource_id, test_cat[0].resource_id)
        for i in range(len(read_cat[0].picks)):
            for key in read_cat[0].picks[i].keys():
                # Ignore backazimuth errors and horizontal_slowness_errors
                if key in ['backazimuth_errors', 'horizontal_slowness_errors']:
                    continue
                self.assertEqual(read_cat[0].picks[i][key],
                                 test_cat[0].picks[i][key])
        self.assertEqual(read_cat[0].origins[0].resource_id,
                         test_cat[0].origins[0].resource_id)
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not in the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        # Check magnitudes
        self.assertEqual(read_cat[0].magnitudes, test_cat[0].magnitudes)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # Check local magnitude amplitude
        self.assertEqual(read_cat[0].amplitudes[0].resource_id,
                         test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].unit,
                         test_cat[0].amplitudes[0].unit)
        self.assertEqual(read_cat[0].amplitudes[0].generic_amplitude,
                         test_cat[0].amplitudes[0].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[0].pick_id,
                         test_cat[0].amplitudes[0].pick_id)
        self.assertEqual(read_cat[0].amplitudes[0].waveform_id,
                         test_cat[0].amplitudes[0].waveform_id)
        # Check coda magnitude pick
        self.assertEqual(read_cat[0].amplitudes[1].resource_id,
                         test_cat[0].amplitudes[1].resource_id)
        self.assertEqual(read_cat[0].amplitudes[1].type,
                         test_cat[0].amplitudes[1].type)
        self.assertEqual(read_cat[0].amplitudes[1].unit,
                         test_cat[0].amplitudes[1].unit)
        self.assertEqual(read_cat[0].amplitudes[1].generic_amplitude,
                         test_cat[0].amplitudes[1].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[1].pick_id,
                         test_cat[0].amplitudes[1].pick_id)
        self.assertEqual(read_cat[0].amplitudes[1].waveform_id,
                         test_cat[0].amplitudes[1].waveform_id)
        self.assertEqual(read_cat[0].amplitudes[1].magnitude_hint,
                         test_cat[0].amplitudes[1].magnitude_hint)
        self.assertEqual(read_cat[0].amplitudes[1].snr,
                         test_cat[0].amplitudes[1].snr)
        self.assertEqual(read_cat[0].amplitudes[1].category,
                         test_cat[0].amplitudes[1].category)

        # Check the read-write s-file functionality
        sfile = eventtosfile(test_cat[0], userID='TEST',
                             evtype='L', outdir='.',
                             wavefiles='test', explosion=True, overwrite=True)
        del read_cat
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        read_cat += readpicks(sfile)
        os.remove(sfile)
        for i in range(len(read_cat[0].picks)):
            self.assertEqual(read_cat[0].picks[i].time,
                             test_cat[0].picks[i].time)
            self.assertEqual(read_cat[0].picks[i].backazimuth,
                             test_cat[0].picks[i].backazimuth)
            self.assertEqual(read_cat[0].picks[i].onset,
                             test_cat[0].picks[i].onset)
            self.assertEqual(read_cat[0].picks[i].phase_hint,
                             test_cat[0].picks[i].phase_hint)
            self.assertEqual(read_cat[0].picks[i].polarity,
                             test_cat[0].picks[i].polarity)
            self.assertEqual(read_cat[0].picks[i].waveform_id.station_code,
                             test_cat[0].picks[i].waveform_id.station_code)
            self.assertEqual(read_cat[0].picks[i].waveform_id.channel_code[-1],
                             test_cat[0].picks[i].waveform_id.channel_code[-1])
        # assert read_cat[0].origins[0].resource_id ==\
        #     test_cat[0].origins[0].resource_id
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not part of the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(read_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(read_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(read_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(read_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(read_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(read_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # assert read_cat[0].amplitudes[0].resource_id ==\
        #     test_cat[0].amplitudes[0].resource_id
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
        # Check coda magnitude pick
        # Resource ids get overwritten because two identical ids cannot
        # coexist in memory
        # self.assertEqual(read_cat[0].amplitudes[1].resource_id,
        #                  test_cat[0].amplitudes[1].resource_id)
        self.assertEqual(read_cat[0].amplitudes[1].type,
                         test_cat[0].amplitudes[1].type)
        self.assertEqual(read_cat[0].amplitudes[1].unit,
                         test_cat[0].amplitudes[1].unit)
        self.assertEqual(read_cat[0].amplitudes[1].generic_amplitude,
                         test_cat[0].amplitudes[1].generic_amplitude)
        # Resource ids get overwritten because two identical ids cannot
        # coexist in memory
        # self.assertEqual(read_cat[0].amplitudes[1].pick_id,
        #                  test_cat[0].amplitudes[1].pick_id)
        self.assertEqual(read_cat[0].amplitudes[1].waveform_id.station_code,
                         test_cat[0].amplitudes[1].waveform_id.station_code)
        self.assertEqual(
            read_cat[0].amplitudes[1].waveform_id.channel_code,
            test_cat[0].amplitudes[1].waveform_id.channel_code[0] +
            test_cat[0].amplitudes[1].waveform_id.channel_code[-1])
        self.assertEqual(read_cat[0].amplitudes[1].magnitude_hint,
                         test_cat[0].amplitudes[1].magnitude_hint)
        # snr is not supported in s-file
        # self.assertEqual(read_cat[0].amplitudes[1].snr,
        #                  test_cat[0].amplitudes[1].snr)
        self.assertEqual(read_cat[0].amplitudes[1].category,
                         test_cat[0].amplitudes[1].category)
        del read_cat

        # Test deliberate failure cases. Each invalid call gets its own
        # assertRaises block; in a single block, statements after the first
        # raise would never execute.
        test_cat.append(full_test_event())
        with self.assertRaises(IOError):
            # Raises error due to multiple events in catalog
            sfile = eventtosfile(test_cat, userID='TEST',
                                 evtype='L', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        with self.assertRaises(IOError):
            # Raises error due to too long userID
            sfile = eventtosfile(test_cat[0], userID='TESTICLE',
                                 evtype='L', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        with self.assertRaises(IOError):
            # Raises error due to unrecognised event type
            sfile = eventtosfile(test_cat[0], userID='TEST',
                                 evtype='U', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        with self.assertRaises(IOError):
            # Raises error due to missing output directory
            sfile = eventtosfile(test_cat[0], userID='TEST',
                                 evtype='L', outdir='albatross',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        with self.assertRaises(IOError):
            # Raises error due to incorrect wavefile formatting
            sfile = eventtosfile(test_cat[0], userID='TEST',
                                 evtype='L', outdir='.',
                                 wavefiles=1234, explosion=True,
                                 overwrite=True)
        with self.assertRaises(IndexError):
            invalid_origin = test_cat[0].copy()
            invalid_origin.origins = []
            sfile = eventtosfile(invalid_origin, userID='TEST',
                                 evtype='L', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        with self.assertRaises(ValueError):
            invalid_origin = test_cat[0].copy()
            invalid_origin.origins[0].time = None
            sfile = eventtosfile(invalid_origin, userID='TEST',
                                 evtype='L', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        # Write a near empty origin
        valid_origin = test_cat[0].copy()
        valid_origin.origins[0].latitude = None
        valid_origin.origins[0].longitude = None
        valid_origin.origins[0].depth = None
        sfile = eventtosfile(valid_origin, userID='TEST',
                             evtype='L', outdir='.',
                             wavefiles='test', explosion=True,
                             overwrite=True)
        self.assertTrue(os.path.isfile(sfile))
        os.remove(sfile)
Example #26
0
def iter_chunked(tinc,
                 path,
                 data_pile,
                 tmin=None,
                 tmax=None,
                 minlat=49.1379,
                 maxlat=49.1879,
                 minlon=8.1223,
                 maxlon=8.1723,
                 channels=["EH" + "[ZNE]"],
                 client_list=["BGR"],
                 download=True,
                 seiger=True,
                 selection=None,
                 path_waveforms=None,
                 sds=None,
                 stream=False,
                 reject_blacklisted=None,
                 tpad=0,
                 tstart=None,
                 tstop=None,
                 hf=10,
                 lf=1,
                 deltat=None,
                 models=None):
    try:
        tstart = util.stt(tmin) if tmin else None
        tstop = util.stt(tmax) if tmax else None
    except Exception:
        # keep the tstart/tstop arguments if tmin/tmax cannot be parsed
        pass
    model_path = os.path.dirname(
        os.path.abspath(__file__)) + "/model/EqT_model.h5"
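    # smallest sampling interval (i.e. highest sampling rate) in the pile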
    deltat_cf = min(data_pile.deltats.keys())
    for i, trs in enumerate(
            data_pile.chopper(tinc=tinc,
                              tmin=tstart,
                              tmax=tstop,
                              tpad=tpad,
                              keep_current_files_open=False,
                              want_incomplete=True)):
        tminc = None
        tmaxc = None
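        # restrict the chunk to the time span common to all traces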
        for tr in trs:
            if tminc is None:
                tminc = tr.tmin
                tmaxc = tr.tmax
            else:
                if tminc < tr.tmin:
                    tminc = tr.tmin
                if tmaxc > tr.tmax:
                    tmaxc = tr.tmax
        for tr in trs:
            tr.highpass(4, 5)
            try:
                tr.chop(tminc, tmaxc)
            except Exception:
                pass
            date_min = download_raw.get_time_format_eq(tminc)
            date_max = download_raw.get_time_format_eq(tmaxc)
            io.save(
                tr, "%s/downloads/%s/%s.%s..%s__%s__%s.mseed" %
                (path, tr.station, tr.network, tr.station, tr.channel,
                 date_min, date_max))

        process(path,
                tmin=tminc,
                tmax=tmaxc,
                minlat=minlat,
                maxlat=maxlat,
                minlon=minlon,
                maxlon=maxlon,
                channels=channels,
                client_list=client_list,
                download=download,
                seiger=seiger,
                selection=selection,
                path_waveforms=path_waveforms,
                stream=stream,
                model=None,
                iter=i,
                models=models,
                sds=None)
        for tr in trs:
            subprocess.run([
                "rm -r %s/downloads/%s/%s.%s..%s__%s__%s.mseed" %
                (path, tr.station, tr.network, tr.station, tr.channel,
                 date_min, date_max)
            ],
                           shell=True)

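    # merge every association result written during the run into one catalog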
    cat = Catalog()
    files = glob("%s/asociation*/associations.xml" % path)
    files.sort(key=os.path.getmtime)
    for file in files:
        cat_read = read_events(file)
        for event in cat_read:
            cat.append(event)
    cat.write("%s/events_eqt.qml" % path, format="QUAKEML")
Example #27
0
def par2quakeml(Par_filename, QuakeML_filename, rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5, origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

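    # skip four header lines of the Par file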
    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
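    # scalar moment taken here as the Euclidean norm of the six tensor
    # components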
    ev_momenttensor.scalar_moment = np.sqrt(Mrr ** 2 + Mtt ** 2 + Mpp ** 2 +
                                            Mtr ** 2 + Mpr ** 2 + Mtp ** 2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes().setdefault(0, 0)

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
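    # moment magnitude: Mw = (2/3) * (log10(M0) - 9.1), with M0 in N*m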
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
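
A minimal usage sketch, not part of the original source; the Par and QuakeML filenames are placeholders and the keyword values simply restate the defaults from the signature above.

# Hypothetical invocation of par2quakeml; filenames are placeholders.
par2quakeml("event.Par", "event.xml",
            rotation_axis=[0.0, 1.0, 0.0],
            rotation_angle=-57.5,
            origin_time="2000-01-01 00:00:00.0",
            event_type="other event")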
Example #28
0
    def test_read_write(self):
        """
        Function to test the read and write capabilities of sfile_util.
        """
        import os
        from obspy.core.event import Catalog
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.core.event import read_events
        else:
            from obspy.core.event import readEvents as read_events

        # Set-up a test event
        test_event = basic_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Write the catalog
        test_cat.write("Test_catalog.xml", format='QUAKEML')
        # Read and check
        read_cat = read_events("Test_catalog.xml")
        os.remove("Test_catalog.xml")
        self.assertEqual(read_cat[0].resource_id, test_cat[0].resource_id)
        self.assertEqual(read_cat[0].picks, test_cat[0].picks)
        self.assertEqual(read_cat[0].origins[0].resource_id,
                         test_cat[0].origins[0].resource_id)
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not part of the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes, test_cat[0].magnitudes)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        self.assertEqual(read_cat[0].amplitudes[0].resource_id,
                         test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].unit,
                         test_cat[0].amplitudes[0].unit)
        self.assertEqual(read_cat[0].amplitudes[0].generic_amplitude,
                         test_cat[0].amplitudes[0].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[0].pick_id,
                         test_cat[0].amplitudes[0].pick_id)
        self.assertEqual(read_cat[0].amplitudes[0].waveform_id,
                         test_cat[0].amplitudes[0].waveform_id)

        # Check the read-write s-file functionality
        sfile = eventtosfile(test_cat[0], userID='TEST',
                             evtype='L', outdir='.',
                             wavefiles='test', explosion=True, overwrite=True)
        del read_cat
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        read_cat += readpicks(sfile)
        os.remove(sfile)
        self.assertEqual(read_cat[0].picks[0].time,
                         test_cat[0].picks[0].time)
        self.assertEqual(read_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(read_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(read_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(read_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(read_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(read_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # assert read_cat[0].origins[0].resource_id ==\
        #     test_cat[0].origins[0].resource_id
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not part of the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(read_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(read_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(read_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(read_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(read_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(read_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # assert read_cat[0].amplitudes[0].resource_id ==\
        #     test_cat[0].amplitudes[0].resource_id
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
        del read_cat
        # assert read_cat[0].amplitudes[0].pick_id ==\
        #     test_cat[0].amplitudes[0].pick_id
        # assert read_cat[0].amplitudes[0].waveform_id ==\
        #     test_cat[0].amplitudes[0].waveform_id

        # Test the wrappers for PICK and EVENTINFO classes
        picks, evinfo = eventtopick(test_cat)
        # Test the conversion back
        conv_cat = Catalog()
        conv_cat.append(picktoevent(evinfo, picks))
        self.assertEqual(conv_cat[0].picks[0].time, test_cat[0].picks[0].time)
        self.assertEqual(conv_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(conv_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(conv_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(conv_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # self.assertEqual(read_cat[0].origins[0].resource_id,
        #                  test_cat[0].origins[0].resource_id)
        self.assertEqual(conv_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not part of the QuakeML format
        self.assertEqual(conv_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(conv_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(conv_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(conv_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(conv_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(conv_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(conv_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(conv_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # self.assertEqual(read_cat[0].amplitudes[0].resource_id,
        #                  test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(conv_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(conv_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
Example #29
0
def search(config,
           override_tmin=None,
           override_tmax=None,
           show_detections=False,
           show_movie=False,
           show_window_traces=False,
           force=False,
           stop_after_first=False,
           nparallel=6,
           save_imax=False,
           bark=False):

    fp = config.expand_path

    run_path = fp(config.run_path)

    # if op.exists(run_path):
    #     if force:
    #         shutil.rmtree(run_path)
    #     else:
    #         raise common.LassieError(
    #             'run directory already exists: %s' %
    #             run_path)

    util.ensuredir(run_path)

    write_config(config, op.join(run_path, 'config.yaml'))

    ifm_path_template = config.get_ifm_path_template()
    detections_path = config.get_detections_path()
    events_path = config.get_events_path()
    figures_path_template = config.get_figures_path_template()

    config.setup_image_function_contributions()
    ifcs = config.image_function_contributions

    grid = config.get_grid()
    receivers = config.get_receivers()

    norm_map = gridmod.geometrical_normalization(grid, receivers)

    data_paths = fp(config.data_paths)
    for data_path in data_paths:
        if not op.exists(data_path):
            # missing data paths are silently ignored here
            pass

    p = pile.make_pile(data_paths, fileformat='detect')
    if p.is_empty():
        raise common.LassieError('no usable waveforms found')

    for ifc in ifcs:
        ifc.prescan(p)

    shift_tables = []
    tshift_minmaxs = []
    for ifc in ifcs:
        shift_tables.append(ifc.get_table(grid, receivers))
        tshift_minmaxs.append(num.nanmin(shift_tables[-1]))
        tshift_minmaxs.append(num.nanmax(shift_tables[-1]))

    fsmooth_min = min(ifc.get_fsmooth() for ifc in ifcs)

    tshift_min = min(tshift_minmaxs)
    tshift_max = max(tshift_minmaxs)

    if config.detector_tpeaksearch is not None:
        tpeaksearch = config.detector_tpeaksearch
    else:
        tpeaksearch = (tshift_max - tshift_min) + 1.0 / fsmooth_min

    tpad = max(ifc.get_tpad() for ifc in ifcs) + \
        (tshift_max - tshift_min) + tpeaksearch

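    # chunk length: ten times the travel-time spread plus padding, capped
    # below by the length of the available data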
    tinc = (tshift_max - tshift_min) * 10. + 3.0 * tpad
    tavail = p.tmax - p.tmin
    tinc = min(tinc, tavail - 2.0 * tpad)

    if tinc <= 0:
        raise common.LassieError('available waveforms too short \n'
                                 'required: %g s\n'
                                 'available: %g s\n' % (2. * tpad, tavail))

    blacklist = set(tuple(s.split('.')) for s in config.blacklist)
    whitelist = set(tuple(s.split('.')) for s in config.whitelist)

    distances = grid.distances(receivers)
    distances_to_grid = num.min(distances, axis=0)

    distance_min = num.min(distances)
    distance_max = num.max(distances)

    station_index = dict(
        (rec.codes, i) for (i, rec) in enumerate(receivers)
        if rec.codes not in blacklist and (
            not whitelist or rec.codes in whitelist) and (
                config.distance_max is None
                or distances_to_grid[i] <= config.distance_max))

    check_data_consistency(p, config)

    deltat_cf = max(p.deltats.keys())
    assert deltat_cf > 0.0

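    # coarsen the CF sampling interval as long as every image function
    # still supports the doubled interval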
    while True:
        if not all(ifc.deltat_cf_is_available(deltat_cf * 2) for ifc in ifcs):
            break

        deltat_cf *= 2
    logger.info('CF lassie sampling interval (rate): %g s (%g Hz)' %
                (deltat_cf, 1.0 / deltat_cf))

    ngridpoints = grid.size()

    logger.info('number of grid points: %i' % ngridpoints)
    logger.info('minimum source-receiver distance: %g m' % distance_min)
    logger.info('maximum source-receiver distance: %g m' % distance_max)
    logger.info('minimum travel-time: %g s' % tshift_min)
    logger.info('maximum travel-time: %g s' % tshift_max)

    idetection = 0

    tmin = override_tmin or config.tmin or p.tmin + tpad
    tmax = override_tmax or config.tmax or p.tmax - tpad

    events = config.get_events()
    twindows = []
    if events is not None:
        for ev in events:
            if tmin <= ev.time <= tmax:
                twindows.append(
                    (ev.time + tshift_min - (tshift_max - tshift_min) *
                     config.event_time_window_factor,
                     ev.time + tshift_min + (tshift_max - tshift_min) *
                     config.event_time_window_factor))

    else:
        twindows.append((tmin, tmax))

    for iwindow_group, (tmin_win, tmax_win) in enumerate(twindows):

        nwin = int(math.ceil((tmax_win - tmin_win) / tinc))

        logger.info('start processing time window group %i/%i: %s - %s' %
                    (iwindow_group + 1, len(twindows),
                     util.time_to_str(tmin_win), util.time_to_str(tmax_win)))

        logger.info('number of time windows: %i' % nwin)
        logger.info('time window length: %g s' % (tinc + 2.0 * tpad))
        logger.info('time window payload: %g s' % tinc)
        logger.info('time window padding: 2 x %g s' % tpad)
        logger.info('time window overlap: %g%%' % (100.0 * 2.0 * tpad /
                                                   (tinc + 2.0 * tpad)))

        iwin = -1

        for trs in p.chopper(
                tmin=tmin_win,
                tmax=tmax_win,
                tinc=tinc,
                tpad=tpad,
                want_incomplete=config.fill_incomplete_with_zeros,
                trace_selector=lambda tr: tr.nslc_id[:3] in station_index):
            iwin += 1
            trs_ok = []
            for tr in trs:
                if tr.ydata.size == 0:
                    logger.warn('skipping empty trace: %s.%s.%s.%s' %
                                tr.nslc_id)

                    continue

                if not num.all(num.isfinite(tr.ydata)):
                    logger.warn('skipping trace because of invalid values: '
                                '%s.%s.%s.%s' % tr.nslc_id)

                    continue

                trs_ok.append(tr)

            trs = trs_ok

            if not trs:
                continue

            logger.info('processing time window %i/%i: %s - %s' %
                        (iwin + 1, nwin, util.time_to_str(
                            trs[0].wmin), util.time_to_str(trs[0].wmax)))

            wmin = trs[0].wmin
            wmax = trs[0].wmax

            if config.fill_incomplete_with_zeros:
                trs = zero_fill(trs, wmin - tpad, wmax + tpad)

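            # map the padded window onto the coarse CF sample grid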
            t0 = math.floor(wmin / deltat_cf) * deltat_cf
            iwmin = int(round((wmin - tpeaksearch - t0) / deltat_cf))
            iwmax = int(round((wmax + tpeaksearch - t0) / deltat_cf))
            lengthout = iwmax - iwmin + 1

            pdata = []
            trs_debug = []
            parstack_params = []
            for iifc, ifc in enumerate(ifcs):
                dataset = ifc.preprocess(trs, wmin - tpeaksearch,
                                         wmax + tpeaksearch,
                                         tshift_max - tshift_min, deltat_cf)
                if not dataset:
                    continue

                nstations_selected = len(dataset)

                nsls_selected, trs_selected = zip(*dataset)

                for tr in trs_selected:
                    tr.meta = {'tabu': True}

                trs_debug.extend(trs + list(trs_selected))

                istations_selected = num.array(
                    [station_index[nsl] for nsl in nsls_selected],
                    dtype=int)
                arrays = [tr.ydata.astype(float) for tr in trs_selected]

                offsets = num.array([
                    int(round((tr.tmin - t0) / deltat_cf))
                    for tr in trs_selected
                ],
                                    dtype=num.int32)

                w = ifc.get_weights(nsls_selected)

                weights = num.ones((ngridpoints, nstations_selected))
                weights *= w[num.newaxis, :]
                weights *= ifc.weight

                shift_table = shift_tables[iifc]

                ok = num.isfinite(shift_table[:, istations_selected])
                bad = num.logical_not(ok)

                shifts = -num.round(shift_table[:, istations_selected] /
                                    deltat_cf).astype(num.int32)

                weights[bad] = 0.0
                shifts[bad] = num.max(shifts[ok])

                pdata.append((list(trs_selected), shift_table, ifc))
                parstack_params.append((arrays, offsets, shifts, weights))

            if config.stacking_blocksize is not None:
                ipstep = config.stacking_blocksize
                frames = None
            else:
                ipstep = lengthout
                frames = num.zeros((ngridpoints, lengthout))

            twall_start = time.time()
            frame_maxs = num.zeros(lengthout)
            frame_argmaxs = num.zeros(lengthout, dtype=int)
            ipmin = iwmin
            while ipmin < iwmin + lengthout:
                ipsize = min(ipstep, iwmin + lengthout - ipmin)
                if ipstep == lengthout:
                    frames_p = frames
                else:
                    frames_p = num.zeros((ngridpoints, ipsize))

                for (arrays, offsets, shifts, weights) in parstack_params:
                    frames_p, _ = parstack(arrays,
                                           offsets,
                                           shifts,
                                           weights,
                                           0,
                                           offsetout=ipmin,
                                           lengthout=ipsize,
                                           result=frames_p,
                                           nparallel=nparallel,
                                           impl='openmp')

                if config.sharpness_normalization:
                    frame_p_maxs = frames_p.max(axis=0)
                    frame_p_means = num.abs(frames_p).mean(axis=0)
                    frames_p *= (frame_p_maxs / frame_p_means)[num.newaxis, :]
                    frames_p *= norm_map[:, num.newaxis]

                if config.ifc_count_normalization:
                    frames_p *= 1.0 / len(ifcs)

                frame_maxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    frames_p.max(axis=0)
                frame_argmaxs[ipmin-iwmin:ipmin-iwmin+ipsize] = \
                    pargmax(frames_p)

                ipmin += ipstep
                del frames_p

            twall_end = time.time()

            logger.info('wallclock time for stacking: %g s' %
                        (twall_end - twall_start))

            tmin_frames = t0 + iwmin * deltat_cf

            tr_stackmax = trace.Trace('',
                                      'SMAX',
                                      '',
                                      '',
                                      tmin=tmin_frames,
                                      deltat=deltat_cf,
                                      ydata=frame_maxs)

            tr_stackmax.meta = {'tabu': True}

            trs_debug.append(tr_stackmax)

            if show_window_traces:
                trace.snuffle(trs_debug)

            ydata_window = tr_stackmax.chop(wmin, wmax,
                                            inplace=False).get_ydata()

            logger.info('CF stats: min %g, max %g, median %g' %
                        (num.min(ydata_window), num.max(ydata_window),
                         num.median(ydata_window)))
            # 17 is the full seiger station count; 4 is a mean noise baseline
            # subtracted per missing station
            detector_threshold_seiger = config.detector_threshold - (
                (17 - nstations_selected) * 4)
            if nstations_selected != 17:
                logger.info(
                    'Warning, station outage detected! Number of stations '
                    'operable: %s, threshold now: %s'
                    % (nstations_selected, detector_threshold_seiger))

            tpeaks, apeaks = list(
                zip(*[(tpeak, apeak) for (tpeak, apeak) in zip(
                    *tr_stackmax.peaks(detector_threshold_seiger, tpeaksearch))
                      if wmin <= tpeak and tpeak < wmax])) or ([], [])

            tr_stackmax_indx = tr_stackmax.copy(data=False)
            tr_stackmax_indx.set_ydata(frame_argmaxs.astype(num.int32))
            tr_stackmax_indx.set_location('i')

            for (tpeak, apeak) in zip(tpeaks, apeaks):

                iframe = int(round((tpeak - tmin_frames) / deltat_cf))
                imax = frame_argmaxs[iframe]

                latpeak, lonpeak, xpeak, ypeak, zpeak = \
                    grid.index_to_location(imax)

                idetection += 1

                detection = Detection(id='%06i' % idetection,
                                      time=tpeak,
                                      location=geo.Point(lat=float(latpeak),
                                                         lon=float(lonpeak),
                                                         x=float(xpeak),
                                                         y=float(ypeak),
                                                         z=float(zpeak)),
                                      ifm=float(apeak))

                if bark:
                    common.bark()

                logger.info('detection found: %s' % str(detection))

                f = open(detections_path, 'a')
                f.write(
                    '%06i %s %g %g %g %g %g %g\n' %
                    (idetection,
                     util.time_to_str(tpeak, format='%Y-%m-%d %H:%M:%S.6FRAC'),
                     apeak, latpeak, lonpeak, xpeak, ypeak, zpeak))

                f.close()

                ev = detection.get_event()
                f = open(events_path, 'a')
                model.dump_events([ev], stream=f)
                f.close()

                if show_detections or config.save_figures:
                    fmin = min(ifc.fmin for ifc in ifcs)
                    fmax = min(ifc.fmax for ifc in ifcs)

                    fn = figures_path_template % {
                        'id': util.tts(t0).replace(" ", "T"),
                        'format': 'png'
                    }

                    util.ensuredirs(fn)

                    if frames is not None:
                        frames_p = frames
                        tmin_frames_p = tmin_frames
                        iframe_p = iframe

                    else:
                        iframe_min = max(
                            0, int(round(iframe - tpeaksearch / deltat_cf)))
                        iframe_max = min(
                            lengthout - 1,
                            int(round(iframe + tpeaksearch / deltat_cf)))

                        ipsize = iframe_max - iframe_min + 1
                        frames_p = num.zeros((ngridpoints, ipsize))
                        tmin_frames_p = tmin_frames + iframe_min * deltat_cf
                        iframe_p = iframe - iframe_min

                        for (arrays, offsets, shifts, weights) \
                                in parstack_params:

                            frames_p, _ = parstack(arrays,
                                                   offsets,
                                                   shifts,
                                                   weights,
                                                   0,
                                                   offsetout=iwmin +
                                                   iframe_min,
                                                   lengthout=ipsize,
                                                   result=frames_p,
                                                   nparallel=nparallel,
                                                   impl='openmp')

                        if config.sharpness_normalization:
                            frame_p_maxs = frames_p.max(axis=0)
                            frame_p_means = num.abs(frames_p).mean(axis=0)
                            frames_p *= (frame_p_maxs /
                                         frame_p_means)[num.newaxis, :]
                            frames_p *= norm_map[:, num.newaxis]

                        if config.ifc_count_normalization:
                            frames_p *= 1.0 / len(ifcs)
                    try:
                        plot.plot_detection(grid,
                                            receivers,
                                            frames_p,
                                            tmin_frames_p,
                                            deltat_cf,
                                            imax,
                                            iframe_p,
                                            xpeak,
                                            ypeak,
                                            zpeak,
                                            tr_stackmax,
                                            tpeaks,
                                            apeaks,
                                            detector_threshold_seiger,
                                            wmin,
                                            wmax,
                                            pdata,
                                            trs,
                                            fmin,
                                            fmax,
                                            idetection,
                                            tpeaksearch,
                                            movie=show_movie,
                                            show=show_detections,
                                            save_filename=fn,
                                            event=ev)
                    except Exception:
                        # plotting failures should not abort the search
                        pass

                    del frames_p

                if stop_after_first:
                    return

            tr_stackmax.chop(wmin, wmax)
            tr_stackmax_indx.chop(wmin, wmax)
            if save_imax:
                io.save([tr_stackmax, tr_stackmax_indx], ifm_path_template)

            del frames
        logger.info('end processing time window group: %s - %s' %
                    (util.time_to_str(tmin_win), util.time_to_str(tmax_win)))
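    # gather the per-detection QuakeML files written during the run into
    # a single catalog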
    cat = Catalog()
    files = glob("%s/../figures/*qml*" % run_path)
    files.sort(key=os.path.getmtime)
    for file in files:
        cat_read = read_events(file)
        for event in cat_read:
            cat.append(event)
    cat.write("%s/../all_events_stacking.qml" % run_path, format="QUAKEML")
Example #30
0
def sdxtoquakeml(sdx_dir, out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="", catalog_version="",
                 agency_id="", author="", vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainities in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - cat_agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """

    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(
                      author=author, agency_id=agency_id,
                      version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake", creation_info=CreationInfo(
            author=author, agency_id=agency_id),
            event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([sdxstation[1].split()[0],
                                 float(sdxstation[2].split()[0]),
                                 float(sdxstation[3].split()[0]),
                                 float(sdxstation[4].split()[0])])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".
                             format(sdxorigin[1][0:10].replace(".", "-"),
                                    sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime(
                    "{:}T{:}".format(sdxorigin[16].split()[6][0:10]
                                     .replace(".", "-"),
                                     sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time), longitude=evt_lon,
                                latitude=evt_lat, depth=evt_depth*-1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time, author=author,
                                    agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)

        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime(
                    "{:}T{:}".format(sdxpick[1][0:10].replace(".", "-"),
                                     sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
                onset = sdxpick[8].split()[0]
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                else:
                    # guard against unexpected onset codes
                    pickonset = None
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                else:
                    # guard against unexpected polarity codes
                    pol = None
                weight = int(sdxpick[11].split()[0])
                creation_time = UTCDateTime(
                    "{:}T{:}".format(sdxpick[14].split()[6][0:10]
                                     .replace(".", "-"),
                                     sdxpick[14].split()[6][11:23]))
                pick = Pick(time=pick_time,
                            waveform_id=WaveformStreamID(
                                network_code=network, station_code=station,
                                location_code=location, channel_code=channel),
                            time_errors=time_uncertainties[weight],
                            evaluation_mode="manual",
                            evaluation_status="confirmed", onset=pickonset,
                            phase_hint=phase, polarity=pol,
                            method_id=ResourceIdentifier(id="SDX"),
                            creation_info=CreationInfo(
                                creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth, distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)

        # Close the file before any skip, so the handle is always released
        sdx_file.close()

        # Skip event if no picks
        if not found_pick:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

    cat.write(out_xml, format="QUAKEML")
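
A minimal usage sketch, not part of the original source; the directory, filenames and metadata below are placeholders.

# Hypothetical invocation of sdxtoquakeml; all arguments are placeholders.
sdxtoquakeml("sdx_events", "catalog.xml",
             time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
             catalog_description="Example catalog",
             catalog_version="1.0",
             agency_id="XX",
             author="analyst",
             vel_mod_id="smi:local/vmodel")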
Example #31
0
def _dbs_associator(start_time,
                    end_time,
                    moving_window,
                    tbl,
                    pair_n,
                    save_dir,
                    station_list,
                    consider_combination=False):

    if consider_combination:
        if platform.system() == 'Windows':
            Y2000_writer = open(save_dir + "\\" + "Y2000.phs", "w")
        else:
            Y2000_writer = open(save_dir + "/" + "Y2000.phs", "w")

        traceNmae_dic = dict()
        st = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
        et = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S.%f')
        total_t = et - st
        evid = 0
        tt = st
        pbar = tqdm(total=int(np.ceil(total_t.total_seconds() /
                                      moving_window)),
                    ncols=100)
        while tt < et:

            detections = tbl[(tbl.event_start_time >= tt) & (
                tbl.event_start_time < tt + timedelta(seconds=moving_window))]
            pbar.update()
            if len(detections) >= pair_n:
                evid += 1

                yr = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[0])
                mo = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[1])
                dy = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[2])
                hr = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[0])
                mi = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[1])
                sec = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[2])
                st_lat_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlat']), "Latitude")
                st_lon_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlon']), "Longitude")
                depth = 5.0
                mag = 0.0

                # QuakeML
                print(detections.iloc[0]['event_start_time'])

                if len(detections) / pair_n <= 2:
                    ch = pair_n
                else:
                    ch = int(len(detections) - pair_n)

                picks = []
                for ns in range(ch, len(detections) + 1):
                    comb = 0
                    for ind in list(combinations(detections.index, ns)):
                        comb += 1
                        selected_detections = detections.loc[ind, :]
                        sorted_detections = selected_detections.sort_values(
                            'p_arrival_time')

                        Y2000_writer.write(
                            "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                            % (int(yr), int(mo), int(dy), int(hr), int(mi),
                               float(sec), float(st_lat_DMS[0]),
                               str(st_lat_DMS[1]), float(st_lat_DMS[2]),
                               float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                               float(st_lon_DMS[2]), float(depth), float(mag)))

                        station_buffer = []
                        row_buffer = []
                        tr_names = []
                        tr_names2 = []
                        for _, row in sorted_detections.iterrows():

                            trace_name = row['traceID'] + '*' + row[
                                'station'] + '*' + str(row['event_start_time'])
                            p_unc = row['p_unc']
                            p_prob = row['p_prob']
                            s_unc = row['s_unc']
                            s_prob = row['s_prob']

                            if p_unc:
                                Pweihgt = _weighcalculator_prob(p_prob *
                                                                (1 - p_unc))
                            else:
                                Pweihgt = _weighcalculator_prob(p_prob)
                            try:
                                Pweihgt = int(Pweihgt)
                            except Exception:
                                Pweihgt = 4

                            if s_unc:
                                Sweihgt = _weighcalculator_prob(s_prob *
                                                                (1 - s_unc))
                            else:
                                Sweihgt = _weighcalculator_prob(s_prob)
                            try:
                                Sweihgt = int(Sweihgt)
                            except Exception:
                                Sweihgt = 4

                            station = "{:<5}".format(row['station'])
                            network = "{:<2}".format(row['network'])
                            try:
                                yrp = "{:>4}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[0])
                                mop = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[1])
                                dyp = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[2])
                                hrp = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[0])
                                mip = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[1])
                                sec_p = "{:>4}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[2])
                                p = Pick(time=UTCDateTime(
                                    row['p_arrival_time']),
                                         waveform_id=WaveformStreamID(
                                             network_code=network,
                                             station_code=station.rstrip()),
                                         phase_hint="P")
                                picks.append(p)
                            except Exception:
                                sec_p = None

                            try:
                                yrs = "{:>4}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[0])
                                mos = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[1])
                                dys = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[2])
                                hrs = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[0])
                                mis = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[1])
                                sec_s = "{:>4}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[2])
                                p = Pick(time=UTCDateTime(
                                    row['s_arrival_time']),
                                         waveform_id=WaveformStreamID(
                                             network_code=network,
                                             station_code=station.rstrip()),
                                         phase_hint="S")
                                picks.append(p)
                            except Exception:
                                sec_s = None

                            if row['station'] not in station_buffer:
                                tr_names.append(trace_name)
                                station_buffer.append(row['station'])
                                if sec_s:
                                    Y2000_writer.write(
                                        "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                        %
                                        (station, network, int(yrs), int(mos),
                                         int(dys), int(hrs), int(mis),
                                         float(0.0), float(sec_s), Sweight))
                                if sec_p:
                                    Y2000_writer.write(
                                        "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                        % (station, network, Pweight, int(yrp),
                                           int(mop), int(dyp), int(hrp),
                                           int(mip), float(sec_p), float(0.0)))
                            else:
                                tr_names2.append(trace_name)
                                if sec_s:
                                    row_buffer.append(
                                        "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                        %
                                        (station, network, int(yrs), int(mos),
                                         int(dys), int(hrs), int(mis), 0.0,
                                         float(sec_s), Sweight))
                                if sec_p:
                                    row_buffer.append(
                                        "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                        % (station, network, Pweight, int(yrp),
                                           int(mop), int(dyp), int(hrp),
                                           int(mip), float(sec_p), float(0.0)))
                        Y2000_writer.write("{:<62}".format(' ') + "%10d" %
                                           (evid) + '\n')

                traceNmae_dic[str(evid)] = tr_names

                if len(row_buffer) >= 2 * pair_n:
                    Y2000_writer.write(
                        "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                        % (int(yr), int(mo), int(dy), int(hr), int(mi),
                           float(sec), float(st_lat_DMS[0]), str(
                               st_lat_DMS[1]), float(st_lat_DMS[2]),
                           float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                           float(st_lon_DMS[2]), float(depth), float(mag)))
                    for rr in row_buffer:
                        Y2000_writer.write(rr)

                    Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                       '\n')
                    traceNmae_dic[str(evid)] = tr_names2

            tt += timedelta(seconds=moving_window)

    #   plt.scatter(LTTP, TTP, s=10, marker='o', c='b', alpha=0.4, label='P')
    #   plt.scatter(LTTS, TTS, s=10, marker='o', c='r', alpha=0.4, label='S')
    #   plt.legend('upper right')
    #   plt.show()

        print('The Number of Realizations: ' + str(evid) + '\n', flush=True)

        jj = json.dumps(traceNmae_dic)
        f = open(os.path.join(save_dir, "traceNmae_dic.json"), "w")
        f.write(jj)
        f.close()

    else:
        Y2000_writer = open(os.path.join(save_dir, "Y2000.phs"), "w")

        cat = Catalog()
        traceNmae_dic = dict()
        st = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
        et = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S.%f')
        total_t = et - st
        evid = 200000
        evidd = 100000
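        # evid numbers events written to the Y2000 phase file; evidd numbers
        # detection clusters too small to associate, which are recorded only
        # in the trace-name dictionary.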
        tt = st
        pbar = tqdm(total=int(np.ceil(total_t.total_seconds() /
                                      moving_window)))
        while tt < et:

            detections = tbl[(tbl.event_start_time >= tt) & (
                tbl.event_start_time < tt + timedelta(seconds=moving_window))]
            pbar.update()
            if len(detections) >= pair_n:

                yr = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[0])
                mo = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[1])
                dy = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[2])
                hr = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[0])
                mi = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[1])
                sec = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[2])
                st_lat_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlat']), "Latitude")
                st_lon_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlon']), "Longitude")
                depth = 5.0
                mag = 0.0

                Y2000_writer.write(
                    "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                    % (int(yr), int(mo), int(dy), int(hr), int(mi), float(sec),
                       float(st_lat_DMS[0]), str(st_lat_DMS[1]),
                       float(st_lat_DMS[2]), float(st_lon_DMS[0]),
                       str(st_lon_DMS[1]), float(
                           st_lon_DMS[2]), float(depth), float(mag)))
                event = Event()
                origin = Origin(time=UTCDateTime(
                    detections.iloc[0]['event_start_time']),
                                longitude=detections.iloc[0]['stlon'],
                                latitude=detections.iloc[0]['stlat'],
                                method="EqTransformer")
                event.origins.append(origin)

                station_buffer = []
                row_buffer = []
                sorted_detections = detections.sort_values('p_arrival_time')
                tr_names = []
                tr_names2 = []
                picks = []
                for _, row in sorted_detections.iterrows():
                    trace_name = row['traceID'] + '*' + row[
                        'station'] + '*' + str(row['event_start_time'])
                    p_unc = row['p_unc']
                    p_prob = row['p_prob']
                    s_unc = row['s_unc']
                    s_prob = row['s_prob']

                    if p_unc:
                        Pweight = _weighcalculator_prob(p_prob * (1 - p_unc))
                    else:
                        Pweight = _weighcalculator_prob(p_prob)
                    try:
                        Pweight = int(Pweight)
                    except Exception:
                        Pweight = 4

                    if s_unc:
                        Sweight = _weighcalculator_prob(s_prob * (1 - s_unc))
                    else:
                        Sweight = _weighcalculator_prob(s_prob)
                    try:
                        Sweight = int(Sweight)
                    except Exception:
                        Sweight = 4

                    station = "{:<5}".format(row['station'])
                    network = "{:<2}".format(row['network'])

                    try:
                        yrp = "{:>4}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [0])
                        mop = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [1])
                        dyp = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [2])
                        hrp = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [0])
                        mip = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [1])
                        sec_p = "{:>4}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [2])
                        p = Pick(time=UTCDateTime(row['p_arrival_time']),
                                 waveform_id=WaveformStreamID(
                                     network_code=network,
                                     station_code=station.rstrip()),
                                 phase_hint="P",
                                 method_id="EqTransformer")
                        picks.append(p)
                    except Exception:
                        sec_p = None

                    try:
                        yrs = "{:>4}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [0])
                        mos = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [1])
                        dys = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [2])
                        hrs = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [0])
                        mis = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [1])
                        sec_s = "{:>4}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [2])
                        p = Pick(time=UTCDateTime(row['s_arrival_time']),
                                 waveform_id=WaveformStreamID(
                                     network_code=network,
                                     station_code=station.rstrip()),
                                 phase_hint="S",
                                 method_id="EqTransformer")
                        picks.append(p)
                    except Exception:
                        sec_s = None

                    if row['station'] not in station_buffer:
                        tr_names.append(trace_name)
                        station_buffer.append(row['station'])
                        if sec_s:
                            Y2000_writer.write(
                                "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                % (station, network, int(yrs), int(mos),
                                   int(dys), int(hrs), int(mis), float(0.0),
                                   float(sec_s), Sweight))
                        if sec_p:
                            Y2000_writer.write(
                                "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                % (station, network, Pweight, int(yrp),
                                   int(mop), int(dyp), int(hrp), int(mip),
                                   float(sec_p), float(0.0)))
                    else:
                        tr_names2.append(trace_name)
                        if sec_s:
                            row_buffer.append(
                                "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                % (station, network, int(yrs), int(mos),
                                   int(dys), int(hrs), int(mis), 0.0,
                                   float(sec_s), Sweight))
                        if sec_p:
                            row_buffer.append(
                                "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                % (station, network, Pweight, int(yrp),
                                   int(mop), int(dyp), int(hrp), int(mip),
                                   float(sec_p), float(0.0)))
                event.picks = picks
                event.preferred_origin_id = event.origins[0].resource_id
                cat.append(event)

                evid += 1
                Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                   '\n')
                traceNmae_dic[str(evid)] = tr_names

                if len(row_buffer) >= 2 * pair_n:
                    Y2000_writer.write(
                        "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                        % (int(yr), int(mo), int(dy), int(hr), int(mi),
                           float(sec), float(st_lat_DMS[0]), str(
                               st_lat_DMS[1]), float(st_lat_DMS[2]),
                           float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                           float(st_lon_DMS[2]), float(depth), float(mag)))
                    for rr in row_buffer:
                        Y2000_writer.write(rr)

                    evid += 1
                    Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                       '\n')
                    traceNmae_dic[str(evid)] = tr_names2

                elif len(row_buffer) < pair_n and len(row_buffer) != 0:
                    evidd += 1
                    traceNmae_dic[str(evidd)] = tr_names2

            elif len(detections) < pair_n and len(detections) != 0:
                tr_names = []
                for _, row in detections.iterrows():
                    trace_name = row['traceID']
                    tr_names.append(trace_name)
                evidd += 1
                traceNmae_dic[str(evidd)] = tr_names

            tt += timedelta(seconds=moving_window)

        print('The Number of Associated Events: ' + str(evid - 200000) + '\n',
              flush=True)

        jj = json.dumps(traceNmae_dic)
        f = open(os.path.join(save_dir, "traceNmae_dic.json"), "w")

        f.write(jj)
        f.close()
        print(cat.__str__(print_all=True))
        cat.write(save_dir + "/associations.xml", format="QUAKEML")
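
The helper _decimalDegrees2DMS is not defined in this excerpt. Judging from the "%2.0f%1s%4.2f" fields written above, it appears to return a (whole degrees, hemisphere letter, decimal minutes) tuple; a minimal sketch under that assumption (the name and body below are hypothetical):

def _decimal_degrees_to_dms(value, coord_type):
    # Hypothetical reimplementation of the undefined helper: split a signed
    # decimal-degree value into (whole degrees, hemisphere letter, decimal
    # minutes), matching the "%2.0f%1s%4.2f" fields of the Y2000 summary line.
    if coord_type == "Latitude":
        hemisphere = "S" if value < 0 else "N"
    else:
        hemisphere = "W" if value < 0 else "E"
    degrees = int(abs(value))
    minutes = (abs(value) - degrees) * 60.0
    return degrees, hemisphere, minutes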
Example #32
def sdxtoquakeml(sdx_dir,
                 out_xml,
                 time_uncertainties=[0.1, 0.2, 0.5, 0.8, 1.5],
                 catalog_description="",
                 catalog_version="",
                 agency_id="",
                 author="",
                 vel_mod_id=""):
    """
    Convert SDX to QuakeML format using ObsPy inventory structure.
    SDX filename prefix is stored under event description.
    Input parameters:
        - sdx_dir: directory containing sdx files (required)
        - out_xml: Filename of quakeML file (required)
        - time_uncertainties: List containing time uncertainties in seconds
          for mapping from weights 0-4, respectively (optional)
        - catalog_description (optional)
        - catalog_version (optional)
        - agency_id (optional)
        - author (optional)
        - vel_mod_id (optional)
    Output:
        - xml catalog in QuakeML format.
    """

    # Prepare catalog
    cat = Catalog(description=catalog_description,
                  creation_info=CreationInfo(author=author,
                                             agency_id=agency_id,
                                             version=catalog_version))

    # Read in sdx files in directory, recursively
    files = glob.glob("{:}/**/*.sdx".format(sdx_dir), recursive=True)
    if len(files) == 0:
        print("No SDX files found in path. Exiting")
        return
    for sdx_file_path in files:
        print("Working on ", sdx_file_path.split('/')[-1])

        # Set-up event
        evt_id = (sdx_file_path.split('/')[-1])[:-4]
        event = Event(event_type="earthquake",
                      creation_info=CreationInfo(author=author,
                                                 agency_id=agency_id),
                      event_descriptions=[EventDescription(text=evt_id)])

        # Get station details, append to arrays
        sdx_file = open(sdx_file_path, "r")
        stations = []
        for line in sdx_file:
            if line.rstrip() == "station":
                sdxstation = list(islice(sdx_file, 5))
                stations.append([
                    sdxstation[1].split()[0],
                    float(sdxstation[2].split()[0]),
                    float(sdxstation[3].split()[0]),
                    float(sdxstation[4].split()[0])
                ])
        sdx_file.close()

        # Find origin details, append to origin object
        sdx_file = open(sdx_file_path, "r")
        found_origin = False
        for line in sdx_file:
            if line.rstrip() == "origin":
                found_origin = True
                sdxorigin = list(islice(sdx_file, 17))
                orig_time = ("{:}T{:}".format(
                    sdxorigin[1][0:10].replace(".", "-"), sdxorigin[1][11:23]))
                evt_lat = float(sdxorigin[2].split()[0])
                evt_lon = float(sdxorigin[3].split()[0])
                evt_depth = float(sdxorigin[4].split()[0])
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxorigin[16].split()[6][0:10].replace(".", "-"),
                    sdxorigin[16].split()[6][11:23]))
                num_arrivals = int(sdxorigin[12].split()[0])
                num_arrivals_p = (int(sdxorigin[12].split()[0]) -
                                  int(sdxorigin[12].split()[1]))
                min_dist = float(sdxorigin[12].split()[9])
                max_dist = float(sdxorigin[12].split()[10])
                med_dist = float(sdxorigin[12].split()[11])
                max_az_gap = float(sdxorigin[12].split()[6])

                origin = Origin(time=UTCDateTime(orig_time),
                                longitude=evt_lon,
                                latitude=evt_lat,
                                depth=evt_depth * -1000,
                                earth_model_id=vel_mod_id,
                                origin_type="hypocenter",
                                evaluation_mode="manual",
                                evaluation_status="confirmed",
                                method_id=ResourceIdentifier(id="SDX_hypo71"),
                                creation_info=CreationInfo(
                                    creation_time=creation_time,
                                    author=author,
                                    agency_id=agency_id),
                                quality=OriginQuality(
                                    associated_phase_count=num_arrivals,
                                    used_phase_count=num_arrivals,
                                    associated_station_count=num_arrivals_p,
                                    used_station_count=num_arrivals_p,
                                    azimuthal_gap=max_az_gap,
                                    minimum_distance=min_dist,
                                    maximum_distance=max_dist,
                                    median_distance=med_dist))
                event.origins.append(origin)

        sdx_file.close()

        # Skip event if no computed origin
        if found_origin is False:
            print("No origin found ... skipping event")
            continue

        # Get pick details, append to pick and arrival objects
        sdx_file = open(sdx_file_path, "r")
        found_pick = False
        for line in sdx_file:
            if line.rstrip() == "pick":
                found_pick = True
                sdxpick = list(islice(sdx_file, 15))
                pick_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[1][0:10].replace(".", "-"), sdxpick[1][11:23]))
                network = sdxpick[2].split()[0]
                station = sdxpick[2].split()[1]
                location = sdxpick[2].split()[2]
                if "NOT_SET" in location:
                    location = ""
                channel = sdxpick[2].split()[3]
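                # Map SDX integer codes to QuakeML onset and polarity terms.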
                onset = sdxpick[8].split()[0]
                if onset == "0":
                    pickonset = "emergent"
                elif onset == "1":
                    pickonset = "impulsive"
                elif onset == "2":
                    pickonset = "questionable"
                phase = sdxpick[9].split()[0]
                polarity = sdxpick[10].split()[0]
                if polarity == "0":
                    pol = "positive"
                elif polarity == "1":
                    pol = "negative"
                elif polarity == "2":
                    pol = "undecidable"
                weight = int(sdxpick[11].split()[0])
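                # The integer weight (0-4) is mapped to a time uncertainty in
                # seconds via the user-supplied time_uncertainties list.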
                creation_time = UTCDateTime("{:}T{:}".format(
                    sdxpick[14].split()[6][0:10].replace(".", "-"),
                    sdxpick[14].split()[6][11:23]))
                pick = Pick(
                    time=pick_time,
                    waveform_id=WaveformStreamID(network_code=network,
                                                 station_code=station,
                                                 location_code=location,
                                                 channel_code=channel),
                    time_errors=time_uncertainties[weight],
                    evaluation_mode="manual",
                    evaluation_status="confirmed",
                    onset=pickonset,
                    phase_hint=phase,
                    polarity=pol,
                    method_id=ResourceIdentifier(id="SDX"),
                    creation_info=CreationInfo(creation_time=creation_time))
                event.picks.append(pick)

                # Compute azimuth, distance, append to arrival object
                for i in range(0, len(stations)):
                    if stations[i][0] == station:
                        azimuth = (gps2dist_azimuth(evt_lat, evt_lon,
                                                    stations[i][1],
                                                    stations[i][2])[1])
                        dist_deg = locations2degrees(evt_lat, evt_lon,
                                                     stations[i][1],
                                                     stations[i][2])
                        arrival = Arrival(phase=phase,
                                          pick_id=pick.resource_id,
                                          azimuth=azimuth,
                                          distance=dist_deg,
                                          time_weight=1.00)
                        event.origins[0].arrivals.append(arrival)

        # Skip event if no picks
        if found_pick is False:
            print("No picks found ... skipping event")
            continue

        # Set preferred origin and append event to catalogue
        event.preferred_origin_id = event.origins[0].resource_id
        cat.events.append(event)

        sdx_file.close()

    cat.write(out_xml, format="QUAKEML")
Example #33
def plot_detection(grid,
                   receivers,
                   frames,
                   tmin_frames,
                   deltat_cf,
                   imax,
                   iframe,
                   xpeak,
                   ypeak,
                   zpeak,
                   tr_stackmax,
                   tpeaks,
                   apeaks,
                   detector_threshold,
                   wmin,
                   wmax,
                   pdata,
                   trs_raw,
                   fmin,
                   fmax,
                   idetection,
                   tpeaksearch,
                   movie=False,
                   save_filename=None,
                   show=True,
                   event=None):
    from matplotlib import pyplot as plt
    from matplotlib import cm
    from matplotlib.animation import FuncAnimation

    nsls = set(tr.nslc_id[:3] for tr in trs_raw)
    receivers_on = [r for r in receivers if r.codes in nsls]
    receivers_off = [r for r in receivers if r.codes not in nsls]

    distances = grid.distances(receivers)

    plot.mpl_init(fontsize=9)

    fig = plt.figure(figsize=plot.mpl_papersize('a4', 'landscape'))

    axes = plt.subplot2grid((2, 3), (0, 2), aspect=1.0)
    plot.mpl_labelspace(axes)

    axes2 = plt.subplot2grid((2, 3), (1, 2))
    plot.mpl_labelspace(axes2)

    axes3 = plt.subplot2grid((2, 3), (0, 1), rowspan=2)
    axes4 = plt.subplot2grid((2, 3), (0, 0), rowspan=2)
    if grid.distance_max() > km:
        dist_units = km
        axes.set_xlabel('Easting [km]')
        axes.set_ylabel('Northing [km]')
        axes4.set_ylabel('Distance [km]')
    else:
        dist_units = 1.0
        axes.set_xlabel('Easting [m]')
        axes.set_ylabel('Northing [m]')
        axes4.set_ylabel('Distance [m]')

    axes.locator_params(nbins=6, tight=True)

    axes2.set_xlabel('Time [s]')
    axes2.set_ylabel('Detector level')

    axes3.set_xlabel('Time [s]')
    for el in axes3.get_yticklabels():
        el.set_visible(False)

    axes4.set_xlabel('Time [s]')

    tpeak_current = tmin_frames + deltat_cf * iframe
    t0 = tpeak_current
    tduration = 2.0 * tpeaksearch

    axes2.axvspan(tr_stackmax.tmin - t0,
                  wmin - t0,
                  color=plot.mpl_color('aluminium2'))

    axes2.axvspan(wmax - t0,
                  tr_stackmax.tmax - t0,
                  color=plot.mpl_color('aluminium2'))

    axes2.axvspan(tpeak_current - 0.5 * tduration - t0,
                  tpeak_current + 0.5 * tduration - t0,
                  color=plot.mpl_color('scarletred2'),
                  alpha=0.3,
                  lw=0.)

    axes2.set_xlim(tr_stackmax.tmin - t0, tr_stackmax.tmax - t0)

    axes2.axhline(detector_threshold,
                  color=plot.mpl_color('aluminium6'),
                  lw=2.)

    t = tr_stackmax.get_xdata()
    amp = tr_stackmax.get_ydata()
    axes2.plot(t - t0, amp, color=plot.mpl_color('scarletred2'), lw=1.)

    for tpeak, apeak in zip(tpeaks, apeaks):
        axes2.plot(tpeak - t0, apeak, '*', ms=20., mfc='white', mec='black')

    station_index = dict((rec.codes, i) for (i, rec) in enumerate(receivers))

    dists_all = []
    amps = []
    shifts = []
    pdata2 = []
    for trs, shift_table, shifter in pdata:
        trs = [tr.copy() for tr in trs]
        dists = []
        for tr in trs:
            istation = station_index[tr.nslc_id[:3]]
            shift = shift_table[imax, istation]
            tr2 = tr.chop(tpeak_current - 0.5 * tduration + shift,
                          tpeak_current + 0.5 * tduration + shift,
                          inplace=False)

            dists.append(distances[imax, istation])
            amp = tr2.get_ydata() * shifter.weight
            amps.append(num.max(num.abs(amp)))
            shifts.append(shift)

        pdata2.append((trs, dists, shift_table, shifter))
        dists_all.extend(dists)

    dist_min = min(dists_all)
    dist_max = max(dists_all)

    shift_min = min(shifts)
    shift_max = max(shifts)

    amp_max = max(amps)

    scalefactor = (dist_max - dist_min) / len(trs) * 0.5

    axes3.set_xlim(-0.5 * tduration + shift_min, 0.5 * tduration + shift_max)
    axes3.set_ylim((dist_min - scalefactor) / dist_units,
                   (dist_max + scalefactor) / dist_units)

    axes4.set_xlim(-0.5 * tduration + shift_min, 0.5 * tduration + shift_max)
    axes4.set_ylim((dist_min - scalefactor) / dist_units,
                   (dist_max + scalefactor) / dist_units)

    axes3.axvline(0., color=plot.mpl_color('aluminium3'), lw=2.)

    nsl_have = set()
    phase_markers = []
    picks = []
    for ishifter, (trs, dists, shift_table, shifter) in enumerate(pdata2):
        color = plot.mpl_graph_color(ishifter)

        for tr, dist in zip(trs, dists):
            tmint = tr.tmin
            tr = tr.chop(tpeak_current - 0.5 * tduration + shift_min,
                         tpeak_current + 0.5 * tduration + shift_max,
                         inplace=False)

            nsl = tr.nslc_id[:3]
            istation = station_index[nsl]
            shift = shift_table[imax, istation]

            phase_markers.append(
                PhaseMarker([(nsl[0], nsl[1], "", nsl[2])],
                            tmin=tmint + shift,
                            tmax=tmint + shift,
                            phasename=shifter.name,
                            event_time=t0,
                            event_hash=idetection))
            p = Pick(time=UTCDateTime(tmint + shift),
                     waveform_id=WaveformStreamID(network_code=nsl[0],
                                                  station_code=nsl[1]),
                     phase_hint=shifter.name,
                     method_id="lassie")
            picks.append(p)
            axes3.plot(shift,
                       dist / dist_units,
                       '|',
                       mew=2,
                       mec=color,
                       ms=10,
                       zorder=2)

            t = tr.get_xdata()
            amp = tr.get_ydata() * shifter.weight
            amp /= amp_max
            axes3.plot(
                t - t0,
                (dist + scalefactor * amp + ishifter * scalefactor * 0.1) /
                dist_units,
                color=color,
                zorder=1)

            if nsl not in nsl_have:
                axes3.annotate('.'.join(nsl),
                               xy=(t[0] - t0, dist / dist_units),
                               xytext=(10., 0.),
                               textcoords='offset points',
                               verticalalignment='top')

                nsl_have.add(nsl)

        for tr in trs_raw:
            istation = station_index[tr.nslc_id[:3]]
            dist = distances[imax, istation]
            shift = shift_table[imax, istation]

            tr = tr.copy()

            tr.highpass(4, fmin, demean=True)
            tr.lowpass(4, fmax, demean=False)

            tr.chop(tpeak_current - 0.5 * tduration + shift_min,
                    tpeak_current + 0.5 * tduration + shift_max)

            t = tr.get_xdata()
            amp = tr.get_ydata().astype(float)
            amp = amp / num.max(num.abs(amp))

            axes4.plot(t - t0, (dist + scalefactor * amp) / dist_units,
                       color='black',
                       alpha=0.5,
                       zorder=1)

            axes4.plot(shift,
                       dist / dist_units,
                       '|',
                       mew=2,
                       mec=color,
                       ms=10,
                       zorder=2)
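    # Write Pyrocko phase markers (and, below, an ObsPy QuakeML catalog)
    # alongside the figure; the [:-4] slice assumes save_filename carries a
    # four-character extension such as ".png".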
    PhaseMarker.save_markers(phase_markers,
                             "%s.pym" % (save_filename[:-4]),
                             fdigits=3)
    event_obspy = Event()
    origin = Origin(time=UTCDateTime(t0),
                    longitude=event.lon,
                    latitude=event.lat,
                    method="lassie")
    event_obspy.origins.append(origin)
    event_obspy.picks = picks
    cat = Catalog()
    cat.append(event_obspy)
    cat.write(save_filename[:-4] + ".qml", format="QUAKEML")
    nframes = frames.shape[1]

    iframe_min = max(0, int(round(iframe - 0.5 * tduration / deltat_cf)))
    iframe_max = min(nframes - 1,
                     int(round(iframe + 0.5 * tduration / deltat_cf)))

    amax = frames[imax, iframe]

    axes.set_xlim(grid.ymin / dist_units, grid.ymax / dist_units)
    axes.set_ylim(grid.xmin / dist_units, grid.xmax / dist_units)

    cmap = cm.YlOrBr
    system = ('ne', grid.lat, grid.lon)

    static_artists = []
    static_artists.extend(
        plot_receivers(axes,
                       receivers_on,
                       system=system,
                       units=dist_units,
                       style=dict(mfc='black', ms=5.0)))

    static_artists.extend(
        plot_receivers(axes,
                       receivers_off,
                       system=system,
                       units=dist_units,
                       style=dict(mfc='none', ms=5.0)))

    static_artists.extend(
        axes.plot(ypeak / dist_units,
                  xpeak / dist_units,
                  '*',
                  ms=20.,
                  mec='black',
                  mfc='white'))

    static_artists.append(
        fig.suptitle('%06i - %s' % (idetection, util.time_to_str(t0))))

    frame_artists = []
    progress_artists = []

    def update(iframe):
        if iframe is not None:
            frame = frames[:, iframe]
            if not progress_artists:
                progress_artists[:] = [
                    axes2.axvline(tmin_frames - t0 + deltat_cf * iframe,
                                  color=plot.mpl_color('scarletred3'),
                                  alpha=0.5,
                                  lw=2.)
                ]

            else:
                progress_artists[0].set_xdata(tmin_frames - t0 +
                                              deltat_cf * iframe)

        else:
            frame = num.max(frames[:, iframe_min:iframe_max + 1], axis=1)

        frame_artists[:] = grid.plot(axes,
                                     frame,
                                     amin=0.0,
                                     amax=amax,
                                     cmap=cmap,
                                     system=system,
                                     artists=frame_artists,
                                     units=dist_units,
                                     shading='gouraud')

        return frame_artists + progress_artists + static_artists

    if movie:
        ani = FuncAnimation(
            fig,
            update,
            frames=list(range(iframe_min, iframe_max + 1))[::10] + [None],
            interval=20.,
            repeat=False,
            blit=True)

    else:
        ani = None
        update(None)

    if save_filename:
        fig.savefig(save_filename)

    if show:
        plt.show()
    else:
        plt.close()

    del ani
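
The QuakeML catalog written above can be spot-checked by reading it back; a short sketch, assuming the hypothetical save_filename "detection.png" was passed in:

from obspy import read_events

# Read back the QuakeML catalog written next to the figure and print a
# one-line summary of its single event.
cat_check = read_events("detection.qml")
print(cat_check[0].origins[0].time, len(cat_check[0].picks))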
Example #34
all_picks = []
for event in db_catalog:
    stations = ['KCT', 'KMPB', 'KCR', 'KHMB', 'KCS', 'KCO', 'KMR', 'KPP']
    event.picks = [pick for pick in event.picks if pick.waveform_id.station_code in stations]
    all_picks += [(pick.waveform_id.station_code, pick.waveform_id.channel_code) for pick in event.picks]

new_catalog = Catalog()
for event in db_catalog:
    event.picks = [pick for pick in event.picks if (pick.waveform_id.station_code, pick.waveform_id.channel_code) in all_picks]
    new_catalog.append(event)

# count number of picks for each event in catalog
n_picks = [len(event.picks) for event in new_catalog]

# Save catalog
new_catalog.write("DB_Event_Catalog.xml", format="QUAKEML")

# =============================================================================
# ## 2. LOAD CATALOG IF AVAILABLE 
# cat=read_events("DB_Event_Catalog.xml")    
# =============================================================================

## 1. IMPORT WAVEFORMS FOR TEMPLATE 
print("IMPORTING AND PREPROCESSING WAVEFORM STREAMS.")
jday = [event.origins[0].time.julday for event in new_catalog]
yrs = [event.origins[0].time.year for event in new_catalog]
yrjdays = zip(yrs, jday)
    
# Handle directory level stuff
# if dirs already exist ask to overwrite
if os.path.isdir('Template_plots/'):
Example #35
def iris2quakeml(url, output_folder=None):
    if not "/spudservice/" in url:
        url = url.replace("/spud/", "/spudservice/")
        if url.endswith("/"):
            url += "quakeml"
        else:
            url += "/quakeml"
    print "Downloading %s..." % url
    r = requests.get(url)
    if r.status_code != 200:
        msg = "Error Downloading file!"
        raise Exception(msg)

    # For some reason the quakeml file is escaped HTML.
    h = HTMLParser.HTMLParser()

    data = h.unescape(r.content)

    # Replace some XML tags.
    data = data.replace("long-period body waves", "body waves")
    data = data.replace("intermediate-period surface waves", "surface waves")
    data = data.replace("long-period mantle waves", "mantle waves")

    data = data.replace("<html><body><pre>", "")
    data = data.replace("</pre></body></html>", "")

    # Change the resource identifiers. Colons are not allowed in QuakeML.
    pattern = r"(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})\.(\d{6})"
    data = re.sub(pattern, r"\1-\2-\3T\4-\5-\6.\7", data)

    data = StringIO(data)

    try:
        cat = readEvents(data)
    except:
        msg = "Could not read downloaded event data"
        raise ValueError(msg)

    # Parse the event, and use only one origin, magnitude and focal mechanism.
    # Only the first event is used. Should not be a problem for the chosen
    # global cmt application.
    ev = cat[0]

    if ev.preferred_origin():
        ev.origins = [ev.preferred_origin()]
    else:
        ev.origins = [ev.origins[0]]
    if ev.preferred_focal_mechanism():
        ev.focal_mechanisms = [ev.preferred_focal_mechanism()]
    else:
        ev.focal_mechanisms = [ev.focal_mechanisms[0]]

    try:
        mt = ev.focal_mechanisms[0].moment_tensor
    except:
        msg = "No moment tensor found in file."
        raise ValueError(msg)
    seismic_moment_in_dyn_cm = mt.scalar_moment
    if not seismic_moment_in_dyn_cm:
        msg = "No scalar moment found in file."
        raise ValueError(msg)

    # Create a new magnitude object with the moment magnitude calculated from
    # the given seismic moment.
    mag = Magnitude()
    mag.magnitude_type = "Mw"
    mag.origin_id = ev.origins[0].resource_id
    # This is the formula given on the GCMT homepage.
    mag.mag = (2.0 / 3.0) * (math.log10(seismic_moment_in_dyn_cm) - 16.1)
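    # Worked example: for M0 = 1e26 dyne*cm, Mw = (2 / 3) * (26.0 - 16.1) = 6.6.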
    mag.resource_id = ev.origins[0].resource_id.resource_id.replace("Origin",
        "Magnitude")
    ev.magnitudes = [mag]
    ev.preferred_magnitude_id = mag.resource_id

    # Convert the depth to meters.
    org = ev.origins[0]
    org.depth *= 1000.0
    if org.depth_errors.uncertainty:
        org.depth_errors.uncertainty *= 1000.0

    # Ugly asserts -- this is just a simple script.
    assert(len(ev.magnitudes) == 1)
    assert(len(ev.origins) == 1)
    assert(len(ev.focal_mechanisms) == 1)

    # All values given in the QuakeML file are given in dyne * cm. Convert them
    # to N * m.
    for key, value in mt.tensor.iteritems():
        if key.startswith("m_") and len(key) == 4:
            mt.tensor[key] /= 1E7
        if key.endswith("_errors") and hasattr(value, "uncertainty"):
            mt.tensor[key].uncertainty /= 1E7
    mt.scalar_moment /= 1E7
    if mt.scalar_moment_errors.uncertainty:
        mt.scalar_moment_errors.uncertainty /= 1E7
    p_axes = ev.focal_mechanisms[0].principal_axes
    for ax in [p_axes.t_axis, p_axes.p_axis, p_axes.n_axis]:
        if ax is None or not ax.length:
            continue
        ax.length /= 1E7

    # Check if it has a source time function
    stf = mt.source_time_function
    if stf:
        if stf.type != "triangle":
            msg = ("Source time function type '%s' not yet mapped. Please "
                "contact the developers.") % stf.type
            raise NotImplementedError(msg)
        if not stf.duration:
            if not stf.decay_time:
                msg = "Not known how to derive duration without decay time."
                raise NotImplementedError(msg)
            # Approximate the duration for a triangular STF.
            stf.duration = 2 * stf.decay_time

    # Get the flinn_engdahl region for a nice name.
    fe = FlinnEngdahl()
    region_name = fe.get_region(ev.origins[0].longitude,
        ev.origins[0].latitude)
    region_name = region_name.replace(" ", "_")
    event_name = "GCMT_event_%s_Mag_%.1f_%s-%s-%s-%s-%s.xml" % \
        (region_name, ev.magnitudes[0].mag, ev.origins[0].time.year,
        ev.origins[0].time.month, ev.origins[0].time.day,
        ev.origins[0].time.hour, ev.origins[0].time.minute)

    # Check if the ids of the magnitude and origin contain the corresponding
    # tag. Otherwise replace them.
    ev.origins[0].resource_id = ev.origins[0].resource_id.resource_id.replace(
        "quakeml/gcmtid", "quakeml/origin/gcmtid")
    ev.magnitudes[0].resource_id = \
        ev.magnitudes[0].resource_id.resource_id.replace(
            "quakeml/gcmtid", "quakeml/magnitude/gcmtid")

    # Fix up the moment tensor resource_ids.
    mt.derived_origin_id = ev.origins[0].resource_id
    mt.resource_id = mt.resource_id.resource_id.replace("focalmechanism",
        "momenttensor")

    cat = Catalog()
    cat.resource_id = ev.origins[0].resource_id.resource_id.replace("origin",
        "event_parameters")
    cat.append(ev)
    if output_folder:
        event_name = os.path.join(output_folder, event_name)
    cat.write(event_name, format="quakeml", validate=True)
    print "Written file", event_name
Example #36
    def test_read_write(self):
        """
        Function to test the read and write capabilities of sfile_util.
        """
        import os
        from obspy.core.event import Catalog
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.core.event import read_events
        else:
            from obspy.core.event import readEvents as read_events

        # Set-up a test event
        test_event = full_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Write the catalog
        test_cat.write("Test_catalog.xml", format='QUAKEML')
        # Read and check
        read_cat = read_events("Test_catalog.xml")
        os.remove("Test_catalog.xml")
        self.assertEqual(read_cat[0].resource_id, test_cat[0].resource_id)
        for i in range(len(read_cat[0].picks)):
            for key in read_cat[0].picks[i].keys():
                # Ignore backazimuth errors and horizontal_slowness_errors
                if key in ['backazimuth_errors', 'horizontal_slowness_errors']:
                    continue
                self.assertEqual(read_cat[0].picks[i][key],
                                 test_cat[0].picks[i][key])
        self.assertEqual(read_cat[0].origins[0].resource_id,
                         test_cat[0].origins[0].resource_id)
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not part of the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        # Check magnitudes
        self.assertEqual(read_cat[0].magnitudes, test_cat[0].magnitudes)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # Check local magnitude amplitude
        self.assertEqual(read_cat[0].amplitudes[0].resource_id,
                         test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].unit,
                         test_cat[0].amplitudes[0].unit)
        self.assertEqual(read_cat[0].amplitudes[0].generic_amplitude,
                         test_cat[0].amplitudes[0].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[0].pick_id,
                         test_cat[0].amplitudes[0].pick_id)
        self.assertEqual(read_cat[0].amplitudes[0].waveform_id,
                         test_cat[0].amplitudes[0].waveform_id)
        # Check coda magnitude pick
        self.assertEqual(read_cat[0].amplitudes[1].resource_id,
                         test_cat[0].amplitudes[1].resource_id)
        self.assertEqual(read_cat[0].amplitudes[1].type,
                         test_cat[0].amplitudes[1].type)
        self.assertEqual(read_cat[0].amplitudes[1].unit,
                         test_cat[0].amplitudes[1].unit)
        self.assertEqual(read_cat[0].amplitudes[1].generic_amplitude,
                         test_cat[0].amplitudes[1].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[1].pick_id,
                         test_cat[0].amplitudes[1].pick_id)
        self.assertEqual(read_cat[0].amplitudes[1].waveform_id,
                         test_cat[0].amplitudes[1].waveform_id)
        self.assertEqual(read_cat[0].amplitudes[1].magnitude_hint,
                         test_cat[0].amplitudes[1].magnitude_hint)
        self.assertEqual(read_cat[0].amplitudes[1].snr,
                         test_cat[0].amplitudes[1].snr)
        self.assertEqual(read_cat[0].amplitudes[1].category,
                         test_cat[0].amplitudes[1].category)

        # Check the read-write s-file functionality
        sfile = eventtosfile(test_cat[0],
                             userID='TEST',
                             evtype='L',
                             outdir='.',
                             wavefiles='test',
                             explosion=True,
                             overwrite=True)
        del read_cat
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        read_cat += readpicks(sfile)
        os.remove(sfile)
        for i in range(len(read_cat[0].picks)):
            self.assertEqual(read_cat[0].picks[i].time,
                             test_cat[0].picks[i].time)
            self.assertEqual(read_cat[0].picks[i].backazimuth,
                             test_cat[0].picks[i].backazimuth)
            self.assertEqual(read_cat[0].picks[i].onset,
                             test_cat[0].picks[i].onset)
            self.assertEqual(read_cat[0].picks[i].phase_hint,
                             test_cat[0].picks[i].phase_hint)
            self.assertEqual(read_cat[0].picks[i].polarity,
                             test_cat[0].picks[i].polarity)
            self.assertEqual(read_cat[0].picks[i].waveform_id.station_code,
                             test_cat[0].picks[i].waveform_id.station_code)
            self.assertEqual(read_cat[0].picks[i].waveform_id.channel_code[-1],
                             test_cat[0].picks[i].waveform_id.channel_code[-1])
        # assert read_cat[0].origins[0].resource_id ==\
        #     test_cat[0].origins[0].resource_id
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not part of the QuakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(read_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(read_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(read_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(read_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(read_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(read_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # assert read_cat[0].amplitudes[0].resource_id ==\
        #     test_cat[0].amplitudes[0].resource_id
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
        # Check coda magnitude pick
        # Resource ids get overwritten because you can't have two the same in
        # memory
        # self.assertEqual(read_cat[0].amplitudes[1].resource_id,
        #                  test_cat[0].amplitudes[1].resource_id)
        self.assertEqual(read_cat[0].amplitudes[1].type,
                         test_cat[0].amplitudes[1].type)
        self.assertEqual(read_cat[0].amplitudes[1].unit,
                         test_cat[0].amplitudes[1].unit)
        self.assertEqual(read_cat[0].amplitudes[1].generic_amplitude,
                         test_cat[0].amplitudes[1].generic_amplitude)
        # Resource ids get overwritten because you can't have two the same in
        # memory
        # self.assertEqual(read_cat[0].amplitudes[1].pick_id,
        #                  test_cat[0].amplitudes[1].pick_id)
        self.assertEqual(read_cat[0].amplitudes[1].waveform_id.station_code,
                         test_cat[0].amplitudes[1].waveform_id.station_code)
        self.assertEqual(
            read_cat[0].amplitudes[1].waveform_id.channel_code,
            test_cat[0].amplitudes[1].waveform_id.channel_code[0] +
            test_cat[0].amplitudes[1].waveform_id.channel_code[-1])
        self.assertEqual(read_cat[0].amplitudes[1].magnitude_hint,
                         test_cat[0].amplitudes[1].magnitude_hint)
        # snr is not supported in s-file
        # self.assertEqual(read_cat[0].amplitudes[1].snr,
        #                  test_cat[0].amplitudes[1].snr)
        self.assertEqual(read_cat[0].amplitudes[1].category,
                         test_cat[0].amplitudes[1].category)
        del read_cat

        # Test a deliberate fail
        test_cat.append(full_test_event())
        with self.assertRaises(IOError):
            # Raises error due to multiple events in catalog
            sfile = eventtosfile(test_cat,
                                 userID='TEST',
                                 evtype='L',
                                 outdir='.',
                                 wavefiles='test',
                                 explosion=True,
                                 overwrite=True)
        with self.assertRaises(IOError):
            # Raises error due to too long userID
            sfile = eventtosfile(test_cat[0],
                                 userID='TESTICLE',
                                 evtype='L',
                                 outdir='.',
                                 wavefiles='test',
                                 explosion=True,
                                 overwrite=True)
        with self.assertRaises(IOError):
            # Raises error due to unrecognised event type
            sfile = eventtosfile(test_cat[0],
                                 userID='TEST',
                                 evtype='U',
                                 outdir='.',
                                 wavefiles='test',
                                 explosion=True,
                                 overwrite=True)
        with self.assertRaises(IOError):
            # Raises error due to no output directory
            sfile = eventtosfile(test_cat[0],
                                 userID='TEST',
                                 evtype='L',
                                 outdir='albatross',
                                 wavefiles='test',
                                 explosion=True,
                                 overwrite=True)
        with self.assertRaises(IOError):
            # Raises error due to incorrect wavefile formatting
            sfile = eventtosfile(test_cat[0],
                                 userID='TEST',
                                 evtype='L',
                                 outdir='.',
                                 wavefiles=1234,
                                 explosion=True,
                                 overwrite=True)
        with self.assertRaises(IndexError):
            invalid_origin = test_cat[0].copy()
            invalid_origin.origins = []
            sfile = eventtosfile(invalid_origin,
                                 userID='TEST',
                                 evtype='L',
                                 outdir='.',
                                 wavefiles='test',
                                 explosion=True,
                                 overwrite=True)
        with self.assertRaises(ValueError):
            invalid_origin = test_cat[0].copy()
            invalid_origin.origins[0].time = None
            sfile = eventtosfile(invalid_origin,
                                 userID='TEST',
                                 evtype='L',
                                 outdir='.',
                                 wavefiles='test',
                                 explosion=True,
                                 overwrite=True)
        # Write a near empty origin
        valid_origin = test_cat[0].copy()
        valid_origin.origins[0].latitude = None
        valid_origin.origins[0].longitude = None
        valid_origin.origins[0].depth = None
        sfile = eventtosfile(valid_origin,
                             userID='TEST',
                             evtype='L',
                             outdir='.',
                             wavefiles='test',
                             explosion=True,
                             overwrite=True)
        self.assertTrue(os.path.isfile(sfile))
        os.remove(sfile)
Example #37
def test_read_write():
    """
    Function to test the read and write capabilities of Sfile_util.
    """
    import os
    from obspy.core.event import Pick, WaveformStreamID, Arrival, Amplitude
    from obspy.core.event import Catalog, Event, Origin, Magnitude
    from obspy.core.event import EventDescription, CreationInfo
    import obspy
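    # ObsPy 1.0 renamed readEvents to read_events, hence the version check.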
    if int(obspy.__version__.split('.')[0]) >= 1:
        from obspy.core.event import read_events
    else:
        from obspy.core.event import readEvents as read_events
    from obspy import UTCDateTime

    # Set-up a test event
    test_event = Event()
    test_event.origins.append(Origin())
    test_event.origins[0].time = UTCDateTime("2012-03-26") + 1
    test_event.event_descriptions.append(EventDescription())
    test_event.event_descriptions[0].text = 'LE'
    test_event.origins[0].latitude = 45.0
    test_event.origins[0].longitude = 25.0
    test_event.origins[0].depth = 15.0
    test_event.creation_info = CreationInfo(agency_id='TES')
    test_event.origins[0].time_errors['Time_Residual_RMS'] = 0.01
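    # Time_Residual_RMS is a non-QuakeML extra key; the QuakeML round-trip
    # below is expected to drop it (see the notes beside the origin asserts).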
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[0].mag = 0.1
    test_event.magnitudes[0].magnitude_type = 'ML'
    test_event.magnitudes[0].creation_info = CreationInfo('TES')
    test_event.magnitudes[0].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[1].mag = 0.5
    test_event.magnitudes[1].magnitude_type = 'Mc'
    test_event.magnitudes[1].creation_info = CreationInfo('TES')
    test_event.magnitudes[1].origin_id = test_event.origins[0].resource_id
    test_event.magnitudes.append(Magnitude())
    test_event.magnitudes[2].mag = 1.3
    test_event.magnitudes[2].magnitude_type = 'Ms'
    test_event.magnitudes[2].creation_info = CreationInfo('TES')
    test_event.magnitudes[2].origin_id = test_event.origins[0].resource_id

    # Define the test pick
    _waveform_id = WaveformStreamID(station_code='FOZ',
                                    channel_code='SHZ',
                                    network_code='NZ')
    test_event.picks.append(
        Pick(waveform_id=_waveform_id,
             onset='impulsive',
             phase_hint='PN',
             polarity='positive',
             time=UTCDateTime("2012-03-26") + 1.68,
             horizontal_slowness=12,
             backazimuth=20))
    test_event.amplitudes.append(
        Amplitude(generic_amplitude=2.0,
                  period=0.4,
                  pick_id=test_event.picks[0].resource_id,
                  waveform_id=test_event.picks[0].waveform_id,
                  unit='m'))
    test_event.origins[0].arrivals.append(
        Arrival(time_weight=2,
                phase=test_event.picks[0].phase_hint,
                pick_id=test_event.picks[0].resource_id,
                backazimuth_residual=5,
                time_residual=0.2,
                distance=15,
                azimuth=25))
    # Add the event to a catalogue which can be used for QuakeML testing
    test_cat = Catalog()
    test_cat += test_event
    # Write the catalog
    test_cat.write("Test_catalog.xml", format='QUAKEML')
    # Read and check
    read_cat = read_events("Test_catalog.xml")
    os.remove("Test_catalog.xml")
    assert read_cat[0].resource_id == test_cat[0].resource_id
    assert read_cat[0].picks == test_cat[0].picks
    assert read_cat[0].origins[0].resource_id ==\
        test_cat[0].origins[0].resource_id
    assert read_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that Time_Residual_RMS is not part of the QuakeML schema
    assert read_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert read_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert read_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert read_cat[0].magnitudes == test_cat[0].magnitudes
    assert read_cat[0].event_descriptions == test_cat[0].event_descriptions
    assert read_cat[0].amplitudes[0].resource_id ==\
        test_cat[0].amplitudes[0].resource_id
    assert read_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    assert read_cat[0].amplitudes[0].unit == test_cat[0].amplitudes[0].unit
    assert read_cat[0].amplitudes[0].generic_amplitude ==\
        test_cat[0].amplitudes[0].generic_amplitude
    assert read_cat[0].amplitudes[0].pick_id ==\
        test_cat[0].amplitudes[0].pick_id
    assert read_cat[0].amplitudes[0].waveform_id ==\
        test_cat[0].amplitudes[0].waveform_id

    # Check the read-write s-file functionality
    sfile = eventtoSfile(test_cat[0],
                         userID='TEST',
                         evtype='L',
                         outdir='.',
                         wavefiles='test',
                         explosion=True,
                         overwrite=True)
    del read_cat
    assert readwavename(sfile) == ['test']
    read_cat = Catalog()
    read_cat += readpicks(sfile)
    os.remove(sfile)
    assert read_cat[0].picks[0].time == test_cat[0].picks[0].time
    assert read_cat[0].picks[0].backazimuth == test_cat[0].picks[0].backazimuth
    assert read_cat[0].picks[0].onset == test_cat[0].picks[0].onset
    assert read_cat[0].picks[0].phase_hint == test_cat[0].picks[0].phase_hint
    assert read_cat[0].picks[0].polarity == test_cat[0].picks[0].polarity
    assert read_cat[0].picks[0].waveform_id.station_code ==\
        test_cat[0].picks[0].waveform_id.station_code
    assert read_cat[0].picks[0].waveform_id.channel_code[-1] ==\
        test_cat[0].picks[0].waveform_id.channel_code[-1]
    # assert read_cat[0].origins[0].resource_id ==\
    #     test_cat[0].origins[0].resource_id
    assert read_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that Time_Residual_RMS is not part of the QuakeML schema
    assert read_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert read_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert read_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert read_cat[0].magnitudes[0].mag == test_cat[0].magnitudes[0].mag
    assert read_cat[0].magnitudes[1].mag == test_cat[0].magnitudes[1].mag
    assert read_cat[0].magnitudes[2].mag == test_cat[0].magnitudes[2].mag
    assert read_cat[0].magnitudes[0].creation_info ==\
        test_cat[0].magnitudes[0].creation_info
    assert read_cat[0].magnitudes[1].creation_info ==\
        test_cat[0].magnitudes[1].creation_info
    assert read_cat[0].magnitudes[2].creation_info ==\
        test_cat[0].magnitudes[2].creation_info
    assert read_cat[0].magnitudes[0].magnitude_type ==\
        test_cat[0].magnitudes[0].magnitude_type
    assert read_cat[0].magnitudes[1].magnitude_type ==\
        test_cat[0].magnitudes[1].magnitude_type
    assert read_cat[0].magnitudes[2].magnitude_type ==\
        test_cat[0].magnitudes[2].magnitude_type
    assert read_cat[0].event_descriptions == test_cat[0].event_descriptions
    # assert read_cat[0].amplitudes[0].resource_id ==\
    #     test_cat[0].amplitudes[0].resource_id
    assert read_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    assert read_cat[0].amplitudes[0].snr == test_cat[0].amplitudes[0].snr
    del read_cat
    # assert read_cat[0].amplitudes[0].pick_id ==\
    #     test_cat[0].amplitudes[0].pick_id
    # assert read_cat[0].amplitudes[0].waveform_id ==\
    #     test_cat[0].amplitudes[0].waveform_id

    # Test the wrappers for PICK and EVENTINFO classes
    picks, evinfo = eventtopick(test_cat)
    # Test the conversion back
    conv_cat = Catalog()
    conv_cat.append(picktoevent(evinfo, picks))
    assert conv_cat[0].picks[0].time == test_cat[0].picks[0].time
    assert conv_cat[0].picks[0].backazimuth == test_cat[0].picks[0].backazimuth
    assert conv_cat[0].picks[0].onset == test_cat[0].picks[0].onset
    assert conv_cat[0].picks[0].phase_hint == test_cat[0].picks[0].phase_hint
    assert conv_cat[0].picks[0].polarity == test_cat[0].picks[0].polarity
    assert conv_cat[0].picks[0].waveform_id.station_code ==\
        test_cat[0].picks[0].waveform_id.station_code
    assert conv_cat[0].picks[0].waveform_id.channel_code[-1] ==\
        test_cat[0].picks[0].waveform_id.channel_code[-1]
    # assert read_cat[0].origins[0].resource_id ==\
    #     test_cat[0].origins[0].resource_id
    assert conv_cat[0].origins[0].time == test_cat[0].origins[0].time
    # Note that Time_Residual_RMS is not part of the QuakeML schema
    assert conv_cat[0].origins[0].longitude == test_cat[0].origins[0].longitude
    assert conv_cat[0].origins[0].latitude == test_cat[0].origins[0].latitude
    assert conv_cat[0].origins[0].depth == test_cat[0].origins[0].depth
    assert conv_cat[0].magnitudes[0].mag == test_cat[0].magnitudes[0].mag
    assert conv_cat[0].magnitudes[1].mag == test_cat[0].magnitudes[1].mag
    assert conv_cat[0].magnitudes[2].mag == test_cat[0].magnitudes[2].mag
    assert conv_cat[0].magnitudes[0].creation_info ==\
        test_cat[0].magnitudes[0].creation_info
    assert conv_cat[0].magnitudes[1].creation_info ==\
        test_cat[0].magnitudes[1].creation_info
    assert conv_cat[0].magnitudes[2].creation_info ==\
        test_cat[0].magnitudes[2].creation_info
    assert conv_cat[0].magnitudes[0].magnitude_type ==\
        test_cat[0].magnitudes[0].magnitude_type
    assert conv_cat[0].magnitudes[1].magnitude_type ==\
        test_cat[0].magnitudes[1].magnitude_type
    assert conv_cat[0].magnitudes[2].magnitude_type ==\
        test_cat[0].magnitudes[2].magnitude_type
    assert conv_cat[0].event_descriptions == test_cat[0].event_descriptions
    # assert read_cat[0].amplitudes[0].resource_id ==\
    #     test_cat[0].amplitudes[0].resource_id
    assert conv_cat[0].amplitudes[0].period == test_cat[0].amplitudes[0].period
    assert conv_cat[0].amplitudes[0].snr == test_cat[0].amplitudes[0].snr
    return True
Example #38
    def test_creating_minimal_quakeml_with_mt(self):
        """
        Tests the creation of a minimal QuakeML containing origin, magnitude
        and moment tensor.
        """
        # Define test values in the physical domain (depth in m, moments in N*m)
        lat, lon, depth, org_time = 10.0, -20.0, 12000, UTCDateTime(2012, 1, 1)
        mrr, mtt, mpp, mtr, mpr, mtp = 1E18, 2E18, 3E18, 3E18, 2E18, 1E18
        scalar_moment = math.sqrt(
            mrr ** 2 + mtt ** 2 + mpp ** 2 + mtr ** 2 + mpr ** 2 + mtp ** 2)
        moment_magnitude = 0.667 * (math.log10(scalar_moment) - 9.1)
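        # Standard relation for M0 in N*m: Mw = (2/3) * (log10(M0) - 9.1);
        # 0.667 is the two-thirds factor rounded to three decimal places.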

        # Initialise event
        ev = Event(event_type="earthquake")

        ev_origin = Origin(time=org_time, latitude=lat, longitude=lon,
                           depth=depth, resource_id=ResourceIdentifier())
        ev.origins.append(ev_origin)

        # populate event moment tensor
        ev_tensor = Tensor(m_rr=mrr, m_tt=mtt, m_pp=mpp, m_rt=mtr, m_rp=mpr,
                           m_tp=mtp)

        ev_momenttensor = MomentTensor(tensor=ev_tensor)
        ev_momenttensor.scalar_moment = scalar_moment
        ev_momenttensor.derived_origin_id = ev_origin.resource_id

        ev_focalmechanism = FocalMechanism(moment_tensor=ev_momenttensor)
        ev.focal_mechanisms.append(ev_focalmechanism)

        # populate event magnitude
        ev_magnitude = Magnitude()
        ev_magnitude.mag = moment_magnitude
        ev_magnitude.magnitude_type = 'Mw'
        ev_magnitude.evaluation_mode = 'automatic'
        ev.magnitudes.append(ev_magnitude)

        # write QuakeML file
        cat = Catalog(events=[ev])
        memfile = io.BytesIO()
        cat.write(memfile, format="quakeml", validate=IS_RECENT_LXML)

        memfile.seek(0, 0)
        new_cat = _read_quakeml(memfile)
        self.assertEqual(len(new_cat), 1)
        event = new_cat[0]
        self.assertEqual(len(event.origins), 1)
        self.assertEqual(len(event.magnitudes), 1)
        self.assertEqual(len(event.focal_mechanisms), 1)
        org = event.origins[0]
        mag = event.magnitudes[0]
        fm = event.focal_mechanisms[0]
        self.assertEqual(org.latitude, lat)
        self.assertEqual(org.longitude, lon)
        self.assertEqual(org.depth, depth)
        self.assertEqual(org.time, org_time)
        # Moment tensor.
        mt = fm.moment_tensor.tensor
        self.assertTrue(abs(fm.moment_tensor.scalar_moment - scalar_moment) /
                        scalar_moment < 1E-10)
        self.assertEqual(mt.m_rr, mrr)
        self.assertEqual(mt.m_pp, mpp)
        self.assertEqual(mt.m_tt, mtt)
        self.assertEqual(mt.m_rt, mtr)
        self.assertEqual(mt.m_rp, mpr)
        self.assertEqual(mt.m_tp, mtp)
        # Mag
        self.assertAlmostEqual(mag.mag, moment_magnitude)
        self.assertEqual(mag.magnitude_type, "Mw")
        self.assertEqual(mag.evaluation_mode, "automatic")
Example #39
def iris2quakeml(url, output_folder=None):
    if not "/spudservice/" in url:
        url = url.replace("/spud/", "/spudservice/")
        if url.endswith("/"):
            url += "quakeml"
        else:
            url += "/quakeml"
    print "Downloading %s..." % url
    r = requests.get(url)
    if r.status_code != 200:
        msg = "Error Downloading file!"
        raise Exception(msg)

    # For some reason the quakeml file is escaped HTML.
    h = HTMLParser.HTMLParser()

    data = h.unescape(r.content)

    # Replace some XML tags.
    data = data.replace("long-period body waves", "body waves")
    data = data.replace("intermediate-period surface waves", "surface waves")
    data = data.replace("long-period mantle waves", "mantle waves")

    data = data.replace("<html><body><pre>", "")
    data = data.replace("</pre></body></html>", "")

    # Change the resource identifiers. Colons are not allowed in QuakeML.
    pattern = r"(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})\.(\d{6})"
    data = re.sub(pattern, r"\1-\2-\3T\4-\5-\6.\7", data)
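    # e.g. "2012-01-01T12:30:45.123456" -> "2012-01-01T12-30-45.123456",
    # removing the colons from timestamps embedded in resource ids.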

    data = StringIO(data)

    try:
        cat = readEvents(data)
    except Exception:
        msg = "Could not read downloaded event data"
        raise ValueError(msg)

    # Parse the event, and use only one origin, magnitude and focal mechanism.
    # Only the first event is used. Should not be a problem for the chosen
    # GCMT application.
    ev = cat[0]

    if ev.preferred_origin():
        ev.origins = [ev.preferred_origin()]
    else:
        ev.origins = [ev.origins[0]]
    if ev.preferred_focal_mechanism():
        ev.focal_mechanisms = [ev.preferred_focal_mechanism()]
    else:
        ev.focal_mechanisms = [ev.focal_mechanisms[0]]

    try:
        mt = ev.focal_mechanisms[0].moment_tensor
    except Exception:
        msg = "No moment tensor found in file."
        raise ValueError(msg)
    seismic_moment_in_dyn_cm = mt.scalar_moment
    if not seismic_moment_in_dyn_cm:
        msg = "No scalar moment found in file."
        raise ValueError(msg)

    # Create a new magnitude object with the moment magnitude calculated from
    # the given seismic moment.
    mag = Magnitude()
    mag.magnitude_type = "Mw"
    mag.origin_id = ev.origins[0].resource_id
    # This is the formula given on the GCMT homepage.
    mag.mag = (2.0 / 3.0) * (math.log10(seismic_moment_in_dyn_cm) - 16.1)
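    # e.g. M0 = 1E26 dyn*cm gives Mw = (2.0 / 3.0) * (26.0 - 16.1) = 6.6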
    mag.resource_id = ev.origins[0].resource_id.resource_id.replace(
        "Origin", "Magnitude")
    ev.magnitudes = [mag]
    ev.preferred_magnitude_id = mag.resource_id

    # Convert the depth to meters.
    org = ev.origins[0]
    org.depth *= 1000.0
    if org.depth_errors.uncertainty:
        org.depth_errors.uncertainty *= 1000.0

    # Ugly asserts -- this is just a simple script.
    assert (len(ev.magnitudes) == 1)
    assert (len(ev.origins) == 1)
    assert (len(ev.focal_mechanisms) == 1)

    # All moment tensor values in the QuakeML file are given in dyne * cm.
    # Convert them to N * m.
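    # (1 dyn*cm = 1E-7 N*m, hence all the divisions by 1E7 below.)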
    for key, value in mt.tensor.iteritems():
        if key.startswith("m_") and len(key) == 4:
            mt.tensor[key] /= 1E7
        if key.endswith("_errors") and hasattr(value, "uncertainty"):
            mt.tensor[key].uncertainty /= 1E7
    mt.scalar_moment /= 1E7
    if mt.scalar_moment_errors.uncertainty:
        mt.scalar_moment_errors.uncertainty /= 1E7
    p_axes = ev.focal_mechanisms[0].principal_axes
    if p_axes:
        for ax in [p_axes.t_axis, p_axes.p_axis, p_axes.n_axis]:
            if ax is None or not ax.length:
                continue
            ax.length /= 1E7

    # Check if it has a source time function
    stf = mt.source_time_function
    if stf:
        if stf.type != "triangle":
            msg = ("Source time function type '%s' not yet mapped. Please "
                   "contact the developers.") % stf.type
            raise NotImplementedError(msg)
        if not stf.duration:
            if not stf.decay_time:
                msg = "Not known how to derive duration without decay time."
                raise NotImplementedError(msg)
            # Approximate the duration for a triangular STF.
            stf.duration = 2 * stf.decay_time
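            # The code assumes the decay time is the triangle half duration,
            # so the full duration is twice that value.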

    # Get the flinn_engdahl region for a nice name.
    fe = FlinnEngdahl()
    region_name = fe.get_region(ev.origins[0].longitude,
                                ev.origins[0].latitude)
    region_name = region_name.replace(" ", "_")
    event_name = "GCMT_event_%s_Mag_%.1f_%s-%s-%s-%s-%s.xml" % \
        (region_name, ev.magnitudes[0].mag, ev.origins[0].time.year,
        ev.origins[0].time.month, ev.origins[0].time.day,
        ev.origins[0].time.hour, ev.origins[0].time.minute)

    # Check if the ids of the magnitude and origin contain the corresponding
    # tag. Otherwise replace them.
    ev.origins[0].resource_id = ev.origins[0].resource_id.resource_id.replace(
        "quakeml/gcmtid", "quakeml/origin/gcmtid")
    ev.magnitudes[0].resource_id = \
        ev.magnitudes[0].resource_id.resource_id.replace(
            "quakeml/gcmtid", "quakeml/magnitude/gcmtid")

    # Fix up the moment tensor resource_ids.
    mt.derived_origin_id = ev.origins[0].resource_id
    mt.resource_id = mt.resource_id.resource_id.replace(
        "focalmechanism", "momenttensor")

    cat = Catalog()
    cat.resource_id = ev.origins[0].resource_id.resource_id.replace(
        "origin", "event_parameters")
    cat.append(ev)
    if output_folder:
        event_name = os.path.join(output_folder, event_name)
    cat.write(event_name, format="quakeml", validate=True)
    print "Written file", event_name