Example #1
def _group_events(catalog, process_len, template_length, data_pad):
    """
    Internal function to group events into sub-catalogs based on process_len.

    :param catalog: Catalog to group into sub-catalogs
    :type catalog: obspy.core.event.Catalog
    :param process_len: Length in seconds that data will be processed in
    :type process_len: int
    :param template_length: Length of the templates in seconds
    :type template_length: float
    :param data_pad: Pad in seconds applied either side of the processed data
    :type data_pad: float

    :return: List of catalogs
    :rtype: list
    """
    # case for catalog only containing one event
    if len(catalog) == 1:
        return [catalog]
    sub_catalogs = []
    # Sort catalog by date
    catalog.events = sorted(catalog.events,
                            key=lambda e:
                            (e.preferred_origin() or e.origins[0]).time)
    sub_catalog = Catalog([catalog[0]])
    for event in catalog[1:]:
        origin_time = (event.preferred_origin() or event.origins[0]).time
        last_pick = sorted(event.picks, key=lambda p: p.time)[-1]
        max_diff = (process_len - (last_pick.time - origin_time) -
                    template_length)
        max_diff -= 2 * data_pad
        if origin_time - sub_catalog[0].origins[0].time < max_diff:
            sub_catalog.append(event)
        else:
            sub_catalogs.append(sub_catalog)
            sub_catalog = Catalog([event])
    sub_catalogs.append(sub_catalog)
    return sub_catalogs
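A minimal usage sketch (not part of the original source): two synthetic events, 120 s apart, each with one pick, grouped with the helper above. The 300 s process length, 10 s template length and 5 s pad are arbitrary illustrative values.

from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Origin, Pick


def _make_demo_event(origin_time):
    # One origin and a single pick 10 s after the origin.
    event = Event(origins=[Origin(time=origin_time)])
    event.picks.append(Pick(time=origin_time + 10))
    return event


t0 = UTCDateTime(2020, 1, 1)
demo_catalog = Catalog(events=[_make_demo_event(t0), _make_demo_event(t0 + 120)])
sub_catalogs = _group_events(catalog=demo_catalog, process_len=300,
                             template_length=10, data_pad=5)
print(len(sub_catalogs))  # both events fit in one 300 s chunk -> 1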
Example #2
def read_phase(ph_file):
    """
    Read hypoDD phase files into an ObsPy Catalog object.

    :type ph_file: str
    :param ph_file: Phase file to read event info from.

    :returns: Catalog of events from file.
    :rtype: :class:`obspy.core.event.Catalog`

    >>> from obspy.core.event.catalog import Catalog
    >>> catalog = read_phase('eqcorrscan/tests/test_data/tunnel.phase')
    >>> isinstance(catalog, Catalog)
    True
    """
    ph_catalog = Catalog()
    f = open(ph_file, 'r')
    # Topline of each event is marked by # in position 0
    for line in f:
        if line[0] == '#':
            if 'event_text' not in locals():
                event_text = {'header': line.rstrip(), 'picks': []}
            else:
                ph_catalog.append(_phase_to_event(event_text))
                event_text = {'header': line.rstrip(), 'picks': []}
        else:
            event_text['picks'].append(line.rstrip())
    ph_catalog.append(_phase_to_event(event_text))
    return ph_catalog
Example #3
    def get_phases(self, evids=None, times=None, lats=None, lons=None,
                   mags=None, depths=None, types=None, gtypes=None,
                   output_file=None, is_xml=False):
        """ Download events and phase picks from STP using the PHASE command.
        """

        if not self.connected:
            print('STP is not connected')
            return None
        self._get_event_phase('phase', evids, times, lats, lons, mags, depths, types, gtypes, output_file)
        evid_pattern = re.compile('^[1-9]+')
        catalog = Catalog()
        event = None
        for line in self.message.splitlines():
            line = line.strip()
            if not line.startswith('#'):
                #print(evid_pattern.match(line))
                if evid_pattern.match(line) is not None:
                    #print('Creating event')
                    event = utils.make_event(line)
                    catalog.append(event)
                else:
                    #print('Creating phase pick')
                    if event is None:
                        raise Exception('Error parsing phase output')
                    pick = utils.make_pick(line.strip(), event.origins[0].time)
                    event.picks.append(pick)
        self._end_command()    
        return catalog
Example #4
def read_cat_ref(cat_file):
    """
    Parses a given refrence catalogue (in ascii format,see the header for details)
    output is Obspy catalogue object
    """
    cat_ref = np.loadtxt(cat_file, delimiter=',', skiprows=1)
    cat = Catalog()
    for i, e in enumerate(cat_ref):
        event = Event(resource_id='smi:local/=' + str(i), creation_info='HG')
        origin = Origin()
        origin.time = UTCDateTime(int(e[2]), int(e[3]), int(e[4]),
                                  int(e[7]), int(e[8]), e[9])
        origin.longitude = e[0]
        origin.latitude = e[1]
        origin.depth = e[6] * 1000.0  # in meters
        event.origins.append(origin)
        if ~(np.isnan(e[10])):
            mag = Magnitude(creation_info='HER')
            mag.mag = e[10]
            mag.magnitude_type = 'Mw'
            event.magnitudes.append(mag)
        if ~(np.isnan(e[11])):
            mag = Magnitude(creation_info='MAR')
            mag.mag = e[11]
            mag.magnitude_type = 'Mw' 
            event.magnitudes.append(mag)
        if ~(np.isnan(e[12])):
            mag = Magnitude(creation_info='SIP')
            mag.mag = e[12]
            mag.magnitude_type = 'Mw' 
            event.magnitudes.append(mag)
        cat.append(event)
    return cat
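A hedged usage sketch (not from the source): the column order below is inferred from the parser body (lon, lat, year, month, day, an unused column, depth in km, hour, minute, second, then three Mw columns); the two rows are invented for illustration.

with open('cat_ref_demo.csv', 'w') as f:
    f.write('lon,lat,yr,mo,dy,x,dep_km,hr,mn,sec,mw1,mw2,mw3\n')
    f.write('30.50,40.20,2015,6,1,0,12.0,13,45,30.2,4.1,nan,nan\n')
    f.write('31.10,40.80,2015,6,2,0,8.0,4,5,6.7,nan,3.9,3.8\n')

demo_cat = read_cat_ref('cat_ref_demo.csv')
print(len(demo_cat), len(demo_cat[0].magnitudes))  # -> 2 1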
Example #5
    def test_catalog_grouping(self):
        from obspy.core.event import Catalog
        from eqcorrscan.utils.sfile_util import read_event
        import glob
        import os
        from eqcorrscan.core.template_gen import _group_events

        testing_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                    'test_data', 'REA', 'TEST_', '*')
        catalog = Catalog()
        sfiles = glob.glob(testing_path)
        for sfile in sfiles:
            catalog.append(read_event(sfile=sfile))
        for process_len, pads in [(60, [5]),
                                  (300, [5, 60]),
                                  (3600, [5, 60, 300]),
                                  (86400, [5, 60, 300])]:
            for data_pad in pads:
                sub_catalogs = _group_events(catalog=catalog,
                                             process_len=process_len,
                                             data_pad=data_pad)
                k_events = 0
                for sub_catalog in sub_catalogs:
                    min_time = min([event.origins[0].time
                                    for event in sub_catalog])
                    min_time -= data_pad
                    for event in sub_catalog:
                        self.assertTrue((event.origins[0].time +
                                         data_pad) - min_time < process_len)
                        k_events += 1
                self.assertEqual(k_events, len(catalog))
Example #6
def read_phase(ph_file):
    """
    Read hypoDD phase files into an ObsPy Catalog object.

    :type ph_file: str
    :param ph_file: Phase file to read event info from.

    :returns: Catalog of events from file.
    :rtype: :class:`obspy.core.event.Catalog`

    >>> from obspy.core.event.catalog import Catalog
    >>> catalog = read_phase('eqcorrscan/tests/test_data/tunnel.phase')
    >>> isinstance(catalog, Catalog)
    True
    """
    from obspy.core.event import Catalog
    ph_catalog = Catalog()
    f = open(ph_file, 'r')
    # Topline of each event is marked by # in position 0
    for line in f:
        if line[0] == '#':
            if 'event_text' not in locals():
                event_text = {'header': line.rstrip(),
                              'picks': []}
            else:
                ph_catalog.append(_phase_to_event(event_text))
                event_text = {'header': line.rstrip(),
                              'picks': []}
        else:
            event_text['picks'].append(line.rstrip())
    ph_catalog.append(_phase_to_event(event_text))
    return ph_catalog
Example #7
def strip_events(
        events: catalog_or_event,
        reject_evaluation_status: Iterable = "rejected") -> catalog_or_event:
    """
    Removes all derivative data and rejected objects from an event or catalog

    This is a nuclear option for when processing goes horribly wrong. It keeps
    only the picks and amplitudes that are not rejected, plus the first event
    description of the event.

    Parameters
    ----------
    events
        The events to strip
    reject_evaluation_status
        Reject picks and amplitudes that have this as an evaluation status
        (accepts either a single value or a list)

    Returns
    -------
    The stripped events
    """
    # Make sure this returns a new catalog
    out = Catalog()
    for eve in events:
        out.append(
            strip_events(eve,
                         reject_evaluation_status=reject_evaluation_status))
    return out
Example #8
File: core.py Project: zurgeg/obspy
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split('\n'):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split('\t', 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get('lon'))
         origin.latitude = self._str2num(values.get('lat'))
         depth = self._str2num(values.get('depth'))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get('z_err'))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get('h_err'))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = 'horizontal uncertainty'
             origin.origin_uncertainty = ou
         year = self._str2num(values.get('year'))
         if year is not None:
             t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 # no seconds involved
                 if len(comps) < 6:
                     utc_args = [int(v) for v in comps if v is not None]
                 # we also have to handle seconds
                 else:
                     utc_args = [
                         int(v) if v is not None else 0 for v in comps[:-1]
                     ]
                     # just leave float seconds as is
                     utc_args.append(comps[-1])
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get('mag'))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get('m_err'))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         event.scope_resource_ids()
         catalog.append(event)
     return catalog
Example #9
    def test_seishub(self):
        """Test the seishub method, use obspy default seishub client."""
        import sys
        if sys.version_info.major == 2:
            from future.backports.urllib.request import URLError
        else:
            from urllib.request import URLError
        t = UTCDateTime(2009, 9, 3)
        test_cat = Catalog()
        test_cat.append(Event())
        test_cat[0].origins.append(Origin())
        test_cat[0].origins[0].time = t
        test_cat[0].origins[0].latitude = 45
        test_cat[0].origins[0].longitude = 45
        test_cat[0].origins[0].depth = 5000
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHZ',
                                              network_code='BW'),
                 phase_hint='PG',
                 time=t + 2000))
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHN',
                                              network_code='BW'),
                 phase_hint='SG',
                 time=t + 2005))
        test_cat[0].picks.append(
            Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                              channel_code='EHE',
                                              network_code='BW'),
                 phase_hint='SG',
                 time=t + 2005.5))

        test_url = "http://teide.geophysik.uni-muenchen.de:8080"

        if sys.version_info.major == 3:
            try:
                template = template_gen(method="from_seishub",
                                        catalog=test_cat,
                                        url=test_url,
                                        lowcut=1.0,
                                        highcut=5.0,
                                        samp_rate=20,
                                        filt_order=4,
                                        length=3,
                                        prepick=0.5,
                                        swin='all',
                                        process_len=300)
            except URLError:
                pass
        else:
            pass
        if 'template' in locals():
            self.assertEqual(len(template), 3)
Example #10
File: core.py Project: Brtle/obspy
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split('\n'):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split('\t', 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get('lon'))
         origin.latitude = self._str2num(values.get('lat'))
         depth = self._str2num(values.get('depth'))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get('z_err'))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get('h_err'))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = 'horizontal uncertainty'
             origin.origin_uncertainty = ou
         year = self._str2num(values.get('year'))
         if year is not None:
             t_fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 # no seconds involved
                 if len(comps) < 6:
                     utc_args = [int(v) for v in comps if v is not None]
                 # we also have to handle seconds
                 else:
                     utc_args = [int(v) if v is not None else 0
                                 for v in comps[:-1]]
                     # just leave float seconds as is
                     utc_args.append(comps[-1])
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get('mag'))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get('m_err'))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         event.scope_resource_ids()
         catalog.append(event)
     return catalog
Example #11
def filter_cat_poly(cat,poly):
    """
    Gets the events within a polygon
    output is Obspy catalogue object
    """
    cat_filt = Catalog()
    for eve in cat:
        if poly.contains(Point(eve.origins[0].longitude,
                                eve.origins[0].latitude)):
            cat_filt.append(eve)
    return cat_filt
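A minimal usage sketch (assuming shapely is installed and Point is imported in the function's module, as the body above relies on it): keep only events that fall inside a simple bounding box.

from obspy.core.event import Catalog, Event, Origin
from shapely.geometry import Polygon

box = Polygon([(20.0, 35.0), (30.0, 35.0), (30.0, 45.0), (20.0, 45.0)])
demo_cat = Catalog(events=[
    Event(origins=[Origin(longitude=25.0, latitude=40.0)]),  # inside the box
    Event(origins=[Origin(longitude=50.0, latitude=10.0)]),  # outside the box
])
print(len(filter_cat_poly(demo_cat, box)))  # -> 1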
Example #12
def append_cmt_to_catalog(event_origin,
                          cmt_to_add,
                          tag="new_cmt",
                          author="Princeton GATG",
                          change_preferred_id=True):
    """
    Add a cmt to an event. The tag is appended to cmt.resource_id to avoid
    resource_id duplication problems within the event.

    :param event_origin: the event that you want to add the cmt to.
    :type event_origin: str, obspy.core.event.Event or obspy.core.event.Catalog
    :param cmt_to_add: the cmt that you want to add to the event.
    :type cmt_to_add: str, obspy.core.event.Event or obspy.core.event.Catalog
    :param change_preferred_id: change all preferred ids to the newly added cmt
    :type change_preferred_id: bool
    :return: obspy.core.event.Catalog
    """
    event = _parse_event(event_origin)
    cmt_event = _parse_event(cmt_to_add)

    if not isinstance(tag, str):
        raise TypeError("tag(%s) should be type of str" % type(tag))

    if not isinstance(author, str):
        raise TypeError("author(%s) should be type of str" % type(author))

    # User defined creation information
    creation_info = CreationInfo(author=author, version=tag)

    # add cmt origin
    cmt_origin = prepare_cmt_origin(cmt_event, tag, creation_info)
    event.origins.append(cmt_origin)

    # add cmt magnitude
    cmt_mag = prepare_cmt_mag(cmt_event, tag, cmt_origin.resource_id,
                              creation_info)
    event.magnitudes.append(cmt_mag)

    # add cmt focal mechanism
    cmt_focal = prepare_cmt_focal(cmt_event, tag, cmt_origin.resource_id,
                                  cmt_mag.resource_id, creation_info)
    event.focal_mechanisms.append(cmt_focal)

    # change preferred id if needed
    if change_preferred_id:
        event.preferred_origin_id = str(cmt_origin.resource_id)
        event.preferred_magnitude_id = str(cmt_mag.resource_id)
        event.preferred_focal_mechanism_id = str(cmt_focal.resource_id)
        _validator(event, cmt_origin, cmt_mag, cmt_focal)

    new_cat = Catalog()
    new_cat.append(event)

    return new_cat
Example #13
def node_catalog_no_picks(node_catalog) -> Tuple[obspy.Catalog, Dict]:
    """return the node catalog with just origins"""
    eid_map = {}
    cat = Catalog()
    for num, eve in enumerate(node_catalog):
        eve_out = Event(origins=eve.origins)
        for o in eve_out.origins:
            o.arrivals = []
        eve_out.resource_id = ResourceIdentifier(f"event_{num}")
        cat.append(eve_out)
        eid_map[eve.resource_id.id] = eve_out.resource_id.id
    return cat, eid_map
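A minimal sketch (treating the fixture above as a plain helper, with a made-up one-event catalog): the arrivals attached to the origin are stripped while the origin itself is kept.

from obspy.core.event import Arrival, Catalog, Event, Origin

demo = Catalog(events=[Event(origins=[Origin(arrivals=[Arrival()])])])
stripped, eid_map = node_catalog_no_picks(demo)
print(len(stripped), stripped[0].origins[0].arrivals)  # -> 1 []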
Example #14
def detections_to_catalog(detections):
    r"""Helper to convert from list of detections to obspy catalog.

    :type detections: list
    :param detections: list of eqcorrscan.core.match_filter.detection

    :returns: obspy.core.event.Catalog
    """
    from obspy.core.event import Catalog
    catalog = Catalog()
    for detection in detections:
        catalog.append(detection.event)
    return catalog
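A minimal sketch (the detection objects are hypothetical stand-ins, not the eqcorrscan class): the helper only needs objects exposing an event attribute that holds an obspy Event.

from collections import namedtuple
from obspy.core.event import Event

# Hypothetical stand-in for eqcorrscan detections: only an .event attribute
# is required by the helper above.
FakeDetection = namedtuple('FakeDetection', ['event'])
detections = [FakeDetection(event=Event()), FakeDetection(event=Event())]
print(len(detections_to_catalog(detections)))  # -> 2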
Example #15
def detections_to_catalog(detections):
    r"""Helper to convert from list of detections to obspy catalog.

    :type detections: list
    :param detections: list of eqcorrscan.core.match_filter.detection

    :returns: obspy.core.event.Catalog
    """
    from obspy.core.event import Catalog
    catalog = Catalog()
    for detection in detections:
        catalog.append(detection.event)
    return catalog
Example #16
def append_cmt_to_catalog(event_origin, cmt_to_add, tag="new_cmt",
                          author="Princeton GATG",
                          change_preferred_id=True):
    """
    Add a cmt to an event. The tag is appended to cmt.resource_id to avoid
    resource_id duplication problems within the event.

    :param event_origin: the event that you want to add the cmt to.
    :type event_origin: str, obspy.core.event.Event or obspy.core.event.Catalog
    :param cmt_to_add: the cmt that you want to add to the event.
    :type cmt_to_add: str, obspy.core.event.Event or obspy.core.event.Catalog
    :param change_preferred_id: change all preferred ids to the newly added cmt
    :type change_preferred_id: bool
    :return: obspy.core.event.Catalog
    """
    event = _parse_event(event_origin)
    cmt_event = _parse_event(cmt_to_add)

    if not isinstance(tag, str):
        raise TypeError("tag(%s) should be type of str" % type(tag))

    if not isinstance(author, str):
        raise TypeError("author(%s) should be type of str" % type(author))

    # User defined creation information
    creation_info = CreationInfo(author=author, version=tag)

    # add cmt origin
    cmt_origin = prepare_cmt_origin(cmt_event, tag, creation_info)
    event.origins.append(cmt_origin)

    # add cmt magnitude
    cmt_mag = prepare_cmt_mag(cmt_event, tag, cmt_origin.resource_id,
                              creation_info)
    event.magnitudes.append(cmt_mag)

    # add cmt focal mechanism
    cmt_focal = prepare_cmt_focal(cmt_event, tag, cmt_origin.resource_id,
                                  cmt_mag.resource_id, creation_info)
    event.focal_mechanisms.append(cmt_focal)

    # change preferred id if needed
    if change_preferred_id:
        event.preferred_origin_id = str(cmt_origin.resource_id)
        event.preferred_magnitude_id = str(cmt_mag.resource_id)
        event.preferred_focal_mechanism_id = str(cmt_focal.resource_id)
        _validator(event, cmt_origin, cmt_mag, cmt_focal)

    new_cat = Catalog()
    new_cat.append(event)

    return new_cat
Example #17
def get_catalog(detections):
    """
    Generate an obspy catalog from detections of DETECTION class.

    :type detections: list
    :param detections: list of eqcorrscan.core.match_filter.DETECTION

    :returns: obspy.core.event.Catalog
    """
    from obspy.core.event import Catalog
    catalog = Catalog()
    for detection in detections:
        catalog.append(detection.event)
    return catalog
Example #18
 def _deserialize(self):
     catalog = Catalog()
     res_id = '/'.join((res_id_prefix,
                        self.filename.replace(':', '/')))\
         .replace('\\', '/').replace('//', '/')
     catalog.resource_id = ResourceIdentifier(id=res_id)
     catalog.description = 'Created from NEIC PDE mchedr format'
     catalog.comments = ''
     catalog.creation_info = CreationInfo(creation_time=UTCDateTime())
     for line in self.fh.readlines():
         # XXX: ugly, probably we should do everything in byte strings
         # here? Is the pde / mchedr format unicode aware?
         line = line.decode()
         record_id = line[0:2]
         if record_id == 'HY':
             event = self._parse_record_hy(line)
             catalog.append(event)
         elif record_id == 'P ':
             pick, arrival = self._parse_record_p(line, event)
         elif record_id == 'E ':
             self._parse_record_e(line, event)
         elif record_id == 'L ':
             self._parse_record_l(line, event)
         elif record_id == 'A ':
             self._parse_record_a(line, event)
         elif record_id == 'C ':
             self._parse_record_c(line, event)
         elif record_id == 'AH':
             self._parse_record_ah(line, event)
         elif record_id == 'AE':
             self._parse_record_ae(line, event)
         elif record_id == 'Dp':
             focal_mechanism = self._parse_record_dp(line, event)
         elif record_id == 'Dt':
             self._parse_record_dt(line, focal_mechanism)
         elif record_id == 'Da':
             self._parse_record_da(line, focal_mechanism)
         elif record_id == 'Dc':
             self._parse_record_dc(line, focal_mechanism)
         elif record_id == 'M ':
             self._parse_record_m(line, event, pick)
         elif record_id == 'S ':
             self._parse_record_s(line, event, pick, arrival)
     self.fh.close()
     # strip extra whitespaces from event comments
     for event in catalog:
         for comment in event.comments:
             comment.text = comment.text.strip()
         event.scope_resource_ids()
     return catalog
Example #19
 def _deserialize(self):
     catalog = Catalog()
     res_id = '/'.join((res_id_prefix,
                        self.filename.replace(':', '/')))\
         .replace('\\', '/').replace('//', '/')
     catalog.resource_id = ResourceIdentifier(id=res_id)
     catalog.description = 'Created from NEIC PDE mchedr format'
     catalog.comments = ''
     catalog.creation_info = CreationInfo(creation_time=UTCDateTime())
     for line in self.fh.readlines():
         # XXX: ugly, probably we should do everything in byte strings
         # here? Is the pde / mchedr format unicode aware?
         line = line.decode()
         record_id = line[0:2]
         if record_id == 'HY':
             event = self._parse_record_hy(line)
             catalog.append(event)
         elif record_id == 'P ':
             pick, arrival = self._parse_record_p(line, event)
         elif record_id == 'E ':
             self._parse_record_e(line, event)
         elif record_id == 'L ':
             self._parse_record_l(line, event)
         elif record_id == 'A ':
             self._parse_record_a(line, event)
         elif record_id == 'C ':
             self._parse_record_c(line, event)
         elif record_id == 'AH':
             self._parse_record_ah(line, event)
         elif record_id == 'AE':
             self._parse_record_ae(line, event)
         elif record_id == 'Dp':
             focal_mechanism = self._parse_record_dp(line, event)
         elif record_id == 'Dt':
             self._parse_record_dt(line, focal_mechanism)
         elif record_id == 'Da':
             self._parse_record_da(line, focal_mechanism)
         elif record_id == 'Dc':
             self._parse_record_dc(line, focal_mechanism)
         elif record_id == 'M ':
             self._parse_record_m(line, event, pick)
         elif record_id == 'S ':
             self._parse_record_s(line, event, pick, arrival)
     self.fh.close()
     # strip extra whitespaces from event comments
     for event in catalog:
         for comment in event.comments:
             comment.text = comment.text.strip()
         event.scope_resource_ids()
     return catalog
Example #20
    def get_events(self, evids=None, times=None, lats=None, lons=None,
                   mags=None, depths=None, types=None, gtypes=None,
                   output_file=None, is_xml=False):
        """ Download events from STP using the EVENT command.
        """

        if not self.connected:
            print('STP is not connected')
            return None
        self._get_event_phase('event', evids, times, lats, lons, mags, depths, types, gtypes, output_file)
        catalog = Catalog()
        for line in self.message.splitlines():
            if not line.startswith('#'):
                catalog.append(utils.make_event(line))
        self._end_command()
        return catalog
Example #21
def get_catalog(detections):
    """
    Generate an obspy catalog from detections of DETECTION class.

    :type detections: list
    :param detections: list of eqcorrscan.core.match_filter.DETECTION

    :returns: obspy.core.event.Catalog
    """
    from obspy.core.event import Catalog
    catalog = Catalog()
    for detection in detections:
        catalog.append(detection.event)
    return catalog
Example #22
 def _deserialize(self):
     catalog = Catalog()
     res_id = "/".join((res_id_prefix, self.filename))
     catalog.resource_id = ResourceIdentifier(id=res_id)
     catalog.description = "Created from NEIC PDE mchedr format"
     catalog.comments = ""
     catalog.creation_info = CreationInfo(creation_time=UTCDateTime())
     for line in self.fh.readlines():
         # XXX: ugly, probably we should do everything in byte strings
         # here? Is the pde / mchedr format unicode aware?
         line = line.decode()
         record_id = line[0:2]
         if record_id == "HY":
             event = self._parseRecordHY(line)
             catalog.append(event)
         elif record_id == "P ":
             pick, arrival = self._parseRecordP(line, event)
         elif record_id == "E ":
             self._parseRecordE(line, event)
         elif record_id == "L ":
             self._parseRecordL(line, event)
         elif record_id == "A ":
             self._parseRecordA(line, event)
         elif record_id == "C ":
             self._parseRecordC(line, event)
         elif record_id == "AH":
             self._parseRecordAH(line, event)
         elif record_id == "AE":
             self._parseRecordAE(line, event)
         elif record_id == "Dp":
             focal_mechanism = self._parseRecordDp(line, event)
         elif record_id == "Dt":
             self._parseRecordDt(line, focal_mechanism)
         elif record_id == "Da":
             self._parseRecordDa(line, focal_mechanism)
         elif record_id == "Dc":
             self._parseRecordDc(line, focal_mechanism)
         elif record_id == "M ":
             self._parseRecordM(line, event, pick)
         elif record_id == "S ":
             self._parseRecordS(line, event, pick, arrival)
     self.fh.close()
     # strip extra whitespaces from event comments
     for event in catalog:
         for comment in event.comments:
             comment.text = comment.text.strip()
     return catalog
Example #23
    def test_seishub(self):
        """Test the seishub method, use obspy default seishub client."""
        from obspy.core.event import Catalog, Event, Origin, Pick
        from obspy.core.event import WaveformStreamID
        from obspy import UTCDateTime
        import warnings
        from future import standard_library
        with standard_library.hooks():
            from urllib.request import URLError


        t = UTCDateTime(2009, 9, 3)
        test_cat = Catalog()
        test_cat.append(Event())
        test_cat[0].origins.append(Origin())
        test_cat[0].origins[0].time = t
        test_cat[0].origins[0].latitude = 45
        test_cat[0].origins[0].longitude = 45
        test_cat[0].origins[0].depth = 5000
        test_cat[0].\
            picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                                           channel_code='EHZ',
                                                            network_code='BW'),
                              phase_hint='PG', time=t + 2000))
        test_cat[0].\
            picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                                           channel_code='EHN',
                                                           network_code='BW'),
                              phase_hint='SG', time=t + 2005))
        test_cat[0].\
            picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                                           channel_code='EHE',
                                                           network_code='BW'),
                              phase_hint='SG', time=t + 2005.5))

        test_url = 'http://teide.geophysik.uni-muenchen.de:8080'

        try:
            template = from_seishub(test_cat, url=test_url, lowcut=1.0,
                                    highcut=5.0, samp_rate=20, filt_order=4,
                                    length=3, prepick=0.5, swin='all',
                                    process_len=300)
        except URLError:
            warnings.warn('Timed out connection to seishub')
        if 'template' in locals():
            self.assertEqual(len(template), 3)
Example #24
    def test_seishub(self):
        """Test the seishub method, use obspy default seishub client."""
        from obspy.core.event import Catalog, Event, Origin, Pick
        from obspy.core.event import WaveformStreamID
        from obspy import UTCDateTime
        import warnings
        from future import standard_library
        with standard_library.hooks():
            from urllib.request import URLError


        t = UTCDateTime(2009, 9, 3)
        test_cat = Catalog()
        test_cat.append(Event())
        test_cat[0].origins.append(Origin())
        test_cat[0].origins[0].time = t
        test_cat[0].origins[0].latitude = 45
        test_cat[0].origins[0].longitude = 45
        test_cat[0].origins[0].depth = 5000
        test_cat[0].\
            picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                                           channel_code='EHZ',
                                                            network_code='BW'),
                              phase_hint='PG', time=t + 2000))
        test_cat[0].\
            picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                                           channel_code='EHN',
                                                           network_code='BW'),
                              phase_hint='SG', time=t + 2005))
        test_cat[0].\
            picks.append(Pick(waveform_id=WaveformStreamID(station_code='MANZ',
                                                           channel_code='EHE',
                                                           network_code='BW'),
                              phase_hint='SG', time=t + 2005.5))

        test_url = 'http://teide.geophysik.uni-muenchen.de:8080'

        try:
            template = from_seishub(test_cat, url=test_url, lowcut=1.0,
                                    highcut=5.0, samp_rate=20, filt_order=4,
                                    length=3, prepick=0.5, swin='all')
        except URLError:
            warnings.warn('Timed out connection to seishub')
        if 'template' in locals():
            self.assertEqual(len(template), 3)
Example #25
 def _deserialize(self, zmap_str):
     catalog = Catalog()
     for row in zmap_str.split("\n"):
         if len(row) == 0:
             continue
         origin = Origin()
         event = Event(origins=[origin])
         event.preferred_origin_id = origin.resource_id.id
         # Begin value extraction
         columns = row.split("\t", 13)[:13]  # ignore extra columns
         values = dict(zip(_STD_ZMAP_COLUMNS + _EXT_ZMAP_COLUMNS, columns))
         # Extract origin
         origin.longitude = self._str2num(values.get("lon"))
         origin.latitude = self._str2num(values.get("lat"))
         depth = self._str2num(values.get("depth"))
         if depth is not None:
             origin.depth = depth * 1000.0
         z_err = self._str2num(values.get("z_err"))
         if z_err is not None:
             origin.depth_errors.uncertainty = z_err * 1000.0
         h_err = self._str2num(values.get("h_err"))
         if h_err is not None:
             ou = OriginUncertainty()
             ou.horizontal_uncertainty = h_err
             ou.preferred_description = "horizontal uncertainty"
             origin.origin_uncertainty = ou
         year = self._str2num(values.get("year"))
         if year is not None:
             t_fields = ["year", "month", "day", "hour", "minute", "second"]
             comps = [self._str2num(values.get(f)) for f in t_fields]
             if year % 1 != 0:
                 origin.time = self._decyear2utc(year)
             elif any(v > 0 for v in comps[1:]):
                 utc_args = [int(v) for v in comps if v is not None]
                 origin.time = UTCDateTime(*utc_args)
         mag = self._str2num(values.get("mag"))
         # Extract magnitude
         if mag is not None:
             magnitude = Magnitude(mag=mag)
             m_err = self._str2num(values.get("m_err"))
             magnitude.mag_errors.uncertainty = m_err
             event.magnitudes.append(magnitude)
             event.preferred_magnitude_id = magnitude.resource_id.id
         catalog.append(event)
     return catalog
Example #26
 def _deserialize(self):
     catalog = Catalog()
     res_id = '/'.join((res_id_prefix, self.filename))
     catalog.resource_id = ResourceIdentifier(id=res_id)
     catalog.description = 'Created from NEIC PDE mchedr format'
     catalog.comments = ''
     catalog.creation_info = CreationInfo(creation_time=UTCDateTime())
     for line in self.fh.readlines():
         record_id = line[0:2]
         if record_id == 'HY':
             event = self._parseRecordHY(line)
             catalog.append(event)
         elif record_id == 'P ':
             pick, arrival = self._parseRecordP(line, event)
         elif record_id == 'E ':
             self._parseRecordE(line, event)
         elif record_id == 'L ':
             self._parseRecordL(line, event)
         elif record_id == 'A ':
             self._parseRecordA(line, event)
         elif record_id == 'C ':
             self._parseRecordC(line, event)
         elif record_id == 'AH':
             self._parseRecordAH(line, event)
         elif record_id == 'AE':
             self._parseRecordAE(line, event)
         elif record_id == 'Dp':
             focal_mechanism = self._parseRecordDp(line, event)
         elif record_id == 'Dt':
             self._parseRecordDt(line, focal_mechanism)
         elif record_id == 'Da':
             self._parseRecordDa(line, focal_mechanism)
         elif record_id == 'Dc':
             self._parseRecordDc(line, focal_mechanism)
         elif record_id == 'M ':
             self._parseRecordM(line, event, pick)
         elif record_id == 'S ':
             self._parseRecordS(line, event, pick, arrival)
     self.fh.close()
     # strip extra whitespaces from event comments
     for event in catalog:
         for comment in event.comments:
             comment.text = comment.text.strip()
     return catalog
Example #27
 def test_append(self):
     """
     Tests the append method of the Catalog object.
     """
     # 1 - create catalog and add a few events
     catalog = Catalog()
     event1 = Event()
     event2 = Event()
     self.assertEqual(len(catalog), 0)
     catalog.append(event1)
     self.assertEqual(len(catalog), 1)
     self.assertEqual(catalog.events, [event1])
     catalog.append(event2)
     self.assertEqual(len(catalog), 2)
     self.assertEqual(catalog.events, [event1, event2])
      # 2 - adding objects other than Event should fail
     self.assertRaises(TypeError, catalog.append, str)
     self.assertRaises(TypeError, catalog.append, Catalog)
     self.assertRaises(TypeError, catalog.append, [event1])
Example #28
 def test_append(self):
     """
     Tests the append method of the Catalog object.
     """
     # 1 - create catalog and add a few events
     catalog = Catalog()
     event1 = Event()
     event2 = Event()
     self.assertEqual(len(catalog), 0)
     catalog.append(event1)
     self.assertEqual(len(catalog), 1)
     self.assertEqual(catalog.events, [event1])
     catalog.append(event2)
     self.assertEqual(len(catalog), 2)
     self.assertEqual(catalog.events, [event1, event2])
      # 2 - adding objects other than Event should fail
     self.assertRaises(TypeError, catalog.append, str)
     self.assertRaises(TypeError, catalog.append, Catalog)
     self.assertRaises(TypeError, catalog.append, [event1])
Example #29
    def make_xml(self):
        client = Client("IRIS")
        cat = Catalog()  # empty earthquake catalogue
        print('')

        # Method to retrieve events from IRIS based on event ID and create an xml file
        for event_id in self.ieb_events['IRIS_ID']:
            try:
                print('Requesting Information for event: ' + str(event_id))
                IRIS_event = client.get_events(eventid=int(event_id))[0]
                cat.append(IRIS_event)
            except FDSNException:
                print('')
                print('Error!!: No Event Information for ' + str(event_id))

        print('')
        print("Resulting Earthquake Catalogue:")
        print(cat)
        new_filename = os.path.splitext(self.ieb_filename)[0] + '.xml'
        cat.write(filename=new_filename, format="QUAKEML")
Example #30
def sfiles_to_event(sfile_list):
    """
    Write an event.dat file from a list of Seisan events

    :type sfile_list: list
    :param sfile_list: List of s-files to sort and put into the database

    :returns: List of tuples of event ID (int) and Sfile name
    """
    event_list = []
    sort_list = [(readheader(sfile).origins[0].time, sfile)
                 for sfile in sfile_list]
    sort_list.sort(key=lambda tup: tup[0])
    sfile_list = [sfile[1] for sfile in sort_list]
    catalog = Catalog()
    for i, sfile in enumerate(sfile_list):
        event_list.append((i, sfile))
        catalog.append(readheader(sfile))
    # Hand off to sister function
    write_event(catalog)
    return event_list
Example #31
 def test_append(self):
     """
     Tests the append method of the Catalog object.
     """
     # 1 - create catalog and add a few events
     catalog = Catalog()
     event1 = Event()
     event2 = Event()
     assert len(catalog) == 0
     catalog.append(event1)
     assert len(catalog) == 1
     assert catalog.events == [event1]
     catalog.append(event2)
     assert len(catalog) == 2
     assert catalog.events == [event1, event2]
      # 2 - adding objects other than Event should fail
     with pytest.raises(TypeError):
         catalog.append(str)
     with pytest.raises(TypeError):
         catalog.append(Catalog)
     with pytest.raises(TypeError):
         catalog.append([event1])
Example #32
def sfiles_to_event(sfile_list):
    """
    Write an event.dat file from a list of Seisan events

    :type sfile_list: list
    :param sfile_list: List of s-files to sort and put into the database

    :returns: List of tuples of event ID (int) and Sfile name
    """
    from obspy.core.event import Catalog
    event_list = []
    sort_list = [(sfile_util.readheader(sfile).origins[0].time, sfile)
                 for sfile in sfile_list]
    sort_list.sort(key=lambda tup: tup[0])
    sfile_list = [sfile[1] for sfile in sort_list]
    catalog = Catalog()
    for i, sfile in enumerate(sfile_list):
        event_list.append((i, sfile))
        catalog.append(sfile_util.readheader(sfile))
    # Hand off to sister function
    write_event(catalog)
    return event_list
Example #33
File: io.py Project: preinh/RF
def convert_dmteventfile():
    eventsfile1 = os.path.join(conf.dmt_path, 'EVENT', 'event_list')
    eventsfile2 = os.path.join(conf.dmt_path, 'EVENT', 'events.xml')
    with open(eventsfile1) as f:
        events1 = pickle.load(f)
    events2 = Catalog()
    for ev in events1:
        orkw = {'time': ev['datetime'],
                'latitude': ev['latitude'],
                'longitude': ev['longitude'],
                'depth': ev['depth']}
        magkw = {'mag': ev['magnitude'],
                 'magnitude_type': ev['magnitude_type']}
        evdesargs = (ev['flynn_region'], 'Flinn-Engdahl region')
        evkw = {'resource_id': ev['event_id'],
                'event_type': 'earthquake',
                'creation_info': CreationInfo(author=ev['author']),
                'event_descriptions': [EventDescription(*evdesargs)],
                'origins': [Origin(**orkw)],
                'magnitudes': [Magnitude(**magkw)]}
        events2.append(Event(**evkw))
    events2.write(eventsfile2, 'QUAKEML')
Example #34
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
Example #35
def event_to_quakeml(event, filename):
    """
    Write one of those events to QuakeML.
    """
    # Create all objects.
    cat = Catalog()
    ev = Event()
    org = Origin()
    mag = Magnitude()
    fm = FocalMechanism()
    mt = MomentTensor()
    t = Tensor()
    # Link them together.
    cat.append(ev)
    ev.origins.append(org)
    ev.magnitudes.append(mag)
    ev.focal_mechanisms.append(fm)
    fm.moment_tensor = mt
    mt.tensor = t

    # Fill values
    ev.resource_id = "smi:inversion/%s" % str(event["identifier"])
    org.time = event["time"]
    org.longitude = event["longitude"]
    org.latitude = event["latitude"]
    org.depth = event["depth_in_km"] * 1000

    mag.mag = event["Mw"]
    mag.magnitude_type = "Mw"

    t.m_rr = event["Mrr"]
    t.m_tt = event["Mpp"]
    t.m_pp = event["Mtt"]
    t.m_rt = event["Mrt"]
    t.m_rp = event["Mrp"]
    t.m_tp = event["Mtp"]

    cat.write(filename, format="quakeml")
Example #36
File: core.py Project: Qigaoo/obspy
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except:
            try:
                data = filename.decode()
            except:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1: next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(itertools.zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = (
                "Could not parse event %i (faulty file?). Will be "
                "skipped. Lines of the event:\n"
                "\t%s\n"
                "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(
            agency_id="GCMT",
            version=record["version_code"]
        )

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(
            force_resource_id=False,
            event_type="earthquake",
            event_type_certainty="known",
            event_descriptions=[
                EventDescription(text=region, type="Flinn-Engdahl region"),
                EventDescription(text=record["cmt_event_name"],
                                 type="earthquake name")
            ]
        )

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[Comment(text="Hypocenter catalog: %s" %
                              record["hypocenter_reference_catalog"],
                              force_resource_id=False)]
        )
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin", tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]},
            latitude=record["centroid_latitude"],
            latitude_errors={
                "uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000},
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy()
        )
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(
            force_resource_id=False,
            mag=round(record["Mw"], 2),
            magnitude_type="Mwc",
            origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude", tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["mb"],
            magnitude_type="mb",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'mb'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(Magnitude(
            force_resource_id=False,
            mag=record["MS"],
            magnitude_type="MS",
            comments=[Comment(
                force_resource_id=False,
                text="Reported magnitude in NDK file. Most likely 'MS'."
            )]
        ))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(
            m_rr=record["m_rr"],
            m_rr_errors={"uncertainty": record["m_rr_error"]},
            m_pp=record["m_pp"],
            m_pp_errors={"uncertainty": record["m_pp_error"]},
            m_tt=record["m_tt"],
            m_tt_errors={"uncertainty": record["m_tt_error"]},
            m_rt=record["m_rt"],
            m_rt_errors={"uncertainty": record["m_rt_error"]},
            m_rp=record["m_rp"],
            m_rp_errors={"uncertainty": record["m_rp_error"]},
            m_tp=record["m_tp"],
            m_tp_errors={"uncertainty": record["m_tp_error"]},
            creation_info=creation_info.copy()
        )
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]
            ),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy()
        )
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]
            ),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])
            ),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                             record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" %
                             record["cmt_timestamp"])],
            creation_info=creation_info.copy()
        )
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"],
                                             "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
Example #37
def makeCatalog(StazList, mt, scale, args):

    epi = args.epi.rsplit()
    model = args.model.split(os.sep)
    NrSt = len(StazList)
    NrCo = NrSt * 3
    (Fmin, Fmax) = getFreq(args)
    Tmin = ('%.0f' % (1 / Fmax))
    Tmax = ('%.0f' % (1 / Fmin))
    mo = ('%.3e' % (mt[0]))
    mw = ('%.2f' % (mt[1]))
    Pdc = ('%.2f' % (float(mt[2]) / 100))
    Pclvd = ('%.2f' % (float(mt[3]) / 100))

    Tval = ('%10.3e' % (mt[22]))
    Tplg = ('%4.1f' % (mt[23]))
    Tazi = ('%5.1f' % (mt[24]))
    Nval = ('%10.3e' % (mt[25]))
    Nplg = ('%4.1f' % (mt[26]))
    Nazi = ('%5.1f' % (mt[27]))
    Pval = ('%10.3e' % (mt[28]))
    Pplg = ('%4.1f' % (mt[29]))
    Pazi = ('%5.1f' % (mt[30]))

    STp1 = ('%5.1f' % (mt[31]))
    DPp1 = ('%4.1f' % (mt[32]))
    RAp1 = ('%6.1f' % (mt[33]))
    STp2 = ('%5.1f' % (mt[34]))
    DPp2 = ('%4.1f' % (mt[35]))
    RAp2 = ('%6.1f' % (mt[36]))
    var = ('%.2f' % (mt[37]))
    qua = ('%d' % (mt[38]))
    mij = [mt[4], mt[5], mt[6], mt[7], mt[8], mt[9]]

    mm0 = str('%10.3e' % (mij[0]))
    mm1 = str('%10.3e' % (mij[1]))
    mm2 = str('%10.3e' % (mij[2]))
    mm3 = str('%10.3e' % (mij[3]))
    mm4 = str('%10.3e' % (mij[4]))
    mm5 = str('%10.3e' % (mij[5]))
    # Aki convention
    Mrr = mm5
    Mtt = mm0
    Mff = mm1
    Mrt = mm3
    Mrf = mm4
    Mtf = mm2

    # stress regime
    A1 = PrincipalAxis(val=mt[22], dip=mt[23], strike=mt[24])
    A2 = PrincipalAxis(val=mt[25], dip=mt[26], strike=mt[27])
    A3 = PrincipalAxis(val=mt[28], dip=mt[29], strike=mt[30])

    (regime, sh) = stressRegime(A1, A2, A3)
    sh = ('%5.1f' % (sh))

    #### Build classes #################################
    #
    # The resource id is, by definition, the event origin time

    res_id = ResourceIdentifier(args.ori)
    nowUTC = datetime.datetime.utcnow()
    info = CreationInfo(author="pytdmt", version="2.4", creation_time=nowUTC)
    evOrigin = Origin(resource_id=res_id,
                      time=args.ori,
                      latitude=epi[0],
                      longitude=epi[1],
                      depth=epi[2],
                      earth_model_id=model[-1],
                      creation_info=info)
    # Magnitudes
    magnitude = Magnitude(mag=mw, magnitude_type="Mw")
    # Nodal Planes
    np1 = NodalPlane(strike=STp1, dip=DPp1, rake=RAp1)
    np2 = NodalPlane(strike=STp2, dip=DPp2, rake=RAp2)
    planes = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    # Principal axes
    Taxe = Axis(azimuth=Tazi, plunge=Tplg, length=Tval)
    Naxe = Axis(azimuth=Nazi, plunge=Nplg, length=Nval)
    Paxe = Axis(azimuth=Pazi, plunge=Pplg, length=Pval)
    axes = PrincipalAxes(t_axis=Taxe, p_axis=Paxe, n_axis=Naxe)
    # MT elements
    MT = Tensor(m_rr=Mrr, m_tt=Mtt, m_pp=Mff, m_rt=Mrt, m_rp=Mrf, m_tp=Mtf)
    # Stress regime
    regStr = 'Stress regime: ' + regime + ' -  SH = ' + sh
    strDes = EventDescription(regStr)
    # MT dataset
    dataInfo = DataUsed(wave_type="combined",
                        station_count=NrSt,
                        component_count=NrCo,
                        shortest_period=Tmin,
                        longest_period=Tmax)
    source = MomentTensor(data_used=dataInfo,
                          scalar_moment=mo,
                          tensor=MT,
                          variance_reduction=var,
                          double_couple=Pdc,
                          clvd=Pclvd,
                          iso=0)
    focMec = FocalMechanism(moment_tensor=source,
                            nodal_planes=planes,
                            principal_axes=axes,
                            azimuthal_gap=-1)

    #Initialize Event Catalog
    mtSolution = Event(creation_info=info)
    mtSolution.origins.append(evOrigin)
    mtSolution.magnitudes.append(magnitude)
    mtSolution.focal_mechanisms.append(focMec)
    mtSolution.event_descriptions.append(strDes)

    cat = Catalog()
    cat.append(mtSolution)

    return cat
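
The catalog returned by makeCatalog holds a single event carrying the origin, magnitude, nodal planes, principal axes and moment tensor built above. A minimal, self-contained sketch of how such a catalog is typically serialized to QuakeML (the values and file name below are placeholders, not output of makeCatalog):

from obspy import UTCDateTime
from obspy.core.event import Catalog, Event, Origin, Magnitude

ev = Event()
ev.origins.append(Origin(time=UTCDateTime(2020, 1, 1), latitude=42.0,
                         longitude=13.0, depth=10000.0))
ev.magnitudes.append(Magnitude(mag=4.5, magnitude_type="Mw"))
cat = Catalog(events=[ev])
cat.write("mt_solution.xml", format="QUAKEML")  # placeholder output name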
Exemplo n.º 38
0
def makeCatalog(StazList, mt, scale, args):

    epi   = args.epi.rsplit()
    model = args.model.split(os.sep)
    NrSt  = len(StazList)
    NrCo  = NrSt*3
    (Fmin,Fmax) = getFreq(args)
    Tmin  = ('%.0f' % (1/Fmax))
    Tmax  = ('%.0f' % (1/Fmin))
    mo    = ('%.3e' % (mt[0]))
    mw    = ('%.2f' % (mt[1]))
    Pdc   = ('%.2f' % (float(mt[2])/100))
    Pclvd = ('%.2f' % (float(mt[3])/100))
   
    Tval  = ('%10.3e' % (mt[22]))
    Tplg  = ('%4.1f' % (mt[23]))
    Tazi  = ('%5.1f' % (mt[24]))
    Nval  = ('%10.3e' % (mt[25]))
    Nplg  = ('%4.1f' % (mt[26]))
    Nazi  = ('%5.1f' % (mt[27]))
    Pval  = ('%10.3e' % (mt[28]))
    Pplg  = ('%4.1f' % (mt[29]))
    Pazi  = ('%5.1f' % (mt[30]))

    STp1  = ('%5.1f' % (mt[31]))
    DPp1  = ('%4.1f' % (mt[32]))
    RAp1  = ('%6.1f' % (mt[33]))
    STp2  = ('%5.1f' % (mt[34]))
    DPp2  = ('%4.1f' % (mt[35]))
    RAp2  = ('%6.1f' % (mt[36]))
    var   = ('%.2f' % (mt[37]))
    qua   = ('%d'   % (mt[38]))
    mij   = [mt[4],mt[5],mt[6],mt[7],mt[8],mt[9]]

    mm0   = str('%10.3e' % (mij[0]))
    mm1   = str('%10.3e' % (mij[1]))
    mm2   = str('%10.3e' % (mij[2]))
    mm3   = str('%10.3e' % (mij[3]))
    mm4   = str('%10.3e' % (mij[4]))
    mm5   = str('%10.3e' % (mij[5]))
    # Aki convention
    Mrr   = mm5
    Mtt   = mm0 
    Mff   = mm1
    Mrt   = mm3
    Mrf   = mm4
    Mtf   = mm2

    # stress regime
    A1 = PrincipalAxis(val=mt[22], dip=mt[23], strike=mt[24])
    A2 = PrincipalAxis(val=mt[25], dip=mt[26], strike=mt[27])
    A3 = PrincipalAxis(val=mt[28], dip=mt[29], strike=mt[30])

    (regime, sh) = stressRegime(A1, A2, A3)
    sh = ('%5.1f' % (sh))

    #### Build classes #################################
    #
    # The resource id is, by definition, the event origin time
    
    res_id      = ResourceIdentifier(args.ori)
    nowUTC      = datetime.datetime.utcnow()
    info        = CreationInfo(author="pytdmt", version="2.4", creation_time=nowUTC)
    evOrigin    = Origin(resource_id    = res_id,
                         time           = args.ori,
                         latitude       = epi[0],
                         longitude      = epi[1],
                         depth          = epi[2],
                         earth_model_id = model[-1],
                         creation_info  = info)
    # Magnitudes
    magnitude = Magnitude(mag=mw, magnitude_type = "Mw")
    # Nodal Planes
    np1      = NodalPlane(strike=STp1, dip=DPp1, rake=RAp1)
    np2      = NodalPlane(strike=STp2, dip=DPp2, rake=RAp2)
    planes   = NodalPlanes(nodal_plane_1=np1, nodal_plane_2=np2)
    # Principal axes
    Taxe     = Axis(azimuth=Tazi, plunge=Tplg, length=Tval)
    Naxe     = Axis(azimuth=Nazi, plunge=Nplg, length=Nval)
    Paxe     = Axis(azimuth=Pazi, plunge=Pplg, length=Pval)
    axes     = PrincipalAxes(t_axis=Taxe, p_axis=Paxe, n_axis=Naxe)
    # MT elements
    MT       = Tensor(m_rr=Mrr, m_tt=Mtt, m_pp=Mff, 
                      m_rt=Mrt, m_rp=Mrf, m_tp=Mtf)
    # Stress regime
    regStr   = 'Stress regime: ' + regime + ' -  SH = ' + sh
    strDes   = EventDescription(regStr)
    # MT dataset
    dataInfo = DataUsed(wave_type          = "combined",
                      station_count        = NrSt,
                      component_count      = NrCo,
                      shortest_period      = Tmin,
                      longest_period       = Tmax)
    source = MomentTensor(data_used        = dataInfo,
                      scalar_moment        = mo,
                      tensor               = MT,
                      variance_reduction   = var,
                      double_couple        = Pdc,
                      clvd                 = Pclvd,
                      iso                  = 0)
    focMec      = FocalMechanism(moment_tensor        = source,
                      nodal_planes         = planes,
                      principal_axes       = axes,
                      azimuthal_gap        = -1)

    #Initialize Event Catalog
    mtSolution = Event(creation_info=info)
    mtSolution.origins.append(evOrigin)
    mtSolution.magnitudes.append(magnitude)
    mtSolution.focal_mechanisms.append(focMec)
    mtSolution.event_descriptions.append(strDes)

    cat = Catalog()
    cat.append(mtSolution)

    return cat
Exemplo n.º 39
0
def readSeishubEventFile(filename):
    """
    Reads a Seishub event file and returns a ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly; it registers via the
        ObsPy :func:`~obspy.core.event.readEvents` function, call that instead.

    :type filename: str
    :param filename: Seishub event file to be read.
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.

    .. rubric:: Example
    """
    # Just init the parser; the SeisHub event file format has no namespaces.
    parser = XMLParser(filename)
    # A SeisHub event file specifies only a single event, so catalog-level
    # information is not really given.
    catalog = Catalog()

    # Create new Event object.
    public_id = parser.xpath('event_id/value')[0].text

    # Read the event_type tag.
    pick_method = parser.xpath2obj('event_type/account', parser, str)
    user = parser.xpath2obj('event_type/user', parser, str)
    global_evaluation_mode = parser.xpath2obj('event_type/value', parser, str)
    # The author will be stored in the CreationInfo object. This will be the
    # creation info of the event as well as on all picks.
    creation_info = {"author": user}

    # Create the event object.
    event = Event(resource_id=public_id, creation_info=creation_info)

    # Parse the origins.
    for origin_el in parser.xpath("origin"):
        origin = __toOrigin(parser, origin_el)
        event.origins.append(origin)
    # There should always be only one origin.
    assert(len(event.origins) == 1)
    # Parse the magnitudes.
    for magnitude_el in parser.xpath("magnitude"):
        magnitude = __toMagnitude(parser, magnitude_el)
        event.magnitudes.append(magnitude)
    # Parse the station magnitudes.
    for stat_magnitude_el in parser.xpath("stationMagnitude"):
        stat_magnitude = __toStationMagnitude(parser, stat_magnitude_el)
        event.station_magnitudes.append(stat_magnitude)
    # Parse the picks. Pass the global evaluation mode (automatic, manual)
    for pick_el in parser.xpath("pick"):
        pick = __toPick(parser, pick_el, global_evaluation_mode)
        event.picks.append(pick)
    # Attach the creation info to all picks and set the pick_method (the
    # event_type/account value) as the method_id of each pick.
    for pick in event.picks:
        pick.creation_info = creation_info
        pick.method_id = pick_method

    # In QuakeML a StationMagnitude object has to be associated with an Origin.
    # This in turn means that the origin needs to have a resource_id.
    event.origins[0].resource_id = "smi:local/origins/%s" % \
        event.resource_id.resource_id
    for mag in event.station_magnitudes:
        mag.origin_id = event.origins[0].resource_id

    # Add the event to the catalog
    catalog.append(event)

    return catalog
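
A short usage sketch for the reader above; the file name is a placeholder for a real SeisHub event file:

catalog = readSeishubEventFile("seishub_event.xml")  # placeholder path
event = catalog[0]
print(event.origins[0].resource_id)
for pick in event.picks:
    # every pick carries the global creation info and method_id set above
    print(pick.time, pick.method_id)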
Exemplo n.º 40
0
def filter_picks(catalog,
                 stations=None,
                 channels=None,
                 networks=None,
                 locations=None,
                 top_n_picks=None,
                 evaluation_mode='all'):
    """
    Filter events in the catalog based on a number of parameters.

    :param catalog: Catalog to filter.
    :type catalog: obspy.core.event.Catalog
    :param stations: List for stations to keep picks from.
    :type stations: list
    :param channels: List of channels to keep picks from.
    :type channels: list
    :param networks: List of networks to keep picks from.
    :type networks: list
    :param locations: List of location codes to keep picks from.
    :type locations: list
    :param top_n_picks: Filter only the top N most used station-channel pairs.
    :type top_n_picks: int
    :param evaluation_mode:
        To select only manual or automatic picks, or use all (default).
    :type evaluation_mode: str


    :return:
        Filtered Catalog - if events are left with no picks, they are removed
        from the catalog.
    :rtype: obspy.core.event.Catalog

    .. note::
        Will filter first by station, then by channel, then by network. If
        top_n_picks is used, that filter is applied last, after the other
        filters have been applied.

    .. note::
        Does not work in place on the catalog; your input catalog will be
        safe unless you overwrite it.

    .. note:: Doesn't expand wildcard characters.

    .. rubric:: Example

    >>> from obspy.clients.fdsn import Client
    >>> from eqcorrscan.utils.catalog_utils import filter_picks
    >>> from obspy import UTCDateTime
    >>> client = Client('NCEDC')
    >>> t1 = UTCDateTime(2004, 9, 28)
    >>> t2 = t1 + 86400
    >>> catalog = client.get_events(starttime=t1, endtime=t2, minmagnitude=3,
    ...                             minlatitude=35.7, maxlatitude=36.1,
    ...                             minlongitude=-120.6, maxlongitude=-120.2,
    ...                             includearrivals=True)
    >>> print(len(catalog))
    12
    >>> filtered_catalog = filter_picks(catalog, stations=['BMS', 'BAP',
    ...                                                    'PAG', 'PAN',
    ...                                                    'PBI', 'PKY',
    ...                                                    'YEG', 'WOF'])
    >>> print(len(filtered_catalog))
    12
    >>> stations = []
    >>> for event in filtered_catalog:
    ...     for pick in event.picks:
    ...         stations.append(pick.waveform_id.station_code)
    >>> print(sorted(list(set(stations))))
    ['BAP', 'BMS', 'PAG', 'PAN', 'PBI', 'PKY', 'WOF', 'YEG']
    """
    # Don't work in place on the catalog
    filtered_catalog = catalog.copy()

    if stations:
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if pick.waveform_id.station_code in stations
            ]
    if channels:
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if pick.waveform_id.channel_code in channels
            ]
    if networks:
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if pick.waveform_id.network_code in networks
            ]
    if locations:
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if pick.waveform_id.location_code in locations
            ]
    if evaluation_mode == 'manual':
        for event in filtered_catalog:
            event.picks = [
                pick for pick in event.picks
                if pick.evaluation_mode == 'manual'
            ]
    elif evaluation_mode == 'automatic':
        for event in filtered_catalog:
            event.picks = [
                pick for pick in event.picks
                if pick.evaluation_mode == 'automatic'
            ]
    elif evaluation_mode != 'all':
        warnings.warn('Unrecognised evaluation_mode: %s, using all picks' %
                      evaluation_mode)
    if top_n_picks:
        all_picks = []
        for event in filtered_catalog:
            all_picks += [(pick.waveform_id.station_code,
                           pick.waveform_id.channel_code)
                          for pick in event.picks]
        counted = Counter(all_picks).most_common()
        all_picks = []
        # Hack around sorting the counter object: Py 2 does it differently to 3
        for i in range(counted[0][1]):
            highest = [
                item[0] for item in counted if item[1] >= counted[0][1] - i
            ]
            # Sort them by alphabetical order in station
            highest = sorted(highest, key=lambda tup: tup[0])
            for stachan in highest:
                if stachan not in all_picks:
                    all_picks.append(stachan)
            if len(all_picks) > top_n_picks:
                all_picks = all_picks[0:top_n_picks]
                break
        for event in filtered_catalog:
            if len(event.picks) == 0:
                continue
            event.picks = [
                pick for pick in event.picks
                if (pick.waveform_id.station_code,
                    pick.waveform_id.channel_code) in all_picks
            ]
    # Remove events without picks
    tmp_catalog = Catalog()
    for event in filtered_catalog:
        if len(event.picks) > 0:
            tmp_catalog.append(event)

    return tmp_catalog
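
A short sketch combining the channel filter with top_n_picks; the input file is a placeholder for any local QuakeML catalog:

from obspy import read_events

cat = read_events("my_catalog.xml")  # placeholder input catalog
filtered = filter_picks(cat, channels=['HHZ', 'EHZ'], top_n_picks=5)
print(sum(len(event.picks) for event in filtered))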
Exemplo n.º 41
0
def iris2quakeml(url, output_folder=None):
    if not "/spudservice/" in url:
        url = url.replace("/spud/", "/spudservice/")
        if url.endswith("/"):
            url += "quakeml"
        else:
            url += "/quakeml"
    print "Downloading %s..." % url
    r = requests.get(url)
    if r.status_code != 200:
        msg = "Error Downloading file!"
        raise Exception(msg)

    # For some reason the quakeml file is escaped HTML.
    h = HTMLParser.HTMLParser()

    data = h.unescape(r.content)

    # Replace some XML tags.
    data = data.replace("long-period body waves", "body waves")
    data = data.replace("intermediate-period surface waves", "surface waves")
    data = data.replace("long-period mantle waves", "mantle waves")

    data = data.replace("<html><body><pre>", "")
    data = data.replace("</pre></body></html>", "")

    # Change the resource identifiers. Colons are not allowed in QuakeML.
    pattern = r"(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})\.(\d{6})"
    data = re.sub(pattern, r"\1-\2-\3T\4-\5-\6.\7", data)

    data = StringIO(data)

    try:
        cat = readEvents(data)
    except Exception:
        msg = "Could not read downloaded event data"
        raise ValueError(msg)

    # Parse the event, and use only one origin, magnitude and focal mechanism.
    # Only the first event is used. Should not be a problem for the chosen
    # global cmt application.
    ev = cat[0]

    if ev.preferred_origin():
        ev.origins = [ev.preferred_origin()]
    else:
        ev.origins = [ev.origins[0]]
    if ev.preferred_focal_mechanism():
        ev.focal_mechanisms = [ev.preferred_focal_mechanism()]
    else:
        ev.focal_mechanisms = [ev.focal_mechanisms[0]]

    try:
        mt = ev.focal_mechanisms[0].moment_tensor
    except Exception:
        msg = "No moment tensor found in file."
        raise ValueError(msg)
    seismic_moment_in_dyn_cm = mt.scalar_moment
    if not seismic_moment_in_dyn_cm:
        msg = "No scalar moment found in file."
        raise ValueError(msg)

    # Create a new magnitude object with the moment magnitude calculated from
    # the given seismic moment.
    mag = Magnitude()
    mag.magnitude_type = "Mw"
    mag.origin_id = ev.origins[0].resource_id
    # This is the formula given on the GCMT homepage.
    mag.mag = (2.0 / 3.0) * (math.log10(seismic_moment_in_dyn_cm) - 16.1)
    mag.resource_id = ev.origins[0].resource_id.resource_id.replace("Origin",
        "Magnitude")
    ev.magnitudes = [mag]
    ev.preferred_magnitude_id = mag.resource_id

    # Convert the depth to meters.
    org = ev.origins[0]
    org.depth *= 1000.0
    if org.depth_errors.uncertainty:
        org.depth_errors.uncertainty *= 1000.0

    # Ugly asserts -- this is just a simple script.
    assert(len(ev.magnitudes) == 1)
    assert(len(ev.origins) == 1)
    assert(len(ev.focal_mechanisms) == 1)

    # All values given in the QuakeML file are given in dyne * cm. Convert them
    # to N * m.
    for key, value in mt.tensor.iteritems():
        if key.startswith("m_") and len(key) == 4:
            mt.tensor[key] /= 1E7
        if key.endswith("_errors") and hasattr(value, "uncertainty"):
            mt.tensor[key].uncertainty /= 1E7
    mt.scalar_moment /= 1E7
    if mt.scalar_moment_errors.uncertainty:
        mt.scalar_moment_errors.uncertainty /= 1E7
    p_axes = ev.focal_mechanisms[0].principal_axes
    for ax in [p_axes.t_axis, p_axes.p_axis, p_axes.n_axis]:
        if ax is None or not ax.length:
            continue
        ax.length /= 1E7

    # Check if it has a source time function
    stf = mt.source_time_function
    if stf:
        if stf.type != "triangle":
            msg = ("Source time function type '%s' not yet mapped. Please "
                "contact the developers.") % stf.type
            raise NotImplementedError(msg)
        if not stf.duration:
            if not stf.decay_time:
                msg = "Not known how to derive duration without decay time."
                raise NotImplementedError(msg)
            # Approximate the duration for a triangular STF.
            stf.duration = 2 * stf.decay_time

    # Get the flinn_engdahl region for a nice name.
    fe = FlinnEngdahl()
    region_name = fe.get_region(ev.origins[0].longitude,
        ev.origins[0].latitude)
    region_name = region_name.replace(" ", "_")
    event_name = "GCMT_event_%s_Mag_%.1f_%s-%s-%s-%s-%s.xml" % \
        (region_name, ev.magnitudes[0].mag, ev.origins[0].time.year,
        ev.origins[0].time.month, ev.origins[0].time.day,
        ev.origins[0].time.hour, ev.origins[0].time.minute)

    # Check if the ids of the magnitude and origin contain the corresponding
    # tag. Otherwise replace them.
    ev.origins[0].resource_id = ev.origins[0].resource_id.resource_id.replace(
        "quakeml/gcmtid", "quakeml/origin/gcmtid")
    ev.magnitudes[0].resource_id = \
        ev.magnitudes[0].resource_id.resource_id.replace(
            "quakeml/gcmtid", "quakeml/magnitude/gcmtid")

    # Fix up the moment tensor resource_ids.
    mt.derived_origin_id = ev.origins[0].resource_id
    mt.resource_id = mt.resource_id.resource_id.replace("focalmechanism",
        "momenttensor")

    cat = Catalog()
    cat.resource_id = ev.origins[0].resource_id.resource_id.replace("origin",
        "event_parameters")
    cat.append(ev)
    if output_folder:
        event_name = os.path.join(output_folder, event_name)
    cat.write(event_name, format="quakeml", validate=True)
    print "Written file", event_name
Exemplo n.º 42
0
        event.resource_id = str(out_3[i][0])  # assign evid
        rd = str(out_3[i][0])
        pick = Pick(resource_id=rd, time=UTCDateTime(pick_time[x]),
                    waveform_id=WaveformStreamID(network_code="CI",
                                                 station_code=str(sta[x]),
                                                 channel_code=str(cha[x])),
                    phase_hint=str(phase[x]))
        origin = Origin(resource_id=str(orid[x]),
                        time=UTCDateTime(e_time[x]),
                        longitude=str(lon[x]),
                        latitude=str(lat[x]),
                        depth=str(dep[x]))
        magnitude = Magnitude(mag=ml[x], magnitude_type="M",
                              origin_id=str(orid[x]))
        arrival = Arrival(pick_id=rd, phase=str(phase[x]))
        event.picks.append(pick)
        event.origins.append(origin)
        origin.arrivals.append(arrival)
        event.magnitudes.append(magnitude)
    db_catalog.append(event)


# Only include picks for the stations used
stations = ['KCT', 'KMPB', 'KCR', 'KHMB', 'KCS', 'KCO', 'KMR', 'KPP']
all_picks = []
for event in db_catalog:
    event.picks = [pick for pick in event.picks
                   if pick.waveform_id.station_code in stations]
    all_picks += [(pick.waveform_id.station_code,
                   pick.waveform_id.channel_code) for pick in event.picks]

new_catalog = Catalog()
for event in db_catalog:
    event.picks = [pick for pick in event.picks
                   if (pick.waveform_id.station_code,
                       pick.waveform_id.channel_code) in all_picks]
    new_catalog.append(event)

# count number of picks for each event in catalog
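# A minimal sketch of the step the comment above refers to, assuming
# new_catalog from the loop above:
for event in new_catalog:
    print(event.resource_id, len(event.picks))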
Exemplo n.º 43
0
class ISFReader(object):
    encoding = 'UTF-8'
    resource_id_prefix = 'smi:local'

    def __init__(self, fh, **kwargs):
        self.lines = [_decode_if_possible(line, self.encoding).rstrip()
                      for line in fh.readlines()
                      if line.strip()]
        self.cat = Catalog()
        self._no_uuid_hashes = kwargs.get('_no_uuid_hashes', False)

    def deserialize(self):
        if not self.lines:
            raise ObsPyReadingError()
        line = self._get_next_line()
        if not line.startswith('DATA_TYPE BULLETIN IMS1.0:short'):
            raise ObsPyReadingError()
        try:
            self._deserialize()
        except ISFEndOfFile:
            pass
        return self.cat

    def _deserialize(self):
        line = self._get_next_line()
        catalog_description = line.strip()
        self.cat.description = catalog_description
        if not self.lines[0].startswith('Event'):
            raise ObsPyReadingError()
        # get next line stops the loop eventually, raising a controlled
        # exception
        while True:
            next_line_type = self._next_line_type()
            if next_line_type == 'event':
                self._read_event_header()
            elif next_line_type:
                self._process_block()
            else:
                raise ObsPyReadingError

    def _construct_id(self, parts, add_hash=False):
        id_ = '/'.join([str(self.cat.resource_id)] + list(parts))
        if add_hash and not self._no_uuid_hashes:
            id_ = str(ResourceIdentifier(prefix=id_))
        return id_

    def _get_next_line(self):
        if not self.lines:
            raise ISFEndOfFile
        line = self.lines.pop(0)
        if line.startswith('STOP'):
            raise ISFEndOfFile
        return line

    def _read_event_header(self):
        line = self._get_next_line()
        event_id = self._construct_id(['event', line[6:14].strip()])
        region = line[15:80].strip()
        event = Event(
            resource_id=event_id,
            event_descriptions=[EventDescription(text=region,
                                                 type='region name')])
        self.cat.append(event)

    def _next_line_type(self):
        if not self.lines:
            raise ISFEndOfFile
        return _block_header(self.lines[0])

    def _process_block(self):
        if not self.cat:
            raise ObsPyReadingError
        line = self._get_next_line()
        block_type = _block_header(line)
        # read origins block
        if block_type == 'origins':
            self._read_origins()
        # read publications block
        elif block_type == 'bibliography':
            self._read_bibliography()
        # read magnitudes block
        elif block_type == 'magnitudes':
            self._read_magnitudes()
        # read phases block
        elif block_type == 'phases':
            self._read_phases()
        # unexpected block header line
        else:
            msg = ('Unexpected line while reading file (line will be '
                   'ignored):\n' + line)
            warnings.warn(msg)

    def _read_phases(self):
        event = self.cat[-1]
        while not self._next_line_type():
            line = self._get_next_line()
            if line.strip().startswith('('):
                comment = self._parse_generic_comment(line)
                event.picks[-1].comments.append(comment)
                continue
            pick, amplitude, station_magnitude = self._parse_phase(line)
            if (pick, amplitude, station_magnitude) == (None, None, None):
                continue
            event.picks.append(pick)
            if amplitude:
                event.amplitudes.append(amplitude)
            if station_magnitude:
                event.station_magnitudes.append(station_magnitude)
            continue

    def _read_origins(self):
        event = self.cat[-1]
        origins = []
        event_types_certainties = []
        # just in case origin block is at end of file, make sure the event type
        # routine below gets executed, even if next line is EOF at some point
        try:
            while not self._next_line_type():
                line = self._get_next_line()
                if line.strip().startswith('('):
                    origins[-1].comments.append(
                        self._parse_generic_comment(line))
                    continue
                origin, event_type, event_type_certainty = \
                    self._parse_origin(line)
                origins.append(origin)
                event_types_certainties.append(
                    (event_type, event_type_certainty))
                continue
        finally:
            # check event types/certainties for consistency
            event_types = set(type_ for type_, _ in event_types_certainties)
            event_types.discard(None)
            if len(event_types) == 1:
                event_type = event_types.pop()
                certainties = set(
                    cert for type_, cert in event_types_certainties
                    if type_ == event_type)
                if "known" in certainties:
                    event_type_certainty = "known"
                elif "suspected" in certainties:
                    event_type_certainty = "suspected"
                else:
                    event_type_certainty = None
            else:
                event_type = None
                event_type_certainty = None
            event.origins.extend(origins)
            event.event_type = event_type
            event.event_type_certainty = event_type_certainty

    def _read_magnitudes(self):
        event = self.cat[-1]
        while not self._next_line_type():
            line = self._get_next_line()
            if line.strip().startswith('('):
                event.magnitudes[-1].comments.append(
                    self._parse_generic_comment(line))
                continue
            event.magnitudes.append(self._parse_magnitude(line))
            continue

    def _read_bibliography(self):
        event = self.cat[-1]
        while not self._next_line_type():
            line = self._get_next_line()
            if line.strip().startswith('('):
                # TODO parse bibliography comment blocks
                continue
            event.comments.append(self._parse_bibliography_item(line))
            continue

    def _make_comment(self, text):
        id_ = self._construct_id(['comment'], add_hash=True)
        comment = Comment(text=text, resource_id=id_)
        return comment

    def _parse_bibliography_item(self, line):
        return self._make_comment(line)

    def _parse_origin(self, line):
        # 1-10    i4,a1,i2,a1,i2    epicenter date (yyyy/mm/dd)
        # 12-22   i2,a1,i2,a1,f5.2  epicenter time (hh:mm:ss.ss)
        time = UTCDateTime.strptime(line[:17], '%Y/%m/%d %H:%M:')
        time += float(line[17:22])
        # 23      a1    fixed flag (f = fixed origin time solution, blank if
        #                           not a fixed origin time)
        time_fixed = fixed_flag(line[22])
        # 25-29   f5.2  origin time error (seconds; blank if fixed origin time)
        time_error = float_or_none(line[24:29])
        time_error = time_error and QuantityError(uncertainty=time_error)
        # 31-35   f5.2  root mean square of time residuals (seconds)
        rms = float_or_none(line[30:35])
        # 37-44   f8.4  latitude (negative for South)
        latitude = float_or_none(line[36:44])
        # 46-54   f9.4  longitude (negative for West)
        longitude = float_or_none(line[45:54])
        # 55      a1    fixed flag (f = fixed epicenter solution, blank if not
        #                           a fixed epicenter solution)
        epicenter_fixed = fixed_flag(line[54])
        # 56-60   f5.1  semi-major axis of 90% ellipse or its estimate
        #               (km, blank if fixed epicenter)
        _uncertainty_major_m = float_or_none(line[55:60], multiplier=1e3)
        # 62-66   f5.1  semi-minor axis of 90% ellipse or its estimate
        #               (km, blank if fixed epicenter)
        _uncertainty_minor_m = float_or_none(line[61:66], multiplier=1e3)
        # 68-70   i3    strike (0 <= x <= 360) of error ellipse clock-wise from
        #                       North (degrees)
        _uncertainty_major_azimuth = float_or_none(line[67:70])
        # 72-76   f5.1  depth (km)
        depth = float_or_none(line[71:76], multiplier=1e3)
        # 77      a1    fixed flag (f = fixed depth station, d = depth phases,
        #                           blank if not a fixed depth)
        _depth_fixed = fixed_flag(line[76])
        # 79-82   f4.1  depth error 90% (km; blank if fixed depth)
        depth_error = float_or_none(line[78:82], multiplier=1e3)
        # 84-87   i4    number of defining phases
        used_phase_count = int_or_none(line[83:87])
        # 89-92   i4    number of defining stations
        used_station_count = int_or_none(line[88:92])
        # 94-96   i3    gap in azimuth coverage (degrees)
        azimuthal_gap = float_or_none(line[93:96])
        # 98-103  f6.2  distance to closest station (degrees)
        minimum_distance = float_or_none(line[97:103])
        # 105-110 f6.2  distance to furthest station (degrees)
        maximum_distance = float_or_none(line[104:110])
        # 112     a1    analysis type: (a = automatic, m = manual, g = guess)
        evaluation_mode, evaluation_status = \
            evaluation_mode_and_status(line[111])
        # 114     a1    location method: (i = inversion, p = pattern
        #                                 recognition, g = ground truth, o =
        #                                 other)
        location_method = LOCATION_METHODS[line[113].strip().lower()]
        # 116-117 a2    event type:
        # XXX event type and event type certainty is specified per origin,
        # XXX not sure how to best handle this, for now only use it if
        # XXX information on the individual origins does not clash.. not sure
        # XXX yet how to identify the preferred origin..
        event_type, event_type_certainty = \
            EVENT_TYPE_CERTAINTY[line[115:117].strip().lower()]
        # 119-127 a9    author of the origin
        author = line[118:127].strip()
        # 129-136 a8    origin identification
        origin_id = self._construct_id(['origin', line[128:136].strip()])

        # do some combinations
        depth_error = depth_error and dict(uncertainty=depth_error,
                                           confidence_level=90)
        if all(v is not None for v in (_uncertainty_major_m,
                                       _uncertainty_minor_m,
                                       _uncertainty_major_azimuth)):
            origin_uncertainty = OriginUncertainty(
                min_horizontal_uncertainty=_uncertainty_minor_m,
                max_horizontal_uncertainty=_uncertainty_major_m,
                azimuth_max_horizontal_uncertainty=_uncertainty_major_azimuth,
                preferred_description='uncertainty ellipse',
                confidence_level=90)
            # event init always sets an empty QuantityError, even when
            # specifying None, which is strange
            for key in ['confidence_ellipsoid']:
                setattr(origin_uncertainty, key, None)
        else:
            origin_uncertainty = None
        origin_quality = OriginQuality(
            standard_error=rms, used_phase_count=used_phase_count,
            used_station_count=used_station_count, azimuthal_gap=azimuthal_gap,
            minimum_distance=minimum_distance,
            maximum_distance=maximum_distance)
        comments = []
        if location_method:
            comments.append(
                self._make_comment('location method: ' + location_method))
        if author:
            creation_info = CreationInfo(author=author)
        else:
            creation_info = None
        # assemble whole event
        origin = Origin(
            time=time, resource_id=origin_id, longitude=longitude,
            latitude=latitude, depth=depth, depth_errors=depth_error,
            origin_uncertainty=origin_uncertainty, time_fixed=time_fixed,
            epicenter_fixed=epicenter_fixed, origin_quality=origin_quality,
            comments=comments, creation_info=creation_info)
        # event init always sets an empty QuantityError, even when specifying
        # None, which is strange
        for key in ('time_errors', 'longitude_errors', 'latitude_errors',
                    'depth_errors'):
            setattr(origin, key, None)
        return origin, event_type, event_type_certainty

    def _parse_magnitude(self, line):
        #    1-5  a5   magnitude type (mb, Ms, ML, mbmle, msmle)
        magnitude_type = line[0:5].strip()
        #      6  a1   min max indicator (<, >, or blank)
        # TODO figure out the meaning of this min max indicator
        min_max_indicator = line[5:6].strip()
        #   7-10  f4.1 magnitude value
        mag = float_or_none(line[6:10])
        #  12-14  f3.1 standard magnitude error
        mag_errors = float_or_none(line[11:14])
        #  16-19  i4   number of stations used to calculate magnitude
        station_count = int_or_none(line[15:19])
        #  21-29  a9   author of the origin
        author = line[20:29].strip()
        #  31-38  a8   origin identification
        origin_id = line[30:38].strip()

        # process items
        if author:
            creation_info = CreationInfo(author=author)
        else:
            creation_info = None
        mag_errors = mag_errors and QuantityError(uncertainty=mag_errors)
        if origin_id:
            origin_id = self._construct_id(['origin', origin_id])
        else:
            origin_id = None
        if not magnitude_type:
            magnitude_type = None
        # magnitudes have no id field, so construct a unique one at least
        resource_id = self._construct_id(['magnitude'], add_hash=True)

        if min_max_indicator:
            msg = 'Magnitude min/max indicator field not yet implemented'
            warnings.warn(msg)

        # combine and return
        mag = Magnitude(
            magnitude_type=magnitude_type, mag=mag,
            station_count=station_count, creation_info=creation_info,
            mag_errors=mag_errors, origin_id=origin_id,
            resource_id=resource_id)
        # event init always sets an empty QuantityError, even when specifying
        # None, which is strange
        for key in ['mag_errors']:
            setattr(mag, key, None)
        return mag

    def _get_pick_time(self, my_string):
        """
        Look up absolute time of pick including date, based on the time-of-day
        only representation in the phase line

        Returns absolute pick time or None if it can not be determined safely.
        """
        if not my_string.strip():
            return None
        # TODO maybe we should defer phases block parsing.. but that will make
        # the whole reading more complex
        if not self.cat.events:
            msg = ('Can not parse phases block before parsing origins block, '
                   'because phase lines do not contain date information, only '
                   'time-of-day')
            raise NotImplementedError(msg)
        origin_times = [origin.time for origin in self.cat.events[-1].origins]
        if not origin_times:
            msg = ('Can not parse phases block unless origins with origin '
                   'time information are present, because phase lines do not '
                   'contain date information, only time-of-day')
            raise NotImplementedError(msg)
        # XXX this whole routine is on shaky ground..
        # since picks only have a time-of-day and there's not even an
        # association to one of the origins, in principle this would need some
        # real tough logic to make it failsafe. actually this would mean using
        # taup with the given epicentral distance of the pick and check what
        # date is appropriate.
        # for now just do a very simple logic and raise exceptions when things
        # look fishy. this is ugly but it's not worth spending more time on
        # this, unless somebody starts bumping into one of the explicitly
        # raised exceptions below.
        origin_time_min = min(origin_times)
        origin_time_max = max(origin_times)
        hour = int(my_string[0:2])
        minute = int(my_string[3:5])
        seconds = float(my_string[6:])

        all_guesses = []
        for origin in self.cat.events[-1].origins:
            first_guess = UTCDateTime(
                origin.time.year, origin.time.month, origin.time.day, hour,
                minute, seconds)
            all_guesses.append((first_guess, origin.time))
            all_guesses.append((first_guess - 86400, origin.time))
            all_guesses.append((first_guess + 86400, origin.time))

        pick_date = sorted(all_guesses, key=lambda x: abs(x[0] - x[1]))[0][0]

        # make sure event origin times are reasonably close together
        if origin_time_max - origin_time_min > 5 * 3600:
            msg = ('Origin times in event differ by more than 5 hours, this '
                   'is currently not implemented as determining the date of '
                   'the pick might be tricky. Sorry.')
            warnings.warn(msg)
            return None
        # now try the date of the latest origin and raise if things seem fishy
        t = UTCDateTime(pick_date.year, pick_date.month, pick_date.day, hour,
                        minute, seconds)
        for origin_time in origin_times:
            if abs(t - origin_time) > 6 * 3600:
                msg = ('This pick would have a time more than 6 hours after '
                       'or before one of the origins in the event. This seems '
                       'fishy. Please report an issue on our github.')
                warnings.warn(msg)
                return None
        return t

    def _parse_phase(self, line):
        # since we can not identify which origin a phase line corresponds to,
        # we can not use any of the included information that would go in the
        # Arrival object, as that would have to be attached to the appropriate
        # origin..
        # for now, just append all of these items as comments to the pick
        comments = []

        # 1-5     a5      station code
        station_code = line[0:5].strip()
        # 7-12    f6.2    station-to-event distance (degrees)
        comments.append(
            'station-to-event distance (degrees): "{}"'.format(line[6:12]))
        # 14-18   f5.1    event-to-station azimuth (degrees)
        comments.append(
            'event-to-station azimuth (degrees): "{}"'.format(line[13:18]))
        # 20-27   a8      phase code
        phase_hint = line[19:27].strip()
        # 29-40   i2,a1,i2,a1,f6.3        arrival time (hh:mm:ss.sss)
        time = self._get_pick_time(line[28:40])
        if time is None:
            msg = ('Could not determine absolute time of pick. This phase '
                   'line will be ignored:\n{}').format(line)
            warnings.warn(msg)
            return None, None, None
        # 42-46   f5.1    time residual (seconds)
        comments.append('time residual (seconds): "{}"'.format(line[41:46]))
        # 48-52   f5.1    observed azimuth (degrees)
        comments.append('observed azimuth (degrees): "{}"'.format(line[47:52]))
        # 54-58   f5.1    azimuth residual (degrees)
        comments.append('azimuth residual (degrees): "{}"'.format(line[53:58]))
        # 60-65   f5.1    observed slowness (seconds/degree)
        comments.append(
            'observed slowness (seconds/degree): "{}"'.format(line[59:65]))
        # 67-72   f5.1    slowness residual (seconds/degree)
        comments.append(
            'slowness residual (seconds/degree): "{}"'.format(line[66:71]))
        # 74      a1      time defining flag (T or _)
        comments.append('time defining flag (T or _): "{}"'.format(line[73]))
        # 75      a1      azimuth defining flag (A or _)
        comments.append(
            'azimuth defining flag (A or _): "{}"'.format(line[74]))
        # 76      a1      slowness defining flag (S or _)
        comments.append(
            'slowness defining flag (S or _): "{}"'.format(line[75]))
        # 78-82   f5.1    signal-to-noise ratio
        comments.append('signal-to-noise ratio: "{}"'.format(line[77:82]))
        # 84-92   f9.1    amplitude (nanometers)
        amplitude = float_or_none(line[83:92])
        # 94-98   f5.2    period (seconds)
        period = float_or_none(line[93:98])
        # 100     a1      type of pick (a = automatic, m = manual)
        evaluation_mode = line[99]
        # 101     a1      direction of short period motion
        #                 (c = compression, d = dilatation, _= null)
        polarity = POLARITY[line[100].strip().lower()]
        # 102     a1      onset quality (i = impulsive, e = emergent,
        #                                q = questionable, _ = null)
        onset = ONSET[line[101].strip().lower()]
        # 104-108 a5      magnitude type (mb, Ms, ML, mbmle, msmle)
        magnitude_type = line[103:108].strip()
        # 109     a1      min max indicator (<, >, or blank)
        min_max_indicator = line[108]
        # 110-113 f4.1    magnitude value
        mag = float_or_none(line[109:113])
        # 115-122 a8      arrival identification
        phase_id = line[114:122].strip()

        # process items
        waveform_id = WaveformStreamID(station_code=station_code)
        evaluation_mode = PICK_EVALUATION_MODE[evaluation_mode.strip().lower()]
        comments = [self._make_comment(', '.join(comments))]
        if phase_id:
            resource_id = self._construct_id(['pick', phase_id])
        else:
            resource_id = self._construct_id(['pick'], add_hash=True)
        if mag:
            comment = ('min max indicator (<, >, or blank): ' +
                       min_max_indicator)
            station_magnitude = StationMagnitude(
                mag=mag, magnitude_type=magnitude_type,
                resource_id=self._construct_id(['station_magnitude'],
                                               add_hash=True),
                comments=[self._make_comment(comment)])
            # event init always sets an empty ResourceIdentifier, even when
            # specifying None, which is strange
            for key in ['origin_id', 'mag_errors']:
                setattr(station_magnitude, key, None)
        else:
            station_magnitude = None

        # assemble
        pick = Pick(phase_hint=phase_hint, time=time, waveform_id=waveform_id,
                    evaluation_mode=evaluation_mode, comments=comments,
                    polarity=polarity, onset=onset, resource_id=resource_id)
        # event init always sets an empty QuantityError, even when specifying
        # None, which is strange
        for key in ('time_errors', 'horizontal_slowness_errors',
                    'backazimuth_errors'):
            setattr(pick, key, None)
        if amplitude:
            amplitude /= 1e9  # convert from nanometers to meters
            amplitude = Amplitude(
                unit='m', generic_amplitude=amplitude, period=period)
        return pick, amplitude, station_magnitude

    def _parse_generic_comment(self, line):
        return self._make_comment(line)
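
The reader above is normally reached through ObsPy's read_events plugin machinery, but it can also be driven directly on an open file handle. A minimal sketch with a placeholder file name for an IMS1.0 short-format bulletin:

with open("bulletin.isf", "rb") as fh:  # placeholder ISF bulletin file
    cat = ISFReader(fh).deserialize()
print(cat)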
Exemplo n.º 44
0
    def test_read_write(self):
        """
        Function to test the read and write capabilities of sfile_util.
        """
        import os
        from obspy.core.event import Catalog
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.core.event import read_events
        else:
            from obspy.core.event import readEvents as read_events

        # Set-up a test event
        test_event = full_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Write the catalog
        test_cat.write("Test_catalog.xml", format='QUAKEML')
        # Read and check
        read_cat = read_events("Test_catalog.xml")
        os.remove("Test_catalog.xml")
        self.assertEqual(read_cat[0].resource_id, test_cat[0].resource_id)
        for i in range(len(read_cat[0].picks)):
            for key in read_cat[0].picks[i].keys():
                # Ignore backazimuth errors and horizontal_slowness_errors
                if key in ['backazimuth_errors', 'horizontal_slowness_errors']:
                    continue
                self.assertEqual(read_cat[0].picks[i][key],
                                 test_cat[0].picks[i][key])
        self.assertEqual(read_cat[0].origins[0].resource_id,
                         test_cat[0].origins[0].resource_id)
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not a quakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        # Check magnitudes
        self.assertEqual(read_cat[0].magnitudes, test_cat[0].magnitudes)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # Check local magnitude amplitude
        self.assertEqual(read_cat[0].amplitudes[0].resource_id,
                         test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].unit,
                         test_cat[0].amplitudes[0].unit)
        self.assertEqual(read_cat[0].amplitudes[0].generic_amplitude,
                         test_cat[0].amplitudes[0].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[0].pick_id,
                         test_cat[0].amplitudes[0].pick_id)
        self.assertEqual(read_cat[0].amplitudes[0].waveform_id,
                         test_cat[0].amplitudes[0].waveform_id)
        # Check coda magnitude pick
        self.assertEqual(read_cat[0].amplitudes[1].resource_id,
                         test_cat[0].amplitudes[1].resource_id)
        self.assertEqual(read_cat[0].amplitudes[1].type,
                         test_cat[0].amplitudes[1].type)
        self.assertEqual(read_cat[0].amplitudes[1].unit,
                         test_cat[0].amplitudes[1].unit)
        self.assertEqual(read_cat[0].amplitudes[1].generic_amplitude,
                         test_cat[0].amplitudes[1].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[1].pick_id,
                         test_cat[0].amplitudes[1].pick_id)
        self.assertEqual(read_cat[0].amplitudes[1].waveform_id,
                         test_cat[0].amplitudes[1].waveform_id)
        self.assertEqual(read_cat[0].amplitudes[1].magnitude_hint,
                         test_cat[0].amplitudes[1].magnitude_hint)
        self.assertEqual(read_cat[0].amplitudes[1].snr,
                         test_cat[0].amplitudes[1].snr)
        self.assertEqual(read_cat[0].amplitudes[1].category,
                         test_cat[0].amplitudes[1].category)

        # Check the read-write s-file functionality
        sfile = eventtosfile(test_cat[0], userID='TEST',
                             evtype='L', outdir='.',
                             wavefiles='test', explosion=True, overwrite=True)
        del read_cat
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        read_cat += readpicks(sfile)
        os.remove(sfile)
        for i in range(len(read_cat[0].picks)):
            self.assertEqual(read_cat[0].picks[i].time,
                             test_cat[0].picks[i].time)
            self.assertEqual(read_cat[0].picks[i].backazimuth,
                             test_cat[0].picks[i].backazimuth)
            self.assertEqual(read_cat[0].picks[i].onset,
                             test_cat[0].picks[i].onset)
            self.assertEqual(read_cat[0].picks[i].phase_hint,
                             test_cat[0].picks[i].phase_hint)
            self.assertEqual(read_cat[0].picks[i].polarity,
                             test_cat[0].picks[i].polarity)
            self.assertEqual(read_cat[0].picks[i].waveform_id.station_code,
                             test_cat[0].picks[i].waveform_id.station_code)
            self.assertEqual(read_cat[0].picks[i].waveform_id.channel_code[-1],
                             test_cat[0].picks[i].waveform_id.channel_code[-1])
        # assert read_cat[0].origins[0].resource_id ==\
        #     test_cat[0].origins[0].resource_id
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not a quakeML format
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(read_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(read_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(read_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(read_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(read_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(read_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # assert read_cat[0].amplitudes[0].resource_id ==\
        #     test_cat[0].amplitudes[0].resource_id
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
        # Check coda magnitude pick
        # Resource ids get overwritten because you can't have two the same in
        # memory
        # self.assertEqual(read_cat[0].amplitudes[1].resource_id,
        #                  test_cat[0].amplitudes[1].resource_id)
        self.assertEqual(read_cat[0].amplitudes[1].type,
                         test_cat[0].amplitudes[1].type)
        self.assertEqual(read_cat[0].amplitudes[1].unit,
                         test_cat[0].amplitudes[1].unit)
        self.assertEqual(read_cat[0].amplitudes[1].generic_amplitude,
                         test_cat[0].amplitudes[1].generic_amplitude)
        # Resource ids get overwritten because you can't have two the same in
        # memory
        # self.assertEqual(read_cat[0].amplitudes[1].pick_id,
        #                  test_cat[0].amplitudes[1].pick_id)
        self.assertEqual(read_cat[0].amplitudes[1].waveform_id.station_code,
                         test_cat[0].amplitudes[1].waveform_id.station_code)
        self.assertEqual(read_cat[0].amplitudes[1].waveform_id.channel_code,
                         test_cat[0].amplitudes[1].
                         waveform_id.channel_code[0] +
                         test_cat[0].amplitudes[1].
                         waveform_id.channel_code[-1])
        self.assertEqual(read_cat[0].amplitudes[1].magnitude_hint,
                         test_cat[0].amplitudes[1].magnitude_hint)
        # snr is not supported in s-file
        # self.assertEqual(read_cat[0].amplitudes[1].snr,
        #                  test_cat[0].amplitudes[1].snr)
        self.assertEqual(read_cat[0].amplitudes[1].category,
                         test_cat[0].amplitudes[1].category)
        del read_cat

        # Test deliberate failures - each call below should raise IOError
        test_cat.append(full_test_event())
        # Raises error due to multiple events in catalog
        with self.assertRaises(IOError):
            sfile = eventtosfile(test_cat, userID='TEST',
                                 evtype='L', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        # Raises error due to userID being too long
        with self.assertRaises(IOError):
            sfile = eventtosfile(test_cat[0], userID='TESTICLE',
                                 evtype='L', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        # Raises error due to unrecognised event type
        with self.assertRaises(IOError):
            sfile = eventtosfile(test_cat[0], userID='TEST',
                                 evtype='U', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        # Raises error due to missing output directory
        with self.assertRaises(IOError):
            sfile = eventtosfile(test_cat[0], userID='TEST',
                                 evtype='L', outdir='albatross',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        # Raises error due to incorrect wavefile formatting
        with self.assertRaises(IOError):
            sfile = eventtosfile(test_cat[0], userID='TEST',
                                 evtype='L', outdir='.',
                                 wavefiles=1234, explosion=True,
                                 overwrite=True)
        with self.assertRaises(IndexError):
            invalid_origin = test_cat[0].copy()
            invalid_origin.origins = []
            sfile = eventtosfile(invalid_origin, userID='TEST',
                                 evtype='L', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        with self.assertRaises(ValueError):
            invalid_origin = test_cat[0].copy()
            invalid_origin.origins[0].time = None
            sfile = eventtosfile(invalid_origin, userID='TEST',
                                 evtype='L', outdir='.',
                                 wavefiles='test', explosion=True,
                                 overwrite=True)
        # Write a near empty origin
        valid_origin = test_cat[0].copy()
        valid_origin.origins[0].latitude = None
        valid_origin.origins[0].longitude = None
        valid_origin.origins[0].depth = None
        sfile = eventtosfile(valid_origin, userID='TEST',
                             evtype='L', outdir='.',
                             wavefiles='test', explosion=True,
                             overwrite=True)
        self.assertTrue(os.path.isfile(sfile))
        os.remove(sfile)
Exemplo n.º 45
0
    # Loop over stations
    for jj in range(len(inventory[ii])):
        ev_info.station = inventory[ii][jj].code
        ev_info.channel = channel # same as above

        ev_info.rlat = inventory[ii][jj].latitude
        ev_info.rlon = inventory[ii][jj].longitude
        ev_info.rtime

        # -------------------------------------------------------------------------------
        # Subset events: keep events common to the Alaska centroid ring
        # (cat_0) and this station's ring (cat_ij)
        cat_subset = Catalog()
        for kk in range(len(cat)):
            dist = obspy.geodetics.base.locations2degrees(
                ev_info.rlat, ev_info.rlon,
                cat[kk].origins[0].latitude, cat[kk].origins[0].longitude)
            if st_minradius <= dist <= st_maxradius:
                cat_subset.append(cat[kk])
        print(cat_subset)
        # -------------------------------------------------------------------------------

        # Create station directory
        sta_dir = ev_info.network + '_' + ev_info.station
        odir = out_dir + sta_dir
        if not os.path.exists(odir):
            os.makedirs(odir)

        # save catalog subset for this station
        fname = odir + '_event_subset'
        cat_subset.write(fname, format='CNV')
        fname = odir + '_event_subset.eps'
        cat_subset.plot(outfile = fname)
Exemplo n.º 46
0
    def test_read_write(self):
        """
        Function to test the read and write capabilities of sfile_util.
        """
        import os
        from obspy.core.event import Catalog
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.core.event import read_events
        else:
            from obspy.core.event import readEvents as read_events

        # Set-up a test event
        test_event = basic_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Write the catalog
        test_cat.write("Test_catalog.xml", format='QUAKEML')
        # Read and check
        read_cat = read_events("Test_catalog.xml")
        os.remove("Test_catalog.xml")
        self.assertEqual(read_cat[0].resource_id, test_cat[0].resource_id)
        self.assertEqual(read_cat[0].picks, test_cat[0].picks)
        self.assertEqual(read_cat[0].origins[0].resource_id,
                         test_cat[0].origins[0].resource_id)
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not a QuakeML attribute
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes, test_cat[0].magnitudes)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        self.assertEqual(read_cat[0].amplitudes[0].resource_id,
                         test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].unit,
                         test_cat[0].amplitudes[0].unit)
        self.assertEqual(read_cat[0].amplitudes[0].generic_amplitude,
                         test_cat[0].amplitudes[0].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[0].pick_id,
                         test_cat[0].amplitudes[0].pick_id)
        self.assertEqual(read_cat[0].amplitudes[0].waveform_id,
                         test_cat[0].amplitudes[0].waveform_id)

        # Check the read-write s-file functionality
        sfile = eventtosfile(test_cat[0], userID='TEST',
                             evtype='L', outdir='.',
                             wavefiles='test', explosion=True, overwrite=True)
        del read_cat
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        read_cat += readpicks(sfile)
        os.remove(sfile)
        self.assertEqual(read_cat[0].picks[0].time,
                         test_cat[0].picks[0].time)
        self.assertEqual(read_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(read_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(read_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(read_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(read_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(read_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # assert read_cat[0].origins[0].resource_id ==\
        #     test_cat[0].origins[0].resource_id
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not a QuakeML attribute
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(read_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(read_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(read_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(read_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(read_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(read_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # assert read_cat[0].amplitudes[0].resource_id ==\
        #     test_cat[0].amplitudes[0].resource_id
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
        del read_cat
        # assert read_cat[0].amplitudes[0].pick_id ==\
        #     test_cat[0].amplitudes[0].pick_id
        # assert read_cat[0].amplitudes[0].waveform_id ==\
        #     test_cat[0].amplitudes[0].waveform_id

        # Test the wrappers for PICK and EVENTINFO classes
        picks, evinfo = eventtopick(test_cat)
        # Test the conversion back
        conv_cat = Catalog()
        conv_cat.append(picktoevent(evinfo, picks))
        self.assertEqual(conv_cat[0].picks[0].time, test_cat[0].picks[0].time)
        self.assertEqual(conv_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(conv_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(conv_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(conv_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # self.assertEqual(read_cat[0].origins[0].resource_id,
        #                  test_cat[0].origins[0].resource_id)
        self.assertEqual(conv_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not a QuakeML attribute
        self.assertEqual(conv_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(conv_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(conv_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(conv_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(conv_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(conv_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(conv_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(conv_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # self.assertEqual(read_cat[0].amplitudes[0].resource_id,
        #                  test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(conv_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(conv_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
Exemplo n.º 47
0
def readSeishubEventFile(filename):
    """
    Reads a SeisHub event file and returns an ObsPy Catalog object.

    .. warning::
        This function should NOT be called directly; it is registered with
        the ObsPy :func:`~obspy.core.event.readEvents` function, call that
        instead.

    :type filename: str
    :param filename: SeisHub event file to be read.
    :rtype: :class:`~obspy.core.event.Catalog`
    :return: An ObsPy Catalog object.

    .. rubric:: Example
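
    A minimal usage sketch; the file name below is hypothetical, and the
    direct call is for illustration only - in practice this reader is
    reached through :func:`~obspy.core.event.readEvents`:

    >>> catalog = readSeishubEventFile("obspyck_event.xml")  # doctest: +SKIP
    >>> print(len(catalog))  # doctest: +SKIP
    1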
    """
    global CURRENT_TYPE

    base_name = os.path.basename(filename)

    if base_name.lower().startswith("baynet"):
        CURRENT_TYPE = "baynet"
    elif base_name.lower().startswith("earthworm"):
        CURRENT_TYPE = "earthworm"
    elif base_name.lower().startswith("gof"):
        CURRENT_TYPE = "seiscomp3"
    elif base_name.lower().startswith("obspyck") or base_name == "5622":
        CURRENT_TYPE = "obspyck"
    elif base_name.lower().startswith("toni"):
        CURRENT_TYPE = "toni"
    else:
        print "AAAAAAAAAAAAAAAAAAAAAAAAAAHHHHHHHHHHHHHHHHHHH"
        raise Exception

    # Just init the parser, the SeisHub event file format has no namespaces.
    parser = XMLParser(filename)
    # Create new Event object.
    public_id = parser.xpath('event_id/value')[0].text

    # A SeisHub event file specifies a single event, so no Catalog-level
    # information is given.
    catalog = Catalog()
    catalog.resource_id = "/".join([RESOURCE_ROOT, "catalog", public_id])

    # Read the event_type tag.
    account = parser.xpath2obj('event_type/account', parser, str)
    user = parser.xpath2obj('event_type/user', parser, str)
    global_evaluation_mode = parser.xpath2obj('event_type/value', parser, str)
    public = parser.xpath2obj('event_type/public', parser, str)
    public = {"True": True, "False": False}.get(public, None)
    if account is not None and account.lower() != "sysop":
        public = False
    # The author will be stored in the CreationInfo object. This will be the
    # creation info of the event as well as on all picks.
    author = user
    if CURRENT_TYPE in ["seiscomp3", "earthworm"]:
        author = CURRENT_TYPE
    creation_info = {"author": author,
        "agency_id": "Erdbebendienst Bayern",
        "agency_uri": "%s/agency" % RESOURCE_ROOT,
        "creation_time": NOW}

    # Create the event object.
    event = Event(resource_id="/".join([RESOURCE_ROOT, "event", public_id]),
        creation_info=creation_info)
    # If account is None or 'sysop' and public is True, mark the event as
    # 'public', otherwise as 'private'.
    event.extra = AttribDict()
    event.extra.public = {'value': public, 'namespace': NAMESPACE}
    event.extra.evaluationMode = {'value': global_evaluation_mode, 'namespace': NAMESPACE}

    event_type = parser.xpath2obj('type', parser, str)
    if event_type is not None:
        if event_type == "induced earthquake":
            event_type = "induced or triggered event"
        if event_type != "null":
            event.event_type = event_type

    # Parse the origins.
    origins = parser.xpath("origin")
    if len(origins) > 1:
        msg = "Only files with a single origin are currently supported"
        raise Exception(msg)
    for origin_el in parser.xpath("origin"):
        origin = __toOrigin(parser, origin_el)
        event.origins.append(origin)
    # Parse the magnitudes.
    for magnitude_el in parser.xpath("magnitude"):
        magnitude = __toMagnitude(parser, magnitude_el, origin)
        if magnitude.mag is None:
            continue
        event.magnitudes.append(magnitude)
    # Parse the picks. Pass the global evaluation mode (automatic, manual)
    for pick_el in parser.xpath("pick"):
        pick = __toPick(parser, pick_el, global_evaluation_mode)
        if pick is None:
            continue
        event.picks.append(pick)
        # The arrival object gets the following things from the Seishub.pick
        # objects
        # arrival.time_weight = pick.phase_weight
        # arrival.time_residual = pick.phase_res
        # arrival.azimuth = pick.azimuth
        # arrival.take_off_angle = pick.incident
        # arrival.distance = hyp_dist
        arrival = __toArrival(parser, pick_el, global_evaluation_mode, pick)
        if event.origins:
            event.origins[0].arrivals.append(arrival)

    # Parse the station magnitudes.
    for stat_magnitude_el in parser.xpath("stationMagnitude"):
        stat_magnitude = __toStationMagnitude(parser, stat_magnitude_el)
        event.station_magnitudes.append(stat_magnitude)

    # Parse the amplitudes
    # we don't reference their id in the corresponding station magnitude,
    # because we use one amplitude measurement for each component
    for el in parser.xpath("stationMagnitude/amplitude"):
        event.amplitudes.append(__toAmplitude(parser, el))

    for mag in event.station_magnitudes:
        mag.origin_id = event.origins[0].resource_id

    for _i, stat_mag in enumerate(event.station_magnitudes):
        contrib = StationMagnitudeContribution()
        weight = None
        # The order of station magnitude objects is the same as in the xml
        # file.
        weight = parser.xpath2obj("weight",
            parser.xpath("stationMagnitude")[_i], float)
        if weight is not None:
            contrib.weight = weight
        contrib.station_magnitude_id = stat_mag.resource_id
        event.magnitudes[0].station_magnitude_contributions.append(contrib)

    for foc_mec_el in parser.xpath("focalMechanism"):
        foc_mec = __toFocalMechanism(parser, foc_mec_el)
        if foc_mec is not None:
            event.focal_mechanisms.append(foc_mec)

    # Set the origin id for the focal mechanisms. There is only one origin per
    # SeisHub event file.
    for focmec in event.focal_mechanisms:
        focmec.triggering_origin_id = event.origins[0].resource_id

    # Add the event to the catalog
    catalog.append(event)

    return catalog
Exemplo n.º 48
0
def match_filter(template_names, template_list, st, threshold,
                 threshold_type, trig_int, plotvar, plotdir='.', cores=1,
                 tempdir=False, debug=0, plot_format='png',
                 output_cat=False, extract_detections=False,
                 arg_check=True):
    """
    Main matched-filter detection function.
    Over-arching code to run the correlations of given templates with a \
    day of seismic data and output the detections based on a given threshold.
    For a functional example see the tutorials.

    :type template_names: list
    :param template_names: List of template names in the same order as \
        template_list
    :type template_list: list
    :param template_list: A list of templates of which each template is a \
        Stream of obspy traces containing seismic data and header information.
    :type st: obspy.core.stream.Stream
    :param st: An obspy.Stream object containing all the data available and \
        required for the correlations with templates given.  For efficiency \
        this should contain no excess traces which are not in one or more of \
        the templates.  This will now remove excess traces internally, but \
        will copy the stream and work on the copy, leaving your input stream \
        untouched.
    :type threshold: float
    :param threshold: A threshold value set based on the threshold_type
    :type threshold_type: str
    :param threshold_type: The type of threshold to be used, can be MAD, \
        absolute or av_chan_corr. The MAD threshold is calculated as \
        threshold * median(abs(cccsum)), where cccsum is the \
        cross-correlation sum for a given template. The absolute threshold \
        is a true absolute threshold based on the cccsum value. \
        av_chan_corr is based on the mean values of single-channel \
        cross-correlations, assuming all data required by the template are \
        present, e.g. av_chan_corr_thresh = threshold * (cccsum / \
        len(template)), where template is a single template from the input \
        and the length is the number of channels within this template.
    :type trig_int: float
    :param trig_int: Minimum gap between detections in seconds.
    :type plotvar: bool
    :param plotvar: Turn plotting on or off
    :type plotdir: str
    :param plotdir: Path to plotting folder, plots will be output here, \
        defaults to run location.
    :type tempdir: str
    :param tempdir: Directory to put temporary files, or False
    :type cores: int
    :param cores: Number of cores to use
    :type debug: int
    :param debug: Debug output level, the bigger the number, the more the \
        output.
    :type plot_format: str
    :param plot_format: Specify format of output plots if saved
    :type output_cat: bool
    :param output_cat: Specifies if matched_filter will output an \
        obspy.Catalog class containing events for each detection. Default \
        is False, in which case matched_filter will output a list of \
        detection classes, as normal.
    :type extract_detections: bool
    :param extract_detections: Specifies whether or not to return a list of \
        streams, one stream per detection.
    :type arg_check: bool
    :param arg_check: Check arguments, defaults to True, but if running in \
        bulk, and you are certain of your arguments, then set to False.

    :return: :class:`DETECTION` detections for each channel formatted as \
        :class:`obspy.UTCDateTime` objects.
    :return: :class:`obspy.Catalog` containing events for each detection.
    :return: list of :class:`obspy.Stream` objects for each detection.

    .. note:: Plotting within the match-filter routine uses the Agg backend \
        with interactive plotting turned off.  This is because the function \
        is designed to work in bulk.  If you wish to turn interactive \
        plotting on you must import matplotlib in your script first, when you \
        then import match_filter you will get the warning that this call to \
        matplotlib has no effect, which will mean that match_filter has not \
        changed the plotting behaviour.

    .. note:: The output_cat flag will create an :class:`obspy.Catalog` \
        containing one event for each :class:`DETECTION` generated by \
        match_filter. Each event will contain a number of comments dealing \
        with correlation values and channels used for the detection. Each \
        channel used for the detection will have a corresponding \
        :class:`Pick` which will contain time and waveform information. \
        HOWEVER, the user \
        should note that, at present, the pick times do not account for the \
        prepick times inherent in each template. For example, if a template \
        trace starts 0.1 seconds before the actual arrival of that phase, \
        then the pick time generated by match_filter for that phase will be \
        0.1 seconds early. We are looking towards a solution which will \
        involve saving templates alongside associated metadata.
    """
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    plt.ioff()
    import copy
    from eqcorrscan.utils import plotting
    from eqcorrscan.utils import findpeaks
    from obspy import Trace, Catalog, UTCDateTime, Stream
    from obspy.core.event import Event, Pick, CreationInfo, ResourceIdentifier
    from obspy.core.event import Comment, WaveformStreamID
    import time

    if arg_check:
        # Check the arguments up-front - if arguments are of the wrong type
        # the error output from the parallel workers won't be useful
        if not type(template_names) == list:
            raise IOError('template_names must be of type: list')
        if not type(template_list) == list:
            raise IOError('templates must be of type: list')
        for template in template_list:
            if not type(template) == Stream:
                msg = 'template in template_list must be of type: ' +\
                      'obspy.core.stream.Stream'
                raise IOError(msg)
        if not type(st) == Stream:
            msg = 'st must be of type: obspy.core.stream.Stream'
            raise IOError(msg)
        if threshold_type not in ['MAD', 'absolute', 'av_chan_corr']:
            msg = 'threshold_type must be one of: MAD, absolute, av_chan_corr'
            raise IOError(msg)

    # Copy the stream here because we will muck about with it
    stream = st.copy()
    templates = copy.deepcopy(template_list)
    # Debug option to confirm that the channel names match those in the
    # templates
    if debug >= 2:
        template_stachan = []
        data_stachan = []
        for template in templates:
            for tr in template:
                template_stachan.append(tr.stats.station + '.' +
                                        tr.stats.channel)
        for tr in stream:
            data_stachan.append(tr.stats.station + '.' + tr.stats.channel)
        template_stachan = list(set(template_stachan))
        data_stachan = list(set(data_stachan))
        if debug >= 3:
            print('I have template info for these stations:')
            print(template_stachan)
            print('I have daylong data for these stations:')
            print(data_stachan)
    # Perform a check that the daylong vectors are daylong
    for tr in stream:
        if tr.stats.sampling_rate * 86400 != tr.stats.npts:
            msg = ' '.join(['Data are not daylong for', tr.stats.station,
                            tr.stats.channel])
            raise ValueError(msg)
    # Perform check that all template lengths are internally consistent
    for i, temp in enumerate(template_list):
        if len(set([tr.stats.npts for tr in temp])) > 1:
            msg = ('Template %s contains traces of differing length! This '
                   'will cause issues.' % template_names[i])
            raise ValueError(msg)
    # Call the _template_loop function to do all the correlation work
    outtic = time.clock()
    # Edited here from the previous, stable but slow, match_filter.
    # It would be worth testing an approach without an if statement, where
    # every possible template station has data, and stations without real
    # data are filled with NaN so that they return a NaN ccc_sum.
    # Note: this works
    if debug >= 2:
        print('Ensuring all template channels have matches in daylong data')
    template_stachan = []
    for template in templates:
        for tr in template:
            template_stachan += [(tr.stats.station, tr.stats.channel)]
    template_stachan = list(set(template_stachan))
    # Copy this here to keep it safe
    for stachan in template_stachan:
        if not stream.select(station=stachan[0], channel=stachan[1]):
            # Remove template traces rather than adding NaN data
            for template in templates:
                if template.select(station=stachan[0], channel=stachan[1]):
                    for tr in template.select(station=stachan[0],
                                              channel=stachan[1]):
                        template.remove(tr)
    # Remove un-needed channels
    for tr in stream:
        if not (tr.stats.station, tr.stats.channel) in template_stachan:
            stream.remove(tr)
    # Also pad out templates to have all channels
    # Iterate over copies so that removing from the original lists is safe
    for template, template_name in zip(list(templates),
                                       list(template_names)):
        if len(template) == 0:
            msg = ('No channels matching in continuous data for ' +
                   'template' + template_name)
            warnings.warn(msg)
            templates.remove(template)
            template_names.remove(template_name)
            continue
        for stachan in template_stachan:
            if not template.select(station=stachan[0], channel=stachan[1]):
                nulltrace = Trace()
                nulltrace.stats.station = stachan[0]
                nulltrace.stats.channel = stachan[1]
                nulltrace.stats.sampling_rate = template[0].stats.sampling_rate
                nulltrace.stats.starttime = template[0].stats.starttime
                nulltrace.data = np.array([np.NaN] * len(template[0].data),
                                          dtype=np.float32)
                template += nulltrace
    if debug >= 2:
        print('Starting the correlation run for this day')
    [cccsums, no_chans, chans] = _channel_loop(templates, stream, cores, debug)
    if len(cccsums[0]) == 0:
        raise ValueError('Correlation has not run, zero length cccsum')
    outtoc = time.clock()
    print(' '.join(['Looping over templates and streams took:',
                    str(outtoc - outtic), 's']))
    if debug >= 2:
        print(' '.join(['The shape of the returned cccsums is:',
                        str(np.shape(cccsums))]))
        print(' '.join(['This is from', str(len(templates)), 'templates']))
        print(' '.join(['Correlated with', str(len(stream)),
                        'channels of data']))
    detections = []
    if output_cat:
        det_cat = Catalog()
    for i, cccsum in enumerate(cccsums):
        template = templates[i]
        if threshold_type == 'MAD':
            rawthresh = threshold * np.median(np.abs(cccsum))
        elif threshold_type == 'absolute':
            rawthresh = threshold
        elif threshold_type == 'av_chan_corr':
            rawthresh = threshold * no_chans[i]
        # Findpeaks returns a list of tuples in the form [(cccsum, sample)]
        print(' '.join(['Threshold is set at:', str(rawthresh)]))
        print(' '.join(['Max of data is:', str(max(cccsum))]))
        print(' '.join(['Mean of data is:', str(np.mean(cccsum))]))
        if np.abs(np.mean(cccsum)) > 0.05:
            warnings.warn('Mean is not zero!  Check this!')
        # Set up a trace object for the cccsum as this is easier to plot and
        # maintains timing
        if plotvar:
            stream_plot = copy.deepcopy(stream[0])
            # Downsample for plotting
            stream_plot.decimate(int(stream[0].stats.sampling_rate / 10))
            cccsum_plot = Trace(cccsum)
            cccsum_plot.stats.sampling_rate = stream[0].stats.sampling_rate
            # Resample here to maintain shape better
            cccsum_hist = cccsum_plot.copy()
            cccsum_hist = cccsum_hist.decimate(
                int(stream[0].stats.sampling_rate / 10)).data
            cccsum_plot = plotting.chunk_data(cccsum_plot, 10,
                                              'Maxabs').data
            # Enforce same length
            stream_plot.data = stream_plot.data[0:len(cccsum_plot)]
            cccsum_plot = cccsum_plot[0:len(stream_plot.data)]
            cccsum_hist = cccsum_hist[0:len(stream_plot.data)]
            plotting.triple_plot(cccsum_plot, cccsum_hist,
                                 stream_plot, rawthresh, True,
                                 plotdir + '/cccsum_plot_' +
                                 template_names[i] + '_' +
                                 stream[0].stats.starttime.
                                 datetime.strftime('%Y-%m-%d') +
                                 '.' + plot_format)
            if debug >= 4:
                print(' '.join(['Saved the cccsum to:', template_names[i],
                                stream[0].stats.starttime.datetime.
                                strftime('%Y%j')]))
                np.save(template_names[i] +
                        stream[0].stats.starttime.datetime.strftime('%Y%j'),
                        cccsum)
        tic = time.clock()
        if debug >= 4:
            np.save('cccsum_' + str(i) + '.npy', cccsum)
        if debug >= 3 and max(cccsum) > rawthresh:
            peaks = findpeaks.find_peaks2_short(cccsum, rawthresh,
                                                trig_int * stream[0].stats.
                                                sampling_rate, debug,
                                                stream[0].stats.starttime,
                                                stream[0].stats.sampling_rate)
        elif max(cccsum) > rawthresh:
            peaks = findpeaks.find_peaks2_short(cccsum, rawthresh,
                                                trig_int * stream[0].stats.
                                                sampling_rate, debug)
        else:
            print('No peaks found above threshold')
            peaks = False
        toc = time.clock()
        if debug >= 1:
            print(' '.join(['Finding peaks took:', str(toc - tic), 's']))
        if peaks:
            for peak in peaks:
                detecttime = stream[0].stats.starttime +\
                    peak[1] / stream[0].stats.sampling_rate
                # Detection time must form a valid QuakeML URI within the
                # resource_id; write a formatted string that is still
                # readable by UTCDateTime.
                rid = ResourceIdentifier(id=template_names[i] + '_' +
                                         str(detecttime.strftime('%Y%m%dT%H%M%S.%f')),
                                         prefix='smi:local')
                ev = Event(resource_id=rid)
                cr_i = CreationInfo(author='EQcorrscan',
                                    creation_time=UTCDateTime())
                ev.creation_info = cr_i
                # All detection info in Comments for lack of a better idea
                thresh_str = 'threshold=' + str(rawthresh)
                ccc_str = 'detect_val=' + str(peak[0])
                used_chans = 'channels used: ' +\
                             ' '.join([str(pair) for pair in chans[i]])
                ev.comments.append(Comment(text=thresh_str))
                ev.comments.append(Comment(text=ccc_str))
                ev.comments.append(Comment(text=used_chans))
                min_template_tm = min([tr.stats.starttime for tr in template])
                for tr in template:
                    if (tr.stats.station, tr.stats.channel) not in chans[i]:
                        continue
                    else:
                        pick_tm = detecttime + (tr.stats.starttime - min_template_tm)
                        wv_id = WaveformStreamID(network_code=tr.stats.network,
                                                 station_code=tr.stats.station,
                                                 channel_code=tr.stats.channel)
                        ev.picks.append(Pick(time=pick_tm, waveform_id=wv_id))
                detections.append(DETECTION(template_names[i],
                                            detecttime,
                                            no_chans[i], peak[0], rawthresh,
                                            'corr', chans[i], event=ev))
                if output_cat:
                    det_cat.append(ev)
        if extract_detections:
            detection_streams = extract_from_stream(stream, detections)
    del stream, templates
    if output_cat and not extract_detections:
        return detections, det_cat
    elif not extract_detections:
        return detections
    elif extract_detections and not output_cat:
        return detections, detection_streams
    else:
        return detections, det_cat, detection_streams
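
The threshold formulas described for the threshold_type parameter above can be
summarised in a small helper. This is an illustrative sketch only, not part of
EQcorrscan's API; the names cccsum and n_chans simply mirror the variables used
inside match_filter.

import numpy as np

def raw_threshold(cccsum, threshold, threshold_type, n_chans):
    """Return the absolute trigger level implied by threshold_type."""
    if threshold_type == 'MAD':
        # Scale the median absolute cross-correlation sum
        return threshold * np.median(np.abs(cccsum))
    elif threshold_type == 'absolute':
        # Use the given value directly
        return threshold
    elif threshold_type == 'av_chan_corr':
        # Mean single-channel correlation scaled by the number of channels
        return threshold * n_chans
    raise ValueError('Unknown threshold_type: %s' % threshold_type)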
Exemplo n.º 49
0
    def test_read_write(self):
        """
        Function to test the read and write capabilities of sfile_util.
        """
        import os
        from obspy.core.event import Catalog
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.core.event import read_events
        else:
            from obspy.core.event import readEvents as read_events

        # Set-up a test event
        test_event = basic_test_event()
        # Add the event to a catalogue which can be used for QuakeML testing
        test_cat = Catalog()
        test_cat += test_event
        # Write the catalog
        test_cat.write("Test_catalog.xml", format='QUAKEML')
        # Read and check
        read_cat = read_events("Test_catalog.xml")
        os.remove("Test_catalog.xml")
        self.assertEqual(read_cat[0].resource_id, test_cat[0].resource_id)
        self.assertEqual(read_cat[0].picks, test_cat[0].picks)
        self.assertEqual(read_cat[0].origins[0].resource_id,
                         test_cat[0].origins[0].resource_id)
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not a QuakeML attribute
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes, test_cat[0].magnitudes)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        self.assertEqual(read_cat[0].amplitudes[0].resource_id,
                         test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].unit,
                         test_cat[0].amplitudes[0].unit)
        self.assertEqual(read_cat[0].amplitudes[0].generic_amplitude,
                         test_cat[0].amplitudes[0].generic_amplitude)
        self.assertEqual(read_cat[0].amplitudes[0].pick_id,
                         test_cat[0].amplitudes[0].pick_id)
        self.assertEqual(read_cat[0].amplitudes[0].waveform_id,
                         test_cat[0].amplitudes[0].waveform_id)

        # Check the read-write s-file functionality
        sfile = eventtosfile(test_cat[0],
                             userID='TEST',
                             evtype='L',
                             outdir='.',
                             wavefiles='test',
                             explosion=True,
                             overwrite=True)
        del read_cat
        self.assertEqual(readwavename(sfile), ['test'])
        read_cat = Catalog()
        read_cat += readpicks(sfile)
        os.remove(sfile)
        self.assertEqual(read_cat[0].picks[0].time, test_cat[0].picks[0].time)
        self.assertEqual(read_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(read_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(read_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(read_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(read_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(read_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # assert read_cat[0].origins[0].resource_id ==\
        #     test_cat[0].origins[0].resource_id
        self.assertEqual(read_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not a QuakeML attribute
        self.assertEqual(read_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(read_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(read_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(read_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(read_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(read_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(read_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(read_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(read_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(read_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(read_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(read_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # assert read_cat[0].amplitudes[0].resource_id ==\
        #     test_cat[0].amplitudes[0].resource_id
        self.assertEqual(read_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(read_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
        del read_cat
        # assert read_cat[0].amplitudes[0].pick_id ==\
        #     test_cat[0].amplitudes[0].pick_id
        # assert read_cat[0].amplitudes[0].waveform_id ==\
        #     test_cat[0].amplitudes[0].waveform_id

        # Test the wrappers for PICK and EVENTINFO classes
        picks, evinfo = eventtopick(test_cat)
        # Test the conversion back
        conv_cat = Catalog()
        conv_cat.append(picktoevent(evinfo, picks))
        self.assertEqual(conv_cat[0].picks[0].time, test_cat[0].picks[0].time)
        self.assertEqual(conv_cat[0].picks[0].backazimuth,
                         test_cat[0].picks[0].backazimuth)
        self.assertEqual(conv_cat[0].picks[0].onset,
                         test_cat[0].picks[0].onset)
        self.assertEqual(conv_cat[0].picks[0].phase_hint,
                         test_cat[0].picks[0].phase_hint)
        self.assertEqual(conv_cat[0].picks[0].polarity,
                         test_cat[0].picks[0].polarity)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.station_code,
                         test_cat[0].picks[0].waveform_id.station_code)
        self.assertEqual(conv_cat[0].picks[0].waveform_id.channel_code[-1],
                         test_cat[0].picks[0].waveform_id.channel_code[-1])
        # self.assertEqual(read_cat[0].origins[0].resource_id,
        #                  test_cat[0].origins[0].resource_id)
        self.assertEqual(conv_cat[0].origins[0].time,
                         test_cat[0].origins[0].time)
        # Note that time_residual_RMS is not a QuakeML attribute
        self.assertEqual(conv_cat[0].origins[0].longitude,
                         test_cat[0].origins[0].longitude)
        self.assertEqual(conv_cat[0].origins[0].latitude,
                         test_cat[0].origins[0].latitude)
        self.assertEqual(conv_cat[0].origins[0].depth,
                         test_cat[0].origins[0].depth)
        self.assertEqual(conv_cat[0].magnitudes[0].mag,
                         test_cat[0].magnitudes[0].mag)
        self.assertEqual(conv_cat[0].magnitudes[1].mag,
                         test_cat[0].magnitudes[1].mag)
        self.assertEqual(conv_cat[0].magnitudes[2].mag,
                         test_cat[0].magnitudes[2].mag)
        self.assertEqual(conv_cat[0].magnitudes[0].creation_info,
                         test_cat[0].magnitudes[0].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[1].creation_info,
                         test_cat[0].magnitudes[1].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[2].creation_info,
                         test_cat[0].magnitudes[2].creation_info)
        self.assertEqual(conv_cat[0].magnitudes[0].magnitude_type,
                         test_cat[0].magnitudes[0].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[1].magnitude_type,
                         test_cat[0].magnitudes[1].magnitude_type)
        self.assertEqual(conv_cat[0].magnitudes[2].magnitude_type,
                         test_cat[0].magnitudes[2].magnitude_type)
        self.assertEqual(conv_cat[0].event_descriptions,
                         test_cat[0].event_descriptions)
        # self.assertEqual(read_cat[0].amplitudes[0].resource_id,
        #                  test_cat[0].amplitudes[0].resource_id)
        self.assertEqual(conv_cat[0].amplitudes[0].period,
                         test_cat[0].amplitudes[0].period)
        self.assertEqual(conv_cat[0].amplitudes[0].snr,
                         test_cat[0].amplitudes[0].snr)
Exemplo n.º 50
0
def _read_ndk(filename, *args, **kwargs):  # @UnusedVariable
    """
    Reads an NDK file to a :class:`~obspy.core.event.Catalog` object.

    :param filename: File or file-like object in text mode.
    """
    # Read the whole file at once. While an iterator would be more efficient
    # the largest NDK file out in the wild is 13.7 MB so it does not matter
    # much.
    if not hasattr(filename, "read"):
        # Check if it exists, otherwise assume it's a string.
        try:
            with open(filename, "rt") as fh:
                data = fh.read()
        except Exception:
            try:
                data = filename.decode()
            except Exception:
                data = str(filename)
            data = data.strip()
    else:
        data = filename.read()
        if hasattr(data, "decode"):
            data = data.decode()

    # Create iterator that yields lines.
    def lines_iter():
        prev_line = -1
        while True:
            next_line = data.find("\n", prev_line + 1)
            if next_line < 0:
                break
            yield data[prev_line + 1:next_line]
            prev_line = next_line
        if len(data) > prev_line + 1:
            yield data[prev_line + 1:]

    # Use one Flinn Engdahl object for all region determinations.
    fe = FlinnEngdahl()
    cat = Catalog(resource_id=_get_resource_id("catalog", str(uuid.uuid4())))

    # Loop over 5 lines at once.
    for _i, lines in enumerate(zip_longest(*[lines_iter()] * 5)):
        if None in lines:
            msg = "Skipped last %i lines. Not a multiple of 5 lines." % (
                lines.count(None))
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Parse the lines to a human readable dictionary.
        try:
            record = _read_lines(*lines)
        except (ValueError, ObsPyNDKException):
            exc = traceback.format_exc()
            msg = ("Could not parse event %i (faulty file?). Will be "
                   "skipped. Lines of the event:\n"
                   "\t%s\n"
                   "%s") % (_i + 1, "\n\t".join(lines), exc)
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Use one creation info for essentially every item.
        creation_info = CreationInfo(agency_id="GCMT",
                                     version=record["version_code"])

        # Use the ObsPy Flinn Engdahl region determiner as the region in the
        # NDK files is oftentimes trimmed.
        region = fe.get_region(record["centroid_longitude"],
                               record["centroid_latitude"])

        # Create an event object.
        event = Event(force_resource_id=False,
                      event_type="earthquake",
                      event_type_certainty="known",
                      event_descriptions=[
                          EventDescription(text=region,
                                           type="Flinn-Engdahl region"),
                          EventDescription(text=record["cmt_event_name"],
                                           type="earthquake name")
                      ])

        # Assemble the time for the reference origin.
        try:
            time = _parse_date_time(record["date"], record["time"])
        except ObsPyNDKException:
            msg = ("Invalid time in event %i. '%s' and '%s' cannot be "
                   "assembled to a valid time. Event will be skipped.") % \
                  (_i + 1, record["date"], record["time"])
            warnings.warn(msg, ObsPyNDKWarning)
            continue

        # Create two origins, one with the reference latitude/longitude and
        # one with the centroidal values.
        ref_origin = Origin(
            force_resource_id=False,
            time=time,
            longitude=record["hypo_lng"],
            latitude=record["hypo_lat"],
            # Convert to m.
            depth=record["hypo_depth_in_km"] * 1000.0,
            origin_type="hypocenter",
            comments=[
                Comment(text="Hypocenter catalog: %s" %
                        record["hypocenter_reference_catalog"],
                        force_resource_id=False)
            ])
        ref_origin.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="ref_origin")
        ref_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="reforigin")

        cmt_origin = Origin(
            force_resource_id=False,
            longitude=record["centroid_longitude"],
            longitude_errors={
                "uncertainty": record["centroid_longitude_error"]
            },
            latitude=record["centroid_latitude"],
            latitude_errors={"uncertainty": record["centroid_latitude_error"]},
            # Convert to m.
            depth=record["centroid_depth_in_km"] * 1000.0,
            depth_errors={
                "uncertainty": record["centroid_depth_in_km_error"] * 1000
            },
            time=ref_origin["time"] + record["centroid_time"],
            time_errors={"uncertainty": record["centroid_time_error"]},
            depth_type=record["type_of_centroid_depth"],
            origin_type="centroid",
            time_fixed=False,
            epicenter_fixed=False,
            creation_info=creation_info.copy())
        cmt_origin.resource_id = _get_resource_id(record["cmt_event_name"],
                                                  "origin",
                                                  tag="cmtorigin")
        event.origins = [ref_origin, cmt_origin]
        event.preferred_origin_id = cmt_origin.resource_id.id

        # Create the magnitude object.
        mag = Magnitude(force_resource_id=False,
                        mag=round(record["Mw"], 2),
                        magnitude_type="Mwc",
                        origin_id=cmt_origin.resource_id,
                        creation_info=creation_info.copy())
        mag.resource_id = _get_resource_id(record["cmt_event_name"],
                                           "magnitude",
                                           tag="moment_mag")
        event.magnitudes = [mag]
        event.preferred_magnitude_id = mag.resource_id.id

        # Add the reported mb, MS magnitudes as additional magnitude objects.
        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["mb"],
                magnitude_type="mb",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'mb'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="mb_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="mb")

        event.magnitudes.append(
            Magnitude(
                force_resource_id=False,
                mag=record["MS"],
                magnitude_type="MS",
                comments=[
                    Comment(
                        force_resource_id=False,
                        text="Reported magnitude in NDK file. Most likely 'MS'."
                    )
                ]))
        event.magnitudes[-1].comments[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="MS_magnitude")
        event.magnitudes[-1].resource_id = _get_resource_id(
            record["cmt_event_name"], "magnitude", tag="MS")

        # Take care of the moment tensor.
        tensor = Tensor(m_rr=record["m_rr"],
                        m_rr_errors={"uncertainty": record["m_rr_error"]},
                        m_pp=record["m_pp"],
                        m_pp_errors={"uncertainty": record["m_pp_error"]},
                        m_tt=record["m_tt"],
                        m_tt_errors={"uncertainty": record["m_tt_error"]},
                        m_rt=record["m_rt"],
                        m_rt_errors={"uncertainty": record["m_rt_error"]},
                        m_rp=record["m_rp"],
                        m_rp_errors={"uncertainty": record["m_rp_error"]},
                        m_tp=record["m_tp"],
                        m_tp_errors={"uncertainty": record["m_tp_error"]},
                        creation_info=creation_info.copy())
        mt = MomentTensor(
            force_resource_id=False,
            scalar_moment=record["scalar_moment"],
            tensor=tensor,
            data_used=[DataUsed(**i) for i in record["data_used"]],
            inversion_type=record["source_type"],
            source_time_function=SourceTimeFunction(
                type=record["moment_rate_type"],
                duration=record["moment_rate_duration"]),
            derived_origin_id=cmt_origin.resource_id,
            creation_info=creation_info.copy())
        mt.resource_id = _get_resource_id(record["cmt_event_name"],
                                          "momenttensor")
        axis = [Axis(**i) for i in record["principal_axis"]]
        focmec = FocalMechanism(
            force_resource_id=False,
            moment_tensor=mt,
            principal_axes=PrincipalAxes(
                # The ordering is the same as for the IRIS SPUD service and
                # from a website of the Saint Louis University Earthquake
                # center so it should be correct.
                t_axis=axis[0],
                p_axis=axis[2],
                n_axis=axis[1]),
            nodal_planes=NodalPlanes(
                nodal_plane_1=NodalPlane(**record["nodal_plane_1"]),
                nodal_plane_2=NodalPlane(**record["nodal_plane_2"])),
            comments=[
                Comment(force_resource_id=False,
                        text="CMT Analysis Type: %s" %
                        record["cmt_type"].capitalize()),
                Comment(force_resource_id=False,
                        text="CMT Timestamp: %s" % record["cmt_timestamp"])
            ],
            creation_info=creation_info.copy())
        focmec.comments[0].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_type")
        focmec.comments[1].resource_id = _get_resource_id(
            record["cmt_event_name"], "comment", tag="cmt_timestamp")
        focmec.resource_id = _get_resource_id(record["cmt_event_name"],
                                              "focal_mechanism")
        event.focal_mechanisms = [focmec]
        event.preferred_focal_mechanism_id = focmec.resource_id.id

        # Set at end to avoid duplicate resource id warning.
        event.resource_id = _get_resource_id(record["cmt_event_name"], "event")

        cat.append(event)

    if len(cat) == 0:
        msg = "No valid events found in NDK file."
        raise ObsPyNDKException(msg)

    return cat
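
A minimal usage sketch for the reader above: ObsPy's read_events() dispatches to the NDK plugin automatically, so a Global CMT ".ndk" file can be loaded straight into a Catalog (the file name below is a placeholder).

from obspy import read_events

# "events.ndk" is a placeholder for any NDK file, e.g. a monthly catalog
# downloaded from www.globalcmt.org.
cat = read_events("events.ndk")
print(cat)
ev = cat[0]
# Each event carries the derived CMT origin, magnitudes and focal mechanism.
print(ev.preferred_focal_mechanism() or ev.focal_mechanisms[0])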
Exemplo n.º 51
0
    def _deserialize(self):
        catalog = Catalog()
        catalog.description = 'Created from GSE2 format'
        catalog.creation_info = self._get_creation_info()

        # Flag used to ignore line which aren't in a BEGIN-STOP block
        begin_block = False
        # Flag used to ignore line which aren't in a BULLETIN block
        bulletin_block = False

        try:
            for line in self.lines:
                if line.startswith('BEGIN'):
                    if begin_block:
                        # 2 BEGIN without STOP
                        message = self._add_line_nb('Missing STOP tag')
                        raise GSE2BulletinSyntaxError(message)
                    else:
                        # Enter a BEGIN block
                        begin_block = True

                    self._check_header(line)
                elif line.startswith('STOP'):
                    if begin_block:
                        # Exit a BEGIN-STOP block
                        begin_block = False
                    else:
                        # STOP without BEGIN
                        message = self._add_line_nb('Missing BEGIN tag')
                        raise GSE2BulletinSyntaxError(message)
                elif line.startswith('DATA_TYPE'):
                    bulletin_block = line[10:18] == 'BULLETIN'

                if not begin_block or not bulletin_block:
                    # Not in a BEGIN-STOP block, nor a DATA_TYPE BULLETIN
                    # block.
                    continue

                # If a "Reviewed Event Bulletin" or "Reviewed Bulletin"
                # line exists, put it in comment
                if 'Reviewed Event Bulletin' in line \
                        or 'Reviewed Bulletin' in line:
                    comment = self._comment(line.strip())
                    if comment.text:
                        catalog.comments.append(comment)
                # Detect start of an event
                elif line.startswith('EVENT'):
                    event = self._parse_event(line)
                    if event:
                        catalog.append(event)

        except StopIteration:
            message = self._add_line_nb('Unexpected EOF while parsing')
            raise GSE2BulletinSyntaxError(message)
        except Exception:
            self._warn('Unexpected error')
            raise

        if begin_block:
            # BEGIN-STOP block not closed
            text = 'Unexpected EOF while parsing, BEGIN-STOP block not closed'
            message = self._add_line_nb(text)
            raise GSE2BulletinSyntaxError(message)

        catalog.resource_id = self._get_res_id('event/evid')

        return catalog
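
_deserialize() above is normally reached through ObsPy's plugin machinery rather than called directly. A short sketch of the usual entry point, assuming the GSE2.0 bulletin plugin that ships with ObsPy and a placeholder file name:

from obspy import read_events

# "bulletin.gse" is a placeholder for a GSE2.0 bulletin file.
cat = read_events("bulletin.gse", format="GSE2")
print(cat.description)       # 'Created from GSE2 format'
for event in cat:
    print(event.short_str())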
Exemplo n.º 52
0
def _dbs_associator(start_time,
                    end_time,
                    moving_window,
                    tbl,
                    pair_n,
                    save_dir,
                    station_list,
                    consider_combination=False):

    if consider_combination:
        if platform.system() == 'Windows':
            Y2000_writer = open(save_dir + "\\" + "Y2000.phs", "w")
        else:
            Y2000_writer = open(save_dir + "/" + "Y2000.phs", "w")

        traceNmae_dic = dict()
        st = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
        et = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S.%f')
        total_t = et - st
        evid = 0
        tt = st
        pbar = tqdm(total=int(np.ceil(total_t.total_seconds() /
                                      moving_window)),
                    ncols=100)
        while tt < et:

            detections = tbl[(tbl.event_start_time >= tt) & (
                tbl.event_start_time < tt + timedelta(seconds=moving_window))]
            pbar.update()
            if len(detections) >= pair_n:
                evid += 1

                yr = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[0])
                mo = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[1])
                dy = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[2])
                hr = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[0])
                mi = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[1])
                sec = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[2])
                st_lat_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlat']), "Latitude")
                st_lon_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlon']), "Longitude")
                depth = 5.0
                mag = 0.0

                # QuakeML
                print(detections.iloc[0]['event_start_time'])

                if len(detections) / pair_n <= 2:
                    ch = pair_n
                else:
                    ch = int(len(detections) - pair_n)

                picks = []
                for ns in range(ch, len(detections) + 1):
                    comb = 0
                    for ind in list(combinations(detections.index, ns)):
                        comb += 1
                        selected_detections = detections.loc[ind, :]
                        sorted_detections = selected_detections.sort_values(
                            'p_arrival_time')

                        Y2000_writer.write(
                            "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                            % (int(yr), int(mo), int(dy), int(hr), int(mi),
                               float(sec), float(st_lat_DMS[0]),
                               str(st_lat_DMS[1]), float(st_lat_DMS[2]),
                               float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                               float(st_lon_DMS[2]), float(depth), float(mag)))

                        station_buffer = []
                        row_buffer = []
                        tr_names = []
                        tr_names2 = []
                        for _, row in sorted_detections.iterrows():

                            trace_name = row['traceID'] + '*' + row[
                                'station'] + '*' + str(row['event_start_time'])
                            p_unc = row['p_unc']
                            p_prob = row['p_prob']
                            s_unc = row['s_unc']
                            s_prob = row['s_prob']

                            if p_unc:
                                Pweihgt = _weighcalculator_prob(p_prob *
                                                                (1 - p_unc))
                            else:
                                Pweihgt = _weighcalculator_prob(p_prob)
                            try:
                                Pweihgt = int(Pweihgt)
                            except Exception:
                                Pweihgt = 4

                            if s_unc:
                                Sweihgt = _weighcalculator_prob(s_prob *
                                                                (1 - s_unc))
                            else:
                                Sweihgt = _weighcalculator_prob(s_prob)
                            try:
                                Sweihgt = int(Sweihgt)
                            except Exception:
                                Sweihgt = 4

                            station = "{:<5}".format(row['station'])
                            network = "{:<2}".format(row['network'])
                            try:
                                yrp = "{:>4}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[0])
                                mop = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[1])
                                dyp = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [0].split('-')[2])
                                hrp = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[0])
                                mip = "{:>2}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[1])
                                sec_p = "{:>4}".format(
                                    str(row['p_arrival_time']).split(' ')
                                    [1].split(':')[2])
                                p = Pick(time=UTCDateTime(
                                    row['p_arrival_time']),
                                         waveform_id=WaveformStreamID(
                                             network_code=network,
                                             station_code=station.rstrip()),
                                         phase_hint="P")
                                picks.append(p)
                            except Exception:
                                sec_p = None

                            try:
                                yrs = "{:>4}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[0])
                                mos = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[1])
                                dys = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [0].split('-')[2])
                                hrs = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[0])
                                mis = "{:>2}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[1])
                                sec_s = "{:>4}".format(
                                    str(row['s_arrival_time']).split(' ')
                                    [1].split(':')[2])
                                p = Pick(time=UTCDateTime(
                                    row['s_arrival_time']),
                                         waveform_id=WaveformStreamID(
                                             network_code=network,
                                             station_code=station.rstrip()),
                                         phase_hint="S")
                                picks.append(p)
                            except Exception:
                                sec_s = None

                            if row['station'] not in station_buffer:
                                tr_names.append(trace_name)
                                station_buffer.append(row['station'])
                                if sec_s:
                                    Y2000_writer.write(
                                        "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                        %
                                        (station, network, int(yrs), int(mos),
                                         int(dys), int(hrs), int(mis),
                                         float(0.0), float(sec_s), Sweihgt))
                                if sec_p:
                                    Y2000_writer.write(
                                        "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                        % (station, network, Pweihgt, int(yrp),
                                           int(mop), int(dyp), int(hrp),
                                           int(mip), float(sec_p), float(0.0)))
                            else:
                                tr_names2.append(trace_name)
                                if sec_s:
                                    row_buffer.append(
                                        "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                        %
                                        (station, network, int(yrs), int(mos),
                                         int(dys), int(hrs), int(mis), 0.0,
                                         float(sec_s), Sweihgt))
                                if sec_p:
                                    row_buffer.append(
                                        "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                        % (station, network, Pweihgt, int(yrp),
                                           int(mop), int(dyp), int(hrp),
                                           int(mip), float(sec_p), float(0.0)))
                        Y2000_writer.write("{:<62}".format(' ') + "%10d" %
                                           (evid) + '\n')

                traceNmae_dic[str(evid)] = tr_names

                if len(row_buffer) >= 2 * pair_n:
                    Y2000_writer.write(
                        "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                        % (int(yr), int(mo), int(dy), int(hr), int(mi),
                           float(sec), float(st_lat_DMS[0]), str(
                               st_lat_DMS[1]), float(st_lat_DMS[2]),
                           float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                           float(st_lon_DMS[2]), float(depth), float(mag)))
                    for rr in row_buffer:
                        Y2000_writer.write(rr)

                    Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                       '\n')
                    traceNmae_dic[str(evid)] = tr_names2

            tt += timedelta(seconds=moving_window)


        print('The Number of Realizations: ' + str(evid) + '\n', flush=True)

        jj = json.dumps(traceNmae_dic)
        if platform.system() == 'Windows':
            f = open(save_dir + "\\" + "traceNmae_dic.json", "w")
        else:
            f = open(save_dir + "/" + "traceNmae_dic.json", "w")
        f.write(jj)
        f.close()

    else:
        if platform.system() == 'Windows':
            Y2000_writer = open(save_dir + "\\" + "Y2000.phs", "w")
        else:
            Y2000_writer = open(save_dir + "/" + "Y2000.phs", "w")

        cat = Catalog()
        traceNmae_dic = dict()
        st = datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S.%f')
        et = datetime.strptime(end_time, '%Y-%m-%d %H:%M:%S.%f')
        total_t = et - st
        evid = 200000
        evidd = 100000
        tt = st
        pbar = tqdm(total=int(np.ceil(total_t.total_seconds() /
                                      moving_window)))
        while tt < et:

            detections = tbl[(tbl.event_start_time >= tt) & (
                tbl.event_start_time < tt + timedelta(seconds=moving_window))]
            pbar.update()
            if len(detections) >= pair_n:

                yr = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[0])
                mo = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[1])
                dy = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [0].split('-')[2])
                hr = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[0])
                mi = "{:>2}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[1])
                sec = "{:>4}".format(
                    str(detections.iloc[0]['event_start_time']).split(' ')
                    [1].split(':')[2])
                st_lat_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlat']), "Latitude")
                st_lon_DMS = _decimalDegrees2DMS(
                    float(detections.iloc[0]['stlon']), "Longitude")
                depth = 5.0
                mag = 0.0

                Y2000_writer.write(
                    "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                    % (int(yr), int(mo), int(dy), int(hr), int(mi), float(sec),
                       float(st_lat_DMS[0]), str(st_lat_DMS[1]),
                       float(st_lat_DMS[2]), float(st_lon_DMS[0]),
                       str(st_lon_DMS[1]), float(
                           st_lon_DMS[2]), float(depth), float(mag)))
                event = Event()
                origin = Origin(time=UTCDateTime(
                    detections.iloc[0]['event_start_time']),
                                longitude=detections.iloc[0]['stlon'],
                                latitude=detections.iloc[0]['stlat'],
                                method="EqTransformer")
                event.origins.append(origin)

                station_buffer = []
                row_buffer = []
                sorted_detections = detections.sort_values('p_arrival_time')
                tr_names = []
                tr_names2 = []
                picks = []
                for _, row in sorted_detections.iterrows():
                    trace_name = row['traceID'] + '*' + row[
                        'station'] + '*' + str(row['event_start_time'])
                    p_unc = row['p_unc']
                    p_prob = row['p_prob']
                    s_unc = row['s_unc']
                    s_prob = row['s_prob']

                    if p_unc:
                        Pweihgt = _weighcalculator_prob(p_prob * (1 - p_unc))
                    else:
                        Pweihgt = _weighcalculator_prob(p_prob)
                    try:
                        Pweihgt = int(Pweihgt)
                    except Exception:
                        Pweihgt = 4

                    if s_unc:
                        Sweihgt = _weighcalculator_prob(s_prob * (1 - s_unc))
                    else:
                        Sweihgt = _weighcalculator_prob(s_prob)
                    try:
                        Sweihgt = int(Sweihgt)
                    except Exception:
                        Sweihgt = 4

                    station = "{:<5}".format(row['station'])
                    network = "{:<2}".format(row['network'])

                    try:
                        yrp = "{:>4}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [0])
                        mop = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [1])
                        dyp = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[0].split('-')
                            [2])
                        hrp = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [0])
                        mip = "{:>2}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [1])
                        sec_p = "{:>4}".format(
                            str(row['p_arrival_time']).split(' ')[1].split(':')
                            [2])
                        p = Pick(time=UTCDateTime(row['p_arrival_time']),
                                 waveform_id=WaveformStreamID(
                                     network_code=network,
                                     station_code=station.rstrip()),
                                 phase_hint="P",
                                 method_id="EqTransformer")
                        picks.append(p)
                    except Exception:
                        sec_p = None

                    try:
                        yrs = "{:>4}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [0])
                        mos = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [1])
                        dys = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[0].split('-')
                            [2])
                        hrs = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [0])
                        mis = "{:>2}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [1])
                        sec_s = "{:>4}".format(
                            str(row['s_arrival_time']).split(' ')[1].split(':')
                            [2])
                        p = Pick(time=UTCDateTime(row['s_arrival_time']),
                                 waveform_id=WaveformStreamID(
                                     network_code=network,
                                     station_code=station.rstrip()),
                                 phase_hint="S",
                                 method_id="EqTransformer")
                        picks.append(p)
                    except Exception:
                        sec_s = None

                    if row['station'] not in station_buffer:
                        tr_names.append(trace_name)
                        station_buffer.append(row['station'])
                        if sec_s:
                            Y2000_writer.write(
                                "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                % (station, network, int(yrs), int(mos),
                                   int(dys), int(hrs), int(mis), float(0.0),
                                   float(sec_s), Sweihgt))
                        if sec_p:
                            Y2000_writer.write(
                                "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                % (station, network, Pweihgt, int(yrp),
                                   int(mop), int(dyp), int(hrp), int(mip),
                                   float(sec_p), float(0.0)))
                    else:
                        tr_names2.append(trace_name)
                        if sec_s:
                            row_buffer.append(
                                "%5s%2s  HHE     %4d%2d%2d%2d%2d%5.2f       %5.2fES %1d\n"
                                % (station, network, int(yrs), int(mos),
                                   int(dys), int(hrs), int(mis), 0.0,
                                   float(sec_s), Sweihgt))
                        if sec_p:
                            row_buffer.append(
                                "%5s%2s  HHZ IP %1d%4d%2d%2d%2d%2d%5.2f       %5.2f   0\n"
                                % (station, network, Pweihgt, int(yrp),
                                   int(mop), int(dyp), int(hrp), int(mip),
                                   float(sec_p), float(0.0)))
                event.picks = picks
                event.preferred_origin_id = event.origins[0].resource_id
                cat.append(event)

                evid += 1
                Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                   '\n')
                traceNmae_dic[str(evid)] = tr_names

                if len(row_buffer) >= 2 * pair_n:
                    Y2000_writer.write(
                        "%4d%2d%2d%2d%2d%4.2f%2.0f%1s%4.2f%3.0f%1s%4.2f%5.2f%3.2f\n"
                        % (int(yr), int(mo), int(dy), int(hr), int(mi),
                           float(sec), float(st_lat_DMS[0]), str(
                               st_lat_DMS[1]), float(st_lat_DMS[2]),
                           float(st_lon_DMS[0]), str(st_lon_DMS[1]),
                           float(st_lon_DMS[2]), float(depth), float(mag)))
                    for rr in row_buffer:
                        Y2000_writer.write(rr)

                    evid += 1
                    Y2000_writer.write("{:<62}".format(' ') + "%10d" % (evid) +
                                       '\n')
                    traceNmae_dic[str(evid)] = tr_names2

                elif len(row_buffer) < pair_n and len(row_buffer) != 0:
                    evidd += 1
                    traceNmae_dic[str(evidd)] = tr_names2

            elif len(detections) < pair_n and len(detections) != 0:
                tr_names = []
                for _, row in detections.iterrows():
                    trace_name = row['traceID']
                    tr_names.append(trace_name)
                evidd += 1
                traceNmae_dic[str(evidd)] = tr_names

            tt += timedelta(seconds=moving_window)

        print('The Number of Associated Events: ' + str(evid - 200000) + '\n',
              flush=True)

        jj = json.dumps(traceNmae_dic)
        if platform.system() == 'Windows':
            f = open(save_dir + "\\" + "traceNmae_dic.json", "w")
        else:
            f = open(save_dir + "/" + "traceNmae_dic.json", "w")

        f.write(jj)
        f.close()
        print(cat.__str__(print_all=True))
        cat.write(save_dir + "/associations.xml", format="QUAKEML")
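
A hedged call sketch for _dbs_associator(): the column names below are inferred from how the function indexes tbl, the values are made up for illustration, and it assumes the helpers it relies on (_decimalDegrees2DMS, _weighcalculator_prob) live in the same module and that save_dir already exists. station_list is accepted but not used by the code shown.

import pandas as pd

# Columns inferred from the lookups inside _dbs_associator(); one toy detection.
tbl = pd.DataFrame([{
    'traceID': 'XX.STA01..HHZ_20200101',
    'station': 'STA01', 'network': 'XX',
    'stlat': 35.10, 'stlon': -117.60,
    'event_start_time': pd.Timestamp('2020-01-01 00:00:05.000'),
    'p_arrival_time': pd.Timestamp('2020-01-01 00:00:06.200'),
    'p_prob': 0.95, 'p_unc': 0.05,
    's_arrival_time': pd.Timestamp('2020-01-01 00:00:08.400'),
    's_prob': 0.90, 's_unc': 0.08,
}])

_dbs_associator(start_time='2020-01-01 00:00:00.00',
                end_time='2020-01-01 01:00:00.00',
                moving_window=15, tbl=tbl, pair_n=1,
                save_dir='associator_output', station_list=['STA01'],
                consider_combination=False)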
Exemplo n.º 54
0
def request_gcmt(starttime, endtime, minmagnitude=None, mindepth=None, maxdepth=None, minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None):
	"""
	Query the Global CMT catalog search form (www.globalcmt.org) with
	mechanize and return the matching events as an ObsPy Catalog.

	This is a preliminary scraper written against the current layout of
	the globalcmt.org search page.
	"""
	from mechanize import Browser
	import re

	# Split the alphabetic prefix from the numbers in the date field.
	r = re.compile("([a-zA-Z]+)([0-9]+)")


	br = Browser()
	br.open('http://www.globalcmt.org/CMTsearch.html')
	#Site has just one form
	br.select_form(nr=0)

	br.form['yr']    = str(starttime.year)
	br.form['mo']    = str(starttime.month)
	br.form['day']   = str(starttime.day)
	br.form['oyr']   = str(endtime.year)
	br.form['omo']   = str(endtime.month)
	br.form['oday']  = str(endtime.day)
	br.form['list']  = ['4']
	br.form['itype'] = ['ymd']
	br.form['otype'] = ['ymd']

	if minmagnitude: br.form['lmw']   = str(minmagnitude)
	if minlatitude : br.form['llat']  = str(minlatitude)
	if maxlatitude : br.form['ulat']  = str(maxlatitude)
	if minlongitude: br.form['llon']  = str(minlongitude)
	if maxlongitude: br.form['ulon']  = str(maxlongitude)
	if mindepth    : br.form['lhd']   = str(mindepth)
	if maxdepth    : br.form['uhd']   = str(maxdepth)

	print("Submitting parameters to globalcmt.")
	req = br.submit()
	print("Retrieving data, creating catalog.")

	data = []
	for line in req:
		data.append(line) 

	data_chunked = _chunking_list(keyword='\n', list=data)
	origins = []
	magnitudes = []
	tensor = []

	for line in data_chunked:
		for element in line:
			if 'event name' in element:
				org       = line[1].split()
				year      = int(r.match(org[0]).groups()[1])
				mon       = int(org[1])
				day       = int(org[2])
				hour      = int(org[3])
				minute    = int(org[4])
				sec_temp  = int(org[5].split('.')[0])
				# Fractional seconds converted to microseconds for UTCDateTime.
				msec_temp = int(round((float(org[5]) - sec_temp) * 1e6))

				origins_temp = UTCDateTime(year, mon, day, hour, minute, sec_temp, msec_temp)
				#adding time shift located in line[3]
				origin       = origins_temp + float(line[3].split()[2])
				magnitude    = float(line[1].split()[10])
				latitude     = float(line[5].split()[1])
				longitude    = float(line[6].split()[1])
				depth        = 1000. * float(line[7].split()[1])
				m_rr         = float(line[8].split()[1])
				m_tt         = float(line[9].split()[1])
				m_pp         = float(line[10].split()[1])
				m_rt         = float(line[11].split()[1])
				m_rp         = float(line[12].split()[1])
				m_tp         = float(line[13].split()[1])

				magnitudes.append( ("Mw", magnitude) )
				origins.append( (latitude, longitude, depth, origin) )
				tensor.append( (m_rr, m_tt, m_pp, m_rt, m_rp, m_tp) )

	cat = Catalog()

	for mag, org, ten in zip(magnitudes, origins, tensor):
		# Create magnitude object.
		magnitude = Magnitude()


		magnitude.magnitude_type = mag[0]
		magnitude.mag = mag[1]
		# Write origin object.
		origin = Origin()
		origin.latitude = org[0]
		origin.longitude = org[1]
		origin.depth = org[2]
		origin.time = org[3]
		# Create event object and append to catalog object.
		event = Event()
		event.magnitudes.append(magnitude)
		event.origins.append(origin)

		event.MomentTensor = MomentTensor()
		event.MomentTensor.m_rr = ten[0]
		event.MomentTensor.m_tt = ten[1]
		event.MomentTensor.m_pp = ten[2]
		event.MomentTensor.m_rt = ten[3]
		event.MomentTensor.m_rp = ten[4]
		event.MomentTensor.m_tp = ten[5]

		cat.append(event)

	return cat
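
The tensor attached as event.MomentTensor above lives only on the Python object; it is not part of the QuakeML Event schema, so it would be dropped if the catalog were written out. A hedged sketch (the helper name is mine) of the standard ObsPy layout, where the tensor hangs off a FocalMechanism:

from obspy.core.event import (Event, FocalMechanism, Magnitude, MomentTensor,
                              Origin, Tensor)

def _event_with_tensor(mag, org, ten):
    # mag = ("Mw", value), org = (lat, lon, depth, time),
    # ten = (m_rr, m_tt, m_pp, m_rt, m_rp, m_tp), as collected above.
    event = Event()
    event.magnitudes.append(Magnitude(mag=mag[1], magnitude_type=mag[0]))
    event.origins.append(Origin(latitude=org[0], longitude=org[1],
                                depth=org[2], time=org[3]))
    fm = FocalMechanism(moment_tensor=MomentTensor(tensor=Tensor(
        m_rr=ten[0], m_tt=ten[1], m_pp=ten[2],
        m_rt=ten[3], m_rp=ten[4], m_tp=ten[5])))
    event.focal_mechanisms.append(fm)
    return event

Building the events this way keeps the moment tensors when the catalog is later written with format="QUAKEML".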
Exemplo n.º 55
0
def par2quakeml(Par_filename, QuakeML_filename, rotation_axis=[0.0, 1.0, 0.0],
                rotation_angle=-57.5, origin_time="2000-01-01 00:00:00.0",
                event_type="other event"):
    # initialise event
    ev = Event()

    # open and read Par file
    fid = open(Par_filename, 'r')

    fid.readline()
    fid.readline()
    fid.readline()
    fid.readline()

    lat_old = 90.0 - float(fid.readline().strip().split()[0])
    lon_old = float(fid.readline().strip().split()[0])
    depth = float(fid.readline().strip().split()[0])

    fid.readline()

    Mtt_old = float(fid.readline().strip().split()[0])
    Mpp_old = float(fid.readline().strip().split()[0])
    Mrr_old = float(fid.readline().strip().split()[0])
    Mtp_old = float(fid.readline().strip().split()[0])
    Mtr_old = float(fid.readline().strip().split()[0])
    Mpr_old = float(fid.readline().strip().split()[0])

    # rotate event into physical domain

    lat, lon = rot.rotate_lat_lon(lat_old, lon_old, rotation_axis,
                                  rotation_angle)
    Mrr, Mtt, Mpp, Mtr, Mpr, Mtp = rot.rotate_moment_tensor(
        Mrr_old, Mtt_old, Mpp_old, Mtr_old, Mpr_old, Mtp_old, lat_old, lon_old,
        rotation_axis, rotation_angle)

    # populate event origin data
    ev.event_type = event_type

    ev_origin = Origin()
    ev_origin.time = UTCDateTime(origin_time)
    ev_origin.latitude = lat
    ev_origin.longitude = lon
    ev_origin.depth = depth
    ev.origins.append(ev_origin)

    # populate the event moment tensor

    ev_tensor = Tensor()
    ev_tensor.m_rr = Mrr
    ev_tensor.m_tt = Mtt
    ev_tensor.m_pp = Mpp
    ev_tensor.m_rt = Mtr
    ev_tensor.m_rp = Mpr
    ev_tensor.m_tp = Mtp

    ev_momenttensor = MomentTensor()
    ev_momenttensor.tensor = ev_tensor
    ev_momenttensor.scalar_moment = np.sqrt(Mrr ** 2 + Mtt ** 2 + Mpp ** 2 +
                                            Mtr ** 2 + Mpr ** 2 + Mtp ** 2)

    ev_focalmechanism = FocalMechanism()
    ev_focalmechanism.moment_tensor = ev_momenttensor
    ev_focalmechanism.nodal_planes = NodalPlanes()

    ev.focal_mechanisms.append(ev_focalmechanism)

    # populate event magnitude
    ev_magnitude = Magnitude()
    ev_magnitude.mag = 0.667 * (np.log10(ev_momenttensor.scalar_moment) - 9.1)
    ev_magnitude.magnitude_type = 'Mw'
    ev.magnitudes.append(ev_magnitude)

    # write QuakeML file
    cat = Catalog()
    cat.append(ev)
    cat.write(QuakeML_filename, format="quakeml")

    # clean up
    fid.close()
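
A minimal call sketch, assuming a ses3d-style Par file with the layout read above and that the rot module used for the rotations is importable; both paths are placeholders.

# Both paths are placeholders; the rotation arguments mirror the defaults above.
par2quakeml("MODELS/EVENT_1/Par_file", "EVENT_1.xml",
            rotation_axis=[0.0, 1.0, 0.0], rotation_angle=-57.5)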