def build_isf(self, identifier, name):
    """
    Creates an instance of the ISFCatalogue class from the hdf5 format

    :param str identifier:
        Identifier string of the ISFCatalogue object
    :param str name:
        Name for the ISFCatalogue object
    :returns:
        Catalogue as instance of :class:`ISFCatalogue`
    """
    isf_catalogue = ISFCatalogue(identifier, name)
    event_groups = self.origins.groupby("eventID")
    mag_groups = self.magnitudes.groupby("eventID")
    mag_keys = mag_groups.indices.keys()
    ngrps = len(event_groups)
    for iloc, grp in enumerate(event_groups):
        if (iloc % 1000) == 0:
            # print() call form is valid under both Python 2 and Python 3
            # (the original "print ..." statement is a SyntaxError on 3.x)
            print("Processing event %d of %d" % (iloc, ngrps))
        # Get magnitudes list
        if grp[0] in mag_keys:
            # Magnitudes associated to this origin
            mag_list = self._get_magnitude_classes(
                mag_groups.get_group(grp[0]))
        else:
            mag_list = []
        # Get origins
        origin_list = self._get_origin_classes(grp[1], mag_list)
        event = Event(grp[0], origin_list, mag_list)
        isf_catalogue.events.append(event)
    return isf_catalogue
def parse(self):
    """
    Returns the catalogue as an instance of an ISFCatalogue.

    Each GCMT event produces two origins: the hypocentre solution and
    the centroid solution.
    """
    catalogue = ISFCatalogue('GCMT', 'GCMT')
    n_cmts = self.catalogue.number_events()
    for counter, gcmt in enumerate(self.catalogue.gcmts, start=1):
        # Build the event and origin identifiers
        event_id = 'GCMT_' + ("%06d" % counter)
        origin_id = gcmt.identifier.strip(' ')
        # Magnitudes: mb and Ms (when reported) belong to the hypocentre,
        # Mw comes from the centroid solution
        mags = []
        if gcmt.hypocentre.m_b:
            mags.append(Magnitude(event_id, origin_id, gcmt.hypocentre.m_b,
                                  gcmt.hypocentre.source, scale='mb'))
        if gcmt.hypocentre.m_s:
            mags.append(Magnitude(event_id, origin_id, gcmt.hypocentre.m_s,
                                  gcmt.hypocentre.source, scale='Ms'))
        moment_mag = Magnitude(event_id, gcmt.centroid.centroid_id,
                               gcmt.magnitude, 'GCMT', scale='Mw')
        mags.append(moment_mag)
        # Locations of the two solutions
        hypo_loc = Location(origin_id,
                            gcmt.hypocentre.longitude,
                            gcmt.hypocentre.latitude,
                            gcmt.hypocentre.depth)
        centroid_loc = Location(gcmt.centroid.centroid_id,
                                gcmt.centroid.longitude,
                                gcmt.centroid.latitude,
                                gcmt.centroid.depth,
                                depth_error=gcmt.centroid.depth_error)
        # Hypocentre origin carries all magnitudes and is the prime origin
        hypo = Origin(origin_id, gcmt.hypocentre.date, gcmt.hypocentre.time,
                      hypo_loc, gcmt.hypocentre.source, is_prime=True)
        hypo.magnitudes = mags
        # Centroid origin carries only the moment magnitude
        centroid = Origin(gcmt.centroid.centroid_id,
                          gcmt.centroid.date,
                          gcmt.centroid.time,
                          centroid_loc,
                          'GCMT',
                          is_centroid=True,
                          time_error=gcmt.centroid.time_error)
        centroid.magnitudes = [moment_mag]
        event = Event(event_id, [hypo, centroid], mags,
                      gcmt.hypocentre.location)
        event.tensor = gcmt.moment_tensor
        catalogue.events.append(event)
    return catalogue
def write_to_isf_catalogue(self, catalogue_id, name):
    """
    Exports the catalogue to an instance of the :class:
    eqcat.isf_catalogue.ISFCatalogue

    :param str catalogue_id:
        Identifier for the output catalogue
    :param str name:
        Name for the output catalogue
    :returns:
        Catalogue as instance of the ISFCatalogue class
    """
    def _or_none(value):
        # Map NaN entries to None so missing values are stored as nulls
        return None if np.isnan(value) else value

    isf_cat = ISFCatalogue(catalogue_id, name)
    for iloc in range(0, self.get_number_events()):
        # The event identifier doubles as the origin identifier
        event_id = str(self.data['eventID'][iloc])
        origin_id = event_id
        mag_value = self.data["magnitude"][iloc]
        if not mag_value or np.isnan(mag_value):
            # No magnitude - not useful, skip the event
            continue
        mag = [Magnitude(event_id, origin_id, mag_value, catalogue_id,
                         scale=self.data["magnitudeType"][iloc],
                         sigma=self.data['sigmaMagnitude'][iloc])]
        # Location: uncertainty terms fall back to None when NaN
        locn = Location(origin_id,
                        self.data['longitude'][iloc],
                        self.data['latitude'][iloc],
                        self.data['depth'][iloc],
                        _or_none(self.data['SemiMajor90'][iloc]),
                        _or_none(self.data['SemiMinor90'][iloc]),
                        _or_none(self.data['ErrorStrike'][iloc]),
                        _or_none(self.data['depthError'][iloc]))
        # Origin date and time; fractional seconds become microseconds
        eq_date = datetime.date(self.data['year'][iloc],
                                self.data['month'][iloc],
                                self.data['day'][iloc])
        secs = self.data['second'][iloc]
        eq_time = datetime.time(self.data['hour'][iloc],
                                self.data['minute'][iloc],
                                int(secs),
                                int((secs - floor(secs)) * 1E6))
        origin = Origin(origin_id, eq_date, eq_time, locn, catalogue_id,
                        is_prime=True)
        origin.magnitudes = mag
        isf_cat.events.append(Event(event_id, [origin], origin.magnitudes))
    return isf_cat
def write_to_isf_catalogue(self, catalogue_id, name):
    """
    Exports the catalogue to an instance of the :class:
    eqcat.isf_catalogue.ISFCatalogue

    :param str catalogue_id:
        Identifier for the output catalogue
    :param str name:
        Name for the output catalogue
    :returns:
        Catalogue as instance of the ISFCatalogue class
    """
    isf_cat = ISFCatalogue(catalogue_id, name)
    for iloc in range(0, self.get_number_events()):
        # Origin ID - the event identifier doubles as the origin identifier
        event_id = str(self.data['eventID'][iloc])
        origin_id = event_id
        # Create Magnitude - scale is hard-coded to 'Mw' for this catalogue
        mag = [Magnitude(event_id, origin_id,
                         self.data['magnitude'][iloc],
                         catalogue_id, scale='Mw',
                         sigma=self.data['sigmaMagnitude'][iloc])]
        # Create scalar Moment entry (scale 'Mo') when present; 'scaling'
        # is presumably the base-10 exponent of the moment - TODO confirm
        # against the reader that populates self.data
        if not np.isnan(self.data['moment'][iloc]):
            moment = self.data['moment'][iloc] *\
                (10. ** self.data['scaling'][iloc])
            mag.append(Magnitude(event_id, origin_id, moment,
                                 catalogue_id, scale='Mo'))
        # Create Location - NaN uncertainty terms are mapped to None
        semimajor90 = self.data['SemiMajor90'][iloc]
        semiminor90 = self.data['SemiMinor90'][iloc]
        error_strike = self.data['ErrorStrike'][iloc]
        if np.isnan(semimajor90):
            semimajor90 = None
        if np.isnan(semiminor90):
            semiminor90 = None
        if np.isnan(error_strike):
            error_strike = None
        depth_error = self.data['depthError'][iloc]
        if np.isnan(depth_error):
            depth_error = None
        locn = Location(origin_id,
                        self.data['longitude'][iloc],
                        self.data['latitude'][iloc],
                        self.data['depth'][iloc],
                        semimajor90, semiminor90,
                        error_strike, depth_error)
        # Create Origin
        # Date
        eq_date = datetime.date(self.data['year'][iloc],
                                self.data['month'][iloc],
                                self.data['day'][iloc])
        # Time - split seconds into whole seconds and microseconds
        secs = self.data['second'][iloc]
        microsecs = int((secs - floor(secs)) * 1E6)
        eq_time = datetime.time(self.data['hour'][iloc],
                                self.data['minute'][iloc],
                                int(secs), microsecs)
        origin = Origin(origin_id, eq_date, eq_time, locn, catalogue_id,
                        is_prime=True)
        origin.magnitudes = mag
        event = Event(event_id, [origin], origin.magnitudes)
        if self._check_moment_tensor_components(iloc):
            # If a moment tensor is found then add it to the event
            moment_tensor = GCMTMomentTensor()
            scaling = 10. ** self.data['scaling'][iloc]
            # 'USE' looks like an Up-South-East coordinate-frame conversion
            # - NOTE(review): verify the mapping in utils.COORD_SYSTEM
            moment_tensor.tensor = scaling * utils.COORD_SYSTEM['USE'](
                self.data['mrr'][iloc], self.data['mtt'][iloc],
                self.data['mpp'][iloc], self.data['mrt'][iloc],
                self.data['mpr'][iloc], self.data['mtp'][iloc])
            moment_tensor.exponent = self.data['scaling'][iloc]
            setattr(event, 'tensor', moment_tensor)
        isf_cat.events.append(event)
    return isf_cat
def read_file(self, identifier, name):
    """
    Reads the catalogue from the file and assigns the identifier and name

    :param str identifier:
        Identifier for the output ISFCatalogue
    :param str name:
        Name for the output ISFCatalogue
    :returns:
        Catalogue as instance of :class:`ISFCatalogue`
    """
    self.catalogue = ISFCatalogue(identifier, name)
    counter = 0
    is_origin = False
    is_magnitude = False
    comment_str = ""
    # Pre-initialise so the end-of-file flush below cannot raise a
    # NameError when the file contains no "Event" header rows
    event = None
    origins = []
    magnitudes = []
    # "with" guarantees the handle is closed even if parsing raises
    with open(self.filename, 'rt') as f:
        for row in f.readlines():
            # Strip newline carriage
            if row.endswith("\r\n"):
                # If the file was compiled on windows machines
                row = row.rstrip("\r\n")
            elif row.endswith("\n"):
                row = row.rstrip("\n")
            if not row:
                # Ignore empty rows
                continue
            elif "DATA_TYPE EVENT IMS1.0" in row:
                # Ignore header row
                continue
            elif "ISC Bulletin" in row:
                # Yet another header row
                continue
            elif "STOP" in row:
                # Footer row
                continue
            if '(#PRIME)' in row:
                # Previous origin block was the prime origin
                if len(origins) > 0:
                    origins[-1].is_prime = True
                continue
            if '(#CENTROID)' in row:
                # Previous origin block is a centroid
                if len(origins) > 0:
                    origins[-1].is_centroid = True
                continue
            # Raw string avoids the invalid escape-sequence warning on "\("
            comment_find = re.search(r"\((.*?)\)", row)
            if comment_find and not row.startswith("Event"):
                # Accumulate parenthesised comments for the current event
                # Not sure - but sometimes this needs to be switched off
                comment_str += "{:s}\n".format(comment_find.group(1))
                continue
            if row.startswith('Event'):
                # Event header row - flush the previous event (if any)
                if counter > 0:
                    self._build_event(event, origins, magnitudes,
                                      comment_str)
                # Get a new event
                event = get_event_header_row(row)
                comment_str = ""
                origins = []
                magnitudes = []
                counter += 1
                continue
            if row == origin_header:
                is_origin = True
                is_magnitude = False
                continue
            elif row == magnitude_header:
                is_origin = False
                is_magnitude = True
                continue
            if is_magnitude and len(row) == 38:
                # Is a magnitude row
                mag = get_event_magnitude(row, event.id,
                                          self.selected_magnitude_agencies)
                if mag:
                    magnitudes.append(mag)
                continue
            if is_origin and len(row) == 136:
                # Is an origin row
                orig = get_event_origin_row(row,
                                            self.selected_origin_agencies)
                if orig:
                    origins.append(orig)
    # Flush the final event in the file
    if event is not None:
        self._build_event(event, origins, magnitudes, comment_str)
    if len(self.rejected_catalogue):
        # Turn list of rejected events into its own instance of
        # ISFCatalogue
        self.rejected_catalogue = ISFCatalogue(
            identifier + "-R",
            name + " - Rejected",
            events=self.rejected_catalogue)
    return self.catalogue
class ISFReader(BaseCatalogueDatabaseReader):
    '''
    Class to read an ISF formatted earthquake catalogue considering only
    the origin agencies, the magnitude agencies and the magnitude types
    defined by the user
    '''
    # Comment keywords taken to indicate an anthropogenic (induced) event
    ANTHROPOGENIC_KEYWORDS = ["Geothermal", "Reservoir", "Mining",
                              "Anthropogenic"]

    def __init__(self, filename, selected_origin_agencies=None,
                 selected_magnitude_agencies=None, rejection_keywords=None,
                 bbox=None, lower_magnitude=None, upper_magnitude=None,
                 store_all_comments=False):
        """
        :param str filename:
            Path to the ISF file
        :param list selected_origin_agencies:
            Origin agencies to accept (all when empty or None)
        :param list selected_magnitude_agencies:
            Magnitude agencies to accept (all when empty or None)
        :param list rejection_keywords:
            Events whose comments contain any of these keywords are rejected
        :param list bbox:
            Bounding box as [llon, llat, ulon, ulat]; whole globe when empty
        :param float lower_magnitude:
            Minimum accepted magnitude (no lower bound when None)
        :param float upper_magnitude:
            Maximum accepted magnitude (no upper bound when None)
        :param bool store_all_comments:
            If True retain the comment string on accepted events
        """
        # None defaults replace the original mutable list defaults, which
        # are shared between all calls of the constructor
        if selected_origin_agencies is None:
            selected_origin_agencies = []
        if selected_magnitude_agencies is None:
            selected_magnitude_agencies = []
        if rejection_keywords is None:
            rejection_keywords = []
        if bbox is None:
            bbox = []
        super(ISFReader, self).__init__(filename,
                                        selected_origin_agencies,
                                        selected_magnitude_agencies)
        self.rejected_catalogue = []
        self.rejection_keywords = rejection_keywords
        self.store_comments = store_all_comments
        if lower_magnitude and upper_magnitude:
            assert upper_magnitude > lower_magnitude
        # Unbounded sides default to +/- infinity
        self.lower_mag = lower_magnitude if lower_magnitude else -np.inf
        self.upper_mag = upper_magnitude if upper_magnitude else np.inf
        if len(bbox):
            assert len(bbox) == 4
            self.lower_long = bbox[0]
            self.lower_lat = bbox[1]
            self.upper_long = bbox[2]
            self.upper_lat = bbox[3]
        else:
            self.lower_long = -180.0
            self.lower_lat = -90.0
            self.upper_long = 180.0
            self.upper_lat = 90.0

    def read_file(self, identifier, name):
        """
        Reads the catalogue from the file and assigns the identifier and
        name

        :param str identifier:
            Identifier for the output ISFCatalogue
        :param str name:
            Name for the output ISFCatalogue
        :returns:
            Catalogue as instance of :class:`ISFCatalogue`
        """
        self.catalogue = ISFCatalogue(identifier, name)
        counter = 0
        is_origin = False
        is_magnitude = False
        comment_str = ""
        # Pre-initialise so the end-of-file flush cannot raise a NameError
        # when the file contains no "Event" header rows
        event = None
        origins = []
        magnitudes = []
        # "with" guarantees the handle is closed even if parsing raises
        with open(self.filename, 'rt') as f:
            for row in f.readlines():
                # Strip newline carriage
                if row.endswith("\r\n"):
                    # If the file was compiled on windows machines
                    row = row.rstrip("\r\n")
                elif row.endswith("\n"):
                    row = row.rstrip("\n")
                if not row:
                    # Ignore empty rows
                    continue
                elif "DATA_TYPE EVENT IMS1.0" in row:
                    # Ignore header row
                    continue
                elif "ISC Bulletin" in row:
                    # Yet another header row
                    continue
                elif "STOP" in row:
                    # Footer row
                    continue
                if '(#PRIME)' in row:
                    # Previous origin block was the prime origin
                    if len(origins) > 0:
                        origins[-1].is_prime = True
                    continue
                if '(#CENTROID)' in row:
                    # Previous origin block is a centroid
                    if len(origins) > 0:
                        origins[-1].is_centroid = True
                    continue
                # Raw string avoids the invalid escape warning on "\("
                comment_find = re.search(r"\((.*?)\)", row)
                if comment_find and not row.startswith("Event"):
                    # Accumulate parenthesised comments for this event
                    # Not sure - but sometimes this needs to be switched off
                    comment_str += "{:s}\n".format(comment_find.group(1))
                    continue
                if row.startswith('Event'):
                    # Event header row - flush the previous event (if any)
                    if counter > 0:
                        self._build_event(event, origins, magnitudes,
                                          comment_str)
                    # Get a new event
                    event = get_event_header_row(row)
                    comment_str = ""
                    origins = []
                    magnitudes = []
                    counter += 1
                    continue
                if row == origin_header:
                    is_origin = True
                    is_magnitude = False
                    continue
                elif row == magnitude_header:
                    is_origin = False
                    is_magnitude = True
                    continue
                if is_magnitude and len(row) == 38:
                    # Is a magnitude row
                    mag = get_event_magnitude(
                        row, event.id, self.selected_magnitude_agencies)
                    if mag:
                        magnitudes.append(mag)
                    continue
                if is_origin and len(row) == 136:
                    # Is an origin row
                    orig = get_event_origin_row(
                        row, self.selected_origin_agencies)
                    if orig:
                        origins.append(orig)
        # Flush the final event in the file
        if event is not None:
            self._build_event(event, origins, magnitudes, comment_str)
        if len(self.rejected_catalogue):
            # Turn list of rejected events into its own instance of
            # ISFCatalogue
            self.rejected_catalogue = ISFCatalogue(
                identifier + "-R",
                name + " - Rejected",
                events=self.rejected_catalogue)
        return self.catalogue

    def _build_event(self, event, origins, magnitudes, comment_str):
        """
        Add magnitudes and origins to the event and append it to the
        catalogue if it passes the acceptance criteria
        """
        event.origins = origins
        event.magnitudes = magnitudes
        if len(event.origins) and len(event.magnitudes):
            event.assign_magnitudes_to_origins()
        event.comment = comment_str
        if self._acceptance(event):
            if not self.store_comments:
                event.comment = ""
            self.catalogue.events.append(event)

    def _acceptance(self, event):
        """
        Determines whether to accept the event according to the magnitude,
        location and keyword criteria

        :param event:
            Event as instance of Event class
        :returns:
            True (if event is accepted), False otherwise
        """
        # Magnitude rejection - based on an "any" criterion
        valid_magnitude = False
        for mag in event.magnitudes:
            if (mag.value >= self.lower_mag) and\
                    (mag.value <= self.upper_mag):
                valid_magnitude = True
                break
        if not valid_magnitude:
            return False
        # Location rejection - any origin inside the bounding box suffices
        valid_location = False
        for orig in event.origins:
            valid_location = \
                (orig.location.longitude >= self.lower_long) and\
                (orig.location.longitude <= self.upper_long) and\
                (orig.location.latitude >= self.lower_lat) and\
                (orig.location.latitude <= self.upper_lat)
            if valid_location:
                break
        if not valid_location:
            return False
        # Flag (but do not reject) likely induced events
        for keyword in self.ANTHROPOGENIC_KEYWORDS:
            if keyword.lower() in event.comment.lower():
                event.induced_flag = keyword
        # Keyword rejection moves the event to the rejected list
        for keyword in self.rejection_keywords:
            if keyword.lower() in event.comment.lower():
                self.rejected_catalogue.append(event)
                return False
        return True
def read_file(self, identifier, name):
    """
    Reads the catalogue from the file and assigns the identifier and name

    :param str identifier:
        Identifier for the output ISFCatalogue
    :param str name:
        Name for the output ISFCatalogue
    :returns:
        Catalogue as instance of :class:`ISFCatalogue`
    """
    self.catalogue = ISFCatalogue(identifier, name)
    counter = 0
    is_origin = False
    is_magnitude = False
    comment_str = ""
    # Pre-initialise so the end-of-file flush cannot raise a NameError on
    # a file with no "Event" header rows.  The local is "event" in lower
    # case - the original name shadowed the module-level Event class.
    event = None
    origins = []
    magnitudes = []

    def _store(evt):
        # Attach origins/magnitudes to the event and append it to the
        # catalogue if it passes the acceptance criteria
        evt.origins = origins
        evt.magnitudes = magnitudes
        if len(evt.origins) and len(evt.magnitudes):
            evt.assign_magnitudes_to_origins()
        evt.comment = comment_str
        if self._acceptance(evt):
            evt.comment = ""
            self.catalogue.events.append(evt)

    # "with" guarantees the handle is closed (it was never closed before)
    with open(self.filename, 'rt') as f:
        for row in f.readlines():
            if not row.rstrip('\n'):
                # Ignore empty rows
                continue
            elif "DATA_TYPE EVENT IMS1.0" in row:
                # Ignore header row
                continue
            elif "ISC Bulletin" in row:
                # Yet another header row
                continue
            elif "STOP" in row:
                # Footer row
                continue
            if '(#PRIME)' in row:
                # Previous origin block was the prime origin
                if len(origins) > 0:
                    origins[-1].is_prime = True
                continue
            if '(#CENTROID)' in row:
                # Previous origin block is a centroid
                if len(origins) > 0:
                    origins[-1].is_centroid = True
                continue
            # Raw string avoids the invalid escape warning on "\("
            comment_find = re.search(r"\((.*?)\)", row)
            if comment_find:
                # Accumulate parenthesised comments for the current event
                comment_str += "{:s}\n".format(comment_find.group(1))
                continue
            if 'Event' in row[:5]:
                # Event header row - flush the previous event (if any)
                if counter > 0:
                    _store(event)
                # Get a new event
                event = get_event_header_row(row.rstrip('\n'))
                comment_str = ""
                origins = []
                magnitudes = []
                counter += 1
                continue
            if row.rstrip('\n') == origin_header:
                is_origin = True
                is_magnitude = False
                continue
            elif row.rstrip('\n') == magnitude_header:
                is_origin = False
                is_magnitude = True
                continue
            if is_magnitude and len(row.strip('\n')) == 38:
                # Is a magnitude row
                mag = get_event_magnitude(row.strip('\n'), event.id,
                                          self.selected_magnitude_agencies)
                if mag:
                    magnitudes.append(mag)
                continue
            if is_origin and len(row.strip('\n')) == 136:
                # Is an origin row
                orig = get_event_origin_row(row.strip('\n'),
                                            self.selected_origin_agencies)
                if orig:
                    origins.append(orig)
    # Flush the final event - previously it was silently dropped
    if event is not None:
        _store(event)
    if len(self.rejected_catalogue):
        # Turn list of rejected events into its own instance of
        # ISFCatalogue
        self.rejected_catalogue = ISFCatalogue(
            identifier + "-R",
            name + " - Rejected",
            events=self.rejected_catalogue)
    return self.catalogue
class ISFReader(BaseCatalogueDatabaseReader):
    '''
    Class to read an ISF formatted earthquake catalogue considering only
    the origin agencies, the magnitude agencies and the magnitude types
    defined by the user
    '''

    def __init__(self, filename, selected_origin_agencies=None,
                 selected_magnitude_agencies=None,
                 rejection_keywords=None,
                 lower_magnitude=None, upper_magnitude=None):
        """
        :param str filename:
            Path to the ISF file
        :param list selected_origin_agencies:
            Origin agencies to accept (all when empty or None)
        :param list selected_magnitude_agencies:
            Magnitude agencies to accept (all when empty or None)
        :param list rejection_keywords:
            Events whose comments contain any of these keywords are rejected
        :param float lower_magnitude:
            Minimum accepted magnitude (no lower bound when None)
        :param float upper_magnitude:
            Maximum accepted magnitude (no upper bound when None)
        """
        # None defaults replace the original mutable list defaults, which
        # are shared between all calls of the constructor
        if selected_origin_agencies is None:
            selected_origin_agencies = []
        if selected_magnitude_agencies is None:
            selected_magnitude_agencies = []
        if rejection_keywords is None:
            rejection_keywords = []
        super(ISFReader, self).__init__(filename,
                                        selected_origin_agencies,
                                        selected_magnitude_agencies)
        self.rejected_catalogue = []
        self.rejection_keywords = rejection_keywords
        if lower_magnitude and upper_magnitude:
            assert upper_magnitude > lower_magnitude
        # Unbounded sides default to +/- infinity
        self.lower_mag = lower_magnitude if lower_magnitude else -np.inf
        self.upper_mag = upper_magnitude if upper_magnitude else np.inf

    def read_file(self, identifier, name):
        """
        Reads the catalogue from the file and assigns the identifier and
        name

        :param str identifier:
            Identifier for the output ISFCatalogue
        :param str name:
            Name for the output ISFCatalogue
        :returns:
            Catalogue as instance of :class:`ISFCatalogue`
        """
        self.catalogue = ISFCatalogue(identifier, name)
        counter = 0
        is_origin = False
        is_magnitude = False
        comment_str = ""
        # Pre-initialise so the end-of-file flush cannot raise a NameError
        # on a file with no "Event" header rows.  The local is "event" -
        # the original capitalised name shadowed the Event class.
        event = None
        origins = []
        magnitudes = []

        def _store(evt):
            # Attach origins/magnitudes to the event and append it to the
            # catalogue if it passes the acceptance criteria
            evt.origins = origins
            evt.magnitudes = magnitudes
            if len(evt.origins) and len(evt.magnitudes):
                evt.assign_magnitudes_to_origins()
            evt.comment = comment_str
            if self._acceptance(evt):
                evt.comment = ""
                self.catalogue.events.append(evt)

        # "with" guarantees the handle is closed (never closed before)
        with open(self.filename, 'rt') as f:
            for row in f.readlines():
                if not row.rstrip('\n'):
                    # Ignore empty rows
                    continue
                elif "DATA_TYPE EVENT IMS1.0" in row:
                    # Ignore header row
                    continue
                elif "ISC Bulletin" in row:
                    # Yet another header row
                    continue
                elif "STOP" in row:
                    # Footer row
                    continue
                if '(#PRIME)' in row:
                    # Previous origin block was the prime origin
                    if len(origins) > 0:
                        origins[-1].is_prime = True
                    continue
                if '(#CENTROID)' in row:
                    # Previous origin block is a centroid
                    if len(origins) > 0:
                        origins[-1].is_centroid = True
                    continue
                # Raw string avoids the invalid escape warning on "\("
                comment_find = re.search(r"\((.*?)\)", row)
                if comment_find:
                    # Accumulate parenthesised comments for this event
                    comment_str += "{:s}\n".format(comment_find.group(1))
                    continue
                if 'Event' in row[:5]:
                    # Event header row - flush the previous event (if any)
                    if counter > 0:
                        _store(event)
                    # Get a new event
                    event = get_event_header_row(row.rstrip('\n'))
                    comment_str = ""
                    origins = []
                    magnitudes = []
                    counter += 1
                    continue
                if row.rstrip('\n') == origin_header:
                    is_origin = True
                    is_magnitude = False
                    continue
                elif row.rstrip('\n') == magnitude_header:
                    is_origin = False
                    is_magnitude = True
                    continue
                if is_magnitude and len(row.strip('\n')) == 38:
                    # Is a magnitude row
                    mag = get_event_magnitude(
                        row.strip('\n'), event.id,
                        self.selected_magnitude_agencies)
                    if mag:
                        magnitudes.append(mag)
                    continue
                if is_origin and len(row.strip('\n')) == 136:
                    # Is an origin row
                    orig = get_event_origin_row(
                        row.strip('\n'), self.selected_origin_agencies)
                    if orig:
                        origins.append(orig)
        # Flush the final event - previously it was silently dropped
        if event is not None:
            _store(event)
        if len(self.rejected_catalogue):
            # Turn list of rejected events into its own instance of
            # ISFCatalogue
            self.rejected_catalogue = ISFCatalogue(
                identifier + "-R",
                name + " - Rejected",
                events=self.rejected_catalogue)
        return self.catalogue

    def _acceptance(self, event):
        """
        Determines whether to accept the event according to the magnitude
        and keyword criteria

        :param event:
            Event as instance of Event class
        :returns:
            True (if event is accepted), False otherwise
        """
        # Magnitude rejection - based on an "any" criterion
        valid_magnitude = False
        for mag in event.magnitudes:
            if (mag.value >= self.lower_mag) and\
                    (mag.value <= self.upper_mag):
                valid_magnitude = True
                break
        if not valid_magnitude:
            return False
        # Keyword rejection moves the event to the rejected list
        for keyword in self.rejection_keywords:
            if keyword.lower() in event.comment.lower():
                self.rejected_catalogue.append(event)
                return False
        return True
def read_file(self, identifier, name):
    """
    Reads the catalogue from the file and assigns the identifier and name

    :param str identifier:
        Identifier for the output ISFCatalogue
    :param str name:
        Name for the output ISFCatalogue
    :returns:
        Catalogue as instance of :class:`ISFCatalogue`
    """
    self.catalogue = ISFCatalogue(identifier, name)
    counter = 0
    is_origin = False
    is_magnitude = False
    # Pre-initialise so the end-of-file flush cannot raise a NameError on
    # a file with no "Event" header rows.  The local is "event" - the
    # original capitalised name shadowed the module-level Event class.
    event = None
    origins = []
    magnitudes = []

    def _store(evt):
        # Attach origins/magnitudes and append the event to the catalogue
        evt.origins = origins
        evt.magnitudes = magnitudes
        if len(evt.origins) and len(evt.magnitudes):
            evt.assign_magnitudes_to_origins()
        self.catalogue.events.append(evt)

    # "with" guarantees the handle is closed (it was never closed before)
    with open(self.filename, 'rt') as f:
        for row in f.readlines():
            if not row.rstrip('\n'):
                # Ignore empty rows
                continue
            if '(#PRIME)' in row:
                # Previous origin block was the prime origin
                if len(origins) > 0:
                    origins[-1].is_prime = True
                continue
            if '(#CENTROID)' in row:
                # Previous origin block is a centroid
                if len(origins) > 0:
                    origins[-1].is_centroid = True
                continue
            if row.startswith('Event'):
                # Event header row.  startswith replaces the original
                # "'Event' in row", which spuriously matched the word
                # anywhere in a row (the later readers anchor it too).
                if counter > 0:
                    # Flush the previous event
                    _store(event)
                # Get a new event
                event = get_event_header_row(row.rstrip('\n'))
                origins = []
                magnitudes = []
                counter += 1
                continue
            if row.rstrip('\n') == origin_header:
                is_origin = True
                is_magnitude = False
                continue
            elif row.rstrip('\n') == magnitude_header:
                is_origin = False
                is_magnitude = True
                continue
            if is_magnitude and len(row.strip('\n')) == 38:
                # Is a magnitude row
                mag = get_event_magnitude(row.strip('\n'), event.id,
                                          self.selected_magnitude_agencies)
                if mag:
                    magnitudes.append(mag)
                continue
            if is_origin and len(row.strip('\n')) == 136:
                # Is an origin row
                orig = get_event_origin_row(row.strip('\n'),
                                            self.selected_origin_agencies)
                if orig:
                    origins.append(orig)
    # Flush the final event - previously it was silently dropped
    if event is not None:
        _store(event)
    return self.catalogue