def test_write_to_database_with_events(self):
    ''' Test the writing to the database of a catalog with events.
    '''
    creation_time = UTCDateTime()
    catalog = ev_core.Catalog(name='test',
                              description='A test description.',
                              agency_uri='uot',
                              author_uri='tester',
                              creation_time=creation_time)

    # Create an event.
    start_time = '2000-01-01T00:00:00'
    end_time = '2000-01-01T01:00:00'
    creation_time = UTCDateTime()
    event = ev_core.Event(start_time=start_time,
                          end_time=end_time,
                          creation_time=creation_time)
    catalog.add_events([event, ])
    catalog.write_to_database(self.project)

    db_catalog_orm = self.project.dbTables['event_catalog']
    db_session = self.project.getDbSession()
    result = db_session.query(db_catalog_orm).all()
    db_session.close()
    self.assertEqual(len(result), 1)
    tmp = result[0]
    self.assertEqual(len(tmp.events), 1)
    self.assertEqual(tmp.events[0].ev_catalog_id, catalog.db_id)

    # Add a second event.
    start_time = '2000-01-02T00:00:00'
    end_time = '2000-01-02T01:00:00'
    creation_time = UTCDateTime()
    event = ev_core.Event(start_time=start_time,
                          end_time=end_time,
                          creation_time=creation_time)
    catalog.add_events([event, ])
    catalog.write_to_database(self.project)

    db_session = self.project.getDbSession()
    result = db_session.query(db_catalog_orm).all()
    db_session.close()
    self.assertEqual(len(result), 1)
    tmp = result[0]
    self.assertEqual(len(tmp.events), 2)
    self.assertEqual(tmp.events[0].ev_catalog_id, catalog.db_id)
    self.assertEqual(tmp.events[1].ev_catalog_id, catalog.db_id)
def parse(self, filename):
    ''' Parse a text file in CSV format.

    The first row is a header line. The following lines contain the data.

    public_id, start_time, end_time, description, author_uri, agency_uri
    event_1,2015-01-01T01:00:00.000000,2015-01-01T01:00:10.000000,example event 1,sm,mr
    event_2,2015-01-02T01:00:00.000000,2015-01-02T01:00:10.000000,example event 2,sm,mr

    If both the author_uri and the agency_uri are empty, the URIs of the
    current psysmon user will be used.

    Parameters
    ----------
    filename : String
        The CSV file to parse.
    '''
    if not os.path.exists(filename):
        self.logger.error("The filename %s doesn't exist.", filename)
        return False

    # Open in text mode; the csv module expects str rows in Python 3.
    with open(filename, 'r', newline = '') as event_file:
        csv_reader = csv.reader(event_file, delimiter = ',', quotechar = '"')
        header_line = next(csv_reader)
        for cur_row in csv_reader:
            if cur_row:
                cur_event = ev_core.Event(public_id = cur_row[0],
                                          start_time = utcdatetime.UTCDateTime(cur_row[1]),
                                          end_time = utcdatetime.UTCDateTime(cur_row[2]),
                                          description = cur_row[3],
                                          author_uri = cur_row[4],
                                          agency_uri = cur_row[5])
                self.events.append(cur_event)

    return True
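# A minimal usage sketch for the parser above. The class name
# `CsvEventParser` is an assumption for illustration; the actual parser
# class in psysmon may differ.
#
# events.csv (matching the format described in the docstring):
#   public_id,start_time,end_time,description,author_uri,agency_uri
#   event_1,2015-01-01T01:00:00.000000,2015-01-01T01:00:10.000000,example event 1,sm,mr
#
# parser = CsvEventParser()
# if parser.parse('events.csv'):
#     for cur_event in parser.events:
#         print(cur_event.public_id, cur_event.start_time, cur_event.end_time)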
def get_catalog(self, name = 'ims1_short_parsed', agency_uri = None,
                author_uri = None):
    ''' Get a catalog instance of the parsed bulletin.
    '''
    catalog = ev_core.Catalog(name = name,
                              agency_uri = agency_uri)

    for cur_event_dict in self.events:
        if len(cur_event_dict['origins']) == 0:
            self.logger.error("No origins found for event %s. Can't compute the start time.",
                              cur_event_dict['event_id'])
            continue

        orig_start_time = min([x['starttime'] for x in cur_event_dict['origins']])

        # Convert the phase arrival times to UTCDateTime instances, splitting
        # the fractional seconds into whole seconds and microseconds.
        arrival_times = [utcdatetime.UTCDateTime(orig_start_time.year,
                                                 orig_start_time.month,
                                                 orig_start_time.day,
                                                 x['arrival_time']['hour'],
                                                 x['arrival_time']['minute'],
                                                 int(x['arrival_time']['second']),
                                                 int(round((x['arrival_time']['second'] - int(x['arrival_time']['second'])) * 1000000)))
                         for x in cur_event_dict['phases']]
        start_time = min(arrival_times)
        end_time = max(arrival_times)

        if start_time == end_time:
            end_time = start_time + 1

        # TODO: The event type should be an instance of an event_type class
        # which is related to the event_type database table.
        cur_event = ev_core.Event(start_time = start_time,
                                  end_time = end_time,
                                  public_id = cur_event_dict['event_id'],
                                  #event_type = cur_event_dict['origins'][0]['event_type'],
                                  description = cur_event_dict['location'],
                                  agency_uri = cur_event_dict['origins'][0]['author'])
        catalog.add_events([cur_event, ])

    return catalog
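# A quick worked example of the seconds split used above: a phase arrival
# second of 12.345678 becomes 12 whole seconds and 345678 microseconds.
#
# >>> second = 12.345678
# >>> int(second)
# 12
# >>> int(round((second - int(second)) * 1000000))
# 345678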
def create_event(self, start_time, end_time):
    ''' Create a new event in the database.
    '''
    cur_catalog = self.library.catalogs[self.selected_catalog_name]
    event = event_core.Event(start_time = UTCDateTime(start_time),
                             end_time = UTCDateTime(end_time),
                             event_type = self.selected_event_type_id,
                             agency_uri = self.parent.project.activeUser.agency_uri,
                             author_uri = self.parent.project.activeUser.author_uri,
                             creation_time = UTCDateTime().isoformat())
    cur_catalog.add_events([event, ])
    event.write_to_database(self.parent.project)
def test_add_events(self):
    ''' Test the add_events method.
    '''
    catalog = ev_core.Catalog(name='test')

    # Create an event.
    start_time = '2000-01-01T00:00:00'
    end_time = '2000-01-01T01:00:00'
    creation_time = UTCDateTime()
    event = ev_core.Event(start_time=start_time,
                          end_time=end_time,
                          creation_time=creation_time)
    catalog.add_events([event, ])
    self.assertEqual(len(catalog.events), 1)
    self.assertEqual(catalog.events[0], event)
    self.assertEqual(event.parent, catalog)
def bind(self, catalog, channel_scnl, n_neighbors = 2,
         min_match_neighbors = 2, search_win_extend = 0.1):
    ''' Bind the detections to events.
    '''
    # Get the detections of the channels and sort them according to time.
    detections = {}
    for cur_scnl in channel_scnl:
        detections[cur_scnl] = catalog.get_detections(scnl = cur_scnl)
        detections[cur_scnl] = sorted(detections[cur_scnl],
                                      key = op.attrgetter('start_time'))

    # Get the earliest detection of each channel. Remove these detections
    # from the detections list.
    next_detections = [x[0] for x in list(detections.values()) if len(x) > 0]

    while len(next_detections) > 0:
        next_detections = sorted(next_detections,
                                 key = op.attrgetter('start_time'))
        first_detection = next_detections.pop(0)
        self.logger.debug('Processing detection %d, %s, %s.',
                          first_detection.db_id,
                          first_detection.start_time,
                          first_detection.snl)

        # Get the search windows for the detection combinations.
        search_windows = self.get_search_window(first_detection,
                                                next_detections)
        self.logger.debug('Search windows: %s', search_windows)

        # Get the detections matching the search window.
        match_detections = [x for k, x in enumerate(next_detections)
                            if x.start_time <= first_detection.start_time + search_windows[k] + search_win_extend]
        self.logger.debug('Matching detections: %s.',
                          [(x.db_id, x.start_time, x.snl) for x in match_detections])

        # TODO: Make the min_match_neighbors a user preference.
        # Check if there are matching detections on neighboring stations.
        # There have to be detections on at least min_match_neighbors
        # neighboring stations. If there are not enough matching
        # detections, all matching detections have to be on neighbor
        # stations.
        # TODO: The neighbors should contain only stations which have been
        # selected for binding the detections.
        # TODO: Check if the neighbors have the correct length.
        neighbors = self.epi_dist[first_detection.snl][1:n_neighbors + 1]
        #neighbors = [x for x in neighbors if x[1] <= max_neighbor_dist]
        neighbors_snl = [x[0] for x in neighbors]
        match_snl = [x.snl for x in match_detections]
        match_neighbors = [x for x in neighbors_snl if x in match_snl]
        self.logger.debug('neighbors_snl: %s', neighbors_snl)
        self.logger.debug('match_snl: %s', match_snl)

        # TODO: Add a check for detections on distant neighbors.
        # If there is a small number of detections on distant stations,
        # check for detections on stations with a similar epi distance. If
        # no detections are found on these similar stations, reject the
        # binding.
        if len(neighbors) < min_match_neighbors:
            raise RuntimeError("Not enough neighbors found for station %s. Detection ID: %d." % (first_detection.snl, first_detection.db_id))

        # TODO: Maybe add the following option: a dedicated check that, if
        # only a small number of neighbors have matching detections, they
        # should all be direct neighbors of the master station.
        if len(match_neighbors) < min_match_neighbors:
            # There are not enough detections on neighboring stations.
            # Reset the matched detections.
            self.logger.debug('Not enough detections on neighboring stations. Detection ID: %d. Start time: %s',
                              first_detection.db_id,
                              first_detection.start_time)
            match_detections = []
        else:
            # This is a valid event.
            # Add the first detection to the match detections.
            match_detections.append(first_detection)

        if match_detections:
            # Create an event using the matching detections.
            event = ev_core.Event(start_time = min([x.start_time for x in match_detections]),
                                  end_time = max([x.end_time for x in match_detections]),
                                  author_uri = self.author_uri,
                                  agency_uri = self.agency_uri,
                                  creation_time = utcdatetime.UTCDateTime(),
                                  detections = match_detections)
            self.event_catalog.add_events([event, ])
            self.logger.debug('Added event from %s to %s.',
                              event.start_time, event.end_time)

            # Remove the matching detections from the detections list.
            for cur_detection in match_detections:
                detections[cur_detection.scnl].remove(cur_detection)
        else:
            # Remove the first detection from the detections list.
            detections[first_detection.scnl].remove(first_detection)

        # Get the next earliest detection of each channel.
        next_detections = [x[0] for x in list(detections.values()) if len(x) > 0]
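# A minimal usage sketch for the binder above, assuming a binder class
# (here called `DetectionBinder`) that has been set up with the
# epicentral distance table (`epi_dist`), the author/agency URIs and an
# empty `event_catalog`. The class name, constructor and the SCNL tuples
# are illustrations only.
#
# binder = DetectionBinder(...)
# channel_scnl = [('STA01', 'HHZ', 'XX', '00'),
#                 ('STA02', 'HHZ', 'XX', '00')]
# binder.bind(catalog = detection_catalog,
#             channel_scnl = channel_scnl,
#             n_neighbors = 2,
#             min_match_neighbors = 2,
#             search_win_extend = 0.1)
# bound_events = binder.event_catalog.events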