def __init__(self, name, db_id = None, description = None,
             agency_uri = None, author_uri = None,
             creation_time = None, events = None):
    ''' Instance initialization.
    '''
    # The logging logger instance.
    logger_name = __name__ + "." + self.__class__.__name__
    self.logger = logging.getLogger(logger_name)

    # The unique database ID.
    self.db_id = db_id

    # The name of the catalog.
    self.name = name

    # The description of the catalog.
    self.description = description

    # The agency_uri of the creator.
    self.agency_uri = agency_uri

    # The author_uri of the creator.
    self.author_uri = author_uri

    # The time of creation of this catalog.
    if creation_time is None:
        self.creation_time = utcdatetime.UTCDateTime()
    else:
        self.creation_time = utcdatetime.UTCDateTime(creation_time)

    # The events of the catalog.
    if events is None:
        self.events = []
    else:
        self.events = events
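# A minimal usage sketch for the catalog class initialized above. The module
# alias (ev_core) mirrors the one used elsewhere in this code; the import
# path is an assumption and may differ in the actual package layout.
def _example_create_catalog():
    from obspy.core import utcdatetime
    import psysmon.core.event as ev_core  # assumed import path

    catalog = ev_core.Catalog(name = 'example_catalog',
                              description = 'A catalog created for testing.',
                              agency_uri = 'example_agency',
                              author_uri = 'example_author',
                              creation_time = utcdatetime.UTCDateTime('2015-01-01T00:00:00'))
    return catalog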
def execute(self, prefNodeOutput = {}):
    ''' Execute the collection node.
    '''
    archive_dir = self.pref_manager.get_value('archive_dir')
    archive_scan_file = os.path.join(archive_dir, 'psysmon_archive_scan.json')
    if not os.path.isfile(archive_scan_file):
        self.logger.error('No archive scan result file found at %s.', archive_scan_file)
        return

    with open(archive_scan_file) as fp:
        self.logger.info('Loading the archive scan result file: %s.', archive_scan_file)
        ac = json.load(fp = fp, cls = psysmon.packages.reftek.archive.ArchiveScanDecoder)
        ac.sort_raw_files()

    stream_list = self.pref_manager.get_value('unit_list')
    start_time = self.pref_manager.get_value('start_time')
    end_time = self.pref_manager.get_value('end_time')
    output_dir = self.pref_manager.get_value('output_dir')
    ac.output_directory = output_dir

    for cur_stream in stream_list:
        cur_start_time = start_time
        cur_end_time = end_time
        #self.logger.debug("Converting stream %s.", cur_stream)
        stream_start_time = utcdatetime.UTCDateTime(cur_stream[2])
        stream_end_time = utcdatetime.UTCDateTime(cur_stream[3])

        # Clip the requested time span to the time span of the stream.
        if stream_start_time > cur_start_time:
            cur_start_time = stream_start_time
        if stream_end_time < cur_end_time:
            cur_end_time = stream_end_time

        ac.archive_to_mseed(unit_id = cur_stream[0],
                            stream = cur_stream[1],
                            start_time = cur_start_time,
                            end_time = cur_end_time)
def get_catalog(self, name = 'ims1_short_parsed',
                agency_uri = None, author_uri = None):
    ''' Get a catalog instance of the parsed bulletin.
    '''
    catalog = ev_core.Catalog(name = name,
                              agency_uri = agency_uri,
                              author_uri = author_uri)

    for cur_event_dict in self.events:
        if len(cur_event_dict['origins']) == 0:
            self.logger.error("No origins found for event %s. Can't compute the start time.",
                              cur_event_dict['event_id'])
            continue

        orig_start_time = min([x['starttime'] for x in cur_event_dict['origins']])
        # Build the absolute phase arrival times from the origin date and the
        # hour/minute/second fields of the parsed phase blocks.
        arrival_times = [utcdatetime.UTCDateTime(orig_start_time.year,
                                                 orig_start_time.month,
                                                 orig_start_time.day,
                                                 x['arrival_time']['hour'],
                                                 x['arrival_time']['minute'],
                                                 int(x['arrival_time']['second']),
                                                 int(round((x['arrival_time']['second'] - int(x['arrival_time']['second'])) * 1000000)))
                         for x in cur_event_dict['phases']]
        start_time = min(arrival_times)
        end_time = max(arrival_times)

        if start_time == end_time:
            end_time = start_time + 1

        # TODO: The event type should be an instance of an event_type class
        # which is related to the event_type database table.
        cur_event = ev_core.Event(start_time = start_time,
                                  end_time = end_time,
                                  public_id = cur_event_dict['event_id'],
                                  #event_type = cur_event_dict['origins'][0]['event_type'],
                                  description = cur_event_dict['location'],
                                  agency_uri = cur_event_dict['origins'][0]['author'])
        catalog.add_events([cur_event,])

    return catalog
def parse(self, filename):
    ''' Parse a text file in CSV format.

    The first row is a header line. The following lines contain the data.

    public_id, start_time, end_time, description, author_uri, agency_uri
    event_1,2015-01-01T01:00:00.000000,2015-01-01T01:00:10.000000,example event 1,sm,mr
    event_2,2015-01-02T01:00:00.000000,2015-01-02T01:00:10.000000,example event 1,sm,mr

    If the author_uri and the agency_uri are empty, the URIs of the current
    psysmon user will be used.

    Parameters
    ----------
    filename : String
        The CSV file to parse.
    '''
    if not os.path.exists(filename):
        self.logger.error("The filename %s doesn't exist.", filename)
        return False

    # Open in text mode with newline = '' as required by the csv module.
    with open(filename, 'r', newline = '') as event_file:
        csv_reader = csv.reader(event_file, delimiter = ',', quotechar = '"')
        # Skip the header line.
        next(csv_reader)

        for cur_row in csv_reader:
            if cur_row:
                cur_event = ev_core.Event(public_id = cur_row[0],
                                          start_time = utcdatetime.UTCDateTime(cur_row[1]),
                                          end_time = utcdatetime.UTCDateTime(cur_row[2]),
                                          description = cur_row[3],
                                          author_uri = cur_row[4],
                                          agency_uri = cur_row[5])
                self.events.append(cur_event)

    return True
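# A hedged sketch exercising parse() with a temporary file that follows the
# CSV layout described in the docstring. The parser argument stands for an
# instance of the class defining parse() above.
def _example_parse_csv(parser):
    import os
    import tempfile

    rows = ('public_id,start_time,end_time,description,author_uri,agency_uri\n'
            'event_1,2015-01-01T01:00:00.000000,2015-01-01T01:00:10.000000,example event 1,sm,mr\n')
    fd, tmp_file = tempfile.mkstemp(suffix = '.csv')
    with os.fdopen(fd, 'w') as fp:
        fp.write(rows)
    try:
        success = parser.parse(tmp_file)
    finally:
        os.remove(tmp_file)
    return success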
def __init__(self, label, time, amp1, channel, amp2 = None, first_motion = 0,
             error = None, agency_uri = None, author_uri = None,
             creation_time = None, db_id = None, event_id = None,
             parent = None, changed = True):
    ''' Initialize the instance.
    '''
    # The logging logger instance.
    logger_prefix = psysmon.logConfig['package_prefix']
    loggerName = logger_prefix + "." + __name__ + "." + self.__class__.__name__
    self.logger = logging.getLogger(loggerName)

    # The parent object holding this pick.
    self.parent = parent

    # The unique database id.
    self.db_id = db_id

    # The unique database id of the event to which the pick is associated.
    self.event_id = event_id

    # The label of the pick.
    self.label = label

    # The picked time value.
    self.time = time

    # The picked amplitude value.
    self.amp1 = amp1

    # The channel instance with which the pick is associated.
    self.channel = channel

    # The second amplitude value used for amplitude range picking.
    self.amp2 = amp2

    # The first motion assigned to the pick (up: 1, down: -1, undefined: 0).
    self.first_motion = first_motion

    # The error of the time pick.
    self.error = error

    # The agency_uri of the creator.
    self.agency_uri = agency_uri

    # The author_uri of the creator.
    self.author_uri = author_uri

    # The time of creation of this pick.
    if creation_time is None:
        self.creation_time = utcdatetime.UTCDateTime()
    else:
        self.creation_time = utcdatetime.UTCDateTime(creation_time)

    # Flag to indicate a change of the pick attributes.
    self.changed = changed
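# A minimal sketch creating a pick instance with the arguments handled above.
# The class name (Pick) and the channel argument are assumptions; any object
# describing the picked channel can be passed.
def _example_create_pick(channel):
    from obspy.core import utcdatetime

    pick = Pick(label = 'P',
                time = utcdatetime.UTCDateTime('2015-01-01T01:00:00'),
                amp1 = 1.2e-6,
                channel = channel,
                first_motion = 1)  # 1 marks an upward first motion.
    return pick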
def create_archive_prefs(self):
    ''' Create the archive input preference items.
    '''
    pagename = '1 archive'
    self.pref_manager.add_page(pagename)

    # The archive directory.
    pref_item = psy_pm.DirBrowsePrefItem(name = 'archive_dir',
                                         label = 'archive directory',
                                         group = 'archive',
                                         value = '',
                                         hooks = {'on_value_change': self.on_archive_dir_changed},
                                         tool_tip = 'The root directory of the Reftek raw data archive.')
    self.pref_manager.add_item(pagename = pagename,
                               item = pref_item)

    # Scan archive button.
    item = psy_pm.ActionItem(name = 'scan_archive',
                             label = 'scan archive',
                             group = 'archive',
                             mode = 'button',
                             action = self.on_scan_archive,
                             tool_tip = 'Scan the reftek raw data archive.')
    self.pref_manager.add_item(pagename = pagename,
                               item = item)

    # The start time.
    pref_item = psy_pm.DateTimeEditPrefItem(name = 'start_time',
                                            label = 'start time',
                                            value = utcdatetime.UTCDateTime('2012-07-09T00:00:00'),
                                            group = 'time range',
                                            tool_tip = 'The start time of the interval to process.')
    self.pref_manager.add_item(pagename = pagename,
                               item = pref_item)

    # The end time.
    pref_item = psy_pm.DateTimeEditPrefItem(name = 'end_time',
                                            label = 'end time',
                                            value = utcdatetime.UTCDateTime('2012-07-09T00:00:00'),
                                            group = 'time range',
                                            tool_tip = 'The end time of the interval to process.')
    self.pref_manager.add_item(pagename = pagename,
                               item = pref_item)

    # The unit list.
    pref_item = psy_pm.ListCtrlEditPrefItem(name = 'unit_list',
                                            label = 'units',
                                            value = [],
                                            column_labels = ['unit id', 'stream',
                                                             'first data', 'last data'],
                                            limit = [],
                                            group = 'unit selection',
                                            tool_tip = 'Select the units to process.')
    self.pref_manager.add_item(pagename = pagename,
                               item = pref_item)
def utcdatetime_from_string(string):
    """ Convert a datetime string to obspy.core.utcdatetime.UTCDateTime.

    :param string: Datetime string in one of the formats YYYYMMDD,
        YYYYMMDDHHmm or YYYYMMDDHHmmss.
    :return: obspy.core.utcdatetime.UTCDateTime, or None if the conversion
        failed.
    """
    if len(string) in [8, 14]:
        return utcdatetime.UTCDateTime(string)
    elif len(string) == 12:
        # Pad the YYYYMMDDHHmm format with zero seconds.
        new_string = string + "00"
        return utcdatetime.UTCDateTime(new_string)
    return None
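# Usage sketch for utcdatetime_from_string(), covering the three supported
# formats and the failure case.
def _example_utcdatetime_from_string():
    assert utcdatetime_from_string('20150101') is not None        # YYYYMMDD
    assert utcdatetime_from_string('201501011230') is not None    # YYYYMMDDHHmm
    assert utcdatetime_from_string('20150101123045') is not None  # YYYYMMDDHHmmss
    assert utcdatetime_from_string('2015-01-01') is None          # unsupported length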
def _dataframe_to_station(statcode, station_df, instrument_register=None):
    """ Convert a Pandas dataframe with a unique station code to an obspy Station object.

    :param statcode: Station code
    :type statcode: str
    :param station_df: Dataframe containing records for a single station code.
    :type station_df: pandas.DataFrame conforming to table_format.TABLE_SCHEMA
    :param instrument_register: Dictionary of nominal instrument responses
        indexed by channel code, defaults to None
    :type instrument_register: dict of {str: Instrument(obspy.core.inventory.util.Equipment,
        obspy.core.inventory.response.Response)}, optional
    :return: Station object containing the station information from the dataframe
    :rtype: obspy.core.inventory.station.Station
    """
    station_data = station_df.iloc[0]
    st_start = station_data['StationStart']
    assert pd.notnull(st_start)
    st_start = utcdatetime.UTCDateTime(st_start)
    st_end = station_data['StationEnd']
    assert pd.notnull(st_end)
    st_end = utcdatetime.UTCDateTime(st_end)
    station = Station(statcode,
                      station_data['Latitude'],
                      station_data['Longitude'],
                      station_data['Elevation'],
                      start_date=st_start,
                      creation_date=st_start,
                      end_date=st_end,
                      termination_date=st_end,
                      site=Site(name=' '))
    for _, d in station_df.iterrows():
        ch_start = d['ChannelStart']
        ch_start = utcdatetime.UTCDateTime(ch_start) if not pd.isnull(ch_start) else None
        ch_end = d['ChannelEnd']
        ch_end = utcdatetime.UTCDateTime(ch_end) if not pd.isnull(ch_end) else None
        ch_code = d['ChannelCode']
        # Guard against a missing register and unknown channel codes; plain
        # indexing would raise a KeyError for channel codes not in the register.
        instrument = instrument_register.get(ch_code) if instrument_register else None
        if instrument is not None:
            sensor = instrument.sensor
            response = instrument.response
        elif instrument_register and 'LAST_RESORT' in instrument_register:
            last_resort = instrument_register['LAST_RESORT']
            sensor = last_resort.sensor
            response = last_resort.response
        else:
            sensor = None
            response = None
        cha = Channel(ch_code, '',
                      float(d['Latitude']),
                      float(d['Longitude']),
                      float(d['Elevation']),
                      depth=0.0,
                      azimuth=0.0,
                      dip=-90.0,
                      sample_rate=0.0,
                      clock_drift_in_seconds_per_sample=0.0,
                      start_date=ch_start,
                      end_date=ch_end,
                      sensor=sensor,
                      response=response)
        station.channels.append(cha)
    return station
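# A hedged sketch calling _dataframe_to_station() with a minimal one-row
# dataframe. The column names mirror the fields accessed above; the full
# table_format.TABLE_SCHEMA is not reproduced here.
def _example_dataframe_to_station():
    import pandas as pd

    station_df = pd.DataFrame([{'StationStart': '2015-01-01T00:00:00',
                                'StationEnd': '2016-01-01T00:00:00',
                                'Latitude': -35.0,
                                'Longitude': 149.0,
                                'Elevation': 600.0,
                                'ChannelStart': '2015-01-01T00:00:00',
                                'ChannelEnd': '2016-01-01T00:00:00',
                                'ChannelCode': 'BHZ'}])
    # An empty register yields channels without sensor and response metadata.
    return _dataframe_to_station('TEST', station_df, instrument_register = {})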
def execute(self, prevNodeOutput = {}):
    ''' Execute the looper collection node.
    '''
    start_times = ['2018-03-07T12:30', '2018-03-07T13:10', '2018-03-08T14:50',
                   '2018-03-15T13:00', '2018-03-21T12:50', '2018-03-23T10:40',
                   '2018-03-28T13:20', '2018-03-28T13:50', '2018-03-29T13:50',
                   '2018-04-10T09:40', '2018-04-18T10:00', '2018-04-18T13:40',
                   '2018-04-27T09:40', '2018-04-27T10:00', '2018-05-02T11:40',
                   '2018-05-08T11:20', '2018-05-08T11:40', '2018-05-14T12:20',
                   '2018-05-22T13:00']
    start_times = [utcdatetime.UTCDateTime(x) for x in start_times]
    # Each time window is 20 minutes (1200 s) long.
    end_times = [x + 1200 for x in start_times]

    self.parentCollection.runtime_att.start_time = start_times[0]
    self.parentCollection.runtime_att.end_time = end_times[0]
    self.parentCollection.runtime_att.loop_start_times = start_times[1:]
    self.parentCollection.runtime_att.loop_end_times = end_times[1:]
def getDbData(self, filename, format, Trace):
    # Get the database traceheader table mapper class.
    Header = self.project.dbTables['traceheader']

    # Find the waveform directory holding the file.
    wfDirId = ""
    for curWfDir in self.project.waveclient['db client'].waveformDirList:
        if filename.startswith(curWfDir.alias):
            wfDirId = curWfDir.id
            break

    if wfDirId:
        # Remove the waveform directory from the file path.
        relativeFilename = filename.replace(curWfDir.alias, '')
        relativeFilename = relativeFilename[1:]

        labels = ['id', 'file_type', 'wf_id', 'filename', 'orig_path',
                  'network', 'recorder_serial', 'stream', 'sps', 'numsamp',
                  'begin_date', 'begin_time', 'agency_uri', 'author_uri',
                  'creation_time']
        # The values have to follow the order of the labels list.
        header2Insert = dict(list(zip(labels,
                                      (None,
                                       format,
                                       wfDirId,
                                       relativeFilename,
                                       os.path.dirname(filename),
                                       Trace.stats.network,
                                       Trace.stats.station,
                                       Trace.stats.location + ":" + Trace.stats.channel,
                                       Trace.stats.sampling_rate,
                                       Trace.stats.npts,
                                       Trace.stats.starttime.isoformat(' '),
                                       Trace.stats.starttime.timestamp,
                                       self.project.activeUser.agency_uri,
                                       self.project.activeUser.author_uri,
                                       op_utcdatetime.UTCDateTime().isoformat()))))
        return Header(**header2Insert)
    else:
        self.logger.error("File %s is not inside a waveform directory. Skipping this trace.",
                          filename)
        return None
def measure_view(self, event, parent):
    ''' Measure the seismogram line in the seismogram view.
    '''
    measurement = self.view.measure(event)

    if measurement is None:
        return

    if isinstance(measurement, dict):
        measurement = [measurement, ]

    measure_string = ''
    for cur_measurement in measurement:
        cur_axes = cur_measurement['axes']
        xy = cur_measurement['xy']

        # Create the crosshair lines for this view/axes pair on demand.
        if (self.view, cur_axes) not in self.crosshair:
            ml_x = cur_axes.axvline(x = xy[0])
            ml_y = cur_axes.axhline(y = xy[1])
            self.crosshair[(self.view, cur_axes)] = (ml_x, ml_y)

        cur_crosshair = self.crosshair[(self.view, cur_axes)]
        for cur_line in cur_crosshair:
            cur_line.set_color('r')
        cur_crosshair[0].set_xdata(xy[0])
        cur_crosshair[1].set_ydata(xy[1])

        date_string = utcdatetime.UTCDateTime(xy[0])
        measure_string += 'time: {0:s}\n{1:s}: {2:g}\n\n'.format(date_string.isoformat(),
                                                                 cur_measurement['label'],
                                                                 xy[1])

    self.view.set_annotation(measure_string)
    self.view.draw()
def read_locs_from_file(filename):
    from obspy.core import utcdatetime

    locs = []
    with open(filename, 'r') as f:
        lines = f.readlines()

    for line in lines:
        # Skip blank lines and comment lines starting with '#'.
        if not line.isspace() and line.split()[0][0] != '#':
            words = line.split()
            loc = {}
            loc['max_trig'] = float(words[2].split(',')[0])
            loc['o_time'] = utcdatetime.UTCDateTime(words[3])
            loc['o_err_left'] = float(words[5])
            loc['o_err_right'] = float(words[8])
            loc['x_mean'] = float(words[11])
            loc['x_sigma'] = float(words[13])
            loc['y_mean'] = float(words[16])
            loc['y_sigma'] = float(words[18])
            loc['z_mean'] = float(words[21])
            loc['z_sigma'] = float(words[23])
            if len(words) > 25:
                loc['ml'] = float(words[26])
            locs.append(loc)
    return locs
def getEvent(self):
    _offset = 120  # sec
    _end = datetime.datetime.utcnow() - datetime.timedelta(seconds=_offset)
    _begin = obspy_utc.UTCDateTime(_end - datetime.timedelta(seconds=self.interval - 1))
    # _begin = datetime.datetime(2007, 9, 1, 0, 0, 0)  # needs remove!!
    _end_date = _end.strftime("%Y-%m-%d %H:%M:%S.%f")
    _begin_date = _begin.strftime("%Y-%m-%d %H:%M:%S.%f")
    _command = (self.command +
                " --begin \"" + _begin_date + "\"" +
                " --end \"" + _end_date + "\"")
    _proc = subprocess.Popen(_command,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             shell=True,
                             universal_newlines=True)
    _event_id = _proc.communicate()[0].split("\n")
    _event_id = _event_id[:-1]  # remove the trailing empty element
    if _event_id:
        return str(_event_id[0])
    else:
        return None
def qd_read_picks_from_hyp_file(filename):
    with open(filename, 'r') as f:
        lines = f.readlines()

    # Find the start of the PHASE block.
    iline_phase = None
    for iline, line in enumerate(lines):
        words = line.split()
        if words and words[0] == 'PHASE':
            iline_phase = iline
            break
    if iline_phase is None:
        return {}

    phases = {}
    for line in lines[iline_phase + 1:]:
        words = line.split()
        try:
            if words[4] == 'P':
                station = words[0]
                year = int(words[6][0:4])
                month = int(words[6][4:6])
                day = int(words[6][6:8])
                hour = int(words[7][0:2])
                minute = int(words[7][2:4])
                seconds = float(words[8])
                ptime = utcdatetime.UTCDateTime(year, month, day,
                                                hour, minute, seconds)
                phases[station] = ptime
        except IndexError:
            pass
    return phases
def qd_read_hyp_file(filename):
    with open(filename, 'r') as f:
        lines = f.readlines()

    for line in lines:
        words = line.split()
        try:
            if words[0] == 'HYPOCENTER':
                hypo_x = float(words[2])
                hypo_y = float(words[4])
                hypo_z = float(words[6])
            if words[0] == 'GEOGRAPHIC':
                year = int(words[2])
                month = int(words[3])
                day = int(words[4])
                hour = int(words[5])
                minute = int(words[6])
                seconds = float(words[7])
                otime = utcdatetime.UTCDateTime(year, month, day,
                                                hour, minute, seconds)
            if words[0] == 'STATISTICS':
                sigma_x = np.sqrt(float(words[8]))
                sigma_y = np.sqrt(float(words[14]))
                sigma_z = np.sqrt(float(words[18]))
        except IndexError:
            pass

    return (otime, hypo_x, sigma_x, hypo_y, sigma_y, hypo_z, sigma_z)
def read_prob_locs_from_file(filename):
    """ Read a file containing probability-determined locations.

    :param filename: File to be read.
    :returns: List of location dictionaries.
    """
    from obspy.core import utcdatetime

    locs = []
    with open(filename, 'r') as f:
        lines = f.readlines()

    for line in lines:
        words = line.split()
        loc = {}
        loc['o_time'] = utcdatetime.UTCDateTime(words[5])
        loc['o_err'] = float(words[8])
        loc['x_mean'] = float(words[11])
        loc['x_sigma'] = float(words[13])
        loc['y_mean'] = float(words[16])
        loc['y_sigma'] = float(words[18])
        loc['z_mean'] = float(words[21])
        loc['z_sigma'] = float(words[23])
        locs.append(loc)

    return locs
def common_events_catalogs():
    cat_dir = '/home/nadege/Desktop/IJEN_catalogues'
    list_cat = ['DB_moi_detect_volc_common_POS.csv',
                'DB_moi_detect_volc_common_POSbis.csv']
    # ,'DB_detect_volc_common_new7.csv']

    cat_dates = []
    for cat in list_cat:
        df = pd.read_csv('%s/%s' % (cat_dir, cat))
        cat_dates.append(np.array(df[df.columns[0]].values))
        print(cat, len(df))

    # Convert all date strings to UTCDateTime objects.
    for i in range(len(cat_dates)):
        for j in range(len(cat_dates[i])):
            cat_dates[i][j] = utcdatetime.UTCDateTime(cat_dates[i][j])

    # Find the dates of the second catalog which also appear in the first one.
    diff = np.array([np.min(np.abs(cat_dates[0] - utcdatetime.UTCDateTime(date)))
                     for date in cat_dates[1]])
    id_common = np.where(diff == 0.)[0]

    print("\nDates")
    dates = np.array(cat_dates[1])[id_common]
    print(dates)

    df = pd.read_csv('%s/DB_moi_detect_volc_common_POS.csv' % cat_dir)
    pos_spec = df[df.columns[2]].values[id_common]
    print("\nSpectral classification")
    print(pos_spec)
    pos_wf = df[df.columns[3]].values[id_common]
    print("\nWaveform classification")
    print(pos_wf)

    df = pd.read_csv('%s/DB_moi_detect_volc_common_POSbis.csv' % cat_dir)
    pos_bis_indo = df[df.columns[2]].values[id_common]
    print("\nSpectral classification")
    print(pos_bis_indo)

    datadir = '/home/nadege/Desktop/NEW_CLASS/Cat_POS/POS'
    for idate, date in enumerate(dates):
        # Print, in this order, the spectral, waveform and Indonesian
        # classifications.
        print(date, pos_spec[idate], pos_wf[idate], pos_bis_indo[idate])
        st = read('%s/*%d%02d%02d_%02d%02d%02d*' % (datadir, date.year,
                                                    date.month, date.day,
                                                    date.hour, date.minute,
                                                    date.second))
        st.filter('bandpass', freqmin=1, freqmax=10)
        st.plot()
def get_sampling_rate(info, ev):
    channels = parser.getInventory()["channels"]
    time = utcdatetime.UTCDateTime(ev["datetime"])
    for channel in channels:
        channel_id = channel.pop("channel_id")
        net, sta, loc, comp = channel_id.split(".")
        # Open-ended channel epochs get an end date in the future.
        if not channel["end_date"]:
            channel["end_date"] = utcdatetime.UTCDateTime.now() + 1
        if (net == info["network"] and sta == info["station"]
                and loc == info["loc"] and comp == info["comp"]):
            if (utcdatetime.UTCDateTime(channel["start_date"]) <= time
                    and utcdatetime.UTCDateTime(channel["end_date"]) >= time):
                return int(channel["sampling_rate"])
    return 0
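# Usage sketch for get_sampling_rate(). It relies on a module-level `parser`
# (e.g. an obspy Parser built from a dataless SEED volume); the station and
# time below are illustrative values only.
def _example_get_sampling_rate():
    info = {'network': 'YA', 'station': 'UV15', 'loc': '', 'comp': 'HHZ'}
    ev = {'datetime': '2011-02-02T00:58:47'}
    return get_sampling_rate(info, ev)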
def create_selector_preferences(self):
    ''' Create the preference items of the event selection section.
    '''
    events_page = self.pref_manager.add_page('events')
    time_group = events_page.add_group('time span')
    event_group = events_page.add_group('event selection')

    item = psy_pm.DateTimeEditPrefItem(name = 'start_time',
                                       label = 'start time',
                                       value = utcdatetime.UTCDateTime('2015-01-01T00:00:00'),
                                       tool_tip = 'The start time of the selection time span (UTCDateTime string format YYYY-MM-DDTHH:MM:SS).')
    time_group.add_item(item)

    item = psy_pm.DateTimeEditPrefItem(name = 'end_time',
                                       label = 'end time',
                                       value = utcdatetime.UTCDateTime('2015-01-01T00:00:00'),
                                       tool_tip = 'The end time of the selection time span (UTCDateTime string format YYYY-MM-DDTHH:MM:SS).')
    time_group.add_item(item)

    item = psy_pm.SingleChoicePrefItem(name = 'event_catalog',
                                       label = 'event catalog',
                                       value = '',
                                       limit = [],
                                       tool_tip = 'Select an event catalog for which to load the events.')
    event_group.add_item(item)

    item = psy_pm.CheckBoxPrefItem(name = 'select_individual',
                                   label = 'select individual events',
                                   value = False,
                                   tool_tip = 'Do a manual selection of the events to process.',
                                   hooks = {'on_value_change': self.on_select_individual})
    event_group.add_item(item)

    item = psy_pm.ActionItem(name = 'load_events',
                             label = 'load events',
                             mode = 'button',
                             action = self.on_load_events,
                             tool_tip = 'Load events from the database.')
    event_group.add_item(item)

    item = psy_pm.CustomPrefItem(name = 'events',
                                 label = 'events',
                                 value = [],
                                 gui_class = plugins_event_selector.EventListField,
                                 tool_tip = 'The available events. Selected events will be used for processing.')
    event_group.add_item(item)
def parse_origin_block(self, cur_line):
    ''' Parse the IMS1.0 origin block.
    '''
    self.logger.debug('Parsing the origin block.')

    if len(cur_line) < 136:
        self.logger.warning("The length of the origin block line is too small. Skipping this line: %s",
                            cur_line)
        return

    cur_origin = {}

    cur_date = cur_line[0:10]
    [cur_year, cur_month, cur_day] = cur_date.split('/')
    cur_time = cur_line[11:22]
    [cur_hour, cur_min, cur_sec, cur_ms] = re.split(r'[:.]', cur_time)
    cur_origin['starttime'] = utcdatetime.UTCDateTime(year = int(cur_year),
                                                      month = int(cur_month),
                                                      day = int(cur_day),
                                                      hour = int(cur_hour),
                                                      minute = int(cur_min),
                                                      second = int(cur_sec),
                                                      microsecond = int(cur_ms) * 1000)
    cur_origin['fixed_ot_flag'] = cur_line[22]

    cur_ot_error = cur_line[24:29]
    if len(cur_ot_error.strip()) == 0:
        cur_ot_error = None
    else:
        cur_ot_error = float(cur_ot_error)
    cur_origin['origin_time_error'] = cur_ot_error

    cur_origin['rms_residuals'] = float(cur_line[30:35])
    cur_origin['latitude'] = float(cur_line[36:44])
    cur_origin['longitude'] = float(cur_line[45:54])
    cur_origin['fixed_epi_flag'] = cur_line[54].strip()
    cur_origin['ellips_semi_major_axis'] = float(cur_line[56:60])
    cur_origin['ellips_semi_minor_axis'] = float(cur_line[61:66])
    cur_origin['ellips_strike'] = int(cur_line[67:70])
    cur_origin['depth'] = float(cur_line[71:76])
    cur_origin['fixed_depth_flag'] = cur_line[76].strip()

    # The depth error occupies columns 79-82 of the IMS1.0 origin line.
    cur_depth_error = cur_line[78:82]
    if len(cur_depth_error.strip()) == 0:
        cur_depth_error = None
    else:
        cur_depth_error = float(cur_depth_error)
    cur_origin['depth_error'] = cur_depth_error

    cur_origin['num_def_phases'] = int(cur_line[83:87])
    cur_origin['num_def_stations'] = int(cur_line[88:92])
    cur_origin['gap'] = int(cur_line[93:96])
    cur_origin['dist_closest_station'] = float(cur_line[97:103])
    cur_origin['dist_furthest_station'] = float(cur_line[104:110])
    cur_origin['analysis_type'] = cur_line[111].strip()
    cur_origin['location_method'] = cur_line[113].strip()
    cur_origin['event_type'] = cur_line[115:117].strip()
    cur_origin['author'] = cur_line[118:127].strip()
    cur_origin['origin_id'] = cur_line[128:136].strip()

    self.event_dict['origins'].append(cur_origin)
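# A hedged driver sketch for parse_origin_block(). It assumes the parser
# object exposes event_dict and logger as used above and that the origin
# lines have already been isolated from the surrounding IMS1.0 bulletin
# blocks.
def _example_parse_origin_lines(parser, origin_lines):
    for cur_line in origin_lines:
        parser.parse_origin_block(cur_line.rstrip('\n'))
    return parser.event_dict['origins']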
def __init__(self, name, mode='time', description=None, agency_uri=None,
             author_uri=None, creation_time=None, db_id=None):
    ''' Initialize the instance.
    '''
    # The logging logger instance.
    logger_prefix = psysmon.logConfig['package_prefix']
    loggerName = logger_prefix + "." + __name__ + "." + self.__class__.__name__
    self.logger = logging.getLogger(loggerName)

    # The unique database ID.
    self.db_id = db_id

    # The name of the catalog.
    self.name = name

    # The mode of the catalog ('time', 'amplitude', 'amplitude-range').
    self.mode = mode

    # The description of the catalog.
    self.description = description

    # The agency_uri of the creator.
    self.agency_uri = agency_uri

    # The author_uri of the creator.
    self.author_uri = author_uri

    # The time of creation of this catalog.
    if creation_time is None:
        self.creation_time = utcdatetime.UTCDateTime()
    else:
        self.creation_time = utcdatetime.UTCDateTime(creation_time)

    # The picks of the catalog.
    self.picks = []
def plotOneSampleWF_redac():
    """ Draw a random event from each class and plot the waveforms.
    One figure with all waveforms.
    """
    from matplotlib.gridspec import GridSpec

    datadir = '../data/Ijen/ID/IJEN/EHZ.D'
    catname = '../lib/Ijen/Ijen_reclass_all.csv'
    df = pd.read_csv(catname)
    df = df.dropna(how='any')
    #df = df.reindex(index=df[df.Type!='?'].index)
    df = df.reindex(index=df[df.Type != 'n'].index)

    tuniq = np.unique(df.Type.values)
    print(tuniq)

    fig = plt.figure(figsize=(12, 12))
    fig.set_facecolor('white')
    grid = GridSpec(16, 3)

    for i in range(len(tuniq)):
        df_type = df[df.Type == tuniq[i]]
        permut = np.random.permutation(df_type.index)
        marker = 1
        j = 0
        compteur = 0
        # Keep drawing random events until three waveforms have been plotted.
        while marker:
            p = permut[j]
            j = j + 1
            date = utcdatetime.UTCDateTime(str(df.Date[p]))
            files = glob.glob('%s/*%d%02d%02d_%02d%02d%02d*' %
                              (datadir, date.year, date.month, date.day,
                               date.hour, date.minute, date.second))
            files.sort()
            if len(files) > 0:
                compteur = compteur + 1
                if compteur == 3:
                    marker = 0
                cur_file = files[0]
                st = read(cur_file)
                st.filter('bandpass', freqmin=1, freqmax=10)
                if compteur == 1:
                    ax = fig.add_subplot(grid[2 * i + 1, :])
                else:
                    ax = fig.add_subplot(grid[2 * i, compteur - 1])
                ax.plot(st[0].data, 'k')
                ax.set_axis_off()

        # Label the row with the class name.
        ax = fig.add_subplot(grid[2 * i, 0])
        ax.text(.2, .5, tuniq[i], transform=ax.transAxes)
        ax.set_axis_off()

    plt.savefig('/home/nadege/Desktop/sample_waveforms.png')
    plt.show()
def from_orm(cls, pick_orm, inventory = None):
    ''' Convert a database orm mapper pick to a pick instance.

    Parameters
    ----------
    pick_orm : SQLAlchemy ORM
        The ORM of the pick database table.

    inventory : Inventory
        The inventory used to resolve the channel of the pick (optional).
    '''
    if inventory is None:
        channel = None
    else:
        channel = inventory.get_channel_from_stream(name = pick_orm.stream.name,
                                                    serial = pick_orm.stream.parent.serial,
                                                    start_time = utcdatetime.UTCDateTime(pick_orm.time),
                                                    end_time = utcdatetime.UTCDateTime(pick_orm.time))
        # Accept the channel only if the match is unambiguous.
        if channel and len(channel) == 1:
            channel = channel[0]
        else:
            channel = None

    pick = cls(db_id = pick_orm.id,
               event_id = pick_orm.ev_id,
               channel = channel,
               label = pick_orm.label,
               time = utcdatetime.UTCDateTime(pick_orm.time),
               amp1 = pick_orm.amp1,
               amp2 = pick_orm.amp2,
               first_motion = pick_orm.first_motion,
               error = pick_orm.error,
               agency_uri = pick_orm.agency_uri,
               author_uri = pick_orm.author_uri,
               creation_time = utcdatetime.UTCDateTime(pick_orm.creation_time))
    return pick
def do_double_diff(x, y, z, to, stations, coeff, delay, cluster, threshold,
                   t_th, arr_times):
    """ Do the double difference location (inner routine) and return the new
    coordinates.

    :param x: x-coordinates of events of a given cluster
    :param y: y-coordinates of events of a given cluster
    :param z: z-coordinates of events of a given cluster
    :param to: origin times of events of a given cluster
    :param stations: dictionary of stations
    :param coeff: cross-correlation coefficients between all possible pairs of events
    :param delay: time delays measured between all possible pairs of events
    :param cluster: indices of events in the cluster
    :param threshold: minimum value of the cross-correlation coefficient used to form a cluster
    :param t_th: theoretical traveltimes
    :param arr_times: theoretical arrival times

    :type x: list
    :type y: list
    :type z: list
    :type to: list
    :type stations: dictionary
    :type coeff: dictionary
    :type delay: dictionary
    :type cluster: list
    :type threshold: float
    :type t_th: dictionary
    :type arr_times: dictionary

    :rtype: list
    :returns: x, y, z, to
    """
    N = len(cluster)

    # Fill G, d and W.
    G, d, W = fill_matrix(cluster, x, y, z, to, stations, t_th, arr_times,
                          coeff, delay, threshold)

    # Centroid constraint: add 4 lines to G, d and W.
    G, d, W = centroid_constraint(G, d, W)

    # Inversion.
    m = inversion(G, d, W)

    # Apply the (dx, dy, dz, dto) corrections to each event of the cluster.
    for i in range(N):
        x[i] = x[i] + m[4 * i, 0]
        y[i] = y[i] + m[4 * i + 1, 0]
        z[i] = z[i] + m[4 * i + 2, 0]
        to[i] = utcdatetime.UTCDateTime(to[i]) + m[4 * i + 3, 0]

    return x, y, z, to
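# The model vector m returned by inversion() stacks one (dx, dy, dz, dto)
# correction block per event, which is why do_double_diff() reads it with a
# stride of 4. A toy illustration of that unpacking (the layout is inferred
# from the loop above):
def _example_unpack_model_vector(m, n_events):
    import numpy as np
    m = np.asarray(m).reshape(-1, 1)
    return [tuple(m[4 * i:4 * i + 4, 0]) for i in range(n_events)]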
def create_timespan_prefs(self):
    ''' Create the time-span preference items.
    '''
    timespan_page = self.pref_manager.add_page('time-span')
    time_group = timespan_page.add_group('time-span')

    pref_item = psy_pm.DateTimeEditPrefItem(name = 'start_time',
                                            label = 'start time',
                                            value = utcdatetime.UTCDateTime('2012-07-09T00:00:00'),
                                            tool_tip = 'The start time overriding the start time preference values of all collection nodes in the collection.')
    time_group.add_item(pref_item)

    pref_item = psy_pm.DateTimeEditPrefItem(name = 'end_time',
                                            label = 'end time',
                                            value = utcdatetime.UTCDateTime('2012-07-09T00:00:00'),
                                            tool_tip = 'The end time overriding the end time preference values of all collection nodes in the collection.')
    time_group.add_item(pref_item)
def plot_trace(self):
    event_time = self.filtered_catalog.utc_timestamp.values[self.event_idx]
    self.statusBar.showMessage('Event {} of {}: {}'.format(self.event_idx + 1,
                                                           self.num_events,
                                                           utc.UTCDateTime(event_time)))
    window_sz = 20  # in sec
    utc_time = utc.UTCDateTime(event_time)
    start = utc_time
    end = utc_time + window_sz

    local_stream = self.stream.slice(start, end)
    local_stream.filter('highpass', freq=2.0)

    sample_rate = local_stream[0].stats.sampling_rate
    event_sample = (utc_time - start) * sample_rate

    # Demean every trace of the sliced stream.
    n_traces = len(local_stream)
    n_samples = len(local_stream[0].data)
    data = np.zeros((n_traces, n_samples), dtype=np.float32)
    for i in range(n_traces):
        data[i, :] = local_stream[i].data[...]
        mean = np.mean(data[i, :])
        data[i, :] -= mean

    self.trace_x.clear()
    self.trace_y.clear()
    self.trace_z.clear()
    self.trace_x.plot(data[0, :], pen=(255, 120, 120, 200))
    self.trace_y.plot(data[1, :], pen=(120, 255, 120, 200))
    self.trace_z.plot(data[2, :], pen=(120, 120, 255, 200))

    # Highlight a 1 s region starting at the event sample.
    self.lrx = pg.LinearRegionItem([event_sample, event_sample + sample_rate * 1])
    self.lrx.setZValue(-10)
    self.trace_x.addItem(self.lrx)
def test_polarization(self):
    #self.mat = np.array([[1,0,0],[0,-1,1],[1,1,1]])
    #l1, l2, l3 = np.sqrt(2), 1, -np.sqrt(2)
    #expRect = 1 - (l2+l3)/(2*l1)
    #expPlan = 1 - (2*l3)/(l1+l2)
    #rect, plan, lambda_max = polarization_analysis(self.mat,plot=False)
    #self.assertAlmostEqual(lambda_max,l1,places=6)
    #self.assertAlmostEqual(rect,expRect,places=6)
    #self.assertAlmostEqual(plan,expPlan,places=6)

    t_before = 115
    t_after = 135
    file_z = "/home/nadege/waveloc/data/Piton/2011-02-02/2011-02-02T00:00:00.YA.UV15.HHZ.filt.mseed"
    file_n = "%s/HHN/*UV15*HHN*.filt.*" % (os.path.dirname(file_z))
    file_e = "%s/HHE/*UV15*HHE*.filt.*" % (os.path.dirname(file_z))
    filenames = [file_n, file_e, file_z]
    cmin = utcdatetime.UTCDateTime("2011-02-02T00:58:47.720000Z") - t_before
    cmax = utcdatetime.UTCDateTime("2011-02-02T00:58:47.720000Z") + t_after
    ponset = 1400

    rect, plan, azimuth, iangle = polarization_analysis(filenames,
                                                        t_before + t_after,
                                                        ponset)

    expRect = 0.715057485804
    expPlan = 0.629470186394
    expAz = -0.328873516012
    expIAn = 2.30349693071

    self.assertAlmostEqual(rect, expRect, places=6)
    self.assertAlmostEqual(plan, expPlan, places=6)
    self.assertAlmostEqual(azimuth, expAz, places=6)
    self.assertAlmostEqual(iangle, expIAn, places=6)
def add_catalog(self, name, description):
    ''' Add a catalog to the database.
    '''
    cat_table = self.project.dbTables['event_catalog']
    cat_orm = cat_table(name=name,
                        description=description,
                        agency_uri=self.project.activeUser.agency_uri,
                        author_uri=self.project.activeUser.author_uri,
                        creation_time=op_utcdatetime.UTCDateTime().isoformat())
    self.db_session.add(cat_orm)
    self.db_session.commit()
    self.catalogs.append(cat_orm)
    self.update_list_ctrl()
def traveltimes(x, y, z, t_orig, stations, time_grids):
    ''' Compute the theoretical traveltimes and arrival times for all stations.
    '''
    t_th = {}
    arr_times = {}

    for staname in sorted(stations):
        if staname not in time_grids:
            logging.info("%s station not in time_grids" % staname)
            continue
        t_th[staname] = []
        arr_times[staname] = []
        for i in range(len(x)):
            # Theoretical traveltime from the event location to the station.
            t_th[staname].append(time_grids[staname].value_at_point(x[i], y[i], z[i]))
            # Theoretical arrival time.
            arr_times[staname].append(utcdatetime.UTCDateTime(t_orig[i]) + t_th[staname][i])

    return t_th, arr_times
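# Usage sketch for traveltimes(). time_grids is assumed to map station names
# to grid objects exposing value_at_point(x, y, z); all values below are
# illustrative.
def _example_traveltimes(time_grids):
    from obspy.core import utcdatetime

    x, y, z = [366.0], [7650.0], [2.0]
    t_orig = [utcdatetime.UTCDateTime('2011-02-02T00:58:47.72')]
    stations = {'UV15': None}  # only the station names (keys) are used
    return traveltimes(x, y, z, t_orig, stations, time_grids)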
def get_datafile_db_data(self, filename, file_format, waveform_dir):
    filestat = os.stat(filename)

    # Remove the waveform directory from the file path.
    relativeFilename = filename.replace(waveform_dir.alias, '')
    relativeFilename = relativeFilename[1:]

    labels = ['id', 'wf_id', 'filename', 'filesize', 'file_type',
              'orig_path', 'agency_uri', 'author_uri', 'creation_time']
    # The values have to follow the order of the labels list.
    db_data = dict(list(zip(labels,
                            (None,
                             waveform_dir.id,
                             relativeFilename,
                             filestat.st_size,
                             file_format,
                             os.path.dirname(filename),
                             self.project.activeUser.agency_uri,
                             self.project.activeUser.author_uri,
                             utcdatetime.UTCDateTime().isoformat()))))
    return self.datafile(**db_data)