class ReportIndexEntry(Object):
    path = String.T()
    problem_name = String.T()
    event_reference = Event.T(optional=True)
    event_best = Event.T(optional=True)
    silvertine_version = String.T(optional=True)
    run_info = RunInfo.T(optional=True)
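# Usage sketch (not from the original module; the file name is
# hypothetical): guts objects such as ReportIndexEntry round-trip
# through YAML.
#
#     from pyrocko.guts import load
#
#     entry = ReportIndexEntry(path='runs/ev01', problem_name='ev01')
#     entry.dump(filename='report-index-entry.yaml')
#     entry = load(filename='report-index-entry.yaml')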
def setup(self):
    self.data_pile = pile.make_pile(
        self.data_paths, fileformat=self.data_format)

    if self.data_pile.is_empty():
        sys.exit('Data pile is empty!')

    self.deltat_want = self.config.deltat_want or \
        min(self.data_pile.deltats.keys())

    self.n_samples = int(
        (self.config.sample_length + self.config.tpad) / self.deltat_want)

    logger.debug('loading marker file %s' % self.fn_markers)

    # loads just plain markers:
    markers = marker.load_markers(self.fn_markers)

    if self.fn_events:
        markers.extend(
            [marker.EventMarker(e) for e in load_events(self.fn_events)])

    if self.sort_markers:
        logger.info('sorting markers!')
        markers.sort(key=lambda x: x.tmin)

    marker.associate_phases_to_events(markers)

    markers_by_nsl = {}
    for m in markers:
        if not m.match_nsl(self.config.reference_target.codes[:3]):
            continue

        if m.get_phasename().upper() != self.align_phase:
            continue

        markers_by_nsl.setdefault(m.one_nslc()[:3], []).append(m)

    assert len(markers_by_nsl) == 1

    # filter markers that do not have an event assigned:
    self.markers = list(markers_by_nsl.values())[0]

    if not self.labeled:
        dummy_event = Event(lat=0., lon=0., depth=0.)
        for m in self.markers:
            if not m.get_event():
                m.set_event(dummy_event)

    self.markers = [m for m in self.markers if m.get_event() is not None]

    if not len(self.markers):
        raise Exception('No markers left in dataset')

    self.config.channels = list(self.data_pile.nslc_ids.keys())
    self.config.channels.sort()
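# Numeric example for the window length computed in setup() above
# (values are made up): sample_length=10.0 s, tpad=2.0 s,
# deltat_want=0.01 s -> n_samples = int((10.0 + 2.0) / 0.01) = 1200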
def setup(self):
    self.data_pile = pile.make_pile(
        self.data_paths, fileformat=self.data_format)

    if self.data_pile.is_empty():
        sys.exit('Data pile is empty!')

    self.deltat_want = self.config.deltat_want or \
        min(self.data_pile.deltats.keys())

    self.n_samples = int(
        (self.config.sample_length + self.config.tpad) / self.deltat_want)

    logger.debug('loading marker file %s' % self.fn_markers)

    # loads just plain markers:
    markers = marker.load_markers(self.fn_markers)

    if self.fn_events:
        markers.extend(
            [marker.EventMarker(e) for e in load_events(self.fn_events)])

    marker.associate_phases_to_events(markers)
    markers = [m for m in markers if isinstance(m, marker.PhaseMarker)]

    # group the alignment-phase markers by event:
    markers_dict = defaultdict(list)
    for m in markers:
        if m.get_phasename().upper() != self.align_phase:
            continue

        markers_dict[m.get_event()].append(m)

    # keep only the earliest pick per event:
    self.markers = []
    for e, _markers in markers_dict.items():
        first = min(_markers, key=lambda x: x.tmin)
        self.markers.append(first)

    if not self.labeled:
        dummy_event = Event(lat=0., lon=0., depth=0.)
        for m in self.markers:
            if not m.get_event():
                m.set_event(dummy_event)

    self.markers = [m for m in self.markers if m.get_event() is not None]

    if not len(self.markers):
        raise Exception('No markers left in dataset')

    self.config.channels = list(self.data_pile.nslc_ids.keys())
    self.config.channels.sort()
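# Stand-alone sketch of the grouping idiom used in setup() above: collect
# picks per event, then keep only the earliest one per event. Event names
# and times are made up.
#
#     from collections import defaultdict
#
#     picks = [('ev1', 12.5), ('ev1', 10.0), ('ev2', 7.2)]  # (event, tmin)
#     by_event = defaultdict(list)
#     for event, tmin in picks:
#         by_event[event].append(tmin)
#
#     first = {event: min(tmins) for event, tmins in by_event.items()}
#     # first == {'ev1': 10.0, 'ev2': 7.2}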
if True:
    _map.gmt.psmeca(
        S='%s%g' % ('m', size_cm * 2.0),
        # G=gmtpy.color(colors[e.cluster]),
        # G=colors[i_e],
        G='red',
        C='3p,0/0/0',
        # W='thinnest,%i/%i/%i' % (255, 255, 255),
        # L='thinnest,%i/%i/%i' % (255, 255, 255),
        in_rows=[data],
        *_map.jxyr)

_map.save(outpath=outfn)


if __name__ == '__main__':
    e = list(Event.load_catalog(filename='event.pf'))[0]
    # stations = model.load_stations('arrays.pf')
    stations = model.load_stations('array_center.pf')
    color_wet = [200, 200, 200]
    color_dry = [253, 253, 253]
    params = MapParameters(
        lat=e.lat, lon=e.lon, radius=8000000,
        outfn='array-map-new.pdf', stations=stations, events=[e],
        show_topo=False, show_grid=False,
        color_wet=color_wet, color_dry=color_dry)

    make_map(map_parameters=params)
# psmeca -Sm input row: lon, lat, depth, mrr, mtt, mff, mrt, mrf, mtf,
# exponent, plot-lon, plot-lat, label
data = (e.lon, e.lat, 10, 1, 1, 1, 0, 0, 0, 1, e.lon, e.lat, 'Test site')

if True:
    _map.gmt.psmeca(
        S='%s%g' % ('m', size_cm * 2.0),
        # G=gmtpy.color(colors[e.cluster]),
        # G=colors[i_e],
        G='red',
        C='3p,0/0/0',
        # W='thinnest,%i/%i/%i' % (255, 255, 255),
        # L='thinnest,%i/%i/%i' % (255, 255, 255),
        in_rows=[data],
        *_map.jxyr)

_map.save(outpath=outfn)


if __name__ == '__main__':
    e = list(Event.load_catalog(filename='event.pf'))[0]
    # stations = model.load_stations('arrays.pf')
    stations = model.load_stations('array_center.pf')
    color_wet = [200, 200, 200]
    color_dry = [253, 253, 253]
    params = MapParameters(
        lat=e.lat, lon=e.lon, radius=8000000,
        outfn='array-map-new.pdf', stations=stations, events=[e],
        show_topo=False, show_grid=False,
        color_wet=color_wet, color_dry=color_dry)

    make_map(map_parameters=params)
    print('.' * 40)

    fdomain_station_locs = []
    # with open('northkoreaplot/stations.table.mec', 'r') as f:
    #     for line in f.readlines():
    #         lat, lon, c = line.split()
    config_ids = [c.id for c in configs]
    return config_ids


if __name__ == '__main__':
    import argparse
    from pyrocko.model import Event, load_stations

    parser = argparse.ArgumentParser('suggest a store for P phases only')
    parser.add_argument('--stations', help='stations file')
    parser.add_argument('--events', help='event file')
    parser.add_argument('--force', action='store_true',
                        help='force overwrite')
    parser.add_argument('--superdir', default='.',
                        help='directory where to put the store')
    parser.add_argument(
        '--number_of_distances',
        help='number of distances between outer grid nodes in GFDB',
        default=2)

    args = parser.parse_args()

    stations = load_stations(args.stations)
    if len(stations) == 1:
        s = stations[0]
    else:
        parser.error('expected exactly one station in %s' % args.stations)

    events = list(Event.load_catalog(args.events))
    propose_store(s, events, superdir=args.superdir, force=args.force)
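# Example invocation (script and file names are hypothetical):
#
#     python propose_store.py --stations stations.txt --events events.pf \
#         --superdir gf_stores --force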
tmin = str_to_time('2013-01-01 00:00:00')
tmax = str_to_time('2014-01-01 00:00:00')

print('downloading event data...')
all_events = [c.get_events((tmin, tmax)) for c in catalogs]
print('done')

# unravel lists and keep only groups with at least two entries:
# TODO: the error bars have to be distinguished, so that events are not
# weighted by the number of catalogs in which they are available.
all_events = [item for sublist in all_events for item in sublist]
all_events_shallow = [e for e in all_events if e.depth < 33000]

magmin = 0
magmax = 6.

grouped_events = Event.grouped(all_events)
grouped_events = [g for g in grouped_events if len(g) >= 2]
grouped_events = filter_by_attribute(
    grouped_events, attr='magnitude', maxlim=magmax, minlim=magmin)
remove_duplicates(grouped_events)

grouped_events_shallow = Event.grouped(all_events_shallow)
grouped_events_shallow = [
    g for g in grouped_events_shallow if len(g) >= 2]
grouped_events_shallow = filter_by_attribute(
    grouped_events_shallow, attr='magnitude', maxlim=8., minlim=0.)
remove_duplicates(grouped_events_shallow)

grp_mean = group_mean(grouped_events_shallow)

fig = plt.figure(figsize=(4, 3), dpi=160)
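# Sketch of the filter_by_attribute() helper used above (assumption: the
# original is defined elsewhere in this script; this is one plausible
# reading that keeps only groups whose events all lie within the limits):
#
#     def filter_by_attribute(groups, attr, maxlim, minlim):
#         return [g for g in groups
#                 if all(minlim <= getattr(e, attr) <= maxlim for e in g)]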
class ReportIndexEntry(Object):
    path = String.T()
    problem_name = String.T()
    event_reference = Event.T(optional=True)
    event_best = Event.T(optional=True)
    grond_version = String.T(optional=True)
    )

_map.gmt.psmeca(
    S="%s%g" % ("m", size_cm * 2.0),
    # G = gmtpy.color(colors[e.cluster]),
    # G = colors[i_e],
    G="red",
    C="3p,0/0/0",
    # W = 'thinnest,%i/%i/%i' % (255, 255, 255),
    # L = 'thinnest,%i/%i/%i' % (255, 255, 255),
    in_rows=[data],
    *_map.jxyr)

_map.save(outpath=outfn)


if __name__ == "__main__":
    e = list(Event.load_catalog(filename="event.pf"))[0]
    stations = model.load_stations("array_center.pf")
    color_wet = [200, 200, 200]
    color_dry = [253, 253, 253]
    params = MapParameters(
        lat=e.lat,
        lon=e.lon,
        radius=8000000,
        outfn="array-map-new.pdf",
        stations=stations,
        events=[e],
        show_topo=False,
        show_grid=False,
        color_wet=color_wet,
        color_dry=color_dry,
    )
        output[tuple(d)] = e

    return output


def readnsplit(fn):
    '''Read a file and split each line into whitespace-separated fields.'''
    with open(fn, 'r') as f:
        return [l.split() for l in f.readlines()]


def convert_time(t):
    return util.str_to_time('%s-%s-%s %s:%s:%s.' % (
        t[0:4], t[4:6], t[6:8], t[8:10], t[10:12], t[12:14]))


fn_loki = 'catalogue_loki.dat'
fn_mts = 'qresults_definitivo_ok.dat'

events = list(Event.load_catalog(fn_loki))
mts_data = readnsplit(fn_mts)

# check pattern
pattern = ['time', 'misfit', 'lat', 'lon', 'depth', 'magnitude', 'misfit2',
           '_', 'moment', 'strike', 'dip', 'rake', 'dz', 'dx', 'dy', 'rt']
wanted_arguments = ['moment', 'strike', 'dip', 'rake']
mts = classify(MomentTensor, mts_data, pattern, wanted_arguments)

time_threshold = 80.
print('number of events: ', len(events))
done = []
print('number of mts: ', len(mts.keys()))

# associate moment tensors with catalogue events by origin time:
for e in events:
    for d, mt in mts.items():
        # specific to this pattern because time is at first place:
        if abs(e.time - convert_time(d[0])) < time_threshold:
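# Quick sanity check of convert_time() with a made-up compact timestamp;
# it simply rewraps util.str_to_time():
#
#     convert_time('20130101120000')
#     # == util.str_to_time('2013-01-01 12:00:00.')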
    event_id = int(line[:4])
    date = line[5:27]
    try:
        t = datetime.strptime(date, '%d-%b-%Y %H:%M:%S.%f')
    except ValueError:
        continue

    # convert to seconds since the epoch:
    t = (t - datetime(1970, 1, 1)).total_seconds()

    lat = float(line[28:34])
    lon = float(line[35:41])
    depth = float(line[43:47])

    print(line[56:60])
    if year == 'temp':
        mag = float(line[56:60])
    else:
        print(line[60:64])
        mag = float(line[60:64])

    name = line[69:]
    e = Event(lat=lat, lon=lon, depth=depth, magnitude=mag, name=name,
              time=t, catalog='Colm')
    print(e)

    m = EventMarker(e)
    markers.append(m)

EventMarker.save_markers(markers, 'event_markers_Colm%s.txt' % year)
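# Assumed fixed-width layout of the input lines, reconstructed from the
# slices above (not taken from any original format documentation):
#
#     [0:4]    event id
#     [5:27]   origin time, '%d-%b-%Y %H:%M:%S.%f'
#     [28:34]  latitude
#     [35:41]  longitude
#     [43:47]  depth
#     [56:60]  magnitude for the 'temp' catalogue, else [60:64]
#     [69:]    event name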
if __name__=="__main__": import argparse from pyrocko.model import Event, load_stations parser = argparse.ArgumentParser('suggest a store for P phases only') parser.add_argument('--stations', help='stations file') parser.add_argument('--events', help='event file') parser.add_argument('--force', action='store_true', help='force_overwrite') parser.add_argument('--superdir', default='.', help='directory where to put the store') parser.add_argument('--number_of_distances', help='number of distances between outer grid nodes in GFDB', default=2) args = parser.parse_args() stations = load_stations(args.stations) if len(stations)==1: s = stations[0] events = list(Event.load_catalog(args.events)) propose_store(s, events, superdir=args.superdir, force=args.force)