def init():
    """Run the clustering script repeatedly and evaluate the results.

    Reads the number of runs from the event's YAML configuration, invokes
    ``cluster/cluster2.py`` once per run via ``os.system``, then calls
    ``cluster/evaluateCluster.py`` on the produced ``cluster`` directory.

    Relies on module-level ``options`` (parsed CLI options providing
    ``evpath``) and the project ``config``/``guts`` modules.
    """
    C = config.Config(options.evpath)
    print(options.evpath)
    # Parsed for parity with the other entry points; not read below.
    Config = C.parseConfig('config')
    yaml_file = C.parseConfig('yaml')
    cfg = guts.load(filename=yaml_file[0])
    tests = int(cfg.config_cluster.runs)

    # Locate the installed palantiri package to find the cluster scripts.
    import palantiri
    path = palantiri.__path__

    # Run the clustering script once per configured run.
    at = os.path.join(path[0], 'cluster/cluster2.py')
    cmd = sys.executable + ' ' + at + ' -f ' + options.evpath
    print('cmd = ', cmd)
    for i in range(tests):
        print('RUN: ', i)
        os.system(cmd)

    # Evaluate the accumulated cluster results.  (A dead intermediate
    # assignment of ``cmd`` that was immediately overwritten has been
    # removed.)
    at = os.path.join(path[0], 'cluster/evaluateCluster.py')
    cmd = sys.executable + ' ' + at + ' -f ' + os.path.join(options.evpath,
                                                            "cluster")
    os.system(cmd)
def process(self):
    """Collect station metadata for the event, filter it and launch ``km``.

    Station metadata is read from one of three sources depending on the
    configuration: a pyrocko download, a colosseo scenario YAML (which also
    overwrites the origin parameters from the scenario's first event), or a
    plain meta-info file.  Returns ``True`` on completion.
    """
    start_time = time.time()

    reader = config.Config(self.eventpath)
    main_conf = reader.parseConfig('config')
    cfg = ConfigObj(dict=main_conf)
    origin_conf = reader.parseConfig('origin')

    if cfg.pyrocko_download() is True:
        # Displacement vs. velocity is decided by the configured quantity.
        wants_displacement = cfg.quantity() == 'displacement'
        station_meta = readpyrockostations(self.eventpath,
                                           wants_displacement, cfg)
    elif cfg.colesseo_input() is True:
        # Colosseo scenario input: stations come from the scenario folder
        # and the origin is overwritten from the scenario's first event.
        scenario = guts.load(filename=cfg.colosseo_scenario_yml())
        scenario_dir = cfg.colosseo_scenario_yml()[:-12]
        station_meta = readcolosseostations(scenario_dir)
        first_event = scenario.get_events()[0]
        origin_conf['strike'] = str(first_event.moment_tensor.strike1)
        origin_conf['rake'] = str(first_event.moment_tensor.rake1)
        origin_conf['dip'] = str(first_event.moment_tensor.dip1)
        origin_conf['lat'] = str(first_event.lat)
        origin_conf['lon'] = str(first_event.lon)
        origin_conf['depth'] = str(first_event.depth / 1000.)
    else:
        station_meta = readMetaInfoFile(self.eventpath)

    work_folder = createFolder(self.eventpath)
    filtered_meta = filterStations(station_meta, main_conf, origin_conf)
    km(main_conf, filtered_meta, work_folder, origin_conf, start_time)
    return True
def load(filter, step=None, path=None):
    """Load semblance ASC grids for an event and accumulate them.

    Parameters
    ----------
    filter : int or str
        Filter index used as the filename prefix of the ASC files.
    step : int or str, optional
        Time-step index; when given, only per-timestep files are read.
    path : str, optional
        Explicit semblance directory; defaults to
        ``events/<sys.argv[1]>/work/semblance/``.

    Returns
    -------
    tuple
        ``(data, data_int, data_boot, data_int_boot, path_in_str, maxs,
        datamax, count)`` -- last raw grid, accumulated grid, last and
        accumulated bootstrap grids, last file path read, global semblance
        maximum, semblance column of the maximum file and the number of
        files read in the first pass.

    NOTE(review): behaviour additionally depends on ``sys.argv`` -- phase
    flags (``--phases:...``), the mode in ``sys.argv[3]`` ('max' or
    'combined'), optional 'boot' in ``sys.argv[4]`` and an array name in
    ``sys.argv[5]``.
    """
    if path is None:
        rel = 'events/' + str(sys.argv[1]) + '/work/semblance/'
        evpath = 'events/' + str(sys.argv[1])
    else:
        rel = path
        evpath = path
    boot = False
    C = config.Config(evpath)
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    dimx = int(Config['dimx'])
    dimy = int(Config['dimy'])

    data = None
    data_int = None
    data_boot = None
    data_int_boot = None
    datamax = 0
    path_in_str = None  # fixed: previously unbound when no file matched

    # Phase selection from the command line; default is P.
    phase = "P"
    for argv in sys.argv:
        if argv == "--phases:S":
            phase = "S"
        if argv == "--phases:all":
            phase = ""
        if argv == "--phases:P,S":
            phase = ""
        if argv == "--phases:P":
            phase = "P"

    # Pass 1: scan the matching grids to find the file holding the global
    # semblance maximum; its semblance column is kept in ``datamax``.
    if step is None:
        pathlist = Path(rel).glob('*.ASC')
    else:
        pathlist = Path(rel).glob('*0%s.ASC' % step)
    maxs = 0.
    count = 0
    for p in sorted(pathlist):
        path_in_str = str(p)
        data = num.loadtxt(path_in_str, delimiter=' ', skiprows=5)
        maxd = num.max(data[:, 2])
        count = count + 1
        if maxs < maxd:
            maxs = maxd
            datamax = data[:, 2]

    if sys.argv[3] == 'max':
        # Pointwise maximum across all grids of the requested array.
        # fixed: the original applied '%' only to the last string fragment
        # ("'...' + s + '*.ASC' % filter"), which always raised TypeError
        # and silently forced the fallback pattern.
        if step is None:
            try:
                pathlist = Path(rel).glob('%s-%s*.ASC'
                                          % (filter, sys.argv[5]))
            except IndexError:  # no array name given
                pathlist = Path(rel).glob('%s-*%s.ASC' % (filter, phase))
        else:
            try:
                pathlist = Path(rel).glob('%s-%s00%s_*.ASC'
                                          % (filter, sys.argv[5], step))
            except IndexError:
                pathlist = Path(rel).glob('%s-*00%s_*%s.ASC'
                                          % (filter, step, phase))
        data_int = num.zeros(num.shape(data[:, 2]))
        for p in sorted(pathlist):
            path_in_str = str(p)
            data = num.loadtxt(path_in_str, delimiter=' ', skiprows=5)
            for i, k in enumerate(num.nan_to_num(data[:, 2])):
                if k > data_int[i]:
                    data_int[i] = k
                # Degenerate case: the overall maximum grid is all-zero.
                if num.max(datamax) == 0:
                    data_int[i] = 0
        try:
            if sys.argv[4] == 'boot':
                boot = True
                if step is None:
                    try:
                        pathlist = Path(rel).glob('%s-*boot*%s*.ASC'
                                                  % (filter, sys.argv[5]))
                    except IndexError:
                        pathlist = Path(rel).glob('%s-*boot*%s.ASC'
                                                  % (filter, phase))
                else:
                    try:
                        pathlist = Path(rel).glob(
                            '%s-*boot*%s00%s_*.ASC'
                            % (filter, sys.argv[5], step))
                    except IndexError:
                        pathlist = Path(rel).glob('%s-*boot00%s_*%s.ASC'
                                                  % (filter, step, phase))
                data_int_boot = num.zeros(num.shape(data[:, 2]))
                for p in sorted(pathlist):
                    path_in_str = str(p)
                    data_boot = num.loadtxt(path_in_str, delimiter=' ',
                                            skiprows=5)
                    # fixed: accumulate from the freshly loaded bootstrap
                    # grid into data_int_boot (original iterated ``data``
                    # and zeroed ``data_int``).
                    for i, k in enumerate(num.nan_to_num(data_boot[:, 2])):
                        if k > data_int_boot[i]:
                            data_int_boot[i] = k
                        if num.max(datamax) == 0:
                            data_int_boot[i] = 0
        except IndexError:
            pass  # no 'boot' argument given

    if sys.argv[3] == 'combined':
        # Pointwise sum across all non-bootstrap grids.
        if step is None:
            try:
                pathlist = Path(rel).glob('%s-%s*.ASC'
                                          % (filter, sys.argv[5]))
            except IndexError:
                pathlist = Path(rel).glob('%s*-%s*.ASC' % (filter, phase))
        else:
            try:
                pathlist = Path(rel).glob('%s-%s00%s_*.ASC'
                                          % (filter, sys.argv[5], step))
            except IndexError:
                pathlist = Path(rel).glob('%s-*00%s_*%s.ASC'
                                          % (filter, step, phase))
        data_int = num.zeros(num.shape(data[:, 2]))
        for p in sorted(pathlist):
            path_in_str = str(p)
            # Skip bootstrap files, recognised by the character at this
            # fixed filename offset.  fixed: the original used ``is not``
            # (identity) instead of ``!=``.
            if path_in_str[-14] != "o":
                data = num.loadtxt(path_in_str, delimiter=' ', skiprows=5)
                data_int += num.nan_to_num(data[:, 2])
        try:
            if sys.argv[4] == 'boot':
                boot = True
                if step is None:
                    try:
                        pathlist = Path(rel).glob('%s-*boot*%s*.ASC'
                                                  % (filter, sys.argv[5]))
                    except IndexError:
                        pathlist = Path(rel).glob('%s-*boot*.ASC' % filter)
                else:
                    try:
                        pathlist = Path(rel).glob(
                            '%s-*boot*%s00%s_*.ASC'
                            % (filter, sys.argv[5], step))
                    except IndexError:
                        pathlist = Path(rel).glob('%s-*boot*00%s_*.ASC'
                                                  % (filter, step))
                data_int_boot = num.zeros(num.shape(data[:, 2]))
                for p in sorted(pathlist):
                    path_in_str = str(p)
                    data_boot = num.loadtxt(path_in_str, delimiter=' ',
                                            skiprows=5)
                    data_int_boot += num.nan_to_num(data_boot[:, 2])
        except IndexError:
            pass  # no 'boot' argument given

    return (data, data_int, data_boot, data_int_boot, path_in_str, maxs,
            datamax, count)
def from_palantiri():
    """Convert palantiri semblance ASC output into a pyrocko ``Geometry``.

    Reads per-timestep semblance grids via :func:`load`, builds one square
    patch per grid node (at source depth), stacks the per-timestep semblance
    as a time-dependent property and dumps the result to ``geom.yaml``.

    Inputs come from ``sys.argv``: argv[1] is the event name, argv[3] an
    optional explicit semblance path (also consumed by ``load`` as the mode
    selector — NOTE(review): argv[3] doubles as 'max'/'combined' inside
    ``load``; verify the intended CLI contract).
    """
    km = 1000.  # NOTE(review): unused below; depth is scaled with a literal 1000.
    # Optional explicit path from the command line; fall back to the
    # conventional events/<name> layout.
    try:
        path = sys.argv[3]
        evpath = path
    except:
        path = None
        evpath = 'events/' + str(sys.argv[1])
    C = config.Config(evpath)
    Origin = C.parseConfig('origin')
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    # Timing parameters of the semblance run (seconds, per config).
    step = cfg.UInt('step')
    step2 = cfg.UInt('step_f2')
    duration = cfg.UInt('duration')
    forerun = cfg.UInt('forerun')
    deltat = step    # NOTE(review): deltat/deltat2 are never read below
    deltat2 = step2
    rel = 'events/' + str(sys.argv[1]) + '/work/semblance/'
    dimx = int(Config['dimx'])
    dimy = int(Config['dimy'])
    origin = OriginCfg(Origin)
    depth = origin.depth() * 1000.  # km -> m
    ev = event.Event(lat=origin.lat(), lon=origin.lon(), depth=depth,
                     time=util.str_to_time(origin.time()))
    # Load one full grid to establish geometry (node positions, counts).
    data, data_int, data_boot, data_int_boot, path_in_str, maxs, datamax, n_files = load(
        0, path=path)
    values_orig = data[:, 2]
    values_orig = num.append(values_orig, num.array([0., 0.]))
    # ASC column layout as used here: col 0 = lon, col 1 = lat, col 2 = semblance.
    lat_orig = data[:, 1]
    lon_orig = data[:, 0]
    ncorners = 4  # quadrilateral patches
    # NOTE(review): lon_grid_orig is built from lat values and vice versa —
    # looks like a lat/lon swap; confirm against the ASC writer before fixing.
    lon_grid_orig = num.linspace(num.min(lat_orig), num.max(lat_orig), (dimy))
    lat_grid_orig = num.linspace(num.min(lon_orig), num.max(lon_orig), dimx)
    # Number of time frames: from config when reading the default layout,
    # otherwise one frame per file found by ``load``.
    if path is None:
        ntimes = int((forerun + duration) / step)
    else:
        ntimes = n_files
    verts = []
    # Quarter grid spacings; NOTE(review): lon_diff/lat_diff are computed
    # but never used below.
    lon_diff = ((lon_orig)[dimy + 1] - (lon_orig)[0]) / 4.
    lat_diff = ((lat_orig)[1] - (lat_orig)[0]) / 4.
    # Patch edge length from the distance between neighbouring grid nodes.
    dist = orthodrome.distance_accurate50m(lat_grid_orig[1], lon_grid_orig[1],
                                           lat_grid_orig[0], lon_grid_orig[0])
    # One square patch (4 corners) per grid node, centred at the node,
    # all at source depth.
    for x, y in zip(lon_orig, lat_orig):
        xyz = ([dist / 2., dist / 2., depth], [-dist / 2., dist / 2., depth],
               [-dist / 2., -dist / 2., depth], [dist / 2., -dist / 2., depth])
        latlon = ([x, y], [x, y], [x, y], [x, y])
        patchverts = num.hstack((latlon, xyz))
        verts.append(patchverts)
    vertices = num.vstack(verts)
    npatches = int(len(vertices))  # *2?
    # Faces indexed front and back (flipped winding) so patches render
    # from both sides.
    faces1 = num.arange(ncorners * npatches, dtype='int64').reshape(npatches, ncorners)
    faces2 = num.fliplr(faces1)
    faces = num.vstack((faces2, faces1))
    srf_semblance_list = []
    # Collect the semblance column for every time step.
    for i in range(0, ntimes):
        if len(sys.argv) < 4:
            print("missing input arrayname")
        else:
            data, data_int, data_boot, data_int_boot, path_in_str, maxsb, datamaxb, n_files = load(
                0, step=i, path=path)
            srf_semblance = data[:, 2]
            srf_semblance = num.append(srf_semblance, num.array([0., 0.]))
            srf_semblance = duplicate_property(srf_semblance)
            srf_semblance_list.append(srf_semblance)
    # Shape (npoints, ntimes) as expected by Geometry.add_property.
    srf_semblance = num.asarray(srf_semblance_list).T
    srf_times = num.linspace(0, forerun + duration, ntimes)
    geom = Geometry(times=srf_times, event=ev)
    geom.setup(vertices, faces)
    sub_headers = tuple([str(i) for i in srf_times])
    geom.add_property((('semblance', 'float64', sub_headers)), srf_semblance)
    dump(geom, filename='geom.yaml')
def optimization(*params, **args):
    """Objective function for synthetic-source optimisation.

    Recomputes per-array synthetic semblance with the candidate source
    parameters (``parameter[0]``), collects them into a single semblance
    trace and returns a normalised L2 misfit against the observed
    semblance trace ``data``.

    All inputs arrive positionally in ``params`` (unpacked below);
    ``params[0]`` is skipped — presumably the optimiser's own handle or
    the candidate vector; TODO confirm against the caller.  ``args`` is
    accepted but unused.

    Returns a single scalar misfit suitable for minimisation.
    """
    # Positional unpacking of the frozen context passed by the optimiser.
    counter = params[1]
    Config = params[2]
    Wdf = params[3]
    FilterMeta = params[4]
    mint = params[5]
    maxt = params[6]
    TTTGridMap = params[7]
    Folder = params[8]
    Origin = params[9]
    ntimes = params[10]
    switch = params[11]
    ev = params[12]
    arrayfolder = params[13]
    syn_in = params[14]
    data = params[15]          # observed semblance samples (ydata below)
    evpath = params[16]
    XDict = params[17]
    RefDict = params[18]
    workdepth = params[19]
    filterindex = params[20]
    Wdfs = params[21]
    networks = Config['networks'].split(',')
    # parameter[0] carries the candidate source parameters forwarded to
    # doCalc_syn; the full tuple is converted via numpy to a plain list.
    params = num.asarray(params)
    parameter = num.ndarray.tolist(params)
    ASL_syn = []
    # Re-read configuration and station metadata for this event.
    C = config.Config (evpath)
    Config = C.parseConfig ('config')
    cfg = ConfigObj (dict=Config)
    if cfg.pyrocko_download() == True:
        Meta = C.readpyrockostations()#
    elif cfg.colesseo_input() == True:
        scenario = guts.load(filename=cfg.colosseo_scenario_yml())
        scenario_path = cfg.colosseo_scenario_yml()[:-12]
        Meta = C.readcolosseostations(scenario_path)
    else:
        Meta = C.readMetaInfoFile()
    l = 0  # index into Wdfs, advanced only for arrays that are processed
    for i in networks:
        arrayname = i
        arrayfolder = os.path.join (Folder['semb'],arrayname)
        network = Config[i].split('|')
        FilterMeta = ttt.filterStations (Meta,Config,Origin,network)
        # Need at least 3 stations for a usable array.
        if len(FilterMeta) < 3:
            continue
        W = XDict[i]
        refshift = RefDict[i]  # NOTE(review): unused below
        FilterMeta = cmpFilterMetavsXCORR (W, FilterMeta)
        Logfile.add ('BOUNDING BOX DIMX: %s DIMY: %s GRIDSPACING: %s \n'
                     % (Config['dimx'],Config['dimy'],Config['gridspacing']))
        # Travel-time grid precomputed per (event time, array, depth);
        # NOTE(review): pickle file is opened from a relative path.
        f = open('../tttgrid/tttgrid_%s_%s_%s.pkl'
                 % (ev.time, arrayname, workdepth), 'rb')
        TTTGridMap,mint,maxt = pickle.load(f)
        f.close()
        switch = filterindex
        tw = times.calculateTimeWindows (mint,maxt,Config,ev, switch)
        Wdf = Wdfs[l]
        # Synthetic semblance for this array with the candidate parameters.
        semb_syn = doCalc_syn (counter,Config,Wdf,FilterMeta,mint,maxt,
                               TTTGridMap, Folder,Origin,ntimes,switch, ev,
                               arrayfolder, syn_in, parameter[0])
        ASL_syn.append(semb_syn)
        counter += 1
        l += 1
    # Combine per-array synthetic semblances into one maximum trace.
    sembmax_syn = sembCalc.collectSemb(ASL_syn,Config,Origin,Folder,ntimes,
                                       len(networks),switch)
    misfit_list = []  # init a list for a all the singular misfits
    norm_list = []  # init a list for a all the singular normalizations
    taper = trace.CosFader(xfade=2.0)  # Cosine taper with fade in and out of 2s.
    bw_filter = trace.ButterworthResponse(corner=0.000055,  # in Hz
                                          order=4,
                                          type='high')  # "low"pass or "high"pass
    setup = trace.MisfitSetup(description='Misfit Setup',
                              norm=2,  # L1 or L2 norm
                              taper=taper,
                              filter=bw_filter,
                              domain='time_domain')
    nsamples = len(data)  # NOTE(review): unused below
    # Observed vs. synthetic semblance wrapped as pyrocko traces; the
    # station/channel/tmin values are placeholders for the misfit machinery.
    tmin = util.str_to_time('2010-02-20 15:15:30.100')
    tr = trace.Trace(station='TEST', channel='Z', deltat=0.5, tmin=tmin,
                     ydata=data)
    syn = trace.Trace(station='TEST', channel='Z', deltat=0.5, tmin=tmin,
                      ydata=sembmax_syn)
    misfit, norm = tr.misfit(candidate=syn, setup=setup)
    # calculate the misfit of a single observed trace with its synthetics
    # with the setup from above
    misfit_list.append(misfit), norm_list.append(norm)
    # append the misfit into a list
    # Sum all the misfits and normalize to get a single minimizable value.
    global_misfit_normed = num.sqrt(num.nansum((num.asarray(misfit_list))**2) /
                                    num.nansum((num.asarray(norm_list))**2))
    return global_misfit_normed