def command_init(args):
    parser, options, args = cl_parse('init', args)

    if len(args) == 0:
        parser.print_help()
        sys.exit(1)

    if args[0] == 'redeploy':
        if len(args) != 3:
            parser.error('incorrect number of arguments')

        source_dir, dest_dir = args[1:]

        try:
            source = gf.Store(source_dir)
        except gf.StoreError as e:
            die(e)

        config = copy.deepcopy(source.config)
        config.derived_from_id = source.config.id
        try:
            config_filenames = gf.store.Store.create_editables(
                dest_dir, config=config)

        except gf.StoreError as e:
            die(e)

        try:
            dest = gf.Store(dest_dir)
        except gf.StoreError as e:
            die(e)

        for k in source.extra_keys():
            source_fn = source.get_extra_path(k)
            dest_fn = dest.get_extra_path(k)
            shutil.copyfile(source_fn, dest_fn)

        logger.info(
            '(1) configure settings in files:\n %s'
            % '\n '.join(config_filenames))
        logger.info('(2) run "fomosto redeploy <source> <dest>", as needed')

    else:
        if len(args) != 2:
            parser.error('incorrect number of arguments')

        (modelling_code_id, store_dir) = args

        module, variant = fomo_wrapper_module(modelling_code_id)
        try:
            config_filenames = module.init(store_dir, variant)
        except gf.StoreError as e:
            die(e)

        logger.info(
            '(1) configure settings in files:\n %s'
            % '\n '.join(config_filenames))
        logger.info('(2) run "fomosto ttt" in directory "%s"' % store_dir)
        logger.info('(3) run "fomosto build" in directory "%s"' % store_dir)

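# Usage sketch for the command above, inferred from its argument handling
# and log messages; names in angle brackets are placeholders:
#
#   fomosto init <modelling_code_id> <store_dir>
#   fomosto init redeploy <source_dir> <dest_dir>
#
# The first form creates editable config files for a new store; the second
# derives the config of an existing store and copies its extra files, so
# that "fomosto redeploy <source> <dest>" can fill in the records later.
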
def test_create_default(self):
    d = mkdtemp(prefix='gfstore')
    self.tempdirs.append(d)
    ahfullgreen.init(d, None)
    store = gf.Store(d)
    store.make_ttt()
    ahfullgreen.build(d)

def test_timing(self):
    for typ, args, args_out_list in [
            ('a',
             (10*km, 1500*km),
             [(10*km, 5000*km), (30*km, 1500*km)]),
            ('b',
             (5*km, 10*km, 1500*km),
             [(100*km, 10*km, 1500*km),
              (5*km, 10*km, 5000*km),
              (5*km, 30*km, 1500*km)])]:

        store_dir = self.get_regional_ttt_store_dir(typ)
        store = gf.Store(store_dir)

        assert store.t('P', args) is not None
        self.assertEqual(store.t('last(S|P)', args), store.t('S', args))
        self.assertEqual(store.t('(S|P)', args), store.t('S', args))
        self.assertEqual(store.t('(P|S)', args), store.t('P', args))
        self.assertEqual(store.t('first(S|P)', args), store.t('P', args))

        with self.assertRaises(gf.NoSuchPhase):
            store.t('nonexistent', args)

        with self.assertRaises(AssertionError):
            store.t('P', (10*km,))

        for args_out in args_out_list:
            with self.assertRaises(gf.OutOfBounds):
                store.t('P', args_out)

def _create_benchmark_store(self):
    conf = gf.ConfigTypeA(
        id='benchmark_store',
        source_depth_min=0.,
        source_depth_max=2.,
        source_depth_delta=1.,
        distance_min=1.0,
        distance_max=5001.0,
        distance_delta=5.0,
        sample_rate=2.0,
        ncomponents=5)

    deltat = 1.0 / conf.sample_rate

    store_dir = mkdtemp(prefix='gfstore')
    self.tempdirs.append(store_dir)

    gf.Store.create(store_dir, config=conf)
    store = gf.Store(store_dir, 'w')
    for args in conf.iter_nodes():
        nsamples = int(round(args[1]))
        data = num.ones(nsamples)
        itmin = int(round(args[1]))
        tr = gf.GFTrace(data=data, itmin=itmin, deltat=deltat)
        store.put(args, tr)

    store.close()
    return store_dir

def _create_regional_ttt_store(self):
    conf = gf.ConfigTypeA(
        id='empty_regional',
        source_depth_min=0.,
        source_depth_max=20*km,
        source_depth_delta=10*km,
        distance_min=1000*km,
        distance_max=2000*km,
        distance_delta=10*km,
        sample_rate=2.0,
        ncomponents=10,
        earthmodel_1d=cake.load_model(),
        tabulated_phases=[
            gf.TPDef(id=id, definition=defi)
            for (id, defi) in [
                ('depthp', 'p'),
                ('pS', 'pS'),
                ('P', 'P'),
                ('S', 'S')]])

    store_dir = mkdtemp(prefix='gfstore')
    self.tempdirs.append(store_dir)

    gf.Store.create(store_dir, config=conf)
    store = gf.Store(store_dir)
    store.make_ttt()

    store.close()
    return store_dir

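# A minimal usage sketch (the store path is hypothetical): once travel-time
# tables have been built with make_ttt() as above, the tabulated phases can
# be queried through Store.t(). For a ConfigTypeA store the coordinate
# tuple is (source_depth, distance), as in the timing tests below.
from pyrocko import gf

km = 1000.

store = gf.Store('path/to/empty_regional')
print(store.t('P', (10*km, 1500*km)))           # stored P travel time [s]
print(store.t('first(P|S)', (10*km, 1500*km)))  # earlier of the two phases
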
def command_tttlsd(args):
    def setup(parser):
        pass

    parser, options, args = cl_parse('tttlsd', args, setup=setup)

    try:
        sphase_ids = args.pop()
    except Exception:
        parser.error('cannot get <phase> argument')

    try:
        phase_ids = [x.strip() for x in sphase_ids.split(',')]
    except gf.meta.InvalidTimingSpecification:
        parser.error('invalid phase specification: "%s"' % sphase_ids)

    store_dir = get_store_dir(args)

    try:
        store = gf.Store(store_dir)
        for phase_id in phase_ids:
            store.fix_ttt_holes(phase_id)

    except gf.StoreError as e:
        die(e)

def benchmark_get(self):
    store_dir = self.get_benchmark_store_dir()

    import pylab as lab
    for implementation in ('c', 'python'):
        store = gf.Store(store_dir, use_memmap=True)
        for nrepeats in (1, 2):
            data = []
            for distance in store.config.coords[1]:
                sdepths = num.repeat(store.config.coords[0],
                                     store.config.ncomponents)
                t = time.time()
                for repeat in range(nrepeats):
                    for sdepth in sdepths:
                        for icomp in range(1):
                            store.get((sdepth, distance, icomp),
                                      implementation=implementation)

                tnew = time.time()
                data.append((distance, tnew - t))

            if nrepeats != 1:
                d, t1 = num.array(data, dtype=float).T
                nread = nrepeats * store.config.ns[0]
                label = 'nrepeats %i, impl %s' % (nrepeats, implementation)
                print(label, num.mean(nread / t1))
                lab.plot(d, nread / t1, label=label)

    lab.legend()
    lab.show()

def command_decimate(args):
    def setup(parser):
        parser.add_option(
            '--config', dest='config_fn', metavar='FILE',
            help='use modified spatial sampling given in FILE')

        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')

    parser, options, args = cl_parse('decimate', args, setup=setup)
    try:
        decimate = int(args.pop())
    except Exception:
        parser.error('cannot get <factor> argument')

    store_dir = get_store_dir(args)

    config = None
    if options.config_fn:
        config = load_config(options.config_fn)

    try:
        store = gf.Store(store_dir)
        store.make_decimated(decimate, config=config, force=options.force,
                             show_progress=True)

    except gf.StoreError as e:
        die(e)

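# Sketch of the API call behind "fomosto decimate": derive a store with
# coarser spatial sampling from an existing one. The path and factor are
# hypothetical; make_decimated() is the same method exercised in
# test_pulse_decimate further below.
from pyrocko import gf

store = gf.Store('path/to/store')
store.make_decimated(2, show_progress=True)  # keep every 2nd spatial node
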
def dummy_homogeneous_store(self):
    if self._dummy_homogeneous_store is None:
        mod = cake.LayeredModel.from_scanlines(cake.read_nd_model_str('''
 0 6 3.46 3.0 1000 500
20 6 3.46 3.0 1000 500
'''.lstrip()))

        conf = gf.ConfigTypeA(
            id='empty_homogeneous',
            source_depth_min=0.,
            source_depth_max=20*km,
            source_depth_delta=10*km,
            distance_min=1000*km,
            distance_max=2000*km,
            distance_delta=10*km,
            sample_rate=2.0,
            ncomponents=10,
            earthmodel_1d=mod)

        store_dir = mkdtemp(prefix='gfstore')
        self.tempdirs.append(store_dir)

        gf.Store.create(store_dir, config=conf)
        self._dummy_homogeneous_store = gf.Store(store_dir)

    return self._dummy_homogeneous_store

def command_redeploy(args):
    parser, options, args = cl_parse('redeploy', args)

    if len(args) != 2:
        parser.print_help()
        sys.exit(1)

    source_store_dir, dest_store_dir = args

    try:
        source = gf.Store(source_store_dir)
    except gf.StoreError as e:
        die(e)

    try:
        gf.store.Store.create_dependants(dest_store_dir)
    except gf.StoreError:
        pass

    try:
        dest = gf.Store(dest_store_dir, 'w')
    except gf.StoreError as e:
        die(e)

    show_progress = True

    if show_progress:
        pbar = util.progressbar('redeploying', dest.config.nrecords)

    for i, args in enumerate(dest.config.iter_nodes()):
        try:
            tr = source.get(args, interpolation='off')
            dest.put(args, tr)

        except (gf.meta.OutOfBounds, gf.store.NotAllowedToInterpolate) as e:
            logger.debug('skipping %s, (%s)' % (sindex(args), e))

        except gf.store.StoreError as e:
            logger.warning('cannot insert %s, (%s)' % (sindex(args), e))

        if show_progress:
            pbar.update(i + 1)

    if show_progress:
        pbar.finish()

def command_extract(args):
    def setup(parser):
        parser.add_option(
            '--format', dest='format', default='mseed',
            choices=['mseed', 'sac', 'text', 'yaff'],
            help='export to format "mseed", "sac", "text", or "yaff". '
                 'Default is "mseed".')

        fndfl = 'extracted/%(irecord)s_%(args)s.%(extension)s'
        parser.add_option(
            '--output', dest='output_fn', default=fndfl, metavar='TEMPLATE',
            help='output path template [default: "%s"]' % fndfl)

    parser, options, args = cl_parse('extract', args, setup=setup)
    try:
        sdef = args.pop()
    except Exception:
        parser.error('cannot get <selection> argument')

    try:
        gdef = gf.meta.parse_grid_spec(sdef)
    except gf.meta.GridSpecError as e:
        die(e)

    store_dir = get_store_dir(args)

    extensions = {
        'mseed': 'mseed',
        'sac': 'sac',
        'text': 'txt',
        'yaff': 'yaff'}

    try:
        store = gf.Store(store_dir)
        for args in store.config.iter_extraction(gdef):
            gtr = store.get(args)
            if gtr:
                tr = trace.Trace(
                    '', '', '',
                    util.zfmt(store.config.ncomponents) % args[-1],
                    ydata=gtr.data,
                    deltat=gtr.deltat,
                    tmin=gtr.deltat * gtr.itmin)

                additional = dict(
                    args='_'.join('%g' % x for x in args),
                    irecord=store.str_irecord(args),
                    extension=extensions[options.format])

                io.save(
                    tr, options.output_fn,
                    format=options.format,
                    additional=additional)

    except (gf.meta.GridSpecError, gf.StoreError, gf.meta.OutOfBounds) as e:
        die(e)

def command_tttextract(args):
    def setup(parser):
        parser.add_option(
            '--output', dest='output_fn', metavar='TEMPLATE',
            help='output to text files instead of stdout '
                 '(example TEMPLATE: "extracted/%(args)s.txt")')

    parser, options, args = cl_parse('tttextract', args, setup=setup)
    try:
        sdef = args.pop()
    except Exception:
        parser.error('cannot get <selection> argument')

    try:
        sphase = args.pop()
    except Exception:
        parser.error('cannot get <phase> argument')

    try:
        phases = [gf.meta.Timing(x.strip()) for x in sphase.split(',')]
    except gf.meta.InvalidTimingSpecification:
        parser.error('invalid phase specification: "%s"' % sphase)

    try:
        gdef = gf.meta.parse_grid_spec(sdef)
    except gf.meta.GridSpecError as e:
        die(e)

    store_dir = get_store_dir(args)

    try:
        store = gf.Store(store_dir)
        for args in store.config.iter_extraction(gdef, level=-1):
            s = ['%e' % x for x in args]
            for phase in phases:
                t = store.t(phase, args)
                if t is not None:
                    s.append('%e' % t)
                else:
                    s.append('nan')

            if options.output_fn:
                d = dict(
                    args='_'.join('%e' % x for x in args),
                    extension='txt')

                fn = options.output_fn % d
                util.ensuredirs(fn)
                with open(fn, 'w') as f:
                    f.write(' '.join(s))
                    f.write('\n')
            else:
                print(' '.join(s))

    except (gf.meta.GridSpecError, gf.StoreError, gf.meta.OutOfBounds) as e:
        die(e)

def test_timing_new_syntax(self):
    store_dir = self.get_regional_ttt_store_dir()
    store = gf.Store(store_dir)

    args = (10*km, 1500*km)
    assert numeq(store.t('stored:P', args), store.t('cake:P', args), 0.1)
    assert numeq(store.t('vel_surface:15', args), 100., 0.1)

def command_build(args):
    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')

        parser.add_option(
            '--nworkers', dest='nworkers', type='int', metavar='N',
            help='run N worker processes in parallel')

        parser.add_option(
            '--continue', dest='continue_', action='store_true',
            help='continue suspended build')

        parser.add_option(
            '--step', dest='step', type='int', metavar='I',
            help='process step number I')

        parser.add_option(
            '--block', dest='iblock', type='int', metavar='I',
            help='process block number I')

    parser, options, args = cl_parse('build', args, setup=setup)

    store_dir = get_store_dir(args)
    try:
        if options.step is not None:
            step = options.step - 1
        else:
            step = None

        if options.iblock is not None:
            iblock = options.iblock - 1
        else:
            iblock = None

        store = gf.Store(store_dir)
        module, _ = fomo_wrapper_module(store.config.modelling_code_id)
        module.build(
            store_dir,
            force=options.force,
            nworkers=options.nworkers,
            continue_=options.continue_,
            step=step,
            iblock=iblock)

    except gf.StoreError as e:
        die(e)

def test_get_shear_moduli(self):
    store_dir = self.get_regional_ttt_store_dir()
    store = gf.Store(store_dir)

    sample_points = num.empty((20, 3))
    sample_points[:, 2] = num.linspace(0, store.config.coords[0].max(), 20)

    for interp in ('nearest_neighbor', 'multilinear'):
        store.config.get_shear_moduli(
            lat=0., lon=0.,
            points=sample_points,
            interpolation=interp)

def command_check(args):
    parser, options, args = cl_parse('check', args)

    store_dir = get_store_dir(args)

    try:
        store = gf.Store(store_dir)
        problems = store.check(show_progress=True)
        if problems:
            die('problems detected with gf store: %s' % store_dir)

    except gf.StoreError as e:
        die(e)

def test_ttt_lsd(self):
    for typ in ['a', 'b']:
        store_dir = self.get_regional_ttt_store_dir(typ)

        phase_id = 'P'

        store = gf.Store(store_dir)
        ph = store.get_stored_phase(phase_id)
        assert ph.check_holes()

        store.fix_ttt_holes(phase_id)

        ph = store.get_stored_phase(phase_id + '.lsd')
        assert not ph.check_holes()

def command_stats(args):
    parser, options, args = cl_parse('stats', args)

    store_dir = get_store_dir(args)

    try:
        store = gf.Store(store_dir)
        s = store.stats()

    except gf.StoreError as e:
        die(e)

    for k in store.stats_keys:
        print('%s: %s' % (k, s[k]))

def command_ttt(args):
    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')

    parser, options, args = cl_parse('ttt', args, setup=setup)

    store_dir = get_store_dir(args)
    try:
        store = gf.Store(store_dir)
        store.make_ttt(force=options.force)

    except gf.StoreError as e:
        die(e)

def command_upgrade(args):
    parser, options, args = cl_parse('upgrade', args)

    store_dirs = get_store_dirs(args)
    try:
        for store_dir in store_dirs:
            store = gf.Store(store_dir)
            nup = store.upgrade()
            if nup == 0:
                print('%s: already up-to-date.' % store_dir)
            else:
                print('%s: %i file%s upgraded.' % (
                    store_dir, nup, ['s', ''][nup == 1]))

    except gf.StoreError as e:
        die(e)

def benchmark_sum(self):
    store_dir = self.get_benchmark_store_dir()

    import pylab as lab
    for implementation in ('c', 'python'):
        store = gf.Store(store_dir, use_memmap=True)
        for weight in (0.0, 1.0):
            for nrepeats in (1, 2):
                data = []
                for distance in store.config.coords[1]:
                    n = store.config.ncomponents * store.config.ns[0]
                    sdepths = num.repeat(store.config.coords[0],
                                         store.config.ncomponents)
                    distances = num.repeat([distance], n)
                    comps = num.tile(store.config.coords[2],
                                     store.config.ns[0])
                    args = (sdepths, distances, comps)
                    weights = num.repeat([weight], n)
                    delays = num.arange(n, dtype=float) \
                        * store.config.deltat * 0.5

                    t = time.time()
                    for repeat in range(nrepeats):
                        store.sum(args, delays, weights,
                                  implementation=implementation)

                    tnew = time.time()
                    data.append(
                        ((distance - store.config.distance_min) + 1,
                         tnew - t))

                if nrepeats != 1:
                    d, t1 = num.array(data, dtype=float).T
                    nread = nrepeats * store.config.ns[0] \
                        * store.config.ncomponents
                    label = 'nrepeats %i, weight %g, impl %s' % (
                        nrepeats, weight, implementation)
                    print(label, num.mean(nread / t1))
                    lab.plot(d, nread / t1, label=label)

    lab.legend()
    lab.show()

def test_timing_new_syntax(self):
    for typ, args in [
            ('a', (10*km, 1500*km)),
            ('b', (5*km, 10*km, 1500*km))]:

        store_dir = self.get_regional_ttt_store_dir(typ)
        store = gf.Store(store_dir)

        assert numeq(
            store.t('stored:P', args), store.t('cake:P', args), 0.1)
        assert numeq(store.t('vel_surface:15', args), 100., 0.1)
        assert numeq(store.t('+0.1S', args), 150., 0.1)
        assert numeq(
            store.t('{stored:P}+0.1S', args),
            store.t('{cake:P}', args)
            + store.t('{vel_surface:10}', args),
            0.1)

def command_qc(args):
    parser, options, args = cl_parse('qc', args)

    store_dir = get_store_dir(args)

    try:
        store = gf.Store(store_dir)
        s = store.stats()
        if s['empty'] != 0:
            print('has empty records')

        for aname in ['author', 'author_email', 'description', 'references']:
            if not getattr(store.config, aname):
                print('%s empty' % aname)

    except gf.StoreError as e:
        die(e)

def test_pulse_decimate(self):
    store_dir = self.get_pulse_store_dir()

    store = gf.Store(store_dir)
    store.make_decimated(2)

    engine = gf.LocalEngine(store_dirs=[store_dir])
    # pulse = engine.get_store_extra(None, 'pulse')

    source = gf.ExplosionSource(
        time=0.0,
        depth=100.,
        moment=1.0)

    targets = [
        gf.Target(
            codes=('', 'STA', '%s' % sample_rate, component),
            sample_rate=sample_rate,
            north_shift=500.,
            east_shift=0.)

        for component in 'N'
        for sample_rate in [None, store.config.sample_rate / 2.0]
    ]

    response = engine.process(source, targets)

    trs = []
    for source, target, tr in response.iter_results():
        tr.extend(0., 1.)
        if target.sample_rate is None:
            tr.downsample_to(2. / store.config.sample_rate, snap=True)

        trs.append(tr)

    tmin = max(tr.tmin for tr in trs)
    tmax = min(tr.tmax for tr in trs)

    for tr in trs:
        tr.chop(tmin, tmax)

    num.testing.assert_almost_equal(trs[0].ydata, trs[1].ydata, 2)

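# Sketch of the forward-modelling pattern used above, reduced to a single
# target. The store path is hypothetical; source, target and processing
# calls follow the ones in test_pulse_decimate.
from pyrocko import gf

engine = gf.LocalEngine(store_dirs=['path/to/pulse_store'])
source = gf.ExplosionSource(time=0.0, depth=100., moment=1.0)
target = gf.Target(codes=('', 'STA', '', 'N'),
                   north_shift=500., east_shift=0.)

response = engine.process(source, [target])
for source_, target_, tr in response.iter_results():
    print(tr.tmin, tr.deltat, len(tr.ydata))
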
def dummy_store(self):
    if self._dummy_store is None:
        conf = gf.ConfigTypeA(
            id='empty_regional',
            source_depth_min=0.,
            source_depth_max=20*km,
            source_depth_delta=1*km,
            distance_min=1*km,
            distance_max=2000*km,
            distance_delta=1*km,
            sample_rate=2.0,
            ncomponents=10)

        store_dir = mkdtemp(prefix='gfstore')
        self.tempdirs.append(store_dir)
        gf.Store.create(store_dir, config=conf)
        self._dummy_store = gf.Store(store_dir)

    return self._dummy_store

def command_addref(args):
    parser, options, args = cl_parse('addref', args)

    store_dirs = []
    filenames = []
    for arg in args:
        if os.path.isdir(arg):
            store_dirs.append(arg)
        elif os.path.isfile(arg):
            filenames.append(arg)
        else:
            die('invalid argument: %s' % arg)

    if not store_dirs:
        store_dirs.append('.')

    references = []
    try:
        # iterate over the collected BibTeX files, not the raw argument list
        for filename in filenames:
            references.extend(
                gf.meta.Reference.from_bibtex(filename=filename))
    except ImportError:
        die('pybtex module must be installed to use this function')

    if not references:
        die('no references found')

    for store_dir in store_dirs:
        try:
            store = gf.Store(store_dir)
            ids = [ref.id for ref in store.config.references]
            for ref in references:
                if ref.id in ids:
                    die('duplicate reference id: %s' % ref.id)

                ids.append(ref.id)
                store.config.references.append(ref)

            store.save_config(make_backup=True)

        except gf.StoreError as e:
            die(e)

def dummy_store(self):
    if self._dummy_store is None:
        conf = gf.ConfigTypeA(
            id='empty_regional',
            source_depth_min=0.,
            source_depth_max=20*km,
            source_depth_delta=10*km,
            distance_min=1000*km,
            distance_max=2000*km,
            distance_delta=10*km,
            sample_rate=2.0,
            ncomponents=10,
            earthmodel_1d=cake.load_model(crust2_profile=(50., 10.)))

        store_dir = mkdtemp(prefix='gfstore')
        self.tempdirs.append(store_dir)
        gf.Store.create(store_dir, config=conf)
        self._dummy_store = gf.Store(store_dir)

    return self._dummy_store

def test_timing(self):
    store_dir = self.get_regional_ttt_store_dir()
    store = gf.Store(store_dir)

    args = (10*km, 1500*km)
    assert store.t('P', args) is not None
    self.assertEqual(store.t('last(S|P)', args), store.t('S', args))
    self.assertEqual(store.t('(S|P)', args), store.t('S', args))
    self.assertEqual(store.t('(P|S)', args), store.t('P', args))
    self.assertEqual(store.t('first(S|P)', args), store.t('P', args))

    with self.assertRaises(gf.NoSuchPhase):
        store.t('nonexistent', args)

    with self.assertRaises(AssertionError):
        store.t('P', (10*km,))

    with self.assertRaises(gf.OutOfBounds):
        store.t('P', (10*km, 5000*km))

    with self.assertRaises(gf.OutOfBounds):
        store.t('P', (30*km, 1500*km))

def _create_pulse_store(self):
    conf = gf.ConfigTypeB(
        id='pulse',
        receiver_depth_min=0.,
        receiver_depth_max=10.,
        receiver_depth_delta=10.,
        source_depth_min=0.,
        source_depth_max=1000.,
        source_depth_delta=10.,
        distance_min=10.,
        distance_max=1000.,
        distance_delta=10.,
        sample_rate=200.,
        ncomponents=2,
        component_scheme='elastic2')

    pulse = PulseConfig()

    # fnyq_spatial = pulse.velocity / math.sqrt(conf.distance_delta**2 +
    #                                           conf.source_depth_delta**2)

    store_dir = mkdtemp(prefix='gfstore')
    self.tempdirs.append(store_dir)

    gf.Store.create(store_dir, config=conf, force=True,
                    extra={'pulse': pulse})

    deltat = conf.deltat

    store = gf.Store(store_dir, mode='w')
    for args in store.config.iter_nodes(level=-1):
        rdepth, sdepth, surfdist = args
        dist = math.sqrt((rdepth - sdepth)**2 + surfdist**2)

        tarr = dist / pulse.velocity

        tmin = tarr - 5 * pulse.fwhm
        tmax = tarr + 5 * pulse.fwhm
        itmin = int(num.floor(tmin / deltat))
        itmax = int(num.ceil(tmax / deltat))
        tmin = itmin * deltat
        tmax = itmax * deltat
        nsamples = itmax - itmin + 1

        t = tmin + num.arange(nsamples) * deltat

        data = pulse.evaluate(dist, t)

        # project the pulse onto the two components of the 'elastic2' scheme
        phi = math.atan2(rdepth - sdepth, surfdist)
        components = [data * math.cos(phi), data * math.sin(phi)]

        for icomponent, cdata in enumerate(components):
            is_zero = num.all(cdata == 0.0)

            tr = gf.GFTrace(data=cdata, itmin=itmin, deltat=deltat,
                            is_zero=is_zero)

            store.put(args + (icomponent,), tr)

    store.close()
    return store_dir

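# Sketch: reading one Green's function trace back out of the pulse store
# created above. The path and node coordinates are hypothetical (they must
# lie on the configured grid); for a ConfigTypeB store the lookup tuple is
# (receiver_depth, source_depth, distance) plus the component index, the
# same tuple passed to store.put() above.
from pyrocko import gf

store = gf.Store('path/to/pulse_store')
gtr = store.get((0., 500., 500., 0))
print(gtr.itmin, gtr.deltat, gtr.data.shape)
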
def test_new_static(self):
    from pyrocko.gf import store_ext
    benchmark.show_factor = True

    store = gf.Store(self.get_pscmp_store_dir())
    store.open()
    src_length = 2 * km
    src_width = 2 * km
    ntargets = 20

    north_shifts, east_shifts = num.meshgrid(
        num.linspace(-20*km, 20*km, ntargets),
        num.linspace(-20*km, 20*km, ntargets))

    interp = ['nearest_neighbor', 'multilinear']
    interpolation = interp[1]

    source = gf.RectangularSource(
        lat=0., lon=0.,
        depth=5*km, north_shift=0., east_shift=0.,
        anchor='top',
        width=src_width, length=src_length)

    static_target = gf.GNSSCampaignTarget(
        north_shifts=north_shifts,
        east_shifts=east_shifts,
        lats=num.zeros_like(north_shifts),
        lons=num.zeros_like(north_shifts))

    targets = static_target.get_targets()

    dsource = source.discretize_basesource(store, targets[0])
    mts_arr = dsource.m6s
    delays_s = dsource.times.astype(num.float64)
    pos = 1

    scheme_desc = ['displacement.n', 'displacement.e', 'displacement.d']

    benchmark.clear()

    def run(interpolation=interp[0], nthreads=1, niter=1):

        @benchmark.labeled(' sum_statics %d cpu (%s)'
                           % (nthreads, interpolation))
        def fwd_model_seperate(interpolation=interp[0]):
            args = (store.cstore, dsource.coords5(), mts_arr,
                    static_target.coords5, 'elastic10', interpolation,
                    nthreads)

            sum_params = store_ext.make_sum_params(*args)

            out = {}
            for icomp, comp in enumerate(scheme_desc):
                weights, irecords = sum_params[icomp]
                out[comp] = store_ext.store_sum_static(
                    store.cstore, irecords, delays_s, weights,
                    pos, ntargets**2, nthreads)

            return out

        @benchmark.labeled('calc_statics %d cpu (%s)'
                           % (nthreads, interpolation))
        def fwd_model_unified(interpolation=interp[0]):
            out = {}
            res = store_ext.store_calc_static(
                store.cstore, dsource.coords5(), mts_arr, dsource.times,
                static_target.coords5, 'elastic10', interpolation,
                pos, nthreads)

            for comp, r in zip(scheme_desc, res):
                out[comp] = r

            return out

        for _ in range(niter):
            res1 = fwd_model_seperate(interpolation)

        for _ in range(niter):
            res2 = fwd_model_unified(interpolation)

        # both code paths must produce identical displacements
        for r1, r2 in zip(res1.values(), res2.values()):
            num.testing.assert_equal(r1, r2)

    for interpolation in interp:
        # the per-thread-count benchmark below is deliberately disabled;
        # remove the continue to run it
        continue
        for nthreads in [1, 2, 4, 8, 0]:
            run(interpolation, nthreads)
        print(benchmark)
        benchmark.clear()

    run(interpolation, nthreads=0, niter=30)
    print(benchmark)

    def plot(displ):
        # helper for manual inspection, not called by the test
        import matplotlib.pyplot as plt
        size = int(num.sqrt(displ.size))
        fig = plt.figure()
        ax = fig.gca()
        ax.imshow(displ.reshape((size, size)))
        plt.show()