def setup(self):
    """Prepare the modelling run from the configured input files.

    Loads sources and targets, optionally overrides the GF store id and
    recenters sources, resolves the single GF store to use, filters
    out-of-bounds sources and derives the number of samples per trace.
    Mutates ``self.sources``, ``self.targets``, ``self.store`` and
    ``self.n_samples``.
    """
    self.sources = guts.load(filename=self.fn_sources)
    self.targets = []
    if self.fn_targets:
        self.targets.extend(guts.load(filename=self.fn_targets))
    if self.fn_stations:
        stats = load_stations(self.fn_stations)
        self.targets.extend(self.cast_stations_to_targets(stats))
    if self.store_id:
        # An explicitly configured store id overrides whatever the
        # targets were loaded with.
        for t in self.targets:
            t.store_id = self.store_id
    if self.center_sources:
        self.move_sources_to_station_center()
    self.config.channels = [t.codes for t in self.targets]
    store_ids = [t.store_id for t in self.targets]
    store_id = set(store_ids)
    # Only a single GF store is supported for now.
    assert len(store_id) == 1, 'More than one store used. Not \
implemented yet'
    self.store = self.engine.get_store(store_id.pop())
    # Drop sources that fall outside the store's valid bounds.
    self.sources = filter_oob(self.sources, self.targets, self.store.config)
    # Prefer an explicitly requested sampling interval over the store's.
    dt = self.config.deltat_want or self.store.config.deltat
    self.n_samples = int((self.config.sample_length + self.config.tpad) / dt)
def command_fill(args):
    """Fill a scenario project with synthetic data and render its map."""
    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')

    parser, options, args = cl_parse('fill', args, setup=setup)
    if not args:
        args.append('.')

    scenario_path = get_scenario_yml(args[0])
    if not scenario_path:
        parser.print_help()
        sys.exit(1)

    project_dir = args[0]
    engine = get_engine([op.join(project_dir, 'gf_stores')])

    scenario = guts.load(filename=scenario_path)
    scenario.init_modelling(engine)
    scenario.ensure_gfstores(interactive=True)
    scenario.dump_data(path=project_dir, overwrite=options.force)
    scenario.make_map(op.join(project_dir, 'map.pdf'))
def command_fill(args):
    """Fill a scenario project with synthetic data and render its map."""
    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')

    parser, options, args = cl_parse('fill', args, setup=setup)
    if not args:
        args.append('.')

    scenario_path = get_scenario_yml(args[0])
    if not scenario_path:
        parser.print_help()
        sys.exit(1)

    project_dir = op.abspath(args[0])
    logger.info('Initializing gf.LocalEngine...')
    engine = get_engine()

    scenario = guts.load(filename=scenario_path)
    scenario.init_modelling(engine)
    scenario.dump_data(path=project_dir, overwrite=options.force)
    scenario.make_map(op.join(project_dir, 'map.pdf'))
def init():
    """Run the external clustering scripts for the configured event path.

    Executes ``cluster2.py`` once per configured run, then
    ``evaluateCluster.py`` on the resulting cluster directory.
    """
    C = config.Config(options.evpath)
    print(options.evpath)
    # parseConfig('config') is called for its side effects only;
    # its return value was previously bound but never used.
    C.parseConfig('config')
    yaml_file = C.parseConfig('yaml')
    cfg = guts.load(filename=yaml_file[0])
    tests = int(cfg.config_cluster.runs)

    import palantiri
    path = palantiri.__path__

    at = os.path.join(path[0], 'cluster/cluster2.py')
    cmd = sys.executable + ' ' + at + ' -f ' + options.evpath
    print('cmd = ', cmd)
    for i in range(tests):
        print('RUN: ', i)
        os.system(cmd)

    # NOTE: a dead assignment building an 'evaluatecluster.py' command
    # string was removed here; it was overwritten before ever being used.
    at = os.path.join(path[0], 'cluster/evaluateCluster.py')
    cmd = sys.executable + ' ' + at + ' -f ' + os.path.join(
        options.evpath, 'cluster')
    os.system(cmd)
def load_config(project_dir, mode):
    """
    Load configuration file.

    Parameters
    ----------
    project_dir : str
        path to the directory of the configuration file
    mode : str
        type of optimization problem: 'Geometry' / 'Static'/ 'Kinematic'

    Returns
    -------
    :class:`BEATconfig`
    """
    config_fn = os.path.join(project_dir, 'config_' + mode + '.yaml')
    config = load(filename=config_fn)

    hypers = config.problem_config.hyperparameters
    if hypers is None or len(hypers) == 0:
        # Fill in missing hyper parameters and persist them.
        config.update_hypers()
        logger.info('Updated hyper parameters!')
        dump(config, filename=config_fn)

    return config
def run(self):
    """Attach to a running Grond environment and monitor it in a loop.

    Polls the optimisation history roughly ten times per second until
    ``self.sig_terminate`` is set. Returns early if no environment can
    be discovered under ``self.rundir``.
    """
    logger.info('Waiting to follow environment %s...' % self.rundir)
    env = Environment.discover(self.rundir)
    if env is None:
        logger.error('Could not attach to Grond environment.')
        return

    self.environment = env
    self.history = self.environment.get_history()

    optimiser_fn = op.join(self.rundir, 'optimiser.yaml')
    self.optimiser = guts.load(filename=optimiser_fn)
    self.problem = self.history.problem
    self.niter = self.optimiser.niterations

    self.starttime = time.time()
    self.last_update = self.starttime

    self.history.add_listener(self)

    with TerminalMonitor(10) as tm:
        self._tm = tm
        # Fix: removed an unused iteration counter that was incremented
        # every pass but never read.
        while True:
            self.history.update()
            time.sleep(0.1)
            if self.sig_terminate.is_set():
                break

    logger.debug('Monitor thread exiting.')
def load_config(project_dir, mode, update=False):
    """
    Load configuration file.

    Parameters
    ----------
    project_dir : str
        path to the directory of the configuration file
    mode : str
        type of optimization problem: 'geometry' / 'static'/ 'kinematic'

    Returns
    -------
    :class:`BEATconfig`
    """
    config_fn = os.path.join(project_dir, 'config_' + mode + '.yaml')

    try:
        config = load(filename=config_fn)
    except IOError:
        raise IOError('Cannot load config, file %s'
                      ' does not exist!' % config_fn)

    config.problem_config.validate_priors()

    if update:
        # Recompute hyper parameters and persist the updated config.
        config.update_hypers()
        logger.info('Updated hyper parameters! Previous hyper'
                    ' parameter bounds are invalid now!')
        dump(config, filename=config_fn)

    return config
def testIOEvent(self):
    """Round-trip an Event through guts dump/load and compare fields."""
    tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
    fn = pjoin(tempdir, 'event.txt')
    e1 = model.Event(
        10., 20., 1234567890., 'bubu',
        region='taka tuka land',
        moment_tensor=moment_tensor.MomentTensor(strike=45., dip=90),
        magnitude=5.1,
        magnitude_type='Mw')
    guts.dump(e1, filename=fn)
    e2 = guts.load(filename=fn)
    # Fix: a duplicated region comparison was removed.
    assert e1.region == e2.region
    assert e1.name == e2.name
    assert e1.lat == e2.lat
    assert e1.lon == e2.lon
    assert e1.time == e2.time
    assert e1.magnitude == e2.magnitude
    assert e1.magnitude_type == e2.magnitude_type
    assert e1.get_hash() == e2.get_hash()

    fn2 = pjoin(tempdir, 'events.txt')
    guts.dump_all([e1, e2], filename=fn2)

    # Loading a single event from a two-event file must fail.
    with self.assertRaises(model.OneEventRequired):
        model.load_one_event(fn2)

    shutil.rmtree(tempdir)
def get_scenario(self):
    """Return the cached scenario, creating or loading it on first use."""
    if self._scenario:
        return self._scenario

    if self.rebuild:
        # Re-use a previously generated scenario from disk.
        scenario_file = op.join(
            self.project_dir, 'data', 'scenario', 'scenario.yml')
        self._scenario = guts.load(filename=scenario_file)
        return self._scenario

    if not self.observations:
        raise AttributeError(
            'No observations set, use .add_observation(Observation)')
    if not self.problem:
        raise AttributeError(
            'No Source Problem set, use .set_problem(Problem).')

    target_gens = [obs.get_scenario_target_generator()
                   for obs in self.observations]
    self._scenario = scenario.ScenarioGenerator(
        center_lat=self.center_lat,
        center_lon=self.center_lon,
        radius=self.radius,
        target_generators=target_gens,
        source_generator=self.problem.get_scenario_source_generator())

    return self._scenario
def load(cls, filename):
    """Load a kite scene from file ``filename.[npz,yml]`` structure.

    :param filename: Filenames the scene data is saved under
    :type filename: str
    :returns: Scene object from data resources
    :rtype: :class:`~kite.Scene`
    """
    filename = op.abspath(filename)
    stem = op.splitext(filename)[0]

    # Binary arrays live in <stem>.npz, the configuration in <stem>.yml.
    try:
        arrays = num.load("%s.npz" % stem)
        displacement = arrays["arr_0"]
        theta = arrays["arr_1"]
        phi = arrays["arr_2"]
    except IOError:
        raise UserIOWarning("Could not load data from %s.npz" % stem)

    try:
        config = load(filename="%s.yml" % stem)
        config.meta.filename = filename
    except IOError:
        raise UserIOWarning("Could not load %s.yml" % stem)

    scene = cls(
        displacement=displacement,
        theta=theta,
        phi=phi,
        config=config)
    scene._log.debug("Loading from %s[.npz,.yml]", stem)
    scene.meta.filename = filename
    scene._testImport()
    return scene
def load(cls, path):
    """Load a plot configuration collection from a YAML file."""
    obj = load(filename=path)
    if not isinstance(obj, PlotConfigCollection):
        raise GrondError(
            'invalid plot collection configuration in file "%s"' % path)

    return obj
def command_fill(args):
    """Fill a scenario project with synthetic data and render its maps."""
    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')

    parser, options, args = cl_parse('fill', args, setup=setup)
    if not args:
        args.append('.')

    scenario_path = get_scenario_yml(args[0])
    if not scenario_path:
        parser.print_help()
        sys.exit(1)

    project_dir = args[0]
    engine = get_engine([op.join(project_dir, 'gf_stores')])

    scenario = guts.load(filename=scenario_path)
    scenario.init_modelling(engine)
    scenario.ensure_gfstores(interactive=True)
    scenario.dump_data(path=project_dir, overwrite=options.force)

    # Render both PDF and PostScript versions of the map.
    map_files = [op.join(project_dir, 'map.pdf'),
                 op.join(project_dir, 'map.ps')]
    scenario.make_map(map_files)
def report_index(report_config=None):
    """Rebuild the top-level index over all report directories.

    Collects each report's ``index.yaml``, writes ``report_list.yaml``
    and ``version_info.yaml`` into the reports base path and copies the
    web app files there.

    :param report_config: optional :class:`ReportConfig`; defaults to a
        fresh instance.
    """
    if report_config is None:
        report_config = ReportConfig()

    reports_base_path = report_config.reports_base_path
    reports = []
    for report_path in iter_report_dirs(reports_base_path):
        fn = op.join(report_path, 'index.yaml')
        # Use op.exists for consistency with the rest of the function.
        if not op.exists(fn):
            # Fix: logger.warn is a deprecated alias of logger.warning.
            logger.warning(
                'Skipping indexing of incomplete report: %s' % report_path)
            continue

        logger.info('Indexing %s...' % report_path)
        rie = guts.load(filename=fn)
        report_relpath = op.relpath(report_path, reports_base_path)
        rie.path = report_relpath
        reports.append(rie)

    guts.dump_all(
        reports,
        filename=op.join(reports_base_path, 'report_list.yaml'))

    from grond import info
    guts.dump(
        info.version_info(),
        filename=op.join(reports_base_path, 'version_info.yaml'))

    app_dir = op.join(op.split(__file__)[0], 'app')
    copytree(app_dir, reports_base_path)
    logger.info('Created report in %s/index.html' % reports_base_path)
def load_config(project_dir, mode):
    """
    Load configuration file.

    Parameters
    ----------
    project_dir : str
        path to the directory of the configuration file
    mode : str
        type of optimization problem: 'geometry' / 'static'/ 'kinematic'

    Returns
    -------
    :class:`BEATconfig`

    Raises
    ------
    IOError
        if the configuration file does not exist
    """
    config_file_name = 'config_' + mode + '.yaml'

    config_fn = os.path.join(project_dir, config_file_name)

    try:
        config = load(filename=config_fn)
    except IOError:
        raise IOError('Cannot load config, file %s'
                      ' does not exist!' % config_fn)

    # Fail early on inconsistent prior definitions.
    config.problem_config.validate_priors()
    return config
def testIOEvent(self):
    """Round-trip an Event (with depth and tags) through guts IO."""
    tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
    fn = pjoin(tempdir, 'event.txt')
    e1 = model.Event(
        10., 20., 1234567890., 'bubu',
        depth=10.,
        region='taka tuka land',
        moment_tensor=moment_tensor.MomentTensor(strike=45., dip=90),
        magnitude=5.1,
        magnitude_type='Mw',
        tags=['cluster:-1', 'custom_magnitude:2.5'])
    guts.dump(e1, filename=fn)
    e2 = guts.load(filename=fn)
    # Fix: a duplicated region comparison was removed.
    assert e1.region == e2.region
    assert e1.name == e2.name
    assert e1.lat == e2.lat
    assert e1.lon == e2.lon
    assert e1.time == e2.time
    assert e1.magnitude == e2.magnitude
    assert e1.magnitude_type == e2.magnitude_type
    assert e1.get_hash() == e2.get_hash()
    assert e1.tags == e2.tags

    fn2 = pjoin(tempdir, 'events.txt')
    guts.dump_all([e1, e2], filename=fn2)

    # Loading a single event from a two-event file must fail.
    with self.assertRaises(model.OneEventRequired):
        model.load_one_event(fn2)

    shutil.rmtree(tempdir)
def testEventExtras(self):
    """Check validation and round-trip behaviour of Event.extras."""
    tempdir = self.make_tempdir()
    eextra = model.Event(lat=12., lon=12.)

    # (extras dict, expected validation error or None)
    cases = [
        (dict(i=1, f=1.0, n=None, b=True, s='abc', e=eextra), None),
        ({1: 'abc'}, guts.ValidationError),
        ({'e': model.Event(lat=1, lon=1)}, guts.ValidationError)]

    for extras, expected_error in cases:
        event = model.Event(
            lat=10., lon=11., depth=4000., magnitude=5., extras=extras)

        # Events with extras can never be dumped in the basic format.
        path = pjoin(tempdir, 'test.events')
        with self.assertRaises(model.EventExtrasDumpError):
            model.dump_events([event], path)

        if expected_error is not None:
            with self.assertRaises(expected_error):
                event.validate()
        else:
            event.validate()
            # Round-trip through YAML must preserve the extras' types.
            reloaded = guts.load(string=event.dump())
            for key in extras:
                assert isinstance(reloaded.extras[key], type(extras[key]))
def command_fill(args):
    """Fill an initialized scenario project with data and a map."""
    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')

    parser, options, args = cl_parse('fill', args, setup=setup)

    if len(args) == 0:
        args = ['.']

    fn = get_scenario_yml(args[0])
    if not fn:
        parser.print_help()
        sys.exit(1)

    project_dir = op.abspath(args[0])

    logger.info('Initializing gf.LocalEngine...')
    engine = get_engine()

    scen = guts.load(filename=fn)
    scen.init_modelling(engine)
    scen.dump_data(path=project_dir, overwrite=options.force)
    scen.make_map(op.join(project_dir, 'map.pdf'))
def report_index(report_config=None):
    """Rebuild the report index over all report entry directories.

    Collects each entry's ``index.yaml``, writes ``report_list.yaml``
    and ``info.yaml`` into the report base path and copies the web app
    files there.

    :param report_config: optional :class:`ReportConfig`; defaults to a
        fresh instance.
    """
    if report_config is None:
        report_config = ReportConfig()

    report_base_path = report_config.report_base_path
    entries = []
    for entry_path in iter_report_entry_dirs(report_base_path):
        fn = op.join(entry_path, 'index.yaml')
        if not os.path.exists(fn):
            # Fix: logger.warn is a deprecated alias of logger.warning.
            logger.warning(
                'Skipping indexing of incomplete report entry: %s'
                % entry_path)
            continue

        logger.info('Indexing %s...' % entry_path)
        rie = guts.load(filename=fn)
        report_relpath = op.relpath(entry_path, report_base_path)
        rie.path = report_relpath
        entries.append(rie)

    guts.dump_all(
        entries,
        filename=op.join(report_base_path, 'report_list.yaml'))

    guts.dump(
        ReportInfo(
            title=report_config.title,
            description=report_config.description,
            have_archive=report_config.make_archive),
        filename=op.join(report_base_path, 'info.yaml'))

    app_dir = op.join(op.split(__file__)[0], 'app')
    copytree(app_dir, report_base_path)
    logger.info('Created report in %s/index.html' % report_base_path)
def load_problem_info(dirname):
    """Load ``problem.yaml`` from a run directory.

    Raises :class:`ProblemInfoNotAvailable` if the file cannot be read.
    """
    try:
        problem_fn = op.join(dirname, 'problem.yaml')
        return guts.load(filename=problem_fn)
    except OSError as e:
        logger.debug(e)
        raise ProblemInfoNotAvailable(
            'No problem info available (%s).' % dirname)
def load(cls, path):
    """Load a plot configuration collection from a YAML file.

    :param path: path to the YAML file
    :raises Error: if the file does not contain a
        :class:`PlotConfigCollection`
    """
    from silvertine.plot import get_all_plot_classes
    get_all_plot_classes()  # make sure all plot classes are loaded
    collection = load(filename=path)
    if not isinstance(collection, PlotConfigCollection):
        # Fix: previously raised the bare Error class with no message;
        # now carries context like the grond counterpart does.
        raise Error(
            'invalid plot collection configuration in file "%s"' % path)

    return collection
def load_config(self, filename):
    """Load a configuration file and store it on the instance."""
    try:
        self.config = load(filename=filename)
    except IOError:
        raise IOError(
            'Cannot load config, file %s does not exist!' % filename)
def test_ahfull_kiwi(self):
    """Compare ahfullgreen seismograms against reference Kiwi traces.

    For each setup, synthesize N/E/D displacement seismograms and check
    that the normalized misfit to the stored reference traces stays
    below 2 %.
    """
    setup = load(
        filename=common.test_data_file('test_ahfull_kiwi_setup.yaml'))
    trs_ref = io.load(
        common.test_data_file('test_ahfull_kiwi_traces.mseed'))
    for i, s in enumerate(setup.setups):
        # Trace length: twice the S-wave travel time over the 3-D distance.
        d3d = math.sqrt(s.x[0]**2 + s.x[1]**2 + s.x[2]**2)
        tlen = d3d / s.vs * 2
        n = int(num.round(tlen / s.deltat))
        out_x = num.zeros(n)
        out_y = num.zeros(n)
        out_z = num.zeros(n)
        ahfullgreen.add_seismogram(
            s.vp, s.vs, s.density, 1000000.0, 1000000.0, s.x, s.f, s.m6,
            'displacement', s.deltat, 0., out_x, out_y, out_z,
            ahfullgreen.Gauss(s.tau))
        trs = []
        for out, comp in zip([out_x, out_y, out_z], 'NED'):
            tr = trace.Trace(
                '', 'S%03i' % i, 'P', comp,
                deltat=s.deltat, tmin=0.0, ydata=out)
            trs.append(tr)
        trs2 = []
        for cha in 'NED':
            t1 = g(trs, 'S%03i' % i, cha)
            t2 = g(trs_ref, 'S%03i' % i, cha)
            # Compare only the overlapping time window.
            tmin = max(t1.tmin, t2.tmin)
            tmax = min(t1.tmax, t2.tmax)
            t1 = t1.chop(tmin, tmax, inplace=False)
            t2 = t2.chop(tmin, tmax, inplace=False)
            trs2.append(t2)
            # Normalized L2 misfit between synthetic and reference.
            d = 2.0 * num.sum((t1.ydata - t2.ydata)**2) / \
                (num.sum(t1.ydata**2) + num.sum(t2.ydata**2))
            if d >= 0.02:
                print(d)
                # trace.snuffle([t1, t2])
            assert d < 0.02
def load(cls, path):
    """Load a plot configuration collection from a YAML file."""
    from grond.plot import get_all_plot_classes

    # Register all plot classes so the YAML tags can be resolved.
    get_all_plot_classes()

    collection = load(filename=path)
    if not isinstance(collection, PlotConfigCollection):
        raise GrondError(
            'invalid plot collection configuration in file "%s"' % path)

    return collection
def read_config(path):
    """Read and prepare a Lassie configuration from a YAML file."""
    cfg = guts.load(filename=path)
    if not isinstance(cfg, Config):
        raise LassieError('invalid Lassie configuration in file "%s"' % path)

    # Resolve relative paths against the config file's directory.
    cfg.set_basepath(op.dirname(path) or '.')
    cfg.set_config_name(op.splitext(op.basename(path))[0])
    return cfg
def getConfig():
    """Return the Talpa configuration, creating a default if needed."""
    if not path.isfile(config_file):
        createDefaultConfig()

    try:
        logger.info('Loading config from %s...' % config_file)
        cfg = load(filename=config_file)
    except KeyError:
        # Config file is unreadable/outdated: recreate and fall back
        # to in-memory defaults.
        createDefaultConfig()
        cfg = TalpaConfig()

    return cfg
def read_info(path):
    """Read a Grond run info file, raising GrondError on any problem."""
    try:
        run_info = guts.load(filename=path)
    except OSError:
        raise GrondError('Cannot read Grond run info file: %s' % path)

    if not isinstance(run_info, RunInfo):
        raise GrondError('Invalid Grond run info in file "%s".' % path)

    return run_info
def test_ahfull_kiwi(self):
    """Compare ahfullgreen seismograms against reference Kiwi traces.

    For each setup, synthesize N/E/D displacement seismograms and check
    that the normalized misfit to the stored reference traces stays
    below 2 %.
    """
    setup = load(filename=common.test_data_file(
        'test_ahfull_kiwi_setup.yaml'))
    trs_ref = io.load(common.test_data_file(
        'test_ahfull_kiwi_traces.mseed'))
    for i, s in enumerate(setup.setups):
        # Trace length: twice the S-wave travel time over the 3-D distance.
        d3d = math.sqrt(s.x[0]**2 + s.x[1]**2 + s.x[2]**2)
        tlen = d3d / s.vs * 2
        n = int(num.round(tlen / s.deltat))
        out_x = num.zeros(n)
        out_y = num.zeros(n)
        out_z = num.zeros(n)
        ahfullgreen.add_seismogram(
            s.vp, s.vs, s.density, 1000000.0, 1000000.0, s.x, s.f, s.m6,
            'displacement', s.deltat, 0.,
            out_x, out_y, out_z, ahfullgreen.Gauss(s.tau))
        trs = []
        for out, comp in zip([out_x, out_y, out_z], 'NED'):
            tr = trace.Trace(
                '', 'S%03i' % i, 'P', comp,
                deltat=s.deltat, tmin=0.0, ydata=out)
            trs.append(tr)
        trs2 = []
        for cha in 'NED':
            t1 = g(trs, 'S%03i' % i, cha)
            t2 = g(trs_ref, 'S%03i' % i, cha)
            # Compare only the overlapping time window.
            tmin = max(t1.tmin, t2.tmin)
            tmax = min(t1.tmax, t2.tmax)
            t1 = t1.chop(tmin, tmax, inplace=False)
            t2 = t2.chop(tmin, tmax, inplace=False)
            trs2.append(t2)
            # Normalized L2 misfit between synthetic and reference.
            d = 2.0 * num.sum((t1.ydata - t2.ydata)**2) / \
                (num.sum(t1.ydata**2) + num.sum(t2.ydata**2))
            if d >= 0.02:
                print(d)
                # trace.snuffle([t1, t2])
            assert d < 0.02
def read_config(path):
    """Read a Grond configuration file and resolve its base path."""
    try:
        cfg = guts.load(filename=path)
    except OSError:
        raise GrondError('Cannot read Grond configuration file: %s' % path)

    if not isinstance(cfg, Config):
        raise GrondError('Invalid Grond configuration in file "%s".' % path)

    # Relative paths inside the config are resolved against its directory.
    cfg.set_basepath(op.dirname(path) or '.')
    return cfg
def command_init(args):
    '''
    Execution of command init
    '''
    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing project directory')

    parser, options, args = cl_parse('init', args, setup)
    if len(args) != 1:
        help_and_die(parser, 'missing argument')
    else:
        fn_config = args[0]

    if not os.path.isfile(fn_config):
        die('config file missing: %s' % fn_config)

    conf = load(filename=fn_config)
    config.check(conf)

    if ((not options.force) and (os.path.isdir(conf.project_dir))):
        die('project dir exists: %s; use force option' % conf.project_dir)
    else:
        # Recreate the project directory from scratch when forcing.
        if os.path.isdir(conf.project_dir):
            shutil.rmtree(conf.project_dir)
        os.mkdir(conf.project_dir)
        # Keep a copy of the configuration inside the project.
        conf.dump(filename=os.path.join(
            conf.project_dir, 'seiscloud.config'))

        dst = os.path.join(conf.project_dir, 'catalog.pf')

        if conf.catalog_origin == 'file':
            # Use a local catalog file as-is.
            src = conf.catalog_fn
            shutil.copyfile(src, dst)
        else:
            # Otherwise download events from an online catalog.
            if conf.catalog_origin == 'globalcmt':
                orig_catalog = catalog.GlobalCMT()
            else:   # geofon
                orig_catalog = catalog.Geofon()
            events = orig_catalog.get_events(
                time_range=(
                    util.str_to_time(conf.tmin),
                    util.str_to_time(conf.tmax)),
                magmin=conf.magmin,
                latmin=conf.latmin,
                latmax=conf.latmax,
                lonmin=conf.lonmin,
                lonmax=conf.lonmax)
            # get_events filters by minimum magnitude only; apply the
            # configured maximum here.
            selevents = [ev for ev in events if ev.magnitude <= conf.magmax]
            model.dump_events(selevents, dst)

        print('Project directory prepared "%s"' % conf.project_dir)
def read_responses(dirname):
    """Load all ``*.pf`` response files from a directory.

    Returns a dict mapping the dot-split filename (without extension),
    as a tuple, to the loaded response object.
    """
    responses = {}
    for entry in os.listdir(dirname):
        if not entry.endswith('.pf'):
            continue

        # Key is the filename without '.pf', split on dots.
        key = tuple(entry[:-3].split('.'))
        responses[key] = guts.load(
            filename=os.path.join(dirname, entry))

    return responses
def read_config(path):
    """Read a Grond clustering configuration from a YAML file."""
    try:
        cfg = guts.load(filename=path)
    except OSError:
        raise GrondError(
            'cannot read Grond clustering configuration file: %s' % path)

    if not isinstance(cfg, Clustering):
        raise GrondError(
            'invalid Grond clustering configuration in file "%s"' % path)

    return cfg
def read_config(path):
    """Read a Wafe configuration file and resolve its base path."""
    try:
        cfg = load(filename=path)
    except FileNotFoundError as e:
        raise meta.WafeError(str(e))

    if not isinstance(cfg, Config):
        raise meta.WafeError('invalid Wafe configuration in file "%s"' % path)

    # Relative paths are resolved against the config file's directory.
    cfg.set_basepath(op.dirname(path) or '.')
    cfg.set_config_name(op.splitext(op.basename(path))[0])
    return cfg
def read_config(path):
    """Read a silvertine report configuration from a YAML file."""
    # Register all plot classes so the YAML tags can be resolved.
    get_all_plot_classes()

    try:
        cfg = guts.load(filename=path)
    except OSError:
        raise silvertineError(
            'Cannot read silvertine report configuration file: %s' % path)

    if not isinstance(cfg, ReportConfig):
        raise silvertineError(
            'Invalid silvertine report configuration in file "%s".' % path)

    cfg.set_basepath(op.dirname(path) or '.')
    return cfg
def load(cls, filename):
    """Load a :class:`kite.SandboxScene`

    :param filename: Config file to load [.yml]
    :type filename: str
    :returns: A sandbox from config file
    :rtype: :class:`kite.SandboxScene`
    """
    cfg = guts.load(filename=filename)
    sandbox = cls(config=cfg)
    sandbox._log.debug('Loading config from %s' % filename)

    # Re-register sources so their processors are wired up.
    for src in sandbox.sources:
        sandbox.addSource(src)

    return sandbox
def _load_scenarios(self):
    """Discover, load and initialize all scenarios under the base path."""
    loaded = []
    base_path = self.get_path()
    for entry in os.listdir(base_path):
        scenario_id, suffix = op.splitext(entry)
        if suffix != '.' + self._scenario_suffix:
            continue

        scen = guts.load(
            filename=op.join(base_path, entry, 'scenario.yaml'))
        # Directory name and stored id must agree.
        assert scen.scenario_id == scenario_id
        scen.set_base_path(op.join(base_path, entry))
        scen.init_modelling(self._engine)
        loaded.append(scen)

    self._scenarios = loaded
    # Present scenarios in creation order.
    self._scenarios.sort(key=lambda s: s.time_created)
def testGNSSCampaign(self):
    """Round-trip a GNSS campaign through dump/load and check components."""
    tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
    path = pjoin(tempdir, 'gnss_campaign.yml')

    campaign = self.getGNSSCampaign()
    campaign.dump(filename=path)
    reloaded = load(filename=path)

    # Component addition should add shifts element-wise.
    first = campaign.stations[0]
    doubled = first.north + first.north
    assert doubled.shift == (first.north.shift + first.north.shift)

    assert len(campaign.stations) == len(reloaded.stations)

    shutil.rmtree(tempdir)
def testGNSSCampaign(self):
    """Build a random GNSS campaign, round-trip it through dump/load."""
    tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
    fn = pjoin(tempdir, 'gnss_campaign.yml')
    nstations = 25
    lats = num.random.uniform(90, -90, nstations)
    lons = num.random.uniform(90, -90, nstations)
    shifts = num.random.uniform(-2.5, 2.5, (nstations, 3))
    sigma = num.random.uniform(-0.5, 0.5, (nstations, 3))

    campaign = model.gnss.GNSSCampaign()
    for ista in range(nstations):
        north = model.gnss.GNSSComponent(
            shift=float(shifts[ista, 0]),
            sigma=float(sigma[ista, 0]))
        east = model.gnss.GNSSComponent(
            shift=float(shifts[ista, 1]),
            sigma=float(sigma[ista, 1]))
        up = model.gnss.GNSSComponent(
            shift=float(shifts[ista, 2]),
            sigma=float(sigma[ista, 2]))
        station = model.gnss.GNSSStation(
            lat=float(lats[ista]),
            lon=float(lons[ista]),
            north=north,
            east=east,
            up=up)
        campaign.add_station(station)

    campaign.dump(filename=fn)
    campaign2 = load(filename=fn)

    # Component addition should add shifts element-wise.
    s1 = campaign.stations[0]
    s_add = s1.north + s1.north
    assert s_add.shift == (s1.north.shift + s1.north.shift)

    assert len(campaign.stations) == len(campaign2.stations)

    # Fix: the temporary directory was previously leaked.
    shutil.rmtree(tempdir)
def testIOEvent(self):
    """Round-trip an Event through guts dump/load and compare fields."""
    tempdir = tempfile.mkdtemp()
    fn = pjoin(tempdir, 'event.txt')
    e1 = model.Event(
        10., 20., 1234567890., 'bubu',
        region='taka tuka land',
        moment_tensor=moment_tensor.MomentTensor(strike=45., dip=90),
        magnitude=5.1,
        magnitude_type='Mw')
    guts.dump(e1, filename=fn)
    e2 = guts.load(filename=fn)
    # Fix: a duplicated region comparison was removed.
    assert e1.region == e2.region
    assert e1.name == e2.name
    assert e1.lat == e2.lat
    assert e1.lon == e2.lon
    assert e1.time == e2.time
    assert e1.magnitude == e2.magnitude
    assert e1.magnitude_type == e2.magnitude_type
    shutil.rmtree(tempdir)
def raw_config(config_name='config'):
    """Return the named configuration, cached by file modification time.

    Creates the config file with defaults if missing. Reloads and
    re-validates only when the file's mtime changed since the last call.
    Mutates the module-level caches ``g_conf`` and ``g_conf_mtime``.
    """
    conf_path = expand(make_conf_path_tmpl(config_name))

    if not op.exists(conf_path):
        # First run: write a default config to disk.
        g_conf[config_name] = config_cls[config_name].default()
        write_config(g_conf[config_name], config_name)

    conf_mtime_now = mtime(conf_path)
    if conf_mtime_now != g_conf_mtime.get(config_name, None):
        # File changed (or never loaded): reload and type-check it.
        g_conf[config_name] = load(filename=conf_path)
        if not isinstance(g_conf[config_name], config_cls[config_name]):
            raise BadConfig('config file does not contain a '
                            'valid "%s" section.'
                            % config_cls[config_name].__name__)

        g_conf_mtime[config_name] = conf_mtime_now

    return g_conf[config_name]
def command_map(args):
    """Render the scenario overview map into the project directory."""
    parser, options, args = cl_parse('map', args)
    if not args:
        args.append('.')

    scenario_path = get_scenario_yml(args[0])
    if not scenario_path:
        parser.print_help()
        sys.exit(1)

    project_dir = args[0]
    engine = get_engine([op.join(project_dir, 'gf_stores')])

    scenario = guts.load(filename=scenario_path)
    scenario.init_modelling(engine)
    scenario.make_map(op.join(project_dir, 'map.pdf'))
def command_map(args):
    """Render the scenario overview map into the project directory."""
    parser, options, args = cl_parse('map', args)
    if not args:
        args.append('.')

    scenario_path = get_scenario_yml(args[0])
    if not scenario_path:
        parser.print_help()
        sys.exit(1)

    project_dir = op.abspath(args[0])

    logger.info('Initializing gf.LocalEngine...')
    engine = get_engine()

    scenario = guts.load(filename=scenario_path)
    scenario.init_modelling(engine)
    scenario.make_map(op.join(project_dir, 'map.pdf'))
def raw_config():
    """Return the Pyrocko configuration, cached by file modification time.

    Creates the config file with defaults if missing. Reloads and
    re-validates only when the file's mtime changed since the last call.
    Mutates the module-level globals ``g_conf`` and ``g_conf_mtime``.
    """
    global g_conf
    global g_conf_mtime

    conf_path = expand(conf_path_tmpl)

    if not os.path.exists(conf_path):
        # First run: write a default config to disk.
        g_conf = PyrockoConfig()
        write_config(g_conf)

    conf_mtime_now = mtime(conf_path)
    if conf_mtime_now != g_conf_mtime:
        # File changed (or never loaded): reload and type-check it.
        g_conf = load(filename=conf_path)
        if not isinstance(g_conf, PyrockoConfig):
            raise BadConfig('config file does not contain a '
                            'valid "pf.PyrockoConfig" section.')

        g_conf_mtime = conf_mtime_now

    return g_conf
def command_snuffle(args):
    """Open the scenario's waveforms, stations and events in Snuffler."""
    from pyrocko.gui import snuffler

    # NOTE(review): 'map' is passed here although this is the snuffle
    # command — looks copy-pasted; confirm against cl_parse's registry.
    parser, options, args = cl_parse('map', args)
    if not args:
        args.append('.')

    scenario_path = get_scenario_yml(args[0])
    if not scenario_path:
        parser.print_help()
        sys.exit(1)

    engine = get_engine()
    scenario = guts.load(filename=scenario_path)
    scenario.init_modelling(engine)

    return snuffler.snuffle(
        scenario.get_pile(),
        stations=scenario.get_stations(),
        events=scenario.get_events())
def testIO(self):
    """Check that a MomentTensor survives a guts dump/load round trip."""
    original = MomentTensor(dip=90.0)
    restored = guts.load(string=original.dump())

    # Strike/dip/rake pairs must agree within tolerance after the round trip.
    sdr_before = original.both_strike_dip_rake()
    sdr_after = restored.both_strike_dip_rake()
    self.assertSame(sdr_before, sdr_after, 0.1, "failed io via guts")
elevation: 0.0 interpolation: nearest_neighbor # attributes defined within the SensorArray class distance_min: 1000.0 distance_max: 100000.0 strike: 0.0 sensor_count: 50 name: Sensor array 2 ''' # export the object definition to a file sa1.dump(filename='sensorarray1') # import object definition from file sa3 = load(filename='sensorarray1') sa3.name = 'Sensor array 3' print(sa3) ''' output would look like --- !gft.SensorArray # properies defined by the base type Target depth: 0.0 codes: ['', STA, '', Z] elevation: 0.0 interpolation: nearest_neighbor # attributes defined within the SensorArray class distance_min: 1000.0 distance_max: 100000.0 strike: 0.0
def load_response_information(
        filename, format, nslc_patterns=None, fake_input_units=None):
    """Load instrument responses from a file in one of several formats.

    :param filename: path to the response file
    :param format: one of ``'sacpz'``, ``'pf'``, ``'resp'``,
        ``'stationxml'``
    :param nslc_patterns: optional channel-code patterns; only matching
        channels are loaded (``'resp'`` and ``'stationxml'`` formats)
    :param fake_input_units: override the input units assumed for the
        response; not allowed for formats that carry no unit information
    :returns: tuple ``(resps, labels)`` of response objects and
        human-readable label strings
    :raises Exception: if ``fake_input_units`` is given for a format
        that cannot verify true input units
    """
    from pyrocko import pz, trace
    from pyrocko.io import resp as fresp

    resps = []
    labels = []
    if format == 'sacpz':
        # Plain pole-zero file: no unit metadata available.
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain SAC PZ files')

        zeros, poles, constant = pz.read_sac_zpk(filename)
        resp = trace.PoleZeroResponse(
            zeros=zeros, poles=poles, constant=constant)

        resps.append(resp)
        labels.append(filename)

    elif format == 'pf':
        # Serialized guts response object: no unit metadata available.
        if fake_input_units is not None:
            raise Exception(
                'cannot guess true input units from plain response files')

        resp = guts.load(filename=filename)
        resps.append(resp)
        labels.append(filename)

    elif format == 'resp':
        for resp in list(fresp.iload_filename(filename)):
            if nslc_patterns is not None and not util.match_nslc(
                    nslc_patterns, resp.codes):
                continue

            units = ''
            if resp.response.instrument_sensitivity:
                s = resp.response.instrument_sensitivity
                if s.input_units and s.output_units:
                    units = ', %s -> %s' % (
                        fake_input_units or s.input_units.name,
                        s.output_units.name)

            resps.append(resp.response.get_pyrocko_response(
                resp.codes, fake_input_units=fake_input_units))

            labels.append('%s (%s.%s.%s.%s, %s - %s%s)' % (
                (filename, ) + resp.codes +
                (tts(resp.start_date), tts(resp.end_date), units)))

    elif format == 'stationxml':
        from pyrocko.fdsn import station as fs

        sx = fs.load_xml(filename=filename)
        for network in sx.network_list:
            for station in network.station_list:
                for channel in station.channel_list:
                    nslc = (
                        network.code,
                        station.code,
                        channel.location_code,
                        channel.code)

                    if nslc_patterns is not None and not util.match_nslc(
                            nslc_patterns, nslc):
                        continue

                    if not channel.response:
                        # Channel has no response info: skip with warning.
                        logger.warn(
                            'no response for channel %s.%s.%s.%s given.'
                            % nslc)
                        continue

                    units = ''
                    if channel.response.instrument_sensitivity:
                        s = channel.response.instrument_sensitivity
                        if s.input_units and s.output_units:
                            units = ', %s -> %s' % (
                                fake_input_units or s.input_units.name,
                                s.output_units.name)

                    resps.append(channel.response.get_pyrocko_response(
                        nslc, fake_input_units=fake_input_units))

                    labels.append(
                        '%s (%s.%s.%s.%s, %s - %s%s)' % (
                            (filename, ) + nslc +
                            (tts(channel.start_date),
                             tts(channel.end_date),
                             units)))

    return resps, labels
lat=float(row[1]), lon=float(row[2]), elevation=float(row[3])) campaign.add_station(sta) # Load the displacements with open(fn_displacements, 'r') as f: for line in f: if line.startswith('#'): continue row = line.split(',') station_id = row[0].strip() station = campaign.get_station(station_id) station.east = gnss.GNSSComponent( shift=float(row[1]) / mm, sigma=float(row[2]) / mm) station.north = gnss.GNSSComponent( shift=float(row[7]) / mm, sigma=float(row[8]) / mm) station.up = gnss.GNSSComponent( shift=float(row[14]) / mm, sigma=float(row[15]) / mm) print('Campaign %s has %d stations' % (campaign.name, campaign.nstations)) campaign.dump(filename='GPS_Northridge-1994_Hudnut.yml') # Load the campaign back in campaign_loaded = guts.load(filename='GPS_Northridge-1994_Hudnut.yml')
def get_generator(self):
    """Load the scenario generator from disk and wire up the engine."""
    gen = guts.load(filename=self.get_path('generator.yaml'))
    gen.init_modelling(self._engine)
    return gen