def __init__(self, config, hypers=False):
    """
    Set up an interseismic optimization problem.

    Parameters
    ----------
    config : project configuration object
        Provides ``problem_config``, per-datatype sub-configs and
        ``project_dir``.
    hypers : bool
        If True, initialise the composites in hyperparameter mode.
    """
    logger.info('... Initialising Interseismic Optimizer ... \n')
    super(InterseismicOptimizer, self).__init__(config, hypers)

    pc = config.problem_config

    # Only rectangular sources are supported by this optimizer.
    if pc.source_type != 'RectangularSource':
        raise TypeError('Interseismic Optimizer has to be used with'
                        ' RectangularSources!')

    dsources = transform_sources(self.sources, pc.datatypes)

    # One composite per configured datatype, looked up from the catalog.
    for datatype in pc.datatypes:
        composite_cls = interseismic_composite_catalog[datatype]
        self.composites[datatype] = composite_cls(
            config[datatype + '_config'],
            config.project_dir,
            dsources[datatype],
            self.event,
            hypers)

    self.config = config

    # updating source objects with fixed values
    point = self.get_random_point()
    self.point2sources(point)
def __init__(self, config, hypers=False):
    """
    Set up a geometry optimization problem.

    Parameters
    ----------
    config : project configuration object
        Provides ``problem_config``, per-datatype sub-configs and
        ``project_dir``.
    hypers : bool
        If True, initialise the composites in hyperparameter mode.
    """
    logger.info('... Initialising Geometry Optimizer ... \n')
    super(GeometryOptimizer, self).__init__(config, hypers)

    pc = config.problem_config

    dsources = transform_sources(
        self.sources, pc.datatypes, pc.decimation_factors)

    # Build one composite per datatype from the geometry catalog.
    for datatype in pc.datatypes:
        datatype_config = config[datatype + '_config']
        self.composites[datatype] = geometry_composite_catalog[datatype](
            datatype_config,
            config.project_dir,
            dsources[datatype],
            self.event,
            hypers)

    self.config = config

    # updating source objects with test-value in bounds
    tpoint = pc.get_test_point()
    self.point2sources(tpoint)
def update_weights(self, point, n_jobs=1, plot=False):
    """
    Calculate and update model prediction uncertainty covariances due to
    uncertainty in the velocity model with respect to one point in the
    solution space. Shared variables are updated.

    Parameters
    ----------
    point : :func:`pymc3.Point`
        Dictionary with model parameters, for which the covariance matrixes
        with respect to velocity model uncertainties are calculated
    n_jobs : int
        Number of processors to use for calculation of seismic covariances
    plot : boolean
        Flag for opening the seismic waveforms in the snuffler
    """
    tpoint = copy.deepcopy(point)

    # update sources
    tpoint = utility.adjust_point_units(tpoint)

    # remove hyperparameters from point; pop with a default so a point
    # lacking some configured hyperparameter does not raise KeyError
    # (consistent with get_synthetics)
    hps = self.config.problem_config.hyperparameters
    for hyper in hps.keys():
        tpoint.pop(hyper, None)

    if self._seismic_flag:
        # source times are relative to the reference event time
        tpoint['time'] += self.event.time

    source_points = utility.split_point(tpoint)
    for i, source in enumerate(self.sources):
        utility.update_source(source, **source_points[i])

    dsources = utility.transform_sources(
        self.sources, self.config.problem_config.datasets)

    # seismic
    if self._seismic_flag:
        sc = self.config.seismic_config

        for j, channel in enumerate(sc.channels):
            for i, station in enumerate(self.stations):
                logger.debug('Channel %s of Station %s ' % (
                    channel, station.station))

                # one target per velocity-model variation for this station
                crust_targets = heart.init_targets(
                    stations=[station],
                    channels=channel,
                    sample_rate=sc.gf_config.sample_rate,
                    crust_inds=range(sc.gf_config.n_variations))

                cov_pv = cov.get_seis_cov_velocity_models(
                    engine=self.engine,
                    sources=dsources['seismic'],
                    targets=crust_targets,
                    arrival_taper=sc.arrival_taper,
                    filterer=sc.filterer,
                    plot=plot, n_jobs=n_jobs)

                # guard against numerically non-positive-definite matrices
                cov_pv = utility.ensure_cov_psd(cov_pv)

                self.engine.close_cashed_stores()

                # targets are ordered channel-major, station-minor
                index = j * len(self.stations) + i

                self.stargets[index].covariance.pred_v = cov_pv
                icov = self.stargets[index].covariance.inverse
                self.sweights[index].set_value(icov)

    # geodetic
    if self._geodetic_flag:
        gc = self.config.geodetic_config

        for i, gtarget in enumerate(self.gtargets):
            logger.debug('Track %s' % gtarget.track)
            cov_pv = cov.get_geo_cov_velocity_models(
                store_superdir=gc.gf_config.store_superdir,
                crust_inds=range(gc.gf_config.n_variations),
                dataset=gtarget,
                sources=dsources['geodetic'])

            cov_pv = utility.ensure_cov_psd(cov_pv)

            gtarget.covariance.pred_v = cov_pv
            icov = gtarget.covariance.inverse
            self.gweights[i].set_value(icov)
def get_synthetics(self, point, **kwargs):
    """
    Get synthetics for given point in solution space.

    Parameters
    ----------
    point : :func:`pymc3.Point`
        Dictionary with model parameters
    kwargs especially to change output of seismic forward model
        outmode = 'traces'/ 'array' / 'data'

    Returns
    -------
    Dictionary with keys according to datasets containing the synthetics
    as lists.
    """
    tpoint = copy.deepcopy(point)

    tpoint = utility.adjust_point_units(tpoint)

    # remove hyperparameters from point; pop with a default so a point
    # lacking some configured hyperparameter is tolerated
    hps = self.config.problem_config.hyperparameters
    for hyper in hps.keys():
        tpoint.pop(hyper, None)

    d = dict()

    if self._seismic_flag:
        # source times are relative to the reference event time
        tpoint['time'] += self.event.time

    source_points = utility.split_point(tpoint)
    for i, source in enumerate(self.sources):
        utility.update_source(source, **source_points[i])

    dsources = utility.transform_sources(
        self.sources, self.config.problem_config.datasets)

    # seismic
    if self._seismic_flag:
        sc = self.config.seismic_config
        seis_synths, _ = heart.seis_synthetics(
            engine=self.engine,
            sources=dsources['seismic'],
            targets=self.stargets,
            arrival_taper=sc.arrival_taper,
            filterer=sc.filterer, **kwargs)

        d['seismic'] = seis_synths

    # geodetic
    if self._geodetic_flag:
        gc = self.config.geodetic_config
        # only the reference crustal model for synthetics generation
        crust_inds = [0]

        geo_synths = []
        for crust_ind in crust_inds:
            for gtarget in self.gtargets:
                disp = heart.geo_layer_synthetics(
                    gc.gf_config.store_superdir, crust_ind,
                    lons=gtarget.lons,
                    lats=gtarget.lats,
                    sources=dsources['geodetic'])
                # project 3-component surface displacement onto the
                # line-of-sight unit vector of the dataset
                geo_synths.append(
                    disp[:, 0] * gtarget.los_vector[:, 0] +
                    disp[:, 1] * gtarget.los_vector[:, 1] +
                    disp[:, 2] * gtarget.los_vector[:, 2])

        d['geodetic'] = geo_synths

    return d
def __init__(self, config):
    """
    Initialise the geometry optimizer from the project configuration.

    Sets up the reference event, the fault sources, and — depending on
    which dataset flags are active — the seismic and/or geodetic data
    structures, data covariances, shared weight matrices and the theano
    synthetics operators.

    Parameters
    ----------
    config : project configuration object
        Provides ``problem_config``, ``event``, ``project_dir`` and the
        dataset-specific sub-configurations.
    """
    logger.info('... Initialising Geometry Optimizer ... \n')

    pc = config.problem_config

    super(GeometryOptimizer, self).__init__(pc)

    # Load event: prefer the one given in config; otherwise fall back to
    # the event file in the seismic data directory (if seismic is active).
    if config.event is None:
        if self._seismic_flag:
            self.event = model.load_one_event(
                os.path.join(
                    config.seismic_config.datadir, 'event.txt'))
        else:
            logger.warn('Found no event information!')
    else:
        self.event = config.event

    # Init sources: one rectangular source per configured fault, seeded
    # from the reference event when available.
    self.sources = []

    for i in range(pc.n_faults):
        if self.event:
            source = heart.RectangularSource.from_pyrocko_event(self.event)
            # hardcoded inversion for hypocentral time
            source.stf.anchor = -1.
        else:
            source = heart.RectangularSource()

        self.sources.append(source)

    # Per-dataset views of the sources (seismic / geodetic).
    dsources = utility.transform_sources(self.sources, pc.datasets)

    if self._seismic_flag:
        logger.debug('Setting up seismic structure ...\n')
        sc = config.seismic_config
        self.engine = gf.LocalEngine(
            store_superdirs=[sc.gf_config.store_superdir])

        seismic_data_path = os.path.join(
            config.project_dir, bconfig.seismic_data_name)
        stations, data_traces = utility.load_objects(
            seismic_data_path)
        stations = utility.apply_station_blacklist(stations, sc.blacklist)

        # Keep only stations within the configured distance range.
        self.stations = utility.weed_stations(
            stations, self.event, distances=sc.distances)

        self.data_traces = utility.weed_data_traces(
            data_traces, self.stations)

        # Resample traces to the Green's function sample rate if needed.
        # NOTE(review): only the first trace's deltat is checked —
        # presumably all traces share one sampling rate; confirm upstream.
        target_deltat = 1. / sc.gf_config.sample_rate

        if self.data_traces[0].deltat != target_deltat:
            utility.downsample_traces(
                self.data_traces, deltat=target_deltat)

        self.stargets = heart.init_targets(
            self.stations,
            channels=sc.channels,
            sample_rate=sc.gf_config.sample_rate,
            crust_inds=[0],  # always reference model
            interpolation='multilinear')

        self.ns_t = len(self.stargets)
        logger.info('Number of seismic datasets: %i ' % self.ns_t)

        if sc.calc_data_cov:
            logger.info('Estimating seismic data-covariances ...\n')
            cov_ds_seismic = cov.get_seismic_data_covariances(
                data_traces=self.data_traces,
                filterer=sc.filterer,
                sample_rate=sc.gf_config.sample_rate,
                arrival_taper=sc.arrival_taper,
                engine=self.engine,
                event=self.event,
                targets=self.stargets)
        else:
            # Fallback: diagonal covariance sigma^2 * I, sized to the
            # tapered waveform window.
            logger.info('No data-covariance estimation ...\n')
            cov_ds_seismic = []
            at = sc.arrival_taper
            n_samples = int(num.ceil(
                (num.abs(at.a) + at.d) * sc.gf_config.sample_rate))

            for tr in self.data_traces:
                cov_ds_seismic.append(
                    num.power(bconfig.default_seis_std, 2) * \
                    num.eye(n_samples))

        # Shared inverse covariances (weights) for the seismic targets.
        self.sweights = []
        for s_t in range(self.ns_t):
            if self.stargets[s_t].covariance.data is None:
                logger.debug(
                    'No data covariance given. Seting default: sigma2 * I')
                self.stargets[s_t].covariance.data = cov_ds_seismic[s_t]

            icov = self.stargets[s_t].covariance.inverse
            self.sweights.append(shared(icov))

        # synthetics generation (theano operators)
        logger.debug('Initialising synthetics functions ... \n')
        self.get_seis_synths = theanof.SeisSynthesizer(
            engine=self.engine,
            sources=dsources['seismic'],
            targets=self.stargets,
            event=self.event,
            arrival_taper=sc.arrival_taper,
            filterer=sc.filterer)

        self.chop_traces = theanof.SeisDataChopper(
            sample_rate=sc.gf_config.sample_rate,
            traces=self.data_traces,
            arrival_taper=sc.arrival_taper,
            filterer=sc.filterer)

    if self._geodetic_flag:
        logger.debug('Setting up geodetic structure ...\n')
        gc = config.geodetic_config

        geodetic_data_path = os.path.join(
            config.project_dir, bconfig.geodetic_data_name)
        self.gtargets = utility.load_objects(geodetic_data_path)

        self.ng_t = len(self.gtargets)
        logger.info('Number of geodetic datasets: %i ' % self.ng_t)

        # geodetic data: collect per-target arrays for later merging
        _disp_list = [self.gtargets[i].displacement
                      for i in range(self.ng_t)]
        _lons_list = [self.gtargets[i].lons for i in range(self.ng_t)]
        _lats_list = [self.gtargets[i].lats for i in range(self.ng_t)]
        _odws_list = [self.gtargets[i].odw for i in range(self.ng_t)]
        _lv_list = [self.gtargets[i].update_los_vector()
                    for i in range(self.ng_t)]

        if gc.calc_data_cov:
            logger.info('Using data covariance!')
        else:
            # Fallback: diagonal covariance sigma^2 * I per dataset.
            logger.info('No data-covariance estimation ...\n')
            for g_t in self.gtargets:
                g_t.covariance.data = num.power(
                    bconfig.default_geo_std, 2) * \
                    num.eye(g_t.lats.size)

        # Shared inverse covariances (weights) for the geodetic targets.
        self.gweights = []
        for g_t in range(self.ng_t):
            icov = self.gtargets[g_t].covariance.inverse
            self.gweights.append(shared(icov))

        # merge geodetic data to call pscmp only once each forward model
        ordering = utility.ListArrayOrdering(_disp_list, intype='numpy')
        self.Bij = utility.ListToArrayBijection(ordering, _disp_list)

        odws = self.Bij.fmap(_odws_list)
        lons = self.Bij.fmap(_lons_list)
        lats = self.Bij.fmap(_lats_list)

        logger.info('Number of geodetic data points: %i ' % lats.shape[0])

        # Shared observed data (weighted by odw), LOS vectors and weights.
        self.wdata = shared(self.Bij.fmap(_disp_list) * odws)
        self.lv = shared(self.Bij.f3map(_lv_list))
        self.odws = shared(odws)

        # synthetics generation (theano operator)
        logger.debug('Initialising synthetics functions ... \n')
        self.get_geo_synths = theanof.GeoLayerSynthesizerStatic(
            lats=lats,
            lons=lons,
            store_superdir=gc.gf_config.store_superdir,
            crust_ind=0,  # always reference model
            sources=dsources['geodetic'])

    self.config = config