Example #1
    def update_weights(self, point, n_jobs=1, plot=False):
        """
        Updates weighting matrices (in place) with respect to the point in the
        solution space.

        Parameters
        ----------
        point : dict
            mapping of variable names to numpy array-like values
        n_jobs : int
            number of parallel jobs (the covariance call below currently
            passes n_jobs=1 regardless of this value)
        plot : bool
            if True, diagnostic plots are produced during the covariance
            estimation
        """
        gc = self.config

        self.point2sources(point)

        crust_inds = range(*gc.gf_config.n_variations)
        thresh = 5
        if len(crust_inds) > thresh:
            logger.info('Updating geodetic velocity model-covariances ...')
            for i, data in enumerate(self.datasets):
                crust_targets = heart.init_geodetic_targets(
                    datasets=[data],
                    earth_model_name=gc.gf_config.earth_model_name,
                    interpolation=gc.interpolation,
                    crust_inds=crust_inds,
                    sample_rate=gc.gf_config.sample_rate)

                logger.debug('Track %s' % data.name)
                cov_pv = cov.geodetic_cov_velocity_models(
                    engine=self.engine,
                    sources=self.sources,
                    targets=crust_targets,
                    dataset=data,
                    plot=plot,
                    event=self.event,
                    n_jobs=1)

                cov_pv = utility.ensure_cov_psd(cov_pv)
                data.covariance.pred_v = cov_pv
                choli = data.covariance.chol_inverse

                self.weights[i].set_value(choli)
                data.covariance.update_slog_pdet()
        else:
            logger.info(
                'Not updating geodetic velocity model-covariances: number of '
                'model variations (%i) does not exceed the threshold of %i!' % (
                    len(crust_inds), thresh))
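
The `utility.ensure_cov_psd` call above guards against numerically indefinite
model-prediction covariances before the Cholesky-based weights are formed. The
following is a minimal, self-contained sketch of one common way to enforce
positive semi-definiteness; it only illustrates the idea and is not necessarily
what `utility.ensure_cov_psd` does internally.

import numpy as num

def make_psd(cov, eps=1e-12):
    """Symmetrize and lift non-positive eigenvalues so a Cholesky factor exists."""
    cov = 0.5 * (cov + cov.T)               # remove asymmetry from round-off
    evals, evecs = num.linalg.eigh(cov)
    evals = num.clip(evals, eps, None)      # clip negative / zero eigenvalues
    return evecs @ num.diag(evals) @ evecs.T

# A matrix that round-off has pushed indefinite (det < 0):
cov = num.array([[1.0, 1.0000001],
                 [1.0000001, 1.0]])
choli_ok = num.linalg.cholesky(make_psd(cov))   # succeeds after the repair
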
Example #2
    def __init__(self, gc, project_dir, event, hypers=False):

        super(GeodeticComposite, self).__init__()

        self.event = event

        logger.debug('Setting up geodetic structure ...\n')
        self.name = 'geodetic'
        self._like_name = 'geo_like'

        geodetic_data_path = os.path.join(project_dir,
                                          bconfig.geodetic_data_name)

        self.datasets = utility.load_objects(geodetic_data_path)

        logger.info('Number of geodetic datasets: %i ' % self.n_t)

        # init geodetic targets
        self.targets = heart.init_geodetic_targets(
            datasets=self.datasets,
            earth_model_name=gc.gf_config.earth_model_name,
            interpolation=gc.interpolation,
            crust_inds=[gc.gf_config.reference_model_idx],
            sample_rate=gc.gf_config.sample_rate)

        # merge geodetic data to calculate residuals on single array
        datasets, los_vectors, odws, self.Bij = heart.concatenate_datasets(
            self.datasets)
        logger.info('Number of geodetic data points: %i ' %
                    self.Bij.ordering.size)

        self.sdata = shared(datasets, name='geodetic_data', borrow=True)
        self.slos_vectors = shared(los_vectors, name='los_vecs', borrow=True)
        self.sodws = shared(odws, name='odws', borrow=True)

        if gc.calc_data_cov:
            logger.warning('Covariance estimation not implemented (yet)!'
                           ' Using imported covariances!')
        else:
            logger.info('No data-covariance estimation! Using imported'
                        ' covariances \n')

        self.weights = []
        for i, data in enumerate(self.datasets):
            if int(data.covariance.data.sum()) == data.ncoords:
                logger.warning('Data covariance is identity matrix!'
                               ' Please double check!!!')

            choli = data.covariance.chol_inverse
            self.weights.append(
                shared(choli, name='geo_weight_%i' % i, borrow=True))
            data.covariance.update_slog_pdet()

        if gc.fit_plane:
            logger.info('Fit residual ramp selected!')
            self._slocx = []
            self._slocy = []
            for j, data in enumerate(self.datasets):
                if isinstance(data, heart.DiffIFG):
                    locy, locx = data.update_local_coords(self.event)
                    self._slocx.append(
                        shared(locx.astype(tconfig.floatX) / km,
                               name='localx_%s' % j,
                               borrow=True))
                    self._slocy.append(
                        shared(locy.astype(tconfig.floatX) / km,
                               name='localy_%s' % j,
                               borrow=True))
                else:
                    logger.debug('Appending placeholder for non-SAR data!')
                    self._slocx.append(None)
                    self._slocy.append(None)

        self.config = gc

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(num.array([1.]), name='geo_llk_%i' % t,
                           borrow=True))
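
The weights stored above are inverse Cholesky factors of the data covariances
(the exact definition of `chol_inverse` in the project is assumed here). A
small, self-contained numpy sketch of why such a factor is a convenient weight:
multiplying the residuals by the inverse factor "whitens" them, so their plain
sum of squares equals the covariance-weighted misfit.

import numpy as num

# Toy data covariance and residual vector.
C = num.array([[2.0, 0.3],
               [0.3, 1.0]])
r = num.array([0.5, -1.2])

L = num.linalg.cholesky(C)       # C = L @ L.T
choli = num.linalg.inv(L)        # inverse Cholesky factor (the role chol_inverse plays above)

white = choli @ r                # whitened residuals
assert num.allclose(white @ white, r @ num.linalg.solve(C, r))
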
Example #3
    def __init__(self, gc, project_dir, events, hypers=False):

        super(GeodeticComposite, self).__init__(events)

        logger.debug('Setting up geodetic structure ...\n')
        self.name = 'geodetic'
        self._like_name = 'geo_like'

        geodetic_data_path = os.path.join(project_dir,
                                          bconfig.geodetic_data_name)

        self.datasets = utility.load_objects(geodetic_data_path)
        logger.info('Number of geodetic datasets: %i ' % self.n_t)

        # init geodetic targets
        self.targets = heart.init_geodetic_targets(
            datasets=self.datasets,
            earth_model_name=gc.gf_config.earth_model_name,
            interpolation=gc.interpolation,
            crust_inds=[gc.gf_config.reference_model_idx],
            sample_rate=gc.gf_config.sample_rate)

        # merge geodetic data to calculate residuals on single array
        datasets, los_vectors, odws, self.Bij = heart.concatenate_datasets(
            self.datasets)
        logger.info('Number of geodetic data points: %i ' %
                    self.Bij.ordering.size)

        self.sdata = shared(datasets, name='geodetic_data', borrow=True)
        self.slos_vectors = shared(los_vectors, name='los_vecs', borrow=True)
        self.sodws = shared(odws, name='odws', borrow=True)

        if gc.calc_data_cov:
            logger.warning('Covariance estimation not implemented (yet)!'
                           ' Using imported covariances!')
        else:
            logger.info('No data-covariance estimation! Using imported'
                        ' covariances \n')

        self.weights = []
        for i, data in enumerate(self.datasets):
            if int(data.covariance.data.sum()) == data.ncoords:
                logger.warning('Data covariance is identity matrix!'
                               ' Please double check!!!')

            choli = data.covariance.chol_inverse
            self.weights.append(
                shared(choli, name='geo_weight_%i' % i, borrow=True))
            data.covariance.update_slog_pdet()

        if gc.corrections_config.has_enabled_corrections:
            correction_configs = gc.corrections_config.iter_corrections()
            logger.info('Initialising corrections ...')
            for data in self.datasets:
                data.setup_corrections(event=self.event,
                                       correction_configs=correction_configs)

        self.config = gc

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(num.array([1.]), name='geo_llk_%i' % t,
                           borrow=True))
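
The `fit_plane` option in the second example and the corrections framework in
the third both serve to remove systematic signals (for instance orbital ramps)
from the geodetic data. Below is a self-contained numpy sketch of the simplest
such correction, a least-squares plane in local coordinates; it is an
illustration only, not the project's implementation.

import numpy as num

def fit_ramp(locx, locy, displacement):
    """Least-squares plane a*x + b*y + c through the displacements."""
    G = num.vstack([locx, locy, num.ones_like(locx)]).T
    coeffs, *_ = num.linalg.lstsq(G, displacement, rcond=None)
    return coeffs, G @ coeffs                # (a, b, c) and the predicted ramp

locx = num.linspace(0., 10., 50)
locy = num.linspace(0., 5., 50)
data = 0.02 * locx - 0.01 * locy + 0.3 + 0.001 * num.random.randn(50)
coeffs, ramp = fit_ramp(locx, locy, data)
deramped = data - ramp                       # corrected displacements
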
Example #4
# ... (preceding lines, which construct the geodetic dataset list, not shown) ...
            lons=lons.ravel(),
            los_vector=los,
            displacement=num.zeros_like(data_yloc).ravel())
]

fault = discretize_sources(config,
                           sources=sources,
                           datatypes=['geodetic'],
                           varnames=varnames,
                           tolerance=0.5)

engine = LocalEngine(store_superdirs=[store_superdirs])

targets = init_geodetic_targets(datasets,
                                earth_model_name='ak135-f-continental.m',
                                interpolation='multilinear',
                                crust_inds=[0],
                                sample_rate=0.0)

print(event)
opt_fault, R = optimize_discretization(config,
                                       fault,
                                       datasets=datasets,
                                       varnames=varnames,
                                       crust_ind=0,
                                       engine=engine,
                                       targets=targets,
                                       event=event,
                                       force=True,
                                       nworkers=nworkers,
                                       plot=True,