Example #1
def geo_construct_gf_linear_patches(
        engine, datasets=None, targets=None, patches=None, nworkers=1):
    """
    Create geodetic Green's function matrix for the given patches.

    Parameters
    ----------
    engine : :class:`pyrocko.gf.seismosizer.LocalEngine`
        main path to directory containing the different Green's function stores
    datasets : list
        of :class:`heart.GeodeticDataset` for which the GFs are calculated
    targets : list
        of geodetic targets as returned by :func:`heart.init_geodetic_targets`
    patches : list
        of fault patches (sub-fault source elements) for which the GFs are
        calculated
    nworkers : int
        number of CPUs to use for processing
    """

    _, los_vectors, odws, _ = heart.concatenate_datasets(datasets)

    nsamples = odws.size
    npatches = len(patches)

    logger.debug('Using %i workers ...' % nworkers)

    shared_gflibrary = RawArray('d', npatches * nsamples)

    work = [
        (engine, None, targets, patch, patchidx, los_vectors, odws)
            for patchidx, patch in enumerate(patches)]

    p = parallel.paripool(
        _process_patch_geodetic, work,
        initializer=_init_shared,
        initargs=(shared_gflibrary, None), nprocs=nworkers)

    for _ in p:
        # consume the pool iterator; the workers write their results into
        # the shared array set up via _init_shared
        pass

    # collect and store away
    gfmatrix = num.frombuffer(
        shared_gflibrary).reshape((npatches, nsamples))
    return gfmatrix
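
A minimal usage sketch for the function above; the store path and the
datasets, targets and patches objects are placeholders, assumed to be
prepared as in the other examples below.

# Hypothetical usage sketch -- nothing here is part of the original example.
from pyrocko.gf import LocalEngine

engine = LocalEngine(store_superdirs=['/path/to/gf_stores'])  # assumed path

# datasets: list of heart.GeodeticDataset
# targets: geodetic targets, e.g. from heart.init_geodetic_targets(...)
# patches: list of fault patches, e.g. fault.get_all_patches('geodetic', ...)
gfmatrix = geo_construct_gf_linear_patches(
    engine=engine,
    datasets=datasets,
    targets=targets,
    patches=patches,
    nworkers=4)

# gfmatrix has shape (npatches, nsamples): one row of line-of-sight
# displacements per patch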
Example #2
    def __init__(self, gc, project_dir, event, hypers=False):

        super(GeodeticComposite, self).__init__()

        self.event = event

        logger.debug('Setting up geodetic structure ...\n')
        self.name = 'geodetic'
        self._like_name = 'geo_like'

        geodetic_data_path = os.path.join(project_dir,
                                          bconfig.geodetic_data_name)

        self.datasets = utility.load_objects(geodetic_data_path)

        logger.info('Number of geodetic datasets: %i ' % self.n_t)

        # init geodetic targets
        self.targets = heart.init_geodetic_targets(
            datasets=self.datasets,
            earth_model_name=gc.gf_config.earth_model_name,
            interpolation=gc.interpolation,
            crust_inds=[gc.gf_config.reference_model_idx],
            sample_rate=gc.gf_config.sample_rate)

        # merge geodetic data to calculate residuals on single array
        datasets, los_vectors, odws, self.Bij = heart.concatenate_datasets(
            self.datasets)
        logger.info('Number of geodetic data points: %i ' %
                    self.Bij.ordering.size)

        self.sdata = shared(datasets, name='geodetic_data', borrow=True)
        self.slos_vectors = shared(los_vectors, name='los_vecs', borrow=True)
        self.sodws = shared(odws, name='odws', borrow=True)

        if gc.calc_data_cov:
            logger.warning('Covariance estimation not implemented (yet)!'
                           ' Using imported covariances!')
        else:
            logger.info('No data-covariance estimation! Using imported'
                        ' covariances \n')

        self.weights = []
        for i, data in enumerate(self.datasets):
            if int(data.covariance.data.sum()) == data.ncoords:
                logger.warning('Data covariance is identity matrix!'
                               ' Please double check!!!')

            choli = data.covariance.chol_inverse
            self.weights.append(
                shared(choli, name='geo_weight_%i' % i, borrow=True))
            data.covariance.update_slog_pdet()

        if gc.fit_plane:
            logger.info('Fit residual ramp selected!')
            self._slocx = []
            self._slocy = []
            for j, data in enumerate(self.datasets):
                if isinstance(data, heart.DiffIFG):
                    locy, locx = data.update_local_coords(self.event)
                    self._slocx.append(
                        shared(locx.astype(tconfig.floatX) / km,
                               name='localx_%s' % j,
                               borrow=True))
                    self._slocy.append(
                        shared(locy.astype(tconfig.floatX) / km,
                               name='localy_%s' % j,
                               borrow=True))
                else:
                    logger.debug('Appending placeholder for non-SAR data!')
                    self._slocx.append(None)
                    self._slocy.append(None)

        self.config = gc

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(num.array([1.]), name='geo_llk_%i' % t,
                           borrow=True))
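
A hedged construction sketch for the composite above; the config object,
project directory and event are placeholders, and only the constructor
signature and the resulting attributes come from the code shown.

# Hypothetical instantiation -- gc, project_dir and event are placeholders.
composite = GeodeticComposite(
    gc=geodetic_config,              # geodetic config section providing
                                     # gf_config, interpolation, calc_data_cov,
                                     # fit_plane
    project_dir='/path/to/project',  # must contain bconfig.geodetic_data_name
    event=reference_event,           # event used for local coordinate updates
    hypers=False)

# Attributes set up by __init__ above:
#   composite.datasets  - list of heart.GeodeticDataset
#   composite.targets   - geodetic Green's function targets
#   composite.weights   - shared Cholesky-inverse covariance factors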
Example #3
    def __init__(self, gc, project_dir, events, hypers=False):

        super(GeodeticComposite, self).__init__(events)

        logger.debug('Setting up geodetic structure ...\n')
        self.name = 'geodetic'
        self._like_name = 'geo_like'

        geodetic_data_path = os.path.join(project_dir,
                                          bconfig.geodetic_data_name)

        self.datasets = utility.load_objects(geodetic_data_path)
        logger.info('Number of geodetic datasets: %i ' % self.n_t)

        # init geodetic targets
        self.targets = heart.init_geodetic_targets(
            datasets=self.datasets,
            earth_model_name=gc.gf_config.earth_model_name,
            interpolation=gc.interpolation,
            crust_inds=[gc.gf_config.reference_model_idx],
            sample_rate=gc.gf_config.sample_rate)

        # merge geodetic data to calculate residuals on single array
        datasets, los_vectors, odws, self.Bij = heart.concatenate_datasets(
            self.datasets)
        logger.info('Number of geodetic data points: %i ' %
                    self.Bij.ordering.size)

        self.sdata = shared(datasets, name='geodetic_data', borrow=True)
        self.slos_vectors = shared(los_vectors, name='los_vecs', borrow=True)
        self.sodws = shared(odws, name='odws', borrow=True)

        if gc.calc_data_cov:
            logger.warning('Covariance estimation not implemented (yet)!'
                           ' Using imported covariances!')
        else:
            logger.info('No data-covariance estimation! Using imported'
                        ' covariances \n')

        self.weights = []
        for i, data in enumerate(self.datasets):
            if int(data.covariance.data.sum()) == data.ncoords:
                logger.warning('Data covariance is identity matrix!'
                               ' Please double check!!!')

            choli = data.covariance.chol_inverse
            self.weights.append(
                shared(choli, name='geo_weight_%i' % i, borrow=True))
            data.covariance.update_slog_pdet()

        if gc.corrections_config.has_enabled_corrections:
            correction_configs = gc.corrections_config.iter_corrections()
            logger.info('Initialising corrections ...')
            for data in self.datasets:
                data.setup_corrections(event=self.event,
                                       correction_configs=correction_configs)

        self.config = gc

        if hypers:
            self._llks = []
            for t in range(self.n_t):
                self._llks.append(
                    shared(num.array([1.]), name='geo_llk_%i' % t,
                           borrow=True))
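
The multi-event variant above mainly differs in taking a list of events and
in the optional correction setup; a hedged sketch with placeholder inputs:

# Hypothetical instantiation of the multi-event variant.
composite = GeodeticComposite(
    gc=geodetic_config,
    project_dir='/path/to/project',
    events=[reference_event],        # list of events instead of a single one
    hypers=False)

# If gc.corrections_config.has_enabled_corrections is True, each dataset gets
# its enabled corrections attached via data.setup_corrections(...), as in the
# loop above.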
Example #4
def geo_construct_gf_linear(engine,
                            outdirectory,
                            crust_ind=0,
                            datasets=None,
                            targets=None,
                            fault=None,
                            varnames=[''],
                            force=False,
                            event=None,
                            nworkers=1):
    """
    Create geodetic Green's function matrix for the defined source geometry.

    Parameters
    ----------
    engine : :class:`pyrocko.gf.seismosizer.LocalEngine`
        main path to directory containing the different Green's function stores
    outdirectory : str
        absolute path to the directory where the Green's Function
        libraries are stored
    crust_ind : int
        index of the Green's function store to use
    datasets : list
        of :class:`heart.GeodeticDataset` for which the GFs are calculated
    targets : list
        of geodetic targets as returned by :func:`heart.init_geodetic_targets`
    fault : :class:`FaultGeometry`
        fault object that may comprise several sub-faults, thus forming a
        complex fault geometry
    varnames : list
        of str with the slip-component names that are optimized for
    force : bool
        force overwriting of existing files
    event : :class:`pyrocko.model.Event`
        reference event stored in the GF library configuration
    nworkers : int
        number of CPUs to use for processing
    """

    _, los_vectors, odws, _ = heart.concatenate_datasets(datasets)

    nsamples = odws.size
    npatches = fault.npatches
    logger.info('Using %i workers ...' % nworkers)

    for var in varnames:
        logger.info('For slip component: %s' % var)

        gfl_config = GeodeticGFLibraryConfig(
            component=var,
            dimensions=(npatches, nsamples),
            event=event,
            crust_ind=crust_ind,
            datatype='geodetic',
            reference_sources=fault.get_all_subfaults(datatype='geodetic',
                                                      component=var))
        gfs = GeodeticGFLibrary(config=gfl_config)

        outpath = os.path.join(outdirectory, gfs.filename + '.npz')

        if os.path.exists(outpath) and not force:
            logger.info('Library exists: %s. '
                        'Please use --force to override!' % outpath)

        else:
            if nworkers < 2:
                allocate = True
            else:
                allocate = False

            gfs.setup(npatches, nsamples, allocate=allocate)

            logger.info("Setting up Green's Function Library: %s \n ",
                        gfs.__str__())

            parallel.check_available_memory(gfs.filesize)

            shared_gflibrary = RawArray('d', gfs.size)

            work = [(engine, gfs, targets, patch, patchidx, los_vectors, odws)
                    for patchidx, patch in enumerate(
                        fault.get_all_patches('geodetic', component=var))]

            p = parallel.paripool(_process_patch_geodetic,
                                  work,
                                  initializer=_init_shared,
                                  initargs=(shared_gflibrary, None),
                                  nprocs=nworkers)

            for _ in p:
                # consume the pool iterator; workers write into shared_gflibrary
                pass

            # collect and store away
            gfs._gfmatrix = num.frombuffer(shared_gflibrary).reshape(
                gfs.dimensions)

            logger.info('Storing geodetic linear GF Library ...')

            gfs.save(outdir=outdirectory)
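
A hedged call sketch for the library construction above; engine, datasets,
targets, fault and event are assumed to be prepared as in the other examples,
and the slip-component names are illustrative only.

# Hypothetical call -- objects, paths and component names are placeholders.
geo_construct_gf_linear(
    engine=engine,
    outdirectory='/path/to/project/geodetic/linear_gfs',  # assumed layout
    crust_ind=0,
    datasets=datasets,
    targets=targets,
    fault=fault,                     # FaultGeometry with discretized patches
    varnames=['uparr', 'uperp'],     # slip components, names assumed
    force=False,
    event=reference_event,
    nworkers=4)

# One GeodeticGFLibrary per slip component is written to
# <outdirectory>/<library filename>.npz via gfs.save(outdir=outdirectory).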