Example 1
    def combine(self, input_path='', output_path='', parameters=[]):
        """ Sums individual source contributions. Wrapper over xcombine_sem
            utility.
        """
        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        unix.cd(self.cwd)

        names = self.check_source_names()
        subset = [names[isrc] for isrc in self._source_subset]

        with open('kernel_paths', 'w') as f:
            f.writelines([join(input_path, dir) + '\n' for dir in subset])

        # SAGA component - include contributions from reference gradient
        remainder = list(set(self._source_names) - set(subset))

        with open('kernel_paths', 'a') as f:
            f.writelines(
                [join(PATH.GRAD_AGG, dir) + '\n' for dir in remainder])

        for name in parameters or self.parameters:
            call_solver(
                system.mpiexec(), PATH.SPECFEM_BIN + '/' + 'xcombine_sem ' +
                name + '_kernel' + ' ' + 'kernel_paths' + ' ' + output_path)
Example 2
    def smooth(self, input_path='', output_path='', parameters=[], span=0.):
        """ Smooths kernels by convolving them with a Gaussian.  Wrapper over 
            xsmooth_sem utility.
        """
        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        # apply smoothing operator
        unix.cd(self.cwd)
        for name in parameters or self.parameters:
            print(' smoothing', name)
            call_solver(system.mpiexec(),
                        PATH.SPECFEM_BIN + '/' + 'xsmooth_sem ' + str(span) +
                        ' ' + str(span) + ' ' + name + '_kernel' + ' ' +
                        input_path + '/ ' + output_path + '/ ',
                        output='/dev/null')

        print('')

        # rename output files
        files = glob(output_path + '/*')
        unix.rename('_smooth', '', files)
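
For reference, here is a minimal sketch of the command string that the concatenation in smooth() builds, using made-up values for span, the parameter name, the paths, and PATH.SPECFEM_BIN:

span, name = 5000.0, 'vs'                        # assumed example values
input_path, output_path = 'sum_nosmooth', 'sum'
specfem_bin = '/path/to/specfem/bin'             # stands in for PATH.SPECFEM_BIN

cmd = (specfem_bin + '/' + 'xsmooth_sem ' + str(span) + ' ' + str(span) +
       ' ' + name + '_kernel' + ' ' + input_path + '/ ' + output_path + '/ ')
print(cmd)
# /path/to/specfem/bin/xsmooth_sem 5000.0 5000.0 vs_kernel sum_nosmooth/ sum/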
Example 3
    def check_mesh_properties(self, path=None):
        """ path contains binary files such as:
            proc000000_z.bin, proc000000_x.bin, proc000000_vs.bin
            proc000000_vp.bin, proc000000_rho.bin, proc000001_z.bin,
            proc000001_x.bin, proc000001_vs.bin ...
            These will be read to get the number of processors used, the
            number of GLL points, and the coordinates of those points
        """
        if not path:
            path = PATH.MODEL_INIT
        if not exists(path):
            raise Exception

        # count slices (number of processors used to create the mesh) and grid
        # points
        key = self.parameters[0]
        iproc = 0
        ngll = []
        while True:
            dummy = self.io.read_slice(path, key, iproc)[0]
            ngll += [len(dummy)]
            iproc += 1
            if not exists('%s/proc%06d_%s.bin' % (path, iproc, key)):
                break
        nproc = iproc

        # create coordinate pointers
        coords = Struct()
        for key in ['x', 'y', 'z']:
            # The following binds coords['x'] to the function io.read_slice
            # with the arguments (path, 'x'); nothing is read until the
            # returned partial object is actually called
            coords[key] = partial(self.io.read_slice, self, path, key)

        self._mesh_properties = Struct([['nproc', nproc], ['ngll', ngll],
                                        ['path', path], ['coords', coords]])
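
The comment in check_mesh_properties above describes deferred evaluation through functools.partial. Below is a minimal, self-contained sketch of the same pattern; read_slice and the model path are stand-ins for illustration, not the actual io module:

from functools import partial

def read_slice(path, key, iproc=0):
    # stand-in for io.read_slice: pretend to read one slice of `key`
    print('reading %s from %s (slice %d)' % (key, path, iproc))
    return [0.0, 1.0, 2.0]

coords = {}
for key in ['x', 'y', 'z']:
    # nothing is read here; the call is only bound, not executed
    coords[key] = partial(read_slice, '/path/to/model', key)

# the read happens only when the pointer is invoked
x_values = coords['x'](0)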
Example 4
    def check_mesh_properties(self, path=None):
        if not path:
            path = PATH.MODEL_INIT
        if not exists(path):
            raise Exception

        # count slices and grid points
        key = self.parameters[0]
        iproc = 0
        ngll = []
        while True:
            dummy = self.io.read_slice(path, key, iproc)[0]
            ngll += [len(dummy)]
            iproc += 1
            if not exists('%s/proc%06d_%s.bin' % (path, iproc, key)):
                break
        nproc = iproc

        # create coordinate pointers
        coords = Struct()
        for key in ['x', 'y', 'z']:
            coords[key] = partial(self.io.read_slice, self, path, key)

        self._mesh_properties = Struct([
            ['nproc', nproc],
            ['ngll', ngll],
            ['path', path],
            ['coords', coords]])
Example 5
    def check_mesh_properties(self, path=None):
        if not path:
            path = PATH.MODEL_INIT
        if not exists(path):
            raise Exception

        # count slices and grid points
        key = self.parameters[0]
        iproc = 0
        ngll = []
        while True:
            dummy = self.io.read_slice(path, key, iproc)[0]
            ngll += [len(dummy)]
            iproc += 1
            if not exists('%s/proc%06d_%s.bin' % (path, iproc, key)):
                break
        nproc = iproc

        # create coordinate pointers
        coords = Struct()
        for key in ['x', 'y', 'z']:
            coords[key] = partial(self.io.read_slice, self, path, key)

        self._mesh_properties = Struct([['nproc', nproc], ['ngll', ngll],
                                        ['path', path], ['coords', coords]])
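
Examples 3-5 build the mesh properties as a Struct constructed from key/value pairs and later read back by attribute (e.g. mesh_properties.nproc). The following is a minimal sketch of a dict-with-attribute-access container that would support that usage; it is an assumption about the interface, not necessarily the library's implementation:

class Struct(dict):
    """Dict whose items can also be read and written as attributes."""
    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            raise AttributeError(key)

    def __setattr__(self, key, value):
        self[key] = value

# mirrors the usage above: built from key/value pairs, read as attributes
props = Struct([['nproc', 4], ['ngll', [1000, 1000, 1000, 1000]]])
print(props.nproc, props['ngll'][0])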
Example 6
    def smooth(self, input_path='', output_path='', parameters=[], span=0.):
        """ Smooths kernels by convolving them with a Gaussian.  Wrapper over 
            xsmooth_sem utility.
        """
        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        # apply smoothing operator
        unix.cd(self.cwd)
        for name in parameters or self.parameters:
            print(' smoothing', name)
            call_solver(
                system.mpiexec(),
                PATH.SPECFEM_BIN +'/'+ 'xsmooth_sem '
                + str(span) + ' '
                + str(span) + ' '
                + name + '_kernel' + ' '
                + input_path + '/ '
                + output_path + '/ ',
                output='/dev/null')

        print('')

        # rename output files
        files = glob(output_path+'/*')
        unix.rename('_smooth', '', files)
Example 7
    def combine(self, input_path='', output_path='', parameters=[]):
        """ Sums individual source contributions. Wrapper over xcombine_sem
            utility.
        """
        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        unix.cd(self.cwd)

        names = self.check_source_names()
        subset = [names[isrc] for isrc in self._source_subset]

        with open('kernel_paths', 'w') as f:
            f.writelines([join(input_path, dir)+'\n' for dir in subset])

        # SAGA component - include contributions from reference gradient
        remainder = list(set(self._source_names) - set(subset))

        with open('kernel_paths', 'a') as f:
            f.writelines([join(PATH.GRAD_AGG, dir)+'\n' for dir in remainder])

        for name in parameters or self.parameters:
            call_solver(
                system.mpiexec(),
                PATH.SPECFEM_BIN +'/'+ 'xcombine_sem '
                + name + '_kernel' + ' '
                + 'kernel_paths' + ' '
                + output_path)
Example 8
    def smooth(self, input_path='', output_path='', parameters=[], span=0.):
        """ Smooths kernels by convolving them with a Gaussian.  Wrapper over 
            xsmooth_sem utility.
        """
        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        # apply smoothing operator
        unix.cd(self.cwd)

        files = []
        files += glob(PATH.MODEL_INIT + '/proc??????_x.bin')
        files += glob(PATH.MODEL_INIT + '/proc??????_z.bin')
        files += glob(PATH.MODEL_INIT + '/proc??????_NSPEC_ibool.bin')
        files += glob(PATH.MODEL_INIT + '/proc??????_jacobian.bin')
        for file in files:
            unix.cp(file, input_path)

        for name in parameters or self.parameters:
            print(' smoothing', name)
            call_solver(system.mpiexec(),
                        PATH.SPECFEM_BIN + '/' + 'xsmooth_sem ' + str(span) +
                        ' ' + str(span) + ' ' + name + '_kernel' + ' ' +
                        input_path + '/ ' + output_path + '/ F',
                        output=output_path + '/smooth_' + name + '.log')

        print('')

        # rename output files
        files = glob(output_path + '/*')
        unix.rename('_smooth', '', files)
Example 9
 def test_exists(self):
     existent = os.listdir(os.curdir)
     non_existent = [str(uuid.uuid4().hex.upper()[0:6])
                     for i in range(10)]
     for name in existent:
         self.assertTrue(tools.exists(name))
     for name in non_existent:
         self.assertFalse(tools.exists(name))
Example 10
 def test_exists(self):
     existent = os.listdir(os.curdir)
     non_existent = [str(uuid.uuid4().hex.upper()[0:6])
                     for i in range(10)]
     for name in existent:
         self.assertTrue(tools.exists(name))
     for name in non_existent:
         self.assertFalse(tools.exists(name))
Example 11
 def test_exists(self):
     existent = os.listdir(os.curdir)
     non_existent = [str(uuid.uuid4().hex.upper()[0:6])
                     for i in range(10)]
     not_names = [None, 0, 1, False, True]
     for name in existent:
         self.assertTrue(tools.exists(name))
     for name in non_existent:
         self.assertFalse(tools.exists(name))
     for name in not_names:
         self.assertFalse(tools.exists(name))
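
The tests in Examples 9-11 assume that tools.exists returns False for None, booleans, and integers rather than raising. Below is a minimal sketch of a helper with that behaviour, as an illustration of the assumed contract rather than the actual seisflows.tools implementation:

import os

def exists(name):
    # only non-empty strings that point to an existing path count
    if not isinstance(name, str) or not name:
        return False
    return os.path.exists(name)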
Example 12
    def check(self):
        """ Checks parameters and paths
        """
        # signify whether this is a data-synthetic or synthetic-synthetic case
        if 'CASE' not in PAR:
            raise ParameterError(PAR, 'CASE')

        # scratch paths
        if 'SCRATCH' not in PATH:
            raise ParameterError(PATH, 'SCRATCH')

        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', None)

        if 'FUNC' not in PATH:
            setattr(PATH, 'FUNC', os.path.join(PATH.SCRATCH, 'evalfunc'))

        if 'GRAD' not in PATH:
            setattr(PATH, 'GRAD', os.path.join(PATH.SCRATCH, 'evalgrad'))

        if 'OPTIMIZE' not in PATH:
            setattr(PATH, 'OPTIMIZE', os.path.join(PATH.SCRATCH, 'optimize'))

        # input paths
        if 'MODEL_INIT' not in PATH:
            raise ParameterError(PATH, 'MODEL_INIT')

        # output paths
        if 'OUTPUT' not in PATH:
            raise ParameterError(PATH, 'OUTPUT')

        # pyatoa specific paths
        # Config file should be present here.
        if 'PYATOA_IO' not in PATH:
            raise ParameterError(PATH, 'PYATOA_IO')

        # make sure the Pyatoa plugin run script is present
        if 'PYATOA_RUN' not in PATH:
            raise ParameterError(PATH, 'PYATOA_RUN')

        # make sure a Python3 binary is available
        if 'PYTHON3' not in PATH:
            raise ParameterError(PATH, 'PYTHON3')

        # check that there is a given starting model
        if not exists(PATH.MODEL_INIT):
            raise Exception()

        # synthetic-synthetic examples require a true model to create the 'data'
        if PAR.CASE == 'Synthetic' and not exists(PATH.MODEL_TRUE):
            raise Exception()
Example 13
    def process_kernels(self, path, parameters):
        """ 
        Sums kernels from individual sources, with optional smoothing

        :input path: directory containing sensitivity kernels
        :input parameters: list of material parameters e.g. ['vp','vs']
        """
        if not exists(path):
            raise Exception

        if PAR.SMOOTH > 0:
            solver.combine(
                   input_path=path,
                   output_path=path+'/'+'sum_nosmooth',
                   parameters=parameters)

            solver.smooth(
                   input_path=path+'/'+'sum_nosmooth',
                   output_path=path+'/'+'sum',
                   parameters=parameters,
                   span=PAR.SMOOTH)
        else:
            solver.combine(
                   input_path=path,
                   output_path=path+'/'+'sum',
                   parameters=parameters)
Example 14
    def write_gradient(self, path, iterf=0):
        """ Writes gradient of objective function

          Combines and processes contributions to the gradient from individual
          sources

          INPUT
              PATH - directory containing output of adjoint simulation
              iterf - current iteration number
        """
        if not exists(path):
            raise Exception

        system.run('postprocess', 'process_kernels',
                 hosts='head',
                 path=path+'/kernels', iter=iterf,
                 parameters=solver.parameters)

        gradient = solver.merge(solver.load(
                 path +'/'+ 'kernels/sum',
                 suffix='_kernel'))

        self.save(gradient, path)

        if PAR.KERNELTYPE=='Relative':
            # convert from relative to absolute perturbations
            gradient *= solver.merge(solver.load(path +'/'+ 'model'))
            self.save(gradient, path, backup='relative')
Example 15
    def check(self):
        """ Checks parameters and paths
        """
        # check parameters
        if 'CLIP' not in PAR:
            setattr(PAR, 'CLIP', 0.)

        if 'SMOOTH' not in PAR:
            setattr(PAR, 'SMOOTH', 0.)

        if 'KERNELTYPE' not in PAR:
            setattr(PAR, 'KERNELTYPE', 'Relative')

        if 'PRECOND' not in PAR:
            setattr(PAR, 'PRECOND', False)

        # check paths
        if 'MASK' not in PATH:
            setattr(PATH, 'MASK', None)

        if 'PRECOND' not in PATH:
            setattr(PATH, 'PRECOND', None)

        if PATH.MASK:
            assert exists(PATH.MASK)
Example 16
    def process_kernels(self, path, parameters):
        """ 
        Sums kernels from individual sources, with optional smoothing

        :input path: directory containing sensitivity kernels
        :input parameters: list of material parameters e.g. ['vp','vs']
        """
        if not exists(path):
            raise Exception

        if PAR.SMOOTH > 0:
            solver.combine(
                   input_path=path,
                   output_path=path+'/'+'sum_nosmooth',
                   parameters=parameters)

            solver.smooth(
                   input_path=path+'/'+'sum_nosmooth',
                   output_path=path+'/'+'sum',
                   parameters=parameters,
                   span=PAR.SMOOTH)
        else:
            solver.combine(
                   input_path=path,
                   output_path=path+'/'+'sum',
                   parameters=parameters)
Example 17
    def generate_mesh(self,
                      model_path=None,
                      model_name=None,
                      model_type='gll'):
        """ Performs meshing and database generation
        """
        assert (model_name)
        assert (model_type)

        self.initialize_solver_directories()
        unix.cd(self.cwd)

        if model_type == 'gll':
            assert (exists(model_path))
            self.check_mesh_properties(model_path)

            unix.cp(glob(model_path + '/' + '*'), self.model_databases)

            call_solver(system.mpiexec(), 'bin/xmeshfem3D')

            if self.taskid == 0:
                self.export_model(PATH.OUTPUT + '/' + model_name)

        else:
            raise NotImplementedError
Example 18
    def check(self):
        """ Checks parameters and paths
        """
        # check parameters
        if 'CLIP' not in PAR:
            setattr(PAR, 'CLIP', 0.)

        if 'SMOOTH' not in PAR:
            setattr(PAR, 'SMOOTH', 0.)

        if 'LOGARITHMIC' not in PAR:
            setattr(PAR, 'LOGARITHMIC', True)

        if 'PRECOND' not in PAR:
            setattr(PAR, 'PRECOND', False)

        # check paths
        if 'MASK' not in PATH:
            setattr(PATH, 'MASK', None)

        if 'PRECOND' not in PATH:
            setattr(PATH, 'PRECOND', None)

        if PATH.MASK:
            assert exists(PATH.MASK)
Example 19
    def clip(self, path='', parameters=[], minval=-np.inf, maxval=np.inf):
        """ Clips kernels by convolving them with a Gaussian.  Wrapper over 
            xclip_sem utility.
        """
        assert exists(path)
        assert len(parameters) > 0

        unix.cd(self.getpath)
        for name in parameters or self.parameters:
            call_solver(
                system.mpiexec(),
                PATH.SPECFEM_BIN +'/'+ 'xclip_sem '
                + str(minval) + ' '
                + str(maxval) + ' '
                + name + '_kernel' + ' '
                + path + '/ '
                + path + '/ ')

        # move input files
        src = path
        dst = path + '_noclip'
        unix.mkdir(dst)
        for name in parameters or self.parameters:
            unix.mv(glob(src+'/*'+name+'.bin'), dst)

        # rename output files
        unix.rename('_clip', '', glob(src+'/*'))
Example 20
    def write_gradient(self, path):
        """ Processes and combines contributions to the gradient from
          individual sources
        """
        if not exists(path):
            raise Exception

        system.run_single('postprocess', 'process_kernels',
                 path=path+'/kernels',
                 parameters=solver.parameters)

        g = solver.merge(solver.load(
                 path +'/'+ 'kernels/sum',
                 suffix='_kernel'))

        self.save(g, path)

        if PAR.KERNELTYPE=='Relative':
            # convert from relative to absolute perturbations
            g *= solver.merge(solver.load(path +'/'+ 'model'))
            self.save(g, path, backup='relative')

        if PATH.MASK:
            # apply mask
            g *= solver.merge(solver.load(PATH.MASK))
            self.save(g, path, backup='nomask')
Example 21
    def process_kernels(self, path='', parameters=[]):
        """ Combines contributions from individual sources and performs any 
         required processing steps

          INPUT
              PATH - directory containing sensitivity kernels
              PARAMETERS - list of material parameters e.g. ['vp','vs']
        """
        if not exists(path):
            raise Exception

        if not parameters:
            parameters = solver.parameters

        if PAR.SMOOTH > 0:
            solver.combine(
                   input_path=path,
                   output_path=path+'/'+'sum_nosmooth',
                   parameters=parameters)

            solver.smooth(
                   input_path=path+'/'+'sum_nosmooth',
                   output_path=path+'/'+'sum',
                   parameters=parameters,
                   span=PAR.SMOOTH)
        else:
            solver.combine(
                   input_path=path,
                   output_path=path+'/'+'sum',
                   parameters=parameters)
Example 22
    def check(self):
        """ Checks parameters and paths
        """
        # check paths
        if 'SCRATCH' not in PATH:
            raise ParameterError(PATH, 'SCRATCH')

        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', None)
        # Jiang add
        if 'FUNC' not in PATH:
            setattr(PATH, 'FUNC', join(PATH.SCRATCH, 'evalfunc'))

        if 'OUTPUT' not in PATH:
            raise ParameterError(PATH, 'OUTPUT')

        # check input
        if 'DATA' not in PATH:
            setattr(PATH, 'DATA', None)

        if not exists(PATH.DATA):
            assert 'MODEL_TRUE' in PATH

        if 'MODEL_INIT' not in PATH:
            raise ParameterError(PATH, 'MODEL_INIT')

        # check output
        if 'SAVEGRADIENT' not in PAR:
            setattr(PAR, 'SAVEGRADIENT', 1)

        if 'SAVEKERNELS' not in PAR:
            setattr(PAR, 'SAVEKERNELS', 0)

        if 'SAVETRACES' not in PAR:
            setattr(PAR, 'SAVETRACES', 0)
Example 23
    def check(self):
        """ Checks parameters and paths
        """
        # check parameters
        if 'CLIP' not in PAR:
            setattr(PAR, 'CLIP', 0.)

        if 'SMOOTH' not in PAR:
            setattr(PAR, 'SMOOTH', 0.)

        if 'KERNELTYPE' not in PAR:
            setattr(PAR, 'KERNELTYPE', 'Relative')

        if 'PRECOND' not in PAR:
            setattr(PAR, 'PRECOND', False)

        # check paths
        if 'MASK' not in PATH:
            setattr(PATH, 'MASK', None)

        if 'PRECOND' not in PATH:
            setattr(PATH, 'PRECOND', None)

        if PATH.MASK:
            assert exists(PATH.MASK)
Example 24
    def combine_vol_data(self, output_path='', quantity=''):
        """
        Note: this routine does not currently work.
        Calls SPECFEM's executable xcombine_vol_data_vtk on kernels or model
        files.
        """
        if not exists(output_path):
            unix.mkdir(output_path)

        # This should probably be moved to its own function
        # def import_kernels()
        unix.cd(self.cwd)
        src = glob(join(PATH.GRAD, self.source_name, "*{}*".format(quantity)))
        dst = join(self.cwd, "kernels")
        unix.mkdir(dst)
        unix.ln(src=src, dst=dst)

        solver_call = " ".join([
            PATH.SPECFEM_BIN + '/' + 'xcombine_vol_data_vtk',
            0,  # NPROC_START
            PAR.NPROC,  # NPROC_END
            quantity,  # QUANTITY
            dst,  # DIR_IN
            dst,  # DIR_OUT, we will rename the files first
            0  # GPU ACCEL
        ])
        call_solver(system_mpiexec(), solver_call)

        unix.rm(dst)
        print('')
Example 25
    def generate_mesh(self,
                      model_path=None,
                      model_name=None,
                      model_type='gll'):
        """ Performs meshing and database generation
        """
        print('specfem3d_nz.generate mesh')
        assert (model_name)
        assert (model_type)

        unix.cd(self.cwd)

        if model_type in ['gll']:
            par = getpar('MODEL').strip()
            if par != 'gll':
                if self.taskid == 0:
                    print('WARNING: Unexpected Par_file setting:')
                    print('MODEL =', par)

            assert (exists(model_path))
            self.check_mesh_properties(model_path)

            src = glob(model_path + '/' + '*')
            dst = self.model_databases
            unix.cp(src, dst)

            call_solver(system.mpiexec(), 'bin/xgenerate_databases')

            if self.taskid == 0:
                self.export_model(PATH.OUTPUT + '/' + model_name)

        else:
            raise NotImplementedError
Example 26
    def check_source_names(self):
        """ Determines names of sources by applying wildcard rule to user-
            supplied input files
            If source_prefix is 'SOURCE' and that in specfem DATA folder are
            the files SOURCE_00001, SOURCE_00002, SOURCE_00003, ...
            Then this will build the list names = ['00001','00002','00003',...]
            If, for ex, taskid is 1 the function returns ['00001', '00002']
        """
        path = PATH.SPECFEM_DATA
        if not exists(path):
            raise Exception

        # apply wildcard rule
        wildcard = self.source_prefix+'_*'
        globstar = sorted(glob(path + '/' + wildcard))
        if not globstar:
            print(msg.SourceError_SPECFEM % (path, wildcard))
            sys.exit(-1)

        # If source_prefix is 'SOURCE' and that in specfem DATA folder are the
        # files SOURCE_00001, SOURCE_00002, SOURCE_00003, ...
        # Then this will build the list names = ['00001', '00002', '00003' ...]
        # If taskid is 1 the function returns ['00001', '00002']
        names = []
        for path in globstar:
            names += [basename(path).split('_')[-1]]
        self._source_names = names[:PAR.NTASK]
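
A short illustration of the wildcard rule described in the docstring above, assuming a DATA directory containing SOURCE_00001 through SOURCE_00003 and NTASK = 2 (all values made up):

from glob import glob
from os.path import basename

globstar = sorted(glob('DATA/SOURCE_*'))        # hypothetical DATA directory
names = [basename(p).split('_')[-1] for p in globstar]
# names == ['00001', '00002', '00003']

NTASK = 2                                       # stands in for PAR.NTASK
source_names = names[:NTASK]                    # ['00001', '00002']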
Example 27
    def generate_mesh(self,
                      model_path=None,
                      model_name=None,
                      model_type='gll'):
        """ Performs meshing and database generation
        """
        # Assert that the model name and path are not empty
        assert (model_name)
        assert (model_type)

        self.initialize_solver_directories()
        unix.cd(self.cwd)

        assert (exists(model_path))  # Check that the path exists
        # Fill _mesh_properties which contain the number of integration points,
        # the number of procs used and the coordinates of the points
        self.check_mesh_properties(model_path)

        # Copy the model files (ex: proc000023_vp.bin ...) into DATA
        src = glob(join(model_path, '*'))
        dst = join(self.cwd, 'DATA')
        unix.cp(src, dst)

        # Export the model into output folder
        if self.taskid == 0:
            self.export_model(PATH.OUTPUT + '/' + model_name)
Example 28
    def write_residuals(self, path, syn, obs):
        """ Computes residuals from observations and synthetics

          INPUT
            PATH - location residuals will be written
            SYN - obspy Stream object containing synthetic data
            OBS - obspy Stream object containing observed data
        """
        nt, dt, _ = self.get_time_scheme(syn)
        nn, _ = self.get_network_size(syn)

        residuals = []
        # TODO freq_mask = np.loadtxt('/data1/etienneb/freq_mask.txt')
        ft_obs_se = self.load('ft_obs_se')
        freq_mask = self.load('freq_mask_se')

        for ii in range(nn):
            residuals.append(
                self.misfit(syn[ii].data, nt, dt, ft_obs_se[:, ii],
                            freq_mask[:, ii]))

        filename = path + '/' + 'residuals'
        if exists(filename):
            residuals.extend(list(np.loadtxt(filename)))

        np.savetxt(filename, residuals)
Example 29
    def check(self):
        """ Checks parameters and paths
        """

        # check paths
        if 'SCRATCH' not in PATH:
            raise ParameterError(PATH, 'SCRATCH')

        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', None)

        if 'OUTPUT' not in PATH:
            raise ParameterError(PATH, 'OUTPUT')

        if 'SOLVER' not in PATH:
            raise ParameterError(PATH, 'SOLVER')

        # check input
        if 'DATA' not in PATH:
            setattr(PATH, 'DATA', None)

        if not exists(PATH.DATA):
            assert 'MODEL_TRUE' in PATH

        if 'MODEL_INIT' not in PATH:
            raise ParameterError(PATH, 'MODEL_INIT')


        # assertions
        if PAR.NSRC != 1:
            raise ParameterError(PAR, 'NSRC')
Example 30
    def check(self):
        """ Checks parameters and paths
        """

        # check paths
        if 'SCRATCH' not in PATH:
            raise ParameterError(PATH, 'SCRATCH')

        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', None)

        if 'OUTPUT' not in PATH:
            raise ParameterError(PATH, 'OUTPUT')

        if 'SOLVER' not in PATH:
            raise ParameterError(PATH, 'SOLVER')

        # check input
        if 'DATA' not in PATH:
            setattr(PATH, 'DATA', None)

        if not exists(PATH.DATA):
            assert 'MODEL_TRUE' in PATH

        if 'MODEL_INIT' not in PATH:
            raise ParameterError(PATH, 'MODEL_INIT')

        # assertions
        if PAR.NSRC != 1:
            raise ParameterError(PAR, 'NSRC')
Example 31
    def check(self):
        """ Checks parameters and paths
        """
        # check paths
        if 'SCRATCH' not in PATH:
            raise ParameterError(PATH, 'SCRATCH')

        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', None)

        if 'OUTPUT' not in PATH:
            raise ParameterError(PATH, 'OUTPUT')

        # check input
        if 'DATA' not in PATH:
            setattr(PATH, 'DATA', None)

        if not exists(PATH.DATA):
            assert 'MODEL_TRUE' in PATH

        if 'MODEL_INIT' not in PATH:
            raise ParameterError(PATH, 'MODEL_INIT')

        # check output
        if 'SAVEGRADIENT' not in PAR:
            setattr(PAR, 'SAVEGRADIENT', 1)

        if 'SAVEKERNELS' not in PAR:
            setattr(PAR, 'SAVEKERNELS', 0)

        if 'SAVETRACES' not in PAR:
            setattr(PAR, 'SAVETRACES', 0)
Example 32
    def write_gradient(self, path):
        """ Reads kernels and writes gradient of objective function
        """
        if not exists(path):
            raise Exception()

        system.run('postprocess', 'process_kernels',
                 hosts='head',
                 path=path,
                 parameters=solver.parameters)

        g = solver.merge(solver.load(
                 path +'/'+ 'kernels/sum',
                 suffix='_kernel',
                 verbose=True))

        if PAR.LOGARITHMIC:
            # convert from logarithmic to absolute perturbations
            g *= solver.merge(solver.load(path +'/'+ 'model'))
        self.save(path, g)

        if PATH.MASK:
            # apply mask
            g *= solver.merge(solver.load(PATH.MASK))
            self.save(path, g, backup='nomask')
Example 33
    def generate_mesh(self, model_path=None, model_name=None, model_type='gll'):
        """ Performs meshing and database generation
        """
        assert(model_name)
        assert(model_type)

        self.initialize_solver_directories()
        unix.cd(self.cwd)

        if model_type in ['gll']:
            par = getpar('MODEL').strip()
            if par != 'gll':
                if self.taskid == 0:
                    print('WARNING: Unexpected Par_file setting:')
                    print('MODEL =', par)
            
            assert(exists(model_path))
            self.check_mesh_properties(model_path)

            src = glob(model_path +'/'+ '*')
            dst = self.model_databases
            unix.cp(src, dst)

            call_solver(system.mpiexec(), 'bin/xmeshfem3D')
            call_solver(system.mpiexec(), 'bin/xgenerate_databases')

            if self.taskid == 0:
                self.export_model(PATH.OUTPUT +'/'+ model_name)

        else:
            raise NotImplementedError
Example 34
    def process_kernels(self, path, parameters, span):
        assert exists(path)
        assert len(parameters) > 0

        # take absolute value
        parts = solver.load(path +'/'+ 'kernels/sum', suffix='_kernel')
        for key in parameters:
            parts[key] = np.abs(parts[key])

        self._save(path, parts)


        # smooth
        system.run('solver', 'smooth',
                   hosts='head',
                   path=path +'/'+ 'kernels/absval',
                   parameters=parameters,
                   span=span)

        # normalize
        parts = solver.load(path +'/'+ 'kernels/absval', suffix='_kernel')
        for key in parameters:
            parts[key] = np.mean(parts[key])/parts[key]

        self._save(path, parts)
Example 35
    def smooth(self, path='', parameters=[], span=0.):
        """ Smooths kernels by convolving them with a Gaussian.  Wrapper over 
            xsmooth_sem utility.
        """
        assert exists(path)
        assert len(parameters) > 0

        # apply smoothing operator
        unix.cd(self.getpath)
        for name in parameters or self.parameters:
            print(' smoothing', name)
            call_solver(
                system.mpiexec(),
                PATH.SPECFEM_BIN +'/'+ 'xsmooth_sem '
                + str(span) + ' '
                + str(span) + ' '
                + name + '_kernel' + ' '
                + path + '/ '
                + path + '/ ',
                output=self.getpath+'/'+'OUTPUT_FILES/output_smooth_sem.txt')

        print('')

        # move input files
        src = path
        dst = path + '_nosmooth'
        unix.mkdir(dst)
        for name in parameters or self.parameters:
            unix.mv(glob(src+'/*'+name+'_kernel.bin'), dst)

        # rename output files
        unix.rename('_smooth', '', glob(src+'/*'))
Example 36
    def write_gradient(self, path):
        """
        Combines contributions from individual sources and material parameters
        to get the gradient, and optionally applies user-supplied scaling

        :input path: directory from which kernels are read and to which
                     gradient is written
        """
        if not exists(path):
            raise Exception

        # because processing operations can be quite expensive, they must be
        # run through the HPC system interface; since processing does not
        # involve embarrassingly parallel tasks, we use system.run_single
        # instead of system.run
        system.run_single('postprocess', 'process_kernels',
                 path=path+'/kernels',
                 parameters=solver.parameters)

        #print 'parameters', solver.parameters

        gradient = solver.load(
            path+'/'+'kernels/sum',
            parameters=solver.parameters,
            suffix='_kernel')

        # merge into a single vector
        gradient = solver.merge(gradient)

        # convert to absolute perturbations, log dm --> dm
        # see Eq.13 Tromp et al 2005
        gradient *= solver.merge(solver.load(path +'/'+ 'model'))

        if PATH.MASK:
            # to scale the gradient, users can supply "masks" in exactly
            # the same file format used for models and gradients; each value of
            # the gradient is scaled by the corresponding value of the mask in a
            # point-wise fashion
            mask = solver.merge(solver.load(PATH.MASK))

            # while both masking and preconditioning involve scaling the
            # gradient, they are fundamentally different operations:
            # masking is ad hoc, preconditioning is a change of variables;
            # see Modrak & Tromp 2016 GJI
            solver.save(solver.split(gradient),
                        path +'/'+ 'gradient_nomask',
                        parameters=solver.parameters,
                        suffix='_kernel')

            solver.save(solver.split(gradient*mask),
                        path +'/'+ 'gradient',
                        parameters=solver.parameters,
                        suffix='_kernel')

        else:
            solver.save(solver.split(gradient),
                        path +'/'+ 'gradient',
                        parameters=solver.parameters,
                        suffix='_kernel')
Example 37
    def process_kernels(self, path, parameters):
        """ Processes kernels in accordance with parameter settings
        """
        fullpath = path + '/' + 'kernels'
        assert exists(path)

        if exists(fullpath + '/' + 'sum'):
            unix.mv(fullpath + '/' + 'sum', fullpath + '/' + 'sum_nofix')

        # mask sources and receivers
        system.run('postprocess', 'fix_near_field', hosts='all', path=fullpath)

        system.run('solver',
                   'combine',
                   hosts='head',
                   path=fullpath,
                   parameters=parameters)
Example 38
    def check(self):
        if 'COORDS' not in PATH:
            raise ParameterError

        if not exists(PATH.COORDS):
            raise ParameterError

        if not hasattr(PAR, 'UNITS'):
            setattr(PAR, 'UNITS', 'lonlat')
Example 39
    def combine(self, input_path='', output_path='', parameters=[]):
        """ Sums individual source contributions. Wrapper over xcombine_sem
            utility.
        """
        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        unix.cd(self.cwd)
        with open('kernel_paths', 'w') as file:
            file.writelines(
                [join(input_path, name + '\n') for name in self.source_names])

        for name in parameters or self.parameters:
            call_solver(
                system.mpiexec(), PATH.SPECFEM_BIN + '/' + 'xcombine_sem ' +
                name + '_kernel' + ' ' + 'kernel_paths' + ' ' + output_path)
Example 40
def smooth_legacy(input_path='', output_path='', parameters=[], span=0.):
    solver = sys.modules['seisflows_solver']
    PATH = sys.modules['seisflows_paths']

    if not exists(input_path):
        raise Exception

    if not exists(output_path):
        unix.mkdir(output_path)

    if solver.mesh_properties.nproc != 1:
        raise NotImplementedError

    # initialize arrays
    kernels = {}
    for key in parameters or solver.parameters:
        kernels[key] = []

    coords = {}
    for key in ['x', 'z']:
        coords[key] = []

    # read kernels
    for key in parameters or solver.parameters:
        kernels[key] += solver.io.read_slice(input_path, key + '_kernel', 0)

    if not span:
        return kernels

    # read coordinates
    for key in ['x', 'z']:
        coords[key] += solver.io.read_slice(PATH.MODEL_INIT, key, 0)

    mesh = array.stack(coords['x'][0], coords['z'][0])

    # apply smoother
    for key in parameters or solver.parameters:
        kernels[key] = [array.meshsmooth(kernels[key][0], mesh, span)]

    # write smooth kernels
    for key in parameters or solver.parameters:
        solver.io.write_slice(kernels[key][0], output_path, key + '_kernel', 0)
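
The smoothing itself is delegated to array.meshsmooth, which is not shown here. As a rough illustration of what such a smoother does, the following hypothetical stand-in computes a Gaussian-weighted average over scattered mesh points (O(npts^2), illustration only, not the library routine):

import numpy as np

def gaussian_meshsmooth(values, mesh, span):
    # Gaussian-weighted average of `values` over scattered points `mesh`
    # (shape npts x 2) with characteristic width `span`
    smoothed = np.empty_like(values)
    for i, point in enumerate(mesh):
        d2 = np.sum((mesh - point)**2, axis=1)
        weights = np.exp(-0.5 * d2 / span**2)
        smoothed[i] = np.sum(weights * values) / np.sum(weights)
    return smoothed

# toy usage on a random 2-D point cloud
mesh = np.random.rand(100, 2)
values = np.random.rand(100)
print(gaussian_meshsmooth(values, mesh, span=0.1))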
Example 41
def smooth_legacy(input_path='', output_path='', parameters=[], span=0.):
        solver = sys.modules['seisflows_solver']
        PATH = sys.modules['seisflows_paths']

        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        if solver.mesh_properties.nproc!=1:
            raise NotImplementedError

        # initialize arrays
        kernels = {}
        for key in parameters or solver.parameters:
            kernels[key] = []

        coords = {}
        for key in ['x', 'z']:
            coords[key] = []

        # read kernels
        for key in parameters or solver.parameters:
            kernels[key] += solver.io.read_slice(input_path, key+'_kernel', 0)

        if not span:
            return kernels

        # read coordinates
        for key in ['x', 'z']:
            coords[key] += solver.io.read_slice(PATH.MODEL_INIT, key, 0)

        mesh = array.stack(coords['x'][0], coords['z'][0])

        # apply smoother
        for key in parameters or solver.parameters:
            kernels[key] = [array.meshsmooth(kernels[key][0], mesh, span)]

        # write smooth kernels
        for key in parameters or solver.parameters:
            solver.io.write_slice(kernels[key][0], output_path, key+'_kernel', 0)
Example 42
    def check(self):
        """ Checks parameters and paths
        """
        if 'SMOOTH' not in PAR:
            setattr(PAR, 'SMOOTH', 0.)

        if 'MASK' not in PATH:
            setattr(PATH, 'MASK', None)

        if PATH.MASK:
            assert exists(PATH.MASK)
Example 43
    def check(self):
        """ Checks parameters and paths
        """
        if 'SMOOTH' not in PAR:
            setattr(PAR, 'SMOOTH', 0.)

        if 'MASK' not in PATH:
            setattr(PATH, 'MASK', None)

        if PATH.MASK:
            assert exists(PATH.MASK)
Example 44
    def write_gradient(self, path):
        """
        Combines contributions from individual sources and material parameters
        to get the gradient, and optionally applies user-supplied scaling

        :input path: directory from which kernels are read and to which
                     gradient is written
        """
        if not exists(path):
            raise Exception

        # because processing operations can be quite expensive, they must be
        # run through the HPC system interface; since processing does not
        # involve embarrassingly parallel tasks, we use system.run_single
        # instead of system.run
        system.run_single('postprocess', 'process_kernels',
                 path=path+'/kernels',
                 parameters=solver.parameters)

        gradient = solver.load(
            path+'/'+'kernels/sum', suffix='_kernel')

        # merge into a single vector
        gradient = solver.merge(gradient)

        # convert to absolute perturbations, log dm --> dm
        # see Eq.13 Tromp et al 2005
        gradient *= solver.merge(solver.load(path +'/'+ 'model'))

        if PATH.MASK:
            # to scale the gradient, users can supply "masks" that exactly
            # mimic the file format in which models are stored
            mask = solver.merge(solver.load(PATH.MASK))

            # while both masking and preconditioning involve scaling the
            # gradient, they are fundamentally different operations:
            # masking is ad hoc, preconditioning is a change of variables;
            # see Modrak & Tromp 2016 GJI
            solver.save(solver.split(gradient),
                        path +'/'+ 'gradient_nomask',
                        parameters=solver.parameters,
                        suffix='_kernel')

            solver.save(solver.split(gradient*mask),
                        path +'/'+ 'gradient',
                        parameters=solver.parameters,
                        suffix='_kernel')

        else:
            solver.save(solver.split(gradient),
                        path +'/'+ 'gradient',
                        parameters=solver.parameters,
                        suffix='_kernel')
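
The masking step above is a point-wise rescaling of the merged gradient vector by the merged mask. A tiny numpy illustration with made-up numbers:

import numpy as np

gradient = np.array([0.3, -1.2, 0.8, 0.1])   # merged gradient (made up)
mask = np.array([0.0, 0.5, 1.0, 1.0])        # e.g. zero near the sources

masked = gradient * mask                     # what gets saved as 'gradient'
# the unscaled copy is kept alongside it as 'gradient_nomask'
print(masked)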
Example 45
    def combine(self, input_path='', output_path='', parameters=[]):
        """ Sums individual source contributions. Wrapper over xcombine_sem
            utility.
        """
        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        unix.cd(self.cwd)
        with open('kernel_paths', 'w') as file:
            file.writelines([join(input_path, name+'\n')
                for name in self.source_names])

        for name in parameters or self.parameters:
            call_solver(
                system.mpiexec(),
                PATH.SPECFEM_BIN +'/'+ 'xcombine_sem '
                + name + '_kernel' + ' '
                + 'kernel_paths' + ' '
                + output_path)
Example 46
    def __init__(self, path='.', load=loadnpy, save=savenpy, memory=5,
                 thresh=0., maxiter=np.inf, precond=None):
        assert exists(path)
        unix.cd(path)
        unix.mkdir('LBFGS')

        self.path = path
        self.load = load
        self.save = save
        self.thresh = thresh
        self.maxiter = maxiter
        self.precond = precond
        self.memory = memory

        self.iter = 0
        self.memory_used = 0
Example 47
    def initialize_adjoint_traces(self):
        super(specfem2d, self).initialize_adjoint_traces()

        # work around SPECFEM2D's use of different name conventions for
        # regular traces and 'adjoint' traces
        if PAR.FORMAT in ['SU', 'su']:
            files = glob('traces/adj/*.su')
            unix.rename('.su', '.su.adj', files)

        # work around SPECFEM2D's requirement that all components exist,
        # even ones not in use
        if PAR.FORMAT in ['SU', 'su']:
            unix.cd(self.cwd +'/'+ 'traces/adj')
            for channel in ['x', 'y', 'z', 'p']:
                src = 'U%s_file_single.su.adj' % PAR.CHANNELS[0]
                dst = 'U%s_file_single.su.adj' % channel
                if not exists(dst):
                    unix.cp(src, dst)
Example 48
    def generate_mesh(self, model_path=None, model_name=None, model_type='gll'):
        """ Performs meshing and database generation
        """
        assert(model_name)
        assert(model_type)

        self.initialize_solver_directories()
        unix.cd(self.cwd)

        assert(exists(model_path))
        self.check_mesh_properties(model_path)

        src = glob(join(model_path, '*'))
        dst = join(self.cwd, 'DATA')
        unix.cp(src, dst)

        if self.taskid == 0:
            self.export_model(PATH.OUTPUT +'/'+ model_name)
Example 49
    def initialize_adjoint_traces(self):
        super(specfem3d, self).initialize_adjoint_traces()

        # workaround for SPECFEM3D's use of different name conventions for
        # regular traces and 'adjoint' traces
        if PAR.FORMAT in ['SU', 'su']:
            files = glob(self.cwd +'/'+ 'traces/adj/*SU')
            unix.rename('_SU', '_SU.adj', files)

        # workaround for SPECFEM3D's requirement that all components exist,
        # even ones not in use
        unix.cd(self.cwd +'/'+ 'traces/adj')
        for iproc in range(PAR.NPROC):
            for channel in ['x', 'y', 'z']:
                src = '%d_d%s_SU.adj' % (iproc, PAR.CHANNELS[0])
                dst = '%d_d%s_SU.adj' % (iproc, channel)
                if not exists(dst):
                    unix.cp(src, dst)
Example 50
    def save(self, g, path='', parameters=[], backup=None):
        """ Utility for saving dictionary representation of gradient
        """
        if not exists(path):
            raise Exception

        if not parameters:
            parameters = solver.parameters

        if backup:
            src = path +'/'+ 'gradient'
            dst = path +'/'+ 'gradient_'+backup
            unix.mv(src, dst)

        solver.save(solver.split(g),
                    path +'/'+ 'gradient',
                    parameters=parameters,
                    suffix='_kernel')
Example 51
    def check_source_names(self):
        """ Determines names of sources by applying wildcard rule to user-
            supplied input files
        """
        path = PATH.SPECFEM_DATA
        if not exists(path):
            raise Exception

        # apply wildcard rule
        wildcard = self.source_prefix+'_*'
        globstar = sorted(glob(path +'/'+ wildcard))
        if not globstar:
            print(msg.SourceError_SPECFEM % (path, wildcard))
            sys.exit(-1)

        names = []
        for path in globstar:
            names += [basename(path).split('_')[-1]]
        self._source_names = names[:PAR.NTASK]
Example 52
    def write_residuals(self, path, syn, obs):
        """ Computes residuals from observations and synthetics

          INPUT
            PATH - location residuals will be written
            SYN - obspy Stream object containing synthetic data
            OBS - obspy Stream object containing observed data
        """
        nt, dt, _ = self.get_time_scheme(syn)
        nn, _ = self.get_network_size(syn)

        residuals = []
        for ii in range(nn):
            residuals.append(self.misfit(syn[ii].data, obs[ii].data, nt, dt))

        filename = path+'/'+'residuals'
        if exists(filename):
            residuals.extend(list(np.loadtxt(filename)))

        np.savetxt(filename, residuals)
Example 53
    def write_residuals(self, path, syn, obs):
        """
        Computes residuals

        :input path: location "adjoint traces" will be written
        :input syn: obspy Stream object containing synthetic data
        :input obs: obspy Stream object containing observed data
        """
        nt, dt, _ = self.get_time_scheme(syn)
        nn, _ = self.get_network_size(syn)

        residuals = []
        for ii in range(nn):
            residuals.append(self.misfit(syn[ii].data, obs[ii].data, nt, dt))

        filename = path+'/'+'residuals'
        if exists(filename):
            residuals.extend(list(np.loadtxt(filename)))

        np.savetxt(filename, residuals)
Example 54
    def check_mesh_properties(self, path=None, parameters=None):
        if not hasattr(self, '_mesh_properties'):
            if not path:
                path = PATH.MODEL_INIT

            if not parameters:
                parameters = self.parameters

            nproc = 0
            ngll = []
            while True:
                dummy = loadbin(path, nproc, 'reg1_'+parameters[0])
                ngll += [len(dummy)]
                nproc += 1
                if not exists('%s/proc%06d_reg1_%s.bin' % (path, nproc, parameters[0])):
                    break

            self._mesh_properties = Struct([
                ['nproc', nproc],
                ['ngll', ngll]])

        return self._mesh_properties
Example 55
    def generate_mesh(self, model_path=None, model_name=None, model_type='gll'):
        """ Performs meshing and database generation
        """
        assert(model_name)
        assert(model_type)

        self.initialize_solver_directories()
        unix.cd(self.cwd)

        if model_type == 'gll':
            assert (exists(model_path))
            self.check_mesh_properties(model_path)

            unix.cp(glob(model_path +'/'+ '*'), self.model_databases)

            call_solver(system.mpiexec(), 'bin/xmeshfem3D')

            if self.taskid == 0:
                self.export_model(PATH.OUTPUT +'/'+ model_name)

        else:
            raise NotImplementedError
Example 56
    def check(self):
        """ Checks parameters and paths
        """

        # starting and stopping iterations
        if 'BEGIN' not in PAR:
            raise ParameterError(PAR, 'BEGIN')

        if 'END' not in PAR:
            raise ParameterError(PAR, 'END')

        # scratch paths
        if 'SCRATCH' not in PATH:
            raise ParameterError(PATH, 'SCRATCH')

        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', None)

        if 'FUNC' not in PATH:
            setattr(PATH, 'FUNC', join(PATH.SCRATCH, 'evalfunc'))

        if 'GRAD' not in PATH:
            setattr(PATH, 'GRAD', join(PATH.SCRATCH, 'evalgrad'))

        if 'HESS' not in PATH:
            setattr(PATH, 'HESS', join(PATH.SCRATCH, 'evalhess'))

        if 'OPTIMIZE' not in PATH:
            setattr(PATH, 'OPTIMIZE', join(PATH.SCRATCH, 'optimize'))

        # input paths
        if 'DATA' not in PATH:
            setattr(PATH, 'DATA', None)

        if 'MODEL_INIT' not in PATH:
            raise ParameterError(PATH, 'MODEL_INIT')

        # output paths
        if 'OUTPUT' not in PATH:
            raise ParameterError(PATH, 'OUTPUT')

        if 'SAVEMODEL' not in PAR:
            setattr(PAR, 'SAVEMODEL', 1)

        if 'SAVEGRADIENT' not in PAR:
            setattr(PAR, 'SAVEGRADIENT', 0)

        if 'SAVEKERNELS' not in PAR:
            setattr(PAR, 'SAVEKERNELS', 0)

        if 'SAVETRACES' not in PAR:
            setattr(PAR, 'SAVETRACES', 0)

        if 'SAVERESIDUALS' not in PAR:
            setattr(PAR, 'SAVERESIDUALS', 0)

        # parameter assertions
        assert 1 <= PAR.BEGIN <= PAR.END

        # path assertions
        if not exists(PATH.DATA):
            assert 'MODEL_TRUE' in PATH
            assert exists(PATH.MODEL_TRUE)

        if not exists(PATH.MODEL_INIT):
            raise Exception()
Example 57
 def prepare_model(self):
     model = PATH.OUTPUT +'/'+ 'model_init'
     assert exists(model)
     unix.ln(model, PATH.SCRATCH +'/'+ 'model')