Example #1
    def evaluate_gradient(self, model_dir=''):
        """ Compute event gradient by running adjoint simulation
        """

        # get task number
        itask = system.getnode()

        # setup directories
        syn_dir = join(self.getpath, 'traces', 'syn')
        adj_dir = join(self.getpath, 'traces', 'adj')

        # set par.cfg file for solver
        self.set_par_cfg(external_model_dir=model_dir, output_dir=syn_dir, save_forward_wavefield=False,
                         adjoint_sim=True, adjoint_dir=adj_dir)

        # set src.cfg for solver
        xsrc = self.sources[itask][0]
        zsrc = self.sources[itask][1]
        self.set_src_cfg(xs=float(xsrc), zs=float(zsrc))

        # copy cfg files
        unix.cp(join(self.getpath, 'INPUT', 'par.cfg'), adj_dir)
        unix.cp(join(self.getpath, 'INPUT', 'src.cfg'), adj_dir)

        # run adjoint sim
        self.adjoint()

        # clean saved boundaries
        unix.rm(glob(join(syn_dir, 'proc*')))
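
Note: every example on this page calls helpers from seisflows.tools.unix (rm, mkdir, mv, cp, ln, cd, rename). Those wrappers are not shown here; below is a minimal sketch of how rm, mkdir and mv might look, assuming they are thin conveniences over os/shutil. Only the names and call signatures come from the examples; the bodies are assumptions, not the SeisFlows implementation.

import os
import shutil


def rm(paths):
    """ Removes one or more files or directories; missing paths are skipped
      (assumed behavior)
    """
    if isinstance(paths, str):
        paths = [paths]
    for path in paths:
        if os.path.isdir(path) and not os.path.islink(path):
            shutil.rmtree(path)
        elif os.path.lexists(path):
            os.remove(path)


def mkdir(dirs):
    """ Creates one or more directories, including parent directories
    """
    if isinstance(dirs, str):
        dirs = [dirs]
    for dirname in dirs:
        if not os.path.isdir(dirname):
            os.makedirs(dirname)


def mv(src, dst):
    """ Moves a file or directory to a new location
    """
    shutil.move(src, dst)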
Example #2
    def smooth(self, path='', parameters='dummy', span=0.):
        """ Smooths SPECFEM2D kernels by convolving them with a Gaussian
        """
        from seisflows.tools.array import meshsmooth, stack

        #assert parameters == self.parameters

        # implementing nproc > 1 would be straightforward, but a bit tedious
        #assert self.mesh.nproc == 1

        kernels = self.load(path, suffix='_kernel')
        if not span:
            return kernels

        # set up grid
        _,x = loadbypar(PATH.MODEL_INIT, ['x'], 0)
        _,z = loadbypar(PATH.MODEL_INIT, ['z'], 0)
        mesh = stack(x[0], z[0])

        for key in self.parameters:
            kernels[key] = [meshsmooth(kernels[key][0], mesh, span)]

        unix.rm(path + '_nosmooth')
        unix.mv(path, path + '_nosmooth')
        self.save(path, kernels, suffix='_kernel')
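
Note: meshsmooth, imported above from seisflows.tools.array, performs the actual Gaussian convolution and is not shown on this page. The sketch below illustrates the idea, assuming mesh is an (npts, 2) array of x-z coordinates (as produced by stack) and v holds one kernel value per mesh point; the function name gaussian_meshsmooth and its body are illustrative, not the SeisFlows implementation.

import numpy as np


def gaussian_meshsmooth(v, mesh, span):
    """ Smooths values defined on scattered 2D points by Gaussian-weighted
      averaging; span plays the role of the standard deviation of the kernel
    """
    smoothed = np.zeros_like(v)
    for i in range(mesh.shape[0]):
        # squared distance from point i to every other point
        d2 = (mesh[:, 0] - mesh[i, 0])**2 + (mesh[:, 1] - mesh[i, 1])**2
        weights = np.exp(-0.5*d2/span**2)
        smoothed[i] = np.sum(weights*v)/np.sum(weights)
    return smoothed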
Example #3
    def apply_hessian(self, m, dm, h):
        """ Computes the action of the Hessian on a given vector through
          solver calls
        """
        system = sys.modules['seisflows_system']
        solver = sys.modules['seisflows_solver']
        postprocess = sys.modules['seisflows_postprocess']

        self.save('m_lcg', m + h*dm)

        solver.save(solver.split(m + h*dm), 
                PATH.HESS+'/'+'model')

        system.run('optimize', 'apply_hess',
                path=PATH.HESS)

        postprocess.write_gradient(
                path=PATH.HESS)

        self.save('g_lcg', solver.merge(solver.load(
                PATH.HESS+'/'+'gradient', suffix='_kernel')))

        # uncomment for debugging
        #if True:
        #    unix.rm(PATH.HESS+'_debug')
        #    unix.mv(PATH.HESS, PATH.HESS+'_debug')
        #    unix.mkdir(PATH.HESS)

        unix.rm(PATH.HESS)
        unix.mkdir(PATH.HESS)

        return self.hessian_product(h)
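
Note: hessian_product, called on the last line above, is not shown on this page. Given that the gradient at the unperturbed model is stored as 'g_new' and the gradient at the perturbed model m + h*dm as 'g_lcg', the usual finite-difference approximation of the Hessian-vector product is H*dm ≈ (g(m + h*dm) - g(m))/h. A minimal sketch under that assumption (not necessarily the actual implementation):

    def hessian_product(self, h):
        """ Finite-difference approximation of the action of the Hessian on
          dm, assuming 'g_new' holds the gradient at m and 'g_lcg' the
          gradient at m + h*dm
        """
        return (self.load('g_lcg') - self.load('g_new'))/h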
Example #4
    def setup(self):
        """ Prepares solver for inversion or migration
        """
        # clean up for new inversion
        unix.rm(self.getpath)

        # As input for an inversion or migration, users can choose between
        # providing data, or providing a target model from which data are
        # generated on the fly. In the former case, a value for PATH.DATA must
        # be supplied; in the latter case, a value for PATH.MODEL_TRUE must be
        # provided

        if PATH.DATA:
            # copy user supplied data
            self.initialize_solver_directories()

            src = glob(PATH.DATA +'/'+ basename(self.getpath) +'/'+ '*')
            dst = 'traces/obs/'
            unix.cp(src, dst)

        else:
            # generate data on the fly
            self.generate_data(
                model_path=PATH.MODEL_TRUE,
                model_name='model_true',
                model_type='gll')

        # prepare initial model
        self.generate_mesh(
            model_path=PATH.MODEL_INIT,
            model_name='model_init',
            model_type='gll')

        self.initialize_adjoint_traces()
Example #5
    def adjoint(self):
        """ Calls SPECFEM3D_GLOBE adjoint solver
        """
        solvertools.setpar('SIMULATION_TYPE', '3')
        solvertools.setpar('SAVE_FORWARD', '.false.')
        unix.rm('SEM')
        unix.ln('traces/adj', 'SEM')
        call_solver(system.mpiexec(), 'bin/xspecfem3D')
Example #6
    def adjoint(self):
        """ Calls SPECFEM3D adjoint solver
        """
        setpar('SIMULATION_TYPE', '3')
        setpar('SAVE_FORWARD', '.false.')
        unix.rm('SEM')
        unix.ln('traces/adj', 'SEM')
        self.call('bin/xspecfem3D')
Example #7
    def clean(self):
        """ Cleans directories in which function and gradient evaluations were
          carried out
        """
        unix.rm(PATH.GRAD)
        unix.rm(PATH.FUNC)
        unix.mkdir(PATH.GRAD)
        unix.mkdir(PATH.FUNC)
Example #8
    def clean_directory(self, path):
        """ If dir exists clean otherwise make
        """

        if not exists(path):
            unix.mkdir(path)
        else:
            unix.rm(path)
            unix.mkdir(path)
Example #9
    def clean(self):
        # can forward simulations from line search be carried over?
        self.update_status()

        if self.status==1:
            unix.rm(PATH.GRAD)
            unix.mv(PATH.FUNC, PATH.GRAD)
            unix.mkdir(PATH.FUNC)
        else:
            super(thrifty_inversion, self).clean()
Example #10
    def clean(self):
        isready = self.solver_status()
        if isready:
            unix.rm(PATH.GRAD)
            unix.mv(PATH.FUNC, PATH.GRAD)
            unix.mkdir(PATH.FUNC)
            unix.rm(PATH.SOLVER)
            unix.mv(PATH.SOLVER+'_best', PATH.SOLVER)
        else:
            super(thrifty_inversion, self).clean()
Example #11
    def iterate_search(self):
        super(thrifty_inversion, self).iterate_search()

        isdone = optimize.isdone
        isready = self.solver_status()

        # to avoid redundant forward simulation, save solver files associated
        # with 'best' trial model
        if isready and isdone:
            unix.rm(PATH.SOLVER+'_best')
            unix.mv(PATH.SOLVER, PATH.SOLVER+'_best')
Example #12
    def main(self):
        """ Migrates seismic data
        """
        # prepare directory structure
        unix.rm(PATH.GLOBAL)
        unix.mkdir(PATH.GLOBAL)

        # set up workflow machinery
        preprocess.setup()
        postprocess.setup()

        # set up solver machinery
        print 'Preparing solver...'
        system.run('solver', 'setup',
                   hosts='all')

        self.prepare_model()

        # perform migration
        print 'Generating synthetics...'
        system.run('solver', 'eval_func',
                   hosts='all',
                   path=PATH.GLOBAL)

        print 'Backprojecting data...'
        system.run('solver', 'eval_grad',
                   hosts='all',
                   path=PATH.GLOBAL,
                   export_traces=PAR.SAVETRACES)

        postprocess.combine_kernels(
            path=PATH.GLOBAL,
            parameters=solver.parameters)

        try:
            postprocess.combine_kernels(
                path=PATH.GLOBAL,
                parameters=['rhop'])
        except:
            pass


        if PAR.SAVETRACES:
            self.save_traces()

        if PAR.SAVEKERNELS:
            self.save_kernels()
        else:
            self.save_kernels_sum()

        print 'Finished\n'
Example #13
    def setup(self):
        """ Lays groundwork for inversion
        """
        # clean scratch directories
        if PAR.BEGIN == 1:
            unix.rm(PATH.GLOBAL)
            unix.mkdir(PATH.GLOBAL)

            preprocess.setup()
            postprocess.setup()
            optimize.setup()

        system.run('solver', 'setup', 
                   hosts='all')
Example #14
    def adjoint(self):
        """ Calls SPECFEM2D adjoint solver
        """
        setpar('SIMULATION_TYPE', '3')
        setpar('SAVE_FORWARD', '.false.')
        unix.rm('SEM')
        unix.ln('traces/adj', 'SEM')

        # hack to deal with different SPECFEM2D name conventions for
        # regular traces and 'adjoint' traces
        if PAR.FORMAT in ['SU', 'su']:
            files = glob('traces/adj/*.su')
            unix.rename('.su', '.su.adj', files)

        call_solver(system.mpiexec(), 'bin/xmeshfem2D')
        call_solver(system.mpiexec(), 'bin/xspecfem2D')
Example #15
    def main(self):
        unix.rm(PATH.SCRATCH)
        unix.mkdir(PATH.SCRATCH)
        preprocess.setup()


        print 'SIMULATION 1 OF 3'
        system.run('solver', 'setup',
                   hosts='all')

        print 'SIMULATION 2 OF 3'
        self.prepare_model()
        system.run('solver', 'eval_func',
                   hosts='all',
                   path=PATH.SCRATCH)

        print 'SIMULATION 3 OF 3'
        system.run('solver', 'eval_grad',
                   hosts='all',
                   path=PATH.SCRATCH)

        # collect traces
        obs = join(PATH.SOLVER, self.event, 'traces/obs')
        syn = join(PATH.SOLVER, self.event, 'traces/syn')
        adj = join(PATH.SOLVER, self.event, 'traces/adj')

        obs,_ = preprocess.load(obs)
        syn,_ = preprocess.load(syn)
        adj,_ = preprocess.load(adj, suffix='.su.adj')

        # collect model and kernels
        model = solver.load(PATH.MODEL_INIT)
        kernels = solver.load(PATH.SCRATCH+'/'+'kernels'+'/'+self.event, suffix='_kernel')

        # dot product in data space
        keys = obs.keys()
        LHS = DotProductLHS(keys, syn, adj)

        # dot product in model space
        keys = ['rho', 'vp', 'vs'] # model.keys()
        RHS = DotProductRHS(keys, model, kernels)

        print 
        print 'LHS:', LHS
        print 'RHS:', RHS
        print 'RELATIVE DIFFERENCE:', (LHS-RHS)/RHS
        print
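
Note: this example is a dot-product check on the adjoint machinery: it builds a data-space inner product from synthetics and adjoint traces (LHS) and a model-space inner product from the model and the kernels (RHS), then prints their relative difference, which should be small if the kernels are consistent with the adjoint sources. DotProductLHS and DotProductRHS are not shown; the sketch below gives the flavor of the model-space side, assuming each entry of model and kernels is a list of per-process NumPy arrays (as suggested by how they are loaded above) and ignoring any integration weights the real routine may apply.

import numpy as np


def dot_product_rhs(keys, model, kernels):
    """ Sums, over the requested parameters, the pointwise products of model
      and kernel values; an illustrative model-space inner product
    """
    total = 0.
    for key in keys:
        for m, k in zip(model[key], kernels[key]):
            total += np.dot(np.asarray(m).ravel(), np.asarray(k).ravel())
    return total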
Example #16
    def main(self):
        """ Migrates seismic data
        """
        # prepare directory structure
        unix.rm(PATH.SCRATCH)
        unix.mkdir(PATH.SCRATCH)

        # set up workflow machinery
        preprocess.setup()
        postprocess.setup()

        # set up solver machinery
        print 'Preparing solver...'
        system.run('solver', 'setup', hosts='all')

        self.prepare_model()

        # perform migration
        print 'Generating synthetics...'
        system.run('solver', 'eval_func', hosts='all', path=PATH.SCRATCH)

        print 'Backprojecting...'
        system.run('solver',
                   'eval_grad',
                   hosts='all',
                   path=PATH.SCRATCH,
                   export_traces=PAR.SAVETRACES)

        postprocess.combine_kernels(path=PATH.SCRATCH,
                                    parameters=solver.parameters)

        try:
            postprocess.combine_kernels(path=PATH.SCRATCH, parameters=['rhop'])
        except:
            pass

        if PAR.SAVETRACES:
            self.save_traces()

        if PAR.SAVEKERNELS:
            self.save_kernels()
        else:
            self.save_kernels_sum()

        print 'Finished\n'
Example #17
    def setup(self):
        """ Perform setup. Generates synthetic observed data.
        """

        # clean up solver directories
        unix.rm(self.getpath)
        self.initialize_solver_directories()

        if PATH.DATA:
            # copy data to scratch dirs
            src = glob(PATH.DATA +'/'+ basename(self.getpath) +'/'+ '*')
            dst = 'traces/obs/'
            unix.cp(src, dst)

        else:
            # generate data on the fly
            output_dir = join(self.getpath, 'traces', 'obs')
            self.generate_data(model_dir=PATH.MODEL_TRUE, output_dir=output_dir)
Example #18
    def setup(self):
        """ Lays groundwork for inversion
        """
        # clean scratch directories
        if PAR.BEGIN == 1:
            unix.rm(PATH.SCRATCH)
            unix.mkdir(PATH.SCRATCH)

            preprocess.setup()
            postprocess.setup()
            optimize.setup()

        if PATH.DATA:
            print 'Copying data'
        else:
            print 'Generating data'

        system.run('solver', 'setup', hosts='all')
Example #19
    def setup(self):
        """ Lays groundwork for inversion
        """
        # clean scratch directories
        if PAR.BEGIN == 1:
            unix.rm(PATH.GLOBAL)
            unix.mkdir(PATH.GLOBAL)

            preprocess.setup()
            postprocess.setup()
            optimize.setup()

        if PATH.DATA:
            print('Copying data...')
        else:
            print('Generating data...')
        system.run('solver', 'setup', 
                   hosts='all')
Example #20
    def main(self):
        unix.rm(PATH.SCRATCH)
        unix.mkdir(PATH.SCRATCH)
        preprocess.setup()


        print('SIMULATION 1 OF 3')
        system.run('solver', 'setup')

        print('SIMULATION 2 OF 3')
        self.prepare_model()
        system.run('solver', 'eval_func',
                   path=PATH.SCRATCH)

        print('SIMULATION 3 OF 3')
        system.run('solver', 'eval_grad',
                   path=PATH.SCRATCH)

        # collect traces
        obs = join(PATH.SOLVER, self.event, 'traces/obs')
        syn = join(PATH.SOLVER, self.event, 'traces/syn')
        adj = join(PATH.SOLVER, self.event, 'traces/adj')

        obs,_ = preprocess.load(obs)
        syn,_ = preprocess.load(syn)
        adj,_ = preprocess.load(adj, suffix='.su.adj')

        # collect model and kernels
        model = solver.load(PATH.MODEL_INIT)
        kernels = solver.load(PATH.SCRATCH+'/'+'kernels'+'/'+self.event, suffix='_kernel')

        # dot product in data space
        keys = list(obs.keys())
        LHS = DotProductLHS(keys, syn, adj)

        # dot product in model space
        keys = ['rho', 'vp', 'vs'] # model.keys()
        RHS = DotProductRHS(keys, model, kernels)

        print() 
        print('LHS:', LHS)
        print('RHS:', RHS)
        print('RELATIVE DIFFERENCE:', (LHS-RHS)/RHS)
        print()
Example #21
    def adjoint(self):
        """ Calls SPECFEM2D adjoint solver
        """
        setpar('SIMULATION_TYPE', '3')
        setpar('SAVE_FORWARD', '.false.')
        unix.rm('SEM')
        unix.ln('traces/adj', 'SEM')

        # hack to deal with SPECFEM2D's use of different name conventions for
        # regular traces and 'adjoint' traces
        if PAR.FORMAT in ['SU', 'su']:
            files = glob('traces/adj/*.su')
            unix.rename('.su', '.su.adj', files)

        if PAR.WITH_MPI:
            call_solver(system.mpiexec(), 'bin/xmeshfem2D')
            call_solver(system.mpiexec(), 'bin/xspecfem2D')
        else:
            call_solver_nompi('bin/xmeshfem2D')
            call_solver_nompi('bin/xspecfem2D')
Example #22
    def finalize_search(self):
        """ Prepares algorithm machinery and scratch directory for next
          model update
        """
        m = self.load('m_new')
        print("finalize_search")
        print(m)
        g = self.load('g_new')
        p = self.load('p_new')
        x = self.line_search.search_history()[0]
        f = self.line_search.search_history()[1]

        # clean scratch directory
        unix.cd(PATH.OPTIMIZE)
        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')
        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')

        unix.mv('m_try', 'm_new')
        self.savetxt('f_new', f.min())

        # output latest statistics
        self.writer('factor',
                    -self.dot(g, g)**-0.5 * (f[1] - f[0]) / (x[1] - x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1] - f[0]) / (x[1] - x[0]))
        self.writer('step_count', self.line_search.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180. * np.pi**-1 * angle(p, -g))

        self.line_search.writer.newline()
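
Note: the statistics written above are simple functions of the gradient g, the search direction p, and the first two points (x, f) of the line search history: 'slope' is the finite difference (f[1]-f[0])/(x[1]-x[0]), and 'theta' converts angle(p, -g) from radians to degrees, i.e. the angle between the search direction and the steepest-descent direction. The angle helper is not shown; a minimal sketch, assuming it returns radians:

import numpy as np


def angle(u, v):
    """ Returns the angle, in radians, between two vectors
    """
    cosine = np.dot(u, v)/(np.linalg.norm(u)*np.linalg.norm(v))
    return np.arccos(np.clip(cosine, -1., 1.))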
Example #23
    def main(self):
        """ Migrates seismic data
        """
        # prepare directory structure
        unix.rm(PATH.GLOBAL)
        unix.mkdir(PATH.GLOBAL)

        # set up pre- and post-processing
        preprocess.setup()
        postprocess.setup()

        # prepare solver
        print 'Preparing solver...'
        system.run('solver', 'setup', hosts='all')

        self.prepare_model()

        system.run('solver', 'eval_func', hosts='all', path=PATH.GLOBAL)

        # backproject data
        print 'Backprojecting data...'
        system.run('solver',
                   'eval_grad',
                   hosts='all',
                   path=PATH.GLOBAL,
                   export_traces=PAR.SAVETRACES)

        # process gradient
        postprocess.process_kernels(path=PATH.GLOBAL, tag='gradient')

        # save results
        if PAR.SAVEGRADIENT:
            self.save_gradient()

        if PAR.SAVETRACES:
            self.save_traces()

        if PAR.SAVEKERNELS:
            self.save_kernels()

        print 'Finished\n'
Example #24
    def setup(self):
        """ Lays groundwork for inversion
        """
        # clean scratch directories
        if PAR.BEGIN == 1:
            unix.rm(PATH.SCRATCH)
            unix.mkdir(PATH.SCRATCH)

            preprocess.setup()
            postprocess.setup()
            optimize.setup()

        isready = self.solver_status()
        if not isready:
            if PATH.DATA:
                print 'Copying data...'
            else:
                print 'Generating data...'

            system.run('solver', 'setup',
                       hosts='all')
Example #25
def smooth_legacy(path='', parameters=[], span=0.):
    solver = sys.modules['seisflows_solver']
    PATH = sys.modules['seisflows_paths']

    # initialize arrays
    kernels = {}
    for key in parameters or solver.parameters:
        kernels[key] = []

    coords = {}
    for key in ['x', 'z']:
        coords[key] = []

    # read kernels
    for key in parameters or solver.parameters:
        kernels[key] += solver.io.read_slice(path, key + '_kernel', 0)

    if not span:
        return kernels

    # read coordinates
    for key in ['x', 'z']:
        coords[key] += solver.io.read_slice(PATH.MODEL_INIT, key, 0)

    mesh = array.stack(coords['x'][0], coords['z'][0])

    #mesh = array.stack(solver.mesh_properties.coords['x'][0],
    #                   solver.mesh_properties.coords['z'][0])

    for key in parameters or solver.parameters:
        kernels[key] = [array.meshsmooth(kernels[key][0], mesh, span)]

    unix.rm(path + '_nosmooth')
    unix.mv(path, path + '_nosmooth')

    unix.mkdir(path)
    for key in parameters or solver.parameters:
        solver.io.write_slice(kernels[key][0], path, key + '_kernel', 0)
Example #26
    def finalize_search(self):
        """ Prepares algorithm machinery and scratch directory for next
          model update
        """
        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        x = self.line_search.search_history()[0]
        f = self.line_search.search_history()[1]

        # clean scratch directory
        unix.cd(PATH.OPTIMIZE)
        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')
        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')

        unix.mv('m_try', 'm_new')
        self.savetxt('f_new', f.min())

        # output latest statistics
        self.writer('factor', -self.dot(g,g)**-0.5 * (f[1]-f[0])/(x[1]-x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1]-f[0])/(x[1]-x[0]))
        self.writer('step_count', self.line_search.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180.*np.pi**-1*angle(p,-g))

        self.line_search.writer.newline()
Example #27
    def setup(self):
        """ Prepares solver for inversion or migration

          Sets up directory structure expected by SPECFEM and copies or 
          generates seismic data to be inverted or migrated
        """
        # clean up for new inversion
        unix.rm(self.cwd)

        # As input for an inversion or migration, users can choose between
        # providing data, or providing a target model from which data are
        # generated on the fly. In the former case, a value for PATH.DATA must
        # be supplied; in the latter case, a value for PATH.MODEL_TRUE must be
        # provided

        if PATH.DATA:
            # copy user supplied data
            self.initialize_solver_directories()

            src = glob(PATH.DATA +'/'+ self.source_name +'/'+ '*')
            dst = 'traces/obs/'
            unix.cp(src, dst)

        else:
            # generate data on the fly
            self.generate_data(
                model_path=PATH.MODEL_TRUE,
                model_name='model_true',
                model_type='gll')

        # prepare initial model
        self.generate_mesh(
            model_path=PATH.MODEL_INIT,
            model_name='model_init',
            model_type='gll')

        self.initialize_adjoint_traces()
Example #28
    def main(self):
        path = PATH.GLOBAL

        # prepare directory structure
        unix.rm(path)
        unix.mkdir(path)

        # set up workflow machinery
        preprocess.setup()
        postprocess.setup()

        system.run('solver', 'setup',
                   hosts='all')

        print 'Computing preconditioner...'
        system.run('workflow', 'compute_precond',
                   hosts='all',
                   model_path=PATH.MODEL_INIT,
                   model_name='model',
                   model_type='gll')

        postprocess.combine_kernels(
            path=path,
            parameters=solver.parameters)

        for span in cast(PAR.SMOOTH):
            self.process_kernels(
                path=path,
                parameters=solver.parameters,
                span=span)

            # save preconditioner
            src = path +'/'+ 'kernels/absval'
            dst = PATH.OUTPUT +'/'+ 'precond_%04d' % span
            unix.cp(src, dst)

        print 'Finished\n'
Example #29
    def setup(self):
        """ Lays groundwork for inversion
        """
        # clean scratch directories
        if PAR.BEGIN == 1:
            unix.rm(PATH.GLOBAL)
            unix.mkdir(PATH.GLOBAL)

        # set up optimization
        optimize.setup()

        # set up pre- and post-processing
        preprocess.setup()
        postprocess.setup()

        # set up solver
        if PAR.BEGIN == 1:
            system.run('solver', 'setup',
                       hosts='all')
            return

        if PATH.LOCAL:
             system.run('solver', 'setup',
                        hosts='all')
Example #30
    def smooth(self, path='', parameters=None, span=0.):
        """ For a long time SPECFEM2D lacked its own smoothing utility; this 
          method was intended only as a crude workaround
        """
        from seisflows.tools import array

        assert self.mesh_properties.nproc == 1,\
            msg.SmoothingError_SPECFEM2D

        kernels = self.load(path, suffix='_kernel')
        if not span:
            return kernels

        # set up grid
        x = sem.read(PATH.MODEL_INIT, 'x', 0)
        z = sem.read(PATH.MODEL_INIT, 'z', 0)
        mesh = array.stack(x, z)

        for key in parameters or self.parameters:
            kernels[key] = [array.meshsmooth(kernels[key][0], mesh, span)]

        unix.rm(path + '_nosmooth')
        unix.mv(path, path + '_nosmooth')
        self.save(path, kernels, suffix='_kernel')
Example #31
    def combine(self, path=''):
        """ combines SPECFEM3D_GLOBE kernels
        """
        dirs = unix.ls(path)

        # initialize kernels
        unix.cd(path)
        for key in self.model_parameters:
            if key not in self.inversion_parameters:
                for i in range(PAR.NPROC):
                    proc = '%06d' % i
                    name = self.kernel_map[key]
                    src = PATH.GLOBAL + '/' + 'mesh' + '/' + key + '/' + proc
                    dst = path + '/' + 'sum' + '/' + 'proc' + proc + '_' + name + '.bin'
                    savebin(np.load(src), dst)

        # create temporary files and directories
        unix.cd(self.getpath)
        with open('kernels_list.txt', 'w') as file:
            file.write('\n'.join(dirs) + '\n')
        unix.mkdir('INPUT_KERNELS')
        unix.mkdir('OUTPUT_SUM')
        for dir in dirs:
            src = path + '/' + dir
            dst = 'INPUT_KERNELS' + '/' + dir
            unix.ln(src, dst)

        # sum kernels
        self.mpirun(PATH.SOLVER_BINARIES + '/' + 'xsum_kernels')
        unix.mv('OUTPUT_SUM', path + '/' + 'sum')

        # remove temporary files and directories
        unix.rm('INPUT_KERNELS')
        unix.rm('kernels_list.txt')

        unix.cd(path)
Example #32
    def finalize_search(self):
        """ Cleans working directory and writes updated model
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load('m_new')
        g = self.load('g_new')
        p = self.load('p_new')
        s = loadtxt('s_new')

        x = self.step_lens()
        f = self.func_vals()

        # clean working directory
        unix.rm('alpha')
        unix.rm('m_try')
        unix.rm('f_try')

        if self.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')

        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')
        unix.mv('s_new', 's_old')

        # write updated model
        alpha = x[f.argmin()]
        savetxt('alpha', alpha)
        self.save('m_new', m + alpha*p)
        savetxt('f_new', f.min())

        # append latest statistics
        self.writer('factor', -self.dot(g,g)**-0.5 * (f[1]-f[0])/(x[1]-x[0]))
        self.writer('gradient_norm_L1', np.linalg.norm(g, 1))
        self.writer('gradient_norm_L2', np.linalg.norm(g, 2))
        self.writer('misfit', f[0])
        self.writer('restarted', self.restarted)
        self.writer('slope', (f[1]-f[0])/(x[1]-x[0]))
        self.writer('step_count', self.step_count)
        self.writer('step_length', x[f.argmin()])
        self.writer('theta', 180.*np.pi**-1*angle(p,-g))

        self.stepwriter.newline()
Example #33
    def finalize_search(self):
        """ Cleans working directory and writes updated model
        """
        unix.cd(PATH.OPTIMIZE)

        m = self.load("m_new")
        g = self.load("g_new")
        p = self.load("p_new")
        s = loadtxt("s_new")

        x = self.step_lens()
        f = self.func_vals()

        # clean working directory
        unix.rm("alpha")
        unix.rm("m_try")
        unix.rm("f_try")

        if self.iter > 1:
            unix.rm("m_old")
            unix.rm("f_old")
            unix.rm("g_old")
            unix.rm("p_old")
            unix.rm("s_old")

        unix.mv("m_new", "m_old")
        unix.mv("f_new", "f_old")
        unix.mv("g_new", "g_old")
        unix.mv("p_new", "p_old")
        unix.mv("s_new", "s_old")

        # write updated model
        alpha = x[f.argmin()]
        savetxt("alpha", alpha)
        self.save("m_new", m + alpha * p)
        savetxt("f_new", f.min())

        # append latest output
        self.writer("factor", -self.dot(g, g) ** -0.5 * (f[1] - f[0]) / (x[1] - x[0]))
        self.writer("gradient_norm_L1", np.linalg.norm(g, 1))
        self.writer("gradient_norm_L2", np.linalg.norm(g, 2))
        self.writer("misfit", f[0])
        self.writer("restarted", self.restarted)
        self.writer("slope", (f[1] - f[0]) / (x[1] - x[0]))
        self.writer("step_count", self.step_count)
        self.writer("step_length", x[f.argmin()])
        self.writer("theta", 180.0 * np.pi ** -1 * angle(p, -g))

        self.stepwriter.newline()
Example #34
    def clean(self):
        unix.cd(self.cwd)
        unix.rm('OUTPUT_FILES')
        unix.mkdir('OUTPUT_FILES')
Example #35
File: base.py Project: wangyf/seisflows
    def clean(self):
        unix.cd(self.cwd)
        unix.rm('OUTPUT_FILES')
        unix.mkdir('OUTPUT_FILES')
Example #36
#!/usr/bin/env python

from glob import glob

from seisflows.tools import unix

unix.rm('scratch')
unix.rm(glob('output*'))
unix.rm(glob('*.pyc'))

Example #37
File: base.py Project: mpbl/seisflows
    def finalize_search(cls):
        """ Cleans working directory and writes updated model
        """
        unix.cd(cls.path)
        m0 = loadnpy('m_new')
        p = loadnpy('p_new')

        x = cls.step_lens()
        f = cls.func_vals()

        # clean working directory
        unix.rm('alpha')
        unix.rm('m_try')
        unix.rm('f_try')

        if cls.iter > 1:
            unix.rm('m_old')
            unix.rm('f_old')
            unix.rm('g_old')
            unix.rm('p_old')
            unix.rm('s_old')

        unix.mv('m_new', 'm_old')
        unix.mv('f_new', 'f_old')
        unix.mv('g_new', 'g_old')
        unix.mv('p_new', 'p_old')
        unix.mv('s_new', 's_old')

        # write updated model
        alpha = x[f.argmin()]
        savetxt('alpha', alpha)
        savenpy('m_new', m0 + p * alpha)
        savetxt('f_new', f.min())

        cls.writer([], [], [])