Example #1
class ChenTromp_tti(custom_import('solver', 'ChenTromp_base')):

    # model parameters included in inversion
    parameters = []
    parameters += ['A']
    parameters += ['C']
    parameters += ['L']
    parameters += ['N']
    parameters += ['F']
    parameters += ['Jc']
    parameters += ['Js']
    parameters += ['Kc']
    parameters += ['Ks']
    parameters += ['Mc']
    parameters += ['Ms']
    parameters += ['Gc']
    parameters += ['Gs']
    parameters += ['Bc']
    parameters += ['Bs']
    parameters += ['Hc']
    parameters += ['Hs']
    parameters += ['Dc']
    parameters += ['Ds']
    parameters += ['Ec']
    parameters += ['Es']
Example #2
class tiger_sm(custom_import('system', 'slurm_sm')):
    """ Specially designed system interface for tiger.princeton.edu

      See parent class SLURM_SM for more information
    """
    def check(self):
        """ Checks parameters and paths
        """
        # where job was submitted
        if 'WORKDIR' not in PATH:
            setattr(PATH, 'WORKDIR', abspath('.'))

        # where temporary files are written
        if 'SCRATCH' not in PATH:
            setattr(PATH, 'SCRATCH', PATH.WORKDIR + '/' + 'scratch')

        super(tiger_sm, self).check()

    def submit(self, *args, **kwargs):
        """ Submits job
        """
        if not exists(PATH.SCRATCH):
            path = join('/scratch/gpfs', getuser(), 'seisflows', str(uuid4()))
            unix.mkdir(path)
            unix.ln(path, PATH.SCRATCH)

        super(tiger_sm, self).submit(*args, **kwargs)
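The submit override above shows a common cluster pattern: create a unique per-run directory on the shared parallel filesystem and expose it through a stable symlink in the working directory. A minimal standard-library sketch of the same pattern (the /scratch/gpfs root and 'seisflows' directory name mirror the example; the helper itself is hypothetical):

import os
from getpass import getuser
from uuid import uuid4

def make_scratch_link(workdir, root='/scratch/gpfs'):
    """ Creates a unique scratch directory and links it as <workdir>/scratch
    """
    link = os.path.join(workdir, 'scratch')
    if not os.path.exists(link):
        target = os.path.join(root, getuser(), 'seisflows', str(uuid4()))
        os.makedirs(target)  # unique per run, so it cannot already exist
        os.symlink(target, link)
    return link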
Example #3
class tikhonov1(custom_import('postprocess', 'regularize')):
    """ Adds regularization options to base class

        Available options include zeroth-, first-, and second-order Tikhonov
        and total variation regularization. While the underlying theory is
        classical, its application to unstructured numerical grids via the
        "seisflows.tools.math.nabla" operator is somewhat involved.

        So far, can only be used for 2D inversion, because the required spatial
        derivative operator "nabla" is not yet available for 3D grids.
    """
    def check(self):
        """ Checks parameters and paths
        """
        super(tikhonov1, self).check()

        if 'CREEPING' not in PAR:
            setattr(PAR, 'CREEPING', False)

        if not PAR.LAMBDA:
            raise ValueError('Regularization requires a nonzero PAR.LAMBDA')

    def nabla(self, mesh, m, g):
        if PAR.CREEPING:
            G, grid = mesh2grid(g, mesh)
            DG = nabla(G, order=1)
            dg = grid2mesh(DG, grid, mesh)
            return -dg / np.mean(m)

        else:
            M, grid = mesh2grid(m, mesh)
            DM = nabla(M, order=1)
            dm = grid2mesh(DM, grid, mesh)
            return dm / np.mean(m)
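For intuition, first-order Tikhonov regularization penalizes model roughness, so its contribution to the gradient involves the spatial derivative of the model (or, in the CREEPING variant, of the gradient). A self-contained numpy sketch on a regular grid, standing in for the mesh2grid/nabla/grid2mesh chain above (the seisflows nabla operator handles unstructured meshes; np.gradient here is only an illustration):

import numpy as np

def roughness(M, dx=1.0, dz=1.0):
    """ First-order roughness |grad M| on a regular grid
    """
    dMdz, dMdx = np.gradient(M, dz, dx)   # axis 0 first, then axis 1
    return np.sqrt(dMdx**2 + dMdz**2)

M = np.random.rand(64, 64)                # toy model slice
dm = roughness(M) / np.mean(M)            # normalized as in nabla() above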
Example #4
class ChenTromp_base(custom_import('solver', 'specfem3d_legacy')):

    # model parameters expected by solver
    solver_parameters = []
    solver_parameters += ['A']
    solver_parameters += ['C']
    solver_parameters += ['N']
    solver_parameters += ['L']
    solver_parameters += ['F']
    solver_parameters += ['Jc']
    solver_parameters += ['Js']
    solver_parameters += ['Kc']
    solver_parameters += ['Ks']
    solver_parameters += ['Mc']
    solver_parameters += ['Ms']
    solver_parameters += ['Gc']
    solver_parameters += ['Gs']
    solver_parameters += ['Bc']
    solver_parameters += ['Bs']
    solver_parameters += ['Hc']
    solver_parameters += ['Hs']
    solver_parameters += ['Dc']
    solver_parameters += ['Ds']
    solver_parameters += ['Ec']
    solver_parameters += ['Es']


    def save(self, path, model, prefix='', suffix=''):
        super(ChenTromp_base, self).save(
            path, model, prefix, suffix, self.solver_parameters)

    def export_model(self, path):
        super(ChenTromp_base, self).export_model(
            path, self.solver_parameters+['rho'])
Example #5
class tiger_lg(custom_import('system', 'slurm_lg')):
    """ Specially designed system interface for tiger.princeton.edu

      See parent class SLURM_LG for more information
    """
    def check(self):
        """ Checks parameters and paths
        """

        if 'UUID' not in PAR:
            setattr(PAR, 'UUID', str(uuid4()))

        if 'SCRATCH' not in PATH:
            setattr(PATH, 'SCRATCH',
                    join('/scratch/gpfs', getuser(), 'seisflows', PAR.UUID))

        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', '')

        if 'NODESIZE' not in PAR:
            setattr(PAR, 'NODESIZE', 16)

        super(tiger_lg, self).check()

    def submit(self, *args, **kwargs):
        """ Submits job
        """
        if not exists(PATH.WORKDIR + '/' + 'scratch'):
            unix.ln(PATH.SCRATCH, PATH.WORKDIR + '/' + 'scratch')
        super(tiger_lg, self).submit(*args, **kwargs)
Example #6
class steepest_descent(custom_import('optimize', 'base')):
    """ Steepest descent method
    """
    restarted = False

    def check(self):
        """ Checks parameters, paths, and dependencies
        """
        # line search algorithm
        if 'LINESEARCH' not in PAR:
            setattr(PAR, 'LINESEARCH', 'Bracket')

        super(steepest_descent, self).check()

    def setup(self):
        super(steepest_descent, self).setup()

    def compute_direction(self):
        g_new = self.load('g_new')

        if self.precond:
            p_new = -self.precond(g_new)
        else:
            p_new = -g_new

        self.save('p_new', p_new)
        savetxt('s_new', self.dot(g_new, p_new))

        return p_new

    def restart(self):
        pass
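Since compute_direction above reduces to negating the (optionally preconditioned) gradient, one full steepest-descent step is easy to sketch. A hedged numpy illustration with a diagonal preconditioner (the step length would normally come from the line search; the values here are invented):

import numpy as np

g = np.array([4.0, -2.0, 1.0])    # gradient g_new
P = np.array([0.5, 1.0, 2.0])     # diagonal preconditioner
p = -P * g                        # descent direction p_new
s = np.dot(g, p)                  # slope s_new, saved for the line search
assert s < 0                      # p is a descent direction
m_new = np.array([1.0, 1.0, 1.0]) + 0.1 * p   # 0.1 stands in for the step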
Example #7
class stochastic_saga_2d(custom_import('solver', 'stochastic_saga'),
                         custom_import('solver', 'specfem2d')):
    """ Adds stochastic optimization (SAGA) machinery to SPECFEM2D
    """
    def get_source_positions(self):
        """ Read in source positions.
            Order coincides with self._source_names.
        """
        positions = np.zeros((len(self._source_names), 2))
        for isrc, source_name in enumerate(self._source_names):
            source_file = join(PATH.SPECFEM_DATA,
                               self.source_prefix + '_' + source_name)
            xs = getpar('xs', source_file, cast=float)
            zs = getpar('zs', source_file, cast=float)
            positions[isrc, :] = [xs, zs]

        return positions
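getpar above pulls the xs and zs values out of a SPECFEM2D SOURCE file. A minimal stand-in, assuming the usual 'key = value # comment' format (the real seisflows getpar is more general; this sketch only shows the idea):

def getpar_sketch(key, lines, cast=float):
    """ Returns the value of `key` from 'key = value' style lines
    """
    for line in lines:
        line = line.split('#')[0]             # strip trailing comments
        if '=' in line:
            name, val = line.split('=', 1)
            if name.strip() == key:
                return cast(val.strip())
    raise KeyError(key)

source = ['xs = 1500.0   # x position', 'zs = 250.0']
print(getpar_sketch('xs', source), getpar_sketch('zs', source))  # 1500.0 250.0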
Example #8
class default(custom_import('postprocess', 'base')):
    """ Default postprocesing option

      Provides default image processing and regularization functions for models
      or gradients
    """
    # currently identical to base class
    pass
Example #9
class default(custom_import('preprocess', 'base')):
    """ Default preprocesing class

      Provides data processing functions for seismic traces, with options for
      data misfit, filtering, normalization and muting
    """
    # currently identical to base class
    pass
Example #10
class ChenTromp_vti(custom_import('solver', 'ChenTromp_base')):

    # model parameters included in inversion
    parameters = []
    parameters += ['A']
    parameters += ['C']
    parameters += ['L']
    parameters += ['N']
    parameters += ['F']
Example #11
class Thomsen_vti(custom_import('solver', 'Thomsen_base')):

    # model parameters included in inversion
    parameters = []
    parameters += ['vp']
    parameters += ['vs']
    parameters += ['epsilon']
    parameters += ['delta']
    parameters += ['gamma']
Example #12
class gauss_newton(custom_import('optimize', 'newton')):
    """ Implements Gauss-Newton-CG algorithm
    """
    def check(self):
        """ Checks parameters and paths
        """
        super(gauss_newton, self).check()

    def hessian_product(self, h):
        return self.load('g_lcg') / h
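For context, truncated Newton and Gauss-Newton-CG methods need Hessian-vector products rather than the Hessian itself, and these are commonly approximated by a finite difference of gradients over a small step h, which appears to be the role of h in hessian_product above. A hedged sketch with a toy objective (the quadratic and its gradient are invented for illustration):

import numpy as np

def hessian_vector_product(grad, m, dm, h=1e-6):
    """ Approximates H*dm by a forward difference of gradients
    """
    return (grad(m + h * dm) - grad(m)) / h

grad = lambda m: np.array([2.0 * m[0], 8.0 * m[1]])   # gradient of x^2 + 4y^2
m = np.array([1.0, 1.0])
dm = np.array([1.0, 0.0])
print(hessian_vector_product(grad, m, dm))            # approximately [2., 0.]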
Example #13
class thrifty_inversion(custom_import('workflow', 'inversion')):
    """ Thrifty inversion subclass

      Provides savings over conventional inversion by carrying over forward
      simulations from line search

      The results of 'inversion' and 'thrifty_inversion' should be exactly the
      same
    """

    status = 0

    def initialize(self):
        if self.status == 0:
            super(thrifty_inversion, self).initialize()


    def clean(self):
        # can forward simulations from line search be carried over?
        self.update_status()

        if self.status == 1:
            unix.rm(PATH.GRAD)
            unix.mv(PATH.FUNC, PATH.GRAD)
            unix.mkdir(PATH.FUNC)
        else:
            super(thrifty_inversion, self).clean()


    def update_status(self):
        if PAR.LINESEARCH != 'Backtrack':
            # only works for backtracking line search
            self.status = 0

        elif optimize.iter == PAR.BEGIN or optimize.restarted:
            # even if backtracking line search is chosen, may not work on
            # first iteration or following a restart
            self.status = 0

        elif optimize.iter == PAR.END:
            # may not work after resuming saved workflow
            self.status = 0

        elif PATH.LOCAL:
            # may not work if using local filesystems
            self.status = 0

        else:
            self.status = 1
Example #14
class saga(custom_import('preprocess', 'base')):
    """ SAGA data processing class

      Adds finite sum data misfit functions to base class
    """
    def sum_residuals(self, files, n=1):
        """ Sums squares of residuals (normalized by 1/N)

          INPUT
            FILES - files containing residuals
        """
        total_misfit = 0.
        for file in files:
            total_misfit += np.sum(np.loadtxt(file)**2.)
        return total_misfit / n
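Usage sketch: with each residuals file holding one residual per line, sum_residuals returns the squared l2 misfit scaled by 1/n. An equivalent in-memory computation (values invented for illustration):

import numpy as np

residual_sets = [np.array([0.1, -0.2]), np.array([0.3])]   # two 'files'
n = 4
total_misfit = sum(np.sum(r**2.) for r in residual_sets) / n
print(total_misfit)   # (0.01 + 0.04 + 0.09) / 4 = 0.035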
Example #15
class anisotropic(custom_import('solver', 'elastic')):
    """ Adds elastic inversion machinery
    """
    model_parameters = []
    model_parameters += ['c11']
    model_parameters += ['c13']
    model_parameters += ['c15']
    model_parameters += ['c33']
    model_parameters += ['c35']
    model_parameters += ['c55']


    if PAR.MATERIALS == 'ChenTromp2d':
        from seisflows.plugins.maps import voigt_chentromp_2d as map_forward
        from seisflows.plugins.maps import chentromp_voigt_2d as map_inverse
        kernel_parameters = []
        kernel_parameters += ['A']
        kernel_parameters += ['C']
        kernel_parameters += ['N']
        kernel_parameters += ['L']
        kernel_parameters += ['F']


    elif PAR.MATERIALS == 'Voigt2d':
        from seisflows.plugins.maps import voigt_voigt_2d as map_forward
        from seisflows.plugins.maps import voigt_voigt_2d as map_inverse
        kernel_parameters = []
        kernel_parameters += ['c11']
        kernel_parameters += ['c13']
        kernel_parameters += ['c15']
        kernel_parameters += ['c33']
        kernel_parameters += ['c35']
        kernel_parameters += ['c55']


    elif PAR.MATERIALS == 'Thomsen2d':
        from seisflows.plugins.maps import voigt_thomsen_2d as map_forward
        from seisflows.plugins.maps import thomsen_voigt_2d as map_inverse
        kernel_parameters = []
        kernel_parameters += ['vp']
        kernel_parameters += ['vs']
        kernel_parameters += ['epsilon']
        kernel_parameters += ['delta']
        kernel_parameters += ['gamma']
        kernel_parameters += ['theta']

    else:
        raise ParameterError(PAR, 'MATERIALS')
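The if/elif chain above selects the forward/inverse parameter maps and the kernel list at class-definition time. A hedged, table-driven alternative (the map and parameter names mirror the branches above; collapsing the chain into a dict lookup is a design variation, not the seisflows implementation):

# hypothetical dispatch table keyed on PAR.MATERIALS
MATERIALS_TABLE = {
    'ChenTromp2d': ('voigt_chentromp_2d', 'chentromp_voigt_2d',
                    ['A', 'C', 'N', 'L', 'F']),
    'Voigt2d':     ('voigt_voigt_2d', 'voigt_voigt_2d',
                    ['c11', 'c13', 'c15', 'c33', 'c35', 'c55']),
    'Thomsen2d':   ('voigt_thomsen_2d', 'thomsen_voigt_2d',
                    ['vp', 'vs', 'epsilon', 'delta', 'gamma', 'theta']),
}

def lookup_materials(materials):
    try:
        return MATERIALS_TABLE[materials]
    except KeyError:
        raise ValueError('unsupported MATERIALS: %s' % materials)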
Example #16
class ChenTromp(custom_import('postprocess', 'base')):
    """ Postprocessing class
    """

    def write_gradient(self, path):
        """ Writes gradient of objective function
        """
        # check parameters
        if 'OPTIMIZE' not in PATH:
            raise ParameterError(PATH, 'OPTIMIZE')

        # check input arguments
        if not exists(path):
            raise Exception()

        self.combine_kernels(path)
        self.process_kernels(path)

        g = solver.merge(solver.load(
                path +'/'+ 'kernels/sum',
                suffix='_kernel',
                verbose=True))

        # apply optional scaling
        if PAR.SCALE and float(PAR.SCALE) != 1.:
            g *= PAR.SCALE

        # write gradient
        solver.save(PATH.GRAD +'/'+ 'gradient', solver.split(g), suffix='_kernel')
        savenpy(PATH.OPTIMIZE +'/'+ 'g_new', g)


        try:
            for iproc in range(PAR.NPROC):
                y = g['Gs'][iproc]
                x = -g['Gc'][iproc]
                t = 0.5*np.arctan2(y, x)
                filename = 'proc%06d_%s.bin' % (iproc, 'azimuth')
                savebin(t, PATH.GRAD +'/'+ filename)
        except Exception:
            # azimuth output requires the Gc/Gs kernels; skip otherwise
            pass
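The try block above recovers a fast-axis angle from the Gc/Gs anisotropy kernels through theta = 0.5*arctan2(Gs, -Gc). A hedged numpy sketch of the same computation (array values invented for illustration):

import numpy as np

Gc = np.array([-1.0, 0.0, 1.0])
Gs = np.array([0.0, 1.0, 1.0])
theta = 0.5 * np.arctan2(Gs, -Gc)   # azimuth per grid point
print(np.degrees(theta))            # [ 0.   45.   67.5]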
Example #17
class NLCG(custom_import('optimize', 'base')):
    """ Nonlinear conjugate gradient method
    """

    def check(self):
        """ Checks parameters, paths, and dependencies
        """
        # line search algorithm
        if 'LINESEARCH' not in PAR:
            setattr(PAR, 'LINESEARCH', 'Bracket')

        # NLCG memory
        if 'NLCGMEM' not in PAR:
            setattr(PAR, 'NLCGMEM', 3)

        # NLCG periodic restart interval
        if 'NLCGMAX' not in PAR:
            setattr(PAR, 'NLCGMAX', np.inf)

        # NLCG conjugacy restart threshold
        if 'NLCGTHRESH' not in PAR:
            setattr(PAR, 'NLCGTHRESH', np.inf)

        super(NLCG, self).check()


    def setup(self):
        super(NLCG, self).setup()

        self.NLCG = optimize.NLCG(
            path=PATH.OPTIMIZE,
            maxiter=PAR.NLCGMAX,
            thresh=PAR.NLCGTHRESH,
            precond=self.precond)


    def compute_direction(self):
        g_new = self.load('g_new')
        p_new, self.restarted = self.NLCG()
        self.save('p_new', p_new)
        savetxt('s_new', self.dot(g_new, p_new))


    def restart(self):
        super(NLCG, self).restart()
        self.NLCG.restart()
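The direction update itself lives in the NLCG plugin. A hedged sketch of the classic Polak-Ribiere update that such a plugin typically implements, including the usual safeguard of restarting from steepest descent when conjugacy is lost (this is the textbook formula, not the seisflows plugin itself):

import numpy as np

def nlcg_direction(g_new, g_old, p_old):
    """ One Polak-Ribiere conjugate gradient update with restart safeguard
    """
    beta = np.dot(g_new, g_new - g_old) / np.dot(g_old, g_old)
    beta = max(beta, 0.0)           # restart: fall back to steepest descent
    p_new = -g_new + beta * p_old
    restarted = (beta == 0.0)
    return p_new, restarted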
Example #18
class LBFGS(custom_import('optimize', 'base')):
    """ Limited memory BFGS algorithm
    """
    def check(self):
        """ Checks parameters, paths, and dependencies
        """
        # line search algorithm
        if 'LINESEARCH' not in PAR:
            setattr(PAR, 'LINESEARCH', 'Backtrack')

        # LBFGS memory
        if 'LBFGSMEM' not in PAR:
            setattr(PAR, 'LBFGSMEM', 3)

        # LBFGS periodic restart interval
        if 'LBFGSMAX' not in PAR:
            setattr(PAR, 'LBFGSMAX', np.inf)

        # LBFGS angle restart threshold
        if 'LBFGSTHRESH' not in PAR:
            setattr(PAR, 'LBFGSTHRESH', 0.)

        super(LBFGS, self).check()

    def setup(self):
        super(LBFGS, self).setup()

        self.LBFGS = lib(path=PATH.OPTIMIZE,
                         memory=PAR.LBFGSMEM,
                         maxiter=PAR.LBFGSMAX,
                         thresh=PAR.LBFGSTHRESH,
                         precond=self.precond)

    def compute_direction(self):
        g_new = self.load('g_new')
        p_new, self.restarted = self.LBFGS()
        self.save('p_new', p_new)
        self.savetxt('s_new', self.dot(g_new, p_new))
        return p_new

    def restart(self):
        super(LBFGS, self).restart()
        self.LBFGS.restart()
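The LBFGS plugin applies the inverse-Hessian approximation through the standard two-loop recursion over stored (s, y) update pairs. A self-contained sketch with the pairs held as plain lists (the seisflows plugin stores them on disk and adds a preconditioner; this is the textbook algorithm, not the plugin itself):

import numpy as np

def lbfgs_direction(g, s_list, y_list):
    """ Two-loop recursion: returns -H*g for the L-BFGS inverse Hessian H
    """
    q = g.copy()
    alphas = []
    for s, y in zip(reversed(s_list), reversed(y_list)):
        alpha = np.dot(s, q) / np.dot(y, s)
        q -= alpha * y
        alphas.append(alpha)
    if s_list:   # scale by gamma = (s.y)/(y.y) from the newest pair
        q *= np.dot(s_list[-1], y_list[-1]) / np.dot(y_list[-1], y_list[-1])
    for (s, y), alpha in zip(zip(s_list, y_list), reversed(alphas)):
        beta = np.dot(y, q) / np.dot(y, s)
        q += (alpha - beta) * s
    return -q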
Example #19
class tikhonov0(custom_import('postprocess', 'regularize')):
    """ Adds regularization options to base class

        Available options include zeroth-, first-, and second-order Tikhonov
        and total variation regularization. While the underlying theory is
        classical, its application to unstructured numerical grids via the
        "seisflows.tools.math.nabla" operator is somewhat involved.

        So far, can only be used for 2D inversion, because the required spatial
        derivative operator "nabla" is not yet available for 3D grids.
    """
    def check(self):
        """ Checks parameters and paths
        """
        super(tikhonov0, self).check()

        if not PAR.LAMBDA:
            raise ValueError('Regularization requires a nonzero PAR.LAMBDA')

    def nabla(self, mesh, m, g):
        return m / np.mean(m)
Example #20
class total_variation(custom_import('postprocess', 'regularize')):
    """ Adds regularization options to base class

        So far, can only be used for 2D inversion, because the required spatial
        derivative operator "nabla" is not yet available for 3D grids.
    """
    def check(self):
        """ Checks parameters and paths
        """
        super(total_variation, self).check()

        if not PAR.LAMBDA:
            raise ValueError('Regularization requires a nonzero PAR.LAMBDA')

        if not hasattr(PAR, 'EPSILON'):
            setattr(PAR, 'EPSILON', 0.)

    def nabla(self, mesh, m, g):
        M, grid = mesh2grid(g, mesh)
        DM = tv(M, epsilon=PAR.EPSILON)
        dm = grid2mesh(DM, grid, mesh)
        return dm / np.mean(m)
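On a regular grid, the gradient of the smoothed total-variation functional is the negative divergence of the normalized model gradient, with epsilon keeping the denominator away from zero, which is exactly the role of PAR.EPSILON above. A hedged numpy sketch standing in for the tv operator (illustration only; the seisflows operator also handles unstructured grids):

import numpy as np

def tv_gradient(M, epsilon=1e-3):
    """ Gradient of smoothed TV: -div( grad M / sqrt(|grad M|^2 + eps^2) )
    """
    Mz, Mx = np.gradient(M)
    norm = np.sqrt(Mx**2 + Mz**2 + epsilon**2)
    div_z = np.gradient(Mz / norm)[0]
    div_x = np.gradient(Mx / norm)[1]
    return -(div_x + div_z)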
Example #21
class Thomsen_base(custom_import('solver', 'specfem3d_legacy')):

    # raise NotImplementedError("Need to fix xsum_kernels utility")

    # parameters expected by solver
    solver_parameters = []
    solver_parameters += ['vp']
    solver_parameters += ['vs']
    solver_parameters += ['epsilon']
    solver_parameters += ['delta']
    solver_parameters += ['gamma']
    solver_parameters += ['theta']
    solver_parameters += ['azimuth']


    def save(self, path, model, prefix='', suffix=''):
        super(Thomsen_base, self).save(
            path, model, prefix, suffix, self.solver_parameters)


    def export_model(self, path):
        super(Thomsen_base, self).export_model(
            path, self.solver_parameters+['rho'])
Example #22
class multithreaded(custom_import('system', 'multicore')):
    """ An interface through which to submit workflows, run tasks in serial or 
      parallel, and perform other system functions.

      By hiding environment details behind a python interface layer, these 
      classes provide a consistent command set across different computing
      environments.

      For important additional information, please see 
      http://seisflows.readthedocs.org/en/latest/manual/manual.html#system-configuration
    """
    def check(self):
        """ Checks parameters and paths
        """
        print("""
            DEPRECATION WARNING

                SYSTEM.MULTITHREADED has been renamed SYSTEM.MULTICORE

                Please update your parameter file.
        """)

        super(multithreaded, self).check()
Example #23
class steepest_descent(custom_import('optimize', 'base')):
    """ Steepest descent method
    """
    restarted = False

    def check(self):
        """ Checks parameters, paths, and dependencies
        """

        if 'LINESEARCH' not in PAR:
            setattr(PAR, 'LINESEARCH', 'Bracket')

        super(steepest_descent, self).check()

    def setup(self):
        super(steepest_descent, self).setup()

    def compute_direction(self):
        super(steepest_descent, self).compute_direction()

    def restart(self):
        # steepest descent never requires restarts
        pass
Example #24
class source_encoding_2d(custom_import('solver', 'specfem2d')):
    def check(self):
        """ Checks parameters, paths, and dependencies
        """
        super(source_encoding_2d, self).check()

    def initialize_solver_directories(self):
        """ Sets up directory in which to run solver
        """
        if 'NT_PADDED' not in PAR:
            raise Exception

        super(source_encoding_2d, self).initialize_solver_directories()
        solvertools.setpar('NSOURCES', PAR.NSRC)
        solvertools.setpar('nt', PAR.NT_PADDED)

    def write_receivers(self, coords):
        """ Writes receivers file
        """
        solvertools.write_receivers(coords, self.cwd)

    def write_sources(self, coords, stats=[], mapping=lambda i: [i]):
        """ Writes sources file
        """
        unix.cd(self.cwd)
        nodes = mapping(system.taskid())
        lines = []
        for i in nodes:
            solvertools.write_sources(
                [coords[0][i], coords[1][i], coords[2][i]], self.cwd,
                stats['ws'][i])

            with open('DATA/SOURCE', 'r') as f:
                lines.extend(f.readlines())

        with open('DATA/SOURCE', 'w') as f:
            f.writelines(lines)
Example #25
class lambda_mu_2d(custom_import('solver', 'elastic2d')):
    """ Adds Lame parameter machinery to SPECFEM2D
    """
    assert PAR.MATERIALS == 'lambda_mu'

    def export_kernels(self, path):
        assert self.mesh_properties.nproc == 1
        iproc = 0

        path = join(path, 'kernels')

        model_parameters = ['rho', 'vp', 'vs']
        kernel_parameters = ['rho', 'kappa', 'mu']

        model = getstruct(self.cwd + '/' + 'DATA/', model_parameters, iproc)
        kernels = getstruct(self.cwd + '/' + 'OUTPUT_FILES/',
                            kernel_parameters,
                            iproc,
                            suffix='_kernel')

        unix.mkdir(join(path, basename(self.cwd)))
        self.save(join(path, basename(self.cwd)),
                  map(model, kernels),
                  suffix='_kernel')
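The map(model, kernels) call above relies on a parameter map between the (rho, vp, vs) model and the (rho, kappa, mu) kernels. For reference, a hedged sketch of the standard isotropic relations such a map builds on, in Lame form (conventions for kappa differ between 2D and 3D codes, so treat this as illustrative):

def velocities_to_lame(rho, vp, vs):
    """ Standard isotropic relations: mu = rho*vs^2, lam = rho*(vp^2 - 2*vs^2)
    """
    mu = rho * vs**2
    lam = rho * (vp**2 - 2.0 * vs**2)
    return lam, mu

lam, mu = velocities_to_lame(rho=2600.0, vp=5000.0, vs=3000.0)
print(lam, mu)   # 1.82e10 2.34e10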
Example #26
class pbs_lg(custom_import('system', 'base')):
    """ An interface through which to submit workflows, run tasks in serial or
      parallel, and perform other system functions.

      By hiding environment details behind a python interface layer, these
      classes provide a consistent command set across different computing
      environments.

      Intermediate files are written to a global scratch path PATH.SCRATCH,
      which must be accessible to all compute nodes.

      Optionally, users can provide a local scratch path PATH.LOCAL if each
      compute node has its own local filesystem.

      For important additional information, please see
      http://seisflows.readthedocs.org/en/latest/manual/manual.html#system-configuration
    """

    def check(self):
        """ Checks parameters and paths
        """
        print(msg.Warning_pbs_lg)

        # name of job
        if 'TITLE' not in PAR:
            setattr(PAR, 'TITLE', basename(abspath('.')))

        # time allocated for entire workflow
        if 'WALLTIME' not in PAR:
            setattr(PAR, 'WALLTIME', 30.)

        # time allocated for each individual task
        if 'STEPTIME' not in PAR:
            setattr(PAR, 'STEPTIME', 15.)

        # number of tasks
        if 'NTASK' not in PAR:
            raise ParameterError(PAR, 'NTASK')

        # number of cores per task
        if 'NPROC' not in PAR:
            raise ParameterError(PAR, 'NPROC')

        # number of cores per node
        if 'NODESIZE' not in PAR:
            raise ParameterError(PAR, 'NODESIZE')

        # optional additional PBS arguments
        if 'PBSARGS' not in PAR:
            setattr(PAR, 'PBSARGS', '')

        # optional environment variable list VAR1=val1,VAR2=val2,...
        if 'ENVIRONS' not in PAR:
            setattr(PAR, 'ENVIRONS', '')

        # level of detail in output messages
        if 'VERBOSE' not in PAR:
            setattr(PAR, 'VERBOSE', 1)

        # where job was submitted
        if 'WORKDIR' not in PATH:
            setattr(PATH, 'WORKDIR', abspath('.'))

        # where output files are written
        if 'OUTPUT' not in PATH:
            setattr(PATH, 'OUTPUT', PATH.WORKDIR+'/'+'output')

        # where temporary files are written
        if 'SCRATCH' not in PATH:
            setattr(PATH, 'SCRATCH', PATH.WORKDIR+'/'+'scratch')

        # where system files are written
        if 'SYSTEM' not in PATH:
            setattr(PATH, 'SYSTEM', PATH.SCRATCH+'/'+'system')

        # optional local scratch path
        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', None)


    def submit(self, workflow):
        """ Submits workflow
        """
        # create scratch directories
        unix.mkdir(PATH.SCRATCH)
        unix.mkdir(PATH.SYSTEM)

        # create output directories
        unix.mkdir(PATH.OUTPUT)
        unix.mkdir(PATH.WORKDIR+'/'+'output.pbs')

        self.checkpoint()

        hours = PAR.WALLTIME/60
        minutes = PAR.WALLTIME%60
        walltime = 'walltime=%02d:%02d:00 ' % (hours, minutes)

        ncpus = PAR.NODESIZE
        mpiprocs = PAR.NODESIZE

        # prepare qsub arguments
        call('qsub '
                + '%s ' % PAR.PBSARGS
                + '-l select=1:ncpus=%d:mpiprocs=%d ' % (ncpus, mpiprocs)
                + '-l %s ' % walltime
                + '-N %s ' % PAR.TITLE
                + '-j oe '
                + '-o %s ' % (PATH.WORKDIR+'/'+'output.log')
                + '-V '
                + ' -- ' + findpath('seisflows.system') +'/'+ 'wrappers/submit '
                + PATH.OUTPUT)


    def run(self, classname, funcname, hosts='all', **kwargs):
        """ Runs tasks in serial or parallel on specified hosts.
        """
        self.checkpoint()

        self.save_kwargs(classname, funcname, kwargs)
        jobs = self.submit_job_array(classname, funcname, hosts)
        while True:
            # wait a few seconds before checking again
            time.sleep(5)
            self._timestamp()
            isdone, jobs = self.job_array_status(classname, funcname, jobs)
            if isdone:
                return


    def mpiexec(self):
        """ Specifies MPI exectuable; used to invoke solver
        """
        return 'mpiexec '


    def getnode(self):
        """ Gets number of running task
        """
        jobid = os.getenv('PBS_ARRAY_INDEX')
        if jobid is None:
            raise Exception("PBS_ARRAY_INDEX environment variable not defined.")
        return int(jobid)


    ### private methods

    def submit_job_array(self, classname, funcname, hosts='all'):
        with open(PATH.SYSTEM+'/'+'job_id', 'w') as f:
            call(self.job_array_cmd(classname, funcname, hosts),
                stdout=f)

        # retrieve job ids
        with open(PATH.SYSTEM+'/'+'job_id', 'r') as f:
            line = f.readline()
            job = line.split()[-1].strip()
        if hosts == 'all' and PAR.NTASK > 1:
            nn = range(PAR.NTASK)
            job0 = job.strip('[].sdb')
            return [job0+'['+str(ii)+'].sdb' for ii in nn]
        else:
            return [job]


    def job_array_cmd(self, classname, funcname, hosts):
        nodes = math.ceil(PAR.NTASK/float(PAR.NODESIZE))
        ncpus = PAR.NPROC
        mpiprocs = PAR.NPROC

        hours = PAR.STEPTIME/60
        minutes = PAR.STEPTIME%60
        walltime = 'walltime=%02d:%02d:00 '%(hours, minutes)

        return ('qsub '
                + '%s ' % PAR.PBSARGS
                + '-l select=%d:ncpus=%d:mpiprocs=%d ' % (nodes, ncpus, mpiprocs)
                + '-l %s ' % walltime
                + '-J 0-%s ' % (PAR.NTASK-1)
                + '-N %s ' % PAR.TITLE
                + '-o %s ' % (PATH.WORKDIR+'/'+'output.pbs/' + '$PBS_ARRAYID')
                + '-r y '
                + '-j oe '
                + '-V '
                + self.job_array_args(hosts)
                + PATH.OUTPUT + ' '
                + classname + ' '
                + funcname + ' '
                + 'PYTHONPATH=' + findpath('seisflows.system') + ','
                + PAR.ENVIRONS)


    def job_array_args(self, hosts):
        if hosts == 'all':
            args = ('-J 0-%s ' % (PAR.NTASK-1)
                    + '-o %s ' % (PATH.WORKDIR+'/'+'output.pbs/' + '$PBS_ARRAYID')
                    + ' -- ' + findpath('seisflows.system') +'/'+ 'wrappers/run ')

        elif hosts == 'head':
            args = ('-J 0-0 '
                    + '-o %s ' % (PATH.WORKDIR+'/'+'output.pbs/' + '$PBS_JOBID')
                    + ' -- ' + findpath('seisflows.system') +'/'+ 'wrappers/run ')

        return args


    def job_array_status(self, classname, funcname, jobs):
        """ Determines completion status of one or more jobs
        """
        states = []
        for job in jobs:
            state = self._query(job)
            if state in ['C']:
                states += [1]
            else:
                states += [0]
            if state in ['F']:
                print(msg.TaskError_PBS % (classname, funcname, job))
                sys.exit(-1)
        isdone = all(states)

        return isdone, jobs


    def _query(self, jobid):
        """ Queries job state from PBS database
        """
        # TODO: replace shell utilities with native Python
        with open(PATH.SYSTEM+'/'+'job_status', 'w') as f:
            call('qstat -x -tJ ' + jobid + ' | '
                + 'tail -n 1 ' + ' | '
                + 'awk \'{print $5}\'',
                stdout=f)

        with open(PATH.SYSTEM+'/'+'job_status', 'r') as f:
            line = f.readline()
            state = line.strip()

        return state


    ### utility function

    def _timestamp(self):
        with open(PATH.SYSTEM+'/'+'timestamps', 'a') as f:
            line = time.strftime('%H:%M:%S')+'\n'
            f.write(line)

    def save_kwargs(self, classname, funcname, kwargs):
        kwargspath = join(PATH.OUTPUT, 'kwargs')
        kwargsfile = join(kwargspath, classname+'_'+funcname+'.p')
        unix.mkdir(kwargspath)
        saveobj(kwargsfile, kwargs)
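Two small pieces of the scheduler plumbing above are easy to get wrong: converting WALLTIME minutes into a PBS walltime string, and expanding a PBS Pro array job id into its sub-job ids (the '[].sdb' handling mirrors submit_job_array above). Hedged stand-alone sketches of both:

def walltime_string(minutes):
    """ Formats minutes as 'walltime=HH:MM:00' for qsub -l
    """
    return 'walltime=%02d:%02d:00' % (minutes // 60, minutes % 60)

def expand_array_job(job, ntask):
    """ '123[].sdb' -> ['123[0].sdb', '123[1].sdb', ...]
    """
    job0 = job.strip('[].sdb')
    return ['%s[%d].sdb' % (job0, i) for i in range(ntask)]

print(walltime_string(90))               # walltime=01:30:00
print(expand_array_job('123[].sdb', 2))  # ['123[0].sdb', '123[1].sdb']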
Example #27
class specfem3d_globe(custom_import('solver', 'base')):
    """ Python interface for SPECFEM3D_GLOBE

      See base class for method descriptions
    """

    if PAR.MATERIALS in ['Isotropic']:
        parameters = []
        parameters += ['vp']
        parameters += ['vs']
    else:
        parameters = []
        parameters += ['vpv']
        parameters += ['vph']
        parameters += ['vsv']
        parameters += ['vsh']
        parameters += ['eta']

    def check(self):
        """ Checks parameters and paths
        """
        super(specfem3d_globe, self).check()

        if 'CHANNELS' not in PAR:
            setattr(PAR, 'CHANNELS', 'ENZ')

        # check data format
        if 'FORMAT' not in PAR:
            raise Exception()

    def generate_data(self, **model_kwargs):
        """ Generates data
        """
        self.generate_mesh(**model_kwargs)

        unix.cd(self.cwd)
        setpar('SIMULATION_TYPE', '1')
        setpar('SAVE_FORWARD', '.true.')
        call_solver(system.mpiexec(), 'bin/xspecfem3D')

        if PAR.FORMAT in ['ASCII', 'ascii']:
            src = glob('OUTPUT_FILES/*.sem.ascii')
            dst = 'traces/obs'
            unix.mv(src, dst)

        if PAR.SAVETRACES:
            self.export_traces(PATH.OUTPUT + '/' + 'traces/obs')

    def generate_mesh(self,
                      model_path=None,
                      model_name=None,
                      model_type='gll'):
        """ Performs meshing and database generation
        """
        assert (model_name)
        assert (model_type)

        self.initialize_solver_directories()
        unix.cd(self.cwd)

        if model_type == 'gll':
            assert (exists(model_path))
            self.check_mesh_properties(model_path)

            unix.cp(glob(model_path + '/' + '*'), self.model_databases)

            call_solver(system.mpiexec(), 'bin/xmeshfem3D')

            if self.taskid == 0:
                self.export_model(PATH.OUTPUT + '/' + model_name)

        else:
            raise NotImplementedError

    ### model input/output

    def load(self, path, prefix='reg1_', suffix='', verbose=False):
        """ reads SPECFEM model or kernel

          Models are stored in Fortran binary format and separated into multiple
          files according to material parameter and processor rank.
        """
        model = Model(self.parameters)
        minmax = Minmax(self.parameters)

        for iproc in range(self.mesh_properties.nproc):
            # read database files
            keys, vals = loadbypar(path, self.parameters, iproc, prefix,
                                   suffix)
            for key, val in zip(keys, vals):
                model[key] += [val]

            minmax.update(keys, vals)

        if verbose:
            minmax.write(path, logpath=PATH.SUBMIT)

        return model

    def save(self, path, model, prefix='reg1_', suffix=''):
        """ writes SPECFEM3D_GLOBE transerverly isotropic model
        """
        unix.mkdir(path)

        for iproc in range(self.mesh_properties.nproc):
            for key in ['vpv', 'vph', 'vsv', 'vsh', 'eta']:
                if key in self.parameters:
                    savebin(model[key][iproc], path, iproc,
                            prefix + key + suffix)
                elif 'kernel' in suffix:
                    pass
                else:
                    src = PATH.OUTPUT + '/' + 'model_init'
                    dst = path
                    copybin(src, dst, iproc, prefix + key + suffix)

            if 'rho' in self.parameters:
                savebin(model['rho'][iproc], path, iproc,
                        prefix + 'rho' + suffix)
            elif 'kernel' in suffix:
                pass
            else:
                src = PATH.OUTPUT + '/' + 'model_init'
                dst = path
                copybin(src, dst, iproc, prefix + 'rho' + suffix)

    ### low-level solver interface

    def forward(self, path='traces/syn'):
        """ Calls SPECFEM3D_GLOBE forward solver
        """
        solvertools.setpar('SIMULATION_TYPE', '1')
        solvertools.setpar('SAVE_FORWARD', '.true.')
        call_solver(system.mpiexec(), 'bin/xspecfem3D')

        if PAR.FORMAT in ['ASCII', 'ascii']:
            src = glob('OUTPUT_FILES/*.sem.ascii')
            dst = path
            unix.mv(src, dst)

    def adjoint(self):
        """ Calls SPECFEM3D_GLOBE adjoint solver
        """
        solvertools.setpar('SIMULATION_TYPE', '3')
        solvertools.setpar('SAVE_FORWARD', '.false.')
        unix.rm('SEM')
        unix.ln('traces/adj', 'SEM')
        call_solver(system.mpiexec(), 'bin/xspecfem3D')

    def check_mesh_properties(self, path=None, parameters=None):
        if not hasattr(self, '_mesh_properties'):
            if not path:
                path = PATH.MODEL_INIT

            if not parameters:
                parameters = self.parameters

            nproc = 0
            ngll = []
            while True:
                dummy = loadbin(path, nproc, 'reg1_' + parameters[0])
                ngll += [len(dummy)]
                nproc += 1
                if not exists('%s/proc%06d_reg1_%s.bin' %
                              (path, nproc, parameters[0])):
                    break

            self._mesh_properties = Struct([['nproc', nproc], ['ngll', ngll]])

        return self._mesh_properties

    def rename_data(self):
        """ Works around conflicting data filename conventions
        """
        files = glob(self.cwd + '/' + 'traces/adj/*sem.ascii')
        unix.rename('sem.ascii', 'sem.ascii.adj', files)

    def initialize_adjoint_traces(self):
        super(specfem3d_globe, self).initialize_adjoint_traces()

        # workaround for SPECFEM3D_GLOBE's use of different name conventions
        # for regular traces and 'adjoint' traces
        if PAR.FORMAT in ['ASCII', 'ascii']:
            files = glob(self.cwd + '/' + 'traces/adj/*sem.ascii')
            unix.rename('sem.ascii', 'adj', files)

    @property
    def data_filenames(self):
        unix.cd(self.cwd)
        unix.cd('traces/obs')

        if PAR.FORMAT in ['ASCII', 'ascii']:
            filenames = []
            for channel in PAR.CHANNELS:
                filenames += glob('*.??%s.sem.ascii' % channel)
            return [filenames]

    @property
    def kernel_databases(self):
        return join(self.cwd, 'OUTPUT_FILES/DATABASES_MPI')

    @property
    def model_databases(self):
        return join(self.cwd, 'OUTPUT_FILES/DATABASES_MPI')

    @property
    def source_prefix(self):
        return 'CMTSOLUTION'
Example #28
class specfem3d_nz(custom_import('solver', 'base')):
    """ Python interface for SPECFEM3D

      See base class for method descriptions
    """
    def check(self):
        """ Checks parameters and paths
        """
        super(specfem3d_nz, self).check()

        # check time stepping parameters
        if 'NT' not in PAR:
            raise Exception()

        if 'DT' not in PAR:
            raise Exception()

        # check data format
        if 'FORMAT' not in PAR:
            raise Exception()

        # make sure data format is acceptable
        if PAR.FORMAT not in ['su', 'ascii']:
            raise Exception()

    def setup(self):
        """
          Overload of solver.base.setup, removes the need to move data around
          as Pyatoa takes care of data fetching within eval_func
          Prepares solver for inversion or migration
          Sets up directory structure expected by SPECFEM and copies or
          generates seismic data to be inverted or migrated
        """
        # clean up for new inversion
        unix.rm(self.cwd)

        self.initialize_solver_directories()

        # if synthetic case, create synthetic observations
        if PAR.CASE == "Synthetic":
            self.generate_data(model_path=PATH.MODEL_TRUE,
                               model_name='model_true',
                               model_type='gll')

        # prepare initial model
        self.generate_mesh(model_path=PATH.MODEL_INIT,
                           model_name='model_init',
                           model_type='gll')

    def generate_data(self, **model_kwargs):
        """ 
        Generates data in the synthetic-synthetic comparison case.
        Not for use in the real-data problem. Differs from specfem3d.nz in that
        it automatically calls generate mesh for the true model, rather than
        passing them in as kwargs.
        """
        # Prepare for the forward simulation
        self.generate_mesh(**model_kwargs)

        print('specfem3d_nz.generate_data')
        unix.cd(self.cwd)
        setpar('SIMULATION_TYPE', '1')
        setpar('SAVE_FORWARD', '.true.')
        setpar('ATTENUATION ', '.true.')
        call_solver(system.mpiexec(), 'bin/xspecfem3D')

        # seismic unix format
        if PAR.FORMAT in ['SU', 'su']:
            src = glob('OUTPUT_FILES/*_d?_SU')
            dst = 'traces/obs'
            unix.mv(src, dst)
        # ascii sem output format
        elif PAR.FORMAT == "ascii":
            src = glob('OUTPUT_FILES/*sem?')
            dst = 'traces/obs'
            unix.mv(src, dst)

        if PAR.SAVETRACES:
            self.export_traces(PATH.OUTPUT + '/' + 'traces/obs')

    def generate_mesh(self,
                      model_path=None,
                      model_name=None,
                      model_type='gll'):
        """ Performs meshing and database generation
        """
        print('specfem3d_nz.generate_mesh')
        assert (model_name)
        assert (model_type)

        unix.cd(self.cwd)

        if model_type in ['gll']:
            par = getpar('MODEL').strip()
            if par != 'gll':
                if self.taskid == 0:
                    print('WARNING: Unexpected Par_file setting:')
                    print('MODEL = ' + par)

            assert (exists(model_path))
            self.check_mesh_properties(model_path)

            src = glob(model_path + '/' + '*')
            dst = self.model_databases
            unix.cp(src, dst)

            call_solver(system.mpiexec(), 'bin/xgenerate_databases')

            if self.taskid == 0:
                self.export_model(PATH.OUTPUT + '/' + model_name)

        else:
            raise NotImplementedError

    def eval_fwd(self, path=''):
        """
        Performs forward simulations for misfit function evaluation.
        Same as solver.base.eval_func without the residual writing.
        For use in specfem3d_nz where eval_func is taken by Pyatoa.
        """
        print('specfem3d_nz.eval_fwd')
        unix.cd(self.cwd)
        self.import_model(path)
        self.forward()

    def eval_func(self, iter='', step=0, suffix=None, *args, **kwargs):
        """
        evaluate the misfit functional using the external package Pyatoa.
        Pyatoa is written in Python3 so it needs to be called with subprocess

        :param args:
        :param kwargs:
        :return:
        """
        print('specfem3d_nz.eval_func')
        load_conda = "module load Anaconda2/5.2.0-GCC-7.1.0;"
        load_hdf5 = "module load HDF5/1.10.1-GCC-7.1.0;"
        arguments = " ".join([
            "--mode process", "--working_dir {}".format(PATH.WORKDIR),
            "--current_dir {}".format(self.cwd),
            "--model_number {}".format("m{:0>2}".format(int(iter) - 1)),
            "--event_id {}".format(self.source_name),
            "--step_count {}".format("s{:0>2}".format(step)),
            "--suffix {}".format(suffix)
        ])
        call_pyatoa = " ".join(
            [load_conda, load_hdf5, PATH.PYTHON3, PATH.PYATOA_RUN, arguments])
        print(call_pyatoa)
        try:
            tstart = time.time()
            stdout = subprocess.check_output(call_pyatoa, shell=True)
            print('{:.2f}m elapsed'.format((time.time() - tstart) / 60))
        except subprocess.CalledProcessError as e:
            print("Pyatoa failed with {}".format(e))
            sys.exit(-1)

    # low-level solver interface
    def forward(self, path='traces/syn'):
        """ Calls SPECFEM3D forward solver and then moves files into path
        """
        setpar('SIMULATION_TYPE', '1')
        setpar('SAVE_FORWARD', '.true.')
        setpar('ATTENUATION ', '.true.')
        call_solver(system.mpiexec(), 'bin/xgenerate_databases')
        call_solver(system.mpiexec(), 'bin/xspecfem3D')

        # seismic unix output format
        if PAR.FORMAT in ['SU', 'su']:
            src = glob('OUTPUT_FILES/*_d?_SU')
            dst = path
            unix.mv(src, dst)
        # ascii sem output format
        elif PAR.FORMAT == "ascii":
            src = glob('OUTPUT_FILES/*sem?')
            dst = path
            unix.mv(src, dst)

    def adjoint(self):
        """ Calls SPECFEM3D adjoint solver
        """
        setpar('SIMULATION_TYPE', '3')
        setpar('SAVE_FORWARD', '.false.')
        setpar('ATTENUATION ', '.false.')
        unix.rm('SEM')
        unix.ln('traces/adj', 'SEM')
        call_solver(system.mpiexec(), 'bin/xspecfem3D')

    # input file writers
    def check_solver_parameter_files(self):
        """ Checks solver parameters
        """
        nt = getpar('NSTEP', cast=int)
        dt = getpar('DT', cast=float)

        if nt != PAR.NT:
            if self.taskid == 0: print "WARNING: nt != PAR.NT"
            setpar('NSTEP', PAR.NT)

        if dt != PAR.DT:
            if self.taskid == 0: print "WARNING: dt != PAR.DT"
            setpar('DT', PAR.DT)

        if self.mesh_properties.nproc != PAR.NPROC:
            if self.taskid == 0:
                print('Warning: mesh_properties.nproc != PAR.NPROC')

        if 'MULTIPLES' in PAR:
            raise NotImplementedError

    def initialize_adjoint_traces(self):
        super(specfem3d_nz, self).initialize_adjoint_traces()

        # workaround for SPECFEM3D's use of different name conventions for
        # regular traces and 'adjoint' traces
        if PAR.FORMAT in ['SU', 'su']:
            files = glob(self.cwd + '/' + 'traces/adj/*SU')
            unix.rename('_SU', '_SU.adj', files)

        # workaround for SPECFEM3D's requirement that all components exist,
        # even ones not in use
        unix.cd(self.cwd + '/' + 'traces/adj')
        for iproc in range(PAR.NPROC):
            for channel in ['x', 'y', 'z']:
                src = '%d_d%s_SU.adj' % (iproc, PAR.CHANNELS[0])
                dst = '%d_d%s_SU.adj' % (iproc, channel)
                if not exists(dst):
                    unix.cp(src, dst)

    def rename_data(self):
        """ Works around conflicting data filename conventions
        """
        if PAR.FORMAT in ['SU', 'su']:
            files = glob(self.cwd + '/' + 'traces/adj/*SU')
            unix.rename('_SU', '_SU.adj', files)

    def write_parameters(self):
        unix.cd(self.cwd)
        solvertools.write_parameters(vars(PAR))

    def write_receivers(self):
        unix.cd(self.cwd)
        key = 'use_existing_STATIONS'
        val = '.true.'
        setpar(key, val)
        _, h = preprocess.load('traces/obs')
        solvertools.write_receivers(h.nr, h.rx, h.rz)

    def write_sources(self):
        unix.cd(self.cwd)
        _, h = preprocess.load(dir='traces/obs')
        solvertools.write_sources(vars(PAR), h)

    # postprocessing wrapper overload
    def smooth(self,
               input_path='',
               output_path='',
               parameters=[],
               span_h=0.,
               span_v=0.):
        """ Smooths kernels by convolving them with a Gaussian.  Wrapper over 
            xsmooth_sem utility. 
            smooth() in base.py has the incorrect command line call, specfem 
            requires that NPROC be specified
        """
        if not exists(input_path):
            raise Exception

        if not exists(output_path):
            unix.mkdir(output_path)

        # apply smoothing operator
        unix.cd(self.cwd)
        print('smoothing parameters', self.parameters)
        for name in parameters or self.parameters:
            print('smoothing', name)
            solver_call = " ".join([
                PATH.SPECFEM_BIN + '/' + 'xsmooth_sem',  # ./bin/xsmooth_sem
                str(span_h),  # SIGMA_H
                str(span_v),  # SIGMA_V
                name + '_kernel',  # KERNEL_NAME
                input_path + '/',  # INPUT_DIR
                output_path + '/',  # OUTPUT_DIR
                '.false.'  # USE_GPU
            ])
            call_solver(system.mpiexec(), solver_call)
        print('')

        # rename output files
        files = glob(output_path + '/*')
        unix.rename('_smooth', '', files)

    def combine_vol_data(self, output_path='', quantity=''):
        """
        This does not work
        Call Specfems executable combine_vol_data_vtk on kernels or model files
        """
        if not exists(output_path):
            unix.mkdir(output_path)

        # This should probably be moved to its own function
        # def import_kernels()
        unix.cd(self.cwd)
        src = glob(join(PATH.GRAD, self.source_name, "*{}*".format(quantity)))
        dst = join(self.cwd, "kernels")
        unix.mkdir(dst)
        unix.ln(src=src, dst=dst)

        solver_call = " ".join([
            PATH.SPECFEM_BIN + '/' + 'xcombine_vol_data_vtk',
            0,  # NPROC_START
            PAR.NPROC,  # NPROC_END
            quantity,  # QUANTITY
            dst,  # DIR_IN
            dst,  # DIR_OUT, we will rename the files first
            0  # GPU ACCEL
        ])
        call_solver(system_mpiexec(), solver_call)

        unix.rm(dst)
        print('')

    # miscellaneous
    @property
    def data_wildcard(self):
        channels = PAR.CHANNELS
        return '*_d[%s]_SU' % channels.lower()

    @property
    def data_filenames(self):
        unix.cd(self.cwd + '/' + 'traces/obs')

        if PAR.FORMAT in ['SU', 'su']:
            if not PAR.CHANNELS:
                return sorted(glob('*_d?_SU'))
            filenames = []
            for channel in PAR.CHANNELS:
                filenames += sorted(glob('*_d' + channel + '_SU'))
            return filenames

        else:
            raise NotImplementedError

    @property
    def kernel_databases(self):
        return join(self.cwd, 'OUTPUT_FILES/DATABASES_MPI')

    @property
    def model_databases(self):
        return join(self.cwd, 'OUTPUT_FILES/DATABASES_MPI')

    @property
    def source_prefix(self):
        return 'CMTSOLUTION'
Example #29
class isotropic2d(custom_import('solver', 'specfem2d')):
    """ Adds isotropic elastic inversion machinery

       Must supply a model in vp,vs,rho
    """
    def check(self):
        super(isotropic2d, self).check()

        if not hasattr(forward, PAR.MATERIALS):
            raise Exception

        if not hasattr(reverse, PAR.MATERIALS):
            raise Exception

        assert PAR.MATERIALS in [
            'rho_phi_beta', 'rho_kappa_mu',
            'rho_lambda_mu', 'rho_alpha_beta',
            'phi_beta_gardner', 'kappa_mu_gardner',
            'lambda_mu_gardner', 'alpha_beta_gardner',
            'phi_beta', 'kappa_mu',
            'lambda_mu', 'alpha_beta'
        ]

    def __init__(self):
        # variable density
        if PAR.MATERIALS == 'rho_phi_beta':
            self.parameters = []
            self.parameters += ['bulk_c']
            self.parameters += ['bulk_beta']
            self.parameters += ['rho']

        if PAR.MATERIALS == 'rho_kappa_mu':
            self.parameters = []
            self.parameters += ['kappa']
            self.parameters += ['mu']
            self.parameters += ['rho']

        if PAR.MATERIALS == 'rho_lambda_mu':
            self.parameters = []
            self.parameters += ['lame1']
            self.parameters += ['lame2']
            self.parameters += ['rho']

        if PAR.MATERIALS == 'rho_alpha_beta':
            self.parameters = []
            self.parameters += ['vp']
            self.parameters += ['vs']
            self.parameters += ['rho']

        # constant density
        if PAR.MATERIALS == 'phi_beta':
            self.parameters = []
            self.parameters += ['bulk_c']
            self.parameters += ['bulk_beta']

        if PAR.MATERIALS == 'kappa_mu':
            self.parameters = []
            self.parameters += ['kappa']
            self.parameters += ['mu']

        if PAR.MATERIALS == 'lambda_mu':
            self.parameters = []
            self.parameters += ['lame1']
            self.parameters += ['lame2']

        if PAR.MATERIALS == 'alpha_beta':
            self.parameters = []
            self.parameters += ['vp']
            self.parameters += ['vs']

        # gardner's law density
        if PAR.MATERIALS == 'phi_beta_gardner':
            self.parameters = []
            self.parameters += ['bulk_c']
            self.parameters += ['bulk_beta']

        if PAR.MATERIALS == 'kappa_mu_gardner':
            self.parameters = []
            self.parameters += ['kappa']
            self.parameters += ['mu']

        if PAR.MATERIALS == 'lambda_mu_gardner':
            self.parameters = []
            self.parameters += ['lame1']
            self.parameters += ['lame2']

        if PAR.MATERIALS == 'alpha_beta_gardner':
            self.parameters = []
            self.parameters += ['vp']
            self.parameters += ['vs']

    def load(self, path, parameters=['vp', 'vs', 'rho'], prefix='', suffix=''):
        """ Reads isotropic elastic model
        """
        dict = Container()

        nproc = self.mesh_properties.nproc
        for iproc in range(nproc):
            for key in parameters:
                key = prefix + key + suffix
                dict[key] = self.io.read_slice(path, key, iproc)

        if parameters == ['vp', 'vs', 'rho']:
            dict.map(self.forward, nproc)

        return dict

    def save(self,
             dict,
             path,
             parameters=['vp', 'vs', 'rho'],
             prefix='',
             suffix=''):
        """ Writes isotropic elastic
        """
        nproc = self.mesh_properties.nproc
        if 'rho' not in parameters:
            for iproc in range(nproc):
                dict['rho'] = self.io.read_slice(PATH.MODEL_INIT, 'rho', iproc)

        if parameters != ['vp', 'vs', 'rho']:
            dict.map(self.reverse, nproc)

        # loop over processor slices (iproc was previously undefined here)
        for iproc in range(nproc):
            for key, val in dict.items():
                key = prefix + key + suffix
                self.io.write_slice(val, path, key, iproc)

    @staticmethod
    def forward(*args):
        return getattr(forward, PAR.MATERIALS)(*args)

    @staticmethod
    def reverse(*args):
        return getattr(reverse, PAR.MATERIALS)(*args)

    def check_mesh_properties(self, path=None, parameters=['vp', 'vs', 'rho']):
        super(isotropic2d, self).check_mesh_properties(path, parameters)
Example #30
class slurm_sm(custom_import('system', 'base')):
    """ An interface through which to submit workflows, run tasks in serial or 
      parallel, and perform other system functions.

      By hiding environment details behind a python interface layer, these 
      classes provide a consistent command set across different computing
      environments.

      Intermediate files are written to a global scratch path PATH.SCRATCH,
      which must be accessible to all compute nodes.

      Optionally, users can provide a local scratch path PATH.LOCAL if each
      compute node has its own local filesystem.

      For important additional information, please see 
      http://seisflows.readthedocs.org/en/latest/manual/manual.html#system-configuration
    """


    def check(self):
        """ Checks parameters and paths
        """
        # name of job
        if 'TITLE' not in PAR:
            setattr(PAR, 'TITLE', basename(abspath('.')))

        # time allocated for entire workflow
        if 'WALLTIME' not in PAR:
            setattr(PAR, 'WALLTIME', 30.)

        # number of tasks
        if 'NTASK' not in PAR:
            raise ParameterError(PAR, 'NTASK')

        # number of cores per task
        if 'NPROC' not in PAR:
            raise ParameterError(PAR, 'NPROC')

        # optional additional SLURM arguments
        if 'SLURMARGS' not in PAR:
            setattr(PAR, 'SLURMARGS', '')

        # optional environment variable list VAR1=val1,VAR2=val2,...
        if 'ENVIRONS' not in PAR:
            setattr(PAR, 'ENVIRONS', '')

        # level of detail in output messages
        if 'VERBOSE' not in PAR:
            setattr(PAR, 'VERBOSE', 1)

        # where job was submitted
        if 'WORKDIR' not in PATH:
            setattr(PATH, 'WORKDIR', abspath('.'))

        # where output files are written
        if 'OUTPUT' not in PATH:
            setattr(PATH, 'OUTPUT', PATH.WORKDIR+'/'+'output')

        # where temporary files are written
        if 'SCRATCH' not in PATH:
            setattr(PATH, 'SCRATCH', PATH.WORKDIR+'/'+'scratch')

        # where system files are written
        if 'SYSTEM' not in PATH:
            setattr(PATH, 'SYSTEM', PATH.SCRATCH+'/'+'system')

        # optional local scratch path
        if 'LOCAL' not in PATH:
            setattr(PATH, 'LOCAL', None)


    def submit(self, workflow):
        """ Submits workflow
        """
        # create scratch directories
        unix.mkdir(PATH.SCRATCH)
        unix.mkdir(PATH.SYSTEM)

        # create output directories
        unix.mkdir(PATH.OUTPUT)

        self.checkpoint()

        # submit workflow
        call('sbatch '
                + '%s ' %  PAR.SLURMARGS
                + '--job-name=%s '%PAR.TITLE
                + '--output=%s '%(PATH.WORKDIR +'/'+ 'output.log')
                + '--cpus-per-task=%d '%PAR.NPROC
                + '--ntasks=%d '%PAR.NTASK
                + '--time=%d '%PAR.WALLTIME
                + findpath('seisflows.system') +'/'+ 'wrappers/submit '
                + PATH.OUTPUT)


    def run(self, classname, funcname, hosts='all', **kwargs):
        """  Runs tasks in serial or parallel on specified hosts
        """
        self.checkpoint()
        self.save_kwargs(classname, funcname, kwargs)

        if hosts == 'all':
            # run on all available nodes
            call('srun '
                    + '--wait=0 '
                    + join(findpath('seisflows.system'), 'wrappers/run ')
                    + PATH.OUTPUT + ' '
                    + classname + ' '
                    + funcname + ' '
                    + PAR.ENVIRONS)

        elif hosts == 'head':
            # run on head node
            call('srun '
                    + '--wait=0 '
                    + '--ntasks=1 '
                    + '--nodes=1 ' 
                    + join(findpath('seisflows.system'), 'wrappers/run ')
                    + PATH.OUTPUT + ' '
                    + classname + ' '
                    + funcname + ' '
                    + PAR.ENVIRONS)

        else:
            raise KeyError('Bad keyword argument: system.run: hosts')


    def hostlist(self):
        with open(PATH.SYSTEM+'/'+'hostlist', 'w') as f:
            call('scontrol show hostname $SLURM_JOB_NODEFILE', stdout=f)

        with open(PATH.SYSTEM+'/'+'hostlist', 'r') as f:
            return [line.strip() for line in f.readlines()]


    def getnode(self):
        """ Gets number of running task
        """
        gid = os.getenv('SLURM_GTIDS').split(',')
        lid = int(os.getenv('SLURM_LOCALID'))
        return int(gid[lid])


    def mpiexec(self):
        """ Specifies MPI exectuable; used to invoke solver
        """
        return ''
        #return 'mpirun -np %d '%PAR.NPROC


    def save_kwargs(self, classname, funcname, kwargs):
        kwargspath = join(PATH.OUTPUT, 'kwargs')
        kwargsfile = join(kwargspath, classname+'_'+funcname+'.p')
        unix.mkdir(kwargspath)
        saveobj(kwargsfile, kwargs)
import sys
import numpy as np

from seisflows.tools import unix
from seisflows.tools.tools import exists
from seisflows.config import custom_import, ParameterError
from seisflows.workflow.base import base

PAR = sys.modules['seisflows_parameters']
PATH = sys.modules['seisflows_paths']

solver = sys.modules['seisflows_solver']
postprocess = sys.modules['seisflows_postprocess']

migration = custom_import('workflow', 'migration')()


class test_postprocess(base):
    """ Postprocessing class
    """

    def check(self):
        """ Checks parameters and paths
        """
        migration.check()

        if 'INPUT' not in PATH:
            setattr(PATH, 'INPUT', None)