Example #1
    def check(self, validate=True):
        """
        Checks parameters and paths. Must be implemented by sub-class
        """
        if validate:
            self.required.validate()

        if PAR.CASE.upper() == "SYNTHETIC":
            assert exists(PATH.MODEL_TRUE), \
                "CASE == SYNTHETIC requires PATH.MODEL_TRUE"

        if not exists(PATH.DATA):
            assert "MODEL_TRUE" in PATH, f"DATA or MODEL_TRUE must exist"
Example #2
def smooth_legacy(input_path='', output_path='', parameters=None, span=0.):
    """
    Reads kernels from `input_path`, applies a mesh-based smoothing operator,
    and writes the smoothed kernels to `output_path`.

    :param input_path: directory containing the kernels to be smoothed
    :param output_path: directory where the smoothed kernels are written
    :param parameters: optional list of parameters to act on, defaults to
        solver.parameters
    :param span: smoothing length; if falsy, kernels are returned unsmoothed
    :return: dictionary of kernels when no smoothing span is given,
        otherwise None
    """
    solver = sys.modules['seisflows_solver']
    PATH = sys.modules['seisflows_paths']

    if not exists(input_path):
        raise Exception(f"smoothing input path {input_path} does not exist")

    if not exists(output_path):
        unix.mkdir(output_path)

    if solver.mesh_properties.nproc != 1:
        raise NotImplementedError(
            "smooth_legacy only supports single-process (nproc == 1) meshes")

    # initialize kernel and coordinate containers
    kernels = {}
    for key in parameters or solver.parameters:
        kernels[key] = []

    coords = {}
    for key in ['x', 'z']:
        coords[key] = []

    # read kernels
    for key in parameters or solver.parameters:
        kernels[key] += solver.io.read_slice(input_path, key+'_kernel', 0)

    if not span:
        return kernels

    # read coordinates
    for key in ['x', 'z']:
        coords[key] += solver.io.read_slice(PATH.MODEL_INIT, key, 0)

    mesh = array.stack(coords['x'][0], coords['z'][0])

    # apply smoother
    for key in parameters or solver.parameters:
        kernels[key] = [array.meshsmooth(kernels[key][0], mesh, span)]

    # write smooth kernels
    for key in parameters or solver.parameters:
        solver.io.write_slice(kernels[key][0], output_path, key+'_kernel',
                              0)
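
A hedged usage sketch for smooth_legacy(): the call below uses placeholder scratch-directory paths and parameter names, and assumes the seisflows modules referenced inside the function have already been registered in sys.modules.

# Hypothetical call; paths and parameter names are placeholders
smooth_legacy(input_path="scratch/evalgrad/kernels/sum",
              output_path="scratch/evalgrad/kernels/sum_smooth",
              parameters=["vp", "vs"],
              span=10e3)
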
Example #3
    def initialize_adjoint_traces(self):
        """
        Setup utility: Creates the "adjoint traces" expected by SPECFEM

        Note:
            Adjoint traces are initialized by writing zeros for all channels.
            Channels actually in use during an inversion or migration will be
            overwritten with nonzero values later on.
        """
        # Initialize adjoint traces as zeroes for all data_filenames
        # write to `traces/adj`
        super().initialize_adjoint_traces()

        # Rename data to work around Specfem naming conventions
        self.rename_data()

        # Workaround for Specfem3D's requirement that all components exist,
        # even ones not in use as adjoint traces
        if PAR.FORMAT.upper() == "SU":
            unix.cd(os.path.join(self.cwd, "traces", "adj"))

            for iproc in range(PAR.NPROC):
                for channel in ["x", "y", "z"]:
                    dst = f"{iproc:d}_d{channel}_SU.adj"
                    if not exists(dst):
                        src = f"{iproc:d}_d{PAR.COMPONENTS[0]}_SU.adj"
                        unix.cp(src, dst)
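
A standalone sketch of the component fill-in above, using only os and shutil and hypothetical filenames; it copies the first available component's SU adjoint file to any missing component, and silently does nothing when the files are absent.

import os
import shutil

# Hypothetical filenames; mirrors the fill-in loop above without unix.cp
nproc, channels, available = 2, ["x", "y", "z"], "z"
for iproc in range(nproc):
    src = f"{iproc:d}_d{available}_SU.adj"
    for channel in channels:
        dst = f"{iproc:d}_d{channel}_SU.adj"
        if os.path.exists(src) and not os.path.exists(dst):
            shutil.copy(src, dst)
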
Example #4
    def smooth(self, input_path, **kwargs):
        """
        Specfem2D requires additional model files in the smoothing directory to
        perform the xsmooth_sem task. This function copies those files into the
        directory before running the base class smooth operation.

        Kwargs should match arguments of solver.base.smooth()
        
        .. note::
            This operation is usually run with run(single=True) so only one
            task will be performing these operations.

        :type input_path: str
        :param input_path: path to data
        """
        # Redundant to 'base' class but necessary
        if not exists(input_path):
            unix.mkdir(input_path)

        unix.cd(self.cwd)
        unix.cd("DATA")

        # Copy over only the files that are required. Won't execute if no match
        files = []
        for tag in ["jacobian", "NSPEC_ibool", "x", "y", "z"]:
            files += glob(f"*_{tag}.bin")
        for src in files:
            unix.cp(src=src, dst=input_path)

        super().smooth(input_path=input_path, **kwargs)
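
The file-matching step above can be exercised on its own; the sketch below collects the auxiliary .bin files named by the tag list and simply returns an empty list when run outside a SPECFEM2D DATA directory.

from glob import glob

# Tags taken from the smooth() wrapper above; glob returns [] if no
# matching SPECFEM2D DATA files are present in the current directory
required_tags = ["jacobian", "NSPEC_ibool", "x", "y", "z"]
files = []
for tag in required_tags:
    files += glob(f"*_{tag}.bin")
print(files)
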
Example #5
    def generate_mesh(self, model_path, model_name, model_type='gll'):
        """
        Performs meshing with internal mesher Meshfem2D and database generation

        :type model_path: str
        :param model_path: path to the model to be used for mesh generation
        :type model_name: str
        :param model_name: name of the model to be used as identification
        :type model_type: str
        :param model_type: available model types to be passed to the Specfem2D
            Par_file. See the Specfem2D Par_file for available options.
        """
        assert (exists(model_path)), f"model {model_path} does not exist"

        available_model_types = ["gll"]
        assert(model_type in available_model_types), \
            f"{model_type} not in available types {available_model_types}"

        unix.cd(self.cwd)

        # Run mesh generation
        if model_type == "gll":
            self.check_mesh_properties(model_path)

            # Copy the model files (ex: proc000023_vp.bin ...) into DATA
            src = glob(os.path.join(model_path, "*"))
            dst = self.model_databases
            unix.cp(src, dst)

        # Export the model into output folder
        if self.taskid == 0:
            self.export_model(os.path.join(PATH.OUTPUT, model_name))
Example #6
    def _write_residuals(self, path, syn, obs):
        """
        Computes residuals between observed and synthetic seismogram based on
        the misfit function PAR.MISFIT. Saves the residuals for each
        data-synthetic pair into a text file located at:

        ./scratch/solver/*/residuals

        The resulting file will be a single-column ASCII file that needs to be
        summed before use by the solver

        :type path: str
        :param path: location the residuals file will be written
        :type syn: obspy.core.stream.Stream
        :param syn: synthetic data
        :type obs: obspy.core.stream.Stream
        :param obs: observed data
        """
        residuals = []
        for obs_, syn_ in zip(obs, syn):
            residuals.append(self.misfit(syn_.data, obs_.data, PAR.NT, PAR.DT))

        filename = os.path.join(path, "residuals")
        if exists(filename):
            residuals = np.append(residuals, np.loadtxt(filename))

        np.savetxt(filename, residuals)
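
Because the residuals file is a single-column ASCII file that "needs to be summed before use", a downstream step might look like the sketch below; the sum-of-squares aggregation is an assumption, not necessarily the misfit summation the workflow actually applies.

import numpy as np

def sum_residuals(filename="residuals"):
    """Load a single-column ASCII residuals file and return a summed misfit.

    Assumes a simple sum of squares; the actual aggregation may differ.
    """
    residuals = np.loadtxt(filename)
    return float(np.sum(residuals ** 2))
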
Example #7
    def check_mesh_properties(self, path=None, parameters=None):
        """
        Determine if Mesh properties are okay for workflow

        :type path: str
        :param path: path to the mesh file
        :type parameters: list
        :param parameters: optional list of parameters, defaults to
            `self.parameters`
        """
        if not hasattr(self, '_mesh_properties'):
            if path is None:
                path = PATH.MODEL_INIT

            if parameters is None:
                parameters = self.parameters

            nproc = 0
            ngll = []
            while True:
                dummy = loadbin(path, nproc, 'reg1_' + parameters[0])
                ngll += [len(dummy)]
                nproc += 1
                if not exists(
                        os.path.join(path,
                                     f"proc{nrpoc}_reg1_{parameters[0]}.bin")):
                    break

            self._mesh_properties = Struct([['nproc', nproc], ['ngll', ngll]])
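
An alternative sketch of the slice count, assuming the standard SPECFEM naming convention proc??????_reg1_<parameter>.bin; it counts matching files directly with glob rather than probing them one at a time.

import os
from glob import glob

def count_slices(path, parameter):
    # Counts proc*_reg1_<parameter>.bin files; assumes zero-padded SPECFEM names
    return len(glob(os.path.join(path, f"proc*_reg1_{parameter}.bin")))
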
Example #8
    def initialize_adjoint_traces(self):
        """
        Setup utility: Creates the "adjoint traces" expected by SPECFEM.
        This is only done for the 'base' Preprocess class.

        Note:
            Adjoint traces are initialized by writing zeros for all channels.
            Channels actually in use during an inversion or migration will be
            overwritten with nonzero values later on.
        """
        super().initialize_adjoint_traces()

        unix.cd(self.cwd)
        unix.cd(os.path.join("traces", "adj"))

        # work around SPECFEM2D's use of different name conventions for
        # regular traces and 'adjoint' traces
        if PAR.FORMAT.upper() == "SU":
            files = glob("*SU")
            unix.rename(old="_SU", new="_SU.adj", names=files)
        elif PAR.FORMAT.upper() == "ASCII":
            files = glob("*sem?")

            # Get the available extensions, which are named based on unit
            extensions = set([os.path.splitext(_)[-1] for _ in files])
            for extension in extensions:
                unix.rename(old=extension, new=".adj", names=files)

        # SPECFEM2D requires that all components exist even if unused
        components = ["x", "y", "z", "p"]

        if PAR.FORMAT.upper() == "SU":
            for comp in components:
                src = f"U{PAR.COMPONENTS[0]}_file_single.su.adj"
                dst = f"U{comp.lower()}s_file_single.su.adj"
                if not exists(dst):
                    unix.cp(src, dst)
        elif PAR.FORMAT.upper() == "ASCII":
            for fid in glob("*.adj"):
                net, sta, cha, ext = fid.split(".")
                for comp in components:
                    # Replace the last value in the channel with new component
                    cha_check = cha[:-1] + comp.upper()
                    fid_check = ".".join([net, sta, cha_check, ext])
                    if not exists(fid_check):
                        unix.cp(fid, fid_check)
Example #9
    def smooth(self,
               input_path,
               output_path,
               parameters=None,
               span_h=0.,
               span_v=0.,
               output="solver.log"):
        """
        Postprocessing wrapper: xsmooth_sem
        Smooths kernels by convolving them with a Gaussian.

        .. note::
            paths require a trailing `/` character when calling xsmooth_sem

        .. note::
            It is ASSUMED that this function is being called by
            system.run(single=True) so that we can use the main solver
            directory to perform the kernel smooth task

        :type input_path: str
        :param input_path: path to data
        :type output_path: str
        :param output_path: path to export the outputs of xsmooth_sem
        :type parameters: list
        :param parameters: optional list of parameters,
            defaults to `self.parameters`
        :type span_h: float
        :param span_h: horizontal smoothing length in meters
        :type span_v: float
        :param span_v: vertical smoothing length in meters
        :type output: str
        :param output: file to output stdout to
        """
        if parameters is None:
            parameters = self.parameters

        if not exists(output_path):
            unix.mkdir(output_path)

        # Apply smoothing operator inside scratch/solver/*
        unix.cd(self.cwd)

        # mpiexec ./bin/xsmooth_sem SMOOTH_H SMOOTH_V name input output use_gpu
        for name in parameters:
            call_solver(mpiexec=PAR.MPIEXEC,
                        executable=" ".join([
                            "bin/xsmooth_sem",
                            str(span_h),
                            str(span_v), f"{name}_kernel",
                            os.path.join(input_path, ""),
                            os.path.join(output_path, ""), ".false"
                        ]),
                        output=output)

        # Rename output files
        files = glob(os.path.join(output_path, "*"))
        unix.rename(old="_smooth", new="", names=files)
Example #10
    def check_mesh_properties(self, path=None):
        """
        Determine if Mesh properties are okay for workflow

        :type path: str
        :param path: path to the mesh file
        """
        # Check the given model path or the initial model
        if path is None:
            path = PATH.MODEL_INIT

        if not exists(path):
            print(
                msg.cli(f"The following mesh path does not exist but should",
                        items=[path],
                        header="solver error",
                        border="="))
            sys.exit(-1)

        # Count slices and grid points
        key = self.parameters[0]
        iproc = 0
        ngll = []
        while True:
            dummy = self.io.read_slice(path=path, parameters=key,
                                       iproc=iproc)[0]
            ngll += [len(dummy)]
            iproc += 1
            if not exists(os.path.join(path,
                                       f"proc{int(iproc):06d}_{key}.bin")):
                break
        nproc = iproc

        # Create coordinate pointers: lazily read each coordinate slice only
        # when the corresponding entry is called with an iproc value
        coords = Struct()
        for key in ['x', 'y', 'z']:
            coords[key] = partial(self.io.read_slice, path, key)

        # Define internal mesh properties
        self._mesh_properties = Struct([["nproc", nproc], ["ngll", ngll],
                                        ["path", path], ["coords", coords]])
Example #11
    def generate_mesh(self, model_path, model_name, model_type='gll'):
        """
        Performs meshing and database generation as a serial task. Differs
        slightly from specfem3d class as it only creates database files for
        the main solver, which are then copied in serial by the function
        distribute_databases()

        :type model_path: str
        :param model_path: path to the model to be used for mesh generation
        :type model_name: str
        :param model_name: name of the model to be used as identification
        :type model_type: str
        :param model_type: available model types to be passed to the Specfem3D
            Par_file. See Specfem3D Par_file for available options.
        """
        available_model_types = ["gll"]

        assert (exists(model_path)), f"model {model_path} does not exist"

        model_type = model_type or getpar(key="MODEL", file="DATA/Par_file")
        assert(model_type in available_model_types), \
            f"{model_type} not in available types {available_model_types}"

        # Ensure that we're running on the main solver only
        assert (self.taskid == 0)

        unix.cd(self.cwd)

        # Check that the model parameter falls into the acceptable types
        par = getpar("MODEL").strip()
        assert(par in available_model_types), \
            f"Par_file {par} not in available types {available_model_types}"

        if par == "gll":
            self.check_mesh_properties(model_path)

            # Copy model files and then run xgenerate databases
            src = glob(os.path.join(model_path, "*"))
            dst = self.model_databases
            unix.cp(src, dst)

            call_solver(mpiexec=PAR.MPIEXEC,
                        executable="bin/xgenerate_databases")

        self.export_model(os.path.join(PATH.OUTPUT, model_name))
Example #12
    def combine(self, input_path, output_path, parameters=None):
        """
        Postprocessing wrapper: xcombine_sem 
        Sums kernels from individual source contributions to create gradient.

        .. note::
            The binary xcombine_sem simply sums matching databases (.bin) 

        .. note::
            It is ASSUMED that this function is being called by
            system.run(single=True) so that we can use the main solver
            directory to perform the kernel summation task

        :type input_path: str
        :param input_path: path to data
        :type output_path: str
        :param output_path: path to export the outputs of xcombine_sem
        :type parameters: list
        :param parameters: optional list of parameters,
            defaults to `self.parameters`
        """
        if parameters is None:
            parameters = self.parameters

        if not exists(output_path):
            unix.mkdir(output_path)

        unix.cd(self.cwd)

        # Write the source names into the kernel paths file for SEM/ directory
        with open("kernel_paths", "w") as f:
            f.writelines([
                os.path.join(input_path, f"{name}\n")
                for name in self.source_names
            ])

        # Call on xcombine_sem to combine kernels into a single file
        for name in parameters:
            # e.g.: mpiexec ./bin/xcombine_sem alpha_kernel kernel_paths output
            call_solver(mpiexec=PAR.MPIEXEC,
                        executable=" ".join([
                            "bin/xcombine_sem", f"{name}_kernel",
                            "kernel_paths", output_path
                        ]))
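
To illustrate the "kernel_paths" file written above, the sketch below builds its contents from hypothetical source names and an input path; each line points xcombine_sem at one event's kernel directory.

import os

# Hypothetical source names and input path
source_names = ["001", "002", "003"]
input_path = "scratch/evalgrad/kernels"
kernel_paths = [os.path.join(input_path, f"{name}\n") for name in source_names]
print("".join(kernel_paths), end="")
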
Example #13
    def generate_mesh(self, model_path, model_name, model_type=None):
        """
        Performs meshing with internal mesher Meshfem3D and database generation

        :type model_path: str
        :param model_path: path to the model to be used for mesh generation
        :type model_name: str
        :param model_name: name of the model to be used as identification
        :type model_type: str
        :param model_type: available model types to be passed to the Specfem3D
            Par_file. See Specfem3D Par_file for available options.
        """
        available_model_types = ["gll"]

        assert (exists(model_path)), f"model {model_path} does not exist"

        model_type = model_type or getpar(key="MODEL", file="DATA/Par_file")
        assert(model_type in available_model_types), \
            f"{model_type} not in available types {available_model_types}"

        unix.cd(self.cwd)

        # Run mesh generation
        if model_type == "gll":
            self.check_mesh_properties(model_path)

            src = glob(os.path.join(model_path, "*"))
            dst = self.model_databases
            unix.cp(src, dst)

            call_solver(mpiexec=PAR.MPIEXEC, executable="bin/xmeshfem3D")
            call_solver(mpiexec=PAR.MPIEXEC,
                        executable="bin/xgenerate_databases")

        # Export the model for future use in the workflow
        if self.taskid == 0:
            self.export_model(os.path.join(PATH.OUTPUT, model_name))