def required(self):
    """
    A hard definition of paths and parameters required by this class,
    alongside their necessity for the class and their string explanations.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    sf = SeisFlowsPathsParameters(super().required)

    sf.par("MPIEXEC", required=False, default="mpiexec", par_type=str,
           docstr="Function used to invoke executables on the system. "
                  "For example 'srun' on SLURM systems, or './' on a "
                  "workstation. If left blank, will guess based on the "
                  "system.")

    # Define the Parameters required by this module
    sf.par("NTASKMAX", required=False, default=100, par_type=int,
           docstr="Limit on the number of concurrent tasks in array")

    sf.par("NODESIZE", required=True, par_type=int,
           docstr="The number of cores per node defined by the system")

    # BUG FIX: docstr typo 'LSG' corrected to 'LSF'
    sf.par("LSFARGS", required=False, default="", par_type=str,
           docstr="Any optional, additional LSF arguments that will be "
                  "passed to the LSF submit scripts")

    # BUG FIX: the original never returned `sf`, so callers of this
    # property received None instead of the paths/parameters container
    return sf
def required(self):
    """
    A hard definition of paths and parameters required by this class,
    alongside their necessity for the class and their string explanations.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    sf = SeisFlowsPathsParameters(super().required)

    # Define the Parameters required by this module
    # NOTE(review): NT is a step count; par_type=float kept as-is for
    # backwards compatibility with existing parameter files — confirm
    sf.par("NT", required=True, par_type=float,
           docstr="Number of time steps set in the SPECFEM Par_file")

    sf.par("DT", required=True, par_type=float,
           docstr="Time step or delta set in the SPECFEM Par_file")

    # BUG FIX: FORMAT takes string values ('ascii', 'su'); par_type was
    # incorrectly declared as float
    sf.par("FORMAT", required=True, par_type=str,
           docstr="Format of synthetic waveforms used during workflow, "
                  "available options: ['ascii', 'su']")

    # BUG FIX: docstr was missing the opening quote around FORCESOLUTION
    sf.par("SOURCE_PREFIX", required=False, default="CMTSOLUTION",
           par_type=str,
           docstr="Prefix of SOURCE files in path SPECFEM_DATA. Available "
                  "['CMTSOLUTION', 'FORCESOLUTION']")

    return sf
def required(self):
    """
    Declares the paths and parameters needed by this SLURM system class,
    building on those inherited from the parent class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters(super().required)

    # System call used to launch parallel executables
    pp.par("MPIEXEC", required=False, par_type=str, default="srun -u",
           docstr="Function used to invoke executables on the system. "
                  "For example 'srun' on SLURM systems, or './' on a "
                  "workstation. If left blank, will guess based on the "
                  "system.")

    # Cap on simultaneous array tasks
    pp.par("NTASKMAX", required=False, par_type=int, default=100,
           docstr="Limit on the number of concurrent tasks in array")

    # Cores available on each compute node
    pp.par("NODESIZE", required=True, par_type=int,
           docstr="The number of cores per node defined by the system")

    # Extra flags forwarded verbatim to sbatch
    pp.par("SLURMARGS", required=False, par_type=str, default="",
           docstr="Any optional, additional SLURM arguments that will be "
                  "passed to the SBATCH scripts")

    return pp
def required(self):
    """
    Declares the paths and parameters needed by this optimization class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters()

    # Parameters controlling the nonlinear optimization routine
    pp.par("LINESEARCH", required=False, par_type=str, default="Bracket",
           docstr="Algorithm to use for line search, see "
                  "seisflows3.plugins.line_search for available choices")

    pp.par("PRECOND", required=False, par_type=str,
           docstr="Algorithm to use for preconditioning gradients, see "
                  "seisflows3.plugins.preconds for available choices")

    pp.par("STEPCOUNTMAX", required=False, par_type=int, default=10,
           docstr="Max number of trial steps in line search before a "
                  "change in line search behavior")

    pp.par("STEPLENINIT", required=False, par_type=float, default=0.05,
           docstr="Initial line search step length, as a fraction "
                  "of current model parameters")

    pp.par("STEPLENMAX", required=False, par_type=float, default=0.5,
           docstr="Max allowable step length, as a fraction of "
                  "current model parameters")

    # Scratch-directory path used by the optimizer
    pp.path("OPTIMIZE", required=False,
            default=os.path.join(PATH.SCRATCH, "optimize"),
            docstr="scratch path for nonlinear optimization data")

    return pp
def required(self):
    """
    Declares the paths and parameters needed by this workflow class,
    building on those inherited from the parent class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters(super().required)

    # Iteration bounds for the workflow
    pp.par("BEGIN", required=False, par_type=int, default=1,
           docstr="First iteration of workflow, 1 <= BEGIN <= inf")

    pp.par("END", required=True, par_type=int,
           docstr="Last iteration of workflow, BEGIN <= END <= inf")

    # Scratch-directory paths for function/gradient/Hessian evaluations
    pp.path("FUNC", required=False,
            default=os.path.join(PATH.SCRATCH, "evalfunc"),
            docstr="scratch path to store data related to function "
                   "evaluations")

    pp.path("GRAD", required=False,
            default=os.path.join(PATH.SCRATCH, "evalgrad"),
            docstr="scratch path to store data related to gradient "
                   "evaluations")

    pp.path("HESS", required=False,
            default=os.path.join(PATH.SCRATCH, "evalhess"),
            docstr="scratch path to store data related to Hessian "
                   "evaluations")

    pp.path("OPTIMIZE", required=False,
            default=os.path.join(PATH.SCRATCH, "optimize"),
            docstr="scratch path to store data related to nonlinear "
                   "optimization")

    return pp
def required(self):
    """
    Declares the paths and parameters needed by this base system class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters()

    # Job name defaults to the working-directory name
    pp.par("TITLE", required=False, par_type=str,
           default=os.path.basename(os.path.abspath(".")),
           docstr="The name used to submit jobs to the system, defaults "
                  "to the name of the working directory")

    # Parameters echoed to stdout before submission for manual review
    pp.par("PRECHECK", required=False, par_type=list, default=["TITLE"],
           docstr="A list of parameters that will be displayed to stdout "
                  "before 'submit' or 'resume' is run. Useful for "
                  "manually reviewing important parameters prior to "
                  "system submission")

    pp.par("LOG_LEVEL", required=False, par_type=str, default="DEBUG",
           docstr="Verbosity output of SF3 logger. Available from least to "
                  "most verbosity: 'CRITICAL', 'WARNING', 'INFO', 'DEBUG'; "
                  "defaults to 'DEBUG'")

    pp.par("VERBOSE", required=False, par_type=bool, default=False,
           docstr="Level of verbosity provided to the output log. If True, "
                  "log statements will declare what module/class/function "
                  "they are being called from. Useful for debugging but "
                  "also very noisy.")

    # Paths required by this module
    # note: PATH.WORKDIR has been set by the entry point seisflows.setup()
    pp.path("SCRATCH", required=False,
            default=os.path.join(PATH.WORKDIR, CFGPATHS.SCRATCHDIR),
            docstr="scratch path to hold temporary data during workflow")

    pp.path("OUTPUT", required=False,
            default=os.path.join(PATH.WORKDIR, CFGPATHS.OUTPUTDIR),
            docstr="directory to save workflow outputs to disk")

    pp.path("SYSTEM", required=False,
            default=os.path.join(PATH.WORKDIR, CFGPATHS.SCRATCHDIR,
                                 "system"),
            docstr="scratch path to hold any system related data")

    pp.path("LOCAL", required=False,
            docstr="path to local data to be used during workflow")

    pp.path("LOGFILE", required=False, default=self.output_log,
            docstr="the main output log file where all processes will "
                   "track their status")

    return pp
def required(self):
    """
    Declares the paths and parameters needed by this NLCG optimization
    class, building on those inherited from the parent class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters(super().required)

    # NLCG restart controls ('null' in the parameter file means unset)
    pp.par("NLCGMAX", required=False, par_type=float, default="null",
           docstr="NLCG periodic restart interval, between 1 and inf")

    pp.par("NLCGTHRESH", required=False, par_type=float, default="null",
           docstr="NLCG conjugacy restart threshold, between 1 and inf")

    return pp
def required(self):
    """
    A hard definition of paths and parameters required by this class,
    alongside their necessity for the class and their string explanations.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    sf = SeisFlowsPathsParameters(super().required)

    # BUG FIX: PARTITION holds a partition name string (default
    # 't1small'); par_type was incorrectly declared as int
    sf.par("PARTITION", required=False, default="t1small", par_type=str,
           docstr="Name of partition on main cluster, available: "
                  "analysis, t1small, t2small, t1standard, t2standard, gpu")

    sf.par("MPIEXEC", required=False, default="srun", par_type=str,
           docstr="Function used to invoke parallel executables")

    return sf
def required(self):
    """
    Declares the paths and parameters needed by this workstation system
    class, building on those inherited from the parent class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters(super().required)

    # Launcher for executables; None lets the system guess
    pp.par("MPIEXEC", required=False, par_type=str, default=None,
           docstr="Function used to invoke executables on the system. "
                  "For example 'srun' on SLURM systems, or './' on a "
                  "workstation. If left blank, will guess based on the "
                  "system.")

    # Task/processor counts for serial workstation runs
    pp.par("NTASK", required=False, par_type=int, default=1,
           docstr="Number of separate, individual tasks. Also equal to "
                  "the number of desired sources in workflow")

    pp.par("NPROC", required=False, par_type=int, default=1,
           docstr="Number of processor to use for each simulation")

    return pp
def required(self):
    """
    A hard definition of paths and parameters required by this class,
    alongside their necessity for the class and their string explanations.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters()

    # Example parameter definitions for template authors
    pp.par("EXAMPLE_REQUIRED_PARAMETER", required=True, par_type=str,
           docstr="Required parameters do not need default values and will "
                  "need to be set by the user in the parameter file")

    pp.par("EXAMPLE_OPTIONAL_PARAMETER", required=False, par_type=int,
           default=0,
           docstr="Optional parameters require a default "
                  "value, if no default value is given, the "
                  "parameter is set to None")

    # Example path definitions for template authors
    pp.path("EXAMPLE_REQUIRED_PATH", required=True,
            docstr="Required paths to be set by user in parameter file")

    pp.path("EXAMPLE_OPTIONAL_PATH", required=False,
            default=os.path.join(PATH.SCRATCH, "example"),
            docstr="Optional paths require default values")

    return pp
def required(self):
    """
    A hard definition of paths and parameters required by this class,
    alongside their necessity for the class and their string explanations.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    sf = SeisFlowsPathsParameters(super().required)

    # Define the Parameters required by this module
    sf.par("WALLTIME", required=True, par_type=float,
           docstr="Maximum job time in minutes for main SeisFlows3 job")

    sf.par("TASKTIME", required=True, par_type=float,
           docstr="Maximum job time in minutes for each SeisFlows3 task")

    sf.par("NTASK", required=True, par_type=int,
           docstr="Number of separate, individual tasks. Also equal to "
                  "the number of desired sources in workflow")

    sf.par("NPROC", required=True, par_type=int,
           docstr="Number of processor to use for each simulation")

    # BUG FIX: the original docstr segments concatenated without spaces
    # ('thefollowing', 'setusing'), and referenced a nonexistent
    # 'os.environs' (the module attribute is os.environ)
    sf.par("ENVIRONS", required=False, default="", par_type=str,
           docstr="Optional environment variables to be provided in the "
                  "following format VAR1=var1,VAR2=var2... Will be set "
                  "using os.environ")

    return sf
def required(self):
    """
    Declares the paths and parameters needed by this L-BFGS optimization
    class, building on those inherited from the parent class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters(super().required)

    # L-BFGS line search and memory configuration
    pp.par("LINESEARCH", required=False, par_type=str, default="Backtrack",
           docstr="Algorithm to use for line search, see "
                  "seisflows.plugins.line_search for available choices")

    pp.par("LBFGSMEM", required=False, par_type=int, default=3,
           docstr="Max number of previous gradients to retain "
                  "in local memory")

    pp.par("LBFGSMAX", required=False, par_type=int, default="inf",
           docstr="LBFGS periodic restart interval, between 1 and 'inf'")

    pp.par("LBFGSTHRESH", required=False, par_type=float, default=0.,
           docstr="LBFGS angle restart threshold")

    return pp
def required(self):
    """
    Declares the paths and parameters needed by this postprocessing class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters()

    # Gaussian smoothing half-widths applied to the gradient
    pp.par("SMOOTH_H", required=False, par_type=float, default=0.,
           docstr="Gaussian half-width for horizontal smoothing in units "
                  "of meters. If 0., no smoothing applied")

    pp.par("SMOOTH_V", required=False, par_type=float, default=0.,
           docstr="Gaussian half-width for vertical smoothing in units "
                  "of meters")

    # Extra time allowance for long-running smoothing jobs
    pp.par("TASKTIME_SMOOTH", required=False, par_type=int, default=1,
           docstr="Large radii smoothing may take longer than normal "
                  "tasks. Allocate additional smoothing task time "
                  "as a multiple of TASKTIME")

    # Optional gradient-mask directory
    pp.path("MASK", required=False,
            docstr="Directory to mask files for gradient masking")

    return pp
def required(self):
    """
    A hard definition of paths and parameters required by this class,
    alongside their necessity for the class and their string explanations.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    sf = SeisFlowsPathsParameters()

    sf.par("MATERIALS", required=True, par_type=str,
           docstr="Material parameters used to define model. Available: "
                  "['ELASTIC': Vp, Vs, 'ACOUSTIC': Vp, 'ISOTROPIC', "
                  "'ANISOTROPIC']")

    sf.par("DENSITY", required=True, par_type=str,
           docstr="How to treat density during inversion. Available: "
                  "['CONSTANT': Do not update density, "
                  "'VARIABLE': Update density]")

    # NOTE(review): docstr describes a True/False toggle but par_type is
    # str — confirm whether this should be par_type=bool
    sf.par("ATTENUATION", required=True, par_type=str,
           docstr="If True, turn on attenuation during forward "
                  "simulations, otherwise set attenuation off. Attenuation "
                  "is always off for adjoint simulations.")

    sf.par("COMPONENTS", required=False, default="ZNE", par_type=str,
           docstr="Components used to generate data, formatted as a single "
                  "string, e.g. ZNE or NZ or E")

    # BUG FIX: SOLVERIO holds a format-name string (default
    # 'fortran_binary'); par_type was incorrectly declared as int
    sf.par("SOLVERIO", required=False, default="fortran_binary",
           par_type=str,
           docstr="The format external solver files. Available: "
                  "['fortran_binary', 'adios']")

    sf.path("SOLVER", required=False,
            default=os.path.join(PATH.SCRATCH, "solver"),
            docstr="scratch path to hold solver working directories")

    sf.path("SPECFEM_BIN", required=True,
            docstr="path to the SPECFEM binary executables")

    sf.path("SPECFEM_DATA", required=True,
            docstr="path to the SPECFEM DATA/ directory containing the "
                   "'Par_file', 'STATIONS' file and 'CMTSOLUTION' files")

    sf.path("DATA", required=False,
            docstr="path to a directory containing any external data "
                   "required by the workflow. Catch all directory that "
                   "can be accessed by all modules")

    return sf
def required(self):
    """
    Declares the paths and parameters needed by this Pyatoa-based
    preprocessing class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters()

    # Data unit convention expected from the external solver
    pp.par("UNIT_OUTPUT", required=True, par_type=str,
           docstr="Data units. Must match the synthetic output of external "
                  "solver. Available: ['DISP': displacement, "
                  "'VEL': velocity, 'ACC': acceleration]")

    # TODO Check this against T0 in check()
    pp.par("START_PAD", required=False, par_type=float, default=0,
           docstr="For data gathering; time before origin time to gather. "
                  "START_PAD >= T_0 in SPECFEM constants.h.in. "
                  "Positive values only")

    # TODO set this automatically by setting equal NT * DT
    pp.par("END_PAD", required=True, par_type=float,
           docstr="For data gathering; time after origin time to gather. "
                  "END_PAD >= NT * DT (of Par_file). Positive values only")

    # Filtering bounds; empty string means corner unset
    pp.par("MIN_PERIOD", required=False, par_type=float, default="",
           docstr="Minimum filter corner in unit seconds. Bandpass filter "
                  "if set with `MAX_PERIOD`, highpass filter if set "
                  "without `MAX_PERIOD`, no filtering if not set and "
                  "`MAX_PERIOD also not set")

    pp.par("MAX_PERIOD", required=False, par_type=float, default="",
           docstr="Maximum filter corner in unit seconds. Bandpass filter "
                  "if set with `MIN_PERIOD`, lowpass filter if set "
                  "without `MIN_PERIOD`, no filtering if not set and "
                  "`MIN_PERIOD also not set")

    pp.par("CORNERS", required=False, par_type=int, default=4,
           docstr="Number of filter corners applied to filtering")

    # ObsPy FDSN data-gathering client
    pp.par("CLIENT", required=False, par_type=str,
           docstr="Client name for ObsPy FDSN data gathering. Pyatoa will "
                  "attempt to collect waveform and metadata based on "
                  "network and station codes provided in the SPECFEM "
                  "STATIONS file. If set None, no FDSN gathering will be "
                  "attempted")

    pp.par("ROTATE", required=False, par_type=bool, default=False,
           docstr="Attempt to rotate waveform components from NEZ -> RTZ")

    # Pyflex/Pyadjoint misfit configuration
    pp.par("PYFLEX_PRESET", required=False, par_type=str, default="default",
           docstr="Parameter map for misfit window configuration defined "
                  "by Pyflex. IF None, misfit and adjoint sources will be "
                  "calculated on whole traces. For available choices, "
                  "see Pyatoa docs page (pyatoa.rtfd.io)")

    pp.par("FIX_WINDOWS", required=False, par_type="bool or str",
           default=False,
           docstr="How to address misfit window evaluation at each "
                  "evaluation. Options to re-use misfit windows collected "
                  "during an inversion, available options: "
                  "[True, False, 'ITER', 'ONCE'] "
                  "True: Re-use windows after first evaluation (i01s00); "
                  "False: Calculate new windows each evaluation; "
                  "'ITER': Calculate new windows at first evaluation of "
                  "each iteration (e.g., i01s00... i02s00..."
                  "'ONCE': Calculate new windows at first evaluation of "
                  "the workflow, i.e., at PAR.BEGIN")

    pp.par("ADJ_SRC_TYPE", required=False, par_type=str, default="cc",
           docstr="Adjoint source type to evaluate misfit, defined by "
                  "Pyadjoint. Currently available options: "
                  "['cc': cross-correlation, 'mt': multitaper, "
                  "wav: waveform']")

    pp.par("PLOT", required=False, par_type=bool, default=True,
           docstr="Attempt to plot waveforms and maps as PDF files at each "
                  "function evaluation")

    pp.par("PYATOA_LOG_LEVEL", required=False, par_type=str,
           default="DEBUG",
           docstr="Log level to set Pyatoa, Pyflex, Pyadjoint. Available: "
                  "['null': no logging, 'warning': warnings only, "
                  "'info': task tracking, "
                  "'debug': log all small details (recommended)]")

    # Parameters to control saving scratch/preprocess files to work dir.
    pp.par("SAVE_DATASETS", required=False, par_type=bool, default=True,
           docstr="Save PyASDF HDF5 datasets to disk. These datasets store "
                  "waveform data, metadata, misfit windows, adjoint "
                  "sources and configuration parameters")

    pp.par("SAVE_FIGURES", required=False, par_type=bool, default=True,
           docstr="Save output waveform figures to disk as PDFs")

    pp.par("SAVE_LOGS", required=False, par_type=bool, default=True,
           docstr="Save event-specific Pyatoa logs to disk as .txt files")

    # Paths required by this module
    pp.path("PREPROCESS", required=False,
            default=os.path.join(PATH.SCRATCH, "preprocess"),
            docstr="scratch/ path to store waveform data and figures. "
                   "Pyatoa will generate an internal directory structure "
                   "here")

    pp.path("DATA", required=False,
            docstr="Directory to locally stored data. Pyatoa looks for "
                   "waveform and metadata in the 'PATH.DATA/mseed' and "
                   "'PATH.DATA/seed', directories respectively.")

    return pp
def required(self):
    """
    Declares the paths and parameters needed by this inversion workflow
    class.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    pp = SeisFlowsPathsParameters()

    # Inversion setup
    pp.par("CASE", required=True, par_type=str,
           docstr="Type of inversion, available: "
                  "['data': real data inversion, "
                  "'synthetic': synthetic-synthetic inversion]")

    pp.par("RESUME_FROM", required=False, par_type=str,
           docstr="Name of task to resume inversion from")

    pp.par("STOP_AFTER", required=False, par_type=str,
           docstr="Name of task to stop inversion after finishing")

    # Per-iteration output-saving toggles
    pp.par("SAVEMODEL", required=False, par_type=bool, default=True,
           docstr="Save final model files after each iteration")

    pp.par("SAVEGRADIENT", required=False, par_type=bool, default=True,
           docstr="Save gradient files after each iteration")

    pp.par("SAVEKERNELS", required=False, par_type=bool, default=False,
           docstr="Save event kernel files after each iteration")

    pp.par("SAVETRACES", required=False, par_type=bool, default=False,
           docstr="Save waveform traces after each iteration")

    pp.par("SAVERESIDUALS", required=False, par_type=bool, default=False,
           docstr="Save waveform residuals after each iteration")

    pp.par("SAVEAS", required=False, par_type=str, default="binary",
           docstr="Format to save models, gradients, kernels. "
                  "Available: "
                  "['binary': save files in native SPECFEM .bin format, "
                  "'vector': save files as NumPy .npy files, "
                  "'both': save as both binary and vectors]")

    # Model and data paths
    pp.path("MODEL_INIT", required=True,
            docstr="location of the initial model to be used for workflow")

    pp.path("MODEL_TRUE", required=False,
            docstr="Target model to be used for PAR.CASE == 'synthetic'")

    pp.path("DATA", required=False, default=None,
            docstr="path to data available to workflow")

    return pp
def required(self):
    """
    A hard definition of paths and parameters required by this class,
    alongside their necessity for the class and their string explanations.

    :rtype: seisflows.config.SeisFlowsPathsParameters
    :return: Paths and parameters that define the given class
    """
    sf = SeisFlowsPathsParameters()

    # Define the Parameters required by this module
    sf.par("MISFIT", required=False, default="waveform", par_type=str,
           docstr="Misfit function for waveform comparisons, for available "
                  "see seisflows.plugins.misfit")

    sf.par("BACKPROJECT", required=False, default="null", par_type=str,
           docstr="Backprojection function for migration, for available "
                  "see seisflows.plugins.adjoint")

    # BUG FIX: a first, conflicting NORMALIZE definition (par_type=str,
    # default='null') was removed; it was shadowed by the list-typed
    # NORMALIZE definition further below

    # BUG FIX: docstr segments were concatenated without separating spaces
    sf.par("FILTER", required=False, default="null", par_type=str,
           docstr="Data filtering type, available options are: "
                  "BANDPASS (req. MIN/MAX PERIOD/FREQ); "
                  "LOWPASS (req. MAX_FREQ or MIN_PERIOD); "
                  "HIGHPASS (req. MIN_FREQ or MAX_PERIOD) ")

    sf.par("MIN_PERIOD", required=False, par_type=float,
           docstr="Minimum filter period applied to time series. "
                  "See also MIN_FREQ, MAX_FREQ, if User defines FREQ "
                  "parameters, they will overwrite PERIOD parameters.")

    sf.par("MAX_PERIOD", required=False, par_type=float,
           docstr="Maximum filter period applied to time series. "
                  "See also MIN_FREQ, MAX_FREQ, if User defines FREQ "
                  "parameters, they will overwrite PERIOD parameters.")

    # BUG FIX: MIN_FREQ docstr incorrectly read 'Maximum'
    sf.par("MIN_FREQ", required=False, par_type=float,
           docstr="Minimum filter frequency applied to time series. "
                  "See also MIN_PERIOD, MAX_PERIOD, if User defines FREQ "
                  "parameters, they will overwrite PERIOD parameters.")

    sf.par("MAX_FREQ", required=False, par_type=float,
           docstr="Maximum filter frequency applied to time series, "
                  "See also MIN_PERIOD, MAX_PERIOD, if User defines FREQ "
                  "parameters, they will overwrite PERIOD parameters.")

    sf.par("MUTE", required=False, par_type=list, default=[],
           docstr="Data mute parameters used to zero out early / late "
                  "arrivals or offsets. Choose any number of: "
                  "EARLY: mute early arrivals; "
                  "LATE: mute late arrivals; "
                  "SHORT: mute short source-receiver distances; "
                  "LONG: mute long source-receiver distances")

    sf.par("NORMALIZE", required=False, par_type=list, default=[],
           docstr="Data normalization parameters used to normalize the "
                  "amplitudes of waveforms. Choose from two sets: "
                  "ENORML1: normalize per event by L1 of traces; OR "
                  "ENORML2: normalize per event by L2 of traces; AND "
                  "TNORML1: normalize per trace by L1 of itself; OR "
                  "TNORML2: normalize per trace by L2 of itself")

    # TODO: Add the mute parameters here, const, slope and dist

    # Define the Paths required by this module
    sf.path("PREPROCESS", required=False,
            default=os.path.join(PATH.SCRATCH, "preprocess"),
            docstr="scratch path to store any preprocessing outputs")

    return sf