Example #1
class KilosortHelperParameters(DefaultSchema):

    kilosort_version = Int(required=True,
                           default=2,
                           help='Kilosort version to use (1, 2, or 3)')

    surface_channel_buffer = Int(
        required=False,
        default=15,
        help=
        'Number of channels above brain surface to include in spike sorting')

    matlab_home_directory = InputDir(
        help='Location from which Matlab files can be copied and run.')
    kilosort_repository = InputDir(
        help='Local directory for the Kilosort source code repository.')

    kilosort_params = Nested(
        KilosortParameters,
        required=False,
        help='Parameters used to auto-generate a Kilosort config file')
    kilosort2_params = Nested(
        Kilosort2Parameters,
        required=False,
        help='Parameters used to auto-generate a Kilosort2 config file')
    kilosort3_params = Nested(
        Kilosort3Parameters,
        required=False,
        help='Parameters used to auto-generate a Kilosort3 config file')
Example #2
class MontageSolverSchema(ArgSchema):
    data_dir = InputDir(
        required=False,
        description="directory containing metafile, images, and matches")
    metafile = InputFile(
        required=False,
        description=("fullpath to metafile. Helps in the case of multiple"
                     " metafiles in one directory. data_dir will take "
                     " os.path.dirname(metafile)"))
    output_dir = OutputDir(required=False,
                           missing=None,
                           default=None,
                           description="directory for output files")
    read_transform_from = Str(
        required=False,
        missing='metafile',
        default='metafile',
        validator=mm.validate.OneOf(['metafile', 'reffile', 'dict']),
        description="3 possible ways to read in the reference transform")
    ref_transform = InputFile(required=False,
                              missing=None,
                              default=None,
                              description="transform json")
    ref_transform_dict = Dict(required=False,
                              missing=None,
                              description="transform read in from memory")
    ransacReprojThreshold = Float(
        required=False,
        missing=10.0,
        default=10.0,
        description=("passed into cv2.estimateAffinePartial2D()"
                     "for RANSAC filtering of montage template matches"))
    compress_output = Boolean(
        required=False,
        missing=True,
        default=True,
        description=("tilespecs will be .json or .json.gz"))
    solver_templates = List(
        Str,
        required=True,
        description="input json basenames for the solver args")
    solver_template_dir = InputDir(
        required=True, description="location of the templates for the solver")

    @mm.post_load
    def check_solver_inputs(self, data):
        for args in data['solver_templates']:
            argpath = os.path.join(data['solver_template_dir'], args)
            if not os.path.isfile(argpath):
                raise mm.ValidationError("solver arg file doesn't exist: %s" %
                                         argpath)

    @mm.post_load
    def check_metafile(self, data):
        if ('data_dir' not in data) and ('metafile' not in data):
            raise mm.ValidationError("must specify either data_dir"
                                     " or metafile")
Example #3
class Directories(DefaultSchema):

    ecephys_directory = InputDir(
        help=
        'Location of the ecephys_spike_sorting directory containing modules directory'
    )
    npx_directory = InputDir(help='Location of raw neuropixels binary files')
    kilosort_output_directory = OutputDir(
        help='Location of Kilosort output files')
    extracted_data_directory = OutputDir(
        help='Location for NPX/CatGT processed files')
    kilosort_output_tmp = OutputDir(help='Location for temporary KS output')
Example #4
class KilosortHelperParameters(DefaultSchema):

    kilosort_version = Int(required=True, default=2, help='Kilosort version to use (1 or 2)')
    
    spikeGLX_data = Bool(required=True, default=False, help='If true, use SpikeGLX metafile to build chanMap')
    ks_make_copy = Bool(required=False, default=False, help='If true, make a copy of the original KS output')

    surface_channel_buffer = Int(required=False, default=15, help='Number of channels above brain surface to include in spike sorting')

    matlab_home_directory = InputDir(help='Location from which Matlab files can be copied and run.')
    kilosort_repository = InputDir(help='Local directory for the Kilosort source code repository.')
    npy_matlab_repository = InputDir(help='Local directory for the npy_matlab repo for writing phy output')
   
    kilosort_params = Nested(KilosortParameters, required=False, help='Parameters used to auto-generate a Kilosort config file')
    kilosort2_params = Nested(Kilosort2Parameters, required=False, help='Parameters used to auto-generate a Kilosort2 config file')
Example #5
class CatGTParams(DefaultSchema):
    run_name = String(required=True,
                      help='undecorated run name (no g or t indices)')
    gate_string = String(required=True, default='0', help='gate string')
    trigger_string = String(
        required=True,
        default='0,0',
        help='string specifying trials to concatenate, e.g. 0,200')
    probe_string = String(required=True,
                          default='0',
                          help='string specifying probes, e.g. 0:3')
    stream_string = String(required=True,
                           default='-ap',
                           help='string specifying which streams to process')
    car_mode = String(
        required=False,
        default='None',
        help='Common average reference mode. Must be None, gbldmx, or loccar')
    loccar_inner = Int(required=False,
                       default=2,
                       help='Inner radius for loccar in sites')
    loccar_outer = Int(required=False,
                       default=8,
                       help='Outer radius for loccar in sites')
    cmdStr = String(
        required=True,
        default='-prbfld -aphipass=300 -gbldmx -gfix=0.40,0.10,0.02',
        help='input stream filter, error correct and extract settings for CatGT'
    )
    extract_string = String(required=True,
                            default='',
                            help='extract edges from datastreams')
    catGTPath = InputDir(help='directory containing the CatGT executable.')
Example #6
class UploadTileSpecParameters(RenderParameters):
    inputDir = InputDir(
        required=True,
        metadata={'description': 'input directory of tilespecs'})
    outputStack = Str(
        required=True,
        metadata={'description': 'name of output stack to upload to render'})
Example #7
class CreateFastStacksParameters(RenderParameters):
    projectDirectory = InputDir(required=True,
        description='path to project root')
    pool_size = Int(required=False, default=20,
        description='number of parallel threads to use')
    delete_stack = Boolean(required=False, default=True,
        description='flag to decide whether stack should be deleted before new upload')
Example #8
class MipMapDirectories(DefaultSchema):
    level = Int(
        required=True,
        description=("mipMapLevel for which parent directory will be changed"))
    directory = InputDir(
        required=True,
        description=("directory where relocated mipmaps are found."))
Example #9
class tPrimeParams(DefaultSchema):
    tPrime_path = InputDir(help='directory containing the TPrime executable.')
    sync_period = Float(default=1.0, help='Period of sync waveform (sec).')
    toStream_sync_params = String(
        required=False,
        default='SY=0,384,6,500',
        help=
        'string of CatGT params used to extract to stream sync edges, e.g. SY=0,384,6,500'
    )
    ni_sync_params = String(
        required=False,
        default='XA=0,1,3,500',
        help=
        'string of CatGT params used to extract NI sync edges, e.g. XA=0,1,3,500'
    )
    ni_ex_list = String(
        required=False,
        default='',
        help=
        'string of CatGT params used to extract edges from ni, e.g. XA=0,1,3,500'
    )
    im_ex_list = String(
        required=False,
        default='',
        help=
        'string of CatGT params used to extract edges from im streams, e.g. SY=0,384,6,500'
    )
    tPrime_3A = Boolean(required=False, default=False, help='is this 3A data?')
    toStream_path_3A = String(required=False,
                              help='full path to toStream edges file')
    fromStream_list_3A = List(
        String,
        required=False,
        help='list of full paths to fromStream edges files')
Example #10
class PointMatchClientParametersQsub(
        RenderParameters, SIFTPointMatchParameters, SparkOptions):
    sparkhome = InputDir(
        required=False,
        default="/allen/aibs/pipeline/image_processing/"
        "volume_assembly/utils/spark",
        missing="/allen/aibs/pipeline/image_processing/"
        "volume_assembly/utils/spark",
        description="Path to the spark home directory")
    pbs_template = InputFile(
        required=True,
        description="pbs template to wrap spark job")
    no_nodes = Int(
        required=False,
        default=30,
        missing=10,
        description='Number of nodes to run the pbs job')
    ppn = Int(
        required=False,
        default=30,
        missing=30,
        description='Number of processors per node (default = 30)')
    queue_name = Str(
        required=False,
        default='connectome',
        missing='connectome',
        description='Name of the queue to submit the job')
    logdir = OutputDir(
        required=True,
        description="location to set logging for qsub command"
    )
Example #11
class BaseStackParameters(DefaultSchema):
    stack = Str(required=True, description="Stack name")
    owner = Str(
        required=False,
        default=None,
        missing=None,
        description="Owner of the stack (defaults to render clients' owner)")
    project = Str(required=False,
                  default=None,
                  missing=None,
                  description="Project of the stack")
    service_host = Str(
        required=False,
        default=None,
        missing=None,
        description="url of render service host (without http://)")
    baseURL = Str(required=False,
                  default=None,
                  missing=None,
                  description="Base Render URL")
    renderbinPath = InputDir(required=False,
                             default=None,
                             missing=None,
                             description="Path to render's client scripts")
    verbose = Int(required=False,
                  default=0,
                  missing=0,
                  description="Verbose output from solver needed?")
Example #12
class GenerateEMTileSpecsParameters(ArgSchema):
    metafile = InputFile(
        required=True,
        description="metadata file containing TEMCA acquisition data")
    maskUrl = InputFile(required=False,
                        default=None,
                        missing=None,
                        description="absolute path to image mask to apply")
    image_directory = InputDir(
        required=False,
        description=("directory used in determining absolute paths to images. "
                     "Defaults to parent directory containing metafile "
                     "if omitted."))
    maximum_intensity = Int(
        required=False,
        default=255,
        description=("intensity value to interpret as white"))
    minimum_intensity = Int(
        required=False,
        default=0,
        description=("intensity value to interpret as black"))
    z = Float(required=False, default=0, description=("z value"))
    sectionId = Str(
        required=False,
        description=("sectionId to apply to tiles during ingest.  "
                     "If unspecified will default to a string "
                     "representation of the float value of z_index."))
    output_path = OutputFile(required=False,
                             description="directory for output files")
    compress_output = Boolean(
        required=False,
        missing=True,
        default=True,
        description=("tilespecs will be .json or .json.gz"))
Example #13
class RenderSectionAtScaleOutput(argschema.schemas.DefaultSchema):
    image_directory = InputDir(
        required=True,
        description=('Directory in which the downsampled '
                     'section images are saved'))
    temp_stack = Str(required=True,
                     description=('The temp stack that was used to '
                                  'generate the downsampled sections'))
Example #14
class GenerateEMTileSpecsParameters(OutputStackParameters):
    metafile = InputFile(
        required=False,
        description="metadata file containing TEMCA acquisition data")
    metafile_uri = Str(
        required=True,
        description=("uri of metadata containing TEMCA acquisition data"))
    # FIXME maskUrl and image_directory are not required -- posix_to_uri should support this
    maskUrl = InputFile(required=False,
                        default=None,
                        missing=None,
                        description="absolute path to image mask to apply")
    maskUrl_uri = Str(required=False,
                      default=None,
                      missing=None,
                      description=("uri of image mask to apply"))
    image_directory = InputDir(
        required=False,
        description=("directory used in determining absolute paths to images. "
                     "Defaults to parent directory containing metafile "
                     "if omitted."))
    image_prefix = Str(
        required=False,
        description=(
            "prefix used in determining full uris of images in metadata. "
            "Defaults to using the / delimited prefix to "
            "the metadata_uri if omitted"))
    maximum_intensity = Int(
        required=False,
        default=255,
        description=("intensity value to interpret as white"))
    minimum_intensity = Int(
        required=False,
        default=0,
        description=("intensity value to interpret as black"))
    sectionId = Str(
        required=False,
        description=("sectionId to apply to tiles during ingest.  "
                     "If unspecified will default to a string "
                     "representation of the float value of z_index."))

    @pre_load
    def metafile_to_uri(self, data):
        rendermodules.utilities.schema_utils.posix_to_uri(
            data, "metafile", "metafile_uri")

    # FIXME not required -- does this work
    @pre_load
    def maskUrl_to_uri(self, data):
        rendermodules.utilities.schema_utils.posix_to_uri(
            data, "maskUrl", "maskUrl_uri")

    @pre_load
    def image_directory_to_prefix(self, data):
        rendermodules.utilities.schema_utils.posix_to_uri(
            data, "image_directory", "image_prefix")
Example #15
class DetectMontageDefectsParameters(RenderParameters, ZValueParameters,
                                     ProcessPoolParameters):
    prestitched_stack = Str(required=True,
                            description='Pre stitched stack (raw stack)')
    poststitched_stack = Str(required=True,
                             description='Stitched montage stack')
    match_collection = Str(
        required=True,
        description='Name of the montage point match collection')
    match_collection_owner = Str(
        required=False,
        default=None,
        missing=None,
        description='Name of the match collection owner')
    residual_threshold = Int(
        required=False,
        default=4,
        missing=4,
        description=('threshold value to filter residuals '
                     'for detecting seams (default = 4)'))
    neighbors_distance = Int(
        required=False,
        default=80,
        missing=80,
        description=('distance in pixels to look for '
                     'neighboring points in seam detection (default = 80)'))
    min_cluster_size = Int(
        required=False,
        default=12,
        missing=12,
        description=(
            'minimum number of point matches required in each cluster '
            'for taking it into account for seam detection (default = 12)'))
    threshold_cutoff = argschema.fields.List(
        argschema.fields.Float,
        required=False,
        default=[0.005, 0.005],
        description='Threshold for MAD cutoff in x and y')
    plot_sections = Bool(
        required=False,
        default=True,
        missing=True,
        description=("Do you want to plot the sections with defects "
                     "(holes or gaps)?. Will plot Bokeh plots in a html file"))
    out_html_dir = InputDir(
        required=False,
        default=None,
        missing=None,
        description="Folder to save the Bokeh plot defaults to /tmp directory")

    @post_load
    def add_match_collection_owner(self, data):
        if data['match_collection_owner'] is None:
            data['match_collection_owner'] = data['render']['owner']
Example #16
class UploadTileDirParameters(RenderParameters):
    inputTileDirectory = InputDir(required=True,
        description='path to project root')
    tilespecDirectory = Str(required=True,
        description='path to project root')
    outputStack = Str(required=True,
        description='Output stack')
    pool_size = Int(required=False, default=20,
        description='number of parallel threads to use')
    delete_stack = Boolean(required=False, default=True,
        description='flag to decide whether stack should be deleted before new upload')
Example #17
class GenerateSBEMTileSpecsParameters(OutputStackParameters):

    image_directory = InputDir(
        required=True,
        description=("directory used in determining absolute paths to images. "
                     "Defaults to parent directory containing metafile "
                     "if omitted."))
    image_prefix = Str(
        required=False,
        description=(
            "prefix used in determining full uris of images in metadata. "
            "Defaults to using the / delimited prefix to "
            "the metadata_uri if omitted"))
Example #18
class TileSetIngestSchema(argschema.ArgSchema):
    storage_directory = InputDir(
        required=False, description=(
            "Directory which stores acquisition data. "
            "Non-file uris must use storage_prefix"))
    storage_prefix = Str(required=True, description=(
        "uri prefix for acquisition data."))
    section = Nested(Section, required=False,
                     description="")
    acquisition_data = Nested(AcquisitionData, required=False)

    # TODO test this
    @marshmallow.pre_load
    def directory_to_storage_prefix(self, data):
        asap.utilities.schema_utils.posix_to_uri(
            data, "storage_directory", "storage_prefix")
Example #19
class CreateFastStacksParameters(RenderParameters):
    statetableFile = InputFile(required=True, description='state table file')
    projectDirectory = InputDir(required=True,
                                description='path to project root')
    outputStackPrefix = Str(
        required=False,
        default="ACQ",
        description=
        'prefix to include in front of channel name for render stack')
    pool_size = Int(required=False,
                    default=20,
                    description='number of parallel threads to use')
    delete_stack = Boolean(
        required=False,
        default=True,
        description=
        'flag to decide whether stack should be deleted before new upload')
Example #20
class SparkOptions(argschema.schemas.DefaultSchema):
    jarfile = Str(required=True, description=(
        "spark jar to call java spark command"))
    className = Str(required=True, description=(
        "spark class to call"))
    driverMemory = Str(required=False, default='6g', description=(
        "spark driver memory (important for local spark)"))
    memory = Str(
        required=False,
        description="Memory required for spark job")
    sparkhome = InputDir(required=True, description=(
        "Spark home directory containing bin/spark_submit"))
    spark_files = List(InputFile, required=False, description=(
        "list of spark files to add to the spark submit command"))
    spark_conf = Dict(required=False, description=(
        "dictionary of key value pairs to add to spark_submit "
        "as --conf key=value"))
Example #21
class CatGTParams(DefaultSchema):
    run_name = String(required=True,
                      help='undecorated run name (no g or t indices)')
    gate_string = String(required=True, default='0', help='gate string')
    trigger_string = String(
        required=True,
        default='0,0',
        help='string specifying trials to concatenate, e.g. 0,200')
    probe_string = String(required=True,
                          default='0',
                          help='string specifying probes, e.g. 0:3')
    stream_string = String(required=True,
                           default='-ap',
                           help='string specifying which streams to process')
    cmdStr = String(
        required=True,
        default='-prbfld -aphipass=300 -gbldmx -gfix=0.40,0.10,0.02',
        help='input stream filter, error correct settings for CatGT')
    catGTPath = InputDir(help='directory containing the CatGT executable.')
Example #22
class ViewMatchesSchema(ArgSchema):
    collection_path = InputFile(
        required=False,
        description="if specified, will read collection from here")
    collection_basename = Str(
        required=True,
        missing="collection.json",
        default="collection.json",
        description=("basename for collection file if collection_path"
                     " not specified. will also check for .json.gz"))
    data_dir = InputDir(
        required=True,
        description=("directory containing image files. Will also be dir"
                     " dir for collection path, if not otherwise specified"))
    resolved_tiles = List(
        Str,
        required=True,
        missing=["resolvedtiles.json.gz", "resolvedtiles_input.json.gz"],
        description=("will take the transform from the first file"
                     " matching this list, if possible"))
    transform_file = InputFile(
        required=False,
        description=("if provided, will get lens correction transform "
                     " from here"))
    view_all = Boolean(
        required=True,
        missing=False,
        default=False,
        description=("will plot all the pair matches. can be useful "
                     "for lens correction to file. probably not desirable "
                     "for montage"))
    show = Boolean(required=True,
                   missing=True,
                   default=True,
                   description=("show on screen?"))
    match_index = Int(required=True,
                      missing=0,
                      default=0,
                      description=("which index of self.matches to plot"))
    pdf_out = OutputFile(required=True,
                         missing='./view_matches_output.pdf',
                         default='./view_matches_output.pdf',
                         description="where to write the pdf output")
Example #23
class AddMipMapsToStackParameters(StackTransitionParameters):
    mipmap_dir = InputDir(
        required=False,
        description='directory to which the mipmaps will be stored')
    mipmap_prefix = Str(
        required=True,
        description=("uri prefix from which mipmap locations are built."))
    levels = mm.fields.Int(
        required=False,
        default=6,
        description='number of levels of mipmaps, default is 6')
    imgformat = mm.fields.Str(
        required=False,
        default="tiff",
        description='mipmap image format, default is tiff')

    @pre_load
    def mipmap_directory_to_prefix(self, data):
        asap.utilities.schema_utils.posix_to_uri(data, "mipmap_dir",
                                                 "mipmap_prefix")
Example #24
class CreateSectionPolygonsParameters(RenderParameters):
    stack = Str(
        required=True,
        description=
        'stack sectionImages are based upon (assumes section bounds for images)'
    )

    sectionImageDir = InputDir(
        required=True,
        description=
        'stack sectionImages directory (assumes section bounds for images)')

    polygon_dir = Str(
        required=True,
        description='directory to save json section boundaries into')

    isHorizontal = Boolean(
        required=True,
        description=
        'flag as to whether the sections are vertically or horizontally oriented'
    )
Example #25
class MeanWaveformParams(DefaultSchema):
    samples_per_spike = Int(required=True,
                            default=82,
                            help='Number of samples to extract for each spike')
    pre_samples = Int(
        required=True,
        default=20,
        help='Number of samples between start of spike and the peak')
    num_epochs = Int(required=True,
                     default=1,
                     help='Number of epochs to compute mean waveforms')
    spikes_per_epoch = Int(required=True,
                           default=100,
                           help='Max number of spikes per epoch')
    upsampling_factor = Float(
        required=False,
        default=200 / 82,
        help='Upsampling factor for calculating waveform metrics')
    spread_threshold = Float(
        required=False,
        default=0.12,
        help='Threshold for computing channel spread of 2D waveform')
    site_range = Int(required=False,
                     default=16,
                     help='Number of sites to use for 2D waveform metrics')
    cWaves_path = InputDir(required=False,
                           help='directory containing the C_Waves executable.')
    use_C_Waves = Bool(required=False,
                       default=False,
                       help='Use faster C routine to calculate mean waveforms')
    snr_radius = Int(
        required=False,
        default=8,
        help='disk radius (chans) about pk-chan for snr calculation in C_waves'
    )
    mean_waveforms_file = String(required=True,
                                 help='Path to mean waveforms file (.npy)')
Example #26
class LensCorrectionSchema(ArgSchema):
    data_dir = InputDir(
        required=True,
        description="directory containing metafile, images, and matches")
    output_dir = OutputDir(required=False,
                           description="directory for output files")
    mask_file = InputFile(required=False,
                          default=None,
                          missing=None,
                          description="mask to apply to each tile")
    nvertex = Int(required=False,
                  default=1000,
                  missing=1000,
                  description="maximum number of vertices to attempt")
    ransac_thresh = Float(required=False,
                          default=5.0,
                          missing=5.0,
                          description="ransac outlier threshold")
    regularization = Nested(regularization, missing={})
    good_solve = Nested(good_solve_criteria, missing={})
    ignore_match_indices = List(
        Int,
        required=False,
        default=None,
        missing=None,
        description=("debug feature for ignoring certain indices"
                     " of the match collection"))
    compress_output = Boolean(
        required=False,
        missing=True,
        default=True,
        description=("tilespecs will be .json or .json.gz"))
    timestamp = Boolean(required=False,
                        missing=False,
                        default=False,
                        description="add a timestamp to basename output")
Example #27
class SolverParameters(DefaultSchema):
    # NOTE: Khaled's EM_aligner needs some of the boolean variables as Integers
    # Hence providing the input as Integers
    degree = Int(required=True,
                 default=1,
                 description="Degree of transformation 1 - affine, "
                 "2 - second order polynomial, maximum is 3")
    solver = Str(required=False,
                 default='backslash',
                 missing='backslash',
                 description="Solver type - default is backslash")
    transfac = Float(required=False,
                     default=1e-15,
                     missing=1e-15,
                     description='Translational factor')
    lambda_value = Float(required=True, description="regularization parameter")
    edge_lambda = Float(required=True,
                        description="edge lambda regularization parameter")
    nbrs = Int(required=False,
               default=3,
               missing=3,
               description="No. of neighbors")
    nbrs_step = Int(required=False,
                    default=1,
                    missing=1,
                    description="Step value to increment the # of neighbors")
    xs_weight = Float(
        required=True,
        description="Weight ratio for cross section point matches")
    min_points = Int(
        required=True,
        default=8,
        description="Minimum no. of point matches per tile pair defaults to 8")
    max_points = Int(required=True,
                     default=100,
                     description="Maximum no. of point matches")
    filter_point_matches = Int(
        required=False,
        default=1,
        missing=1,
        description='set to a value 1 if point matches must be filtered')
    outlier_lambda = Float(
        required=True,
        default=100,
        description="Outlier lambda - large numbers result in "
        "fewer tiles excluded")
    min_tiles = Int(required=False,
                    default=2,
                    missing=2,
                    description="minimum number of tiles")
    Width = Int(required=False,
                default=3840,
                missing=3840,
                description='Width of the tiles (default = 3840)')
    Height = Int(required=False,
                 default=3840,
                 missing=3840,
                 description='Height of the tiles (default= 3840)')
    outside_group = Int(required=False,
                        default=0,
                        missing=0,
                        description='Outside group parameter (default = 0)')
    matrix_only = Int(required=False,
                      default=0,
                      missing=0,
                      description="0 - solve (default), 1 - only generate "
                      "the matrix. For debugging only")
    distribute_A = Int(required=False,
                       default=1,
                       missing=1,
                       description="Shards of A matrix")
    dir_scratch = InputDir(required=True, description="Scratch directory")
    distributed = Int(required=False,
                      default=0,
                      missing=0,
                      description="distributed or not?")
    disableValidation = Int(
        required=False,
        default=1,
        missing=1,
        description="Disable validation while ingesting tiles?")
    use_peg = Int(required=False,
                  default=0,
                  missing=0,
                  description="use pegs or not")
    complete = Int(required=False,
                   default=0,
                   missing=0,
                   description="Set stack state to complete after processing?")
    verbose = Int(required=False,
                  default=0,
                  missing=0,
                  description="want verbose output?")
    debug = Int(required=False,
                default=0,
                missing=0,
                description="Debug mode?")
    constrain_by_z = Int(required=False,
                         default=0,
                         missing=0,
                         description='Constrain by z')
    sandwich = Int(required=False,
                   default=0,
                   missing=0,
                   description='Sandwich parameter for solver')
    constraint_fac = Float(required=False,
                           default=1e+15,
                           missing=1e+15,
                           description='Constraint factor')
    pmopts = Nested(PointMatchParameters,
                    required=True,
                    description='Point match filtering parameters')
    pastix = Nested(PastixParameters,
                    required=False,
                    default=None,
                    missing=None,
                    description="Pastix parameters if solving using Pastix")
Example #28
class BasicInputDir(ArgSchema):
    input_dir = InputDir(required=True,
                         metadata={'description': 'a simple file'})
Example #29
class MaterializeSectionsOutput(argschema.schemas.DefaultSchema):
    zValues = List(Int, required=True)
    rootDirectory = InputDir(required=True)
    materializedDirectory = InputDir(required=True)
Example #30
class BasicInputDir(ArgSchema):
    input_dir = InputDir(required=True, description='a simple file')
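
A minimal, hedged sketch of how a schema like BasicInputDir might be driven through argschema; the directory path below is a placeholder, and InputDir validates at parse time that the directory exists and is readable.

import argschema

if __name__ == '__main__':
    # args=[] keeps the parser from reading sys.argv; on a command line the
    # same field could instead be supplied as --input_dir /some/existing/dir
    parser = argschema.ArgSchemaParser(input_data={'input_dir': '/tmp'},
                                       schema_type=BasicInputDir,
                                       args=[])
    print(parser.args['input_dir'])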