Пример #1
0
class InputSchema(ArgSchema):
    """Input schema for the mesoscope tiff-splitting job."""

    log_level = Str(default="INFO", required=False)
    depths_tif = InputFile(
        description="Full path to depth 2p tiff file.", required=True)
    surface_tif = InputFile(
        description="Full path to surface 2p tiff file.", required=True)
    timeseries_tif = InputFile(
        description="Full path to timeseries tiff file.", required=True)
    storage_directory = OutputDir(
        description="Folder for column stack outputs.", required=True)
    plane_groups = Nested(PlaneGroup, many=True)
    test_mode = Int(
        description=("Flag to run without actually splitting data. For testing"
                     " runner mechanism and metadata. Testing of splitting "
                     "is handled in testing for the mesoscope_2p package."),
        default=0)
Пример #2
0
class Image(DefaultSchema):
    """One image to process: its input path, output path, and overlay options."""

    input_path = InputFile(
        description="Read the image from here", required=True)
    output_path = OutputFile(
        description="Write outputs to (siblings of) this path", required=True)
    downsample = Int(
        description=("Downsample the image by this amount on each dimension "
                     "(currently this is just a decimation, hence Int)."),
        default=8,
        required=True)
    overlay_types = List(
        String,
        description=("produce these types of overlays for this image. "
                     "See ImageOutputter for options"),
        default=["before", "after"],
        required=True)
Пример #3
0
class QualityMetricsParams(DefaultSchema):
    """Parameters for computing spike-sorting quality metrics."""

    isi_threshold = Float(required=False, default=0.0015, help='Maximum time (in seconds) for ISI violation')
    min_isi = Float(required=False, default=0.00, help='Minimum time (in seconds) for ISI violation')
    num_channels_to_compare = Int(required=False, default=13, help='Number of channels to use for computing PC metrics; must be odd')
    max_spikes_for_unit = Int(required=False, default=500, help='Number of spikes to subsample for computing PC metrics')
    max_spikes_for_nn = Int(required=False, default=10000, help='Further subsampling for NearestNeighbor calculation')
    n_neighbors = Int(required=False, default=4, help='Number of neighbors to use for NearestNeighbor calculation')
    n_silhouette = Int(required=False, default=10000, help='Number of spikes to use for calculating silhouette score')

    drift_metrics_min_spikes_per_interval = Int(required=False, default=10, help='Minimum number of spikes for computing depth')
    # Fixed typo in help text: "Interval length is seconds" -> "in seconds".
    drift_metrics_interval_s = Float(required=False, default=100, help='Interval length in seconds for computing spike depth')
Пример #4
0
class PointMatchCollectionParameters(DefaultSchema):
    """Parameters identifying a Render point match collection."""

    owner = Str(
        required=False,
        default=None,
        missing=None,
        # fixed: description was missing its closing parenthesis
        description="Point match collection owner (defaults to render owner)")
    match_collection = Str(required=True,
                           description="Point match collection name")
    server = Str(required=False,
                 default=None,
                 missing=None,
                 description="baseURL of the Render service holding the "
                 "point match collection")
    verbose = Int(required=False,
                  default=0,
                  missing=0,
                  description="Verbose output flag")
Пример #5
0
class CreateFastStacksParameters(RenderParameters):
    """Parameters for creating render stacks from a state table file."""

    statetableFile = InputFile(required=True, description='state table file')
    projectDirectory = InputDir(required=True,
                                description='path to project root')
    outputStackPrefix = Str(
        required=False,
        default="ACQ",
        description='prefix to include in front of channel name for render stack')
    # bug fix: keyword was misspelled "require=", which marshmallow would
    # silently treat as metadata instead of marking the field optional
    pool_size = Int(required=False,
                    default=20,
                    description='number of parallel threads to use')
    delete_stack = Boolean(
        required=False,
        default=True,
        description='flag to decide whether stack should be deleted before new upload')
Пример #6
0
class PointMatchCollectionParameters(DefaultSchema):
    """Identifies the montage point match collection on a Render server."""

    server = Str(
        description="Render server's base URL",
        required=False, default=None, missing=None)
    owner = Str(
        description="Owner of the point match collection (default - render service owner)",
        required=False, default=None, missing=None)
    match_collection = Str(
        description="Montage point match collection", required=True)
    verbose = Int(
        description="Verbose output from point match loader needed?",
        required=False, default=0, missing=0)
Пример #7
0
class ReceptiveFieldMapping(DefaultSchema):
    """Parameters for receptive-field-mapping stimulus analysis.

    NOTE(review): the ``stimulus_key`` default calls
    ``ReceptiveFieldMapping.known_stimulus_keys()`` inside this class body,
    before this class itself is bound; the name must therefore resolve to a
    different ``ReceptiveFieldMapping`` object already in scope (e.g. an
    analysis class imported by this module) -- confirm against the imports.
    """
    stimulus_key = List(String,
                        default=ReceptiveFieldMapping.known_stimulus_keys(),
                        help='Key for the receptive field mapping stimulus')
    trial_duration = Float(
        default=0.25,
        help='typical length of a epoch for given stimulus in seconds')
    minimum_spike_count = Int(
        default=10,
        help='Minimum number of spikes for computing receptive field parameters'
    )
    mask_threshold = Float(
        default=1.0,
        help=
        'Threshold (as fraction of peak) for computing receptive field mask')
    stimulus_step_size = Float(
        default=10.0, help='Distance between stimulus locations in degrees')
class FeatureStorageParameters(argschema.schemas.DefaultSchema):
    """Options for caching and reusing extracted feature lists."""

    # TODO is this inputdir or outputdir?
    rootFeatureDirectory = Str(
        description=("Root directory for saved feature lists. "
                     "Features extracted from dynamically rendered canvases "
                     "if excluded or None."),
        required=False)
    requireStoredFeatures = Boolean(
        description=(
            "Whether to throw an exception in case features stored in "
            "rootFeatureDirectory cannot be found. "
            "Missing features are extracted from dynamically rendered canvases "
            "if excluded or None"),
        required=False)
    maxFeatureCacheGb = Int(
        description=(
            "Maximum size of feature cache, in GB. 2GB if excluded or None"),
        required=False)
class MaterializedBoxParameters(argschema.schemas.DefaultSchema):
    """Parameters describing how image boxes are materialized from a stack."""

    # fixed typo "fromw" -> "from"
    stack = Str(required=True,
                description=("stack from which boxes will be materialized"))
    rootDirectory = OutputDir(
        required=True,
        description=(
            "directory in which materialization directory structure will be "
            "created (structure is "
            "<rootDirectory>/<project>/<stack>/<width>x<height>/<mipMapLevel>/<z>/<row>/<col>.<fmt>)"
            ))
    width = Int(required=True,
                description=("width of flat rectangular tiles to generate"))
    height = Int(required=True,
                 description=("height of flat rectangular tiles to generate"))
    maxLevel = Int(required=False,
                   default=0,
                   description=("maximum mipMapLevel to generate."))
    # bug fix: keyword was "validator=", which marshmallow ignores (stored as
    # metadata); the marshmallow keyword is "validate=", matching the usage
    # elsewhere in this file
    fmt = Str(required=False,
              validate=validate.OneOf(['PNG', 'TIF', 'JPG']),
              description=("image format to generate mipmaps -- "
                           "PNG if not specified"))
    maxOverviewWidthAndHeight = Int(
        required=False,
        description=(
            "maximum pixel size for width or height of overview image.  "
            "If excluded or 0, no overview generated."))
    skipInterpolation = Boolean(
        required=False,
        description=("whether to skip interpolation (e.g. DMG data)"))
    binaryMask = Boolean(
        required=False,
        description=("whether to use binary mask (e.g. DMG data)"))
    label = Boolean(
        required=False,
        description=("whether to generate single color tile labels rather "
                     "than actual images"))
    # fixed typo "whther" -> "whether"
    createIGrid = Boolean(required=False,
                          description=("whether to create an IGrid file"))
    forceGeneration = Boolean(
        required=False, description=("whether to regenerate existing tiles"))
    renderGroup = Int(
        required=False,
        description=(
            "index (1-n) identifying coarse portion of layer to render"))
    numberOfRenderGroups = Int(
        required=False,
        description=(
            "used in conjunction with renderGroup, total number of groups "
            "being used"))
    filterListName = Str(
        required=False,
        description=("Apply specified filter list to all renderings"))
Пример #10
0
class PostprocessingParams(DefaultSchema):
    """Parameters for removing overlapping spikes after sorting."""

    within_unit_overlap_window = Float(
        help='Time window for removing overlapping spikes for one unit.',
        required=False, default=0.000166)
    between_unit_overlap_window = Float(
        help='Time window for removing overlapping spikes between two units.',
        required=False, default=0.000166)
    # NOTE(review): the field name suggests microns but the help text
    # describes a channel count -- confirm which is intended upstream.
    between_unit_dist_um = Int(
        help='Number of channels (above and below peak channel) to search for overlapping spikes',
        required=False, default=5)
    deletion_mode = String(
        help='lowAmpCluster or deleteFirst',
        required=False, default='lowAmpCluster')
Пример #11
0
class ViewMatchesSchema(ArgSchema):
    """Inputs for visualizing point-match pairs over their source images."""

    collection_path = InputFile(
        description="if specified, will read collection from here",
        required=False)
    collection_basename = Str(
        description=("basename for collection file if collection_path"
                     " not specified. will also check for .json.gz"),
        required=True,
        missing="collection.json",
        default="collection.json")
    data_dir = InputDir(
        description=("directory containing image files. Will also be dir"
                     " dir for collection path, if not otherwise specified"),
        required=True)
    resolved_tiles = List(
        Str,
        description=("will take the transform from the first file"
                     " matching this list, if possible"),
        required=True,
        missing=["resolvedtiles.json.gz", "resolvedtiles_input.json.gz"])
    transform_file = InputFile(
        description=("if provided, will get lens correction transform "
                     " from here"),
        required=False)
    view_all = Boolean(
        description=("will plot all the pair matches. can be useful "
                     "for lens correction to file. probably not desirable "
                     "for montage"),
        required=True,
        missing=False,
        default=False)
    show = Boolean(
        description=("show on screen?"),
        required=True, missing=True, default=True)
    match_index = Int(
        description=("which index of self.matches to plot"),
        required=True, missing=0, default=0)
    pdf_out = OutputFile(
        description="where to write the pdf output",
        required=True,
        missing='./view_matches_output.pdf',
        default='./view_matches_output.pdf')
Пример #12
0
class PointMatchTransferParameters(RenderTransferParameters):
    """Parameters for copying a point match collection between render hosts."""

    collection_source = Str(
        required=True,
        metadata={
            'description': 'point match collection to move from source_render'
        })
    # fixed typo "colleciton" -> "collection"
    collection_target = Str(
        required=False,
        metadata={
            'description':
            'point match collection to move to target_render (default to the same)'
        })
    # fixed copy-pasted description (previously duplicated collection_target's)
    pool_size = Int(
        required=False,
        default=20,
        metadata={
            'description':
            'size of pool for parallel processing (default=20)'
        })
Пример #13
0
class SwapPointMatches(RenderParameters):
    """Parameters for swapping point matches between two collections."""

    match_owner = Str(
        description="Match collection owner name", required=True)
    source_collection = Str(
        description="Source point match collection", required=True)
    target_collection = Str(
        description="Target point match collection", required=True)
    zValues = List(
        Int, description="List of integer group ids", required=True)
    pool_size = Int(
        description="Pool size", required=False, default=5, missing=5)
Пример #14
0
class NonFileParameters(DefaultSchema):
    """Parameters controlling file-copy behavior (non-path options)."""

    use_rsync = Boolean(default=True,
                        description='copy files using rsync rather than '
                        'shutil (this is not likely to work if '
                        'you are running windows!)')
    hasher_key = String(default='sha256',
                        validate=lambda st: st in available_hashers,
                        allow_none=True,
                        description='select a hash function to compute over '
                        'base64-encoded pre- and post-copy files')
    raise_if_comparison_fails = Boolean(default=True,
                                        description='if a hash comparison '
                                        'fails, throw an error ('
                                        'vs. a warning)')
    make_parent_dirs = Boolean(default=True,
                               description='build missing parent directories '
                               'for destination')
    # NOTE(review): the default is the decimal integer 775, not octal 0o775;
    # if this value is fed to os.chmod that is almost certainly unintended --
    # confirm with the consumer before changing behavior.
    chmod = Int(default=775,
                # fixed unbalanced parenthesis in the description text
                description="destination files (and any created parents) "
                "will have these permissions")
class ConsolidateTransformsParameters(RenderParameters):
    """Parameters for consolidating transforms in a render stack."""

    stack = Str(required=True, description='stack to consolidate')
    postfix = Str(
        required=False,
        default="_CONS",
        description='postfix to add to stack name on saving if no output defined (default _CONS)')
    output_stack = Str(
        required=False,
        description='name of output stack (default to adding postfix to input)')
    output_directory = Str(
        required=True,
        description='location to store tilespecs before uploading')
    # fixed copy-pasted description (previously duplicated output_stack's)
    pool_size = Int(
        required=False,
        default=20,
        description='size of pool for parallel processing (default=20)')
Пример #16
0
class DepthField(DefaultSchema):
    """Paths and conventions describing a cortical depth field."""

    gradient_field_path = InputFile(
        required=True,
        description=(
            "The path to an xarray file describing the gradient of cortical "
            "depth on some domain. This file should contain one dataarray "
            "called 'gradient' which has dimensions drawn from "
            "{'x', 'y', 'z', 'component'}. The coords of x, y, z define the "
            "domain over which the gradient was computed. The component "
            "dimension describes the dimension associated with each component "
            "of the gradient and should have coords drawn from {'x', 'y', 'z'}."))
    depth_field_path = InputFile(
        required=True,
        description=("As gradient field, but gives depth values"))
    soma_origin = Bool(
        required=True,
        default=True,
        description="If true, the field is centered at the soma")
    pia_sign = Int(
        required=True,
        default=1,
        validate=lambda val: val in {1, -1},
        description="which direction is the pia")
Пример #17
0
class InputParameters(ArgSchema):
    """Inputs for stepping an swc morphology through a depth field."""

    swc_path = InputFile(
        required=True, description="path to input swc (csv) file")
    depth = Nested(
        DepthField,
        required=True,
        many=False,
        description=("A transform which can be evaluated at the "
                     "location of each node in the input swc"))
    layers = Nested(
        Layer,
        required=True,
        many=True,
        description="specification of layer bounds")
    step_size = Float(
        required=True,
        default=1.0,
        description=("size of each step, in the same units as the depth field and swc"))
    output_path = OutputFile(
        required=True, description="write (csv) outputs here")
    max_iter = Int(
        required=True,
        default=1000,
        description="how many steps to take before giving up")
Пример #18
0
class QualityMetricsParams(DefaultSchema):
    """Parameters for computing spike-sorting quality metrics."""

    isi_threshold = Float(required=False, default=0.0015, help='Maximum time (in seconds) for ISI violation')
    min_isi = Float(required=False, default=0.00, help='Minimum time (in seconds) for ISI violation')
    num_channels_to_compare = Int(required=False, default=13, help='Number of channels to use for computing PC metrics; must be odd')
    max_spikes_for_unit = Int(required=False, default=500, help='Number of spikes to subsample for computing PC metrics')
    max_spikes_for_nn = Int(required=False, default=10000, help='Further subsampling for NearestNeighbor calculation')
    n_neighbors = Int(required=False, default=4, help='Number of neighbors to use for NearestNeighbor calculation')
    n_silhouette = Int(required=False, default=10000, help='Number of spikes to use for calculating silhouette score')

    drift_metrics_min_spikes_per_interval = Int(required=False, default=10, help='Minimum number of spikes for computing depth')
    # Fixed typo in help text: "Interval length is seconds" -> "in seconds".
    drift_metrics_interval_s = Float(required=False, default=100, help='Interval length in seconds for computing spike depth')

    quality_metrics_output_file = String(required=True, help='CSV file where metrics will be saved')

    include_pc_metrics = Bool(required=False, default=True, help='Compute features that require principal components')
Пример #19
0
class ConsolidateTransformsParameters(RenderParameters):
    """Parameters for consolidating a slice of transforms in a render stack."""

    stack = Str(required=True, description='stack to consolidate')
    postfix = Str(
        required=False,
        default="_CONS",
        description='postfix to add to stack name on saving if no output defined (default _CONS)')
    transforms_slice = Slice(
        required=True,
        description="a string representing a slice describing \
                             the set of transforms to be consolidated (i.e. 1:)"
    )
    output_stack = Str(
        required=False,
        description='name of output stack (default to adding postfix to input)')
    # fixed copy-pasted description (previously duplicated output_stack's)
    pool_size = Int(
        required=False,
        default=10,
        description='size of pool for parallel processing (default=10)')
    minZ = Float(
        required=False,
        description="""minimum z to consolidate in read in from stack and write to output_stack\
                 default to minimum z in stack""")
    # fixed garbled "minimaximummum z" in the description
    maxZ = Float(
        required=False,
        description="""maximum z to consolidate in read in from stack and write to output_stack\
                 default to maximum z in stack""")
    overwrite_zlayer = Boolean(
        required=False,
        default=False,
        description=("whether to remove the existing layer from the "
                     "target stack before uploading."))
    close_stack = Boolean(required=False, default=False)
Пример #20
0
class PointMatchFilteringOptions(DefaultSchema):
    """RANSAC-style filtering options for point matches."""

    NumRandomSamplingsMethod = Str(
        description="Numerical random sampling method",
        required=False,
        default="Desired confidence",
        missing="Desired confidence")
    MaximumRandomSamples = Int(
        description="Max number of random samples",
        required=False, default=5000, missing=5000)
    DesiredConfidence = Float(
        description="Desired confidence value for point match filtering",
        required=False, default=99.9, missing=99.9)
    Transform = Str(
        description="RANSAC model to fit",
        required=False, default="Affine", missing="Affine")
    PixelDistanceThreshold = Float(
        description="Pixel distance threshold",
        required=False, default=0.1, missing=0.1)
Пример #21
0
class PointMatchOptimizationParameters(RenderParameters):
    """Parameters for optimizing SIFT point matching on one tile pair."""

    stack = Str(
        description='Name of the stack containing the tile pair',
        required=True)
    tile_stack = Str(
        description='Name of the stack that will hold these two tiles',
        required=False, default=None, missing=None)
    tileId1 = Str(
        description='tileId of the first tile in the tile pair',
        required=True)
    tileId2 = Str(
        description='tileId of the second tile in the tile pair',
        required=True)
    pool_size = Int(
        description='Pool size for parallel processing',
        required=False, default=10, missing=10)
    # The right-hand-side names below resolve to module-level objects: a
    # class attribute is not bound until after its RHS is evaluated.
    SIFT_options = Nested(SIFT_options, required=True)
    outputDirectory = OutputDir(
        description=(
            'Parent directory in which subdirectories will be '
            'created to store images and point-match results from SIFT'),
        required=True)
    url_options = Nested(url_options, required=True)
Пример #22
0
class PostgresInputConfigSchema(mm.Schema):
    """Connection parameters for a source Postgres database."""

    host = String(required=True, description="")
    database = String(required=True, description="")
    user = String(required=True, description="")
    # The environment variable is read once, when this class body executes
    # (import time), not each time the schema is loaded.
    password = String(
        required=False,
        default=os.environ.get("POSTGRES_SOURCE_PASSWORD"),
        description="")
    port = Int(
        required=False,  # seems not to get hydrated from the default
        default=5432,
        description="")
Пример #23
0
class ReplaceFileNameParameters(RenderParameters):
    """Parameters for regex-replacing imageUrl values in a stack."""

    input_stack = Str(
        required=True,
        metadata={'description': 'stack to apply affine to'})
    output_stack = Str(
        required=False,
        metadata={
            'description':
            'stack to save answer into (defaults to overwriting input_stack)'
        })
    regex_find = Str(
        required=True,
        metadata={
            'description':
            'regular expression to search for in imageUrl'
        })
    regex_replace = Str(
        required=True,
        metadata={'description': 'regular expression replacement string'})
    pool_size = Int(
        required=False,
        default=20,
        metadata={
            'description':
            'size of pool for parallel processing (default=20)'
        })
Пример #24
0
class ConsolidateTransformsParameters(RenderParameters):
    """Parameters for consolidating transforms in a render stack."""

    stack = Str(required=True,
                metadata={'description': 'stack to consolidate'})
    postfix = Str(
        required=False,
        default="_CONS",
        metadata={
            'description':
            'postfix to add to stack name on saving if no output defined (default _CONS)'
        })
    output_stack = Str(
        required=False,
        metadata={
            'description':
            'name of output stack (default to adding postfix to input)'
        })
    # fixed copy-pasted description (previously duplicated output_stack's)
    pool_size = Int(
        required=False,
        default=20,
        metadata={
            'description':
            'size of pool for parallel processing (default=20)'
        })
Пример #25
0
class RenderSchema(DefaultSchema):
    """Options for connecting a neuroglancer render datasource."""

    protocol = Str(
        required=False,
        default="http",
        help="Protocol to connect to render with (http or https)")
    port = Int(required=False, default=80)
    encoding = Str(
        required=False,
        default="jpg",
        help="Encoding option for the neuroglancer render datasource (jpg or raw16)")
    all_channels = Boolean(
        required=False,
        default=False,
        help="Use Render API to query for and load all channels")
    alt_render = Str(
        required=False,
        default="",
        help="Alternate render host to use for vizrelay API calls [to work in Docker]")
    enable_one_channel = Boolean(
        required=False,
        default=False,
        help="Enable only one of the channels")
    channel_name_shader_sub = Dict(
        required=False,
        default={},
        help="Dictionary of CHANNEL_NAME : { SUB_NAME : SUB_VALUE }")
Пример #26
0
class OtherParameters(DefaultSchema):
    """Pass-through solver parameters (descriptions intentionally empty)."""

    rod = Float(
        required=True, metadata={'description': ''})
    maxEpsilon = Float(
        required=True, metadata={'description': ''})
    minInlierRatio = Float(
        required=True, metadata={'description': ''})
    minNumInliers = Int(
        required=True, metadata={'description': ''})
    expectedModelIndex = Int(
        required=True, metadata={'description': ''})
    multipleHypotheses = Bool(
        required=True, metadata={'description': ''})
    rejectIdentity = Bool(
        required=True, metadata={'description': ''})
    identityTolerance = Float(
        required=True, metadata={'description': ''})
    tilesAreInPlace = Bool(
        required=True, metadata={'description': ''})
    desiredModelIndex = Int(
        required=True, metadata={'description': ''})
    regularize = Bool(
        required=True, metadata={'description': ''})
    maxIterationsOptimize = Int(
        required=True, metadata={'description': ''})
    maxPlateauWidthOptimize = Int(
        required=True, metadata={'description': ''})
    dimension = Int(
        required=True, metadata={'description': ''})
    lambdaVal = Float(
        required=True, metadata={'description': ''})
    clearTransform = Bool(
        required=True, metadata={'description': ''})
    visualize = Bool(
        required=True, metadata={'description': ''})
Пример #27
0
class MeshLensCorrectionSchema(ArgSchema):
    """Input schema for mesh-based lens correction.

    Exactly one of (tilespecs, tilespec_file) and exactly one of
    (matches, match_file) must be provided; enforced by ``one_of_two``.
    """

    # bug fix: keyword was misspelled "missinf", so the intended
    # missing-value fell into field metadata and was silently ignored
    nvertex = Int(required=False,
                  default=1000,
                  missing=1000,
                  description="maximum number of vertices to attempt")
    tilespec_file = InputFile(required=False,
                              description="path to json of tilespecs")
    tilespecs = List(Dict,
                     required=False,
                     description="list of dict of tilespecs")
    match_file = InputFile(required=False,
                           description="path to json of matches")
    matches = List(Dict, required=False, description="list of dict of matches")
    regularization = Nested(regularization, missing={})
    good_solve = Nested(good_solve_criteria, missing={})
    output_dir = OutputDir(required=False,
                           description="directory for output files")
    outfile = Str(required=False,
                  description=("Basename to which resolved json output of "
                               "lens correction is written"))
    compress_output = Boolean(
        required=False,
        missing=True,
        default=True,
        description=("tilespecs will be .json or .json.gz"))
    timestamp = Boolean(required=False,
                        missing=False,
                        default=False,
                        description="add a timestamp to basename output")

    @mm.post_load
    def one_of_two(self, data):
        """Raise unless exactly one of each (list, file) pair was supplied."""
        for a, b in [['tilespecs', 'tilespec_file'], ['matches',
                                                      'match_file']]:
            # equality means "both present or both absent" -- i.e. the
            # negation of exclusive-or, which is the invalid configuration
            # (the original "# xor" comment was misleading)
            if (a in data) == (b in data):
                raise mm.ValidationError(
                    'must specify one and only one of %s or %s' % (a, b))
Пример #28
0
class PointMatchParameters(DefaultSchema):
    """RANSAC filtering parameters for point matching."""

    NumRandomSamplingsMethod = Str(
        description='Numerical Random sampling method',
        required=False,
        default="Desired confidence",
        missing="Desired confidence")
    MaximumRandomSamples = Int(
        description='Maximum number of random samples',
        required=False, default=5000, missing=5000)
    DesiredConfidence = Float(
        description='Desired confidence level',
        required=False, default=99.9, missing=99.9)
    PixelDistanceThreshold = Float(
        description='Pixel distance threshold for filtering',
        required=False, default=0.1, missing=0.1)
    Transform = Str(
        description="Transformation type parameter for point "
                    "match filtering (default AFFINE)",
        required=False, default="AFFINE", missing="AFFINE")
Пример #29
0
class LensCorrectionSchema(ArgSchema):
    """Input schema for lens correction solves."""

    data_dir = InputDir(
        required=True,
        description="directory containing metafile, images, and matches")
    output_dir = OutputDir(required=False,
                           description="directory for output files")
    mask_file = InputFile(required=False,
                          default=None,
                          missing=None,
                          description="mask to apply to each tile")
    # bug fix: keyword was misspelled "missinf", so the intended
    # missing-value fell into field metadata and was silently ignored
    nvertex = Int(required=False,
                  default=1000,
                  missing=1000,
                  description="maximum number of vertices to attempt")
    ransac_thresh = Float(required=False,
                          default=5.0,
                          missing=5.0,
                          description="ransac outlier threshold")
    regularization = Nested(regularization, missing={})
    good_solve = Nested(good_solve_criteria, missing={})
    ignore_match_indices = List(
        Int,
        required=False,
        default=None,
        missing=None,
        description=("debug feature for ignoring certain indices"
                     " of the match collection"))
    compress_output = Boolean(
        required=False,
        missing=True,
        default=True,
        description=("tilespecs will be .json or .json.gz"))
    timestamp = Boolean(required=False,
                        missing=False,
                        default=False,
                        description="add a timestamp to basename output")
Пример #30
0
class QualityMetricsParams(DefaultSchema):
    """Parameters for computing spike-sorting quality metrics."""

    isi_threshold = Float(required=False,
                          default=0.0015,
                          help='Maximum time (in seconds) for ISI violation')
    min_isi = Float(required=False,
                    default=0.00,
                    help='Minimum time (in seconds) for ISI violation')
    max_radius_um = Int(required=False,
                        default=68,
                        help='Maximum radius for computing PC metrics, in um')
    max_spikes_for_unit = Int(
        required=False,
        default=500,
        help='Number of spikes to subsample for computing PC metrics')
    max_spikes_for_nn = Int(
        required=False,
        default=10000,
        help='Further subsampling for NearestNeighbor calculation')
    n_neighbors = Int(
        required=False,
        default=4,
        help='Number of neighbors to use for NearestNeighbor calculation')
    n_silhouette = Int(
        required=False,
        default=10000,
        help='Number of spikes to use for calculating silhouette score')

    drift_metrics_min_spikes_per_interval = Int(
        required=False,
        default=10,
        help='Minimum number of spikes for computing depth')
    # Fixed typo in help text: "Interval length is seconds" -> "in seconds".
    drift_metrics_interval_s = Float(
        required=False,
        default=100,
        help='Interval length in seconds for computing spike depth')
    include_pcs = Boolean(
        required=False,
        default=True,
        help='Set to false if features were not saved with Phy output')