def cli_parser():
    """
    Build the CLI argument parser: optional time-range constraints plus a
    required output-paths file option.

    :rtype: argparse.ArgumentParser
    """
    parser = bin_utils.basic_cli_parser(bin_utils.doc_as_description(__doc__))
    parser.add_argument('--after-time', metavar='TIMESTAMP',
                        help='Optional timestamp to constrain that we look '
                             'for entries added after the given time stamp. '
                             'This should be formatted according to Solr\'s '
                             'timestamp format (e.g. '
                             '"2016-01-01T00:00:00.000Z"). Sub-seconds are '
                             'optional. Timestamps should be in UTC (Zulu). '
                             'The constraint is inclusive.')
    # Typo fix in user-visible help: "constrin" -> "constrain".
    parser.add_argument('--before-time', metavar='TIMESTAMP',
                        help='Optional timestamp to constrain that we look for '
                             'entries added before the given time stamp. See '
                             'the description of `--after-time` for format '
                             'info.')
    g_required = parser.add_argument_group("Required options")
    g_required.add_argument('-p', '--paths-file',
                            help='Path to the file to output collected file '
                                 'paths to (local filesystem path).')
    return parser
def cli_parser():
    """
    Construct the argument parser for the web-application runner: app
    selection, server behavior, and debug toggles.

    :rtype: argparse.ArgumentParser
    """
    parser = bin_utils.basic_cli_parser(__doc__)

    # Which application to run (or list what is available).
    g_app = parser.add_argument_group("Application Selection")
    g_app.add_argument('-l', '--list',
                       default=False, action="store_true",
                       help="List currently available applications "
                            "for running. More description is "
                            "included if SMQTK verbosity is "
                            "increased (-v | --debug-smqtk)")
    g_app.add_argument('-a', '--application', default=None,
                       help="Label of the web application to run.")

    # Server runtime behavior.
    g_srv = parser.add_argument_group("Server options")
    g_srv.add_argument('-r', '--reload',
                       action='store_true', default=False,
                       help='Turn on server reloading.')
    g_srv.add_argument('-t', '--threaded',
                       action='store_true', default=False,
                       help="Turn on server multi-threading.")
    g_srv.add_argument('--host', default=None,
                       help="Run host address specification override. "
                            "This will override all other configuration "
                            "method specifications.")
    g_srv.add_argument('--port', default=None,
                       help="Run port specification override. This will "
                            "override all other configuration method "
                            "specifications.")
    g_srv.add_argument("--use-basic-auth",
                       action="store_true", default=False,
                       help="Use global basic authentication as "
                            "configured.")

    # Debug-message toggles.
    g_misc = parser.add_argument_group("Other options")
    g_misc.add_argument('--debug-server',
                        action='store_true', default=False,
                        help='Turn on server debugging messages ONLY')
    g_misc.add_argument('--debug-smqtk',
                        action='store_true', default=False,
                        help='Turn on SMQTK debugging messages ONLY')

    return parser
def get_cli_parser():
    """
    Construct the argument parser: classification options plus positional
    shell-glob inputs.

    :rtype: argparse.ArgumentParser
    """
    parser = basic_cli_parser(__doc__)

    cls_group = parser.add_argument_group("Classification")
    cls_group.add_argument('--overwrite',
                           action='store_true', default=False,
                           help='When generating a configuration file, '
                                'overwrite an existing file.')
    cls_group.add_argument('-l', '--label',
                           type=str, default=None,
                           help='The class to filter by. This is based on '
                                'the classifier configuration/model used. '
                                'If this is not provided, we will list the '
                                'available labels in the provided '
                                'classifier configuration.')

    # Positional file-glob inputs.
    parser.add_argument("file_globs",
                        nargs='*', metavar='GLOB',
                        help='Series of shell globs specifying the files to '
                             'classify.')

    return parser
def cli_parser():
    """
    Construct the parser of required positional arguments for ball-tree
    model generation.

    :rtype: argparse.ArgumentParser
    """
    p = basic_cli_parser(__doc__, configuration_group=False)
    # (argument-name, value-type) pairs, in positional order.
    positional_specs = (
        ("hash2uuids_fp", str),
        ("bit_len", int),
        ("leaf_size", int),
        ("rand_seed", int),
        ("balltree_model_fp", str),
    )
    for arg_name, arg_type in positional_specs:
        p.add_argument(arg_name, type=arg_type)
    return p
def cli_parser():
    """
    Construct the CLI parser with the clustering output-map option.

    :rtype: argparse.ArgumentParser
    """
    parser = basic_cli_parser(__doc__)
    out_group = parser.add_argument_group("output")
    out_group.add_argument('-o', '--output-map', metavar="PATH",
                           help="Path to output the clustering class mapping "
                                "to. Saved as a pickle file with -1 format.")
    return parser
def cli_parser():
    """
    Construct the CLI parser with input/output file path options.

    :rtype: argparse.ArgumentParser
    """
    parser = bin_utils.basic_cli_parser(__doc__)
    io_group = parser.add_argument_group("Input Output Files")
    # All three options share the PATH metavar; only flag and help differ.
    for flag, help_text in (
            ('--uuids-list',
             'Path to the input file listing UUIDs to process.'),
            ('--csv-header',
             'Path to the file to output column header labels.'),
            ('--csv-data',
             'Path to the file to output the CSV data to.')):
        io_group.add_argument(flag, metavar='PATH', help=help_text)
    return parser
def cli_parser():
    """
    Construct the CLI parser with the optional UUID-list input option.

    :rtype: argparse.ArgumentParser
    """
    parser = bin_utils.basic_cli_parser(__doc__)
    g_io = parser.add_argument_group("I/O")
    # Typo fix in user-visible help: "to computed hash codes" ->
    # "to compute hash codes".
    g_io.add_argument("--uuids-list", default=None, metavar="PATH",
                      help='Optional path to a file listing UUIDs of '
                           'descriptors to compute hash codes for. If '
                           'not provided we compute hash codes for all '
                           'descriptors in the configured descriptor index.')
    return parser
def get_cli_parser():
    """
    Construct the argument parser for image-validity checking.

    :rtype: argparse.ArgumentParser
    """
    p = basic_cli_parser(__doc__, configuration_group=False)
    p.add_argument('-i', '--invert',
                   default=False, action='store_true',
                   help='Invert results, showing only invalid images.')

    required_grp = p.add_argument_group("Required Arguments")
    required_grp.add_argument('-f', '--file-list',
                              type=str, default=None, metavar='PATH',
                              help='Path to a file that lists data file paths.')
    return p
def cli_parser():
    """
    Construct the parser with I/O options for libSVM label conversion.

    :rtype: argparse.ArgumentParser
    """
    parser = bin_utils.basic_cli_parser(__doc__)
    io_group = parser.add_argument_group("IO Options")
    io_group.add_argument('-f', metavar='PATH',
                          help='Path to the csv file mapping descriptor UUIDs to '
                               'their class label. String labels are transformed '
                               'into integers for libSVM. Integers start at 1 '
                               'and are applied in the order that labels are '
                               'seen in this input file.')
    io_group.add_argument('-o', metavar='PATH',
                          help='Path to the output file to write libSVM labeled '
                               'descriptors to.')
    return parser
def get_cli_parser():
    """
    Construct the parser with nearest-neighbor computation options.

    :rtype: argparse.ArgumentParser
    """
    p = basic_cli_parser(__doc__)
    p.add_argument('-u', '--uuid-list',
                   default=None, metavar='PATH',
                   help='Path to list of UUIDs to calculate nearest '
                        'neighbors for. If empty, all UUIDs present '
                        'in the descriptor index will be used.')
    p.add_argument('-n', '--num',
                   default=10, metavar='INT', type=int,
                   help='Number of maximum nearest neighbors to return '
                        'for each UUID, defaults to retrieving 10 nearest '
                        'neighbors. Set to 0 to retrieve all nearest '
                        'neighbors.')
    return p
def cli_parser():
    """
    Build the argument parser for running a configured web application:
    application selection, server options, and debug flags.

    :rtype: argparse.ArgumentParser
    """
    parser = bin_utils.basic_cli_parser(__doc__)

    # Application selection options.
    app_group = parser.add_argument_group("Application Selection")
    app_group.add_argument('-l', '--list',
                           default=False, action="store_true",
                           help="List currently available applications "
                                "for running. More description is "
                                "included if SMQTK verbosity is "
                                "increased (-v | --debug-smqtk)")
    app_group.add_argument('-a', '--application', default=None,
                           help="Label of the web application to run.")

    # Server configuration overrides and toggles.
    server_group = parser.add_argument_group("Server options")
    server_group.add_argument('-r', '--reload',
                              action='store_true', default=False,
                              help='Turn on server reloading.')
    server_group.add_argument('-t', '--threaded',
                              action='store_true', default=False,
                              help="Turn on server multi-threading.")
    server_group.add_argument('--host', default=None,
                              help="Run host address specification override. "
                                   "This will override all other configuration "
                                   "method specifications.")
    server_group.add_argument('--port', default=None,
                              help="Run port specification override. This will "
                                   "override all other configuration method "
                                   "specifications.")
    server_group.add_argument("--use-basic-auth",
                              action="store_true", default=False,
                              help="Use global basic authentication as "
                                   "configured.")

    # Miscellaneous debugging options.
    other_group = parser.add_argument_group("Other options")
    other_group.add_argument('--debug-server',
                             action='store_true', default=False,
                             help='Turn on server debugging messages ONLY')
    other_group.add_argument('--debug-smqtk',
                             action='store_true', default=False,
                             help='Turn on SMQTK debugging messages ONLY')

    return parser
def cli_parser():
    """
    Build the CLI parser for batched descriptor computation over a list of
    input files, with checkpointing of completed files.

    :rtype: argparse.ArgumentParser
    """
    parser = basic_cli_parser(__doc__)
    parser.add_argument('-b', '--batch-size',
                        type=int, default=0, metavar='INT',
                        help="Number of files to batch together into a single "
                             "compute async call. This defines the "
                             "granularity of the checkpoint file in regards "
                             "to computation completed. If given 0, we do not "
                             "batch and will perform a single "
                             "``compute_async`` call on the configured "
                             "generator. Default batch size is 0.")
    # Typo fix in user-visible help: "If se should" -> "If we should";
    # also added the missing sentence-terminating period.
    parser.add_argument('--check-image', default=False, action='store_true',
                        help="If we should check image pixel loading before "
                             "queueing an input image for processing. If we "
                             "cannot load the image pixels via "
                             "``PIL.Image.open``, the input image is not "
                             "queued for processing.")

    # Non-config required arguments
    g_required = parser.add_argument_group("Required Arguments")
    g_required.add_argument('-f', '--file-list', type=str, default=None,
                            metavar='PATH',
                            help="Path to a file that lists data file paths. "
                                 "Paths in this file may be relative, but "
                                 "will at some point be coerced into absolute "
                                 "paths based on the current working "
                                 "directory.")
    g_required.add_argument('-p', '--completed-files', default=None,
                            metavar='PATH',
                            help='Path to a file into which we add CSV '
                                 'format lines detailing filepaths that have '
                                 'been computed from the file-list provided, '
                                 'as the UUID for that data (currently the '
                                 'SHA1 checksum of the data).')
    return parser
def cli_parser():
    """
    Construct the parser for single-file descriptor computation options.

    :rtype: argparse.ArgumentParser
    """
    p = bin_utils.basic_cli_parser(__doc__)
    p.add_argument('--overwrite',
                   action='store_true', default=False,
                   help="Force descriptor computation even if an "
                        "existing descriptor vector was "
                        "discovered based on the given content "
                        "descriptor type and data combination.")
    p.add_argument('-o', '--output-filepath',
                   help='Optional path to a file to output '
                        'feature vector to. Otherwise the feature '
                        'vector is printed to standard out. '
                        'Output is saved in numpy binary format '
                        '(.npy suffix recommended).')
    # Optional positional input file.
    p.add_argument("input_file", nargs="?",
                   help="Data file to compute descriptor on")
    return p
def cli_parser():
    """
    Build the CLI parser: query-size reporting, time constraints, and
    output location options.

    :rtype: argparse.ArgumentParser
    """
    parser = basic_cli_parser(doc_as_description(__doc__))
    parser.add_argument('-s', '--report-size', action='store_true',
                        default=False,
                        help="Report the number of elements that would be "
                             "scanned by the ElasticSearch query generated "
                             "and then exit.")
    # Help-string fix: the adjacent string literals previously rendered as
    # "UTC.Timestamp" (missing space) in both time-constraint options.
    parser.add_argument('--crawled-after', default=None,
                        help="Optional timestamp constraint to only get "
                             "content that was crawled after the given time. "
                             "Time should be in UTC. "
                             "Timestamp format like: '2016-01-01T12:00:00Z'")
    parser.add_argument('--inserted-after', default=None,
                        help="Optional timestamp constraint to only get "
                             "content that was inserted into the "
                             "ElasticSearch instance/index after the given "
                             "time. Time should be in UTC. "
                             "Timestamp format like: '2016-01-01T12:00:00Z'")
    g_output = parser.add_argument_group("Output")
    g_output.add_argument('-d', '--output-dir', metavar='PATH',
                          help='Output image directory path.')
    g_output.add_argument('-l', '--file-list', metavar='PATH',
                          help='Path to an output CSV file where downloaded '
                               'files are recorded along with their '
                               'associated CDR identifier as SHA1 checksum.')
    return parser
def cli_parser():
    """
    Construct the parser with Girder reference options: for each entity
    type (folder, item, file) an explicit-ID option and a list-file option.

    :rtype: argparse.ArgumentParser
    """
    parser = bin_utils.basic_cli_parser(__doc__)
    girder_grp = parser.add_argument_group('Girder References')
    # Each entity type gets a short-flag/long-flag ID option plus a
    # "--<name>-list" file option; only the noun and metavar differ.
    for short_flag, name, mv in (('-F', 'folder', 'FOLDER_ID'),
                                 ('-i', 'item', 'ITEM_ID'),
                                 ('-f', 'file', 'FILE_ID')):
        girder_grp.add_argument(short_flag, '--' + name,
                                nargs='*', default=[], metavar=mv,
                                help='Specify specific %s IDs' % name)
        girder_grp.add_argument('--%s-list' % name, metavar='PATH',
                                help='Path to a new-line separated file of '
                                     '%s IDs' % name)
    return parser
def cli_parser():
    """
    Construct the minimal CLI parser (standard options only, no extras).

    :rtype: argparse.ArgumentParser
    """
    parser = bin_utils.basic_cli_parser(__doc__)
    return parser
def get_cli_parser():
    """
    Construct the parser taking the IQR-session state file option.

    :rtype: argparse.ArgumentParser
    """
    p = basic_cli_parser(__doc__)
    p.add_argument('-i', '--iqr-state',
                   help="Path to the ZIP file saved from an IQR session.")
    return p
def cli_parser():
    """
    Construct the parser accepting positional input file globs.

    :rtype: argparse.ArgumentParser
    """
    p = bin_utils.basic_cli_parser(__doc__)
    # Zero or more positional input paths/globs.
    p.add_argument("input_files", metavar='GLOB', nargs='*')
    return p