def remove_repeat_detections(inputFile, outputFile, filteringDir, options=None):
    """
    Programmatic entry point for the post-filtering step of repeat detection
    elimination.

    Loads the detection index produced by a previous filtering pass (the
    detectionIndex file inside [filteringDir]) and re-runs
    find_repeat_detections on [inputFile], writing the filtered results
    to [outputFile].

    Args:
        inputFile (str): path to an existing .json results file
        outputFile (str): path to which filtered results should be written
        filteringDir (str): existing folder containing the detection index
            file from a prior filtering run
        options (RepeatDetectionOptions, optional): options to use; a default
            options object is created when this is None.  The filterFileToLoad
            and bWriteFilteringFolder fields are overwritten here either way.
    """

    assert os.path.isfile(inputFile), "Can't find file {}".format(inputFile)
    assert os.path.isdir(filteringDir), "Can't find folder {}".format(
        filteringDir)

    if options is None:
        options = repeat_detections_core.RepeatDetectionOptions()

    # Point the core routine at the previously-generated detection index,
    # and disable writing a (new) filtering folder: this call consumes one.
    index_file = os.path.join(
        filteringDir, repeat_detections_core.DETECTION_INDEX_FILE_NAME)
    options.bWriteFilteringFolder = False
    options.filterFileToLoad = index_file

    repeat_detections_core.find_repeat_detections(inputFile, outputFile,
                                                  options)
# # Everything after this should be considered mostly manual, and no longer includes # looping over folders. # #%% Repeat detection elimination, phase 1 # Deliberately leaving these imports here, rather than at the top, because this cell is not # typically executed from api.batch_processing.postprocessing.repeat_detection_elimination import repeat_detections_core import path_utils job_index = 0 options = repeat_detections_core.RepeatDetectionOptions() options.confidenceMin = 0.6 options.confidenceMax = 1.01 options.iouThreshold = 0.85 options.occurrenceThreshold = 10 options.maxSuspiciousDetectionSize = 0.2 options.bRenderHtml = False options.imageBase = read_only_sas_url rde_string = 'rde_{:.2f}_{:.2f}_{}_{:.2f}'.format( options.confidenceMin, options.iouThreshold, options.occurrenceThreshold, options.maxSuspiciousDetectionSize) options.outputBase = os.path.join(filename_base, rde_string) options.filenameReplacements = {'': ''}
def main():
    """
    Command-line entry point: parses arguments into a RepeatDetectionOptions
    object and runs find_repeat_detections on the input file.
    """

    # Instantiated only to supply per-field defaults to argparse below
    defaultOptions = repeat_detections_core.RepeatDetectionOptions()

    parser = argparse.ArgumentParser()
    parser.add_argument('inputFile')
    parser.add_argument(
        '--outputFile',
        action='store',
        type=str,
        default=None,
        help=
        ".json file to write filtered results to... don't use this if you're going to do manual review of the repeat detection images"
    )
    parser.add_argument(
        '--imageBase',
        action='store',
        type=str,
        default='',
        help=
        'Image base dir, relevant if renderHtml is True or if omitFilteringFolder is not set'
    )
    parser.add_argument('--outputBase',
                        action='store',
                        type=str,
                        default='',
                        help='HTML or filtering folder output dir')
    parser.add_argument(
        '--filterFileToLoad',
        action='store',
        type=str,
        default=
        '',  # checks for string length so default needs to be the empty string
        help=
        'Path to detectionIndex.json, which should be inside a folder of images that are manually verified to _not_ contain valid animals'
    )

    # Confidence window: detections outside [confidenceMin, confidenceMax]
    # are ignored by the analysis
    parser.add_argument(
        '--confidenceMax',
        action='store',
        type=float,
        default=defaultOptions.confidenceMax,
        help=
        'Detection confidence threshold; don\'t process anything above this')
    parser.add_argument(
        '--confidenceMin',
        action='store',
        type=float,
        default=defaultOptions.confidenceMin,
        help=
        'Detection confidence threshold; don\'t process anything below this')

    parser.add_argument(
        '--iouThreshold',
        action='store',
        type=float,
        default=defaultOptions.iouThreshold,
        help=
        'Detections with IOUs greater than this are considered "the same detection"'
    )
    parser.add_argument(
        '--occurrenceThreshold',
        action='store',
        type=int,
        default=defaultOptions.occurrenceThreshold,
        help=
        'More than this many near-identical detections in a group (e.g. a folder) is considered suspicious'
    )
    parser.add_argument(
        '--nWorkers',
        action='store',
        type=int,
        default=defaultOptions.nWorkers,
        help='Level of parallelism for rendering and IOU computation')
    parser.add_argument(
        '--maxSuspiciousDetectionSize',
        action='store',
        type=float,
        default=defaultOptions.maxSuspiciousDetectionSize,
        help=
        'Detections larger than this fraction of image area are not considered suspicious'
    )

    parser.add_argument('--renderHtml',
                        action='store_true',
                        dest='bRenderHtml',
                        help='Should we render HTML output?')
    # Note: store_false, so *passing* this flag disables the filtering folder
    parser.add_argument(
        '--omitFilteringFolder',
        action='store_false',
        dest='bWriteFilteringFolder',
        help=
        'Should we create a folder of rendered detections for post-filtering?')
    parser.add_argument(
        '--excludeClasses',
        action='store',
        nargs='+',
        type=int,
        default=defaultOptions.excludeClasses,
        help=
        'List of classes (ints) to exclude from analysis, separated by spaces')

    parser.add_argument(
        '--nDirLevelsFromLeaf',
        default=0,
        type=int,
        help=
        'Number of levels from the leaf folders to use for repeat detection (0 == leaves)'
    )

    # Debug limits; -1 means "no limit"
    parser.add_argument('--debugMaxDir', action='store', type=int, default=-1)
    parser.add_argument('--debugMaxRenderDir',
                        action='store',
                        type=int,
                        default=-1)
    parser.add_argument('--debugMaxRenderDetection',
                        action='store',
                        type=int,
                        default=-1)
    parser.add_argument('--debugMaxRenderInstance',
                        action='store',
                        type=int,
                        default=-1)

    # store_false: passing these flags turns parallelism off
    parser.add_argument('--forceSerialComparisons',
                        action='store_false',
                        dest='bParallelizeComparisons')
    parser.add_argument('--forceSerialRendering',
                        action='store_false',
                        dest='bParallelizeRendering')

    # With no arguments at all, print help rather than failing on the
    # missing positional argument
    if len(sys.argv[1:]) == 0:
        parser.print_help()
        parser.exit()

    args = parser.parse_args()

    # Convert to an options object
    options = repeat_detections_core.RepeatDetectionOptions()
    ct_utils.args_to_object(args, options)

    repeat_detections_core.find_repeat_detections(args.inputFile,
                                                  args.outputFile, options)