import os

import numpy as np

# Project-specific helpers referenced in these examples (ImageWriter, LeafFinder,
# BlueStickFinder, TagFinder, CodeFinder, MissedCodeFinder, ExitReason, and the
# various geo/image utility functions) are assumed to come from the surrounding
# package; their imports are not shown in this listing.


def stage3_extract_plant_parts(**args):
    ''' 
    Extract possible plant parts to be clustered in the next stage.
    args should match the names and descriptions of command line parameters,
    but unlike command line, all arguments must be present.
    '''
    # Copy args so we can archive them to a file when function is finished.
    args_copy = args.copy()
    
    # Convert arguments to local variables of the correct type.
    input_filepath = args.pop('input_filepath')
    out_directory = args.pop('output_directory')
    pad = float(args.pop('pad'))
    special_pad = float(args.pop('special_pad'))
    min_leaf_size = float(args.pop('min_leaf_size'))
    max_leaf_size = float(args.pop('max_leaf_size'))
    min_stick_part_size = float(args.pop('min_stick_part_size'))
    max_stick_part_size = float(args.pop('max_stick_part_size'))
    min_tag_size = float(args.pop('min_tag_size'))
    max_tag_size = float(args.pop('max_tag_size'))
    disable_sticks = args.pop('disable_sticks').lower() == 'true'
    disable_tags = args.pop('disable_tags').lower() == 'true'
    use_marked_image = args.pop('marked_image').lower() == 'true'
    debug_start = args.pop('debug_start')
    debug_stop = args.pop('debug_stop')
    
    if len(args) > 0:
        print "Unexpected arguments provided: {}".format(args)
        return ExitReason.bad_arguments

    rows, geo_images = unpickle_stage2_output(input_filepath)
    
    if len(rows) == 0 or len(geo_images) == 0:
        print "No rows or no geo images could be loaded from {}".format(input_filepath)
        return ExitReason.no_rows
    
    ImageWriter.level = ImageWriter.NORMAL
    
    # Write images out to subdirectory to keep separated from pickled results.
    image_out_directory = os.path.join(out_directory, 'images/')
    if not os.path.exists(image_out_directory):
        os.makedirs(image_out_directory)
    
    rows = sorted(rows, key=lambda r: r.number)
    
    # Sort geo images so they're processed by time.
    geo_images = sorted(geo_images, key=lambda img: img.image_time)
    
    # Look for start/stop filenames so user doesn't have to process all images.
    start_geo_index, stop_geo_index = get_subset_of_geo_images(geo_images, debug_start, debug_stop)
        
    print "Processing geo images {} through {}".format(start_geo_index, stop_geo_index)
    geo_images = geo_images[start_geo_index : stop_geo_index+1]

    num_images_not_in_segment = 0
    num_images_without_path = 0

    leaf_finder = LeafFinder(min_leaf_size, max_leaf_size)
    
    if disable_sticks:
        stick_finder = None
    else:
        stick_finder = BlueStickFinder(min_stick_part_size, max_stick_part_size)
        
    if disable_tags:
        tag_finder = None
    else:
        tag_finder = TagFinder(min_tag_size, max_tag_size)

    all_segments = all_segments_from_rows(rows)
    
    for segment in all_segments:
        if segment.is_special:
            segment.lrud = calculate_special_segment_lrud(segment, special_pad)
        else:
            segment.lrud = calculate_segment_lrud(segment, pad)
    
    num_matched = [] # keep track of how many segments each image maps to.
    num_leaves = [] # how many leaves are in each processed image
    num_sticks = [] # how many sticks are in each processed image
    num_tags = [] # how many tags are in each processed image
    
    for k, geo_image in enumerate(geo_images):
        
        if not geo_image.file_path:
            num_images_without_path += 1
            continue
        
        # Check if image east/west/north/south (lrud) overlaps with any segments.
        image_lrud = calculate_image_lrud(geo_image)
        overlapping_segments = [seg for seg in all_segments if is_overlapping_segment(image_lrud, seg)]
        
        if len(overlapping_segments) == 0:
            num_images_not_in_segment += 1
            continue
        
        print "{} [{} / {}]".format(geo_image.file_name, k, len(geo_images))
            
        leaves, sticks, tags = process_geo_image_to_find_plant_parts(geo_image, leaf_finder, stick_finder, tag_finder, image_out_directory, use_marked_image)
        
        # Remove any false positive items that came from codes.
        geo_codes = geo_image.items['codes'] 
        leaves = dont_overlap_with_items(geo_codes, leaves)
        sticks = dont_overlap_with_items(geo_codes, sticks)
        tags = dont_overlap_with_items(geo_codes, tags)
        
        geo_image.items['leaves'] = leaves
        geo_image.items['stick_parts'] = sticks
        geo_image.items['tags'] = tags
        
        print "Found {} leaves, {} stick parts and {} tags".format(len(leaves), len(sticks), len(tags))

        for segment in overlapping_segments:
            segment.geo_images.append(geo_image)
         
        num_matched.append(len(overlapping_segments))
        num_leaves.append(len(leaves))
        num_sticks.append(len(sticks))
        num_tags.append(len(tags))

    print "\nProcessed {}".format(len(num_matched))
    print "Not in segment {}".format(num_images_not_in_segment)
    print "Invalid path {}".format(num_images_without_path)

    print "Matched images were in average of {} segments".format(np.mean(num_matched))
    print "Average of {} leaves, {} stick parts and {} tags per image".format(np.mean(num_leaves), np.mean(num_sticks), np.mean(num_tags))

    if not os.path.exists(out_directory):
        os.makedirs(out_directory)

    # Pickle rows so the next stage can load them.
    dump_filename = "stage3_output.s3"
    print "\nSerializing {} rows to {}".format(len(rows), dump_filename)
    pickle_results(dump_filename, out_directory, rows)
    
    # Write arguments out to file for archiving purposes.
    write_args_to_file("stage3_args.csv", out_directory, args_copy)

    return ExitReason.success
Example 2
def stage1_extract_codes(**args):
    ''' 
    Extract codes from a set of images and write out the results to a file.
    args should match the names and descriptions of command line parameters,
    but unlike command line, all arguments must be present.
    '''
    # Copy args so we can archive them to a file when function is finished.
    args_copy = args.copy()
    
    # Convert arguments to local variables of the correct type.
    image_directory = args.pop('image_directory')
    image_geo_file = args.pop('image_geo_file')
    out_directory = args.pop('output_directory')
    postfix_id = args.pop('postfix_id')
    code_min_size = float(args.pop('code_min_size'))
    code_max_size = float(args.pop('code_max_size'))
    provided_resolution = float(args.pop('resolution'))
    camera_height = float(args.pop('camera_height'))
    use_marked_image = args.pop('marked_image').lower() == 'true'
    debug_start = args.pop('debug_start')
    debug_stop = args.pop('debug_stop')

    if len(args) > 0:
        print "Unexpected arguments provided: {}".format(args)
        return ExitReason.bad_arguments
    
    if code_max_size <= 0 or code_min_size <= 0:
        print "\nError: code sizes must be greater than zero.\n"
        return ExitReason.bad_arguments
        
    if code_max_size <= code_min_size:
        print "\nError: Max code size must be greater than min.\n"
        return ExitReason.bad_arguments
    
    if provided_resolution <= 0:
        print "\nError: Resolution must be greater than zero."
        return ExitReason.bad_arguments
    
    if camera_height <= 0:
        print "\nError: Specified camera height must be greater than zero."
        return ExitReason.bad_arguments
        
    image_filenames = list_images(image_directory, ['tiff', 'tif', 'jpg', 'jpeg', 'png'])
                        
    if len(image_filenames) == 0:
        print "No images found in directory: {}".format(image_directory)
        return ExitReason.no_images
    
    print "\nFound {} images to process".format(len(image_filenames))
    
    geo_images = parse_geo_file(image_geo_file, provided_resolution, camera_height)
            
    print "Parsed {} geo images".format(len(geo_images))
    
    if len(geo_images) == 0:
        print "No geo images. Exiting."
        return ExitReason.no_geo_images
    
    # Look for start/stop filenames so user doesn't have to process all images.
    start_geo_index, stop_geo_index = get_subset_of_geo_images(geo_images, debug_start, debug_stop)
        
    print "Processing geo images {} through {}".format(start_geo_index, stop_geo_index)
    geo_images = geo_images[start_geo_index : stop_geo_index+1]
        
    print "Sorting images by timestamp."
    geo_images = sorted(geo_images, key=lambda image: image.image_time)
    
    geo_images, missing_image_count = verify_geo_images(geo_images, image_filenames)
           
    if missing_image_count > 0:
        print "Warning {} geo images do not exist and will be skipped.".format(missing_image_count)

    if len(geo_images) == 0:
        print "No images match up with any geo images. Exiting."
        return ExitReason.no_geo_images

    missed_code_finder = MissedCodeFinder()
    code_finder = CodeFinder(code_min_size, code_max_size, missed_code_finder)
    
    ImageWriter.level = ImageWriter.NORMAL
    
    # Write images out to subdirectory to keep separated from pickled results.
    image_out_directory = os.path.join(out_directory, 'images/')
    if not os.path.exists(image_out_directory):
        os.makedirs(image_out_directory)

    # Find and extract all codes from images.
    codes = []
    try:
        for i, geo_image in enumerate(geo_images):
            print "Analyzing image {} [{}/{}]".format(geo_image.file_name, i+1, len(geo_images))
            newly_found_codes = process_geo_image(geo_image, [code_finder], image_directory, image_out_directory, use_marked_image)
            geo_image.items["codes"] = newly_found_codes
            for code in newly_found_codes:
                print "Found {}: {}".format(code.type, code.name)
            codes += newly_found_codes
    except KeyboardInterrupt:
        print "\nKeyboard interrupt detected."
        answer = raw_input("\nType y to save results or anything else to quit: ").strip()
        if answer.lower() != 'y':
            return ExitReason.user_interrupt
        
    # Write possibly missed codes out to a separate directory.
    missed_codes_out_directory = os.path.join(out_directory, 'missed_codes_{}/'.format(postfix_id))
    if not os.path.exists(missed_codes_out_directory):
        os.makedirs(missed_codes_out_directory)
        
    print "Writing out missed codes"
    missed_code_filename = "missed_codes_{}.txt".format(postfix_id)
    missed_code_finder.write_out_missed_codes(codes, missed_code_filename, missed_codes_out_directory)
  
    dump_filename = "stage1_output_{}_{}_{}.s1".format(postfix_id, int(geo_images[0].image_time), int(geo_image.image_time))
    print "Serializing {} geo images and {} codes to {}.".format(len(geo_images), len(codes), dump_filename)
    pickle_results(dump_filename, out_directory, geo_images, codes)
    
    # Display code stats for user.
    merged_codes = merge_items(codes, max_distance=500)
    if len(merged_codes) == 0:
        print "No codes found."
    else:
        print "There were {} codes found and {} were unique.  Average code is in {} images.".format(len(codes), len(merged_codes), float(len(codes)) / len(merged_codes))
        print "Merged codes not being saved.  Just for user information."

    # Write arguments out to file for archiving purposes.
    args_filename = "stage1_args_{}_{}_{}.csv".format(postfix_id, int(geo_images[0].image_time), int(geo_image.image_time))
    write_args_to_file(args_filename, out_directory, args_copy)
        
    return ExitReason.success
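
# Minimal usage sketch (not part of the original example). Keyword names mirror the
# arguments popped by stage1_extract_codes; values are strings, as they would be on
# the command line. Paths, sizes, resolution and camera height are hypothetical
# placeholders.
result = stage1_extract_codes(
    image_directory='/data/field_images/',       # hypothetical image directory
    image_geo_file='/data/image_positions.csv',  # hypothetical geo file
    output_directory='stage1_results/',          # hypothetical output directory
    postfix_id='run1',                           # hypothetical run identifier
    code_min_size='2',
    code_max_size='8',
    resolution='0.05',
    camera_height='2.0',
    marked_image='false',
    debug_start='',                              # placeholder start filename filter
    debug_stop='',                               # placeholder stop filename filter
)
if result == ExitReason.success:
    print "Stage 1 finished successfully."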