Example #1
def stage2_group_codes(**args):
    ''' 
    Group codes into rows.
    args should match the names and descriptions of command line parameters,
    but unlike command line, all arguments must be present.
    '''
    # Copy args so we can archive them to a file when function is finished.
    args_copy = args.copy()
    
    # Convert arguments to local variables of the correct type.
    input_directory = args.pop('input_directory')
    field_direction = float(args.pop('field_direction'))
    output_directory = args.pop('output_directory')
    num_rows_per_pass = int(args.pop('num_rows_per_pass'))
    code_list_filepath = args.pop('code_list_filepath')
    code_modifications_filepath = args.pop('code_modifications_filepath')
    
    if len(args) > 0:
        print "Unexpected arguments provided: {}".format(args)
        return ExitReason.bad_arguments
    
    geo_images, all_codes = unpickle_stage1_output(input_directory)

    print 'Found {} codes in {} geo images.'.format(len(all_codes), len(geo_images))

    if len(geo_images) == 0 or len(all_codes) == 0:
        print "Couldn't load any geo images or codes from input directory {}".format(input_directory)
        return ExitReason.no_geo_images

    if code_modifications_filepath != 'none':
        if not os.path.exists(code_modifications_filepath):
            print "Provided code modification file {} doesn't exist".format(code_modifications_filepath)
            return ExitReason.bad_arguments
        modifications_out_directory = os.path.join(output_directory, 'modifications')
        code_modifications = parse_code_modifications_file(code_modifications_filepath)
        geo_images, all_codes = apply_code_modifications(code_modifications, geo_images, all_codes, modifications_out_directory)

    # Merge items so they're unique.  One code references other instances of that same code.
    merged_codes = merge_items(all_codes, max_distance=500)

    print '{} unique codes.'.format(len(merged_codes))
                
    row_codes = [code for code in merged_codes if code.type.lower() == 'rowcode']
    group_codes = [code for code in merged_codes if code.type.lower() == 'groupcode']
    single_codes = [code for code in merged_codes if code.type.lower() == 'singlecode']
        
    # Alternative row labeling schemes (0 and 1) are disabled below; only the
    # group-by-row-name scheme that follows is active.
    '''
    if row_labeling_scheme == 0:
        
        grouped_row_codes = group_row_codes(row_codes)
    
        if len(grouped_row_codes) == 0:
            print "No row codes found. Exiting"
            return ExitReason.no_rows
    
        display_row_info(grouped_row_codes)
            
        rows = create_rows(grouped_row_codes, field_direction)
        
        up_row_nums, back_row_nums = associate_row_numbers_with_up_back_rows()
        
        assign_rows_a_direction(rows, up_row_nums, back_row_nums)
        
        field_passes = [rows[x:x+2] for x in xrange(0, len(rows), 2)]
        
    elif row_labeling_scheme == 1:
        
        grouped_row_codes = group_row_codes_by_pass_name(row_codes)
        
        rows, field_passes = create_rows_and_field_passes_by_pass_codes(grouped_row_codes, field_direction)
        
    elif row_labeling_scheme == 2:
    '''
    
    grouped_row_codes = group_row_codes_by_row_name(row_codes)

    if len(grouped_row_codes) == 0:
        print "No row codes found. Exiting"
        return ExitReason.no_rows

    display_row_info(grouped_row_codes)
    
    rows, field_passes = create_rows_and_field_passes_by_row_codes(grouped_row_codes, field_direction, num_rows_per_pass)

    if len(rows) == 0:
        print "No complete rows found.  Exiting."
        return ExitReason.no_rows
    
    # Show which row numbers were detected.
    print sorted([row.number for row in rows])
    
    print "Calculating field positions."
    calculate_field_positions_and_range(rows, merged_codes, all_codes, geo_images)
    for code in merged_codes:
        code.refresh_fields()
    
    print "Calculating projections to nearest row"
    codes_with_projections = calculate_projection_to_nearest_row(group_codes + single_codes, rows)
            
    print "Creating segments"
    group_segments, special_segments = create_segments(codes_with_projections, rows)
        
    print "Organizing segments"
    start_segments, middle_segments, end_segments, single_segments = organize_group_segments(group_segments)
    
    if len(middle_segments) > 0:
        print "Middle segments that span entire row aren't supported right now. Exiting"
        return ExitReason.operation_not_supported
    
    print "Forming groups"
    groups = complete_groups(end_segments, single_segments, field_passes, num_rows_per_pass)
        
    handle_single_segments(single_segments, groups)
    
    # Add in information about max number of plants and optional alternate ids.
    if code_list_filepath != 'none':
        if not os.path.exists(code_list_filepath):
            print "Code list file doesn't exist {}".format(code_list_filepath)
            return ExitReason.bad_arguments
        else:
            code_listings, alternate_ids_included = parse_code_listing_file(code_list_filepath)
            print "Applying code listings"
            apply_code_listings(code_listings, groups, alternate_ids_included)
        
    display_segment_info(group_segments, special_segments, groups)
    
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)
 
    dump_filename = "stage2_output_{}_{}.s2".format(int(geo_images[0].image_time), int(geo_images[-1].image_time))
    print "Serializing {} rows and {} geo images to {}.".format(len(rows), len(geo_images), dump_filename)
    pickle_results(dump_filename, output_directory, rows, geo_images)
    
    # Write arguments out to file for archiving purposes.
    args_filename = "stage2_args_{}_{}.csv".format(int(geo_images[0].image_time), int(geo_images[-1].image_time))
    write_args_to_file(args_filename, output_directory, args_copy)
    
    return ExitReason.success
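
# A minimal usage sketch (not part of the original source): per the docstring,
# stage2_group_codes() takes keyword arguments named after the command line
# parameters, every one of them required, passed as strings.  All values below
# are placeholder assumptions, not values from the original project.
def example_run_stage2():
    exit_reason = stage2_group_codes(
        input_directory='./stage1_out',          # directory holding stage 1 output files
        field_direction='0.0',                   # assumed field heading
        output_directory='./stage2_out',
        num_rows_per_pass='2',
        code_list_filepath='none',               # 'none' skips the code listing step
        code_modifications_filepath='none')      # 'none' skips code modifications
    return exit_reason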

def check_code_separation():
    '''
    Load stage 1 output and report how far apart multiple references of the
    same code are from each other.  (The function name and the parser setup
    below are assumed; only the 'output_directory' argument appeared in the
    original fragment.)
    '''
    parser = argparse.ArgumentParser(description='Sanity check separation between references of the same code.')
    parser.add_argument('input_directory', help='directory containing stage 1 output files')
    parser.add_argument('output_directory', help='where to write output files')
    
    args = parser.parse_args()
    input_directory = args.input_directory
    out_directory = args.output_directory

    geo_images, all_codes = unpickle_stage1_output(input_directory)

    print 'Found {} codes in {} geo images.'.format(len(all_codes), len(geo_images))

    if len(geo_images) == 0 or len(all_codes) == 0:
        print "Couldn't load any geo images or codes from input directory {}".format(input_directory)
        sys.exit(ExitReason.no_geo_images)

    # Merge items so they're unique.  One code references other instances of that same code.
    merged_codes = merge_items(all_codes, max_distance=500)

    print '{} unique codes.'.format(len(merged_codes))

    # Sanity check that multiple references of the same code are all close to each other.
    largest_separation = 0
    sum_separation = 0
    sum_separation_count = 0
    for code in merged_codes:
        for code_ref in code.all_refs:
            diff = position_difference(code.position, code_ref.position)
            sum_separation += diff
            sum_separation_count += 1
            if diff > largest_separation:
                largest_separation = diff

    # Summary output (assumed; the original fragment ended before reporting results).
    if sum_separation_count > 0:
        print "Largest separation between references of the same code: {}".format(largest_separation)
        print "Average separation: {}".format(sum_separation / sum_separation_count)
                
def stage1_extract_codes(**args):
    ''' 
    Extract codes from set of images and write out results to file.
    args should match the names and descriptions of command line parameters,
    but unlike command line, all arguments must be present.
    '''
    # Copy args so we can archive them to a file when function is finished.
    args_copy = args.copy()
    
    # Convert arguments to local variables of the correct type.
    image_directory = args.pop('image_directory')
    image_geo_file = args.pop('image_geo_file')
    out_directory = args.pop('output_directory')
    postfix_id = args.pop('postfix_id')
    code_min_size = float(args.pop('code_min_size'))
    code_max_size = float(args.pop('code_max_size'))
    provided_resolution = float(args.pop('resolution'))
    camera_height = float(args.pop('camera_height'))
    use_marked_image = args.pop('marked_image').lower() == 'true'
    debug_start = args.pop('debug_start')
    debug_stop = args.pop('debug_stop')

    if len(args) > 0:
        print "Unexpected arguments provided: {}".format(args)
        return ExitReason.bad_arguments
    
    if code_max_size <= 0 or code_min_size <= 0:
        print "\nError: code sizes must be greater than zero.\n"
        return ExitReason.bad_arguments
        
    if code_max_size <= code_min_size:
        print "\nError: Max code size must be greater than min.\n"
        return ExitReason.bad_arguments
    
    if provided_resolution <= 0:
        print "\nError: Resolution must be greater than zero."
        return ExitReason.bad_arguments
    
    if camera_height <= 0:
        print "\nError: Specified camera height must be greater than zero."
        return ExitReason.bad_arguments
        
    image_filenames = list_images(image_directory, ['tiff', 'tif', 'jpg', 'jpeg', 'png'])
                        
    if len(image_filenames) == 0:
        print "No images found in directory: {}".format(image_directory)
        return ExitReason.no_images
    
    print "\nFound {} images to process".format(len(image_filenames))
    
    geo_images = parse_geo_file(image_geo_file, provided_resolution, camera_height)
            
    print "Parsed {} geo images".format(len(geo_images))
    
    if len(geo_images) == 0:
        print "No geo images. Exiting."
        return ExitReason.no_geo_images
    
    # Look for start/stop filenames so user doesn't have to process all images.
    start_geo_index, stop_geo_index = get_subset_of_geo_images(geo_images, debug_start, debug_stop)
        
    print "Processing geo images {} through {}".format(start_geo_index, stop_geo_index)
    geo_images = geo_images[start_geo_index : stop_geo_index+1]
        
    print "Sorting images by timestamp."
    geo_images = sorted(geo_images, key=lambda image: image.image_time)
    
    geo_images, missing_image_count = verify_geo_images(geo_images, image_filenames)
           
    if missing_image_count > 0:
        print "Warning {} geo images do not exist and will be skipped.".format(missing_image_count)

    if len(geo_images) == 0:
        print "No images match up with any geo images. Exiting."
        return ExitReason.no_geo_images

    missed_code_finder = MissedCodeFinder()
    code_finder = CodeFinder(code_min_size, code_max_size, missed_code_finder)
    
    ImageWriter.level = ImageWriter.NORMAL
    
    # Write images out to subdirectory to keep separated from pickled results.
    image_out_directory = os.path.join(out_directory, 'images/')
    if not os.path.exists(image_out_directory):
        os.makedirs(image_out_directory)

    # Find and extract all codes from images.
    codes = []
    try:
        for i, geo_image in enumerate(geo_images):
            print "Analyzing image {} [{}/{}]".format(geo_image.file_name, i+1, len(geo_images))
            newly_found_codes = process_geo_image(geo_image, [code_finder], image_directory, image_out_directory, use_marked_image)
            geo_image.items["codes"] = newly_found_codes
            for code in newly_found_codes:
                print "Found {}: {}".format(code.type, code.name)
            codes += newly_found_codes
    except KeyboardInterrupt:
        print "\nKeyboard interrupt detected."
        answer = raw_input("\nType y to save results or anything else to quit: ").strip()
        if answer.lower() != 'y':
            return ExitReason.user_interrupt
        
    # Write possibly missed codes out to separate directory
    missed_codes_out_directory = os.path.join(out_directory, 'missed_codes_{}/'.format(postfix_id))
    if not os.path.exists(missed_codes_out_directory):
        os.makedirs(missed_codes_out_directory)
        
    print "Writing out missed codes"
    missed_code_filename = "missed_codes_{}.txt".format(postfix_id)
    missed_code_finder.write_out_missed_codes(codes, missed_code_filename, missed_codes_out_directory)
  
    dump_filename = "stage1_output_{}_{}_{}.s1".format(postfix_id, int(geo_images[0].image_time), int(geo_image.image_time))
    print "Serializing {} geo images and {} codes to {}.".format(len(geo_images), len(codes), dump_filename)
    pickle_results(dump_filename, out_directory, geo_images, codes)
    
    # Display code stats for user.
    merged_codes = merge_items(codes, max_distance=500)
    if len(merged_codes) == 0:
        print "No codes found."
    else:
        print "There were {} codes found and {} were unique.  Average code is in {} images.".format(len(codes), len(merged_codes), float(len(codes)) / len(merged_codes))
        print "Merged codes not being saved.  Just for user information."

    # Write arguments out to file for archiving purposes.
    args_filename = "stage1_args_{}_{}_{}.csv".format(postfix_id, int(geo_images[0].image_time), int(geo_image.image_time))
    write_args_to_file(args_filename, out_directory, args_copy)
        
    return ExitReason.success
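
# A minimal usage sketch (not part of the original source): stage1_extract_codes()
# follows the same convention as stage2_group_codes() -- keyword arguments named
# after the command line parameters, all required, passed as strings.  Every value
# below is a placeholder assumption.
def example_run_stage1():
    exit_reason = stage1_extract_codes(
        image_directory='./field_images',
        image_geo_file='./field_images/positions.csv',
        output_directory='./stage1_out',
        postfix_id='run1',
        code_min_size='0.02',            # converted to float inside the function
        code_max_size='0.08',
        resolution='0.05',
        camera_height='2.0',
        marked_image='false',            # 'true' (case-insensitive) enables marked images
        debug_start='none',              # start/stop filenames to limit which images are processed
        debug_stop='none')
    return exit_reason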