def main(): """Use local data to train the neural net, probably made by bin/create_training_data.py.""" parser = create_parser() args = parser.parse_args() with open(CACHE_PATH + 'raster_data_paths.pickle', 'r') as infile: raster_data_paths = pickle.load(infile) test_images, model = train_on_cached_data(raster_data_paths, args.neural_net, args.bands, args.tile_size, args.number_of_epochs) if not args.omit_findings: for path in raster_data_paths: print path labels, images = load_training_tiles(path) if len(labels) == 0 or len(images) == 0: print("WARNING, there is a borked naip image file") continue false_positives, false_negatives, fp_images, fn_images = list_findings(labels, images, model) path_parts = path.split('/') filename = path_parts[len(path_parts) - 1] print("FINDINGS: {} false pos and {} false neg, of {} tiles, from {}".format( len(false_positives), len(false_negatives), len(images), filename)) render_results_for_analysis([path], false_positives, fp_images, args.bands, args.tile_size) if args.render_results: predictions = predictions_for_tiles(test_images, model) render_results_for_analysis(raster_data_paths, predictions, test_images, args.bands, args.tile_size)
def render_errors(raster_data_paths, model, training_info, render_results):
    """Render JPEGs showing findings."""
    for path in raster_data_paths:
        labels, images = load_training_tiles(path)
        if len(labels) == 0 or len(images) == 0:
            print("WARNING: skipping {}, borked NAIP image file".format(path))
            continue
        false_positives, fp_images = list_findings(labels, images, model)
        filename = os.path.basename(path)
        print("FINDINGS: {} false pos of {} tiles, from {}".format(
            len(false_positives), len(images), filename))
        render_results_for_analysis([path], false_positives, fp_images,
                                    training_info['bands'],
                                    training_info['tile_size'])
def main(): """Use local data to train the neural net, probably made by bin/create_training_data.py.""" parser = create_parser() args = parser.parse_args() with open(CACHE_PATH + 'raster_data_paths.pickle', 'r') as infile: raster_data_paths = pickle.load(infile) test_images, model = train_on_cached_data(raster_data_paths, args.neural_net, args.bands, args.tile_size, args.number_of_epochs) if not args.omit_findings: findings = [] for path in raster_data_paths: print path labels, images = load_training_tiles(path) if len(labels) == 0 or len(images) == 0: print("WARNING, there is a borked naip image file") continue false_positives, false_negatives, fp_images, fn_images = list_findings( labels, images, model) path_parts = path.split('/') filename = path_parts[len(path_parts) - 1] print( "FINDINGS: {} false pos and {} false neg, of {} tiles, from {}" .format(len(false_positives), len(false_negatives), len(images), filename)) # render JPEGs showing findings render_results_for_analysis([path], false_positives, fp_images, args.bands, args.tile_size) # combine findings for all NAIP images analyzed [ findings.append(f) for f in tag_with_locations( fp_images, false_positives, args.tile_size) ] # dump combined findings to disk as a pickle with open(CACHE_PATH + 'findings.pickle', 'w') as outfile: pickle.dump(findings, outfile) # push pickle to S3 s3_client = boto3.client('s3') s3_client.upload_file(CACHE_PATH + 'findings.pickle', 'deeposm', 'findings.pickle') if args.render_results: predictions = predictions_for_tiles(test_images, model) render_results_for_analysis(raster_data_paths, predictions, test_images, args.bands, args.tile_size)
def main(): """Use local data to train the neural net, probably made by bin/create_training_data.py.""" parser = create_parser() args = parser.parse_args() with open(CACHE_PATH + 'raster_data_paths.pickle', 'r') as infile: raster_data_paths = pickle.load(infile) test_images, model = train_on_cached_data(raster_data_paths, args.neural_net, args.bands, args.tile_size, args.number_of_epochs) if not args.omit_findings: findings = [] for path in raster_data_paths: print path labels, images = load_training_tiles(path) if len(labels) == 0 or len(images) == 0: print("WARNING, there is a borked naip image file") continue false_positives, false_negatives, fp_images, fn_images = list_findings(labels, images, model) path_parts = path.split('/') filename = path_parts[len(path_parts) - 1] print("FINDINGS: {} false pos and {} false neg, of {} tiles, from {}".format( len(false_positives), len(false_negatives), len(images), filename)) # render JPEGs showing findings render_results_for_analysis([path], false_positives, fp_images, args.bands, args.tile_size) # combine findings for all NAIP images analyzed [findings.append(f) for f in tag_with_locations(fp_images, false_positives, args.tile_size)] # dump combined findings to disk as a pickle with open(CACHE_PATH + 'findings.pickle', 'w') as outfile: pickle.dump(findings, outfile) # push pickle to S3 s3_client = boto3.client('s3') s3_client.upload_file(CACHE_PATH + 'findings.pickle', 'deeposm', 'findings.pickle') if args.render_results: predictions = predictions_for_tiles(test_images, model) render_results_for_analysis(raster_data_paths, predictions, test_images, args.bands, args.tile_size)
def post_findings_to_s3(raster_data_paths, model, training_info, bands, render_results):
    """Aggregate findings from all NAIPs into a pickled list, post to S3."""
    findings = []
    for path in raster_data_paths:
        labels, images = load_all_training_tiles(path, bands)
        if len(labels) == 0 or len(images) == 0:
            print("WARNING: skipping {}, borked NAIP image file".format(path))
            continue
        false_positives, fp_images = list_findings(labels, images, model)
        filename = os.path.basename(path)
        print("FINDINGS: {} false pos of {} tiles, from {}".format(
            len(false_positives), len(images), filename))
        if render_results:
            # render JPEGs showing findings
            render_results_for_analysis([path], false_positives, fp_images,
                                        training_info['bands'],
                                        training_info['tile_size'])
        # combine findings for all NAIP images analyzed for the region
        findings.extend(tag_with_locations(fp_images, false_positives,
                                           training_info['tile_size'],
                                           training_info['naip_state']))

    # dump combined findings to disk as a pickle
    try:
        os.mkdir(CACHE_PATH + training_info['naip_state'])
    except OSError:
        pass  # directory already exists
    naip_path_in_cache_dir = training_info['naip_state'] + '/' + 'findings.pickle'
    local_path = CACHE_PATH + naip_path_in_cache_dir
    with open(local_path, 'wb') as outfile:
        pickle.dump(findings, outfile)

    # push the pickle to S3
    s3_client = boto3.client('s3')
    s3_client.upload_file(local_path, FINDINGS_S3_BUCKET, naip_path_in_cache_dir)
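
# For illustration only: a hedged sketch of how a downstream consumer might
# retrieve the findings that post_findings_to_s3 uploads. The key layout
# mirrors the uploader above, but fetch_findings itself is a hypothetical
# helper, not part of this module.
def fetch_findings(bucket, naip_state, local_dir='/tmp/'):
    """Download and unpickle the findings list that post_findings_to_s3 posted."""
    # bucket should match the uploader's FINDINGS_S3_BUCKET; the
    # '<state>/findings.pickle' key mirrors naip_path_in_cache_dir above
    key = naip_state + '/findings.pickle'
    local_path = local_dir + 'findings.pickle'
    s3_client = boto3.client('s3')
    s3_client.download_file(bucket, key, local_path)
    with open(local_path, 'rb') as infile:
        return pickle.load(infile)


# As excerpted, this module defines main() but never invokes it; if it is run
# as a script, it presumably ends with the standard entry-point guard. A
# minimal sketch, assuming nothing beyond the functions above:
if __name__ == '__main__':
    main()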