Example #1
def main():
    """Use local data to train the neural net, probably made by bin/create_training_data.py."""
    parser = create_parser()
    args = parser.parse_args()
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)
    test_images, model = train_on_cached_data(raster_data_paths, args.neural_net, args.bands,
                                              args.tile_size, args.number_of_epochs)
    if not args.omit_findings:
        for path in raster_data_paths:
            print(path)
            labels, images = load_training_tiles(path)
            if len(labels) == 0 or len(images) == 0:
                print("WARNING, there is a borked naip image file")
                continue
            false_positives, false_negatives, fp_images, fn_images = list_findings(labels, images,
                                                                                   model)
            filename = path.split('/')[-1]
            print("FINDINGS: {} false pos and {} false neg, of {} tiles, from {}".format(
                len(false_positives), len(false_negatives), len(images), filename))
            render_results_for_analysis([path], false_positives, fp_images, args.bands,
                                        args.tile_size)

    if args.render_results:
        predictions = predictions_for_tiles(test_images, model)
        render_results_for_analysis(raster_data_paths, predictions, test_images, args.bands,
                                    args.tile_size)
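
Most of these examples call a create_parser() helper that is not shown. Below is a minimal sketch, assuming an argparse-based CLI; the flag names, types, and defaults are inferred from the args attributes the examples read and from the constants in Example #8, not taken from the project itself.

import argparse


def create_parser():
    """Hypothetical parser matching the attributes the examples read from args."""
    parser = argparse.ArgumentParser(
        description='Train the neural net on cached training data.')
    parser.add_argument('--neural-net', default='one_layer_relu_conv',
                        help='network architecture to train')
    parser.add_argument('--number-of-epochs', type=int, default=10,
                        help='training epochs')
    parser.add_argument('--bands', type=int, nargs=4, default=[1, 1, 1, 1],
                        help='which of the four NAIP bands to use (1 = on)')
    parser.add_argument('--tile-size', type=int, default=64,
                        help='tile edge length in pixels')
    parser.add_argument('--omit-findings', action='store_true',
                        help='skip listing false positives/negatives per NAIP')
    parser.add_argument('--render-results', action='store_true',
                        help='render JPEGs of predictions for analysis')
    return parser

With a parser like this, argparse maps each --flag-name to the corresponding args.flag_name attribute the examples use (e.g. --tile-size 64 becomes args.tile_size).
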
Example #2
def main():
    """Use local data to train the neural net, probably made by bin/create_training_data.py."""
    parser = create_parser()
    args = parser.parse_args()
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)
    test_images, model = train_on_cached_data(raster_data_paths, args.neural_net, args.bands,
                                              args.tile_size)
    if args.render_results:
        predictions = predictions_for_tiles(test_images, model)
        render_results_for_analysis(raster_data_paths, predictions, test_images, args.bands,
                                    args.tile_size)
Example #3
def main():
    """Use local data to train the neural net, probably made by bin/create_training_data.py."""
    parser = create_parser()
    args = parser.parse_args()
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)
    test_images, model = train_on_cached_data(raster_data_paths,
                                              args.neural_net, args.bands,
                                              args.tile_size)
    if args.render_results:
        predictions = predictions_for_tiles(test_images, model)
        render_results_for_analysis(raster_data_paths, predictions,
                                    test_images, args.bands, args.tile_size)
Example #4
def main():
    """Use local data to train the neural net, probably made by bin/create_training_data.py."""
    parser = create_parser()
    args = parser.parse_args()
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)
    test_images, model = train_on_cached_data(raster_data_paths,
                                              args.neural_net, args.bands,
                                              args.tile_size,
                                              args.number_of_epochs)
    if not args.omit_findings:
        findings = []
        for path in raster_data_paths:
            print(path)
            labels, images = load_training_tiles(path)
            if len(labels) == 0 or len(images) == 0:
                print("WARNING, there is a borked naip image file")
                continue
            false_positives, false_negatives, fp_images, fn_images = list_findings(
                labels, images, model)
            filename = path.split('/')[-1]
            print(
                "FINDINGS: {} false pos and {} false neg, of {} tiles, from {}"
                .format(len(false_positives), len(false_negatives),
                        len(images), filename))
            # render JPEGs showing findings
            render_results_for_analysis([path], false_positives, fp_images,
                                        args.bands, args.tile_size)

            # combine findings for all NAIP images analyzed
            findings.extend(
                tag_with_locations(fp_images, false_positives,
                                   args.tile_size))

        # dump combined findings to disk as a pickle
        with open(CACHE_PATH + 'findings.pickle', 'wb') as outfile:
            pickle.dump(findings, outfile)

        # push pickle to S3
        s3_client = boto3.client('s3')
        s3_client.upload_file(CACHE_PATH + 'findings.pickle', 'deeposm',
                              'findings.pickle')

    if args.render_results:
        predictions = predictions_for_tiles(test_images, model)
        render_results_for_analysis(raster_data_paths, predictions,
                                    test_images, args.bands, args.tile_size)
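
The findings pickle uploaded above can be pulled back down with boto3's matching download call. A minimal consumer sketch, reusing the 'deeposm' bucket and 'findings.pickle' key from the upload call; the local path is an arbitrary choice:

import pickle

import boto3

# Mirror of the upload_file call above: bucket 'deeposm', key 'findings.pickle'.
s3_client = boto3.client('s3')
s3_client.download_file('deeposm', 'findings.pickle', '/tmp/findings.pickle')
with open('/tmp/findings.pickle', 'rb') as infile:
    findings = pickle.load(infile)
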
Example #5
def main():
    """Use local data to train the neural net, probably made by bin/create_training_data.py."""
    parser = create_parser()
    args = parser.parse_args()
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)

    with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:
        training_info = pickle.load(infile)

    test_images, model = train_on_cached_data(raster_data_paths, args.neural_net,
                                              training_info['bands'], training_info['tile_size'],
                                              args.number_of_epochs)

    with open(CACHE_PATH + 'model.pickle', 'wb') as outfile:
        pickle.dump(model, outfile)
Example #6
def main():
    """Use local data to train the neural net, probably made by bin/create_training_data.py."""
    parser = create_parser()
    args = parser.parse_args()
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)

    with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:
        training_info = pickle.load(infile)

    test_images, model = train_on_cached_data(raster_data_paths,
                                              args.neural_net,
                                              training_info['bands'],
                                              training_info['tile_size'],
                                              args.number_of_epochs)

    with open(CACHE_PATH + 'model.pickle', 'wb') as outfile:
        pickle.dump(model, outfile)
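
Reading the model back is the mirror of the dump above; note the binary mode, which pickle requires. Whether a trained model object survives pickling depends on the backing framework, which may be why Example #8 below instead rebuilds the model with load_model() from serialized metadata. A minimal sketch, assuming the same CACHE_PATH constant the examples use:

import pickle

# Read the trained model back from the cache written above.
with open(CACHE_PATH + 'model.pickle', 'rb') as infile:
    model = pickle.load(infile)
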
Example #7
def main():
    """Use local data to train the neural net, probably made by bin/create_training_data.py."""
    parser = create_parser()
    args = parser.parse_args()
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)
    test_images, model = train_on_cached_data(raster_data_paths, args.neural_net, args.bands,
                                              args.tile_size, args.number_of_epochs)
    if not args.omit_findings:
        findings = []
        for path in raster_data_paths:
            print(path)
            labels, images = load_training_tiles(path)
            if len(labels) == 0 or len(images) == 0:
                print("WARNING, there is a borked naip image file")
                continue
            false_positives, false_negatives, fp_images, fn_images = list_findings(labels, images,
                                                                                   model)
            filename = path.split('/')[-1]
            print("FINDINGS: {} false pos and {} false neg, of {} tiles, from {}".format(
                len(false_positives), len(false_negatives), len(images), filename))
            # render JPEGs showing findings
            render_results_for_analysis([path], false_positives, fp_images, args.bands,
                                        args.tile_size)

            # combine findings for all NAIP images analyzed
            findings.extend(tag_with_locations(fp_images, false_positives,
                                               args.tile_size))

        # dump combined findings to disk as a pickle
        with open(CACHE_PATH + 'findings.pickle', 'wb') as outfile:
            pickle.dump(findings, outfile)

        # push pickle to S3
        s3_client = boto3.client('s3')
        s3_client.upload_file(CACHE_PATH + 'findings.pickle', 'deeposm', 'findings.pickle')

    if args.render_results:
        predictions = predictions_for_tiles(test_images, model)
        render_results_for_analysis(raster_data_paths, predictions, test_images, args.bands,
                                    args.tile_size)
Example #8
def main():
    """Do each state one by one."""
    # randomize_naips = False
    naip_year = 2013
    naip_states = {'de': ['http://download.geofabrik.de/north-america/us/delaware-latest.osm.pbf'],
                   'me': ['http://download.geofabrik.de/north-america/us/maine-latest.osm.pbf']}
    number_of_naips = 25

    extract_type = 'highway'
    bands = [1, 1, 1, 1]
    tile_size = 64
    pixels_to_fatten_roads = 3
    tile_overlap = 1

    neural_net = 'one_layer_relu_conv'
    number_of_epochs = 25

    for state, filenames in naip_states.items():
        naiper = NAIPDownloader(number_of_naips, state, naip_year)
        raster_data_paths = naiper.download_naips()
        cache_paths(raster_data_paths)
        create_tiled_training_data(raster_data_paths, extract_type, bands, tile_size,
                                   pixels_to_fatten_roads, filenames,
                                   tile_overlap, state)

    with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:
        training_info = pickle.load(infile)

    test_images, model = train_on_cached_data(raster_data_paths, neural_net,
                                              training_info['bands'], training_info['tile_size'],
                                              number_of_epochs)

    with open(CACHE_PATH + MODEL_METADATA_PATH, 'rb') as infile:
        model_info = pickle.load(infile)

    model = load_model(model_info['neural_net_type'], model_info['tile_size'],
                       len(model_info['bands']))
    post_findings_to_s3(raster_data_paths, model, training_info)
Example #9
def main():
    """Analyze each state and publish results to deeposm.org."""

    naip_year = 2013
    naip_states = {'de': ['http://download.geofabrik.de/north-america/us/delaware-latest.osm.pbf'],
                   'ia': ['http://download.geofabrik.de/north-america/us/iowa-latest.osm.pbf'],
                   'me': ['http://download.geofabrik.de/north-america/us/maine-latest.osm.pbf']
                   }
    number_of_naips = 10

    extract_type = 'highway'
    bands = [1, 1, 1, 1]
    tile_size = 64
    pixels_to_fatten_roads = 3
    tile_overlap = 1

    neural_net = 'two_layer_relu_conv'
    number_of_epochs = 10
    randomize_naips = False

    for state in naip_states:
        filenames = naip_states[state]
        raster_data_paths = download_and_serialize(number_of_naips,
                                                   randomize_naips,
                                                   state,
                                                   naip_year,
                                                   extract_type,
                                                   bands,
                                                   tile_size,
                                                   pixels_to_fatten_roads,
                                                   filenames,
                                                   tile_overlap)
        model = train_on_cached_data(neural_net, number_of_epochs)
        with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:
            training_info = pickle.load(infile)
        post_findings_to_s3(raster_data_paths, model, training_info, training_info['bands'], False)

    requests.get('http://www.deeposm.org/refresh_findings/')
Example #10
def main():
    """Analyze each state and publish results to deeposm.org."""

    naip_year = 2013
    naip_states = {
        'de': ['http://download.geofabrik.de/north-america/us/delaware-latest.osm.pbf'],
        'ia': ['http://download.geofabrik.de/north-america/us/iowa-latest.osm.pbf'],
        'me': ['http://download.geofabrik.de/north-america/us/maine-latest.osm.pbf']
    }
    number_of_naips = 175

    extract_type = 'highway'
    bands = [1, 1, 1, 1]
    tile_size = 64
    pixels_to_fatten_roads = 3
    tile_overlap = 1
    naip_extent = None  # WMIV 3/31/17
    neural_net = 'two_layer_relu_conv'
    number_of_epochs = 10
    randomize_naips = False

    for state in naip_states:
        filenames = naip_states[state]
        raster_data_paths = download_and_serialize(
            number_of_naips, randomize_naips, state, naip_year, naip_extent,
            extract_type, bands, tile_size, pixels_to_fatten_roads, filenames,
            tile_overlap)
        model = train_on_cached_data(neural_net, number_of_epochs)
        with open(METADATA_FILE, 'rb') as infile:
            training_info = pickle.load(infile)
        post_findings_to_s3(raster_data_paths, model, training_info,
                            training_info['bands'], False)

    requests.get('http://www.deeposm.org/refresh_findings/')
Example #11
def main():
    """Use local data to train the neural net, probably made by bin/create_training_data.py."""
    parser = create_parser()
    args = parser.parse_args()
    train_on_cached_data(args.neural_net, args.number_of_epochs)