Exemplo n.º 1
0
def main():
    """Generate prediction image tiles (PNG + georeferenced GTiff) per raster.

    Reads cached raster paths, training metadata, and model metadata from
    pickle files, reloads the trained model, and for each raster writes a
    per-tile prediction image both as a PNG and as a GeoTIFF carrying the
    source raster's projection.
    """
    # Pickle data is binary; text mode ('r') fails under Python 3.
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)

    with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:
        training_info = pickle.load(infile)

    with open(CACHE_PATH + 'model_metadata.pickle', 'rb') as infile:
        model_info = pickle.load(infile)

    model = load_model(model_info['neural_net_type'], model_info['tile_size'],
                       len(model_info['bands']))
    # NOTE(review): tiles are loaded with training_info['bands'] while the
    # model was built from model_info['bands'] — confirm these always match.
    bands = training_info['bands']

    for path in raster_data_paths:
        labels, images = load_all_training_tiles(path, bands)

        filename = os.path.basename(path)

        wp_preds = list_findings_3(labels, images, model)
        # Each label carries its tile's (x, y) origin at indices 1 and 2;
        # the unique counts give the prediction grid dimensions.
        start_xs = [label[1] for label in labels]
        start_ys = [label[2] for label in labels]
        x_values = numpy.unique(start_xs).size
        y_values = numpy.unique(start_ys).size
        img = numpy.reshape(wp_preds,
                            (x_values, y_values))  # wrong orientation!

        # PNG preview: scale [0, 1] predictions to 8-bit grayscale-as-RGB.
        im_uint8 = Image.fromarray(img * 255).convert('RGB')
        png_file = os.path.splitext(filename)[0] + "_tf_pred.png"
        rotate_and_flip(im_uint8).save(os.path.join(tf_preds_dir, png_file))

        # Re-read the NAIP only to recover its projection/geotransform.
        raster_dataset, bands_data = read_naip(path, bands)  # expensive!
        raster_prj = raster_dataset.GetProjection()
        epsg = osr.SpatialReference(wkt=raster_prj).GetAttrValue(
            'AUTHORITY', 1)
        drv = gdal.GetDriverByName("GTiff")
        geotiff_file = os.path.splitext(filename)[0] + "_tf_pred.tif"
        ds = drv.Create(os.path.join(tf_preds_dir, geotiff_file), x_values,
                        y_values, 1, gdal.GDT_Float32)
        geo_transform = raster_dataset.GetGeoTransform()
        # 64.0 px/tile pixel size; origin shifted past the NAIP edge buffer.
        ds.SetGeoTransform((geo_transform[0] + NAIP_PIXEL_BUFFER, 64.0, 0.0,
                            geo_transform[3] - NAIP_PIXEL_BUFFER, 0.0, -64.0))
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(int(epsg))
        ds.SetProjection(srs.ExportToWkt())
        im_flt32 = Image.fromarray(img)
        ds.GetRasterBand(1).WriteArray(numpy.asarray(
            rotate_and_flip(im_flt32)))
        ds.FlushCache()
        del ds  # GDAL flushes/closes the dataset on deletion
Exemplo n.º 2
0
def main():
    """Post test results to an S3 bucket.

    Reloads cached raster paths, training metadata, and model metadata,
    rebuilds the model, and delegates uploading to post_findings_to_s3.
    """
    # Pickle data is binary; text mode ('r') fails under Python 3.
    with open(CACHE_PATH + 'raster_data_paths.pickle', 'rb') as infile:
        raster_data_paths = pickle.load(infile)

    with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:
        training_info = pickle.load(infile)

    with open(CACHE_PATH + MODEL_METADATA_PATH, 'rb') as infile:
        model_info = pickle.load(infile)

    model = load_model(model_info['neural_net_type'], model_info['tile_size'],
                       len(model_info['bands']))
    post_findings_to_s3(raster_data_paths, model, training_info, model_info['bands'], False)
Exemplo n.º 3
0
def main():
    """Download NAIPs and train a model, one state at a time.

    For each configured state: download NAIP imagery, cache the raster
    paths, and create tiled training data from the matching OSM extract.
    Then train on the cached data and post findings to S3.
    """
    # randomize_naips = False
    naip_year = 2013
    naip_states = {'de': ['http://download.geofabrik.de/north-america/us/delaware-latest.osm.pbf'],
                   'me': ['http://download.geofabrik.de/north-america/us/maine-latest.osm.pbf']}
    number_of_naips = 25

    extract_type = 'highway'
    bands = [1, 1, 1, 1]
    tile_size = 64
    pixels_to_fatten_roads = 3
    tile_overlap = 1

    neural_net = 'one_layer_relu_conv'
    number_of_epochs = 25

    # BUG FIX: iterating the dict directly yields only the state keys, so the
    # (state, filenames) unpacking was broken — .items() is required.
    for state, filenames in naip_states.items():
        naiper = NAIPDownloader(number_of_naips, state, naip_year)
        raster_data_paths = naiper.download_naips()
        cache_paths(raster_data_paths)
        create_tiled_training_data(raster_data_paths, extract_type, bands, tile_size,
                                   pixels_to_fatten_roads, filenames,
                                   tile_overlap, state)

    # Pickle data is binary; text mode ('r') fails under Python 3.
    with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:
        training_info = pickle.load(infile)

    # NOTE(review): raster_data_paths here is only the LAST state's paths
    # left over from the loop above — confirm that is intentional.
    test_images, model = train_on_cached_data(raster_data_paths, neural_net,
                                              training_info['bands'], training_info['tile_size'],
                                              number_of_epochs)

    with open(CACHE_PATH + MODEL_METADATA_PATH, 'rb') as infile:
        model_info = pickle.load(infile)

    model = load_model(model_info['neural_net_type'], model_info['tile_size'],
                       len(model_info['bands']))
    post_findings_to_s3(raster_data_paths, model, training_info)