Example #1
def main():
    """Download and serialize training data."""
    args = create_parser().parse_args()
    naip_state, naip_year = args.naip_path
    download_and_serialize(
        args.number_of_naips, args.randomize_naips, naip_state, naip_year,
        args.extract_type, args.bands, args.tile_size,
        args.pixels_to_fatten_roads, args.label_data_files, args.tile_overlap)
Example #2
def main():
    """Download and serialize training data."""
    args = create_parser().parse_args()
    naip_state, naip_year = args.naip_path
    download_and_serialize(args.number_of_naips, args.randomize_naips,
                           naip_state, naip_year, args.extract_type,
                           args.bands, args.tile_size,
                           args.pixels_to_fatten_roads, args.label_data_files,
                           args.tile_overlap)
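
Neither of the first two examples shows create_parser(). The sketch below is a minimal argparse guess at what it might define; every flag name, type, and default is inferred from the attributes the snippets read off args (and from the constants that appear in the later examples), not taken from the project's actual parser.

import argparse


def create_parser():
    """Hypothetical sketch of the parser the examples above rely on."""
    parser = argparse.ArgumentParser(
        description="Download and serialize NAIP training data.")
    # args.naip_path is unpacked into (state, year), so it takes two values
    parser.add_argument("--naip-path", nargs=2, metavar=("STATE", "YEAR"),
                        default=["de", "2013"])
    parser.add_argument("--number-of-naips", type=int, default=10)
    parser.add_argument("--randomize-naips", action="store_true")
    parser.add_argument("--extract-type", default="highway")
    parser.add_argument("--bands", nargs=4, type=int, default=[1, 1, 1, 1])
    parser.add_argument("--tile-size", type=int, default=64)
    parser.add_argument("--pixels-to-fatten-roads", type=int, default=3)
    parser.add_argument("--label-data-files", nargs="+", default=[])
    parser.add_argument("--tile-overlap", type=int, default=1)
    return parser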
Example #3
def main():
    """Analyze each state and publish results to deeposm.org."""

    naip_year = 2013
    naip_states = {'de': ['http://download.geofabrik.de/north-america/us/delaware-latest.osm.pbf'],
                   'ia': ['http://download.geofabrik.de/north-america/us/iowa-latest.osm.pbf'],
                   'me': ['http://download.geofabrik.de/north-america/us/maine-latest.osm.pbf']
                   }
    number_of_naips = 10

    extract_type = 'highway'
    bands = [1, 1, 1, 1]
    tile_size = 64
    pixels_to_fatten_roads = 3
    tile_overlap = 1

    neural_net = 'two_layer_relu_conv'
    number_of_epochs = 10
    randomize_naips = False

    for state in naip_states:
        filenames = naip_states[state]
        raster_data_paths = download_and_serialize(number_of_naips,
                                                   randomize_naips,
                                                   state,
                                                   naip_year,
                                                   extract_type,
                                                   bands,
                                                   tile_size,
                                                   pixels_to_fatten_roads,
                                                   filenames,
                                                   tile_overlap)
        model = train_on_cached_data(neural_net, number_of_epochs)
        with open(CACHE_PATH + METADATA_PATH, 'rb') as infile:  # pickle needs binary mode
            training_info = pickle.load(infile)
        post_findings_to_s3(raster_data_paths, model, training_info, training_info['bands'], False)

    requests.get('http://www.deeposm.org/refresh_findings/')
Example #4
def main():
    """Analyze each state and publish results to deeposm.org."""

    naip_year = 2013
    naip_states = {
        'de': ['http://download.geofabrik.de/north-america/us/delaware-latest.osm.pbf'],
        'ia': ['http://download.geofabrik.de/north-america/us/iowa-latest.osm.pbf'],
        'me': ['http://download.geofabrik.de/north-america/us/maine-latest.osm.pbf'],
    }
    number_of_naips = 175

    extract_type = 'highway'
    bands = [1, 1, 1, 1]
    tile_size = 64
    pixels_to_fatten_roads = 3
    tile_overlap = 1
    naip_extent = None  # WMIV 3/31/17
    neural_net = 'two_layer_relu_conv'
    number_of_epochs = 10
    randomize_naips = False

    for state in naip_states:
        filenames = naip_states[state]
        raster_data_paths = download_and_serialize(
            number_of_naips, randomize_naips, state, naip_year, naip_extent,
            extract_type, bands, tile_size, pixels_to_fatten_roads, filenames,
            tile_overlap)
        model = train_on_cached_data(neural_net, number_of_epochs)
        with open(METADATA_FILE, 'rb') as infile:  # pickle needs binary mode
            training_info = pickle.load(infile)
        post_findings_to_s3(raster_data_paths, model, training_info,
                            training_info['bands'], False)

    requests.get('http://www.deeposm.org/refresh_findings/')
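
Both of the last two examples read the cached training metadata back with pickle, which requires the file to be opened in binary mode ('rb'). A minimal round-trip sketch follows; the path and dictionary contents are hypothetical, chosen only to mirror the values used above.

import pickle

metadata_path = "/tmp/training_metadata.pickle"  # hypothetical path for illustration

# Write side: whatever the training step caches, e.g. the band configuration.
training_info = {"bands": [1, 1, 1, 1], "tile_size": 64}
with open(metadata_path, "wb") as outfile:       # binary mode for pickle.dump
    pickle.dump(training_info, outfile)

# Read side: mirrors the examples above; 'rb' is required (plain 'r' fails on Python 3).
with open(metadata_path, "rb") as infile:
    training_info = pickle.load(infile)
print(training_info["bands"])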