Example #1
def generate_new_street_umap(number_of_snapshots=len(bev_db.SNAPSHOTS),
                             include_found_objects=False,
                             gkz_starts_with=""):
    ids = {}
    skz_found = {}
    gkz_starts_with += "%"
    con = bev_db.get_db_conn()  # always use newest db to check found status
    for skz, found in con.execute("SELECT SKZ, FOUND FROM STRASSE"):
        if found is None:
            skz_found[skz] = bev_db.SearchStatus.NOT_FOUND
        else:
            skz_found[skz] = found
    umap = Umap()
    number_of_streets = 0
    number_of_missing_streets = 0
    for (old, new) in zip(bev_db.SNAPSHOTS[-number_of_snapshots:],
                          bev_db.SNAPSHOTS[-number_of_snapshots + 1:]):
        print(old, new)
        if old not in ids:
            con = bev_db.get_db_conn(old)
            ids[old] = set()
            for row in con.execute("SELECT SKZ FROM STRASSE"):
                ids[old].add(row[0])
        con = bev_db.get_db_conn(new)
        if new not in ids:
            ids[new] = set()
            for row in con.execute("SELECT SKZ FROM STRASSE"):
                ids[new].add(row[0])
        new_ids = ids[new] - ids[old]
        cur = con.cursor()
        query = """SELECT GEMEINDE.GKZ, GEMEINDE.GEMEINDENAME, STRASSE.SKZ, STRASSE.STRASSENNAME, 
            COUNT(ADRESSE.ADRCD), MIN(LAT), MIN(LON), MAX(LAT), MAX(LON) 
            FROM STRASSE JOIN ADRESSE ON ADRESSE.SKZ = STRASSE.SKZ JOIN GEMEINDE ON GEMEINDE.GKZ = ADRESSE.GKZ 
            WHERE STRASSE.GKZ LIKE ? AND STRASSE.SKZ IN ({}) AND ADRESSE.HAUSNRZAHL1 != ""
            GROUP BY STRASSE.SKZ HAVING COUNT(ADRESSE.ADRCD) > 1 ORDER BY 1, 4 DESC""".format(
            ",".join("?" * len(new_ids)))
        parameters = [gkz_starts_with] + list(new_ids)
        for row in cur.execute(query, parameters):
            gkz, gemeindename, skz, strassenname, count, min_lat, min_lon, max_lat, max_lon = row
            if ignore_streetname(strassenname):
                continue
            bezirkname = get_bezirk(gkz)
            bundesland = get_bundesland(gkz)
            layer = "%s: %s" % (bundesland, bezirkname)
            try:
                area_size = projection.get_area_size(min_lon, max_lon, min_lat,
                                                     max_lat)
            except TypeError:
                continue
            try:
                adr_per_km2 = count / area_size
            except ZeroDivisionError:
                adr_per_km2 = 0
            josm_link = umap.get_josm_link(min_lon,
                                           max_lon,
                                           min_lat,
                                           max_lat,
                                           area_size=area_size)
            properties = {
                "name":
                "#%s %s (%s)" % (gkz[3:], strassenname, gemeindename),
                "description":
                "%s\nNeu mit Stichtag %s\n%s Adressen\nSKZ %s\nGröße: %4.2f km²\nAdr./km²: %s"
                % (josm_link, bev_db.format_key_date(new), count, skz,
                   area_size, int(adr_per_km2))
            }
            feature = Feature(properties=properties,
                              geometry=Polygon([[[min_lon, min_lat],
                                                 [min_lon, max_lat],
                                                 [max_lon, max_lat],
                                                 [max_lon, min_lat]]]))
            status = skz_found.get(skz)
            if status != bev_db.SearchStatus.FOUND:
                if status == bev_db.SearchStatus.UNDER_CONSTRUCTION:
                    umap.add_feature(feature, layer, {"color": "Orange"})
                else:
                    number_of_missing_streets += 1
                    umap.add_feature(feature, layer, {"color": "Red"})
            elif include_found_objects:
                umap.add_feature(feature, layer)
            number_of_streets += 1
    if gkz_starts_with == "":
        filename = "new_streets.umap"
    else:
        filename = "new_streets_%s.umap" % (gkz_starts_with)
    umap.dump(filename)
    print("%s/%s streets missing" %
          (number_of_missing_streets, number_of_streets))
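A minimal, standalone sketch of the placeholder-per-value pattern the query above relies on: one "?" is emitted per id, so the IN clause stays fully parameterized (table and data here are illustrative only):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE strasse (skz INTEGER, name TEXT)")
con.executemany("INSERT INTO strasse VALUES (?, ?)",
                [(1, "A-Gasse"), (2, "B-Weg"), (3, "C-Platz")])
new_ids = {1, 3}
# one "?" per value keeps the IN clause parameterized
query = "SELECT skz, name FROM strasse WHERE skz IN ({})".format(
    ",".join("?" * len(new_ids)))
print(con.execute(query, list(new_ids)).fetchall())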
Example #2
def write_geojson(df, filename=None, geomtype='linestring', drop_na=True):
    """
    convert dataframe with coords series to geojson format
    :param df: target dataframe
    :param filename: optional path of new file to contain geojson
    :param geomtype: geometry type [linestring, point, polygon]
    :param drop_na: whether to remove properties with None values
    :return: geojson.FeatureCollection

    >>> from swmmio.examples import philly
    >>> geoj = write_geojson(philly.links.dataframe, drop_na=True)
    >>> print(json.dumps(geoj['features'][0]['properties'], indent=2))
    {
      "InletNode": "J1-025",
      "OutletNode": "J1-026",
      "Length": 309.456216,
      "Roughness": 0.014,
      "InOffset": 0,
      "OutOffset": 0.0,
      "InitFlow": 0,
      "MaxFlow": 0,
      "Shape": "CIRCULAR",
      "Geom1": 1.25,
      "Geom2": 0,
      "Geom3": 0,
      "Geom4": 0,
      "Barrels": 1,
      "Name": "J1-025.1"
    }
    >>> print(json.dumps(geoj['features'][0]['geometry'], indent=2))
    {
      "type": "LineString",
      "coordinates": [
        [
          2746229.223,
          1118867.764
        ],
        [
          2746461.473,
          1118663.257
        ]
      ]
    }
    """

    # CONVERT THE DF INTO JSON
    df['Name'] = df.index  # add a name column (we won't have the index otherwise)
    records = json.loads(df.to_json(orient='records'))

    # ITERATE THROUGH THE RECORDS AND CREATE GEOJSON OBJECTS
    features = []
    for rec in records:

        coordinates = rec['coords']
        del rec['coords']  # delete the coords so they aren't in the properties
        if drop_na:
            rec = {k: v for k, v in rec.items() if v is not None}
        latlngs = coordinates

        if geomtype == 'linestring':
            geometry = LineString(latlngs)
        elif geomtype == 'point':
            geometry = Point(latlngs)
        elif geomtype == 'polygon':
            geometry = Polygon([latlngs])
        else:
            raise ValueError("geomtype must be 'linestring', 'point' or 'polygon'")

        feature = Feature(geometry=geometry, properties=rec)
        features.append(feature)

    if filename is not None:
        with open(filename, 'w') as f:
            f.write(json.dumps(FeatureCollection(features)))
        return filename

    else:
        return FeatureCollection(features)
Example #3
File: utils.py Project: WFP-VAM/HRM
def points_to_polygon(minlon, minlat, maxlon, maxlat):
    from geojson import Polygon
    square = Polygon([[(minlon, minlat), (maxlon, minlat), (maxlon, maxlat),
                       (minlon, maxlat), (minlon, minlat)]])
    return square
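A quick usage sketch (the coordinates are illustrative; note that the ring's first and last points coincide, as GeoJSON requires for a closed linear ring):

from geojson import dumps

square = points_to_polygon(-3.74, 40.40, -3.66, 40.52)
print(dumps(square, sort_keys=True))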
Example #4
def hexagons(north, south, east, west, radial, outfile):

    params('hexagons', north, south, east, west, radial)
    my_os = os.name
    if my_os == 'posix':
        # cmd_text = '/usr/bin/ogr2ogr'
        slash = '/'
    else:
        # cmd_text = 'c:\\OSGeo4W64\\bin\\ogr2ogr.exe'
        slash = '\\'
    # init bits
    point_list = []
    g_array = []  # array of geojson formatted geometry elements
    tabular_list = []  # array of all polygons and tabular columns
    layer_dict = {'Bounds': {'Australia': {'North': north, 'South': south,
        'West': west, 'East': east}}}
    layer_dict['Param'] = {}
    layer_dict['Param']['side_km'] = radial
    layer_dict['Param']['epsg'] = 4326
    layer_dict['Param']['shape'] = 'hexagon'
    layer_dict['Hexagon'] = {}
    layer_dict['Hexagon']['short'] = 0.707108
    layer_dict['Hexagon']['long'] = 1

    bounds_lat_min = north
    bounds_lat_max = south
    bounds_lon_max = east
    bounds_lon_min = west

    hor_seq = [layer_dict['Hexagon']['short'], layer_dict['Hexagon']['short'],
    layer_dict['Hexagon']['short'], layer_dict['Hexagon']['short']]

    vert_seq = [layer_dict['Hexagon']['short'], layer_dict['Hexagon']['long'],
    layer_dict['Hexagon']['short'], layer_dict['Hexagon']['long']]

    h_line_list = horizontal(east, north,
                            west, south,
                            hor_seq, radial)
    max_h = len(h_line_list)

    v_line_list = vertical(east, north,
                              west, south,
                              vert_seq, radial)
    max_v = len(v_line_list)

    intersect_list = intersections(h_line_list, max_h, v_line_list, max_v)
    
    lat_offset = 4
    top_left = 0
    poly_row_count = int(max_v / (len(hor_seq)))
    rem_lat = max_v % (lat_offset + len(hor_seq))
    layer_dict['Row_1'] = {}
    layer_dict['Row_1']['lat_offset'] = lat_offset
    layer_dict['Row_1']['poly_row_count'] = poly_row_count
    layer_dict['Row_1']['remain_lat'] = rem_lat

    print('first row starting from {0}, {1} hexagons, {2} '
          'latitude line(s) remaining'.format(top_left, poly_row_count, rem_lat))

    inc_by_rem = True
    inc_adj = 0
    if rem_lat in (0, 1, 2, 3, 4, 5, 6, 7):

        if rem_lat in (2, 5, 6, 7):
            inc_by_rem = True
            inc_adj = -4
        if rem_lat in (1, 3):
            inc_by_rem = True
            inc_adj = 0
        if rem_lat in (0, 4):
            inc_by_rem = False
            inc_adj = 0

        print('\n4/7 deriving hexagon polygons from intersection data')
        last_lat_row = 0
        hexagon = 0
        row = 1
        while (top_left < (max_h) * (max_v)):
            vertex = [1 + top_left, 2 + top_left, max_v + 3 + top_left,
                      (max_v * 2) + 2 + top_left, (max_v * 2) + 1 + top_left,
                      max_v + top_left]
            try:
                poly_coords = [intersect_list[vertex[0]],
                               intersect_list[vertex[1]],
                               intersect_list[vertex[2]],
                               intersect_list[vertex[3]],
                               intersect_list[vertex[4]],
                               intersect_list[vertex[5]],
                               intersect_list[vertex[0]]]
                centre_lat = (intersect_list[vertex[0]][1]
                              + (intersect_list[vertex[5]][1]
                                 - intersect_list[vertex[0]][1]) / 2)
                centre_lon = (intersect_list[vertex[0]][0]
                              + (intersect_list[vertex[5]][0]
                                 - intersect_list[vertex[0]][0]) / 2)

                if centre_lat != last_lat_row or last_lat_row == 0:
                    bounds_n = intersect_list[vertex[0]][1]
                    bounds_s = intersect_list[vertex[2]][1]
                    bounds_e = intersect_list[vertex[2]][0]
                    bounds_w = intersect_list[vertex[5]][0]
                    last_lat_row = centre_lat
                    geopoly = Polygon([poly_coords])
                    hexagon += 1
                    # start = (intersect_list[vertex[0]][1],
                    # intersect_list[vertex[0]][0])
                    # end = (intersect_list[vertex[1]][1],
                    # intersect_list[vertex[1]][0])
                    # len_radial = geodesic(start,end).km
                    # estimate polygon area (regular hexagon, empirically scaled)
                    est_area = (((3 * sqrt(3)) / 2) * pow(radial, 2)) * 0.945
                    geopoly = Feature(geometry=geopoly, properties={
                        "p": hexagon, "row": row, "lat": centre_lat,
                        "lon": centre_lon, "N": bounds_n, "S": bounds_s,
                        "E": bounds_e, "W": bounds_w, "est_area": est_area})
                    if bounds_e > bounds_w:
                        for i in range(0, 5):
                            point_list.append(
                                [hexagon, str(intersect_list[vertex[i]][0])
                                 + str(intersect_list[vertex[i]][1])])
                        # append geojson geometry definition to list
                        g_array.append(geopoly)
                        # tabular dataset: polygon plus tabular columns
                        tabular_line = [top_left, row, centre_lat, centre_lon,
                                        bounds_n, bounds_s, bounds_e, bounds_w,
                                        est_area]
                        tabular_list.append(tabular_line)
            except IndexError:
                pass  # vertex index ran past the grid edge; skip this cell

            last_row = row
            last_lat_row = centre_lat
            row = int(1 + int(hexagon / poly_row_count))
            top_left += lat_offset
            if row != last_row:
                top_left += inc_adj
                if inc_by_rem:
                    top_left += rem_lat
                if row % 2 == 0:
                    top_left += 2
                if row & 1:
                    top_left += -2

        print('\n5/7 geojson dataset of {0} derived hexagon polygons'
              .format(len(g_array)))
        boxes_geojson = FeatureCollection(g_array)
        # convert merged geojson features to geojson feature geohex_geojson
        g_array = []  # release g_array - array of geojson geometry elements

        print('writing geojson formatted hexagon dataset to file: {0}.json'
              .format(outfile))
        # write geojson layer to file
        with open('geojson{slash}{outfile}_layer.json'
                  .format(outfile=outfile, slash=slash), 'w') as myfile:
            myfile.write(str(boxes_geojson))

        print('\n6/7 tabular dataset of {0} lines of hexagon polygon data'
        .format(len(tabular_list)))
        print('writing tabular dataset to file: {0}_dataset.csv'
        .format(outfile))
        point_df = pd.DataFrame(point_list)
        point_df.columns = ['poly', 'latlong']
        point_df.to_csv('csv{slash}{outfile}_points.csv'
        .format(outfile=outfile, slash=slash), sep=',')
        point_df_a = point_df  # make copy of dataframe
        process_point_df = pd.merge(point_df, point_df_a, on='latlong')
        # merge columns of same dataframe on concatenated latlong
        process_point_df = process_point_df[(process_point_df['poly_x']
        != process_point_df['poly_y'])]  # remove self references
        output_point_df = process_point_df[['poly_x', 'poly_y']].copy().sort_values(by=['poly_x']).drop_duplicates()
        # keep only polygon references and filter the output

        output_point_df.to_csv('csv{slash}{outfile}_neighbours.csv'.format(outfile=outfile, slash=slash), sep=',', index = False)
        #print(output_point_df['poly_y',0])


        tabular_df = pd.DataFrame(tabular_list)
        #convert tabular array to tabular data frame
        tabular_df.columns = ['poly', 'row', 'lat', 'long', 'N',
        'S', 'E', 'W', 'area']
        layer_dict['Bounds']['Dataset'] = {}
        #update layer_dict with dataset bounds
        layer_dict['Bounds']['Dataset']['North'] = tabular_df['N'].max()
        layer_dict['Bounds']['Dataset']['South'] = tabular_df['S'].min()
        layer_dict['Bounds']['Dataset']['East'] = tabular_df['E'].max()
        layer_dict['Bounds']['Dataset']['West'] = tabular_df['W'].min()
        tabular_df.to_csv('csv{slash}{outfile}_dataset.csv'.format(outfile=outfile,slash=slash), sep=',', index = False)

        print('\n7/7 hexagons json metadata written to file: '
              '{0}_metadata.json'.format(outfile))
        # write json metadata for the layer
        with open('metadata{slash}{outfile}_metadata.json'
                  .format(outfile=outfile, slash=slash), 'w') as myfile:
            myfile.write(json.dumps(layer_dict))

        to_shp_tab(outfile, 'hexagons')
        ref_files()

    print('\n')
    print('The End')  # hexagons
Example #5
def box_to_json_rep(box: SlfBox) -> Polygon:
    ps = list_point_tuples(box.to_polygon())
    return Polygon([ps])
Example #6
for poly in translate_coords['coords']:
    poly_coords = []
    poly_geo_coords = []
    for cr in poly:
        poly_coords.append({
            'x': cr['x'],
            'y': cr['y'],
            'latitude': cr['lat'],
            'longitude': cr['long']
        })
        poly_geo_coords.append((cr['long'], cr['lat']))

    # add final closing point
    poly_geo_coords.append((poly[0]['long'], poly[0]['lat']))
    final_coords.append(poly_coords)
    geo_feature = Feature(geometry=Polygon([poly_geo_coords], precision=15))
    geo_features.append(geo_feature)

geo_feature_collection = FeatureCollection(geo_features)
geo_feature_collection_dump = geojson_dumps(geo_feature_collection,
                                            sort_keys=True)

json_contour_filepath = os.path.join(
    BASE_DIR, img_base_results_path + img_name + '-contours-method-2.json')
geojson_filepath = os.path.join(
    BASE_DIR, img_base_results_path + img_name + '-method-2-geojson.json')
with open(json_contour_filepath, 'w') as outfile:
    # json.dump(contours, outfile, default=json_np_default_parser)
    # json.dump({'contours': ctr_points, 'contours_coords': translate_coords.coords}, outfile)
    json.dump(final_coords, outfile)
    # json.dump(new_ctrs, outfile, default=json_np_default_parser)
Example #7
import ast
import csv
import pymongo
from geojson import Polygon, Feature, dump

myclient = pymongo.MongoClient(
    "mongodb://<user>:<password>@ds152000.mlab.com:52000/<client>")
mydb = myclient["<client>"]
mycol = mydb["<database>"]

polyjson = ""
with open('polydemo.csv', 'r') as f:
    next(f)  #ignore headers line - start on second line
    reader = csv.reader(f)
    for row in reader:
        coords = ast.literal_eval(row[3])
        poly = Polygon(coords)
        polyjson = Feature(name="cv" + row[0],
                           geometry=poly,
                           style={
                               "color": "#ff46b5",
                               "weight": 10,
                               "opacity": 0.85
                           },
                           properties={
                               "direction": row[1],
                               "status": row[2]
                           })
        # x = mycol.insert_one(polyjson)

# populate file to confirm content
with open('poly.geojson', 'w') as f:
    dump(polyjson, f)
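Each coords cell is parsed with ast.literal_eval, so it must hold a Python literal for a list of linear rings; a hypothetical example of the expected format:

import ast
from geojson import Polygon

# one linear ring, closed (first point repeated at the end)
cell = "[[(103.82, 1.35), (103.83, 1.35), (103.83, 1.36), (103.82, 1.35)]]"
poly = Polygon(ast.literal_eval(cell))
print(poly.is_valid)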
Example #8
def main():

    freeze_support()

    args = param_parser()

    inputfolder = args.input_folder.replace('\\', '/')
    outputfolder = AtlassGen.makedir(args.output_dir.replace('\\', '/'))
    tilelayoutfile = args.file
    filetype = args.filetype
    cores = args.cores
    copy = args.copy
    move = args.move
    batches = args.batches
    gen_block = args.gen_block
    block_size = int(args.block_size)
    ffile = args.txtfile
    usetxtfile = args.usetxtfile
    tilesize = args.tilesize
    tasks = {}

    tl_in = AtlassTileLayout()

    if usetxtfile:
        lines = [line.rstrip('\n') for line in open(ffile)]

        modificationTime = time.strftime('%Y-%m-%d %H:%M:%S',
                                         time.localtime(time.time()))
        for i, line in enumerate(lines):
            print(line)
            tilename = line

            x, y = tilename.split('_')

            tl_in.addtile(name=tilename,
                          xmin=float(x),
                          ymin=float(y),
                          xmax=float(x) + tilesize,
                          ymax=float(y) + tilesize,
                          modtime=modificationTime)
    else:

        tl_in.fromjson(tilelayoutfile)

    no_of_tiles = len(tl_in)

    print('\nTotal Number of Files : {0}'.format(no_of_tiles))
    batchlen = math.ceil(no_of_tiles / batches)
    batch = 0

    if gen_block:
        features = []
        blocks = []

        print('\nBlocking started.')
        block_path = os.path.join(outputfolder,
                                  '{0}m_blocks'.format(block_size)).replace(
                                      '\\', '/')

        for tile in tl_in:
            tilename = tile.name
            xmin = tile.xmin
            xmax = tile.xmax
            ymin = tile.ymin
            ymax = tile.ymax
            tilesize = int(int(xmax) - int(xmin))

            block_x = math.floor(xmin / block_size) * block_size
            block_y = math.floor(ymin / block_size) * block_size
            blockname = '{0}_{1}'.format(block_x, block_y)
            block_folder = os.path.join(block_path,
                                        blockname).replace('\\', '/')

            if blockname not in blocks:
                blocks.append(blockname)

            boxcoords = AtlassGen.GETCOORDS([xmin, ymin], tilesize)
            poly = Polygon([[
                boxcoords[0], boxcoords[1], boxcoords[2], boxcoords[3],
                boxcoords[4]
            ]])

            if not os.path.exists(block_folder):
                AtlassGen.makedir(block_folder)

            input = os.path.join(inputfolder, '{0}.{1}'.format(
                tilename, filetype)).replace('\\', '/')
            output = os.path.join(block_folder, '{0}.{1}'.format(
                tilename, filetype)).replace('\\', '/')
            #print(output)
            #block_task[blockname] = AtlassTask(blockname, movefiles, input, output)
            if copy:
                tasks[tilename] = AtlassTask(tilename, copyfile, input, output)
            elif move:
                tasks[tilename] = AtlassTask(tilename, movefiles, input,
                                             output)
            else:
                print("no command selected")
        p = Pool(processes=cores)
        results = p.map(AtlassTaskRunner.taskmanager, tasks.values())

        success = 0
        for result in results:
            if not result.success:
                print('File {0} could Not be copied/moved'.format(result.name))
            else:
                success += 1
        print('No of blocks : {0}'.format(len(blocks)))
        print('\nFiles copied/moved Successfully : {0}'.format(success))

        for block in blocks:
            blockname = block
            block_folder = os.path.join(block_path,
                                        blockname).replace('\\', '/')
            lfiles = AtlassGen.FILELIST(['*.{0}'.format(filetype)],
                                        block_folder)
            tilelayout = AtlassTileLayout()
            features = []
            for lf in lfiles:
                path, tilename, ext = AtlassGen.FILESPEC(lf)
                xmin, ymin = tilename.split('_')
                xmax = str(int(xmin) + tilesize)
                ymax = str(int(ymin) + tilesize)

                boxcoords = AtlassGen.GETCOORDS([xmin, ymin], tilesize)
                poly = Polygon([[
                    boxcoords[0], boxcoords[1], boxcoords[2], boxcoords[3],
                    boxcoords[4]
                ]])

                #adding records for json file
                features.append(
                    Feature(geometry=poly,
                            properties={
                                "name": tilename,
                                "xmin": xmin,
                                "ymin": ymin,
                                "xmax": xmax,
                                "ymax": ymax,
                                "tilenum": tilename
                            }))
                tilelayout.addtile(name=tilename,
                                   xmin=float(xmin),
                                   ymin=float(ymin),
                                   xmax=float(xmax),
                                   ymax=float(ymax))

            jsonfile = 'TileLayout'
            jsonfile = os.path.join(
                block_folder, '{0}_{1}.json'.format(jsonfile, len(features)))

            feature_collection = FeatureCollection(features)

            with open(jsonfile, 'w') as f:
                dump(feature_collection, f)

    else:
        for i, tile in enumerate(tl_in):

            tilename = '{0}.{1}'.format(tile.name, filetype)

            if i % batchlen == 0:
                batch = batch + 1
            batchstring = '{0}'.format(batch)
            batchstring = batchstring.rjust(3, '0')
            if batches == 1:
                output = os.path.join(outputfolder,
                                      tilename).replace("\\", "/")
            else:
                output = os.path.join(
                    AtlassGen.makedir('{0}/Batch_{1}'.format(
                        outputfolder, batchstring)),
                    tilename).replace("\\", "/")

            input = os.path.join(inputfolder, tilename).replace("\\", "/")

            if copy:
                tasks[tilename] = AtlassTask(tilename, copyfile, input, output)
            elif move:
                tasks[tilename] = AtlassTask(tilename, movefiles, input,
                                             output)
            else:
                print("no command selected")

        p = Pool(processes=cores)
        results = p.map(AtlassTaskRunner.taskmanager, tasks.values())

        success = 0
        for result in results:
            if not result.success:
                print('File {0} could Not be copied/moved'.format(result.name))
            else:
                success += 1

        print('Files copied/moved Successfully : {0}'.format(success))
Example #9
point_voronoi_list = []
feature_list = []
for region in range(len(vor.regions) - 1):
    #for region in range(9):
    vertex_list = []
    for x in vor.regions[region]:
        #Not sure how to map the "infinite" point, so, leave off those regions for now:
        if x == -1:
            break
        else:
            #Get the vertex out of the list, and flip the order for folium:
            vertex = vor.vertices[x]
            vertex = (vertex[1], vertex[0])
        vertex_list.append(vertex)
    #Save the vertex list as a polygon and then add to the feature_list:
    polygon = Polygon([vertex_list])
    feature = Feature(geometry=polygon, properties={})
    feature_list.append(feature)

#Write the features to the new file:
feature_collection = FeatureCollection(feature_list)
print(feature_collection, file=vorJSON)
vorJSON.close()

#Add the voronoi layer to the map:
mapLibrary.choropleth(geo_data='SwyVor2.json',
                      fill_color="BuPu",
                      fill_opacity=0.01,
                      line_opacity=0.5)
mapLibrary.save(outfile='Subway_NoLtrain.html')
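The snippet assumes vor, vorJSON and mapLibrary were created earlier; a minimal sketch of that elided setup, with random points standing in for the real station coordinates:

import folium
import numpy as np
from scipy.spatial import Voronoi

points = np.random.rand(30, 2)  # stand-in for the real (lat, lon) stations
vor = Voronoi(points)
vorJSON = open('SwyVor2.json', 'w')
mapLibrary = folium.Map(location=[40.73, -73.99], zoom_start=11)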
Example #10
def triangle_grid(bbox, cell_size, units):
    fc = FeatureCollection([])
    x_fraction = cell_size / (distance([bbox[0], bbox[1]], [bbox[2], bbox[1]],
                                       units))
    cell_width = x_fraction * (bbox[2] - bbox[0])
    y_fraction = cell_size / (distance([bbox[0], bbox[1]], [bbox[0], bbox[3]],
                                       units))
    cell_height = y_fraction * (bbox[3] - bbox[1])

    xi = 0
    current_x = bbox[0]
    while current_x <= bbox[2]:
        yi = 0
        current_y = bbox[1]
        while current_y <= bbox[3]:
            if xi % 2 == 0 and yi % 2 == 0:
                fc["features"].extend([
                    Polygon([[[current_x, current_y],
                              [current_x, current_y + cell_height],
                              [current_x + cell_width, current_y],
                              [current_x, current_y]]]),
                    Polygon(
                        [[[current_x, current_y + cell_height],
                          [current_x + cell_width, current_y + cell_height],
                          [current_x + cell_width, current_y],
                          [current_x, current_y + cell_height]]])])

            elif xi % 2 == 0 and yi % 2 == 1:
                fc["features"].extend([
                    Polygon(
                        [[[current_x, current_y],
                          [current_x + cell_width, current_y + cell_height],
                          [current_x + cell_width, current_y],
                          [current_x, current_y]]]),
                    Polygon(
                        [[[current_x, current_y],
                          [current_x, current_y + cell_height],
                          [current_x + cell_width, current_y + cell_height],
                          [current_x, current_y]]])])

            elif yi % 2 == 0 and xi % 2 == 1:
                fc["features"].extend([
                    Polygon(
                        [[[current_x, current_y],
                          [current_x, current_y + cell_height],
                          [current_x + cell_width, current_y + cell_height],
                          [current_x, current_y]]]),
                    Polygon(
                        [[[current_x, current_y],
                          [current_x + cell_width, current_y + cell_height],
                          [current_x + cell_width, current_y],
                          [current_x, current_y]]])])

            elif yi % 2 == 1 and xi % 2 == 1:
                fc["features"].extend([
                    Polygon([[[current_x, current_y],
                              [current_x, current_y + cell_height],
                              [current_x + cell_width, current_y],
                              [current_x, current_y]]]),
                    Polygon(
                        [[[current_x, current_y + cell_height],
                          [current_x + cell_width, current_y + cell_height],
                          [current_x + cell_width, current_y],
                          [current_x, current_y + cell_height]]])])

            current_y += cell_height
            yi += 1

        xi += 1
        current_x += cell_width
    return fc
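triangle_grid leans on an external turf-style distance helper that is not shown; a sketch of a compatible great-circle implementation plus a call, assuming it lives in the same module as triangle_grid (bbox values are illustrative):

from math import asin, cos, radians, sin, sqrt

def distance(p1, p2, units="kilometers"):
    # great-circle distance between [lon, lat] points, in kilometres
    lon1, lat1 = map(radians, p1)
    lon2, lat2 = map(radians, p2)
    h = sin((lat2 - lat1) / 2) ** 2 + \
        cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2) ** 2
    return 2 * 6371.0 * asin(sqrt(h))

grid = triangle_grid([-3.74, 40.40, -3.66, 40.52], 2, 'kilometers')
print(len(grid['features']), 'triangle geometries')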
Example #11
def create_geo_json(date, sess):
    # get the precipitation data from db then close the connection
    dataFrame = get_monthly_saws_data(date, date, sess)
    sess.conn.close()

    # FOR DEBUGGING ONLY
    # print(dataFrame.head())

    geoJsonList = []

    # latitude is y coord
    # longitude is x coord
    lat1, lat2, long1, long2 = None, None, None, None

    # loop through all rows in the dataframe
    for index, row in dataFrame.iterrows():
        # set the initial longitude and latitude
        if index == 0:
            lat1 = row["Latitude"]
            long1 = row["Longitude"]

        # if the 2nd latitude hasn't been set and the current entry differs from the initial latitude
        if lat2 is None and row["Latitude"] != lat1:
            lat2 = row["Latitude"]

        # if the 2nd longitude hasn't been set and the current entry differs from the initial longitude
        if long2 is None and row["Longitude"] != long1:
            long2 = row["Longitude"]

        # break when all requested coordinates are filled
        if None not in (long1, long2, lat1, lat2):
            break

    # default values
    polyWidth = 0.017
    polyHeight = 0.017
    # as long as no coordinate is None, calculate the polygon width and height from the grid spacing
    if None not in (long1, long2, lat1, lat2):
        # subtract 0.001 for slight spacing between data points
        polyWidth = (abs(long1 - long2) / 2.0) - 0.001
        polyHeight = (abs(lat1 - lat2) / 2.0) - 0.001

    # loop through all rows in the dataframe
    for index, row in dataFrame.iterrows():
        # create a feature using the current latitude, longitude, precipitation, and the poly width and height
        feature = Feature(geometry=Polygon([
            [[row['Longitude'] - polyWidth, -1 * row['Latitude'] - polyHeight],
             [row['Longitude'] - polyWidth, -1 * row['Latitude'] + polyHeight],
             [row['Longitude'] + polyWidth, -1 * row['Latitude'] + polyHeight],
             [row['Longitude'] + polyWidth, -1 * row['Latitude'] - polyHeight],
             [row['Longitude'] - polyWidth, -1 * row['Latitude'] - polyHeight]]
        ]),
                          properties={
                              "elevation": row["Rainfall (mm)"] * 490,
                              "normalizedElevation": row["Rainfall (mm)"],
                              "Longitude": row["Longitude"],
                              "Latitude": -1 * row["Latitude"]
                          })
        # append the feature to the geoJson list
        geoJsonList.append(feature)

    # make a feature collection out of the geoJson list
    featureCollection = FeatureCollection(geoJsonList)
    return featureCollection
Example #12
    def get_bboxes_as_geojson(self):
        ''' This method converts the response file to geojson geometries.
        imageAnalysisResult is included in the geojson features.
        returns: geojson.FeatureCollection of the accepted requests
        '''
        features = []
        extent = self.bbox

        invalid_request_count = 0
        bbox_out_count = 0

        count = 0

        coords_min = [float('inf'), float('inf')]
        coords_max = [float('-inf'), float('-inf')]

        logging.info("Creating geojson objects.")
        for res in self.responses:
            count += 1
            if count % 1000 == 0:
                logging.debug("Result no. {}".format(count))

            # Filter out invalid test results
            if ('imageAnalysisResult' not in res.keys()
                    or 'testResult' not in res.keys()
                    or res['testResult'] != 0):
                invalid_request_count += 1
                continue

            # Convert bbox as a list.
            bbox = list(map(float, res['bBox'].split(',')))

            if not self.crs.is_first_axis_east():
                bbox = change_bbox_axis_order(bbox)

            # Tolerance helps to handle rounding problems in the border areas.
            unit = self.crs.get_coordinate_unit().lower()
            tolerance = 1 if unit == 'metre' else 0.000001

            inside = [
                bbox[0] >= extent[0] - tolerance,
                bbox[1] >= extent[1] - tolerance,
                bbox[2] <= extent[2] + tolerance,
                bbox[3] <= extent[3] + tolerance,
            ]

            # Filter out requests out of the interest area
            if not all(inside):
                bbox_out_count += 1
                continue

            if bbox_out_count == 0:
                for i in range(len(coords_min)):
                    if bbox[i] < coords_min[i]:
                        coords_min[i] = bbox[i]
                for i in range(len(coords_max)):
                    if bbox[i + len(coords_max)] > coords_max[i]:
                        coords_max[i] = bbox[i + len(coords_max)]

            # Create a closed Polygon following the edges of the bbox.
            g = Polygon([[(bbox[0], bbox[1]), (bbox[0], bbox[3]),
                          (bbox[2], bbox[3]), (bbox[2], bbox[1]),
                          (bbox[0], bbox[1])]])

            # Save other data
            props = {
                'imageAnalysisResult': res['imageAnalysisResult'],
                'testResult': res['testResult'],
                'requestTime': res['requestTime']
            }
            feat = Feature(geometry=g, properties=props)

            features.append(feat)

        if invalid_request_count > 0:
            logging.info(
                "Filtered {} requests away due to failed request.".format(
                    invalid_request_count))

        if bbox_out_count > 0:
            logging.info(
                "Filtered {} requests away because the request bbox was not completely within the layer bbox"
                .format(bbox_out_count))
        #TODO: this needs to be a square; now it isn't
        '''else:
			self.bbox = coords_min + coords_max
			logging.info("Bounding box set to the extent of all requests to {}".format(self.bbox))'''

        feat_c = FeatureCollection(features)

        return feat_c
Example #13
def dibuja():
    lat_max = 40.521943
    lat_min = 40.402217
    lng_max = -3.736493
    lng_min = -3.658524
    num_pasos = 10
    paso_lat = (lat_max - lat_min) / num_pasos
    paso_lng = (lng_max - lng_min) / num_pasos

    valor_md = [[
        1576.502822786425, 1706.6024631007563, 1765.4756749258,
        1730.4701125108154, 1676.7717989715075, 1773.888975699718,
        1727.973873236178, 1818.7390118225294, 1778.3675195488888,
        1636.4195734196987
    ],
                [
                    1617.990830299028, 1765.4574264030343, 2082.8787400397086,
                    1970.475127569303, 1880.4715724252405, 1808.9799799492425,
                    1749.4835753156208, 1865.676916691485, 1699.8442690306997,
                    1548.661404708652
                ],
                [
                    1800.4411737138187, 1899.0382807711433, 2135.728808486968,
                    2146.4674423175193, 2060.405058854577, 1857.6465020530975,
                    1830.4064166238215, 1718.8702757811516, 1545.4603192802192,
                    1552.44760033048
                ],
                [
                    1860.8659767078823, 2031.1509322836484, 1940.9909256212586,
                    2205.8917787800806, 2113.3416929714876, 1888.8082424298354,
                    1848.3404511802141, 1580.1835206839178, 1590.9000661859593,
                    1517.5458150832567
                ],
                [
                    2010.6467875229014, 2118.838771887762, 2100.4620470586665,
                    2170.607697158998, 2055.463328539102, 1894.602931961983,
                    1844.0016710111192, 1551.3091453608013, 1610.6799555186594,
                    1482.1423588430466
                ],
                [
                    2035.8840085213733, 2145.3105742056873, 1938.3521119766278,
                    2060.278987798437, 1951.5575632793266, 1900.7566749129032,
                    1665.8443026800874, 1539.317365639115, 1555.804831075347,
                    1265.444367025447
                ],
                [
                    1958.6557279825324, 2130.8823758158683, 2065.9273058216145,
                    2002.0850953883328, 1851.5665856314931, 1742.669397791948,
                    1726.0666330187992, 1505.875231162832, 1395.0684212318604,
                    1159.8286615433553
                ],
                [
                    1921.5252039954185, 2093.248368534183, 2031.135521245579,
                    1952.5024128287732, 1734.596183216546, 1659.3668957295552,
                    1638.4777732906803, 1451.9091595890104, 1201.37517688246,
                    1065.8097462167982
                ],
                [
                    1770.0217926477626, 1929.407113243848, 1923.3225808769876,
                    1799.5299623355581, 1688.3144743339237, 1571.1412517480169,
                    1365.7765001587277, 1342.1435768352987, 1142.198731863111,
                    896.5992603981927
                ],
                [
                    1355.1472758947357, 1598.5996571094881, 1722.815245369124,
                    1634.44264055553, 1670.1450864594226, 1503.8584139014665,
                    1228.490413775686, 1125.954091059637, 1179.4306413278232,
                    778.7172073157207
                ]]
    print(valor_md)
    malla_md = []
    for i in range(num_pasos):
        for k in range(num_pasos):
            el_cubo = Polygon([[(lng_min + (float(i) - 0.5) * paso_lng,
                                 lat_min + (float(k) - 0.5) * paso_lat),
                                (lng_min + (float(i) + 0.5) * paso_lng,
                                 lat_min + (float(k) - 0.5) * paso_lat),
                                (lng_min + (float(i) + 0.5) * paso_lng,
                                 lat_min + (float(k) + 0.5) * paso_lat),
                                (lng_min + (float(i) - 0.5) * paso_lng,
                                 lat_min + (float(k) + 0.5) * paso_lat),
                                (lng_min + (float(i) - 0.5) * paso_lng,
                                 lat_min + (float(k) - 0.5) * paso_lat)]])

            malla_md.append(
                Feature(properties={"valor": valor_md[k][i]},
                        geometry=el_cubo))

    malla_md_json = FeatureCollection(malla_md)
    print(malla_md)
    with open('output/md_json.json', 'w') as outfile:
        json.dump(malla_md_json, outfile)

    df1 = gpd.read_file('output/md_json.json')
    df1 = df1.to_crs(epsg=3857)

    fig, ax1 = plt.subplots(1, 1)
    divider = make_axes_locatable(ax1)
    cax = divider.append_axes("right", size="5%", pad=0.1)
    ax1 = df1.plot(column='valor', alpha=0.5, ax=ax1, legend=True, cax=cax)
    fig.set_size_inches(10.5, 15.5)
    ctx.add_basemap(ax1)
    plt.savefig('output/map_md.png')
Example #14
def getRDT(path, lev, type):
    """
    Gets RDT data from the netcdf output of the NWCSAF software
    :param path: Full path and filename of the netcdf file
    :param lev: [0,1] Level number. 0 = bottom of the cloud, 1 = top of the cloud (heights vary between clouds)
    :param type: ['All', 'Centre_Point', 'Polygon', 'Tail_Points', 'Tail_Lines', 'Gridded']
    :return: geojson feature collection object for plotting
    """

    # Load the netcdf data
    ncds = nc.Dataset(path)

    # Get variable names that have either 'nlevel' or 'recNUM' dimensions
    varlev = []  # 'nlevel' dimension
    varrec = []  # 'recNUM' dimension
    vartraj = []  # 'nbpttraj' Trajectory points for each object
    for ncv in ncds.variables.keys():
        var_dim = ncds.variables[ncv].dimensions
        if "nlevel" in var_dim:
            varlev.append(ncv)
        if ("recNUM" in var_dim) and (ncds.dimensions["recNUM"].size
                                      == ncds.variables[ncv].size):
            varrec.append(ncv)
        if "nbpttraj" in var_dim:
            vartraj.append(ncv)
    allvars = varlev.copy()
    allvars.extend(varrec)

    # How many cloud levels have we got? It should only be 2 (cloud top and bottom)
    dimsize = len(ma.getdata(ncds.variables[varlev[0]][0, :]))

    # Convert units from the netcdf
    unitsToRescale = {"Pa": "hPa"}  # , 'K': 'degC'}
    # Get text labels instead of numbers for certain fields
    fieldsToLookup = [
        "PhaseLife",
        "SeverityType",
        "SeverityIntensity",
        "ConvType",
        "CType",
    ]

    # Check that lev is within dimsize
    if lev not in np.arange(dimsize):
        return "Please enter a valid level number (0 or 1)"
    else:
        list_to_return = []

    if type in ["Polygon", "All"]:

        # Create list of features to populate
        features = []
        for i in np.arange(ncds.dimensions["recNUM"].size):

            # do something
            ypolcoords_ll = getDataOnly(ncds.variables["LatContour"][i,
                                                                     lev, :])
            xpolcoords_ll = getDataOnly(ncds.variables["LonContour"][i,
                                                                     lev, :])

            xpolcoords, ypolcoords = geo.web_mercator(xpolcoords_ll,
                                                      ypolcoords_ll)

            this_poly = Polygon([[(float(coord[0]), float(coord[1]))
                                  for coord in zip(xpolcoords, ypolcoords)]])

            # Get the properties for this polygon
            pol_props = {}
            for var in allvars:

                if var in varlev:
                    thisdata = ncds.variables[var][i, lev]
                else:
                    thisdata = ncds.variables[var][i]

                thisdata_scaled = convert_values(thisdata, ncds.variables[var])
                datatype = ncds.variables[var].datatype

                if var in fieldsToLookup:
                    try:
                        thisdata_scaled = fieldValueLUT(var, int(thisdata))
                        datatype = "string"
                    except:
                        continue

                update_json(pol_props, var, thisdata_scaled, datatype)

            features.append(Feature(geometry=this_poly, properties=pol_props))

        feature_collection = FeatureCollection(features)

        list_to_return.append(json.dumps(feature_collection))

    if type in ["Tail_Lines", "All"]:

        # Create an empty dictionary
        mydict_tl = get_empty_feature_dict("Tail_Lines")

        # Loop through features
        for i in np.arange(ncds.dimensions["recNUM"].size):

            # Loop through all items in mydict_tl
            for k in mydict_tl.keys():
                try:
                    thisdata, units = descale_rdt(
                        k, getDataOnly(ncds.variables[k][:]))
                    mydict_tl[k].append(thisdata)
                except:
                    # Do nothing at the moment with the xs and ys
                    if not k in ["xs", "ys"]:
                        mydict_tl[k].append(None)
                    else:
                        continue

            lats = getDataOnly(ncds.variables["LatTrajCellCG"][i, :])
            lons = getDataOnly(ncds.variables["LonTrajCellCG"][i, :])

            xs, ys = geo.web_mercator(lons, lats)
            mydict_tl["xs"].append(xs)
            mydict_tl["ys"].append(ys)

        list_to_return.append(mydict_tl)

    if type in ["Tail_Points", "All"]:

        # Create an empty dictionary
        mydict_tp = get_empty_feature_dict("Tail_Points")

        for i in np.arange(ncds.dimensions["recNUM"].size):

            # do something
            npts = len(getDataOnly(ncds.variables["LonTrajCellCG"][i]))
            mykeys = [k for k in mydict_tp.keys() if k not in ["x", "y"]]
            for k in mykeys:

                thisdata = getDataOnly(ncds.variables[k][i]).tolist()

                try:
                    if len(thisdata) == 1:
                        thisdata = thisdata[0]
                except:
                    pass

                if isinstance(thisdata, list) and (npts == len(thisdata)):
                    datalist, units = descale_rdt(k, thisdata)
                    mydict_tp[k].extend(datalist)
                else:
                    # Repeat the data value npts times
                    datalist = [i for i in itertools.repeat(thisdata, npts)]
                    thisdata, units = descale_rdt(k, datalist)
                    mydict_tp[k].extend(datalist)

                # Some records don't have any data. Mostly happens for ExpanRateTraj
                if len(thisdata) == 0:
                    datalist = [i for i in itertools.repeat("-", npts)]
                    mydict_tp[k].extend(datalist)

            lats = getDataOnly(ncds.variables["LatTrajCellCG"][i, :])
            lons = getDataOnly(ncds.variables["LonTrajCellCG"][i, :])
            x, y = geo.web_mercator(lons, lats)
            mydict_tp["x"].extend(x)
            mydict_tp["y"].extend(y)

        list_to_return.append(mydict_tp)

    # Do specific things for each feature type
    if type in ["Centre_Point", "All"]:

        # Create an empty dictionary
        mydict_cp = get_empty_feature_dict("Centre_Point")

        for i in np.arange(ncds.dimensions["recNUM"].size):

            # Get the Point features
            lat = ncds.variables["LatG"][i, lev]
            lon = ncds.variables["LonG"][i, lev]
            x1, y1 = geo.web_mercator(lon, lat)
            mydict_cp["x1"].extend(x1)
            mydict_cp["y1"].extend(y1)

            mykeys = [
                k for k in mydict_cp.keys() if not (("x" in k) or ("y" in k))
            ]
            for k in mykeys:
                try:
                    if k in varlev:
                        thisdata = ncds.variables[k][i, lev]
                    else:
                        thisdata = ncds.variables[k][i]

                    thisdata, units = descale_rdt(
                        k, thisdata)  # May not need to do this
                    mydict_cp[k].append(thisdata)
                except:
                    mydict_cp[k].append(None)

            # Now calculate future point and line
            try:
                speed = float(ncds.variables["MvtSpeed"][i])
            except ValueError:
                speed = 0
            try:
                direction = float(ncds.variables["MvtDirection"][i])
            except ValueError:
                direction = 0

            mydict_cp = make_arrow(mydict_cp, lon, lat, speed, direction)

        list_to_return.append(mydict_cp)

    if len(list_to_return) == 1:
        return list_to_return[0]
    elif len(list_to_return) > 1:
        return tuple(list_to_return)
    else:
        return "Nothing to return"
Example #15
    meshnum = int(mesh)
    lat_sw, lon_sw = ju.to_meshpoint(meshnum,
                                     lat_multiplier=0,
                                     lon_multiplier=0)
    lat_se, lon_se = ju.to_meshpoint(meshnum,
                                     lat_multiplier=0,
                                     lon_multiplier=1)
    lat_nw, lon_nw = ju.to_meshpoint(meshnum,
                                     lat_multiplier=1,
                                     lon_multiplier=0)
    lat_ne, lon_ne = ju.to_meshpoint(meshnum,
                                     lat_multiplier=1,
                                     lon_multiplier=1)
    meshst = str(mesh)
    Mesh.append(
        Polygon([[(lon_ne, lat_ne), (lon_nw, lat_nw), (lon_sw, lat_sw),
                  (lon_se, lat_se)]]))
Meshs = gpd.GeoDataFrame({'id': index_list, 'geometry': Mesh})
src = Meshs.to_json()

linear = cm.LinearColormap(
    ['white', 'blue', 'aqua', 'lime', 'yellow', 'orange', 'red'],
    index=[0, 5, 10, 50, 100, 500],
    vmin=0,
    vmax=600).to_step(200)
linear.caption = 'Suspicious User of infected'

Heatdata_7d = []

for yy in range(0, len(df2.index)):
    Heatdata_7d.append([])
Example #16
def segmentImage(input_parameters):
    """ This function takes an input URL, seed point, and tolerance and produces a pointlist of the outer most contour
    """

    import cv2
    import numpy as np
    from numpy import unique, squeeze
    import Image
    import cStringIO
    import re
    import geojson
    from geojson import Polygon, Feature, FeatureCollection

    opdata = input_parameters

    print opdata

    imgstr = re.search(r'base64,(.*)', opdata['image']).group(1)
    tempimg = cStringIO.StringIO(imgstr.decode('base64'))
    tempimg.seek(0)
    cvimg = cv2.imdecode(np.asarray(bytearray(tempimg.read()), dtype=np.uint8),
                         1)
    # cv2.imwrite('inputimage.png', cvimg)

    # imgray = cv2.cvtColor(cvimg,cv2.COLOR_BGR2GRAY)
    imgray = cvimg[:, :, 2]
    # cv2.imwrite('segment.png', imgray)

    all_cnts = []
    cntdict = {}

    return_data = []

    extent = opdata['extent']
    tr = extent[0]
    bl = extent[1]

    native_width = tr[0] - bl[0]
    native_height = -bl[1] + tr[1]

    x_scale = native_width / imgray.shape[1]
    y_scale = native_height / imgray.shape[0]

    def contourToGeoString(cnt):
        '''convert an opencv contour to a geojson-compatible representation'''

        t_string = []
        for pt in cnt:

            px = np.round(pt[0] * x_scale) + bl[0]
            py = -1 * np.round(pt[1] * y_scale) + tr[1]

            t_string.append((float(px), float(py)))

        return t_string

    unique_labels = unique(imgray)

    print 'uniques %s' % (unique_labels)

    # we're going to make an assumption: only consider a single hole in a polygon

    for label in unique_labels:

        working_img = imgray.copy()
        working_img[working_img != label] = 0

        # CV_RETR_CCOMP retrieves all of the contours and organizes them into a two-level
        # hierarchy. At the top level, there are external boundaries of the components.
        # At the second level, there are boundaries of the holes. If there is another contour
        # inside a hole of a connected component, it is still put at the top level.

        contours, hierarchy = cv2.findContours(working_img, cv2.RETR_CCOMP,
                                               cv2.CHAIN_APPROX_NONE)

        # hierarchy[i][0] , hiearchy[i][1] , hiearchy[i][2] , and hiearchy[i][3] are set
        # to 0-based indices in contours of the next and previous contours at the same
        # hierarchical level, the first child contour and the parent contour, respectively.
        # If for the contour i there are no next, previous, parent, or nested contours,
        # the corresponding elements of hierarchy[i] will be negative.

        for n, cnt in enumerate(contours):

            hei = hierarchy[0][n]
            #         print hei

            # create an array for this polygon
            if str(label) not in cntdict.keys():
                cntdict[str(label)] = []

            if hei[3] >= 0:
                print '%s: %d -> this contour has a parent: %d' % (label, n,
                                                                   hei[3])
                # this contour has a parent, do not add it directly
                pass

            elif hei[2] < 0:
                # this contour has no children, just add it

                outer_poly = (contourToGeoString(squeeze(cnt)))

                #             x_vals = np.round(ca[:,0] * x_scale) + bl[0]
                #             y_vals = -1*np.round(ca[:,1] * y_scale) + tr[1]

                print '(add) %s: %d -> this contour (%d) has no children' % (
                    label, n, len(outer_poly))

                print outer_poly

                geo = Polygon([outer_poly])
                feat = Feature(geometry=geo, id=len(all_cnts))
                feat['properties']['labelindex'] = str(label)

                cntdict[str(label)].append(feat)
                all_cnts.append(feat)

            else:
                # contour's child is at contours[hei[2]]
                # add this contour and it's child

                outer_poly = contourToGeoString(squeeze(cnt))
                inner_poly = contourToGeoString(squeeze(contours[hei[2]]))

                print '(add) %s: %d -> this contour (%d) has a child: %d (%d)' % (
                    label, n, len(outer_poly), hei[2], len(inner_poly))

                geo = Polygon([outer_poly, inner_poly])

                feat = Feature(geometry=geo, id=len(all_cnts))
                feat['properties']['labelindex'] = str(label)

                cntdict[str(label)].append(feat)

                all_cnts.append(feat)

        for c in all_cnts:
            return_data.append(geojson.dumps(c))

        print 'There are %d features to return' % (len(return_data))

        # msg['features'] =

    return (return_data)
Example #17
    listLength = len(coordArray) - 1
    firstItem = coordArray[0]

    for index, coordPair in enumerate(coordArray):
        coordPairArray = coordPair.split(":")

        latitude, longitude = map(float,
                                  (coordPairArray[0], coordPairArray[1]))
        point = Point((longitude, latitude))

        coords.append(point)

        if (index == listLength):
            lastCoordPairArray = firstItem.split(":")
            lat, lon = map(float,
                           (lastCoordPairArray[0], lastCoordPairArray[1]))
            lastPoint = Point((lon, lat))
            coords.append(lastPoint)

    features.append(
        Feature(geometry=Polygon([coords]), properties={
            'STATE': nodeId,
        }))

collection = FeatureCollection(features)
with open("converted_final.json", "w") as f:
    f.write('%s' % collection)

print("FINISHED")
Example #18
def fillImageGeoJSON(params):

    #todo implement a smart url-based hashing cache

    # loading image from url into memory, first as np array then opencv image
    req = urllib.urlopen(params['image']['url'])
    arr = np.asarray(bytearray(req.read()), dtype=np.uint8)
    img = cv2.imdecode(arr, -1)  # 'load it as it is'

    h, w = img.shape[:2]
    mask = np.zeros((h + 2, w + 2), np.uint8)

    lo = int(params['tolerance'])
    hi = int(params['tolerance'])
    connectivity = 4
    flags = connectivity
    flags |= cv2.FLOODFILL_FIXED_RANGE

    # print 'relative', params['click']['relative']
    # print 'absolute', params['click']['absolute']

    relclick = np.asarray(params['click']['relative'])
    absclick = np.asarray(params['click']['absolute'])
    regsize = np.asarray(params['image']['region']['size'])
    region_origin = np.asarray(params['image']['region']['origin'])

    regclick = absclick - region_origin
    reg_relclick = regclick / regsize
    real_size = np.asarray([w, h])
    region_real_click = real_size * reg_relclick

    # print real_size
    # print region_real_click

    seed_pt = (int(region_real_click[0]), int(region_real_click[1]))

    # seed_pt = (int(params['click']['relative'][0] * w), int(params['click']['relative'][1] * h))
    # this doesn't work when an edge is clipped

    cv2.floodFill(img, mask, seed_pt, (255, 190, 0), (lo, lo, lo),
                  (hi, hi, hi), flags)
    contours = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)

    # contours are now defined in the coordinates of the image captured
    # to get their relative position in the subimage

    subimage_x_to_rel_subimage_x = 1. / w
    subimage_y_to_rel_subimage_y = 1. / h

    # since we know the transform of the subimage coordinate frame to the native coordinate frame

    js_region_width = float(params['image']['region']['size']
                            [0])  # this is width in native coordinates
    js_region_height = float(params['image']['region']['size']
                             [1])  # this is height in native coordinates

    js_region_origin_x = float(params['image']['region']['origin']
                               [0])  # this is offset in native coordinates
    js_region_origin_y = float(params['image']['region']['origin']
                               [1])  # this is offset in native coordinates

    def contourToGeoString(cnt):
        '''convert an opencv contour to a geojson-compatible representation'''

        t_string = []

        for pt in cnt:

            rx = subimage_x_to_rel_subimage_x * pt[0]
            ry = subimage_y_to_rel_subimage_y * pt[1]

            new_x = (js_region_width * rx) + js_region_origin_x - 3
            new_y = -1 * ((js_region_height * ry) + js_region_origin_y - 3)

            # px = np.round(pt[0] * x_scale) + bl[0]
            # py = -1*np.round(pt[1] * y_scale) + tr[1]

            t_string.append((float(new_x), float(new_y)))

        return t_string
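    # Worked example of the transform above (illustrative numbers, not from
    # the source): with w == 500, js_region_width == 1000 and
    # js_region_origin_x == 200, a contour x of 250 gives
    # rx = (1 / 500) * 250 = 0.5 and
    # new_x = (1000 * 0.5) + 200 - 3 = 697 in native coordinates.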

    outer_poly = contourToGeoString(squeeze(contours[0][0]))

    geo = Polygon([outer_poly])
    feat = Feature(geometry=geo)

    feat['properties']['rgbcolor'] = 'rgba(255, 255, 255, 0.1)'
    feat['properties']['hexcolor'] = '#ff0000'
    feat['properties']['source'] = 'autofill'

    del img, mask

    return_msg = {}
    return_msg['features'] = [geojson.dumps(feat)]

    return return_msg
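# A minimal usage sketch for fillImageGeoJSON(), with the params structure
# inferred from the keys read above; the URL and numbers are placeholders:
#
# params = {
#     'image': {
#         'url': 'http://example.com/tile.png',
#         'region': {'size': [1000, 800], 'origin': [200, 100]},
#     },
#     'click': {'relative': [0.5, 0.5], 'absolute': [700, 500]},
#     'tolerance': 20,
# }
# msg = fillImageGeoJSON(params)  # msg['features'] holds GeoJSON feature strings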
Exemplo n.º 19
0
    def add_geojson(self, json_ld):
        """
        adds geospatial and event data that links time and space information
        """
        uuid = self.manifest.uuid
        item_type = self.manifest.item_type
        geo_meta = self.geo_meta
        event_meta = self.event_meta
        features_dict = False  # dict of all features to be added
        feature_events = False  # mappings between features and time periods
        if geo_meta is not False:
            # print('here!' + str(geo_meta))
            features_dict = LastUpdatedOrderedDict()
            feature_events = LastUpdatedOrderedDict()
            for geo in geo_meta:
                geo_id = geo.feature_id
                geo_node = '#geo-' + str(
                    geo_id)  # the node id for database rec of the feature
                geo_node_geom = '#geo-geom-' + str(geo_id)
                geo_node_props = '#geo-props-' + str(geo_id)
                geo_node_derived = '#geo-derived-' + str(
                    geo_id)  # node id for a derived feature
                geo_node_derived_geom = '#geo-derived-geom-' + str(geo_id)
                geo_node_derived_props = '#geo-derived-props-' + str(geo_id)
                feature_events[geo_node] = []
                geo_props = LastUpdatedOrderedDict()
                geo_props['href'] = URImanagement.make_oc_uri(
                    uuid, item_type, self.cannonical_uris)
                geo_props['type'] = geo.meta_type
                if len(geo.note) > 0:
                    geo_props['note'] = geo.note
                if uuid != geo.uuid:
                    geo_props['reference-type'] = 'inferred'
                    geo_props['reference-uri'] = URImanagement.make_oc_uri(
                        geo.uuid, 'subjects', self.cannonical_uris)

                    rel_meta = self.item_gen_cache.get_entity(geo.uuid)
                    if rel_meta is not False:
                        geo_props['reference-label'] = rel_meta.label
                        geo_props['reference-slug'] = rel_meta.slug
                else:
                    geo_props['reference-label'] = self.manifest.label
                    geo_props['reference-type'] = 'specified'
                    if self.assertion_hashes:
                        geo_props['hash_id'] = geo.hash_id
                        geo_props['feature_id'] = geo.feature_id
                if geo.specificity < 0 and self.manifest.item_type != 'projects':
                    # case where we've got reduced precision geospatial data
                    # geotile = quadtree.encode(geo.latitude, geo.longitude, abs(geo.specificity))
                    geo_props['location-precision'] = abs(geo.specificity)
                    geo_props[
                        'location-precision-note'] = 'Location data approximated as a security precaution.'
                    gmt = GlobalMercator()
                    geotile = gmt.lat_lon_to_quadtree(geo.latitude,
                                                      geo.longitude,
                                                      abs(geo.specificity))
                    tile_bounds = gmt.quadtree_to_lat_lon(geotile)
                    item_polygon = Polygon([[(tile_bounds[1], tile_bounds[0]),
                                             (tile_bounds[1], tile_bounds[2]),
                                             (tile_bounds[3], tile_bounds[2]),
                                             (tile_bounds[3], tile_bounds[0]),
                                             (tile_bounds[1], tile_bounds[0])]
                                            ])
                    item_f_poly = Feature(geometry=item_polygon)
                    item_f_poly.id = geo_node_derived
                    item_f_poly.geometry.id = geo_node_derived_geom
                    item_f_poly.properties.update(geo_props)
                    item_f_poly.properties['location-note'] = 'This region defines the '\
                                                              'approximate location for this item.'
                    item_f_poly.properties['id'] = geo_node_derived_props
                    features_dict[geo_node_derived] = item_f_poly
                    item_point = Point(
                        (float(geo.longitude), float(geo.latitude)))
                    item_f_point = Feature(geometry=item_point)
                    item_f_point.id = geo_node
                    item_f_point.geometry.id = geo_node_geom
                    item_f_point.properties.update(geo_props)
                    item_f_point.properties['location-note'] = 'This point defines the center of the '\
                                                               'region approximating the location for this item.'
                    item_f_point.properties['id'] = geo_node_props
                    features_dict[geo_node] = item_f_point
                elif len(geo.coordinates) > 1:
                    # here we have geo_json expressed features and geometries to use
                    if geo.specificity < 0:
                        geo_props[
                            'location-precision-note'] = 'Location data approximated as a security precaution.'
                    elif geo.specificity > 0:
                        geo_props[
                            'location-precision-note'] = 'Location data has uncertainty.'
                    else:
                        geo_props['location-precision-note'] = 'Location data available with no '\
                                                               'intentional reduction in precision.'
                    item_point = Point(
                        (float(geo.longitude), float(geo.latitude)))
                    item_f_point = Feature(geometry=item_point)
                    item_f_point.properties.update(geo_props)
                    if uuid == geo.uuid:
                        # the item itself has the polygon as its feature
                        item_db = Point(
                            (float(geo.longitude), float(geo.latitude)))
                        if geo.ftype == 'Polygon':
                            coord_obj = json.loads(geo.coordinates)
                            item_db = Polygon(coord_obj)
                        elif (geo.ftype == 'MultiPolygon'):
                            coord_obj = json.loads(geo.coordinates)
                            item_db = MultiPolygon(coord_obj)
                        elif (geo.ftype == 'MultiLineString'):
                            coord_obj = json.loads(geo.coordinates)
                            item_db = MultiLineString(coord_obj)
                        item_f_db = Feature(geometry=item_db)
                        item_f_db.id = geo_node
                        item_f_db.geometry.id = geo_node_geom
                        item_f_db.properties.update(geo_props)
                        item_f_db.properties['id'] = geo_node_props
                        features_dict[geo_node] = item_f_db
                        item_f_point.id = geo_node_derived
                        item_f_point.geometry.id = geo_node_derived_geom
                        item_f_point.properties['location-region-note'] = 'This point represents the center of the '\
                                                                          'region defining the location of this item.'
                        item_f_point.properties['id'] = geo_node_derived_props
                        features_dict[geo_node_derived] = item_f_point
                    else:
                        #the item is contained within another item with a polygon or multipolygon feature
                        item_f_point.id = geo_node
                        item_f_point.geometry.id = geo_node_geom
                        item_f_point.properties['id'] = geo_node_props
                        item_f_point.properties['contained-in-region'] = True
                        item_f_point.properties['location-region-note'] = 'This point represents the center of the '\
                                                                          'region containing this item.'
                        features_dict[geo_node] = item_f_point
                else:
                    # case where the item only has a point for geo-spatial reference
                    geo_props[
                        'location-note'] = 'Location data available with no intentional reduction in precision.'
                    item_point = Point(
                        (float(geo.longitude), float(geo.latitude)))
                    item_f_point = Feature(geometry=item_point)
                    item_f_point.id = geo_node
                    item_f_point.geometry.id = geo_node_geom
                    item_f_point.properties.update(geo_props)
                    item_f_point.properties['id'] = geo_node_props
                    features_dict[geo_node] = item_f_point
            if event_meta is not False:
                # events provide chronological information, tied to geo features
                # sometimes there is more than one time period for a geo feature
                # in such cases, we duplicate the geo feature and add the
                # different time event information to the new features
                for event in event_meta:
                    rel_feature_num = 1  # default to the first geospatial feature for where the event happened
                    rel_feature_node = False
                    if event.feature_id > 0:
                        rel_feature_num = event.feature_id
                    if rel_feature_num >= 1:
                        rel_feature_node = '#geo-' + str(rel_feature_num)
                    act_event_obj = LastUpdatedOrderedDict()
                    act_event_obj = self.add_when_json(act_event_obj, uuid,
                                                       item_type, event)
                    if rel_feature_node is not False and feature_events is not False:
                        feature_events[rel_feature_node].append(act_event_obj)
            if features_dict is not False:
                if feature_events is not False:
                    for node_key, event_list in feature_events.items():
                        # update the feature with the first event "when" information
                        if len(event_list) > 0:
                            features_dict[node_key].update(event_list[0])
                            event_i = 1
                            for event in event_list:
                                if event_i <= 1:
                                    # add the time info to the feature
                                    old_feature = features_dict[node_key]
                                    old_geo_id = old_feature.geometry['id']
                                    old_prop_id = old_feature.properties['id']
                                    features_dict[node_key].update(event)
                                else:
                                    act_feature = copy.deepcopy(old_feature)
                                    # now add new node ids for the new features created for the event
                                    new_node = node_key + '-event-' + str(
                                        event_i)
                                    act_feature.id = new_node
                                    act_feature.geometry[
                                        'id'] = old_geo_id + '-event-' + str(
                                            event_i)
                                    act_feature.properties[
                                        'id'] = old_prop_id + '-event-' + str(
                                            event_i)
                                    act_feature.update(
                                        event
                                    )  # add the time info to the new feature
                                    features_dict[new_node] = act_feature
                                    del act_feature
                                event_i += 1
                feature_keys = list(features_dict.keys())
                if len(feature_keys) < 2:
                    # only 1 feature, so the item is not a feature collection
                    del features_dict[feature_keys[0]]['id']  # remove the conflicting id
                    json_ld.update(features_dict[feature_keys[0]])
                else:
                    # multiple features, so the item has a feature collection
                    feature_list = []
                    for node_key, feature in features_dict.items():
                        feature_list.append(feature)
                    item_fc = FeatureCollection(feature_list)
                    json_ld.update(item_fc)
        return json_ld
Exemplo n.º 20
0
"""Tests for filter.py"""
import unittest
from geojson import Feature, Polygon, LineString

from label_maker.filter import create_filter, _compile, _compile_property_reference, \
     _compile_comparison_op, _compile_logical_op, _compile_in_op, _compile_has_op, \
     _compile_negation, _stringify

line_geometry = LineString([(0, 0), (1, 1)])
polygon_geometry = Polygon([[(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)]])


class TestCompiledFilters(unittest.TestCase):
    """Tests for compiled filter functions"""
    def test_comparison(self):
        """Test comparison filter function"""
        ff = create_filter(['==', 'a', 5])
        passing = Feature(geometry=line_geometry, properties=dict(a=5))
        failing = Feature(geometry=line_geometry, properties=dict(a=4))
        self.assertTrue(ff(passing))
        self.assertFalse(ff(failing))

    def test_any(self):
        """Test any filter function"""
        ff = create_filter(['any', ['==', 'a', 5], ['==', 'b', 3]])
        passing1 = Feature(geometry=line_geometry, properties=dict(a=5))
        passing2 = Feature(geometry=line_geometry, properties=dict(b=3))
        passing3 = Feature(geometry=line_geometry, properties=dict(a=5, b=3))
        failing1 = Feature(geometry=line_geometry, properties=dict(a=4))
        failing2 = Feature(geometry=line_geometry, properties=dict(b=5))
        self.assertTrue(ff(passing1))
        self.assertTrue(ff(passing2))
        self.assertTrue(ff(passing3))
        self.assertFalse(ff(failing1))
        self.assertFalse(ff(failing2))
Exemplo n.º 21
0
def boxes(north, south, east, west, radial, outfile):
    params('boxes', north, south, east, west, radial)
    my_os = os.name
    if my_os == 'posix':
        # cmd_text = '/usr/bin/ogr2ogr'
        slash = '/'
    else:
        # cmd_text = 'c:\\OSGeo4W64\\bin\\ogr2ogr.exe'
        slash = '\\'
    #init bits
    # poly_list = []
    g_array = []  # array of geojson formatted geometry elements
    tabular_list = []  # array of all polygons and tabular columns
    layer_dict = {'Bounds': {'Australia': {'North': north, 'South': south,
                                           'West': west, 'East': east}}}
    layer_dict['Param'] = {}
    layer_dict['Param']['side_km'] = radial
    layer_dict['Param']['epsg'] = 4326
    layer_dict['Param']['shape'] = 'box'
    layer_dict['Boxes'] = {}
    layer_dict['Boxes']['long'] = 1
    hor_seq = [layer_dict['Boxes']['long'], layer_dict['Boxes']['long'],
               layer_dict['Boxes']['long'], layer_dict['Boxes']['long']]
    vert_seq = [layer_dict['Boxes']['long'], layer_dict['Boxes']['long'],
                layer_dict['Boxes']['long'], layer_dict['Boxes']['long']]
    bounds_lat_min = north
    bounds_lat_max = south
    bounds_lon_max = east
    bounds_lon_min = west

    h_line_list = horizontal(east,north,west,south,vert_seq,radial)
    num_h = len(h_line_list)
    max_h = num_h - 1
    v_line_list = vertical(east,north,west,south,vert_seq,radial)
    num_v = len(v_line_list)
    max_v = num_v - 1
    intersect_list = intersections(h_line_list,max_h, v_line_list, max_v)

    print('\n4/7 deriving boxes polygons from intersection data')
    top_left = 0
    vertex = [top_left + 0, top_left + 1, top_left + max_v + 1,
              top_left + max_v]

    while vertex[2] < max_h * max_v:
        poly_coords = [intersect_list[vertex[0]],
            intersect_list[vertex[1]], intersect_list[vertex[2]],
            intersect_list[vertex[3]], intersect_list[vertex[0]]]
        centre_lat = (intersect_list[vertex[0]][1]
            + (intersect_list[vertex[2]][1] - intersect_list[vertex[0]][1]) / 2)
        centre_lon = (intersect_list[vertex[0]][0]
            + (intersect_list[vertex[2]][0] - intersect_list[vertex[0]][0]) / 2)
        bounds_n = intersect_list[vertex[0]][1]
        bounds_s = intersect_list[vertex[3]][1]
        bounds_e = intersect_list[vertex[1]][0]
        bounds_w = intersect_list[vertex[0]][0]
        if bounds_e > bounds_w:
            geopoly = Polygon([poly_coords])
            geopoly = Feature(geometry=geopoly,
            properties={"p": top_left, "lat": centre_lat, "lon": centre_lon,
                "N": bounds_n, "S": bounds_s, "E": bounds_e, "W": bounds_w})
            g_array.append(geopoly)
            #append geojson geometry definition attributes to list
            #tabular dataset
            tabular_line = [top_left, centre_lat, centre_lon,
                            bounds_n, bounds_s, bounds_e, bounds_w]
            tabular_list.append(tabular_line)
            #array of polygon and tabular columns

        #increment values
        top_left += 1
        vertex = [top_left + 0, top_left + 1, top_left + max_v + 1,
                  top_left + max_v]

    print('\n5/7 boxes geojson dataset of {0} derived polygons'
          .format(len(g_array)))
    boxes_geojson = FeatureCollection(g_array)
    # convert merged geojson features
    #to geojson feature geohex_geojson
    g_array = []  # release g_array - array of geojson geometry elements

    print('writing boxes geojson formatted dataset to file: {0}.json'
          .format(outfile))
    myfile = open('geojson{slash}{outfile}_layer.json'
                  .format(outfile=outfile, slash=slash), 'w')
    #open file for writing geojson layer in geojson format
    myfile.write(str(boxes_geojson))  # write geojson layer to open file
    myfile.close()  # close file

    print('\n6/7 tabular dataset of {0} lines of boxes polygon data'
          .format(len(tabular_list)))
    print('writing tabular dataset to file: {0}_dataset.csv'.format(outfile))
    tabular_df = pd.DataFrame(tabular_list)
    #convert tabular array to tabular data frame
    tabular_df.columns = ['poly', 'lat', 'long', 'N', 'S', 'E', 'W']
    layer_dict['Bounds']['Dataset'] = {}
    #update layer_dict with dataset bounds
    layer_dict['Bounds']['Dataset']['North'] = tabular_df['N'].max()
    layer_dict['Bounds']['Dataset']['South'] = tabular_df['S'].min()
    layer_dict['Bounds']['Dataset']['East'] = tabular_df['E'].max()
    layer_dict['Bounds']['Dataset']['West'] = tabular_df['W'].min()
    tabular_df.to_csv('csv{slash}{outfile}_dataset.csv'.format(
        outfile=outfile, slash=slash), sep=',')


    print('\n7/7 boxes json metadata written to file: {0}_metadata.json'
          .format(outfile))
    myfile = open('metadata{slash}{outfile}_metadata.json'.format(
        outfile=outfile, slash=slash), 'w')  # open file for writing metadata
    myfile.write(json.dumps(layer_dict))  # write metadata to open file
    myfile.close()  # close file
    to_shp_tab(outfile, 'boxes')
    ref_files()
    print('\n')
    print('The End')  # end boxes
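# A hedged usage sketch (the coordinate values are placeholders): tiles the
# given bounds into radial-km boxes and writes the geojson/, csv/ and
# metadata/ outputs named after outfile; assumes those folders exist and the
# helpers (params, horizontal, vertical, intersections, to_shp_tab, ref_files)
# are in scope.
#
# boxes(north=-10.0, south=-44.0, east=154.0, west=112.0, radial=100,
#       outfile='au_boxes')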
Exemplo n.º 22
0
def catch(x, y, upload_folder, user_folder, file_name):
    # x, y, upload_folder, user_folder, file_name = read_in()

    # x, y, upload_folder, user_folder, file_name = [638311.1290535209, 4148774.824472582, 'file_uploads',
    #                                                'bbbbb',
    #                                                'dem.tar.gz']

    if file_name == 'srtm_turkey':
        try:
            xy_cor = transform(Proj(init='epsg:3857'), Proj(
                init='epsg:23036'), *zip([float(x), float(y)]))
            x, y = xy_cor[0][0], xy_cor[1][0]
            print(xy_cor)
            print(x, y)
            start = datetime.datetime.now()
            # bas = catchment_routine.CreateCacthment(480189.932, 4100069.151)
            # bas = catchment_routine.CreateCacthment(664421.0251895901, 4124028.181024239)
            bas = catchment_routine.CreateCacthment(x, y)
            bas.process_path = path
            bas.basin_dem = os.path.join(bas.process_path, "DEM_ED50_re.tif")
            # bas.process_path = r  "./file_uploads/srtm"
            upload_path = "./file_uploads"
            bas.init_grid()
            bas.readwindow()
            bas.conditioning()
            # bas.resample()
            bas.calculate_flow_dir()
            bas.calculate_accumlation()
            bas.dem_export(os.path.join(upload_path, user_folder))
            bas.snaptoacc()
            bas.snap_xy()
            ccc = bas.run_for_catchment()
            bas.to_shape(os.path.join(upload_path, user_folder))
            end = datetime.datetime.now() - start

        except BaseException as be:
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)
            with open("python_err_log.csv", "a") as c_file:
                c_writer = csv.writer(c_file)
                c_writer.writerow([str(be)])
                c_writer.writerow(["error"])
    else:
        file_name = extract_and_retreive(
            os.path.join(path, file_name))

        xnew = x
        ynew = y

        point_geom = Point(float(xnew), float(ynew))
        point = gpd.GeoDataFrame(
            index=[0], crs='epsg:23036', geometry=[point_geom])
        point.to_file(filename=os.path.join(
            path, "point.shp"), driver="ESRI Shapefile")

        wbt = whitebox.WhiteboxTools()
        wbt.set_verbose_mode(False)
        wbt.work_dir = path
        at = path

        wbt.breach_depressions("dem.tif", "DEM_breach.tif")
        wbt.fill_depressions("DEM_breach.tif", "DEM_fill.tif")
        wbt.flow_accumulation_full_workflow(
            "DEM_fill.tif", "DEM_out.tif", "Flow_dir.tif", "Flow_acc.tif", log=False)
        # wbt.basins("Flow_dir.tif", "Basins.tif")
        # wbt.extract_streams("Flow_acc.tif", "streams.tif", threshold=-1)
        # wbt.find_main_stem(
        #     "Flow_dir.tif", "streams.tif", "main_stream.tif")
        # wbt.raster_streams_to_vector(
        #     "streams.tif", "Flow_dir.tif", "riverswht.shp")
        # wbt.raster_streams_to_vector(
        #     "main_stream.tif", "Flow_dir.tif", "main_stream.shp")
        # wbt.horton_stream_order(
        #     "Flow_dir.tif", "streams.tif", "Horton.tif")
        # wbt.strahler_stream_order(
        #     "Flow_dir.tif", "streams.tif", "Strahler.tif")
        # wbt.raster_streams_to_vector(
        #     "Horton.tif", "Flow_dir.tif", "Horton.shp")
        # wbt.raster_streams_to_vector(
        #     "Strahler.tif", "Flow_dir.tif", "Strahler.shp")
        wbt.snap_pour_points("point.shp", "Flow_acc.tif",
                             "snap_point.shp", snap_dist=200)
        wbt.watershed("Flow_dir.tif", "snap_point.shp", "Watershed.tif")
        mask = None
        with rasterio.open(os.path.join(at, "Watershed.tif")) as src:
            image = src.read(1)  # first band
            results = (
                {'properties': {'raster_val': v}, 'geometry': s}
                for i, (s, v)
                in enumerate(
                shp(image, mask=mask, transform=src.transform)))

        geoms = list(results)
        boundary = shp(geoms[0]['geometry'])
        gpd_polygonized_raster = gpd.GeoDataFrame.from_features(geoms)
        # Filter nodata value
        gpd_polygonized_raster = gpd_polygonized_raster[gpd_polygonized_raster['raster_val'] == 1]
        # Convert to geojson

        gpd_polygonized_raster.crs = 'epsg:23036'
        gpd_polygonized_raster.to_file(
            driver='ESRI Shapefile', filename=os.path.join(at, "basin_boundary_23063.shp"))

        gpd_polygonized_raster = gpd_polygonized_raster.to_crs(
            'epsg:4326')  # world.to_crs(epsg=3395) would also work
        gpd_polygonized_raster.to_file(
            driver='ESRI Shapefile', filename=os.path.join(at, "basin_boundary.shp"))

        wbt.clip_raster_to_polygon(
            "DEM_out.tif", "basin_boundary_23063.shp", "DEM_watershed.tif")
        wbt.hypsometric_analysis("DEM_watershed.tif", "hypso.html")
        #wbt.slope_vs_elevation_plot(
        #    "DEM_watershed.tif", "Slope_elevation.html")
        wbt.zonal_statistics(
            "DEM_out.tif", "Watershed.tif", output=None, stat="total", out_table="stat.html")
        #wbt.raster_histogram("DEM_watershed.tif", "hist.html")

        gpd_polygonized_raster["area"] = gpd_polygonized_raster['geometry'].area
        Area = gpd_polygonized_raster['geometry'].area * 10000
        Area = Area.max()
        try:
            Centroid = [gpd_polygonized_raster.centroid.x[1], gpd_polygonized_raster.centroid.y[1]]
        except:
            Centroid = [gpd_polygonized_raster.centroid.x[0], gpd_polygonized_raster.centroid.y[0]]
        boundary = gpd_polygonized_raster.to_json()

        y = json.loads(boundary)
        # data = boundary['features'][0]['geometry']['coordinates']

        # logfile2.write(str(y['features'][0]['geometry']['coordinates']))

        data = y['features'][0]['geometry']['coordinates']

        try:
            if y['features'][1]['geometry']['coordinates'].__str__().__sizeof__() > data.__str__().__sizeof__():
                boundary = Polygon(y['features'][1]['geometry']['coordinates'])
            else:
                boundary = Polygon(data)
        except:
            boundary = Polygon(data)

        X, Y = hy.hypso(os.path.join(at, "hypso.html"))
        stat = hy.stat(os.path.join(at, "stat.html"))
        # logfile = open(
        #     r'D:\Github\model_experiment\NAM\datadir\basin_log33.txt', 'a+')
        # logfile.write(str(stat))
        basin_object = []
        text = "at"
        hypsometry = []
        hypsometry.append(X)
        hypsometry.append(Y)

        df_res = pd.DataFrame()
        df_res['X'] = X
        df_res['Y'] = Y
        j = df_res.to_json(orient='records')
        basin_object.append({"Polygon": json.dumps(boundary),
                             "hypso": j,
                             "stats": json.dumps(stat),
                             "status": 'success',
                             "Area": json.dumps(Area),
                             "Centroid": json.dumps(Centroid)})
        basin_object.append({"Polygon": json.dumps(boundary)})
        basin_object = json.dumps(basin_object)
        return basin_object
Exemplo n.º 23
0
def polygon_to_json_rep(polygon: SlfPolygon) -> Polygon:
    ps = list_point_tuples(polygon)
    return Polygon([ps])
Exemplo n.º 24
0
            try:
                prop[classes[i]]['probability_' + ps] = nz_prob
                prop[classes[i]]['row'] = int(coord[0])
                prop[classes[i]]['column'] = int(coord[1])
            except KeyError:
                prop[classes[i]] = {}
                prop[classes[i]]['probability_' + ps] = nz_prob
                prop[classes[i]]['row'] = int(coord[0])
                prop[classes[i]]['column'] = int(coord[1])
                if sdi is not None:
                    prop[classes[i]]['sdi'] = sdi
                prop[classes[i]]['type'] = "probability"
i = 0
for key in prop:
    pol = Polygon(polygons[key])
    if no_feature_collection is True:
        result = dumps({
            'type': 'Feature',
            'geometry': pol,
            "properties": prop[key]
        })
        print_result(args.output, result)
        if i < len(prop) - 1:
            print_result(args.output, ",")
    else:
        features.append(Feature(geometry=pol, properties=prop[key]))
    i = i + 1

if y is not None and no_feature_collection is False:
    prop = {}
Exemplo n.º 25
0
def analyze_nest_data(config):
    """ Analyze nest data """
    start_time = time.time()
    nest_url = osm_uri(
        config['p1_lat'],
        config['p1_lon'],
        config['p2_lat'],
        config['p2_lon'],
        config['osm_date'],
    )
    print("Overpass url:")
    print(nest_url)
    print("Getting OSM Data...")
    osm_session = requests.Session()

    response = osm_session.get(nest_url)

    # global nest_json
    nest_json = json.loads(response.text)
    if not nest_json:
        print("Error getting osm data")
        print(nest_json)
        return

    print("Getting OSM Data...Complete (took {} seconds)".format(time.time()))
    nest_mons = ""
    if NEST_SPECIES_LIST:
        filtered_species = set(NEST_SPECIES_LIST) - set(config['event_poke'])
        for i in filtered_species:
            if nest_mons == "":
                nest_mons = "'"+ str(i) +"'"
            else:
                nest_mons = nest_mons + ",'"+ str(i) +"'"
    else:
        nest_mons = "''"
    #print(response.text)
    print("##"*20)

    nodes = dict()
    areas = list()
    for element in nest_json['elements']:
        if not "type" in element:
            continue
        if element["type"] == "node":
            nodes[element["id"]] = {
                "lat": element["lat"],
                "lon": element["lon"]
            }
        elif element["type"] == "way":
            if "nodes" not in element and not element["nodes"]:
                continue
            areas.append(element)
    print("Initialize/Start DB Session")
    mydb_r = connect(
        host=config['db_r_host'],
        user=config['db_r_user'],
        passwd=config['db_r_pass'],
        database=config['db_r_name'],
        port=config['db_r_port'],
        charset=config['db_r_charset'],
        autocommit=True)
    mydb_w = connect(
        host=config['db_w_host'],
        user=config['db_w_user'],
        passwd=config['db_w_pass'],
        database=config['db_w_name'],
        port=config['db_w_port'],
        charset=config['db_w_charset'],
        autocommit=True)

    mycursor_r = mydb_r.cursor()
    mycursor_w = mydb_w.cursor()
    print("Connection clear")
    # Delete old Nest data
    if config['delete_old']:
        print("Delete Old Nests")
        mycursor_w.execute(
            NEST_DELETE_QUERY.format(
                db_name=config['db_w_name'],
                db_nests=config['db_nest']
            )
        )
        print("Delete Old Nests - Complete")
    print("Start Analyzing Nests")

    all_areas = list()
    failed_nests = defaultdict(int)
    areas_len = len(areas)
    for (idx, area) in enumerate(areas, start=1):
        area_name = "Unknown Areaname"
        if "tags" in area and "name" in area["tags"]:
            area_name = area["tags"]["name"]
        progress(idx, areas_len, "({}/{}) {}".format(
            idx,
            areas_len,
            "Starting to analyze Nest"))
        area_points = list()
        for point in area['nodes']:
            point_coords = nodes[point]
            area_points.append([point_coords['lon'], point_coords['lat']])
        area_poly = Polygon([area_points])
        area_poly_props = {
            "name": area_name,
            "stroke": config["json-stroke"],
            "stroke-width": config['json-stroke-width'],
            "stroke-opacity": config['json-stroke-opacity'],
            "fill": config['json-fill'],
            "fill-opacity": config['json-fill-opacity']
        }
        area_shapeley_poly = geometry.MultiPoint(area_points).convex_hull
        area_center_point = area_shapeley_poly.centroid
        min_lon, min_lat, max_lon, max_lat = area_shapeley_poly.bounds

        area_pokestops = dict()
        if config['pokestop_pokemon']:
            # Get all Pokestops with id, lat and lon
            progress(idx, areas_len, "({}/{}) {}".format(
                idx,
                areas_len,
                "Get all Pokestops within min/max lat/lon"))
            pokestop_sel_query = POKESTOP_SELECT_QUERY.format(
                db_name=config['db_r_name'],
                db_pokestop=config['db_pokestop'],
                min_lat=min_lat,
                max_lat=max_lat,
                min_lon=min_lon,
                max_lon=max_lon
            )
            #print(pokestop_sel_query)
            mycursor_r.execute(pokestop_sel_query)
            myresult_pokestops = mycursor_r.fetchall()
            progress(idx, areas_len, "({}/{}) {}".format(
                idx,
                areas_len,
                "Got all wanted Pokestops - now filter them"))
            for pkstp in myresult_pokestops:
                pkst_point = geometry.Point(pkstp[2], pkstp[1])
                if pkst_point.within(area_shapeley_poly):
                    area_pokestops[pkstp[0]] = pkst_point
            progress(idx, areas_len, "({}/{}) {}".format(
                idx,
                areas_len,
                "Filtering of all Pokestops complete"))

        area_spawnpoints = dict()
        progress(idx, areas_len, "({}/{}) {}".format(
            idx,
            areas_len,
            "Get all Spawnpoints within min/max lat/lon"))
        # Get all Spawnpoints with id, lat and lon
        spawnpoint_sel_query = SPAWNPOINT_SELECT_QUERY.format(
            db_name=config['db_r_name'],
            db_spawnpoint=config['db_spawnpoint'],
            sp_id=config['db_spawnpoint_id'],
            lat=config['db_spawnpoint_lat'],
            lon=config['db_spawnpoint_lon'],
            min_lat=min_lat,
            max_lat=max_lat,
            min_lon=min_lon,
            max_lon=max_lon
        )
        #print(spawnpoint_sel_query)
        mycursor_r.execute(spawnpoint_sel_query)
        my_result_spawnsoints = mycursor_r.fetchall()
        progress(idx, areas_len, "({}/{}) {}".format(
            idx,
            areas_len,
            "Got all wanted Spawnpoints - now filter them"))
        for spwn in my_result_spawnsoints:
            spwn_point = geometry.Point(spwn[2], spwn[1])
            if spwn_point.within(area_shapeley_poly):
                area_spawnpoints[spwn[0]] = spwn_point
        progress(idx, areas_len, "({}/{}) {}".format(
            idx,
            areas_len,
            "Filtering of all Spawnpoints complete"))

        if not area_pokestops and not area_spawnpoints:
            failed_nests["Park has no Stops and no Spawnpoints, ignore it"] += 1
            continue
        if (len(area_pokestops) < 1) and (
                len(area_spawnpoints) < config['min_spawn']):
            failed_nests["Park has not enough Spawnpoints, ignore it"] += 1
            continue
        spawnpoint_in = "'{}'".format("','".join(str(nr) for nr in area_spawnpoints))
        pokestop_in = "'{}'".format("','".join(str(nr) for nr in area_pokestops))
        #print(spawnpoint_in)
        #print(pokestop_in)
        #print(nest_mons)

        # Use data since last change:
        reset_time = int(time.time()) - (config['timespan']*3600)
        # RDM uses pokestop_ids, MAD not
        if config['pokestop_pokemon']:
            progress(idx, areas_len, "({}/{}) {}".format(
                idx,
                areas_len,
                "Get all Pokes from stops and spawnpoints within nest area"))
            nest_query = NEST_SELECT_QUERY_STOP
            if not config['use_unix_timestamp']:
                nest_query = NEST_SELECT_QUERY_STOP.replace(
                    "UNIX_TIMESTAMP({pokemon_timestamp})",
                    "{pokemon_timestamp}")
        else:
            progress(idx, areas_len, "({}/{}) {}".format(
                idx,
                areas_len,
                "Get all Pokes from spawnpoints within nest area"))
            nest_query = NEST_SELECT_QUERY
            if not config['use_unix_timestamp']:
                nest_query = NEST_SELECT_QUERY.replace(
                    "UNIX_TIMESTAMP({pokemon_timestamp})",
                    "{pokemon_timestamp}")
        query = nest_query.format(
            db_name=config['db_r_name'],
            db_pokemon_table=config['db_pokemon'],
            pokemon_timestamp=config['db_pokemon_timestamp'],
            pokestop_in=pokestop_in,
            spawn_id=config['db_pokemon_spawnid'],
            spawnpoint_in=spawnpoint_in,
            nest_mons=nest_mons,
            reset_time=str(reset_time)
        )
        #print(query)
        mycursor_r.execute(query)
        myresult = mycursor_r.fetchall()
        progress(idx, areas_len, "({}/{}) {}".format(
            idx,
            areas_len,
            "Got all Pokes from Nest area"))
        area_poke = (0, 0)
        for mrsp in myresult:
            poke_id, poke_amount = int(mrsp[0]), int(mrsp[1])
            if poke_amount < area_poke[1]:
                continue
            area_poke = (poke_id, poke_amount)
        progress(idx, areas_len, "({}/{}) {}".format(
            idx,
            areas_len,
            "Filter and insert Nests"))
        if area_poke[1] < config['min_pokemon']:
            failed_nests["Not enough Pokes in this Area to specify a real Nest"] += 1
            continue

        current_time = int(time.time())

        progress(idx, areas_len, "({}/{}) {}".format(
            idx,
            areas_len,
            "Found Probable Nest - insert it now in db"))
        # Insert Nest data to db
        insert_query = NEST_INSERT_QUERY.format(
            db_name=config['db_w_name'],
            db_nests=config['db_nest'])

        insert_args = {
            "nest_id": str(area['id']),
            "name": area_name,
            "lat": float(area_center_point.x),
            "lon": float(area_center_point.y),
            "pokemon_id": int(area_poke[0]),
            "type": 0,
            "pokemon_count": int(area_poke[1]),
            "pokemon_avg": area_poke[1] / float(config['timespan']),
            "current_time": current_time,
        }
        #print(sql)
        mycursor_w.execute(insert_query, insert_args)
        print("\nNest added in DB\n")
        all_areas.append(
            Feature(
                geometry=area_poly,
                id=area['id'],
                properties=area_poly_props))

    mydb_r.close()
    mydb_w.close()

    print("\nNest analyzing took {:.2f} minutes".format(
        (time.time() - start_time)/60))
    if all_areas:
        print("All Nests Added ({}):\n############".format(len(all_areas)))
    else:
        print("No Nests Added")
    print("No nest reasons:\n############") if failed_nests else "No false positive Parks"
    for (key, value) in failed_nests.items():
        print("{}: {}".format(key, value))


    if config['geojson_extend']:
        with open(config['save_path'], 'r') as old_file_:
            old_geojson = json.load(old_file_)
            all_areas += old_geojson['features']
            print('old areas added to the new ones')
    with open(config['save_path'], 'w') as file_:
        print('write geojson')
        json.dump(FeatureCollection(all_areas), file_, indent=4)
        print("geoJSON saved successfully")
Exemplo n.º 26
0
    gj['crs'] = dict()
    gj['crs']['properties'] = {'name': "urn:ogc:def:crs:EPSG::4269"}
    gj['crs']['type'] = "name"
    gj['type'] = "FeatureCollection"
    gj['features'] = []

    for k, p in enumerate(pols[y]):
        if (len(p) == 0):
            continue

        curPoints = []
        for i in range(len(p)):
            curPoints.append(points[y][p[i]])
        curPoints.append(points[y][p[0]])

        g = Polygon([curPoints])
        p = dict()
        i, j = revID[y][k]
        p['CT_ID'] = 'SY.{0}.{1}'.format(i, j)
        p['variables'] = []
        p['variables'].append({
            'labels': [
                'Total',
            ],
            'name': 'Population',
            'short': [
                'Total',
            ],
            'type': 'internal',
            'values': [
                100,
Exemplo n.º 27
0
def get_raster_availability(layer, bbox=None):
    """retrieve metadata for raster tiles that cover the given bounding box
    for the specified data layer.

    Parameters
    ----------
    layer : str
        dataset layer name. (see get_available_layers for list)
    bbox : (sequence of float|str)
        bounding box, in geographic coordinates, of the area to download tiles
        for, in the format (min longitude, min latitude, max longitude, max latitude)

    Returns
    -------
    metadata : geojson FeatureCollection
        returns metadata including download urls as a FeatureCollection
    """

    base_url = 'https://www.sciencebase.gov/catalog/items'
    params = [
        ('parentId', layer_dict[layer]),
        ('filter', 'tags=IMG'),
        ('max', 1000),
        ('fields', 'webLinks,spatial,title'),
        ('format', 'json'),
    ]

    if bbox:
        xmin, ymin, xmax, ymax = [float(n) for n in bbox]
        polygon = 'POLYGON (({}))'.format(','.join(
            repr(x) + ' ' + repr(y)
            for x, y in [(xmin, ymax), (xmin, ymin), (xmax, ymin),
                         (xmax, ymax), (xmin, ymax)]))
        params.append(
            ('filter', 'spatialQuery={{wkt:"{}",relation:"{}"}}'.format(
                polygon, 'intersects')))

    features = []
    url = base_url
    while url:
        r = requests.get(url, params)
        print('retrieving raster availability from %s' % r.url)
        params = []  # not needed after first request
        content = r.json()
        for item in content['items']:
            feature = Feature(
                geometry=Polygon(_bbox2poly(item['spatial']['boundingBox'])),
                id=item['id'],
                properties={
                    'name': item['title'],
                    'layer': layer,
                    'format': '.img',
                    'download url': [x for x in item['webLinks']
                                     if x['type'] == 'download'][0]['uri'],
                })
            features.append(feature)

        if content.get('nextlink'):
            url = content['nextlink']['url']
        else:
            break

    return FeatureCollection(features)
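# A minimal usage sketch; the layer name is hypothetical (the valid keys of
# layer_dict come from get_available_layers) and the bbox is an arbitrary area:
#
# fc = get_raster_availability('NED 1 arc-second',
#                              bbox=(-122.6, 37.7, -122.3, 37.9))
# for feature in fc['features']:
#     print(feature['properties']['download url'])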
Exemplo n.º 28
0
import sys

import fiona

from geojson import Feature, FeatureCollection, dump, Polygon

if __name__ == "__main__":
    arguments = sys.argv
    if len(arguments) != 3:
        print("Invalid number of arguments used!")
        print(
            "Usage: gpkgtogeojson.py <input GPKG file> <output GeoJSON file>")
        sys.exit()

    input_file = sys.argv[1]
    output_file = sys.argv[2]

    vertices = []
    features = []

    vertex_id = 1

    with fiona.open(input_file) as layer:
        for feature in layer:
            features.append(
                Feature(geometry=Polygon(feature["geometry"]["coordinates"])))

    feature_collection = FeatureCollection(features)

    with open(output_file, 'w') as f:
        dump(feature_collection, f)
Exemplo n.º 29
0
    def preflight_generator():

        # header info is stringified query parameters (to encode the GUI parameters via GA)
        query_list = list(request.form.items()) 
        header = make_current_URL(query_list)[1:] # skip leading ? 

        # create html string
        html = '<html>'
        html += make_GA_script(header) # <head> with script that inits GA with my tracking id and calls send pageview
        
        # onload event will only be triggered once </body> is given
        html +=  '''<body onerror="document.getElementById('error').innerHTML='Error (non-python), possibly the server timed out ...'"\n onload="document.getElementById('gif').style.display='none'; document.getElementById('working').innerHTML='Processing finished'">\n'''
        html += '<h2 id="working" >Processing terrain data into 3D print file(s), please be patient.<br>\n'
        html += 'Once the animation stops, you can preview and download your file.</h2>\n'
        
        
        
        yield html  # this effectively prints html into the browser but doesn't block, so we can keep going and append more html later ...


        #
        #  print/log all args and their values
        #

        # put all args we got from the browser in a dict as key:value
        args = request.form.to_dict()

        # list of the subset of args needed for processing
        key_list = ("DEM_name", "trlat", "trlon", "bllat", "bllon", "printres",
                  "ntilesx", "ntilesy", "tilewidth", "basethick", "zscale", "fileformat")

        for k in key_list:
            # float-ify some args
            if k in ["trlat", "trlon", "bllat", "bllon","printres", "tilewidth", "basethick", "zscale"]:
                args[k] = float(args[k])

            # int-ify some args
            if k in ["ntilesx", "ntilesy"]:
                args[k] = int(args[k])


        # decode any extra (manual) args and put them in the args dict as
        # separate args as the are needed in that form for processing
        # Note: the type of each arg is decided by  json.loads(), so 1.0 will be a float, etc.
        manual = args.get("manual", None)
        extra_args = {}
        if manual is not None:

            JSON_str = "{ " + manual + "}"
            try:
                extra_args = json.loads(JSON_str)
            except Exception as e:
                s = "JSON decode Error for manual: " + manual + "   " + str(e)
                logging.warning(s)
                print(e)
                yield "Warning: " + s + "<br>"
            else:
                for k in extra_args:
                    args[k] = extra_args[k] # append/overwrite
                    # TODO: validate

        # log and show args in browser
        html =  '<br>'
        for k in key_list:
            if args[k] != None and args[k] != '':
                html += "%s = %s <br>" % (k, str(args[k]))
                logging.info("%s = %s" % (k, str(args[k])))
        html += "<br>"
        for k in extra_args:
            if args[k] != None and args[k] != '':
                html += "%s = %s <br>" % (k, str(args[k]))
                logging.info("%s = %s" % (k, str(args[k])))

        # see if we have a optional kml file in requests
        geojson_polygon = None
        if 'kml_file' in request.files:
            kml_file = request.files['kml_file']
            
            if kml_file.filename != '':
                from geojson import Polygon
                
                # process kml file
                kml_stream = kml_file.read()
                coords, msg = TouchTerrainEarthEngine.get_KML_poly_geometry(kml_stream) 
                
                if msg != None: # Either got a line instead of polygon or nothing good at all
                    if coords == None: # got nothing good
                        html += "Warning: " + kml_file.filename + " contained neither polygon nor line, falling back to area selection box.<br>"
                    else: 
                        html += "Warning: Using line with " + str(len(coords)) + " points in " + kml_file.filename + " as no polygon was found.<br>"
                        geojson_polygon = Polygon([coords])  
                else: # got polygon
                    geojson_polygon = Polygon([coords]) # coords must be [0], [1] etc. would be holes 
                    html  += "Using polygon from kml file " + kml_file.filename + " with " + str(len(coords)) + " points.<br>"                   
        
        html += "<br>"
        yield html

        #
        # bail out if the raster would be too large
        #
        width = args["tilewidth"]
        bllon = args["bllon"]
        trlon = args["trlon"]
        bllat = args["bllat"]
        trlat = args["trlat"]
        dlon =  180 - abs(abs(bllon - trlon) - 180) # width in degrees
        dlat =  180 - abs(abs(bllat - trlat) - 180) # height in degrees
        center_lat = bllat + abs((bllat - trlat) / 2.0)
        latitude_in_m, longitude_in_m = arcDegr_in_meter(center_lat)
        num_total_tiles = args["ntilesx"] * args["ntilesy"]
        pr = args["printres"]

        # if we have "only" set, divide load by number of tiles
        div_by = 1
        if extra_args.get("only") != None:
            div_by = float(num_total_tiles)

        # for geotiffs only, set a much higher limit b/c we don't do any processing,
        # just d/l the GEE geotiff and zip it
        if args["fileformat"] == "GeoTiff":
            global MAX_CELLS_PERMITED # thanks Nick!
            MAX_CELLS_PERMITED *= 100

        # pr <= 0 means: use source resolution
        if pr > 0: # print res given by user (width and height are in mm)
            height = width * (dlat / float(dlon))
            pix_per_tile = (width / float(pr)) * (height / float(pr))
            tot_pix = int((pix_per_tile * num_total_tiles) / div_by) # total pixels to print
            print("total requested pixels to print", tot_pix, ", max is", MAX_CELLS_PERMITED, file=sys.stderr)
        else:
            # estimates the total number of cells from area and arc sec resolution of source
            # this is done for the entire area, so number of cell is irrelevant
            DEM_name = args["DEM_name"]
            cell_width_arcsecs = {"USGS/NED":1/9.0,  "MERIT/DEM/v1_0_3":3,"USGS/GMTED2010":7.5, "CPOM/CryoSat2/ANTARCTICA_DEM":30,
                                  "NOAA/NGDC/ETOPO1":60, "USGS/GTOPO30":30, "USGS/SRTMGL1_003":1,
                                  "JAXA/ALOS/AW3D30/V2_2":1, "NRCan/CDEM": 0.75,} # in arcseconds!
            cwas = float(cell_width_arcsecs[DEM_name])
            tot_pix =    int( ( ((dlon * 3600) / cwas) *  ((dlat * 3600) / cwas) ) / div_by)
            print("total requested pixels to print at a source resolution of", round(cwas,2), "arc secs is ", tot_pix, ", max is", MAX_CELLS_PERMITED, file=sys.stderr)

        if tot_pix >  MAX_CELLS_PERMITED:
            html = "Your requested job is too large! Please reduce the area (red box) or lower the print resolution<br>"
            html += "<br>Current total number of Kilo pixels is " + str(round(tot_pix / 1000.0, 2))
            html += " but must be less than " + str(round(MAX_CELLS_PERMITED / 1000.0, 2))
            html +  "If you're trying to process multiple tiles: Consider using the only manual setting to instead print one tile at a time (https://chharding.github.io/TouchTerrain_for_CAGEO/)"
            html += "<br><br>Hit Back on your browser to go back to the Main page and make adjustments ...\n"
            html +=  '</body></html>'
            yield html
            return "bailing out!"

        args["CPU_cores_to_use"] = NUM_CORES


        # check if we have a valid temp folder
        args["temp_folder"] = TMP_FOLDER
        print("temp_folder is set to", args["temp_folder"], file=sys.stderr)
        if not os.path.exists(args["temp_folder"]):
            s = "temp folder " + args["temp_folder"] + " does not exist!"
            print(s, file=sys.stderr)
            logging.error(s)
            html = '</body></html>Error:' + s
            yield html
            return "bailing out!"# Cannot continue without proper temp folder

        # name of zip file is time since 2000 in 0.01 seconds
        fname = str(int((datetime.now()-datetime(2000,1,1)).total_seconds() * 1000))
        args["zip_file_name"] = fname

        # if this number of cells to process is exceeded, use a temp file instead of memory only
        args["max_cells_for_memory_only"] = MAX_CELLS

        # set geojson_polygon as polygon arg (None by default)
        args["polygon"] = geojson_polygon

        # show snazzy animated gif - set to style="display: none to hide once processing is done
        html =  '<img src="static/processing.gif" id="gif" alt="processing animation" style="display: block;">\n'

        # add an empty paragraph for error messages during processing that come from JS
        html += '<p id="error"> </p>\n'
        yield html

        #
        # Create zip and write to tmp
        #
        try:
            totalsize, full_zip_file_name = TouchTerrainEarthEngine.get_zipped_tiles(**args) # all args are in a dict
        except Exception as e:
            print("Error:", e, file=sys.stderr)
            html =  '</body></html>' + "Error:," + str(e)
            yield html
            return "bailing out!"

        # if totalsize is negative, something went wrong, error message is in full_zip_file_name
        if totalsize < 0:
            print("Error:", full_zip_file_name, file=sys.stderr)
            html =  '</body></html>' + "Error:," + str(full_zip_file_name)
            yield html
            return "bailing out!"

        else:
            html = ""

            # move zip from temp folder to static folder so flask can serve it (. is server root!)
            zip_file = fname + ".zip"
            try:
                os.rename(full_zip_file_name, os.path.join(DOWNLOADS_FOLDER, zip_file))
            except Exception as e:
                print("Error moving file from tmp to downloads:", e, file=sys.stderr)
                html =  '</body></html>' + "Error:," + str(e)
                yield html
                return "bailing out!"

            zip_url = url_for("download", filename=zip_file)


            if args["fileformat"] in ("STLa", "STLb"):
                html += '<br><form action="' + url_for("preview", zip_file=zip_file)  +'" method="GET" enctype="multipart/form-data">'
                html += '  <input type="submit" value="Preview STL " '
                html += ''' onclick="ga('send', 'event', 'Preview', 'Click', 'preview', '0')" '''
                html += '   title=""> '
                html += 'Note: This uses WebGL for in-browser 3D rendering and may take a while to load for large models.<br>\n'
                html += 'You may not see anything for a while even after the progress bar is full!'
                html += '</form>\n'

            html += "Optional: tell us what you're using this model for<br>\n"
            html += '''<textarea autofocus form="dl" id="comment" cols="100" maxlength=150 rows="2"></textarea><br>\n'''

            html += '<br>\n<form id="dl" action="' + zip_url +'" method="GET" enctype="multipart/form-data">\n'
            html += '  <input type="submit" value="Download zip File " \n'
            #https://stackoverflow.com/questions/57499732/google-analytics-events-present-in-console-but-no-more-in-api-v4-results
            html += '''  onclick=onclick_for_dl();\n'''
            html += '   title="zip file contains a log file, the geotiff of the processed area and the 3D model file (stl/obj) for each tile">\n'
            html += "   Size: %.2f Mb   (All files will be deleted in 6 hrs.)<br>\n" % totalsize
            html += '</form>\n'

            html += "   <br>To return to the selection map, click on the back button in your browser once, or on the link below:<br>"
            #html += "<br>Click on the URL below to return to the selection map:<br>"

            # print out the query parameters (note hardcoded server name!)
            html += '<a href = "'
            query_list = list(request.form.items())
            server = "https://touchterrain.geol.iastate.edu/"
            #server = "https://touchterrain-beta.geol.iastate.edu/"
            query_str = server + make_current_URL(query_list) 
            html += query_str + '">' + query_str + "</a><br>"
            html += "<br>To have somebody else generate the same model, have them copy&paste this URL into a browser<br>" 
 
            html +=  '</body></html>'
            yield html
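
For context, here is a minimal, self-contained sketch of the streaming pattern the generator above relies on (assuming Flask; the route and generator names are illustrative and not part of the original code):

from flask import Flask, Response

app = Flask(__name__)

@app.route("/export")
def export():
    def progress():
        yield "<html><body>\n"
        yield "<p>processing ...</p>\n"  # flushed to the browser immediately
        # ... long-running work goes here, yielding more HTML as it finishes ...
        yield "</body></html>"
    # Wrapping the generator in a Response makes Flask stream each chunk as it
    # is yielded instead of waiting for the whole page to be rendered.
    return Response(progress(), mimetype="text/html")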
Example No. 30
0
def generate_renamed_street_umap(number_of_snapshots=2,
                                 include_found_objects=False,
                                 ignore_minor_changes=True):
    streets = {}
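    # streets maps SKZ -> chronological list of (name, snapshot, gemeinde, gkz)
    # tuples; a street accumulates more than one entry when its name changed
    # between snapshots.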
    for snapshot in bev_db.SNAPSHOTS[-number_of_snapshots:]:
        print(snapshot)
        con = bev_db.get_db_conn(snapshot)
        cur = con.cursor()
        sql = "SELECT s.SKZ, s.STRASSENNAME, g.GEMEINDENAME, g.GKZ FROM STRASSE s JOIN GEMEINDE g ON s.GKZ = g.GKZ"
        for row in cur.execute(sql):
            skz, name, gemeinde, gkz = row
            if skz in streets:
                if ignore_minor_changes:
                    if normalize_streetname(
                            streets[skz][-1][0]) != normalize_streetname(name):
                        streets[skz].append((name, snapshot, gemeinde, gkz))
                elif streets[skz][-1][0] != name:
                    streets[skz].append((name, snapshot, gemeinde, gkz))
            else:
                streets[skz] = [(name, snapshot, gemeinde, gkz)]

    renamed_skz = [skz for skz in streets if len(streets[skz]) > 1]
    query = """SELECT GEMEINDE.GKZ, GEMEINDE.GEMEINDENAME, STRASSE.SKZ, STRASSE.STRASSENNAME, STRASSE.FOUND, 
        COUNT(ADRESSE.ADRCD), MIN(LAT), MIN(LON), MAX(LAT), MAX(LON) 
        FROM STRASSE JOIN ADRESSE ON ADRESSE.SKZ = STRASSE.SKZ JOIN GEMEINDE ON GEMEINDE.GKZ = ADRESSE.GKZ WHERE STRASSE.SKZ IN ({}) 
        AND ADRESSE.HAUSNRZAHL1 != ""
        GROUP BY STRASSE.SKZ HAVING COUNT(ADRESSE.ADRCD) > 1 ORDER BY 1, 4 DESC""".format(
        ",".join("?" * len(renamed_skz)))
    umap = Umap()
    for row in con.execute(query, tuple(renamed_skz)):
        gkz, gemeindename, skz, strassenname, found, count, min_lat, min_lon, max_lat, max_lon = row
        bezirkname = get_bezirk(gkz)
        bundesland = get_bundesland(gkz)
        layer = "%s: %s" % (bundesland, bezirkname)
        try:
            area_size = projection.get_area_size(min_lon, max_lon, min_lat,
                                                 max_lat)
        except TypeError:
            continue
        try:
            adr_per_km2 = count / area_size
        except ZeroDivisionError:
            adr_per_km2 = 0
        changes = "\n".join([
            "%s: %s" % (bev_db.format_key_date(s[1]), s[0])
            for s in streets[skz]
        ])
        josm_link = umap.get_josm_link(min_lon,
                                       max_lon,
                                       min_lat,
                                       max_lat,
                                       area_size=area_size)
        properties = {
            "name":
            "%s (%s)" % (strassenname, gemeindename),
            "description":
            """%s\n%s\n%s Adressen\nSKZ %s\nGröße: %4.2f km²\nAdr./km²: %s""" %
            (josm_link, changes, count, skz, area_size, int(adr_per_km2))
        }
        feature = Feature(properties=properties,
                          geometry=Polygon([[[min_lon, min_lat],
                                             [min_lon, max_lat],
                                             [max_lon, max_lat],
                                             [max_lon, min_lat]]]))
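        # The feature geometry is simply the street's lat/lon bounding box,
        # drawn as a rectangle around all of its addresses.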
        if found != bev_db.SearchStatus.FOUND:
            if found == bev_db.SearchStatus.UNDER_CONSTRUCTION:
                umap.add_feature(feature, layer, {"color": "Orange"})
            else:
                umap.add_feature(feature, layer, {"color": "Red"})
        elif include_found_objects:
            umap.add_feature(feature, layer)
    umap.dump('renamed_streets.umap')
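
The normalize_streetname() helper used above is not part of this excerpt; the following is a plausible minimal sketch, assuming its purpose is to ignore case, whitespace and punctuation when comparing street names:

import re

def normalize_streetname(name):
    # Hypothetical implementation: lowercase the name and drop everything that
    # is not a letter or digit, so that e.g. "Dr.-Karl-Renner-Str." and
    # "Dr. Karl Renner Str" compare as equal.
    return re.sub(r"[^0-9a-zäöüß]", "", name.lower())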