Code Example #1
def get_labs(format):
    """Gets Hackerspaces data from hackaday.io."""

    hackerspaces_json = data_from_hackaday_io(hackaday_io_labs_map_url)
    hackerspaces = {}

    # Load all the Hackerspaces
    for i in hackerspaces_json:
        current_lab = Hackerspace()
        current_lab.id = i["id"]
        current_lab.url = "https://hackaday.io/hackerspace/" + str(current_lab.id)
        current_lab.name = i["name"]
        if len(i["description"]) != 0:
            current_lab.description = i["description"]
        elif len(i["summary"]) != 0:
            current_lab.description = i["summary"]
        current_lab.created_at = i["moments"]["exact"]

        # Check if there are coordinates
        if i["latlon"] is not None:
            latlon = json.loads(i["latlon"])
            current_lab.latitude = latlon["lat"]
            current_lab.longitude = latlon["lng"]
            # Get country, county and city from them
            country = geolocator.reverse([latlon["lat"], latlon["lng"]])
            current_lab.country = country.raw["address"]["country"]
            current_lab.address = country.raw["display_name"]
            current_lab.address_1 = country.raw["display_name"]
            current_lab.country_code = country.raw["address"]["country_code"]
            current_lab.county = country.raw["address"]["state_district"]
            current_lab.city = country.raw["address"]["city"]
            current_lab.postal_code = country.raw["address"]["postcode"]
        else:
            # For labs without a location or coordinates
            # add 0,0 as coordinates
            current_lab.latitude = 0.0
            current_lab.longitude = 0.0

        # Add the lab
        hackerspaces[i["name"]] = current_lab

    # Return a dictionary / json
    if format.lower() == "dict" or format.lower() == "json":
        output = {}
        for j in hackerspaces:
            output[j] = hackerspaces[j].__dict__
    # Return a geojson
    elif format.lower() == "geojson" or format.lower() == "geo":
        labs_list = []
        for l in hackerspaces:
            single = hackerspaces[l].__dict__
            # GeoJSON expects coordinates in (longitude, latitude) order
            single_lab = Feature(type="Feature",
                                 geometry=Point((single["longitude"],
                                                 single["latitude"])),
                                 properties=single)
            labs_list.append(single_lab)
        output = dumps(FeatureCollection(labs_list))
    # Return a Pandas DataFrame
    elif format.lower() == "pandas" or format.lower() == "dataframe":
        output = {}
        for j in hackerspaces:
            output[j] = hackerspaces[j].__dict__
        # Transform the dict into a Pandas DataFrame
        output = pd.DataFrame.from_dict(output)
        output = output.transpose()
    # Return an object
    elif format.lower() == "object" or format.lower() == "obj":
        output = hackerspaces
    # Default: return an object
    else:
        output = hackerspaces
    # Return a proper json
    if format.lower() == "json":
        output = json.dumps(output)
    return output
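A minimal usage sketch (hypothetical; assumes the module-level helpers the function relies on, such as data_from_hackaday_io, geolocator and the Hackerspace class, are set up):

# Hypothetical usage of get_labs(); each call re-fetches and re-geocodes the data.
labs = get_labs("dict")     # dict of plain attribute dicts, keyed by lab name
geo = get_labs("geojson")   # serialized GeoJSON FeatureCollection string
df = get_labs("pandas")     # DataFrame with one row per hackerspace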
Code Example #2
 def get_geojson_feature_collection(segments: List['Segment']):
     return FeatureCollection([s.get_geojson_feature() for s in segments])
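For context, a minimal sketch of the interface this one-liner expects: any object exposing a get_geojson_feature() method works. The Segment fields below are hypothetical, not from the original project.

from typing import List
from geojson import Feature, FeatureCollection, Point

class Segment:
    def __init__(self, lon, lat):
        self.lon, self.lat = lon, lat

    def get_geojson_feature(self):
        # GeoJSON points are (lon, lat)
        return Feature(geometry=Point((self.lon, self.lat)))

fc = get_geojson_feature_collection([Segment(4.90, 52.37), Segment(4.91, 52.38)])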
Code Example #3
def export_observations_web(info_role):
    """
        Optimized route for observations web export
        .. :quickref: Synthese;
        This view is customisable by the administrator
        Some columns arer mandatory: id_sythese, geojson and geojson_local to generate the exported files
        
        POST parameters: Use a list of id_synthese (in POST parameters) to filter the v_synthese_for_export_view
        
        :query str export_format: str<'csv', 'geojson', 'shapefiles'>

    """
    params = request.args
    # set default to csv
    export_format = "csv"
    export_view = GenericTable(
        "v_synthese_for_export",
        "gn_synthese",
        "the_geom_local",
        current_app.config["LOCAL_SRID"],
    )
    if "export_format" in params:
        export_format = params["export_format"]

    # get list of id synthese from POST
    id_list = request.get_json()

    db_cols_for_shape = []
    columns_to_serialize = []
    # loop over synthese config to get the columns for export
    for db_col in export_view.db_cols:
        if db_col.key in current_app.config["SYNTHESE"]["EXPORT_COLUMNS"]:
            db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)

    q = DB.session.query(export_view.tableDef).filter(
        export_view.tableDef.columns.idSynthese.in_(id_list)
    )
    # check R and E CRUVED to know if we filter with cruved
    cruved = cruved_scope_for_user_in_module(info_role.id_role, module_code="SYNTHESE")[
        0
    ]
    if cruved["R"] > cruved["E"]:
        # filter on cruved specifying the column
        # id_dataset, id_synthese, id_digitiser and observer in the v_synthese_for_export_view
        q = synthese_query.filter_query_with_cruved(
            export_view.tableDef,
            q,
            info_role,
            id_synthese_column=current_app.config["SYNTHESE"]["EXPORT_ID_SYNTHESE_COL"],
            id_dataset_column=current_app.config["SYNTHESE"]["EXPORT_ID_DATASET_COL"],
            observers_column=current_app.config["SYNTHESE"]["EXPORT_OBSERVERS_COL"],
            id_digitiser_column=current_app.config["SYNTHESE"][
                "EXPORT_ID_DIGITISER_COL"
            ],
            with_generic_table=True,
        )
    results = q.limit(current_app.config["SYNTHESE"]["NB_MAX_OBS_EXPORT"])

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)

    # columns = [db_col.key for db_col in export_view.db_cols]

    if export_format == "csv":
        formated_data = [
            export_view.as_dict(d, columns=columns_to_serialize) for d in results
        ]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )

    elif export_format == "geojson":
        features = []
        for r in results:
            geometry = ast.literal_eval(
                getattr(r, current_app.config["SYNTHESE"]["EXPORT_GEOJSON_4326_COL"])
            )
            feature = Feature(
                geometry=geometry,
                properties=export_view.as_dict(r, columns=columns_to_serialize),
            )
            features.append(feature)
        results = FeatureCollection(features)
        return to_json_resp(results, as_file=True, filename=file_name, indent=4)
    else:
        try:
            filemanager.delete_recursively(
                str(ROOT_DIR / "backend/static/shapefiles"), excluded_files=[".gitkeep"]
            )

            dir_path = str(ROOT_DIR / "backend/static/shapefiles")

            export_view.as_shape(
                db_cols=db_cols_for_shape,
                data=results,
                geojson_col=current_app.config["SYNTHESE"]["EXPORT_GEOJSON_LOCAL_COL"],
                dir_path=dir_path,
                file_name=file_name,
            )
            return send_from_directory(dir_path, file_name + ".zip", as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"] + "/#/synthese",
        )
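A hedged sketch of how a client might call this route; the base URL is hypothetical, and the endpoint path comes from the project's routing, which is not shown here.

import requests

# POST the id_synthese list to filter on; choose the format via the query string.
resp = requests.post(
    "https://geonature.example.org/synthese/export_observations",  # hypothetical URL
    params={"export_format": "geojson"},
    json=[1024, 1025, 1026],  # id_synthese values
)
resp.raise_for_status()
feature_collection = resp.json()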
Code Example #4
        'label': store['store_name'],
        'description': store['store_address']
    }

    feature = Feature(id=store['objectid'],
                      geometry=Point((store['lon'], store['lat'])),
                      properties=props)

    features.append(feature)

c.close()
conn.close()

props = {
    'id': 'healthy_cornerstores',
    'label': 'Healthy Cornerstores',
    'description': 'Philadelphia dataset for healthy cornerstore locations.',
    'schema': {
        'cdc_store_level': {
            'label': 'CDC Store Level'
        }
    }
}

featureCollection = FeatureCollection(features, properties=props)

# text mode, not 'wb': geojson.dumps() returns a str
with open('cornerstores.geo.json', 'w') as outf:
    outf.write(geojson.dumps(featureCollection))
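A quick sanity-check sketch for the file written above, using the geojson package's loader:

import geojson

# Reload the exported file and confirm the feature count.
with open('cornerstores.geo.json') as f:
    fc = geojson.load(f)
print(len(fc['features']))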
Code Example #5
 def get_geojson_feature_collection(dataPoints: List['GwPoint']):
     return FeatureCollection([d.get_geojson_feature() for d in dataPoints])
Code Example #6
File: utils.py Project: pamyx/geoq
 def create_geojson_polygon_fc(self,coords):
     feature = Feature(geometry=Polygon([coords]))
     return FeatureCollection([feature])
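A hypothetical usage sketch: coords is a single ring of (lon, lat) pairs, closed by repeating the first point; the helper wraps it in a list because GeoJSON polygons are lists of rings.

# some_instance stands in for an instance of the class this method belongs to.
ring = [(-104.99, 39.75), (-104.98, 39.75), (-104.98, 39.76), (-104.99, 39.75)]
fc = some_instance.create_geojson_polygon_fc(ring)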
Code Example #7
def map(request):
    latlng = request.GET.get(
        'latlng', 'LatLng(55.751244, 37.618423)').strip('LatLng(').strip(')')
    # NB: index [1] is the longitude and [0] the latitude, so the names below
    # are swapped; the polygon built from them is therefore already in
    # GeoJSON (lon, lat) order, and render() at the end swaps them back.
    base_lat = float(latlng.split(', ')[1])
    base_lon = float(latlng.split(', ')[0])
    r_lat = 0.01
    r_lon = 0.005

    o = {
        "coordinates": [[[base_lat - r_lat, base_lon - r_lon],
                         [base_lat + r_lat, base_lon - r_lon],
                         [base_lat + r_lat, base_lon + r_lon],
                         [base_lat - r_lat, base_lon + r_lon],
                         [base_lat - r_lat, base_lon - r_lon]]],
        "type":
        "Polygon"
    }

    s = json.dumps(o)
    g1 = geojson.loads(s)
    g2 = shape(g1)

    my_feature = Feature(geometry=g2)
    collection = FeatureCollection([my_feature])
    col = gpd.GeoDataFrame.from_features(collection['features'])

    try:
        df_highway = osm.query_osm('way',
                                   col.iloc[0].geometry,  # .ix was removed from pandas
                                   recurse='down',
                                   tags='highway')
        df_highway = df_highway[['geometry']]
        df_highway['tag'] = 'highway'
        df_building = osm.query_osm('way',
                                    col.iloc[0].geometry,
                                    recurse='down',
                                    tags='building')
        df_building = df_building[['geometry']]
        df_building['tag'] = 'building'
        df_waterway = osm.query_osm('way',
                                    col.iloc[0].geometry,
                                    recurse='down',
                                    tags='waterway')
        df_waterway = df_waterway[['geometry']]
        df_waterway['tag'] = 'waterway'
        # DataFrame.append was removed in pandas 2.0; pd.concat is the equivalent
        pd.concat([df_highway, df_building, df_waterway]).to_csv(
            os.path.join(BASE_DIR, 'polygons.csv'), index=False)

        wkt = []
        test = [base_lon, base_lat]

        res = ()
        for item in d.items():  # d: module-level dict of tile bounds keyed by "x_y_z"
            k = item[0]
            v = item[1]
            tile_lat_min, tile_long_min, tile_lat_max, tile_long_max = v

            if (tile_lat_min <= test[0] <= tile_lat_max
                    and tile_long_min <= test[1] <= tile_long_max):
                res = (k, v)

        if len(res) > 0:
            x, y, z = res[0].split('_')

            return getfiles(request, [
                os.path.join(
                    BASE_DIR,
                    'satellite_image_recognition/media/combined_37_UDB/%s/%s/%s.png'
                    % (z, x, y)),
                os.path.join(
                    BASE_DIR,
                    'satellite_image_recognition/media/combined_37_UDB/%s/%s/%s_pred_highway.png'
                    % (z, x, y)),
                os.path.join(
                    BASE_DIR,
                    'satellite_image_recognition/media/combined_37_UDB/%s/%s/%s_pred_waterway.png'
                    % (z, x, y)),
                os.path.join(
                    BASE_DIR,
                    'satellite_image_recognition/media/combined_37_UDB/%s/%s/%s_pred_building.png'
                    % (z, x, y)),
                os.path.join(BASE_DIR, 'polygons.csv')
            ])

    except Exception as e:
        print(e)
        wkt = []

    return render(request, 'map.html', {
        'base_lat': base_lon,
        'base_lon': base_lat,
        'boundary': o,
        'geometry': wkt
    })
Code Example #8
File: map_select.py Project: thanhleviet/foss4g-map
 def index(self):
     if "lat" in request.params:
         lat = float( request.params["lat"] )
     else:
         return { "error": True, "message": "No \"lat\" parameter was found." }
     
     if "lon" in request.params:
         lon = float( request.params["lon"] )
     else:
         return { "error": True, "message": "No \"lon\" parameter was found." }
     
     if "zoom" in request.params:
         zoom = int( request.params["zoom"] )
     else:
         return { "error": True, "message": "No \"zoom\" parameter was found." }
     
     is_mobile = False
     if "mobile" in request.params:
         if request.params["mobile"] == "true":
             is_mobile = True
         
     point = Point(lon, lat)
     wkb_point = WKBSpatialElement( buffer( point.wkb ), 4326 )
     
     meters_to_search = 1.8
     if is_mobile:
         meters_to_search = 2.1
     
     distance_meters = pow( meters_to_search, ( 20 - zoom ) )
     tolerance = metersToDegrees( distance_meters, lat )
     
     features = []
     
     #
     # Query points first
     #
     
     #
     # These layers aren't visible until we hit zoom 9
     #
     if zoom >= 9:        
         #
         # Light Rail Stop query
         #
         lightRailFilter = func.ST_DWithin( wkb_point, LightRail.geometry_column(), tolerance )
         lightRailQuery = Session.query( LightRail ).filter( lightRailFilter )
         
         for row in lightRailQuery:
             feature = row.toFeature()
             feature.properties["feature_type"] = "Light Rail Stop"
             features.append( feature )
             
         if len( features ) > 0:
             return FeatureCollection(features)
             
         if zoom >= 16:
             #
             # These layers aren't visible until we hit zoom 16
             #
             
             #
             # Bar/Pub query
             #
             barPubFilter = func.ST_DWithin( wkb_point, BarPub.geometry_column(), tolerance )
             barPubQuery = Session.query( BarPub ).filter ( barPubFilter )
             
             for row in barPubQuery:
                 feature = row.toFeature()
                 feature.properties["feature_type"] = "Bar/Pub"
                 features.append( feature )
                 
             if len( features ) > 0:
                 return FeatureCollection( features )
                 
             #
             # Cafe query
             #
             cafeFilter = func.ST_DWithin( wkb_point, Cafe.geometry_column(), tolerance )
             cafeQuery = Session.query( Cafe ).filter ( cafeFilter )
             
             for row in cafeQuery:
                 feature = row.toFeature()
                 feature.properties["feature_type"] = "Cafe"
                 features.append( feature )
                 
             if len( features ) > 0:
                 return FeatureCollection( features )
             
             #
             # Restaurant query
             #
             restaurantFilter = func.ST_DWithin( wkb_point, Restaurant.geometry_column(), tolerance )
             restaurantQuery = Session.query( Restaurant ).filter ( restaurantFilter )
             
             for row in restaurantQuery:
                 feature = row.toFeature()
                 feature.properties["feature_type"] = "Restaurant"
                 features.append( feature )
                 
             if len( features ) > 0:
                 return FeatureCollection( features )
             
             #
             # Bicycle Rental query
             #
             bicycleRentalFilter = func.ST_DWithin( wkb_point, BicycleRental.geometry_column(), tolerance )
             bicycleRentalQuery = Session.query( BicycleRental ).filter ( bicycleRentalFilter )
             
             for row in bicycleRentalQuery:
                 feature = row.toFeature()
                 feature.properties["feature_type"] = "Bicycle Rental"
                 features.append( feature )
                 
             if len( features ) > 0:
                 return FeatureCollection( features )
                 
     #
     # If no points, query lines
     #
     
     #
     # Light Rail Line query
     #
     lightRailLineFilter = func.ST_DWithin( wkb_point, LightRailLine.geometry_column(), tolerance )
     lightRailLineQuery = Session.query( LightRailLine ).filter( lightRailLineFilter )
     
     for row in lightRailLineQuery:
         feature = row.toFeature()
         feature.properties["feature_type"] = "Light Rail Line"
         features.append( feature )
         
     if len( features ) > 0:
         return FeatureCollection( features )
         
     #
     # Free Bus query
     #
     freeBusFilter = func.ST_DWithin( wkb_point, FreeBus.geometry_column(), tolerance )
     freeBusQuery = Session.query( FreeBus ).filter( freeBusFilter )
     
     for row in freeBusQuery:
         feature = row.toFeature()
         feature.properties["feature_type"] = "Free Bus"
         features.append( feature )
         
     if len( features ) > 0:
         return FeatureCollection( features )
 
     
     return FeatureCollection( features )
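A sketch of the metersToDegrees() helper this controller depends on; the real implementation is not shown, so this is an assumption based on the usual approximation (about 111,320 m per degree of latitude, scaled by the cosine of the latitude for east-west distances).

import math

def metersToDegrees(meters, lat):
    # hypothetical reconstruction; ~111,320 m per degree at the equator
    return meters / (111320.0 * math.cos(math.radians(lat)))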
Code Example #9
    geo_stops.append(
        Feature(geometry=Point(
            (float(stop['stop_lon']), float(stop['stop_lat']))),
                properties={
                    'name': stop['stop_name'],
                    'stop_id': stop['stop_id']
                }))

    list_stops.append({
        'name': stop['stop_name'],
        'stop_id': stop['stop_id'],
        'lat': float(stop['stop_lat']),
        'lon': float(stop['stop_lon'])
    })

collection_stops = FeatureCollection(geo_stops)

df_stops = pd.DataFrame(list_stops)
df_stops.to_csv("{}.csv".format(args.route), index=False)

# shapes
c.execute(
    """SELECT shape_id, COUNT(*) count FROM trips WHERE route_id = %s  GROUP BY shape_id ORDER BY count DESC""",
    (route_id, ))
trips = c.fetchall()

for trip in trips:
    shape_id = trip['shape_id']
    print(shape_id)

    c.execute(
Code Example #10
File: predict.py Project: alod83/srp
        if ps in y:
            coord = y[ps][0].split("_")
            label = y[ps][0]
            polygon[label] = get_polygon(int(coord[1]), int(coord[0]),
                                         float(gp['cx']), float(gp['cy']))
            try:
                prop[label]['row'] = int(coord[0])
                prop[label]['column'] = int(coord[1])
                prop[label]['type'] = "effective"
                prop[label]['delta'].append(ps)
            except KeyError:
                prop[label] = {}
                prop[label]['row'] = int(coord[0])
                prop[label]['column'] = int(coord[1])
                prop[label]['type'] = "effective"
                prop[label]['delta'] = [ps]
    for key in prop:
        pol = Polygon(polygon[key])
        myprop = prop[key]
        features.append(Feature(geometry=pol, properties=myprop))

if no_feature_collection is False:
    result = FeatureCollection(features)
    result = dumps(result)
    print_result(args.output, result)

if verbose:
    elapsed = datetime.now() - startTime  # a timedelta, not a raw seconds count
    print("Number of seconds to execute the script: " + str(elapsed))
Code Example #11
def searchPoi(lang_code, search_text, mode):

    poi_list = []

    pois = Poi.objects.filter(
        Q(name__icontains=search_text) | Q(poi_tags__icontains=search_text)
        | Q(category__cat_name__icontains=search_text)).filter(enabled=True)

    if lang_code == "de":
        pois = Poi.objects.filter(
            Q(name_de__icontains=search_text)
            | Q(poi_tags__icontains=search_text)
            | Q(category__cat_name_de__icontains=search_text)).filter(
                enabled=True)

    build_name = ""
    icon_path = ""

    if pois:
        for poi in pois:
            if hasattr(poi.fk_building, 'building_name'):
                build_name = poi.fk_building.building_name
            if hasattr(poi.category.fk_poi_icon, 'poi_icon'):
                icon_path = str(poi.category.fk_poi_icon.poi_icon)

            center_geom = json.loads(poi.geom.geojson)

            if lang_code == "de":
                poi_data = {
                    "label": poi.name_de,
                    "name": poi.name_de,
                    "name_de": poi.name_de,
                    "type": "",
                    "external_id": "",
                    "centerGeometry": center_geom,
                    "floor_num": poi.floor_num,
                    "floor_name": poi.floor_name,
                    "building": build_name,
                    "aks_nummer": "",
                    "roomcode": "",
                    "parent": poi.category.cat_name_de,
                    "category": {
                        'id': poi.category_id,
                        'cat_name': poi.category.cat_name_de
                    },
                    "icon": icon_path,
                    "poi_link_unique": "/?poi-id=" + str(poi.id) + "&floor=" + str(poi.floor_num),
                    "poi_link_category": "/?poi-cat-id=" + str(poi.category_id),
                    "src": "poi db",
                    "poi_id": poi.id
                }

                if mode == "search":
                    new_feature_geojson = Feature(geometry=center_geom,
                                                  properties=poi_data)
                    poi_list.append(new_feature_geojson)
                elif mode == "autocomplete":
                    poi_list.append(poi_data)

            else:
                poi_data = {
                    "label": poi.name,
                    "name": poi.name,
                    "name_de": poi.name_de,
                    "type": "",
                    "external_id": "",
                    "centerGeometry": center_geom,
                    "floor_num": poi.floor_num,
                    "building": build_name,
                    "aks_nummer": "",
                    "roomcode": "",
                    "parent": poi.category.cat_name,
                    "category": {
                        'id': poi.category_id,
                        'cat_name': poi.category.cat_name_en
                    },
                    "poi_link_unique": "/?poi-id=" + str(poi.id) + "&floor=" + str(poi.floor_num),
                    "poi_link_category": "/?poi-cat-id=" + str(poi.category_id),
                    "icon": icon_path,
                    "src": "poi db",
                    "poi_id": poi.id
                }

                if mode == "search":
                    new_feature_geojson = Feature(geometry=center_geom,
                                                  properties=poi_data)
                    poi_list.append(new_feature_geojson)
                elif mode == "autocomplete":
                    poi_list.append(poi_data)

    spaces_list = [{
        "name": _(space.room_code),
        "name_" + lang_code: _(space.room_code),
        "id": space.id,
        "space_id": space.id
    } for space in BuildingFloorSpace.objects.filter(
        room_code__isnull=False).filter(room_code__icontains=search_text)]

    if poi_list:
        final_geojs_res = FeatureCollection(features=poi_list)
    else:
        final_geojs_res = False

    if mode == "search":
        if final_geojs_res:
            return final_geojs_res
        else:
            return False
    else:
        if poi_list:
            return poi_list
        else:
            return False
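A hedged sketch of wiring this helper into a Django view; JsonResponse and the fallback status code are assumptions, not part of the original.

from django.http import JsonResponse

def poi_search_view(request):
    result = searchPoi(request.LANGUAGE_CODE, request.GET.get("q", ""), "search")
    if result:
        return JsonResponse(result)  # FeatureCollection is a dict subclass
    return JsonResponse({"type": "FeatureCollection", "features": []}, status=404)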
Code Example #12
def main():
    """
    Read files, and extract the ground surface.
    Store this as a new GeoJSON file.
    """

    # All files to convert with their destination file
    source_files = ["../Data/Astoria/OCM/Oregon-41007-000.json",
                    "../Data/Seattle/OCM/Washington-53033-004.json",
                    "../Data/Seattle/OCM/Washington-53033-016.json",
                    "../Data/Portland/OCM/Oregon-41051-000.json",
                    "../Data/Portland/OCM/Oregon-41051-001.json",
                    "../Data/Portland/OCM/Oregon-41051-002.json",
                    "../Data/Portland/OCM/Oregon-41051-003.json",
                    "../Data/Portland/OCM/Oregon-41051-004.json",
                    "../Data/Portland/OCM/Oregon-41051-005.json",
                    "../Data/Portland/OCM/Oregon-41051-006.json",
                    "../Data/Portland/OCM/Oregon-41051-007.json",
                    "../Data/SanDiego/OCM/California-06073-002.json",
                    "../Data/SanDiego/OCM/California-06073-003.json",
                    "../Data/SanDiego/OCM/California-06073-004.json",
                    "../Data/SanDiego/OCM/California-06073-012.json"]

    dest_files = ["../Data/Astoria/OCM/2D/Oregon-41007-000_2D.geojson",
                  "../Data/Seattle/OCM/2D/Washington-53033-004_2D.geojson",
                  "../Data/Seattle/OCM/2D/Washington-53033-016_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-000_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-001_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-002_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-003_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-004_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-005_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-006_2D.geojson",
                  "../Data/Portland/OCM/2D/Oregon-41051-007_2D.geojson",
                  "../Data/SanDiego/OCM/2D/California-06073-002_2D.geojson",
                  "../Data/SanDiego/OCM/2D/California-06073-003_2D.geojson",
                  "../Data/SanDiego/OCM/2D/California-06073-004_2D.geojson",
                  "../Data/SanDiego/OCM/2D/California-06073-012_2D.geojson"]

    for i, fname in enumerate(source_files):
        print(fname)

        with open(fname) as filepointer:
            data = json.load(filepointer)

        # Extract cityobjects and vertices list
        cityobjects = data['CityObjects']
        vertices = np.array(data['vertices'])

        features = []

        for obj_id in cityobjects:

            # Extract the list with indices of the vertices
            coord_idxs = cityobjects[obj_id]['geometry'][0]['boundaries']

            attributes = cityobjects[obj_id]['attributes']
            attributes['id'] = obj_id

            # Go over all these index sets and find the one where the
            # z-value is all zero -> ground surface
            for idx_set in coord_idxs[0]:
                coordinates = vertices[idx_set[0]]
                zeros = np.count_nonzero(coordinates[:, 2])

                if zeros == 0:
                    coords_2D = np.delete(coordinates, np.s_[2], axis=1)
                    footprint = Polygon(coords_2D)
                    break

            # Check for invalid polygons, fix them if invalid
            if not footprint.is_valid:
                print("Fixing invalid polygon. ID:", obj_id)
                footprint = footprint.buffer(0)

            # Create the geojson feature based on the geometry and attributes
            geojson_feature = Feature(geometry=footprint, properties=attributes)

            # Check if the features that we store are actually valid
            if not geojson_feature.is_valid:
                print("Invalid Feature. ID:", obj_id)

            features.append(geojson_feature)

        # Put all features into the geojson feature collection
        feature_collection = FeatureCollection(features)

        # Write the 2D footprints with their attributes to a new file
        with open(dest_files[i], 'w') as filepointer:
            dump(feature_collection, filepointer)
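The 3D-to-2D step above hinges on a single numpy call; a tiny standalone sketch:

import numpy as np

# Drop the z column of an (n, 3) vertex array to get 2D footprint coordinates.
coords = np.array([[0.0, 0.0, 0.0],
                   [1.0, 0.0, 0.0],
                   [1.0, 1.0, 0.0]])
coords_2D = np.delete(coords, np.s_[2], axis=1)  # shape (3, 2)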
Code Example #13
def response(context, flow):
  with decoded(flow.response):
    if flow.match("~d pgorelease.nianticlabs.com"):
      env = RpcResponseEnvelopeProto()
      env.ParseFromString(flow.response.content)
      key = request_api[env.response_id]
      value = env.returns[0]

      name = Holoholo.Rpc.Method.Name(key)
      name = mismatched_apis.get(name, name) #return class name when not the same as method
      klass = underscore_to_camelcase(name) + "OutProto"
      try:
        mor = deserialize(value, "." + klass)
        print("Deserialized Response %s" % name)
      except Exception:
        print("Missing Response API: %s" % name)


      if (key == Holoholo.Rpc.GET_MAP_OBJECTS):
        features = []

        for cell in mor.MapCell:
          for fort in cell.Fort:
            p = Point((fort.Longitude, fort.Latitude))
            if fort.FortType == Holoholo.Rpc.CHECKPOINT:
              f = Feature(geometry=p, id=len(features), properties={"id": fort.FortId, "title": "Pokestop", "marker-color": "00007F", "marker-symbol": "town-hall"})
              features.append(f)
            else:
              f = None
              if fort.Team == BLUE:
                f = Feature(geometry=p, id=len(features), properties={"id": fort.FortId, "title": "Blue Gym", "marker-color": "0000FF", "marker-symbol": "town-hall", "marker-size": "large"})
              elif fort.Team == RED:
                f = Feature(geometry=p, id=len(features), properties={"id": fort.FortId, "title": "Red Gym", "marker-color": "FF0000", "marker-symbol": "town-hall", "marker-size": "large"})
              elif fort.Team == YELLOW:
                f = Feature(geometry=p, id=len(features), properties={"id": fort.FortId, "title": "Yellow Gym", "marker-color": "FFFF00", "marker-symbol": "town-hall", "marker-size": "large"})
              else:
                f = Feature(geometry=p, id=len(features), properties={"id": fort.FortId, "title": "Neutral Gym", "marker-color": "808080", "marker-symbol": "town-hall", "marker-size": "large"})
              features.append(f)

          for spawn in cell.SpawnPoint:
            p = Point((spawn.Longitude, spawn.Latitude))
            f = Feature(geometry=p, id=len(features), properties={"id": len(features), "title": "spawn", "marker-color": "00FF00", "marker-symbol": "garden"})
            features.append(f)

          for spawn in cell.DecimatedSpawnPoint:
            p = Point((spawn.Longitude, spawn.Latitude))
            f = Feature(geometry=p, id=len(features), properties={"id": len(features), "title": "decimated spawn", "marker-color": "000000", "marker-symbol": "monument"})
            features.append(f)

          for pokemon in cell.WildPokemon:
            p = Point((pokemon.Longitude, pokemon.Latitude))
            f = Feature(geometry=p, id=len(features), properties={"id": len(features), "TimeTillHiddenMs": pokemon.TimeTillHiddenMs, "title": "Wild %s" % Custom_PokemonName.Name(pokemon.Pokemon.PokemonId), "marker-color": "FF0000", "marker-symbol": "suitcase"})
            features.append(f)

          for pokemon in cell.CatchablePokemon:
            p = Point((pokemon.Longitude, pokemon.Latitude))
            f = Feature(geometry=p, id=len(features), properties={"id": len(features), "ExpirationTimeMs": pokemon.ExpirationTimeMs, "title": "Catchable %s" % Custom_PokemonName.Name(pokemon.PokedexTypeId), "marker-color": "000000", "marker-symbol": "circle"})
            features.append(f)

          for poke in cell.NearbyPokemon:
            gps = request_location[env.response_id]
            if poke.EncounterId in pokeLocation:
              add=True
              for loc in pokeLocation[poke.EncounterId]:
                if gps[0] == loc[0] and gps[1] == loc[1]:
                  add=False
              if add:
                pokeLocation[poke.EncounterId].append((gps[0], gps[1], poke.DistanceMeters/1000))
            else:
              pokeLocation[poke.EncounterId] = [(gps[0], gps[1], poke.DistanceMeters/1000)]
            if len(pokeLocation[poke.EncounterId]) >= 3:
              lat, lon = triangulate(pokeLocation[poke.EncounterId][0],pokeLocation[poke.EncounterId][1],pokeLocation[poke.EncounterId][2])
              if not math.isnan(lat) and not math.isnan(lon) :
                p = Point((lon, lat))
                f = Feature(geometry=p, id=len(features), properties={"id": len(features), "title": "Nearby %s" % Custom_PokemonName.Name(poke.PokedexNumber), "marker-color": "FFFFFF", "marker-symbol": "dog-park"})
                features.append(f)


        fc = FeatureCollection(features)
        dump = geojson.dumps(fc, sort_keys=True)
        # use a context manager so the file is flushed and closed
        with open('ui/get_map_objects.json', 'w') as f:
          f.write(dump)
Code Example #14
File: repository.py Project: PnCevennes/app_oeasc
def areas_from_type_code_container(b_simple, data_type, type_code, ids_area_container):
    '''
        Returns all the areas for a given type_code (for example OEASC_CADASTRE)
        that are contained in the geometry identified by its id_area:
        id_area_container.
        The contained elements are looked up through their area_code:
            - either by comparing the area_code of the contained and the
              containing areas (general case)
            - or by using a precomputed correlation table, for the case of
              forests with a DGD

        data_type : t -> returns only the attributes
                    l -> also returns the geometry

        b_simple : returns the simplified geometry if true,
                   the original geometry otherwise
    '''
    table = set_table(b_simple, data_type)

    id_type = get_id_type(type_code)
    v = ids_area_container.split("-")

    out = []


    for id_area_container in v:

        container = DB.session.query(table).filter(table.id_area == id_area_container).first()

        id_type_commune = get_id_type('OEASC_COMMUNE')
        id_type_dgd = get_id_type('OEASC_DGD')

        # case of commune sections
        if container.id_type == id_type_commune:

            sql_text = text("SELECT ref_geo.get_old_communes('{}')".format(container.area_code))

            result = DB.engine.execute(sql_text)

            data = []

            for r in result:
                area_code = r[0]

                data = (
                    DB.session.query(table)
                    .filter(
                        and_(
                            table.id_type == id_type,
                            table.enable,
                            table.area_code.like(area_code + "-%")
                        )
                    )
                    .order_by(table.label)
                    .all()
                    + data
                )

        # DGD case
        elif container.id_type == id_type_dgd:

            res = DB.engine.execute(
                text(
                    "SELECT area_code_cadastre \
                        FROM oeasc_forets.cor_dgd_cadastre WHERE area_code_dgd = '{}' ;"
                    .format(container.area_code)
                    )
                )

            v = [r[0] for r in res]

            data = (
                DB.session.query(table)
                .filter(table.area_code.in_(v))
                .order_by(table.label).all()
            )

        # other cases (ONF)
        else:
            data = (
                DB.session.query(table)
                .filter(
                    and_(
                        table.id_type == id_type,
                        table.enable,
                        table.area_code.like(container.area_code + "-%")
                    )
                )
                .order_by(table.label)
                .all()
            )

        # output
        if data_type == 'l':
            out = out + [d.get_geofeature() for d in data]

        else:
            out = out + [d.as_dict() for d in data]

    # final output
    if data_type == 'l':

        out = FeatureCollection(out)

    return out
Code Example #15
File: user_data.py Project: Will1707/Map_For_Reddit
def user_data(user_dict):
    feature_collection_list = []
    split_score = user_dict['score'].split(',')
    user_dict['score_upper'] = split_score[1]
    user_dict['score_lower'] = split_score[0]
    split_comments = user_dict['comments'].split(',')
    user_dict['comments_upper'] = split_comments[1]
    user_dict['comments_lower'] = split_comments[0]
    split_dates = user_dict['date'].split(' - ')
    user_dict['date_upper'] = split_dates[1]
    user_dict['date_lower'] = split_dates[0]
    date_upper_utc = int(
        (datetime.strptime(user_dict['date_upper'], "%m/%d/%Y") -
         datetime(1970, 1, 1)).total_seconds())
    date_lower_utc = int(
        (datetime.strptime(user_dict['date_lower'], "%m/%d/%Y") -
         datetime(1970, 1, 1)).total_seconds())
    date_upper = "".join(user_dict['date_upper'].split("/"))
    date_lower = "".join(user_dict['date_lower'].split("/"))
    user_dict['date_upper'] = date_upper_utc
    user_dict['date_lower'] = date_lower_utc
    user_dict['id'] = ('S' + user_dict['score_upper'] + 's' +
                       user_dict['score_lower'] + 'C' +
                       user_dict['comments_upper'] + 'c' +
                       user_dict['comments_lower'] + 'D' + date_upper + 'd' +
                       date_lower + 'J' + user_dict['countries_no'] + 'G' +
                       user_dict['cluster'] + 'R' + user_dict['results'] +
                       'N' + user_dict['num_country'])

    featurecollection = data.find_one({"id": user_dict['id']})
    if featurecollection is None:
        user_form = {
            'id': user_dict['id'],
            'score_upper': int(user_dict['score_upper']),
            'score_lower': int(user_dict['score_lower']),
            'comments_upper': int(user_dict['comments_upper']),
            'comments_lower': int(user_dict['comments_lower']),
            'date_upper': user_dict['date_upper'],
            'date_lower': user_dict['date_lower'],
            'cluster': int(user_dict['cluster']),
            'results': int(user_dict['results']),
            'num_country': int(user_dict['num_country']),
            'countries': user_dict['countries']
        }
        if user_form['cluster'] == 0:
            found = submission.find({
                "geoJSON.properties.comments": {
                    "$gt": user_form['comments_lower'],
                    "$lt": user_form['comments_upper']
                },
                "geoJSON.properties.score": {
                    "$gt": user_form['score_lower'],
                    "$lt": user_form['score_upper']
                },
                "geoJSON.properties.date": {
                    "$gt": user_form['date_lower'],
                    "$lt": user_form['date_upper']
                },
                "country": {
                    "$in": user_form['countries']
                }
            }).limit(int(user_form['results'] * 1.5)).sort(
                "score", pymongo.DESCENDING)
        else:
            cluster = 'cluster.level_' + str(user_form['cluster'])
            found = submission.find({
                "geoJSON.properties.comments": {
                    "$gt": user_form['comments_lower'],
                    "$lt": user_form['comments_upper']
                },
                "geoJSON.properties.score": {
                    "$gt": user_form['score_lower'],
                    "$lt": user_form['score_upper']
                },
                "geoJSON.properties.date": {
                    "$gt": user_form['date_lower'],
                    "$lt": user_form['date_upper']
                },
                "country": {
                    "$in": user_form['countries']
                },
                cluster: True
            }).limit(int(user_form['results'] * 1.5)).sort(
                "score", pymongo.DESCENDING)

        result = []
        result_append = result.append
        remainder = []
        remainder_append = remainder.append
        locations = []
        locations_append = locations.append

        for loc in found:
            coord = loc['geoJSON']['geometry']['coordinates']
            if loc['country_rank'] is not None and loc[
                    'country_rank'] <= user_form[
                        'num_country'] and coord not in locations:
                result_append(loc['geoJSON'])
                locations_append(coord)
            elif coord not in locations:
                remainder_append(loc['geoJSON'])
                locations_append(coord)

        remaining = user_form['results'] - len(result)
        if remaining > 0:
            geoJSON_list = result + remainder[:remaining]
        elif remaining < 0:
            geoJSON_list = result[:remaining]
        else:
            geoJSON_list = result

        user_form['feature_collection'] = FeatureCollection(geoJSON_list)
        data.insert_one(user_form)
        client.close()
        return user_form['feature_collection']

    else:
        client.close()
        return featurecollection['feature_collection']
Code Example #16
def multimodal_directions(origin, destination, modes, API_KEY):

    # Store GeoJSON features in a list
    results = []

    # Store durations and start / stop times
    durations = []
    starttimes = []
    endtimes = []

    for mode in modes:

        # Get data from Google Maps Directions API
        data = gmaps_directions(origin, destination, mode, API_KEY)

        # Check to see if no routes returned.
        if len(data['routes']) == 0:
            sys.exit(
                "Sorry, directions are not available for {} from {} to {}".
                format(mode, origin, destination))

        # Get duration in seconds
        if 'duration_in_traffic' in data['routes'][0]['legs'][0]:
            duration = data['routes'][0]['legs'][0]['duration_in_traffic'][
                'value']
        else:
            duration = data['routes'][0]['legs'][0]['duration']['value']

        # Calculate arrival time (departure_time: assumed module-level datetime)
        arrival_time = departure_time + timedelta(0, duration)

        # Get polyline
        polyline = data['routes'][0]['overview_polyline']['points']

        # Decode polyline
        decoded_polyline = decode_polyline(polyline)

        # Create LineString
        linestring = LineString(decoded_polyline)

        # Create GeoJSON properties
        properties = {
            'mode': mode,
            'duration': duration,
            'start': departure_time.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3],
            'end': arrival_time.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
        }

        # Create GeoJSON feature
        feature = Feature(geometry=linestring, properties=properties)

        # Store feature in results list
        results.append(feature)

        # Store duration and start/stop times in lists
        durations.append(duration)
        starttimes.append(departure_time)
        endtimes.append(arrival_time)

    # Convert the list of features to a GeoJSON FeatureCollection
    # (built once, after the loop, rather than on every iteration)
    feature_collection = FeatureCollection(results)

    return feature_collection, durations, starttimes, endtimes
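A hypothetical call; the origin/destination strings are placeholders, and departure_time is a module-level datetime the function reads (see the comment inside it).

fc, durations, starts, ends = multimodal_directions(
    "Boston, MA", "Cambridge, MA", ["driving", "walking", "transit"], API_KEY)
print(durations)  # seconds per mode, in input order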
Code Example #17
def parse_geojson(fname):
    print(fname)
    with open(fname, 'r') as f:
        fc = geojson.load(f)

    id_feat = str(fc).count('"id":') - 1

    def fix_props(props, z):
        props['height'] = z
        props['base_height'] = 0
        props['level'] = 1
        props['name'] = props['name'] if 'name' in props else 'name'

        if 'fill' in props:
            props['color'] = props['fill']
        else:
            props['color'] = 'white'

        # del props['stroke-opacity']
        # del props['stroke-width']
        # del props['styleHash']
        # del props['styleMapHash']
        # del props['styleUrl']
        return props

    for file_number in range(
            int(math.ceil(len(fc.features) / float(num_feats)))):

        newgj = FeatureCollection([])
        print(file_number)

        for f in fc.features[file_number * num_feats:(file_number + 1) *
                             num_feats]:
            print(f.geometry.type)
            if f.geometry.type == 'GeometryCollection':
                for p in f.geometry.geometries:
                    newprops = fix_props(f.properties,
                                         p['coordinates'][0][0][2])
                    newfeat = {
                        "type": "Feature",
                        'id': id_feat,
                        'properties': newprops,
                        'geometry': p
                    }
                    p['coordinates'] = remove_zetas(p['coordinates'])
                    newgj.features.append(newfeat)
                    id_feat += 1

            else:
                z = next(islice(flatten(f.geometry['coordinates']), 2, None),
                         0)
                f.properties = fix_props(f.properties, z)
                # f.geometry['coordinates'] = remove_zetas(f.geometry['coordinates'])
                newgj.features.append(f)

        # with open('new/' + fname[:gjindex] + str(file_number) +  fname[gjindex:], 'w') as outfile:
        # NB: fname[:gjindex] + fname[gjindex:] is just fname, so this overwrites the input file
        with open(fname[:gjindex] + fname[gjindex:], 'w') as outfile:
            json.dump(newgj, outfile, separators=(',', ':'))
Code Example #18
shapefile_reader = shapefile.Reader(options.get('shapefile'))
shapes = shapefile_reader.shapes()

# set limit (fall back to the full shape count when no limit is given)
LIMIT = options.get('limit') or None
limit = min(LIMIT, len(shapes)) if LIMIT else len(shapes)

# tasks list
tasks = []
row_count = 0

# iterate over shapefile features + records
for shape_record in shapefile_reader.iterShapeRecords():
    task = MapRouletteTask(
        shape_record.record[options.get('identifier_field')],
        challenge=challenge,
        geometries=FeatureCollection(
            [Feature(geometry=shape_record.shape.__geo_interface__)]),
        instruction=r'{}'.format(
            options.get('instruction_template').format(
                **{
                    key: shape_record.record[val].strip()
                    for (key, val) in options.get(
                        'instruction_replacement_fields').items()
                })))
    tasks.append(task)
    row_count = row_count + 1
    if row_count == limit:
        break

new_collection = MapRouletteTaskCollection(challenge, tasks=tasks)
print('reconciling {} tasks with the server...'.format(len(tasks)))
result = new_collection.reconcile(server)
Code Example #19
    def fulltextsearch(self) -> FeatureCollection:
        lang = locale_negotiator(self.request)

        try:
            language = self.languages[lang]
        except KeyError:
            return HTTPInternalServerError(
                detail="{0!s} not defined in languages".format(lang))

        if "query" not in self.request.params:
            return HTTPBadRequest(detail="no query")
        terms = self.fts_normiliser(self.request.params.get("query"))

        maxlimit = self.settings.get("maxlimit", 200)

        try:
            limit = int(
                self.request.params.get("limit",
                                        self.settings.get("defaultlimit", 30)))
        except ValueError:
            return HTTPBadRequest(detail="limit value is incorrect")
        if limit > maxlimit:
            limit = maxlimit

        try:
            partitionlimit = int(self.request.params.get("partitionlimit", 0))
        except ValueError:
            return HTTPBadRequest(detail="partitionlimit value is incorrect")
        if partitionlimit > maxlimit:
            partitionlimit = maxlimit

        terms_array = [
            IGNORED_STARTUP_CHARS_RE.sub("", elem)
            for elem in IGNORED_CHARS_RE.sub(" ", terms).split(" ")
        ]
        terms_ts = "&".join(w + ":*" for w in terms_array if w != "")
        _filter = FullTextSearch.ts.op("@@")(func.to_tsquery(
            language, terms_ts))

        if self.request.user is None:
            _filter = and_(_filter, FullTextSearch.public.is_(True))
        else:
            _filter = and_(
                _filter,
                or_(
                    FullTextSearch.public.is_(True),
                    FullTextSearch.role_id.is_(None),
                    FullTextSearch.role_id.in_(
                        [r.id for r in self.request.user.roles]),
                ),
            )

        if "interface" in self.request.params:
            _filter = and_(
                _filter,
                or_(
                    FullTextSearch.interface_id.is_(None),
                    FullTextSearch.interface_id == self._get_interface_id(
                        self.request.params["interface"]),
                ),
            )
        else:
            _filter = and_(_filter, FullTextSearch.interface_id.is_(None))

        _filter = and_(
            _filter,
            or_(FullTextSearch.lang.is_(None), FullTextSearch.lang == lang))

        rank_system = self.request.params.get("ranksystem")
        if rank_system == "ts_rank_cd":
            # The numbers used in ts_rank_cd() below indicate a normalization method.
            # Several normalization methods can be combined using |.
            # 2 divides the rank by the document length
            # 8 divides the rank by the number of unique words in document
            # By combining them, shorter results seem to be preferred over longer ones
            # with the same ratio of matching words. But this relies only on testing it
            # and on some assumptions about how it might be calculated
            # (the normalization is applied two times with the combination of 2 and 8,
            # so the effect on at least the one-word-results is therefore stronger).
            rank = func.ts_rank_cd(FullTextSearch.ts,
                                   func.to_tsquery(language, terms_ts), 2 | 8)
        else:
            # Use similarity ranking system from module pg_trgm.
            rank = func.similarity(FullTextSearch.label, terms)

        if partitionlimit:
            # Here we want to partition the search results based on
            # layer_name and limit each partition.
            row_number = (func.row_number().over(
                partition_by=FullTextSearch.layer_name,
                order_by=(desc(rank),
                          FullTextSearch.label)).label("row_number"))
            subq = DBSession.query(FullTextSearch).add_columns(
                row_number).filter(_filter).subquery()
            query = DBSession.query(subq.c.id, subq.c.label, subq.c.params,
                                    subq.c.layer_name, subq.c.the_geom,
                                    subq.c.actions)
            query = query.filter(subq.c.row_number <= partitionlimit)
        else:
            query = DBSession.query(FullTextSearch).filter(_filter)
            query = query.order_by(desc(rank))
            query = query.order_by(FullTextSearch.label)

        query = query.limit(limit)
        objs = query.all()

        features = []
        for o in objs:
            properties = {"label": o.label}
            if o.layer_name is not None:
                properties["layer_name"] = o.layer_name
            if o.params is not None:
                properties["params"] = o.params
            if o.actions is not None:
                properties["actions"] = o.actions
            if o.actions is None and o.layer_name is not None:
                properties["actions"] = [{
                    "action": "add_layer",
                    "data": o.layer_name
                }]

            if o.the_geom is not None:
                geom = to_shape(o.the_geom)
                feature = Feature(id=o.id,
                                  geometry=geom,
                                  properties=properties,
                                  bbox=geom.bounds)
                features.append(feature)
            else:
                feature = Feature(id=o.id, properties=properties)
                features.append(feature)

        return FeatureCollection(features)
Code Example #20
File: isf_catalogue.py Project: ftbernales/oq-mbtk
    def add_external_idf_formatted_catalogue(self, cat, ll_deltas=0.01,
            delta_t=dt.timedelta(seconds=30),
            utc_time_zone=dt.timezone(dt.timedelta(hours=0)),
            buff_t=dt.timedelta(seconds=0), buff_ll=0, use_ids=False,
            logfle=False):
        """
        This merges an external catalogue formatted in the ISF format e.g. a
        catalogue coming from an external agency. Because of this, we assume
        that each event has a single origin.

        :param cat:
            An instance of :class:`ISFCatalogue` i.e. the 'guest' catalogue
        :param ll_deltas:
            A float defining the tolerance in decimal degrees used when looking
            for colocated events
        :param delta_t:
            Tolerance used to find colocated events. It's an instance of
            :class:`datetime.timedelta`
        :param utc_time_zone:
            A :class:`datetime.timezone` instance describing the reference
            timezone for the new catalogue.
        :param buff_t:
            Tolerance used to find events close to the selection threshold.
            It's an instance of :class:`datetime.timedelta`
        :param buff_ll:
            A float defining the tolerance used to find events close to the
            selection threshold.
        :param use_ids:
            A boolean
        :param logfle:
            Name of the file which will contain the log of the processing
        :return:
            - A list with the indexes of the events in the 'guest' catalogue
              added to the 'host' catalogue.
            - A dictionary with doubtful events. The keys in this dictionary
              are the indexes of the events in the 'host' catalogue.
              The values are the indexes of the doubtful events in the 'guest'
              catalogue.
        """
        if logfle:
            fou = open(logfle, 'w')
            fname_geojson = os.path.splitext(logfle)[0]+"_secondary.geojson"

        #
        # This is a dictionary where we store the doubtful events.
        doubts = {}
        #
        # Check if we have a spatial index
        assert 'sidx' in self.__dict__
        #
        # Set delta time thresholds
        if hasattr(delta_t, '__iter__'):
            threshold = np.array([[t[0], t[1].total_seconds()] for t in
                                  delta_t])
        else:
            threshold = np.array([[1000, delta_t.total_seconds()]])
        #
        # Set ll delta thresholds
        if hasattr(ll_deltas, '__iter__'):
            ll_deltas = np.array([d for d in ll_deltas])
        else:
            ll_deltas = np.array([[1000, ll_deltas]])
        #
        # Processing the events in the catalogue 'guest' catalogue
        id_common_events = []
        features = []
        new = 0
        new_old = 0
        common = 0
        common_old = 0
        iloc = 0
        for iloc, event in enumerate(cat.events):

            if logfle:
                msg = 'Index: {:d} Event ID: {:s}\n'.format(iloc, event.id)
                fou.write(msg)

            #
            # Initial settings
            found = False
            before = self.get_number_events()
            #
            # Updating time of the origin to the new timezone
            new_datetime = dt.datetime.combine(event.origins[0].date,
                                               event.origins[0].time,
                                               tzinfo=utc_time_zone)
            new_datetime = new_datetime.astimezone(self.timezone)
            event.origins[0].date = new_datetime.date()
            event.origins[0].time = new_datetime.time()
            #
            # Set the datetime of the event
            dtime_a = dt.datetime.combine(event.origins[0].date,
                                          event.origins[0].time)
            #
            # Take the appropriate value from delta_ll - this is needed in
            # particular when delta_ll varies with time.
            idx_threshold = max(np.argwhere(dtime_a.year > ll_deltas[:, 0]))
            ll_thrs = ll_deltas[idx_threshold, 1]
            #
            # Create selection window
            minlo = event.origins[0].location.longitude - ll_thrs
            minla = event.origins[0].location.latitude - ll_thrs
            maxlo = event.origins[0].location.longitude + ll_thrs
            maxla = event.origins[0].location.latitude + ll_thrs
            #
            # Querying the spatial index
            obj = [n.object for n in self.sidx.intersection((minlo, minla,
                                                             maxlo, maxla),
                                                            objects=True)]
            #
            # This is for checking. We perform the check only if the buffer
            # distance is larger than 0
            obj_e = []
            obj_a = []
            if buff_ll > 0 or buff_t.total_seconds() > 0:
                obj_a = [n.object for n in self.sidx.intersection((
                        minlo-buff_ll, minla-buff_ll, maxlo+buff_ll,
                        maxla+buff_ll), objects=True)]
                obj_b = [n.object for n in self.sidx.intersection((
                        minlo+buff_ll, minla+buff_ll, maxlo-buff_ll,
                        maxla+buff_ll), objects=True)]
                #
                # Find the index of the events in the buffer across the
                # selection window
                obj_e = list(set(obj_a) - set(obj_b))
            #
            # Find the appropriate delta_time
            idx_threshold = max(np.argwhere(dtime_a.year >
                                            threshold[:, 0]))
            sel_thrs = threshold[idx_threshold, 1]

            if logfle:
                msg = '   Selected {:d} events \n'.format(len(obj))
                fou.write(msg)

            if len(obj):
                #
                # Checking the events selected with the spatial index. obj is
                # a list of tuples (event and origin ID) in the host
                # catalogue for the epicenters close to the investigated event
                for i in obj:
                    #
                    # Selecting the origin of the event found in the catalogue
                    i_eve = i[0]
                    i_ori = i[1]
                    orig = self.events[i_eve].origins[i_ori]
                    dtime_b = dt.datetime.combine(orig.date, orig.time)
                    #
                    # Check if time difference is within the threshold value
                    delta = abs((dtime_a - dtime_b).total_seconds())

                    if logfle:
                        eid = self.events[i_eve].id
                        msg = '      Event ID: {:s}\n'.format(eid)
                        msg += '      Delta: {:f}\n'.format(delta)
                        fou.write(msg)

                    if delta < sel_thrs and found is False:

                        # Found an origin in the same space-time window
                        found = True
                        tmp = event.origins

                        # Check if this event already contains an origin
                        # from the same agency
                        origins = self.events[i_eve].origins
                        if tmp[0].author in [o.author for o in origins]:

                            fmt = "This event already contains "
                            fmt += " an origin from the same agency: {:s}\n"
                            fmt += " Trying to add evID {:s}\n"
                            msg = fmt.format(tmp[0].author, event.id)
                            warnings.warn(msg)

                            if logfle:
                                fou.write(msg)

                        # Set the prime solution if necessary
                        if (len(self.events[i_eve].origins) == 1 and
                                not self.events[i_eve].origins[0].is_prime):
                            tmp[0].is_prime = True
                        else:
                            tmp[0].is_prime = False

                        # Check event ID
                        if use_ids:
                            if event.id != self.events[i_eve].id:
                                fmt = " Trying to add a secondary origin "
                                fmt += " whose ID {:s} differs from the "
                                fmt += " original one. Skipping\n"
                                msg = fmt.format(event.id,
                                                 self.events[i_eve].id)
                                warnings.warn(msg)
                                found = False
                                continue

                        # Check if a secondary solution from the same agency
                        # exists
                        authors = [m.author for m in
                                   self.events[i_eve].magnitudes]
                        if event.magnitudes[0].author in authors:
                            print("Solution already included for this source")
                            print(event.magnitudes[0].origin_id)
                            found = False
                            continue

                        # Info
                        fmt = "Adding to event {:d}\n"
                        msg = fmt.format(i_eve)

                        # Updating the .geojson file
                        if logfle:
                            fou.write(msg)

                            lon1 = self.events[i_eve].origins[0].location.longitude
                            lat1 = self.events[i_eve].origins[0].location.latitude
                            lon2 = tmp[0].location.longitude
                            lat2 = tmp[0].location.latitude
                            line = LineString([(lon1, lat1), (lon2, lat2)])
                            ide = self.events[i_eve].id
                            features.append(Feature(geometry=line,
                                            properties={"originalID": ide}))

                        # Merging a secondary origin
                        self.events[i_eve].merge_secondary_origin(tmp)
                        id_common_events.append(iloc)
                        common += 1

                        break
            #
            # Searching for doubtful events:
            if buff_ll > 1e-10 and buff_t.total_seconds() > 1e-10:
                if len(obj_a) > 0:
                    for i in obj_a:
                        to_add = False
                        #
                        # Selecting origin of the event found in the catalogue
                        i_eve = i[0]
                        i_ori = i[1]
                        orig = self.events[i_eve].origins[i_ori]
                        dtime_b = dt.datetime.combine(orig.date, orig.time)
                        #
                        # Check if the time difference is within the
                        # threshold value
                        tmp_delta = abs(dtime_a - dtime_b).total_seconds()
                        #
                        # Within max distance and across the time buffer
                        tsec = buff_t.total_seconds()
                        if (tmp_delta > (sel_thrs - tsec) and
                                tmp_delta < (sel_thrs + tsec)):
                            to_add = True
                        #
                        # Within max time and within the ll buffer
                        if (not to_add and tmp_delta < (sel_thrs + tsec)):
                            if i in obj_e:
                                to_add = True
                        #
                        # Saving info
                        if to_add:
                            if i[0] in doubts:
                                doubts[i[0]].append(iloc)
                            else:
                                doubts[i[0]] = [iloc]
            #
            # Adding new event
            if not found:
                # Making sure that the ID of the event added does not exist
                # already

                if event.id in set(self.ids):

                    if use_ids:
                        fmt = "Adding a new event whose ID {:s}"
                        fmt += " is already in the DB. Making it secondary."
                        msg = fmt.format(event.id)
                        warnings.warn(msg)

                        if logfle:
                            fou.write(msg)

                        i_eve = np.where(np.array(self.ids) == event.id)
                        tmp = event.origins
                        tmp[0].is_prime = False
                        self.events[i_eve[0][0]].merge_secondary_origin(tmp)
                        found = True
                        common += 1

                    else:
                        fmt = 'Event ID: {:s} already there. Length ids {:d}'
                        msg = fmt.format(event.id, len(self.ids))
                        raise ValueError(msg)

                else:
                    assert len(event.origins) == 1
                    event.origins[0].is_prime = True
                    self.events.append(event)

                    if logfle:
                        msg = "Adding new event\n"
                        fou.write(msg)

                    self.ids.append(event.id)
                    new += 1
            #
            # Checking
            if (new - new_old) > 0 and (common - common_old) > 0:
                fmt = 'Event counted as both new and common, iloc {:d}'
                msg = fmt.format(iloc)
                raise ValueError(msg)
            elif (new - new_old) > 1:
                msg = 'New increment larger than 1, iloc {:d}'.format(iloc)
                raise ValueError(msg)
            elif (common - common_old) > 1:
                msg = 'Common increment larger than 1, iloc {:d}'.format(iloc)
                raise ValueError(msg)
            else:
                new_old = new
                common_old = common
            #
            after = self.get_number_events()
            #
            # if not iloc % 5000:
            #    idxs, stats = self.get_prime_events_info()
            #    num_primes = [len(stats[k]) for k in stats.keys()]
            #    msg = "{:d}".format(iloc)
            #    assert sum(num_primes) == after, msg

            fmt = 'before {:d} after {:d} iloc {:d} found {:d} loops: {:d}'
            msg = fmt.format(before, after, iloc, found, iloc)
            dlt = 0 if found else 1
            assert before+dlt == after, msg
        #
        # Checking
        fmt = "Wrong budget \n"
        fmt += "Common: {:d} New: {:d} Sum: {:d} Expected: {:d} loops: {:d}\n"
        msg = fmt.format(common, new, common+new, cat.get_number_events(),
                         iloc+1)
        assert (common + new) == cat.get_number_events(), msg
        #
        # Updating the spatial index
        self._create_spatial_index()

        if logfle:
            fou.close()

            feature_collection = FeatureCollection(features)
            with open(fname_geojson, 'w') as f:
                dump(feature_collection, f)

        return id_common_events, doubts
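The year-dependent lookup above (np.argwhere on the first column, then indexing the second) is the core of both threshold tables. A minimal self-contained sketch of the same idea, with made-up (start_year, max_seconds) rows:

import numpy as np

# Hypothetical thresholds: events after 1980 must match within 30 s,
# events after 2000 within 10 s
threshold = np.array([[1000, 60.0], [1980, 30.0], [2000, 10.0]])

def time_tolerance(year, threshold):
    # Last row whose start year lies strictly below the event year
    idx = max(np.argwhere(year > threshold[:, 0]))
    return threshold[idx, 1]

print(time_tolerance(1995, threshold))  # -> [30.]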
Code example #21
0
    # Open ROI file
    roi_dict_complete = read_roi_zip(file_proc.replace(ident_ch1, ident_ch2))
    
    for key_roi, val_roi in roi_dict_complete.items():

        # Get coordinates - maybe x and y have to be exchanged
        # pos = np.column_stack((val_roi['y'], val_roi['x']))
        pos = np.column_stack((val_roi['x'], [image_size[1] - h for h in val_roi['y']]))

        # Create and append feature for geojson
        pol_loop = geojson_polygon([pos.tolist()])
        features.append(Feature(geometry=pol_loop, properties={"label": 'nuclei'}))

    # Create geojson feature collection
    feature_collection = FeatureCollection(
        features, bbox=[0, 0, image_size[0], image_size[1]])

    # Save to json file
    save_name_json = os.path.join(folder_save, 'annotation.json')
    with open(save_name_json, 'w') as f:
        dump(feature_collection, f)

    # Find and copy raw data renamed with channel identifier
    img_raw = os.path.join(drive,path,file_base+img_ext)
    if os.path.isfile(img_raw):
        img_raw_new = os.path.join(folder_save, 'cells'+img_ext)
        shutil.copy(img_raw, img_raw_new)
        print(f'Copying raw image: {img_raw}')

    else:
        # Assumed fallback: the original snippet is truncated here
        print(f'Raw image not found: {img_raw}')
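The column stacking above flips the y axis because ImageJ ROIs count rows downwards from the top-left corner, while GeoJSON-style coordinates grow upwards. A tiny sketch with made-up numbers:

# Image rows (origin top-left) to GeoJSON-style y (origin bottom-left)
image_height = 512
rows = [10, 20, 30]
print([image_height - r for r in rows])  # -> [502, 492, 482]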
Code example #22
0
def get_all_observations() -> Union[FeatureCollection, Tuple[Dict, int]]:
    """Get all observations from all programs
    GET
        ---
        tags:
          - observations
        responses:
          200:
            description: A list of all observations
    """
    try:
        observations = (db.session.query(
            ObservationModel,
            UserModel.username,
            MediaModel.filename.label("image"),
            LAreas.area_name,
            LAreas.area_code,
        ).filter(ProgramsModel.is_active).join(
            LAreas,
            LAreas.id_area == ObservationModel.municipality,
            isouter=True).join(
                ProgramsModel,
                ProgramsModel.id_program == ObservationModel.id_program,
                isouter=True,
            ).join(
                ObservationMediaModel,
                ObservationMediaModel.id_data_source ==
                ObservationModel.id_observation,
                isouter=True,
            ).join(
                MediaModel,
                ObservationMediaModel.id_media == MediaModel.id_media,
                isouter=True,
            ).join(UserModel,
                   ObservationModel.id_role == UserModel.id_user,
                   full=True))

        observations = observations.order_by(
            desc(ObservationModel.timestamp_create))
        # current_app.logger.debug(str(observations))
        observations = observations.all()

        # loop to retrieve taxonomic data from all programs
        if current_app.config.get("API_TAXHUB") is not None:
            programs = ProgramsModel.query.all()
            taxon_repository = []
            for program in programs:
                taxhub_list_id = (ProgramsModel.query.filter_by(
                    id_program=program.id_program).one().taxonomy_list)
                taxon_data = mkTaxonRepository(taxhub_list_id)
                try:
                    for taxon in taxon_data:
                        if taxon not in taxon_repository:
                            taxon_repository.append(taxon)
                except Exception as e:
                    current_app.logger.critical(str(e))

        features = []
        for observation in observations:
            feature = get_geojson_feature(observation.ObservationModel.geom)
            feature["properties"]["municipality"] = {
                "name": observation.area_name,
                "code": observation.area_code,
            }

            # Observer
            feature["properties"]["observer"] = {
                "username": observation.username
            }

            # Observer submitted media
            feature["properties"]["image"] = ("/".join([
                "/api",
                current_app.config["MEDIA_FOLDER"],
                observation.image,
            ]) if observation.image else None)

            # Municipality
            observation_dict = observation.ObservationModel.as_dict(True)
            for k in observation_dict:
                if k in obs_keys and k != "municipality":
                    feature["properties"][k] = observation_dict[k]

            # TaxRef
            if current_app.config.get("API_TAXHUB") is None:
                taxref = Taxref.query.filter(Taxref.cd_nom == observation.
                                             ObservationModel.cd_nom).first()
                if taxref:
                    feature["properties"]["taxref"] = taxref.as_dict(True)

                medias = TMedias.query.filter(TMedias.cd_ref == observation.
                                              ObservationModel.cd_nom).all()
                if medias:
                    feature["properties"]["medias"] = [
                        media.as_dict(True) for media in medias
                    ]
            else:
                try:
                    taxon = next(
                        taxon for taxon in taxon_repository if taxon
                        and taxon["cd_nom"] == feature["properties"]["cd_nom"])
                    feature["properties"]["taxref"] = taxon["taxref"]
                    feature["properties"]["medias"] = taxon["medias"]
                except StopIteration:
                    pass
            features.append(feature)

        return FeatureCollection(features)

    except Exception as e:
        current_app.logger.critical("[get_all_observations] Error: %s",
                                    str(e))
        return {"message": str(e)}, 400
Code example #23
0
    listLength = len(coordArray) - 1
    firstItem = coordArray[0]

    for index, coordPair in enumerate(coordArray):
        coordPairArray = coordPair.split(":")

        # GeoJSON positions are plain (longitude, latitude) tuples;
        # wrapping each one in a Point would nest geometries inside
        # the Polygon and produce invalid GeoJSON
        latitude, longitude = map(float,
                                  (coordPairArray[0], coordPairArray[1]))
        coords.append((longitude, latitude))

        # Close the ring by repeating the first position
        if index == listLength:
            lat, lon = map(float, firstItem.split(":"))
            coords.append((lon, lat))

    features.append(
        Feature(geometry=Polygon([coords]), properties={
            'STATE': nodeId,
        }))

collection = FeatureCollection(features)
with open("converted_final.json", "w") as f:
    f.write('%s' % collection)

print("FINISHED")
Code example #24
0
def get_observations_by_user_id(user_id):
    try:
        observations = (db.session.query(
            ObservationModel,
            ProgramsModel,
            UserModel.username,
            func.json_agg(
                func.json_build_array(MediaModel.filename,
                                      MediaModel.id_media)).label("images"),
            LAreas.area_name,
            LAreas.area_code,
        ).filter(ObservationModel.id_role == user_id).join(
            LAreas,
            LAreas.id_area == ObservationModel.municipality,
            isouter=True).join(
                ProgramsModel,
                ProgramsModel.id_program == ObservationModel.id_program,
                isouter=True,
                full=True,
            ).join(
                ObservationMediaModel,
                ObservationMediaModel.id_data_source ==
                ObservationModel.id_observation,
                isouter=True,
            ).join(
                MediaModel,
                ObservationMediaModel.id_media == MediaModel.id_media,
                isouter=True,
            ).join(UserModel,
                   ObservationModel.id_role == UserModel.id_user,
                   full=True).group_by(
                       ObservationModel.id_observation,
                       ProgramsModel.id_program,
                       UserModel.username,
                       LAreas.area_name,
                       LAreas.area_code,
                   ))

        observations = observations.order_by(
            desc(ObservationModel.timestamp_create))
        # current_app.logger.debug(str(observations))
        observations = observations.all()

        try:
            if current_app.config.get("API_TAXHUB") is not None:
                taxon_repository = []
                taxhub_list_id = []
                for observation in observations:
                    if observation.ProgramsModel.taxonomy_list not in taxhub_list_id:
                        taxhub_list_id.append(
                            observation.ProgramsModel.taxonomy_list)
                for tax_list in taxhub_list_id:
                    taxon_repository.append(mkTaxonRepository(tax_list))

            features = []
        except Exception as e:
            return {"message": str(e)}, 500

        for observation in observations:
            feature = get_geojson_feature(observation.ObservationModel.geom)
            feature["properties"]["municipality"] = {
                "name": observation.area_name,
                "code": observation.area_code,
            }

            # Observer
            feature["properties"]["observer"] = {
                "username": observation.username
            }
            # Observer submitted media
            feature["properties"]["image"] = ("/".join([
                "/api",
                current_app.config["MEDIA_FOLDER"],
                observation.images[0][0],
            ]) if observation.images and observation.images != [[None, None]]
                                              else None)
            # Photos
            feature["properties"]["photos"] = [{
                "url":
                "/media/{}".format(filename),
                "id_media":
                id_media
            } for filename, id_media in observation.images
                                               if id_media is not None]
            # Municipality
            observation_dict = observation.ObservationModel.as_dict(True)
            for k in observation_dict:
                if k in obs_keys and k != "municipality":
                    feature["properties"][k] = observation_dict[k]
            # Program
            program_dict = observation.ProgramsModel.as_dict(True)
            for program in program_dict:
                if program == "title":
                    feature["properties"]["program_title"] = program_dict[
                        program]
            # TaxRef
            if current_app.config.get("API_TAXHUB") is None:
                taxref = Taxref.query.filter(Taxref.cd_nom == observation.
                                             ObservationModel.cd_nom).first()
                if taxref:
                    feature["properties"]["taxref"] = taxref.as_dict(True)

                medias = TMedias.query.filter(TMedias.cd_ref == observation.
                                              ObservationModel.cd_nom).all()
                if medias:
                    feature["properties"]["medias"] = [
                        media.as_dict(True) for media in medias
                    ]
            else:
                # A plain for loop never raises StopIteration, so the
                # try/except wrapper is not needed here
                for taxon_rep in taxon_repository:
                    for taxon in taxon_rep:
                        if (taxon["taxref"]["cd_nom"] ==
                                observation.ObservationModel.cd_nom):
                            feature["properties"]["nom_francais"] = (
                                taxon["nom_francais"])
                            feature["properties"]["taxref"] = (
                                taxon["taxref"])
                            feature["properties"]["medias"] = (
                                taxon["medias"])
            features.append(feature)

        return FeatureCollection(features), 200

    except Exception as e:
        current_app.logger.critical(
            "[get_observations_by_user_id] Error: %s", str(e))
        return {"message": str(e)}, 400
Code example #25
0
 def get_geojson_feature_collection(paths: List['Path']):
     return FeatureCollection([p.get_geojson_feature() for p in paths])
Code example #26
0
def export_all_habitats(
    info_role,
    export_format="csv",
):
    """
    Download all stations.
    The route uses POST to avoid an overly long query string.

    .. :quickref: Occhab;

    """

    data = request.get_json()

    export_view = GenericTableGeo(
        tableName="v_export_sinp",
        schemaName="pr_occhab",
        engine=DB.engine,
        geometry_field="geom_local",
        srid=current_app.config["LOCAL_SRID"],
    )

    file_name = datetime.datetime.now().strftime("%Y_%m_%d_%Hh%Mm%S")
    file_name = filemanager.removeDisallowedFilenameChars(file_name)
    db_cols_for_shape = []
    columns_to_serialize = []
    for db_col in export_view.db_cols:
        if db_col.key in blueprint.config["EXPORT_COLUMS"]:
            if db_col.key != "geometry":
                db_cols_for_shape.append(db_col)
            columns_to_serialize.append(db_col.key)
    results = (
        DB.session.query(export_view.tableDef)
        .filter(export_view.tableDef.columns.id_station.in_(data["idsStation"]))
        .limit(blueprint.config["NB_MAX_EXPORT"])
    )
    if export_format == "csv":
        formated_data = [export_view.as_dict(d, fields=[]) for d in results]
        return to_csv_resp(
            file_name, formated_data, separator=";", columns=columns_to_serialize
        )
    elif export_format == "geojson":
        features = []
        for r in results:
            features.append(
                Feature(
                    geometry=json.loads(r.geojson),
                    properties=export_view.as_dict(r, fields=columns_to_serialize),
                )
            )
        return to_json_resp(
            FeatureCollection(features), as_file=True, filename=file_name, indent=4
        )
    else:
        try:
            dir_name, file_name = export_as_geo_file(
                export_format=export_format,
                export_view=export_view,
                db_cols=db_cols_for_shape,
                geojson_col=None,
                data=results,
                file_name=file_name,
            )
            return send_from_directory(dir_name, file_name, as_attachment=True)

        except GeonatureApiError as e:
            message = str(e)

        return render_template(
            "error.html",
            error=message,
            redirect=current_app.config["URL_APPLICATION"]
            + "/#/"
            + blueprint.config["MODULE_URL"],
        )
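The geojson branch above pairs each row's geometry string with its serialized columns. A standalone sketch of that pattern, with a made-up row standing in for the SQLAlchemy results:

import json
from geojson import Feature, FeatureCollection

# Hypothetical rows: (geojson geometry string, properties dict)
rows = [('{"type": "Point", "coordinates": [6.1, 45.2]}', {"id_station": 1})]

fc = FeatureCollection(
    [Feature(geometry=json.loads(g), properties=p) for g, p in rows])
print(fc["features"][0]["properties"])  # -> {'id_station': 1}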
Code example #27
0
    for cr in poly:
        poly_coords.append({
            'x': cr['x'],
            'y': cr['y'],
            'latitude': cr['lat'],
            'longitude': cr['long']
        })
        poly_geo_coords.append((cr['long'], cr['lat']))

    # add final closing point
    poly_geo_coords.append((poly[0]['long'], poly[0]['lat']))
    final_coords.append(poly_coords)
    geo_feature = Feature(geometry=Polygon([poly_geo_coords], precision=15))
    geo_features.append(geo_feature)

geo_feature_collection = FeatureCollection(geo_features)
geo_feature_collection_dump = geojson_dumps(geo_feature_collection,
                                            sort_keys=True)

json_contour_filepath = os.path.join(
    BASE_DIR, img_base_results_path + img_name + '-contours-method-2.json')
geojson_filepath = os.path.join(
    BASE_DIR, img_base_results_path + img_name + '-method-2-geojson.json')
with open(json_contour_filepath, 'w') as outfile:
    json.dump(final_coords, outfile)

with open(geojson_filepath, 'w') as outfile:
    outfile.write(geo_feature_collection_dump)
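About precision=15 above: assuming geojson>=2.5, where geometry constructors take a precision argument, every coordinate is rounded to that many decimals when the geometry is built. A short sketch:

from geojson import Polygon, dumps

ring = [(12.3456789, 45.9876543), (12.35, 45.99), (12.3456789, 45.9876543)]
# precision=3 keeps three decimals per coordinate
print(dumps(Polygon([ring], precision=3)))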
Code example #28
0
File: utils.py Project: xdavld/up42-py
def any_vector_to_fc(
    vector: Union[Dict, Feature, FeatureCollection, List, GeoDataFrame,
                  Polygon, Point, ],
    as_dataframe: bool = False,
) -> Union[Dict, GeoDataFrame]:
    """
    Gets a uniform feature collection dictionary (with fc and f bboxes) from any input vector type.

    Args:
        vector: One of Dict, FeatureCollection, Feature, List of bounds coordinates,
            GeoDataFrame, shapely.geometry.Polygon, shapely.geometry.Point.
            All assume EPSG 4326 and Polygons!
        as_dataframe: GeoDataFrame output with as_dataframe=True.
    """
    if not isinstance(
            vector,
        (
            dict,
            FeatureCollection,
            Feature,
            geojson_Polygon,
            list,
            GeoDataFrame,
            Polygon,
            Point,
        ),
    ):
        raise ValueError(
            "The provided geometry muste be a FeatureCollection, Feature, Dict, geopandas "
            "Dataframe, shapely Polygon, shapely Point or a list of 4 bounds coordinates."
        )

    ## Transform all possible input geometries to a uniform feature collection.
    vector = copy.deepcopy(vector)  # otherwise changes input geometry.
    if isinstance(vector, (dict, FeatureCollection, Feature)):
        try:
            if vector["type"] == "FeatureCollection":
                df = GeoDataFrame.from_features(vector, crs=4326)
            elif vector["type"] == "Feature":
                # TODO: Handle point features?
                df = GeoDataFrame.from_features(FeatureCollection([vector]),
                                                crs=4326)
            elif vector["type"] == "Polygon":  # Geojson geometry
                df = GeoDataFrame.from_features(FeatureCollection(
                    [Feature(geometry=vector)]),
                                                crs=4326)
        except KeyError as e:
            raise ValueError(
                "Provided geometry dictionary has to include a featurecollection or feature."
            ) from e
    else:
        if isinstance(vector, list):
            if len(vector) == 4:
                box_poly = shapely.geometry.box(*vector)
                df = GeoDataFrame({"geometry": [box_poly]}, crs=4326)
            else:
                raise ValueError("The list requires 4 bounds coordinates.")
        elif isinstance(vector, Polygon):
            df = GeoDataFrame({"geometry": [vector]}, crs=4326)
        elif isinstance(vector, Point):
            df = GeoDataFrame(
                {"geometry": [vector.buffer(0.00001)]}, crs=4326
            )  # Around 1m buffer # TODO: Find better solution than small buffer?
        elif isinstance(vector, GeoDataFrame):
            df = vector
            try:
                if df.crs.to_string() != "EPSG:4326":
                    df = df.to_crs(epsg=4326)
            except AttributeError as e:
                raise AttributeError("GeoDataFrame requires a crs.") from e

    if as_dataframe:
        return df
    else:
        fc = df.__geo_interface__
        return fc
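A hypothetical usage of the converter above, assuming shapely and geopandas are installed, showing two of the accepted input types:

from shapely.geometry import Polygon

aoi = Polygon([(13.4, 52.5), (13.5, 52.5), (13.5, 52.6), (13.4, 52.5)])
fc = any_vector_to_fc(aoi)  # feature collection dict
df = any_vector_to_fc([13.4, 52.5, 13.5, 52.6], as_dataframe=True)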
Code example #29
0
# Create geoip2 database reader
reader = geoip2.database.Reader('db/GeoLite2-City.mmdb')

host_data = []

# Query geoip2 database for each ip
for host in hosts:
    response = reader.city(host['ip'])
    host_data.append(response)

reader.close()

features = []

count = 0

# Create a feature for each ip
for entry in host_data:
    new_feature = Feature(geometry=Point(
        (entry.location.longitude, entry.location.latitude)),
                          id=count)
    features.append(new_feature)
    count += 1

crs = {"type": "name", "properties": {"name": "EPSG:4326"}}

# Create GeoJSON FeatureCollection from features
collection = FeatureCollection(features, crs=crs)

print(collection)
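One caveat with the lookup loop above: reader.city() raises geoip2.errors.AddressNotFoundError for IPs that are missing from the database. A sketch that skips such hosts instead of crashing, with hosts assumed as in the snippet:

import geoip2.database
from geoip2.errors import AddressNotFoundError

host_data = []
with geoip2.database.Reader('db/GeoLite2-City.mmdb') as reader:
    for host in hosts:  # `hosts` assumed, as above
        try:
            host_data.append(reader.city(host['ip']))
        except AddressNotFoundError:
            continue  # IP not present in the GeoLite2 database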
Code example #30
0
def data_fetch():
    results = query_resulted_in_csv(server, query_geom_symbol)

    feature_symboliser = {}
    symbolisers_geoms = {}
    """parse the queries results"""
    for row in results:
        symboliser_info = ()
        feature_symboliser[str(row.symboliser)] = ()
        query = query_symboliser.substitute(symboliser=str(row.symboliser))
        # print(query)
        results_symbolisers = query_resulted_in_csv(server, query)
        # print(list(results_symbolisers)[0][0])
        for row1 in results_symbolisers:
            # print(str(row1.h), str(row1.s), str(row1.v), str(row1.stroke_width), str(row1.size))
            symboliser_info = (float(str(row1.h)), float(str(row1.s)),
                               float(str(row1.v)), str(row1.stroke_width),
                               str(row1.size))
        if symboliser_info not in symbolisers_geoms.keys():
            symbolisers_geoms[symboliser_info] = {}
        symbolisers_geoms[symboliser_info][str(row.feature)] = str(
            row.featureWKT)

    # print(symbolisers_geoms)

    symbolisers_dict = {}
    featurecollection_list = []

    for key in symbolisers_geoms.keys():
        symboliser_type = ''
        """This part maps symboliser info to JS Leaflft style"""

        rgb_decimal = matplotlib.colors.hsv_to_rgb(
            [key[0] / 360, key[1], key[2]])
        color_hex = matplotlib.colors.to_hex(rgb_decimal)

        # print(color_hex)
        if key[3] != 'None':
            symboliser_type = 'https://www.gis.lu.se/ont/data_portrayal/symboliser#LineSymboliser'
            # Copy the default so the shared style dict is not mutated
            new_key = dict(default_line_style)
            new_key['color'] = color_hex
            new_key['weight'] = key[3]

        if key[4] != 'None':
            symboliser_type = 'https://www.gis.lu.se/ont/data_portrayal/symboliser#PointSymboliser'
            new_key = dict(default_point_style)
            new_key['fillColor'] = color_hex
            new_key['radius'] = int(key[4])

        symbolisers_dict[key] = new_key
        """This part is GeoJSON conversion"""

        feature_list = []
        for feature_key in symbolisers_geoms[key].keys():
            geom = ogr.CreateGeometryFromWkt(
                symbolisers_geoms[key][feature_key])
            geom_geojson = loads(geom.ExportToJson())
            feature_geojson = Feature(geometry=geom_geojson,
                                      properties={"URI": feature_key})
            feature_list.append(feature_geojson)

        feature_collection = FeatureCollection(feature_list)

        feature_collection['style'] = symbolisers_dict[key]

        feature_collection['symboliser_type'] = symboliser_type
        # print(feature_collection)
        # feature_collection = geojson.loads(dump)

        featurecollection_list.append(geojson.dumps(feature_collection))
    return featurecollection_list
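For reference, the colour conversion used above: matplotlib's hsv_to_rgb expects all three components in [0, 1], which is why the hue is divided by 360. A small self-contained check:

import matplotlib.colors

h, s, v = 210.0, 0.8, 0.9  # hue in degrees, as in the query results above
rgb = matplotlib.colors.hsv_to_rgb([h / 360.0, s, v])
print(matplotlib.colors.to_hex(rgb))  # -> '#2e8ae6'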