def test_01_full_text(self):
        """Round-trip an AreaNotice carrying one circle plus eight free-text
        sub-areas and verify the merged text and GeoJSON survive the AIVDM
        encode/decode cycle.
        """
        notice = an.AreaNotice(
            an.notice_type['cau_mammals_not_obs'],
            datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
            60, 10, source_mmsi=2)
        notice.add_subarea(an.AreaNoticeCirclePt(-69.5, 42, radius=0))  # 1

        text_sections = (
            '12345678901234',  # 2
            'More text that',  # 3
            ' spans across ',  # 4
            'multiple lines',  # 5
            '  The text is ',  # 6
            'supposed to be',  # 7
            ' cated togethe',  # 8
            'r. 12345678901'   # 9
            )
        for text in text_sections:
            notice.add_subarea(an.AreaNoticeFreeText(text=text))

        # Free text is transmitted upper-case, so compare against .upper().
        # assertEqual (rather than the deprecated failUnless) reports the
        # mismatching strings on failure.
        expected = ''.join(text_sections).upper()
        self.assertEqual(notice.get_merged_text(), expected)

        orig = geojson.loads(geojson.dumps(notice))
        decoded = geojson.loads(geojson.dumps(
            an.AreaNotice(nmea_strings=list(notice.get_aivdm()))))
        self.assertTrue(almost_equal_geojson(orig, decoded))
    def test_feature_class(self):
        """
        Test the SimpleWebFeature example class: feature-protocol access,
        encoding to a GeoJSON string, and decoding via a factory hook.
        """

        from geojson.examples import SimpleWebFeature
        feature = SimpleWebFeature(
            id='1',
            geometry={'type': 'Point', 'coordinates': [53, -4]},
            title='Feature 1', summary='The first feature',
            link='http://example.org/features/1'
        )

        # It satisfies the feature protocol
        self.assertEqual(feature.id, '1')
        self.assertEqual(feature.properties['title'], 'Feature 1')
        self.assertEqual(feature.properties['summary'], 'The first feature')
        self.assertEqual(feature.properties['link'], 'http://example.org/features/1')
        self.assertEqual(geojson.dumps(feature.geometry, sort_keys=True), '{"coordinates": [53, -4], "type": "Point"}')

        # Encoding
        self.assertEqual(geojson.dumps(feature, sort_keys=True), '{"geometry": {"coordinates": [53, -4], "type": "Point"}, "id": "1", "properties": {"link": "http://example.org/features/1", "summary": "The first feature", "title": "Feature 1"}, "type": "Feature"}')

        # Decoding.  NOTE: the ``encoding`` keyword was dropped because
        # json.loads() no longer accepts it (deprecated in Python 3.1,
        # removed in 3.9 — it raised TypeError there).  The local was also
        # renamed so it no longer shadows the json module name.
        factory = geojson.examples.createSimpleWebFeature
        encoded = '{"geometry": {"type": "Point", "coordinates": [53, -4]}, "id": "1", "properties": {"summary": "The first feature", "link": "http://example.org/features/1", "title": "Feature 1"}}'
        feature = geojson.loads(encoded, object_hook=factory)
        self.assertEqual(repr(type(feature)), "<class 'geojson.examples.SimpleWebFeature'>")
        self.assertEqual(feature.id, '1')
        self.assertEqual(feature.properties['title'], 'Feature 1')
        self.assertEqual(feature.properties['summary'], 'The first feature')
        self.assertEqual(feature.properties['link'], 'http://example.org/features/1')
        self.assertEqual(geojson.dumps(feature.geometry, sort_keys=True), '{"coordinates": [53, -4], "type": "Point"}')
  def testPoint(self):
    """Round-trip a notice containing one sub-area of every supported kind."""
    current_year = datetime.datetime.utcnow().year
    notice = area_notice.AreaNotice(
        area_notice.notice_type['cau_mammals_not_obs'],
        datetime.datetime(current_year, 7, 6, 0, 0, 4), 60,
        10, source_mmsi=666555444)

    # One of each sub-area type.
    subareas = (
        area_notice.AreaNoticeCirclePt(-69.8, 40.001, radius=0),
        area_notice.AreaNoticeCirclePt(-69.8, 40.202, radius=2000),
        area_notice.AreaNoticeRectangle(-69.6, 40.3003, 2000, 1000, 0),
        area_notice.AreaNoticeSector(-69.4, 40.40004, 6000, 10, 50),
        area_notice.AreaNoticePolyline([(170, 7400)], -69.2, 40.5000005),
        area_notice.AreaNoticePolygon(
            [(10, 1400), (90, 1950)], -69.0, 40.6000001),
        area_notice.AreaNoticeFreeText(text='Some Text'),
    )
    for subarea in subareas:
      notice.add_subarea(subarea)

    orig = geojson.loads(geojson.dumps(notice))
    decoded = geojson.loads(geojson.dumps(
        area_notice.AreaNotice(nmea_strings=list(notice.get_aivdm()))))
    self.assertAlmostEqualGeojson(orig, decoded)
    def test_geo_interface(self):
        """Objects exposing __geo_interface__ encode like plain features."""
        class Thingy(object):
            def __init__(self, id, title, x, y):
                self.id = id
                self.title = title
                self.x = x
                self.y = y

            @property
            def __geo_interface__(self):
                geometry = {"type": "Point", "coordinates": (self.x, self.y)}
                return {
                    "id": self.id,
                    "properties": {"title": self.title},
                    "geometry": geometry,
                }

        thing = Thingy("1", "thingy one", -106, 40)

        # The geometry member alone serializes as a GeoJSON Point.
        self.assertEqual(
            geojson.dumps(thing.__geo_interface__["geometry"], sort_keys=True),
            '{"coordinates": [-106, 40], "type": "Point"}',
        )

        # The whole object serializes as a GeoJSON Feature.
        expected = (
            '{"geometry": {"coordinates": [-106, 40],'
            ' "type": "Point"},'
            ' "id": "1",'
            ' "properties": {"title": "thingy one"}}'
        )
        self.assertEqual(geojson.dumps(thing, sort_keys=True), expected)
def map():
    """
    Render the map template.

    Shows the building footprints whose gids are stored in the session,
    together with the GPS position/azimuth of the panorama images linked
    to those buildings.
    """
    # Buildings: FeatureCollection of footprints for the session's gids.
    bdg_gids = flask.session['bdg_gids']
    rows = ve_object.query.filter(ve_object.gid.in_(bdg_gids)).all()
    bdgs = []
    for row in rows:
        geometry = json.loads(db.session.scalar(func.ST_AsGeoJSON(row.the_geom)))
        feature = Feature(id=row.gid, geometry=geometry,
                          properties={"gid": row.gid,
                                      "rrvs_status": row.rrvs_status})
        bdgs.append(feature)
    bdgs_json = dumps(FeatureCollection(bdgs))

    # Images: metadata plus GPS position for each image in the session.
    img_gids = flask.session['img_gids']
    image_rows = pan_imgs.query.filter(pan_imgs.gid.in_(img_gids)).all()
    gps_ids = [row.gps for row in image_rows]
    gps_rows = gps.query.filter(gps.gid.in_(gps_ids)).all()
    # BUG FIX: an IN(...) query does not guarantee result order, so pairing
    # gps_rows[i] with image_rows[i] could attach the wrong position to an
    # image.  Look the GPS row up by its gid instead.
    gps_by_gid = {gps_row.gid: gps_row for gps_row in gps_rows}
    img_gps = []
    for image in image_rows:
        gps_row = gps_by_gid[image.gps]
        geometry = json.loads(db.session.scalar(func.ST_AsGeoJSON(gps_row.the_geom)))
        feature = Feature(id=image.gid, geometry=geometry,
                          properties={"img_id": image.gid,
                                      "repository": image.repository,
                                      "filename": image.filename,
                                      "frame_id": image.frame_id,
                                      "azimuth": gps_row.azimuth})
        img_gps.append(feature)
    gps_json = dumps(FeatureCollection(img_gps))

    return flask.render_template('map.html', bdgs=bdgs_json, gps=gps_json)
def main(args):
    features = []

    if args.equator:
        geom = geojson.LineString([[-180, 0], [180, 0]])
        features.append(geojson.Feature('equator', geom))

    if args.tropics:
        cancer = geojson.LineString([[-180, 23.4378], [180, 23.4368]])
        features.append(geojson.Feature('cancer', cancer))

        capricorn = geojson.LineString([[-180, -23.4378], [180, -23.4378]])
        features.append(geojson.Feature('capricorn', capricorn))

    if args.lat:
        for top, right, bottom, left in latBand(args.lat):
            geom = geojson.Polygon([[top, left], [top, right],
                [bottom, right], [bottom, left]])

            features.append(geojson.Feature(geometry=geom))

    if args.long:
        for top, right, bottom, left in longBand(args.long):
            geom = geojson.Polygon([[top, left], [top, right],
                [bottom, right], [bottom, left]])

            features.append(geojson.Feature(geometry=geom))

    collection = geojson.FeatureCollection(features, indent=2)

    print geojson.dumps(collection)
    def test_protocol(self):
        """
        A plain dictionary can satisfy the feature protocol.
        """
        feature_dict = {
            'type': 'Feature',
            'id': '1',
            'geometry': {'type': 'Point', 'coordinates': [53, -4]},
            'properties': {'title': 'Dict 1'},
        }

        expected = ('{"geometry":'
                    ' {"coordinates": [53, -4],'
                    ' "type": "Point"},'
                    ' "id": "1",'
                    ' "properties": {"title": "Dict 1"},'
                    ' "type": "Feature"}')

        # Encoding the dict directly yields canonical GeoJSON.
        encoded = geojson.dumps(feature_dict, sort_keys=True)
        self.assertEqual(encoded, expected)

        # Decoding and re-encoding is lossless.
        round_tripped = geojson.dumps(geojson.loads(encoded), sort_keys=True)
        self.assertEqual(round_tripped, expected)
Exemple #8
0
    def test_polygon_filter(self):
        """create_geom_filter() must emit the expected PostGIS SQL for a
        polygon geometry with a tolerance — first with the default SRID
        (4326), then with an explicit ``epsg`` (900913, web mercator).
        The SQL is compared as a compiled string; bind parameters are
        checked individually."""
        from mapfish.protocol import create_geom_filter
        # Unit-square-ish polygon used as the filter geometry.
        poly = Polygon(((1, 2), (1, 3), (2, 3), (2, 2), (1, 2)))
        request = FakeRequest(
            {"geometry": dumps(poly), "tolerance": "1"}
        )
        filter = create_geom_filter(request, MappedClass)
        compiled_filter = filter.compile(engine)
        params = compiled_filter.params
        filter_str = _compiled_to_string(compiled_filter)
        # Expected shape: bbox-expand overlap tests in both directions,
        # plus an ST_Distance bound equal to the tolerance.
        eq_(filter_str, '(ST_Expand(GeomFromWKB(%(GeomFromWKB_1)s, %(GeomFromWKB_2)s), %(ST_Expand_1)s) && "table".geom) AND (ST_Expand("table".geom, %(ST_Expand_2)s) && GeomFromWKB(%(GeomFromWKB_3)s, %(GeomFromWKB_4)s)) AND ST_Distance("table".geom, GeomFromWKB(%(GeomFromWKB_5)s, %(GeomFromWKB_6)s)) <= %(ST_Distance_1)s')
        # The geometry travels as WKB; SRID defaults to 4326.
        assert wkb.loads(str(params["GeomFromWKB_1"])).equals(poly)
        assert params["GeomFromWKB_2"] == 4326
        assert params["ST_Expand_1"] == 1
        assert params["ST_Distance_1"] == 1

        # Same request with an explicit epsg: the table geometry gets
        # wrapped in ST_Transform(..., 900913) on every reference.
        poly = Polygon(((1, 2), (1, 3), (2, 3), (2, 2), (1, 2)))
        request = FakeRequest(
            {"geometry": dumps(poly), "tolerance": "1", "epsg": "900913"}
        )
        filter = create_geom_filter(request, MappedClass)
        compiled_filter = filter.compile(engine)
        params = compiled_filter.params
        filter_str = _compiled_to_string(compiled_filter)
        eq_(filter_str, '(ST_Expand(GeomFromWKB(%(GeomFromWKB_1)s, %(GeomFromWKB_2)s), %(ST_Expand_1)s) && ST_Transform("table".geom, %(param_1)s)) AND (ST_Expand(ST_Transform("table".geom, %(param_2)s), %(ST_Expand_2)s) && GeomFromWKB(%(GeomFromWKB_3)s, %(GeomFromWKB_4)s)) AND ST_Distance(ST_Transform("table".geom, %(param_3)s), GeomFromWKB(%(GeomFromWKB_5)s, %(GeomFromWKB_6)s)) <= %(ST_Distance_1)s')
        assert wkb.loads(str(params["GeomFromWKB_1"])).equals(poly)
        assert params["GeomFromWKB_2"] == 900913
        assert params["ST_Expand_1"] == 1
        assert params["param_1"] == 900913
        assert params["ST_Distance_1"] == 1        #assert isinstance(filter, sql.expression.ClauseElement)
Exemple #9
0
def generator_function(f, verbose):
    """Yield (id, bounds, feature-dict) triples from line-delimited GeoJSON.

    Reads one GeoJSON object per line from the file-like *f*.  Polygons
    yield one entry; MultiPolygons yield one entry per member polygon;
    any other geometry type is skipped.  When *verbose* is true, progress
    is printed for each yielded feature.
    """
    counter = 0
    for line in f:
        try:
            obj = geojson.loads(line)
        except ValueError:
            # Malformed line: report and keep going.  (Was a bare except,
            # which also swallowed KeyboardInterrupt/SystemExit.)
            print "Unexpected error:", sys.exc_info()
            continue
        properties = property_map(obj.get('properties'))
        geometry = obj.get('geometry')
        geom_type = geometry.get('type')
        if geom_type == 'Polygon':
            poly = asShape(geometry)
            bounds = poly.bounds
            feature = geojson.Feature(id=counter, geometry=poly,
                                      properties=properties)
            if verbose:
                print counter, bounds, properties.get('name')
            # BUG FIX: yield the same id the feature was built with; the
            # original incremented counter *before* yielding, so the yielded
            # id and the feature's id always disagreed by one.  The unused
            # ``verbose`` parameter now actually gates the progress output.
            yield (counter, bounds, json.loads(geojson.dumps(feature)))
            counter += 1
        elif geom_type == 'MultiPolygon':
            mpoly = asShape(geometry)
            for poly in mpoly:
                bounds = poly.bounds
                feature = geojson.Feature(id=counter, geometry=poly,
                                          properties=properties)
                if verbose:
                    print counter, bounds, properties.get('name')
                yield (counter, bounds, json.loads(geojson.dumps(feature)))
                counter += 1
        else:
            print "unsupported type", geom_type
            continue
Exemple #10
0
def convert2geojson():
  """Regenerate the merged KOD and OSM school GeoJSON files.

  Shells out to ogr2ogr / osmtogeojson for the format conversions, then
  merges the per-source FeatureCollections into single output files.
  """
  # Local import keeps the block self-contained; subprocess with argument
  # lists avoids the shell entirely, and check_call surfaces converter
  # failures instead of silently ignoring them like os.system did.
  import subprocess

  def _remove(path):
    # Best-effort delete (mirrors the old "rm" continuing on a missing file).
    try:
      os.remove(path)
    except OSError:
      pass

  # KOD sources: VRT -> GeoJSON, then merge primary + secondary.
  _remove("kibera-primary-schools.geojson")
  subprocess.check_call(["ogr2ogr", "-f", "GeoJSON",
                         "kibera-primary-schools.geojson",
                         "kibera-primary-schools.vrt"])
  _remove("kibera-secondary-schools.geojson")
  subprocess.check_call(["ogr2ogr", "-f", "GeoJSON",
                         "kibera-secondary-schools.geojson",
                         "kibera-secondary-schools.vrt"])
  kod_primary = geojson.loads(readfile('kibera-primary-schools.geojson'))
  kod_secondary = geojson.loads(readfile('kibera-secondary-schools.geojson'))
  kod_primary.features.extend(kod_secondary.features)
  dump = geojson.dumps(kod_primary, sort_keys=True, indent=2)
  writefile('kibera-primary-secondary-schools.geojson', dump)

  # OSM extracts: convert and clean each area, then merge into one file.
  for area in ('kibera', 'mathare', 'kangemi'):
    out_name = '%s-schools-osm.geojson' % area
    with open(out_name, 'w') as out:
      subprocess.check_call(
          ["osmtogeojson", "-e", '%s-schools-osm.xml' % area], stdout=out)
    clean_osm(out_name)
  osm_merged = geojson.loads(readfile('kibera-schools-osm.geojson'))
  osm_mathare = geojson.loads(readfile('mathare-schools-osm.geojson'))
  osm_kangemi = geojson.loads(readfile('kangemi-schools-osm.geojson'))
  osm_merged.features.extend(osm_mathare.features)
  osm_merged.features.extend(osm_kangemi.features)
  dump = geojson.dumps(osm_merged, sort_keys=True, indent=2)
  writefile('nairobi-schools-osm.geojson', dump)
    def test_01point(self):
        """Round-trip a single zero-radius circle point through AIVDM."""
        year = datetime.datetime.utcnow().year
        pt1 = an.AreaNotice(
            an.notice_type['cau_mammals_not_obs'],
            datetime.datetime(year, 8, 6, 0, 1, 0), 60, 10,
            source_mmsi=445566778)
        pt1.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.0, radius=0))
        orig = geojson.loads(geojson.dumps(pt1))

        decoded_pt = an.AreaNotice(nmea_strings=list(pt1.get_aivdm()))
        decoded = geojson.loads(geojson.dumps(decoded_pt))

        # verbose=True makes the comparison print the mismatch details on
        # failure; asserting (rather than the original sys.exit) lets the
        # test runner record the failure normally.  The deprecated
        # failUnless alias and the duplicate comparison are gone.
        self.assertTrue(almost_equal_geojson(orig, decoded, verbose=True))
Exemple #12
0
def ingest_json_api(path):
    """Ingest one JSON observation file and store it as a GeoJSON feature.

    The feature kind is inferred from marker fields in the record:
    'Exhaustion' -> ethnographic, 'Hardness' -> hydro, otherwise sighting.
    """
    log.info("ingest_json_api %s" % path)

    # "with" guarantees the file is closed even if parsing raises.
    with open(path) as source:
        data = json.loads(source.read())

    t = data['t_utc']
    # BUG FIX (naming): the original bound data['Longitude'] to a variable
    # called `lat` and data['Latitude'] to `lon`.  The emitted coordinate
    # order [lon, lat, 0] is unchanged and matches the GeoJSON convention.
    lon = data['Longitude']
    lat = data['Latitude']

    coords = [float(lon), float(lat), 0]
    log.debug(data)

    feature = geojson.Feature(
        geometry={'type': "Point", 'coordinates': coords}, properties=data)

    if 'Exhaustion' in data:
        feature_id = model.insert_feature('ethnographic', t, geojson.dumps(feature))
        log.info("ingest_json_api ETHNO")
    elif 'Hardness' in data:
        feature_id = model.insert_feature('hydro', t, geojson.dumps(feature))
        log.info("ingest_json_api HYDRO")
    else:
        feature_id = model.insert_feature('sighting', t, geojson.dumps(feature))
        log.info("ingest_json_api SIGHTING")
def test_spatial_geometry():
    """spatial.Spatial(GEOMETRY, ...) must compile to the expected
    expand/distance SQL — first with the default SRID (4326), then with an
    explicit epsg (900913), which wraps the table geometry in transform().
    """
    # Small polygon used as the filter geometry.
    poly = Polygon(((1, 2), (1, 3), (2, 3), (2, 2), (1, 2)))

    # with epsg undefined
    filter = spatial.Spatial(
        spatial.Spatial.GEOMETRY,
        MappedClass.geometry_column(),
        geometry=dumps(poly),
        tolerance=1
    )
    filter = filter.to_sql_expr()
    params = filter.compile().params
    # Expected shape: bbox-expand overlap test plus a distance bound equal
    # to the tolerance; the geometry travels as WKT (geomfromtext).
    assert str(filter) == '(expand(geomfromtext(:geomfromtext_1, :geomfromtext_2), :expand_1) && "table".geom) AND distance("table".geom, geomfromtext(:geomfromtext_1, :geomfromtext_2)) <= :distance_1'
    assert wkt.loads(params["geomfromtext_1"]).equals(poly)
    assert params["geomfromtext_2"] == 4326
    assert params["expand_1"] == 1
    assert params["distance_1"] == 1

    # with epsg defined
    filter = spatial.Spatial(
        spatial.Spatial.GEOMETRY,
        MappedClass.geometry_column(),
        geometry=dumps(poly),
        tolerance=1,
        epsg=900913
    )
    filter = filter.to_sql_expr()
    params = filter.compile().params
    # Every reference to the table geometry is now transform()-wrapped.
    assert str(filter) == '(expand(geomfromtext(:geomfromtext_1, :geomfromtext_2), :expand_1) && transform("table".geom, :transform_1)) AND distance(transform("table".geom, :transform_1), geomfromtext(:geomfromtext_1, :geomfromtext_2)) <= :distance_1'
    assert wkt.loads(params["geomfromtext_1"]).equals(poly)
    assert params["geomfromtext_2"] == 900913
    assert params["expand_1"] == 1
    assert params["transform_1"] == 900913
    assert params["distance_1"] == 1
    def test_whales(self):
        """Whales-observed circle notice: encode, decode, verify metadata."""
        zone_type = an.notice_type['cau_mammals_reduce_speed']
        when = datetime.datetime(
            datetime.datetime.utcnow().year, 7, 6, 0, 0, 4)
        circle = an.AreaNotice(zone_type, when, 60, 10, source_mmsi=123456789)
        circle.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.0, radius=4260))

        self.assertEqual(zone_type, 1)
        self.assertEqual(zone_type, circle.area_type)

        # Round-trip through GeoJSON and inspect the result as a dict.
        data = geojson.loads(geojson.dumps(circle))
        self.assertEqual(zone_type, data['bbm']['area_type'])
        self.assertEqual(an.notice_type[zone_type], data['bbm']['area_type_desc'])

        # Re-encode via NMEA sentences, then decode a fresh notice from them.
        aivdms = list(circle.get_aivdm())

        del circle
        del data

        notice = an.AreaNotice(nmea_strings=aivdms)
        self.assertEqual(zone_type, notice.area_type)

        data = geojson.loads(geojson.dumps(notice))
        self.assertEqual(zone_type, data['bbm']['area_type'])
        self.assertEqual(an.notice_type[zone_type], data['bbm']['area_type_desc'])
Exemple #15
0
    def index(self):
        """Compute the shortest path between the 'source' and 'target'
        request params and return it as GeoJSON (JSONP-wrapped when a
        'callback' param is present).
        """
        # NOTE(review): 'lang' is read but currently unused downstream.
        lang = request.params.get('lang', 'en')
        log.error("begin")

        start_node = self._nearestVertex(request.params['source'])
        log.error("start")
        end_node = self._nearestVertex(request.params['target'])
        log.error("end")

        # FIXME: validate start_node and end_node

        edges, costs, rowcount = self._shortestPath(start_node, end_node)
        log.error("route")
        if not edges:
            return dumps({'success': False,
                          'msg': "No path from '%s' to '%s'" % (start_node, end_node)})
        else:
            features = self._roadmap(edges, costs, rowcount)
            dump = dumps(FeatureCollection(features))
            log.error("finish")
            # Look the callback up once; compare with "is not None"
            # (identity), not "!= None".
            callback = request.params.get('callback')
            if callback is not None:
                response.headers['Content-Type'] = 'text/javascript; charset=utf-8'
                return "%s(%s);" % (callback, dump)
            else:
                response.headers['Content-Type'] = 'application/json; charset=utf-8'
                return dump
Exemple #16
0
 def write_api_response(self, format, obj):
     """Write *obj* to the client in the requested *format*.

     "geojson" sends raw GeoJSON with the vendor media type; "html"
     renders the viewer template with the GeoJSON embedded.  Any other
     format produces no output (matching the original behavior).
     """
     fmt = format.lower()
     if fmt == "geojson":
         self.set_header("Content-Type", "application/vnd.geo+json")
         self.write(geojson.dumps(obj))
     elif fmt == "html":
         self.render("../html/viewer.html", title="Viewshed API",
                     geojson=geojson.dumps(obj))
  def testFullText(self):
    """Merged free text must survive the AIVDM encode/decode round trip."""
    notice = area_notice.AreaNotice(
        area_notice.notice_type['cau_mammals_not_obs'],
        datetime.datetime(
            datetime.datetime.utcnow().year, 7, 6, 0, 0, 4), 60,
        10, source_mmsi=2)
    notice.add_subarea(area_notice.AreaNoticeCirclePt(-69.5, 42, radius=0))  # 1

    text_sections = (
        '12345678901234',  # 2
        'More text that',  # 3
        ' spans across ',  # 4
        'multiple lines',  # 5
        '  The text is ',  # 6
        'supposed to be',  # 7
        ' cated togethe',  # 8
        'r. 12345678901'  # 9
    )
    for section in text_sections:
      notice.add_subarea(area_notice.AreaNoticeFreeText(text=section))

    # Free text is upper-cased during encoding.
    self.assertEqual(notice.get_merged_text(),
                     ''.join(text_sections).upper())

    orig = geojson.loads(geojson.dumps(notice))
    nmea_strings = list(notice.get_aivdm())
    decoded = geojson.loads(
        geojson.dumps(area_notice.AreaNotice(nmea_strings=nmea_strings)))
    self.assertAlmostEqualGeojson(orig, decoded)
Exemple #18
0
def output_json(data, code, headers=None):
    """Build a Flask response serializing *data* as JSON / GeoJSON.

    Empty payloads and Shapely geometries are dumped with geojson; a
    non-dict sequence of TaskGeometry objects becomes a FeatureCollection;
    everything else falls back to plain json.
    """
    if not data:
        # Nothing to return: emit an empty GeoJSON object.
        resp = make_response(geojson.dumps({}), code)
    elif isinstance(data, BaseGeometry):
        # A bare Shapely geometry dumps directly as GeoJSON.
        resp = make_response(geojson.dumps(data), code)
    elif not isinstance(data, dict) and isinstance(data[0], TaskGeometry):
        # A list of task geometries: unpack into a FeatureCollection.
        # FIXME can this be done in the model?
        features = []
        for task_geometry in data:
            features.append(geojson.Feature(
                geometry=task_geometry.geometry,
                properties={
                    'selected': True,
                    'osmid': task_geometry.osmid}))
        resp = make_response(
            geojson.dumps(geojson.FeatureCollection(features)), code)
    else:
        # Default JSON representation for anything else.
        resp = make_response(json.dumps(data), code)
    resp.headers.extend(headers or {})
    return resp
 def test_sector(self):
     """Round-trip a sector sub-area through NMEA encode/decode."""
     notice = an.AreaNotice(
         an.notice_type['cau_habitat_reduce_speed'],
         datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
         60, 10, source_mmsi=456)
     notice.add_subarea(an.AreaNoticeSector(-69.8, 42.3, 4000, 10, 50))
     orig = geojson.loads(geojson.dumps(notice))
     decoded_notice = an.AreaNotice(nmea_strings=list(notice.get_aivdm()))
     decoded = geojson.loads(geojson.dumps(decoded_notice))
     self.assertAlmostEqualGeojson(orig, decoded)
 def test_line(self):
     """Round-trip a polyline sub-area through NMEA encode/decode."""
     notice = an.AreaNotice(
         an.notice_type['report_of_icing'],
         datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
         60, 10, source_mmsi=123456)
     notice.add_subarea(an.AreaNoticePolyline([(10, 2400), ], -69.8, 42.4))
     orig = geojson.loads(geojson.dumps(notice))
     decoded_notice = an.AreaNotice(nmea_strings=list(notice.get_aivdm()))
     decoded = geojson.loads(geojson.dumps(decoded_notice))
     self.assertAlmostEqualGeojson(orig, decoded)
 def test_polygon(self):
     """Round-trip a polygon sub-area through NMEA encode/decode."""
     notice = an.AreaNotice(
         an.notice_type['cau_divers'],
         datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 0, 4),
         60, 10, source_mmsi=987123456)
     notice.add_subarea(
         an.AreaNoticePolygon([(10, 1400), (90, 1950)], -69.8, 42.5))
     orig = geojson.loads(geojson.dumps(notice))
     decoded_notice = an.AreaNotice(nmea_strings=list(notice.get_aivdm()))
     decoded = geojson.loads(geojson.dumps(decoded_notice))
     self.assertAlmostEqualGeojson(orig, decoded)
def main():
	csv_reader = csv.DictReader(open(CSV_FILENAME))
	features = []
	for row in csv_reader:
		point = geojson.Point(coordinates=(float(row['LONG']), float(row['LAT'])))
		features.append(geojson.Feature(geometry=point))
	collection = geojson.FeatureCollection(features)
	print geojson.dumps(collection)
Exemple #23
0
    def find_loc(self, db=None, col=None, x='lon', y='lat', idcol='_id',
                    properties=False, query=None, callback=None):
        """
        For a specific lat/lon column pair return a GeoJSON representation
        of the coordinates.

            :param db: Optional, mongodb database; if not recognized, a
                JSON list of available databases is returned
            :param col: Optional, mongodb collection; if not recognized, a
                JSON list of available collections is returned
            :param x: x-coordinate (longitude) field name
            :param y: y-coordinate (latitude) field name
            :param idcol: field used as the feature id
            :param properties: when True, include all document fields as
                feature properties
            :param query: Optional, query provided as a python dictionary
                (see pymongo and mongodb docs for query syntax)
            :param callback: Optional, JSONP callback name; when given the
                payload is wrapped as ``callback(payload)``

        Example:
        >>> get.find_loc('flora', 'data', x='midlon', y='midlat', idcol='REF_NO', properties=True)
        """
        def respond(payload):
            # Apply the optional JSONP wrapping in one place.
            if callback:
                return str(callback) + '(' + payload + ')'
            return payload

        # Make connection
        con = Connection(self.MONGOHOST, self.MONGOPORT)

        # Browse mode: unknown database -> list the available databases.
        # BUG FIX: the original computed the listing (into a misspelled
        # variable) and then fell through, crashing when it treated the
        # name string as a database/collection object.  Return early.
        if db not in con.database_names():
            return respond(json.dumps(con.database_names()))
        db = con[db]

        # Browse mode: unknown collection -> list the available collections.
        if col not in db.collection_names():
            return respond(json.dumps(db.collection_names()))
        col = db[col]

        # Two types of output, with and without properties
        if properties:  # Return GeoJSON with all properties.
            cur = col.find()
            serialized = geojson.dumps(geojson.FeatureCollection([
                            geojson.Feature(
                                geometry=geojson.Point((item[x], item[y])),
                                properties={'id': item[idcol], 'attributes': item}
                            )
                    for item in cur if x in item.keys() and y in item.keys()]
                        ), indent=2, default=handler)
        else:  # Return GeoJSON with only lat/lon and the id column.
            cur = col.find(fields=[x, y, idcol])
            serialized = geojson.dumps(geojson.FeatureCollection([
                            geojson.Feature(
                                geometry=geojson.Point((item[x], item[y])),
                                properties={'id': item[idcol]}
                            )
                    for item in cur if x in item.keys() and y in item.keys()],
                    ), indent=2, default=handler)

        return respond(serialized)
    def _convert_to_geojson(self, df, lat, lon, distance_df=None, index_col=None):
        '''Convert *df* (and optionally self.distdf) to GeoJSON strings and
        store them in self.template_vars for later template rendering.

        :param df: dataframe with one row per sample
        :param lat: name of the latitude column
        :param lon: name of the longitude column
        :param distance_df: unused here; the code reads self.distdf instead
            (NOTE(review): parameter/attribute mismatch — confirm intent)
        :param index_col: unused in this method
        '''
        
        ## Support Functions For processing to geojson
        ################################################################################
        def feature_from_row(row):
            # Rows missing either coordinate yield None (filtered implicitly
            # by consumers of the feature list).
            # NOTE: row.iterkv() is the old pandas spelling of iteritems();
            # this code assumes a pandas version that still provides it.
            if pd.notnull(row[lat]) and pd.notnull(row[lon]):
                properties = { k:v for k,v in row.iterkv() if k in self.columns}
                return Feature(geometry=Point(( row[lon], row[lat] )),
                               properties=properties)

        def line_feature_from_distance_df():
            """
            create a geojson feature collection of lines from a dataframe with three columns: source/dest/weight
            """
            
            # Create the sample -> lat/lon lookup.  Falls back to the first
            # column of self.df when no explicit sample column is set.
            if self.samplecolumn is None:
                try:
                    cols = self.df.columns 
                    ref_df = self.df.set_index( self.df[ cols[0] ] )
                    print("Without Explicit Sample Column, I will attempt to use the first column")
                except:
                    raise ValueError('First Column cannot be used as the Sample Index')
            else:
                try:
                    ref_df = self.df.set_index( self.samplecolumn )
                except:
                    raise ValueError("Issue with Index/Sample Column")
            

            def create_line_feature(source, target, weight, ref_df):
                # Build one LineString between the source and target samples;
                # returns None when any endpoint coordinate is missing.
                lat = self.lat
                lon = self.lon
                
                lat1 = ref_df.loc[source][lat]
                lon1 = ref_df.loc[source][lon]
                lat2 = ref_df.loc[target][lat]
                lon2 = ref_df.loc[target][lon]
                
                nullcheck = [ pd.notnull( l ) for l in [lat1,lon1,lat2,lon2] ]
                
                if False not in nullcheck:
                    return Feature(geometry=LineString([(lon1, lat1), (lon2, lat2)]))
        
            line_featurelist =  [ create_line_feature( row[0],row[1],row[2], ref_df) for idx,row in self.distdf.iterrows() ]
            return line_featurelist
        
        
        # Point features for every row of the sample dataframe.
        featurelist= [ feature_from_row(row) for idx, row in df.iterrows() ]
        self.template_vars['geojson'] = geojson.dumps( FeatureCollection(featurelist) )
        
        # Optional line features connecting samples per the distance table.
        if self.distdf is not None:
            line_featurelist = line_feature_from_distance_df() 
            self.template_vars['lines_geojson'] = geojson.dumps( FeatureCollection(line_featurelist) )
    def test_encode_nested(self):
        """
        Ensure nested objects that implement __geo_interface__ can be
        encoded into GeoJSON strings.
        """
        # Both fixture features must serialize to the same canonical string.
        for restaurant in (self.restaurant_feature1, self.restaurant_feature2):
            encoded = geojson.dumps(restaurant, sort_keys=True)
            self.assertEqual(encoded, self.restaurant_feature_str)
    def test_freetext(self):
        """Round-trip a circle plus a free-text sub-area through AIVDM."""
        notice = an.AreaNotice(
            an.notice_type['res_military_ops'],
            datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 4, 0),
            60, 10, source_mmsi=300000000)
        notice.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.6, radius=0))
        notice.add_subarea(an.AreaNoticeFreeText(text="Explanation"))

        orig = geojson.loads(geojson.dumps(notice))
        decoded_notice = an.AreaNotice(nmea_strings=list(notice.get_aivdm()))
        decoded = geojson.loads(geojson.dumps(decoded_notice))

        self.assertAlmostEqualGeojson(orig, decoded)
def geojsongen(filename="tempgeojson.json", pointnum=100):
    """Generate `pointnum` random points, evaluate `func` on each, and
    write the resulting FeatureCollection to `filename` as GeoJSON.

    Relies on module-level callables `rand` (random coordinate) and
    `func` (point -> value) being in scope.
    """
    # Random (x, y) pairs; the index variable is unused.
    xy = [(rand(), rand()) for _ in range(pointnum)]
    featurecoll = [
        Feature(geometry=Point(point), properties={"value": func(point)})
        for point in xy
    ]
    # Serialize once and reuse (the original called dumps() twice).
    # print() as a function is valid on both Python 2 and 3; the old
    # `print expr` statement form is a SyntaxError on Python 3.
    geojsoncontent = geojson.dumps(FeatureCollection(featurecoll))
    print(geojsoncontent)
    # `with` already closes the file; the explicit close() was redundant.
    with open(filename, "w") as f:
        f.write(geojsoncontent)
    def test_rect(self):
        'rectangle'
        # One rectangular subarea attached to a whale-speed-reduction notice.
        year = datetime.datetime.utcnow().year
        rect = an.AreaNotice(an.notice_type['cau_mammals_reduce_speed'],
                             datetime.datetime(year, 7, 6, 0, 0, 4),
                             60, 10, source_mmsi=123)
        rect.add_subarea(an.AreaNoticeRectangle(-69.8, 42, 4000, 1000, 0))

        # Encode to AIVDM sentences, decode, and compare GeoJSON output.
        orig = geojson.loads(geojson.dumps(rect))
        roundtrip = an.AreaNotice(nmea_strings=list(rect.get_aivdm()))
        decoded = geojson.loads(geojson.dumps(roundtrip))
        self.assertAlmostEqualGeojson(orig, decoded)
    def test_feature_class(self):
        """
        Test the Feature class: protocol attributes, encoding to a
        canonical GeoJSON string, and decoding via an object hook.
        """

        from geojson.examples import SimpleWebFeature

        feature = SimpleWebFeature(
            id="1",
            geometry={"type": "Point", "coordinates": [53, -4]},
            title="Feature 1",
            summary="The first feature",
            link="http://example.org/features/1",
        )

        # It satisfies the feature protocol
        self.assertEqual(feature.id, "1")
        self.assertEqual(feature.properties["title"], "Feature 1")
        self.assertEqual(feature.properties["summary"], "The first feature")
        self.assertEqual(feature.properties["link"], "http://example.org/features/1")
        self.assertEqual(geojson.dumps(feature.geometry, sort_keys=True), '{"coordinates": [53, -4], "type": "Point"}')

        # Encoding: sort_keys makes the serialization deterministic.
        # (local renamed from `json`, which shadowed the stdlib module)
        expected = (
            '{"geometry": {"coordinates": [53, -4],'
            ' "type": "Point"},'
            ' "id": "1",'
            ' "properties":'
            ' {"link": "http://example.org/features/1",'
            ' "summary": "The first feature",'
            ' "title": "Feature 1"},'
            ' "type": "Feature"}'
        )
        self.assertEqual(geojson.dumps(feature, sort_keys=True), expected)

        # Decoding through a custom factory object hook.
        factory = geojson.examples.createSimpleWebFeature
        payload = (
            '{"geometry": {"type": "Point",'
            ' "coordinates": [53, -4]},'
            ' "id": "1",'
            ' "properties": {"summary": "The first feature",'
            ' "link": "http://example.org/features/1",'
            ' "title": "Feature 1"}}'
        )
        # The `encoding="utf-8"` kwarg was removed: geojson.loads forwards
        # it to json.loads, which rejects `encoding` on Python 3.9+ (the
        # input here is already text, so it was a no-op anyway).
        feature = geojson.loads(payload, object_hook=factory)
        self.assertEqual(repr(type(feature)), "<class 'geojson.examples.SimpleWebFeature'>")
        self.assertEqual(feature.id, "1")
        self.assertEqual(feature.properties["title"], "Feature 1")
        self.assertEqual(feature.properties["summary"], "The first feature")
        self.assertEqual(feature.properties["link"], "http://example.org/features/1")
        self.assertEqual(geojson.dumps(feature.geometry, sort_keys=True), '{"coordinates": [53, -4], "type": "Point"}')
    def test_freetext(self):
        'freetext'
        text1 = an.AreaNotice(an.notice_type['res_military_ops'],datetime.datetime(datetime.datetime.utcnow().year, 7, 6, 0, 4, 0), 60,10, source_mmsi=300000000)
        text1.add_subarea(an.AreaNoticeCirclePt(-69.8, 42.6, radius=0))
        text1.add_subarea(an.AreaNoticeFreeText(text="Explanation"))

        # Round-trip the notice through its AIVDM sentence encoding.
        orig = geojson.loads( geojson.dumps(text1) )
        text2 = an.AreaNotice(nmea_strings=[ line for line in text1.get_aivdm() ] )
        decoded = geojson.loads( geojson.dumps(text2) )

        # Compare once via a proper unittest assertion.  The old version
        # evaluated the comparison twice and called sys.exit() on failure,
        # which aborts the entire test run instead of reporting a single
        # test failure; failUnless is also a long-deprecated alias.
        self.assertTrue(almost_equal_geojson(orig, decoded, verbose=True))
Exemple #31
0
 def write_geojson(self, obj):
     """Serialize *obj* as GeoJSON and send it with the GeoJSON MIME type."""
     payload = geojson.dumps(obj)
     self.set_header("Content-Type", "application/vnd.geo+json")
     self.write(payload)
                             user=ENV["DATABASE_USER"],
                             password=ENV["DATABASE_PASSWORD"],
                             db=ENV["DATABASE_NAME"],
                             client_flag=CLIENT.MULTI_STATEMENTS)

# Load the country boundary features (path is relative to this script).
features = geojson.loads(
    Path('../data/countries.geojson').read_text())['features']

# Resolve each feature's ISO alpha-3 code to a pycountry record
# (None when the code is unknown, e.g. disputed territories).
codes = [
    pycountry.countries.get(alpha_3=feature['properties']['ISO_A3'])
    for feature in features
]

# Map to alpha-2 codes; "ZZZ" marks unmatched entries for filtering below.
codes = [code.alpha_2 if code is not None else "ZZZ" for code in codes]

geometries = [geojson.dumps(feature['geometry']) for feature in features]

# Pair codes with serialized geometries, dropping unmatched countries.
country_boundaries = list(
    filter(lambda x: x[0] != 'ZZZ', zip(codes, geometries)))

print("Populating Country Boundaries:")
try:
    with connection.cursor() as cursor:
        for entry in tqdm(country_boundaries):
            cursor.execute(
                Path('./populate_sqls/country_boundaries.sql').read_text(),
                entry)

        connection.commit()

finally:
Exemple #33
0
def main():
    """Extract elevation contour polygons from a DSM raster (optionally
    relative to a DTM) and write them to "output.<ext>" in the vector
    format named by opts['format'].

    Reads all configuration from the module-level `opts` dict.  Prints
    the absolute path of the produced file on success, "error" otherwise.
    """
    ext = ""
    if opts['format'] == "GeoJSON":
        ext = "json"
    elif opts['format'] == "GPKG":
        ext = "gpkg"
    elif opts['format'] == "DXF":
        ext = "dxf"
    elif opts['format'] == "ESRI Shapefile":
        ext = "shp"

    # Open dsm
    dsm = rio.open(opts['dsm'])
    # Read the tiff as an numpy masked array
    dsm_array = dsm.read(1, masked = True)
    # Create a kernel based on the parameter 'noise_filter_size' and the tiff resolution
    kernel = get_kernel(float(opts['noise_filter_size']), dsm)
    
    # Check if we want to use the dtm also
    if opts['dtm'] != '':
        # Open the dtm
        dtm = rio.open(opts['dtm'])
        # Assert that the dtm and dsm have the same bounds and resolution
        assert_same_bounds_and_resolution(dsm, dtm)
        # Calculate the different between the dsm and dtm
        array = calculate_difference(dsm_array, dtm)
    else:
        array = dsm_array    
    
    # Calculate the ranges based on the parameter 'intervals' and the elevation array
    ranges = calculate_ranges(opts['intervals'], array)
        
    features = []
    
    for bottom, top in ranges:
        # Binarize the image. Everything in [bottom, top) is white. Everything else is black
        surface_array = np.ma.where((bottom <= array) & (array < top), 255, 0).astype(np.uint8)
        # Apply kernel to reduce noise
        without_noise = cv2.morphologyEx(surface_array, cv2.MORPH_CLOSE, kernel) if kernel is not None else surface_array
        # Find contours
        # NOTE(review): the 2-tuple unpacking matches OpenCV 2.x/4.x;
        # OpenCV 3.x returns 3 values here — confirm the pinned version.
        contours, hierarchy = cv2.findContours(without_noise, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
        # Check if we found something
        if len(contours) > 0:
            # Transform contours from pixels to coordinates
            mapped_contours = [map_pixels_to_coordinates(dsm, opts['epsg'], to_pixel_format(contour)) for contour in contours]
            # Build the MultiPolygon for based on the contours and their hierarchy
            built_multi_polygon = LevelBuilder(bottom, top, mapped_contours, hierarchy[0]).build_multi_polygon()
            features.append(built_multi_polygon)
    
    # Write the GeoJSON to a file
    dump = dumps(FeatureCollection(features))
    with open("output.json", 'w+') as output:
        output.write(dump)

    # GeoJSON is written directly; other formats are converted via ogr2ogr.
    if ext != "json":
        subprocess.check_call(["ogr2ogr", "-f", opts['format'], "output.%s" % ext, "output.json"], stdout=subprocess.DEVNULL)

    if os.path.isfile("output.%s" % ext):
        if opts['format'] == "ESRI Shapefile":
            # Shapefiles are multi-file: bundle every output.* into a zip.
            ext="zip"
            os.makedirs("contours")
            contour_files = glob.glob("output.*")
            for cf in contour_files:
                shutil.move(cf, os.path.join("contours", os.path.basename(cf)))

            shutil.make_archive('output', 'zip', 'contours/')

        print(os.path.join(os.getcwd(), "output.%s" % ext))
    else:
        print("error")
Exemple #34
0
    def get(self, project_id):
        """
        Get a specified project including it's area
        ---
        tags:
            - projects
        produces:
            - application/json
        parameters:
            - in: header
              name: Authorization
              description: Base64 encoded session token
              required: false
              type: string
              default: Token sessionTokenHere==
            - in: header
              name: Accept-Language
              description: Language user is requesting
              type: string
              required: true
              default: en
            - name: project_id
              in: path
              description: Unique project ID
              required: true
              type: integer
              default: 1
            - in: query
              name: as_file
              type: boolean
              description: Set to true if file download is preferred
              default: False
            - in: query
              name: abbreviated
              type: boolean
              description: Set to true if only state information is desired
              default: False
        responses:
            200:
                description: Project found
            403:
                description: Forbidden
            404:
                description: Project not found
            500:
                description: Internal Server Error
        """
        try:
            authenticated_user_id = token_auth.current_user()
            # Query-string flags arrive as strings; strtobool maps
            # "true"/"1"/"yes" style values to truthy, absent -> False.
            as_file = (strtobool(request.args.get("as_file"))
                       if request.args.get("as_file") else False)
            abbreviated = (strtobool(request.args.get("abbreviated"))
                           if request.args.get("abbreviated") else False)

            project_dto = ProjectService.get_project_dto_for_mapper(
                project_id,
                authenticated_user_id,
                request.environ.get("HTTP_ACCEPT_LANGUAGE"),
                abbreviated,
            )

            if project_dto:
                project_dto = project_dto.to_primitive()
                if as_file:
                    # Stream the project as a downloadable JSON file.
                    # NOTE(review): `attachment_filename` was renamed to
                    # `download_name` in Flask 2.0 — confirm pinned version.
                    return send_file(
                        io.BytesIO(geojson.dumps(project_dto).encode("utf-8")),
                        mimetype="application/json",
                        as_attachment=True,
                        attachment_filename=f"project_{str(project_id)}.json",
                    )

                return project_dto, 200
            else:
                # Service returned nothing: caller may not view this project.
                return {"Error": "Private Project"}, 403
        except NotFound:
            return {"Error": "Project Not Found"}, 404
        except ProjectServiceError as e:
            return {"Error": str(e)}, 403
        except Exception as e:
            error_msg = f"Project GET - unhandled error: {str(e)}"
            current_app.logger.critical(error_msg)
            return {"Error": "Unable to fetch project"}, 500
        finally:
            # this will try to unlock tasks that have been locked too long
            try:
                ProjectService.auto_unlock_tasks(project_id)
            except Exception as e:
                current_app.logger.critical(str(e))

def load_geojson(fname):
    """Read and parse a GeoJSON file, returning the decoded object."""
    # GeoJSON (RFC 7946) is always UTF-8; pass the encoding explicitly so
    # the platform's default locale encoding cannot corrupt the read.
    with open(fname, 'r', encoding='utf-8') as f:
        return geojson.load(f)


if __name__ == '__main__':
    # Load each "<UF>-state.json" boundary file and insert one row per
    # Brazilian state into the Dengue_global.estado table.
    conn = psycopg2.connect(**db_config)
    with conn.cursor() as cur:
        for fname in glob.glob(os.path.join(path_, '*-state.json')):
            print('Processing {}'.format(fname))
            # The two-letter state code (UF) is the filename prefix.
            uf = os.path.split(fname)[1].split('-')[0]
            geo_json = load_geojson(fname)
            # State metadata lives on the first feature's properties.
            properties = geo_json['features'][0]['properties']
            nome = properties['NM_ESTADO']
            geocodigo = properties['CD_GEOCODU']
            regiao = properties['NM_REGIAO']
            cur.execute(
                'INSERT INTO "Dengue_global".estado (uf, nome, regiao, geocodigo, geojson) VALUES (%s,%s,%s,%s, %s)',
                (
                    uf,
                    nome,
                    regiao,
                    geocodigo,
                    geojson.dumps(geo_json['features'][0]),
                ),
            )
        # Commit once after all inserts succeed.
        conn.commit()
        logger.warning('All fields were updated!')
Exemple #36
0
def write_to_geojson(roads_json, filename='traffic'):
    """Write road features to `<filename>.geojson` as a FeatureCollection.

    `roads_json` is a list of GeoJSON feature dicts; `filename` is the
    output basename (".geojson" is appended).
    """
    res_geojson = {"type": "FeatureCollection", "features": roads_json}
    # `with` guarantees the handle is closed even if dumps()/write()
    # raises; the original open()/close() pair leaked it on error.
    with open(filename + ".geojson", 'w') as res_file:
        res_file.write(geojson.dumps(res_geojson) + '\n')
Exemple #37
0
def visualisation(permutation, iti_matrix):
    """Draw an ordered tour on a folium map and save it to 'map.html'.

    `permutation` is the visiting order (indices into the module-level
    `mydf` place table); `iti_matrix[i][j]` is an itinerary dict between
    places i and j (presumably OpenTripPlanner-style, with a 'legs' list
    — TODO confirm against the producer).  Returns the folium.Map.
    """
    # draw folium graph
    str_fea_list = []
    tooltip = 'Click For More Info'
    des_dict = {
        'WALK': 'Walk to ',
        'SUBWAY': 'Take subway to ',
        'BUS': 'Take bus to '
    }
    # Map centered on Singapore.
    m = folium.Map(location=[1.2791, 103.8154], zoom_start=12)
    for i in range(len(permutation) - 1):  # for one itinerary
        sta_plc_idx = permutation[i]
        end_plc_idx = permutation[i + 1]
        itinerary = iti_matrix[sta_plc_idx][end_plc_idx]

        # True coordinates of the intended start/end places (from mydf).
        true_sta_pt = np.array((mydf._get_value(sta_plc_idx, 'latitude'),
                                mydf._get_value(sta_plc_idx, 'longitude')))
        true_end_pt = np.array((mydf._get_value(end_plc_idx, 'latitude'),
                                mydf._get_value(end_plc_idx, 'longitude')))

        temp_num_legs = len(itinerary['legs'])  # num of legs
        pt_lat = []
        pt_lon = []
        tpl_list = []
        pt_name = []
        mode_list = []
        dist_list = []
        for k in range(temp_num_legs):  # for each leg
            # Collect leg start coordinates (lon, lat order for GeoJSON).
            pt_lon.append(itinerary['legs'][k]['from']['lon'])
            pt_lat.append(itinerary['legs'][k]['from']['lat'])
            tpl_list.append((itinerary['legs'][k]['from']['lon'],
                             itinerary['legs'][k]['from']['lat']))
            pt_name.append(itinerary['legs'][k]['to']['name'])
            mode_list.append(des_dict[itinerary['legs'][k]['mode']])
            dist_list.append(
                str(round(float(itinerary['legs'][k]['distance']) / 1000, 2)) +
                ' km.')
            if k == temp_num_legs - 1:
                # Close the polyline with the final leg's destination.
                pt_lon.append(itinerary['legs'][k]['to']['lon'])
                pt_lat.append(itinerary['legs'][k]['to']['lat'])
                tpl_list.append((itinerary['legs'][k]['to']['lon'],
                                 itinerary['legs'][k]['to']['lat']))
        temp_feature = Feature(geometry=MultiLineString([tpl_list]),
                               properties={'stroke': '#AF4646'})
        str_fea_list.append(temp_feature)
        first_point = np.array((pt_lat[0], pt_lon[0]))

        # Decide whether the itinerary runs start->end or end->start by
        # which true endpoint the first polyline point is closer to.
        distance1 = np.linalg.norm(first_point - true_sta_pt)
        distance2 = np.linalg.norm(first_point - true_end_pt)

        start_point = [pt_lat[0], pt_lon[0]]
        end_point = [pt_lat[-1], pt_lon[-1]]
        iterator = range(len(mode_list))
        # only affect formatting the text
        string = ''
        if distance1 > distance2:
            # Itinerary is reversed: walk the legs backwards.
            iterator = range(len(mode_list) - 1, -1, -1)
            start_point = [pt_lat[-1], pt_lon[-1]]
            end_point = [pt_lat[0], pt_lon[0]]
        counter = 0
        for j in iterator:
            # Numbered step-by-step directions for the marker popup.
            string += str(counter + 1) + '. ' + mode_list[j] + pt_name[
                j] + '. Estimated distance is ' + dist_list[j] + '\n'
            counter += 1

        # NOTE(review): trailing comma below makes this a 1-tuple
        # expression — harmless but probably unintended.
        folium.Marker(
            start_point,
            popup='<strong>' + string + '</strong>',
            tooltip=tooltip,
            icon=folium.Icon(icon='trophy' if i != 0 else 'flag')).add_to(m),
        folium.Marker(end_point,
                      icon=folium.Icon(icon='trophy' if i != len(permutation) -
                                       2 else 'star')).add_to(m)

    # Overlay all leg polylines as a single GeoJSON layer.
    feature_collection = FeatureCollection(str_fea_list)
    ms = geojson.dumps(feature_collection)
    folium.GeoJson(ms, name='multistring').add_to(m)

    # Generate map
    m.save('map.html')
    return m
Exemple #38
0
 def get_geojson_dump(features):
     """Wrap *features* in a FeatureCollection and return its GeoJSON text."""
     return geojson.dumps(geojson.FeatureCollection(features))
Exemple #39
0
def to_geojson(arr):
    """Serialize an array of points as a GeoJSON LineString string."""
    line = shapely.geometry.LineString(arr)
    return geojson.dumps(line)
            if len(gnismatches):
                # lat and lon are in the 10th and 11th columns, see http://geonames.usgs.gov/domestic/states_fileformat.htm
                lat = gnismatches[0][9]
                lon = gnismatches[0][10]
                # increase the match counter
                matchcnt += 1
            # add the found (or nodata if we didn't find a match) coordinate to the record
            record += (lat, lon)
            # create geojson object
            coordinate = [lat, lon]
            gjpoint = geojson.Point(coordinate)
            properties = {'placename': '%s, %s' % (placename, code)}
            gjfeature = geojson.Feature(geometry=gjpoint,
                                        properties=properties)
            features.append(gjfeature)
            cnt += 1
            sys.stdout.write('%i matches /%i total (%.1f%% success)\r' %
                             (matchcnt, cnt,
                              (float(matchcnt) / float(cnt)) * 100))
            # ..and also write these to our file
            csvwriter.writerow(record)
    print 'found %i Target locations for %s, %i matched with GNIS names' % (
        cnt, state, matchcnt)
    totalcnt += cnt
    totalmatchcnt += matchcnt
    statecnt += 1
# Collect every matched feature into one FeatureCollection and persist it.
gjcollection = geojson.FeatureCollection(features)
geojsonfile.write(geojson.dumps(gjcollection))
# Parenthesized print: identical output on Python 2, and valid syntax on
# Python 3 (the bare `print expr` statement form is a SyntaxError there).
print('done. %i states scraped, %i total Target locations found, %i matched to GNIS populated place.' % (
    statecnt, totalcnt, totalmatchcnt))
Exemple #41
0
def hybridQual(request, mapType):
    '''HYBRID qualified heatmap view: accepts additional filter parameters
    (crime category, date range, map bounds) and uses an incident-count
    threshold to choose between heatmap and per-incident marker display.

    `mapType` is 'general' or 'gun'.  Renders dailyIncid/heatmap.html with
    a GeoJSON FeatureCollection of the filtered incidents.
    '''

    nowDT = awareDT(datetime.now())
    # Default window: the last 90 days.
    minDate = nowDT - timedelta(days=90)

    # NOTE(review): appears unused in this function — confirm before removing.
    NIncidForMarkers = 75

    userName = request.user.get_username()

    if mapType == 'general':
        qs0 = OakCrime.objects.filter(cdateTime__gt=minDate). \
           filter(cdateTime__lt=nowDT). \
           exclude(xlng__isnull=True). \
           exclude(ylat__isnull=True). \
           order_by('cdateTime')

    elif mapType == 'gun':
        # replicate query ala that for Scott Morris
        # select opd_rd, nvictim, nhospital, weapon, "gswP", "cdateTime", addr from "dailyIncid_oakcrime"
        # where "cdateTime" > '2017-01-01'::date and weapon like 'gun%' and (nvictim>0 or nhospital>0 or "gswP")
        # order by opd_rd

        qs0 = OakCrime.objects.filter(cdateTime__gt=minDate). \
           filter(cdateTime__lt=nowDT). \
           exclude(xlng__isnull=True). \
           exclude(ylat__isnull=True). \
           filter( models.Q(weapon__contains='gun') | models.Q(gswP=True) )

    logline = 'username=%s hybridQual %s: qs0=%d' % (userName, mapType,
                                                     qs0.count())
    logger.info(logline)

    # 	list0 = list(qs0)
    # 	ocoFirst = list0[0]
    # 	ocoLast = list0[-1]
    # 	print('hybrid',ocoFirst.opd_rd,ocoFirst.cdateTime,ocoLast.opd_rd,ocoLast.cdateTime)

    ccatList = request.GET.getlist('crimeCat')

    # Fewer categories selected than the full top level => apply CC filter.
    NTopLevelCC = 14
    if len(ccatList) < NTopLevelCC:

        # NB: disjunction across separate crimeCat query sets!
        qscc = OakCrime.objects.none()
        for cc in ccatList:
            # NB: __startswith converts to LIKE cc%
            qs1 = qs0.filter(crimeCat__startswith=cc)
            qscc = (qscc | qs1)
            # print(cc,qs1.count(),qscc.count())

        logline = 'username=%s hybridQual: crimeCat="%s" postCC=%d' % (
            userName, ccatList, qscc.count())
        logger.info(logline)

    elif mapType == 'gun':
        # for guns, restrict crimeCat to those mentioned
        ccMention = set()
        for oco in qs0:
            cc = oco.crimeCat
            ccbits = cc.split('_')
            ccMention.add(ccbits[0])
        ccatList = list(ccMention)
        # NOTE(review): raises ValueError when '' is absent; heatmap()
        # guards the same removal with an `in` check — confirm intent.
        ccatList.remove('')

        qscc = OakCrime.objects.none()
        for cc in ccatList:
            # NB: __startswith converts to LIKE cc%
            qs1 = qs0.filter(crimeCat__startswith=cc)
            qscc = (qscc | qs1)
            # print(cc,qs1.count(),qscc.count())

        logline = 'username=%s hybridQual: gun crimeCat="%s" postCC=%d' % (
            userName, ccatList, qscc.count())
        logger.info(logline)

    else:
        qscc = qs0
        logline = 'username=%s hybridQual: No CC filter; postCC=%d' % (
            userName, qscc.count())
        logger.info(logline)

    # bounding box coordinates in a 'southwest_lng,southwest_lat,northeast_lng,northeast_lat' format
    mapboundStr = request.GET['mapBounds']
    # SECURITY(review): eval() on a client-supplied request parameter
    # executes arbitrary Python — replace with ast.literal_eval().
    mapBound = eval(mapboundStr)

    # bbox = xmin, ymin, xmax, ymax
    poly = Polygon.from_bbox(mapBound)

    # HACK: better django might keep this manipulation over QuerySets?
    ocoList = list(qscc)

    # returned as Y,M,D STRING, to avoid JS/Python (0 vs 1-index) month numbering
    # JS display format = "MMM D YYYY"
    selectDateFmt = "%b %d %Y"
    dateDiffThresh = timedelta(days=2)

    minSelectDateStr = request.GET['minDate']
    maxSelectDateStr = request.GET['maxDate']
    minSelectDate = awareDT(datetime.strptime(minSelectDateStr, selectDateFmt))
    maxSelectDate = awareDT(datetime.strptime(maxSelectDateStr, selectDateFmt))

    # 2do: these queryset filters don't work?
    # NB: django comparison requires just date!
    # minSelectDate = datetime.date(minSelectDate)
    # maxSelectDate = datetime.date(maxSelectDate)
    # 	qs0 = qs0.filter(cdateTime__date__gt=minSelectDate)
    #	qs0 = qs0.filter(cdateTime__date__lt=maxSelectDate)

    # A slider date counts as "changed" only if >2 days from the default.
    minDateChg = abs(minSelectDate - minDate) > dateDiffThresh
    if minDateChg:
        minDate = minSelectDate

    maxDateChg = abs(maxSelectDate - nowDT) > dateDiffThresh
    if maxDateChg:
        maxDate = maxSelectDate
    else:
        maxDate = nowDT

    # Python-side date filtering (see the 2do above re queryset filters).
    ocoList3 = []
    for oco in ocoList:
        dt = oco.cdateTime
        if  (not minDateChg or (minDateChg and dt > minSelectDate)) and \
         (not maxDateChg or (maxDateChg and dt < maxSelectDate)):
            ocoList3.append(oco)

    logline = 'username=%s hybridQual: postDateFilter=%d %s (%s) - %s (%s)' % \
     (userName, len(ocoList3),minSelectDateStr,minDateChg, maxSelectDateStr, maxDateChg)
    logger.info(logline)

    # Keep only incidents whose point falls inside the viewport bbox.
    ocoList4 = []
    for oco in ocoList3:
        pt = oco.point
        if pt == None:
            logline = 'username=%s hybridQual: No point, DLog?! %s %s' % \
             (userName, oco.opd_rd,oco.source)
            logger.info(logline)
            continue
        if poly.contains(pt):
            ocoList4.append(oco)

    incidList = ocoList4
    nincid = len(incidList)
    elapTime = awareDT(datetime.now()) - nowDT
    logline = 'username=%s hybridQual: nincid=%d bbox=%s (%6.2f sec)' % (
        userName, nincid, mapBound, elapTime.total_seconds())
    logger.info(logline)

    context = {}
    context['mapType'] = mapType
    context['qualified'] = True
    context['nincid'] = nincid
    context['crimeCat'] = ccatList
    # NB: need to convert to list for javascript
    context['mapBounds'] = list(mapBound)

    # NB: javascript uses ZERO-based months!
    context['minDate'] = [minDate.year, minDate.month - 1, minDate.day]
    context['maxDate'] = [maxDate.year, maxDate.month - 1, maxDate.day]
    if minDateChg:
        context['minSlider'] = [
            minSelectDate.year, minSelectDate.month - 1, minSelectDate.day
        ]
    else:
        context['minSlider'] = [minDate.year, minDate.month - 1, minDate.day]
    if maxDateChg:
        context['maxSlider'] = [
            maxSelectDate.year, maxSelectDate.month - 1, maxSelectDate.day
        ]
    else:
        context['maxSlider'] = [nowDT.year, nowDT.month - 1, nowDT.day]

    # 2do: mapbox unifies heatmap with circles

    # dataArr =  [ [lat, lng, intensity], ... ]
    # dataArr = [ [o.ylat,o.xlng,1] for o in ocoList4]

    # gjPoints = [ geojson.Point( (o.xlng, o.ylat) ) for o in ocoList4]
    # gjFeatures = [ geojson.Feature( geometry=gjpt, properties={"count": 1} ) for gjpt in gjPoints ]

    # 180130: extract only those incident details required for circle label; add as geojson properties
    # incid.opd_rd, incid.cdateTime, incid.crimeCat
    # also move major/minor crimeCat logic here (vs. javascript in heatmap.html)

    gjFeatures = []
    for o in ocoList4:
        [jlat, jlng] = jitterCoord(o.ylat, o.xlng)
        # 180129: mapbox needs points as geojson, (lng,lat order)
        pt = geojson.Point((jlng, jlat))
        f = geojson.Feature(geometry=pt, properties={"count": 1})
        f.properties['opd_rd'] = o.opd_rd
        dtstr = o.cdateTime.strftime('%a,%b-%d-%y_%I:%M%p')
        f.properties['cdateTime'] = dtstr
        f.properties['crimeCat'] = o.crimeCat
        if mapType == 'gun':
            # if o.source.startswith("DLog"):
            # NOTE(review): this 'DLog' value is always overwritten by the
            # gswP branch below — likely a missing else/continue.
            if o.source.find('SOC_') == -1:
                f.properties['majorIncid'] = 'DLog'
            if o.gswP:
                f.properties['majorIncid'] = 'True'
            else:
                f.properties['majorIncid'] = ' False'
        else:
            majorP = majorCrimeCatP(o)
            f.properties['majorIncid'] = majorP

        gjFeatures.append(f)

    gjCollection = geojson.FeatureCollection(gjFeatures)
    rawgj = geojson.dumps(gjCollection)

    context['dataArr'] = rawgj

    return render(request, 'dailyIncid/heatmap.html', context)
Exemple #42
0
def heatmap(request, mapType='general'):
    '''Browsable heatmap of the last 90 days of Oakland-area crimes with a
    date-range slider.  `mapType` is 'general' or 'gun'.  170911

    Renders dailyIncid/heatmap.html with a GeoJSON FeatureCollection of
    jittered incident points.
    '''

    userName = request.user.get_username()
    logger.info('user=%s mapType=%s heatmap' % (userName, mapType))

    nowDT = awareDT(datetime.now())
    # Fixed window: the last 90 days.
    minDate = nowDT - timedelta(days=90)

    begTime = nowDT

    if mapType == 'general':
        queryset = OakCrime.objects.filter(cdateTime__gt=minDate). \
           filter(cdateTime__lt=nowDT). \
           exclude(xlng__isnull=True). \
           exclude(ylat__isnull=True). \
           order_by('cdateTime')

    elif mapType == 'gun':
        # replicate query ala that for Scott Morris
        # select opd_rd, nvictim, nhospital, weapon, "gswP", "cdateTime", addr from "dailyIncid_oakcrime"
        # where "cdateTime" > '2017-01-01'::date and weapon like 'gun%' and (nvictim>0 or nhospital>0 or "gswP")
        # order by opd_rd

        queryset = OakCrime.objects.filter(cdateTime__gt=minDate). \
           filter(cdateTime__lt=nowDT). \
           exclude(xlng__isnull=True). \
           exclude(ylat__isnull=True). \
           filter( models.Q(weapon__contains='gun') | models.Q(gswP=True) )

    incidList = list(queryset)

    # 	ocoFirst = incidList[0]
    # 	ocoLast = incidList[-1]
    # 	print('heatmap',ocoFirst.opd_rd,ocoFirst.cdateTime,ocoLast.opd_rd,ocoLast.cdateTime)

    elapTime = awareDT(datetime.now()) - begTime
    logger.info('username=%s heatmap %s : NIncid=%d  (%6.2f sec)' %
                (userName, mapType, len(incidList), elapTime.total_seconds()))

    # In Django 1.8+, the template's render method takes a dictionary for
    # the context parameter. Support for passing a Context instance is
    # deprecated, and gives an error in Django 1.10+

    context = {}
    context['mapType'] = mapType
    context['nincid'] = len(incidList)
    context['cxlng'] = FTVL_lng
    context['cylat'] = FTVL_lat

    # 2do: mapbox unifies heatmap with circles
    context['heatmap'] = True

    # NB: javascript uses ZERO-based months!
    context['minDate'] = [minDate.year, minDate.month - 1, minDate.day]
    context['maxDate'] = [nowDT.year, nowDT.month - 1, nowDT.day]

    context['minSlider'] = [minDate.year, minDate.month - 1, minDate.day]
    context['maxSlider'] = [nowDT.year, nowDT.month - 1, nowDT.day]

    # dataArr =  [ [lat, lng, intensity], ... ]
    # dataArr = [ [o.ylat,o.xlng,1] for o in incidList]

    gjFeatures = []
    for o in incidList:
        # 180129: mapbox needs points as geojson, (lng,lat order)
        [jlat, jlng] = jitterCoord(o.ylat, o.xlng)
        pt = geojson.Point((jlng, jlat))
        f = geojson.Feature(geometry=pt, properties={"count": 1})
        f.properties['opd_rd'] = o.opd_rd
        dtstr = o.cdateTime.strftime('%a,%b-%d-%y_%I:%M%p')
        f.properties['cdateTime'] = dtstr
        f.properties['crimeCat'] = o.crimeCat
        if mapType == 'gun':
            # if o.source.startswith("DLog"):
            # NOTE(review): this 'DLog' value is always overwritten by the
            # gswP branch below — likely a missing else/continue.
            if o.source.find('SOC_') == -1:
                f.properties['majorIncid'] = 'DLog'
            if o.gswP:
                f.properties['majorIncid'] = 'True'
            else:
                f.properties['majorIncid'] = ' False'
        else:
            majorP = majorCrimeCatP(o)

            f.properties['majorIncid'] = majorP

        gjFeatures.append(f)

    gjCollection = geojson.FeatureCollection(gjFeatures)
    rawgj = geojson.dumps(gjCollection)

    context['dataArr'] = rawgj

    # MapZen bounding box coordinates in a 'southwest_lng,southwest_lat,northeast_lng,northeast_lat' format
    # MapBox bounding box coordinates in an array of LngLatLike objects in [sw, ne] order, or an array of
    # numbers in [west, south, east, north] order.

    mapBound = OaklandBBox
    context['mapBounds'] = mapBound

    if mapType == 'gun':
        # for guns, restrict crimeCat to those mentioned
        ccMention = set()
        for oco in incidList:
            cc = oco.crimeCat
            ccbits = cc.split('_')
            ccMention.add(ccbits[0])
        ccatList = list(ccMention)
        if '' in ccatList:
            ccatList.remove('')
        context['crimeCat'] = ccatList

    return render(request, 'dailyIncid/heatmap.html', context)
Exemple #43
0
 def __repr__(self):
     # Represent the object as its GeoJSON serialization; sort_keys makes
     # the output deterministic for comparison and logging.
     return geojson.dumps(self, sort_keys=True)
Exemple #44
0
def api_nearby_city(lat, lng, radius) -> str:
    """Return cities near (lat, lng) within *radius*, serialized as GeoJSON.

    BUG FIX: geojson.dumps returns a JSON *string*, not a dict, so the
    original ``-> dict`` annotation was wrong; corrected to ``-> str``.
    """
    nearby_city = db.get_nearby_city({'lat': lat, 'lng': lng}, radius)
    return geojson.dumps(nearby_city)
Exemple #45
0
    def process_station(self, uid):
        """ Makes a DescribeSensor request based on a 'uid' parameter being a station procedure """
        # NOTE(review): Python-2-era code -- it uses the builtin ``unicode``
        # and subscripts the result of ``map`` (see ``positions`` below), so
        # it will not run unmodified on Python 3.

        GML_NS = "http://www.opengis.net/gml"
        XLINK_NS = "http://www.w3.org/1999/xlink"

        with app.app_context():

            # Fetch the station's SensorML description from the SOS service
            # and parse it into an etree + IOOS helper wrapper.
            metadata_value = etree.fromstring(
                self.sos.describe_sensor(
                    outputFormat=
                    'text/xml;subtype="sensorML/1.0.1/profiles/ioos_sos/1.0"',
                    procedure=uid))
            station_ds = IoosDescribeSensor(metadata_value)

            # Without a stationID there is nothing to key the record on.
            unique_id = station_ds.id
            if unique_id is None:
                app.logger.warn(
                    "Could not get a 'stationID' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/stationID'"
                )
                return

            # Find or create the Dataset document keyed by the station uid.
            dataset = db.Dataset.find_one({'uid': unicode(unique_id)})
            if dataset is None:
                dataset = db.Dataset()
                dataset.uid = unicode(unique_id)

            # Find service reference in Dataset.services and remove (to replace it)
            tmp = dataset.services[:]
            for d in tmp:
                if d['service_id'] == self.service.get('_id'):
                    dataset.services.remove(d)

            # Parsing messages
            messages = []

            # NAME
            name = unicode_or_none(station_ds.shortName)
            if name is None:
                messages.append(
                    u"Could not get a 'shortName' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/shortName'"
                )

            # DESCRIPTION
            description = unicode_or_none(station_ds.longName)
            if description is None:
                messages.append(
                    u"Could not get a 'longName' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/longName'"
                )

            # PLATFORM TYPE
            asset_type = unicode_or_none(station_ds.platformType)
            if asset_type is None:
                messages.append(
                    u"Could not get a 'platformType' from the SensorML identifiers.  Looking for a definition of 'http://mmisw.org/ont/ioos/definition/platformType'"
                )

            # LOCATION is in GML
            gj = None
            loc = station_ds.location
            if loc is not None and loc.tag == "{%s}Point" % GML_NS:
                pos_element = loc.find("{%s}pos" % GML_NS)
                # strip out points
                positions = map(float, testXMLValue(pos_element).split(" "))
                crs = Crs(testXMLAttribute(pos_element, "srsName"))
                # GeoJSON positions are (lon, lat); swap when the CRS
                # declares a lat/lon ("yx") axis order.
                if crs.axisorder == "yx":
                    gj = json.loads(
                        geojson.dumps(
                            geojson.Point([positions[1], positions[0]])))
                else:
                    gj = json.loads(
                        geojson.dumps(
                            geojson.Point([positions[0], positions[1]])))
            else:
                messages.append(
                    u"Found an unrecognized child of the sml:location element and did not attempt to process it: %s"
                    % etree.tostring(loc).strip())

            # Rebuild this harvester's service entry from scratch.
            service = {
                # Reset service
                'name': name,
                'description': description,
                'service_type': self.service.get('service_type'),
                'service_id': ObjectId(self.service.get('_id')),
                'data_provider': self.service.get('data_provider'),
                'metadata_type': u'sensorml',
                'metadata_value':
                unicode(etree.tostring(metadata_value)).strip(),
                'messages': map(unicode, messages),
                'keywords': map(unicode, sorted(station_ds.keywords)),
                'variables': map(unicode, sorted(station_ds.variables)),
                'asset_type': asset_type,
                'geojson': gj,
                'updated': datetime.utcnow()
            }

            dataset.services.append(service)
            dataset.updated = datetime.utcnow()
            dataset.save()
            return "Harvested"
Exemple #46
0
def api_nearby(lat, lng, radius) -> str:
    """Return tracks near (lat, lng) within *radius*, serialized as GeoJSON.

    BUG FIX: geojson.dumps returns a JSON *string*, not a dict, so the
    original ``-> dict`` annotation was wrong; corrected to ``-> str``.
    """
    nearby_tracks = db.get_nearby(lat, lng, radius)
    return geojson.dumps(nearby_tracks)
Exemple #47
0
def get_labs(format):
    """Gets Techshop data from techshop.ws.

    Scrapes the TechShop US index for per-location pages, extracts each
    location's name, social links and map coordinates, reverse-geocodes the
    address, and returns the labs in the requested *format*:
    "dict"/"json", "geojson"/"geo", "pandas"/"dataframe", or "object"/"obj"
    (anything else also returns the raw objects).
    """

    techshops_soup = data_from_techshop_ws(techshop_us_url)
    techshops = {}

    # Load all the TechShops
    # By first parsing the html
    data = techshops_soup.findAll('div', attrs={'id': 'main-content'})
    # NOTE(review): 'hrefs' is rebuilt on each iteration, so only the links
    # of the last 'main-content' div survive this loop -- behaviour
    # preserved from the original.
    for element in data:
        links = element.findAll('a')
        hrefs = {}
        for k, a in enumerate(links):
            if "contact" not in a['href']:
                hrefs[k] = a['href']
        # Normalize relative links into absolute techshop.ws URLs.
        # (.items() replaces py2-only .iteritems(); identical iteration.)
        for k, v in hrefs.items():
            if "http://techshop.ws/" not in v:
                hrefs[k] = "http://techshop.ws/" + v
            else:
                hrefs[k] = v
        for k, v in hrefs.items():
            if "http://techshop.com/" in v:
                hrefs[k] = v.replace("http://techshop.com/", "")

    # Remove duplicate pages
    hr = []
    for key, value in hrefs.items():
        if value not in hr:
            hr.append(value)
    hrefs = hr

    # Check all pages
    for page in hrefs:
        data = data_from_techshop_ws(page)
        current_lab = Techshop()
        name = data.title.contents[0].split('-- ')[1].encode('utf-8')
        if "TechShop" not in name:
            name = "TechShop " + name
        current_lab.name = name
        current_lab.slug = name
        current_lab.url = page
        # Find Facebook and Twitter links
        current_lab.links = {"facebook": "", "twitter": ""}
        page_links = data.findAll('a')
        for link in page_links:
            if link.has_attr("href"):
                if "facebook" in link.attrs["href"]:
                    current_lab.links["facebook"] = link.attrs["href"]
                if "twitter" in link.attrs["href"]:
                    current_lab.links["twitter"] = link.attrs["href"]
        # BUG FIX: reset the coordinates for every page so a page without a
        # recognizable map fails loudly instead of silently reusing the
        # previous lab's stale coordinates.
        latitude = None
        longitude = None
        # Find the coordinates by analysing the embedded google map
        iframes = data.findAll('iframe')
        if len(iframes) != 0:
            for iframe in iframes:
                embed_url = iframe.attrs["src"]
                if "google" in embed_url:
                    # In embed URLs the "!2d" token carries the longitude
                    # and "!3d" the latitude.
                    two_d = embed_url.find("2d")
                    three_d = embed_url.find("3d")
                    longitude = embed_url[two_d:].split('!')[0]
                    latitude = embed_url[three_d:].split('!')[0]
                    longitude = longitude[2:]
                    latitude = latitude[2:]
        # ... or the link to google map
        else:
            page_links = data.findAll('a')
            for link in page_links:
                # one case...
                if "maps.google.com/" in link.attrs["href"]:
                    embed_url = link.attrs["href"]
                    if "ll=" in embed_url:
                        first_string = embed_url.split('&sspn')[0]
                        coordinates = first_string.split('ll=')[1]
                        latitude = coordinates.split(',')[0]
                        longitude = coordinates.split(',')[1]
                # ... another case
                elif "www.google.com/maps" in link.attrs["href"]:
                    embed_url = link.attrs["href"]
                    if "1d" in embed_url:
                        one_d = embed_url.find("1d")
                        two_d = embed_url.find("2d")
                        longitude = embed_url[one_d:].split('!')[0]
                        latitude = embed_url[two_d:].split('!')[0]
                        longitude = longitude[2:]
                        latitude = latitude[2:]
        current_lab.latitude = latitude
        current_lab.longitude = longitude
        current_lab.continent = "North America"
        current_lab.country_code = "USA"
        current_lab.country = "United States of America"
        location = geolocator.reverse((latitude, longitude))
        if "city" in location.raw["address"]:
            # BUG FIX: the original stored the city into .county, which the
            # next branch then overwrote; keep it as the city instead.
            current_lab.city = location.raw["address"]["city"].encode(
                'utf-8')
        if "county" in location.raw["address"]:
            current_lab.county = location.raw["address"]["county"].encode(
                'utf-8')
        if "state" in location.raw["address"]:
            current_lab.state = location.raw["address"]["state"].encode(
                'utf-8')
        if "postcode" in location.raw["address"]:
            current_lab.postal_code = location.raw["address"][
                "postcode"].encode('utf-8')
        current_lab.address_1 = location.address.encode('utf-8')

        # Add the lab to the list
        techshops[current_lab.slug] = current_lab

    # Return a dictionary / json
    if format.lower() == "dict" or format.lower() == "json":
        output = {}
        for j in techshops:
            output[j] = techshops[j].__dict__
    # Return a geojson
    elif format.lower() == "geojson" or format.lower() == "geo":
        labs_list = []
        for l in techshops:
            single = techshops[l].__dict__
            single_lab = Feature(
                type="Feature",
                # BUG FIX: GeoJSON positions are (longitude, latitude); the
                # original emitted them in (lat, lon) order.
                geometry=Point((single["longitude"], single["latitude"])),
                properties=single)
            labs_list.append(single_lab)
        output = dumps(FeatureCollection(labs_list))
    # Return a Pandas DataFrame
    elif format.lower() == "pandas" or format.lower() == "dataframe":
        output = {}
        for j in techshops:
            output[j] = techshops[j].__dict__
        # Transform the dict into a Pandas DataFrame
        output = pd.DataFrame.from_dict(output)
        output = output.transpose()
    # Return an object
    elif format.lower() == "object" or format.lower() == "obj":
        output = techshops
    # Default: return an object
    else:
        output = techshops
    # Return a proper json
    if format.lower() == "json":
        output = json.dumps(output)
    return output
Exemple #48
0
def api_shelters() -> str:
    """Return all shelters from the database, serialized as GeoJSON.

    BUG FIX: geojson.dumps returns a JSON *string*, not a dict, so the
    original ``-> dict`` annotation was wrong; corrected to ``-> str``.
    """
    shelters = db.get_shelters()
    return geojson.dumps(shelters)
Exemple #49
0
    def get(self, project_id):
        """
        Get HOT Project for mapping
        ---
        tags:
            - projects
        produces:
            - application/json
        parameters:
            - in: header
              name: Accept-Language
              description: Language user is requesting
              type: string
              required: true
              default: en
            - name: project_id
              in: path
              description: Unique project ID
              required: true
              type: integer
              default: 1
            - in: query
              name: as_file
              type: boolean
              description: Set to true if file download is preferred
              default: False
        responses:
            200:
                description: Project found
            403:
                description: Forbidden
            404:
                description: Project not found
            500:
                description: Internal Server Error
        """
        # NOTE: the docstring above is a swagger/flasgger spec that is read
        # at runtime to build the API docs -- do not reformat it casually.
        try:
            # "as_file" arrives as a string; strtobool accepts the common
            # truthy/falsy spellings ("1", "true", "yes", ...).
            as_file = (strtobool(request.args.get("as_file"))
                       if request.args.get("as_file") else False)
            locale = request.environ.get("HTTP_ACCEPT_LANGUAGE")
            project_dto = ProjectService.get_project_dto_for_mapper(
                project_id, None, locale, True)
            project_dto = project_dto.to_primitive()

            if as_file:
                # Stream the project DTO as a downloadable JSON attachment.
                return send_file(
                    io.BytesIO(geojson.dumps(project_dto).encode("utf-8")),
                    mimetype="application/json",
                    as_attachment=True,
                    attachment_filename=f"project_{str(project_id)}.json",
                )

            return project_dto, 200
        except NotFound:
            return {"Error": "Project Not Found"}, 404
        except ProjectServiceError:
            return {"Error": "Unable to fetch project"}, 403
        except Exception as e:
            error_msg = f"Project GET - unhandled error: {str(e)}"
            current_app.logger.critical(error_msg)
            return {"Error": "Unable to fetch project"}, 500
        finally:
            # this will try to unlock tasks that have been locked too long
            try:
                ProjectService.auto_unlock_tasks(project_id)
            except Exception as e:
                current_app.logger.critical(str(e))
Exemple #50
0
def getRoutes(inp, pairs):
    """Run the external node `route.js` for *pairs*/*inp* and return the
    parsed JSON written to path.json.

    BUG FIX: the original passed ``str(gjson.dumps(pairs).encode('utf8'))``,
    which on Python 3 yields the literal "b'...'" repr instead of the JSON
    text; the dumps() string is now passed directly.  shlex.quote guards
    the shell command against spaces/metacharacters in the JSON payload
    and in *inp* (the original interpolated them unquoted).
    """
    import shlex  # local import: shell-quoting untrusted strings

    cmd = "node route.js {} {} > path.json".format(
        shlex.quote(gjson.dumps(pairs)), shlex.quote(inp))
    os.system(cmd)
    with open("path.json") as data_file:
        paths = json.load(data_file)
    return paths
def regions_shape_to_json(source='INE2016',
                          simplify=0,
                          convert=0,
                          translate=0,
                          verbose=0):
    """Convert Chilean region shapefiles to per-region GeoJSON files.

    source:    input dataset, 'INE2016' (one shapefile per region) or 'IGM'
               (a single national shapefile); other values leave the file
               paths undefined and will fail at use time.
    simplify:  when truthy, simplify the source geometry first.
    convert:   when truthy, convert the .shp files to .geojson.
    translate: when truthy, rewrite each .geojson with normalized
               name/code/iso properties and an EPSG:4326 CRS stanza.
    verbose:   passed through to the shape helpers.
    """

    if source == 'INE2016':
        filepath_in = sorted(
            glob.glob(GEO_DATA_PATH +
                      '/sources/INE_CartografiaPrecenso2016/*/Region.shp'))
        filepath_out = [
            GEO_DATA_PATH + '/regiones/shp/' +
            slugify(REGION_CODE__NAME[i + 1]) + '.shp'
            for i in range(len(REGION_CODE__NAME))
        ]
    elif source == 'IGM':
        filepath_in = GEO_DATA_PATH + '/sources/Division_Regional/division_regional.shp'
        filepath_out = GEO_DATA_PATH + '/regiones/shp/regiones_{}.shp'.format(
            source)

    if simplify:
        shape_simplify(filepath_in, filepath_out, ref=0.001, verbose=verbose)

    # Second stage: the simplified .shp files become the conversion inputs.
    if source == 'INE2016':
        filepath_in = [
            '../data/geo/chile/regiones/shp/' +
            slugify(REGION_CODE__NAME[i + 1]) + '.shp'
            for i in range(len(REGION_CODE__NAME))
        ]
        filepath_out = [
            '../data/geo/chile/regiones/json/' +
            slugify(REGION_CODE__NAME[i + 1]) + '.geojson'
            for i in range(len(REGION_CODE__NAME))
        ]
    elif source == 'IGM':
        path_in = '../data/geo/chile/regiones/shp/'
        path_out = '../data/geo/chile/regiones/json/'

    if convert:
        shape_to_json(filepath_in, filepath_out, verbose=verbose)

    if translate:
        for i, filepath in enumerate(filepath_out):

            # BUG FIX: use context managers so the handles are closed even
            # on error -- the original never closed the write handle.
            with open(filepath, 'r') as fin:
                geojson_i = json.load(fin)

            # NOTE(review): geojson_o is overwritten for every feature, so
            # only the input file's *last* feature is written back --
            # behaviour preserved; confirm each file holds a single feature.
            geojson_o = {}
            for feature in geojson_i['features']:
                geojson_o['properties'] = {}
                region_name = REGION_CODE__NAME[i + 1]
                geojson_o['properties']['name'] = region_name
                geojson_o['properties']['code'] = i + 1
                geojson_o['properties']['iso'] = REGION_NAME__ISO_CODE[
                    region_name]
                geojson_o['geometry'] = feature['geometry']
                geojson_o['geometry']['crs'] = {
                    "type": "name",
                    "properties": {
                        "name": "EPSG:4326"
                    }
                }
                #geojson_o['geometry']['crs'] = geojson_i['crs']
                geojson_o['type'] = "Feature"

            with open(filepath, 'w') as fout:
                fout.write(geojson.dumps(geojson_o, sort_keys=True))
                fout.write('\n')
            while byte >= 0x20:
                byte = ord(encoded[i]) - 63
                i += 1
                ll[j] |= (byte & 0x1f) << shift
                shift += 5
            #get the final value adding the previous offset and remember it for the next
            ll[j] = previous[j] + (~(ll[j] >> 1) if ll[j] & 1 else (ll[j] >> 1))
            previous[j] = ll[j]
        #scale by the precision and chop off long coords also flip the positions so
        #its the far more standard lon,lat instead of lat,lon
        decoded.append([float('%.6f' % (ll[1] * inv)), float('%.6f' % (ll[0] * inv))])
    #hand back the list of coordinates
    return decoded

A = [ -122.4425, 37.77823 ] # SF
B = [ -73.96625, 40.78343 ] # NY

KEY = 'valhalla-EzqiWWY'
URL = 'http://valhalla.mapzen.com/route?'
# Valhalla request body: route from A to B by car ("auto" costing).
FROM_TO = '{"locations":[{"lat":'+str(A[1])+',"lon":'+str(A[0])+'},{"lat":'+str(B[1])+',"lon":'+str(B[0])+'}],"costing":"auto"}'
RST = requests.get(URL+'json='+FROM_TO+'&api_key='+KEY)
JSON = json.loads(RST.text)

# Decode the encoded-polyline shape of the first leg into a LineString.
line = geojson.LineString(decode(JSON['trip']['legs'][0]['shape']))
feature = geojson.Feature(geometry=line)
feature_collection = geojson.FeatureCollection([feature])

# BUG FIX: the original passed the misspelled keyword ``sort_key=True``,
# which json.dumps rejects with a TypeError (the parameter is ``sort_keys``);
# also use a context manager so the file is closed even on error.
with open('trip.json', 'w') as fout:
    fout.write(geojson.dumps(feature_collection, sort_keys=True))
Exemple #53
0
def run(args):
    """Write a languoids.geojson map of all placeable languages, colored by family."""
    # Build an id -> languoid lookup from the Glottolog catalog.
    with catalog('glottolog', args) as glottolog:
        languoids = {l.id: l for l in glottolog.api.languoids()}

    # Map every languoid with known coordinates to a GeoJSON Point
    # (GeoJSON order: longitude first).
    l2point = {}
    for l in languoids.values():
        if l.latitude is not None:
            l2point[l.id] = Point((l.longitude, l.latitude))

    # Dialects without coordinates inherit the nearest ancestor's point.
    for l in languoids.values():
        if l.id not in l2point and l.level.name == 'dialect':
            for _, gc, _ in reversed(l.lineage):  # pragma: no cover
                if gc in l2point:
                    l2point[l.id] = l2point[gc]

    def valid_languoid(gc):
        # Only languoids we can place on the map are kept (returns None
        # otherwise; nfilter below drops the Nones).
        if gc in l2point:
            return languoids[gc]

    # Group this project's languages by family; empty family -> isolates.
    langs_by_family, isolates = {}, []
    for family, langs in itertools.groupby(
            args.api.db.fetchall(
                "select id, glottocode, family from languagetable order by family"
            ),
            lambda r: r[2],
    ):
        langs = nfilter(
            [valid_languoid(gc) for gc in set(l[1] for l in langs)])
        if family:
            langs_by_family[family] = langs
        else:
            isolates = langs

    # One distinct color per family plus one per isolate.
    colors = qualitative_colors(len(langs_by_family) + len(isolates))

    def feature(l, color):
        # Wrap one languoid as a styled GeoJSON Feature.
        if l.level.name == 'dialect':
            fam = 'dialect'  # pragma: no cover
        else:
            fam = languoids[l.lineage[0][1]].name if l.lineage else 'isolate'
        return Feature(
            id=l.id,
            geometry=l2point[l.id],
            properties={
                'title': '{0} [{1}]'.format(l.name, fam),
                'fill-opacity': 0.5,
                'marker-size': 'small',
                'marker-color': color
            },
        )

    # Families first (largest first, then alphabetical), then isolates get
    # the remaining colors after index i.
    features, i = [], 0
    for i, (fam, langs) in enumerate(
            sorted(langs_by_family.items(), key=lambda i: (-len(i[1]), i[0]))):
        for lang in langs:
            features.append(feature(lang, colors[i]))

    for j, lang in enumerate(isolates):  # pragma: no cover
        features.append(feature(lang, colors[i + j + 1]))

    # Write the combined FeatureCollection into the repository checkout.
    (args.api.repos / 'languoids.geojson').write_text(dumps(
        FeatureCollection(features), indent=4),
                                                      encoding='utf8')
 def geojson(self):
     """Return self._geom serialized as a GeoJSON string."""
     # The method name shadows the geojson module only as a class attribute;
     # inside the body the name still resolves to the imported module via
     # the enclosing global scope, so this works as intended.
     return geojson.dumps(self._geom)
Exemple #55
0
def cli(user, password, geometry, start, end, uuid, name, download, sentinel,
        producttype, instrument, cloud, footprints, path, query, url, order_by,
        limit):
    """Search for Sentinel products and, optionally, download all the results
    and/or create a geojson file with the search result footprints.
    Beyond your Copernicus Open Access Hub user and password, you must pass a geojson file
    containing the geometry of the area you want to search for or the UUIDs of the products. If you
    don't specify the start and end dates, it will search in the last 24 hours.
    """

    _set_logger_handler()

    # Fall back to ~/.netrc credentials when none were given on the CLI.
    if user is None or password is None:
        try:
            user, password = requests.utils.get_netrc_auth(url)
        except TypeError:
            # get_netrc_auth returned None (no entry for this host).
            pass

    if user is None or password is None:
        raise click.UsageError(
            'Missing --user and --password. Please see docs '
            'for environment variables and .netrc support.')

    api = SentinelAPI(user, password, url)

    # Assemble the query keywords from the mutually-refining CLI options.
    search_kwargs = {}
    if sentinel and not (producttype or instrument):
        search_kwargs["platformname"] = "Sentinel-" + sentinel

    if instrument and not producttype:
        search_kwargs["instrumentshortname"] = instrument

    if producttype:
        search_kwargs["producttype"] = producttype

    if cloud:
        if sentinel not in ['2', '3']:
            logger.error('Cloud cover is only supported for Sentinel 2 and 3.')
            exit(1)
        search_kwargs["cloudcoverpercentage"] = (0, cloud)

    if query is not None:
        # Raw "key=value" pairs from --query extend/override the kwargs.
        search_kwargs.update((x.split('=') for x in query))

    if geometry is not None:
        search_kwargs['area'] = geojson_to_wkt(read_geojson(geometry))

    if uuid is not None:
        # Direct UUID path: no search, one OData request per product id.
        uuid_list = [x.strip() for x in uuid]
        products = {}
        for productid in uuid_list:
            try:
                products[productid] = api.get_product_odata(productid)
            except SentinelAPIError as e:
                if 'Invalid key' in e.msg:
                    logger.error('No product with ID \'%s\' exists on server',
                                 productid)
                    exit(1)
                else:
                    raise
    elif name is not None:
        # Search by identifier(s); multiple names are OR-ed together.
        search_kwargs["identifier"] = name[0] if len(
            name) == 1 else '(' + ' OR '.join(name) + ')'
        products = api.query(order_by=order_by, limit=limit, **search_kwargs)
    else:
        # Default: date-bounded search (all time if no start given).
        start = start or "19000101"
        end = end or "NOW"
        products = api.query(date=(start, end),
                             order_by=order_by,
                             limit=limit,
                             **search_kwargs)

    if footprints is True:
        # Dump the footprints of all found products as one GeoJSON file.
        footprints_geojson = api.to_geojson(products)
        with open(os.path.join(path, "search_footprints.geojson"),
                  "w") as outfile:
            outfile.write(gj.dumps(footprints_geojson))

    if download is True:
        product_infos, triggered, failed_downloads = api.download_all(
            products, path)
        if len(failed_downloads) > 0:
            # Record products whose download failed or was corrupt.
            with open(os.path.join(path, "corrupt_scenes.txt"),
                      "w") as outfile:
                for failed_id in failed_downloads:
                    outfile.write("%s : %s\n" %
                                  (failed_id, products[failed_id]['title']))
    else:
        # No download requested: just log a summary of what was found.
        for product_id, props in products.items():
            if uuid is None:
                logger.info('Product %s - %s', product_id, props['summary'])
            else:  # querying uuids has no summary key
                logger.info('Product %s - %s - %s MB', product_id,
                            props['title'],
                            round(int(props['size']) / (1024. * 1024.), 2))
        if uuid is None:
            logger.info('---')
            logger.info('%s scenes found with a total size of %.2f GB',
                        len(products), api.get_products_size(products))
    # create list for huell
    feature_list = list()

    print("merge_geojson_files started")
    print("input file directory {}".format(options.InDir))
    print("output file name     {}".format(options.OutFile))

    # loop over all files and merge
    for filename in filenamelist:

        with open(options.InDir + filename) as f:
            content = f.read()
            gj_data = geojson.loads(content)

            feature_list.append(gj_data)

    jsonres = FeatureCollection(feature_list)

    if jsonres.is_valid is not True:
        print(jsonres.errors())

    # print created geojson object to console
    # print(geojson.dumps(jsonres, indent=4, sort_keys=True))

    # print created geojson object to output file
    with open(options.OutFile, 'w') as f:
        f.write(geojson.dumps(jsonres, indent=4, sort_keys=True))

    print("exit merge_geojson_files")
Exemple #57
0
def geojson_deployments(dir, outfile='cproof-deployments.geojson'):
    """Scan *dir* for glider deployments and write their tracks to *outfile*
    as a GeoJSON FeatureCollection (plus one KML file per deployment).

    Expects a two-level layout dir/<project>/<deployment>/L0-gridfiles/*.nc
    (falling back to L2-gridfiles).  Each deployment becomes a LineString
    feature annotated with the attributes listed in ``props``, a stable
    pseudo-random color, and an 'active' flag for recently-reporting gliders.
    """
    props = [
        'deployment_start', 'deployment_end', 'platform_type', 'glider_model',
        'glider_name', 'glider_serial', 'deployment_name', 'project',
        'institution', 'comment'
    ]
    subdirs = glob.glob(dir + '/*')
    features = []

    kml = simplekml.Kml()

    # Fixed seed so the per-track colors are stable across runs.
    np.random.seed(20190101)
    print('subdirs', subdirs)
    colornum = 0
    for d in subdirs:
        _log.info(d)
        if os.path.isdir(d):
            subdirs2 = glob.glob(d + '/*')
            for d2 in subdirs2:
                _log.info(d2)
                if os.path.isdir(d2):
                    try:
                        nc = glob.glob(d2 + '/L0-gridfiles/*.nc')
                        if len(nc) < 1:
                            # old style
                            nc = glob.glob(d2 + '/L2-gridfiles/*.nc')

                        with xr.open_dataset(nc[0]) as ds:
                            _log.info(f'opened {nc[0]}')
                            att = ds.attrs
                            # keep only plausible (west of -125) longitudes
                            good = (ds.longitude < -125)
                            line = np.vstack(
                                (ds.longitude[good], ds.latitude[good])).T
                            ls = geojson.LineString(line.tolist())
                            feat = geojson.Feature(geometry=ls)
                            for prop in props:
                                if prop in ds.attrs.keys():
                                    feat.properties[prop] = ds.attrs[prop]
                                else:
                                    feat.properties[prop] = ''

                            # get URL....
                            feat.properties['url'] = (
                                '' +
                                'http://cproof.uvic.ca/gliderdata/deployments/'
                                + d2[2:])
                            # get color:
                            cols = np.random.randint(0, 200, 3)
                            # cols = pygu.get_html_non_blue(colornum)
                            colornum += 1
                            feat.properties['color'] = '#%02X%02X%02X' % (
                                cols[0], cols[1], cols[2])
                            # flag deployments that reported within 2 days
                            if ds['time'][-1] > np.datetime64(
                                    datetime.datetime.now()) - np.timedelta64(
                                        2, 'D'):
                                feat.properties['active'] = True
                            else:
                                feat.properties['active'] = False

                            features += [feat]

                            # make the kml:
                            pnt = kml.newpoint(coords=[line[-1]])
                            pnt.style.iconstyle.icon.href = 'http://cproof.uvic.ca/deployments/assets/images/slocum_glider.png'
                            coords = []
                            for thelon, thelat in zip(ds.longitude.values,
                                                      ds.latitude.values):
                                coords += [(thelon, thelat)]
                            pnt.timestamp.when = f'{ds.time.values[-1]}'[:-3]
                            ls = kml.newlinestring(
                                coords=coords,
                                name=att['deployment_name'],
                            )
                            ls.timespan.begin = f'{ds.time.values[0]}'[:-3]
                            ls.timespan.end = f'{ds.time.values[-1]}'[:-3]
                            ls.style.linestyle.color = 'ee' + '%02X%02X%02X' % (
                                cols[2], cols[1], cols[0])
                            ls.style.linestyle.width = 3
                            kml.save(d2[2:] + '/' + att['deployment_name'] +
                                     '.kml')

                    # BUG FIX: the original bare ``except:`` also swallowed
                    # SystemExit/KeyboardInterrupt; catch only ordinary
                    # errors while keeping the best-effort skip behaviour.
                    except Exception:
                        _log.info(f'Could not find grid file {d2}')
    feature_collection = geojson.FeatureCollection(features)
    with open(outfile, 'w') as fout:
        s = geojson.dumps(feature_collection)
        fout.write(s)
Exemple #58
0
def __to_geojson(coordinates, geojson_properties=None):
    """Serialize *coordinates* as a GeoJSON Feature (MultiPoint) string.

    BUG FIXES: the original mixed tabs and 8-space indentation (a TabError
    on Python 3) and used a mutable default argument
    (``geojson_properties={}``), which is shared between calls; ``None`` is
    now the sentinel and an empty dict is created per call.
    """
    if geojson_properties is None:
        geojson_properties = {}
    coordinate_points = geojson.MultiPoint(coordinates)
    # can add properties and id to feature (perhaps trip/section id?)
    dump = geojson.dumps(geojson.Feature(geometry=coordinate_points,
                                         properties=geojson_properties))
    return dump
Exemple #59
0
def bldNCPCRpt(request):
    '''Produce a 60-day incident report page for the NCPC chair's beat(s).

    Looks up the chair's beat via NCPCChair2Beat (a '+'-joined value means
    two beats), collects the beat incidents plus all incidents inside a
    relaxed bounding box around them ("vicinity"), and renders them as a
    GeoJSON layer via the dailyIncid/ncpc.html template.
    '''

    nowDT = awareDT(datetime.now())
    minDate = nowDT - timedelta(days=60)

    userName = request.user.get_username()

    if userName not in NCPCChair2Beat:
        logline = 'username=%s bldNCPCRpt No beat ?!' % (userName)
        logger.info(logline)
        # NOTE(review): need2login()'s result is not returned, so execution
        # continues into the lookup below -- confirm whether need2login
        # raises/redirects on its own or whether a 'return' is missing here.
        need2login(request)

    beat = NCPCChair2Beat[userName]
    # A '+' in the beat value means the chair covers two beats.
    if beat.find('+') != -1:
        beatList = beat.split('+')
        beat0 = beatList[0]
        beat1 = beatList[1]
        qs0 = OakCrime.objects.filter(cdateTime__gt=minDate). \
           filter(cdateTime__lt=nowDT). \
           filter( Q(beat=beat0) | Q(beat=beat1) ). \
           order_by('cdateTime')

    else:
        qs0 = OakCrime.objects.filter(cdateTime__gt=minDate). \
           filter(cdateTime__lt=nowDT). \
           filter(beat=beat). \
           order_by('cdateTime')

    incidList0 = list(qs0)
    nbeat = len(incidList0)
    logline = 'username=%s bldNCPCRpt Beat=%s N=%d' % (userName, beat, nbeat)
    logger.info(logline)

    # qs1 uses relaxed bbox around beat's incidents

    # Start from an inverted (impossible) bbox and grow it per incident.
    xlngMin = ylatMin = 1000.
    xlngMax = -1000.
    ylatMax = 0.

    incid0_opd_rd_Dict = {}  # dict for quick tests by second vicinity set
    for incid in incidList0:
        incid0_opd_rd_Dict[incid.opd_rd] = True
        if incid.ylat == None:
            continue

        if incid.ylat < ylatMin:
            ylatMin = incid.ylat
        if incid.ylat > ylatMax:
            ylatMax = incid.ylat

        if incid.xlng < xlngMin:
            xlngMin = incid.xlng
        if incid.xlng > xlngMax:
            xlngMax = incid.xlng

    # relax bbox
    BBoxBorder = 1e-3

    # 	xmin = sw[0]
    # 	ymin = ne[1]
    # 	xmax = sw[1]
    # 	ymax = ne[0]
    xlngMin -= BBoxBorder
    xlngMax += BBoxBorder
    ylatMin -= BBoxBorder
    ylatMax += BBoxBorder

    bbox = (xlngMin, ylatMin, xlngMax, ylatMax)
    geom = Polygon.from_bbox(bbox)


    qs1 = OakCrime.objects.filter(cdateTime__gt=minDate). \
       filter(cdateTime__lt=nowDT). \
       filter(point__contained=geom). \
       order_by('cdateTime')
    incidList1 = list(qs1)
    nvicinity = len(incidList1)
    logline = 'username=%s bldNCPCRpt Beat=%s NVincinity=%d' % (userName, beat,
                                                                nvicinity)
    logger.info(logline)

    context = {}

    context['beat'] = beat
    context['user'] = userName
    context['nbeat'] = nbeat
    context['nvicinity'] = nvicinity

    maxDateDigits = nowDT.strftime('%y%m%d')
    minDateDigits = minDate.strftime('%y%m%d')
    maxDateStr = nowDT.strftime('%b %d %Y')
    minDateStr = minDate.strftime('%b %d %Y')

    context['minDateDigits'] = minDateDigits
    context['maxDateDigits'] = maxDateDigits

    context['minDateStr'] = minDateStr
    context['maxDateStr'] = maxDateStr

    # Build one GeoJSON feature per vicinity incident; incidents without
    # coordinates get a null geometry so they still reach the template.
    gjFeatures = []
    for o in incidList1:
        if o.ylat == None:
            f = geojson.Feature(geometry=None, properties={"count": 1})
        else:
            [jlat, jlng] = jitterCoord(o.ylat, o.xlng)
            pt = geojson.Point((jlng, jlat))
            f = geojson.Feature(geometry=pt, properties={"count": 1})
        f.properties['opd_rd'] = o.opd_rd
        dtstr = o.cdateTime.strftime('%a,%b-%d-%y_%I:%M%p')
        f.properties['cdateTime'] = dtstr
        f.properties['crimeCat'] = o.crimeCat

        # NB: use major flag to distinguish beat from vicinity
        if o.source.find('SOC_') == -1:
            f.properties['majorIncid'] = 'DLog'
        else:
            # NB: mapbox get works on STRINGS
            f.properties['majorIncid'] = str(o.opd_rd in incid0_opd_rd_Dict)

        gjFeatures.append(f)

    gjCollection = geojson.FeatureCollection(gjFeatures)
    rawgj = geojson.dumps(gjCollection)

    context['dataArr'] = rawgj

    # MapZen bounding box coordinates in a 'southwest_lng,southwest_lat,northeast_lng,northeast_lat' format
    # MapBox bounding box coordinates in an array of LngLatLike objects in [sw, ne] order, or an array of
    # numbers in [west, south, east, north] order.

    context['mapBounds'] = list(bbox)

    return render(request, 'dailyIncid/ncpc.html', context)
Exemple #60
0
def api_spring_onway(lat, lng, radius, radius_water):
    """Return tracks with springs on the way near (lat, lng) as GeoJSON."""
    center = {'lat': lat, 'lng': lng}
    matches = db.get_track_with_spring_onway(center, radius, radius_water)
    return geojson.dumps(matches)