Example #1
0
    def merge_to_multi_polygon(feature_collection: str,
                               dissolve: bool) -> geojson.MultiPolygon:
        """
        Merge every geometry in a feature collection into one MultiPolygon.
        :param feature_collection: geojson feature collection str containing features
        :param dissolve: flag for whether to dissolve internal boundaries.
        :return: geojson.MultiPolygon
        :raises InvalidGeoJson: if the merged result is not a valid MultiPolygon
        """
        shapely_geoms = GridService._to_shapely_geometries(
            json.dumps(feature_collection))
        merged = GridService._convert_to_multipolygon(shapely_geoms)
        if dissolve:
            merged = GridService._dissolve(merged)
        # Round-trip through shapely's mapping() back into a geojson object
        aoi_multi_polygon_geojson = geojson.loads(
            json.dumps(mapping(merged)))

        # validate the geometry
        if type(aoi_multi_polygon_geojson) is not geojson.MultiPolygon:
            raise InvalidGeoJson(
                "Area Of Interest: geometry must be a MultiPolygon")

        validation = geojson.is_valid(aoi_multi_polygon_geojson)
        if validation["valid"] == "no":
            raise InvalidGeoJson(
                f"Area of Interest: Invalid MultiPolygon - {validation['message']}"
            )

        return aoi_multi_polygon_geojson
Example #2
0
    def create_feature_collection(self, csv_file):
        """
        Parse a CSV to create Feature objects for all postcodes.

        Each row is expected as: postcode, <ignored>, easting, northing.
        :param csv_file: csv file name (relative to self.cur_dir)
        :return: list of Features, one per postcode in the file
        """
        all_features = list()

        # BNG (EPSG:27700) CRS object; currently unused because the
        # FeatureCollection wrapping below is commented out.
        coord_ref = crs.Named(
            properties={'name': 'urn:ogc:def:crs:EPSG::27700'})

        with open(path.join(self.cur_dir, csv_file), 'r') as f:
            reader = csv.reader(f)
            for row in reader:
                # Postcodes are normalised by stripping internal spaces
                postcode = str(row[0]).replace(' ', '')
                easting = int(row[2])
                northing = int(row[3])
                point = Point(coordinates=(easting, northing))
                feature = Feature(geometry=point,
                                  properties={'postcode': postcode})
                # Fixed: removed dead `lol = is_valid(feature)` whose result
                # was never used.
                all_features.append(feature)
        # Set CRS to BNG
        # feature_collection = FeatureCollection(all_features, crs=coord_ref)
        return all_features
    def _attach_tasks_to_project(draft_project: Project, tasks_geojson):
        """
        Validates then iterates over the array of tasks and attaches them to
        the draft project.
        :param draft_project: Draft project in scope
        :param tasks_geojson: GeoJSON feature collection of mapping tasks
        :raises InvalidGeoJson, InvalidData
        """
        tasks = geojson.loads(json.dumps(tasks_geojson))

        if type(tasks) is not geojson.FeatureCollection:
            raise InvalidGeoJson(
                "Tasks: Invalid GeoJson must be FeatureCollection")

        is_valid_geojson = geojson.is_valid(tasks)
        if is_valid_geojson["valid"] == "no":
            raise InvalidGeoJson(
                f"Tasks: Invalid FeatureCollection - {is_valid_geojson['message']}"
            )

        # Task ids are 1-based. InvalidData/InvalidGeoJson raised by the task
        # factory propagate to the caller unchanged (the old
        # `except ... as e: raise e` was a no-op).
        task_count = 0
        for task_count, feature in enumerate(tasks["features"], start=1):
            draft_project.tasks.append(
                Task.from_geojson_feature(task_count, feature))

        draft_project.total_tasks = task_count
Example #4
0
    def to_geojson(self, json_path=None, places=True, massifs=True, circuits=True, boulders=False):
        """
        Export the selected layers as a GeoJSON FeatureCollection.

        :param json_path: if given, write the collection to this file;
            otherwise return the GeoJSON string.
        :param places: include place features
        :param massifs: include massif features
        :param circuits: include circuit features
        :param boulders: when exporting circuits, keep the raw circuit objects
            instead of their summary features
        :raises ValueError: if the assembled collection is not valid GeoJSON
        """
        features = []
        if places:
            features.extend([place for place in self.places if place])
        if massifs:
            features.extend([massif for massif in self.massifs if massif])
        if circuits:
            if boulders:
                exported_circuits = [circuit for circuit in self.circuits if circuit]
            else:
                exported_circuits = [circuit.to_feature() for circuit in self.circuits if circuit]
            features.extend(exported_circuits)
        feature_collections = geojson.FeatureCollection(features)

        # Bug fix: geojson.is_valid returns a dict (always truthy), so the old
        # `if not geojson.is_valid(...)` check could never fire; inspect the
        # 'valid' flag instead.
        if geojson.is_valid(feature_collections)['valid'] == 'no':
            raise ValueError('Non valid GeoJSON')
        # Fixme: crs geojson.named API

        kwargs = dict(indent=2, ensure_ascii=False, sort_keys=True)
        if json_path is not None:
            with open(json_path, 'w', encoding='utf8') as f:
                geojson.dump(feature_collections, f, **kwargs)
        else:
            return geojson.dumps(feature_collections, **kwargs)
Example #5
0
 def extract(self, target, data):
     """Convert a KML payload to GeoJSON; return [('geojson', obj)] when valid."""
     converted = kmltogeojson(data)
     if not converted:
         return None
     parsed = geojson.loads(converted)
     # Only hand back data that the geojson library accepts as valid
     if geojson.is_valid(parsed)['valid'] == 'yes':
         return [('geojson', parsed)]
Example #6
0
    def from_geojson_feature(cls, task_id, task_feature):
        """
        Build and validate a task from a GeoJson feature object.
        :param task_id: Unique ID for the task
        :param task_feature: A geoJSON feature object
        :raises InvalidGeoJson, InvalidData
        """
        if type(task_feature) is not geojson.Feature:
            raise InvalidGeoJson('Task: Invalid GeoJson should be a feature')

        geometry = task_feature.geometry
        if type(geometry) is not geojson.MultiPolygon:
            raise InvalidGeoJson('Task: Geometry must be a MultiPolygon')

        validation = geojson.is_valid(geometry)
        if validation['valid'] == 'no':
            raise InvalidGeoJson(
                f"Task: Invalid MultiPolygon - {validation['message']}")

        task = cls()
        properties = task_feature.properties
        try:
            task.x = properties['x']
            task.y = properties['y']
            task.zoom = properties['zoom']
            task.splittable = properties['splittable']
        except KeyError as e:
            raise InvalidData(f'Task: Expected property not found: {str(e)}')

        task.id = task_id
        # Persist the geometry as an SRID 4326 (WGS84) database geometry
        task.geometry = ST_SetSRID(
            ST_GeomFromGeoJSON(geojson.dumps(geometry)), 4326)

        return task
Example #7
0
 def test_write_stops_geojson(self):
     """Exported stops GeoJSON is valid and carries the expected properties."""
     in_memory_file = io.StringIO()
     exports.write_stops_geojson(self.gtfs, in_memory_file)
     in_memory_file.seek(0)
     gjson = geojson.loads(in_memory_file.read(-1))
     # Bug fix: geojson.is_valid returns a dict (always truthy), so the old
     # assertTrue could never fail; assert on the 'valid' flag of the parsed
     # object instead (and avoid re-reading the buffer).
     self.assertEqual(geojson.is_valid(gjson)['valid'], 'yes')
     gjson_properties = gjson['features'][0]['properties']
     self.assertIn("name", gjson_properties.keys())
     self.assertIn("stop_I", gjson_properties.keys())
Example #8
0
def test_footprints_s2(products):
    """Product footprints convert to valid GeoJSON and match the stored fixture."""
    footprints = SentinelAPI.to_geojson(products)
    for feature in footprints['features']:
        result = geojson.is_valid(feature['geometry'])
        assert result['valid'] == 'yes', result['message']

    with open('tests/expected_search_footprints_s2.geojson') as fixture:
        expected = geojson.loads(fixture.read())
    # compare as sets: JSON object key order is not significant
    assert set(footprints) == set(expected)
Example #9
0
def test_footprints_s2(products):
    """Each S2 footprint geometry is valid GeoJSON; results match expectations."""
    footprints = SentinelAPI.to_geojson(products)
    for fp in footprints['features']:
        verdict = geojson.is_valid(fp['geometry'])
        assert verdict['valid'] == 'yes', verdict['message']

    with open('tests/expected_search_footprints_s2.geojson') as f:
        expected_footprints = geojson.loads(f.read())
    # unordered comparison: convert both JSON objects to sets of keys
    assert set(footprints) == set(expected_footprints)
Example #10
0
 def extract(self, target, data):
     """
     Parse the payload as GeoJSON, tolerating junk before the JSON body.

     :return: [('geojson', obj)] when the payload is valid GeoJSON, else None.
     """
     text = data.read()
     try:
         parsed = geojson.loads(text)
     except ValueError:
         # Possibly malformed json with a junk prefix: restart at the first
         # '{' or '[' actually present. The old min(find, find) returned -1
         # (the last character) whenever one of the markers was missing.
         starts = [i for i in (text.find('{'), text.find('[')) if i != -1]
         if not starts:
             raise
         parsed = geojson.loads(text[min(starts):])
     result = geojson.is_valid(parsed)
     if result['valid'] == 'yes':
         return [('geojson', parsed)]
def post():
    """
    Main POST endpoint: accepts a JSON job from Geodesign Hub, assigns a
    random impact type to every feature and posts the result back.

    Fixed: the original mixed tabs and spaces (a SyntaxError under Python 3),
    indexed request.headers directly (KeyError on a missing header), and the
    error message miscounted the required parameters.

    :return: flask Response (JSON) with an appropriate HTTP status code.
    """
    # Only JSON payloads are accepted; .get avoids a KeyError when the
    # Content-Type header is absent.
    if request.headers.get('Content-Type') != 'application/json':
        # If the media type is not JSON, return a HTTP 415 message
        msg = {"message": "Unsupported Media Type"}
        return Response(json.dumps(msg), status=415, mimetype='application/json')

    # Load the JSON data from Geodesign Hub
    req = json.loads(request.data)
    try:
        # check that the data coming in from Geodesign Hub has all the keys
        jobid = req['jobid']
        callback = req['callback']
        geometries = req['geometry']
        jobtype = req['type']
        projectname = req['projectname']
    except KeyError:
        msg = json.dumps({"message": "Five parameters are required: jobid, type, callback, geometry and Geodesign Hub projectname. One or more of these were not found in your JSON request."})
        return Response(msg, status=400, mimetype='application/json')

    # Once the data has been verified, parse it and validate the geojson.
    gjData = geojson.loads(json.dumps(geometries))
    validation = geojson.is_valid(gjData)
    if validation['valid'] != 'yes':
        # If invalid geoJSON is submitted, return a 400 message.
        msg = json.dumps({"message": "Invalid GeoJSON submitted."})
        return Response(msg, status=400, mimetype='application/json')

    # Implement your impacts code here. The code below takes all the features
    # and assigns a random impact to them.
    impactsAssignmentList = ['orange', 'orange2', 'yellow', 'purple', 'purple2']
    allFeats = []
    for feature in gjData['features']:
        feature['properties']['areatype'] = random.choice(impactsAssignmentList)
        allFeats.append(feature)
    # Once all features have been parsed, build a feature collection
    fc = {"type": "FeatureCollection", "features": allFeats}

    # Setup the API and send the impact geometries back to the project.
    myAPIHelper = GeodesignHub.GeodesignHubClient(
        url=config.apisettings['serviceurl'],
        token=config.apisettings['apitoken'])
    upload = myAPIHelper.post_gdservice_JSON(geometry=fc, jobid=jobid)
    # if status code is 202 / Accepted then all Ok
    if upload.status_code == 202:
        pass

    op = json.dumps({"message": "OK"})
    return Response(op, status=200, mimetype='application/json')
Example #12
0
def validate_geojson(geojson):
    """
    Validate that a string parses as valid GeoJson.
    :param geojson: GeoJson string
    :return: bool
    """
    try:
        # is_valid reports {'valid': 'yes'/'no', ...}; the comparison already
        # yields a bool, so the old if/else True/False was redundant.
        return is_valid(loads(geojson))['valid'].lower() == 'yes'
    except Exception:
        # Narrowed from a bare except: any parse/validation failure simply
        # means the input is not valid GeoJson.
        return False
Example #13
0
def validate_geojson(geojson_path):
    """ Validate a GeoJSON file """

    ## Validate the GeoJSON file
    # NOTE: Python 2 code (print statements).
    with open(geojson_path, 'r') as geojson_file:
        geojson_dump = geojson_file.read()
        features = geojson.loads(geojson_dump)
        # is_valid returns a dict: {'valid': 'yes'/'no', 'message': ...}
        validation = geojson.is_valid(features)

        print "Is the geojson file valid? ", validation["valid"]
        if validation["message"]:
            print "Info: ", validation["message"]

        # Returns the 'yes'/'no' string, not a bool
        return validation["valid"]
Example #14
0
 def _index_spatial_property(self, node, key, value, label):
     """Index a node's GeoJSON-valued property in the matching spatial index, if one is configured."""
     sidx_key = u"{}_{}".format(label, key)
     if sidx_key not in self.sidx:
         return
     geometry = geojson.loads(value)
     if geojson.is_valid(geometry)['valid'] == 'no':
         return
     entry = self.sidx[sidx_key]
     # Store the WKT rendering on the node and add it to the spatial index
     node[entry["key"]] = shape(geometry).wkt
     entry["index"].add(SPATIAL_INDEX_KEY, SPATIAL_INDEX_VALUE, node)
     # Add node to layer
     self.spatial.addNodeToLayer(layer=entry["index"].name, node=node.url)
Example #15
0
 def test_write_sections_geojson(self):
     """Exported sections GeoJSON is valid and carries the expected properties."""
     in_memory_file = io.StringIO()
     exports.write_sections_geojson(self.gtfs, in_memory_file)
     in_memory_file.seek(0)
     gjson = geojson.loads(in_memory_file.read(-1))
     # Bug fix: geojson.is_valid returns a dict (always truthy), so the old
     # assertTrue could never fail; assert on the 'valid' flag of the parsed
     # object instead (and avoid re-reading the buffer).
     self.assertEqual(geojson.is_valid(gjson)['valid'], 'yes')
     gjson_properties = gjson['features'][0]['properties']
     self.assertIn("from_stop_I", gjson_properties.keys())
     self.assertIn("to_stop_I", gjson_properties.keys())
     self.assertIn("n_vehicles", gjson_properties.keys())
     self.assertIn("duration_avg", gjson_properties.keys())
     self.assertIn("route_I_counts", gjson_properties.keys())
     self.assertIn("route_type", gjson_properties.keys())
Example #16
0
 def _index_spatial_property(self, node, key, value, label):
     # Index a node's GeoJSON-valued property in the matching spatial index.
     # label + key select which configured index (if any) applies.
     sidx_key = u"{}_{}".format(label, key)
     if sidx_key in self.sidx:
         geo_value = geojson.loads(value)
         # is_valid returns {'valid': 'yes'/'no', 'message': ...}
         is_valid_geojson = geojson.is_valid(geo_value)['valid'] != 'no'
         if is_valid_geojson:
             # Add node to index
             index = self.sidx[sidx_key]["index"]
             index_key = self.sidx[sidx_key]["key"]
             # Store the WKT rendering of the geometry on the node itself
             wkt_value = shape(geo_value).wkt
             node[index_key] = wkt_value
             index.add(SPATIAL_INDEX_KEY, SPATIAL_INDEX_VALUE, node)
             # Add node to layer
             self.spatial.addNodeToLayer(layer=index.name, node=node.url)
Example #17
0
def _convert_to_geojson(network):
    """
    Build a GeoJSON FeatureCollection from a network dict.

    :param network: dict with optional 'nodes', 'links' and 'projection' keys
    :return: geojson.FeatureCollection of link and node features
    :raises ValueError: if the assembled collection is not valid GeoJSON
    """
    nodes = network.get('nodes')
    links = network.get('links')
    projection = network.get('projection', '')

    feature_list = []

    _build_feature_list_links(feature_list, links, projection)
    _build_feature_list_nodes(feature_list, nodes, projection)

    feature_collection = geojson.FeatureCollection(feature_list)

    # Replaced the leftover debug print of the validation dict with an
    # explicit check that fails loudly on invalid output.
    validation = geojson.is_valid(feature_collection)
    if validation['valid'] == 'no':
        raise ValueError('Invalid GeoJSON: {}'.format(validation['message']))
    return feature_collection
Example #18
0
def test_footprints_s1():
    """Queried S1 GRD footprints are valid GeoJSON and match the fixture."""
    api = SentinelAPI(**_api_auth)
    products = api.query(
        geojson_to_wkt(read_geojson('tests/map.geojson')),
        datetime(2014, 10, 10),
        datetime(2014, 12, 31),
        producttype="GRD",
    )

    footprints = api.to_geojson(products)
    for feature in footprints['features']:
        outcome = geojson.is_valid(feature['geometry'])
        assert outcome['valid'] == 'yes', outcome['message']

    with open('tests/expected_search_footprints_s1.geojson') as fixture:
        expected = geojson.loads(fixture.read())
    # compare as sets so object/key ordering is irrelevant
    assert set(footprints) == set(expected)
Example #19
0
def test_footprints_s1():
    """S1 GRD product footprints validate as GeoJSON and match expectations."""
    api = SentinelAPI(**_api_auth)
    search_area = geojson_to_wkt(read_geojson('tests/map.geojson'))
    products = api.query(search_area,
                         datetime(2014, 10, 10),
                         datetime(2014, 12, 31),
                         producttype="GRD")

    footprints = api.to_geojson(products)
    for fp in footprints['features']:
        verdict = geojson.is_valid(fp['geometry'])
        assert verdict['valid'] == 'yes', verdict['message']

    with open('tests/expected_search_footprints_s1.geojson') as f:
        expected_footprints = geojson.loads(f.read())
    # unordered comparison via sets
    assert set(footprints) == set(expected_footprints)
    def from_dict(cls, area_poly: dict):
        """ Create a new Priority Area from a GeoJSON polygon dictionary """
        polygon = geojson.loads(json.dumps(area_poly))

        if type(polygon) is not geojson.Polygon:
            raise InvalidGeoJson("Priority Areas must be supplied as Polygons")

        validation = geojson.is_valid(polygon)
        if validation["valid"] == "no":
            raise InvalidGeoJson(
                f"Priority Area: Invalid Polygon - {validation['message']}"
            )

        priority_area = cls()
        # Persist as an SRID 4326 (WGS84) database geometry
        priority_area.geometry = ST_SetSRID(
            ST_GeomFromGeoJSON(geojson.dumps(polygon)), 4326)
        return priority_area
Example #21
0
    def check(self):
        """
        Probe the geoservice URL and classify the outcome.

        :return: CheckResult with http code, cumulative status, error details
            (when any) and the total check duration filled in.
        """
        result = CheckResult(geoservice_id=self.service.id,
                             geoservice_name=self.service.name,
                             geoservice_type=self.service.type)

        startTime = datetime.datetime.utcnow()
        try:
            response = requests.get(self.service.url, timeout=self.timeout)

            result.http_code = response.status_code
            # content-type is not checked; instead we check the response code
            # and, if it is 200, validate the response body as geojson
            if response.status_code == 200:
                geojson_obj = geojson.loads(response.text)
                validation = geojson.is_valid(geojson_obj)
                if validation['valid'] == 'yes':
                    result.data = response.text
                    result.cumulative_status = CumulativeStatus.WORKS
                else:
                    result.cumulative_status = CumulativeStatus.PROBLEMATIC
                    result.error_text = validation['message']
                    result.error_type = CheckStatusErrorType.INVALID_RESPONSE
            else:
                result.cumulative_status = CumulativeStatus.PROBLEMATIC
                result.error_text = 'Non 200 http code'
                result.http_response = response.text
                result.error_type = CheckStatusErrorType.INVALID_RESPONSE

        # requests reported a web-server error code
        except HTTPError as error:
            result.cumulative_status = CumulativeStatus.FAILED
            result.error_text = unicode(error)

        # request exceeded the configured timeout
        # NOTE(review): error_text is not set here, unlike the other handlers
        except Timeout as error:
            result.cumulative_status = CumulativeStatus.FAILED
            result.error_type = CheckStatusErrorType.TIMEOUT_ERROR

        # any other failure (connection refused, DNS, parse errors, ...)
        except Exception as error:
            result.cumulative_status = CumulativeStatus.FAILED
            result.error_text = unicode(error)

        # duration covers the whole attempt, including failures
        duration_time = datetime.datetime.utcnow() - startTime
        result.check_duration = duration_time.total_seconds()

        return result
 def csv_to_json(self):
     """
     Main run method: convert each CSV returned by get_csv() into
     validated GeoJson output via encode_to_raw_json().
     """
     # NOTE: Python 2 code (print statements).
     csv_list = self.get_csv()
     if csv_list:
         for csv_f in csv_list:
             print csv_f
             fc = self.create_feature_collection(csv_f)
             # is_valid returns {'valid': 'yes'/'no', 'message': ...}
             validation = is_valid(fc)
             if validation['valid'] == 'yes':
                 self.encode_to_raw_json(fc, csv_f)
             else:
                 print "Failed to create GeoJson for file: " + csv_f + " - Error: " + str(validation['message'])
         print "Finished!"
     else:
         print "No CSV's to process"
def upload_docs_to_ES(docs, index, doc_type, id_field="", geopoint=False):
    """
    Bulk-upload JSON/GeoJSON documents to ElasticSearch.

    Fixed: the id-assignment lines were tab/space mangled (mis-nested and a
    SyntaxError under Python 3); bare excepts narrowed to Exception.

    :param docs: list of JSON (possibly GeoJSON Feature) documents
    :param index: ElasticSearch index name
    :param doc_type: ElasticSearch document type
    :param id_field: optional property/field name to use as the document _id
    :param geopoint: optional name of a geo_point field to map and populate
    """
    es = Elasticsearch(['http://' + ES_username + ':' + ES_password + '@' + ES_url + ':9200/'])

    try:
        es.indices.create(index)
    except Exception:
        # do not try to recreate the index
        pass

    # if the data has a location field, set the geo_point mapping
    if geopoint:
        mapping = {doc_type: {'properties': {geopoint: {'type': 'geo_point', 'store': 'yes'}}}}
        es.indices.put_mapping(index=index, doc_type=doc_type, body=mapping)

    actions = []
    # build the list of ElasticSearch uploads for the bulk command
    for doc in docs:
        action = {
            '_index': index,
            '_type': doc_type,
        }
        # check if the document is a geojson document
        validation = geojson.is_valid(doc)
        if validation['valid'].lower() == 'yes':
            # add the point to the document properties
            doc['properties'][geopoint] = list(geojson.utils.coords(doc))[0]

            # load the document properties into ES
            action['_source'] = doc['properties']

            # get id from geojson properties document
            if len(id_field) != 0:
                action['_id'] = doc['properties'][id_field]
        else:
            # assign id for typical json document
            if len(id_field) != 0:
                action['_id'] = doc[id_field]
            action['_source'] = doc

        actions.append(action)

    helpers.bulk(es, actions)
def ExporttoJson(geometry_collection):
    """
    Convert a collection of OGR-style geometries to a GeoJSON string.

    :param geometry_collection: iterable of geometries exposing ExportToJson()
    :return: sorted-key GeoJSON string of the resulting FeatureCollection
    :raises TypeError: if the assembled collection is not valid geojson
    """
    feature_lst = []

    for geometry in geometry_collection:
        json_geom = geojson.loads(geometry.ExportToJson())
        feature = geojson.Feature(geometry=json_geom)
        feature_lst.append(feature)

    feature_coll = geojson.FeatureCollection(feature_lst)

    validation = geojson.is_valid(feature_coll)

    # Bug fix: compare strings with ==, not identity (`is`), which only
    # worked by CPython string-interning accident.
    if validation['valid'] == 'yes':
        # pprint(geojson.dumps(feature_json, sort_keys=False))
        return geojson.dumps(feature_coll, sort_keys=True)
    else:
        raise TypeError('This is not a valid geojson string.')
Example #25
0
    def test_ways(self, testapp):
        """ Tests that after OSM data is imported, valid GeoJSON can be returned """
        nodes, ways = osm.map_data_to_nodes_ways(mock_map_data)
        osm.update_db_ways(ways)

        rv = testapp.get('/api/0.1/ways')
        rv_str = rv.data.decode('utf-8')
        assert rv.status_code == 200

        # Raw-string sanity checks before parsing
        for fragment in ('FeatureCollection', 'Feature', 'LineString'):
            assert fragment in rv_str

        way_geo = geojson.loads(rv_str)
        first_feature = way_geo['features'][0]

        assert way_geo['type'] == 'FeatureCollection'
        assert first_feature['type'] == 'Feature'
        assert first_feature['geometry']['type'] == 'LineString'
        assert 'id' in first_feature['properties']['osm_data']
        assert geojson.is_valid(way_geo)['valid'] == 'yes'
Example #26
0
    def from_geojson_feature(cls, task_id, task_feature):
        """
        Constructs and validates a task from a GeoJson feature object
        :param task_id: Unique ID for the task
        :param task_feature: A geoJSON feature object
        :raises InvalidGeoJson, InvalidData
        """
        if type(task_feature) is not geojson.Feature:
            raise InvalidGeoJson("Task: Invalid GeoJson should be a feature")

        task_geometry = task_feature.geometry

        # Task geometries are stored exclusively as MultiPolygons
        if type(task_geometry) is not geojson.MultiPolygon:
            raise InvalidGeoJson("Task: Geometry must be a MultiPolygon")

        # is_valid returns {'valid': 'yes'/'no', 'message': ...}
        is_valid_geojson = geojson.is_valid(task_geometry)
        if is_valid_geojson["valid"] == "no":
            raise InvalidGeoJson(
                f"Task: Invalid MultiPolygon - {is_valid_geojson['message']}"
            )

        task = cls()
        try:
            # x/y/zoom identify the tile; isSquare marks grid-aligned tasks
            task.x = task_feature.properties["x"]
            task.y = task_feature.properties["y"]
            task.zoom = task_feature.properties["zoom"]
            task.is_square = task_feature.properties["isSquare"]
        except KeyError as e:
            raise InvalidData(f"Task: Expected property not found: {str(e)}")

        # Arbitrary extra properties are preserved as a JSON string
        if "extra_properties" in task_feature.properties:
            task.extra_properties = json.dumps(
                task_feature.properties["extra_properties"]
            )

        task.id = task_id
        # Persist the geometry as an SRID 4326 (WGS84) database geometry
        task_geojson = geojson.dumps(task_geometry)
        task.geometry = ST_SetSRID(ST_GeomFromGeoJSON(task_geojson), 4326)

        return task
    def create_feature_collection(self, csv_file):
        """
        Parse a CSV to create Feature objects for all postcodes.

        Each row is expected as: postcode, <ignored>, easting, northing.
        :param csv_file: csv file name (relative to self.cur_dir)
        :return: list of Features, one per postcode in the file
        """
        all_features = list()

        # BNG (EPSG:27700) CRS object; unused while the FeatureCollection
        # wrapping below stays commented out.
        coord_ref = crs.Named(properties={'name': 'urn:ogc:def:crs:EPSG::27700'})

        with open(path.join(self.cur_dir, csv_file), 'r') as f:
            reader = csv.reader(f)
            for row in reader:
                # Postcodes are normalised by stripping internal spaces
                postcode = str(row[0]).replace(' ', '')
                easting = int(row[2])
                northing = int(row[3])
                point = Point(coordinates=(easting, northing))
                feature = Feature(geometry=point, properties={'postcode': postcode})
                # Fixed: removed dead `lol = is_valid(feature)` whose result
                # was never used.
                all_features.append(feature)
        # Set CRS to BNG
        # feature_collection = FeatureCollection(all_features, crs=coord_ref)
        return all_features
Example #28
0
def build_polygon(coords):
    """
    Build a geojson Polygon from coordinates.

    :param coords: polygon coordinates (list of linear rings)
    :return: geojson.Polygon if it validates, otherwise None
    """
    polygon = geojson.Polygon(coords)
    check = geojson.is_valid(polygon)
    # Fixed: removed an unreachable, tab-indented `return tup` that
    # referenced an undefined name.
    if check['valid'] == 'yes':
        return polygon

# Build polygon features from the gate coordinates file and export them.
# NOTE(review): `cs`, `sask_polys` and `transform` are defined earlier in the
# file, outside this excerpt — verify before moving this code.
with open("gate_coords_66.txt") as coords:
	# flag skips the first (header) line of the file
	flag = 0
	for line in coords:
		sask_coords = []
		if flag == 0:
			flag = 1
			continue

		words = line.split()
		# print(line)
		# presumably columns 8-11 and 4-7 hold the two coordinate axes of the
		# four corners — TODO confirm against the input file format
		for x in range(8,12):
			# print(words[x-6],words[x])
			sask_coords.append((float(words[x]),float(words[x-4])))
			cs.append((words[x-4], words[x]))
		# repeat the first corner to close the polygon ring
		sask_coords.append((float(words[8]),float(words[4])))

		sask_coords = [transform(x) for x in sask_coords]

		# Feature id is the zero-padded concatenation of the first two columns
		sask_polys.append(geojson.Feature(geometry=geojson.Polygon([sask_coords]),id=str(words[0].zfill(2))+str(words[1].zfill(2))))
		
featureCollection = geojson.FeatureCollection(sask_polys)
# NOTE(review): validation result is computed but never checked
validation = geojson.is_valid(featureCollection)

# Dump the raw coordinate pairs for inspection
with open("coords.txt",'w') as outfile:
	for c in cs:
		outfile.write(c[1]+ ',' + c[0]+"\n")

# Write the assembled FeatureCollection as GeoJSON
with open("cly_geojsondata.json",'w') as outfile:
	geojson.dump(featureCollection,outfile)
def upload_docs_to_ES(docs,
                      index,
                      doc_type,
                      id_field=False,
                      geopoint=False,
                      geoshape=False,
                      delete_index=False):
    #input: list of JSON documents, an index name, document type, ID field, and name of an (OPTIONAL) geopoint field.
    #uploads each feature element to ElasticSearch
    #es = Elasticsearch(ES_url)

    es = Elasticsearch([
        'http://' + ES_username + ':' + ES_password + '@' + ES_url + ':9200/'
    ])

    if delete_index:
        try:
            es.indices.delete(index=index, ignore=400)
        except:
            pass

    try:
        es.indices.create(index)
    except:
        #do not try to recreate the index
        pass

    #if the data has a location field, set the geo_point mapping
    if geopoint:
        try:
            mapping = {
                doc_type: {
                    'properties': {
                        geopoint: {
                            'type': 'geo_point',
                            'store': 'yes'
                        }
                    }
                }
            }
            es.indices.put_mapping(index=index,
                                   doc_type=doc_type,
                                   body=mapping)
        except:
            pass

    if geoshape:
        try:
            mapping = {
                doc_type: {
                    'properties': {
                        geoshape: {
                            'type': 'geo_shape',
                            'tree': 'quadtree',
                            'precision': '1m'
                        }
                    }
                }
            }
            es.indices.put_mapping(index=index,
                                   doc_type=doc_type,
                                   body=mapping)
        except:
            pass

    actions = []
    #build the list of ElasticSearch uploads for bulk command
    index = 0
    for doc in docs:
        action = {
            '_index': index,
            '_type': doc_type,
        }
        #check if the document is a geojson document
        validation = geojson.is_valid(doc)
        if validation['valid'].lower() == 'yes':
            #add the point/shape to the document properties
            if geopoint:
                doc['properties'][geopoint] = list(
                    geojson.utils.coords(doc))[0]
            if geoshape:
                doc['properties'][geoshape] = doc['geometry']
                doc['geometry']['type'] = doc['geometry']['type'].lower(
                )  #convert type to lowercase for leaflet
            #get id from geojson properties document
            if id_field: action['_id'] = doc['properties'][id_field]
            #load the document properties into ES
            action['_source'] = doc['properties']
        else:
            #assign id for typical json document
            if id_field: action['_id'] = doc[id_field]
            action['_source'] = doc

        actions.append(action)
        index += 1

        #upload 10k records at a time
        if index == 10000:
            try:
                helpers.bulk(es, actions)
                print 'Sucessfully uploaded %s records!' % str(len(actions))
                actions = []
                index = 0
            except Exception as e:
                print '#### ERROR:s'
                pprint(e)

    #upload remaining 10k records
    try:
        helpers.bulk(es, actions)
        print 'Sucessfully uploaded %s records!' % str(len(actions))
    except Exception as e:
        print '#### ERROR:'
        pprint(e)
def upload_individual_docs_to_ES(docs,
                                 index,
                                 doc_type,
                                 id_field=False,
                                 geopoint=False,
                                 geoshape=False,
                                 delete_index=False):
    #input: list of JSON documents, an index name, document type, ID field, and name of an (OPTIONAL) geopoint field.
    #uploads each feature element to ElasticSearch
    #es = Elasticsearch(ES_url)

    es = Elasticsearch([
        'http://' + ES_username + ':' + ES_password + '@' + ES_url + ':9200/'
    ])

    if delete_index:
        try:
            es.indices.delete(index=index, ignore=400)
        except:
            pass

    try:
        es.indices.create(index)
    except:
        #do not try to recreate the index
        pass

    #if the data has a location field, set the geo_point mapping
    if geopoint:
        try:
            mapping = {
                doc_type: {
                    'properties': {
                        geopoint: {
                            'type': 'geo_point',
                            'store': 'yes'
                        }
                    }
                }
            }
            es.indices.put_mapping(index=index,
                                   doc_type=doc_type,
                                   body=mapping)
        except:
            pass

    if geoshape:
        try:
            mapping = {
                doc_type: {
                    'properties': {
                        geoshape: {
                            'type': 'geo_shape',
                            'tree': 'quadtree',
                            'precision': '1m'
                        }
                    }
                }
            }
            es.indices.put_mapping(index=index,
                                   doc_type=doc_type,
                                   body=mapping)
        except:
            pass

    #iterate through and upload individual documents
    succeeded = 0
    failed = 0
    for doc in docs:
        #check if the document is a geojson document
        validation = geojson.is_valid(doc)
        if validation['valid'].lower() == 'yes':
            #add the point/shape to the document properties
            if geopoint:
                doc['properties'][geopoint] = list(
                    geojson.utils.coords(doc))[0]
            if geoshape:
                doc['properties'][geoshape] = doc['geometry']
                doc['geometry']['type'] = doc['geometry']['type'].lower(
                )  #convert type to lowercase for leaflet
            #load the document properties into ES
            to_upload = deepcopy(doc['properties'])
        else:
            to_upload = deepcopy(doc)

        if id_field: _id = to_upload[id_field]
        #upload the document
        try:
            res = es.index(index=index,
                           doc_type=doc_type,
                           id=_id,
                           body=to_upload)
            succeeded += 1
        except Exception as e:
            print "ERROR: " % str(e)
            failed += 1

    print "Finished uploading documents. %s succeeded. %s failed." % (
        succeeded, failed)
Example #32
0
 def __init__(self, feature):
     """Validate *feature* as GeoJSON and store it on the instance.

     Fails with AssertionError (carrying the validator's message) when
     the feature is not valid GeoJSON.
     """
     # NOTE(review): assert is stripped under `python -O`; consider raising
     # ValueError for input validation instead.
     validation = geojson.is_valid(feature)
     assert validation['valid']=='yes', validation['message']
     super(Geom, self).__init__()
     self.feature = feature
    def renderMapView(self, mbtoken):
        """Build Mapbox map options (data, bounds, style) from the working
        DataFrame and render the mapView.html template.

        Side effects: writes "mapBounds", "mapData", "mapValueField",
        "kind" and "mapStyle" into self.options.
        """

        # generate a working pandas data frame using the fields we need
        df = self.getWorkingPandasDataFrame()
        keyFields = self.getKeyFields()

        # geomType can be either 0: (Multi)Point, 1: (Multi)LineString, 2: (Multi)Polygon
        geomType = 0
        bins = []

        if len(keyFields)>0:
            if len(keyFields)==1:
                geomType = -1 #unknown as of yet
            else:
                # two key fields: treat them as lon/lat, swapping if the
                # first one is actually the latitude column
                lonFieldIdx = 0
                latFieldIdx = 1
                if keyFields[0] == self.getLatField(): 
                    lonFieldIdx = 1
                    latFieldIdx = 0
                # NOTE(review): `min`/`max` shadow the builtins here.
                min = [df[keyFields[lonFieldIdx]].min(), df[keyFields[latFieldIdx]].min()]
                max = [df[keyFields[lonFieldIdx]].max(), df[keyFields[latFieldIdx]].max()]
                self.options["mapBounds"] = json.dumps([min,max], default=defaultJSONEncoding)

        valueFields = self.getValueFields()
        
        #check if we have a preserveCols
        preserveCols = self.options.get("preserveCols", None)
        preserveCols = [a for a in preserveCols.split(",") if a not in keyFields and a not in valueFields] if preserveCols is not None else []

        # Transform the data into GeoJSON for use in the Mapbox client API
        allProps = valueFields + preserveCols + self.getExtraFields()
        features = []
        for rowidx, row in df.iterrows():
            feature = {'type':'Feature',
                        'properties':{},
                        'geometry':{'type':'Point',
                                    'coordinates':[]}}
            
            if geomType == 0:
                # NOTE(review): if keyFields is empty, geomType stays 0 and
                # lonFieldIdx/latFieldIdx are unbound here -> NameError.
                feature['geometry']['coordinates'] = [row[keyFields[lonFieldIdx]], row[keyFields[latFieldIdx]]]
            else:
                # single key field: the column itself holds GeoJSON geometry text
                geomIdx = df.columns.get_loc(keyFields[0])+1
                feature['geometry'] = json.loads(row[geomIdx])
                
            for fld in allProps:
                feature['properties'][fld] = row[fld]
            features.append(feature)

        if len(features)>0:
            pygeojson = {'type':'FeatureCollection', 'features':features}
            self.options["mapData"] = json.dumps(pygeojson,default=defaultJSONEncoding)

            # Now let's figure out whether we have Line or Polygon data, if it wasn't already found to be Point
            if geomType != 1:
                if features[0]['geometry']['type'].endswith('LineString'):
                    geomType = 1
                elif features[0]['geometry']['type'].endswith('Polygon'):
                    geomType = 2
                else:
                    geomType = -1
                
            #### build up the map style

            # basic color
            paint = {}
            if geomType == 1:
                paint['line-color'] = '#ff0000'
                paint['line-width'] = 2
                if self.options.get("coloropacity"):
                    paint['line-opacity'] = float(self.options.get("coloropacity")) / 100
            elif geomType == 2:
                paint['fill-color'] = '#ff0000'
                paint['fill-opacity'] = 0.8
                if self.options.get("coloropacity"):
                    paint['fill-opacity'] = float(self.options.get("coloropacity")) / 100
            else:
                # point (or unknown) geometry -> circles
                paint['circle-radius'] = 12
                paint['circle-color'] = '#ff0000'
                paint['circle-opacity'] = 0.25
                if self.options.get("coloropacity"):
                    paint['circle-opacity'] = float(self.options.get("coloropacity")) / 100
                if (self.options.get("kind") and self.options.get("kind").find("cluster") >= 0):
                    paint['circle-opacity'] = 1.0

            if len(valueFields) > 0:
                mapValueField = valueFields[0]
                self.options["mapValueField"] = mapValueField

            if not self.options.get("kind"): 
                self.options["kind"] = "choropleth-cluster"

            # if there's a numeric value field and type is not 'simple', paint the data as a choropleth map
            if self.options.get("kind") and self.options.get("kind").find("simple") < 0 and len(valueFields) > 0:
                # get value from the "Number of Bins" slider
                numBins = int(self.options.get("numbins", 5))

                # custom index, uses "Custom Base Color" hex value in options menu.
                customBaseColor = self.options.get("custombasecolor", "#ff0000")

                # custom index, uses "Secondary Custom Base Color" hex value in options menu.
                secondaryCustomBaseColor = self.options.get("custombasecolorsecondary", "#ff0000")

                # color options; list index corresponds to bincolorsIdx below
                bincolors = []
                bincolors.append(self._getColorList('#ffffcc', '#253494', numBins)) #yellow to blue
                bincolors.append(self._getMonochromeLight('#de2d26', numBins)) # reds monochrome light
                bincolors.append(self._getMonochromeDark('#f7f7f7', numBins)) # grayscale monochrome dark
                bincolors.append(self._getColorList('#e66101', '#5e3c99', numBins)) # orange to purple
                bincolors.append(self._getColorList('#3E9E7A', '#0F091D', numBins)) # green to purple
                bincolors.append(self._getMonochromeLight(customBaseColor, numBins)) # custom monochrome light (saturation)
                bincolors.append(self._getMonochromeDark(customBaseColor, numBins)) # custom monochrome dark (value)
                bincolors.append(self._getColorList(customBaseColor, secondaryCustomBaseColor, numBins)) # custom range (value)

                bincolorsIdx = 0
                if self.options.get("colorrampname"):
                    if self.options.get("colorrampname") == "Light to Dark Red":
                        bincolorsIdx = 1
                    if self.options.get("colorrampname") == "Grayscale":
                        bincolorsIdx = 2
                    if self.options.get("colorrampname") == "Orange to Purple":
                        bincolorsIdx = 3
                    if self.options.get("colorrampname") == "Green to Purple":
                        bincolorsIdx = 4
                    if self.options.get("colorrampname") == "Custom Monochrome Light":
                        bincolorsIdx = 5
                    if self.options.get("colorrampname") == "Custom Monochrome Dark":
                        bincolorsIdx = 6
                    if self.options.get("colorrampname") == "Custom Color Range":
                        bincolorsIdx = 7

                # only use list of quantiles if it matches the number of bins
                if self.options.get("quantiles") and len(self.options.get("quantiles").split(",")) == numBins:
                    quantileFloats = [float(x) for x in self.options.get("quantiles").split(",")]
                    self.debug("Using quantileFloats: %s" % quantileFloats)
                    for i in range(numBins):
                        bins.append((df[valueFields[0]].quantile(quantileFloats[i]),bincolors[bincolorsIdx][i%len(bincolors[bincolorsIdx])]))
                else:
                    # default, equal-size bins based on numBins (if cannot find quantiles array in options)
                    # NOTE(review): numBins == 1 makes (numBins-1.0) zero -> ZeroDivisionError.
                    self.debug("Using equal-size bins based on numBins: %s" % numBins)
                    for i in range(numBins):
                        bins.append((df[valueFields[0]].quantile(float(i)/(numBins-1.0)),bincolors[bincolorsIdx][i%len(bincolors[bincolorsIdx])]))

                # attach (value, color) stops to the relevant paint property
                if geomType == 1:
                    # paint['line-opacity'] = 0.65
                    paint['line-color'] = {"property":mapValueField}
                    paint['line-color']['stops'] = []
                    for bin in bins:
                        paint['line-color']['stops'].append([bin[0], bin[1]])
                elif geomType == 2:
                    paint['fill-color'] = {"property":mapValueField}
                    paint['fill-color']['stops'] = []
                    for bin in bins:
                        paint['fill-color']['stops'].append([bin[0], bin[1]])
                else:
                    # paint['circle-opacity'] = 0.65
                    paint['circle-color'] = {"property":mapValueField}
                    paint['circle-color']['stops'] = []
                    for bin in bins: 
                        paint['circle-color']['stops'].append([bin[0], bin[1]])
                    paint['circle-radius'] = 12


            self.options["mapStyle"] = json.dumps(paint,default=defaultJSONEncoding)
            
        w = self.getPreferredOutputWidth()
        h = self.getPreferredOutputHeight()

        # handle custom layers: scan the (pixieapp or shell) namespace for
        # dicts that describe mapbox geojson sources and keep the valid ones
        userlayers = []
        l = (ShellAccess,ShellAccess) 
        papp = self.options.get("nostore_pixieapp")
        if papp is not None and ShellAccess[papp] is not None:
            l = (ShellAccess[papp], dir(ShellAccess[papp]))
        for key in [a for a in l[1] if not callable(getattr(l[0], a)) and not a.startswith("_")]:
            v = getattr(l[0],key)            
            if isinstance(v, dict) and "maptype" in v and v["maptype"].lower() == "mapbox" and "source" in v and "type" in v["source"] and v["source"]["type"] == "geojson" and "id" in v and "data" in v["source"]:
                gj = geojson.loads(json.dumps(v["source"]["data"]))
                isvalid = True
                if hasattr(geojson, "is_valid"): # then we're using old version of geojson module
                    isvalid = geojson.is_valid(gj)["valid"] == "yes"
                    self.debug("IN hasattr(geojson,is_valid). Validity is "+str(isvalid))
                else: # we're using a newer version of geojson module
                    isvalid = gj.is_valid
                if isvalid:
                    userlayers.append(v)
                else:
                    self.debug("Invalid GeoJSON: {0}".format(str(v["source"]["data"])))
        self.debug("userlayers length: "+str(len(userlayers)))
        # end handle custom layers

        uniqueid = str(uuid.uuid4())[:8]
        return self.renderTemplate("mapView.html", bins=bins, userlayers=userlayers, prefwidth=w, prefheight=h, randomid=uniqueid)
Example #34
0
 def pre_validate(self, form):
     """Raise a ValidationError when the field's data is not valid GeoJSON."""
     if not self.data:
         return True
     outcome = geojson.is_valid(self.data)
     if outcome['valid'] == 'no':
         raise validators.ValidationError(outcome['message'])
     return True
Example #35
0
    def renderMapView(self, mbtoken):
        """Build Mapbox map options (data, bounds, style) from the working
        DataFrame and render the mapView.html template.

        Side effects: writes "mapBounds", "mapData", "mapValueField",
        "kind" and "mapStyle" into self.options.
        """
        df = self.getWorkingPandasDataFrame()

        keyFields = self.getKeyFields()

        # geomType can be either 0: (Multi)Point, 1: (Multi)LineString, 2: (Multi)Polygon
        geomType = 0
        bins = []

        if len(keyFields) > 0:
            if len(keyFields) == 1:
                geomType = -1  # unknown as of yet
            else:
                # two key fields: treat them as lon/lat, swapping if the
                # first one is actually the latitude column
                lonFieldIdx = 0
                latFieldIdx = 1
                if keyFields[0] == self.getLatField():
                    lonFieldIdx = 1
                    latFieldIdx = 0
                # NOTE(review): `min`/`max` shadow the builtins here.
                min = [
                    df[keyFields[lonFieldIdx]].min(),
                    df[keyFields[latFieldIdx]].min()
                ]
                max = [
                    df[keyFields[lonFieldIdx]].max(),
                    df[keyFields[latFieldIdx]].max()
                ]
                self.options["mapBounds"] = json.dumps(
                    [min, max], default=defaultJSONEncoding)

        valueFields = self.getValueFields()

        # check if we have a preserveCols
        preserveCols = self.options.get("preserveCols", None)
        preserveCols = [
            a for a in preserveCols.split(",")
            if a not in keyFields and a not in valueFields
        ] if preserveCols is not None else []

        # pre-compute column positions for the property fields
        valueFieldIdxs = []
        allProps = valueFields + preserveCols
        for j, valueField in enumerate(allProps):
            valueFieldIdxs.append(df.columns.get_loc(valueField))

        # Transform the data into GeoJSON for use in the Mapbox client API
        features = []
        for row in df.itertuples():
            feature = {
                'type': 'Feature',
                'properties': {},
                'geometry': {
                    'type': 'Point',
                    'coordinates': []
                }
            }

            if geomType == 0:
                # +1 because itertuples() puts the index at position 0
                # NOTE(review): if keyFields is empty, geomType stays 0 and
                # lonFieldIdx/latFieldIdx are unbound here -> NameError.
                feature['geometry']['coordinates'] = [
                    row[lonFieldIdx + 1], row[latFieldIdx + 1]
                ]
            else:
                # single key field: the column itself holds GeoJSON geometry text
                geomIdx = df.columns.get_loc(keyFields[0]) + 1
                feature['geometry'] = json.loads(row[geomIdx])

            for idx, valueFieldIdx in enumerate(valueFieldIdxs):
                feature['properties'][allProps[idx]] = row[valueFieldIdx + 1]
            features.append(feature)

        if len(features) > 0:
            pygeojson = {'type': 'FeatureCollection', 'features': features}
            self.options["mapData"] = json.dumps(pygeojson,
                                                 default=defaultJSONEncoding)

            # Now let's figure out whether we have Line or Polygon data, if it wasn't already found to be Point
            if geomType != 1:
                if features[0]['geometry']['type'].endswith('LineString'):
                    geomType = 1
                elif features[0]['geometry']['type'].endswith('Polygon'):
                    geomType = 2
                else:
                    geomType = -1

            #### build up the map style

            # basic color
            paint = {}
            if geomType == 1:
                paint['line-color'] = '#ff0000'
                paint['line-width'] = 2
                if self.options.get("coloropacity"):
                    paint['line-opacity'] = float(
                        self.options.get("coloropacity")) / 100
            elif geomType == 2:
                paint['fill-color'] = '#ff0000'
                paint['fill-opacity'] = 0.8
                if self.options.get("coloropacity"):
                    paint['fill-opacity'] = float(
                        self.options.get("coloropacity")) / 100
            else:
                # point (or unknown) geometry -> circles
                paint['circle-radius'] = 12
                paint['circle-color'] = '#ff0000'
                paint['circle-opacity'] = 0.25
                if self.options.get("coloropacity"):
                    paint['circle-opacity'] = float(
                        self.options.get("coloropacity")) / 100
                if (self.options.get("kind")
                        and self.options.get("kind").find("cluster") >= 0):
                    paint['circle-opacity'] = 1.0

            if len(valueFields) > 0:
                mapValueField = valueFields[0]
                self.options["mapValueField"] = mapValueField

            if not self.options.get("kind"):
                self.options["kind"] = "choropleth-cluster"

            # if there's a numeric value field and type is not 'simple', paint the data as a choropleth map
            if self.options.get("kind") and self.options.get("kind").find(
                    "simple") < 0 and len(valueFields) > 0:
                # color options; list index corresponds to bincolorsIdx below
                bincolors = []
                bincolors.append(
                    ['#ffffcc', '#a1dab4', '#41b6c4', '#2c7fb8',
                     '#253494'])  # yellow to blue
                bincolors.append(
                    ['#fee5d9', '#fcae91', '#fb6a4a', '#de2d26',
                     '#a50f15'])  # reds
                bincolors.append(
                    ['#f7f7f7', '#cccccc', '#969696', '#636363',
                     '#252525'])  # grayscale
                bincolors.append(
                    ['#e66101', '#fdb863', '#f7f7f7', '#b2abd2',
                     '#5e3c99'])  # orange to purple (diverging values)

                bincolorsIdx = 0
                if self.options.get("colorrampname"):
                    if self.options.get(
                            "colorrampname") == "Light to Dark Red":
                        bincolorsIdx = 1
                    if self.options.get("colorrampname") == "Grayscale":
                        bincolorsIdx = 2
                    if self.options.get("colorrampname") == "Orange to Purple":
                        bincolorsIdx = 3

                # fixed 5-bin scale: min, quartiles and max of the value column
                minval = df[valueFields[0]].min()
                maxval = df[valueFields[0]].max()
                bins.append((minval, bincolors[bincolorsIdx][0]))
                bins.append((df[valueFields[0]].quantile(0.25),
                             bincolors[bincolorsIdx][1]))
                bins.append((df[valueFields[0]].quantile(0.5),
                             bincolors[bincolorsIdx][2]))
                bins.append((df[valueFields[0]].quantile(0.75),
                             bincolors[bincolorsIdx][3]))
                bins.append((maxval, bincolors[bincolorsIdx][4]))

                # attach (value, color) stops to the relevant paint property
                if geomType == 1:
                    # paint['line-opacity'] = 0.65
                    paint['line-color'] = {"property": mapValueField}
                    paint['line-color']['stops'] = []
                    for bin in bins:
                        paint['line-color']['stops'].append([bin[0], bin[1]])
                elif geomType == 2:
                    paint['fill-color'] = {"property": mapValueField}
                    paint['fill-color']['stops'] = []
                    for bin in bins:
                        paint['fill-color']['stops'].append([bin[0], bin[1]])
                else:
                    # paint['circle-opacity'] = 0.65
                    paint['circle-color'] = {"property": mapValueField}
                    paint['circle-color']['stops'] = []
                    for bin in bins:
                        paint['circle-color']['stops'].append([bin[0], bin[1]])
                    paint['circle-radius'] = 12

            self.options["mapStyle"] = json.dumps(paint,
                                                  default=defaultJSONEncoding)

        w = self.getPreferredOutputWidth()
        h = self.getPreferredOutputHeight()

        # handle custom layers: scan the (pixieapp or shell) namespace for
        # dicts that describe mapbox geojson sources and keep the valid ones
        userlayers = []
        l = (ShellAccess, ShellAccess)
        papp = self.options.get("nostore_pixieapp")
        if papp is not None and ShellAccess[papp] is not None:
            l = (ShellAccess[papp], dir(ShellAccess[papp]))
        for key in [
                a for a in l[1]
                if not callable(getattr(l[0], a)) and not a.startswith("_")
        ]:
            v = getattr(l[0], key)
            if isinstance(v, dict) and "maptype" in v and v["maptype"].lower(
            ) == "mapbox" and "source" in v and "type" in v["source"] and v[
                    "source"][
                        "type"] == "geojson" and "id" in v and "data" in v[
                            "source"]:
                gj = geojson.loads(json.dumps(v["source"]["data"]))
                isvalid = geojson.is_valid(gj)
                if isvalid["valid"] == "yes":
                    userlayers.append(v)
                    # self.debug("GOT VALID GEOJSON!!!!")
                else:
                    self.debug("Invalid GeoJSON: {0}".format(
                        str(v["source"]["data"])))
        self.debug("userlayers length: " + str(len(userlayers)))
        # end handle custom layers

        uniqueid = str(uuid.uuid4())[:8]
        return self.renderTemplate("mapView.html",
                                   bins=bins,
                                   userlayers=userlayers,
                                   prefwidth=w,
                                   prefheight=h,
                                   randomid=uniqueid)
Example #36
0
def is_geojson_valid(geojson_file):
    """Return the validator's 'valid' flag ('yes'/'no') for *geojson_file*."""
    return geojson.is_valid(geojson_file)['valid']
Example #37
0
 def is_geometry_valid(geometry):
     """Round-trip *geometry* through JSON and report whether it parses as
     valid GeoJSON (an empty validity string is treated as valid)."""
     serialized = ujson.dumps(geometry)
     verdict = geojson.is_valid(geojson.loads(serialized))
     if 'valid' not in verdict:
         return False
     return verdict['valid'] in ('yes', '')
Example #38
0
# Load the scenario definition and its related tables (semicolon-delimited).
scenario_df = pd.read_csv(os.path.join(scenario_path, 'scenario.csv'), sep=';')
components_df = pd.read_csv(os.path.join(scenario_path, 'components.csv'),
                            sep=';')
# NOTE(review): timeseries_df is not referenced in this snippet — confirm it
# is used elsewhere before removing.
timeseries_df = pd.read_csv(os.path.join(scenario_path, 'timeseries.csv'),
                            sep=';')
hubs_df = pd.read_csv(os.path.join(scenario_path, 'hubs.csv'), sep=';')

# Build one FeatureCollection out of component and hub features.
features = create_features(components_df)
features.extend(create_features(hubs_df))
feature_collection = gj.FeatureCollection(features)

# Attach the first scenario row as metadata on the collection.
scenario_dict = dict(scenario_df.iloc[0])
# json package does not serialize numpy.int64 therefore
for key, value in scenario_dict.copy().items():
    if isinstance(value, numpy.int64):
        scenario_dict[key] = int(scenario_dict[key])
# probably better to use https://pypi.python.org/pypi/geojson/#custom-classes
feature_collection['scenario'] = scenario_dict

# Abort (non-zero exit) when the assembled collection is not valid GeoJSON.
validation = gj.is_valid(feature_collection)
print("Feature collection is valid: " + validation['valid'])
if validation['valid'] != 'yes':
    print(validation['message'])
    raise Exception()

# Persist the collection with deterministic key ordering.
with open(output_filename, 'w') as output_file:
    gj.dump(feature_collection, output_file, sort_keys=True)

print("File saved in " + os.path.join(os.getcwd(), output_filename))
    def spatial_to_meta(self, extra, data_dict, metadata_dict):
        """
        Helper to get GeoJSON from extras->spatial into a metadata_dict for the given
        extra item.

        :param extra: extras entry whose 'value' holds a GeoJSON string
        :param data_dict: dataset dict (its 'name' is used in log messages)
        :param metadata_dict: target dict; receives 'boundingbox',
            'spatial_area' and 'spatial_center' when the GeoJSON is valid
        """
        # check for valid GeoJSON to prevent ckan
        # from rejecting the whole dataset
        try:
            # fix invalid Polygon-Features
            # (because - yes - people have problems reading the spec)
            fixed_spatial_source = extra['value'].replace(
                'polygon',
                'Polygon'
            )

            spatial = geojson.loads(fixed_spatial_source)
            spatial_validation = geojson.is_valid(spatial)

            if spatial_validation['valid'] == 'yes':
                # additional check: does the interior share more
                # than 1 point with exterior? --> invalid
                if len(spatial.coordinates) > 1:
                    # check all internal polygons
                    for internal_polygon in spatial.coordinates[1:]:
                        shared_coordinates_counter = 0
                        # iterate external coordinates,
                        # see if >1 matches internal polygon
                        for coord_external in spatial.coordinates[0]:
                            if coord_external in internal_polygon:
                                shared_coordinates_counter += 1
                            if shared_coordinates_counter > 1:
                                # skip spatial coordinates
                                raise ValueError('More than one shared coordinate!')

                # extract string to JSON
                metadata_dict['boundingbox'] = json.loads(
                    fixed_spatial_source
                )

                # calculate area covered by the the shape
                spatial_area = self.calculate_geojson_area(spatial)

                # area must at least be >0. We are using 1/X to rank the results.
                # (fixed: the guard previously used `< 0`, which let a zero
                # area through and broke the 1/X ranking)
                if spatial_area <= 0:
                    spatial_area = 1

                metadata_dict['spatial_area'] = spatial_area

                # calculate center of the shape
                spatial_center_x, spatial_center_y = self.calculate_geojson_center(
                    spatial
                )
                metadata_dict['spatial_center'] = {
                    "lat": spatial_center_y,
                    "lon": spatial_center_x
                }
            else:
                raise ValueError(spatial_validation['message'])
        except Exception as ex:
            # log and swallow: a broken spatial extra must not sink the dataset
            info_message = "invalid GeoJSON in extras->spatial "
            info_message += "at dataset: " + data_dict['name']
            info_message += ", Exception: "
            info_message += type(ex).__name__
            info_message += ", "
            info_message += str(ex.args)
            logger.info(info_message)
Example #40
0
        # Need to follow the right hand rule UR, UL, LL, LR, UR
        thisBox = [[mapGrid[R][C+1],
                    mapGrid[R][C],
                    mapGrid[R + 1][C],
                    mapGrid[R + 1][C + 1],
                    mapGrid[R][C + 1]]]
        mapDictionary[repr(rowValues[R])+repr(colValues[C])] = thisBox

print(len(mapDictionary))
# NOTE(review): sorted() returns a new list; this statement discards it and
# has no effect — either assign the result or delete the line.
sorted(mapDictionary.keys())
geoJsonFeatures = []

# Build one polygon Feature per grid cell that has a matching .dwg drawing.
for k, v in mapDictionary.items():
    if str(k + ".dwg") in FD.printList:
        thisPolygon = geojson.Polygon(v)
        thisFeature = geojson.Feature(geometry=thisPolygon, id=k, properties={"id": k, "link": str(directory + "\\" + k + ".dwg")})
        geoJsonFeatures.append(thisFeature)


geoJsonComplete = geojson.FeatureCollection(geoJsonFeatures)

# Report validity before writing; the file is written regardless.
validation = geojson.is_valid(geoJsonComplete)

print(validation['valid'])
print(validation['message'])

print("Writing File")
# NOTE(review): consider `with open(...) as f:` so the handle closes on error.
f = open("GeoJsonOutput", "w")
geojson.dump(geoJsonComplete, f)
f.close()
    )


@app.route("/encode", methods=["POST"])
def geojson_encode():

    try:
        g = request.form["geojson"]
        f = geojson.loads(g)
    except Exception, e:
        error = "failed to load geojson, because %s" % e
        logging.error(error)
        return jsonify(ok=0, error=error)

        # Does the input pass the smell check?
    validation = geojson.is_valid(f)

    if validation["valid"] == "no":
        error = "GeoJSON doesn't smell right: %s" % validation["message"]
        logging.error(error)
        return jsonify(ok=0, error=error)

    e = mapzen.whosonfirst.geojson.encoder(precision=None)

    try:
        fh = StringIO.StringIO()
        e.encode_feature(f, fh)
    except Exception, e:
        error = "failed to encode geojson, because %s" % e
        logging.error(error)
        return jsonify(ok=0, error=error)
Example #42
0
            feat = Feature(geometry=Point([xseq[xi], yseq[yi]]), properties={"xyid": str(xi) + " " + str(yi)})
        else:
            coordlist = rectpolyctl(xseq[xi], xseq[xi + 1], yseq[yi], yseq[yi + 1])
            feat = Feature(
                geometry=Polygon(
                    [
                        coordlist
                        # [
                        # (xseq[xi], yseq[yi]),
                        # (xseq[xi], yseq[yi+1]),
                        # (xseq[xi+1], yseq[yi+1]),
                        # (xseq[xi+1], yseq[yi]),
                        # (xseq[xi], yseq[yi])
                        # ]
                    ]
                ),
                properties={"xyid": str(xi) + " " + str(yi)},
            )
        farr.append(feat)

# Wrap the accumulated features, attaching a named CRS when one was given.
if args.crsname is not None:
    fc = FeatureCollection(farr, crs={"type": "name", "properties": {"name": args.crsname}})
else:
    fc = FeatureCollection(farr)

# Report validation problems on stdout but still print the collection.
v = is_valid(fc)
if v["valid"] != "yes":
    sys.stdout.write(str(v))

print(fc)
def upload_individual_docs_to_ES_cURL(docs,
                                      index,
                                      doc_type,
                                      id_field=False,
                                      geopoint=False,
                                      geoshape=False,
                                      delete_index=False):
    """Upload documents to Elasticsearch one at a time via cURL subprocesses.

    Python 2 code (print statements). Index creation, mapping and each
    document PUT are shelled out to ``curl`` with ``subprocess.Popen``.

    :param docs: list of JSON (optionally GeoJSON Feature) documents.
    :param index: Elasticsearch index name.
    :param doc_type: Elasticsearch document type.
    :param id_field: optional property name used as the document ``_id``;
        when falsy the 1-based position in ``docs`` is used instead.
    :param geopoint: optional field name to map and populate as a geo_point.
    :param geoshape: optional field name to map and populate as a geo_shape.
    :param delete_index: when True, delete the index before recreating it.
    """
    #es = Elasticsearch(ES_url)

    # Basic-auth credentials and host come from module-level ES_* settings
    # and are embedded directly in the URL handed to cURL.
    es = 'http://%s:%s@%s:9200' % (ES_username, ES_password, ES_url)

    if delete_index:
        try:
            p = subprocess.Popen(['curl', '-XDELETE', '%s/%s' % (es, index)])
            #subprocess.call('curl -XDELETE %s/%s' % (es,index))
            out, err = p.communicate()
            if err: print '\n' + err
        except Exception as e:
            print "Error deleting index:"
            print e
    print "\n\nPast Delete code\n\n"
    try:
        #create the index, set the replicas so uploads will not err out
        settings = {"settings": {"number_of_replicas": 1}}
        p = subprocess.Popen([
            'curl', '-XPUT',
            '%s/%s' % (es, index), '-d',
            json.dumps(settings)
        ])
        out, err = p.communicate()
        if err: print '\n' + err
        print "\n\nPast Create code\n\n"
        #build the mapping document
        mapping = {}
        mapping['properties'] = {}

        #if the data has a location field, set the geo_point mapping
        if geopoint:
            mapping['properties'][geopoint] = {
                'type': 'geo_point',
                'store': 'yes'
            }
        # geo_shape fields are indexed with a quadtree at ~1m precision
        if geoshape:
            mapping['properties'][geoshape] = {
                'type': 'geo_shape',
                'tree': 'quadtree',
                'precision': '1m'
            }

        #use cURL to put the mapping
        p = subprocess.Popen([
            'curl',
            '%s/%s/_mapping/%s' % (es, index, doc_type), '-d',
            '%s' % json.dumps(mapping)
        ],
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        if err: print '\n' + err
        print "\n\nPast Mapping code"
    except Exception as e:
        #do not try to recreate the index
        print "Error creating index:"
        print e

    #double-check that the correct number of replicas are being used
    p = subprocess.Popen([
        'curl', '-XPUT',
        '%s/%s/_settings' % (es, index), '-d', '{"number_of_replicas": 1}'
    ])
    out, err = p.communicate()
    if err: print '\n' + err

    #iterate through and upload individual documents
    succeeded = 0
    failed = 0
    idx = 0
    for doc in docs:
        idx += 1
        #check if the document looks like a valid geojson document
        validation = geojson.is_valid(doc)
        if validation['valid'].lower() == 'yes':
            #add the point/shape to the document properties
            if geopoint:
                # first coordinate pair of the geometry becomes the geo_point
                doc['properties'][geopoint] = list(
                    geojson.utils.coords(doc))[0]
            if geoshape:
                doc['properties'][geoshape] = doc['geometry']
                doc['geometry']['type'] = doc['geometry']['type'].lower(
                )  #convert type to lowercase for leaflet
            #upload only the feature's properties (GeoJSON wrapper dropped)
            to_upload = deepcopy(doc['properties'])
        else:
            to_upload = deepcopy(doc)

        if id_field:
            _id = to_upload[id_field]
        else:
            _id = idx
        #up_doc = json.dumps(to_upload)
        #print up_doc
        #upload the document
        p = subprocess.Popen([
            'curl', '-XPUT',
            '%s/%s/%s/%s' % (
                es,
                index,
                doc_type,
                _id,
            ), '-d',
            '%s' % json.dumps(to_upload)
        ],
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        # NOTE(review): success is inferred from empty cURL stderr only; the
        # HTTP response body itself is never checked — confirm acceptable.
        if not err:
            succeeded += 1
        else:
            failed += 1

    print "Finished uploading documents. %s succeeded. %s failed." % (
        succeeded, failed)
def bulk_upload_docs_to_ES_cURL(docs, **kwargs):
    """Bulk-upload documents to Elasticsearch via cURL and the _bulk API.

    Python 2 code (print statements). Batches of up to 10,000 action/source
    line pairs are written to a temporary ``bulk.txt`` file and POSTed with
    ``curl --data-binary``.

    :param docs: list of JSON (optionally GeoJSON Feature) documents.
    :keyword index: Elasticsearch index name (default ``'index_tmp'``).
    :keyword doc_type: document type (default ``'doc_tmp'``).
    :keyword id_field: optional property name used as the document ``_id``;
        when falsy the 1-based position in ``docs`` is used instead.
    :keyword geopoint: optional field name to map/populate as a geo_point.
    :keyword geoshape: optional field name to map/populate as a geo_shape.
    :keyword delete_index: when True, delete the index before recreating it.
    """
    #define keyword inputs
    index = kwargs.get('index', 'index_tmp')
    doc_type = kwargs.get('doc_type', 'doc_tmp')
    id_field = kwargs.get('id_field', False)
    geopoint = kwargs.get('geopoint', False)
    geoshape = kwargs.get('geoshape', False)
    delete_index = kwargs.get('delete_index', False)

    # Basic-auth credentials and host come from module-level ES_* settings.
    es = 'http://%s:%s@%s:9200' % (ES_username, ES_password, ES_url)

    if delete_index:
        try:
            p = subprocess.Popen(['curl', '-XDELETE', '%s/%s' % (es, index)])
            #subprocess.call('curl -XDELETE %s/%s' % (es,index))
            out, err = p.communicate()
            if err: print '\n' + err + '\n\n'
        except Exception as e:
            print "Error deleting index:"
            print e
    try:
        #create the index, set the replicas so uploads will not err out
        settings = {"settings": {"number_of_replicas": 1}}
        p = subprocess.Popen([
            'curl', '-XPUT',
            '%s/%s' % (es, index), '-d',
            json.dumps(settings)
        ])
        out, err = p.communicate()
        if err: print '\n' + err + '\n\n'

        #build the mapping document
        mapping = {}
        mapping['properties'] = {}

        #if the data has a location field, set the geo_point mapping
        if geopoint:
            mapping['properties'][geopoint] = {
                'type': 'geo_point',
                'store': 'yes'
            }
        # geo_shape fields are indexed with a quadtree at ~1m precision
        if geoshape:
            mapping['properties'][geoshape] = {
                'type': 'geo_shape',
                'tree': 'quadtree',
                'precision': '1m'
            }

        #use cURL to put the mapping
        p = subprocess.Popen([
            'curl',
            '%s/%s/_mapping/%s' % (es, index, doc_type), '-d',
            '%s' % json.dumps(mapping)
        ],
                             stderr=subprocess.PIPE)
        out, err = p.communicate()
        if err: print '\n' + err + '\n\n'

    except Exception as e:
        #do not try to recreate the index
        print "Error creating index:"
        print e

    #double-check that the correct number of replicas are being used
    p = subprocess.Popen([
        'curl', '-XPUT',
        '%s/%s/_settings' % (es, index), '-d', '{"number_of_replicas": 1}'
    ])
    out, err = p.communicate()
    if err: print '\n' + err + '\n\n'

    # Build newline-delimited bulk actions; idx is the running document
    # counter (fallback _id), bulk counts documents in the current batch.
    actions = []
    idx = 0
    bulk = 0
    for doc in docs:
        bulk += 1
        idx += 1
        #check if the document looks like a valid geojson document
        validation = geojson.is_valid(doc)
        if validation['valid'].lower() == 'yes':
            #add the point/shape to the document properties
            if geopoint:
                # first coordinate pair of the geometry becomes the geo_point
                doc['properties'][geopoint] = list(
                    geojson.utils.coords(doc))[0]
            if geoshape:
                doc['properties'][geoshape] = doc['geometry']
                doc['geometry']['type'] = doc['geometry']['type'].lower(
                )  #convert type to lowercase for leaflet
            #upload only the feature's properties (GeoJSON wrapper dropped)
            to_upload = deepcopy(doc['properties'])
        else:
            to_upload = deepcopy(doc)

        if id_field:
            _id = to_upload[id_field]
        else:
            _id = idx
        # Bulk format: one "create" action line followed by the source line.
        actions.append(
            '{ "create" : {"_id" : "%s", "_type" : "%s", "_index" : "%s"} }\n'
            % (_id, doc_type, index))
        actions.append('%s\n' % json.dumps(to_upload))

        #upload 10k records at a time
        if bulk >= 10000:
            with open('bulk.txt', 'w') as bulk_file:
                bulk_file.writelines(
                    actions
                )  #write the actions to a file to be read by the bulk cURL command
            #POST the full batch
            p = subprocess.Popen([
                'curl', '-XPOST',
                '%s/_bulk' % es, '--data-binary', '@bulk.txt'
            ],
                                 stderr=subprocess.PIPE)
            #p = subprocess.Popen(['curl','-XPOST','%s/_bulk' % es,'-d','%s' % ('').join(actions)],stderr=subprocess.PIPE)
            out, err = p.communicate()
            if err:
                print err + '\n\n'
            bulk = 0
            actions = []

    #upload the remaining records
    with open('bulk.txt', 'w') as bulk_file:
        bulk_file.writelines(
            actions
        )  #write the actions to a file to be read by the bulk cURL command
    p = subprocess.Popen(
        ['curl', '-XPOST',
         '%s/_bulk' % es, '--data-binary', '@bulk.txt'],
        stderr=subprocess.PIPE)
    #p = subprocess.Popen(['curl','-XPOST','%s/_bulk' % es,'-d','%s' % ('').join(actions)],stderr=subprocess.PIPE)
    out, err = p.communicate()
    if err:
        print err
    # Remove the temporary batch file once all uploads are done.
    os.remove('bulk.txt')
Example #45
0
 def clean(self):
     """Validate and normalize this item form's data.

     Extends the base ``clean`` with three passes: (1) require that at
     least one field is filled, (2) resolve choice ("c") properties
     through their choice map, and (3) parse/validate spatial ("p"/"l"/"m")
     properties as GeoJSON. Errors are recorded in ``self._errors`` and
     the offending keys removed/emptied in ``cleaned_data``.

     Python 2 code (``unicode``, ``u""`` literals).
     """
     cleaned_data = super(ItemForm, self).clean()
     # One truthiness flag per field: does it hold any non-blank text?
     not_false_values = [
         bool(unicode(v).strip()) for v in cleaned_data.values()
     ]
     # Pull the form-management fields out of the data before validation.
     if DELETION_FIELD_NAME in cleaned_data:
         self.delete = cleaned_data.pop(DELETION_FIELD_NAME)
     if ITEM_FIELD_NAME in cleaned_data:
         self.item_id = cleaned_data.pop(ITEM_FIELD_NAME)
     if (len(cleaned_data) <= 0 or not any(not_false_values)):
         msg = _("At least one field must be filled")
         # NOTE(review): deleting from cleaned_data while iterating items()
         # is safe under Python 2 (items() is a list) — revisit if ported.
         for field_key, field_value in cleaned_data.items():
             if not field_value.strip():
                 self._errors[field_key] = self.error_class([msg])
                 del cleaned_data[field_key]
     # Extra check for choices fields
     choices_properties = self.itemtype.properties.filter(datatype="c")
     for choice_property in choices_properties:
         choice_dict = dict(choice_property.get_choices())
         key = choice_property.key
         if key in cleaned_data:
             # Map the submitted choice key to its display value.
             value = choice_dict[cleaned_data[key]].strip()
             if value == NULL_OPTION:
                 # cleaned_data[key] = None
                 cleaned_data.pop(key)
             else:
                 cleaned_data[key] = value
         elif choice_property.required:
             msg = _("This field is required and "
                     "must have some value selected.")
             self._errors[key] = self.error_class([msg])
         else:
             cleaned_data[key] = u""
     # Extra check for spatial fields
     spatial_properties = self.itemtype.properties.filter(
         datatype__in=["p", "l", "m"])
     for spatial_property in spatial_properties:
         key = spatial_property.key
         if key in cleaned_data and cleaned_data[key]:
             try:
                 field = geojson.loads(cleaned_data[key])
             except ValueError:
                 msg = _("This field is required to "
                         "be in a valid JSON format.")
                 self._errors[key] = self.error_class([msg])
             else:
                 # Tailor the error message to the expected geometry kind:
                 # "p" point, "l" path/line, "m" area.
                 validity = geojson.is_valid(field)
                 is_not_valid = validity['valid'] == 'no'
                 if is_not_valid and spatial_property.datatype == u'p':
                     msg = _("This field is required to "
                             "be a valid GeoJSON point.")
                 elif is_not_valid and spatial_property.datatype == u'l':
                     msg = _("This field is required to "
                             "be a valid GeoJSON path.")
                 elif is_not_valid and spatial_property.datatype == u'm':
                     msg = _("This field is required to "
                             "be a valid GeoJSON area.")
                 elif is_not_valid:
                     msg = _("This field is required to "
                             "be a valid GeoJSON.")
                 if is_not_valid:
                     self._errors[key] = self.error_class([msg])
         else:
             cleaned_data[key] = u""
     return cleaned_data
def keyholemarkup2x(file, output='df'):
    """
    Convert a KML or KMZ file to one of several tabular/spatial formats.

    Takes Keyhole Markup Language Zipped (KMZ) or KML file as input. The
    output is a pandas dataframe, geopandas geodataframe, csv, geojson, or
    shapefile.

    All core functionality from:
    http://programmingadvent.blogspot.com/2013/06/kmzkml-file-parsing-with-python.html

    Parameters
    ----------
    file : str
        Path to the input .kml or .kmz file.
    output : str or None
        Defines the type of output. Valid selections include:
            - pandas dataframe - 'df', 'dataframe', or None (default)
            - csv file - 'csv'
            - geopandas geodataframe - 'gpd', 'gdf', 'geoframe', 'geodataframe'
            - geojson file - 'geojson' or 'json'
            - shapefile - 'shp', 'shapefile', or 'ESRI Shapefile'

    Returns
    -------
    result : pandas.DataFrame, geopandas.GeoDataFrame, or str
        The (geo)dataframe, or a status message for the file-producing
        output types.

    Raises
    ------
    ValueError
        If the input path is not a KML/KMZ file, an unknown output type is
        requested, or a file conversion produced invalid output.
    ImportError
        If an optional dependency required by the chosen output is missing.
    """
    # Match the extension (kml or kmz) case-insensitively, anywhere after a dot.
    ext_re = re.compile(r'(?<=\.)km+[lz]?', re.I)
    match = ext_re.search(file)
    if match is None:
        # BUGFIX: previously `r.search(file).group(0)` raised an uncaught
        # AttributeError for paths without a .kml/.kmz extension (the
        # `except IOError` never fired, leaving `extension` unbound).
        raise ValueError('Incorrect file format entered.  Please provide the '
                         'path to a valid KML or KMZ file.')
    extension = match.group(0).lower()

    kmz = None
    if extension == 'kml':
        source = file
    elif extension == 'kmz':
        # Open the archive and stream the first KML member found inside it.
        kmz = ZipFile(file, 'r')
        vmatch = np.vectorize(lambda x: bool(ext_re.search(x)))
        members = np.array(kmz.namelist())
        source = kmz.open(members[vmatch(members)][0], 'r')
    else:
        raise ValueError('Incorrect file format entered.  Please provide the '
                         'path to a valid KML or KMZ file.')

    # SAX-parse the KML; PlacemarkHandler accumulates placemark data in
    # its `mapping` attribute.
    parser = xml.sax.make_parser()
    handler = PlacemarkHandler()
    parser.setContentHandler(handler)
    parser.parse(source)

    # BUGFIX: replaced a bare `except:` around kmz.close() with an explicit
    # check for whether an archive was opened at all.
    if kmz is not None:
        kmz.close()

    df = pd.DataFrame(handler.mapping).T
    names = list(map(lambda x: x.lower(), df.columns))
    if 'description' in names:
        # Expand the HTML description blobs into additional columns.
        extradata = df.apply(PlacemarkHandler.htmlizer, axis=1)
        df = df.join(extradata)

    # BUGFIX: `output.lower()` previously raised AttributeError for
    # output=None even though None was later tested as a valid selection.
    output = 'df' if output is None else output.lower()

    if output in ('df', 'dataframe'):
        result = df

    elif output == 'csv':
        out_filename = file[:-3] + "csv"
        df.to_csv(out_filename, encoding='utf-8', sep="\t")
        result = ("Successfully converted {0} to CSV and output to"
                  " disk at {1}".format(file, out_filename))

    elif output in ('gpd', 'gdf', 'geoframe', 'geodataframe'):
        try:
            import shapely
            from shapely.geometry import Polygon, LineString, Point
        except ImportError as e:
            raise ImportError('This operation requires shapely. {0}'.format(e))
        try:
            import fiona
        except ImportError as e:
            raise ImportError('This operation requires fiona. {0}'.format(e))
        try:
            import geopandas as gpd
        except ImportError as e:
            raise ImportError('This operation requires geopandas. {0}'.format(e))

        # Derive geometries per row, then attach them to the attributes.
        geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer, axis=1))
        result = gpd.GeoDataFrame(pd.concat([df, geos], axis=1))

    elif output in ('geojson', 'json'):
        try:
            import shapely
            from shapely.geometry import Polygon, LineString, Point
        except ImportError as e:
            raise ImportError('This operation requires shapely. {0}'.format(e))
        try:
            import fiona
        except ImportError as e:
            raise ImportError('This operation requires fiona. {0}'.format(e))
        try:
            import geopandas as gpd
        except ImportError as e:
            raise ImportError('This operation requires geopandas. {0}'.format(e))
        try:
            import geojson
        except ImportError as e:
            raise ImportError('This operation requires geojson. {0}'.format(e))

        geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer, axis=1))
        gdf = gpd.GeoDataFrame(pd.concat([df, geos], axis=1))
        out_filename = file[:-3] + "geojson"
        gdf.to_file(out_filename, driver='GeoJSON')
        # BUGFIX: close the output file after validation (previously leaked
        # via geojson.load(open(...))).
        with open(out_filename) as geojson_fh:
            validation = geojson.is_valid(geojson.load(geojson_fh))['valid']
        if validation == 'yes':
            result = ("Successfully converted {0} to GeoJSON and output to"
                      " disk at {1}".format(file, out_filename))
        else:
            raise ValueError('The geojson conversion did not create a '
                             'valid geojson object. Try to clean your '
                             'data or try another file.')

    elif output in ('shapefile', 'shp', 'esri shapefile'):
        try:
            import shapely
            from shapely.geometry import Polygon, LineString, Point
        except ImportError as e:
            raise ImportError('This operation requires shapely. {0}'.format(e))
        try:
            import fiona
        except ImportError as e:
            raise ImportError('This operation requires fiona. {0}'.format(e))
        try:
            import geopandas as gpd
        except ImportError as e:
            raise ImportError('This operation requires geopandas. {0}'.format(e))
        try:
            import shapefile
        except ImportError as e:
            raise ImportError('This operation requires pyshp. {0}'.format(e))

        geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer, axis=1))
        gdf = gpd.GeoDataFrame(pd.concat([df, geos], axis=1))
        out_filename = file[:-3] + "shp"
        gdf.to_file(out_filename, driver='ESRI Shapefile')
        # Re-read the shapefile to confirm it contains at least one shape.
        # BUGFIX: an unconditional `validation = "yes"` previously overrode
        # this check, so invalid shapefiles were reported as successes;
        # stray debug print() calls were removed as well.
        sf = shapefile.Reader(out_filename)
        try:
            shape_count = len(sf.shapes())
        except Exception:
            shape_count = 0
        if shape_count > 0:
            result = ("Successfully converted {0} to Shapefile and output to"
                      " disk at {1}".format(file, out_filename))
        else:
            raise ValueError('The Shapefile conversion did not create a '
                             'valid shapefile object. Try to clean your '
                             'data or try another file.')
    else:
        raise ValueError('The conversion returned no data; check if'
                         ' you entered a correct output file type. '
                         'Valid output types are geojson, shapefile,'
                         ' csv, geodataframe, and/or pandas dataframe.')

    return result
Example #47
0
 def clean(self):
     """Validate and normalize this item form's data.

     Extends the base ``clean`` with three passes: (1) require that at
     least one field is filled, (2) resolve choice ("c") properties
     through their choice map, and (3) parse/validate spatial ("p"/"l"/"m")
     properties as GeoJSON. Errors are recorded in ``self._errors`` and
     the offending keys removed/emptied in ``cleaned_data``.

     Python 2 code (``unicode``, ``u""`` literals).
     """
     cleaned_data = super(ItemForm, self).clean()
     # One truthiness flag per field: does it hold any non-blank text?
     not_false_values = [bool(unicode(v).strip())
                         for v in cleaned_data.values()]
     # Pull the form-management fields out of the data before validation.
     if DELETION_FIELD_NAME in cleaned_data:
         self.delete = cleaned_data.pop(DELETION_FIELD_NAME)
     if ITEM_FIELD_NAME in cleaned_data:
         self.item_id = cleaned_data.pop(ITEM_FIELD_NAME)
     if (len(cleaned_data) <= 0 or
             not any(not_false_values)):
         msg = _("At least one field must be filled")
         # NOTE(review): deleting from cleaned_data while iterating items()
         # is safe under Python 2 (items() is a list) — revisit if ported.
         for field_key, field_value in cleaned_data.items():
             if not field_value.strip():
                 self._errors[field_key] = self.error_class([msg])
                 del cleaned_data[field_key]
     # Extra check for choices fields
     choices_properties = self.itemtype.properties.filter(datatype="c")
     for choice_property in choices_properties:
         choice_dict = dict(choice_property.get_choices())
         key = choice_property.key
         if key in cleaned_data:
             # Map the submitted choice key to its display value.
             value = choice_dict[cleaned_data[key]].strip()
             if value == NULL_OPTION:
                 # cleaned_data[key] = None
                 cleaned_data.pop(key)
             else:
                 cleaned_data[key] = value
         elif choice_property.required:
             msg = _("This field is required and "
                     "must have some value selected.")
             self._errors[key] = self.error_class([msg])
         else:
             cleaned_data[key] = u""
     # Extra check for spatial fields
     spatial_properties = self.itemtype.properties.filter(
         datatype__in=["p", "l", "m"])
     for spatial_property in spatial_properties:
         key = spatial_property.key
         if key in cleaned_data and cleaned_data[key]:
             try:
                 field = geojson.loads(cleaned_data[key])
             except ValueError:
                 msg = _("This field is required to "
                         "be in a valid JSON format.")
                 self._errors[key] = self.error_class([msg])
             else:
                 # Tailor the error message to the expected geometry kind:
                 # "p" point, "l" path/line, "m" area.
                 validity = geojson.is_valid(field)
                 is_not_valid = validity['valid'] == 'no'
                 if is_not_valid and spatial_property.datatype == u'p':
                     msg = _("This field is required to "
                             "be a valid GeoJSON point.")
                 elif is_not_valid and spatial_property.datatype == u'l':
                     msg = _("This field is required to "
                             "be a valid GeoJSON path.")
                 elif is_not_valid and spatial_property.datatype == u'm':
                     msg = _("This field is required to "
                             "be a valid GeoJSON area.")
                 elif is_not_valid:
                     msg = _("This field is required to "
                             "be a valid GeoJSON.")
                 if is_not_valid:
                     self._errors[key] = self.error_class([msg])
         else:
             cleaned_data[key] = u""
     return cleaned_data