Example #1
def diffme(original_file, new_file, new_points_f, deleted_points_f, id_field):
	original = geojson.load(original_file)
	new = geojson.load(new_file)

	# Load all the points into a dict
	original_layer = loadpoints(original.features, id_field)
	new_layer = loadpoints(new.features, id_field)

	# TODO: Check that CRS is identical.

	# Find all the points that were added
	original_guids = set(original_layer.keys())
	new_guids = set(new_layer.keys())

	added_guids = new_guids - original_guids
	new_points = geojson.FeatureCollection([])
	new_points.crs = new.crs
	if id_field is None:
		new_points.features = list(filter(lambda x: hash_coords(*x.geometry.coordinates) in added_guids, new.features))
	else:
		new_points.features = list(filter(lambda x: x.properties[id_field] in added_guids, new.features))
	geojson.dump(new_points, new_points_f)
	new_points_f.close()

	deleted_guids = original_guids - new_guids	
	deleted_points = geojson.FeatureCollection([])
	deleted_points.crs = original.crs
	if id_field is None:
		deleted_points.features = list(filter(lambda x: hash_coords(*x.geometry.coordinates) in deleted_guids, original.features))
	else:
		deleted_points.features = list(filter(lambda x: x.properties[id_field] in deleted_guids, original.features))
	geojson.dump(deleted_points, deleted_points_f)
	deleted_points_f.close()
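A minimal usage sketch for diffme (the file names and the 'guid' id field are placeholders; the function closes the two output handles itself):

# Hypothetical inputs: compare two snapshots of the same point layer.
original_f = open('points_old.geojson')
new_f = open('points_new.geojson')
added_f = open('added.geojson', 'w')
deleted_f = open('deleted.geojson', 'w')

# Match features on a 'guid' property; pass id_field=None to match on
# hashed coordinates via hash_coords instead.
diffme(original_f, new_f, added_f, deleted_f, id_field='guid')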
Example #2
def joinme(original_file, new_file, output_f, id_field, exclude_original_only=False, include_new_only=False, original_prefix='old_', new_prefix='new_'):
	original = geojson.load(original_file)
	new = geojson.load(new_file)

	# Load all the points into a dict
	original_layer = loadpoints(original.features, id_field)
	new_layer = loadpoints(new.features, id_field)

	output_layer = geojson.FeatureCollection([])
	output_layer.crs = original.crs

	for feature in original.features:
		if exclude_original_only and feature.properties[id_field] not in new_layer:
			# feature is missing from new file.
			continue

		output_layer.features.append(feature)

		old_properties = {}
		for k, v in feature.properties.iteritems():
			if k == id_field:
				old_properties[k] = v
			else:
				old_properties[original_prefix + k] = v

		feature.properties = old_properties

		# Add in "new" properties if they exist
		if feature.properties[id_field] in new_layer:
			for k, v in new_layer[feature.properties[id_field]].properties.iteritems():
				if k == id_field:
					continue

				feature.properties[new_prefix + k] = v

	if include_new_only:
		for feature in new.features:
			if feature.properties[id_field] not in original_layer:
				properties = {}
				for k, v in feature.properties.iteritems():
					properties[new_prefix + k] = v

				feature.properties = properties
				output_layer.features.append(feature)

	# now dump the resulting file
	geojson.dump(output_layer, output_f)
	output_f.close()
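A sketch of calling joinme for an attribute join on a shared id field (the names below are placeholders; output_f is closed by the function):

with open('old.geojson') as original_f, open('new.geojson') as new_f:
    # Prefix overlapping property names with 'old_'/'new_' and also keep
    # features that appear only in the new file.
    joinme(original_f, new_f, open('joined.geojson', 'w'),
           id_field='guid', include_new_only=True)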
Example #3
    def __init__(self, path, shpfile=False):
        self.path = normpath(abspath(path))
        self.gobj = {}
        self.code = None

        if shpfile:
            dpath = tmpdir("tmp",basename(path).split(".")[0])
            system("cd " + dpath + "; unzip " + self.path)
            shpfiles = getfiles(dpath,".shp")
            for x in shpfiles:
                p = join(dpath,x)
                geop = p[:-3] + "geojson"
                cmd = "ogr2ogr -f GeoJSON " + geop + " " + p
                print "running",cmd
                system(cmd)
                bs = basename(p).split("-")
                f = bs[3]
                if self.code is None:
                    self.code = bs[2]
                geo = geojson.load(codecs.open(geop,encoding="shift_jisx0213"))
                self.gobj.setdefault(f,[]).extend(geo["features"])
            rmall(dpath)
        else:
            self.file_open()
            self.get_code()
Example #4
def import_lakes2(self):
    lakes = geojson.load(open(
        'src/iwlearn.project/iwlearn/project/dataimport/missing_lakes.json',
        'r'))
    parent = self.portal_url.getPortalObject()['iw-projects']['basins']['lakes']
    for lake in lakes['features']:
        if lake['properties']['TYPE']=='Lake':
            new_obj_id = idn.normalize(lake['properties']['GLWD_ID'])
            print new_obj_id
            self.portal_types.constructContent('Basin', parent, new_obj_id)
            new_obj=parent[new_obj_id]
            if lake['properties']['LAKE_NAME']:
                new_obj.setTitle(lake['properties']['LAKE_NAME'])
            new_obj.setDescription("Area: %s; Perimeter: %s; Countries: %s" % (
                            lake['properties']['AREA_SKM'],
                            lake['properties']['PERIM_KM'],
                            lake['properties']['COUNTRY'],
                            ))
            new_obj.setBasin_type('Lake')
            color='2c80d3'
            style = IGeoCustomFeatureStyle(new_obj)
            style.geostyles.data['use_custom_styles']=True
            style.geostyles.data['polygoncolor']=color
            style.geostyles.update(style.geostyles)
            geo = IGeoManager(new_obj)
            q = asShape(lake['geometry']).simplify(0.1).__geo_interface__
            geo.setCoordinates(q['type'], q['coordinates'])
Example #5
    def read_geojson(self, filename, quiet=False):
        if not quiet:
            log.info("Reading PNwk %s", filename)
        with open(filename + self.FILE_EXTENSION, "r") as f:
            # needs geojson>1.0.7, for non-ascii property keys
            geo = geojson.load(f)
        self._parse_geojson(geo)
Example #6
def osmme(input_f, output_f, name_field, all_tags=False):
	output_dom = ET.Element('osm', dict(version='0.5', generator='geojson2osm'))

	layer = geojson.load(input_f, use_decimal=True)
	# Note: does not check CRS, assumes WGS84!

	for gid, point in enumerate(layer.features):
		# Only supports points!
		if point.geometry.type != 'Point':
			continue

		node = ET.SubElement(output_dom, 'node', dict(
			id=unicode(-gid),
			visible='true',
			lat=unicode(point.geometry.coordinates[1]),
			lon=unicode(point.geometry.coordinates[0])
		))

		ET.SubElement(node, 'tag', dict(k='name', v=unicode(point.properties[name_field])))
		ET.SubElement(node, 'tag', dict(k='note', v=unicode(point.properties[name_field])))

		# so that this doesn't show up in the output again.
		del point.properties[name_field]

		if all_tags:
			# write out other properties as custom tags
			for k, v in point.properties.iteritems():
				ET.SubElement(node, 'tag', dict(k=u'custom:' + k, v=unicode(v)))

	# write XML
	output_f.write("<?xml version='1.0' encoding='UTF-8'?>\n")
	output_f.write(ET.tostring(output_dom, encoding='utf-8'))
	output_f.write('\n')
	output_f.close()
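A usage sketch (placeholder file names; per the note in the code, the input is assumed to be WGS84 and only Point features are converted):

with open('pois.geojson') as input_f:
    # name_field becomes the OSM 'name'/'note' tags; all_tags=True copies
    # the remaining properties as 'custom:*' tags.
    osmme(input_f, open('pois.osm', 'w'), name_field='name', all_tags=True)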
Example #7
    def importer(self, config_entity, db_entity, **kwargs):
        """
        Creates various GeojsonFeature classes by importing geojson and saving it to the database via a dynamic subclass of GeojsonFeature.

        :param schema: The optional schema to use for the dynamic subclass's meta db_table attribute, which will allow the class's table to be saved in the specified schema. Defaults to public.
        :param data: Optional python dict data to use instead of loading from the db_entity.url.
        :return: a list of lists. Each list is a list of features of a distinct subclass of GeoJsonFeature that is created dynamically. To persist these features, you must first create the subclass's table in the database using create_table_for_dynamic_class(). You should also register the table as a DbEntity.
        """
        if self.seed_data:
            data = geojson.loads(jsonify(self.seed_data), object_hook=geojson.GeoJSON.to_instance)
        else:
            fp = open(db_entity.url.replace('file://', ''))
            data = geojson.load(fp, object_hook=geojson.GeoJSON.to_instance)
        feature_class_creator = FeatureClassCreator(config_entity, db_entity)
        # find all unique properties
        feature_class_configuration = feature_class_creator.feature_class_configuration_from_geojson_introspection(data)
        feature_class_creator.update_db_entity(feature_class_configuration)
        feature_class = feature_class_creator.dynamic_model_class(base_only=True)
        # Create our base table. Normally this is done by the import, but we're just importing into memory
        create_tables_for_dynamic_classes(feature_class)
        # Now write each feature to our newly created table
        for feature in map(lambda feature: self.instantiate_sub_class(feature_class, feature), data.features):
            feature.save()
        # Create the rel table too
        rel_feature_class = feature_class_creator.dynamic_model_class()
        create_tables_for_dynamic_classes(rel_feature_class)

        # PostGIS 2 handles this for us now
        # if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
        #     # Tell PostGIS about the new geometry column or the table
        #     sync_geometry_columns(db_entity.schema, db_entity.table)

        # Create association classes and tables and populate them with data
        create_and_populate_relations(config_entity, db_entity)
Example #8
def split(input_file, file_1, file_2, no_in_first_file):
    '''
    Split a geojson into two separate files.

       Args:
           input_file (str): Input filename.
           file_1 (str): Output file name 1.
           file_2 (str): Output file name 2.
           no_in_first_file (int): Number of features from input_file to write
               to file_1; the remainder go to file_2.
    '''

    # get feature collection
    with open(input_file) as f:
        feat_collection = geojson.load(f)

    features = feat_collection['features']
    feat_collection_1 = geojson.FeatureCollection(features[0:no_in_first_file])
    feat_collection_2 = geojson.FeatureCollection(features[no_in_first_file:])

    with open(file_1, 'w') as f:
        geojson.dump(feat_collection_1, f)

    with open(file_2, 'w') as f:
        geojson.dump(feat_collection_2, f)
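For example, sending the first 100 features to one file and the rest to another (file names are placeholders):

split('all_features.geojson', 'train.geojson', 'test.geojson',
      no_in_first_file=100)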
Example #9
    def store_file(self, path, **kwargs):

        abs_path = os.path.abspath(path)

        fh = open(abs_path, 'r')
        feature = geojson.load(fh)

        props = feature.get('properties', {})
        id = props.get('wof:id', None)

        if id is None:
            raise Exception, "E_INSUFFICIENT_WHOSONFIRST"

        rel_path = utils.id2relpath(id)

        if kwargs.get("prefix", None):
            rel_path = os.path.join(kwargs['prefix'], rel_path)

        logging.debug("copy %s to %s:%s" % (abs_path, self.bucket, rel_path))

        k = Key(self.bucket)
        k.key = rel_path

        k.set_contents_from_filename(abs_path)

        acl = kwargs.get('acl', 'public-read')
        k.set_acl(acl)

        # print k.generate_url(expires_in=0, query_auth=False)
        return True
Example #10
def filter_by_property(input_file, output_file, property_name, values):
    '''
    Create a file containing only features with specified property value(s) from
        input_file.

    INPUT   input_file (str): File name.
            output_file (str): Output file name.
            property_name (str): Name of the feature property to filter by.
            values (list): Value(s) a feature may have for property_name if it is to be
                included in output_file.
    '''

    filtered_feats = []
    if not output_file.endswith('.geojson'):
        output_file += '.geojson'

    # Load feature list
    with open(input_file) as f:
        feature_collection = geojson.load(f)

    # Filter feats by property_name
    for feat in feature_collection['features']:
        if feat['properties'][property_name] in values:
            filtered_feats.append(feat)

    feature_collection['features'] = filtered_feats

    # Save filtered file
    with open(output_file, 'w') as f:
        geojson.dump(feature_collection, f)
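For instance, keeping only features whose image_id matches one of two catalog ids (the values shown are placeholders):

filter_by_property('chips.geojson', 'filtered',  # '.geojson' is appended
                   property_name='image_id',
                   values=['1040010014800C00', '1040010014800C01'])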
Example #11
    def get(self, request, geocodigo, disease):
        """

        :param kwargs:
        :return:

        """
        geocode = geocodigo

        # load shapefile
        for path in (settings.STATIC_ROOT, settings.STATICFILES_DIRS[0]):
            if not os.path.isdir(path):
                continue
            geojson_path = os.path.join(path, 'geojson', '%s.json' % geocode)

        hex_color = get_last_color_alert(geocode, disease, color_type='hex')

        with open(geojson_path) as f:
            geojson_data = geojson.load(f)

        geojson_data['features'][0]['properties']['fill'] = hex_color
        result = geojson.dumps(geojson_data)

        response = HttpResponse(
            result, content_type='application/force-download'
        )

        response['Content-Disposition'] = (
            'attachment; filename=%s.json' % geocode
        )

        return response
Example #12
def uniform_chip_generator(input_file, batch_size=32, **kwargs):
    '''
    Generate batches of uniformly-sized pixel intensity arrays from image strips using a
        geojson file. Output will be in the same format as get_data_from_polygon_list.

    INPUT   input_file (str): File name
            batch_size (int): Number of chips to yield per iteration

            kwargs:
            -------
            See get_data_from_polygon_list docstring for other input params. Do not use
                the num_chips arg.

    OUTPUT  chips (array): Uniformly sized chips with the following dimensions:
                (num_chips, num_channels, max_side_dim, max_side_dim)
            ids (list): Feature ids corresponding to chips.
            labels (array): One-hot encoded labels for chips with the following
                dimensions: (num_chips, num_classes)
    '''

    # Load features from input_file
    with open(input_file) as f:
        feature_collection = geojson.load(f)['features']

    # Produce batches using get_data_from_polygon_list
    for batch_ix in range(0, len(feature_collection), batch_size):
        this_batch = feature_collection[batch_ix: batch_ix + batch_size]

        yield get_data_from_polygon_list(this_batch, **kwargs)
Example #13
def crawl(source, **kwargs):

    validate = kwargs.get('validate', False)
    inflate = kwargs.get('inflate', False)

    for (root, dirs, files) in os.walk(source):

        for f in files:
            path = os.path.join(root, f)
            path = os.path.abspath(path)

            ret = path

            if not path.endswith('geojson'):
                continue

            if validate or inflate:

                try:
                    fh = open(path, 'r')
                    data = geojson.load(fh)

                except Exception, e:
                    logging.error("failed to load %s, because %s" % (path, e))
                    continue

                if not inflate:
                    ret = path
                else:
                    ret = data

            yield ret
Example #14
def get_uniform_chips(input_file, num_chips=None, **kwargs):
    '''
    Get uniformly-sized pixel intensity arrays from image strips using a geojson file.
        Output will be in the same format as get_data_from_polygon_list.

    INPUT   input_file (str): File name. This file should be filtered for polygon size
            num_chips (int): Maximum number of chips to return. If None will return all
                chips in input_file. Defaults to None

            kwargs:
            -------
            See get_data_from_polygon_list docstring for other input params

    OUTPUT  chips (array): Uniformly sized chips with the following dimensions:
                (num_chips, num_channels, max_side_dim, max_side_dim)
            ids (list): Feature ids corresponding to chips.
            labels (array): One-hot encoded labels for chips with the following
                dimensions: (num_chips, num_classes)
    '''

    # Load features from input_file
    with open(input_file) as f:
        feature_collection = geojson.load(f)['features']

    if num_chips:
        feature_collection = feature_collection[: num_chips]

    return get_data_from_polygon_list(feature_collection, num_chips=num_chips, **kwargs)
Example #15
    def get_proportion(self, property_name, property):
        '''
        Helper function to get the proportion of polygons in self.shapefile that
            have a given property value.

        INPUT   property_name (string): name of the property to search for exactly as it
                    appears in the shapefile properties (ex: image_id)
                property (string): the property value to count the proportion of in the
                    shapefile (ex: '1040010014800C00')
        OUTPUT  proportion (float): proportion of polygons that have the property value of interest
        '''
        total, prop = 0,0

        # open shapefile, get polygons
        with open(self.shapefile) as f:
            data = geojson.load(f)['features']

        # loop through features, find property count
        for polygon in data:
            total += 1

            try:
                if str(polygon['properties'][property_name]) == property:
                    prop += 1
            except KeyError:
                # Feature lacks the property; don't count it as a match.
                continue

        return float(prop) / total
Example #16
def import_oceans(self):
    oceans = geojson.load(open(
        'src/iwlearn.project/iwlearn/project/dataimport/oceans.json',
        'r'))
    parent = self.portal_url.getPortalObject()['iw-projects']['basins']['oceans']
    for ocean in oceans['features']:
        mpoly = []
        geom = asShape(ocean['geometry'])
        for g in geom.geoms:
            if g.area > 5:
                mpoly.append(g)
        mp = MultiPolygon(mpoly).simplify(0.2)
        q = mp.__geo_interface__
        new_obj_id = idn.normalize(ocean['properties']['NAME'])
        print new_obj_id
        self.portal_types.constructContent('Basin', parent, new_obj_id)
        new_obj=parent[new_obj_id]
        new_obj.setTitle(ocean['properties']['NAME'])
        new_obj.setBasin_type('Ocean')
        color='aa22ff'
        style = IGeoCustomFeatureStyle(new_obj)
        style.geostyles.data['use_custom_styles']=True
        style.geostyles.data['polygoncolor']=color
        style.geostyles.update(style.geostyles)
        geo = IGeoManager(new_obj)
        geo.setCoordinates(q['type'], q['coordinates'])
Example #17
def get_file(request, waitLaneStrId, which, fileType):
	waitLaneId = str(waitLaneStrId)
	attributeName = which+'File'
	try:
		waitLane = WaitLane.objects.get(id=waitLaneId)
		fileField = getattr(waitLane, attributeName)
		if fileType == "wkt":
			wrapper = None
			response = None
			with open(fileField.path, 'r') as fileObject:
				geoJsonFile = File(fileObject)
				geoJsonObject = geojson.load(geoJsonFile)
				temp = tempfile.TemporaryFile()
				for feature in geoJsonObject['features']:
					s = shape(feature['geometry'])
					wktGeometry = dump(s, temp)
				#send content of temporary file
				wrapper = FileWrapper(temp)
				response = HttpResponse(wrapper, content_type='application/wkt')
				#response['Content-Disposition'] = 'attachment; filename=something.wkt'
				response['Content-Length'] = temp.tell()
				temp.seek(0)
				#pdb.set_trace()
			return response
		else:
			wrapper = FileWrapper(fileField.file)
			response = HttpResponse(wrapper, content_type='application/json')
			response['Content-Length'] = os.path.getsize(fileField.path)
			#pdb.set_trace()
			return response
	except ObjectDoesNotExist:
		#FIXME should send json instead
		return HttpResponseNotFound()
Example #18
def crawl(source, **kwargs):

    validate = kwargs.get('validate', False)
    inflate = kwargs.get('inflate', False)

    ensure = kwargs.get('ensure_placetype', [])
    skip = kwargs.get('skip_placetype', [])

    for (root, dirs, files) in os.walk(source):

        for f in files:
            path = os.path.join(root, f)
            path = os.path.abspath(path)

            ret = path

            if not path.endswith('geojson'):
                continue

            if path.endswith('-alt.geojson'):
                continue

            if validate or inflate or len(skip) or len(ensure):

                try:
                    fh = open(path, 'r')
                    data = geojson.load(fh)

                except Exception, e:
                    logging.error("failed to load %s, because %s" % (path, e))
                    continue

                if len(ensure):

                    props = data['properties']
                    pt = props.get('wof:placetype', None)

                    if not pt in ensure:
                        logging.debug("skipping %s because it is a %s" % (path, pt))
                        continue

                elif len(skip):

                    props = data['properties']
                    pt = props.get('wof:placetype', None)

                    if pt in skip:
                        logging.debug("skipping %s because it is a %s" % (path, pt))
                        continue

                    if not pt:
                        logging.error("can not determine placetype for %s" % path)

                if not inflate:
                    ret = path
                else:
                    ret = data

            yield ret
Example #19
def dump_file(path, **kwargs):

    try:
        fh = open(path, 'r')
        feature = geojson.load(fh)
    except Exception, e:
        logging.error("failed to load %s, because %s" % (path, e))
        return None
Example #20
def geojson2geobuf(layer):
    """Geojson to Geobuf conversion."""
    with open('{layer}.geojson'.format(layer=layer), 'r') as json:
        with open('{layer}.pbf'.format(layer=layer), 'wb') as buf:
            data = geojson.load(json)
            pbf = geobuf.encode(data)
            buf.write(pbf)
    return 'Successfully wrote geobuf.'
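A sketch of converting a layer, assuming geobuf and geojson are importable and a roads.geojson file exists in the working directory:

# Reads 'roads.geojson' and writes 'roads.pbf' next to it.
print(geojson2geobuf('roads'))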
Example #21
    def publish_file(self, path):

        path = os.path.abspath(path)
        fh = open(path, 'r')
        feature = geojson.load(fh)
        fh.close()

        return self.publish_feature(feature)
Example #22
    def load_file(self, f):

        try:
            fh = open(f, 'r')
            return geojson.load(fh)
        except Exception, e:
            logging.error("failed to open %s, because %s" % (f, e))
            raise Exception, e
Example #23
def filter_polygon_size(shapefile, output_file, min_polygon_hw=30, max_polygon_hw=224):
    '''
    Creates a geojson file containing only acceptable side dimensions for polygons.
    INPUT   (1) string 'shapefile': name of shapefile with original samples
            (2) string 'output_file': name of file in which to save selected polygons.
            This should end in '.geojson'
            (3) int 'min_polygon_hw': minimum acceptable side length (in pixels) for
            given polygon
            (4) int 'max_polygon_hw': maximum acceptable side length (in pixels) for
            given polygon
    OUTPUT  (1) a geojson file (output_file.geojson) containing only polygons of
            acceptable side dimensions
    '''
    # load polygons
    with open(shapefile) as f:
        data = geojson.load(f)
    total = float(len(data['features']))

    # find indicies of acceptable polygons
    ix_ok, ix = [], 0
    print 'Extracting image ids...'
    img_ids = find_unique_values(shapefile, property_name='image_id')

    print 'Filtering polygons...'
    for img_id in img_ids:
        print '... for image {}'.format(img_id)
        img = geoio.GeoImage(img_id + '.tif')

        # cycle thru polygons
        for chip, properties in img.iter_vector(vector=shapefile,
                                                properties=True,
                                                filter=[{'image_id': img_id}],
                                                mask=True):
            # Guard: np.shape(chip) can't be unpacked when chip is None
            chan, h, w = (0, 0, 0) if chip is None else np.shape(chip)
            if chip is None or min(h, w) < min_polygon_hw or max(h, w) > max_polygon_hw:
                ix += 1
                # add percent complete to stdout
                sys.stdout.write('\r%{0:.2f}'.format(100 * ix / total) + ' ' * 20)
                sys.stdout.flush()
                continue

            ix_ok.append(ix)
            ix += 1
            # add percent complete to stdout
            sys.stdout.write('\r%{0:.2f}'.format(100 * ix / total) + ' ' * 20)
            sys.stdout.flush()

    print 'Saving...'
    ok_polygons = [data['features'][i] for i in ix_ok]
    np.random.shuffle(ok_polygons)
    filtrate = {data.keys()[0]: data.values()[0],
                data.keys()[1]: ok_polygons}

    # save new geojson
    with open(output_file, 'wb') as f:
        geojson.dump(filtrate, f)

    print 'Saved {} polygons to {}'.format(str(len(ok_polygons)), output_file)
Example #24
def mergeme(inputs, output, no_dupe_handling, id_field, id_property):
	if no_dupe_handling:
		assert (not id_field) and (not id_property)
	else:
		assert (not id_field) or (not id_property)
		assert not (id_field and id_property)

	known_ids = set()
	crs = None
	output_layer = geojson.FeatureCollection([])

	# Flatten the list of inputs
	inputs = list(itertools.chain.from_iterable(inputs))
	
	for i, layer_f in enumerate(inputs):
		print "Processing input file #%d..." % i
		layer = geojson.load(layer_f)
	# FIXME: this requires the CRS to be specified at the "layer" level.  GeoJSON allows it to be omitted, and this should include a check to ensure it is omitted for all files in that case.

		# Some broken GeoJSON files do weird things...
		if isinstance(layer.crs, list):
			layer.crs = layer.crs[0]
		if isinstance(layer.crs.properties, list):
			newprops = {}
			for x in range(len(layer.crs.properties)/2):
				newprops[layer.crs.properties[x*2]] = layer.crs.properties[(x*2) + 1]
			layer.crs.properties = newprops

		# We don't care about per-geometry CRS, these can mingle
		if i == 0:
			# first file sets the CRS!
			crs = layer.crs.properties['name']
			output_layer.crs = layer.crs
		else:
			assert layer.crs.properties['name'] == crs, ('CRS of files must match.  File has CRS %r, expected %r' % (layer.crs.properties['name'], crs))
		
		# We have a matching CRS, start merging geometries.
		for geometry in layer.features:
			if not no_dupe_handling:
				if id_property and geometry.properties[id_property] in known_ids:
					# Geometry is already present, skip
					continue
				elif id_field and geometry.id in known_ids:
					# Geometry is already present, skip
					continue
			
			# Geometry is new, add to list
			output_layer.features.append(geometry)
			if id_property:
				known_ids.add(geometry.properties[id_property])
			elif id_field:
				known_ids.add(geometry.id)

		print "OK! (%d total geometries written, %d read from this file)" % (len(output_layer.features), len(layer.features))
	
	# all files complete
	geojson.dump(output_layer, output)
	print "Files merged!"
Example #25
def load_allegheny_munis(allegheny_munis_file):
    munis = geojson.load(open(allegheny_munis_file))['features']
    for muni in munis:
        muni['shape'] = shapely.geometry.asShape(muni['geometry'])
    global allegheny_outline
    allegheny_outline = munis[0]['shape']
    for muni in munis:
        allegheny_outline = allegheny_outline.union(muni['shape'])
    return munis
Example #26
def load(root, id, **kwargs):

    path = id2abspath(root, id, **kwargs)

    if not os.path.exists(path):
        raise Exception, "%s does not exist" % path

    fh = open(path, 'r')
    return geojson.load(fh)
Example #27
    def _get_shapes(self):
        scale = str(self.scale).zfill(2)
        filename = NESTED_FILE.format(year=self.year, scale=scale)
        url = ZIP_URL.format(year=self.year, scale=scale)
        r = requests.get(url, verify=True)
        r.raise_for_status()
        with ZipFile(BytesIO(r.content)) as zipfile:
            with zipfile.open(filename) as f:
                shapes = geojson.load(f)
        return shapes
Example #28
def open_json(input_file):
    """otevre vstupni geojson
        vstup: input_file: soubor json
        výstup: načtnená data vstupního souboru"""
    with open(input_file, encoding='utf-8') as f:
        gj = geojson.load(f)

    #adress = gj['features'][0]
    #print(adress)
    return (gj)
Example #29
def read_geojson(path,*key_path):
    """ read geojson file
    path<str>: path to geojson file
    *key_path: keys to go to in object
    """    
    with open(path,'rb') as file:
        obj=geojson.load(file)
    for k in key_path:
        obj=obj[k]
    return obj
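For example, drilling straight down to the first feature's geometry (placeholder file name):

geom = read_geojson('parcels.geojson', 'features', 0, 'geometry')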
Example #30
def get_geojson():
    global app
    geo = getattr(flask.g, '_goesbrowse_geojson', None)
    if geo is None:
        geo = {}
        for k, v in GEOJSON_FILES.items():
            with app.open_resource(v, mode='r') as f:
                geo[k] = geojson.load(f)
        flask.g._goesbrowse_geojson = geo
    return geo
Example #31
    def deserialize(filename):
        """Loads results of a search from a geojson file.

        :param filename: A filename containing a search result encoded as a geojson
        :type filename: str
        :returns: The search results encoded in `filename`
        :rtype: :class:`~eodag.api.search_result.SearchResult`
        """
        with open(filename, "r") as fh:
            return SearchResult.from_geojson(geojson.load(fh))
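Usage is a single call on the class (presumably a static method; the file name is a placeholder):

results = SearchResult.deserialize('search_results.geojson')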
Example #32
def load_nghds():
    neighborhoods = geojson.load(open('static/neighborhoods.json'))
    nghds = neighborhoods['features']
    for nghd in nghds:
        nghd['shape'] = shapely.geometry.asShape(nghd.geometry)
    global pittsburgh_outline
    pittsburgh_outline = nghds[0]['shape']
    for nghd in nghds:
        pittsburgh_outline = pittsburgh_outline.union(nghd['shape'])
    return nghds
Example #33
def load_nghds(json_file="neighborhoods.json"):
    neighborhoods = geojson.load(open(json_file))
    nghds = neighborhoods['features']
    for nghd in nghds:
        nghd['shape'] = shapely.geometry.asShape(nghd.geometry)
    global pittsburgh_outline # TODO ugh globals
    pittsburgh_outline = nghds[0]['shape']
    for nghd in nghds:
        pittsburgh_outline = pittsburgh_outline.union(nghd['shape'])
    return nghds
Example #34
def bbox(inputs, output):

	crs = None
	output_layer = geojson.FeatureCollection([])

	# Flatten the list of inputs
	inputs = list(itertools.chain.from_iterable(inputs))
	
	for i, layer_f in enumerate(inputs):
		print "Processing input file #%d..." % i
		layer = geojson.load(layer_f)
	# FIXME: this requires the CRS to be specified at the "layer" level.  GeoJSON allows it to be omitted, and this should include a check to ensure it is omitted for all files in that case.

		# Some broken GeoJSON files do weird things...
		if isinstance(layer.crs, list):
			layer.crs = layer.crs[0]
		if isinstance(layer.crs.properties, list):
			newprops = {}
			for x in range(len(layer.crs.properties)/2):
				newprops[layer.crs.properties[x*2]] = layer.crs.properties[(x*2) + 1]
			layer.crs.properties = newprops

		# We don't care about per-geometry CRS, these can mingle
		if i == 0:
			# first file sets the CRS!
			crs = layer.crs.properties['name']
			output_layer.crs = layer.crs
		else:
			assert layer.crs.properties['name'] == crs, ('CRS of files must match.  File has CRS %r, expected %r' % (layer.crs.properties['name'], crs))

		# We have a matching CRS, start processing the file
		assert len(layer.bbox) == 4, 'File must have a bounding box'

		output_layer.features.append(
			geojson.Feature(
				geometry=geojson.Polygon(
					coordinates=[[
						[layer.bbox[0], layer.bbox[1]],
						[layer.bbox[0], layer.bbox[3]],
						[layer.bbox[2], layer.bbox[3]],
						[layer.bbox[2], layer.bbox[1]],
						[layer.bbox[0], layer.bbox[1]]
					]]
				),
				properties=dict(
					id=i,
					filename=layer_f.name
				),
				id=i
			)
		)

	# all files complete
	geojson.dump(output_layer, output)
	print "Bounding boxes drawn!"
Example #35
    def _parse_geojson(self, json_dir):
        """Iterate through each label file in a geojson dir and extract the
        feature locations

        Args
        ----
        json_dir (self) : the directory of the json files

        Returns
        -------
        dic of form {label: [ [feature coor] ]
        """

        features = {}

        # convert form dstl coors to pixels
        def coor_to_px(coor):
            return (coor[0] / self._xmax * self._W,
                    coor[1] / self._ymin * self._H)

        # track number of errors
        e_count = 0

        # For each feature type (separate file) in the image's json directory,
        # store the feature locations
        for fname in glob.glob(json_dir + "/*.geojson"):

            # Load the file with geojson, this is the same as json.load()
            with open(fname) as f:
                raw_json = geojson.load(f)

            # parse the mask (geojson polygon) for each object in a feature
            for feature in raw_json['features']:
                try:
                    dstl_label = feature['properties']['LABEL']
                except KeyError:
                    self._FLAGS['errors']['KeyError'] = e_count
                else:
                    label = MAP_TO_LOCAL_LABELS[dstl_label]
                    if label:
                        coors = feature['geometry']['coordinates'][0]
                        try:
                            features.setdefault(label, []).append(
                                list(map(coor_to_px, coors)))
                        except TypeError:
                            e_count += 1
                            self._FLAGS['errors']['TypeError'] = e_count

        # NOTE: a for-else here would always fire (the loop has no break),
        # so check directly whether any feature files were found.
        if not features:
            print("No feature files found in {}".format(json_dir))

        return features
Example #36
    def invoke(self):

        # Get inputs
        lat = self.get_input_string_port('lat')
        lon = self.get_input_string_port('lon')
        dtg = self.get_input_string_port('dtg')
        meta = self.get_input_string_port('meta')
        minsize = self.get_input_string_port('minsize', default='1000.0')
        smooth = self.get_input_string_port('smooth', default='1.0')
        img = self.get_input_data_port('image')

        vector_dir = self.get_output_data_port('vector')
        os.makedirs(vector_dir)

        raster_dir = self.get_output_data_port('raster')
        os.makedirs(raster_dir)

        tide = tide_coordination(float(lat), float(lon), dtg)

        result = {'metadata': json.loads(meta), 'tides': tide}

        # with open(os.path.join(output_dir, 'tides.json'), 'w') as f:
        #     json.dump(result, f)

        img1 = glob2.glob('%s/**/*_B1.TIF' % img)
        img2 = glob2.glob('%s/**/*_B5.TIF' % img)

        os.system(
            'bfalg-ndwi -i %s -i %s --outdir %s --basename bf --minsize %s --smooth %s'
            % (img1[0], img2[0], vector_dir, minsize, smooth))

        os.rename(os.path.join(vector_dir, 'bf_ndwi.tif'),
                  os.path.join(raster_dir, 'bf_ndwi.tif'))

        # Okay, so we need to open the output bf.geojson here, and iterate
        # through the features, added result to properties for each and every
        # one.

        with open(os.path.join(vector_dir, 'bf.geojson')) as f:
            data = geojson.load(f)

        feature_collection = data['features']
        valid_feats = []

        for feat in feature_collection:
            feat['properties'] = result
            valid_feats.append(feat)

        data['features'] = valid_feats

        with open(os.path.join(vector_dir, 'bf.geojson'), 'wb') as f:
            geojson.dump(data, f)

        self.reason = 'Successfully created shoreline GeoJSON'
Example #37
    def __init__(self, geojson_filename, velocity_meters_per_s):
        """

        :param geojson_file: A geojson file that contains a Feature with a LineString
        :param velocity_meters_per_s: Object velocity in meters/sec
        """
        with open(geojson_filename) as geojson_file:
            geo_feature = geojson.load(geojson_file)

        self.geojson_path = geojson.utils.coords(geo_feature)
        self.velocity_meters_per_s = float(velocity_meters_per_s)
Example #38
def load_neighborhoods(filename):
    """Load a geojson file. Also puts the value 'name' in the properties.
    So for anything you get back from here you can say nghd['properties']['name'].
    """
    nghds = geojson.load(open(filename))
    for nghd in nghds['features']:
        if 'neighborho' in nghd['properties']:
            nghd['properties']['name'] = nghd['properties']['neighborho']
        if 'hood' in nghd['properties']:
            nghd['properties']['name'] = nghd['properties']['hood']
    return nghds['features']
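A sketch of the normalized access pattern the docstring promises (placeholder file name; features whose properties carry neither key will still lack 'name'):

for nghd in load_neighborhoods('neighborhoods.json'):
    print(nghd['properties']['name'])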
Example #39
def asserts(input_dir: Path, output_dir: Path, quicklook_dir: Path, logger):
    # Print out bbox of one tile
    geojson_path = output_dir / "data.json"

    with open(str(geojson_path)) as f:
        feature_collection = geojson.load(f)

    out_path = feature_collection.features[0]["properties"]["up42.data_path"]
    logger.info(out_path)

    logger.info("e2e test successful")
Example #40
def generate_intersection_points(street_network_file, street_edge_name_file,
                                 intersection_points_file):
    """
    Find all the points that are intersections between two DIFFERENT streets.
    This is what we classify as a street intersection for calculating proximity.
    """
    with open(street_network_file) as f:
        streets_gj = geojson.load(f)

    # Read streets into a list of street edge id->coordinates mapping
    streets_list = streets_gj['features']

    edge_id_to_coords_list = {}

    # load all edges
    for street in streets_list:
        edge_id, coords_list = extract_street_coords_from_geojson(street)
        edge_id_to_coords_list[edge_id] = coords_list

    # compute which points are intersections of at least two different streets
    points_to_streets = dict()

    edge_to_name = pd.read_csv(street_edge_name_file)
    edge_to_name.set_index('street_edge_id', inplace=True)

    for edge_id, coords_list in edge_id_to_coords_list.items():
        try:
            street_name = edge_to_name.loc[edge_id].street_name
        except KeyError:
            continue

        for float_point in coords_list:
            point_lng, point_lat = int(float_point[0] * multiplier), int(
                float_point[1] * multiplier)

            # print(float_point)
            if (point_lng, point_lat) not in points_to_streets:
                points_to_streets[point_lng, point_lat] = set()

            if type(street_name) == str:
                # Add a street to the intersection
                points_to_streets[point_lng, point_lat].add(street_name)
            elif pd.isna(street_name):
                # Unnamed street, just represent it as an empty string
                points_to_streets[point_lng, point_lat].add('')

    # filter all points that aren't on > 1 streets
    intersection_points = dict()
    for point, street_names in points_to_streets.items():
        if len(street_names) > 1:
            intersection_points[point] = street_names

    with open(intersection_points_file, 'wb') as f:
        pickle.dump(intersection_points, f)
Example #41
def prep_test_data(file_name):
    path = make_test_data_file(file_name)()
    with open(path) as gjs:
        # records is a list of maps
        records = geojson.load(gjs)
        return [{'flight_id': record['flight_id'],
                 'lats': record['horizontal_path']['lats'],
                 'lons': record['horizontal_path']['lons'],
                 'min_alt': min(record['altitude_profile']['altitudes']),
                 'max_alt': max(record['altitude_profile']['altitudes'])}
                for record in records['data']]
Example #42
    def load_geometry(cls, gjson_path):
        # LOAD THE FIELDS FOR WHICH THE TIMESERIES
        # SHOULD BE EXTRACTED FOR THE CROP CALENDARS
        with open(gjson_path) as f:
            gj = geojson.load(f)
        ### Buffer the fields 10 m inwards before requesting the TS from OpenEO
        polygons_inw_buffered, poly_too_small_buffer = prepare_geometry(gj)
        #TODO this is a bit confusing: I woud expect to continue with polygons_inw_buffered here
        gj = remove_small_poly(gj, poly_too_small_buffer)

        return gj,polygons_inw_buffered
Example #43
def import_geojson_to_sheet(geojson_path):
    """WARNING: will clear existing spreadsheet!!!
    pushes a geojson file to the google spreadsheet."""
    with open(geojson_path) as f:
        data = geojson.load(f)
        try:
            parsed_list = []
            for feature in data.features:
                if feature.geometry is not None \
                   and feature.geometry.type == "Point":
                    lon, lat = list(geojson.coords(feature))[0]
                    verified = feature.properties.get('Verified Status',
                                                      'unknown')
                    if (verified == 'unknown'):
                        # try to use the 'verified' field instead
                        verified = feature.properties.get(
                            'verified', 'unknown')

                    marking = feature.properties.get('Pedestrian Markings',
                                                     'unknown')
                    signal = feature.properties.get('Crossing Signal',
                                                    'unknown')
                    features = feature.properties.get('Other Features', '')
                    notes = feature.properties.get('Notes', '')
                    parsed_list.append(
                        [lon, lat, verified, marking, signal, features, notes])
            # if we got here, we built a full parsed list.
            # clear the spreadsheet and push what we have up
            try:
                worksheet = backend.worksheet("gis_dataset")
            except gspread.exceptions.WorksheetNotFound:
                worksheet = backend.add_worksheet("gis_dataset", 1, 1)
            worksheet.clear()
            row_header = [
                "Longitude", "Latitude", "Verified Status",
                "Pedestrian Markings", "Crossing Signal", "Other Features",
                "Notes"
            ]
            # create enough rows and columns to batch an update
            worksheet.resize(rows=len(parsed_list) + 1, cols=len(row_header))
            batched_cells = worksheet.range(1, 1,
                                            len(parsed_list) + 1,
                                            len(row_header))
            for cel in batched_cells:
                if cel.row == 1:
                    cel.value = row_header[cel.col - 1]
                else:
                    cel.value = parsed_list[cel.row - 2][cel.col - 1]

            worksheet.update_cells(batched_cells)
        except AttributeError as e:
            print("was expecting a file with one FeatureCollection, " +
                  "where each feature is a point!")
            print(e)
Example #44
    def setUp(self):
        with open('testing.json', 'r') as f:
            self.result = json.load(f)
        with open('testing_input.json', 'r') as f:
            self.input = json.load(f)
        self.start_coord = (53.2815126, -6.2341631)
        self.end_coord = (53.3881208173444, -6.2659470210)
        self.graph_location = '../assets/graph.json'
        with open(self.graph_location, 'rb') as jfile:
            graph_json = geojson.load(jfile)
            self.G = nx.node_link_graph(graph_json)
Example #45
def remove_sensor(url, sn): 
	print('starting remove')
	
	with open(url, 'r') as data_file:
		data = geojson.load(data_file)
	print(data)
	print(len(data['features']))
		  
	data['features'] = [element for element in data['features'] if not int(element['properties']['Sn']) == int(sn)]
	print(len(data['features']))
	print(int(data['features'][0]['properties']['Sn']))
Example #46
    def from_geojson(cls, filename, index=0):
        json = geojson.load(open(filename))
        try:
            coords = json["coordinates"]
        except KeyError:
            try:
                coords = json["geometry"]["coordinates"]
            except KeyError:
                coords = json["features"][index]["geometry"]["coordinates"]

        return cls.from_lng_lat_tuples(coords)
Example #47
def get_lat_long_list_from_file(file_name):
    final_list = []
    with open(file_name) as json_file:

        feature_collection = geojson.load(json_file)
        coords = list(geojson.utils.coords(feature_collection))

        for x in coords:
            final_list.append({"lat": x[1], "long": x[0]})

        return final_list
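For example (placeholder file name):

points = get_lat_long_list_from_file('route.geojson')
# -> [{'lat': 47.61, 'long': -122.33}, ...]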
Example #48
File: chip.py  Project: cappelaere/coreg2
def LoadPolygon(eo1FileName):
    base_dir = os.path.dirname(eo1FileName)
    footstepsFileName = os.path.join(base_dir, "geojson", "footsteps.tif.json")
    with open(footstepsFileName) as json_data:
        jsond = geojson.load(json_data)
    json_data.close()

    feature = jsond['features'][0]
    coords = feature['geometry']['coordinates']
    poly = coords[0]
    return poly
Example #49
    def _prepare_data(self):
        with open(JSON_OUTPUT_PATH, 'r') as f:
            self.trees_geojson = json.load(f)

        with open(BUS_LINES_PATH, 'r') as f:
            self.bus_geojson = geojson.load(f)

        if DEBUG:
            self.bus_df_co2 = pd.read_csv('./data/calculated_co2.csv')
        else:
            self._prepare_bus_df()
            self._calc_carbon_emissions()
Example #50
def find_intersecting_polygons(all_polygons_file, bounding_polygon,
                               output_file, id_prefix):
    # Load all polygons into a feature set
    with open(all_polygons_file, mode="r") as in_file:
        all_polygons = geojson.load(in_file)

    total_area = 0
    num_polys = 0
    intersecting_polygons = []

    # Find the intersecting polygons
    for feature in all_polygons["features"]:
        polygon = geometry.shape(feature["geometry"])

        if not polygon.is_valid:
            if DEBUG:
                print("Attempting to fix invalid polygon")
            polygon = polygon.buffer(0)

            # If it's still not valid, just skip it
            if not polygon.is_valid:
                if DEBUG:
                    print("Fix was unsuccessful. Skipping polygon")
                continue

        intersection = bounding_polygon.intersection(polygon)

        # If polygon overlaps with bounds, we want to include it
        if intersection.area > 0:
            num_polys += 1

            # Construct new geojson polygon for intersection area
            if intersection.geom_type == 'MultiPolygon':
                new_polygon = geojson.MultiPolygon(
                    geometry.mapping(intersection)["coordinates"])
            else:
                new_polygon = geojson.Polygon(
                    geometry.mapping(intersection)["coordinates"])

            #Create feature and add to list
            new_feature = geojson.Feature(geometry=new_polygon)

            new_feature["properties"]["id"] = id_prefix + str(uuid.uuid4())

            # Add to list of features to return
            intersecting_polygons.append(new_feature)

    # Add all features to a feature set
    new_feature_collection = geojson.FeatureCollection(intersecting_polygons)

    # Output to a file
    with open(output_file, mode="w") as out_file:
        geojson.dump(new_feature_collection, out_file, indent=4)
Example #51
def merge_geojson_feature_collections(filepaths):
    """Merge all GeoJSON feature collections into a single, nation-wide file."""
    all_features = []
    for fp in filepaths:
        with open(fp, 'r') as f:
            json_blob = geojson.load(f)
            new_features = json_blob['features']
            all_features.extend(new_features)
            crs = json_blob['crs']

    full_feature_collection = geojson.FeatureCollection(features=all_features, crs=crs)
    return full_feature_collection
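A sketch that merges per-state files and writes the result (placeholder names; as written, the function keeps the crs of the last input read):

import geojson

merged = merge_geojson_feature_collections(['wa.geojson', 'or.geojson'])
with open('nationwide.geojson', 'w') as f:
    geojson.dump(merged, f)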
Example #52
    def invoke(self):
        '''
        Execute task
        '''

        # Load list of features
        with open(self.geoj) as f:
            info = geojson.load(f)['features']
            poly_ct = len(info)

        # Load trained model
        if self.classes:
            m = VggNet(classes=self.classes, model_name='model')
        else:
            m = VggNet(model_name='model')
        m.model.load_weights(self.weights)

        # Format input_shape and max_side_dim
        inp_shape = m.input_shape[-3:]

        if not self.max_side_dim:
            self.max_side_dim = inp_shape[-1]

        # Check all imgs have correct bands
        bands = inp_shape[0]
        for img in self.imgs:
            img_bands = geoio.GeoImage(img).shape[0]
            if bands != img_bands:
                raise Exception('Make sure the model was trained on an image with the ' \
                                'same number of bands as all input images.')

        # Filter shapefile
        de.filter_polygon_size(self.geoj,
                               output_file=self.geoj,
                               max_side_dim=self.max_side_dim,
                               min_side_dim=self.min_side_dim)

        # Numerical vs string classes
        out_name, num_classes = 'classified.geojson', True
        if self.classes:
            num_classes = False

        # Classify file
        m.classify_geojson(self.geoj,
                           output_name=out_name,
                           numerical_classes=num_classes,
                           max_side_dim=self.max_side_dim,
                           min_side_dim=self.min_side_dim,
                           chips_in_mem=1000,
                           bit_depth=self.bit_depth)

        # Write output
        move(out_name, self.output_dir)
Example #53
    def handle(self, *args, **kwargs):
        city = kwargs['city']
        file_data = kwargs['data']

        if Grid.objects.filter(city_name=city).exists():
            print(f'City {city} is already in the database')
            return

        try:
            geojson.load(file_data)
        except json.decoder.JSONDecodeError:
            print(f'File {file_data.name} is invalid')
            return

        file_data.close()

        city_grid = Grid.objects.create(city_name=city)

        rows_cnt = int(abs(GRID_END_LOC[0] - GRID_START_LOC[0]) / GRID_STEP)
        cols_cnt = int(abs(GRID_END_LOC[1] - GRID_START_LOC[1]) / GRID_STEP)

        print(rows_cnt, cols_cnt, flush=True)

        output_dir = f'{BASE_DIR}/output'

        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

        with open(f'{output_dir}/result.csv', 'w') as out:
            lat = GRID_START_LOC[0]
            for i in range(rows_cnt):
                lon = GRID_START_LOC[1]
                for j in range(cols_cnt):
                    cell = Cell.objects.create(parent_grid=city_grid,
                                               latitude=lat,
                                               longitude=lon)

                    out.write(f"{cell.id},{lat},{lon}\n")
                    lon += GRID_STEP
                lat -= GRID_STEP