def test_bounds():
    """Feature bounds and geometry bounds of the UK test file agree."""
    expected = (0.735, 51.357216, 0.947778, 51.444717)
    with fiona.open("docs/data/test_uk.shp") as src:
        feat = next(src)
        feat_bounds = tuple(round(v, 6) for v in fiona.bounds(feat))
        geom_bounds = tuple(
            round(v, 6) for v in fiona.bounds(feat['geometry']))
        assert feat_bounds == expected
        assert geom_bounds == expected
def getBbox(self, srcfile, outfile):
    """Create a shapefile of per-feature bounding boxes.

    Takes a polygon shp file as an input and creates a polygon shp
    file of bounding boxes for each of the polygons they represent.
    Bounding boxes keep the attributes of their respective polygons.

    PARAMETER(S):
    : srcfile : The source polygon shapefile.
    : outfile : The name of the bounding box shapefile to be created.

    EXAMPLE(S):
        import bigeo
        bb = bigeo.BoundingBoxCreator()
        bb.getBbox('/home/polygon.shp', '/home/boundingbox.shp')
    """
    self.srcfile = srcfile
    self.outfile = outfile
    with fiona.drivers():
        # Lazy %-style args avoid formatting when the level is disabled.
        logging.info("Reading file: %s", self.srcfile)
        with fiona.open(self.srcfile) as src:
            self.meta = src.meta
            logging.info("Creating output file: %s", self.outfile)
            with fiona.open(self.outfile, 'w', **self.meta) as dst:
                for f in src:
                    # Compute the bounds once per feature instead of
                    # calling fiona.bounds() four times.
                    bounds = fiona.bounds(f)
                    logging.info("Creating bounds: %s", str(bounds))
                    bbox = Polygon.from_bounds(*bounds)
                    f['geometry'] = mapping(bbox)
                    dst.write(f)
    logging.info(
        "Done creating bounds for all features. Writing to the specified output file."
    )
def test_bounds():
    """Bounds computed from a feature and from its geometry match."""
    want = (0.735, 51.357216, 0.947778, 51.444717)
    with fiona.open("docs/data/test_uk.shp") as src:
        first = next(src)
        for obj in (first, first['geometry']):
            got = tuple(round(coord, 6) for coord in fiona.bounds(obj))
            assert got == want
def bounds(ctx, precision, explode, with_id, with_obj, use_rs):
    """Print the bounding boxes of GeoJSON objects read from stdin.

    Optionally explode collections and print the bounds of their
    features.

    To print identifiers for input objects along with their bounds
    as a {id: identifier, bbox: bounds} JSON object, use --with-id.

    To print the input objects themselves along with their bounds
    as GeoJSON object, use --with-obj. This has the effect of updating
    input objects with {id: identifier, bbox: bounds}.
    """
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')
    try:
        source = obj_gen(stdin)
        for i, obj in enumerate(source):
            obj_id = obj.get('id', 'collection:' + str(i))
            xs = []
            ys = []
            features = obj.get('features') or [obj]
            for j, feat in enumerate(features):
                # Default ids must be unique per feature within a
                # collection: use the feature index j, not the
                # collection index i (the original used i, so every
                # feature of one collection shared the same id).
                feat_id = feat.get('id', 'feature:' + str(j))
                w, s, e, n = fiona.bounds(feat)
                if precision > 0:
                    w, s, e, n = (round(v, precision)
                                  for v in (w, s, e, n))
                if explode:
                    if with_id:
                        rec = {'parent': obj_id,
                               'id': feat_id,
                               'bbox': (w, s, e, n)}
                    elif with_obj:
                        feat.update(parent=obj_id, bbox=(w, s, e, n))
                        rec = feat
                    else:
                        rec = (w, s, e, n)
                    if use_rs:
                        click.echo(u'\u001e', nl=False)
                    click.echo(json.dumps(rec))
                else:
                    xs.extend([w, e])
                    ys.extend([s, n])
            if not explode:
                w, s, e, n = (min(xs), min(ys), max(xs), max(ys))
                if with_id:
                    rec = {'id': obj_id, 'bbox': (w, s, e, n)}
                elif with_obj:
                    obj.update(id=obj_id, bbox=(w, s, e, n))
                    rec = obj
                else:
                    rec = (w, s, e, n)
                if use_rs:
                    click.echo(u'\u001e', nl=False)
                click.echo(json.dumps(rec))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
def get_sf_wgs_bounds(_sf_fn):
    """
    returns the bbox of the features in a fiona.shapefile

    :param _sf_fn: path to a .shp file; a sibling .prj file must exist.
    :return: (e, s, w, n) corner coordinates reprojected to WGS84.
    """
    # Context managers close the shapefile and the .prj file; the
    # original leaked both handles.
    with fiona.open(_sf_fn, 'r') as _sf:
        bboxs = np.array([fiona.bounds(feature) for feature in _sf])

    # NOTE(review): names follow the original code; the minima are the
    # min-x/min-y corner and the maxima the max-x/max-y corner.
    e, s, w, n = [
        np.min(bboxs[:, 0]), np.min(bboxs[:, 1]),
        np.max(bboxs[:, 2]), np.max(bboxs[:, 3])
    ]

    with open(_sf_fn.replace('.shp', '') + '.prj') as prj_fp:
        proj_wkt = prj_fp.read()
    sf_proj4 = wkt_2_proj4(proj_wkt)
    sf_proj = pyproj.Proj(sf_proj4)
    wgs_proj = pyproj.Proj(wgs84_proj4)
    e, s = pyproj.transform(sf_proj, wgs_proj, e, s)
    w, n = pyproj.transform(sf_proj, wgs_proj, w, n)
    return e, s, w, n
def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs,
        use_rs, bbox, layer):
    """
    Concatenate and print the features of input datasets as a
    sequence of GeoJSON features.

    When working with a multi-layer dataset the first layer is used by
    default. Use the '--layer' option to select a different layer.
    """
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')

    # Validate file indexes provided in --layer option
    # (can't pass the files to option callback)
    if layer:
        options.validate_multilayer_file_index(files, layer)

    # first layer is the default
    for i in range(1, len(files) + 1):
        if str(i) not in layer.keys():
            layer[str(i)] = [0]

    # Parse --bbox exactly once, before the dataset loop.  The original
    # re-parsed it per dataset: after the first pass bbox was already a
    # tuple, so bbox.split(',') raised an uncaught AttributeError on
    # the second dataset/layer.
    if bbox:
        try:
            bbox = tuple(map(float, bbox.split(',')))
        except ValueError:
            bbox = json.loads(bbox)

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            for i, path in enumerate(files, 1):
                for lyr in layer[str(i)]:
                    with fiona.open(path, layer=lyr) as src:
                        # 'fid' keeps the dataset counter i from being
                        # shadowed by the feature-id loop variable.
                        for fid, feat in src.items(bbox=bbox):
                            if dst_crs or precision >= 0:
                                g = transform_geom(
                                    src.crs, dst_crs, feat['geometry'],
                                    antimeridian_cutting=True,
                                    precision=precision)
                                feat['geometry'] = g
                                feat['bbox'] = fiona.bounds(g)
                            if use_rs:
                                click.echo(u'\u001e', nl=False)
                            click.echo(json.dumps(feat, **dump_kwds))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs,
        use_rs, bbox, layer):
    """
    Concatenate and print the features of input datasets as a
    sequence of GeoJSON features.

    When working with a multi-layer dataset the first layer is used by
    default. Use the '--layer' option to select a different layer.
    """
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    # (unused local `item_sep` removed)

    # Validate file indexes provided in --layer option
    # (can't pass the files to option callback)
    if layer:
        options.validate_multilayer_file_index(files, layer)

    # first layer is the default
    for i in range(1, len(files) + 1):
        if str(i) not in layer.keys():
            layer[str(i)] = [0]

    # Parse --bbox exactly once.  Re-parsing inside the dataset loop
    # crashed with AttributeError on the second dataset, because bbox
    # had already been replaced by a tuple (tuples have no .split).
    if bbox:
        try:
            bbox = tuple(map(float, bbox.split(',')))
        except ValueError:
            bbox = json.loads(bbox)

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            for i, path in enumerate(files, 1):
                for lyr in layer[str(i)]:
                    with fiona.open(path, layer=lyr) as src:
                        # 'fid' avoids shadowing the dataset counter i.
                        for fid, feat in src.items(bbox=bbox):
                            if dst_crs or precision >= 0:
                                g = transform_geom(
                                    src.crs, dst_crs, feat['geometry'],
                                    antimeridian_cutting=True,
                                    precision=precision)
                                feat['geometry'] = g
                                feat['bbox'] = fiona.bounds(g)
                            if use_rs:
                                click.echo(u'\u001e', nl=False)
                            click.echo(json.dumps(feat, **dump_kwds))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
def shape_inspection(self, ranch=None):
    """Sum pasture areas and compute a WGS84 bbox for the shapefile.

    :param ranch: optional ranch name; when given, only features whose
        ranch component matches (case-insensitively) are considered.
    :return: (total area in hectares as float, bbox). The bbox is
        returned as (_e, _n, _w, _s) — NOTE(review): this ordering
        follows the original code; confirm against callers.
    """
    if ranch is not None:
        ranch = ranch.replace(' ', '_')

    loc_path = self.loc_path
    _d = self._d

    sf_fn = _join(loc_path, _d['sf_fn'])
    sf_feature_properties_key = _d['sf_feature_properties_key']
    sf_fn = os.path.abspath(sf_fn)

    area_ha = {}
    bboxs = []
    # Context manager closes the shapefile; the original leaked it.
    with fiona.open(sf_fn, 'r') as sf:
        for feature in sf:
            properties = feature['properties']
            key = properties[sf_feature_properties_key].replace(' ', '_')
            _pasture, _ranch = key.split(self.key_delimiter)
            if self.reverse_key:
                _ranch, _pasture = _pasture, _ranch
            if ranch is None or _ranch.lower() == ranch.lower():
                bboxs.append(fiona.bounds(feature))
                if key not in area_ha:
                    # 'Hectares' attribute is optional; default to 0.0.
                    area_ha[key] = properties.get('Hectares', 0.0)

    bboxs = np.array(bboxs)
    e, s, w, n = [
        np.min(bboxs[:, 0]), np.min(bboxs[:, 1]),
        np.max(bboxs[:, 2]), np.max(bboxs[:, 3])
    ]

    # Close the .prj file too (also leaked in the original).
    with open(sf_fn.replace('.shp', '') + '.prj') as prj_fp:
        proj_wkt = prj_fp.read()
    sf_proj4 = wkt_2_proj4(proj_wkt)
    sf_proj = pyproj.Proj(sf_proj4)
    wgs_proj = pyproj.Proj(wgs84_proj4)
    _e, _s = pyproj.transform(sf_proj, wgs_proj, e, s)
    _w, _n = pyproj.transform(sf_proj, wgs_proj, w, n)
    bbox = _e, _n, _w, _s

    return float(np.sum(list(area_ha.values()))), bbox
def create_rtree(shape_file: str, inEPSG='EPSG:4326', outEPSG='EPSG:4326'):
    """Build an on-disk rtree index of feature bounds from a shapefile.

    Bounds are reprojected from inEPSG to outEPSG before insertion.
    The index files are written next to the input, with the '.shp'
    suffix replaced by '_rtree'. Errors are logged, not raised.

    :param shape_file: path to the input .shp file.
    :param inEPSG: CRS of the source geometries.
    :param outEPSG: CRS the bounds are transformed into.
    """
    try:
        buildings = fiona.open(shape_file)
        # outEPSG = 'EPSG:4326'
        # inEPSG = 'EPSG:3857'
        transformer = Transformer.from_crs(inEPSG, outEPSG, always_xy=True)

        # rtree
        rtree_path = shape_file.replace(".shp", '_rtree')
        p = index.Property()
        p.overwrite = True
        r_tree = index.Index(rtree_path, properties=p)

        # Guard against collections with fewer than 1000 features:
        # int(len/1000) would be 0 and `idx % 0` raises
        # ZeroDivisionError on the very first feature.
        print_interval = max(1, int(len(buildings) / 1000))

        for idx, building in tqdm(enumerate(buildings[:])):
            try:
                if idx % print_interval == 0:
                    logger.info("Processing polygon #: %d", idx)
                # Transform (minx, miny) and (maxx, maxy) corners into
                # the output CRS before inserting.
                bound = list(fiona.bounds(building))
                bound[0], bound[1] = transformer.transform(bound[0], bound[1])
                bound[2], bound[3] = transformer.transform(bound[2], bound[3])
                r_tree.insert(idx, bound)
            except Exception as e:
                # Best-effort per feature: log and keep indexing.
                logger.error("Error in building polygons: %s", e)
                continue
        r_tree.close()
        # Close the collection; the original leaked the handle.
        buildings.close()
    except Exception as e:
        logger.error("Error in creating rtree: %s", e)
def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs,
        use_rs, bbox):
    """Concatenate and print the features of input datasets as a
    sequence of GeoJSON features."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    # (unused local `item_sep` removed)

    # Parse --bbox exactly once, before iterating datasets.  The
    # original re-parsed it per file, so with more than one input the
    # second pass called .split on a tuple and raised an uncaught
    # AttributeError.
    if bbox:
        try:
            bbox = tuple(map(float, bbox.split(',')))
        except ValueError:
            bbox = json.loads(bbox)

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            for path in files:
                with fiona.open(path) as src:
                    for i, feat in src.items(bbox=bbox):
                        if dst_crs or precision > 0:
                            g = transform_geom(
                                src.crs, dst_crs, feat['geometry'],
                                antimeridian_cutting=True,
                                precision=precision)
                            feat['geometry'] = g
                            feat['bbox'] = fiona.bounds(g)
                        if use_rs:
                            click.echo(u'\u001e', nl=False)
                        click.echo(json.dumps(feat, **dump_kwds))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs,
        use_rs, bbox):
    """Concatenate and print the features of input datasets as a
    sequence of GeoJSON features."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    # (unused local `item_sep` removed)

    # Parse --bbox exactly once.  Re-parsing inside the file loop
    # crashed on the second input: bbox was already a tuple, and
    # tuple.split raised an AttributeError that the ValueError handler
    # did not catch.
    if bbox:
        try:
            bbox = tuple(map(float, bbox.split(',')))
        except ValueError:
            bbox = json.loads(bbox)

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            for path in files:
                with fiona.open(path) as src:
                    for i, feat in src.items(bbox=bbox):
                        if dst_crs or precision > 0:
                            g = transform_geom(
                                src.crs, dst_crs, feat['geometry'],
                                antimeridian_cutting=True,
                                precision=precision)
                            feat['geometry'] = g
                            feat['bbox'] = fiona.bounds(g)
                        if use_rs:
                            click.echo(u'\u001e', nl=False)
                        click.echo(json.dumps(feat, **dump_kwds))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
def cat(ctx, input, precision, indent, compact, ignore_errors, dst_crs,
        x_json_seq_rs, bbox):
    """Concatenate and print the features of input datasets as a
    sequence of GeoJSON features."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    sink = click.get_text_stream('stdout')
    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    # (unused local `item_sep` removed)

    # Parse --bbox exactly once.  The original re-parsed it per input
    # dataset; with more than one dataset, bbox was already a tuple on
    # the second pass and tuple.split raised AttributeError.
    if bbox:
        bbox = tuple(map(float, bbox.split(',')))

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            for path in input:
                with fiona.open(path) as src:
                    for i, feat in src.items(bbox=bbox):
                        if dst_crs or precision > 0:
                            g = transform_geom(
                                src.crs, dst_crs, feat['geometry'],
                                antimeridian_cutting=True,
                                precision=precision)
                            feat['geometry'] = g
                            feat['bbox'] = fiona.bounds(g)
                        if x_json_seq_rs:
                            # ASCII record separator for JSON text
                            # sequences.
                            sink.write(u'\u001e')
                        json.dump(feat, sink, **dump_kwds)
                        sink.write("\n")
        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
def test_bounds_polygon():
    """Polygon bounds span the min/max of all ring coordinates."""
    ring = [[0, 0], [10, 10], [10, 0]]
    geom = {'type': 'Polygon', 'coordinates': [ring]}
    result = fiona.bounds(geom)
    assert result == (0, 0, 10, 10)
def test_bounds_z():
    """A 3-D point still yields 2-D (x, y) bounds; z is ignored."""
    geom = {'type': 'Point', 'coordinates': [10, 10, 10]}
    result = fiona.bounds(geom)
    assert result == (10, 10, 10, 10)
def test_bounds_point():
    """A point's bounds are the degenerate box at its coordinates."""
    pt = {'type': 'Point', 'coordinates': [10, 10]}
    assert (10, 10, 10, 10) == fiona.bounds(pt)
def test_bounds_line():
    """Line bounds are the box enclosing both endpoints."""
    line = {"type": "LineString", "coordinates": [[0, 0], [10, 10]]}
    expected = (0, 0, 10, 10)
    assert fiona.bounds(line) == expected
def test_bounds_point():
    """Point bounds collapse to (x, y, x, y)."""
    geom = {"type": "Point", "coordinates": [10, 10]}
    got = fiona.bounds(geom)
    assert got == (10, 10, 10, 10)
def test_bounds_polygon():
    """Polygon bounds cover the extremes of the exterior ring."""
    exterior = [[0, 0], [10, 10], [10, 0]]
    geom = {"type": "Polygon", "coordinates": [exterior]}
    expected = (0, 0, 10, 10)
    assert fiona.bounds(geom) == expected
def test_bounds_line():
    """A diagonal line's bounds are the enclosing axis-aligned box."""
    geom = {'type': 'LineString', 'coordinates': [[0, 0], [10, 10]]}
    result = fiona.bounds(geom)
    assert result == (0, 0, 10, 10)
def bounds(ctx, precision, explode, with_id, with_obj, use_rs):
    """Print the bounding boxes of GeoJSON objects read from stdin.

    Optionally explode collections and print the bounds of their
    features.

    To print identifiers for input objects along with their bounds
    as a {id: identifier, bbox: bounds} JSON object, use --with-id.

    To print the input objects themselves along with their bounds
    as GeoJSON object, use --with-obj. This has the effect of updating
    input objects with {id: identifier, bbox: bounds}.
    """
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')
    try:
        source = obj_gen(stdin)
        for i, obj in enumerate(source):
            obj_id = obj.get('id', 'collection:' + str(i))
            xs = []
            ys = []
            features = obj.get('features') or [obj]
            for j, feat in enumerate(features):
                # Use the per-feature index j for the default id; the
                # original used the collection index i, giving every
                # feature of a collection the same default id.
                feat_id = feat.get('id', 'feature:' + str(j))
                w, s, e, n = fiona.bounds(feat)
                if precision > 0:
                    w, s, e, n = (round(v, precision)
                                  for v in (w, s, e, n))
                if explode:
                    if with_id:
                        rec = {
                            'parent': obj_id,
                            'id': feat_id,
                            'bbox': (w, s, e, n)}
                    elif with_obj:
                        feat.update(parent=obj_id, bbox=(w, s, e, n))
                        rec = feat
                    else:
                        rec = (w, s, e, n)
                    if use_rs:
                        click.echo(u'\u001e', nl=False)
                    click.echo(json.dumps(rec))
                else:
                    xs.extend([w, e])
                    ys.extend([s, n])
            if not explode:
                w, s, e, n = (min(xs), min(ys), max(xs), max(ys))
                if with_id:
                    rec = {'id': obj_id, 'bbox': (w, s, e, n)}
                elif with_obj:
                    obj.update(id=obj_id, bbox=(w, s, e, n))
                    rec = obj
                else:
                    rec = (w, s, e, n)
                if use_rs:
                    click.echo(u'\u001e', nl=False)
                click.echo(json.dumps(rec))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()