# Imports required by the commands below (helpers is Fiona's fio helper
# module, fiona.fio.helpers, which provides make_ld_context and id_record).
import json
import logging
from functools import partial

import click

import fiona
from fiona.fio import helpers
from fiona.transform import transform_geom


def dump(ctx, input, encoding, precision, indent, compact, record_buffered,
         ignore_errors, with_ld_context, add_ld_context_item, layer):
    """Dump a dataset either as a GeoJSON feature collection (the default)
    or a sequence of GeoJSON features."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    sink = click.get_text_stream('stdout')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    item_sep = compact and ',' or ', '

    open_kwds = {}
    if encoding:
        open_kwds['encoding'] = encoding
    if layer:
        open_kwds['layer'] = layer

    def transformer(crs, feat):
        # Reproject the feature's geometry from the source CRS to WGS 84.
        tg = partial(transform_geom, crs, 'EPSG:4326',
                     antimeridian_cutting=True, precision=precision)
        feat['geometry'] = tg(feat['geometry'])
        return feat

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(input, **open_kwds) as source:
                meta = source.meta
                meta['fields'] = dict(list(
                    source.schema['properties'].items()))

                if record_buffered:
                    # Buffer GeoJSON data at the feature level for smaller
                    # memory footprint.
                    indented = bool(indent)
                    rec_indent = "\n" + " " * (2 * (indent or 0))

                    collection = {
                        'type': 'FeatureCollection',
                        'fiona:schema': meta['schema'],
                        'fiona:crs': meta['crs'],
                        'features': []}
                    if with_ld_context:
                        collection['@context'] = helpers.make_ld_context(
                            add_ld_context_item)

                    head, tail = json.dumps(
                        collection, **dump_kwds).split('[]')

                    sink.write(head)
                    sink.write("[")

                    itr = iter(source)

                    # Try the first record.
                    try:
                        i, first = 0, next(itr)
                        first = transformer(source.crs, first)
                        if with_ld_context:
                            first = helpers.id_record(first)
                        if indented:
                            sink.write(rec_indent)
                        sink.write(json.dumps(
                            first, **dump_kwds).replace("\n", rec_indent))
                    except StopIteration:
                        pass
                    except Exception as exc:
                        # Ignoring errors is *not* the default.
                        if ignore_errors:
                            logger.error(
                                "failed to serialize file record %d (%s), "
                                "continuing", i, exc)
                        else:
                            # Log error and close up the GeoJSON, leaving it
                            # more or less valid no matter what happens above.
                            logger.critical(
                                "failed to serialize file record %d (%s), "
                                "quitting", i, exc)
                            sink.write("]")
                            sink.write(tail)
                            if indented:
                                sink.write("\n")
                            raise

                    # Because trailing commas aren't valid in JSON arrays
                    # we'll write the item separator before each of the
                    # remaining features.
                    for i, rec in enumerate(itr, 1):
                        rec = transformer(source.crs, rec)
                        try:
                            if with_ld_context:
                                rec = helpers.id_record(rec)
                            if indented:
                                sink.write(rec_indent)
                            sink.write(item_sep)
                            sink.write(json.dumps(
                                rec, **dump_kwds).replace("\n", rec_indent))
                        except Exception as exc:
                            if ignore_errors:
                                logger.error(
                                    "failed to serialize file record %d (%s), "
                                    "continuing", i, exc)
                            else:
                                logger.critical(
                                    "failed to serialize file record %d (%s), "
                                    "quitting", i, exc)
                                sink.write("]")
                                sink.write(tail)
                                if indented:
                                    sink.write("\n")
                                raise

                    # Close up the GeoJSON after writing all features.
                    sink.write("]")
                    sink.write(tail)
                    if indented:
                        sink.write("\n")

                else:
                    # Buffer GeoJSON data at the collection level. The default.
                    collection = {
                        'type': 'FeatureCollection',
                        'fiona:schema': meta['schema'],
                        'fiona:crs': meta['crs']}
                    if with_ld_context:
                        collection['@context'] = helpers.make_ld_context(
                            add_ld_context_item)
                        collection['features'] = [
                            helpers.id_record(transformer(source.crs, rec))
                            for rec in source]
                    else:
                        collection['features'] = [
                            transformer(source.crs, rec) for rec in source]
                    json.dump(collection, sink, **dump_kwds)

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
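
# The record-buffered branch above leans on a small trick: serialize the
# collection with an empty "features" list, split that text on the literal
# "[]", and stream each feature between the two halves, writing the
# separator before every record after the first so no trailing comma can
# appear. The helper below is an illustrative, standalone restatement of
# that idea using only the stdlib json module; it is a sketch, not part of
# the fio CLI.
def _stream_feature_collection(features, sink, indent=2):
    """Write ``features`` to ``sink`` as a FeatureCollection, one at a time."""
    kwds = {'sort_keys': True, 'indent': indent}
    head, tail = json.dumps(
        {'type': 'FeatureCollection', 'features': []}, **kwds).split('[]')
    rec_indent = "\n" + " " * (2 * indent)

    sink.write(head)
    sink.write("[")
    for i, feat in enumerate(features):
        if i:
            sink.write(",")  # separator precedes every record but the first
        sink.write(rec_indent)
        sink.write(json.dumps(feat, **kwds).replace("\n", rec_indent))
    sink.write("]")
    sink.write(tail)
    sink.write("\n")

# Example (hypothetical data):
#   import sys
#   _stream_feature_collection(
#       [{'type': 'Feature', 'properties': {},
#         'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}}],
#       sys.stdout)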
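
# Both dump() above and collect() below hand reprojection to Fiona's
# fiona.transform.transform_geom, currying the source and target CRS with
# functools.partial exactly as in their transformer helpers. A minimal
# sketch of that call follows; the EPSG:3857 source CRS, the sample
# coordinates, and the _to_wgs84_example name are illustrative assumptions,
# not part of the fio CLI.
def _to_wgs84_example():
    """Reproject one GeoJSON-like geometry to WGS 84 (EPSG:4326)."""
    to_wgs84 = partial(transform_geom, 'EPSG:3857', 'EPSG:4326',
                       antimeridian_cutting=True, precision=6)
    geom = {'type': 'Point', 'coordinates': (1113194.9, 6800125.5)}
    # Returns a GeoJSON-like geometry near lon 10, lat 52.
    return to_wgs84(geom)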

def collect(ctx, precision, indent, compact, record_buffered, ignore_errors,
            src_crs, with_ld_context, add_ld_context_item, parse):
    """Make a GeoJSON feature collection from a sequence of GeoJSON features
    and print it."""
    logger = logging.getLogger(__name__)
    stdin = click.get_text_stream('stdin')
    sink = click.get_text_stream('stdout')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    item_sep = compact and ',' or ', '

    if src_crs:
        if not parse:
            raise click.UsageError("Can't specify --src-crs with --no-parse")
        transformer = partial(transform_geom, src_crs, 'EPSG:4326',
                              antimeridian_cutting=True, precision=precision)
    else:
        def transformer(x):
            return x

    first_line = next(stdin)

    # If parsing geojson
    if parse:
        # If input is RS-delimited JSON sequence.
        if first_line.startswith('\x1e'):
            def feature_text_gen():
                buffer = first_line.strip('\x1e')
                for line in stdin:
                    if line.startswith('\x1e'):
                        if buffer:
                            feat = json.loads(buffer)
                            feat['geometry'] = transformer(feat['geometry'])
                            yield json.dumps(feat, **dump_kwds)
                        buffer = line.strip('\x1e')
                    else:
                        buffer += line
                else:
                    feat = json.loads(buffer)
                    feat['geometry'] = transformer(feat['geometry'])
                    yield json.dumps(feat, **dump_kwds)
        else:
            def feature_text_gen():
                feat = json.loads(first_line)
                feat['geometry'] = transformer(feat['geometry'])
                yield json.dumps(feat, **dump_kwds)
                for line in stdin:
                    feat = json.loads(line)
                    feat['geometry'] = transformer(feat['geometry'])
                    yield json.dumps(feat, **dump_kwds)

    # If *not* parsing geojson
    else:
        # If input is RS-delimited JSON sequence.
        if first_line.startswith('\x1e'):
            def feature_text_gen():
                buffer = first_line.strip('\x1e')
                for line in stdin:
                    if line.startswith('\x1e'):
                        if buffer:
                            yield buffer
                        buffer = line.strip('\x1e')
                    else:
                        buffer += line
                else:
                    yield buffer
        else:
            def feature_text_gen():
                yield first_line
                for line in stdin:
                    yield line

    try:
        source = feature_text_gen()

        if record_buffered:
            # Buffer GeoJSON data at the feature level for smaller
            # memory footprint.
            indented = bool(indent)
            rec_indent = "\n" + " " * (2 * (indent or 0))

            collection = {
                'type': 'FeatureCollection',
                'features': []}
            if with_ld_context:
                collection['@context'] = helpers.make_ld_context(
                    add_ld_context_item)

            head, tail = json.dumps(collection, **dump_kwds).split('[]')

            sink.write(head)
            sink.write("[")

            # Try the first record.
            try:
                i, first = 0, next(source)
                if with_ld_context:
                    first = helpers.id_record(first)
                if indented:
                    sink.write(rec_indent)
                sink.write(first.replace("\n", rec_indent))
            except StopIteration:
                pass
            except Exception as exc:
                # Ignoring errors is *not* the default.
                if ignore_errors:
                    logger.error(
                        "failed to serialize file record %d (%s), "
                        "continuing", i, exc)
                else:
                    # Log error and close up the GeoJSON, leaving it
                    # more or less valid no matter what happens above.
                    logger.critical(
                        "failed to serialize file record %d (%s), "
                        "quitting", i, exc)
                    sink.write("]")
                    sink.write(tail)
                    if indented:
                        sink.write("\n")
                    raise

            # Because trailing commas aren't valid in JSON arrays
            # we'll write the item separator before each of the
            # remaining features.
            for i, rec in enumerate(source, 1):
                try:
                    if with_ld_context:
                        rec = helpers.id_record(rec)
                    if indented:
                        sink.write(rec_indent)
                    sink.write(item_sep)
                    sink.write(rec.replace("\n", rec_indent))
                except Exception as exc:
                    if ignore_errors:
                        logger.error(
                            "failed to serialize file record %d (%s), "
                            "continuing", i, exc)
                    else:
                        logger.critical(
                            "failed to serialize file record %d (%s), "
                            "quitting", i, exc)
                        sink.write("]")
                        sink.write(tail)
                        if indented:
                            sink.write("\n")
                        raise

            # Close up the GeoJSON after writing all features.
            sink.write("]")
            sink.write(tail)
            if indented:
                sink.write("\n")

        else:
            # Buffer GeoJSON data at the collection level. The default.
            collection = {
                'type': 'FeatureCollection',
                'features': []}
            if with_ld_context:
                collection['@context'] = helpers.make_ld_context(
                    add_ld_context_item)
            head, tail = json.dumps(collection, **dump_kwds).split('[]')
            sink.write(head)
            sink.write("[")
            sink.write(",".join(source))
            sink.write("]")
            sink.write(tail)
            sink.write("\n")

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
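
# The '\x1e' checks in collect() handle RS-delimited JSON text sequences
# (RFC 8142): each record is preceded by an ASCII record separator (0x1e)
# and may span several lines, so text is buffered until the next RS or end
# of input. The generator below is an illustrative, standalone restatement
# of that buffering; _iter_rs_texts is a hypothetical helper, not part of
# the fio CLI.
def _iter_rs_texts(lines):
    """Yield one JSON text per RS-prefixed record in ``lines``."""
    buffer = None
    for line in lines:
        if line.startswith('\x1e'):
            if buffer:
                yield buffer
            buffer = line.lstrip('\x1e')
        elif buffer is not None:
            buffer += line
    if buffer:
        yield buffer

# Example (hypothetical input):
#   import io
#   seq = io.StringIO('\x1e{"type": "Feature",\n "geometry": null}\n')
#   list(_iter_rs_texts(seq))  # -> ['{"type": "Feature",\n "geometry": null}\n']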