Example #1
File: file.py Project: ozak/geopandas
def to_file(df, filename, driver="ESRI Shapefile", schema=None,
            **kwargs):
    """
    Write this GeoDataFrame to an OGR data source

    A dictionary of supported OGR providers is available via:
    >>> import fiona
    >>> fiona.supported_drivers

    Parameters
    ----------
    df : GeoDataFrame to be written
    filename : string
        File path or file handle to write to.
    driver : string, default 'ESRI Shapefile'
        The OGR format driver used to write the vector file.
    schema : dict, default None
        If specified, the schema dictionary is passed to Fiona to
        better control how the file is written. If None, GeoPandas
        will determine the schema based on each column's dtype

    The *kwargs* are passed to fiona.open and can be used to write
    to multi-layer data, store data within archives (zip files), etc.
    """
    if schema is None:
        schema = infer_schema(df)
    filename = os.path.abspath(os.path.expanduser(filename))
    with fiona.drivers():
        with fiona.open(filename, 'w', driver=driver, crs=df.crs,
                        schema=schema, **kwargs) as colxn:
            colxn.writerecords(df.iterfeatures())
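A minimal usage sketch for the helper above, assuming it is importable alongside GeoPandas and Fiona; the file names and driver are illustrative only.

import geopandas as gpd

# Hypothetical input; any vector source readable by GeoPandas works here.
gdf = gpd.read_file("input.shp")
# to_file is the helper defined in Example #1; extra kwargs go to fiona.open.
to_file(gdf, "output.gpkg", driver="GPKG")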
Example #2
File: Utilities.py Project: bhickson/AARMP
def writeToGPKG(filename, df):
    """ Geopandas current implimentation is very slow to write data frame to file due to file, locking/unlock for
    each feature. Get around this with fionas buffer.

    This code pulled from https://github.com/geopandas/geopandas/issues/557"""

    g = df.columns.to_series().groupby(df.dtypes).groups
    properties = {}
    for k, v in g.items():
        for i in v:
            if i != 'geometry':
                # print(i)
                properties[i] = k.name

    file_schema = {
        'geometry': df.geom_type.tolist()[0],
        'properties': {
            'Type': 'str',
            'Class': 'int',
            'PROJ': 'str',
            'NAIP_FILE': 'str'
        }
    }

    with fiona.drivers():
        with fiona.open(filename,
                        'w',
                        driver="GPKG",
                        crs=df.crs,
                        schema=file_schema) as colxn:
            colxn.writerecords(df.iterfeatures())
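A hedged usage sketch for the batch writer above; the schema hard-codes the Type, Class, PROJ and NAIP_FILE properties, so the data frame is assumed to carry exactly those columns.

import geopandas as gpd

# Hypothetical GeoDataFrame containing the columns expected by file_schema above.
gdf = gpd.read_file("naip_tiles.shp")
writeToGPKG("naip_tiles.gpkg", gdf)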
Example #3
File: __init__.py Project: fitnr/breaks
def get_features(infile, fields=None):
    '''
    Return the features of <infile>. Includes error checking that given fields exist.

    Args:
        infile (str): path
        fields (Sequence/Generator): Check that these fields exist in <infile>.
                            Raises ValueError if one doesn't appear.

    Returns:
        (tuple) list of features and Fiona metadata for <infile>
    '''
    fields = fields or []
    with fiona.drivers():
        with fiona.open(infile) as source:
            try:
                for f in fields:
                    assert f in source.schema['properties']
            except AssertionError:
                raise ValueError('field not found in {}: {}'.format(infile, f))

            meta = {
                'schema': source.schema,
                'crs': source.crs,
            }

            features = list(source)

    return features, meta
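A short usage sketch with made-up file and field names; a missing field raises the ValueError shown above.

# Load features and confirm the required attribute fields exist.
features, meta = get_features("roads.shp", fields=["name", "highway"])
print(len(features), meta["crs"])

# Raises ValueError if the field is absent:
# get_features("roads.shp", fields=["no_such_field"])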
Example #4
File: fio.py Project: jlivni/Fiona
def load(ctx, output, driver, x_json_seq):
    """Load features from JSON to a file in another format.

    The input is a GeoJSON feature collection or optionally a sequence of
    GeoJSON feature objects."""
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('fio')
    stdin = click.get_text_stream('stdin')

    first_line = next(stdin)

    # If input is RS-delimited JSON sequence.
    if first_line.startswith(u'\x1e'):
        def feature_gen():
            buffer = first_line.strip(u'\x1e')
            for line in stdin:
                if line.startswith(u'\x1e'):
                    if buffer:
                        yield json.loads(buffer)
                    buffer = line.strip(u'\x1e')
                else:
                    buffer += line
            else:
                yield json.loads(buffer)
    elif x_json_seq:
        def feature_gen():
            yield json.loads(first_line)
            for line in stdin:
                yield json.loads(line)
    else:
        def feature_gen():
            # The first line of stdin was already consumed above, so stitch it back on.
            text = first_line + stdin.read()
            for feat in json.loads(text)['features']:
                yield feat

    try:
        source = feature_gen()

        # Use schema of first feature as a template.
        # TODO: schema specified on command line?
        first = next(source)
        schema = {'geometry': first['geometry']['type']}
        schema['properties'] = dict([
            (k, FIELD_TYPES_MAP_REV[type(v)])
            for k, v in first['properties'].items()])

        with fiona.drivers(CPL_DEBUG=verbosity>2):
            with fiona.open(
                    output, 'w',
                    driver=driver,
                    crs={'init': 'epsg:4326'},
                    schema=schema) as dst:
                dst.write(first)
                dst.writerecords(source)
        sys.exit(0)
    except IOError:
        logger.info("IOError caught")
        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
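The first branch above detects an RS-delimited JSON text sequence (RFC 7464 style), where each feature is preceded by an ASCII record separator (0x1e). A minimal sketch of producing such a stream, with illustrative data:

import json
import sys

features = [
    {"type": "Feature",
     "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
     "properties": {"name": "a"}},
    {"type": "Feature",
     "geometry": {"type": "Point", "coordinates": [1.0, 1.0]},
     "properties": {"name": "b"}},
]
# Each record starts with the RS byte, matching the startswith(u'\x1e') check above.
for feat in features:
    sys.stdout.write(u"\x1e" + json.dumps(feat) + "\n")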
Example #5
File: insp.py Project: perrygeo/Fiona
def insp(ctx, src_path, interpreter):

    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')

    banner = 'Fiona %s Interactive Inspector (Python %s)\n' \
             'Type "src.schema", "next(src)", or "help(src)" ' \
             'for more information.' \
             % (fiona.__version__, '.'.join(map(str, sys.version_info[:3])))

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(src_path) as src:

                scope = locals()

                if not interpreter:
                    code.interact(banner, local=scope)
                elif interpreter == 'ipython':
                    import IPython
                    IPython.InteractiveShell.banner1 = banner
                    IPython.start_ipython(argv=[], user_ns=scope)
                else:
                    raise click.ClickException(
                        'Interpreter {} is unsupported or missing '
                        'dependencies'.format(interpreter))
    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
Example #6
def bbox_copy(in_file, out_file, bbox, in_layer=0, out_layer=None, dst_crs=None):
    """Dump all features within the provided WGS84 bbox to a new file
    """
    with fiona.drivers():
        with fiona.open(in_file, layer=in_layer) as source:
            output_schema = source.schema.copy()
            # transform the provided bbox to the crs of source data
            bbox_proj = transform_bbox(bbox, from_epsg(4326),
                                       out_crs=source.meta['crs'])
            # use source crs if no reprojection specified
            if dst_crs:
                out_crs = dst_crs
            else:
                out_crs = source.crs
            with fiona.open(out_file, 'w',
                            crs=out_crs, driver="ESRI Shapefile",
                            schema=output_schema) as sink:
                for f in source.filter(bbox=bbox_proj):
                    # transform only if dst_crs specified
                    if dst_crs:
                        g = transform_geom(
                                    source.crs, dst_crs, f['geometry'],
                                    antimeridian_cutting=True)
                        f['geometry'] = g
                    sink.write(f)
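A hypothetical call to the function above; the bbox is assumed to be (minx, miny, maxx, maxy) in WGS84, and from_epsg is assumed to come from fiona.crs as in the function body.

from fiona.crs import from_epsg

# Clip a shapefile to a WGS84 bounding box and reproject to EPSG:3005.
bbox_copy("roads.shp", "roads_clip.shp",
          bbox=(-123.3, 48.3, -122.9, 48.6),
          dst_crs=from_epsg(3005))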
Example #7
File: fio.py Project: barrycug/Fiona
def info(ctx, input, indent, meta_member):
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('rio')

    stdout = click.get_text_stream('stdout')
    try:
        with fiona.drivers(CPL_DEBUG=verbosity>2):
            with fiona.open(input) as src:
                info = src.meta
                info.update(bounds=src.bounds, count=len(src))
                proj4 = fiona.crs.to_string(src.crs)
                if proj4.startswith('+init=epsg'):
                    proj4 = proj4.split('=')[1].upper()
                info['crs'] = proj4
                if meta_member:
                    if isinstance(info[meta_member], (list, tuple)):
                        print(" ".join(map(str, info[meta_member])))
                    else:
                        print(info[meta_member])
                else:
                    stdout.write(json.dumps(info, indent=indent))
                    stdout.write("\n")
        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
Example #8
def main(raster, vector, bands, all_touched, no_pretty_print, indent):
    """
    Get raster stats for every feature in a vector datasource.

    \b
    Only compute against the first two bands:
    \b
        $ zonal-statistics.py sample-data/NAIP.tif \\
            sample-data/polygon-samples.geojson -b 1,2
    \b
    """

    with fio.drivers(), rio.drivers():
        with rio.open(raster) as src_r, fio.open(vector) as src_v:

            if not bands:
                bands = list(range(1, src_r.count + 1))

            results = zonal_stats_from_raster(src_v,
                                              src_r,
                                              bands=bands,
                                              all_touched=all_touched)

            if not no_pretty_print:
                results = pprint.pformat(results, indent=indent)

            click.echo(results)
Example #9
def import_with_fiona(fpath, source):
    """
    Use fiona to import a parcel file.

    Return a list of dict objects containing WKT-formatted geometries in 
    addition to any metadata.
    """
    shapes = []

    try:
        with fiona.drivers():
            data = fiona.open(fpath)
            for obj in data:
                try:
                    shape = scrape_fiona_metadata(obj, source)
                    geom = to_shapely_obj(obj)
                    if geom:
                        shape['geom'] = dumps(geom)
                        shapes.append(shape)
                except Exception as e:
                    _L.warning('error loading shape from fiona. {}'.format(e))
    except Exception as e:
        _L.warning('error importing file. {}'.format(e))

    return shapes
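A usage sketch; the source argument is whatever tag scrape_fiona_metadata expects, assumed here to be a plain label.

# Hypothetical call: import parcels and inspect the first WKT geometry.
shapes = import_with_fiona("parcels.shp", source="county-assessor")
if shapes:
    print(shapes[0]["geom"][:80])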
Example #10
def tile(x=None, y=None, z=None):
    if not (x and y and z):
        raise Exception('Need x, y, and z got: %s, %s, %s' % (x, y, z))
    with fiona.drivers():
        with fiona.open('world.shp') as source:
            x, y, z = [float(x), float(y), float(z)]
            minx, miny = tile2ll(x, y, z)
            maxx, maxy = tile2ll(x + 1, y + 1, z)
            bbox = MultiPoint([
                Point(minx, miny),
                Point(minx, maxy),
                Point(maxx, maxy),
                Point(maxx, miny)
            ]).envelope
            print(minx, miny, maxx, maxy)
            features = []
            for _, s in source.items(bbox=(minx, miny, maxx, maxy)):
                geom = shape(s['geometry'])
                if geom.intersects(bbox):
                    intsx = geom.intersection(bbox)
                    features.append({
                        'geometry': mapping(intsx),
                        'properties': s['properties'],
                        'type': 'Feature'
                    })

            return json.dumps(
                {'land': {
                    'type': 'FeatureCollection',
                    'features': features
                }})
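A hypothetical call; the function reads a hard-coded world.shp next to the script, and x/y/z follow the slippy-map tiling scheme implied by tile2ll.

# Return GeoJSON land polygons clipped to tile z=2, x=1, y=1.
geojson_text = tile(x=1, y=1, z=2)
print(geojson_text[:200])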
Example #11
def create(output, geometries, properties=None, crs=None, driver=None):
    '''
    Create a layer from a set of shapely geometries or geometry
    dicts. Uses the list of property dicts if provided; otherwise each
    geometry's list index is written as an 'id' property.'''
    try:
        schema = {'geometry': geometries[0].type}
    except AttributeError:
        schema = {'geometry': geometries[0]['type']}

    driver = driver or drivers.from_path(output)

    FIELD_MAP = {v: k for k, v in fiona.FIELD_TYPES_MAP.items()}

    if properties:
        schema['properties'] = {
            k: FIELD_MAP[type(v)]
            for k, v in properties[0].items()
        }
    else:
        schema['properties'] = {'id': 'int'}
        properties = [{'id': x} for x in range(len(geometries))]

    with fiona.drivers():
        with fiona.open(output, 'w', driver=driver, crs=crs,
                        schema=schema) as f:
            for geom, props in zip(geometries, properties):
                try:
                    feature = {'properties': props, 'geometry': mapping(geom)}
                except AttributeError:
                    feature = {'properties': props, 'geometry': geom}

                f.write(feature)
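A usage sketch with shapely geometries and simple property dicts; the output path and driver are illustrative, and when driver is omitted it is inferred from the path via drivers.from_path.

from shapely.geometry import Point

create("points.geojson",
       [Point(0, 0), Point(1, 1)],
       properties=[{"name": "a"}, {"name": "b"}],
       crs={"init": "epsg:4326"},
       driver="GeoJSON")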
Example #12
File: cat.py Project: citterio/Fiona
def cat(ctx, input, precision, indent, compact, ignore_errors, dst_crs,
        x_json_seq_rs):
    """Concatenate and print the features of input datasets as a
    sequence of GeoJSON features."""
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('fio')
    sink = click.get_text_stream('stdout')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    item_sep = compact and ',' or ', '

    try:
        with fiona.drivers(CPL_DEBUG=verbosity>2):
            for path in input:
                with fiona.open(path) as src:
                    for feat in src:
                        if dst_crs or precision > 0:
                            g = transform_geom(
                                    src.crs, dst_crs, feat['geometry'],
                                    antimeridian_cutting=True,
                                    precision=precision)
                            feat['geometry'] = g
                        if x_json_seq_rs:
                            sink.write(u'\u001e')
                        json.dump(feat, sink, **dump_kwds)
                        sink.write("\n")
        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
Example #13
    def clean_vector_file(self):
        lfile = self.cleaned_data['vector_file']

        if lfile.size > MAX_FILE_SIZE:
            error = (u"Please simplify the geometres and try uploading again."
                     " File should not be bigger than {}.")
            raise ValidationError(error.format(filesizeformat(MAX_FILE_SIZE)))

        tmp = tempfile.NamedTemporaryFile(mode='wb', suffix=lfile.name)
        destination = open(tmp.name, 'wb+')
        for chunk in lfile.chunks():
            destination.write(chunk)
        destination.close()
        with fiona.drivers():
            try:
                if lfile.name[-3:].lower() == 'zip':
                    if not zipfile.is_zipfile(tmp.name):
                        raise ValidationError("Not a valid Zip Archive")

                    self._collection = zip_collection(tmp.name)
                else:
                    self._collection = fiona.open(tmp.name, 'r')

                # Once this function returns, the collection's crs can no longer
                # be accessed, so grab it here first.
                self._crs = self._collection.crs

            except Exception as e:
                print e
                er = str(e)
                er = er.replace(tmp.name.replace(lfile.name, ''), '')
                raise ValidationError(
                    "Error extracting vector data from the file. " + er)
Example #14
    def read_shp(self, infile, **kwargs):
        """Reads a Shapefile and gives the results in GeoJSON format
        Speed = 0.35ms/feature
        """

        logging.info('Reading Shapefile: %s' % infile)

        with fiona.drivers():
            with fiona.open(infile) as source:
                self.geometry = set([source.meta['schema']['geometry']])
                self.properties = source.meta['schema']['properties']

                # Read EPSG
                crs = source.meta['crs']

                if 'init' in crs:
                    self.epsg = crs['init'].upper()
                elif 'crs_wkt' in source.meta:
                    self.wkt = source.meta['crs_wkt']
                else:
                    logging.warning('Coordinate Reference System was not detected (default=EPSG:4326)')
                    self.epsg = 'EPSG:4326'

                for feature in source:
                    if feature:
                        if feature['geometry']:
                            feature = confirm_geometry(feature)
                            self.features.append(feature)
                        else:
                            logging.warning('Could not find [geometry] in feature.')
Example #15
File: fio.py Project: jlivni/Fiona
def info(ctx, input, indent, meta_member):
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('rio')

    stdout = click.get_text_stream('stdout')
    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(input) as src:
                info = src.meta
                info.update(bounds=src.bounds, count=len(src))
                proj4 = fiona.crs.to_string(src.crs)
                if proj4.startswith('+init=epsg'):
                    proj4 = proj4.split('=')[1].upper()
                info['crs'] = proj4
                if meta_member:
                    if isinstance(info[meta_member], (list, tuple)):
                        print(" ".join(map(str, info[meta_member])))
                    else:
                        print(info[meta_member])
                else:
                    stdout.write(json.dumps(info, indent=indent))
                    stdout.write("\n")
        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
Example #16
def describe(in_file, layer=0):
    """Basically fio and rio info
    https://github.com/Toblerity/Fiona/blob/master/fiona/fio/info.py
    https://github.com/mapbox/rasterio/blob/master/rasterio/rio/info.py
    """
    # try vector first
    try:
        with fiona.drivers():
            with fiona.open(in_file, layer=layer) as src:
                inf = src.meta
                inf.update(bounds=src.bounds, name=src.name)
                try:
                    inf.update(count=len(src))
                except TypeError:
                    inf.update(count=None)
                    info("Setting 'count' to None/null - layer does "
                         "not support counting")
                proj4 = fiona.crs.to_string(src.crs)
                if proj4.startswith('+init=epsg'):
                    proj4 = proj4.split('=')[1].upper()
                inf['crs'] = proj4
                inf['type'] = 'VECTOR'
    # if fiona fails, try rasterio
    except:
        with rasterio.open(in_file) as src:
            inf = dict(src.profile)
            inf['shape'] = (inf['height'], inf['width'])
            inf['bounds'] = src.bounds
            proj4 = src.crs.to_string()
            if proj4.startswith('+init=epsg'):
                proj4 = proj4.split('=')[1].upper()
            inf['crs'] = proj4
            inf['type'] = 'RASTER'
    return inf
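A brief usage sketch; file names are placeholders.

# The same helper reports on either vector or raster inputs.
vec_info = describe("parcels.shp")
print(vec_info["type"], vec_info["count"], vec_info["crs"])

ras_info = describe("elevation.tif")
print(ras_info["type"], ras_info["shape"], ras_info["bounds"])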
Example #17
def cat(ctx, input, precision, indent, compact, ignore_errors, x_json_seq_rs):
    """Concatenate and print the features of input datasets as a
    sequence of GeoJSON features."""
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('fio')
    sink = click.get_text_stream('stdout')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')

    item_sep = compact and ',' or ', '

    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            for path in input:
                with fiona.open(path) as src:
                    for feat in src:
                        if precision >= 0:
                            feat = round_rec(feat, precision)
                        if x_json_seq_rs:
                            sink.write(u'\u001e')
                        json.dump(feat, sink, **dump_kwds)
                        sink.write("\n")
        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
Example #18
def dissolve(sourcefile, sinkfile, key, unsplit=None):
    try:
        shape
    except NameError:
        raise NotImplementedError("dissolve require shapely")

    with fiona.drivers():
        with fiona.open(sourcefile) as source:
            schema = source.schema
            schema['properties'] = {key: source.schema['properties'][key]}

            with fiona.open(sinkfile,
                            'w',
                            crs=source.crs,
                            schema=schema,
                            driver=source.driver) as sink:

                gotkeys = dict()

                for _, feat in source.items():
                    fkey = feat['properties'][key]
                    fshape = shapelyshape(feat['geometry'])

                    if fkey in gotkeys:
                        gotkeys[fkey][0] = gotkeys[fkey][0].union(fshape)
                    else:
                        gotkeys[fkey] = [fshape]

                for shapelist in gotkeys.values():
                    if unsplit:
                        for s in disjointed(shapelist):
                            sink.write(s)

                    else:
                        sink.write(shapelist[0])
Example #19
File: io.py Project: qdhqf/centerline
def create_centerlines(src, dst, density=0.5):
    """
    Create centerlines and save them to an ESRI Shapefile.

    Reads polygons from the `src` ESRI Shapefile, creates Centerline
    objects with the specified `density` parameter and writes them to
    the `dst` ESRI Shapefile.

    Only Polygon features are converted to centerlines. Features of
    different types are skipped.

    Args:
        src (str): source ESRI Shapefile
        dst (str): destination ESRI Shapefile
        density (:obj:`float`, optional): the Centerline's density.
            Defaults to 0.5 (meters)

    Returns:
        None

    """
    try:
        DST_DRIVER = get_ogr_driver(filepath=dst)
    except ValueError:
        raise

    with fiona.drivers():
        with fiona.open(path=src, mode='r') as source:
            SCHEMA = source.schema.copy()
            SCHEMA.update({'geometry': 'MultiLineString'})
            with fiona.open(path=dst,
                            mode='w',
                            driver=DST_DRIVER.GetName(),
                            schema=SCHEMA,
                            crs=source.crs,
                            encoding=source.encoding) as destination:
                for record in source:
                    geom = record.get('geometry')

                    if not is_polygon(geometry_type=geom.get('type')):
                        continue

                    input_geom = shape(geom)
                    attributes = record.get('properties')
                    centerline_obj = Centerline(input_geom=input_geom,
                                                interpolation_dist=density,
                                                **attributes)

                    centerline_dict = {
                        'geometry': mapping(centerline_obj),
                        'properties': {
                            k: v
                            for k, v in centerline_obj.__dict__.items()
                            if k in attributes.keys()
                        }
                    }

                    destination.write(centerline_dict)

    return None
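A hypothetical call to the function above; density is in the units of the source CRS (documented as meters).

# Convert polygon features to centerlines, skipping non-polygon geometries.
create_centerlines("buildings.shp", "building_centerlines.shp", density=1.0)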
Example #20
def fchain(*filenames):
    '''Reduce features of a layer to a single value'''
    for filename in itertools.chain(filenames):
        with fiona.drivers():
            with fiona.open(filename, "r") as layer:
                for feature in layer:
                    yield feature
Example #21
    def load_faces(self):
        self.faces = {}

        with fiona.drivers():
            with fiona.open(projected_fp) as src:
                for feat in src:
                    self.faces[feat['properties']['id']] = shape(
                        feat['geometry'])
Example #22
 def handle_noargs(self, **options):
     with fiona.drivers():
         with fiona.open(settings.ZIPCODES_SHP, 'r') as zipcodes_data:
             idx = index.Rtree(settings.RTREE_INDEX_FILE)
             for feature in zipcodes_data:
                 geometry = shape(feature['geometry'])
                 idx.insert(int(feature['id']), geometry.bounds)
     print 'Successfully created an Rtree index file at %s' % settings.RTREE_INDEX_FILE_NAME
Example #23
def convert(inf, outf):
    with fiona.drivers():
        with fiona.open(inf) as source:
            meta = source.meta
            with fiona.open(outf, "w", **meta) as sink:
                for f in source:
                    f["properties"]["index"] = f["properties"]["index"] / 12.19 * 40
                    sink.write(f)
Example #24
    def validate(shapefile, existing_geometries, already_uploaded):
        """Specifies which validation methods should be called for each input field.


        parameters:
            - shapefile: The contents of the file the user has uploaded
            - existing_geometries: A collection of features already drawn/uploaded by the user
            - already_uploaded: True if user has clicked upload button multiple times and session has been updated

        returns:
            dict: An instance of ValidationErrorBuilder with a ValidationError dict and a heading summary message.
        """
        validation_error_builder = ValidationErrorBuilder()

        FieldValidator(shapefile, 'shapefile-input', 'shapefile-input', validation_error_builder,
                       summary_message="Upload a file",
                       inline_message="Upload a file") \
            .is_required()

        FieldValidator(shapefile, 'shapefile-input', 'shapefile-input', validation_error_builder,
                       summary_message="File is bigger than 1MB",
                       inline_message="Upload a smaller file") \
            .is_uploaded_filesize_less_than_bytes(1000000)

        if already_uploaded:
            all_extents = existing_geometries['features'] \
                if existing_geometries and 'features' in existing_geometries \
                else []
        else:
            shapefile_contents = []

            try:
                with fiona.drivers():
                    with fiona.BytesCollection(shapefile.read()) as shpfile:
                        for shape in shpfile:
                            shapefile_contents.append(shape)
                shapefile.seek(0)
            except Exception:
                pass

            FieldValidator(shapefile_contents, 'shapefile-input', 'shapefile-input', validation_error_builder,
                           summary_message="File not uploaded",
                           inline_message="Upload a different file") \
                .is_required()

            if existing_geometries and 'features' in existing_geometries:
                all_extents = shapefile_contents + existing_geometries[
                    'features']
            else:
                all_extents = shapefile_contents

        FieldValidator(all_extents, 'shapefile-input', 'shapefile-input', validation_error_builder,
                       inline_message="Too many extents",
                       summary_message="Number of extents must be 500 (or fewer)") \
            .is_length_less_than_or_equal_to(500)

        return validation_error_builder.get()
Example #25
    def __init__(self, force_recompute = False):
        """Initialize the local data files.
        
        The first time it will:
        - Create a directory named "datadir" in this file's current directory
        - Download the Olson database and place it in ./datadir
        - Create an Rtree on the shapes in the database and persist it in ./datadir
        - Create an additional bookmarking dict() and persist (via pickle) it in ./datadir 

        All the other times it will:
        - Load the RTree and the additional bookmarking dict() in memory
        
        Keyword arguments:
        force_recompute -- if True, deletes and recomputes the local data
        """

        data_dir =  "%s/datadir" % os.path.dirname(os.path.realpath(__file__))
        if not os.path.exists(data_dir):
            os.mkdir(data_dir)
            
        data_files = ['rtree.dat', 'rtree.idx', 'rtree.p']

        # at least one file is missing
        if  force_recompute or (not reduce(operator.and_, [os.path.isfile("%s/%s" % (data_dir,x)) for x in data_files])):

            tz_fname = "%s/%s" % (data_dir, 'tz_world.zip')
            print >> sys.stderr, "Downloading the TZ shapefile (Olson database)..."
            urllib.urlretrieve ('http://efele.net/maps/tz/world/tz_world.zip', tz_fname)
            print >> sys.stderr, "Done."

            for x in data_files:
                if  os.path.isfile("%s/%s" % (data_dir,x)): 
                    os.remove("%s/%s" % (data_dir,x))

            self.idx = index.Rtree('%s/rtree' % data_dir)
            with fiona.drivers():
                print >> sys.stderr, "Building the spatial index on the shapefile..."
                with fiona.open('/world/tz_world.shp',
                                vfs='zip://%s' % tz_fname) as collection:
                    self.polyd = {}
                    i = 0
                    for polygon in collection:
                        p = shape(polygon['geometry'])
                        self.idx.add(i,shape(p).bounds)
                        self.polyd[i] = {'shape' : p, 'tzid': polygon['properties']['TZID']}
                        i += 1
                with open('%s/rtree.p' % data_dir, 'w') as f:
                    pickle.dump(self.polyd, f)

                print >> sys.stderr, "Done."

        else:
            print >> sys.stderr, "Loading Rtree and Pickle File"
            self.idx = index.Rtree('%s/rtree' % data_dir)
            with open('%s/rtree.p' % data_dir) as f:
                self.polyd = pickle.load(f)
Example #26
File: ls.py Project: smnorris/Fiona
def ls(ctx, input, indent):
    """
    List layers in a datasource.
    """

    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2

    with fiona.drivers(CPL_DEBUG=verbosity > 2):
        result = fiona.listlayers(input)
        click.echo(json.dumps(result, indent=indent))
Example #27
def fzip(*filenames):
    with fiona.drivers():
        try:
            handles = [fiona.open(f) for f in filenames]
            for features in zip(*handles):
                yield features

        finally:
            for h in handles:
                h.close()
Example #28
def import_zip(shape_file):
	#construct list of zip polygons
	with fiona.drivers():
		#open a shape file
		with fiona.open(shape_file, 'r') as source:
			#creating a list of polygons from the shape file
			zips = MultiPolygon([shape(pol['geometry']) for pol in source])
			#creating a list of tuples with the zip codes from the shape file and their order in the file
			zips_key = [(pol['properties']['ZCTA5CE10'], inx) for inx, pol in enumerate(source)]
			return (sorted(zips_key), zips)
Example #29
def simplify(inp, output, **kwargs):
    with fiona.drivers():
        with fiona.open(inp, 'r') as src:
            with fiona.open(output,
                            'w',
                            schema=src.schema,
                            driver=src.driver,
                            crs=src.crs) as sink:
                for f in src:
                    sink.write(simplify_feature(f, **kwargs))
Example #30
    def compose(self, scalar=None, bounds=None, **kwargs):
        '''
        Draw files to svg.

        Args:
            scalar (int): factor by which to scale the data, generally a small number (1/map scale).
            bounds (Sequence): Map bounding box in input units. Defaults to map data bounds.
            style (str): CSS to append to parent object CSS.
            viewbox (bool): If True, draw SVG with a viewbox. If False, translate coordinates to the frame.
                            Defaults to True.
            precision (float): Round coordinates to this precision [default: 0].
            simplify (float): Must be between 0 and 1. Fraction of removable coordinates to keep.
            inline (bool): If True, try to run CSS into each element.

        Returns:
            String (unicode in Python 2) containing an entire SVG document.
        '''
        # Set up arguments
        scalar = scalar or self.scalar

        drgs = {
            'style': kwargs.pop('style', ''),
            'viewbox': kwargs.pop('viewbox', True),
            'inline': kwargs.pop('inline', False),
        }

        if bounds:
            reset_bounds = True
        else:
            reset_bounds = False
            bounds = self._unprojected_bounds

        if 'simplify' in kwargs:
            kwargs['simplifier'] = convert.simplifier(kwargs.pop('simplify'))
        else:
            kwargs['simplifier'] = self.simplifier

        kwargs['precision'] = kwargs.get('precision', 0)

        # Draw files
        with fiona.drivers():
            members = [
                self._compose_file(f,
                                   scalar,
                                   unprojected_bounds=bounds,
                                   **kwargs) for f in self.files
            ]

        drawing = self._draw(members, bounds, scalar, **drgs)

        # Reset bounds so that self can be used again fresh. This is hacky.
        if reset_bounds:
            self._projected_bounds = (None, None, None, None)

        return drawing
Example #31
    def test_json_overwrite(self):
        path = os.path.join(self.tempdir, 'foo.json')

        with fiona.drivers(), fiona.open(path, 'w', 
                driver='GeoJSON', 
                schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c:
            c.writerecords([{
                'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]},
                'properties': {'title': 'One'}}])
            c.writerecords([{
                'geometry': {'type': 'MultiPoint', 'coordinates': [[0.0, 0.0]]},
                'properties': {'title': 'Two'}}])

        # Overwrite should raise DriverIOError.
        try:
            with fiona.drivers(), fiona.open(path, 'w', driver='GeoJSON', 
                    schema={'geometry': 'Unknown', 'properties': [('title', 'str')]}) as c:
                pass
        except IOError:
            pass
Example #32
File: ls.py Project: perrygeo/Fiona
def ls(ctx, input, indent):

    """
    List layers in a datasource.
    """

    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2

    with fiona.drivers(CPL_DEBUG=verbosity > 2):
        result = fiona.listlayers(input)
        click.echo(json.dumps(result, indent=indent))
Example #33
def main(infile, outfile):

    with fio.drivers():
        with fio.open(infile) as src, \
                fio.open(outfile, 'w', **src.meta) as dst, \
                click.progressbar(src) as features:
            for feat in features:
                ogr_geom = gj2geom(feat['geometry'])
                ogr_geom.CloseRings()
                feat['geometry'] = geom2gj(ogr_geom)
                dst.write(feat)
Example #34
def main(srcfile):
    
    with fiona.drivers(), fiona.open(srcfile) as src:
            
        code.interact(
            'Fiona %s Interactive Inspector (Python %s)\n'
            'Type "src.schema", "next(src)", or "help(src)" '
            'for more information.' %  (
                fiona.__version__, '.'.join(map(str, sys.version_info[:3]))),
            local=locals())

    return 1
Example #35
File: cat.py Project: smnorris/Fiona
def cat(ctx, files, precision, indent, compact, ignore_errors, dst_crs,
        use_rs, bbox, layer):

    """
    Concatenate and print the features of input datasets as a sequence of
    GeoJSON features.

    When working with a multi-layer dataset the first layer is used by default.
    Use the '--layer' option to select a different layer.
    """

    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')

    dump_kwds = {'sort_keys': True}
    if indent:
        dump_kwds['indent'] = indent
    if compact:
        dump_kwds['separators'] = (',', ':')
    item_sep = compact and ',' or ', '
    # Validate file indexes provided in --layer option
    # (can't pass the files to option callback)
    if layer:
        options.validate_multilayer_file_index(files, layer)
    # first layer is the default
    for i in range(1, len(files) + 1):
        if str(i) not in layer.keys():
            layer[str(i)] = [0]
    try:
        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            for i, path in enumerate(files, 1):
                for lyr in layer[str(i)]:
                    with fiona.open(path, layer=lyr) as src:
                        if bbox:
                            try:
                                bbox = tuple(map(float, bbox.split(',')))
                            except ValueError:
                                bbox = json.loads(bbox)
                        for i, feat in src.items(bbox=bbox):
                            if dst_crs or precision >= 0:
                                g = transform_geom(
                                        src.crs, dst_crs, feat['geometry'],
                                        antimeridian_cutting=True,
                                        precision=precision)
                                feat['geometry'] = g
                                feat['bbox'] = fiona.bounds(g)
                            if use_rs:
                                click.echo(u'\u001e', nl=False)
                            click.echo(json.dumps(feat, **dump_kwds))

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
Example #36
def cli(input_vector, raster, output_vector, layer, bidx, field):

    """
    Samples vector points against a raster.
    """

    # Opens the input vector and the raster to sample against
    with fio.drivers(), rio.drivers(), fio.open(input_vector, layer=layer) as src, rio.open(raster) as rast:
        # Validate the input data to make sure sampling won't fail
        if src.schema['geometry'].lower() != "point":
            raise click.ClickException("Input vector must be a point layer.")
        if src.crs != rast.crs:
            raise click.ClickException("Input vector CRS and raster CRS must be the same.")
        if field in src.schema['properties']:
            raise click.ClickException("This field name already exits.")
        if bidx > rast.count:
            raise click.ClickException("This band doesn't exist.")

        # Constructs the output schema: layer type, CRS, field names, field types
        meta = copy.deepcopy(src.meta)
        if 'float' in rast.dtypes[bidx - 1].lower():
            field_type = 'float:10.4'
        else:
            field_type = 'int:10'
        meta['schema']['properties'][field] = field_type

        # Allows for operation on large rasters by sampling smaller windows (for smaller memory footprint)
        """
        x_min, y_min, x_max, y_max = src.bounds
        c_min, r_max = (x_min, y_min) * ~raster.affine
        c_max, r_min = (x_max, y_max) * ~raster.affine
        window = ((r_min, r_max),(c_min, c_max))

        ndvi = raster.read(1, window=window)
        aff = raster.window_transform(window)
        height, width = ndvi.shape
        """

        # Reads raster into numpy array
        data = rast.read(bidx)
        # Caches height and width to avoid sampling out of bounds
        height, width = data.shape

        # Opens output file, samples a feature and writes input metadata plus new feature into output file
        with fio.open(output_vector, 'w', **meta) as dst:
            for feature in src:
                feature['properties'][field] = None
                x, y = feature['geometry']['coordinates'][:2]
                col, row = (x,y) * ~rast.affine
                if 0 <= col < width and 0 <= row < height:
                    feature['properties'][field] = data[row][col].item()
                dst.write(feature)
Example #37
File: fio.py Project: MatthewArrott/Fiona
def env(ctx, key):
    """Print information about the Fiona environment: available
    formats, etc.
    """
    verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1
    logger = logging.getLogger('fio')
    stdout = click.get_text_stream('stdout')
    with fiona.drivers(CPL_DEBUG=(verbosity > 2)) as env:
        if key == 'formats':
            for k, v in sorted(fiona.supported_drivers.items()):
                modes = ', '.join("'" + m + "'" for m in v)
                stdout.write("%s (modes %s)\n" % (k, modes))
            stdout.write('\n')
Example #38
    def compose(self, scalar=None, bounds=None, **kwargs):
        '''
        Draw files to svg.

        Args:
            scalar (int): factor by which to scale the data, generally a small number (1/map scale).
            bounds (Sequence): Map bounding box in input units. Defaults to map data bounds.
            style (str): CSS to append to parent object CSS.
            viewbox (bool): If True, draw SVG with a viewbox. If False, translate coordinates to the frame.
                            Defaults to True.
            precision (float): Round coordinates to this precision [default: 0].
            simplify (float): Must be between 0 and 1. Fraction of removable coordinates to keep.
            inline (bool): If True, try to run CSS into each element.

        Returns:
            String (unicode in Python 2) containing an entire SVG document.
        '''
        # Set up arguments
        scalar = scalar or self.scalar

        drgs = {
            'style': kwargs.pop('style', ''),
            'viewbox': kwargs.pop('viewbox', True),
            'inline': kwargs.pop('inline', False),
        }

        if bounds:
            reset_bounds = True
        else:
            reset_bounds = False
            bounds = self._unprojected_bounds

        if 'simplify' in kwargs:
            kwargs['simplifier'] = convert.simplifier(kwargs.pop('simplify'))
        else:
            kwargs['simplifier'] = self.simplifier

        kwargs['precision'] = kwargs.get('precision', 0)

        # Draw files
        with fiona.drivers():
            members = [self._compose_file(f, scalar, unprojected_bounds=bounds, **kwargs) for f in self.files]

        drawing = self._draw(members, bounds, scalar, **drgs)

        # Reset bounds so that self can be used again fresh. This is hacky.
        if reset_bounds:
            self._projected_bounds = (None, None, None, None)

        return drawing
Example #39
def test_options(tmpdir):
    """Test that setting CPL_DEBUG=ON works"""
    logfile = str(tmpdir.mkdir('tests').join('test_options.log'))
    logger = logging.getLogger('Fiona')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logfile)
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)

    with fiona.drivers(CPL_DEBUG=True):
        c = fiona.open("tests/data/coutwildrnp.shp")
        c.close()
        log = open(logfile).read()
        assert "Option CPL_DEBUG" in log
Example #40
def shp_to_csv(path, field_map):
    stub, ext = os.path.splitext(path)
    outpath = '%s_clean.csv' % (stub)
    fp = open(outpath, 'w')

    with fiona.drivers():
        with fiona.open(path) as shp:
            output_fields = field_map.keys()
            output_fields.append('name1_ascii')
            csv_out = csv.DictWriter(fp, fieldnames=output_fields)
            # csv_out.writeheader()

            for rec in shp:
                row_dict = extract_fields(rec, field_map)
                csv_out.writerow(row_dict)
    return
Example #41
File: fio.py Project: barrycug/Fiona
def insp(ctx, src_path):
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('fio')
    try:
        with fiona.drivers(CPL_DEBUG=verbosity>2):
            with fiona.open(src_path) as src:
                code.interact(
                    'Fiona %s Interactive Inspector (Python %s)\n'
                    'Type "src.schema", "next(src)", or "help(src)" '
                    'for more information.' %  (
                        fiona.__version__, '.'.join(
                            map(str, sys.version_info[:3]))),
                    local=locals())
            sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
Example #42
File: info.py Project: antmd/Fiona
def insp(ctx, src_path):
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')
    try:
        with fiona.drivers(CPL_DEBUG=verbosity>2):
            with fiona.open(src_path) as src:
                code.interact(
                    'Fiona %s Interactive Inspector (Python %s)\n'
                    'Type "src.schema", "next(src)", or "help(src)" '
                    'for more information.' %  (
                        fiona.__version__, '.'.join(
                            map(str, sys.version_info[:3]))),
                    local=locals())

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
Example #43
File: load.py Project: mwtoews/Fiona
def load(ctx, output, driver, src_crs, dst_crs, features, layer):
    """Load features from JSON to a file in another format.

    The input is a GeoJSON feature collection or optionally a sequence of
    GeoJSON feature objects."""
    verbosity = (ctx.obj and ctx.obj['verbosity']) or 2
    logger = logging.getLogger('fio')

    dst_crs = dst_crs or src_crs

    if src_crs and dst_crs and src_crs != dst_crs:
        transformer = partial(transform_geom, src_crs, dst_crs,
                              antimeridian_cutting=True, precision=-1)
    else:
        transformer = lambda x: x

    def feature_gen():
        for feat in features:
            feat['geometry'] = transformer(feat['geometry'])
            yield feat

    try:
        source = feature_gen()

        # Use schema of first feature as a template.
        # TODO: schema specified on command line?
        first = next(source)
        schema = {'geometry': first['geometry']['type']}
        schema['properties'] = dict([
            (k, FIELD_TYPES_MAP_REV.get(type(v)) or 'str')
            for k, v in first['properties'].items()])

        with fiona.drivers(CPL_DEBUG=verbosity > 2):
            with fiona.open(
                    output, 'w',
                    driver=driver,
                    crs=dst_crs,
                    schema=schema,
                    layer=layer) as dst:
                dst.write(first)
                dst.writerecords(source)

    except Exception:
        logger.exception("Exception caught during processing")
        raise click.Abort()
Example #44
    def __iter__(self):
        """ Returns generator over shapefile rows.

        Note:
            The first column is an id field, taken from the id value of each shape
            The middle values are taken from the property_schema
            The last column is a string named geometry, which holds the WKT value; its type is geometry_type.

        """

        # These imports are here, not at the module level, so the geo
        # support can be an optional extra

        import fiona

        from shapely.geometry import shape
        from shapely.wkt import dumps
        from .spec import ColumnSpec

        self.start()

        with fiona.drivers():
            # retrieve the full path of the zip and convert it to a URL
            virtual_fs = 'zip://{}'.format(self._fstor._fs.zf.filename)
            layer_index = self.spec.segment or 0
            with fiona.open('/', vfs=virtual_fs, layer=layer_index) as source:
                # geometry_type = source.schema['geometry']
                property_schema = source.schema['properties']
                self.spec.columns = [ColumnSpec(**c) for c in self._get_columns(property_schema)]
                self._headers = [x['name'] for x in self._get_columns(property_schema)]

                for s in source:
                    row_data = s['properties']
                    shp = shape(s['geometry'])
                    wkt = dumps(shp)
                    row = [int(s['id'])]
                    for col_name, elem in six.iteritems(row_data):
                        row.append(elem)

                    row.append(wkt)

                    yield row

        self.finish()
Example #45
def test_options(tmpdir):
    """Test that setting CPL_DEBUG=ON works"""
    logfile = str(tmpdir.mkdir('tests').join('test_options.log'))
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logfile)
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)

    with fiona.drivers(CPL_DEBUG=True):
        path = os.path.join("tests", "data", "coutwildrnp.shp")
        c = fiona.open(path)
        c.close()
        with open(logfile, "r") as f:
            log = f.read()
        if GDAL_MAJOR_VER >= 2:
            assert "GDALOpen" in log
        else:
            assert "OGROpen" in log
Example #46
File: test_io.py Project: sjsrey/geopandas
    def test_empty_shapefile(self, tmpdir):

        # create empty shapefile
        meta = {'crs': {},
                'crs_wkt': '',
                'driver': 'ESRI Shapefile',
                'schema':
                    {'geometry': 'Point',
                     'properties': OrderedDict([('A', 'int:9'),
                                                ('Z', 'float:24.15')])}}

        fname = str(tmpdir.join("test_empty.shp"))

        with fiona.drivers():
            with fiona.open(fname, 'w', **meta) as _:
                pass

        empty = read_file(fname)
        assert isinstance(empty, geopandas.GeoDataFrame)
        assert all(empty.columns == ['A', 'Z', 'geometry'])
Example #47
def test_options(tmpdir, path_coutwildrnp_shp):
    """Test that setting CPL_DEBUG=ON works and that a warning is raised."""
    logfile = str(tmpdir.mkdir('tests').join('test_options.log'))
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logfile)
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)

    # fiona.drivers() will be deprecated.
    with pytest.warns(FionaDeprecationWarning):
        with fiona.drivers(CPL_DEBUG=True):
            c = fiona.open(path_coutwildrnp_shp)
            c.close()
            with open(logfile, "r") as f:
                log = f.read()
            if fiona.gdal_version.major >= 2:
                assert "GDALOpen" in log
            else:
                assert "OGROpen" in log
Example #48
def unpack_layers (username, project_name):
    try:
        #TODO: This still uses the file system
        user, project = Project.get_user_and_project(username, project_name)
        clear_uploads(SHP_DIR)
        zip_contents = StringIO(project.crane_project.zipfile.read())
        unique_name = str(uuid.uuid1())
        zipfile = SHP_DIR + '/' + unique_name +'.zip'
        with open(zipfile, "wb") as f:
            f.write(zip_contents.getvalue())
        #TODO: keep track of the data types.
        print " AT FIONA DRIVERS"
        messages = []
        project.crane_project.status = "Reading shapefiles."
        project.save(cascade = True)
        print globals()
        with fiona.drivers():
            for i, layername in enumerate(
                fiona.listlayers(
                '/',
                vfs='zip://'+zipfile)):
                feature = GeoFeat()
                feature.read_shapefile(layername, zipfile)
                feature.name = layername
                #TODO: This silently leaves bad layers out of the project; you need to report this to the user.
                try:
                    feature.save()
                    project.crane_project.features.append(feature)
                except Exception as e:
                    messages.append(layername + ' not saved, reason: '+ str(e))
                    continue
                    #TODO: These two calls might be redundant, check if its so.
        project.crane_project.status = "Shapefiles stored. User needs to enter Interpretations"
        print messages
        project.save(cascade = True)
        return "Layers stored"
    except Exception as e:
        project.crane_project.status = "Error unpacking layers"
        project.crane_project.messages = "Error unpacking layers: " + str(e)
        project.save(cascade = True)
        return
Example #49
File: fio.py Project: barrycug/Fiona
def load(ctx, output, driver, x_json_seq):
    """Load features from JSON to a file in another format.

    The input is a GeoJSON feature collection or optionally a sequence of
    GeoJSON feature objects."""
    verbosity = ctx.obj['verbosity']
    logger = logging.getLogger('fio')
    input = click.get_text_stream('stdin')

    try:
        if x_json_seq:
            feature_gen = six.moves.filter(
                lambda o: o.get('type') == 'Feature',
                (json.loads(text.strip()) for text in input))
        else:
            collection = json.load(input)
            feature_gen = iter(collection['features'])

        # Use schema of first feature as a template.
        # TODO: schema specified on command line?
        first = next(feature_gen)
        schema = {'geometry': first['geometry']['type']}
        schema['properties'] = {
            k: FIELD_TYPES_MAP_REV[type(v)]
            for k, v in first['properties'].items()}

        with fiona.drivers(CPL_DEBUG=verbosity>2):
            with fiona.open(
                    output, 'w',
                    driver=driver,
                    crs={'init': 'epsg:4326'},
                    schema=schema) as dst:
                dst.write(first)
                dst.writerecords(feature_gen)
        sys.exit(0)
    except IOError:
        logger.info("IOError caught")
        sys.exit(0)
    except Exception:
        logger.exception("Failed. Exception caught")
        sys.exit(1)
Example #50
def test_options(tmpdir=None):
    """Test that setting CPL_DEBUG=ON works"""
    if tmpdir is None:
        tempdir = tempfile.mkdtemp()
        logfile = os.path.join(tempdir, 'example.log')
    else:
        logfile = str(tmpdir.join('example.log'))
    logger = logging.getLogger('Fiona')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler(logfile)
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)

    with fiona.drivers(CPL_DEBUG=True):
        c = fiona.open("tests/data/coutwildrnp.shp")
        c.close()
        log = open(logfile).read()
        assert "Option CPL_DEBUG" in log

    if tempdir and tmpdir is None:
        shutil.rmtree(tempdir)