def Merger_Aggregate(path_in, path_out):
    from shapely.ops import cascaded_union
    from shapely.geometry import Point, MultiPolygon, shape, mapping
    from fiona import collection
    import sys
    import shutil
    from time import sleep
    
    with collection(path_in, "r") as input:
        schema = input.schema.copy()
        print "Check point Alpha"
        with collection(path_out, "w", "ESRI Shapefile", schema) as output:
            bl = []
            print "Check point Brovo"
            total = len(input)
            count = 0
            for f in input:    
                bl.append(shape(f['geometry']))
                ml = cascaded_union(bl)                
                f['geometry'] = mapping(ml)
                f['properties']['Shape_area'] = ml.area
                output.write(f)
                count +=1
                sys.stdout.write("\r" + "[%-75s]" % ('='*((count*75)/total)))
                sys.stdout.write(str((count*100)/total) + "%")
                sys.stdout.flush()
                sleep(0.25)
    print "Done!!!"
Example #2
def merge(buildingIn, addressIn, mergedOut):
    addresses = []

    with collection(addressIn, "r") as input:
        for address in input:
            shape = asShape(address['geometry'])
            shape.original = address
            addresses.append(shape)

    # Load and index all buildings.
    buildings = []
    buildingShapes = []
    buildingIdx = index.Index()
    with collection(buildingIn, "r") as input:
        for building in input:
            shape = asShape(building['geometry'])
            building['properties']['addresses'] = []
            buildings.append(building)
            buildingShapes.append(shape)
            buildingIdx.add(len(buildings) - 1, shape.bounds)

    # Map addresses to buildings.
    for address in addresses:
        for i in buildingIdx.intersection(address.bounds):
            if buildingShapes[i].contains(address):
                buildings[i]['properties']['addresses'].append(
                    address.original)

    with open(mergedOut, 'w') as outFile:
        outFile.writelines(json.dumps(buildings, indent=4))
        print('Exported ' + mergedOut)
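A hypothetical call; the paths are placeholders, and the output is a JSON dump of the building features with their matched addresses attached:

merge("buildings.shp", "addresses.shp", "buildings_with_addresses.json")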
Example #3
def cleanPolys(inShp, outShp):
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)

    with collection(inShp, "r") as input:
        schema = input.schema.copy()
        with collection(outShp, "w", "ESRI Shapefile", schema) as output:
            for f in input:

                try:
                    # Make a shapely object from the dict.
                    geom = shape(f['geometry'])
                    if not geom.is_valid:

                        # Use the 0-buffer polygon cleaning trick
                        clean = geom.buffer(0.0)
                        assert clean.geom_type == 'Polygon'
                        assert clean.is_valid
                        geom = clean

                    # Make a dict from the shapely object.
                    f['geometry'] = mapping(geom)
                    output.write(f)

                except Exception as e:
                    # Writing uncleanable features to a different shapefile
                    # is another option.
                    logging.exception("Error cleaning feature %s:", f['id'])
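The zero-width buffer used above is a common Shapely idiom for repairing self-intersecting polygons; a minimal standalone sketch (only Shapely assumed):

from shapely.geometry import Polygon

# A "bow-tie" ring whose edges cross is reported as invalid.
bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])
print(bowtie.is_valid)             # False

# buffer(0) rebuilds the geometry; the result validates.
print(bowtie.buffer(0).is_valid)   # True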
Example #4
def chunk(featureFileName, sectionFileName, pattern, key = None):

    # Load and index
    with collection(featureFileName, "r") as featureFile:
        featureIdx = index.Index()
        features = []
        for feature in featureFile:
            features.append(feature)
            featureIdx.add(len(features) - 1, asShape(feature['geometry']).bounds)

        # Break up by sections and export
        with collection(sectionFileName, "r") as sectionFile:
            i = 0
            for section in sectionFile:
                fileName = pattern % i
                if key:
                    fileName = pattern % section['properties'][key]
                    properties = {}
                    try:
                        with collection(fileName, 'w', 'ESRI Shapefile',
                                schema = featureFile.schema,
                                crs = featureFile.crs) as output:
                            sectionShape = asShape(section['geometry'])
                            for j in featureIdx.intersection(sectionShape.bounds):
                                if asShape(features[j]['geometry']).intersects(sectionShape):
                                    output.write(features[j])
                            print "Exported %s" % fileName
                            i = i + 1
                    except ValueError:
                        print('Error exporting ' + fileName)
                        pprint(properties)
                        pprint(featureFile.schema)
Example #5
    def remap_node_number(node_atrb1, node_atrb2, inshp, outshp, type, prj, error_logfile):
        with fiona.collection(inshp, "r") as input:

            schema = {'geometry': type,
                      'properties': {'node': 'int'}}

            with fiona.collection(outshp, "w", "ESRI Shapefile", schema) as output:
                for node in input:
                    node_num1 = node['properties'][node_atrb1]
                    node_num2 = node['properties'][node_atrb2]

                    # pick a node number
                    if node_num1 == 0 or node_num1 == node_num2:
                        node_num = node_num2
                    elif node_num2 == 0:
                        node_num = node_num1
                    else:
                        error_logfile.write("Warning! node number conflict. MFgrid node number: {}, "
                                  "Existing SFR node number: {}\n".format(node_num1, node_num2))

                    print "\rnode {:d}".format(node_num),

                    output.write({'properties': {'node': node_num},
                                  'geometry': mapping(shape(node['geometry']))})
        # copy over prj file
        shutil.copyfile(prj, "{}.prj".format(outshp[:-4]))
Example #6
def chunk(featureFileName, sectionFileName, pattern, key = None):

    # Load and index
    with collection(featureFileName, "r") as featureFile:
        featureIdx = index.Index()
        features = []
        for feature in featureFile:
            features.append(feature)
            featureIdx.add(len(features) - 1, asShape(feature['geometry']).bounds)

        # Break up by sections and export
        with collection(sectionFileName, "r") as sectionFile:
            i = 0
            for section in sectionFile:
                fileName = pattern % i
                if key:
                    fileName = pattern % section['properties'][key]
                    properties = {}
                    try:
                        with collection(fileName, 'w', 'ESRI Shapefile',
                                schema = featureFile.schema,
                                crs = featureFile.crs) as output:
                            sectionShape = asShape(section['geometry'])
                            for j in featureIdx.intersection(sectionShape.bounds):
                                if asShape(features[j]['geometry']).intersects(sectionShape):
                                    properties = features[j]['properties']
                                    output.write(features[j])
                            print "Exported %s" % fileName
                            i = i + 1
                    except ValueError:
                        print "Error exporting " + fileName
                        pprint(properties)
                        pprint(featureFile.schema)
Example #7
def make_buf(inShp, buf, outFile=None):
    # Creates polygon shapefile as buffers from input point shapefile
    # inShp   = points shapefile
    # buf     = buffer distance in meter
    # outFile = polygon shapefile
    epsg_32622 = 'PROJCS["WGS_1984_UTM_Zone_22N",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]]'
    dir = os.path.dirname(os.path.abspath(__file__))
    tempFolder = os.path.join(dir, 'temp')
    if not os.path.isdir(tempFolder):
        os.mkdir(tempFolder)

    if not outFile:
        head,tail = os.path.split(inShp)
        outFile = os.path.join(tempFolder, tail)

    if not os.path.isfile(outFile):
        with collection(inShp, 'r') as input:
            schema = input.schema.copy()
            schema['geometry'] = 'Polygon'
            with collection(outFile, "w", crs=from_epsg(32622), driver="ESRI Shapefile", schema=schema) as output:
                for point in input:
                    prop = point['properties']
                    output.write({'properties': prop,
                                    'geometry': mapping(shape(point['geometry']).buffer(buf))})
        prjfile = outFile[:-3] + 'prj'
        with open(prjfile, 'w') as prj:
            prj.write(epsg_32622)
        print('Shapefile created: ' + outFile)
    return outFile
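A hypothetical call; the point shapefile path is a placeholder, and the buffer distance is in metres because the output CRS is hard-coded to UTM zone 22N (EPSG:32622):

buffered_shp = make_buf("survey_points.shp", buf=30)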
Example #8
    def remap_node_number(node_atrb1, node_atrb2, inshp, outshp, type, prj,
                          error_logfile):
        with fiona.collection(inshp, "r") as input:

            schema = {'geometry': type, 'properties': {'node': 'int'}}

            with fiona.collection(outshp, "w", "ESRI Shapefile",
                                  schema) as output:
                for node in input:
                    node_num1 = node['properties'][node_atrb1]
                    node_num2 = node['properties'][node_atrb2]

                    # pick a node number
                    if node_num1 == 0 or node_num1 == node_num2:
                        node_num = node_num2
                    elif node_num2 == 0:
                        node_num = node_num1
                    else:
                        error_logfile.write(
                            "Warning! node number conflict. MFgrid node number: {}, "
                            "Existing SFR node number: {}\n".format(
                                node_num1, node_num2))

                    print "\rnode {:d}".format(node_num),

                    output.write({
                        'properties': {
                            'node': node_num
                        },
                        'geometry': mapping(shape(node['geometry']))
                    })
        # copy over prj file
        shutil.copyfile(prj, "{}.prj".format(outshp[:-4]))
Example #9
def make_buf(site, outFile=None, buf=0.5):
    dir = os.path.dirname(os.path.abspath(__file__))
    tempFolder = os.path.join(dir, 'temp')

    if not outFile:
        outFile = os.path.join(dir, 'plot_shapes',
                               siteDict[site] + '_plots.shp')

    # first create a point shapefile and save it with a prj file - otherwise the buffer will not work.
    schema = {
        'geometry': 'Point',
        'properties': {
            'site': 'str',
            'sitetype': 'str',
            'plot': 'int'
        }
    }
    pnt = os.path.join(tempFolder, 'pnt_temp.shp')
    with fiona.open(pnt,
                    "w",
                    crs=from_epsg(32622),
                    driver="ESRI Shapefile",
                    schema=schema) as output:
        reader = csv.DictReader(open("plotlocations.csv"), delimiter=',')
        for row in reader:
            if siteDict[site] == row['site']:
                point = Point(float(row['X']), float(row['Y']))
                prop = {
                    'site': str(row['site']),
                    'sitetype': str(row['sitetype']),
                    'plot': int(row['plot'])
                }
                output.write({'geometry': mapping(point), 'properties': prop})
    prjfile = pnt[:-3] + 'prj'
    with open(prjfile, 'w') as prj:
        prj.write(epsg_32622)

    with collection(pnt, 'r') as input:
        schema = input.schema.copy()
        schema['geometry'] = 'Polygon'
        #schema = {'geometry': 'Polygon', 'properties': {'name': 'str'}}
        with collection(outFile,
                        "w",
                        crs=from_epsg(32622),
                        driver="ESRI Shapefile",
                        schema=schema) as output:
            for point in input:
                prop = point['properties']
                output.write({
                    'properties':
                    prop,
                    'geometry':
                    mapping(shape(point['geometry']).buffer(buf))
                })
    prjfile = outFile[:-3] + 'prj'
    with open(prjfile, 'w') as prj:
        prj.write(epsg_32622)
    # delete temp file, how to remove entire shapefile?
    print('Shapefile created: ' + outFile)
Example #10
 def test_write_polygon(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         with collection("test_write_polygon.shp", "w", "ESRI Shapefile",
                         schema) as output:
             for f in input:
                 f['geometry'] = mapping(asShape(f['geometry']).buffer(1.0))
                 output.write(f)
Example #11
 def test_write_polygon_with_crs(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         with collection(
                 "test_write_polygon.shp", "w", "ESRI Shapefile",
                 schema=schema, crs={'init': "epsg:4326", 'no_defs': True}
                 ) as output:
             for f in input:
                 output.write(f)
Example #12
 def test_write_polygon(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         with collection(
             "test_write_polygon.shp", "w", "ESRI Shapefile", schema
             ) as output:
                 for f in input:
                     f['geometry'] = mapping(asShape(f['geometry']).buffer(1.0))
                     output.write(f)
Example #13
 def test_write_point(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         schema['geometry'] = 'Point'
         with collection("test_write_point.shp", "w", "ESRI Shapefile",
                         schema) as output:
             for f in input.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                 f['geometry'] = mapping(asShape(f['geometry']).centroid)
                 output.write(f)
Example #14
def get_points_along_line(DataDirectory, baseline_shapefile, distance,
                          output_shapefile):
    """
    Interpolate a series of points at equal distances along an input line shapefile. Arguments that need
    to be supplied are:
    * DataDirectory: the directory of the input/output shapefiles
    * baseline_shapefile: the name of the input line shapefile with extension
    * distance: the distance to place points at
    * output_shapefile: the name of the output points shapefile with extension
    """

    from fiona import collection
    from shapely.geometry import shape, Point, LineString, mapping

    lines = []
    points = []
    distances = []
    # read in the baseline shapefile
    c = collection(DataDirectory + baseline_shapefile, 'r')
    rec = next(iter(c))
    line = LineString(shape(rec['geometry']))
    # get the coordinate system from the input shapefile
    crs = c.crs

    total_distance = line.length
    # handle exceptions
    if distance < 0.0 or distance >= total_distance:
        print "\tNot a valid distance, sorry pal!"

    # get the points at the specified distance along the line
    temp_distance = 0
    n_points = int(total_distance / distance)
    print "The total distance is", total_distance, ": returning ", n_points, "points"
    # have a point at the start of the line
    for j in range(n_points + 1):
        point = line.interpolate(temp_distance)
        points.append(Point(point))
        distances.append(temp_distance)
        temp_distance += distance

    #output schema
    schema = {'geometry': 'Point', 'properties': {'distance': 'float'}}

    # write the points to a shapefile
    with collection(DataDirectory + output_shapefile,
                    'w',
                    crs=crs,
                    driver='ESRI Shapefile',
                    schema=schema) as output:
        for i in range(n_points + 1):
            #print point
            output.write({
                'properties': {
                    'distance': distances[i]
                },
                'geometry': mapping(points[i])
            })
Example #15
def union_polygon(shpfile_input, shapefile_output, field):
    with collection("{}.shp".format(shpfile_input), "r") as input:
        shp_schema = { 'geometry': 'Polygon', 'properties': { 'name': 'str' } }
        with collection("{}.shp".format(shapefile_output), "w", "ESRI Shapefile", shp_schema) as output:
            shapes = []
            for f in input:
                shapes.append(shape(f['geometry']))
            merged = cascaded_union(shapes)
            output.write({'properties': {'name': '{}'.format(field)}, 'geometry': mapping(merged)})
Example #16
 def setUp(self):
     os.mkdir("append-test")
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         schema["geometry"] = "Point"
         with collection("append-test/" + "test_append_point.shp", "w", "ESRI Shapefile", schema) as output:
             for f in input.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                 f["geometry"] = {"type": "Point", "coordinates": f["geometry"]["coordinates"][0][0]}
                 output.write(f)
Example #17
 def test_write_point(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         schema['geometry'] = 'Point'
         with collection(
             "test_write_point.shp", "w", "ESRI Shapefile", schema
             ) as output:
                 for f in input.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                     f['geometry'] = mapping(asShape(f['geometry']).centroid)
                     output.write(f)
Example #18
 def test_write_point_wdate(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         schema["geometry"] = "Point"
         schema["properties"]["date"] = "date"
         with collection("test_write_date.shp", "w", "ESRI Shapefile", schema) as output:
             for f in input.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                 f["geometry"] = {"type": "Point", "coordinates": f["geometry"]["coordinates"][0][0]}
                 f["properties"]["date"] = "2012-01-29"
                 output.write(f)
Example #19
def getvert(shp, poly, attr='bacia', buffer=False):

    if buffer:
        with collection(shp + '.shp', "r") as input:
            schema = input.schema.copy()
            with collection("with-shapely.shp", "w", "ESRI Shapefile",
                            schema) as output:
                for f in input:

                    try:
                        # Make a shapely object from the dict.
                        geom = shape(f['geometry'])
                        geom = geom.buffer(buffer)
                        # Make a dict from the shapely object.
                        f['geometry'] = mapping(geom)
                        output.write(f)

                    except Exception as e:
                        # Writing uncleanable features to a different shapefile
                        # is another option.
                        print("Error cleaning feature %s:", f['id'])

    if buffer:
        shpe = "with-shapely.shp"
    else:
        shpe = shp + '.shp'

    with fiona.open(shpe) as lines:
        print(shpe)

        if len(lines) == 1:
            vertices = []
            for line in lines:
                for vert in line['geometry']['coordinates'][0]:
                    vertices.append(vert)

            vertices = np.array(vertices)

        else:
            for x, poligon in enumerate(lines):

                if poligon['properties'][attr] == poly:
                    vert = poligon['geometry']['coordinates']
                    vertices = np.asarray(vert)
                    if len(vertices.shape) < 3:
                        verts2 = []
                        for m in vertices:
                            for x in m:
                                for k in x:
                                    verts2.append(k)
                        vertices = np.asarray(verts2)
            vertices = np.squeeze(vertices)

    return vertices
Example #20
 def test_write_point2(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         schema["geometry"] = "Point"
         with collection("test_write_point.shp", "w", "ESRI Shapefile", schema) as output:
             for f in input.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                 f["geometry"] = {"type": "Point", "coordinates": f["geometry"]["coordinates"][0][0]}
                 output.write(f)
             self.failUnlessEqual(len(output._buffer), 7)
             self.failUnlessEqual(len(output), 7)
     self.failUnlessEqual(len(output), 7)
Example #21
 def test_write_point(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         schema['geometry'] = 'Point'
         with collection("test_write_point.shp", "w", "ESRI Shapefile",
                         schema) as output:
             for f in input.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                 f['geometry'] = {
                     'type': 'Point',
                     'coordinates': f['geometry']['coordinates'][0][0]
                 }
                 output.write(f)
Example #22
 def test_write_point(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         schema['geometry'] = 'Point'
         with collection(
                 "test_write_point.shp", "w", "ESRI Shapefile", schema
                 ) as output:
             for f in input.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                 f['geometry'] = {
                     'type': 'Point',
                     'coordinates': f['geometry']['coordinates'][0][0] }
                 output.write(f)
Example #23
 def test_write_polygon_with_crs(self):
     with collection("docs/data/test_uk.shp", "r") as input:
         schema = input.schema.copy()
         with collection("test_write_polygon.shp",
                         "w",
                         "ESRI Shapefile",
                         schema=schema,
                         crs={
                             'init': "epsg:4326",
                             'no_defs': True
                         }) as output:
             for f in input:
                 output.write(f)
Example #24
    def merge_files(path, target, remove_source=False):
        """Merge files (the output of this Block) into one single file.

        Optionally removes the source files.
        """
        path = utils.safe_abspath(path)
        target = utils.safe_abspath(target)

        if os.path.exists(target):
            raise IOError("Target '{}' already exists".format(target))

        target_base, ext = os.path.splitext(target)
        source_paths = glob.glob(os.path.join(path, '*' + ext))
        if len(source_paths) == 0:
            raise IOError(
                "No source files found with matching extension '{}'".format(
                    ext))
        elif len(source_paths) == 1:
            # shortcut for single file. we need to copy/move all base_name.*
            # files (e.g. shapefiles have multiple files)
            source_base = os.path.splitext(source_paths[0])[0]
            move_or_copy = shutil.move if remove_source else shutil.copy
            for file_path in glob.glob(source_base + '.*'):
                move_or_copy(file_path,
                             target_base + os.path.splitext(file_path)[1])
            return

        with utils.fiona_env():
            # first detect the driver etc
            with fiona.collection(source_paths[0], "r") as source:
                kwargs = {
                    "driver": source.driver,
                    "crs": source.crs,
                    "schema": source.schema,
                }
                if source.encoding:
                    kwargs["encoding"] = source.encoding

            with fiona.collection(target, "w", **kwargs) as out:
                for source_path in source_paths:
                    with fiona.collection(source_path, "r") as source:
                        out.writerecords(v for k, v in source.items())
                    if remove_source:
                        os.remove(source_path)

            if remove_source:
                try:
                    os.rmdir(path)
                except IOError:  # directory not empty: do nothing
                    pass
Example #25
    def apply_shapely(self, method, args=None, call=True, out_geomtype=None,
                      **kwargs):
        coll = self.collection()
        out_schema = coll.schema.copy()
        if not args:
            args = []
        if out_geomtype:
            out_schema['geometry'] = out_geomtype

        tempds = self.tempds(method)
        with fiona.collection(tempds, "w", "ESRI Shapefile",
                              out_schema, crs=self.crs) as out_collection:
            for in_feature in coll:
                out_feature = in_feature.copy()
                if call:
                    geom = mapping(
                        getattr(shape(in_feature['geometry']),
                                method)(*args, **kwargs)
                    )
                else:
                    # it's not a method, it's a property
                    geom = mapping(
                        getattr(shape(in_feature['geometry']), method)
                    )

                out_feature['geometry'] = geom
                out_collection.write(out_feature)
        return Layer(tempds)
Example #26
def is_valid_shapefile(shape_file, mapping):
    with collection(shape_file, 'r') as source:
        if source.schema['geometry'] not in ('Polygon', 'MultiPolygon'):
            raise ConvertError(_('invalid geometry type') + ': ' + source.schema['geometry'])
        elif source.schema['geometry'] != mapping.geom_type and mapping.geom_type != '*':
            raise ConvertError(_('invalid mapping'))
    return True
Example #27
    def reproject(self, crsish):
        in_proj = Proj(self.crs)
        coll = self.collection()
        out_schema = coll.schema.copy()
        out_crs = guess_crs(crsish)

        tmpds = self.tempds("reproject_%s" % crsish)
        with fiona.collection(tmpds, "w", "ESRI Shapefile",
                              out_schema, crs=out_crs) as out_collection:
            out_proj = Proj(out_collection.crs)
            for in_feature in coll:
                out_feature = in_feature.copy()

                if in_feature['geometry']['type'] == "Polygon":
                    new_coords = []
                    for ring in in_feature['geometry']['coordinates']:
                        x2, y2 = transform(in_proj, out_proj, *zip(*ring))
                        new_coords.append(list(zip(x2, y2)))
                    out_feature['geometry']['coordinates'] = new_coords

                elif in_feature['geometry']['type'] == "Point":
                    x2, y2 = transform(in_proj, out_proj,
                                       *in_feature['geometry']['coordinates'])
                    out_feature['geometry']['coordinates'] = x2, y2

                out_collection.write(out_feature)

        return Layer(tmpds)
Example #28
def out_shapefile_csv(csv_file_name, field_to_export, shape_file_name):
    x, y = SpatialCoordinate(mesh2d)
    x_vector, y_vector = interpolate(x, Function(P1_2d)).dat.data, interpolate(y, Function(P1_2d)).dat.data

    import csv
    with open(csv_file_name, 'w') as f:
        writer = csv.writer(f, delimiter='\t')
        writer.writerows(zip(x_vector, y_vector, field_to_export.dat.data))

    locations = list(zip(x_vector, y_vector))
    numbers = list(field_to_export.dat.data)

    import pyproj
    import shapely.geometry
    import fiona
    import fiona.crs

    UTM_ZONE30 = pyproj.Proj(
        proj='utm',
        zone=30,
        datum='WGS84',
        units='m',
        errcheck=True)
    LL_WGS84 = pyproj.Proj(proj='latlong', datum='WGS84', errcheck=True)

    schema = {'geometry': 'Point', 'properties': {'numbers': 'str'}}
    crs = fiona.crs.from_string(UTM_ZONE30.srs)
    with fiona.collection(shape_file_name, "w", "ESRI Shapefile", schema, crs=crs) as output:
        for xy, numbers in zip(locations, numbers):
            point = shapely.geometry.Point(xy[0], xy[1])
            output.write({'properties': {'numbers': numbers}, 'geometry': shapely.geometry.mapping(point)})
Example #29
def extract_cities():
  input_path = get_shape_path("ne_50m_populated_places")
  output_path = path.join(OUT_DIR, "cities.json")

  props_to_keep = frozenset(["scalerank", "name", "latitude", "longitude"])

  features = []
  with fiona.collection(input_path, "r") as source:
    for feat in source:
      props = lower_dict_keys(feat["properties"])

      if props["pop_max"] >= POPULATION_MAX_FILTER:
        for k in frozenset(props) - props_to_keep:
          del props[k]

        feat["properties"] = props
        features.append(feat)

  my_layer = {
    "type": "FeatureCollection",
    "features": features
  }

  with open(output_path, "w") as f:
    f.write(json.dumps(my_layer))

  print(c.green("Extracted data to {}".format(output_path)))
Example #30
def generate_layer(target_dir=None, layer_def=None):
    if not layer_def:
        layer_def = generate_layer_def()

    if not target_dir:
        target_dir = tempfile.mkdtemp(prefix="%s." % layer_def['id'])

    shp_file = os.path.join(target_dir, layer_def['shpfile_filename'])
    shp_writer = fiona.collection(
        shp_file, "w", driver="ESRI Shapefile", schema=layer_def['schema'], 
        crs=layer_def['crs'],
    )
    for record in layer_def['records']:
        shp_writer.write(record)
    shp_writer.close()

    mfile_def = layer_def.get('mapfile_def')
    if mfile_def:
        mfile_file = os.path.join(target_dir, mfile_def['filename'])
        open(mfile_file, "wb").write(mfile_def['content'])

    metadata_file = os.path.join(target_dir, "metadata.json")
    open(metadata_file, "wb").write(json.dumps(layer_def['metadata']))

    return target_dir
Example #31
    def import_multi_river(self, river_file, river_field, reach_field):
        """
        Reads multiple reaches from self.river_file. river_field and reach_field are the name of attributes in the
        self.river_file shapefile. Saves Rivers object to self.rivers

        :param river_file: string - river shapefile
        :param river_field: string - name of rivercode attribute field
        :param reach_field: string - name of reachcode attribute field
        """
        self.rivers = Rivers()
        with fiona.collection(river_file, 'r') as input_file:
            for feature in input_file:
                # Fiona might give a Linestring or a MultiLineString, handle both cases
                temp_geo = shape(feature['geometry'])
                if type(temp_geo) is MultiLineString:
                    raise ShapefileError('Feature in ' + river_file +
                                         ' is MultiLineString.' +
                                         ' This is likely an error.')
                elif type(temp_geo) is LineString:
                    geo = gt.ADPolyline(shapely_geo=temp_geo)
                    river_name = feature['properties'][river_field]
                    reach_name = feature['properties'][reach_field]
                    temp_river = River(geo, river_name, reach_name)
                    self.rivers.reaches.append(temp_river)
                else:
                    raise ShapefileError('Feature in ' + river_file +
                                         ' is not a Linestring.')
Example #32
def add(request):
    if request.method == 'POST':
        form = LayerForm(request.POST, request.FILES)

        if form.is_valid():
            l = form.save()

            col = form.get_collection()
            srs = to_string(form.layer_crs())

            shape_path = "%s/uploads/shapefile/%s/%s.shp" % (
                settings.MEDIA_ROOT, request.tenant.schema_name, l.pk)
            if not os.path.exists(os.path.dirname(shape_path)):
                os.makedirs(os.path.dirname(shape_path))
            with fiona.collection(shape_path,
                                  "w",
                                  schema=col.schema,
                                  crs=col.crs,
                                  driver="ESRI Shapefile") as out:
                for f in col:
                    out.write(f)

            django_rq.enqueue(process_shapefile, request.tenant.schema_name,
                              l.pk, srs)

            messages.success(request, "Layer added.")
            return redirect('layers:index')
        else:
            messages.error(request,
                           "The layer could not be saved due to errors.")
    else:
        form = LayerForm()

    return render(request, 'layers/add.html', {'form': form})
Example #33
 def setUp(self):
     schema = {'geometry': 'Point', 'properties': {'label': 'str'}}
     self.c = collection(
             "test-no-iter.shp", 
             "w", 
             "ESRI Shapefile", 
             schema=schema)
Example #34
def extract_shapefile(shapefile, uri_name, simplify_tolerance=None):

    for feature in collection(shapefile, "r"):

        geometry = feature["geometry"]
        properties = feature["properties"]
        #calculate centroid
        geom_obj = asShape(geometry)
        if simplify_tolerance:
            geom_obj = geom_obj.simplify(simplify_tolerance)

        try:
            centroid = [geom_obj.centroid.x, geom_obj.centroid.y]
        except AttributeError:
            print "Error: ", feature
            continue
        geometry = mapping(geom_obj)

        if properties["FULL_NAME"]:
            name = properties["FULL_NAME"]

        #feature code mapping
        feature_code = "ADM1H"

        source = properties  #keep all fields anyhow

        # unique URI which internally gets converted to the place id
        # Must be unique!
        uri = uri_name + "." + properties["ID"] + "." + str(
            properties["VERSION"])

        #1766/07/02  to 1766-01-01
        timeframe = {
            "start": properties["START_DATE"].replace('/', '-'),
            "start_range": 0,
            "end": properties["END_DATE"].replace('/', '-'),
            "end_range": 0
        }

        #TODO admin? for counties?

        updated = "2011-10-01"

        area = properties["AREA_SQMI"]
        place = {
            "name": name,
            "centroid": centroid,
            "feature_code": feature_code,
            "geometry": geometry,
            "is_primary": True,
            "source": source,
            "updated": updated,
            "uris": [uri],
            "relationships": [],
            "timeframe": timeframe,
            "admin": [],
            "area": area
        }

        dump.write(uri, place)
Example #35
def plot_regions(regions, bbox, tag):
    """Output one shapefile for each region (represented by its bottom left and
    upper right index in the grid) with color depending of its discrepancy."""
    # TODO not unicode safe
    discrepancies = [v[0] for v in regions]
    colormap = cm.ScalarMappable(
        mcolor.Normalize(min(discrepancies), max(discrepancies)), 'YlOrBr')
    schema = {'geometry': 'Polygon', 'properties': {}}
    style = []
    KARTO_CONFIG['bounds']['data'] = [BBOX[1], BBOX[0], BBOX[3], BBOX[2]]

    polys = [{'geometry': mapping(r[1]), 'properties': {}} for r in regions]
    for i, r in enumerate(regions):
        color = to_css_hex(colormap.to_rgba(r[0]))
        name = u'disc_{}_{:03}'.format(tag, i + 1)
        KARTO_CONFIG['layers'][name] = {'src': name + '.shp'}
        color = 'red'
        style.append(CSS.format(name, color, 'black'))
        # style.append(CSS.format(name, color, color))
        with fiona.collection(mkpath('disc', name + '.shp'), "w",
                              "ESRI Shapefile", schema) as f:
            f.writerecords(polys)
        break

    with open(mkpath('disc', 'photos.json'), 'w') as f:
        json.dump(KARTO_CONFIG, f)
    with open(mkpath('disc', 'photos.css'), 'w') as f:
        f.write('\n'.join(style))
Example #36
def df2shp(df, folder, layername, dtypes, gtype, epsg):
    """Convert a processed df to a shapefile.

    'df' is a dataframe.

    'folder' is the path to the folder where the shapefile will be saved.

    'layername' is the name of the shapefile.

    'dtypes' is an OrderedDict containing the dtypes for each field.

    'gtype' is the geometry type.

    'epsg' is the EPSG code of the output.

    """
    schema = {'geometry': gtype, 'properties': dtypes}

    with fiona.collection(folder + '/' + layername + '.shp',
                          'w',
                          driver='ESRI Shapefile',
                          crs=crs.from_epsg(epsg),
                          schema=schema) as shpfile:
        for index, row in df.iterrows():
            if row['geom'] != 'POINT EMPTY':
                geometry = loads(row['geom'])
                props = {}
                for prop in dtypes:
                    props[prop] = row[prop]
                shpfile.write({
                    'properties': props,
                    'geometry': mapping(geometry)
                })

    return 'Extracted {layername} shapefile.'.format(layername=layername)
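A hypothetical call; the DataFrame, field dtypes, and EPSG code are placeholders. The 'geom' column is expected to hold WKT strings, since each row is parsed with loads() before being written:

from collections import OrderedDict
import pandas as pd

df = pd.DataFrame({'geom': ['POINT (10 20)', 'POINT (30 40)'],
                   'name': ['site_a', 'site_b']})
df2shp(df, 'output_folder', 'sites', OrderedDict([('name', 'str')]), 'Point', 4326)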
Example #37
def sampleShapeFile(shapefile, xypoints, attribute):
    """
    Open a shapefile (decimal degrees) and get the attribute value at each of the input XY points. Slower than sampling grids.

    :param shapefile:
      ESRI shapefile (decimal degrees) of predictor variable.
    :param xypoints:
      2D numpy array of XY points, in decimal degrees.
    :param attribute:
      String name of attribute to sample in each of the shapes.
    :returns:
      1D array of attribute values at each of XY points.
    """

    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    #xypoints should be projected back to lat/lon
    f = fiona.collection(shapefile, 'r')
    tshapes = list(f.items(bbox=(xmin, ymin, xmax, ymax)))
    shapes = []
    for fid, shape1 in tshapes:
        shapes.append(shape1)
    f.close()
    return sampleShapes(shapes, xypoints, attribute)
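A hypothetical call; the shapefile path and attribute name are placeholders, and xypoints must be in decimal degrees as the docstring requires:

import numpy as np

xy = np.array([[-120.5, 36.2], [-121.0, 36.8]])
values = sampleShapeFile("geology.shp", xy, "UNIT_NAME")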
Example #38
def df2shp(df, folder, layername, dtypes, gtype, epsg):
    """Convert a processed df to a shapefile.

    'df' is a dataframe.

    'folder' is the path to the folder where the shapefile will be saved.

    'layername' is the name of the shapefile.

    'dtypes' is an OrderedDict containing the dtypes for each field.

    'gtype' is the geometry type.

    'epsg' is the EPSG code of the output.

    """
    schema = {'geometry': gtype, 'properties': dtypes}

    with fiona.collection(
        folder + '/' + layername + '.shp',
        'w',
        driver='ESRI Shapefile',
        crs=crs.from_epsg(epsg),
        schema=schema
    ) as shpfile:
        for index, row in df.iterrows():
            if row['geom'] != 'POINT EMPTY':
                geometry = loads(row['geom'])
                props = {}
                for prop in dtypes:
                    props[prop] = row[prop]
                shpfile.write({'properties': props, 'geometry': mapping(geometry)})

    return 'Extracted {layername} shapefile.'.format(layername=layername)
Example #39
    def import_single_river(self, river_file):
        """
        imports river from river_file shapefile
        raises exception if file has more than one feature or is not Linestring
        :param river_file: string - name of river shapefile
        """
        with fiona.collection(river_file, 'r') as input_file:
            feature = list(input_file)

            if len(feature) > 1:
                raise ShapefileError(
                    'More than one feature in river shapefile' + river_file)

            # Fiona might give a Linestring or a MultiLineString, handle both cases
            temp_geo = shape(feature[0]['geometry'])
            if type(temp_geo) is MultiLineString:
                raise ShapefileError('Feature in ' + str(river_file) +
                                     ' is MultiLineString.' +
                                     ' This is likely an error.')
            elif type(temp_geo) is LineString:
                geo = gt.ADPolyline(shapely_geo=temp_geo)
                self.river = River(geo, None, None)
            else:
                raise ShapefileError('Feature in ' + river_file +
                                     ' is not a Linestring.')
Example #40
def createLayerFromCSV(dataconnection):
    rawheaders, data = getFormhubCSV(dataconnection)
    headers = fixShpNames(rawheaders)
    print(headers)
    props = {}
    for headstr in headers:
        props[headstr] = 'str'
    schema = { 'geometry': 'Point', 'properties': props}

    temporaryfile = tempfile.gettempdir() + "/" + slugify(dataconnection.title)

    with collection(temporaryfile + ".shp", "w", "ESRI Shapefile", schema) as output:
        for row in data:
            dataset = dict(zip(rawheaders, row))
            attributes = dict(zip(headers, row))
            try:
                point = Point(float(dataset[dataconnection.lon_column]), float(dataset[dataconnection.lat_column]))
            except Exception as e:
                print(str(e))
                point = None
            if not point and dataconnection.geocode_column:
                print("trying to geocode")
                pointset = geocodeSet(dataconnection.geocode_column, dataconnection.geocode_country)
                if not pointset:
                    continue
                else:
                    point = Point(float(pointset['lon']), float(pointset['lat']))

                #attempt to geocode
            if not point:
                continue
            output.write({
                'properties': attributes,
                'geometry': mapping(point)
            })
Example #41
def get_mask_array(longitude_list, latitude_list, shapefiles):

    mask = []

    if (shapefiles is None or shapefiles == []):
        mask = [False] * len(longitude_list)
        return mask

    #  make points
    points = [
        Point(longitude_list[i], latitude_list[i])
        for i in range(0, len(longitude_list))
    ]

    #  union all shapes in the shapefiles
    polygons = []
    for shapefile in shapefiles:
        with fiona.collection(shapefile, "r") as input:
            for feature in input:
                s = shape(feature['geometry'])
                s = s.buffer(1.0)
                polygons.append(s)
    polygon = cascaded_union(polygons)

    for i in range(0, len(longitude_list)):
        if polygon.intersects(points[i]):
            mask.append(False)
        else:
            mask.append(True)

    return mask
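A hypothetical call; the coordinate lists and shapefile path are placeholders. Points falling inside the 1-unit buffer of any polygon get False in the returned mask, everything else gets True:

mask = get_mask_array([-3.2, -2.9], [55.9, 56.1], ["land_polygons.shp"])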
Example #42
def lasdir2shp(lasdir, fout, crs, overwrite=False):
    '''
    Map each file in the lasdir to a polygon in the shapefile fout.
    '''
    if path.exists(fout):
        if overwrite:
            remove(fout)
        else:
            print('Output file {} already exists.  Skipping...'.format(fout))
            return

    filenames = glob(path.join(lasdir, '*.las'))
    oschema = _polygon_file_schema.copy()
    poly = deepcopy(_polygon_template)

    with collection(
            fout, 'w', crs=crs, driver="ESRI Shapefile", schema=oschema
        ) as oshp:
        for filename in filenames:
            try:
                poly['geometry']['coordinates'] = [get_bounding_box(filename)]
            except Exception as e:
                if 'min_points' in str(e):
                    continue
                raise e
            poly['properties']['las_file'] = filename
            oshp.write(poly)
            poly['id'] = str(int(poly['id']) + 1)
Example #43
    def test_write_point(self):
        with collection("docs/data/test_uk.shp", "r") as input:
            schema = input.schema.copy()
            schema["geometry"] = "Point"
            with collection("test_write_point.shp", "w", "ESRI Shapefile", schema) as output:
                for f in input.filter(bbox=(-5.0, 55.0, 0.0, 60.0)):
                    f["geometry"] = {"type": "Point", "coordinates": f["geometry"]["coordinates"][0][0]}
                    output.writerecords([f])
                self.failUnlessEqual(len(output._buffer), 7)
                self.failUnlessEqual(len(output), 7)

        self.failUnlessEqual(len(output), 7)
        self.failUnlessAlmostEqual(output.bounds[0], -3.231389, 6)
        self.failUnlessAlmostEqual(output.bounds[1], 51.614998, 6)
        self.failUnlessAlmostEqual(output.bounds[2], -1.180556, 6)
        self.failUnlessAlmostEqual(output.bounds[3], 60.224998, 6)
Example #44
def write_hob_shapefile(hob_df,shp_fout,model_epsg):
    '''Writes the groundwater observation locations to a 2D point shapefile.'''
    
    schema = {'geometry':'Point','properties':{'Site':'str','SiteName':'str',\
                                            'Head (m)':'float','DOW (m)':'float','NWIS_DTW (m)':'float',\
                                            'DOW-DTW (m)':'float','ModelLand (m)':'float',\
                                            'WellElev (m)':'float','NWISLand (m)':'float',\
                                            '(Lay,Row,Col)':'str','NWIS_Link':'str','Violation':'str'}}

    with fiona.collection(shp_fout, "w", "ESRI Shapefile",crs=from_epsg(model_epsg),schema=schema) as output:
        
        for index,irow in hob_df.iterrows():
            
            isite = str(index)
            ilink = http_dict['GW'][0] + str(isite) + http_dict['GW'][1]
            iname = irow['station_nm']
            ihob = irow['HeadObs']
            idow = irow['well_depth_va']
            idtw = irow['lev_va']
            iwellelev = irow['well_elev']
            inwis_land = irow['alt_va']
            imodel_land = irow['ModelTop']
            irowcol = '(%i,%i,%i)'%(irow['Layer'],irow['Row'],irow['Column'])
            iexclude = irow['Violation']
            
            point = Point(irow['Projected_X'],irow['Projected_Y'])
            output.write({'geometry': mapping(point),                            
                        'properties':{'Site':isite,'SiteName':iname,\
                                        'Head (m)':ihob,'DOW (m)':idow,'NWIS_DTW (m)':idtw,\
                                        'DOW-DTW (m)':(idow-idtw),'ModelLand (m)':imodel_land,\
                                        'WellElev (m)':iwellelev,'NWISLand (m)':inwis_land,\
                                        '(Lay,Row,Col)':irowcol,'NWIS_Link':ilink,'Violation':iexclude}})
            
    return
Example #45
def sampleShapeFile(shapefile, xypoints, attribute):
    """
    Open a shapefile (decimal degrees) and get the attribute value at each of the input XY points. Slower than sampling grids.

    :param shapefile:
      ESRI shapefile (decimal degrees) of predictor variable.
    :param xypoints:
      2D numpy array of XY points, in decimal degrees.
    :param attribute:
      String name of attribute to sample in each of the shapes.
    :returns:
      1D array of attribute values at each of XY points.
    """

    xmin = np.min(xypoints[:, 0])
    xmax = np.max(xypoints[:, 0])
    ymin = np.min(xypoints[:, 1])
    ymax = np.max(xypoints[:, 1])
    #xypoints should be projected back to lat/lon
    f = fiona.collection(shapefile, 'r')
    tshapes = list(f.items(bbox=(xmin, ymin, xmax, ymax)))
    shapes = []
    for fid, shape1 in tshapes:
        shapes.append(shape1)
    f.close()
    return sampleShapes(shapes, xypoints, attribute)
Example #46
 def test_filter_1(self):
     with collection("docs/data/test_uk.shp", "r") as c:
         results = list(c.filter(bbox=(-15.0, 35.0, 15.0, 65.0)))
         self.failUnlessEqual(len(results), 48)
         f = results[0]
         self.failUnlessEqual(f['id'], "0")
         self.failUnlessEqual(f['properties']['FIPS_CNTRY'], 'UK')
Example #47
 def test_io(self):
     c = collection("docs/data/test_uk.shp", "r")
     self.failUnlessEqual(c.name, "test_uk")
     self.failUnlessEqual(c.mode, "r")
     self.failUnless(iter(c))
     c.close()
     self.assertRaises(ValueError, iter, c)
Example #48
def parse_tcx3(infiles):
    schema = { 'geometry': 'LineString', 'properties': {} }
    with collection(
        "lines.shp", "w", "ESRI Shapefile", schema) as output:
        for infile in infiles:
            print "processing %s" % infile
            soup = bss(open(infile,'r'))

            ls = []
            # Activity
            for activity in soup.findAll('activity'):

                # Lap
                for lap in activity.findAll('lap'):
                    # Track
                    for track in lap.findAll('track'):

                        # Trackpoint
                        for point in track.findAll('trackpoint'):
                            try:
                                coords = [float(x) for x in
                                         [point.position.longitudedegrees.string,
                                          point.position.latitudedegrees.string]]
                                ls.append(coords)
                            except: coords = None
            if len(ls) > 2:
                output.write({
                    'properties': {
                    },
                    'geometry': mapping(LineString(ls))
                })
Example #49
def plot_regions(regions, bbox, tag):
    """Output one shapefile for each region (represented by its bottom left and
    upper right index in the grid) with color depending of its discrepancy."""
    # TODO not unicode safe
    discrepancies = [v[0] for v in regions]
    colormap = cm.ScalarMappable(mcolor.Normalize(min(discrepancies),
                                                  max(discrepancies)),
                                 'YlOrBr')
    schema = {'geometry': 'Polygon', 'properties': {}}
    style = []
    KARTO_CONFIG['bounds']['data'] = [BBOX[1], BBOX[0],
                                      BBOX[3], BBOX[2]]

    polys = [{'geometry': mapping(r[1]), 'properties': {}} for r in regions]
    for i, r in enumerate(regions):
        color = to_css_hex(colormap.to_rgba(r[0]))
        name = u'disc_{}_{:03}'.format(tag, i+1)
        KARTO_CONFIG['layers'][name] = {'src': name+'.shp'}
        color = 'red'
        style.append(CSS.format(name, color, 'black'))
        # style.append(CSS.format(name, color, color))
        with fiona.collection(mkpath('disc', name+'.shp'),
                              "w", "ESRI Shapefile", schema) as f:
            f.writerecords(polys)
        break

    with open(mkpath('disc', 'photos.json'), 'w') as f:
        json.dump(KARTO_CONFIG, f)
    with open(mkpath('disc', 'photos.css'), 'w') as f:
        f.write('\n'.join(style))
Example #50
def createShapefileFromCSV(inCSV):
    #need dataframe as df
    #need uid as unique ID
    #need lng field as longitude
    #need lat field as latitude
    
    df = pd.read_csv(inCSV)
    ioSHP = inCSV.replace('.csv','.shp')
    data  = df

    lng = 'lng'
    lat = 'lat'

    data = data

    schema = { 'geometry': 'Point', 'properties': { 'uid': 'str','lat':'float','lng':'float'} }

    with collection(ioSHP, "w", "ESRI Shapefile", schema) as output:
        for index, row in data.iterrows():
            point = Point(row[lng], row[lat])
            output.write({

                'properties': {'uid': row['uid'],'lat': row['lat'],'lng': row['lng']},
                'geometry': mapping(point)
            })

    print('Shapefile created: ' + ioSHP)
Example #51
def write_discharge_shapefile(discharge_df,shp_fout,model_epsg,discharge_label):
    '''Writes the discharge measurement locations to a 2D point shapefile.'''
    
    schema = {'geometry':'Point','properties':{'Site':'int','SiteName':'str',\
                                            'GageElev':'float','NObs':'int',\
                                            discharge_label:'float','StdError':'float',\
                                            'NWIS_Link':'str'}}

    with fiona.collection(shp_fout, "w", "ESRI Shapefile",crs=from_epsg(model_epsg),schema=schema) as output:
        
        for index,irow in discharge_df.iterrows():
            
            isite = int(index)
            isite = str(isite).zfill(8) # NWIS discharge ids minimum 8 characters
            
            iname = irow['station_nm']
            igage = irow['alt_va']
            inobs = irow['NObs']
            imean = irow[discharge_label]
            istd  = irow['StdError']
            ilink = http_dict['ST'] + str(isite)
            
            point = Point(irow['Projected_X'],irow['Projected_Y'])
            output.write({'geometry': mapping(point),                            
                        'properties':{'Site':isite,'SiteName':iname,\
                                            'GageElev':igage,'NObs':inobs,\
                                            discharge_label:imean,'StdError':istd,\
                                            'NWIS_Link':ilink}})
            
    return
Example #52
def _shp2json(shp, output, encode=False):
    features = []
    with fiona.collection(shp, "r") as source:
        src_srs = Proj(source.crs)
        if src_srs is None:
            print "Not able to determine spatial reference assuming WGS84."
        records = []
        for rec in source:
            if rec["geometry"] is None:
                print "Skipping feature with empty geometry."
                continue
            geom_type = rec["geometry"]["type"]
            if src_srs:
                geom = to_wgs84(rec["geometry"], src_srs)
            else:
                geom = rec["geometry"]
            if encode:
                rec["geometry"]["coordinates"] = _encode_geometry(geom)
            else:
                rec["geometry"] = geom
            records.append(rec)

        layer = {
            "type": "FeatureCollection",
            "features": records
        }

        geojson = json.dumps(layer)
        output.write(geojson)
        output.close()
Example #53
def lasdir2shp(lasdir, fout, crs, overwrite=False):
    '''
    Map each file in the lasdir to a polygon in the shapefile fout.
    '''
    if path.exists(fout):
        if overwrite:
            remove(fout)
        else:
            print('Output file {} already exists.  Skipping...'.format(fout))
            return

    filenames = glob(path.join(lasdir, '*.las'))
    oschema = _polygon_file_schema.copy()
    poly = deepcopy(_polygon_template)

    with collection(fout,
                    'w',
                    crs=crs,
                    driver="ESRI Shapefile",
                    schema=oschema) as oshp:
        for filename in filenames:
            try:
                poly['geometry']['coordinates'] = [get_bounding_box(filename)]
            except Exception as e:
                if 'min_points' in str(e):
                    continue
                raise e
            poly['properties']['las_file'] = filename
            oshp.write(poly)
            poly['id'] = str(int(poly['id']) + 1)
Example #54
 def test_filter_1(self):
     with collection("docs/data/test_uk.shp", "r") as c:
         results = list(c.filter(bbox=(-15.0, 35.0, 15.0, 65.0)))
         self.failUnlessEqual(len(results), 48)
         f = results[0]
         self.failUnlessEqual(f['id'], "0")
         self.failUnlessEqual(f['properties']['FIPS_CNTRY'], 'UK')
Example #55
def build():
    with collection("data/ne_10m_admin_1_states_provinces_shp.shp", "r") as shapes:
        for s in shapes:
            geom = shape(s["geometry"])
            id = int(s["id"])
            obj = s
            yield (id, geom.bounds, obj)
Example #56
    def poligonizeShape(self):

        outputname = os.path.splitext(self.theshape)[0] + '_buffer.shp'
        openshape = fiona.open(self.theshape)
        geometrytype = openshape[0]['geometry']['type']

        if geometrytype == 'Point' or geometrytype == 'LineString':
            copyfile(
                os.path.splitext(self.theshape)[0] + '.prj',
                os.path.splitext(outputname)[0] + '.prj')
            schema = {'geometry': 'Polygon', 'properties': {'ID': 'str'}}
            with collection(outputname, "w", "ESRI Shapefile",
                            schema) as output:
                for point in openshape:
                    output.write({
                        'properties': {
                            'ID': point['properties']['ID']
                        },
                        'geometry':
                        mapping(shape(point['geometry']).buffer(cfg.buffer))
                    })
            print('INFO: theshape was points or linestrings, now its polygon')
            return ShapeObject(output)
        else:
            print('INFO: theshape is polygons, that works')
            return ShapeObject(self.theshape)
Example #57
def load_shape_info(shapefile):
    # This script converts .kml, .shp and .txt files to the right format. If multiple shapes are available the script
    # will select the first one.

    if shapefile.endswith('.shp'):
        with collection(shapefile, "r") as inputshape:
            for shape in inputshape:
                # only first shape
                dat = shape['geometry']['coordinates']

                st = '('
                for p in dat[0]:
                    st = st + str(p[0]) + ' ' + str(p[1]) + ','
                st = st[:-1] + ')'

                break
    elif shapefile.endswith('.kml'):
        doc = open(shapefile, 'rb').read()
        k = kml.KML()
        k.from_string(doc)
        shape = list(list(
            k.features())[0].features())[0].geometry.exterior.coords[:]
        st = '('
        for p in shape:
            st = st + str(p[0]) + ' ' + str(p[1]) + ','
        st = st[:-1] + ')'
    else:
        print('Format not recognized! Please create either a .kml or .shp file.')
        return []

    return st
Example #58
def build_point_shape(file_path, shapefile, schema, geometry_field):
    try:
        with collection(shapefile,
                        "w",
                        "ESRI Shapefile",
                        schema,
                        crs=from_epsg(4326)) as output:
            with open(file_path, 'rU') as f:
                reader = csv.DictReader(f)
                for row in reader:
                    shape_props = build_schema_key_par(schema['properties'],
                                                       row)
                    try:
                        lat = float(row[geometry_field[0]])
                        lng = float(row[geometry_field[1]])
                        point = Point(lng, lat)
                        output.write({
                            'properties': shape_props,
                            'geometry': mapping(point)
                        })
                    except:
                        continue
    except:
        return False
    return True
Example #59
def get_distance_along_baseline(terraces, lp):
    """
    This function gets the distance along the baseline for each of the terrace
    points. This gives continuous distances along the baseline, compared to the
    DistAlongBaseline column in the CSV which is just the nearest point along the
    baseline.

    Args:
        terraces: the dataframe with the terrace info
        lp: the name of the baseline shapefile (a line shapefile)

    Returns:
        terrace dataframe with additional column - 'DistAlongBaseline_new'.

    FJC
    """
    # get the shapefile as a shapely line
    # read in the baseline shapefile
    c = fiona.collection(lp, 'r')
    rec = next(iter(c))
    line = LineString(shape(rec['geometry']))
    terraces['DistAlongBaseline_new'] = terraces.apply(
        lambda x: dist_along_line(x['X'], x['Y'], line), axis=1)

    return terraces
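A hypothetical call; the terrace DataFrame must carry 'X' and 'Y' columns for dist_along_line(), and the baseline path is a placeholder:

terraces = get_distance_along_baseline(terrace_df, "./data/baseline.shp")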