Code example #1
File: geo_utils.py Project: yghlc/tod
def shapely_shapes_to_shapefile(shapely_shapes, shp_path, sf=None,
                                prj_path=None):

    if sf:
        w = shapefile.Writer(sf.shapeType)
        for field in sf.fields:
            w.field(*field)
        for record in sf.iterRecords():
            w.record(*record)
    else:
        shape_type = shapely_to_pyshp(shapely_shapes[0]).shapeType
        w = shapefile.Writer(shape_type)
        w.field(str("id"), str("N"), 255, 0)
        for id_shape in xrange(len(shapely_shapes)):
            w.record(*[id_shape])

    for shape in shapely_shapes:
        if shape.type == "Polygon":
            pyshp_shape = shapely_to_pyshp(shape.buffer(0.00000001))
            w._shapes.append(pyshp_shape)
        elif shape.type == "Point":
            coords = shape.coords[0]
            w.point(coords[0], coords[1])
        else:
            pyshp_shape = shapely_to_pyshp(shape)
            w._shapes.append(pyshp_shape)

    # return w
    w.save(shp_path)

    if prj_path:
        copy_prj(prj_path, shp_path)
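
A minimal usage sketch for the function above, assuming pyshp 1.x (the
Writer(shapeType)/save() API used throughout these examples) and shapely; the
geometry list and output path are hypothetical:

from shapely.geometry import Point

# hypothetical input geometries; with no template Reader (sf) passed, the
# function creates an "id" field and writes one record per shape
stations = [Point(0.0, 0.0), Point(1.5, 2.5)]
shapely_shapes_to_shapefile(stations, "out/stations")

Passing sf= an open shapefile.Reader instead reuses that reader's fields and
records, and prj_path= copies an existing .prj file next to the output.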
Code example #2
File: create_buffers.py Project: SSTyT/tod
def create_buffered_shp(directory, distance, buffer_dir=BUFFER_DIR,
                        resolution=16, recalculate=False,
                        context_shp_or_polygon=None):
    context_polygon = create_context_polygon(context_shp_or_polygon)

    shp_path = pf.find_shp_path(directory)
    shp_name = _create_shp_name(shp_path, distance)
    # print("\n".join([directory, shp_name, buffer_dir]))
    buffer_shp_path = _create_shp_path(directory, shp_name, buffer_dir)
    # print(buffer_shp_path)

    if not os.path.isfile(buffer_shp_path + ".shp") or recalculate:

        # read shapefile with pyshp
        sf_est = shapefile.Reader(shp_path)

        # create buffers from shapely shapes
        buffer_shapes = []
        for shape in geo_utils.get_shapely_shapes(sf_est):
            if not context_polygon or context_polygon.contains(shape):
                buffer_shapes.append(create_buffer(shape, distance,
                                                   resolution,
                                                   context_polygon))

        write_shapefile(sf_est, buffer_shapes, buffer_shp_path)
        utils.copy_prj(shp_path, buffer_shp_path)
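
A hedged call sketch for create_buffered_shp; the directory names and
distances are hypothetical, and create_context_polygon is assumed (from its
name and the context_shp_or_polygon parameter) to accept either a shapefile
path or a shapely polygon:

# buffer every shape of the directory's shapefile by 500 (in the units of the
# shapefile's CRS), writing the result under the default BUFFER_DIR
create_buffered_shp("shp/estaciones", 500)

# only buffer shapes contained in a context polygon, and force a rebuild even
# if the buffered shapefile already exists
create_buffered_shp("shp/estaciones", 2000, recalculate=True,
                    context_shp_or_polygon="shp/caba")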
Code example #3
File: create_buffers.py Project: yghlc/tod
def create_buffered_shp(directory,
                        distance,
                        buffer_dir=BUFFER_DIR,
                        resolution=16,
                        recalculate=False,
                        context_shp_or_polygon=None):
    context_polygon = create_context_polygon(context_shp_or_polygon)

    shp_path = pf.find_shp_path(directory)
    shp_name = _create_shp_name(shp_path, distance)
    # print("\n".join([directory, shp_name, buffer_dir]))
    buffer_shp_path = _create_shp_path(directory, shp_name, buffer_dir)
    # print(buffer_shp_path)

    if not os.path.isfile(buffer_shp_path + ".shp") or recalculate:

        # read shapefile with pyshp
        sf_est = shapefile.Reader(shp_path)

        # create buffers from shapely shapes
        buffer_shapes = []
        for shape in geo_utils.get_shapely_shapes(sf_est):
            if not context_polygon or context_polygon.contains(shape):
                buffer_shapes.append(
                    create_buffer(shape, distance, resolution,
                                  context_polygon))

        write_shapefile(sf_est, buffer_shapes, buffer_shp_path)
        utils.copy_prj(shp_path, buffer_shp_path)
Code example #4
def merge_shapefiles(shp_paths, output_path, merging_field="orig_sf",
                     replacements=None, group_fields=None,
                     group_fields_by_sf=None, prj_path=None):
    """Merge shapefiles in a single shapefile.

    There is a merging_field retaining the name of the original field taken
    from shp_paths keys. Fields not shared by all merged shape files will take
    a None value.

    Args:
        shp_paths (dict or str):
            If dict, paths to the shps to merge, as {"RADIO": "radio_indic_path"}.
            If str, a dir whose subdirs are the shps to be merged.
        output_path (str): Path of the merged shapefile.
        merging_field (str): Name of the field retaining the name of the
            original shp each record came from.
        replacements (dict): Only used if shp_paths is a str. Replaces long
            names with short ones in the merging_field, as
            {"long_name": "short"}.
        group_fields (dict): Fields with different names that should be grouped
            into the same field, as {group_field: ["name1", "name2", "name3"]}.
        group_fields_by_sf (dict): Field name replacements for each merged
            shapefile, as in
                {"RADIO": {"co_frac_ra": "id_div"},
                 "FRAC": {"co_fracc": "id_div"}}
            where "id_div" is the column gathering all the ids.
        prj_path (str): Path to a prj file to be used. If none is provided, the
            prj file of the first shapefile to be merged will be used.
    """

    if type(shp_paths) == str or type(shp_paths) == unicode:
        shp_paths = create_shp_paths_dict(shp_paths, replacements)

    sf_first = shapefile.Reader(shp_paths.values()[0])
    w = shapefile.Writer(sf_first.shapeType)

    if not prj_path:
        copy_prj(shp_paths.values()[0], output_path)
    else:
        copy_prj(prj_path, output_path)

    # write all the fields first
    write_fields(w, shp_paths, merging_field, group_fields, group_fields_by_sf)

    # now write shapes and records
    new_fields = [new_field[0] for new_field in w.fields]
    # print(new_fields)
    for id_sf, sf_path in shp_paths.iteritems():
        sf = shapefile.Reader(sf_path)
        print("Merging", sf.shapeType, id_sf.ljust(15),
              os.path.basename(sf_path))

        gfsf = group_fields_by_sf
        orig_fields = [translate(f, group_fields,
                                 gfsf[id_sf] if gfsf else None)[0]
                       for f in sf.fields if f[0] != "DeletionFlag"]
        # print(orig_fields)

        # extend writing shapefile with all new shapes
        write_curated_shapes(w, sf)
        for sr in sf.iterShapeRecords():
            record = sr.record
            # shape = sr.shape

            dict_record = {orig_field: value for orig_field, value in
                           zip(orig_fields, record)}

            # add elements to the record in the writing sf fields order
            new_record = []
            for field in new_fields:
                if field == merging_field:
                    new_record.append(id_sf)
                elif field in dict_record:
                    new_record.append(dict_record[field])
                else:
                    new_record.append(None)

            w.record(*new_record)

    # return w
    w.save(output_path)
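
A usage sketch built from the docstring above; the paths are hypothetical,
while the group_fields_by_sf mapping is the one given in the docstring:

shp_paths = {
    "RADIO": "indicators/radio_indic",  # hypothetical paths
    "FRAC": "indicators/frac_indic",
}
group_fields_by_sf = {
    "RADIO": {"co_frac_ra": "id_div"},
    "FRAC": {"co_fracc": "id_div"},
}
merge_shapefiles(shp_paths, "merged/divisions",
                 group_fields_by_sf=group_fields_by_sf)

Each output record gets an "orig_sf" field holding "RADIO" or "FRAC", and both
original id columns are gathered into the shared "id_div" field.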
Code example #5
File: recalculate_indicators.py Project: yghlc/tod
def recalculate_indicators(new_shp_dir,
                           area_level,
                           skip=None,
                           subcategory=None,
                           omit_fields=None,
                           by_area=None):
    skip = skip or []
    by_area = by_area or []

    # some fields are omitted always
    if omit_fields:
        if not isinstance(omit_fields, list):
            omit_fields = [omit_fields]
        # combine user-provided fields with the always-omitted ones without
        # mutating the module-level OMIT_FIELDS list
        omit_fields = list(OMIT_FIELDS) + omit_fields
    else:
        omit_fields = list(OMIT_FIELDS)

    new_shp_path = find_shp_path(new_shp_dir)
    shp_name = os.path.basename(new_shp_path)

    sf = shapefile.Reader(new_shp_path)
    df_indicators = get_indicators(area_level)
    weights = get_weights(new_shp_path, area_level)

    w = shapefile.Writer(shapefile.POLYGON)

    indicators = _get_indicator_names(df_indicators)
    for field in sf.fields[1:]:
        w.field(*field)
    for indicator in indicators:
        field = [str(indicator), str("N"), 20, 18]
        # print(indicator)
        w.field(*field)
    w.field(str("area_km2"), str("N"), 20, 18)
    w.field(str("hab_km2"), str("N"), 20, 18)
    # print(w.fields)

    for record_shape in sf.iterShapeRecords():
        record = record_shape.record
        shape = record_shape.shape

        # print(record[0])
        if type(record[0]) == int:
            id_record = unicode(record[0])
        else:
            id_record = unicode(record[0].decode("utf-8"))

        if len(weights[id_record]) > 0:
            calculated_indicators = _calc_indicators(indicators, df_indicators,
                                                     weights[id_record],
                                                     area_level, skip, by_area)
            # print(calculated_indicators)
            record.extend(calculated_indicators)

            area = calculate_area(shape) / 1000000
            record.append(area)

            population = calculated_indicators[indicators.index(POPULATION)]
            pop_density = population / area
            record.append(pop_density)

            w.record(*record)

            w.poly(shapeType=shapefile.POLYGON, parts=[shape.points])

    path = get_indicators_shp_path(shp_name, subcategory)
    w.save(path)

    utils.copy_prj(new_shp_path.decode("utf-8"), path)
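
A hedged call sketch; the directory, area level, subcategory and field name
below are hypothetical placeholders:

# recompute area-weighted indicators for a re-cut geometry (e.g. buffers),
# using weights calculated against the RADIO-level source divisions
recalculate_indicators("buffers/estaciones_500",
                       area_level="RADIO",
                       subcategory="buffers",
                       omit_fields="hab")

Besides the recalculated indicators, every record also gets an area_km2 field
(shape area in km2) and a hab_km2 field (population divided by that area), as
the field definitions above show.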
Code example #6
def merge_shapefiles(shp_paths, output_path, merging_field="orig_sf",
                     replacements=None, group_fields=None,
                     group_fields_by_sf=None, skip_fields=None,
                     fields_to_keep=None):
    """Merge shapefiles in a single shapefile.

    There is a merging_field retaining the name of the original field taken
    from shp_paths keys. Fields not shared by all merged shape files will take
    a None value.

    Args:
        shp_paths (dict or str):
            If dict, paths to the shps to merge, as {"RADIO": "radio_indic_path"}.
            If str, a dir whose subdirs are the shps to be merged.
        output_path (str): Path of the merged shapefile.
        merging_field (str): Name of the field retaining the name of the
            original shp each record came from.
        replacements (dict): Only used if shp_paths is a str. Replaces long
            names with short ones in the merging_field, as
            {"long_name": "short"}.
        group_fields (dict): Fields with different names that should be grouped
            into the same field, as {group_field: ["name1", "name2", "name3"]}.
        group_fields_by_sf (dict): Field name replacements for each merged
            shapefile, as in
                {"RADIO": {"co_frac_ra": "id_div"},
                 "FRAC": {"co_fracc": "id_div"}}
            where "id_div" is the column gathering all the ids.
    """
    skip_fields = skip_fields or []

    if type(shp_paths) == str or type(shp_paths) == unicode:
        shp_paths = create_shp_paths_dict(shp_paths, replacements)

    sf_first = shapefile.Reader(shp_paths.values()[0])
    w = shapefile.Writer(sf_first.shapeType)
    # print(sf_first.shapeType)
    copy_prj(shp_paths.values()[0], output_path)

    # write all the fields first
    write_fields(w, shp_paths, merging_field, group_fields, group_fields_by_sf,
                 skip_fields, fields_to_keep)

    # now write shapes and records
    new_fields = [new_field[0] for new_field in w.fields]
    # print(new_fields)
    for id_sf, sf_path in shp_paths.iteritems():
        sf = shapefile.Reader(sf_path)
        print("Merging", sf.shapeType, id_sf.ljust(15),
              os.path.basename(sf_path))

        gfsf = group_fields_by_sf
        orig_fields = [translate(f, group_fields,
                                 gfsf[id_sf] if gfsf else None)[0]
                       for f in sf.fields if f[0] != "DeletionFlag"]
        # print(orig_fields)

        # extend writing shapefile with all new shapes
        write_curated_shapes(w, sf)
        for sr in sf.iterShapeRecords():
            record = sr.record
            # shape = sr.shape

            dict_record = {orig_field: value for orig_field, value in
                           zip(orig_fields, record)}

            # add elements to the record in the writing sf fields order
            new_record = []
            for field in new_fields:
                if field == merging_field:
                    new_record.append(id_sf)
                elif field in dict_record:
                    new_record.append(dict_record[field])
                else:
                    new_record.append(None)

            w.record(*new_record)

    # return w
    w.save(output_path)
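
This second variant always copies the .prj of the first shapefile in shp_paths
(there is no prj_path argument) and forwards two extra parameters, skip_fields
and fields_to_keep, to write_fields; from their names they are assumed to
blacklist and whitelist field names, which write_fields (not shown here) would
have to honor. A hedged sketch with hypothetical paths, replacements and field
names:

# merge every shp found under a directory, shortening the names stored in the
# merging field and keeping only a whitelist of fields
merge_shapefiles("indicators", "merged/indicators",
                 replacements={"radio_indicadores": "RADIO"},
                 fields_to_keep=["id_div", "hab_km2"])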
Code example #7
File: recalculate_indicators.py Project: SSTyT/tod
def recalculate_indicators(new_shp_dir, area_level, skip=None,
                           subcategory=None, omit_fields=None, by_area=None):
    skip = skip or []
    by_area = by_area or []

    # some fields are omitted always
    if omit_fields:
        if not isinstance(omit_fields, list):
            omit_fields = [omit_fields]
        # combine user-provided fields with the always-omitted ones without
        # mutating the module-level OMIT_FIELDS list
        omit_fields = list(OMIT_FIELDS) + omit_fields
    else:
        omit_fields = list(OMIT_FIELDS)

    new_shp_path = find_shp_path(new_shp_dir)
    shp_name = os.path.basename(new_shp_path)

    sf = shapefile.Reader(new_shp_path)
    df_indicators = get_indicators(area_level)
    weights = get_weights(new_shp_path, area_level)

    w = shapefile.Writer(shapefile.POLYGON)

    indicators = _get_indicator_names(df_indicators)
    for field in sf.fields[1:]:
        w.field(*field)
    for indicator in indicators:
        field = [str(indicator), str("N"), 20, 18]
        # print(indicator)
        w.field(*field)
    w.field(str("area_km2"), str("N"), 20, 18)
    w.field(str("hab_km2"), str("N"), 20, 18)
    # print(w.fields)

    for record_shape in sf.iterShapeRecords():
        record = record_shape.record
        shape = record_shape.shape

        # print(record[0])
        if type(record[0]) == int:
            id_record = unicode(record[0])
        else:
            id_record = unicode(record[0].decode("utf-8"))

        if len(weights[id_record]) > 0:
            calculated_indicators = _calc_indicators(indicators,
                                                     df_indicators,
                                                     weights[id_record],
                                                     area_level,
                                                     skip, by_area)
            # print(calculated_indicators)
            record.extend(calculated_indicators)

            area = calculate_area(shape) / 1000000
            record.append(area)

            population = calculated_indicators[indicators.index(POPULATION)]
            pop_density = population / area
            record.append(pop_density)

            w.record(*record)

            w.poly(shapeType=shapefile.POLYGON, parts=[shape.points])

    path = get_indicators_shp_path(shp_name, subcategory)
    w.save(path)

    utils.copy_prj(new_shp_path.decode("utf-8"), path)