Example #1

import shapefile  # import the shapefile (pyshp) library

w = shapefile.Writer('soal8', shapeType=shapefile.POLYGON)  # create a new Writer; 'soal8' is the output shapefile base name and shapeType=shapefile.POLYGON sets the geometry type to polygon
w.shapeType  # inspect the writer's shape type (reading the attribute has no side effect)

w.field("kolom1", "C")  # add a field named kolom1 with character (C) type
w.field("kolom2", "C")  # add a field named kolom2 with character (C) type

w.record("ngek", "satu")  # add a record filling the fields: kolom1 = "ngek", kolom2 = "satu"

w.poly([[[1,3],[5,3],[1,2],[5,2],[1,3]]])  # add a polygon shape from the list of coordinate pairs

w.close()  # close the file
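
To sanity-check the file just written, it can be read back with pyshp's Reader (a minimal sketch, assuming pyshp 2.x; not part of the original example):

import shapefile

r = shapefile.Reader('soal8')
print(r.shapeType)        # 5 == POLYGON
print(r.fields[1:])       # user-defined fields (index 0 is the deletion flag)
print(r.record(0))        # the stored attribute values: ngek, satu
print(r.shape(0).points)  # the polygon's vertex list
r.close()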
Example #2
# Fill gaps where the distance between consecutive points is bigger than a threshold.
# Each point will be saved in a separate record.

import os
import shapefile

#-------------------------------------------------------------------
# Required input:
WorkDir = "/home/svassili/OpiniconShp/OpiniconModelBuild/"
InputFile = "Refined_Data/opinicon_perimeter_ed_simpl_0.4.shp"
OutFile = os.path.splitext(InputFile)[0] + "_extra_points."
# Fill gaps between points separated by distance bigger than 'space'
space = 5.0
#-------------------------------------------------------------------
InputFile = WorkDir + InputFile

# Initialize output shapefile
ShapeType = shapefile.POINTZ
w = shapefile.Writer(ShapeType)
w.autoBalance = 1
w.field("Depth", "F", 10, 5)
x = []
y = []
z = []
depth = []

# Read shape file
#-----------------------------------------------------------
np = len(x)
nn = 0
try:
    bt = shapefile.Reader(InputFile)
    bt_records = bt.shapeRecords()
    bt_shapes = bt.shapes()
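
The example above is cut off before the gap-filling step. Below is a minimal sketch of that step, not taken from the original source: it assumes the truncated reading code fills the lists x, y, z with the input coordinates, and the interpolation details (nfill, frac) are illustrative only.

import math

# Sketch: write every original point, and insert linearly interpolated points
# wherever two consecutive points are farther apart than 'space'.
for i in range(len(x) - 1):
    w.point(x[i], y[i], z[i])
    w.record(z[i])
    dist = math.hypot(x[i + 1] - x[i], y[i + 1] - y[i])
    if dist > space:
        nfill = int(dist // space)      # how many extra points to insert
        for k in range(1, nfill + 1):
            frac = k / float(nfill + 1)
            w.point(x[i] + frac * (x[i + 1] - x[i]),
                    y[i] + frac * (y[i + 1] - y[i]),
                    z[i] + frac * (z[i + 1] - z[i]))
            w.record(z[i] + frac * (z[i + 1] - z[i]))
w.point(x[-1], y[-1], z[-1])            # last original point
w.record(z[-1])
# w.save(...) would then write the output shapefile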
Example #3
import shapefile  # import the shapefile module
w = shapefile.Writer()  # create a new shapefile writer
w.shapeType  # check which shape type is in use (point/line/polygon); reading the attribute does not change it

# create the attribute table: two columns
w.field("kolom1", "C")  # field() defines a column; "kolom1" is the column name and "C" the character type
w.field("kolom2", "C")  # field() defines a column; "kolom2" is the column name and "C" the character type

# create 2 record rows to fill the table columns
w.record("ngek", "satu")  # record() inserts a row of data into the attribute table
w.record("ngok", "dua")  # record() inserts a row of data into the attribute table

# create 2 point geometries for the .shp file
w.point(1, 1)  # point() creates a point geometry at x=1, y=1
w.point(2, 2)  # point() creates a point geometry at x=2, y=2
w.save("soal1")
Example #4
    '59567213355', '59567218015', '59567202818', '59567213236', '59567202945',
    '59567215346', '59567214445', '59567216282', '59567214704'
]

SEED3 = [
    '59567211554', '59567209884', '59567216282', '59567202858', '59567216227',
    '59567205374', '59567217348', '59567216285', '59567214702', '59567214794',
    '59567213355', '59567218015', '59567214793', '59567215346'
]
sf = shapefile.Reader("/home/qiushye/experiment_map/road_choose.shp")

Records = sf.records()
shapesRecords = sf.shapeRecords()
fields = sf.fields

w = shapefile.Writer(shapeType=3)
w.autoBalance = 1
for f in fields:
    if f != fields[0]:
        w.field(f[0], f[1], f[2], f[3])
    # if f == fields[3]:
    # break

for road in SEED3:
    for rec in Records:
        if road == rec[2]:
            index = Records.index(rec)
            points = shapesRecords[index].shape.points
            # for point in points:
            # w.point(point[0],point[1])
            w.line(parts=[[[val[0], val[1]] for val in points]])
Example #5
def run(ctx):
    pgconn = psycopg2.connect(database='postgis', host='iemdb', user='******')
    cursor = pgconn.cursor()

    """
    Need to limit what we are allowing them to request as the file would get
    massive.  So lets set arbitrary values of
    1) If 2 or more RADARs, less than 7 days
    """
    if len(ctx['radar']) == 1:
        ctx['radar'].append('XXX')
    radarlimit = ''
    if 'ALL' not in ctx['radar']:
            radarlimit = " and nexrad in %s " % (str(tuple(ctx['radar'])), )
    if len(ctx['radar']) > 2 and (ctx['ets'] - ctx['sts']).days > 6:
        ctx['ets'] = ctx['sts'] + datetime.timedelta(days=7)

    sql = """
        SELECT to_char(valid at time zone 'UTC', 'YYYYMMDDHH24MI') as utctime,
        storm_id, nexrad, azimuth, range, tvs, meso, posh, poh, max_size,
        vil, max_dbz, max_dbz_height, top, drct, sknt,
        ST_y(geom) as lat, ST_x(geom) as lon
        from nexrad_attributes_log WHERE
        valid >= '%s' and valid < '%s' %s  ORDER by valid ASC
        """ % (ctx['sts'].strftime("%Y-%m-%d %H:%M+00"),
               ctx['ets'].strftime("%Y-%m-%d %H:%M+00"), radarlimit)

    # print 'Content-type: text/plain\n'
    # print sql
    # sys.exit()
    # sys.stderr.write("Begin SQL...")
    cursor.execute(sql)
    if cursor.rowcount == 0:
        sys.stdout.write("Content-type: text/plain\n\n")
        sys.stdout.write("ERROR: no results found for your query")
        return

    fn = "stormattr_%s_%s" % (ctx['sts'].strftime("%Y%m%d%H%M"),
                              ctx['ets'].strftime("%Y%m%d%H%M"))

    # sys.stderr.write("End SQL with rowcount %s" % (cursor.rowcount, ))
    if ctx['fmt'] == 'csv':
        sys.stdout.write("Content-type: application/octet-stream\n")
        sys.stdout.write(("Content-Disposition: attachment; "
                          "filename=%s.csv\n\n") % (fn,))
        sys.stdout.write(("VALID,STORM_ID,NEXRAD,AZIMUTH,RANGE,TVS,MESO,POSH,"
                          "POH,MAX_SIZE,VIL,MAX_DBZ,MAZ_DBZ_H,TOP,DRCT,SKNT,"
                          "LAT,LON\n"))
        for row in cursor:
            sys.stdout.write(",".join([str(s) for s in row])+"\n")
        return

    w = shapefile.Writer(shapeType=shapefile.POINT)
    w.field('VALID', 'C', 12)
    w.field('STORM_ID', 'C', 2)
    w.field('NEXRAD', 'C', 3)
    w.field('AZIMUTH', 'I')
    w.field('RANGE', 'I')
    w.field('TVS', 'C', 10)
    w.field('MESO', 'C', 10)
    w.field('POSH', 'I')
    w.field('POH', 'I')
    w.field('MAX_SIZE', 'F', 5, 2)
    w.field('VIL', 'I')
    w.field('MAX_DBZ', 'I')
    w.field('MAX_DBZ_H', 'F', 5, 2)
    w.field('TOP', 'F', 5, 2)
    w.field('DRCT', 'I')
    w.field('SKNT', 'I')
    w.field('LAT', 'F', 7, 4)
    w.field('LON', 'F', 9, 4)
    for row in cursor:
        w.point(row[-1], row[-2])
        w.record(*row)

    # sys.stderr.write("End LOOP...")

    shp = cStringIO.StringIO()
    shx = cStringIO.StringIO()
    dbf = cStringIO.StringIO()

    w.save(shp=shp, shx=shx, dbf=dbf)
    # sys.stderr.write("End of w.save()")

    zio = cStringIO.StringIO()
    zf = zipfile.ZipFile(zio, mode='w',
                         compression=zipfile.ZIP_DEFLATED)
    zf.writestr(fn+'.prj',
                open(('/mesonet/www/apps/iemwebsite/data/gis/meta/4326.prj'
                      )).read())
    zf.writestr(fn+".shp", shp.getvalue())
    zf.writestr(fn+'.shx', shx.getvalue())
    zf.writestr(fn+'.dbf', dbf.getvalue())
    zf.close()
    sys.stdout.write(("Content-Disposition: attachment; "
                      "filename=%s.zip\n\n") % (fn,))
    sys.stdout.write(zio.getvalue())
Example #6
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 21 14:17:27 2019

@author: ADVENT
"""
import shapefile  # import the shapefile library
w = shapefile.Writer('10', shapeType=5)  # create a shapefile writer whose output files will be named '10'; shapeType 5 means POLYGON

w.field("C1", "C")  # create the table's first column
w.field("C2", "C")  # create the table's second column

w.record("alex", "ferguso")  # fill a table row: the first value goes into column one, the second into column two
w.record("gagak", "guguk")  # fill a table row: the first value goes into column one, the second into column two
w.record("bujang", "kijang")  # fill a table row: the first value goes into column one, the second into column two
w.record("lama", "ribet")  # fill a table row: the first value goes into column one, the second into column two
w.record("tunggu", "bosan")  # fill a table row: the first value goes into column one, the second into column two
w.record(
    "bucat",
Example #7
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 21 00:01:46 2019

@author: bakti
"""
import shapefile  # import the shapefile library

w = shapefile.Writer('soal9', shapeType=5)  # create the file soal9.shp and set shapeType 5 = POLYGON

w.field("kolom1", "C")  # create the table's first column
w.field("kolom2", "C")  # create the table's second column

w.record("ngek", "satu")  # the record fills kolom1 with "ngek" and kolom2 with "satu"
w.record("crot", "dua")  # the record fills kolom1 with "crot" and kolom2 with "dua"

w.poly([[[1,3],[5,3],[5,2],[1,2],[1,3]]])
# draw a polygon by connecting the listed points; the enclosed area is filled

w.poly([[[1,6],[5,6],[5,9],[1,9],[1,6]]])
# draw a polygon by connecting the listed points; the enclosed area is filled

w.close()  # close the file
Example #8
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 20 17:11:57 2019

@author: lenovo
"""

import shapefile

w = shapefile.Writer('soal9', shapeType=5)

w.field("kolom1", "C")
w.field("kolom2", "C")

w.record("ngek", "satu")
w.record("ngok", "dua")

w.poly([[[1, 3], [5, 3], [5, 2], [1, 2], [1, 3]]])
w.poly([[[1, 6], [5, 6], [5, 9], [1, 9], [1, 6]]])
w.close()
Example #9
import shapefile  # import the shapefile library

w = shapefile.Writer('soal4', shapefile.POINT)  # create a new Writer; 'soal4' is the output shapefile base name and shapefile.POINT sets the geometry type to point
w.shapeType  # inspect the writer's shape type (reading the attribute has no side effect)

w.field("kolom1", "C")  # create a field named kolom1 with character (C) type
w.field("kolom2", "C")  # create a field named kolom2 with character (C) type

w.record("ngek", "satu")  # fill the fields of a record: kolom1 = "ngek", kolom2 = "satu"
w.record("ngok", "dua")  # fill the fields of a record: kolom1 = "ngok", kolom2 = "dua"

w.point(1, 1)  # add a point at coordinates x = 1, y = 1
w.point(2, 2)  # add a point at coordinates x = 2, y = 2

w.close()  # close the file
Example #10
    if count < len(b) - 1:
        if count > 0 and count % 2 != 0:
            r = r + item + ';'
        else:
            r = r + item + ','
    else:
        r = r + item
    count = count + 1

content = r.split(';')
index = 0
wdata = [0]
# fdata = [0 for col in range(len(content))]
# tmp = [0 for col in range(2)]
params = {'from': 6, 'to': 5, 'ak': ak}
w = shp.Writer(shp.POINT)
w.field('id', 'C', '8')
for point in content:
    params['coords'] = point
    rURL = bd + urllib.urlencode(params)
    print rURL
    data = urllib2.urlopen(rURL).read()
    result = json.loads(data)
    pxpy = result['result']
    # tmp[0] = pxpy[0]['x']
    # tmp[1] = pxpy[0]['y']
    # print tmp
    # fdata[index] = tmp
    index += 1
    w.point(pxpy[0]['x'], pxpy[0]['y'])
    w.record(str(index))
Example #11
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 21 13:31:54 2019

@author: USER
"""

import shapefile
w = shapefile.Writer("Soal7")
w.shapeType

w.field("kolom1", "C")
w.field("kolom2", "C")

w.record("ngek", "satu")

w.poly([[[1, 3], [5, 3], [1, 2], [5, 2], [1, 3]]])

w.close()
Example #12
def _pyshp2(df, out_path, out_name):
    """
    Saves a SpatialDataFrame to a Shapefile using pyshp v2.0

    :Parameters:
     :df: spatial dataframe
     :out_path: folder location to save the data
     :out_name: name of the shapefile
    :Output:
     path to the shapefile or None if pyshp isn't installed or
     spatial dataframe does not have a geometry column.
    """
    from arcgis.geometry._types import Geometry
    if HASPYSHP:
        GEOMTYPELOOKUP = {
            "Polygon" : shapefile.POLYGON,
            "Point" : shapefile.POINT,
            "Polyline" : shapefile.POLYLINE,
            'null' : shapefile.NULL
        }
        if os.path.isdir(out_path) == False:
            os.makedirs(out_path)
        out_fc = os.path.join(out_path, out_name)
        if out_fc.lower().endswith('.shp') == False:
            out_fc += ".shp"
        geom_field = df.spatial.name
        if geom_field is None:
            return
        geom_type = "null"
        idx = df[geom_field].first_valid_index()
        if idx > -1:
            geom_type = df.loc[idx][geom_field].type
        shpfile = shapefile.Writer(target=out_fc, shapeType=GEOMTYPELOOKUP[geom_type], autoBalance=True)
        dfields = []
        cfields = []
        for c in df.columns:
            idx = df[c].first_valid_index() or df.index.tolist()[0]
            if idx > -1:
                if isinstance(df[c].loc[idx],
                              Geometry):
                    geom_field = (c, "GEOMETRY")
                else:
                    cfields.append(c)
                    if isinstance(df[c].loc[idx], (str)):
                        shpfile.field(name=c, size=255)
                    elif isinstance(df[c].loc[idx], (int)):
                        shpfile.field(name=c, fieldType="N", size=5)
                    elif isinstance(df[c].loc[idx], (np.int, np.int32, np.int64)):
                        shpfile.field(name=c, fieldType="N", size=10)
                    elif isinstance(df[c].loc[idx], (np.float, np.float64)):
                        shpfile.field(name=c, fieldType="F", size=19, decimal=11)
                    elif isinstance(df[c].loc[idx], (datetime.datetime, np.datetime64)) or \
                         df[c].dtype.name == 'datetime64[ns]':
                        shpfile.field(name=c, fieldType="D", size=8)
                        dfields.append(c)
                    elif isinstance(df[c].loc[idx], (bool, np.bool)):
                        shpfile.field(name=c, fieldType="L", size=1)
            del c
            del idx
        for idx, row in df.iterrows():
            geom = row[df.spatial._name]
            if geom.type == "Polygon":
                shpfile.poly(geom['rings'])
            elif geom.type == "Polyline":
                shpfile.line(geom['paths'])
            elif geom.type == "Point":
                shpfile.point(x=geom.x, y=geom.y)
            else:
                shpfile.null()
            row = row[cfields].tolist()
            for fld in dfields:
                idx = df[cfields].columns.tolist().index(fld)
                if row[idx]:
                    if isinstance(row[idx].to_pydatetime(), (type(pd.NaT))):
                        row[idx] = None
                    else:
                        row[idx] = row[idx].to_pydatetime()
            shpfile.record(*row)
            del idx
            del row
            del geom
        shpfile.close()


        # create the PRJ file
        try:
            from urllib import request
            wkid = df.spatial.sr['wkid']
            if wkid == 102100:
                wkid = 3857
            prj_filename = out_fc.replace('.shp', '.prj')

            url = 'http://epsg.io/{}.esriwkt'.format(wkid)

            opener = request.build_opener()
            opener.addheaders = [('User-Agent', 'geosaurus')]
            resp = opener.open(url)

            wkt = resp.read().decode('utf-8')
            if len(wkt) > 0:
                prj = open(prj_filename, "w")
                prj.write(wkt)
                prj.close()
        except:
            # Unable to write PRJ file.
            pass

        del shpfile
        return out_fc
    return None
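
The docstring above describes the expected inputs; a hypothetical call could look like the sketch below. It is illustrative only: it assumes the ArcGIS API for Python is installed and that sdf is an existing spatially enabled DataFrame, which is not constructed here.

# sdf is assumed to be a spatially enabled (arcgis) pandas DataFrame;
# the folder and shapefile name are made up for illustration.
result = _pyshp2(sdf, out_path="/tmp/export", out_name="parcels")
print(result)  # "/tmp/export/parcels.shp", or None if pyshp is unavailable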
Example #13
    async def execute(self, filter={}, user=None):

        language = 'en'
        if ('language' in filter and filter['language'] in ['it', 'de', 'fr']):
            language = filter['language']

        permissions = None
        if user is not None:
            permissions = self.filterPermission(user)

        where, params = self.filterBorehole(filter)

        sql = """
            {}
            WHERE geom_bho IS NOT NULL
        """.format(ListBorehole.get_sql_text(language))

        if len(where) > 0:
            sql += """
                AND %s
            """ % " AND ".join(where)

        if permissions is not None:
            sql += f"""
                AND {permissions}
            """

        # recs = await self.conn.fetch(sql, *(params))

        rec = await self.conn.fetchval(
            """
            SELECT
                array_to_json(
                    array_agg(
                        row_to_json(t)
                    )
                )
            FROM (
                %s
                ORDER BY 1
            ) AS t
        """ % sql, *(params))

        data = self.decode(rec) if rec is not None else []

        if len(data) > 0:

            cl = await ListCodeList(self.conn).execute('borehole_form')

            shp_header = {}
            for c in cl['data']['borehole_form']:
                shp_header[c['code']] = c

            shp = BytesIO()
            shx = BytesIO()
            dbf = BytesIO()

            w = shapefile.Writer(shp=shp, shx=shx, dbf=dbf, encoding="utf8")

            keys = data[0].keys()
            for key in keys:
                if key in ['location_x', 'location_y']:
                    continue

                w.field((shp_header[key][language]['text']
                         if key in shp_header else key).upper(), 'C')
            # w.field('NAME', 'C')
            # w.field('KIND', 'C')
            # w.field('DATE', 'D')

            # for rec in recs:
            #     w.point(rec[3], rec[4])
            #     w.record(rec[1], rec[2])

            for row in data:
                r = []
                w.point(row['location_x'], row['location_y'])

                for col in keys:
                    if isinstance(row[col], list):
                        r.append(",".join(str(x) for x in row[col]))
                    else:
                        r.append(row[col])

                w.record(*r)

            # Go to: https://spatialreference.org/
            # Then download the ESRI WKT epsg.
            prj = BytesIO(
                b'PROJCS["CH1903+ / LV95",GEOGCS["CH1903+",DATUM["D_CH1903",' \
                b'SPHEROID["Bessel_1841",6377397.155,299.1528128]],PRIMEM["G' \
                b'reenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTI' \
                b'ON["Hotine_Oblique_Mercator_Azimuth_Center"],PARAMETER["la' \
                b'titude_of_center",46.95240555555556],PARAMETER["longitude_' \
                b'of_center",7.439583333333333],PARAMETER["azimuth",90],PARA' \
                b'METER["scale_factor",1],PARAMETER["false_easting",2600000]' \
                b',PARAMETER["false_northing",1200000],UNIT["Meter",1]]'
            )

        return shp, shx, dbf, prj
Example #14
import shapefile  # import the module
w = shapefile.Writer()  # declare the writer; the parentheses may be left empty or given a shape type
w.shapeType  # inspect the shape type of the writer declared above

w.field("kolom1", "C")  # column name
w.field("kolom2", "C")  # column name

w.record("ngek", "satu")

w.poly(parts=[[[1, 3], [5, 3], [1, 2], [5, 2], [1, 3]]],
       shapeType=shapefile.POLYLINE)
w.save("soal8")  # output file name
Example #15
def recarray2shp(recarray,
                 geoms,
                 shpname="recarray.shp",
                 mg=None,
                 epsg=None,
                 prj=None,
                 **kwargs):
    """
    Write a numpy record array to a shapefile, using a corresponding
    list of geometries. Method supports list of flopy geometry objects,
    flopy Collection object, shapely Collection object, and geojson
    Geometry Collection objects

    Parameters
    ----------
    recarray : np.recarray
        Numpy record array with attribute information that will go in the
        shapefile
    geoms : list of flopy.utils.geometry, shapely geometry collection,
            flopy geometry collection, shapefile.Shapes,
            list of shapefile.Shape objects, or geojson geometry collection
        The number of geometries in geoms must equal the number of records in
        recarray.
    shpname : str
        Path for the output shapefile
    epsg : int
        EPSG code. See https://www.epsg-registry.org/ or spatialreference.org
    prj : str
        Existing projection file to be used with new shapefile.

    Notes
    -----
    Uses pyshp.
    epsg code requires an internet connection the first time to get the
    projection file text from spatialreference.org, but then stashes the text
    in the file epsgref.json (located in the user's data directory) for
    subsequent use. See flopy.reference for more details.

    """
    from ..utils.geospatial_utils import GeoSpatialCollection

    if len(recarray) != len(geoms):
        raise IndexError(
            "Number of geometries must equal the number of records!")

    if len(recarray) == 0:
        raise Exception("Recarray is empty")

    geomtype = None

    geoms = GeoSpatialCollection(geoms).flopy_geometry

    for g in geoms:
        try:
            geomtype = g.shapeType
        except AttributeError:
            continue

    # set up for pyshp 2
    shapefile = import_shapefile()
    w = shapefile.Writer(shpname, shapeType=geomtype)
    w.autoBalance = 1

    # set up the attribute fields
    names = enforce_10ch_limit(recarray.dtype.names)
    for i, npdtype in enumerate(recarray.dtype.descr):
        key = names[i]
        if not isinstance(key, str):
            key = str(key)
        w.field(key, *get_pyshp_field_info(npdtype[1]))

    # write the geometry and attributes for each record
    ralist = recarray.tolist()
    if geomtype == shapefile.POLYGON:
        for i, r in enumerate(ralist):
            w.poly(geoms[i].pyshp_parts)
            w.record(*r)
    elif geomtype == shapefile.POLYLINE:
        for i, r in enumerate(ralist):
            w.line(geoms[i].pyshp_parts)
            w.record(*r)
    elif geomtype == shapefile.POINT:
        # pyshp version 2.x w.point() method can only take x and y
        # code will need to be refactored in order to write POINTZ
        # shapes with the z attribute.
        for i, r in enumerate(ralist):
            w.point(*geoms[i].pyshp_parts[:2])
            w.record(*r)

    w.close()
    write_prj(shpname, mg, epsg, prj)
    print("wrote {}".format(shpname))
    return
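
A minimal usage sketch for recarray2shp (added for illustration; the import paths and the Point constructor are assumed from flopy's public API and are not part of the original source):

import numpy as np
from flopy.utils.geometry import Point                   # assumed import path
from flopy.export.shapefile_utils import recarray2shp    # assumed import path

# two wells with an id and a pumping-rate attribute
ra = np.rec.fromrecords([(1, -150.0), (2, -75.0)], names=["wellid", "rate"])
geoms = [Point(550.0, 1200.0), Point(725.0, 980.0)]
recarray2shp(ra, geoms, shpname="wells.shp")  # add epsg=... or prj=... to also write a .prj file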
Example #16
 def __init__(self):
     self.gununghalu = shapefile.Writer('gununghalutes', shapeType = shapefile.POLYGON)
     self.gununghalu.shapeType
     self.gununghalu.field('nama_kelurahan', 'C')
Example #17
def write_grid_shapefile(
        filename,
        mg,
        array_dict,
        nan_val=np.nan,
        epsg=None,
        prj=None,  # -1.0e9,
):
    """
    Method to write a shapefile of gridded input data

    Parameters
    ----------
    filename : str
        shapefile file name path
    mg : flopy.discretization.Grid object
        flopy model grid
    array_dict : dict
        dictionary of model input arrays
    nan_val : float
        value to fill nans
    epsg : str, int
        epsg code
    prj : str
        projection file name path

    Returns
    -------
    None

    """
    shapefile = import_shapefile()
    w = shapefile.Writer(filename, shapeType=shapefile.POLYGON)
    w.autoBalance = 1

    if isinstance(mg, SpatialReference):
        verts = copy.deepcopy(mg.vertices)
        warnings.warn(
            "SpatialReference has been deprecated. Use StructuredGrid"
            " instead.",
            category=DeprecationWarning,
        )
    elif mg.grid_type == "structured":
        verts = [
            mg.get_cell_vertices(i, j) for i in range(mg.nrow)
            for j in range(mg.ncol)
        ]
    elif mg.grid_type == "vertex":
        verts = [mg.get_cell_vertices(cellid) for cellid in range(mg.ncpl)]
    else:
        raise Exception("Grid type {} not supported.".format(mg.grid_type))

    # set up the attribute fields and arrays of attributes
    if isinstance(mg, SpatialReference) or mg.grid_type == "structured":
        names = ["node", "row", "column"] + list(array_dict.keys())
        dtypes = [
            ("node", np.dtype("int")),
            ("row", np.dtype("int")),
            ("column", np.dtype("int")),
        ] + [(enforce_10ch_limit([name])[0], array_dict[name].dtype)
             for name in names[3:]]
        node = list(range(1, mg.ncol * mg.nrow + 1))
        col = list(range(1, mg.ncol + 1)) * mg.nrow
        row = sorted(list(range(1, mg.nrow + 1)) * mg.ncol)
        at = np.vstack([node, row, col] +
                       [array_dict[name].ravel()
                        for name in names[3:]]).transpose()

        names = enforce_10ch_limit(names)

    elif mg.grid_type == "vertex":
        names = ["node"] + list(array_dict.keys())
        dtypes = [("node", np.dtype("int"))] + [(enforce_10ch_limit(
            [name])[0], array_dict[name].dtype) for name in names[1:]]
        node = list(range(1, mg.ncpl + 1))
        at = np.vstack([node] +
                       [array_dict[name].ravel()
                        for name in names[1:]]).transpose()

        names = enforce_10ch_limit(names)

    # flag nan values and explicitly set the dtypes
    if at.dtype in [np.float, np.float32, np.float64]:
        at[np.isnan(at)] = nan_val
    at = np.array([tuple(i) for i in at], dtype=dtypes)

    # write field information
    fieldinfo = {
        name: get_pyshp_field_info(dtype.name)
        for name, dtype in dtypes
    }
    for n in names:
        w.field(n, *fieldinfo[n])

    for i, r in enumerate(at):
        # check if polygon is closed, if not close polygon for QGIS
        if verts[i][-1] != verts[i][0]:
            verts[i] = verts[i] + [verts[i][0]]
        w.poly([verts[i]])
        w.record(*r)

    # close
    w.close()
    print("wrote {}".format(filename))
    # write the projection file
    write_prj(filename, mg, epsg, prj)
    return
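
A hypothetical usage sketch for write_grid_shapefile (illustration only; it assumes flopy is installed and that the StructuredGrid constructor and import paths below match flopy's public API):

import numpy as np
from flopy.discretization import StructuredGrid                 # assumed import path
from flopy.export.shapefile_utils import write_grid_shapefile   # assumed import path

nrow, ncol = 3, 4
delc = np.full(nrow, 100.0)   # row spacings
delr = np.full(ncol, 100.0)   # column spacings
grid = StructuredGrid(delc=delc, delr=delr)

# one attribute value per grid cell
arrays = {"top": np.full((nrow, ncol), 10.0)}
write_grid_shapefile("grid.shp", grid, arrays)  # writes one POLYGON per cell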
Example #18
    ]]


parser = argparse.ArgumentParser()
parser.add_argument(
    "-d",
    "--dataset",
    required=True,
    help="The folder for the dataset to extract the geoinformation from")
parser.add_argument("-o",
                    "--output",
                    required=True,
                    help="The output shapefile for the geoinformation")
args = vars(parser.parse_args())

with shapefile.Writer(args["output"]) as shp:
    shp.field('FILENAME', 'C')
    shp.field('UUID', 'C', size=37)

    widgets = [
        "Classifying images: ",
        progressbar.Percentage(), " ",
        progressbar.Bar(), " ",
        progressbar.ETA()
    ]

    image_paths = [
        image_path for image_path in list_images(args["dataset"])
        if "mask" not in image_path and image_path.endswith(".tiff")
    ]
    pbar = progressbar.ProgressBar(maxval=len(image_paths),
Example #19
import shapefile
w = shapefile.Writer('Soal5', shapeType=shapefile.POLYLINE)
w.shapeType
w.field("kolom1", "C")
w.field("kolom2", "C")
w.record("ngek", "satu")
w.line([[[1, 5], [5, 5], [5, 1], [3, 3], [1, 1]]])
w.close()
Example #20
import shapefile  # import the shapefile library
w = shapefile.Writer()  # create a shapefile writer
w.shapeType  # check which shape type the writer uses
w.field("kolom1", "C")  # create a field named kolom1 with character type
w.field("kolom2", "C")  # create a field named kolom2 with character type
w.record("line1", "satu")
w.record("line2", "dua")
w.record("line3", "tiga")
w.record("line4", "empat")
w.line(parts=[[[-6.8743901794, 107.57582112], [-6.8742404509, 107.5757617658],
               [-6.8740486876, 107.575817996],
               [-6.87375, 107.5762]]])  # supply the coordinates that make up the line
w.line(parts=[[[-6.8743924337, 107.5758005192],
               [-6.8742400642, 107.5757367221], [-6.87405, 107.57578],
               [-6.87398, 107.5753]]])  # supply the coordinates that make up the line
w.line(parts=[[[-6.87396, 107.5753], [-6.8740309305, 107.5757943296],
               [-6.8737401594, 107.5761551673]]])  # supply the coordinates that make up the line

w.save("polyline")  # save the shapefile under the name "polyline"
Example #21
    def createConvexPath(self, pair):
        #pr = cProfile.Profile()
        #pr2 = cProfile.Profile()
        
        print pair[1]
        odPointsList = ((pair[0][0].x, pair[0][0].y), (pair[0][1].x, pair[0][1].y))
        #st_line = LineString(odPointsList)
        labeledObstaclePoly = []
        totalConvexPathList = {}
        
        dealtArcList = {}
        totalConvexPathList[odPointsList] = LineString(odPointsList)
        
        terminate = 0
        idx_loop1 = 0
        #sp_l_set = []
        time_loop1 = 0
        time_contain2 = 0
        time_crossingDict = 0
        time_convexLoop = 0 
        time_impedingArcs = 0
        time_spatialFiltering = 0
        time_loop1_crossingDict = 0
        time_buildConvexHulls = 0
        while terminate == 0:
            t1s = time.time()
            idx_loop1 += 1
            
            t6s = time.time()
            w = shapefile.Writer(shapefile.POLYLINE)
            w.field('nem')
            for line in totalConvexPathList:
                w.line(parts=[[ list(x) for x in line ]])
                w.record('ff')
            w.save(self.path + "graph_" + str(idx_loop1) + self.version_name)

            totalGraph = self.createGraph(totalConvexPathList.keys())
            spatial_filter_n = networkx.dijkstra_path(totalGraph, odPointsList[0], odPointsList[1])
            spatial_filter = []
            for i in xrange(len(spatial_filter_n)-1):
                spatial_filter.append([spatial_filter_n[i], spatial_filter_n[i+1]])

            w = shapefile.Writer(shapefile.POLYLINE)
            w.field('nem')
            for line in spatial_filter:
                w.line(parts=[[ list(x) for x in line ]])
                w.record('ff')
            w.save(self.path + "spatial Filter_" + str(idx_loop1) + self.version_name)
            
            #sp_length = 0
            #for j in spatial_filter:
                #sp_length += LineString(j).length        
            #sp_l_set.append(sp_length)
            
            crossingDict = defaultdict(list)
            
            for line in spatial_filter:
                Line = LineString(line)
                for obs in self.obstaclesPolygons:
                    if Line.crosses(obs):
                        if obs not in labeledObstaclePoly:
                            labeledObstaclePoly.append(obs)
                    
                        crossingDict[tuple(line)].append(obs)
            
            t6e = time.time()
            time_spatialFiltering += t6e - t6s 
            
            if len(crossingDict.keys()) == 0:
                terminate = 1
                continue
            else:
                t7s = time.time()
                for tLine in crossingDict.keys():
                    #cLine = list(tLine)
                    if dealtArcList.has_key(tLine):
                        try:
                            del totalConvexPathList[tLine]
                        except:
                            del totalConvexPathList[(tLine[1], tLine[0])]
                        continue
                    else:
                        dealtArcList[tLine] = LineString(list(tLine))
                        try:
                            del totalConvexPathList[tLine]
                        except:
                            del totalConvexPathList[(tLine[1], tLine[0])]
                        containingObs = []
                        for obs in crossingDict[tLine]:
                            
                            convexHull = self.createConvexhull(obs, tLine)
                            self.splitBoundary(totalConvexPathList, convexHull)
                            
                            
                            convexHull = self.createConvexhull(obs, odPointsList)
                            self.splitBoundary(totalConvexPathList, convexHull)
                            convexHull2 = self.createConvexhull(obs)
                            if convexHull2.contains(Point(tLine[0])):
                                containingObs.append(obs)
                            elif convexHull2.contains(Point(tLine[1])):
                                containingObs.append(obs)
                        if len(containingObs) != 0:   #SPLIT
                            subConvexPathList = {}
                            vi_obs = MultiPolygon([x for x in containingObs])
                            containedLineCoords = list(tLine)
                            fromX = containedLineCoords[0][0]
                            fromY = containedLineCoords[0][1]
                            toX = containedLineCoords[1][0]
                            toY = containedLineCoords[1][1]
                            fxA = (fromY - toY) / (fromX - toX)
                            fxB = fromY - (fxA * fromX)
                            minX = vi_obs.bounds[0]
                            maxX = vi_obs.bounds[2]
                            split_line = LineString([(min(minX, fromX, toX), fxA * min(minX, fromX, toX) + fxB), (max(maxX, fromX, toX), fxA * max(maxX, fromX, toX) + fxB)])
                            
                            for obs in containingObs:
                                s1, s2 = self.splitPolygon(split_line, obs)
                                dividedObsPoly = []
                                #to deal with multipolygon
                                a = s1.intersection(obs)
                                b = s2.intersection(obs)
                                if a.type == "Polygon":
                                    dividedObsPoly.append(a)
                                else:
                                    for o in a.geoms:
                                        if o.type == "Polygon":
                                            dividedObsPoly.append(o)
                                if b.type == "Polygon":
                                    dividedObsPoly.append(b)
                                else:
                                    for o2 in b.geoms:
                                        if o2.type == "Polygon":
                                            dividedObsPoly.append(o2)
                                
                                for obs2 in dividedObsPoly:
                                    for pt in tLine:
                                        convexHull = self.createConvexhull(obs2, [pt])
                                        self.splitBoundary(subConvexPathList, convexHull)
                            subVertices = []
                            for line in subConvexPathList:
                                subVertices.extend(line)
                            subVertices = list(set(subVertices))
                            containingObsVertices = []
                            for obs in containingObs:
                                containingObsVertices.extend(list(obs.exterior.coords))
                            subVertices = [x for x in subVertices if x in containingObsVertices]
                            deleteList = []
                            for line in subConvexPathList:
                                chk_cross = 0
                                for obs in containingObs:
                                    if subConvexPathList[line].crosses(obs):
                                        chk_cross = 1
                                if chk_cross == 1:
                                    deleteList.append(line)
                            for line in deleteList:
                                del subConvexPathList[line]
                                #subConvexPathList.remove(line)
                            pairList = []
                            for i in range(len(subVertices)):
                                for j in range(i+1, len(subVertices)):
                                    pairList.append((subVertices[i], subVertices[j]))
                            for i in pairList:
                                Line = LineString(i)
                                chk_cross = 0
                                for obs in containingObs:
                                    if Line.crosses(obs):
                                        chk_cross = 1
                                    elif Line.within(obs):
                                        chk_cross = 1
                                if chk_cross == 0:
                                    subConvexPathList[i] = Line
                                    #subConvexPathList.append(i)
                            buffer_st_line = split_line.buffer(0.1)
                            deleteList = []
                            for line in subConvexPathList:
                                if buffer_st_line.contains(subConvexPathList[line]):
                                    deleteList.append(line)
                            for line in deleteList:
                                if subConvexPathList.has_key(line):
                                    del subConvexPathList[line]
                            #subConvexPathList = [x for x in subConvexPathList if x not in deleteList]
                            for line in subConvexPathList:
                                if not totalConvexPathList.has_key(line):
                                    if not totalConvexPathList.has_key((line[1],line[0])):
                                        totalConvexPathList[line] = subConvexPathList[line]                                #if line not in totalConvexPathList:
                                    #if [line[1], line[0]] not in totalConvexPathList:
                                        #totalConvexPathList.append(line)

                w = shapefile.Writer(shapefile.POLYLINE)
                w.field('nem')
                for line in totalConvexPathList:
                    w.line(parts=[[ list(x) for x in line ]])
                    w.record('ff')
                w.save(self.path + "graph2_" + str(idx_loop1) + self.version_name) 
                t7e = time.time()
                time_loop1_crossingDict += t7e - t7s
                #new lines            
                labeled_multyPoly = MultiPolygon([x for x in labeledObstaclePoly])
                convexHull = self.createConvexhull(labeled_multyPoly, odPointsList)
                self.splitBoundary(totalConvexPathList, convexHull)
                #new lines end             
                                  
                #impededPathList 
                t5s = time.time()
                impededPathList = {}
                for line in totalConvexPathList:
                    for obs in labeledObstaclePoly:
                        if totalConvexPathList[line].crosses(obs):
                            impededPathList[line] = totalConvexPathList[line]
                            break
                t5e = time.time()
                time_impedingArcs += t5e - t5s
                for line in impededPathList:
                    del totalConvexPathList[line]
               
                terminate2 = 0
                idx_loop2 = 0
                t1e = time.time()
                time_loop1 += t1e - t1s    
                #w = shapefile.Writer(shapefile.POLYGON)
                #w.field('net')
                #for obs in labeledObstaclePoly:
                    #w.poly(parts=[[list(x) for x in list(obs.exterior.coords)]])
                    #w.record('ff')
                #w.save(self.path + "obs"+ str(idx_loop1) + "_" + self.version_name)                  
                while terminate2 == 0:
                    idx_loop2 += 1

                    deleteList = []
                    crossingDict = defaultdict(list)

                    for line in dealtArcList:
                        if impededPathList.has_key(line):
                            del impededPathList[line]
                        elif impededPathList.has_key((line[1], line[0])):
                            del impededPathList[(line[1],line[0])]
                    
                    t3s = time.time()
                    #pr.enable()
                    for line in impededPathList:
                        for obs in labeledObstaclePoly:
                            if impededPathList[line].crosses(obs):
                                crossingDict[line].append(obs)
                    
                    t3e = time.time()
                    time_crossingDict += t3e - t3s
                    #at this point, impededArcList should be emptied, as it only contains crossing arcs, and all of them 
                    #should be replaced by convex hulls. 
                    for line in crossingDict:
                        del impededPathList[line]
                    for line in impededPathList:
                        if not totalConvexPathList.has_key(line):
                            totalConvexPathList[line] = impededPathList[line]
                    impededPathList = {}
   
                    if len(crossingDict.keys()) == 0:
                        terminate2 = 1
                        continue
                    else:
                        #w = shapefile.Writer(shapefile.POLYLINE)
                        #w.field('nem')
                        #for line in crossingDict:
                            #w.line(parts=[[ list(x) for x in line ]])
                            #w.record('ff')
                        #w.save(self.path + "crossingDict_" + str(idx_loop1) + "_"+ str(idx_loop2) +"_"+ self.version_name)                        
                        t4s = time.time()
                        
                        for tLine in crossingDict.keys():
                            dealtArcList[tLine] = crossingDict[tLine]                
                            containingObs = []
                            for obs in crossingDict[tLine]:
                                chk_contain = 0
                                convexHull2 = self.createConvexhull(obs)
                                if convexHull2.contains(Point(tLine[0])):
                                    containingObs.append(obs)
                                    chk_contain = 1
                                elif convexHull2.contains(Point(tLine[1])):
                                    containingObs.append(obs)
                                    chk_contain = 1
                                if chk_contain == 0:
                                    t10s = time.time()
                                    convexHull = self.createConvexhull(obs, tLine)
                                    self.splitBoundary(impededPathList, convexHull)
                                    t10e = time.time()
                                    time_buildConvexHulls += t10e - t10s

                            if len(containingObs) != 0:  #SPLIT
                                #print "SPLIT"
                                t2s = time.time()
                                subConvexPathList = {}
                                vi_obs = MultiPolygon([x for x in containingObs])
                                containedLineCoords = tLine
                                fromX = containedLineCoords[0][0]
                                fromY = containedLineCoords[0][1]
                                toX = containedLineCoords[1][0]
                                toY = containedLineCoords[1][1]
                                fxA = (fromY - toY) / (fromX - toX)
                                fxB = fromY - (fxA * fromX)
                                minX = vi_obs.bounds[0]
                                maxX = vi_obs.bounds[2]
                                split_line = LineString([(min(minX, fromX, toX), fxA * min(minX, fromX, toX) + fxB), (max(maxX, fromX, toX), fxA * max(maxX, fromX, toX) + fxB)])
                                
                                for obs in containingObs:
                                    s1, s2 = self.splitPolygon(split_line, obs)
                                    dividedObsPoly = []
                                    #to deal with multipolygon
                                    a = s1.intersection(obs)
                                    b = s2.intersection(obs)
                                    if a.type == "Polygon":
                                        dividedObsPoly.append(a)
                                    else:
                                        for o in a.geoms:
                                            if o.type == "Polygon":
                                                dividedObsPoly.append(o)
                                    if b.type == "Polygon":
                                        dividedObsPoly.append(b)
                                    else:
                                        for o2 in b.geoms:
                                            if o2.type == "Polygon":
                                                dividedObsPoly.append(o2)
                                    
                                    for obs2 in dividedObsPoly:
                                        for pt in tLine:
                                            convexHull = self.createConvexhull(obs2, [pt])
                                            self.splitBoundary(subConvexPathList, convexHull)
                                subVertices = []
                                for line in subConvexPathList:
                                    subVertices.extend(line)
                                subVertices = list(set(subVertices))
                                containingObsVertices = []
                                for obs in containingObs:
                                    containingObsVertices.extend(list(obs.exterior.coords))
                                subVertices = [x for x in subVertices if x in containingObsVertices]
                                deleteList = []
                                for line in subConvexPathList:
                                    chk_cross = 0
                                    for obs in containingObs:
                                        if subConvexPathList[line].crosses(obs):
                                            chk_cross = 1
                                    if chk_cross == 1:
                                        deleteList.append(line)
                                for line in deleteList:
                                    del subConvexPathList[line]
                                    
                                pairList = []
                                for i in range(len(subVertices)):
                                    for j in range(i+1, len(subVertices)):
                                        pairList.append((subVertices[i], subVertices[j]))
                                
                                for i in pairList:
                                    Line = LineString(list(i))
                                    chk_cross = 0
                                    for obs in containingObs:
                                        if Line.crosses(obs):
                                            chk_cross = 1
                                        elif Line.within(obs):
                                            chk_cross = 1
                                    if chk_cross == 0:
                                        subConvexPathList[i] = Line
                                      
                                buffer_st_line = split_line.buffer(0.1)
                                deleteList = []
                                for line in subConvexPathList:
                                    if buffer_st_line.contains(subConvexPathList[line]):
                                        deleteList.append(line)
                                for line in deleteList:
                                    del subConvexPathList[line]
                                for line in subConvexPathList:
                                    if not impededPathList.has_key(line):
                                        if not impededPathList.has_key((line[1], line[0])):
                                            impededPathList[line] = subConvexPathList[line]
                                    
                                t2e = time.time()
                                time_contain2 += t2e - t2s
                        #pr.disable()
                        for line in dealtArcList:
                            if impededPathList.has_key(line):
                                del impededPathList[line]
                        #impededPathList = [x for x in impededPathList if x not in dealtArcList]
                        t4e = time.time()
                        time_convexLoop += t4e - t4s
                        #end of else
                    w = shapefile.Writer(shapefile.POLYLINE)
                    w.field('nem')
                    for line in impededPathList:
                        w.line(parts=[[ list(x) for x in line ]])
                        w.record('ff')
                    w.save(self.path + "After_graph_" + str(idx_loop1) + "_"+ str(idx_loop2) +"_"+ self.version_name)
                    #end of while2
                for line in impededPathList:
                    if not totalConvexPathList.has_key(line):
                        totalConvexPathList[line] = impededPathList[line]
                
                #totalConvexPathList.extend(impededPathList)
        totalGraph = self.createGraph(totalConvexPathList.keys())
        esp_n = networkx.dijkstra_path(totalGraph, odPointsList[0], odPointsList[1])
        esp = []
        for i in range(len(esp_n)-1):
            esp.append([esp_n[i], esp_n[i+1]])
        w = shapefile.Writer(shapefile.POLYLINE)
        w.field('nem')
        no_edges = 0
        for line in totalConvexPathList.keys():
            no_edges += 1
            w.line(parts=[[ list(x) for x in line ]])
            w.record('ff')
        w.save(self.path + "totalpath" + self.version_name + "%d" % pair[1] )              
        w = shapefile.Writer(shapefile.POLYLINE)
        w.field('nem')
        for line in esp:
            w.line(parts=[[ list(x) for x in line ]])
            w.record('ff')
        w.save(self.path + "ESP_" + self.version_name + "%d" % pair[1])
        #sp_len_str = str(sp_l_set)[1:-1]
        
        #s = StringIO.StringIO()
        #sortby = 'cumulative'
        #ps = pstats.Stats(pr, stream=s).sort_stats(sortby)
        #ps.print_stats()
        #print s.getvalue()
#        
#        s = StringIO.StringIO()
#        sortby = 'cumulative'
#        ps = pstats.Stats(pr2, stream=s).sort_stats(sortby)
#        ps.print_stats()
#        print s.getvalue()
        
        print "loop1: ", time_loop1
        print "Spatial Filtering: ", time_spatialFiltering
        print "loop1 crossingDict: ", time_loop1_crossingDict
        print "crossingDict: ", time_crossingDict
        print 'convexLoop: ', time_convexLoop
        print "time_contain: ", time_contain2
        print "impedingArcs: ", time_impedingArcs
        print "convexHUll: ", time_buildConvexHulls
        return 'convexpath %d %d %d %f %f %f' % (pair[1], no_edges, len(labeledObstaclePoly), time_convexLoop, time_crossingDict, time_buildConvexHulls)
Example #22
import shapefile  # pyshp, which was installed earlier
w = shapefile.Writer()  # declare a new shapefile writer
w.shapeType  # check the shape type of the writer created above

w.field("kolom1", "C")  # create a field named kolom1 with character type
w.field("kolom2", "C")  # create a field named kolom2 with character type

w.record("ngek", "satu")  # fill a record for the fields created above
w.record("crot", "dua")  # fill a record for the fields created above

w.poly(parts=[[[1,3],[5,3], [5,2],[1,2], [1,3]]], shapeType=shapefile.POLYLINE)  # use the POLYLINE shape type; the first coordinate meets the last, closing the outline
w.poly(parts=[[[1,6],[5,6], [5,9],[1,9], [1,6]]], shapeType=shapefile.POLYLINE)  # use the POLYLINE shape type; the first coordinate meets the last, closing the outline

w.save("soal9")  # the argument is the base name the file is saved under
Example #23
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 20 16:54:38 2019

@author: lenovo
"""

import shapefile

w = shapefile.Writer("soal6")

w.field("kolom1", "C")
w.field("kolom2", "C")

w.record("ngek", "satu")

w.line([[[1, 3], [5, 3]]])
w.close()
Example #24
# -*- coding: utf-8 -*-
"""
Created on wed Oct 23 12:47:35 2019

@author: Sri Rahayu
"""

import shapefile  # import the shapefile library

w = shapefile.Writer('Nomor3', shapeType=1)  # create a shapefile writer named 'Nomor3'; shapeType 1 means POINT

w.field("kolom1", "C")  # create the table's first column
w.field("kolom2", "C")  # create the table's second column

w.record("ngek", "satu")  # fill a table row: "ngek" goes into kolom1 and "satu" into kolom2
w.record("ngok", "dua")  # fill a table row: "ngok" goes into kolom1 and "dua" into kolom2

w.point(1, 1)  # draw a point at coordinates x, y = 1, 1
w.point(2, 2)  # draw a point at coordinates x, y = 2, 2

w.close()  # close the writer now that drawing is finished
Example #25
def merge_ems_zips(emergency_tags, elements):
    clean_folder(emergency_tags)
    #exit()

    for tags in emergency_tags:
        create_path(folder_input_zip + tags + "_merged")
        for category in elements:
            create_path(folder_input_zip + tags + "_" + category)

    for file in sorted(os.listdir(folder_input_zip)):
        for tags in emergency_tags:
            if tags in file:
                if file.endswith(".zip"):
                    extract_zipped_files(file)

    # move files
    for file in sorted(os.listdir(folder_input_shp)):
        for ems in emergency_tags:
            if ems in file:
                for category in elements:
                    if category in file:
                        file_move_name_in = folder_input_shp + file
                        file_move_name_out = folder_output_shp + ems + "_" + category + "/" + file
                        shutil.move(file_move_name_in, file_move_name_out)

    # merge files
    for ems in emergency_tags:
        for category in elements:
            folder_input_in = folder_input_merge + ems + "_" + category + '/*.shp'
            folder_input_out = folder_output_merge + ems + "_" + category + '_merged.shp'
            files = sorted(glob.glob(folder_input_in))
            w = shapefile.Writer()
            for f in files:
                r = shapefile.Reader(f)
                w._shapes.extend(r.shapes())
                w.records.extend(r.records())
                w.fields = list(r.fields)
            try:
                w.save(folder_input_out)
            except:
                print "cannot save object %s" % f
                pass

            folder_input_in = folder_input_merge + ems + "_" + category + '/*.prj'
            folder_input_out = folder_output_merge + ems + "_" + category + '_merged.prj'
            files = sorted(glob.glob(folder_input_in))
            for f in files:
                shutil.copy(f, folder_input_out)
                break
            '''outpath = folder_output_merge + ems + "_" + category + '_merged.wkt'
            computed_union_geometry = ems_feed_reader.compute_union_geometry(files)
            with open(outpath, 'w') as geom_file:
                geom_file.write(computed_union_geometry)'''
        #move files
        move_merged_files(ems)
        #import shapefile into postgis
        print('importing shapefile into POSTGIS')
        import_shp.shp2postgis(folder_output_merge + ems + '_merged', ems)

    #remove temp folder
    try:
        shutil.rmtree(folder_output_zip)
    except OSError, e:
        print("Error: %s - %s." % (e.filename, e.strerror))
Example #26
                    break  # Terminate the "for ilon" loop
            if failure:
                break  # Terminate the "for ilat" loop and start analysing the next shape (the next iteration in the for ii loop)

##### GENERATE OUTPUTS ##########################################################################################
'''
Recall that the set of 3 GIS shapefiles is called a layer:
.shp – geographical information (latitudes and longitudes)
.dbf – dBASE III table containing attribute information for the shapes in the .shp file
.shx – shape index file, contains the offset of the records in the .shp file (for random access reading)
For details visit https://pypi.python.org/pypi/pyshp
'''
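
# As a quick illustration (added here, not part of the original script), those three
# files surface through the pyshp Reader that this script uses below as sf_ngbu:
print(len(sf_ngbu.shapes()))    # geometries, read from the .shp file
print(len(sf_ngbu.records()))   # attribute rows, read from the .dbf file
print(sf_ngbu.fields)           # field definitions from the .dbf header; the .shx index is used internally for random access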

# Update/Re-write the Natural Gas Buses shapefiles with the new failure criteria
# For details visit https://pypi.python.org/pypi/pyshp
w = shapefile.Writer(
    shapeType=shapefile.POINT)  # Recall that sf_ngbu.shapeType = 1 ==> Point
w.fields = list(fields_ngbuses)  # Attribute (field) definitions, stored in the .dbf file. Do not use sf_ngbu.fields, because it lacks the new 'Outaged' attribute
w._shapes = list(sf_ngbu.shapes())  # Geometry: shp file
w.records = list(records_ngbuses)  # Records: dbf file
if want2printFailures == 2:
    print('Writing NGBuses output shapefiles at... ' + myshp_ngbuses + '_analyzed.*')
w.save(myshp_ngbuses + '_analyzed')

# Update/Re-write the Electric Power Buses shapefiles with the new failure criteria
w = shapefile.Writer(
    shapeType=shapefile.POINT)  # Recall that sf_epbu.shapeType = 1 ==> Point
w.fields = list(fields_epbuses)  # Attribute (field) definitions, stored in the .dbf file. Do not use sf_epbu.fields, because it lacks the new 'Outaged' attribute
from GPSTrans import *
import shapefile

# Basic path settings
city_name = "Guangzhou"
city_wander_dir = "/Users/mac/Desktop/CityWander/"
spider_path = city_wander_dir + "Streetview_Spider/"
picture_path = city_wander_dir + "Streetview_Pictures/"

img_info_file = open(
    spider_path + "Catched_data/" + city_name + "/" + city_name +
    "_img_info_file.txt", 'r')

#point_shp=open("/Users/mac/Desktop/城市漫游/代码/Tecent_Experiment/point_shp",'w')

w = shapefile.Writer(shapefile.POINT)
w.autoBalance = 1  # keep geometries and attribute records in sync

dic = {}  # used as a set to de-duplicate the (lat, lng) pairs read from the file

for i in img_info_file.readlines():
    lat = i.split('_')[1]
    lng = i.split('_')[2]

    dic[(lat, lng)] = 1

w.field('FIRST_FLD')
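
# A minimal sketch (not part of the original snippet; the output base name is
# hypothetical) of how the de-duplicated coordinates could then be written out as
# POINT records with the pyshp 1.x API used above:
for lat, lng in dic:
    w.point(float(lng), float(lat))    # pyshp expects x (longitude) first, then y (latitude)
    w.record('streetview')             # one value for the single FIRST_FLD field
w.save(spider_path + "Catched_data/" + city_name + "/" + city_name + "_points")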
Example #28
0
def contours(
    fn,
    package_dir,
    t2convert,
    levels=[0.1, 0.4, 0.8],
    names=['Light', 'Medium', 'Heavy'],
):

    print "contouring data in:", fn
    nc = Dataset(fn)
    particles = nc_particles.Reader(nc)
    times = particles.times
    dt = [
        np.abs(((output_t - t2convert).total_seconds()) / 3600)
        for output_t in times
    ]
    t = dt.index(min(dt))
    print('Converting output from: ', times[t])

    TheData = particles.get_timestep(t,
                                     variables=[
                                         'latitude', 'longitude', 'id',
                                         'depth', 'mass', 'age', 'status_codes'
                                     ])

    # contouring
    status = TheData['status_codes']
    floating = np.where(status == 2)[0]
    x = TheData['longitude'][floating]
    y = TheData['latitude'][floating]

    # Perform the kernel density estimate
    xx, yy = np.mgrid[min(x) - .1:max(x) + .1:100j,
                      min(y) - .1:max(y) + .1:100j]
    positions = np.vstack([xx.ravel(), yy.ravel()])
    values = np.vstack([x, y])
    kernel = st.gaussian_kde(values)
    f = np.reshape(kernel(positions).T, xx.shape)
    max_density = f.max()

    levels = sorted(levels)  # sort a copy so the mutable default argument is not modified in place
    particle_contours = [lev * max_density for lev in levels]

    cs = contour(xx, yy, f, particle_contours)

    w = shp.Writer(shp.POLYGON)
    w.autoBalance = 1  # keep shapes and their attribute records in sync

    w.field('Year', 'C')
    w.field('Month', 'C')
    w.field('Day', 'C')
    w.field('Hour', 'C')
    w.field('Depth', 'N')
    w.field('Type', 'C')

    for c in range(len(cs.collections)):
        p = cs.collections[c].get_paths()[0]
        v = p.vertices
        coords = [[[i[0], i[1]] for i in v]]
        w.poly(shapeType=3, parts=coords)
        w.record(times[t].year, times[t].month, times[t].day, times[t].hour,
                 TheData['depth'][c], names[c])
        print(names[c])

    source_fdir = os.path.join(package_dir, 'source_files')
    shapefile_name = os.path.split(fn)[-1].split('.')[0]
    w.save(os.path.join(source_fdir, shapefile_name))

    nc.close()

    # create the PRJ file
    prj_filename = os.path.join(source_fdir, shapefile_name)
    write_proj_file(prj_filename)

    files = os.listdir(source_fdir)
    zipfname = shapefile_name + '.zip'
    zipf = zipfile.ZipFile(os.path.join(source_fdir, zipfname), 'w')
    for f in files:
        if f.split('.')[0] == shapefile_name:
            zipf.write(os.path.join(source_fdir, f), arcname=f)
    zipf.close()

    return zipfname
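
# A hedged usage sketch (file name, directory and timestamp are placeholders, not
# taken from the original project): pick the model output closest to a target time
# and export the Light/Medium/Heavy contours as a zipped shapefile package.
from datetime import datetime

zip_name = contours(
    fn="spill_particles.nc",           # hypothetical nc_particles-format output file
    package_dir="./export_package",    # assumed to already contain a 'source_files' subdirectory
    t2convert=datetime(2020, 6, 1, 12, 0),
)
print("wrote", zip_name)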
#                                      branch_points,branch_lengths,chain_breaks)

#--set break points at xsections - only use unique xsections
branch_points,branch_lengths,chain_breaks = msu.set_xsec_breaks_dissolve(names,topo_id,\
                                      xsec_attri,branch_points,branch_lengths,\
                                      chain_breaks)

#--set connection points
branch_points,branch_lengths = msu.set_conn_points(names,st_chain,end_chain,bpoints,conn_name,\
                                     conn_chain,branch_points,branch_lengths)

#--branch_points from meters to feet
branch_points = msu.m_2_ft(branch_points)

#--set the writer instance
wr = shapefile.Writer()
wr.field('name', fieldType='C', size=50)
wr.field('topo_id', fieldType='C', size=50)
wr.field('reach', fieldType='N', size=50)
#start and end branch xsec profile attributes
wr.field('pf_name_up', fieldType='C', size=100)
wr.field('pf_name_dw', fieldType='C', size=100)
wr.field('pf_area_up', fieldType='N', size=100, decimal=5)
wr.field('pf_area_dw', fieldType='N', size=100, decimal=5)
wr.field('pf_bot_up', fieldType='N', size=100, decimal=5)
wr.field('pf_bot_dw', fieldType='N', size=100, decimal=5)

#--tolerance for length of reaches and chainage length
tol = 1.0

#--some info for writing the xsections
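
# A minimal sketch (coordinates and attribute values below are made up for
# illustration) of how one branch reach could be written against the nine fields
# declared above, using the pyshp 1.x API this script relies on:
wr.line(parts=[[[0.0, 0.0], [100.0, 0.0], [100.0, 50.0]]])   # one polyline, coordinates in feet
wr.record('branch_01', 'topo_01', 1,
          'xsec_up', 'xsec_dw',        # profile names at the upstream/downstream ends
          125.0, 130.5,                # cross-section areas
          10.2, 9.8)                   # channel bottom elevations
# wr.save('branches_out')              # hypothetical output base name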
Example #30
0
def run(key, debug=False):

  try:
    print("starting create_shapefiles with key " + key)

    from django.db import connection
    connection.close()

    directory = join(MAPS_DIRECTORY,  key)
    if not isdir(directory):
        mkdir(directory)


    writer_points = shapefile.Writer(POINT)
    writer_polygons = shapefile.Writer(POLYGONM)

    # makes sure dbf and shapes are in sync
    writer_points.autoBalance = 1
    writer_polygons.autoBalance = 1

    # set shape type to point
    #writer_points.shapeType = 1
    #writer_polygons.shapeType = 25

    # create fields
    for writer in [writer_points, writer_polygons]:
        writer.field("name")
        writer.field("confidence")
        writer.field("countrycode")
        writer.field("geonames_id")
        writer.field("pcode")
        writer.field("start_time")
        writer.field("end_time")

    number_of_points = 0
    number_of_polygons = 0
    for feature in Feature.objects.filter(order__token=key):

        fp = feature.featureplace_set.filter(correct=True).first()
        if fp:

            end = feature.end.strftime('%y-%m-%d') if feature.end else None
            start = feature.start.strftime('%y-%m-%d') if feature.start else None

            place = fp.place

            # what happens to feature.confidence decimal??? need to convert to float?
            writer_points.record(place.name.encode("utf-8"), fp.confidence, place.country_code, place.geonames_id, place.pcode, start, end)
            writer_points.point(float(place.point.x), float(place.point.y))
            number_of_points += 1

            try:
                if place.mpoly:
                    #print "pyshp doesn't seem to be able to handle mpoly with original coords"
                    for c in place.mpoly.coords:
                        writer_polygons.record(place.name.encode("utf-8"), fp.confidence, place.country_code, place.geonames_id, place.pcode, start, end)
                        writer_polygons.poly(parts=c, shapeType=POLYGONM)
                        number_of_polygons += 1
            except Exception as e:
                print("caught the following error while trying to write multipolygons", e)

    if number_of_points > 0 or number_of_polygons > 0:
        if number_of_points > 0:
            writer_points.save(join(directory, key + "_points"))
            with open(join(directory, key + "_points.prj"), "wb") as f:
                f.write(CRS)
            print("WROTE PRJ")
        if number_of_polygons > 0:
            writer_polygons.save(join(directory, key + "_polygons"))
            with open(join(directory, key + "_polygons.prj"), "wb") as f:
                f.write(CRS)

        with ZipFile(join(directory, key + ".zip"), 'w') as zipped_shapefile:
            for filename in listdir(directory):
                if filename.split(".")[-1] in ("cpg","dbf","shp","shx","prj"):
                    path_to_file = join(directory, filename)
                    zipped_shapefile.write(path_to_file, filename)
                    remove(path_to_file)

    print("finished creating shapefiles")

  except Exception as e:
    print("CAUGHT EXCEPTION in create_shapefiles:", e)