Example No. 1
    def __init__(self, path, time_col):
        shp = pysal.open(path + '.shp')
        dbf = pysal.open(path + '.dbf')

        # extract the spatial coordinates from the shapefile
        x = []
        y = []
        n = 0
        for i in shp:
            count = 0
            for j in i:
                if count == 0:
                    x.append(j)
                elif count == 1:
                    y.append(j)
                count += 1
            n += 1

        self.n = n
        x = np.array(x)
        y = np.array(y)
        self.x = np.reshape(x, (n, 1))
        self.y = np.reshape(y, (n, 1))
        self.space = np.hstack((self.x, self.y))

        # extract the temporal information from the database
        t = np.array(dbf.by_col(time_col))
        line = np.ones((n, 1))
        self.t = np.reshape(t, (n, 1))
        self.time = np.hstack((self.t, line))

        # close open objects
        dbf.close()
        shp.close()
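A minimal usage sketch for this constructor. The enclosing class name is not shown in the snippet; SpaceTimeEvents is assumed here because the signature matches pysal.spatial_dynamics.interaction.SpaceTimeEvents, and the burkitt example data and its 'T' time column are assumptions as well:

import pysal
from pysal.spatial_dynamics.interaction import SpaceTimeEvents

# strip the extension: the constructor appends '.shp' and '.dbf' itself
path = pysal.examples.get_path('burkitt.shp')[:-4]
events = SpaceTimeEvents(path, 'T')  # 'T' assumed to be the time column
print(events.n, events.space.shape)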
Example No. 2
    def test_chi2(self):
        import pysal

        f = pysal.open(pysal.examples.get_path("usjoin.csv"))
        pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
        pci = pci.transpose()
        rpci = pci / (pci.mean(axis=0))
        w = pysal.open(pysal.examples.get_path("states48.gal")).read()
        w.transform = "r"
        sm = pysal.Spatial_Markov(rpci, w, fixed=True, k=5)
        chi = np.matrix(
            [
                [4.05598541e01, 6.44644317e-04, 1.60000000e01],
                [5.54751974e01, 2.97033748e-06, 1.60000000e01],
                [1.77528996e01, 3.38563882e-01, 1.60000000e01],
                [4.00390961e01, 7.68422046e-04, 1.60000000e01],
                [4.67966803e01, 7.32512065e-05, 1.60000000e01],
            ]
        ).getA()
        obs = np.matrix(sm.chi2).getA()
        np.testing.assert_array_almost_equal(obs, chi)
        obs = np.matrix(
            [
                [4.61209613e02, 0.00000000e00, 4.00000000e00],
                [1.48140694e02, 0.00000000e00, 4.00000000e00],
                [6.33129261e01, 5.83089133e-13, 4.00000000e00],
                [7.22778509e01, 7.54951657e-15, 4.00000000e00],
                [2.32659201e02, 0.00000000e00, 4.00000000e00],
            ]
        )
        np.testing.assert_array_almost_equal(obs.getA(), np.matrix(sm.shtest).getA())
Example No. 3
    def test_chi2(self):
        import pysal

        f = pysal.open(pysal.examples.get_path("usjoin.csv"))
        pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)])
        pci = pci.transpose()
        rpci = pci / (pci.mean(axis=0))
        w = pysal.open(pysal.examples.get_path("states48.gal")).read()
        w.transform = "r"
        sm = pysal.Spatial_Markov(rpci, w, fixed=True, k=5)
        chi = np.matrix(
            [
                [4.06139105e01, 6.32961385e-04, 1.60000000e01],
                [5.55485793e01, 2.88879565e-06, 1.60000000e01],
                [1.77772638e01, 3.37100315e-01, 1.60000000e01],
                [4.00925436e01, 7.54729084e-04, 1.60000000e01],
                [4.68588786e01, 7.16364084e-05, 1.60000000e01],
            ]
        ).getA()
        obs = np.matrix(sm.chi2).getA()
        np.testing.assert_array_almost_equal(obs, chi)
        obs = np.matrix(
            [
                [4.61209613e02, 0.00000000e00, 4.00000000e00],
                [1.48140694e02, 0.00000000e00, 4.00000000e00],
                [6.33129261e01, 5.83089133e-13, 4.00000000e00],
                [7.22778509e01, 7.54951657e-15, 4.00000000e00],
                [2.32659201e02, 0.00000000e00, 4.00000000e00],
            ]
        )
        np.testing.assert_array_almost_equal(obs.getA(), np.matrix(sm.shtest).getA())
Example No. 4
    def __init__(self, shapefile, idvariable=None, attribute=False):
        self.points = {}
        self.npoints = 0

        if idvariable:
            ids = get_ids(shapefile, idvariable)
        else:
            ids = None

        pts = ps.open(shapefile)

        # Get attributes if requested
        if attribute:
            dbname = os.path.splitext(shapefile)[0] + '.dbf'
            db = ps.open(dbname)
        else:
            db = None

        for i, pt in enumerate(pts):
            if ids and db:
                self.points[ids[i]] = {'coordinates':pt, 'properties':db[i]}
            elif ids and not db:
                self.points[ids[i]] = {'coordinates':pt, 'properties':None}
            elif not ids and db:
                self.points[i] = {'coordinates':pt, 'properties':db[i]}
            else:
                self.points[i] = {'coordinates':pt, 'properties':None}

        pts.close()
        if db:
            db.close()
        self.npoints = len(self.points)
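A minimal usage sketch; the enclosing class name is not shown in the snippet, so PointContainer below is a stand-in, and get_ids is assumed to come from the snippet's own module:

import pysal as ps

# juvenile.shp ships with pysal's example data and has a matching .dbf
pts = PointContainer(ps.examples.get_path('juvenile.shp'), attribute=True)
print(pts.npoints)
print(pts.points[0])  # {'coordinates': (x, y), 'properties': [...]}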
Example No. 5
def _writeShp():
    oShp = ps.open(out_shp, 'w')
    shp = ps.open(pt_shp)
    oDbf = ps.open(out_shp[:-3]+'dbf', 'w')
    dbf = ps.open(pt_shp[:-3]+'dbf')
    oDbf.header = dbf.header
    col_name = 'in_poly'
    col_spec = ('C', 14, 0)
    if polyID_col:
        col_name = polyID_col
        #db = ps.open(poly_shp[:-3]+'dbf')
        #col_spec = db.field_spec[db.header.index(polyID_col)]
    oDbf.header.append(col_name)
    oDbf.field_spec = dbf.field_spec
    oDbf.field_spec.append(col_spec)
    for poly, rec, i in zip(shp, dbf, correspondences):
        oShp.write(poly)
        rec.append(i)
        oDbf.write(rec)
    shp.close()
    oShp.close()
    dbf.close()
    oDbf.close()
    prj = open(pt_shp[:-3] + 'prj').read()
    oPrj = open(out_shp[:-3] + 'prj', 'w')
    oPrj.write(prj)
    oPrj.close()
    t4 = time.time()
    print('\t', t4 - t3, ' seconds to write shapefile')
    print('Shapefile written to %s' % out_shp)
Example No. 6
def test():
    # Test
    shp = pysal.open(pysal.examples.get_path('NAT.shp'),'r')
    dbf = pysal.open(pysal.examples.get_path('NAT.dbf'),'r')
    
    show_map(shp)
    
    ids = get_selected(shp)
    print(ids)
    
    w = pysal.rook_from_shapefile(pysal.examples.get_path('NAT.shp'))
    moran_scatter_plot(shp, dbf, "HR90", w)
    
    scatter_plot(shp, ["HR90", "PS90"])
    scatter_plot_matrix(shp, ["HR90", "PS90"])
    
    quantile_map(shp, dbf, "HC60", 5, basemap="leaflet_map")
    
    
    select_ids = [i for i,v in enumerate(dbf.by_col["HC60"]) if v < 20.0]
    select(shp, ids=select_ids)
    
    
    quantile_map(shp, dbf, "HC60", 5)
    
    
    lisa_map(shp, dbf, "HC60", w)
Example No. 7
    def __init__(self, path, time_col, infer_timestamp=False):
        shp = pysal.open(path + '.shp')
        dbf = pysal.open(path + '.dbf')

        # extract the spatial coordinates from the shapefile
        x = [coords[0] for coords in shp]
        y = [coords[1] for coords in shp]

        self.n = n = len(shp)
        x = np.array(x)
        y = np.array(y)
        self.x = np.reshape(x, (n, 1))
        self.y = np.reshape(y, (n, 1))
        self.space = np.hstack((self.x, self.y))

        # extract the temporal information from the database
        if infer_timestamp:
            col = dbf.by_col(time_col)
            if isinstance(col[0], date):
                day1 = min(col)
                col = [(d - day1).days for d in col]
                t = np.array(col)
            else:
                print("Unable to parse your time column as Python datetime "
                      "objects; proceeding as integers.")
                t = np.array(col)
        else:
            t = np.array(dbf.by_col(time_col))
        line = np.ones((n, 1))
        self.t = np.reshape(t, (n, 1))
        self.time = np.hstack((self.t, line))

        # close open objects
        dbf.close()
        shp.close()
Example No. 8
def test():
    import pysal

    road_shp = pysal.open('/Users/xun/Dropbox/dog_bites/phoenix.osm-roads.shp')    
    road_dbf = pysal.open('/Users/xun/Dropbox/dog_bites/phoenix.osm-roads.dbf')    
    
    import d3viz
    d3viz.setup()

    d3viz.show_map(road_shp, rebuild=True)
    
    roadJsonFile = d3viz.get_json_path(road_shp)
    
    import network_cluster 
    #roadJsonFile = '../test_data/man_road.geojson'
    network = network_cluster.NetworkCluster(roadJsonFile)    

    # Divide the road network into equal 1,000-foot segments
    network.SegmentNetwork(1000) 
    
    # Read in the dog-bite incident points file
    points_shp = pysal.open('/Users/xun/Dropbox/dog_bites/DogBitesOriginal/DogBitesOriginal.shp')
    d3viz.add_layer(points_shp)
    pointsJsonFile = d3viz.get_json_path(points_shp)
    points = network_cluster.GetJsonPoints(pointsJsonFile, encoding='latin-1')    

    # Snap these points to nearest road segment
    network.SnapPointsToNetwork(points)    
Example No. 9
 def db(self, headerOnly=False):
     if self.data['config']['other_missingValueCheck']:
         pysal.MISSINGVALUE = self.data['config']['other_missingValue']
     if 'fname' in self.data:
         fileType = self.data['fname'].rsplit('.')[-1].lower()
         self.fileType = fileType
         if fileType == 'csv':
             if headerOnly:
                 f = pysal.open(self.data['fname'], 'rU')
                 db = {}
                 db['header'] = f.header
                 f.close()
                 return db
             else:
                 return pysal.open(self.data['fname'], 'rU')
         elif fileType == 'dbf':
             db = pysal.open(self.data['fname'], 'r')
             header = []
             # grab only the numeric fields.
             if headerOnly:
                 for field, spec in zip(db.header, db.field_spec):
                     typ = spec[0].lower()
                     if typ in ['d', 'n', 'f']:
                         header.append(field)
                 return {'header': header}
             else:
                 return db
         else:
             print("Unknown File Type")
             return False
     else:
         return None
Example No. 10
def test2():
    #d6cd52286e5d3e9e08a5a42489180df3.shp
    import pysal
    shp = pysal.open('../www/tmp/d6cd52286e5d3e9e08a5a42489180df3.shp')
    dbf = pysal.open('../www/tmp/d6cd52286e5d3e9e08a5a42489180df3.dbf')        
    
    w = pysal.queen_from_shapefile('../www/tmp/d6cd52286e5d3e9e08a5a42489180df3.shp')
    #d3viz.moran_scatter_plot(shp, dbf, "dog_cnt", w)    
    import numpy as np
    y = np.array(dbf.by_col["dog_cnt"])
    lm = pysal.Moran_Local(y, w)    
    
    for i, j in enumerate(lm.q):
        if lm.p_sim[i] >= 0.05:
            print(0)
        else:
            print(j)
        
    import d3viz
    d3viz.setup()
    
    d3viz.show_map(shp)
    d3viz.scatter_plot(shp, ["dog_cnt","home_cnt"])
    
    d3viz.lisa_map(shp, "dog_cnt", lm)
Example No. 11
def map_line_shp(shp_link, which='all'):
    '''
    Create a map object from a line shapefile
    ...

    Arguments
    ---------

    shp_link        : str
                      Path to shapefile
    which           : str/list

    Returns
    -------

    map             : LineCollection
                      Map object with the lines from the shapefile

    '''
    shp = ps.open(shp_link)
    if which == 'all':
        db = ps.open(shp_link.replace('.shp', '.dbf'))
        n = len(db.by_col(db.header[0]))
        db.close()
        which = [True] * n
    patches = []
    for inwhich, shape in zip(which, shp):
        if inwhich:
            for xy in shape.parts:
                patches.append(xy)
    lc = LineCollection(patches)
    _ = _add_axes2col(lc, shp.bbox)
    return lc
Example No. 12
def test_d3viz():
    import pysal
    shp = pysal.open(pysal.examples.get_path('NAT.shp'),'r')
    dbf = pysal.open(pysal.examples.get_path('NAT.dbf'),'r')    

    #shp = pysal.open('/Users/xun/github/PySAL-Viz/test_data/man_road.shp','r')
    #dbf = pysal.open('/Users/xun/github/PySAL-Viz/test_data/man_road.dbf','r')
    import d3viz
    d3viz.setup()
    
    d3viz.init_map(shp)
    d3viz.show_map(shp)
   
    d3viz.scatter_plot(shp, field_x="HR90", field_y="HC90") 
    w = pysal.queen_from_shapefile(pysal.examples.get_path('NAT.shp'))
    d3viz.moran_scatter_plot(shp, dbf, "HR90", w)
    
    d3viz.quantile_map(shp, "HR90", 5)
    d3viz.quantile_map(shp, "HR90", 5, basemap="leaflet")
   
    import numpy as np
    y = np.array(dbf.by_col["HR90"])
    lm = pysal.Moran_Local(y, w)
    
    d3viz.lisa_map(shp, "HR90", lm)
Example No. 13
 def setUp(self):
     db =  pysal.open(pysal.examples.get_path("baltim.dbf"),'r')
     self.ds_name = "baltim.dbf"
     self.y_name = "PRICE"
     self.y = np.array(db.by_col(self.y_name)).T
     self.y.shape = (len(self.y),1)
     self.x_names = ["NROOM","AGE","SQFT"]
     self.x = np.array([db.by_col(var) for var in self.x_names]).T
     ww = pysal.open(pysal.examples.get_path("baltim_q.gal"))
     self.w = ww.read()
     ww.close()
     self.w_name = "baltim_q.gal"
     self.w.transform = 'r'
     self.regimes = db.by_col("CITCOU")
     # Artificial:
     n = 256
     self.n2 = n // 2
     self.x_a1 = np.random.uniform(-10,10,(n,1))
     self.x_a2 = np.random.uniform(1,5,(n,1))
     self.q_a = self.x_a2 + np.random.normal(0,1,(n,1))
     self.x_a = np.hstack((self.x_a1,self.x_a2))
     self.y_a = np.dot(np.hstack((np.ones((n,1)),self.x_a)),np.array([[1],[0.5],[2]])) + np.random.normal(0,1,(n,1))
     latt = int(np.sqrt(n))
     self.w_a = pysal.lat2W(latt,latt)
     self.w_a.transform='r'
     self.regi_a = [0] * (n // 2) + [1] * (n // 2)
     self.w_a1 = pysal.lat2W(latt // 2, latt)
     self.w_a1.transform='r'
Example No. 14
def getHexShp(codeBook,outFile):
    """Create the hexagon shapeFile
    Do NOT specify file extension in outFile"""
    inCOD = open(codeBook)
    inCOD = inCOD.readlines()
    header = inCOD.pop(0)
    dims,topo,cols,rows,neigh = header.split()
    cols=int(cols)
    rows=int(rows)
    fieldSpec = [('N',20,10)]*int(dims)
    fieldHead = ['plane%d'%i for i in range(int(dims))]
    #hexpts=hexPts(x,y) #Orig WRONG!!!
    hexpts=hexPts(cols,rows)
    polys=hexPoly(hexpts)# polygons
    out = pysal.open(outFile+'.shp','w')
    outd = pysal.open(outFile+'.dbf','w')
    outd.header = fieldHead
    outd.field_spec = fieldSpec
    for line in inCOD:
        outd.write(list(map(float, line.split())))
    outd.close()
    for bbox,poly in polys:
        poly = list(map(pysal.cg.shapes.Point, poly))
        poly = pysal.cg.shapes.Polygon(poly)
        out.write(poly)
    out.close()
    return 'Shapefile created'
Example No. 15
def map_poly_shp(shp_link, which='all'):
    '''
    Create a map object from a shapefile
    ...

    Arguments
    ---------

    shp_link        : str
                      Path to shapefile
    which           : str/list

    Returns
    -------

    map             : PatchCollection
                      Map object with the polygons from the shapefile

    '''
    shp = ps.open(shp_link)
    if which == 'all':
        db = ps.open(shp_link.replace('.shp', '.dbf'))
        n = len(db.by_col(db.header[0]))
        db.close()
        which = [True] * n
    patches = []
    for inwhich, shape in zip(which, shp):
        if inwhich:
            for ring in shape.parts:
                xy = np.array(ring)
                patches.append(xy)
    return PolyCollection(patches)
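A minimal plotting sketch for map_poly_shp, assuming matplotlib and pysal's bundled columbus.shp; the returned PolyCollection still has to be attached to an axes:

import matplotlib.pyplot as plt
import pysal as ps

pc = map_poly_shp(ps.examples.get_path('columbus.shp'))
fig, ax = plt.subplots()
ax.add_collection(pc)  # add_collection also updates the data limits
ax.autoscale_view()
plt.show()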
Example No. 16
def dl_merge(outname="tracts",sumlevel="140"):
    os.chdir('/tmp')
    outshp = pysal.open(outname+'.shp','w')
    outdbf = pysal.open(outname+'.dbf','w')
    for st in config.STATE_FIPS:
        fname = name_template%(st)#,sumlevel)
        if DEBUG: print(fname)
        url = urllib.urlopen(base_url+fname)
        dat = url.read()
        if not os.path.exists(fname):
            with open(fname,'wb') as o:
                o.write(dat)
            os.system('unzip '+fname)
        shp = pysal.open(fname.replace('.zip','.shp'),'r')
        for x in shp:
            outshp.write(x)
        dbf = pysal.open(fname.replace('.zip','.dbf'),'r')
        outdbf.header = dbf.header
        outdbf.field_spec = dbf.field_spec
        for row in dbf:
            outdbf.write(row)
        os.remove(fname)
        os.remove(fname.replace('.zip','.shp'))
        os.remove(fname.replace('.zip','.shx'))
        os.remove(fname.replace('.zip','.dbf'))
        os.remove(fname.replace('.zip','.shp.xml'))
        os.remove(fname.replace('.zip','.prj'))
    outshp.close()
    outdbf.close()
Example No. 17
 def test_sids(self):
     w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
     f = pysal.open(pysal.examples.get_path("sids2.dbf"))
     SIDR = np.array(f.by_col("SIDR74"))
     mi = pysal.Moran(SIDR, w, two_tailed=False)
     np.testing.assert_allclose(mi.I, 0.24772519320480135, atol=ATOL, rtol=RTOL)
     self.assertAlmostEquals(mi.p_norm,  5.7916539074498452e-05)
Example No. 18
 def setUp(self):
     f = pysal.open(pysal.examples.get_path("sids2.dbf"))
     varnames = ['SIDR74', 'SIDR79', 'NWR74', 'NWR79']
     self.names = varnames
     vars = [np.array(f.by_col[var]) for var in varnames]
     self.vars = vars
     self.w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
Example No. 19
def map_point_shp(shp_link, which='all'):
    '''
    Create a map object from a point shapefile
    ...

    Arguments
    ---------

    shp_link        : str
                      Path to shapefile
    which           : str/list

    Returns
    -------

    map             : PatchCollection
                      Map object with the points from the shapefile

    '''
    shp = ps.open(shp_link)
    if which == 'all':
        db = ps.open(shp_link.replace('.shp', '.dbf'))
        n = len(db.by_col(db.header[0]))
        db.close()
        which = [True] * n
    pts = []
    for inwhich, pt in zip(which, shp):
        if inwhich:
                pts.append(pt)
    pts = np.array(pts)
    sc = plt.scatter(pts[:, 0], pts[:, 1])
    _ = _add_axes2col(sc, shp.bbox)
    return sc
Example No. 20
 def test_sids(self):
     w = pysal.open(pysal.examples.get_path("sids2.gal")).read()
     f = pysal.open(pysal.examples.get_path("sids2.dbf"))
     SIDR = np.array(f.by_col("SIDR74"))
     mi = pysal.Moran(SIDR, w)
     self.assertAlmostEquals(mi.I, 0.24772519320480135)
     self.assertAlmostEquals(mi.p_norm,  5.7916539074498452e-05)
Example No. 21
def reader(shp_file_name, doubleEdges=True):
    """
    Read a PySAL network (geographic graph) shapefile and create edges and
    coordinates data structures


    Parameters
    ----------

    shp_file_name: Path to shapefile with .shp extension. Has to have been
    created by contrib/spatialnet/.

    doubleEdges: Boolean; if True, create a twin for each edge.

    Returns
    -------

    coords: dict with a node id as key and a pair of x, y coordinates for
    the node's embedding in the plane as value.

    edges: list of edges (t, f) where t and f are ids of the nodes.
    """


    dir_name = os.path.dirname(shp_file_name)
    base_name = os.path.basename(shp_file_name)
    pre,suf = base_name.split(".")
    shp_file = os.path.join(dir_name,pre+".shp")
    dbf_file = os.path.join(dir_name,pre+".dbf")
    sf = ps.open(shp_file)
    df = ps.open(dbf_file)
    edges = []
    coords = {}
    records = df.read()
    df.close()
    for record in records:
        t = record[0]
        f = record[1]
        edges.append((t,f))
    i = 0
    shps = sf.read()
    sf.close()
    for shp in shps:
        t_xy, f_xy = shp.vertices
        t = edges[i][0]
        f = edges[i][1]
        if t not in coords:
            coords[t] = t_xy
        if f not in coords:
            coords[f] = f_xy
        i += 1

    if doubleEdges:
        for edge in edges:
            twin = edge[1],edge[0]
            if twin not in edges:
                edges.append(twin)
    return coords, edges
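A minimal usage sketch; 'streets.shp' is a hypothetical spatialnet-style shapefile whose dbf stores the from/to node ids in its first two columns:

coords, edges = reader('streets.shp', doubleEdges=True)  # hypothetical file
print(len(coords), 'nodes,', len(edges), 'directed edges')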
Example No. 22
 def setUp(self):
     self.filehandler = ps.open(ps.examples.get_path('columbus.dbf'))
     self.df = self.filehandler.to_df()
     self.filehandler.seek(0)
     self.shapefile = ps.open(ps.examples.get_path('columbus.shp'))
     self.csvhandler = ps.open(ps.examples.get_path('usjoin.csv'))
     self.csv_df = self.csvhandler.to_df()
     self.csvhandler.seek(0)
Example No. 23
 def test_network_from_allvertices(self):
     shape = pysal.open(self.net)
     dbf = pysal.open(self.net[:-3] + 'dbf')
     graph = pynet.network_from_allvertices(shape, dbf)
     neighbors = {(724432.38723173144, 877800.08747069736): 155.41097171058956, 
                  (725247.70571468933, 877812.36851842562): 660.00000000003809}
     start_point = (724587.78057580709, 877802.4281426128)
     self.assertEqual(graph[start_point], neighbors)
Example No. 24
def pip_xy_shp_multi(xy, poly_shp, polyID_col=None, out_shp=None,
        empty=None):
    '''
    Point in polygon operation taking as input a points array and a polygon
    shapefile (running on multicore)
    ...

    Arguments
    =========
    xy              : np.array
                      nx2 array with xy coordinates
    poly_shp        : str
                      Path to polygon shapefile
    polyID_col      : str
                      Name of the column in the polygon shapefile to be used as ID
                      in the output shape
    out_shp         : str
                      Path to the output shapefile where to write xy with a
                      column with correspondences appended (Optional, defaults to
                      None)
    empty           : str
                      String to insert if the point is not contained in any
                      polygon. Defaults to None

    Returns
    =======
    correspondences : list
                      List of length len(xy) with the polygon ID where the
                      points are located
    '''
    t0 = time.time()
    polys = ps.open(poly_shp)
    if polyID_col:
        polyIDs = ps.open(poly_shp[:-3]+'dbf').by_col(polyID_col)
    pl = ps.cg.PolygonLocator(polys)
    t1 = time.time()
    print('\t', t1 - t0, ' secs to build rtree')

    parss = zip(xy, [pl]*xy.shape[0])
    cores = mp.cpu_count()
    pool = mp.Pool(cores)
    correspondences = pool.map(_poly4xy, parss)
    t2 = time.time()
    print('\t', t2 - t1, ' secs to get correspondences')
    if polyID_col:
        correspondences_names= []
        for i in correspondences:
            try:
                correspondences_names.append(polyIDs[int(i)])
            except:
                correspondences_names.append(empty)
        correspondences = correspondences_names
    polys.close()
    t3 = time.time()
    print('\t', t3 - t2, ' secs to convert correspondences')
    if out_shp:
        _writeShp()
    return correspondences
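A usage sketch under the snippet's own module context (it relies on a module-level _poly4xy helper plus the time and multiprocessing imports); columbus.shp and its POLYID column are assumptions:

import numpy as np
import pysal as ps

poly_shp = ps.examples.get_path('columbus.shp')
xy = np.array([[8.83, 14.37], [0.0, 0.0]])  # the second point falls outside
ids = pip_xy_shp_multi(xy, poly_shp, polyID_col='POLYID', empty=-1)
print(ids)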
Example No. 25
 def setUp(self):
     sids = pysal.open(pysal.examples.get_path("sids2.shp"), "r")
     self.sids = sids
     self.d = np.array([i.centroid for i in sids])
     self.w = knnW_from_array(self.d, k=5)
     if not self.w.id_order_set:
         self.w.id_order = self.w.id_order
     sids_db = pysal.open(pysal.examples.get_path("sids2.dbf"), "r")
     self.b, self.e = np.array(sids_db[:, 8]), np.array(sids_db[:, 9])
Example No. 26
    def moranI(self):

        if (self.dlg.checkBox.checkState() == 2 and self.dlg.rdButton_nm.isChecked()) or self.dlg.rdButton_sm.isChecked():

            self.w=pysal.open(os.path.join(os.path.dirname(__file__), 'default.gal')).read()
            try:
                self.w = pysal.open(self.dlg.txtSelect.text()).read()

            except IOError as e:
                print(e.errno)
                print(e)

            selectedFieldIndex = self.dlg.cmbBoxSelectField.currentIndex()
            field= self.fieldsnumeric[selectedFieldIndex]
            self.fieldName = field.name()

            dbasefile=self.myfile[:-3]+'dbf'
            dbase = pysal.open(dbasefile)
            self.y = np.array(dbase.by_col[self.fieldName], dtype=('<f8'))

            """self.dlg.txtSelectSystemC.setPlainText("")
            for i in self.y:
                self.dlg.txtSelectSystemC.appendPlainText(str(i))"""

            if self.w.n == self.y.size:
                self.mi = pysal.Moran(self.y, self.w, two_tailed=False)

                self.dlg.txtMoranI.setText(str(round(self.mi.I,5)))
                self.dlg.txtMoranIp.setText(str(round(self.mi.p_norm,5)))

                if self.mi.I > 0.75 and self.mi.p_norm < 0.05:
                    self.dlg.txtMoranIint.setText("Moran's I close to 1 and significant p-value (95% confidence): possible spatial autocorrelation")
                elif self.mi.I < -0.75 and self.mi.p_norm < 0.05:
                    self.dlg.txtMoranIint.setText("Moran's I close to -1 and significant p-value (95% confidence): possible spatial autocorrelation")
                elif self.mi.I > -0.05 and self.mi.I < 0.05:
                    self.dlg.txtMoranIint.setText("Moran's I close to 0: no spatial autocorrelation")
                elif self.mi.p_norm < 0.05:
                    self.dlg.txtMoranIint.setText("Significant p-value (95% confidence): possible weak spatial autocorrelation")
                else:
                    self.dlg.txtMoranIint.setText("Non-significant p-value (95% confidence): no spatial autocorrelation")

                """lm = pysal.Moran_Local(y,w)"""
                """self.dlg.txtSelectSystemC2.appendPlainText(str(lm.p_sim))"""
                """self.dlg.txtOutput_2.setText(str(self.w.n)+' - '+str(self.y.size))"""
            else:
                QtGui.QMessageBox.warning(None, "Error", "Oops! The weights matrix has not been generated or does not exist. Generate it and try again...")
                self.dlg.checkBox.setCheckState(0)
                self.dlg.txtMoranI.clear()
                self.dlg.txtMoranIp.clear()
                self.dlg.txtMoranIint.clear()
                """self.dlg.txtSelectSystemC.setPlainText("")"""
                self.dlg.groupBox_wm.setEnabled(True)
                self.dlg.groupBox_mi.setEnabled(False)
                self.dlg.widget_sm.setEnabled(False)
                self.dlg.rdButton_sm.setChecked(0)
                self.dlg.rdButton_nm.setChecked(1)
Example No. 27
    def test___init__(self):
        import numpy as np

        f = pysal.open(pysal.examples.get_path("usjoin.csv"))
        pci = np.array([f.by_col[str(y)] for y in range(1929, 2010)]).transpose()
        w = pysal.open(pysal.examples.get_path("states48.gal")).read()
        lm = pysal.LISA_Markov(pci, w)
        obs = np.array([1, 2, 3, 4])
        np.testing.assert_array_almost_equal(obs, lm.classes)
Example No. 28
 def test_write(self):
     w = self.obj.read()
     f = tempfile.NamedTemporaryFile(suffix='.gal')
     fname = f.name
     f.close()
     o = pysal.open(fname, 'w')
     o.write(w)
     o.close()
     wnew = pysal.open(fname, 'r').read()
     self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
Example No. 29
 def test_network_from_endnodes(self):
     shape = pysal.open(self.net)
     dbf = pysal.open(self.net[:-3] + 'dbf')
     def weight(geo_object, record):
         return 1
     graph = pynet.network_from_endnodes(shape, dbf, weight)
     neighbors = {(724432.38723173144, 877800.08747069736): 1, 
                  (725247.70571468933, 877812.36851842562): 1}
     start_point = (724587.78057580709, 877802.4281426128)
     self.assertEqual(graph[start_point], neighbors)
Example No. 30
 def __init__(self,parent=None,size=(600,600), style=wx.DEFAULT_FRAME_STYLE, geo=None, w=None):
     wx.Frame.__init__(self,parent,size=size,style=style)
     self.SetTitle("Weights Inspector")
     if issubclass(type(geo),basestring):
         geo = pysal.open(geo,'r')
     self.geo = geo
     if issubclass(type(w),basestring):
         w = pysal.open(w,'r').read()
     self.w = w
     self.wm = WeightsMap(self,self.geo,self.w)
Example No. 31
    def OnSaveQueryToDBF(self, event):
        try:
            if self.query_data is None:
                return
            dlg = wx.FileDialog(
                self,
                message="Save query into new dbf files...",
                defaultDir=os.getcwd(),
                defaultFile='%s.dbf' %
                (self.lisa_layer.name + '_dynamic_lisa'),
                wildcard="shape file (*.dbf)|*.dbf|All files (*.*)|*.*",
                style=wx.SAVE)
            if dlg.ShowModal() != wx.ID_OK:
                dlg.Destroy()
                return
            path = dlg.GetPath()
            dlg.Destroy()

            dbf = self.lisa_layer.dbf
            newDBF = pysal.open('%s.dbf' % path[:-4], 'w')
            newDBF.header = []
            newDBF.field_spec = []
            for i in dbf.header:
                newDBF.header.append(i)
            for i in dbf.field_spec:
                newDBF.field_spec.append(i)

            for i in self.query_data.keys():
                newDBF.header.append('T_%d' % (i + 1))
                newDBF.field_spec.append(('N', 4, 0))

            for i in range(dbf.n_records):
                newRow = []
                row = dbf.read_record(i)
                newRow = [item for item in row]
                for j in self.query_data.keys():
                    val = self.query_data[j][i]
                    newRow.append(int(val))
                newDBF.write(newRow)
            newDBF.close()
            self.ShowMsgBox("Query results have been saved to a new dbf file.",
                            mtype='CAST information',
                            micon=wx.ICON_INFORMATION)
        except:
            self.ShowMsgBox(
                "Saving query results to new dbf file failed. Please check if the dbf file already exists."
            )
Example No. 32
def get_points_array_from_shapefile(shapefile):
    """
    Gets a data array of x and y coordinates from a given shapefile.

    Parameters
    ----------
    shapefile     : string
                    name of a shape file including suffix

    Returns
    -------
    points        : array
                    (n, 2)
                    a data array of x and y coordinates

    Notes
    -----
    If the given shape file includes polygons,
    this function returns x and y coordinates of the polygons' centroids

    Examples
    --------
    Point shapefile

    >>> from pysal.weights.util import get_points_array_from_shapefile
    >>> xy = get_points_array_from_shapefile(pysal.examples.get_path('juvenile.shp'))
    >>> xy[:3]
    array([[ 94.,  93.],
           [ 80.,  95.],
           [ 79.,  90.]])

    Polygon shapefile

    >>> xy = get_points_array_from_shapefile(pysal.examples.get_path('columbus.shp'))
    >>> xy[:3]
    array([[  8.82721847,  14.36907602],
           [  8.33265837,  14.03162401],
           [  9.01226541,  13.81971908]])
    """

    f = pysal.open(shapefile)
    if f.type.__name__ == 'Polygon':
        data = np.array([shape.centroid for shape in f])
    elif f.type.__name__ == 'Point':
        data = np.array([shape for shape in f])
    f.close()
    return data
Example No. 33
def _importArcData(filename):
    """Creates a new Layer from a shapefile (<file>.shp)

    This function wraps and extends a core clusterPy function to utilize PySAL
    W constructors and dbf readers.


    Parameters
    ==========

    filename: string
              suffix of shapefile (fileName not fileName.shp)


    Returns
    =======
    layer: clusterpy layer instance



    """
    layer = _clusterpy.Layer()
    layer.name = filename.split('/')[-1]
    #print "Loading " + filename + ".dbf"
    dbf = ps.open(filename+".dbf")
    fields = dbf.header
    #data, fields, specs = importDBF(filename + '.dbf')
    data = {}
    #print "Loading " + filename + ".shp"
    if fields[0] != "ID":
        fields = ["ID"] + fields
        for y in range(dbf.n_records):
            data[y] = [y] + dbf.by_row(y)
    else:
        for y in range(dbf.n_records):
            data[y] = dbf.by_row_(y)

    layer.fieldNames = fields
    layer.Y = data
    shpf = filename+".shp"
    layer.shpType = 5
    #print 'pysal reader'
    layer.Wrook = ps.rook_from_shapefile(filename+".shp").neighbors
    layer.Wqueen = ps.queen_from_shapefile(filename+".shp").neighbors
    #print "Done"
    return layer
Example No. 34
 def setUp(self):
     db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
     y = np.array(db.by_col("HOVAL"))
     self.y = np.reshape(y, (49,1))
     X = []
     X.append(db.by_col("INC"))
     self.X = np.array(X).T
     self.X = np.hstack((np.ones(self.y.shape),self.X))
     self.X = sparse.csr_matrix(self.X)
     yd = []
     yd.append(db.by_col("CRIME"))
     self.yd = np.array(yd).T
     q = []
     q.append(db.by_col("DISCBD"))
     self.q = np.array(q).T
     self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
     self.w.transform = 'r'
Example No. 35
def dbf2DF(dbfile):
    """
    Reads in a DBF file and returns a pandas DataFrame.

    dbfile  : DBF file - input to be imported

    This block of code was adapted from https://gist.github.com/ryan-hill/f90b1c68f60d12baea81
    """
    #Pysal to open DBF
    db = ps.open(dbfile)
    #Convert dbf to dictionary
    d = {col: db.by_col(col) for col in db.header}
    #Convert to Pandas DF
    pandasDF = pd.DataFrame(d)
    db.close()
    return pandasDF
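A minimal call, assuming pysal is imported as ps in the snippet's module and using pysal's bundled columbus.dbf:

import pysal as ps

df = dbf2DF(ps.examples.get_path('columbus.dbf'))
print(df.head())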
Example No. 36
def lisa_mapa(variavel, shapefile, p_thres=0.05, **kws):

    w = ps.queen_from_shapefile(shapefile)
    lisa = ps.Moran_Local(variavel, w)

    fig = plt.figure(figsize=(9, 7))
    shp = ps.open(shapefile)
    base = maps.map_poly_shp(shp)
    base = maps.base_lisa_cluster(base, lisa, p_thres=p_thres)
    base.set_edgecolor('1')
    base.set_linewidth(0.1)
    ax = maps.setup_ax([base], [shp.bbox])

    boxes, labels = maps.lisa_legend_components(lisa, p_thres=p_thres)
    plt.legend(boxes, labels, fancybox=True, **kws)

    plt.show()
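A usage sketch, assuming the snippet's module imports (ps, plt, and the maps module from pysal.contrib.viz) and pysal's bundled columbus data:

import numpy as np
import pysal as ps

shp = ps.examples.get_path('columbus.shp')
y = np.array(ps.open(ps.examples.get_path('columbus.dbf')).by_col('CRIME'))
lisa_mapa(y, shp, p_thres=0.05)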
Example No. 37
 def test_WSP2W(self):
     sp = pysal.weights.lat2SW(2, 5)
     wsp = pysal.weights.WSP(sp)
     w = pysal.weights.WSP2W(wsp)
     self.assertEquals(w.n, 10)
     self.assertEquals(w[0], {1: 1, 5: 1})
     w = pysal.open(pysal.examples.get_path('sids2.gal'), 'r').read()
     wsp = pysal.weights.WSP(w.sparse, w.id_order)
     w = pysal.weights.WSP2W(wsp)
     self.assertEquals(w.n, 100)
     self.assertEquals(w['37135'], {
         '37001': 1.0,
         '37033': 1.0,
         '37037': 1.0,
         '37063': 1.0,
         '37145': 1.0
     })
Example No. 38
def df2dbf(df, dbf_path):
    type2spec = {int: ('N', 20, 0),
                 np.int64: ('N', 20, 0),
                 np.int32: ('N', 20, 0),
                 float: ('N', 36, 15),
                 np.float32: ('N', 36, 15),
                 np.float64: ('N', 36, 15),
                 str: ('C', 14, 0)
                 }
    types = [type(df[i].iloc[0]) for i in df.columns]
    specs = [type2spec[t] for t in types]
    db = ps.open(dbf_path, 'w')
    db.header = list(df.columns)
    db.field_spec = specs
    for i, row in df.T.items():
        db.write(row)
    db.close()
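A quick round-trip sketch; 'demo.dbf' is a throwaway output path:

import pandas as pd

df = pd.DataFrame({'name': ['a', 'b'], 'value': [1.0, 2.0]})
df2dbf(df, 'demo.dbf')  # column specs are inferred from the first row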
Example No. 39
 def write(self, filename=None, as_type='gal'):
     """
      Write data (assumed to be a pysal weights object) to a GAL weights file
     :param filename: Base filename
     :param as_type: gal
     :return: location of file
     """
     if not filename:
         filename = self.uri
     self.create_output_dir(filename)
     if as_type == 'gal':
         gal = pysal.open(filename, 'w')
         gal.write(self.data)
         gal.close()
     else:
         raise NotImplementedError('{} not a valid type'.format(as_type))
     return self.uri
Example No. 40
def dbf2DF(dbfile, upper=True):
    '''
    __author__ = "Ryan Hill <*****@*****.**>"
                 "Marc Weber <*****@*****.**>"
    Reads and converts a dbf file to a pandas data frame using pysal.

    Arguments
    ---------
    dbfile           : a dbase (.dbf) file
    '''
    db = ps.open(dbfile)
    cols = {col: db.by_col(col) for col in db.header}
    db.close()  #Close dbf
    pandasDF = pd.DataFrame(cols)
    if upper:
        pandasDF.columns = pandasDF.columns.str.upper()
    return pandasDF
Example No. 41
def dbfdups(dbfpath, idvar):
    """checks duplicates in a dBase file
       ID variable must be specified correctly

       __author__  = "Luc Anselin <*****@*****.**> " 
       
       Arguments
       ---------
       dbfpath  : file path to dBase file
       idvar    : ID variable in dBase file
       
       Returns
       -------
       a list with the duplicate IDs
    """
    db = ps.open(dbfpath, 'r')
    li = db.by_col(idvar)
    return list(set([x for x in li if li.count(x) > 1]))
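A minimal call against pysal's bundled columbus.dbf, whose POLYID column is unique, so an empty list is expected:

import pysal as ps

print(dbfdups(ps.examples.get_path('columbus.dbf'), 'POLYID'))  # []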
Example No. 42
    def on_inputbutton_clicked(self):
        myFile1 = QFileDialog.getOpenFileName(self, "Select a shapefile", "", "comma_separatedfile(*.csv)")
        if self.ui.inputbutton is not None:
            self.ui.inputline.setText(myFile1)

        # Create a new combobox: (1) use pysal to open the file, (2) read it in,
        # so the saved file's columns can be shown.
        openfile = str(self.ui.inputline.text())
        f = pysal.open(openfile)
        # opendbf = openfile[:-3] + "dbf"  # open the same file, only with dbf
        # f_dbf = pysal.open(opendbf)
        self.fileheader = f.header  # find columns; already a list

        for i in self.fileheader:  # i is a string
            self.ui.startcombobox.addItem(i)
            self.ui.endcombobox.addItem(i)
Example No. 43
 def test_process_weight(self):
     """Test Weight Process"""
     with open(os.path.join(testfile_path, 'weight_process.json')) as inf:
         body_text = inf.read().replace('{basepath}', testfile_path)
     process = json.loads(body_text, object_hook=deserialize)
     try:
         process.compute()
         output = process.output.read(format=formats.WEIGHT)
         exp = pysal.open(
             os.path.join(testfile_path, 'weight_process_result.gal'), 'r')
         expected_w = exp.read()
         exp.close()
         self.assertEquals(expected_w.n, output.n)
         self.assertIsNotNone(process.id)
         self.assertIn(process.id, process.output.uri)
     finally:
         if process:
             process.purge()
Example No. 44
 def generate_contours(self):
     allPoly = ps.open(self.shp_link)
     blobPoly = [None for i in range(len(np.unique(self.regions)))]
     for i in range(len(allPoly)):
          if blobPoly[int(self.regions[i])] is None:
             blobPoly[int(self.regions[i])] = pl.Polygon(allPoly[i].vertices)
         else:
             blobPoly[int(self.regions[i])] = blobPoly[int(self.regions[i])] + pl.Polygon(allPoly[i].vertices)
     outputPoly = []
     contours_to_blobs = []
     counter = 0
     for poly in blobPoly:
         for i in range(len(poly)):
             outputPoly.append(Polygon(poly.contour(i)))
             contours_to_blobs.append(counter)
         counter+=1
     self.contours = outputPoly
     self.contours_to_blobs = contours_to_blobs
Example No. 45
def df2dbf(df, dbf_path, my_specs=None):
    '''
    Convert a pandas.DataFrame into a dbf.

    __author__  = "Dani Arribas-Bel <*****@*****.**> "
    ...

    Arguments
    ---------
    df          : DataFrame
                  Pandas dataframe object to be entirely written out to a dbf
    dbf_path    : str
                  Path to the output dbf. It is also returned by the function
    my_specs    : list
                  List with the field_specs to use for each column.
                  Defaults to None and applies the following scheme:
                    * int: ('N', 20, 0)
                    * float: ('N', 36, 15)
                    * str: ('C', 14, 0)
    '''
    if my_specs:
        specs = my_specs
    else:
        type2spec = {
            int: ('N', 20, 0),
            np.int64: ('N', 20, 0),
            np.int32: ('N', 20, 0),
            np.int16: ('N', 20, 0),
            np.int8: ('N', 20, 0),
            float: ('N', 36, 15),
            np.float64: ('N', 36, 15),
            np.float32: ('N', 36, 15),
            str: ('C', 14, 0)
        }
        types = [type(df[i].iloc[0]) for i in df.columns]
        specs = [type2spec[t] for t in types]
    db = ps.open(dbf_path, 'w')
    db.header = list(df.columns)
    db.field_spec = specs
    for i, row in df.T.items():
        db.write(row)
    db.close()
    return dbf_path
Example No. 46
def spw_from_shapefile(shapefile, norm=False):
    polygons = ps.open(shapefile, 'r').read()
    spolygons = list(map(asShape, polygons))
    spolygons = [fix_mp(p) for p in spolygons]
    perimeters = [p.length if norm else 1. for p in spolygons]
    Wsrc = ps.queen_from_shapefile(shapefile)
    new_weights, edges = {}, {}
    for i in Wsrc.neighbors:
        a = spolygons[i]
        p = perimeters[i]
        new_weights[i] = []
        for j in Wsrc.neighbors[i]:

            intersect = a.intersection(spolygons[j])
            new_weights[i].append(intersect.length)

        edges[i] = a.length - sum(new_weights[i])  # /a.length

    return edges, ps.W(Wsrc.neighbors, new_weights)
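A usage sketch, assuming the snippet's module provides the fix_mp helper and imports asShape from shapely.geometry:

import pysal as ps

edges, w = spw_from_shapefile(ps.examples.get_path('columbus.shp'))
print(w.n)  # 49 polygons in columbus.shp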
Example No. 47
 def test_weight(self):
     """
     Test WeightProcess for vector inputs
     """
     vector_io = VectorFileIO(name='input',
                              uri=os.path.join(testfile_path,
                                               'baghdad_hospitals.geojson'))
     process = geo.WeightProcess('knnW', inputs=[vector_io])
     try:
         process.compute()
         exp = pysal.open(
             os.path.join(testfile_path, 'weight_process_result.gal'), 'r')
         expected_w = exp.read()
         exp.close()
         actual = process.output.read(format=formats.WEIGHT)
         self.assertEquals(expected_w.n, actual.n)
     finally:
         if process:
             process.purge()
Example No. 48
    def Show(self):
        try:
            self.dialog.Fit()
            if self.dialog.ShowModal() == wx.ID_OK:
                path = self.txt_weight_path.GetValue()
                name = os.path.splitext(os.path.basename(path))[0]
                w = pysal.open(path).read()
                data = {name:w.histogram}
                data = dict([(w.id2i[id],len(nbs)) for id, nbs in w.neighbors.iteritems()])
                hist_widget = PlotWidget(
                    self.main,
                    name,
                    {'Connectivity':data}, 
                    Histogram, 
                    title="Histogram(%s) Connectivity of Weights" % name)
                hist_widget.Show() 
                """
                shp = self.main.GetSHP(name)
                if shp == None:
                    msg = "Please open shapefile \"%s\" first." % name
                    dlg = wx.MessageDialog(self.main, msg, 'Warning', wx.OK|wx.ICON_WARNING)
                    dlg.ShowModal()
                    dlg.Destroy()
                else:
                    w = pysal.open(path).read()
                    data = {name:w.histogram}
                    data = dict([(w.id2i[id],len(nbs)) for id, nbs in w.neighbors.iteritems()])
                    hist_widget = PlotWidget(
                        self.main,
                        shp,
                        {'Connectivity':data}, 
                        Histogram, 
                        title="Histogram(%s) Connectivity of Weights" % name)
                    hist_widget.Show() 
                """
            self.dialog.Destroy()
        except Exception as err:
            dlg = wx.MessageDialog(self.main, 
                                   """Could not open weights file! Please select a valid weights file.
                                   
Details: """ + str(err), 'Warning', wx.OK|wx.ICON_WARNING)
            dlg.ShowModal()
            dlg.Destroy()
Example No. 49
def dbf2DF(dbfile, mycols):

    # Pysal to open DBF file
    db = ps.open(dbfile)

    # convert to a dictionary with key:value pairs
    d = dict([(var, db.by_col(var)) for var in mycols])

    # Convert to Pandas DF
    pandasDF = pd.DataFrame(d)

    # Make columns all uppercase
    pandasDF.columns = map(str.upper, pandasDF.columns)

    # close the dbf file
    db.close()

    # return the pandas data frame
    return pandasDF
Example No. 50
def get_shpdbf(filename, field):
    """
    Extract a column from a shapefile (geom) or dbf (attribute)
    """
    files = (os.path.join(UPLOAD_FOLDER, filename))
    if field == 'thegeom':
        geoms = []
        with fiona.collection(files + '.shp', "r") as source:
            for feat in source:
                geoms.append(feat)

        geojson = {"type": "FeatureCollection", "features": geoms}
        response = {'status': 'success', 'data': {'geojson': geojson}}
    else:
        dbf = ps.open(files + '.dbf', 'r')
        attr = dbf.by_col(field)
        response = {'status': 'success', 'data': {field: attr}}

    return jsonify(response)
Example No. 51
def addGal2Layer(galfile, layer, contiguity='rook'):
    """
    Attach an adjacency object to a layer

    Parameters
    ==========
    galfile: string
             galfile

    layer: clusterpy layer

    contiguity: type of contiguity ['rook'|'queen']


    Returns
    =======
    None

    Examples
    ========
    >>> import pysal as ps
    >>> import pysal.contrib.clusterpy as cp
    >>> csvfile = ps.examples.get_path('mexico.csv')
    >>> galfile = ps.examples.get_path('mexico.gal')
    >>> mexico = cp.importCsvData(csvfile)
    >>> cp.addGal2Layer(galfile, mexico)
    >>> mexico.Wrook[0]
    [31, 13]


    """
    gal = ps.open(galfile).read().neighbors
    w = {}
    for key in gal:
        w[int(key)] =  list(map(int, gal[key])) 
    
    if contiguity.upper()== "ROOK":
        layer.Wrook = w
    elif contiguity.upper() == "QUEEN":
        layer.Wqueen = w
    else:
        print('Unsupported contiguity type: ', contiguity)
Example No. 52
 def __init__(self, featureDataset, idField, countField, countFieldListFilePath):
     '''
     Constructor of MaxP. Note that isolated features will not be joined with other features.
     @param featureDataset - instance of FeatureDataSet in data.py of the util package
     @param idField - primary key field for the spatial units in the shapefile in featureDataset; not FID or ObjectID, but something like GEOID
     @param countField - single field from the shapefile in featureDataset which will be used as the constraint in max-p
     @param countFieldListFilePath - path to a file containing a list of fields from the shapefile in featureDataset which will be used as parameters for max-p to compute SSD between regions
     '''
     index = []
     count_values = []
     targetCount_values = []
     srcFeatureSet = featureDataset.dataPath
     targetCountFields = read1ColText(countFieldListFilePath, False)
     # Make sure the feature dataset is a shapefile
     srcFeatureSetName = os.path.basename(srcFeatureSet)
     if '.shp' not in srcFeatureSetName:
         scratchFolder = os.path.join(os.path.dirname(os.path.dirname(srcFeatureSet)),'MaxPSrcData')
         if not os.path.isdir(scratchFolder):
             os.mkdir(scratchFolder)
         scratchShapefileName = '%s.shp' % srcFeatureSetName
         self._scratchShapefile = os.path.join(scratchFolder, scratchShapefileName)
         if arcpy.Exists(self._scratchShapefile):
             arcpy.Delete_management(self._scratchShapefile)
         arcpy.FeatureClassToFeatureClass_conversion(srcFeatureSet, scratchFolder, scratchShapefileName)
     else:
         self._scratchShapefile = srcFeatureSet
     # Generate pandas dataframe for ACS regionalization input
     self._fieldList = [idField, countField]
     self._fieldList.extend(targetCountFields)
     with arcpy.da.SearchCursor(self._scratchShapefile, self._fieldList) as cursor:  #@UndefinedVariable
         for row in cursor:
             index.append(row[0])
             count_values.append(row[1])
             targetCount_values.append(row[2:])
     self._count_pdFrame = pd.DataFrame(count_values, index, [countField])
     self._targetCount_pdFrame = pd.DataFrame(targetCount_values, index, targetCountFields)
     self._targetMOE_pdFrame = np.empty(self._targetCount_pdFrame.shape)
     self._targetMOE_pdFrame.fill(0.0)
     self._targetMOE_pdFrame = pd.DataFrame(self._targetMOE_pdFrame, list(self._targetCount_pdFrame.index.values), list(self._targetCount_pdFrame.columns.values))
     # Get pysal object from shapefile in featureDataset
     self._w = ps.rook_from_shapefile(self._scratchShapefile, idVariable = idField)
     self._shp = ps.open(self._scratchShapefile)
Example No. 53
    def __init__(self, shape_file_root, store_obj=False, dimension=2):
        """
        """
        # Check that the rtree files are there, otherwise, create them
        if not (os.path.exists(shape_file_root + '.idx')
                and os.path.exists(shape_file_root + '.dat')):
            print("Rtree data base does not exist. Create it now.")
            write_shape_rtree_2D(shape_file_root, store_obj=store_obj)

        rtree_time = os.path.getctime(shape_file_root + '.dat')
        shape_time = os.path.getctime(shape_file_root + '.shp')
        if shape_time > rtree_time:
            warnings.warn('Shape file newer than rtree database,' +
                          ' you may want to update/delete the old database')

        self.shape = pysal.open(shape_file_root + '.shp')
        p = rtree.index.Property()
        p.dimension = dimension
        p.overwrite = False
        self.idx = rtree.index.Index(shape_file_root, properties=p)
Example No. 54
    def __init__(self, filename, name=None):
        super(Morans, self).__init__()
        self.filename = filename
        self.shapefile = filename + '.shp'
        self.dbf = filename + '.dbf'

        if name:
            self.name = name
        else:
            self.name = os.path.splitext(ntpath.basename(self.filename))[0]

        self.results = {}

        # Calculate the faster properties on init
        self._threshold = pysal.min_threshold_dist_from_shapefile(
            self.shapefile)
        self._points_array = pysal.weights.util.get_points_array_from_shapefile(
            self.shapefile)
        self._data = pysal.open(self.dbf)
        self._columns = self._data.by_col
Example No. 55
 def test_Headbanging_Median_Rate(self):
     sids_d = np.array([i.centroid for i in self.sids])
     sids_w = pysal.knnW(sids_d, k=5)
     if not sids_w.id_order_set:
         sids_w.id_order = sids_w.id_order
     s_ht = sm.Headbanging_Triples(sids_d, sids_w, k=5)
     sids_db = pysal.open(pysal.examples.get_path('sids2.dbf'), 'r')
     s_e, s_b = np.array(sids_db[:, 9]), np.array(sids_db[:, 8])
     sids_hb_r = sm.Headbanging_Median_Rate(s_e, s_b, s_ht)
     sids_hb_rr5 = np.array([0.00075586, 0.,
                             0.0008285, 0.0018315, 0.00498891])
     np.testing.assert_array_almost_equal(sids_hb_rr5, sids_hb_r.r[:5])
     sids_hb_r2 = sm.Headbanging_Median_Rate(s_e, s_b, s_ht, iteration=5)
     sids_hb_r2r5 = np.array([0.0008285, 0.00084331,
                              0.00086896, 0.0018315, 0.00498891])
     np.testing.assert_array_almost_equal(sids_hb_r2r5, sids_hb_r2.r[:5])
     sids_hb_r3 = sm.Headbanging_Median_Rate(s_e, s_b, s_ht, aw=s_b)
     sids_hb_r3r5 = np.array([0.00091659, 0.,
                              0.00156838, 0.0018315, 0.00498891])
     np.testing.assert_array_almost_equal(sids_hb_r3r5, sids_hb_r3.r[:5])
Example No. 56
def dataframe_to_dbf(df, dbf_path, specs=None):
    """Given a pandas Dataframe, write a dbase database to ``dbf_path``.

    :type df: pandas.Dataframe
    :type dbf_path: basestring
    :param specs: A list of column specifications for the dbase table. Each column is specified by a tuple (datatype,
        size, decimal); we support ``datatype in ('N', 'C')``, i.e. numbers ('N') and strings ('C'). If no specs are
        provided, they are derived from ``TYPE_MAPPING``.
    :type specs: list[tuple(basestring, int, int)]
    """
    if specs is None:
        types = [type(df[i].iloc[0]) for i in df.columns]
        specs = [TYPE_MAPPING[t] for t in types]
    dbf = pysal.open(dbf_path, 'w', 'dbf')
    dbf.header = list(df.columns)
    dbf.field_spec = specs
    for row in range(len(df)):
        dbf.write(df.iloc[row])
    dbf.close()
    return dbf_path
Example No. 57
 def setUp(self):
     data = pysal.open(pysal.examples.get_path('Tokyomortality.csv'), mode='Ur')
     self.coords = list(zip(data.by_col('X_CENTROID'), data.by_col('Y_CENTROID')))
     self.y = np.array(data.by_col('db2564')).reshape((-1,1))
     self.off = np.array(data.by_col('eb2564')).reshape((-1,1))
     OCC  = np.array(data.by_col('OCC_TEC')).reshape((-1,1))
     OWN = np.array(data.by_col('OWNH')).reshape((-1,1)) 
     POP = np.array(data.by_col('POP65')).reshape((-1,1))
     UNEMP = np.array(data.by_col('UNEMP')).reshape((-1,1))
     self.X = np.hstack([OCC,OWN,POP,UNEMP])
     self.BS_F = pysal.open(pysal.examples.get_path('tokyo_BS_F_listwise.csv'))
     self.BS_NN = pysal.open(pysal.examples.get_path('tokyo_BS_NN_listwise.csv'))
     self.GS_F = pysal.open(pysal.examples.get_path('tokyo_GS_F_listwise.csv'))
     self.GS_NN = pysal.open(pysal.examples.get_path('tokyo_GS_NN_listwise.csv'))
     self.BS_NN_OFF = pysal.open(pysal.examples.get_path('tokyo_BS_NN_OFF_listwise.csv'))
Example No. 58
def dbf2df(dbf_path, index=None, cols=False, incl_index=False):
    '''
    Read a dbf file as a pandas.DataFrame, optionally selecting the index
    variable and which columns are to be loaded.

    __author__  = "Dani Arribas-Bel <*****@*****.**> "
    ...

    Arguments
    ---------
    dbf_path    : str
                  Path to the DBF file to be read
    index       : str
                  Name of the column to be used as the index of the DataFrame
    cols        : list
                  List with the names of the columns to be read into the
                  DataFrame. Defaults to False, which reads the whole dbf
    incl_index  : Boolean
                  If True index is included in the DataFrame as a
                  column too. Defaults to False

    Returns
    -------
    df          : DataFrame
                  pandas.DataFrame object created
    '''
    db = ps.open(dbf_path)
    if cols:
        if incl_index:
            cols.append(index)
        vars_to_read = cols
    else:
        vars_to_read = db.header
    data = dict([(var, db.by_col(var)) for var in vars_to_read])
    if index:
        index = db.by_col(index)
        db.close()
        return pd.DataFrame(data, index=index, columns=vars_to_read)
    else:
        db.close()
        return pd.DataFrame(data, columns=vars_to_read)
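A minimal call selecting two columns and indexing by POLYID (a column assumed present in columbus.dbf):

import pysal as ps

df = dbf2df(ps.examples.get_path('columbus.dbf'), index='POLYID', cols=['HOVAL', 'INC'])
print(df.shape)  # expected (49, 2)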
Example No. 59
def dataframe_to_dbf(df, dbf_path, specs=None):
    if specs is None:
        type2spec = {
            int: ('N', 20, 0),
            np.int64: ('N', 20, 0),
            float: ('N', 36, 15),
            np.float64: ('N', 36, 15),
            unicode: ('C', 25, 0),
            str: ('C', 25, 0)
        }
        types = [type(df[i].iloc[0]) for i in df.columns]
        specs = [type2spec[t] for t in types]
    dbf = pysal.open(dbf_path, 'w', 'dbf')
    dbf.header = list(df.columns)
    dbf.field_spec = specs
    df_transpose = df.T
    length = len(df_transpose.columns)
    for row in range(length):
        dbf.write(df_transpose[row])
    dbf.close()
    return dbf_path
Example No. 60
def saveDataFrame2Dbf(data_frame_input, dbf_file, sep=","):

    if six.PY2:
        type2spec = {
            int: ('N', 20, 0),
            numpy.int64: ('N', 20, 0),
            float: ('N', 36, 15),
            numpy.float64: ('N', 36, 15),
            str: ('C', 14, 0),
            unicode: ('C', 14, 0)
        }
    else:
        type2spec = {
            int: ('N', 20, 0),
            numpy.int64: ('N', 20, 0),
            float: ('N', 36, 15),
            numpy.float64: ('N', 36, 15),
            str: ('C', 14, 0)
        }

    # Write the contents of the dataframe to a dbf file
    types = [
        type(data_frame_input[i].iloc[0]) for i in data_frame_input.columns
    ]
    specs = [type2spec[t] for t in types]

    if six.PY2:
        db_desc = pysal.open(dbf_file, 'w')
    else:
        if IS_libpysal:
            db_desc = libpysal.io.fileio.FileIO.open(dbf_file, 'w')
        else:
            db_desc = pysal.lib.io.fileio.FileIO.open(dbf_file, 'w')
    db_desc.header = list(data_frame_input.columns)
    db_desc.field_spec = specs
    for i, row in data_frame_input.T.iteritems():
        db_desc.write(row)
    db_desc.close()

    return