def contiguity_from_shapefile(shapefile, criteria='rook'):
    """
    Create a spatial weights object from a shapefile based on a contiguity
    criterion.

    Produces a "*.gal" file in the directory of the shapefile


    Arguments
    ---------
    shapefile: string with full path to shapefile

    criteria: string for type of contiguity ['rook'|'queen']

    Returns
    -------
    cards: nx1 numpy array with the number of neighbors for each element,
        based on the chosen criterion

    """

    if criteria == 'rook':
        w = PS.rook_from_shapefile(shapefile)
        abb = 'r'
    else:
        w = PS.queen_from_shapefile(shapefile)
        abb = 'q'
    cards = NP.array(w.cardinalities.values())
    cards.shape = (len(cards),1)
    galfile = shapefile.split(".")[0] + "_" + abb + ".gal"
    gal = PS.open(galfile,'w')
    gal.write(w)
    gal.close()

    return cards
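
A minimal usage sketch for the function above (assuming PS is pysal and NP is numpy as in the snippet, and using a shapefile bundled with PySAL; the alias names and path are assumptions, not part of the original example):

import numpy as NP
import pysal as PS

shp = PS.examples.get_path('columbus.shp')
cards = contiguity_from_shapefile(shp, criteria='queen')  # writes columbus_q.gal next to the shapefile
print(cards.shape)   # (49, 1): one neighbor count per polygon
print(cards.max())   # the largest number of queen neighbors for any polygon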
Example #2
    def processAlgorithm(self, progress):
        field = self.getParameterValue(self.FIELD)
        field = field[0:10] # try to handle Shapefile field length limit
        filename = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(filename)
        filename = dataobjects.exportVectorLayer(layer)        
        
        contiguity = self.getParameterValue(self.CONTIGUITY)
        if contiguity == 0: # queen
            print 'INFO: Moran\'s using queen contiguity'
            w=pysal.queen_from_shapefile(filename)
        else: # 1 for rook
            print 'INFO: Moran\'s using rook contiguity'
            w=pysal.rook_from_shapefile(filename)
    
        f = pysal.open(filename.replace('.shp','.dbf'))
        y=np.array(f.by_col[str(field)])
        m = pysal.Moran(y,w,transformation = "r", permutations = 999)

        self.setOutputValue(self.I,m.I)
        
        print "Moran's I: %f" % (m.I)
        print "INFO: Moran's I values range from -1 (indicating perfect dispersion) to +1 (perfect correlation). Values close to -1/(n-1) indicate a random spatial pattern."
        print "p_norm: %f" % (m.p_norm)
        print "p_rand: %f" % (m.p_rand)
        print "p_sim: %f" % (m.p_sim)
        print "INFO: p values smaller than 0.05 indicate spatial autocorrelation that is significant at the 5% level."
        print "z_norm: %f" % (m.z_norm)
        print "z_rand: %f" % (m.z_rand)
        print "z_sim: %f" % (m.z_sim)
        print "INFO: z values greater than 1.96 or smaller than -1.96 indicate spatial autocorrelation that is significant at the 5% level."
    def accept(self):
        # look for open shapefile layers, if none 
        if len(self.comboBox.currentText()) == 0 and self.lineEdit.text() == "":
            QMessageBox.information(self, self.tr("Weights from Shapefile"), self.tr("Please select input polygon vector layer"))

        # elif self.outShape.text() == "":
        #    QMessageBox.information(self, self.tr("Sum Line Lengths In Polyons"), self.tr("Please specify output shapefile"))
        else:
            # run the PySAL logic
            if str(self.comboBox.currentText()) == "":
                shapefile = str(self.shapefile)
            else:
                shapefile = str(self.d[str(self.comboBox.currentText())])

            if self.radioButton.isChecked():
                w = PS.queen_from_shapefile(shapefile)
                abb = 'q'
            else:
                w = PS.rook_from_shapefile(shapefile)
                abb = 'r'
            cards = NP.array(w.cardinalities.values())
            cards.shape = (len(cards), 1)
            galfile = shapefile.split(".")[0] + "_" + abb + ".gal"
            gal = PS.open(galfile, 'w')
            gal.write(w)
            gal.close()
            QDialog.accept(self)
Example #4
    def contiguity_from_shapefile(shapefile, criteria='rook'):
        print shapefile
        if criteria == 'rook':
            w = PS.rook_from_shapefile(shapefile)
            abb = 'r'
        else:
            w = PS.queen_from_shapefile(shapefile)
            abb = 'q'
        cards = NP.array(w.cardinalities.values())
        cards.shape = (len(cards), 1)
        galfile = shapefile.split(".")[0] + "_" + abb + ".gal"
        gal = PS.open(galfile, 'w')
        gal.write(w)
        gal.close()

        return cards
Example #5
def test():
    # Test
    shp = pysal.open(pysal.examples.get_path('NAT.shp'),'r')
    dbf = pysal.open(pysal.examples.get_path('NAT.dbf'),'r')
    
    show_map(shp)
    
    ids = get_selected(shp)
    print ids
    
    w = pysal.rook_from_shapefile(pysal.examples.get_path('NAT.shp'))
    moran_scatter_plot(shp, dbf, "HR90", w)
    
    scatter_plot(shp, ["HR90", "PS90"])
    scatter_plot_matrix(shp, ["HR90", "PS90"])
    
    quantile_map(shp, dbf, "HC60", 5, basemap="leaflet_map")
    
    
    select_ids = [i for i,v in enumerate(dbf.by_col["HC60"]) if v < 20.0]
    select(shp, ids=select_ids)
    
    
    quantile_map(shp, dbf, "HC60", 5)
    
    
    lisa_map(shp, dbf, "HC60", w)
Example #6
def read_files(filepath, **kwargs):
    """
    Reads a dbf/shapefile pair, squashing geometries into a "geometry" column.
    """
    #keyword arguments wrapper will strip all around dbf2df's required arguments
    geomcol = kwargs.pop('geomcol', 'geometry')
    weights = kwargs.pop('weights', '')
    
    dbf_path, shp_path = _pairpath(filepath)

    df = dbf2df(dbf_path, **kwargs)
    df[geomcol] = shp2series(shp_path)

    if weights != '' and isinstance(weights, str):
        if weights.lower() in ['rook', 'queen']:
            if weights.lower() == 'rook':
                df.W = ps.rook_from_shapefile(shp_path)
            else:
                df.W = ps.queen_from_shapefile(shp_path)
        else:
            try:
                W_path = os.path.splitext(dbf_path)[0] + '.' + weights
                df.W = ps.open(W_path).read()
            except IOError:
                print('Weights construction failed! Passing on weights')
    
    return df
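
A hypothetical call to read_files above (the shapefile name is illustrative, and dbf2df/shp2series are assumed to be the helpers from the same module, not redefined here):

df = read_files('columbus.shp', weights='rook')  # attaches a rook W to the frame as df.W
print(df.columns)            # dbf attributes plus the squashed 'geometry' column
print(df.W.n == len(df))     # one weights entry per row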
Example #7
    def __init__(self, filepath, outname, namelist, idlist, nb="queen", factor=2):
        """
        Initiation of modules
        """
        f=Dbf(filepath+".dbf")
        #Create mapping of locations to row id
        self.locations = dict()
        i=0
        for row in f:
            uid=unicode("".join([row[k] for k in idlist]))
            locnames = unicode(", ".join([row[k] for k in namelist]),"ascii","ignore")
            self.locations[i] = {outname:locnames,"id":uid}
            i+=1
        self.__dict__[outname]= self.locations
        self.outname = outname
        #Get neighbor weights by queen, rook, knn, distance
        if nb=="queen":
            self.wt = pysal.queen_from_shapefile(filepath+".shp")
        elif nb=="rook":
            self.wt = pysal.rook_from_shapefile(filepath+".shp")
        elif nb=="knn":
            self.wt = pysal.knnW_from_shapefile(filepath+".shp", k=factor)
        elif nb=="distance":
            self.wt = pysal.threshold_binaryW_from_shapefile(filepath+".shp", factor)

        #Create dictionary of neighbors for each region
        self.neighbors ={}
        for i,j in enumerate(self.wt):
            self.neighbors[self.locations[i]["id"]] = {self.outname:self.locations[i][self.outname]
                            ,"neighbors":dict([[self.locations[k]["id"],self.locations[k][self.outname]] for k in j.keys()])}
 def test_build_lattice_shapefile(self):
     of = "lattice.shp"
     pysal.build_lattice_shapefile(20, 20, of)
     w = pysal.rook_from_shapefile(of)
     self.assertEquals(w.n, 400)
     os.remove('lattice.shp')
     os.remove('lattice.shx')
Example #9
def test_cartodb():
    import pysal
    
    # load San Francisco plots data using PySAL
    shp_path = "../test_data/sfpd_plots.shp"
    plots_shp = pysal.open(shp_path)
    plots_dbf = pysal.open(shp_path[:-3]+"dbf")     
    
    import d3viz
    d3viz.setup()    
    
    d3viz.show_map(plots_shp)
    
    shp_path = "../test_data/sf_cartheft.shp"
    crime_shp = pysal.open(shp_path)
    crime_dbf = pysal.open(shp_path[:-3]+"dbf")
    
    d3viz.show_map(crime_shp)    
    
    d3viz.quantile_map(plots_shp,'cartheft',5)
    
    user_name = 'lixun910'
    api_key = '340808e9a453af9680684a65990eb4eb706e9b56'
    
    d3viz.setup_cartodb(api_key, user_name)    
    
    plots_table = d3viz.cartodb_upload(plots_shp)
    crime_table = d3viz.cartodb_upload(crime_shp)
    print plots_table
    print crime_table    
    
    d3viz.cartodb_show_maps(plots_shp, layers=[{'shp':crime_shp}])
    
    d3viz.cartodb_show_maps(plots_shp, layers=[{'shp':crime_shp, 'css':d3viz.CARTO_CSS_POINT_CLOUD}])
    
    new_cnt_col = "mycnt"
    d3viz.cartodb_count_pts_in_polys(plots_table, crime_table, new_cnt_col)    
    
    shp_path = d3viz.cartodb_get_data(plots_table, [new_cnt_col])
    
    shp = pysal.open(shp_path)
    dbf = pysal.open(shp_path[:-3]+"dbf") 
    w = pysal.rook_from_shapefile(shp_path)    
    
    import numpy as np
    y = np.array(dbf.by_col[new_cnt_col])
    lm = pysal.Moran_Local(y, w)
    
    new_lisa_table = "cartheft_lisa" 
    new_lisa_table = d3viz.cartodb_lisa(lm, new_lisa_table)    
    
    d3viz.cartodb_show_lisa_map(shp, new_lisa_table, uuid=plots_table)
    
    d3viz.cartodb_show_lisa_map(shp, new_lisa_table, uuid=plots_table, layers=[{'shp':crime_shp, 'css':d3viz.CARTO_CSS_POINT_CLOUD}])
    
    d3viz.quantile_map(shp, new_cnt_col, 5)
    
    d3viz.quantile_map(shp, new_cnt_col, 5, basemap="leaflet_map")
    
    d3viz.close_all()
Example #10
    def compute(self, vlayer, tfield, idvar, matType):
        vlayer = qgis.utils.iface.activeLayer()
        idvar = self.idVariable.currentText()
        # print type(idvar)
        tfield = self.inField.currentText()
        # print type(tfield)
        provider = vlayer.dataProvider()
        allAttrs = provider.attributeIndexes()
        caps = vlayer.dataProvider().capabilities()
        if caps & QgsVectorDataProvider.AddAttributes:
            TestField = idvar[:5] + "_qrr"
            res = vlayer.dataProvider().addAttributes([QgsField(TestField, QVariant.Double)])
        wp = str(self.dic[str(self.inShape.currentText())])
        if matType == "Rook":
            w = py.rook_from_shapefile(wp, idVariable=unicode(idvar))
        else:
            w = py.queen_from_shapefile(wp, idVariable=unicode(idvar))

        w1 = wp[:-3] + "dbf"
        db = py.open(w1)
        y = np.array(db.by_col[unicode(tfield)])
        np.random.seed(12345)
        gc = py.Geary(y, w)
        # lm = py.Moran_Local(y, w)
        # l = lm.p_sim
        gg = gc.C
        self.SAresult.setText("The Global Geary's C index is " + str(gg))
Example #11
 def get(self,shpName='',width=0,height=0):
     print shpName,width,height
     if not shpName in self.SHPS:
         return self.index()
     shp = pysal.open(self.SHPS[shpName])
     W = None
     if 'w' in self.request.GET:
         wtype = self.request.GET['w']
         if wtype.lower() == 'rook':
             W = pysal.rook_from_shapefile(self.SHPS[shpName])
         elif wtype.lower() == 'queen':
             W = pysal.queen_from_shapefile(self.SHPS[shpName])
         else:
             try:
                 k = int(wtype)
                 W = pysal.knnW_from_shapefile(self.SHPS[shpName],k)
             except:
                 print "No valid W"
     print shp
     if width and height:
         width=int(width)
         height=int(height)
         if W:
             return self.write({'len':len(shp), 'polygons':shift_scale_shp(shp,width,height),'width':width,'height':height,'W':W.neighbors})
         else:
             return self.write({'len':len(shp), 'polygons':shift_scale_shp(shp,width,height),'width':width,'height':height,'W':'null'})
     return self.write({'len':len(shp)})
Example #12
    def get_weight_matrix(self, array, rook=False, shpfile=None):
        """Return the spatial weight matrix based on pysal functionalities

        Keyword arguments:
            array    Numpy array with inventory values.
            rook    Boolean to select spatial weights matrix as rook or
                    queen case.
            shpfile    Name of file used to setup weight matrix.
        """
        # Get case name.
        if rook:
            case = 'rook'
        else:
            case = 'queen'
        # Get grid dimension.
        dim = array.shape
        if self.sptype == 'vector':
            try:
                # Create weights based on shapefile topology using defined key.
                if shpfile is None:
                    shpfile = self.invfile
                # Differentiate between rook and queen's case.
                if rook:
                    w = pysal.rook_from_shapefile(shpfile, self.invcol)
                else:
                    w = pysal.queen_from_shapefile(shpfile, self.invcol)
            except:
                msg = ("Couldn't build spatial weight matrix for vector "
                       "inventory <%s>" % (self.name))
                raise RuntimeError(msg)

            # Match weight index to inventory array index.
            w.id_order = list(self.inv_index)

            logger.info("Weight matrix in %s's case successfully calculated "
                        "for vector dataset" % case)
        elif self.sptype == 'raster':
            try:
                # Construct weight matrix in input grid size.
                w = pysal.lat2W(*dim, rook=rook)
            except:
                msg = ("Couldn't build spatial weight matrix for raster "
                       "inventory <%s>" % (self.name))
                raise RuntimeError(msg)

            logger.info("Weight matrix in %s's case successfully calculated "
                        "for raster dataset" % case)

        # Print imported raster summary.
        print("[ WEIGHT NUMBER ] = ", w.n)
        print("[ MIN NEIGHBOR ] = ", w.min_neighbors)
        print("[ MAX NEIGHBOR ] = ", w.max_neighbors)
        print("[ ISLANDS ] = ", *w.islands)
        print("[ HISTOGRAM ] = ", *w.histogram)

        self._Inventory__modmtime()

        return(w)
Example #13
 def setUp(self):
     db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
     y = np.array(db.by_col("HOVAL"))
     self.y = np.reshape(y, (49,1))
     X = []
     X.append(db.by_col("INC"))
     self.X = np.array(X).T
     self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
     self.w.transform = 'r'
Example #14
    def setUp(self):
        from pysal import rook_from_shapefile
        self.w = rook_from_shapefile(pysal.examples.get_path('10740.shp'))

        self.neighbors = {0: [3, 1], 1: [0, 4, 2], 2: [1, 5], 3: [0, 6, 4], 4: [1, 3,
                                                                                7, 5], 5: [2, 4, 8], 6: [3, 7], 7: [4, 6, 8], 8: [5, 7]}
        self.weights = {0: [1, 1], 1: [1, 1, 1], 2: [1, 1], 3: [1, 1, 1], 4: [1, 1,
                                                                              1, 1], 5: [1, 1, 1], 6: [1, 1], 7: [1, 1, 1], 8: [1, 1]}

        self.w3x3 = pysal.lat2W(3, 3)
Example #15
 def setUp(self):
     db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
     self.y_var = 'CRIME'
     self.y = np.array([db.by_col(self.y_var)]).reshape(49,1)
     self.x_var = ['INC','HOVAL']
     self.x = np.array([db.by_col(name) for name in self.x_var]).T
     self.r_var = 'NSA'
     self.regimes = db.by_col(self.r_var)
     self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
     self.w.transform = 'r'
Example #16
 def test_DistanceBand(self):
     """ see issue #126 """
     w = pysal.rook_from_shapefile(
         pysal.examples.get_path("lattice10x10.shp"))
     polygons = pysal.open(
         pysal.examples.get_path("lattice10x10.shp"), "r").read()
     points1 = [poly.centroid for poly in polygons]
     w1 = pysal.DistanceBand(points1, 1)
     for k in range(w.n):
         self.assertEqual(w[k], w1[k])
Example #17
 def test_DistanceBand_ints(self):
     """ see issue #126 """
     w = pysal.rook_from_shapefile(
         pysal.examples.get_path("lattice10x10.shp"))
     polygons = pysal.open(
         pysal.examples.get_path("lattice10x10.shp"), "r").read()
     points2 = [tuple(map(int, poly.vertices[0])) for poly in polygons]
     w2 = pysal.DistanceBand(points2, 1)
     for k in range(w.n):
         self.assertEqual(w[k], w2[k])
Example #18
def spw_from_shapefile(shapefile, idVariable=None):
    polygons = pysal.open(shapefile,'r').read()
    polygons = map(shapely.geometry.asShape,polygons)
    perimeters = [p.length for p in polygons]
    Wsrc = pysal.rook_from_shapefile(shapefile)
    new_weights = {}
    for i in Wsrc.neighbors:
        a = polygons[i]
        p = perimeters[i]
        new_weights[i] = [a.intersection(polygons[j]).length/p for j in Wsrc.neighbors[i]]
    return pysal.W(Wsrc.neighbors,new_weights)
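
A short usage sketch for spw_from_shapefile (the bundled columbus shapefile is an assumption; the point is that the weights become shared-boundary lengths as fractions of each polygon's perimeter rather than the binary 1s of plain rook contiguity):

import pysal
import shapely.geometry

w = spw_from_shapefile(pysal.examples.get_path('columbus.shp'))
print(w.n)                # same observation count as plain rook contiguity (49)
print(sum(w.weights[0]))  # boundary-share weights for polygon 0, at most 1.0 in total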
Example #19
 def setUp(self):
     db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
     y = np.array(db.by_col("CRIME"))
     y = np.reshape(y, (49,1))
     self.y = (y>40).astype(float)
     X = []
     X.append(db.by_col("INC"))
     X.append(db.by_col("HOVAL"))
     self.X = np.array(X).T
     self.X = np.hstack((np.ones(self.y.shape),self.X))
     self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
     self.w.transform = 'r'
Example #20
File: wmd.py Project: CartoDB/pysal
def _contiguity(arg_dict):
    """
    General handler for building contiguity weights from shapefiles

    Examples
    --------

    >>> w = wmd_reader('wrook1.wmd')
    >>> w.n
    49
    >>> w.meta_data
    {'root': {u'input1': {u'data1': {u'type': u'shp',
                                     u'uri': u'http://toae.org/pub/columbus.shp'}},
                          u'weight_type': u'rook', u'transform': u'O'}}
    """
    input1 = arg_dict['input1']
    for key in input1:
        input1 = input1[key]
        break
    uri = input1['uri']
    weight_type = arg_dict['weight_type']
    weight_type = weight_type.lower()
    if weight_type == 'rook':
        w = ps.rook_from_shapefile(uri)
    elif weight_type == 'queen':
        w = ps.queen_from_shapefile(uri)
    else:
        print "Unsupported contiguity criterion: ", weight_type
        return None
    if 'parameters' in arg_dict:
        order = arg_dict['parameters'].get('order', 1)  # default to 1st order
        lower = arg_dict['parameters'].get('lower', 0)  # default to exclude lower orders
        if order > 1:
            w_orig = w
            w = ps.higher_order(w, order)
            if lower:
                for o in xrange(order-1, 1, -1):
                    w = ps.weights.w_union(ps.higher_order(w_orig, o), w)
                w = ps.weights.w_union(w, w_orig)
        parameters = arg_dict['parameters']
    else:
        parameters = {'lower': 0, 'order': 1}
    w = WMD(w.neighbors, w.weights)
    w.meta_data = {}
    w.meta_data["input1"] = {"type": 'shp', 'uri': uri}
    w.meta_data["transform"] = w.transform
    w.meta_data["weight_type"] = weight_type
    w.meta_data['parameters'] = parameters
    return w
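
A brief sketch of the higher-order handling above in isolation (the bundled columbus shapefile is an assumed input; ps.higher_order and ps.weights.w_union are the same PySAL helpers the handler calls):

import pysal as ps

w1 = ps.rook_from_shapefile(ps.examples.get_path('columbus.shp'))
w2 = ps.higher_order(w1, 2)          # pairs exactly two contiguity steps apart
w_cum = ps.weights.w_union(w2, w1)   # union in the lower order, as the 'lower' option does
print(w_cum.n)                       # still 49 observations, with enlarged neighbor sets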
Example #21
    def setUp(self):
        from pysal import rook_from_shapefile
        self.w = rook_from_shapefile(pysal.examples.get_path('10740.shp'))
        wsp = self.w.to_WSP()
        self.w = wsp.to_W()

        self.neighbors = {0: [3, 1], 1: [0, 4, 2], 2: [1, 5], 3: [0, 6, 4],
                          4: [1, 3, 7, 5], 5: [2, 4, 8], 6: [3, 7],
                          7: [4, 6, 8], 8: [5, 7]}
        self.weights = {0: [1, 1], 1: [1, 1, 1], 2: [1, 1], 3: [1, 1, 1],
                        4: [1, 1, 1, 1], 5: [1, 1, 1], 6: [1, 1], 7: [1, 1, 1],
                        8: [1, 1]}

        self.w3x3 = pysal.lat2W(3, 3)
        w3x3 = pysal.weights.WSP(self.w3x3.sparse, self.w3x3.id_order)
        self.w3x3 = pysal.weights.WSP2W(w3x3)
Example #22
 def setUp(self):
     db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
     y = np.array(db.by_col("HOVAL"))
     self.y = np.reshape(y, (49,1))
     X = []
     X.append(db.by_col("INC"))
     self.X = np.array(X).T
     self.X = sparse.csr_matrix(self.X)
     yd = []
     yd.append(db.by_col("CRIME"))
     self.yd = np.array(yd).T
     q = []
     q.append(db.by_col("DISCBD"))
     self.q = np.array(q).T
     self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
     self.w.transform = 'r'
Example #23
def _importArcData(filename):
    """Creates a new Layer from a shapefile (<file>.shp)

    This function wraps and extends a core clusterPy function to utilize PySAL
    W constructors and dbf readers.


    Parameters
    ==========

    filename: string
              suffix of shapefile (fileName not fileName.shp)


    Returns
    =======
    layer: clusterpy layer instance



    """
    layer = _clusterpy.Layer()
    layer.name = filename.split('/')[-1]
    #print "Loading " + filename + ".dbf"
    dbf = ps.open(filename+".dbf")
    fields = dbf.header
    #data, fields, specs = importDBF(filename + '.dbf')
    data = {}
    #print "Loading " + filename + ".shp"
    if fields[0] != "ID":
        fields = ["ID"] + fields
        for y in range(dbf.n_records):
            data[y] = [y] + dbf.by_row(y)
    else:
        for y in range(dbf.n_records):
            data[y] = dbf.by_row_(y)

    layer.fieldNames = fields
    layer.Y = data
    shpf = filename+".shp"
    layer.shpType = 5
    #print 'pysal reader'
    layer.Wrook = ps.rook_from_shapefile(filename+".shp").neighbors
    layer.Wqueen = ps.queen_from_shapefile(filename+".shp").neighbors
    #print "Done"
    return layer
 def _get_spatial_weights(self):
     """
     creates the spatial weights object for use in
     OLS. This structure defaults to a certain
     spatial relationship. We can add more key words
     and create different relationships
     """
     #this matrix tells the algorithm how to 
     #look at neighboring features
     #queen looks at polygons with shared edges
     #queen b/c the way the chess piece moves
     if self.spat_weights == "queen":
         return pysal.queen_from_shapefile(self.infile)
         
     elif self.spat_weights == "rook":
         return pysal.rook_from_shapefile(self.infile)
         
     else:
         #won't use spatial weights
         return None
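
A small self-contained illustration of the queen/rook distinction described in the comments above, using a synthetic lattice instead of self.infile (the lattice is an assumption for demonstration only):

import pysal

# On a 3x3 lattice the center cell touches 8 cells at edges or corners (queen)
# but only 4 cells along shared edges (rook).
wq = pysal.lat2W(3, 3, rook=False)
wr = pysal.lat2W(3, 3, rook=True)
print(len(wq.neighbors[4]))   # 8 neighbors under queen contiguity
print(len(wr.neighbors[4]))   # 4 neighbors under rook contiguity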
Example #25
    def compute(self, vlayer, tfield, idvar, matType):
        vlayer = qgis.utils.iface.activeLayer()
        idvar = self.idVariable.currentText()
        tfield = self.inField.currentText()
        provider = vlayer.dataProvider()
        allAttrs = provider.attributeIndexes()
        caps = vlayer.dataProvider().capabilities()
        if caps & QgsVectorDataProvider.AddAttributes:
            TestField = idvar[:5] + "_qrr"
            res = vlayer.dataProvider().addAttributes([QgsField(TestField, QVariant.Double)])
        wp = str(self.dic[str(self.inShape.currentText())])
        if matType == "Rook":
            w = py.rook_from_shapefile(wp, idVariable=unicode(idvar))
        else:
            w = py.queen_from_shapefile(wp, idVariable=unicode(idvar))

        w1 = wp[:-3] + "dbf"
        db = py.open(w1)
        y = np.array(db.by_col[unicode(tfield)])
        mi = py.Moran(y, w)
        mg = mi.I
        self.SAresult.setText("Global Moran's I index is " + str(mg))
 def setUp(self):
     db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
     y = np.array(db.by_col("CRIME"))
     y = np.reshape(y, (49,1))
     self.y = y
     X = []
     X.append(db.by_col("INC"))
     X = np.array(X).T
     self.X = X
     yd = []
     yd.append(db.by_col("HOVAL"))
     yd = np.array(yd).T
     self.yd = yd
     q = []
     q.append(db.by_col("DISCBD"))
     q = np.array(q).T
     self.q = q
     reg = TSLS(y, X, yd, q=q)
     self.reg = reg
     w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
     w.transform = 'r'
     self.w = w
Example #27
def bugs_adjacency(shp, shp_sort_field=None, adj_type='queen', write_output = True):
    if adj_type=='queen':
        adj = ps.queen_from_shapefile(shp, shp_sort_field)
    if adj_type=='rook':
        adj = ps.rook_from_shapefile(shp, shp_sort_field)
    neigh_dict = adj.neighbors
    sort_order = sorted(adj.neighbors.keys())   
    
    neigh_list_nest = [neigh_dict[i] for i in sort_order]
    neigh_list_nest_ids=[[sort_order.index(X)+1 for X in sorted(sublist)] for sublist in neigh_list_nest]

    neigh_list = [Key for SubList in neigh_list_nest_ids for Key in (SubList)]
    num_neigh_list = [len(x) for x in neigh_list_nest]    
    
    out_folder = os.path.dirname(shp) if os.path.dirname(shp) != '' else os.getcwd()
    basename = os.path.basename(shp)
    f=open(os.path.join(out_folder, basename[:-4]+'_adj.txt'),'w')
    f.write(',\n'.join([', '.join(str(i) for i in s) for s in neigh_list_nest_ids]))
    f.close()
    f=open(os.path.join(out_folder, basename[:-4]+'_num.txt'),'w')
    f.write(', '.join([str(x) for x in num_neigh_list]))
    f.close()
    return (num_neigh_list, neigh_list)
Example #28
def parsewmd(jwmd, uploaddir=None):
    #Get the URI
    uri = jwmd['input1']['data1']['uri']
    url = urlparse(uri)

    if url.scheme == 'http':
        basename = url.path.split('/')[-1]

        if uploaddir != None:
            basename = os.path.join(uploaddir, basename)

        #The WMD never specifies the .shx, but PySAL needs it
        shx = basename.replace('.shp', '.shx')
        shxuri = uri.replace('.shp', '.shx')

        #Download both files locally
        response = urllib.urlretrieve(uri, basename)
        response = urllib.urlretrieve(shxuri, shx)

    elif url.scheme == 'file':
        pass

    #Get the generation information
    wtype = jwmd['weight_type']
    transform = jwmd['transform']


    #Populate the W
    if wtype.lower() == 'rook':
        w = ps.rook_from_shapefile(basename)
    elif wtype.lower() == 'queen':
        w = ps.queen_from_shapefile(basename)

    #Transform
    w.transform = transform

    return w
Example #29

def _run_stp1(y,x,eq_ids,r):
    y_r = y[eq_ids[r]]
    x_r = x[eq_ids[r]]     
    x_constant = USER.check_constant(x_r)
    model = BaseOLS(y_r, x_constant)
    #model.logll = diagnostics.log_likelihood(model) 
    return model            

if __name__ == '__main__':
    #_test()          

    import pysal
    import numpy as np
    db = pysal.open(pysal.examples.get_path("NAT.dbf"),'r')
    y_var = ['HR80','HR90']
    y = np.vstack((np.array([db.by_col(r)]).T for r in y_var))
    y_name = 'HR'
    x_var = ['PS80','UE80','PS90','UE90']
    x = np.array([db.by_col(name) for name in x_var]).T
    x = np.vstack((x[:,0:2],x[:,2:4]))
    x_name = ['PS','UE']
    eq_name = 'Year'
    eq_ID = [1980]*3085 + [1990]*3085
    w = pysal.rook_from_shapefile(pysal.examples.get_path("NAT.shp"))
    w.transform = 'r'

    sur1=ML_SUR(y, x, eq_ID)
    print sur1.summary        
Example #30
 def setUp(self):
     self.w = pysal.rook_from_shapefile(
         pysal.examples.get_path('10740.shp'))
import pysal
import numpy as np
import processing
from processing.core.VectorWriter import VectorWriter
from qgis.core import *
from PyQt4.QtCore import *

field = field[0:10]  # try to handle Shapefile field length limit

if contiguity == 'queen':
    print 'INFO: Global Moran\'s using queen contiguity'
    w = pysal.queen_from_shapefile(input)
else:
    print 'INFO: Global Moran\'s using rook contiguity'
    w = pysal.rook_from_shapefile(input)

f = pysal.open(pysal.examples.get_path(input.replace('.shp', '.dbf')))
y = np.array(f.by_col[str(field)])
m = pysal.Moran(y, w, transformation="r", permutations=999)

print "Moran's I: %f" % (m.I)
print "INFO: Moran's I values range from -1 (indicating perfect dispersion) to +1 (perfect correlation). Values close to -1/(n-1) indicate a random spatial pattern."
print "p_norm: %f" % (m.p_norm)
print "p_rand: %f" % (m.p_rand)
print "p_sim: %f" % (m.p_sim)
print "INFO: p values smaller than 0.05 indicate spatial autocorrelation that is significant at the 5% level."
print "z_norm: %f" % (m.z_norm)
print "z_rand: %f" % (m.z_rand)
print "z_sim: %f" % (m.z_sim)
print "INFO: z values greater than 1.96 or smaller than -1.96 indicate spatial autocorrelation that is significant at the 5% level."
Example #32
# get shapefile
shp_link = 'blocks/CensusBlockTIGER2010.shp'
maps.plot_poly_lines(shp_link)  # test shapefile

# get associated data
dbf = ps.open('blocks/CensusBlockTIGER2010.dbf')
cols = np.array([dbf.by_col(col) for col in dbf.header]).T
df = pd.DataFrame(cols)
df.columns = dbf.header
df.columns = df.columns.map(lambda x: x.lower())

# if duplicates, need to remove
len(df.ix[df.duplicated('geoid10'),:])  # number of duplicate pairs

## create weights (only need to run once)
w = ps.rook_from_shapefile(shp_link)
w.n == df.shape[0] # should be true
gal = ps.open('blocks/CensusBlockTIGER2010.gal','w')
gal.write(w)
gal.close()

df.tractce10 = df.tractce10.astype('int')
df['order'] = df.index

# plot community areas
maps.plot_choropleth(shp_link, np.array(df.tractce10), type='equal_interval',
     title='Initial Map', k=80)

# get spatial weights
w=ps.open('blocks/CensusBlockTIGER2010.gal').read()
# need to fix the ohare island (tracts 980000 and 770602)
    def processAlgorithm(self, progress):

        field = self.getParameterValue(self.FIELD)
        field = field[0:10] # try to handle Shapefile field length limit
        filename = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(filename)
        filename = dataobjects.exportVectorLayer(layer)        
        provider = layer.dataProvider()
        fields = provider.fields()
        fields.append(QgsField('MORANS_P', QVariant.Double))
        fields.append(QgsField('MORANS_Z', QVariant.Double))
        fields.append(QgsField('MORANS_Q', QVariant.Int))
        fields.append(QgsField('MORANS_I', QVariant.Double))
        fields.append(QgsField('MORANS_C', QVariant.Double))

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            fields, provider.geometryType(), layer.crs() )

        contiguity = self.getParameterValue(self.CONTIGUITY)
        if contiguity == 'queen':
            print 'INFO: Local Moran\'s using queen contiguity'
            w=pysal.queen_from_shapefile(filename)
        else:
            print 'INFO: Local Moran\'s using rook contiguity'
            w=pysal.rook_from_shapefile(filename)

        f = pysal.open(filename.replace('.shp','.dbf'))
        y=np.array(f.by_col[str(field)])
        lm = pysal.Moran_Local(y,w,transformation = "r", permutations = 999)

        # http://pysal.readthedocs.org/en/latest/library/esda/moran.html?highlight=local%20moran#pysal.esda.moran.Moran_Local
        # values indicate quadrat location 1 HH,  2 LH,  3 LL,  4 HL

        # http://www.biomedware.com/files/documentation/spacestat/Statistics/LM/Results/Interpreting_univariate_Local_Moran_statistics.htm
        # category - scatter plot quadrant - autocorrelation - interpretation
        # high-high - upper right (red) - positive - Cluster - "I'm high and my neighbors are high."
        # high-low - lower right (pink) - negative - Outlier - "I'm a high outlier among low neighbors."
        # low-low - lower left (med. blue) - positive - Cluster - "I'm low and my neighbors are low."
        # low-high - upper left (light blue) - negative - Outlier - "I'm a low outlier among high neighbors."

        # http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#/What_is_a_z_score_What_is_a_p_value/005p00000006000000/
        # z-score (Standard Deviations) | p-value (Probability) | Confidence level
        #     < -1.65 or > +1.65        |        < 0.10         |       90%
        #     < -1.96 or > +1.96        |        < 0.05         |       95%
        #     < -2.58 or > +2.58        |        < 0.01         |       99%

        self.setOutputValue(self.P_SIM, str(lm.p_sim))

        sig_q = lm.q * (lm.p_sim <= 0.01) # could make significance level an option
        outFeat = QgsFeature()
        i = 0
        for inFeat in processing.features(layer):
            inGeom = inFeat.geometry()
            outFeat.setGeometry(inGeom)
            attrs = inFeat.attributes()
            attrs.append(float(lm.p_sim[i]))
            attrs.append(float(lm.z_sim[i]))
            attrs.append(int(lm.q[i]))
            attrs.append(float(lm.Is[i]))
            attrs.append(int(sig_q[i]))
            outFeat.setAttributes(attrs)
            writer.addFeature(outFeat)
            i+=1

        del writer
Example #34
    np.set_printoptions(suppress=start_suppress)


if __name__ == '__main__':
    _test()
    import numpy as np
    import pysal
    from ols_regimes import OLS_Regimes
    db = pysal.open(pysal.examples.get_path('columbus.dbf'), 'r')
    y_var = 'CRIME'
    y = np.array([db.by_col(y_var)]).reshape(49, 1)
    x_var = ['INC', 'HOVAL']
    x = np.array([db.by_col(name) for name in x_var]).T
    r_var = 'NSA'
    regimes = db.by_col(r_var)
    w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
    w.transform = 'r'
    olsr = OLS_Regimes(y,
                       x,
                       regimes,
                       w=w,
                       constant_regi='many',
                       nonspat_diag=False,
                       spat_diag=False,
                       name_y=y_var,
                       name_x=x_var,
                       name_ds='columbus',
                       name_regimes=r_var,
                       name_w='columbus.gal')
    print olsr.summary
Example #35
    print("time elapsed for rook... using bins: " + str(t1 - t0))

    from pysal.weights._contW_rtree import ContiguityWeights_rtree

    t0 = time.time()
    rt = ContiguityWeights_rtree(pysal.open(fname), ROOK)
    t1 = time.time()

    print("time elapsed for rook... using rtree: " + str(t1 - t0))
    print(rt.w == rb.w)

    print('QUEEN')
    t0 = time.time()
    qt = ContiguityWeights_rtree(pysal.open(fname), QUEEN)
    t1 = time.time()
    print("using " + str(fname))
    print("time elapsed for queen... using rtree: " + str(t1 - t0))
    print(qb.w == qt.w)

    print('knn4')
    t0 = time.time()
    knn = pysal.knnW_from_shapefile(fname, k=4)
    t1 = time.time()
    print(t1 - t0)

    print('rook from shapefile')
    t0 = time.time()
    knn = pysal.rook_from_shapefile(fname)
    t1 = time.time()
    print(t1 - t0)
Example #36
import pysal as ps
import sys
import time as t

t1 = t.time()
w = ps.rook_from_shapefile(sys.argv[1])
t2 = t.time()
print "Serial time was {} seconds.".format(t2-t1)
print w[0]
print w[2499]
Example #37
 def setUp(self):
     self.wq = pysal.queen_from_shapefile(
         pysal.examples.get_path("columbus.shp"))
     self.wr = pysal.rook_from_shapefile(
         pysal.examples.get_path("columbus.shp"))
import pysal 
import numpy as np
import processing 
from processing.tools.vector import VectorWriter
from qgis.core import *
from PyQt4.QtCore import *

field = field[0:10] # try to handle Shapefile field length limit

if contiguity == 0: # queen
    print 'INFO: Local Moran\'s using queen contiguity'
    w=pysal.queen_from_shapefile(input)
else: # 1 for rook
    print 'INFO: Local Moran\'s using rook contiguity'
    w=pysal.rook_from_shapefile(input)
    
f = pysal.open(pysal.examples.get_path(input.replace('.shp','.dbf')))
y=np.array(f.by_col[str(field)])
m = pysal.Moran(y,w,transformation = "r", permutations = 999)

i=m.I

print "Moran's I: %f" % (m.I)
print "INFO: Moran's I values range from -1 (indicating perfect dispersion) to +1 (perfect correlation). Values close to -1/(n-1) indicate a random spatial pattern."
print "p_norm: %f" % (m.p_norm)
print "p_rand: %f" % (m.p_rand)
print "p_sim: %f" % (m.p_sim)
print "INFO: p values smaller than 0.05 indicate spatial autocorrelation that is significant at the 5% level."
print "z_norm: %f" % (m.z_norm)
print "z_rand: %f" % (m.z_rand)
Example #39
    def compute(self, vlayer, tfield, idvar, matType, progressBar):
        vlayer = qgis.utils.iface.activeLayer()
        idvar = self.idVariable.currentText()
        # print type(idvar)
        tfield = self.inField.currentText()
        # print type(tfield)
        provider = vlayer.dataProvider()
        allAttrs = provider.attributeIndexes()
        caps = vlayer.dataProvider().capabilities()
        start = 15.00
        if caps & QgsVectorDataProvider.AddAttributes:
            if matType == "Rook":
                TestField = tfield[:7] + "_r"
            else:
                TestField = tfield[:7] + "_q"

            res = vlayer.dataProvider().addAttributes(
                [QgsField(TestField, QVariant.Double)])
        wp = str(self.dic[str(self.inShape.currentText())])
        if matType == "Rook":
            w = py.rook_from_shapefile(wp, idVariable=unicode(idvar))
        else:
            w = py.queen_from_shapefile(wp, idVariable=unicode(idvar))

        w1 = wp[:-3] + "dbf"
        db = py.open(w1)
        y = np.array(db.by_col[unicode(tfield)])
        np.random.seed(12345)
        lm = py.Moran_Local(y, w)
        l = lm.p_sim

        # Replace insignificant values with the number 5:
        for i in range(len(l)):
            if l[i] > 0.05:
                l[i] = 5

        # Replace the significant values with their quadrant:
        for i in range(len(l)):
            if l[i] <= 0.05:
                l[i] = lm.q[i]

        a = range(len(l))
        l1 = np.array(l).flatten().tolist()
        d = dict(zip(a, l1))
        fieldList = ftools_utils.getFieldList(vlayer)
        print len(fieldList)
        n = len(fieldList) - 1
        add = 85.00 / len(l)
        print add
        vlayer.startEditing()
        for i in a:
            fid = int(i)
            # print fid
            vlayer.changeAttributeValue(fid, n, d[i])
            start = start + add
            print start

            # print d[i] #index of the new added field
        vlayer.commitChanges()
        self.SAresult.setText(
            "Significance values have been added to your attribute table!")
        start = start + 1
        progressBar.setValue(start)
Example #40
q = []
q.append(db.by_col("DISCBD"))
q = np.array(q).T
reg = TSLS(y, X, yd, q)

# create regression object for spatial test
db = pysal.open(pysal.examples.get_path("columbus.dbf"),'r')
y = np.array(db.by_col("HOVAL"))
y = np.reshape(y, (49,1))
X = np.array(db.by_col("INC"))
X = np.reshape(X, (49,1))
yd = np.array(db.by_col("CRIME"))
yd = np.reshape(yd, (49,1))
q = np.array(db.by_col("DISCBD"))
q = np.reshape(q, (49,1))
w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp")) 
w.transform = 'r'
regsp = GM_Lag(y, X, w=w, yend=yd, q=q, w_lags=2)


class TestTStat(unittest.TestCase):
    def test_t_stat(self):
        obs = diagnostics_tsls.t_stat(reg)
        exp = [(5.8452644704588588, 4.9369075950019865e-07),
               (0.36760156683572748, 0.71485634049075841),
               (-1.9946891307832111, 0.052021795864651159)]
        for i in range(3):
            for j in range(2):
                self.assertAlmostEquals(obs[i][j],exp[i][j])

class TestPr2Aspatial(unittest.TestCase):
import pandas as pd
import numpy as np
import pysal as ps
import geopandas as geo
 
shape ='counties-temporal.shp'
shape

file = geo.read_file(shape)
file.head()


# In[2]:


RookWeight = ps.rook_from_shapefile(shape)
RookWeightMatrix , ids = RookWeight.full()
RookWeightMatrix


# In[3]:


#Spatial Lag
#Is a variable that averages the neighboring values of a location
#Accounts for the autocorrelation in the model with the weight matrix
#
data = ps.pdio.read_files(shape)
Rook = ps.rook_from_shapefile(shape)
Rook.transform = 'r'
percent16Lag = ps.lag_spatial(Rook, data.percent16)
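
A minimal self-contained check of what ps.lag_spatial computes, using a synthetic lattice rather than the counties shapefile above (which is not a bundled dataset; the lattice and values are assumptions for illustration):

import numpy as np
import pysal as ps

w = ps.lat2W(3, 3, rook=True)   # 3x3 grid, rook contiguity
w.transform = 'r'               # row-standardize so the lag is an average of neighbors
y = np.arange(9, dtype=float)
lag = ps.lag_spatial(w, y)
print(lag[4])   # the center cell's neighbors are 1, 3, 5 and 7, so its lag is 4.0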