Esempio n. 1
0
    def GetNeighborhood(self, centralFeature):
        """Return the list of features neighbouring *centralFeature*.

        A candidate is a neighbour when it lies inside the characteristic
        distance (isotropic case) or inside the anisotropy ellipse, and the
        segment joining it to the central point crosses no barrier feature.

        Returns an empty list when no candidate exists or when the
        anisotropy ellipse cannot be built.
        """
        centralPnt = centralFeature.geometry().asPoint()

        # In a first step select the points inside a bounding rectangle
        # to limit the search loop.
        halfDistance = self.GetCharacteristicDistance() / 1.4
        searchRect = QgsRectangle(centralPnt.x() - halfDistance,
                                  centralPnt.y() - halfDistance,
                                  centralPnt.x() + halfDistance,
                                  centralPnt.y() + halfDistance)
        self.gstaLayer.select(searchRect, False)

        # Nothing selected: no neighbourhood at all.
        if self.gstaLayer.selectedFeatureCount() == 0:
            return []

        # The ellipse depends only on the central point, so build it ONCE
        # (the original rebuilt it for every candidate feature).
        ellipse = None
        if self.GetAnisotropy():
            ellipse = self.MakeEllipse(centralPnt,
                                       self.GetCharacteristicDistance(),
                                       self.GetTolerance(),
                                       self.GetDirection())
            if not ellipse:
                return []

        neighborList = []
        for f in processing.features(self.gstaLayer):
            geomF = f.geometry()
            if ellipse is not None:
                # Anisotropic case: neighbour if distinct from the central
                # point and contained in the ellipse.
                isNeighbor = (geomF.asPoint() != centralPnt
                              and ellipse.geometry().contains(geomF))
            else:
                # Isotropic case: neighbour if within the characteristic
                # distance (distance 0 excludes the central feature itself).
                distance = geomF.distance(centralFeature.geometry())
                isNeighbor = 0.0 < distance <= self.GetCharacteristicDistance()

            # BUG FIX: the original appended the feature once per barrier
            # layer that did not cross the segment, producing duplicates
            # when several barrier layers were configured. A neighbour is
            # kept exactly once, and only if NO barrier separates it.
            if isNeighbor and not self._IsSeparatedByBarrier(geomF.asPoint(),
                                                             centralPnt):
                neighborList.append(f)

        self.gstaLayer.removeSelection()
        return neighborList

    def _IsSeparatedByBarrier(self, pointA, pointB):
        """Return True when the segment pointA-pointB crosses any feature
        of any configured barrier layer; False when there is no barrier
        layer or nothing crosses."""
        if len(self.GetBarrierLayerList()) == 0:
            return False
        line = QgsFeature()
        line.setGeometry(QgsGeometry.fromPolyline([pointA, pointB]))
        for layer in self.GetBarrierLayerList():
            for barrier in processing.features(layer):
                if line.geometry().crosses(barrier.geometry()):
                    return True
        return False
Esempio n. 2
0
 def test_SagaVectorAlgorithmWithSelection(self):
     """Run saga:polygoncentroids with a single selected polygon and check
     the result layer's schema, first feature attributes and geometry."""
     source = processing.getObject(polygons2())
     first = source.getFeatures().next()
     source.setSelectedFeatures([first.id()])
     outputs = processing.runalg('saga:polygoncentroids', polygons2(),
                                 True, None)
     source.setSelectedFeatures([])
     result = dataobjects.getObjectFromUri(outputs['CENTROIDS'], True)
     result_fields = result.pendingFields()
     self.assertEqual(['ID', 'POLY_NUM_B', 'POLY_ST_B'],
                      [unicode(f.name()) for f in result_fields])
     self.assertEqual(['Real', 'Real', 'String'],
                      [unicode(f.typeName()) for f in result_fields])
     result_features = processing.features(result)
     self.assertEqual(1, len(result_features))
     centroid = result_features.next()
     self.assertEqual(['2', '1', 'string a'],
                      [unicode(attr) for attr in centroid.attributes()])
     self.assertEqual('POINT(270806.69221918 4458924.97720492)',
                      unicode(centroid.geometry().exportToWkt()))
Esempio n. 3
0
 def getVectorLayerFieldValues(self, vectorLayer, fieldName):
     """
     purpose:
     return all field values for a vector layer using the QGIS processing
     convenience functions

     notes:
     takes into account feature selection

     returns:
     tuple of (message, list of values or None, boolean status)
     """
     # resolve the layer reference to a QGIS layer object
     inputLayer = processing.getObject(vectorLayer)

     # index of the requested attribute column
     # NOTE(review): fieldNameIndex() returns -1 for an unknown field name,
     # which would silently read the last column -- confirm callers pass
     # valid names
     idx = inputLayer.fieldNameIndex(fieldName)

     # processing.features() honours any active feature selection;
     # comprehension replaces the original append loop, and the original's
     # shadowing of the builtin ``iter`` is gone
     values = [feature.attributes()[idx]
               for feature in processing.features(inputLayer)]

     if not values:
         return ("no values for vector layer fieldname provided", None, False)
     return ("values present", values, True)
Esempio n. 4
0
 def test_SagaVectorAlgorithmWithSelection(self):
     """Check saga:polygoncentroids when run on a one-polygon selection."""
     vlayer = processing.getObject(polygons2())
     vlayer.setSelectedFeatures([vlayer.getFeatures().next().id()])
     outputs = processing.runalg("saga:polygoncentroids", polygons2(),
                                 True, None)
     vlayer.setSelectedFeatures([])
     out_layer = dataobjects.getObjectFromUri(outputs['CENTROIDS'], True)
     out_names = []
     out_types = []
     for field in out_layer.pendingFields():
         out_names.append(str(field.name()))
         out_types.append(str(field.typeName()))
     self.assertEqual(['ID', 'POLY_NUM_B', 'POLY_ST_B'], out_names)
     self.assertEqual(['Real', 'Real', 'String'], out_types)
     out_features = processing.features(out_layer)
     self.assertEqual(1, len(out_features))
     centroid = out_features.next()
     centroid_values = [str(attr) for attr in centroid.attributes()]
     self.assertEqual(["2", "1", "string a"], centroid_values)
     expected_wkt = 'POINT(270806.69221918 4458924.97720492)'
     self.assertEqual(expected_wkt, str(centroid.geometry().exportToWkt()))
Esempio n. 5
0
    def test_SagaVectorAlgorithWithUnsupportedInputAndOutputFormat(self):
        """This tests both the exporting to shp and then the format
        change in the output layer.
        """

        source = processing.getObject(polygonsGeoJson())
        source.setSelectedFeatures([source.getFeatures().next().id()])
        outputs = processing.runalg('saga:polygoncentroids',
                                    polygonsGeoJson(), True,
                                    getTempFilename('geojson'))
        source.setSelectedFeatures([])
        centroids = dataobjects.getObjectFromUri(outputs['CENTROIDS'], True)
        centroid_fields = centroids.pendingFields()
        self.assertEqual(['ID', 'POLY_NUM_A', 'POLY_ST_A'],
                         [unicode(f.name()) for f in centroid_fields])
        self.assertEqual(['Real', 'Real', 'String'],
                         [unicode(f.typeName()) for f in centroid_fields])
        centroid_features = processing.features(centroids)
        self.assertEqual(1, len(centroid_features))
        first = centroid_features.next()
        self.assertEqual(['0', '1.1', 'string a'],
                         [unicode(attr) for attr in first.attributes()])
        self.assertEqual('POINT(270787.49991451 4458955.46775295)',
                         unicode(first.geometry().exportToWkt()))
def create_lvl_copies(input_shp="DEFAULT",height_attr="height"):
    """
    creates zoom level dependent copies of 10 m spaced contour lines
    scale factor = {zoom-level : isoline spacing in m}
    used on selected layer
    """
    scale_factor = {9:500,10:200,11:100,12:50,13:20}
    if input_shp == "DEFAULT":
        layer = iface.mapCanvas().currentLayer()
        height_attr = "height"
    features = processing.features(layer)
    for level in range(9,14):
        export_features_per_lvl = []
        for feature in features:
            # can be divided by spacing -> must be contained in the dataset
            # change back to height
            if ((feature[height_attr] % scale_factor[level]) == 0):
                export_features_per_lvl.append(feature.id())
                print "selecting following ids:",len(export_features_per_lvl)
                print "current level is: "+str(level)+"; added feature: "+str(feature.id())+" with height: "+str(feature[height_attr])
        # select features that contain the specified criteria
        print "selecting following ids:",len(export_features_per_lvl)
        layer.setSelectedFeatures(export_features_per_lvl)

        # write down selected features
        basepath= iface.activeLayer().dataProvider().dataSourceUri()
        new_file = basepath[:-4]+"-level"+str(level)+".shp"
        # boolean 1 at the end saves only selected features
        error = QgsVectorFileWriter.writeAsVectorFormat(layer, new_file, "utf-8", None, "ESRI Shapefile", 1)
        if error == QgsVectorFileWriter.NoError:
            print "Exported: ",new_file
def calculateFields(listStats, output):
    """Aggregate each kept input field, grouped by the dissolve field.

    listStats -- one statistic name per field ('no' skips the field; else
                 one of mean/sum/min/max/count/first/last/median/sd/
                 concat/unique)
    output    -- uri of the dissolved output layer, used to obtain the
                 unique dissolve-field values

    Returns a list with one element per kept field; each element is a list
    of aggregated values, one per unique dissolve-field value (NULL where a
    group held only NULLs).

    NOTE(review): depends on the surrounding scope providing ``inputLayer``,
    ``Dissolve_field`` and ``NULL`` -- confirm against the calling script.
    """
    # iterate input features through processing so an active selection is
    # honoured if that processing option is set (avoids shadowing the
    # builtin ``iter`` as the original did)
    attrs = [feature.attributes() for feature in processing.features(inputLayer)]
    # get index of dissolve field
    provider = inputLayer.dataProvider()
    fields = provider.fields()
    listFieldNames = [field.name() for field in fields]
    indexDissolveField = listFieldNames.index(Dissolve_field)
    # all values of the dissolve field (before processing: with duplicates)
    valuesDissolveField = [feature[indexDissolveField] for feature in attrs]
    # unique values for the dissolve field, read back from the output layer
    # (seems more secure than deduplicating valuesDissolveField ourselves)
    outputLayer = QgsVectorLayer(output, "name", "ogr")
    # BUG FIX: the original read ``dissolveLayer.dataProvider()`` but no
    # such name exists in this function; the layer opened on the line above
    # is clearly the one intended
    provider = outputLayer.dataProvider()
    fields = provider.fields()
    listFieldNames = [field.name() for field in fields]
    uniqueValuesDissolveField = [feature.attributes()[indexDissolveField]
                                 for feature in outputLayer.getFeatures()]
    # one result list per kept field
    listRes = []
    for i in range(len(listFieldNames)):
        if listStats[i] != 'no':
            # one bucket per unique dissolve-field value
            listAttrs = [[] for val in range(len(uniqueValuesDissolveField))]
            # distribute the field's values into their dissolve buckets
            valuesField = [feature[i] for feature in attrs]
            for (x, y) in zip(valuesDissolveField, valuesField):
                listAttrs[uniqueValuesDissolveField.index(x)].append(y)
            # removes any NULL (falsy) values; a bucket may end up empty
            listAttrs = [[x for x in l if x] for l in listAttrs]
            # for each bucket, compute one value according to the chosen
            # stat; empty buckets (originally all-NULL) yield NULL
            if listStats[i] == "mean":
                listAttrs = [sum(y) / len(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "sum":
                listAttrs = [sum(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "min":
                listAttrs = [min(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "max":
                listAttrs = [max(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "count":
                listAttrs = [len(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "first":
                listAttrs = [y[0] if y else NULL for y in listAttrs]
            elif listStats[i] == "last":
                listAttrs = [y[-1] if y else NULL for y in listAttrs]
            elif listStats[i] == "median":
                # NOTE(review): ``self`` is undefined in this module-level
                # function -- these two branches would raise NameError;
                # confirm the intended helper (e.g. a module-level median)
                listAttrs = [self.median(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "sd":
                listAttrs = [self.standard_dev(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "concat":
                listAttrs = [", ".join(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "unique":
                listAttrs = [", ".join(set(y)) if y else NULL for y in listAttrs]
            # append this field's aggregated column to the result
            listRes.append(listAttrs)
    return listRes
Esempio n. 8
0
 def test_featuresWithSelection(self):
     """A one-feature selection must make processing.features() yield 1."""
     vlayer = processing.getObject(points())
     first = vlayer.getFeatures().next()
     vlayer.setSelectedFeatures([first.id()])
     self.assertEqual(1, len(processing.features(vlayer)))
     vlayer.setSelectedFeatures([])
Esempio n. 9
0
 def test_featuresWithSelection(self):
     """processing.features() honours the current feature selection."""
     target = processing.getObject(points())
     selected_ids = [target.getFeatures().next().id()]
     target.setSelectedFeatures(selected_ids)
     iterated = processing.features(target)
     self.assertEqual(1, len(iterated))
     target.setSelectedFeatures([])
Esempio n. 10
0
    def processAlgorithm(self, progress):
        field = self.getParameterValue(self.FIELD)
        field = field[0:10] # try to handle Shapefile field length limit
        filename = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(filename)
        filename = dataobjects.exportVectorLayer(layer)        
        provider = layer.dataProvider()
        fields = provider.fields()
        fields.append(QgsField('L_G', QVariant.Double))
        fields.append(QgsField('L_G_p', QVariant.Double))
        fields.append(QgsField('L_G_S', QVariant.Int))
        fields.append(QgsField('L_G_ll_hh', QVariant.Int))

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            fields, provider.geometryType(), layer.crs() )

        contiguity = self.getParameterValue(self.CONTIGUITY)
        if self.CONTIGUITY_OPTIONS[contiguity] == 'queen':
            print 'INFO: Local G and G* using queen contiguity'
            w = pysal.queen_from_shapefile(filename)
        elif self.CONTIGUITY_OPTIONS[contiguity] == 'rook':
            print 'INFO: Local G and G* using rook contiguity'
            w = pysal.rook_from_shapefile(filename)

        significance_level = self.getParameterValue(self.SIGNIFICANCE_LEVEL)
        if self.SIGNIFICANCE_OPTIONS[significance_level] == '90%':
            max_p = 0.10
        elif self.SIGNIFICANCE_OPTIONS[significance_level] == '95%':
            max_p = 0.05
        elif self.SIGNIFICANCE_OPTIONS[significance_level] == '99%':
            max_p = 0.01    
        print 'INFO: significance level ' + self.SIGNIFICANCE_OPTIONS[significance_level]

        f = pysal.open(pysal.examples.get_path(filename.replace('.shp','.dbf')))
        y = np.array(f.by_col[str(field)])
        lg = pysal.G_Local(y,w,transform = "b", permutations = 999) 

        sig_g =  1.0 * lg.p_sim <= max_p
        ll_hh = 1.0 * (lg.Gs > lg.EGs) + 1
        sig_ll_hh = sig_g * ll_hh
        outFeat = QgsFeature()
        i = 0
        for inFeat in processing.features(layer):
            inGeom = inFeat.geometry()
            outFeat.setGeometry(inGeom)
            attrs = inFeat.attributes()
            attrs.append(float(lg.Gs[i]))
            attrs.append(float(lg.p_sim[i]))
            attrs.append(int(sig_g[i]))
            attrs.append(int(sig_ll_hh[i]))
            outFeat.setAttributes(attrs)
            writer.addFeature(outFeat)
            i+=1

        del writer
    def geocode(self):
        
        self.dlg.pushButton.setDisabled( True )
        print "Geocode"
        
        layer = self.dlg.mMapLayerComboBox.currentLayer()
        layer_field_index = layer.fieldNameIndex(self.dlg.mFieldComboBox.currentField())

        # prepare
        features = processing.features(layer)
        
        count = 0
        total = 0
        for f in features:
            total +=1
            
        features = processing.features(layer)
        

        for f in features:
            count += 1
            self.dlg.progressBar.setValue(int(100 * total / count))
            print int(100 * count / total)
 def test_simplification(self):
     """Simplifier output keeps the line schema; the first of the five
     features has all-NULL attributes."""
     result = processing.runalg('schematizationprovider:topologypreservingsimplifier',
                                lines(), '2000', None)
     simplified = dataobjects.getObjectFromUri(result['OUTPUT'], True)
     self.assertEqual(['ID', 'LINE_NUM_A', 'LINE_ST_A'],
                      [str(f.name()) for f in simplified.pendingFields()])
     simplified_features = processing.features(simplified)
     self.assertEqual(5, len(simplified_features))
     first = simplified_features.next()
     self.assertEqual(['NULL', 'NULL', 'NULL'],
                      [str(attr) for attr in first.attributes()])
 def test_simplification(self):
     """Run the topology preserving simplifier and inspect its output."""
     alg_id = 'schematizationprovider:topologypreservingsimplifier'
     outputs = processing.runalg(alg_id, lines(), '2000', None)
     out_layer = dataobjects.getObjectFromUri(outputs['OUTPUT'], True)
     field_names = [str(f.name()) for f in out_layer.pendingFields()]
     self.assertEqual(['ID', 'LINE_NUM_A', 'LINE_ST_A'], field_names)
     out_features = processing.features(out_layer)
     self.assertEqual(5, len(out_features))
     attr_strings = [str(a) for a in out_features.next().attributes()]
     self.assertEqual(['NULL', 'NULL', 'NULL'], attr_strings)
Esempio n. 14
0
 def test_qgiscountpointsinpolygon(self):
     """qgis:countpointsinpolygon appends a NUMPOINTS count attribute."""
     outputs = processing.runalg("qgis:countpointsinpolygon", polygons(),
                                 points(), "NUMPOINTS", self.getOutputFile())
     counted = dataobjects.getObjectFromUri(outputs['OUTPUT'], True)
     counted_fields = counted.pendingFields()
     self.assertEqual(['ID', 'POLY_NUM_A', 'POLY_ST_A', 'NUMPOINTS'],
                      [str(f.name()) for f in counted_fields])
     self.assertEqual(['Integer', 'Real', 'String', 'Real'],
                      [str(f.typeName()) for f in counted_fields])
     counted_features = processing.features(counted)
     self.assertEqual(2, len(counted_features))
     first = counted_features.next()
     self.assertEqual(["1", "1.1", "string a", "6"],
                      [str(attr) for attr in first.attributes()])
Esempio n. 15
0
 def test_modeleremptystring(self):
     """Model with an empty-string parameter: output keeps the union
     schema plus an Integer NewField column."""
     outputs = processing.runalg('modeler:emptystring', union(), None)
     out_layer = dataobjects.getObjectFromUri(outputs['OUTPUT_LAYER_ALG0'],
                                              True)
     out_fields = out_layer.pendingFields()
     self.assertEqual(['ID', 'POLY_NUM_A', 'POLY_ST_A', 'ID_2',
                       'POLY_NUM_B', 'POLY_ST_B', 'NewField'],
                      [unicode(f.name()) for f in out_fields])
     self.assertEqual(['Integer', 'Real', 'String', 'Integer',
                       'Real', 'String', 'Integer'],
                      [unicode(f.typeName()) for f in out_fields])
     out_features = processing.features(out_layer)
     self.assertEqual(8, len(out_features))
     first = out_features.next()
     self.assertEqual(['1', '1.1', 'string a', '2', '1', 'string a', '10'],
                      [unicode(attr) for attr in first.attributes()])
     wkt = 'POLYGON((270807.08580285 4458940.1594565,270798.42294527 4458914.62661676,270780.81854858 4458914.21983449,270763.52289518 4458920.715993,270760.3449542 4458926.6570575,270763.78234766 4458958.22561242,270794.30290024 4458942.16424502,270807.08580285 4458940.1594565))'
     self.assertEqual(wkt, unicode(first.geometry().exportToWkt()))
Esempio n. 16
0
 def test_modeleremptystring(self):
     """Check field names/types, feature count, first-feature attributes
     and geometry of the modeler:emptystring output."""
     result = processing.runalg('modeler:emptystring', union(), None)
     layer = dataobjects.getObjectFromUri(result['OUTPUT_LAYER_ALG0'], True)
     observed_names = []
     observed_types = []
     for field in layer.pendingFields():
         observed_names.append(unicode(field.name()))
         observed_types.append(unicode(field.typeName()))
     expected_names = ['ID', 'POLY_NUM_A', 'POLY_ST_A', 'ID_2',
                       'POLY_NUM_B', 'POLY_ST_B', 'NewField']
     expected_types = ['Integer', 'Real', 'String', 'Integer',
                       'Real', 'String', 'Integer']
     self.assertEqual(expected_names, observed_names)
     self.assertEqual(expected_types, observed_types)
     layer_features = processing.features(layer)
     self.assertEqual(8, len(layer_features))
     head = layer_features.next()
     head_values = [unicode(attr) for attr in head.attributes()]
     self.assertEqual(['1', '1.1', 'string a', '2', '1', 'string a', '10'],
                      head_values)
     expected_wkt = 'POLYGON((270807.08580285 4458940.1594565,270798.42294527 4458914.62661676,270780.81854858 4458914.21983449,270763.52289518 4458920.715993,270760.3449542 4458926.6570575,270763.78234766 4458958.22561242,270794.30290024 4458942.16424502,270807.08580285 4458940.1594565))'
     self.assertEqual(expected_wkt, unicode(head.geometry().exportToWkt()))
Esempio n. 17
0
 def test_gdalogrogr2ogrWrongExtension(self):
     """ogr2ogr with an unknown output extension still yields a usable
     layer with the union schema (lower-cased field names)."""
     outputs = processing.runalg('gdalogr:ogr2ogr', union(), 3, '',
                                 getTempFilename('wrongext'))
     converted = dataobjects.getObjectFromUri(outputs['OUTPUT_LAYER'], True)
     converted_fields = converted.pendingFields()
     self.assertEqual(['id', 'poly_num_a', 'poly_st_a',
                       'id_2', 'poly_num_b', 'poly_st_b'],
                      [str(f.name()) for f in converted_fields])
     self.assertEqual(['Integer', 'Real', 'String',
                       'Integer', 'Real', 'String'],
                      [str(f.typeName()) for f in converted_fields])
     converted_features = processing.features(converted)
     self.assertEqual(8, len(converted_features))
     first = converted_features.next()
     self.assertEqual(['1', '1.1', 'string a', '2', '1', 'string a'],
                      [str(attr) for attr in first.attributes()])
     wkt = 'POLYGON((270807.08580285 4458940.1594565,270798.42294527 4458914.62661676,270780.81854858 4458914.21983449,270763.52289518 4458920.715993,270760.3449542 4458926.6570575,270763.78234766 4458958.22561242,270794.30290024 4458942.16424502,270807.08580285 4458940.1594565))'
     self.assertEqual(wkt, str(first.geometry().exportToWkt()))
Esempio n. 18
0
 def test_gdalogrogr2ogrWrongExtension(self):
     """The converter falls back gracefully when the requested output
     extension is not a known vector format."""
     run_result = processing.runalg('gdalogr:ogr2ogr', union(), 3, '',
                                    getTempFilename('wrongext'))
     out_layer = dataobjects.getObjectFromUri(run_result['OUTPUT_LAYER'],
                                              True)
     seen_names = []
     seen_types = []
     for field in out_layer.pendingFields():
         seen_names.append(unicode(field.name()))
         seen_types.append(unicode(field.typeName()))
     self.assertEqual(['id', 'poly_num_a', 'poly_st_a', 'id_2',
                       'poly_num_b', 'poly_st_b'], seen_names)
     self.assertEqual(['Integer', 'Real', 'String', 'Integer', 'Real',
                       'String'], seen_types)
     out_features = processing.features(out_layer)
     self.assertEqual(8, len(out_features))
     head = out_features.next()
     self.assertEqual(['1', '1.1', 'string a', '2', '1', 'string a'],
                      [unicode(a) for a in head.attributes()])
     expected_wkt = 'POLYGON((270807.08580285 4458940.1594565,270798.42294527 4458914.62661676,270780.81854858 4458914.21983449,270763.52289518 4458920.715993,270760.3449542 4458926.6570575,270763.78234766 4458958.22561242,270794.30290024 4458942.16424502,270807.08580285 4458940.1594565))'
     self.assertEqual(expected_wkt, unicode(head.geometry().exportToWkt()))
Esempio n. 19
0
 def test_modeleroptionalfield(self):
     """modeler:optionalfield run without a field: a single output feature
     with value 'all' is expected."""
     outputs = processing.runalg('modeler:optionalfield', points(), None)
     grouped = dataobjects.getObjectFromUri(outputs['OUTPUT_ALG0'], True)
     grouped_fields = grouped.pendingFields()
     self.assertEqual(['id', 'value', 'area', 'perim'],
                      [unicode(f.name()) for f in grouped_fields])
     self.assertEqual(['Integer', 'String', 'Real', 'Real'],
                      [unicode(f.typeName()) for f in grouped_fields])
     grouped_features = processing.features(grouped)
     self.assertEqual(1, len(grouped_features))
     only = grouped_features.next()
     self.assertEqual(['0', 'all', '3592.818848', '230.989919'],
                      [unicode(attr) for attr in only.attributes()])
     wkt = 'POLYGON((270839.46818665 4458921.97813894,270778.60197966 4458935.96883677,270786.54279065 4458980.04784113,270803.15756434 4458983.84880322,270839.65586926 4458983.16267036,270855.74530134 4458940.79948673,270839.46818665 4458921.97813894))'
     self.assertEqual(wkt, unicode(only.geometry().exportToWkt()))
Esempio n. 20
0
 def test_qgiscountpointsinpolygon(self):
     """Count points per polygon into a new NUMPOINTS attribute."""
     run_outputs = processing.runalg('qgis:countpointsinpolygon', polygons(),
                                     points(), 'NUMPOINTS',
                                     self.getOutputFile())
     result_layer = dataobjects.getObjectFromUri(run_outputs['OUTPUT'], True)
     name_list = []
     type_list = []
     for field in result_layer.pendingFields():
         name_list.append(unicode(field.name()))
         type_list.append(unicode(field.typeName()))
     self.assertEqual(['ID', 'POLY_NUM_A', 'POLY_ST_A', 'NUMPOINTS'],
                      name_list)
     self.assertEqual(['Integer', 'Real', 'String', 'Real'], type_list)
     result_features = processing.features(result_layer)
     self.assertEqual(2, len(result_features))
     head = result_features.next()
     self.assertEqual(['1', '1.1', 'string a', '6'],
                      [unicode(a) for a in head.attributes()])
Esempio n. 21
0
 def test_modeleroptionalfield(self):
     """Optional-field model without a field: one 'all' feature comes out."""
     outputs = processing.runalg("modeler:optionalfield", points(), None)
     merged = dataobjects.getObjectFromUri(outputs["OUTPUT_ALG0"], True)
     merged_fields = merged.pendingFields()
     self.assertEqual(["id", "value", "area", "perim"],
                      [str(f.name()) for f in merged_fields])
     self.assertEqual(["Integer", "String", "Real", "Real"],
                      [str(f.typeName()) for f in merged_fields])
     merged_features = processing.features(merged)
     self.assertEqual(1, len(merged_features))
     only = merged_features.next()
     self.assertEqual(["0", "all", "3592.818848", "230.989919"],
                      [str(attr) for attr in only.attributes()])
     expected_wkt = "POLYGON((270839.46818665 4458921.97813894,270778.60197966 4458935.96883677,270786.54279065 4458980.04784113,270803.15756434 4458983.84880322,270839.65586926 4458983.16267036,270855.74530134 4458940.79948673,270839.46818665 4458921.97813894))"
     self.assertEqual(expected_wkt, str(only.geometry().exportToWkt()))
Esempio n. 22
0
 def test_scriptcreatetilingfromvectorlayer(self):
     """Tiling script output: 10 tiles carrying longitude/latitude."""
     outputs = processing.runalg("script:createtilingfromvectorlayer",
                                 union(), 10, None)
     tiles = dataobjects.getObjectFromUri(outputs['polygons'], True)
     tile_fields = tiles.pendingFields()
     self.assertEqual(['longitude', 'latitude'],
                      [str(f.name()) for f in tile_fields])
     self.assertEqual(['Real', 'Real'],
                      [str(f.typeName()) for f in tile_fields])
     tile_features = processing.features(tiles)
     self.assertEqual(10, len(tile_features))
     first = tile_features.next()
     self.assertEqual(["270761.415396242", "4458948.29588823"],
                      [str(attr) for attr in first.attributes()])
     expected_wkt = 'POLYGON((270755.54427424 4458901.23378639,270755.54427424 4458995.35799007,270767.28651824 4458995.35799007,270767.28651824 4458901.23378639,270755.54427424 4458901.23378639))'
     self.assertEqual(expected_wkt, str(first.geometry().exportToWkt()))
Esempio n. 23
0
 def test_scripthexgridfromlayerbounds(self):
     """Hex-grid script output: 117 cells carrying longitude/latitude."""
     outputs = processing.runalg("script:hexgridfromlayerbounds",
                                 polygons(), 10, None)
     grid = dataobjects.getObjectFromUri(outputs['grid'], True)
     grid_fields = grid.pendingFields()
     self.assertEqual(['longitude', 'latitude'],
                      [str(f.name()) for f in grid_fields])
     self.assertEqual(['Real', 'Real'],
                      [str(f.typeName()) for f in grid_fields])
     grid_features = processing.features(grid)
     self.assertEqual(117, len(grid_features))
     first = grid_features.next()
     self.assertEqual(["270765.621834001", "4458907.27146471"],
                      [str(attr) for attr in first.attributes()])
     expected_wkt = 'POLYGON((270771.39533669 4458907.27146471,270768.50858535 4458902.27146471,270762.73508265 4458902.27146471,270759.84833131 4458907.27146471,270762.73508265 4458912.27146471,270768.50858535 4458912.27146471,270771.39533669 4458907.27146471))'
     self.assertEqual(expected_wkt, str(first.geometry().exportToWkt()))
Esempio n. 24
0
 def test_modelersagagrass(self):
     """SAGA+GRASS model: 12 centroids with a single Real CAT field."""
     outputs = processing.runalg('modeler:sagagrass', points(), None)
     centroids = dataobjects.getObjectFromUri(outputs['CENTROIDS_ALG1'],
                                              True)
     centroid_fields = centroids.pendingFields()
     self.assertEqual(['CAT'],
                      [unicode(f.name()) for f in centroid_fields])
     self.assertEqual(['Real'],
                      [unicode(f.typeName()) for f in centroid_fields])
     centroid_features = processing.features(centroids)
     self.assertEqual(12, len(centroid_features))
     first = centroid_features.next()
     self.assertEqual(['1'],
                      [unicode(attr) for attr in first.attributes()])
     self.assertEqual('POINT(270839.65586926 4458983.16267036)',
                      unicode(first.geometry().exportToWkt()))
Esempio n. 25
0
 def test_modeleremptystring(self):
     """modeler:emptystring output: union fields plus an Integer NewField."""
     outputs = processing.runalg("modeler:emptystring", union(), None)
     merged = dataobjects.getObjectFromUri(outputs["OUTPUT_LAYER_ALG0"],
                                           True)
     merged_fields = merged.pendingFields()
     self.assertEqual(["ID", "POLY_NUM_A", "POLY_ST_A", "ID_2",
                       "POLY_NUM_B", "POLY_ST_B", "NewField"],
                      [str(f.name()) for f in merged_fields])
     self.assertEqual(["Integer", "Real", "String", "Integer", "Real",
                       "String", "Integer"],
                      [str(f.typeName()) for f in merged_fields])
     merged_features = processing.features(merged)
     self.assertEqual(8, len(merged_features))
     first = merged_features.next()
     self.assertEqual(["1", "1.1", "string a", "2", "1", "string a", "10"],
                      [str(attr) for attr in first.attributes()])
     expected_wkt = "POLYGON((270807.08580285 4458940.1594565,270798.42294527 4458914.62661676,270780.81854858 4458914.21983449,270763.52289518 4458920.715993,270760.3449542 4458926.6570575,270763.78234766 4458958.22561242,270794.30290024 4458942.16424502,270807.08580285 4458940.1594565))"
     self.assertEqual(expected_wkt, str(first.geometry().exportToWkt()))
Esempio n. 26
0
 def test_modeleroptionalfield(self):
     """Run modeler:optionalfield and validate its single output feature."""
     result = processing.runalg('modeler:optionalfield', points(), None)
     out_layer = dataobjects.getObjectFromUri(result['OUTPUT_ALG0'], True)
     # Attribute table layout check.
     flds = out_layer.pendingFields()
     self.assertEqual(['id', 'value', 'area', 'perim'],
                      [unicode(fld.name()) for fld in flds])
     self.assertEqual(['Integer', 'String', 'Real', 'Real'],
                      [unicode(fld.typeName()) for fld in flds])
     # Exactly one aggregated polygon is produced.
     feats = processing.features(out_layer)
     self.assertEqual(1, len(feats))
     first = feats.next()
     self.assertEqual(['0', 'all', '3592.818848', '230.989919'],
                      [unicode(a) for a in first.attributes()])
     expected_wkt = 'POLYGON((270839.46818665 4458921.97813894,270778.60197966 4458935.96883677,270786.54279065 4458980.04784113,270803.15756434 4458983.84880322,270839.65586926 4458983.16267036,270855.74530134 4458940.79948673,270839.46818665 4458921.97813894))'
     self.assertEqual(expected_wkt, unicode(first.geometry().exportToWkt()))
Esempio n. 27
0
 def test_modelersagagrass(self):
     """SAGA+GRASS model: verify the centroids layer it produces."""
     res = processing.runalg("modeler:sagagrass", points(), None)
     centroid_layer = dataobjects.getObjectFromUri(res["CENTROIDS_ALG1"], True)
     # Single real-valued category field is expected.
     flds = centroid_layer.pendingFields()
     self.assertEqual(["CAT"], [str(fld.name()) for fld in flds])
     self.assertEqual(["Real"], [str(fld.typeName()) for fld in flds])
     # Twelve centroid points; check the first one's attribute and geometry.
     feats = processing.features(centroid_layer)
     self.assertEqual(12, len(feats))
     head = feats.next()
     self.assertEqual(["1"], [str(a) for a in head.attributes()])
     self.assertEqual("POINT(270839.65586926 4458983.16267036)",
                      str(head.geometry().exportToWkt()))
Esempio n. 28
0
 def testWrongformat(self):
     """An unknown output extension must fall back to a shapefile."""
     outputs = processing.runalg('qgis:countpointsinpolygon', polygons(),
                                 points(), 'NUMPOINTS',
                                 getTempFilename('wrongext'))
     out_path = outputs['OUTPUT']
     # The framework silently rewrites the bogus extension to .shp.
     self.assertTrue(out_path.endswith('shp'))
     out_layer = dataobjects.getObjectFromUri(out_path, True)
     flds = out_layer.pendingFields()
     self.assertEqual(['ID', 'POLY_NUM_A', 'POLY_ST_A', 'NUMPOINTS'],
                      [unicode(fld.name()) for fld in flds])
     self.assertEqual(['Integer', 'Real', 'String', 'Real'],
                      [unicode(fld.typeName()) for fld in flds])
     feats = processing.features(out_layer)
     self.assertEqual(2, len(feats))
     first = feats.next()
     self.assertEqual(['1', '1.1', 'string a', '6.0'],
                      [unicode(a) for a in first.attributes()])
Esempio n. 29
0
 def test_scriptcreatetilingfromvectorlayer(self):
     """Tile the union layer and validate the first generated tile."""
     results = processing.runalg('script:createtilingfromvectorlayer',
                                 union(), 10, None)
     tiles = dataobjects.getObjectFromUri(results['polygons'], True)
     # Each tile carries its centre coordinates as real-valued fields.
     flds = tiles.pendingFields()
     self.assertEqual(['longitude', 'latitude'],
                      [str(fld.name()) for fld in flds])
     self.assertEqual(['Real', 'Real'],
                      [str(fld.typeName()) for fld in flds])
     feats = processing.features(tiles)
     self.assertEqual(10, len(feats))
     first = feats.next()
     self.assertEqual(['270761.415396242', '4458948.29588823'],
                      [str(a) for a in first.attributes()])
     expected_wkt = 'POLYGON((270755.54427424 4458901.23378639,270755.54427424 4458995.35799007,270767.28651824 4458995.35799007,270767.28651824 4458901.23378639,270755.54427424 4458901.23378639))'
     self.assertEqual(expected_wkt, str(first.geometry().exportToWkt()))
Esempio n. 30
0
 def test_scripthexgridfromlayerbounds(self):
     """Build a hex grid over the polygon layer bounds and check it."""
     results = processing.runalg('script:hexgridfromlayerbounds',
                                 polygons(), 10, None)
     grid = dataobjects.getObjectFromUri(results['grid'], True)
     # Each cell stores its centre coordinates as real-valued fields.
     grid_fields = grid.pendingFields()
     self.assertEqual(['longitude', 'latitude'],
                      [str(fld.name()) for fld in grid_fields])
     self.assertEqual(['Real', 'Real'],
                      [str(fld.typeName()) for fld in grid_fields])
     cells = processing.features(grid)
     self.assertEqual(117, len(cells))
     first = cells.next()
     self.assertEqual(['270765.621834001', '4458907.27146471'],
                      [str(a) for a in first.attributes()])
     expected_wkt = 'POLYGON((270771.39533669 4458907.27146471,270768.50858535 4458902.27146471,270762.73508265 4458902.27146471,270759.84833131 4458907.27146471,270762.73508265 4458912.27146471,270768.50858535 4458912.27146471,270771.39533669 4458907.27146471))'
     self.assertEqual(expected_wkt, str(first.geometry().exportToWkt()))
Esempio n. 31
0
 def testWrongformat(self):
     """A non-supported output extension is rewritten to a .shp output."""
     res = processing.runalg('qgis:countpointsinpolygon', polygons(),
                             points(), 'NUMPOINTS',
                             getTempFilename('wrongext'))
     shp_path = res['OUTPUT']
     self.assertTrue(shp_path.endswith('shp'))
     counted = dataobjects.getObjectFromUri(shp_path, True)
     # Input polygon attributes plus the computed NUMPOINTS field.
     field_list = counted.pendingFields()
     self.assertEqual(['ID', 'POLY_NUM_A', 'POLY_ST_A', 'NUMPOINTS'],
                      [str(fld.name()) for fld in field_list])
     self.assertEqual(['Integer', 'Real', 'String', 'Real'],
                      [str(fld.typeName()) for fld in field_list])
     feats = processing.features(counted)
     self.assertEqual(2, len(feats))
     first = feats.next()
     self.assertEqual(['1', '1.1', 'string a', '6.0'],
                      [str(a) for a in first.attributes()])
	units = "?"

# Make a list to stage feature IDs, their vertex lists, and output elevations
# Each part within a multipart feature will be considered a separate geometry,
# for the purposes of exporting to Collada)
outputFeatures = []

# Count any errors when getting the feature elevation values
feat_elev_error_counter = 0

# Count the number of features;
# we will use this as part of the feature ID for each geometry instance
feat_counter = 0

progress.setText("Scanning over input features...")
for feat in processing.features(linework):
	feat_counter += 1
	
	geom_col = feat.geometry().asGeometryCollection()
	# The geom_col will be a list containing each "part" of the geometry
	# (if the geometry is single-part, then we expect len(geom_col) == 1)
	# We want to handle both polyline and polygon geometries
	
	part_counter = 0
	for geom_part in geom_col:
		part_counter += 1
		
		if len(geom_part.asPolyline()) > 0:
			# This part is a polyline
			vertexlist = []
			geom_vertexlist = geom_part.asPolyline()
Esempio n. 33
0
                        edge1.increase_weight(edge2.get_weight())
                        edge2.increase_weight(edge1.get_weight()) 
        return ids_to_delete


# Timestamp the run start so the console message shows when collapsing began.
t_start = datetime.now()
print '{0}: Collapsing lines'.format(t_start)

# Resolve the script input to a vector layer.
# NOTE(review): input_layer, collapsed_lines, weight_field and cluster_field
# are presumably Processing script parameters declared above this view —
# confirm against the full script header.
layer = processing.getObject(input_layer)
crs = layer.crs()
provider = layer.dataProvider()
# Output schema: all input fields plus a segment id and a merged-line count.
fields = provider.fields()
fields.append(QgsField('SEG_ID',QVariant.Int))
fields.append(QgsField('MERGED_N', QVariant.Double))
writer = processing.VectorWriter(collapsed_lines, None, fields, QGis.WKBLineString, crs)
# Materialise the feature iterator into a list so it can be traversed
# more than once (it is looped again below).
features = list(processing.features(layer))
weight_index = provider.fieldNameIndex(weight_field)

# get all labels from the input features 
labels = []
for feat in features:
    labels.append(int(feat[cluster_field]))
    
# one cluster per label
# (cluster labels are assumed to be 0..max(labels) — gaps yield empty clusters)
clusters = []
for l in range(0,max(labels)+1):
    clusters.append(list())

# populate clusters
# Scratch in-memory layer that will hold the individual line segments.
vl = QgsVectorLayer("LineString", "line_segments", "memory")
pr = vl.dataProvider()
 def calculateFields(self, listKeep, listStats, output):
     """Compute per-dissolve-group statistics for every kept field.

     listKeep -- per-field checkbox states (2 means the field is kept)
     listStats -- per-field statistic names ('Mean', 'Sum', 'Min', ...)
     output -- path/uri of the dissolved output layer

     Returns a list containing, for each kept field, a list of aggregated
     values ordered like the unique dissolve-field values of the output
     layer; empty groups yield NULL.
     """
     # get the layer currently selected in the combo box
     index = self.ui.comboLayerList.currentIndex()
     legendInterface = self.iface.legendInterface()
     listLayers = [layer for layer in legendInterface.layers() if layer.type() == QgsMapLayer.VectorLayer]
     selectedLayer = listLayers[index]
     # iterate over layer features to get attributes as a list of lists;
     # the processing wrapper honours the "selected features only" option
     # (renamed from 'iter' to avoid shadowing the builtin)
     featureIter = processing.features(selectedLayer)
     attrs = [feature.attributes() for feature in featureIter]
     # all values of the dissolve field (before processing: with duplicates)
     indexDissolveField = self.ui.comboFieldList.currentIndex()
     valuesDissolveField = [feature[indexDissolveField] for feature in attrs]
     # unique dissolve-field values, read from the dissolved output layer
     # (more reliable than deduplicating valuesDissolveField ourselves)
     outputLayer = QgsVectorLayer(output, "name", "ogr")
     provider = outputLayer.dataProvider()
     fields = provider.fields()
     listFieldNames = [field.name() for field in fields]
     uniqueValuesDissolveField = [feature.attributes()[indexDissolveField]
                                  for feature in outputLayer.getFeatures()]
     # results: one list of aggregated values per kept field
     listRes = []
     # the dissolve field itself, if kept, always uses the 'First' stat
     if listKeep[indexDissolveField] == 2:
         listStats[indexDissolveField] = 'First'
     # for each kept field, bucket values by dissolve group and aggregate
     for i in range(len(listFieldNames)):
         if listKeep[i] == 2:
             # one bucket of raw values per unique dissolve-field value
             listAttrs = [[] for val in range(len(uniqueValuesDissolveField))]
             valuesField = [feature[i] for feature in attrs]
             for (x, y) in zip(valuesDissolveField, valuesField):
                 listAttrs[uniqueValuesDissolveField.index(x)].append(y)
             # drop NULL values; NOTE(review): the truthiness test also
             # drops 0 / 0.0 / '' — confirm this is intended for numeric
             # and text fields
             listAttrs = [[x for x in l if x] for l in listAttrs]
             # aggregate each bucket according to the chosen statistic;
             # a bucket left empty (originally all NULLs) yields NULL
             if listStats[i] == "Mean":
                 # float() avoids Python 2 integer division truncating
                 # the mean of integer fields
                 listAttrs = [sum(y) / float(len(y)) if y else NULL for y in listAttrs]
             elif listStats[i] == "Sum":
                 listAttrs = [sum(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Min":
                 listAttrs = [min(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Max":
                 listAttrs = [max(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Count":
                 listAttrs = [len(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "First":
                 listAttrs = [y[0] if y else NULL for y in listAttrs]
             elif listStats[i] == "Last":
                 listAttrs = [y[-1] if y else NULL for y in listAttrs]
             elif listStats[i] == "Median":
                 listAttrs = [self.median(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Standard deviation":
                 listAttrs = [self.standard_dev(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Concatenation":
                 listAttrs = [", ".join(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Uniquification":
                 listAttrs = [", ".join(set(y)) if y else NULL for y in listAttrs]
             # append each field's aggregated values to the result
             listRes.append(listAttrs)
     return listRes
Esempio n. 35
0
    def run(self):
        """Run method that performs all the real work"""

        # show the dialog
        self.dlg.show()
        # Run the dialog event loop
        result = self.dlg.exec_()
        # See if OK was pressed
        if result:

            # CARREGAR VALORES DOS PARAMETROS:
            #PARAMETRO 1
            ListaVarIndep = []
            ListaLayerName = []
            NrLinhasTabela = self.dlg.tableWidget.rowCount()
            for Linhas in range(NrLinhasTabela):
                VarIndepPath = self.dlg.tableWidget.item(Linhas, 0).text()
                VarIndepLayerName = self.dlg.tableWidget.item(Linhas, 1).text()
                ListaVarIndep.append(VarIndepPath)
                ListaLayerName.append(VarIndepLayerName)

        #PARAMETRO 2
            VarDep = self.dlg.lineEdit_2.text()
            VarDepDisplayName = self.dlg.lineEdit_4.text()

            #PARAMETRO 3
            InputOutputFolder = self.dlg.lineEdit_3.text()

            #PARAMETRO 4
            RasterValidacao = self.dlg.lineEdit_5.text()
            ValidacaoDisplayName = self.dlg.lineEdit_6.text()

            # INICIO DOS PROCESSOS:
            # CRIAR PASTA OUTPUT
            PastaOutput = os.path.join(InputOutputFolder, "Output")
            if not os.path.exists(PastaOutput):
                os.makedirs(PastaOutput)
            else:
                for NrPastas in range(1, 10):
                    sufixo = "_" + str(NrPastas)
                    PastaOutput = os.path.join(InputOutputFolder,
                                               "Output" + sufixo)
                    if not os.path.exists(PastaOutput):
                        os.makedirs(PastaOutput)
                        break

        # CRIAR SUBPASTA TABELAS
            PastaTabelas = os.path.join(PastaOutput, "Tabelas")
            os.makedirs(PastaTabelas)

            # CARREGAR VARIAVEL DEPENDENTE E ADICIONAR LAYER AO QGIS
            LoadVarDep = QgsRasterLayer(VarDep, VarDepDisplayName)

            ListaVarIndepVI = []

            # PROPRIEDADES DOS FICHEIROS DE INPUT
            for VarIndep, VarIndepLayerName in zip(ListaVarIndep,
                                                   ListaLayerName):

                # CARREGAR VARIAVEL INDEPENDENTE E ADICIONAR LAYER AO QGIS
                LoadVarIndep = QgsRasterLayer(VarIndep, VarIndepLayerName)
                AddVarIndep = QgsMapLayerRegistry.instance().addMapLayer(
                    LoadVarIndep)

                # DEFINIR EXTENSAO
                ext = AddVarIndep.extent()
                xmin = ext.xMinimum()
                xmax = ext.xMaximum()
                ymin = ext.yMinimum()
                ymax = ext.yMaximum()
                Mask = "%f,%f,%f,%f" % (xmin, xmax, ymin, ymax)

                # DEFINIR CELL SIZE
                PixelSizeX = LoadVarIndep.rasterUnitsPerPixelX()
                PixelSizeY = LoadVarIndep.rasterUnitsPerPixelY()
                CellSize = PixelSizeX * PixelSizeY

                # CRIAR REPORT E CALCULAR VALORES UNICOS
                CountUniqueValues = os.path.join(
                    PastaTabelas, VarIndepLayerName + "_CountUniqueValues.txt")
                processing.runalg("grass7:r.report", VarIndep, 5, "*", 255,
                                  True, True, True, True, Mask, None,
                                  CountUniqueValues)

                ReportReadLines = open(CountUniqueValues).readlines()
                ReportSelectLines = ReportReadLines[4:-4]
                UniqueValues = len(ReportSelectLines)

                # DEFINIR CAMINHO DO OUTPUT E EXECUTAR R.COIN
                RCoinFile = os.path.join(
                    PastaTabelas, VarIndepLayerName + "_x_" +
                    VarDepDisplayName + "_Original.txt")
                processing.runalg("grass7:r.coin", VarIndep, VarDep, 0, False,
                                  Mask, None, RCoinFile)

                # LER RCOINFILE E SELECIONAR AS LINHAS COM INFORMACAO UTIL
                ReadLines = open(RCoinFile).readlines()
                SelectLines = ReadLines[22:UniqueValues + 22]

                # FORMATAR DADOS PARA IMPORTACAO EM CSV
                ListaValores = []
                for row in SelectLines:
                    RemoverEspacos = re.sub(' +', ' ', row)
                    SubstituirEspacos = RemoverEspacos.replace(' ', ';')
                    SepararPontoVirgula = SubstituirEspacos.split(";")
                    SelecionarColunas = itemgetter(1, 3, 5,
                                                   7)(SepararPontoVirgula)
                    JuntarColunas = ';'.join(SelecionarColunas)
                    ListaValores.append(JuntarColunas)

                if UniqueValues <= 2:
                    JuntarLinhas = ';'.join(ListaValores)
                    SepararValores = JuntarLinhas.split(";")
                    ConversaoInteiros = map(int, SepararValores)
                    Linha0 = "V;V0;V1;T\n"
                    Linha1 = str(ConversaoInteiros[0] + 1) + ";" + str(
                        ConversaoInteiros[1]) + ";" + str(
                            ConversaoInteiros[5]) + ";" + str(
                                ConversaoInteiros[1] +
                                ConversaoInteiros[5]) + "\n"
                    Linha2 = str(ConversaoInteiros[4] + 1) + ";" + str(
                        ConversaoInteiros[2]) + ";" + str(
                            ConversaoInteiros[6]) + ";" + str(
                                ConversaoInteiros[2] + ConversaoInteiros[6])
                    ValoresImportar = [Linha0, Linha1, Linha2]
                else:
                    ListaValores.insert(0, 'V;V0;V1;T')
                    ValoresImportar = '\n'.join(ListaValores)

            # ESCREVER DADOS FORMATADOS NUM NOVO FICHEIRO TXT
                RCoinTemp = os.path.join(
                    PastaTabelas, VarIndepLayerName + "_x_" +
                    VarDepDisplayName + "_Tratado.txt")
                open(RCoinTemp, 'wb').writelines(ValoresImportar)

                # IMPORTAR PARA FICHEIRO CSV
                TabulateAreaCSV = os.path.join(
                    PastaTabelas,
                    VarIndepLayerName + "_x_" + VarDepDisplayName + ".csv")
                csv.writer(open(TabulateAreaCSV, 'wb')).writerows(
                    csv.reader(open(RCoinTemp, 'rb')))

                # EXPORTAR PARA DBF
                LoadTabulateAreaCSV = QgsVectorLayer(
                    TabulateAreaCSV,
                    VarIndepLayerName + "_x_" + VarDepDisplayName, "ogr")
                DbfTablePath = os.path.join(
                    PastaTabelas,
                    VarIndepLayerName + "_x_" + VarDepDisplayName)
                QgsVectorFileWriter.writeAsVectorFormat(
                    LoadTabulateAreaCSV, DbfTablePath, "System", None,
                    "ESRI Shapefile")
                os.remove(DbfTablePath + ".prj")
                os.remove(DbfTablePath + ".qpj")

                # CARREGAR TABELA DBF PARA o QGIS
                DbfTable = QgsVectorLayer(
                    DbfTablePath + ".dbf",
                    VarIndepLayerName + "_x_" + VarDepDisplayName + ".dbf",
                    "ogr")
                AddDbfTable = QgsMapLayerRegistry.instance().addMapLayer(
                    DbfTable)

                # OBTER INDEXs DOS CAMPOS EXISTENTES
                IndexCampoV = DbfTable.fieldNameIndex("V")
                IndexCampoV0 = DbfTable.fieldNameIndex("V0")
                IndexCampoV1 = DbfTable.fieldNameIndex("V1")
                IndexCampoT = DbfTable.fieldNameIndex("T")

                # CRIAR CAMPOS A CALCULAR
                CampoVALUE = DbfTable.dataProvider().addAttributes(
                    [QgsField("VALUE", QVariant.Int)])
                CampoVALUE_0 = DbfTable.dataProvider().addAttributes(
                    [QgsField("VALUE_0", QVariant.Int)])
                CampoVALUE_1 = DbfTable.dataProvider().addAttributes(
                    [QgsField("VALUE_1", QVariant.Int)])
                CampoARCLASSE = DbfTable.dataProvider().addAttributes(
                    [QgsField("ARCLASSE", QVariant.Int)])
                CampoPROBCOND = DbfTable.dataProvider().addAttributes(
                    [QgsField("PROBCOND", QVariant.Double)])
                CampoSUM_VALUE0 = DbfTable.dataProvider().addAttributes(
                    [QgsField("SUM_VALUE0", QVariant.Int)])
                CampoSUM_VALUE1 = DbfTable.dataProvider().addAttributes(
                    [QgsField("SUM_VALUE1", QVariant.Int)])
                CampoAR_TOTAL = DbfTable.dataProvider().addAttributes(
                    [QgsField("AR_TOTAL", QVariant.Int)])
                CampoPRIORI = DbfTable.dataProvider().addAttributes(
                    [QgsField("PRIORI", QVariant.Double)])
                CampoSINI_SN = DbfTable.dataProvider().addAttributes(
                    [QgsField("SINI_SN", QVariant.Double)])
                CampoVI = DbfTable.dataProvider().addAttributes(
                    [QgsField("VI", QVariant.Double)])
                DbfTable.updateFields()

                # OBTER INDEXs DOS CAMPOS CRIADOS
                IndexCampoVALUE = DbfTable.fieldNameIndex("VALUE")
                IndexCampoVALUE_0 = DbfTable.fieldNameIndex("VALUE_0")
                IndexCampoVALUE_1 = DbfTable.fieldNameIndex("VALUE_1")
                IndexCampoARCLASSE = DbfTable.fieldNameIndex("ARCLASSE")
                IndexCampoPROBCOND = DbfTable.fieldNameIndex("PROBCOND")
                IndexCampoSUM_VALUE0 = DbfTable.fieldNameIndex("SUM_VALUE0")
                IndexCampoSUM_VALUE1 = DbfTable.fieldNameIndex("SUM_VALUE1")
                IndexCampoAR_TOTAL = DbfTable.fieldNameIndex("AR_TOTAL")
                IndexCampoPRIORI = DbfTable.fieldNameIndex("PRIORI")
                IndexCampoSINI_SN = DbfTable.fieldNameIndex("SINI_SN")
                IndexCampoVI = DbfTable.fieldNameIndex("VI")

                # COPIAR VALORES PARA OS CAMPOS BASE
                DbfTable.startEditing()
                for Valores in processing.features(DbfTable):
                    DbfTable.changeAttributeValue(Valores.id(),
                                                  IndexCampoVALUE,
                                                  Valores[IndexCampoV])
                    DbfTable.changeAttributeValue(
                        Valores.id(), IndexCampoVALUE_0,
                        int(Valores[IndexCampoV0]) * CellSize)
                    DbfTable.changeAttributeValue(
                        Valores.id(), IndexCampoVALUE_1,
                        int(Valores[IndexCampoV1]) * CellSize)
                    DbfTable.changeAttributeValue(
                        Valores.id(), IndexCampoARCLASSE,
                        int(Valores[IndexCampoT]) * CellSize)
                DbfTable.commitChanges()
                DbfTable.updateFields()

                ListaVALUE_0 = []
                ListaVALUE_1 = []
                DbfTable.startEditing()
                for Valores in processing.features(DbfTable):
                    DbfTable.changeAttributeValue(
                        Valores.id(), IndexCampoPROBCOND,
                        float(Valores[IndexCampoVALUE_1]) /
                        float(Valores[IndexCampoARCLASSE]))
                    ListaVALUE_0.append(int(Valores[IndexCampoVALUE_0]))
                    ListaVALUE_1.append(int(Valores[IndexCampoVALUE_1]))
                DbfTable.commitChanges()
                DbfTable.updateFields()

                # CALCULAR CAMPOS 'SUM_VALUE0' e 'SUM_VALUE1'
                SomaVALUE_0 = sum(ListaVALUE_0)
                SomaVALUE_1 = sum(ListaVALUE_1)
                DbfTable.startEditing()
                for Valores in processing.features(DbfTable):
                    DbfTable.changeAttributeValue(Valores.id(),
                                                  IndexCampoSUM_VALUE0,
                                                  SomaVALUE_0)
                    DbfTable.changeAttributeValue(Valores.id(),
                                                  IndexCampoSUM_VALUE1,
                                                  SomaVALUE_1)
                DbfTable.commitChanges()
                DbfTable.updateFields()

                # CALCULAR CAMPO 'AR_TOTAL'
                DbfTable.startEditing()
                [
                    DbfTable.changeAttributeValue(
                        Valores.id(), IndexCampoAR_TOTAL,
                        float(Valores[IndexCampoSUM_VALUE0]) +
                        float(Valores[IndexCampoSUM_VALUE1]))
                    for Valores in processing.features(DbfTable)
                ]
                DbfTable.commitChanges()
                DbfTable.updateFields()

                # CALCULAR CAMPO 'PRIORI'
                DbfTable.startEditing()
                [
                    DbfTable.changeAttributeValue(
                        Valores.id(), IndexCampoPRIORI,
                        float(Valores[IndexCampoSUM_VALUE1]) /
                        float(Valores[IndexCampoAR_TOTAL]))
                    for Valores in processing.features(DbfTable)
                ]
                DbfTable.commitChanges()
                DbfTable.updateFields()

                # CALCULAR CAMPO 'SINI_SN'
                DbfTable.startEditing()
                [
                    DbfTable.changeAttributeValue(
                        Valores.id(), IndexCampoSINI_SN,
                        float(Valores[IndexCampoPROBCOND]) /
                        float(Valores[IndexCampoPRIORI]))
                    for Valores in processing.features(DbfTable)
                ]
                DbfTable.commitChanges()
                DbfTable.updateFields()

                # CALCULAR CAMPO 'VI'
                DbfTable.startEditing()
                ListaVI_Min = []
                for Valores in processing.features(DbfTable):
                    if float(Valores[IndexCampoSINI_SN]) > 0:
                        DbfTable.changeAttributeValue(
                            Valores.id(), IndexCampoVI,
                            math.log(float(Valores[IndexCampoSINI_SN])))
                        ListaVI_Min.append(
                            math.log(float(Valores[IndexCampoSINI_SN])))
                        ListaVI_Min.sort()
                        VI_MIN = (ListaVI_Min[0])
                for Valores in processing.features(DbfTable):
                    if float(Valores[IndexCampoSINI_SN]) == 0:
                        DbfTable.changeAttributeValue(Valores.id(),
                                                      IndexCampoVI,
                                                      float(VI_MIN))
                DbfTable.commitChanges()
                DbfTable.updateFields()

                # CRIAR EXPRESSAO E FICHEIRO TXT PARA RECLASSIFICACAO COM VALORES DE VI
                ListaReclass = []
                for Valores in processing.features(DbfTable):
                    ListaReclass.append(
                        str(Valores[IndexCampoVALUE]) + "=" +
                        str(int(round(Valores[IndexCampoVI], 9) * (10**8))))
                ExpressaoReclass = '\n'.join(ListaReclass)

                ReclassVITxt = os.path.join(
                    PastaTabelas, VarIndepLayerName + "_ReclassVI.txt")
                open(ReclassVITxt, 'wb').writelines(ExpressaoReclass)

                # RECLASSIFICACAO DAS VARIAVEIS INDEPENDENTES COM VALORES DE VI
                VarIndepVI = os.path.join(PastaOutput,
                                          VarIndepLayerName + "_VI.tif")
                processing.runalg("grass7:r.reclass", VarIndep, ReclassVITxt,
                                  Mask, 0, VarIndepVI)
                ListaVarIndepVI.append(VarIndepVI)

                # APAGAR CAMPOS INICIAIS PROVENIENTES DO CSV
                DbfTable.dataProvider().deleteAttributes(
                    [IndexCampoV, IndexCampoV0, IndexCampoV1, IndexCampoT])
                DbfTable.updateFields()

                # REMOVER VARIAVEIS INDEPENDENTES DO QGIS
                QgsMapLayerRegistry.instance().removeMapLayers(
                    [AddVarIndep.id()])

        # SOMAR RASTERS DAS VARIAVEIS INDEPENDENTES NO RASTER CALCULATOR PARA OBTER O MAPA VI FINAL
            EntriesVIRaster = []
            ListaVIRasterRef = []
            for Index, VarIndepVI, VarIndepLayerName in zip(
                    range(0, len(ListaVarIndepVI)), ListaVarIndepVI,
                    ListaLayerName):
                LoadVarIndepVI = QgsRasterLayer(VarIndepVI,
                                                VarIndepLayerName + "_VI")
                AddVarIndepVI = QgsMapLayerRegistry.instance().addMapLayer(
                    LoadVarIndepVI)
                VIRasterObject = processing.getObject(ListaVarIndepVI[Index])
                VIRaster = QgsRasterCalculatorEntry()
                VIRaster.raster = VIRasterObject
                VIRaster.ref = str(VarIndepLayerName + '_VI@1')
                VIRaster.bandNumber = 1
                EntriesVIRaster.append(VIRaster)
                ListaVIRasterRef.append(VIRaster.ref)

            ExpressaoCalculateVI = "(" + " + ".join(ListaVIRasterRef) + ")"
            VI = os.path.join(PastaOutput, "VI.tif")
            CalculateVI = QgsRasterCalculator(ExpressaoCalculateVI, VI,
                                              'GTiff', VIRasterObject.extent(),
                                              VIRasterObject.width(),
                                              VIRasterObject.height(),
                                              EntriesVIRaster)
            CalculateVI.processCalculation()

            # ADICIONAR RASTER DO VALOR INFORMATIVO AO QGIS
            LoadVI = QgsRasterLayer(VI, "VI")
            AddVI = QgsMapLayerRegistry.instance().addMapLayer(LoadVI)

            ####VALIDACAO:####

            # CONVERTER RASTER DO VI PARA VALORES INTEIROS
            VIint = os.path.join(PastaOutput, "VIint.tif")
            processing.runalg("gdalogr:rastercalculator", VI, "1", None, "1",
                              None, "1", None, "1", None, "1", None, "1",
                              "rint(A)", "", 4, "", VIint)

            # CRIAR REPORT E CALCULAR VALORES UNICOS DE VI
            VI_CountUniqueValues = os.path.join(PastaTabelas,
                                                "VI_CountUniqueValues.txt")
            processing.runalg("grass7:r.report", VIint, 5, "*", 255, True,
                              True, True, True, Mask, None,
                              VI_CountUniqueValues)

            VI_ReportReadLines = open(VI_CountUniqueValues).readlines()
            VI_ReportSelectLines = VI_ReportReadLines[4:-4]
            VI_UniqueValues = len(VI_ReportSelectLines)

            # DEFINIR CAMINHO DO OUTPUT E EXECUTAR R.COIN DE VALIDACAO
            VI_RCoin = os.path.join(
                PastaTabelas, "VI_x_" + ValidacaoDisplayName + "_Original.txt")
            processing.runalg("grass7:r.coin", VIint, RasterValidacao, 0,
                              False, Mask, None, VI_RCoin)

            # LER VI_RCOIN E SELECIONAR AS LINHAS COM INFORMACAO UTIL
            ValidacaoReadLines = open(VI_RCoin).readlines()
            ValidacaoSelectLines = ValidacaoReadLines[22:VI_UniqueValues + 22]

            # FORMATAR DADOS PARA IMPORTACAO EM CSV
            ValidacaoListaValores = []
            for row in ValidacaoSelectLines:
                RemoverEspacos = re.sub(' +', ' ', row)
                SubstituirEspacos = RemoverEspacos.replace(' ', ';')
                SepararPontoVirgula = SubstituirEspacos.split(";")
                SelecionarColunas = itemgetter(1, 5, 7)(SepararPontoVirgula)
                ConversaoInteiros = map(int, SelecionarColunas)
                ValidacaoListaValores.append(ConversaoInteiros)
            ValidacaoListaValores = sorted(ValidacaoListaValores, reverse=True)

            ListaOrdenada = []
            for row in ValidacaoListaValores:
                SubstituirEspacos = str(row).replace(', ', ';')
                RemoverParentese1 = SubstituirEspacos.replace('[', '')
                RemoverParentese2 = RemoverParentese1.replace(']', '')
                ListaOrdenada.append(RemoverParentese2)
            ListaOrdenada.insert(0, 'V;V1;T')
            ValidacaoValoresImportar = '\n'.join(ListaOrdenada)

            # ESCREVER DADOS FORMATADOS NUM NOVO FICHEIRO TXT
            VI_RCoinTemp = os.path.join(
                PastaTabelas, "VI_x_" + ValidacaoDisplayName + "_Tratado.txt")
            open(VI_RCoinTemp, 'wb').writelines(ValidacaoValoresImportar)

            # IMPORTAR PARA FICHEIRO CSV
            TS_CSV = os.path.join(PastaTabelas,
                                  "VI_x_" + ValidacaoDisplayName + ".csv")
            csv.writer(open(TS_CSV, 'wb')).writerows(
                csv.reader(open(VI_RCoinTemp, 'rb')))

            # EXPORTAR PARA DBF
            LoadTSCSV = QgsVectorLayer(TS_CSV, "TS", "ogr")
            DbfTSPath = os.path.join(PastaTabelas, "TS")
            QgsVectorFileWriter.writeAsVectorFormat(LoadTSCSV, DbfTSPath,
                                                    "System", None,
                                                    "ESRI Shapefile")
            os.remove(DbfTSPath + ".prj")
            os.remove(DbfTSPath + ".qpj")

            # CARREGAR TABELA DBF PARA o QGIS
            DbfTS = QgsVectorLayer(DbfTSPath + ".dbf", "TS.dbf", "ogr")
            AddDbfTS = QgsMapLayerRegistry.instance().addMapLayer(DbfTS)

            # OBTER INDEXs DOS CAMPOS EXISTENTES
            TS_IndexCampoV = DbfTS.fieldNameIndex("V")
            TS_IndexCampoV1 = DbfTS.fieldNameIndex("V1")
            TS_IndexCampoT = DbfTS.fieldNameIndex("T")

            # CRIAR CAMPOS A CALCULAR
            TS_CampoVI = DbfTS.dataProvider().addAttributes(
                [QgsField("VI", QVariant.Double)])
            TS_CampoARESTUDO = DbfTS.dataProvider().addAttributes(
                [QgsField("ARESTUDO", QVariant.Int)])
            TS_CampoARFENOM = DbfTS.dataProvider().addAttributes(
                [QgsField("ARFENOM", QVariant.Int)])
            TS_CampoArEstudAc = DbfTS.dataProvider().addAttributes(
                [QgsField("ArEstudAc", QVariant.Double)])
            TS_CampoArFenomAc = DbfTS.dataProvider().addAttributes(
                [QgsField("ArFenomAc", QVariant.Double)])
            TS_CampoLsi_Li = DbfTS.dataProvider().addAttributes(
                [QgsField("Lsi_Li", QVariant.Double)])
            TS_Campoai_b1_2 = DbfTS.dataProvider().addAttributes(
                [QgsField("ai_b1_2", QVariant.Double)])
            TS_CampoACC = DbfTS.dataProvider().addAttributes(
                [QgsField("ACC", QVariant.Double)])
            DbfTS.updateFields()

            # OBTER INDEXs DOS CAMPOS CRIADOS
            TS_IndexCampoVI = DbfTS.fieldNameIndex("VI")
            TS_IndexCampoARESTUDO = DbfTS.fieldNameIndex("ARESTUDO")
            TS_IndexCampoARFENOM = DbfTS.fieldNameIndex("ARFENOM")
            TS_IndexCampoArEstudAc = DbfTS.fieldNameIndex("ArEstudAc")
            TS_IndexCampoArFenomAc = DbfTS.fieldNameIndex("ArFenomAc")
            TS_IndexCampoLsi_Li = DbfTS.fieldNameIndex("Lsi_Li")
            TS_IndexCampoai_b1_2 = DbfTS.fieldNameIndex("ai_b1_2")
            TS_IndexCampoACC = DbfTS.fieldNameIndex("ACC")

            # COPIAR VALORES PARA OS CAMPOS BASE
            DbfTS.startEditing()
            for Valores in processing.features(DbfTS):
                DbfTS.changeAttributeValue(
                    Valores.id(), TS_IndexCampoVI,
                    float(Valores[TS_IndexCampoV]) / float(10**8))
                DbfTS.changeAttributeValue(
                    Valores.id(), TS_IndexCampoARESTUDO,
                    int(Valores[TS_IndexCampoT]) * CellSize)
                DbfTS.changeAttributeValue(
                    Valores.id(), TS_IndexCampoARFENOM,
                    int(Valores[TS_IndexCampoV1]) * CellSize)
            DbfTS.commitChanges()
            DbfTS.updateFields()

            # CPRIAR LISTAS DE VALORES PARA AS SOMAS ACUMULADAS
            ListaARESTUDO = []
            ListaARFENOM = []
            for Valores in processing.features(DbfTS):
                ListaARESTUDO.append(int(Valores[TS_IndexCampoARESTUDO]))
                ListaARFENOM.append(int(Valores[TS_IndexCampoARFENOM]))

        # CALCULAR CAMPOS 'ArEstudAc', 'ArFenomAc'
            SomaARESTUDO = sum(ListaARESTUDO)
            SomaARFENOM = sum(ListaARFENOM)
            DbfTS.startEditing()
            for Valores, SomaAcARESTUDO, SomaAcARFENOM in zip(
                    processing.features(DbfTS), numpy.cumsum(ListaARESTUDO),
                    numpy.cumsum(ListaARFENOM)):
                if Valores.id() == 0:
                    DbfTS.changeAttributeValue(Valores.id(),
                                               TS_IndexCampoArFenomAc, 0)
                    DbfTS.changeAttributeValue(Valores.id(),
                                               TS_IndexCampoArEstudAc, 0)
                else:
                    DbfTS.changeAttributeValue(
                        Valores.id(), TS_IndexCampoArEstudAc,
                        float(SomaAcARESTUDO) / float(SomaARESTUDO))
                    DbfTS.changeAttributeValue(
                        Valores.id(), TS_IndexCampoArFenomAc,
                        float(SomaAcARFENOM) / float(SomaARFENOM))
            DbfTS.commitChanges()

            # CALCULAR CAMPOS 'Lsi_Li', 'ai_b1_2'
            ListaArEstudAc = []
            ListaArFenomAc = []
            for Valores in processing.features(DbfTS):
                ListaArEstudAc.append(float(Valores[TS_IndexCampoArEstudAc]))
                ListaArFenomAc.append(float(Valores[TS_IndexCampoArFenomAc]))
            ListaArEstudAc.insert(0, 0)
            ListaArFenomAc.insert(0, 0)

            DbfTS.startEditing()
            for Valores, ValoresArEstudAc, ValoresArFenomAc in zip(
                    processing.features(DbfTS), ListaArEstudAc,
                    ListaArFenomAc):
                if Valores.id() == 0:
                    DbfTS.changeAttributeValue(Valores.id(),
                                               TS_IndexCampoLsi_Li, 0)
                    DbfTS.changeAttributeValue(Valores.id(),
                                               TS_IndexCampoai_b1_2, 0)
                else:
                    DbfTS.changeAttributeValue(
                        Valores.id(), TS_IndexCampoLsi_Li,
                        float(Valores[TS_IndexCampoArEstudAc]) -
                        float(ValoresArEstudAc))
                    DbfTS.changeAttributeValue(
                        Valores.id(), TS_IndexCampoai_b1_2,
                        float(
                            float(Valores[TS_IndexCampoArFenomAc]) +
                            float(ValoresArFenomAc)) / float(2))
            DbfTS.commitChanges()

            # CALCULAR CAMPO 'AAC'
            DbfTS.startEditing()
            for Valores in processing.features(DbfTS):
                DbfTS.changeAttributeValue(
                    Valores.id(), TS_IndexCampoACC,
                    float(Valores[TS_IndexCampoai_b1_2]) *
                    float(Valores[TS_IndexCampoLsi_Li]))
            DbfTS.commitChanges()

            # SOMAR VALORES DE ACC PARA ESCREVER A MENSAGEM
            ListaACC = []
            for Valores in DbfTS.getFeatures():
                ListaACC.append(Valores[TS_IndexCampoACC])
            SomaACC = round(sum(ListaACC), 4)

            # APAGAR CAMPOS INICIAIS PROVENIENTES DO CSV
            DbfTS.dataProvider().deleteAttributes(
                [TS_IndexCampoV, TS_IndexCampoV1, TS_IndexCampoT])
            DbfTS.updateFields()

            msgBar = self.iface.messageBar()
            msgBar.pushWidget(
                msgBar.createMessage(
                    "########### O MODELO FOI VALIDADO COM UMA TAXA DE SUCESSO DE "
                    + str(SomaACC) + "! ###########"), QgsMessageBar.INFO
            )  #"...INFO, 5)" para defenir o tempo da mensagem
    hausdorff = max(distances)
    return hausdorff


# Resolve the Processing-script parameters into live objects: the layer
# parameters arrive as references/URIs and the id column as a field name.
origin_layer = processing.getObject(origin_layer)
target_layer = processing.getObject(target_layer)
target_id_column_index = target_layer.fieldNameIndex(target_id_column_index)
# Inert string literal left by the author with example bindings for running
# outside the Processing framework; it is a no-op statement, kept verbatim.
"""
origin_layer = l1
target_layer = l2
target_id_column_index = 0
interval = 1
"""

# Spatial index over the target features for fast candidate lookups.
target_spatial_index = QgsSpatialIndex()
target_features = processing.features(target_layer)

# Output schema: every origin field plus three comparison-metric columns.
origin_fields = origin_layer.pendingFields().toList()
origin_fields.append(QgsField("BEST_FIT", QVariant.Int))
origin_fields.append(QgsField("HAUSDORFF", QVariant.Double))
origin_fields.append(QgsField("LEN_DIFF", QVariant.Double))
writer = VectorWriter(output, None, origin_fields,
                      origin_layer.dataProvider().geometryType(),
                      origin_layer.crs())

# Reusable feature object for writing results later in the script.
outFeat = QgsFeature()

# populate the spatial index
for feat in target_features:
    target_spatial_index.insertFeature(feat)
import processing
from PyQt4.QtWebKit import QWebView

# Collect the 'elev_m' attribute of every feature on the active layer
# (processing.features honours the "use selected features" option).
layer = iface.activeLayer()
values = []
features = processing.features(layer)
for f in features:
    values.append(f['elev_m'])

# Build an HTML page that renders a Google Charts histogram of the values.
# NOTE(review): 'field_name' is assumed to be bound earlier in the full
# script (it is not defined in this fragment) — verify before running.
myWV = QWebView(None)
html='<html><head><script type="text/javascript"'
html+='src="https://www.google.com/jsapi"></script>'
html+='<script type="text/javascript">'
html+='google.load("visualization","1",{packages:["corechart"]});'
html+='google.setOnLoadCallback(drawChart);'
html+='function drawChart() { '
html+='var data = google.visualization.arrayToDataTable(['
html+='["%s"],' % (field_name)

# One single-column data row per collected value.
for value in values:
    html+='[%f],' % (value)

html+=']);'
html+='var chart = new google.visualization.Histogram('
html+='                document.getElementById("chart_div"));'
html+='chart.draw(data, {title: "Histogram"});}</script></head>'
html+='<body><h1>Layer: %s</h1>' % (layer.name())
html+='<p>Values for %s range from: ' % (field_name)
html+='%d to %d</p>' % (min(values),max(values))
html+='<div id="chart_div"style="width:900px; height:500px;">'
html+='</div></body></html>'
Esempio n. 38
0
 def test_featuresWithoutSelection(self):
     """With no selection active, processing.features yields all 12 features."""
     point_layer = processing.getObject(points())
     self.assertEqual(12, len(processing.features(point_layer)))
Esempio n. 39
0
    def processAlgorithm(self, progress):
        """Compute Local Moran's I (LISA) statistics for one field.

        Exports the input layer to a shapefile, builds a pysal spatial
        weights matrix from it, runs pysal.Moran_Local with 999
        permutations, and writes a copy of every feature with five extra
        attributes (MORANS_P/Z/Q/I/C).  The raw p-values are also stored
        in the P_SIM text output.
        """
        field = self.getParameterValue(self.FIELD)
        field = field[0:10] # try to handle Shapefile field length limit
        filename = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(filename)
        filename = dataobjects.exportVectorLayer(layer)
        provider = layer.dataProvider()
        fields = provider.fields()
        fields.append(QgsField('MORANS_P', QVariant.Double))
        fields.append(QgsField('MORANS_Z', QVariant.Double))
        fields.append(QgsField('MORANS_Q', QVariant.Int))
        fields.append(QgsField('MORANS_I', QVariant.Double))
        fields.append(QgsField('MORANS_C', QVariant.Double))

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            fields, provider.geometryType(), layer.crs() )

        # Build the spatial weights matrix from the exported shapefile.
        # NOTE(review): a Processing selection parameter usually returns an
        # index, so comparing to the string 'queen' may always fall through
        # to rook — confirm (the sibling version of this algorithm indexes
        # into CONTIGUITY_OPTIONS instead).
        contiguity = self.getParameterValue(self.CONTIGUITY)
        if contiguity == 'queen':
            print 'INFO: Local Moran\'s using queen contiguity'
            w=pysal.queen_from_shapefile(filename)
        else:
            print 'INFO: Local Moran\'s using rook contiguity'
            w=pysal.rook_from_shapefile(filename)

        # Read the analysis variable straight from the companion .dbf.
        f = pysal.open(pysal.examples.get_path(filename.replace('.shp','.dbf')))
        y=np.array(f.by_col[str(field)])
        lm = pysal.Moran_Local(y,w,transformation = "r", permutations = 999)

        # http://pysal.readthedocs.org/en/latest/library/esda/moran.html?highlight=local%20moran#pysal.esda.moran.Moran_Local
        # values indicate quadrat location 1 HH,  2 LH,  3 LL,  4 HL

        # http://www.biomedware.com/files/documentation/spacestat/Statistics/LM/Results/Interpreting_univariate_Local_Moran_statistics.htm
        # category - scatter plot quadrant - autocorrelation - interpretation
        # high-high - upper right (red) - positive - Cluster - "I'm high and my neighbors are high."
        # high-low - lower right (pink) - negative - Outlier - "I'm a high outlier among low neighbors."
        # low-low - lower left (med. blue) - positive - Cluster - "I'm low and my neighbors are low."
        # low-high - upper left (light blue) - negative - Outlier - "I'm a low outlier among high neighbors."

        # http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#/What_is_a_z_score_What_is_a_p_value/005p00000006000000/
        # z-score (Standard Deviations) | p-value (Probability) | Confidence level
        #     < -1.65 or > +1.65        |        < 0.10         |       90%
        #     < -1.96 or > +1.96        |        < 0.05         |       95%
        #     < -2.58 or > +2.58        |        < 0.01         |       99%

        self.setOutputValue(self.P_SIM, str(lm.p_sim))

        # Quadrant codes masked to zero where not significant at p <= 0.01.
        sig_q = lm.q * (lm.p_sim <= 0.01) # could make significance level an option
        outFeat = QgsFeature()
        i = 0
        # Append the per-feature statistics in the same order the fields
        # were declared above (P, Z, Q, I, C).
        for inFeat in processing.features(layer):
            inGeom = inFeat.geometry()
            outFeat.setGeometry(inGeom)
            attrs = inFeat.attributes()
            attrs.append(float(lm.p_sim[i]))
            attrs.append(float(lm.z_sim[i]))
            attrs.append(int(lm.q[i]))
            attrs.append(float(lm.Is[i]))
            attrs.append(int(sig_q[i]))
            outFeat.setAttributes(attrs)
            writer.addFeature(outFeat)
            i+=1

        # Flush and close the vector writer.
        del writer
Esempio n. 40
0
    def processAlgorithm(self, progress):
        """Compute Local Moran's I (LISA) with selectable significance.

        Like the basic variant, but the contiguity rule and the
        significance level are looked up via the algorithm's option lists
        (CONTIGUITY_OPTIONS / SIGNIFICANCE_OPTIONS), and the output gains a
        MORANS_Q_S column holding the quadrant code only where significant.
        """
        field = self.getParameterValue(self.FIELD)
        field = field[0:10]  # try to handle Shapefile field length limit
        filename = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(filename)
        filename = dataobjects.exportVectorLayer(layer)
        provider = layer.dataProvider()
        fields = provider.fields()
        fields.append(QgsField('MORANS_P', QVariant.Double))
        fields.append(QgsField('MORANS_Z', QVariant.Double))
        fields.append(QgsField('MORANS_Q', QVariant.Int))  # quadrant
        fields.append(QgsField('MORANS_Q_S',
                               QVariant.Int))  # significant quadrant
        fields.append(QgsField('MORANS_I', QVariant.Double))

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            fields, provider.geometryType(), layer.crs())

        # Build the spatial weights matrix from the exported shapefile.
        # NOTE(review): 'w' is only assigned in the two matching branches;
        # an unexpected option value would leave it unbound — verify the
        # option list is closed.
        contiguity = self.getParameterValue(self.CONTIGUITY)
        if self.CONTIGUITY_OPTIONS[contiguity] == 'queen':
            print 'INFO: Local Moran\'s using queen contiguity'
            w = pysal.queen_from_shapefile(filename)
        elif self.CONTIGUITY_OPTIONS[contiguity] == 'rook':
            print 'INFO: Local Moran\'s using rook contiguity'
            w = pysal.rook_from_shapefile(filename)

        # Map the chosen confidence level onto a p-value ceiling.
        significance_level = self.getParameterValue(self.SIGNIFICANCE_LEVEL)
        if self.SIGNIFICANCE_OPTIONS[significance_level] == '90%':
            max_p = 0.10
        elif self.SIGNIFICANCE_OPTIONS[significance_level] == '95%':
            max_p = 0.05
        elif self.SIGNIFICANCE_OPTIONS[significance_level] == '99%':
            max_p = 0.01
        print 'INFO: significance level ' + self.SIGNIFICANCE_OPTIONS[
            significance_level]

        # Read the analysis variable straight from the companion .dbf.
        f = pysal.open(
            pysal.examples.get_path(filename.replace('.shp', '.dbf')))
        y = np.array(f.by_col[str(field)])
        lm = pysal.Moran_Local(y, w, transformation="r", permutations=999)

        # http://pysal.readthedocs.org/en/latest/library/esda/moran.html?highlight=local%20moran#pysal.esda.moran.Moran_Local
        # values indicate quadrat location 1 HH,  2 LH,  3 LL,  4 HL

        # http://www.biomedware.com/files/documentation/spacestat/Statistics/LM/Results/Interpreting_univariate_Local_Moran_statistics.htm
        # category - scatter plot quadrant - autocorrelation - interpretation
        # high-high - upper right (red) - positive - Cluster - "I'm high and my neighbors are high."
        # high-low - lower right (pink) - negative - Outlier - "I'm a high outlier among low neighbors."
        # low-low - lower left (med. blue) - positive - Cluster - "I'm low and my neighbors are low."
        # low-high - upper left (light blue) - negative - Outlier - "I'm a low outlier among high neighbors."

        # http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#/What_is_a_z_score_What_is_a_p_value/005p00000006000000/
        # z-score (Standard Deviations) | p-value (Probability) | Confidence level
        #     < -1.65 or > +1.65        |        < 0.10         |       90%
        #     < -1.96 or > +1.96        |        < 0.05         |       95%
        #     < -2.58 or > +2.58        |        < 0.01         |       99%

        self.setOutputValue(self.P_SIM, str(lm.p_sim))

        # Quadrant codes masked to zero where not significant at max_p.
        sig_q = lm.q * (lm.p_sim <= max_p)
        outFeat = QgsFeature()
        i = 0
        # Append the per-feature statistics in declaration order
        # (P, Z, Q, Q_S, I).
        for inFeat in processing.features(layer):
            inGeom = inFeat.geometry()
            outFeat.setGeometry(inGeom)
            attrs = inFeat.attributes()
            attrs.append(float(lm.p_sim[i]))
            attrs.append(float(lm.z_sim[i]))
            attrs.append(int(lm.q[i]))
            attrs.append(int(sig_q[i]))
            attrs.append(float(lm.Is[i]))
            outFeat.setAttributes(attrs)
            writer.addFeature(outFeat)
            i += 1

        # Flush and close the vector writer.
        del writer
Esempio n. 41
0
			def Probabilidade(InputAA, InputLayerNameAA, InputLimite, CellSize, TempFolder, Extent):
				"""Build a burnt-area probability raster.

				Clips each yearly burnt-area layer to the study boundary,
				unions them all together, computes for every resulting
				polygon the percentage of years it burned (PROB field) and
				rasterises that field to probabilidade.tif inside
				OutputFolder (closed over from the enclosing scope).
				"""
				# Create a temporary dissolved copy of the "Limite" boundary shapefile
				LimiteOriginal = QgsVectorLayer(InputLimite, "LimiteOriginal", "ogr")
				LimiteCopy = os.path.join(TempFolder, "LimiteCopy.shp")
				processing.runalg("qgis:dissolve",LimiteOriginal,True,None,LimiteCopy)
				Limite = QgsVectorLayer(LimiteCopy, "Limite", "ogr")

				# Create a "Limite" field and fill it with zeros
				CampoLimite = Limite.dataProvider().addAttributes([QgsField("Limite", QVariant.Int)])
				Limite.updateFields()
				Limite.startEditing()
				[Limite.changeAttributeValue(Valores.id(), Limite.fieldNameIndex("Limite"), int(0)) for Valores in processing.features(Limite)]
				Limite.commitChanges()
				Limite.updateFields()

				# Delete every other field, leaving only the "Limite" column
				for field in range(len(Limite.dataProvider().attributeIndexes())-1):
					Limite.dataProvider().deleteAttributes([0])
				Limite.updateFields()

				AA = []
				inc = 0
				for AreasArdidas, LayerName in zip(InputAA,InputLayerNameAA):
					# Clip the yearly burnt-area layer by the boundary
					ClipOutput = os.path.join(TempFolder, LayerName + "_Clip.shp")
					processing.runalg("qgis:clip",AreasArdidas,Limite,ClipOutput)
					LoadAA = QgsVectorLayer(ClipOutput, LayerName, "ogr")
					FeatureList = []

					for f in LoadAA.getFeatures():
						FeatureList.append(f)
					# If the clip produced no features, copy the boundary
					# features in so the later union still has a polygon
					if len(FeatureList) == 0:
						LimiteGeometryList = []
						for feature in Limite.getFeatures():
							LimiteGeometryList.append(feature)
						LoadAA.startEditing()
						DataProvider = LoadAA.dataProvider()
						DataProvider.addFeatures(LimiteGeometryList)
						LoadAA.commitChanges()

					# Create the per-year flag field "A_<n>":
					# 1 = the area burned that year, 0 = it did not
					inc +=1
					CampoName = "A_" + str(inc)
					CampoAno = LoadAA.dataProvider().addAttributes([QgsField(CampoName, QVariant.Int)])
					LoadAA.updateFields()
					CampoAnoIndex = LoadAA.fieldNameIndex(CampoName)
					LoadAA.startEditing()
					if len(FeatureList) == 0:
						[LoadAA.changeAttributeValue(Valores.id(), CampoAnoIndex, int(0)) for Valores in processing.features(LoadAA)]
					else:
						[LoadAA.changeAttributeValue(Valores.id(), CampoAnoIndex, int(1)) for Valores in processing.features(LoadAA)]
					LoadAA.commitChanges()
					LoadAA.updateFields()

					# Delete every other field, leaving only the "A_<n>" column
					for field in range(len(LoadAA.dataProvider().attributeIndexes())-1):
						LoadAA.dataProvider().deleteAttributes([0])
					LoadAA.updateFields()
					AA.append(LoadAA)

				# Union all yearly layers pairwise, chaining intermediates
				# NOTE(review): with exactly two input layers the loop runs
				# once and UnionAnos stays an empty list when the final
				# union below is invoked — confirm at least three layers are
				# expected here.
				UnionAnos = []
				for incremento in range(0, len(AA)-1):
					if incremento == 0:
						processing.runalg("saga:union",AA[0],AA[1],True, os.path.join(TempFolder, "Union_" + str(incremento)+ ".shp"))
					else:
						UnionAnos = os.path.join(TempFolder, "Union_" + str(incremento)+ ".shp")
						processing.runalg("saga:union",os.path.join(TempFolder, "Union_" + str(incremento-1)+ ".shp"),AA[incremento + 1],True, UnionAnos)

				OutputUnion = os.path.join(TempFolder, "Probabilidade.shp")
				processing.runalg("saga:union",UnionAnos,Limite,True, OutputUnion)

				### Compute the probability field
				Probabilidade = QgsVectorLayer(OutputUnion, "Probabilidade", "ogr")

				# Build a "float(Valores[0]) + float(Valores[1]) + ..."
				# expression over all attribute columns; it is eval'd per
				# feature below to sum the yearly burn flags.
				ExpressaoAppend = []
				for field in Probabilidade.dataProvider().attributeIndexes():
					ExpressaoAppend.append("float(Valores["+str(field) + "]) ")
				ExpressaoProb = '+ '.join(ExpressaoAppend)
				NrCampos = int(len(ExpressaoAppend)-1)
				CampoProb = Probabilidade.dataProvider().addAttributes([QgsField("PROB", QVariant.Double)])
				Probabilidade.updateFields()
				CampoProbIndex = Probabilidade.fieldNameIndex("PROB")

				# PROB = (burned years / total years) * 100
				Probabilidade.startEditing()
				for Valores in processing.features(Probabilidade):
					Probabilidade.changeAttributeValue(Valores.id(), CampoProbIndex, eval(ExpressaoProb)/float(NrCampos)*100)
				Probabilidade.commitChanges()
				Probabilidade.updateFields()

				# Clamp zero / single-year probabilities (with a small
				# tolerance for float noise) to a floor value of 1
				Probabilidade.startEditing()
				for Valores in processing.features(Probabilidade):
					if (Valores[CampoProbIndex] == 0) or (Valores[CampoProbIndex] <= ((1.0/float(NrCampos)*100)+0.000000000005)):
						Probabilidade.changeAttributeValue(Valores.id(), CampoProbIndex, float(1))
				Probabilidade.commitChanges()
				Probabilidade.updateFields()
				# Rasterise the PROB attribute to the final GeoTIFF
				processing.runalg("grass7:v.to.rast.attribute",Probabilidade,0,"PROB",Extent,CellSize,-1,0.0001, os.path.join(OutputFolder, "probabilidade.tif"))
Esempio n. 42
0
	def run(self):
		"""Join touching parts of multipart lines on the active layer.

		For every multipart line feature, connectable parts are chained
		into single continuous parts (via Linepart.maybe_connect_with /
		traverse_from), the geometry is replaced in one undoable edit
		command, and a summary message is pushed to the message bar.
		Multi-part loops are skipped because their start/endpoint would be
		ambiguous.
		"""
		layer=self.iface.activeLayer()
		# Only proceed if the layer exists and its provider allows geometry edits
		if layer and layer.dataProvider().capabilities() & QgsVectorDataProvider.ChangeGeometries:
			parts_before_total = 0
			parts_after_total = 0
			parts_included_total = 0
			done_anything=False
			for feature in processing.features(layer):
				geom = feature.geometry()
				if geom.type() == QGis.Line and geom.isMultipart():
					parts = geom.asMultiPolyline()

					# Wrap each part with at least two points; record which
					# pairs of parts can be connected end-to-end
					lineparts = []
					for px in range(0, len(parts)):
						if len(parts[px]) > 1:
							lp = Linepart(px, parts[px])
							lineparts.append(lp)
					for px in range(0, len(lineparts)):
						for px2 in range(px + 1, len(lineparts)):
							lineparts[px].maybe_connect_with(lineparts[px2])

					seen = {}
					included = {}
					new_parts = []

					# Walk each unvisited part outwards in both directions,
					# concatenating connected parts into one point list
					for pa in lineparts:
						if pa in seen:
							continue

						if pa.disabled():
							continue

						seen[pa]=True

						points = []
						abort_part=False
						pa_last=pa
						# First walk backwards from the start, prepending points
						pa_curr=pa.at_start()
						while pa_curr:
							if pa_curr == pa:
								# multi-part loop detected. multi-part loops are ambiguous since
								# where should the start/endpoint be?
								# 
								# all parts in the loop are now marked
								# as seen since we are back were we
								# started and we will just abort.
								abort_part=True
								break
							seen[pa_curr]=True
							shpt1, p, shpt2, pa_next =pa_curr.traverse_from(pa_last)
							p.reverse()
							points[0:0] = p + [shpt1]
							included[pa_curr]=True
							pa_last=pa_curr
							pa_curr=pa_next
						if abort_part:
							continue
						# Then add this part's own points and walk forwards
						# from its end, appending points
						points.extend(pa.points_start_to_end())
						included[pa]=True
						pa_last=pa
						pa_curr=pa.at_end()
						while pa_curr:
							seen[pa_curr]=True
							shpt1, p, shpt2, pa_next=pa_curr.traverse_from(pa_last)
							points.extend([shpt1] + p)
							included[pa_curr]=True
							pa_last=pa_curr
							pa_curr=pa_next

						new_parts.append(points)
								
#					print "feature", feature, "(",feature.id(),")-----"
#					print "parts before:", len(parts)
#					print "parts after:", len(new_parts)
#					np=0
#					for p in parts:
#						np+=len(p)
#					print "number of points before:", np
#					np=0
#					for p in new_parts:
#						np+=len(p)
#					print "number of points after:", np
#					print "-----"

					# Only touch the feature if joining reduced the part count
					if len(parts) != len(new_parts):
						if not done_anything:
							done_anything=True
							# Open a single undoable edit command lazily,
							# on the first actual modification
							layer.beginEditCommand("Multiline Join")

						g = QgsGeometry.fromMultiPolyline(new_parts)
						layer.changeGeometry(feature.id(), g)
			#			layer.addFeatures([f])

						parts_before_total += len(parts)
						parts_after_total += len(new_parts)
						parts_included_total += len(included)

			if done_anything:
#				print "actually did modify something"
				layer.endEditCommand()
				layer.triggerRepaint()

				# Summarise how many parts were merged into how many
				self.iface.messageBar().pushMessage("Multiline Join",
					str(parts_included_total) + "/" + str(parts_before_total) + \
					" parts -> " + str(parts_after_total) + " parts", level=QgsMessageBar.INFO)
Esempio n. 43
0
 def test_featuresWithoutSelection(self):
     """All 12 features are iterated when nothing is selected."""
     feature_iter = processing.features(processing.getObject(points()))
     self.assertEqual(12, len(feature_iter))
 def calculateFields(self, listKeep, listStats, output):
     """Aggregate kept-field values per dissolve-field value.

     listKeep: per-field checkbox states (2 = keep the field).
     listStats: per-field statistic name ('Mean', 'Sum', 'Min', ...).
     output: path of the dissolved layer used to read the unique
     dissolve-field values.
     Returns a list with one list of aggregated values per kept field,
     ordered like the unique dissolve-field values.
     """
     # get selected layer
     index = self.ui.comboLayerList.currentIndex()
     legendInterface = self.iface.legendInterface()
     listLayers = [
         layer for layer in legendInterface.layers()
         if layer.type() == QgsMapLayer.VectorLayer
     ]
     selectedLayer = listLayers[index]
     # iterates over layer features to get attributes as a list of lists
     # uses the processing method so as to get only selected features if this option is set in the processing options
     iter = processing.features(selectedLayer)
     attrs = [feature.attributes() for feature in iter]
     # get all values of the dissolve field (before processing : with duplicate values)
     indexDissolveField = self.ui.comboFieldList.currentIndex()
     valuesDissolveField = [
         feature[indexDissolveField] for feature in attrs
     ]
     # get unique values for dissolve field, from output (seems more secure than to get it from valuesDissolveField ?)
     outputLayer = QgsVectorLayer(output, "name", "ogr")
     provider = outputLayer.dataProvider()
     fields = provider.fields()
     listFieldNames = [field.name() for field in fields]
     iter = outputLayer.getFeatures()
     uniqueValuesDissolveField = [
         feature.attributes()[indexDissolveField] for feature in iter
     ]
     # initializes list of lists which will contain results (it will have one element per kept field)
     listRes = []
     # trick for dissolve field, if kept
     if listKeep[indexDissolveField] == 2:
         listStats[indexDissolveField] = 'First'
     # for each kept field
     for i in range(len(listFieldNames)):
         if listKeep[i] == 2:
             # creates list which will contain attribute values for current field, one empty element per unique dissolve field value
             listAttrs = [[]
                          for val in range(len(uniqueValuesDissolveField))]
             # fill this list with all the current field values corresponding to each dissolve field value
             valuesField = [feature[i] for feature in attrs]
             for (x, y) in zip(valuesDissolveField, valuesField):
                 listAttrs[uniqueValuesDissolveField.index(x)].append(y)
             # removes any NULL values
             listAttrs = [[x for x in l if x] for l in listAttrs]
             # for each list in listAttrs, calculates one value according to the chosen stat
             # if list is empty (can happen if it contained originally only NULL values), return NULL as a result
             if listStats[i] == "Mean":
                 listAttrs = [
                     sum(y) / len(y) if y else NULL for y in listAttrs
                 ]
             elif listStats[i] == "Sum":
                 listAttrs = [sum(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Min":
                 listAttrs = [min(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Max":
                 listAttrs = [max(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "Count":
                 listAttrs = [len(y) if y else NULL for y in listAttrs]
             elif listStats[i] == "First":
                 listAttrs = [y[0] if y else NULL for y in listAttrs]
             elif listStats[i] == "Last":
                 listAttrs = [y[-1] if y else NULL for y in listAttrs]
             elif listStats[i] == "Median":
                 listAttrs = [
                     self.median(y) if y else NULL for y in listAttrs
                 ]
             elif listStats[i] == "Standard deviation":
                 listAttrs = [
                     self.standard_dev(y) if y else NULL for y in listAttrs
                 ]
             elif listStats[i] == "Concatenation":
                 listAttrs = [
                     ", ".join(y) if y else NULL for y in listAttrs
                 ]
             elif listStats[i] == "Uniquification":
                 listAttrs = [
                     ", ".join(set(y)) if y else NULL for y in listAttrs
                 ]
             # append each field result to listRes
             listRes.append(listAttrs)
     return listRes
Esempio n. 45
0
    def processAlgorithm(self, progress):
        """Score every grid cell's low-stress reachability from one cell.

        Requires exactly one selected grid feature as the source.  For each
        grid cell it computes the shortest-path cost in the unconstrained
        network (DG) and in the stress-constrained network (SG), and writes
        a polygon per cell with its status, both costs and their ratio.
        """
        progress.setPercentage(0)
        # Retrieve the values of the parameters entered by the user
        roadsLayer = dataobjects.getObjectFromUri(
            self.getParameterValue(self.ROADS_LAYER))
        gridLayer = dataobjects.getObjectFromUri(
            self.getParameterValue(self.GRID_LAYER))
        vertIdField = self.getParameterValue(self.VERT_ID_FIELD)
        stress = self.getParameterValue(self.STRESS)

        # build the output layer
        outFields = QgsFields()
        outFields.append(QgsField('grid_id', QVariant.Int))
        outFields.append(QgsField('status',QVariant.String))
        outFields.append(QgsField('free_cost', QVariant.Int))
        outFields.append(QgsField('cnst_cost', QVariant.Int))
        outFields.append(QgsField('cost_ratio', QVariant.Double))
        writer = self.getOutputFromName(self.OUTPUT_LAYER).getVectorWriter(
            outFields, QGis.WKBPolygon, roadsLayer.crs())
        progress.setPercentage(2)

        # establish db connection
        progress.setInfo('Getting DB connection')
        self.setDbFromRoadsLayer(roadsLayer)
        self.setLayersFromDb()

        # get network: DG is the full network, SG only the edges at or
        # below the requested stress level
        progress.setInfo('Building network')
        nu = NXUtils(self.vertsLayer,self.linksLayer)
        nu.buildNetwork()
        DG = nu.getNetwork()
        SG = nu.getStressNetwork(stress)
        progress.setPercentage(10)
        graphCosts = nx.get_edge_attributes(DG,'weight')

        #get grid feature and vert id
        progress.setText('Reading selected feature(s)')
        selectedGridFeatures = processing.features(gridLayer)
        if not len(selectedGridFeatures) == 1:
            raise GeoAlgorithmExecutionException('You must select one and only one feature in the grid layer')
        gridFeature = QgsFeature()
        for i, f in enumerate(selectedGridFeatures):
            gridFeature = f
        sourceVertId = gridFeature.attribute(vertIdField)

        #test for source feature not having any low stress connections
        if not SG.has_node(sourceVertId):
            raise GeoAlgorithmExecutionException('The selected grid cell has no low stress connections')

        #iterate grid features and compile scores
        progress.setText('Generating grid scores')
        #helper function to sum costs from graph
        # NOTE(review): the 'graphWeights' parameter is never used — the
        # closure variable 'graphCosts' is read instead; also, 'del
        # nodes[1]' removes the SECOND node, not the first, despite the
        # comment, and mutates the caller's path list.  The bare except
        # silently skips edges missing from graphCosts.  Verify all three
        # against the intended behaviour.
        def sumCosts(nodes,graphWeights):
            cost = 0
            del nodes[1]    #remove the source and target nodes from consideration
            del nodes[-1]
            for j, node in enumerate(nodes):
                    try:
                        cost = cost + graphCosts[(node,nodes[j+1])]
                    except:
                        pass
            return cost
        #gridProvider = grid.dataProvider()
        gridFeatures = gridLayer.getFeatures()
        for i, gf in enumerate(gridFeatures):
            targetVertId = gf.attribute(vertIdField)
            progress.setInfo('from: ' + str(sourceVertId) + ' to: ' + str(targetVertId))

            #write new feature
            progress.setText('Writing grid feature')
            newFeat = QgsFeature()
            newGeom = QgsGeometry(gf.geometry())
            newFeat.setGeometry(newGeom)
            newFeat.initAttributes(5)
            newFeat.setAttribute(0,gf.attribute(vertIdField))
            if targetVertId == sourceVertId:
                newFeat.setAttribute(1,'Source cell')
            elif not SG.has_node(targetVertId):
                newFeat.setAttribute(1,'Unreachable')
            elif not nx.has_path(SG,source=sourceVertId,target=targetVertId):
                newFeat.setAttribute(1,'Unreachable')
            else:
                #get shortest path without stress
                pathNoStress = nx.shortest_path(DG,source=sourceVertId,target=targetVertId,weight='weight')
                #get shortest path with stress
                pathStress = nx.shortest_path(SG,source=sourceVertId,target=targetVertId,weight='weight')
                #get cost values
                costNoStress = sumCosts(pathNoStress,graphCosts)
                costStress = sumCosts(pathStress,graphCosts)

                #add attributes; cost_ratio is left unset when the free
                #cost is zero to avoid division by zero
                newFeat.setAttribute(1,'Target cell')
                newFeat.setAttribute(2,costNoStress)
                newFeat.setAttribute(3,costStress)
                if costNoStress == 0:
                    pass
                else:
                    newFeat.setAttribute(4,float(costStress)/float(costNoStress))

            writer.addFeature(newFeat)

        # Flush and close the vector writer.
        del writer
from math import sqrt

# Cluster the endpoints of all line features with scikit-learn KMeans.
# Script parameters supplied by the Processing framework: input (source
# layer), kmeans_output (destination), edge_point_clusters (k).
t_start = datetime.now()
print('{0}: Clustering using KMeans'.format(t_start))

layer = processing.getObject(input)
provider = layer.dataProvider()
fields = provider.fields()
fields.append(QgsField('CLUSTER', QVariant.Int))
fields.append(QgsField('CLUSTER_N', QVariant.Int))
writer = VectorWriter(kmeans_output, None, fields, provider.geometryType(), layer.crs())


# Perform clustering: every line contributes its two end vertices as
# sample points, so sample 2*i belongs to line i's start and 2*i+1 to
# its end.
X = []
for edge in processing.features(layer):
    geom = edge.geometry()
    X.append([geom.vertexAt(0).x(), geom.vertexAt(0).y()])
    X.append([geom.vertexAt(1).x(), geom.vertexAt(1).y()])


db = KMeans(n_clusters=edge_point_clusters).fit(X)
pt_labels = list(db.labels_)

# labels[0] = cluster of line0 start 
# labels[1] = cluster of line0 end
# labels[2] = cluster of line1 start
# labels[3] = cluster of line1 end
# ...
Esempio n. 47
0
import processing


# Grab the first layer in the legend.
layer = iface.legendInterface().layers()[0]  #if only one layer or find better way to do this
features = processing.features(layer)

#OR

# Collect [index, value] pairs for the second-to-last attribute of every
# feature; getFeatures() returns an iterator over the layer's features.
depths = [[i, feat.attributes()[-2]] for i, feat in enumerate(layer.getFeatures())]
count = len(depths)


#To get field by name rather than index:

idx = layer.fieldNameIndex('name')

depths.extend(feat.attributes()[idx] for feat in layer.getFeatures())







#add field to shp with ogr: (can be done while shp is open in qgis)

from osgeo import ogr
Esempio n. 48
0
def calculateFields(listStats, output):
    """Aggregate the input layer's attributes per unique dissolve-field value.

    For every field whose entry in listStats is not 'no', groups the input
    features by the value of the dissolve field and reduces each group with
    the requested statistic ('mean', 'sum', 'min', 'max', 'count', 'first',
    'last', 'median', 'sd', 'concat' or 'unique').

    Parameters:
        listStats: list of statistic names, one entry per input-layer field.
        output: path/URI of the dissolved output layer, opened via OGR to
            obtain the set of unique dissolve-field values.

    Returns:
        A list with one inner list per kept field; each inner list holds one
        aggregated value per unique dissolve-field value (NULL when a group
        contained only NULL values).

    Relies on module-level names: inputLayer, Dissolve_field, dissolveLayer,
    processing, QgsVectorLayer, NULL.
    """
    # iterates over input layer features to get attributes as a list of lists
    # uses the processing method so as to get only selected features if this option is set in the processing options
    # (renamed from `iter` to avoid shadowing the builtin)
    feature_iter = processing.features(inputLayer)
    attrs = [feature.attributes() for feature in feature_iter]
    # get index of dissolve field
    provider = inputLayer.dataProvider()
    fields = provider.fields()
    listFieldNames = [field.name() for field in fields]
    indexDissolveField = listFieldNames.index(Dissolve_field)
    # get all values of the dissolve field (before processing : with duplicate values)
    valuesDissolveField = [feature[indexDissolveField] for feature in attrs]
    # get unique values for dissolve field, from output (seems more secure than to get it from valuesDissolveField ?)
    outputLayer = QgsVectorLayer(output, "name", "ogr")
    # NOTE(review): listFieldNames is re-derived from dissolveLayer here and
    # is what the listStats loop below indexes against — confirm the two
    # layers' field orders actually match.
    provider = dissolveLayer.dataProvider()
    fields = provider.fields()
    listFieldNames = [field.name() for field in fields]
    output_iter = outputLayer.getFeatures()
    uniqueValuesDissolveField = [
        feature.attributes()[indexDissolveField] for feature in output_iter
    ]
    # initializes list of lists which will contain results (it will have one element per kept field)
    listRes = []
    # for each kept field
    for i in range(len(listFieldNames)):
        if listStats[i] != 'no':
            # creates list which will contain attribute values for current field, one empty element per unique dissolve field value
            listAttrs = [[] for val in range(len(uniqueValuesDissolveField))]
            # fill this list with all the current field values corresponding to each dissolve field value
            valuesField = [feature[i] for feature in attrs]
            for (x, y) in zip(valuesDissolveField, valuesField):
                listAttrs[uniqueValuesDissolveField.index(x)].append(y)
            # removes any NULL values
            listAttrs = [[x for x in l if x] for l in listAttrs]
            # for each list in listAttrs, calculates one value according to the chosen stat
            # if list is empty (can happen if it contained originally only NULL values), return NULL as a result
            if listStats[i] == "mean":
                # float() guards against integer floor division on int fields
                listAttrs = [float(sum(y)) / len(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "sum":
                listAttrs = [sum(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "min":
                listAttrs = [min(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "max":
                listAttrs = [max(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "count":
                listAttrs = [len(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "first":
                listAttrs = [y[0] if y else NULL for y in listAttrs]
            elif listStats[i] == "last":
                listAttrs = [y[-1] if y else NULL for y in listAttrs]
            elif listStats[i] == "median":
                # NOTE(review): `self` is undefined in this module-level
                # function — the 'median' and 'sd' branches would raise
                # NameError. Presumably extracted from a class; confirm.
                listAttrs = [self.median(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "sd":
                listAttrs = [
                    self.standard_dev(y) if y else NULL for y in listAttrs
                ]
            elif listStats[i] == "concat":
                listAttrs = [", ".join(y) if y else NULL for y in listAttrs]
            elif listStats[i] == "unique":
                listAttrs = [
                    ", ".join(set(y)) if y else NULL for y in listAttrs
                ]
            # append each field result to listRes
            listRes.append(listAttrs)
    return listRes
    hausdorff = max(distances)
    return hausdorff


origin_layer = processing.getObject(origin_layer)
target_layer = processing.getObject(target_layer)
target_id_column_index = target_layer.fieldNameIndex(target_id_column_index)
"""
origin_layer = l1
target_layer = l2
target_id_column_index = 0
interval = 1
"""

target_spatial_index = QgsSpatialIndex()
target_features = processing.features(target_layer)

origin_fields = origin_layer.pendingFields().toList()
origin_fields.append( QgsField("BEST_FIT", QVariant.Int ))
origin_fields.append( QgsField("HAUSDORFF", QVariant.Double ))
origin_fields.append( QgsField("LEN_DIFF", QVariant.Double ))
writer = VectorWriter(output, None, origin_fields, origin_layer.dataProvider().geometryType(), origin_layer.crs() )

outFeat = QgsFeature()

# populate the spatial index
for feat in target_features: 
    target_spatial_index.insertFeature(feat)
    
origin_features = processing.features(origin_layer)
for origin_feature in origin_features:
import qgis.gui 

# State for iterating the "Haltungen" (sewer reach) features.
VonSchacht = 0
BisSchacht = 0
bis_schacht = 1
sTime = time.time()  # start time, for timing the run
haltungen = []
selected_FeatureCount = 0
VonBisSchacht = []
logging.basicConfig(level=logging.DEBUG)

# Look up the two project layers by their legend names; mapLayersByName
# returns a list, take the first match.
layer_haltungen = QgsMapLayerRegistry.instance().mapLayersByName("Haltungen 20180308")[0]
layer_haltungen.isValid()
layer_schacht = QgsMapLayerRegistry.instance().mapLayersByName("Normschacht 20180308")[0]
# layer_selectedHaltungen = QgsMapLayerRegistry.instance().mapLayersByName("Haltungen 20180308 copy")[0]
features_selectedHaltungen = processing.features(layer_haltungen)
# print("Haltungen-For:")
for feature in features_selectedHaltungen:
    try:
        # print("Start des Haltungs-For-Loop")
        layer_haltungen.setSelectedFeatures([])
        iface.mapCanvas().setSelectionColor( QColor("yellow"))
        fid = int(feature.id())
        layer_haltungen.setSelectedFeatures([fid])

        # print "Feature ID %d: " % feature.id()
        processing.runalg("preconfigured:normschacht")
        features_schacht = processing.features(layer_schacht)
        # for f in features_schacht:
        #     if f['Funktion'] == 6:
        #         logging.DEBUG("Break")
Esempio n. 51
0
def option1_2():
    """Print the total of the 'peso' attribute over the layer's features."""
    total = sum(feat['peso'] for feat in processing.features(layer))
    print("peso", total)
Esempio n. 52
0
# category - scatter plot quadrant - autocorrelation - interpretation
# high-high - upper right (red) - positive - Cluster - "I'm high and my neighbors are high."
# high-low - lower right (pink) - negative - Outlier - "I'm a high outlier among low neighbors."
# low-low - lower left (med. blue) - positive - Cluster - "I'm low and my neighbors are low."
# low-high - upper left (light blue) - negative - Outlier - "I'm a low outlier among high neighbors."

# http://help.arcgis.com/en/arcgisdesktop/10.0/help/index.html#/What_is_a_z_score_What_is_a_p_value/005p00000006000000/
# z-score (Standard Deviations) | p-value (Probability) | Confidence level
#     < -1.65 or > +1.65        |        < 0.10         |       90%
#     < -1.96 or > +1.96        |        < 0.05         |       95%
#     < -2.58 or > +2.58        |        < 0.01         |       99%


# Keep the quadrant code only where the pseudo p-value is significant.
sig_q = lm.q * (lm.p_sim <= 0.01) # could make significance level an option
outFeat = QgsFeature()
# Write one output feature per input feature, copying its geometry and
# appending the local Moran statistics at index i.
for i, inFeat in enumerate(processing.features(layer)):
    outFeat.setGeometry(inFeat.geometry())
    attrs = inFeat.attributes()
    attrs.extend([
        float(lm.p_sim[i]),
        float(lm.z_sim[i]),
        int(lm.q[i]),
        float(lm.Is[i]),
        int(sig_q[i]),
    ])
    outFeat.setAttributes(attrs)
    writer.addFeature(outFeat)

del writer
Esempio n. 53
-1
 def test_SagaVectorAlgorithWithUnsupportedInputAndOutputFormat(self):
     '''this tests both the exporting to shp and then the format change in the output layer'''
     # Load the GeoJSON polygon fixture and select exactly one feature so
     # the algorithm runs on the selection only.
     layer = processing.getObject(polygonsGeoJson());
     feature = layer.getFeatures().next()
     selected = [feature.id()]
     layer.setSelectedFeatures(selected)
     # Ask for GeoJSON output from a SAGA algorithm: SAGA handles neither
     # GeoJSON input nor output natively, which forces the shp round-trip
     # under test.
     outputs=processing.runalg("saga:polygoncentroids",polygonsGeoJson(),True, getTempFilename("geojson"))
     layer.setSelectedFeatures([])
     output=outputs['CENTROIDS']
     layer=dataobjects.getObjectFromUri(output, True)
     # Field names and types must survive the format conversions.
     fields=layer.pendingFields()
     expectednames=['ID','POLY_NUM_A','POLY_ST_A']
     expectedtypes=['Real','Real','String']
     names=[str(f.name()) for f in fields]
     types=[str(f.typeName()) for f in fields]
     self.assertEqual(expectednames, names)
     self.assertEqual(expectedtypes, types)
     # Only the single selected feature should have been processed.
     # NOTE(review): len() and .next() on the processing.features() result
     # assume the old Python-2-era API returns a list-like wrapper — confirm.
     features=processing.features(layer)
     self.assertEqual(1, len(features))
     feature=features.next()
     attrs=feature.attributes()
     expectedvalues=["0","1.1","string a"]
     values=[str(attr) for attr in attrs]
     self.assertEqual(expectedvalues, values)
     wkt='POINT(270787.49991451 4458955.46775295)'
     self.assertEqual(wkt, str(feature.geometry().exportToWkt()))