def processAlgorithm(self, progress):
    """Fit a logistic-regression SDM from a species point layer (SPECIES)
    and a ';'-separated list of environmental rasters (ENV).

    All rasters must share the point layer's CRS and a common shape.
    Raises GeoAlgorithmExecutionException on CRS or shape mismatch.
    """
    # Vector layer with the species presence points
    vector = self.getParameterValue(self.SPECIES)
    v = Processing.getObject(vector)
    v_crs = v.crs()
    # Environmental layers
    envlayers = self.getParameterValue(self.ENV)
    pred = []     # one 2D numpy array per environmental layer
    shape = None  # common raster shape, fixed by the first layer read
    gt = None     # geotransform of the last layer read (layers share extent)
    for lay in envlayers.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        if r.crs() != v_crs:
            raise GeoAlgorithmExecutionException(
                "All input layers need to have the same projection")
        raster = gdal.Open(str(lay))
        gt = raster.GetGeoTransform()
        array = raster.GetRasterBand(1).ReadAsArray()
        if shape is None:
            shape = array.shape
        elif array.shape != shape:
            raise GeoAlgorithmExecutionException(
                "All input environmental layers need to have the same resolution"
            )
        # BUG FIX: original appended to an undefined name "layers";
        # the list declared above is "pred".
        pred.append(array)
    # Creating response: presence/absence grid rasterized from the points
    work_array = numpy.zeros_like(pred[0])
    for feature in v.getFeatures():
        geom = feature.geometry().asPoint()
        mx = geom.x()
        my = geom.y()
        pp = func.world2Pixel(gt, mx, my)  # map coords -> pixel indices
        x = round(pp[0])
        y = round(pp[1])
        work_array[y, x] = 1
    response = work_array
    # The logistic regression.
    # BUG FIX: "train" was never built in the original. sklearn expects an
    # (n_samples, n_features) matrix and a 1D target, so stack the flattened
    # predictor grids column-wise and flatten the response to match.
    # NOTE(review): this reconstruction of the missing "train" is the standard
    # SDM formulation (one row per cell) — confirm against the author's intent.
    train = numpy.column_stack([a.ravel() for a in pred])
    logit = LogisticRegression(C=1.0, class_weight='auto', fit_intercept=True)
    logit.fit(train, response.ravel())
def processAlgorithm(self, progress):
    """Flag spatial outliers in the input point layer (VECTOR) via
    Mahalanobis distance of the point coordinates, then select the
    outlier features in the layer.

    Raises GeoAlgorithmExecutionException if no coordinates can be read
    or if too many points would be flagged.
    """
    # Do the stuff
    self.progress = progress
    vector = self.getParameterValue(self.VECTOR)
    v = Processing.getObject(vector)
    # mp = self.getParameterValue(self.MULTIPLIER)
    # Get List of coordinates
    progress.setConsoleInfo("Get Input coordinates...")
    func.updateProcessing(progress, 1, 4)
    # BUG FIX: "x = y = id = []" bound all three names to the SAME list,
    # so every append landed in one shared list. Use independent lists
    # (and avoid shadowing the builtin "id").
    x = []
    y = []
    fids = []
    ds = ogr.Open(vector)
    lay = ds.GetLayer()
    for i in range(0, lay.GetFeatureCount()):
        f = lay.GetFeature(i)
        geom = f.GetGeometryRef()
        x.append(geom.GetX())
        y.append(geom.GetY())
        fids.append(f.GetFID())
    if len(x) == 0 or len(y) == 0:
        raise GeoAlgorithmExecutionException(
            "Coordinates of given point layer could not be extracted")
    # Build Mahalanobis Distances
    progress.setConsoleInfo("Build Mahalanobis Distances...")
    func.updateProcessing(progress, 2, 4)
    md = self.MahalanobisDist(x, y)
    # Identify outliers and build new values
    progress.setConsoleInfo("Identify outliers...")
    func.updateProcessing(progress, 3, 4)
    # Get 3 greatest values indices (negate so argsort yields descending order)
    outliers = (-numpy.array(md)).argsort()[:3]
    # threshold = numpy.mean(md) * mp  # adjust 1.5 accordingly
    # Sanity check: refuse to discard a quarter or more of the data
    if len(outliers) >= (len(fids) / 4):
        raise GeoAlgorithmExecutionException(
            "Too many outliers. Try to increase the multiplier.")
    # Get ids of outliers and select those in the input vectorlayer
    progress.setConsoleInfo("Select %s outliers in the input vectorlayer" %
                            (str(len(outliers))))
    func.updateProcessing(progress, 4, 4)
    for o in outliers:
        v.select(int(fids[o]))
def processAlgorithm(self, progress):
    """Compute a pairwise niche-overlap metric (METRIC) for every pair of
    environmental rasters (ENV, ';'-separated) and save the results to CSV.

    All rasters must share extent and cellsize; at least two are required.
    """
    # Do the stuff
    metric = self.m[self.getParameterValue(self.METRIC)]
    envlayers = self.getParameterValue(self.ENV)
    names = []
    env = []
    shape = None
    # Load all band-1 arrays into a list
    for lay in envlayers.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        name = str( r.name() )
        names.append(name)
        a = gdal.Open( lay )
        array = a.GetRasterBand(1).ReadAsArray()
        # Prepare by zeroing all no-data cells in the array
        NA = a.GetRasterBand(1).GetNoDataValue()
        if NA is not None:  # "is not None": identity check, not equality
            array[array==NA] = 0
        env.append(array)
        if shape is None:
            # Fast check if arrays are unequal: remember the first shape
            shape = array.shape
        else:
            if shape != array.shape:
                raise GeoAlgorithmExecutionException("Input layers need to have the same extent and cellsize.")
        a = r = None  # release gdal/qgis handles
    if len( env ) == 1:
        raise GeoAlgorithmExecutionException("You need at least two layers to calculate overlap statistics.")
    progress.setConsoleInfo("Loaded %s arrays for calculation" % ( str( len( names) ) ) )
    func.updateProcessing(progress,1,3)
    results = []
    func.updateProcessing(progress,2,3)
    if len(env) > 2:
        # Iterative calculation of the metric over all unordered pairs (j < k)
        for j in range(0,len(env)):
            for k in range(j+1,len(env)):
                progress.setConsoleInfo("Calculating Overlap of layers %s with %s" % (names[j],names[k]) )
                r = self.Overlap(metric,env[j],env[k])
                results.append( (names[j],names[k],metric,r) )
    else:
        # Only two input layers
        r = self.Overlap(metric,env[0],env[1])
        results.append( (names[0],names[1],metric,r) )
    progress.setConsoleInfo("Saving results")
    func.updateProcessing(progress,3,3)
    output = self.getOutputValue(self.RESULTS)
    titles = ['Layer1','Layer2','Metric','Overlap']
    # Save Output
    func.saveToCSV(results, titles, output )
def processAlgorithm(self, progress):
    """Flag spatial outliers in the input point layer (VECTOR) via
    Mahalanobis distance of the point coordinates, then select the
    outlier features in the layer.

    Raises GeoAlgorithmExecutionException if no coordinates can be read
    or if too many points would be flagged.
    """
    # Do the stuff
    self.progress = progress
    vector = self.getParameterValue(self.VECTOR)
    v = Processing.getObject(vector)
    # mp = self.getParameterValue(self.MULTIPLIER)
    # Get List of coordinates
    progress.setConsoleInfo("Get Input coordinates...")
    func.updateProcessing(progress,1,4)
    # BUG FIX: "x = y = id = []" bound all three names to the SAME list,
    # so every append landed in one shared list. Use independent lists
    # (and avoid shadowing the builtin "id").
    x = []
    y = []
    fids = []
    ds = ogr.Open(vector)
    lay = ds.GetLayer()
    for i in range(0,lay.GetFeatureCount()):
        f = lay.GetFeature(i)
        geom = f.GetGeometryRef()
        x.append(geom.GetX())
        y.append(geom.GetY())
        fids.append(f.GetFID())
    if len(x) == 0 or len(y) == 0:
        raise GeoAlgorithmExecutionException("Coordinates of given point layer could not be extracted")
    # Build Mahalanobis Distances
    progress.setConsoleInfo("Build Mahalanobis Distances...")
    func.updateProcessing(progress,2,4)
    md = self.MahalanobisDist(x,y)
    # Identify outliers and build new values
    progress.setConsoleInfo("Identify outliers...")
    func.updateProcessing(progress,3,4)
    # Get 3 greatest values indices (negate so argsort yields descending order)
    outliers = (-numpy.array(md)).argsort()[:3]
    # threshold = numpy.mean(md) * mp  # adjust 1.5 accordingly
    # Sanity check: refuse to discard a quarter or more of the data
    if len(outliers) >= (len(fids) / 4):
        raise GeoAlgorithmExecutionException("Too many outliers. Try to increase the multiplier.")
    # Get ids of outliers and select those in the input vectorlayer
    progress.setConsoleInfo("Select %s outliers in the input vectorlayer" % (str(len(outliers))))
    func.updateProcessing(progress,4,4)
    for o in outliers:
        v.select(int( fids[o] ))
def processAlgorithm(self, progress):
    """Fit a logistic-regression SDM from a species point layer (SPECIES)
    and a ';'-separated list of environmental rasters (ENV).

    All rasters must share the point layer's CRS and a common shape.
    Raises GeoAlgorithmExecutionException on CRS or shape mismatch.
    """
    # Vector layer with the species presence points
    vector = self.getParameterValue(self.SPECIES)
    v = Processing.getObject(vector)
    v_crs = v.crs()
    # Environmental layers
    envlayers = self.getParameterValue(self.ENV)
    pred = []     # one 2D numpy array per environmental layer
    shape = None  # common raster shape, fixed by the first layer read
    gt = None     # geotransform of the last layer read (layers share extent)
    for lay in envlayers.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        if r.crs() != v_crs:
            raise GeoAlgorithmExecutionException("All input layers need to have the same projection")
        raster = gdal.Open(str(lay))
        gt = raster.GetGeoTransform()
        array = raster.GetRasterBand(1).ReadAsArray()
        if shape is None:
            shape = array.shape
        elif array.shape != shape:
            raise GeoAlgorithmExecutionException("All input environmental layers need to have the same resolution")
        # BUG FIX: original appended to an undefined name "layers";
        # the list declared above is "pred".
        pred.append(array)
    # Creating response: presence/absence grid rasterized from the points
    work_array = numpy.zeros_like(pred[0])
    for feature in v.getFeatures():
        geom = feature.geometry().asPoint()
        mx = geom.x()
        my = geom.y()
        pp = func.world2Pixel(gt, mx,my)  # map coords -> pixel indices
        x = round(pp[0])
        y = round(pp[1])
        work_array[y,x] = 1
    response = work_array
    # The logistic regression.
    # BUG FIX: "train" was never built in the original. sklearn expects an
    # (n_samples, n_features) matrix and a 1D target, so stack the flattened
    # predictor grids column-wise and flatten the response to match.
    # NOTE(review): this reconstruction of the missing "train" is the standard
    # SDM formulation (one row per cell) — confirm against the author's intent.
    train = numpy.column_stack([a.ravel() for a in pred])
    logit = LogisticRegression(C=1.0,class_weight = 'auto',fit_intercept=True)
    logit.fit(train, response.ravel())
def processAlgorithm(self, progress):
    """Run InputsProcessor on the selected input layer, wiring its Qt
    signals to the Processing progress callbacks.

    Any failure is re-raised as GeoAlgorithmExecutionException.
    """
    only_selected = ProcessingConfig.getSetting(
        ProcessingConfig.USE_SELECTED)
    input_file_path = self.getParameterValue(self.INPUT_LAYER)
    layer = Processing.getObject(input_file_path)
    unique_attribute = self.getParameterValue(self.UNIQUE_ATTRIBUTE)
    project_crs = iface.mapCanvas().mapRenderer().destinationCrs()
    try:
        the_algorithm = InputsProcessor(project_crs)
        # Old-style signal connections: forward algorithm progress/info
        # to the Processing progress object.
        QObject.connect(the_algorithm, SIGNAL('progress_changed'),
                        partial(self.update_progress, progress,
                                the_algorithm))
        QObject.connect(the_algorithm, SIGNAL('update_info'),
                        partial(self.update_info, progress))
        self._run_the_algorithm(the_algorithm, only_selected, layer,
                                unique_attribute)
    except Exception as e:
        # BUG FIX: not every exception type has a ".message" attribute
        # (and it is gone in Python 3); str(e) is always available.
        raise GeoAlgorithmExecutionException(str(e))
def processAlgorithm(self, progress): # Do the stuff point = self.getParameterValue(self.SPECIES) v = Processing.getObject(point) ref_crs = v.crs() # For Comparison with the raster layers x = y = v_id = [] # Iter through and get coordinates and id iter = v.getFeatures() for feature in iter: v_id.append( feature.id() ) geom = feature.geometry().asPoint() x.append(geom.x()) y.append(geom.y()) envlayers = self.getParameterValue(self.ENV) names = [] env = [] shape = None # Load all arrays into an dictonary for lay in envlayers.split(";"): r = Processing.getObject(lay) # QgsRasterLayer object name = str( r.name() ) if r.crs() != ref_crs: raise GeoAlgorithmExecutionException("Input point layer and all environmental layers need to have the same projection.") names.append(name) a = gdal.Open( lay ) array = a.GetRasterBand(1).ReadAsArray() env.append(array) if shape == None: # Fast check if array are unequal shape = array.shape else: if shape != array.shape: raise GeoAlgorithmExecutionException("Input layers need to have the same extent and cellsize.") a = r = None if len( env ) == 1: raise GeoAlgorithmExecutionException("You need more than two layers to calculate the MESS index.") progress.setConsoleInfo("Loaded %s arrays for calculation" % ( str( len( names) ) ) ) func.updateProcessing(progress,1,5) ## Start Calculating MESS # Get Point Values values = self.extractPointValues(envlayers,names,x,y) progress.setConsoleInfo("Extracted Point data values") func.updateProcessing(progress,2,5) # Flattening predictor values r = numpy.array(env) ref = r.reshape((r.shape[0], -1)) # Flattening extracted point values v = numpy.array(values) val = v.reshape((v.shape[0], -1)) func.updateProcessing(progress,2,4) # Iterative Calculation of the metric out = [] # for i in range(0,len(env)): # numpy.apply_along_axis(self.getMESS,1,ref,ref[,i]) result = None # mess<-function(X,V,full=TRUE) # for (i in 1:(dim(E)[2])) { # e<-data.frame(E[,i]) ; v<-V[,i] # r_mess[[i]][]<-apply(X=e, MARGIN=1, 
FUN=messi, v=v) # } # rmess<-r_mess[[1]] # E<-extract(x=r_mess,y=1:ncell(r_mess[[1]])) # rmess[]<-apply(X=E, MARGIN=1, FUN=min) # if(full==TRUE) { # out layerNames(out)<-c(layerNames(X),"mess") # } # if(full==FALSE) out return(out) # } progress.setConsoleInfo("Saving results") func.updateProcessing(progress,5,5) # Create Output a = gdal.Open(envlayers.split(";")[0]) columns = a.RasterXSize rows = a.RasterYSize driver = a.GetDriver() NA = a.GetRasterBand(1).GetNoDataValue() data_type = a.GetRasterBand(1).DataType gt = a.GetGeoTransform() proj = a.GetProjection() output = self.getOutputValue(self.RESULTS) metadata = driver.GetMetadata() if metadata.has_key( gdal.DCAP_CREATE ) and metadata[ gdal.DCAP_CREATE ] == "YES": pass else: progress.setConsoleInfo("Creation of input Fileformat is not supported by gdal. Create GTiff by default.") driver = gdal.GetDriverByName("GTiff") try: outData = driver.Create(output, columns, rows, 1, data_type) except Exception, e: ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"Output file could not be created!")
def processAlgorithm(self, progress):
    """Rasterize species richness: count, per grid cell of the requested
    extent/grain (EXTENT, GRAIN_SIZE), how many distinct species from the
    point layer (VECTOR, species column SPEC_COL) occur there, and write
    the result raster (GRID).

    NOTE(review): the Polygon (range-size) branch is not implemented —
    it only logs a message.
    """
    # Do the stuff
    vector = self.getParameterValue(self.VECTOR)
    v = Processing.getObject(vector)
    scl = self.getParameterValue(self.SPEC_COL)
    ext = self.getParameterValue(self.EXTENT)
    try:
        ext = string.split(ext,",") # split
    except AttributeError: # Extent was empty, raise error
        raise GeoAlgorithmExecutionException("Please set an extent for the generated raster")
    cs = self.getParameterValue(self.GRAIN_SIZE)
    output = self.getOutputValue(self.GRID)
    # Create output layer geometry (north-up geotransform, negative y-step)
    xmin = float(ext[0])
    xmax = float(ext[1])
    ymin = float(ext[2])
    ymax = float(ext[3])
    gt = (xmin,cs,0.0,ymax,0.0,-cs)
    nodata = -9999
    cols = int( abs( (xmax-xmin)/gt[1] ) )
    rows = int( abs( (ymax-ymin)/gt[5] ) )
    fin_array = numpy.zeros((rows,cols)) # Create empty grid
    ds = None
    layers = None  # BUG FIX: pre-bind so the final del cannot raise NameError
    # If vector is a point layer do the following, else calculate for
    # overlapping range sizes
    if v.geometryType() == QGis.Point:
        progress.setConsoleInfo("Using the point layers to calculate Species richness for resulting grid.")
        progress.setConsoleInfo("---")
        # Get the list of distinct species
        noSpecies = func.getUniqueAttributeList( v, scl)
        progress.setConsoleInfo("Processing %s number of different species" % (str(len(noSpecies))) )
        ds = ogr.Open(vector)
        name = ds.GetLayer().GetName()
        proj = ds.GetLayer().GetSpatialRef()
        n = ds.GetLayer().GetFeatureCount()
        k = 1
        for spec in noSpecies:
            # Per-species presence/absence grid, same shape as the result
            work_array = numpy.zeros_like(fin_array)
            # Vector layer subsetting to the specific species
            layers = ds.ExecuteSQL("SELECT * FROM %s WHERE %s = '%s'" % (name, scl, spec) )
            progress.setConsoleInfo("Gridding %s individual points of species %s " % (str(layers.GetFeatureCount()), spec ))
            func.updateProcessing(progress,k,n )
            for i in range(0,layers.GetFeatureCount()):
                f = layers.GetFeature(i)
                geom = f.GetGeometryRef()
                mx,my = geom.GetX(), geom.GetY() # coord in map units
                pp = func.world2Pixel(gt, mx,my)
                x = round(pp[0])
                y = round(pp[1])
                if x < 0 or y < 0 or x >= work_array.shape[1] or y >= work_array.shape[0]:
                    # BUG FIX: typo "exent" -> "extent" in the user message
                    progress.setConsoleInfo("Point %s outside given extent" % (str( f.GetFID() )) )
                else:
                    # Mark the cell at most once per species
                    test = work_array[y,x]
                    if test != 1:
                        work_array[y,x] = 1
            k += 1
            # Add this species' presence grid to the richness total
            fin_array = numpy.add(work_array,fin_array)
    elif v.geometryType() == QGis.Polygon:
        # TODO: rasterization of range polygons is not implemented
        progress.setConsoleInfo("Using the range size polygons to calculate Species richness for resulting grid.")
    if numpy.count_nonzero(fin_array) == 0:
        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"No values were rasterized. Check GeometryType and Vector Projection.")
    # Create output raster
    func.createRaster(output,cols,rows,fin_array,nodata,gt,proj,'GTiff')
    # And free up memory
    del(ds,layers)
def processAlgorithm(self, progress): # Get the location to the maxent jar file maxent = qsdm_settings.maxent() if os.path.basename(maxent) == 'maxent.jar': pass else: maxent = os.path.join(qsdm_settings.maxent(), 'maxent.jar') # If the directory and not the file was chosen # Get location of java, available memory and output path if sys.platform == "win32" or "win64": ex = "java.exe" else: ex = "java" java = os.path.join(qsdm_settings.javaPath(),ex) # the path to java if basic execution fails temp = qsdm_settings.getTEMP()+os.sep+"MAXENT" # folder where reprojected files and such are saved mem = str( qsdm_settings.getMEM() ) # available memory for MAXENT work = qsdm_settings.workPath() # get the name of the folder to save the Maxent model results to env_dir = self.getParameterValue(self.ENV_DIR) progress.setConsoleInfo("Starting Species Layer Preperation") # Check if temp folder exists, otherwise create it if os.path.exists(temp) == False: os.mkdir(temp) ## Species layer preperation # Get the species file to model. 
Take selected species column and coordinates point = self.getParameterValue(self.SPECIES) v = Processing.getObject(point) scl = self.getParameterValue(self.SPEC_COL) # get names of species from input file if v.source().find("type=csv") != -1 : raise GeoAlgorithmExecutionException("Species point layer should be saved as ESRI Shapefile") else: crs = v.crs() if crs.authid() != "EPSG:4326": progress.setConsoleInfo("Species localities not in WGS84, reprojecting...") # Reproject using ogr func.reprojectLatLong(v,temp) # Then open again as QgsVectorLayer out = temp+os.sep+"localities.shp" if (os.path.exists(out) and os.path.isfile(out)) == False: raise GeoAlgorithmExecutionException("Species point layer data could not be reprojected to WGS84") fileInfo = QFileInfo(out) baseName = fileInfo.baseName() v = QgsVectorLayer(out, baseName, "ogr") if v.isValid() != True: # If this didn't work, try to use the Processing way v = Processing.getObject(out) if v.isValid() != True: # Otherwise return error raise GeoAlgorithmExecutionException("No valid layer could be loaded from the reprojection.") # Get Coordinates from point layer and add the species name coord = func.point2table(v,scl) if coord is None: raise GeoAlgorithmExecutionException("Species point layer data could not be extracted") # Convert coordinates and species name to csv, save in temporary Folder # Get Systemwide temporary folder to save the species csv speciesPath = temp + os.sep +"species.csv" species = func.saveToCSV(coord,("Species","Long","Lat"),speciesPath) specieslist = func.getUniqueAttributeList( v, scl, True) progress.setConsoleInfo("Species data successfully prepared for MAXENT") progress.setConsoleInfo("---") ## Maxent execution # Try if JAVA can be executed like this, otherwise take the binary from the given path try: from subprocess import DEVNULL # python 3k except ImportError: DEVNULL = open(os.devnull, 'wb') proc = subprocess.call(['java', '-version'],stdin=subprocess.PIPE, stdout=DEVNULL, 
stderr=subprocess.STDOUT) if proc == 0: start = "java -mx" + str(int(mem)) + "m -jar " else: progress.setConsoleInfo("JAVA could not be run by default. Using link to binary from set JAVA folder.") start = java + " -mx" + str(int(mem)) + "m -jar " # if Windows, encapsule jar file in " if platform.system() == "Windows": start += "\"" + maxent + "\"" else: start += maxent myCommand = start + " samplesfile=" + speciesPath myCommand += " environmentallayers=" + env_dir myCommand += " outputdirectory=" + work # finish the command myCommand += " redoifexists" # add a message progress.setConsoleInfo("#### Attempting to start MAXENT ####") # execute the command loglines = [] loglines.append('MAXENT execution console output') # result = os.system(myCommand) proc = subprocess.Popen( myCommand, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, ).stdout for line in iter(proc.readline, ''): loglines.append(line) ProcessingLog.addToLog(ProcessingLog.LOG_INFO, loglines) # Print all loglines if delivered from MAXENT err = False for line in loglines: progress.setConsoleInfo(line) if line.find("Error") != -1: err = True if err: ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"MAXENT calculations did not succed! Check the Processing Info output for possible error sources.") print "Used command:" + myCommand else: ProcessingLog.addToLog(ProcessingLog.LOG_INFO,"MAXENT modelling finished.") # Finished, Load all resulting layers in QGIS if successfully run # In order to be compatible with processing copy or link them to the Processing output folder #out_r = self.getOutputValue(self.OUT_PRED) #out_t = self.getOutputValue(self.OUT_PRED_RES) p = work + os.sep + "maxentResults.csv" func.tableInQgis(p,",") #load in only generated Prediction for species in specieslist: t = species.replace(" ","_") p = work + os.sep + t + ".asc" func.rasterInQgis(p)
def createTest(text):
    """Generate the source of a unit-test method from a recorded
    ``processing.runalg(...)`` call string and show it in a dialog.

    File-path arguments are replaced by ``basename()``-style fixture calls;
    for each algorithm output, assertions appropriate to its type
    (number/string, raster, vector) are emitted, using the CURRENT result
    values as the expected values.
    """
    s = ""
    # Strip "processing.runalg(" and the trailing ")" and split the arguments
    tokens = text[len("processing.runalg("):-1].split(",")
    cmdname = tokens[0][1:-1]
    methodname = "test_" + cmdname.replace(":", "")
    s += "def " + methodname + "(self):\n"
    alg = Processing.getAlgorithm(cmdname)
    execcommand = "processing.runalg("
    i = 0
    for token in tokens:
        # Keep visible parameters (+1 for the algorithm name itself);
        # everything past that becomes None (outputs go to temp files)
        if i < alg.getVisibleParametersCount() + 1:
            if os.path.exists(token[1:-1]):
                # Replace a concrete file path with a fixture-function call
                token = os.path.basename(token[1:-1])[:-4] + "()"
            execcommand += token + ","
        else:
            execcommand += "None,"
        i += 1
    s += "\toutputs=" + execcommand[:-1] + ")\n"
    # Output tokens are the last len(alg.outputs) arguments
    # NOTE(review): i is not advanced inside the loop below, so with more
    # than one output every iteration reads the same tokens[i] — confirm
    # whether an "i += 1" was lost here.
    i = -1 * len(alg.outputs)
    for out in alg.outputs:
        filename = tokens[i][1:-1]
        if (tokens[i] == str(None)):
            QtGui.QMessageBox.critical(
                None, "Error",
                "Cannot create unit test for that algorithm execution.\nThe output cannot be a temporary file"
            )
            return
        s += "\toutput=outputs['" + out.name + "']\n"
        if isinstance(out, (OutputNumber, OutputString)):
            # NOTE(review): unlike every other emitted line this one lacks a
            # leading "\t", so the generated method body may be mis-indented
            # — confirm upstream.
            s += "self.assertTrue(" + str(out) + ", output.value)\n"
        if isinstance(out, OutputRaster):
            # Hash the current raster contents as the expected checksum
            dataset = gdal.Open(filename, GA_ReadOnly)
            strhash = hash(str(dataset.ReadAsArray(0).tolist()))
            s += "\tself.assertTrue(os.path.isfile(output))\n"
            s += "\tdataset=gdal.Open(output, GA_ReadOnly)\n"
            s += "\tstrhash=hash(str(dataset.ReadAsArray(0).tolist()))\n"
            s += "\tself.assertEqual(strhash," + str(strhash) + ")\n"
        if isinstance(out, OutputVector):
            # Emit field-name/type assertions from the current layer schema
            layer = Processing.getObject(filename)
            fields = layer.pendingFields()
            s += "\tlayer=QGisLayers.getObjectFromUri(output, True)\n"
            s += "\tfields=layer.pendingFields()\n"
            s += "\texpectednames=[" + ",".join(
                ["'" + str(f.name()) + "'" for f in fields]) + "]\n"
            s += "\texpectedtypes=[" + ",".join(
                ["'" + str(f.typeName()) + "'" for f in fields]) + "]\n"
            s += "\tnames=[str(f.name()) for f in fields]\n"
            s += "\ttypes=[str(f.typeName()) for f in fields]\n"
            s += "\tself.assertEqual(expectednames, names)\n"
            s += "\tself.assertEqual(expectedtypes, types)\n"
            features = QGisLayers.features(layer)
            numfeat = len(features)
            s += "\tfeatures=processing.getfeatures(layer)\n"
            s += "\tself.assertEqual(" + str(numfeat) + ", len(features))\n"
            if numfeat > 0:
                # Pin the first feature's attributes and geometry WKT
                feature = features.next()
                attrs = feature.attributes()
                s += "\tfeature=features.next()\n"
                s += "\tattrs=feature.attributes()\n"
                s += "\texpectedvalues=[" + ",".join(
                    ['"' + str(attr) + '"' for attr in attrs]) + "]\n"
                s += "\tvalues=[str(attr) for attr in attrs]\n"
                s += "\tself.assertEqual(expectedvalues, values)\n"
                s += "\twkt='" + str(feature.geometry().exportToWkt()) + "'\n"
                s += "\tself.assertEqual(wkt, str(feature.geometry().exportToWkt()))"
    dlg = ShowTestDialog(s)
    dlg.exec_()
def processAlgorithm(self, progress): # Do the stuff b = self.getParameterValue(self.BASELINE) p = self.getParameterValue(self.PREDICTION) d = self.getParameterValue(self.QUANT) # baseline r = Processing.getObject(b) baseline_name = str(r.name()) a = gdal.Open(b) baseline = a.GetRasterBand(1).ReadAsArray() # Prepare by removing all no-data values from array nodata = a.GetRasterBand(1).GetNoDataValue() if nodata == None: raise GeoAlgorithmExecutionException( "Please classify both layers with a valid nodata-value.") # Take Values for output from baseline raster columns = a.RasterXSize rows = a.RasterYSize driver = a.GetDriver() data_type = a.GetRasterBand(1).DataType gt = a.GetGeoTransform() proj = a.GetProjection() output = self.getOutputValue(self.RESULTS) a = None # Prediction r = Processing.getObject(p) prediction_name = str(r.name()) a = gdal.Open(b) prediction = a.GetRasterBand(1).ReadAsArray() # Prepare by removing all no-data values from array NA = a.GetRasterBand(1).GetNoDataValue() if NA == None: raise GeoAlgorithmExecutionException( "Please classify both layers with a valid nodata-value.") a = None # Compare shapes if baseline.shape != prediction.shape: raise GeoAlgorithmExecutionException( "Input layers need to have the same extent and cellsize.") progress.setConsoleInfo("Loaded layers for calculation") func.updateProcessing(progress, 1, 4) progress.setConsoleInfo("Starting processing") func.updateProcessing(progress, 2, 4) # Index values # 1 = Range contraction # 2 = No Change # 3 = Range Expansion med = numpy.median(baseline[baseline != nodata]) low = numpy.percentile(baseline[baseline != nodata], 50 - d) high = numpy.percentile(baseline[baseline != nodata], 50 + d) res = numpy.zeros_like(baseline) res[numpy.where( numpy.logical_and(prediction >= low, prediction <= high))] = 2 res[prediction < low] = 1 res[prediction > high] = 3 res[baseline == nodata] = nodata # fill the rest with nodata result = numpy.copy(res) progress.setConsoleInfo("Saving 
results") func.updateProcessing(progress, 3, 4) # Create Output metadata = driver.GetMetadata() if metadata.has_key( gdal.DCAP_CREATE) and metadata[gdal.DCAP_CREATE] == "YES": pass else: progress.setConsoleInfo( "Creation of input Fileformat is not supported by gdal. Create GTiff by default." ) driver = gdal.GetDriverByName("GTiff") try: outData = driver.Create(output, columns, rows, 1, data_type) except Exception, e: ProcessingLog.addToLog(ProcessingLog.LOG_ERROR, "Output file could not be created!")
def processAlgorithm(self, progress): ## Parameter preperation ## # Get the location to the maxent jar file maxent = qsdm_settings.maxent() if os.path.basename(maxent) == 'maxent.jar': pass else: maxent = os.path.join(qsdm_settings.maxent(), 'maxent.jar') # If the directory and not the file was chosen # Get location of java, available memory and output path if sys.platform == "win32" or "win64": ex = "java.exe" else: ex = "java" java = os.path.join(qsdm_settings.javaPath(),ex) # the path to java if basic execution fails mem = str( qsdm_settings.getMEM() ) # available memory for MAXENT work = qsdm_settings.workPath() # get the name of the folder to save the Maxent model results to temp = qsdm_settings.getTEMP()+os.sep+"MAXENT" # folder where reprojected files and such are saved progress.setConsoleInfo("Starting Parameter and File Preperation") # Check if temp folder exists, otherwise create it if os.path.exists(temp) == False: os.mkdir(temp) # Get optional parameters param = self.getParameterValue(self.PARAM) o = Processing.getObject(param) if type(o)==QgsVectorLayer and o.isValid() and os.path.splitext(o.source())[1]==".csv": progress.setConsoleInfo("Using optional parameter file for MAXENT") param = dict() # Format Parameters to dictionary dp = o.dataProvider() for feat in dp.getFeatures(): geom = feat.geometry() com = feat["command"] val = feat["value"] param[com] = val # and make maxent invisible to the modeller param["visible"] = False else: progress.setConsoleInfo("No valid optional Parameter file detected") # Use default parameters param = dict() # per default write a separate maxent results file for each species param["perspeciesresults"] = False # and make maxent invisible to the modeller param["visible"] = False # Progress updater: n = len(self.getParameterValue(self.ENV).split(";"))+5 func.updateProcessing(progress,1,n,"Loaded Parameters.") ## Species layer # Get the species file to model. 
Take selected species column and coordinates point = self.getParameterValue(self.SPECIES) v = Processing.getObject(point) crs = v.crs() if crs.authid() != "EPSG:4326": progress.setConsoleInfo("Species localities not in WGS84, reprojecting...") # Reproject using ogr func.reprojectLatLong(v,temp) # Then open again as QgsVectorLayer out = temp+os.sep+"localities.shp" if (os.path.exists(out) and os.path.isfile(out)) == False: raise GeoAlgorithmExecutionException("Species point layer data could not be reprojected to WGS84") fileInfo = QFileInfo(out) baseName = fileInfo.baseName() v = QgsVectorLayer(out, baseName, "ogr") if v.isValid() != True: # If this didn't work, try to use the Processing way v = Processing.getObject(out) if v.isValid() != True: # Otherwise return error raise GeoAlgorithmExecutionException("No valid layer could be loaded from the reprojection.") # get names of species from input file scl = self.getParameterValue(self.SPEC_COL) # Get Coordinates from point layer and add the species name coord = func.point2table(v,scl) if coord is None: raise GeoAlgorithmExecutionException("Species point layer data could not be extracted") # Convert coordinates and species name to csv, save in temporary Folder # Get Systemwide temporary folder to save the species csv speciesPath = temp + os.sep +"species.csv" species = func.saveToCSV(coord,("Species","Long","Lat"),speciesPath) specieslist = func.getUniqueAttributeList( v, scl,True) progress.setConsoleInfo("Species data successfully prepared for MAXENT") progress.setConsoleInfo("---") func.updateProcessing(progress,2,n,"Loaded Species data.") ## Environmental Layers # get the selected environmental layers and prepare them for MAXENT progress.setConsoleInfo("Starting preparing the environmental layers") envlayers = self.getParameterValue(self.ENV) env = dict() layers = [] # Project to WGS84 if necessary for lay in envlayers.split(";"): r = Processing.getObject(lay) # QgsRasterLayer object name = str( r.name() ) crs = 
r.crs() if crs.authid() != "EPSG:4326": # Reproject layer progress.setConsoleInfo("Originial Layer %s not in WGS84, reprojecting..." % (name)) r = func.reprojectRasterLatLong(r,temp,True) if r == False or r.isValid()==False : ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"Projecting "+name+" to WGS84 failed!") layers.append( r.source() ) if len(layers) == 0: raise GeoAlgorithmExecutionException("Environmental Layers could not be reprojected!") else: func.updateProcessing(progress,3,n,"Reprojection finished.") # Check the extent of all those layers and unify if necessary # Check if necessary -> Do the raster layer have differing extents func.updateProcessing(progress,4,n) uni = [] app = False # Which approach should be used? if len(layers) > 1 and func.unificationNecessary(layers): progress.setConsoleInfo("Input layers have different extents, intersecting...") # The credits of the following approach go to Yury Ryabov - http://ssrebelious.blogspot.com if app == False: # get coordinates of corners for the final raster fin_coordinates = func.finCoordinates(layers) r = gdal.Open(str( layers[0] ) ) main_geo_transform = r.GetGeoTransform() proj = r.GetProjection() no_data = r.GetRasterBand(1).GetNoDataValue() if not no_data: no_data = -9999 for lay in layers: raster = gdal.Open(str(lay)) name = os.path.splitext(os.path.basename(lay))[0] out = temp + os.sep + name + 'warp.tif' result = func.ExtendRaster(raster, fin_coordinates, out, main_geo_transform, proj, no_data) if result: raster = None if os.path.exists(out): # Add output to uni uni.append(out) else: raise GeoAlgorithmExecutionException("Unified layer could not be saved.") else: raise GeoAlgorithmExecutionException("Layers could not be unified. Please set do this manually.") else: # FIXME: Faster Approach down below. Currently not yet working # 1. Build largest extent and geotransform # big_coord has left, top, right, bottom of dataset's bounds in geospatial coordinates. 
fin_coordinates, main_geo_transform, interp = func.CreateMainGeotransform(layers) # get coordinates and geotransform of corners for the final raster # set number of columns and rows for raster main_cols = (fin_coordinates[2] - fin_coordinates[0]) / abs(main_geo_transform[1]) main_rows = (fin_coordinates[3] - fin_coordinates[1]) / abs(main_geo_transform[5]) progress.setConsoleInfo("Creating new raster based on greatest extent with %s columns and %s rows" % (str(main_cols),str(main_rows))) #FIXME: Check coordinates big_coord = [main_geo_transform[0], main_geo_transform[3], main_geo_transform[0] + (main_geo_transform[1] * main_rows), main_geo_transform[3] + (main_geo_transform[5] * main_cols)] # 2. Loop through rasters and Intersect them export the biggest for lay in layers: name = os.path.splitext(os.path.basename(lay))[0] r = gdal.Open(str( lay ) ) src_p = r.GetProjection() if interp: # Interpolate to biggest cellsize progress.setConsoleInfo("Resolution of Environmental Layers is different. Bilinear interpolation to the coarsest cellsize = xy(%s,%s)" % (abs(main_geo_transform[1]),abs(main_geo_transform[5]))) #FIXME: Maybe interpolate to nearest neighbor if categorical r = func.gridInterpolation(r,temp,main_geo_transform,main_cols,main_rows,src_p, 'Bilinear',False) wide = abs( r.RasterXSize ) high = abs( r.RasterYSize ) geotransform = r.GetGeoTransform() nodata = r.GetRasterBand(1).GetNoDataValue() # should be -9999 if projected correctly if nodata == None: nodata = -9999 # target has left, top, right, bottom of dataset's bounds in geospatial coordinates. 
target = [geotransform[0], geotransform[3], geotransform[0] + (geotransform[1] * wide), geotransform[3] + (geotransform[5] * high)] #Intersection intersection = [max(big_coord[0], target[0]), min(big_coord[1], target[1]), min(big_coord[2], target[2]), max(big_coord[3], target[3])] # Convert to pixels p1 = func.world2Pixel(geotransform,intersection[0],intersection[1]) p2 = func.world2Pixel(geotransform,intersection[2],intersection[3]) band = r.GetRasterBand(1) result = band.ReadAsArray(p1[0], p1[1], p2[0] - p1[0], p2[1] - p1[1], p2[0] - p1[0], p2[1] - p1[1]) # Write to new raster output = temp + os.sep + name + 'warp.tif' func.createRaster(output,abs(main_geo_transform[1]),abs(main_geo_transform[5]),result,nodata,main_geo_transform,src_p,'GTiff') if os.path.exists(output): # Add output to uni uni.append(output) else: raise GeoAlgorithmExecutionException("Environmental Layers could not be prepared for MAXENT") return None else: uni = layers if len(uni) == 0 or len(uni) != len(layers): raise GeoAlgorithmExecutionException("Environmental Layers with unified extent could not be generated!") else: progress.setConsoleInfo("Environmental Layer successfully unified.") func.updateProcessing(progress,5,n,"Unified environmental Layers.") # Format to asc if necessary for lay in uni: r = Processing.getObject(lay) # QgsRasterLayer object name = os.path.basename(str( r.name() )) out = temp + os.sep + name + '.asc' progress.setConsoleInfo("Convert environmental layers to ESRI ASC format...") # Format to asc proc = func.raster2ASC(r,out) if proc and os.path.isfile(out): env[name] = out else: ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"Converting/Projecting "+name+" to ESRI asc format failed!") func.updateProcessing(progress,6,n,"Formated to ASC.") # Check if anything is in env, worked if len(env) == 0: raise GeoAlgorithmExecutionException("Environmental Layers could not be prepared for MAXENT") # Check if the number of the original selected layers is equal to if 
len(envlayers.split(";")) != len(env): ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"Successfully prepared environmental layers "+str( env.keys() ) ) raise GeoAlgorithmExecutionException("Not all environmental Layers could be prepared for MAXENT. Check Processing Log.") # Test if species csv exists if os.path.exists(speciesPath) == False: raise GeoAlgorithmExecutionException("Species point layer could not be prepared for MAXENT") ## create the maxent command progress.setConsoleInfo("---") progress.setConsoleInfo("All fine so far. Attempting to build MAXENT execution command...") # Try if JAVA can be executed like this, otherwise take the binary from the given path try: from subprocess import DEVNULL # python 3k except ImportError: DEVNULL = open(os.devnull, 'wb') proc = subprocess.call(['java', '-version'],stdin=subprocess.PIPE, stdout=DEVNULL, stderr=subprocess.STDOUT) if proc == 0: start = "java -mx" + str(int(mem)) + "m -jar " else: progress.setConsoleInfo("JAVA could not be run by default. 
Using link to binary from set JAVA folder.") start = java + " -mx" + str(int(mem)) + "m -jar " # if Windows, encapsule jar file in " if platform.system() == "Windows": start += "\"" + maxent + "\"" else: start += maxent myCommand = start + " samplesfile=" + speciesPath myCommand += " environmentallayers=" + temp # Toggle all selected Layers myCommand += " togglelayertype=" for i in range(0,len(env.keys())): myCommand += os.path.splitext( env.keys()[i] )[0] if i is not len(env.keys())-1: myCommand += "," myCommand += " outputdirectory=" + work # Parse parameters into command for option in param.iteritems(): myCommand += " " + option[0] + "=" + str( option[1] ).lower() # finish the command myCommand += " redoifexists autorun" # add a message progress.setConsoleInfo("#### Attempting to start MAXENT ####") func.updateProcessing(progress,7,n) # execute the command loglines = [] loglines.append('MAXENT execution console output') # result = os.system(myCommand) proc = subprocess.Popen( myCommand, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, ).stdout for line in iter(proc.readline, ''): loglines.append(line) ProcessingLog.addToLog(ProcessingLog.LOG_INFO, loglines) # Print all loglines if delivered from MAXENT err = False for line in loglines: progress.setConsoleInfo(line) if line.find("Error") != -1: err = True if err: ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"MAXENT calculations did not succed! 
Check the Processing Info output for possible error sources.") print "Used command:" + myCommand else: ProcessingLog.addToLog(ProcessingLog.LOG_INFO,"MAXENT modelling finished.") func.updateProcessing(progress,n,n) # Finished, Load all resulting layers in QGIS if successfully run # In order to be compatible with processing copy or link them to the Processing output folder #out_r = self.getOutputValue(self.OUT_PRED) #out_t = self.getOutputValue(self.OUT_PRED_RES) p = work + os.sep + "maxentResults.csv" func.tableInQgis(p,",") #load in only generated Prediction for species in specieslist: t = species.replace(" ","_") p = work + os.sep + t + ".asc" func.rasterInQgis(p) ## Styling and grouping # Freeze the canvas canvas = QgsMapCanvas() canvas.freeze(True) #Add a new group and all new layers to it groups = iface.legendInterface().groups() if ('MAXENT' in groups ) == False: idx = iface.legendInterface().addGroup( "MAXENT" ) groups = iface.legendInterface().groups() layerMap = QgsMapLayerRegistry.instance().mapLayers() for lyr in layerMap.itervalues(): if lyr.name() in specieslist: # Move them to the maxent group iface.legendInterface().moveLayer( lyr, groups.index("MAXENT") ) # Style the output lyr.setDrawingStyle("SingleBandPseudoColor") # The band of classLayer classLyrBnd = 1 # Color list for ramp clrLst = [ QgsColorRampShader.ColorRampItem(0, QColor(224,224,224),"0"), # Grey QgsColorRampShader.ColorRampItem(0.01, QColor(0,0,153),"> 0.01"), # darkblue QgsColorRampShader.ColorRampItem(0.2, QColor(153,204,255),"0.2"), # lightblue QgsColorRampShader.ColorRampItem(0.35,QColor(153,255,153),"0.35"), # lightgreen QgsColorRampShader.ColorRampItem(0.5, QColor(0,153,0),"0.5"), # green QgsColorRampShader.ColorRampItem(0.65, QColor(255,255,0),"0.65"), # yellow QgsColorRampShader.ColorRampItem(0.75, QColor(255,128,0),"0.75"), # orange QgsColorRampShader.ColorRampItem(0.85, QColor(255,0,0),">0.85") ] # red #Create the shader lyrShdr = QgsRasterShader() #Create the color ramp 
function clrFnctn = QgsColorRampShader() clrFnctn.setColorRampType(QgsColorRampShader.INTERPOLATED) clrFnctn.setColorRampItemList(clrLst) #Set the raster shader function lyrShdr.setRasterShaderFunction(clrFnctn) #Create the renderer lyrRndr = QgsSingleBandPseudoColorRenderer(lyr.dataProvider(), classLyrBnd, lyrShdr) #Apply the renderer to classLayer lyr.setRenderer(lyrRndr) #refresh legend if hasattr(lyr, "setCacheImage"): lyr.setCacheImage(None) lyr.triggerRepaint() iface.legendInterface().refreshLayerSymbology(lyr) #Finally move the Maxent results to the group as well lyr = func.getLayerByName( "MaxentResults" ) iface.legendInterface().moveLayer( lyr, groups.index("MAXENT") ) canvas.freeze(False) canvas.refresh()
def processAlgorithm(self, progress):
    """Prepare data for a MESS (Multivariate Environmental Similarity
    Surface) calculation.

    Loads the species point layer and all environmental rasters, checks
    that projections and grid shapes agree, extracts raster values at
    the occurrence points and sets up the output raster.

    NOTE(review): the MESS metric itself is not implemented yet -- the
    core computation exists only as commented-out R code and ``result``
    stays None.
    """
    # Species point layer; its CRS is the reference all rasters must match
    point = self.getParameterValue(self.SPECIES)
    v = Processing.getObject(point)
    ref_crs = v.crs()
    # BUG FIX: the original wrote ``x = y = v_id = []`` which binds all
    # three names to the SAME list, so every append went into one list.
    x = []
    y = []
    v_id = []
    # Collect coordinates and feature ids of all occurrence points
    for feature in v.getFeatures():
        v_id.append(feature.id())
        geom = feature.geometry().asPoint()
        x.append(geom.x())
        y.append(geom.y())
    envlayers = self.getParameterValue(self.ENV)
    names = []
    env = []
    shape = None
    # Load every environmental layer as a numpy array
    for lay in envlayers.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        name = str(r.name())
        if r.crs() != ref_crs:
            raise GeoAlgorithmExecutionException(
                "Input point layer and all environmental layers need to have the same projection."
            )
        names.append(name)
        a = gdal.Open(lay)
        array = a.GetRasterBand(1).ReadAsArray()
        env.append(array)
        if shape is None:
            shape = array.shape  # first layer defines the reference shape
        elif shape != array.shape:
            raise GeoAlgorithmExecutionException(
                "Input layers need to have the same extent and cellsize."
            )
    a = r = None
    # FIX: original tested ``len(env) == 1`` which let an empty selection
    # slip through.
    if len(env) < 2:
        raise GeoAlgorithmExecutionException(
            "You need more than two layers to calculate the MESS index.")
    progress.setConsoleInfo("Loaded %s arrays for calculation" %
                            (str(len(names))))
    func.updateProcessing(progress, 1, 5)
    ## Start Calculating MESS
    # Raster values at the occurrence points, one row per layer
    values = self.extractPointValues(envlayers, names, x, y)
    progress.setConsoleInfo("Extracted Point data values")
    func.updateProcessing(progress, 2, 5)
    # Flatten predictors to (n_layers, n_cells)
    r = numpy.array(env)
    ref = r.reshape((r.shape[0], -1))
    # Flatten extracted point values likewise (kept in its own name so the
    # vector layer ``v`` is not clobbered as in the original)
    v_ar = numpy.array(values)
    val = v_ar.reshape((v_ar.shape[0], -1))
    func.updateProcessing(progress, 2, 4)
    # Iterative Calculation of the metric
    out = []
    result = None
    # TODO(review): implement the per-layer MESS computation; the original
    # author's R reference code:
    # mess<-function(X,V,full=TRUE)
    # for (i in 1:(dim(E)[2])) {
    #   e<-data.frame(E[,i]) ; v<-V[,i]
    #   r_mess[[i]][]<-apply(X=e, MARGIN=1, FUN=messi, v=v)
    # }
    # rmess<-r_mess[[1]]
    # E<-extract(x=r_mess,y=1:ncell(r_mess[[1]]))
    # rmess[]<-apply(X=E, MARGIN=1, FUN=min)
    progress.setConsoleInfo("Saving results")
    func.updateProcessing(progress, 5, 5)
    # Create output raster using the metadata of the first input layer
    a = gdal.Open(envlayers.split(";")[0])
    columns = a.RasterXSize
    rows = a.RasterYSize
    driver = a.GetDriver()
    NA = a.GetRasterBand(1).GetNoDataValue()
    data_type = a.GetRasterBand(1).DataType
    gt = a.GetGeoTransform()
    proj = a.GetProjection()
    output = self.getOutputValue(self.RESULTS)
    metadata = driver.GetMetadata()
    # Fall back to GTiff when the source driver cannot create new files
    if not (gdal.DCAP_CREATE in metadata
            and metadata[gdal.DCAP_CREATE] == "YES"):
        progress.setConsoleInfo(
            "Creation of input Fileformat is not supported by gdal. Create GTiff by default."
        )
        driver = gdal.GetDriverByName("GTiff")
    try:
        outData = driver.Create(output, columns, rows, 1, data_type)
    except Exception as e:
        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,
                               "Output file could not be created!")
def processAlgorithm(self, progress):
    """Compute per-cell Mahalanobis-distance ratios between a reference
    raster stack and a projection raster stack (novel-environment check).

    Both stacks must share projection, extent and cellsize and contain the
    same number of layers. The output (self.RESULT) holds the ratio of each
    projection cell's squared Mahalanobis distance to the maximum squared
    distance observed in the reference data.
    """
    reference = self.getParameterValue(self.REFERENCE)
    projection = self.getParameterValue(self.PROJECTION)
    ref_ar = []
    proj_ar = []
    crs = None
    shape = None
    # Load all reference arrays, enforcing one common CRS and grid shape
    for lay in reference.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        if crs is None:
            crs = r.crs()
        elif r.crs() != crs:
            raise GeoAlgorithmExecutionException(
                "All input layers need to have the same projection.")
        a = gdal.Open(lay)
        array = a.GetRasterBand(1).ReadAsArray()
        ref_ar.append(array)
        if shape is None:
            shape = array.shape
        elif shape != array.shape:
            raise GeoAlgorithmExecutionException(
                "Input layers need to have the same extent and cellsize."
            )
    a = r = None
    # Load all projection arrays with the same checks
    for lay in projection.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        if crs is None:
            crs = r.crs()
        elif r.crs() != crs:
            raise GeoAlgorithmExecutionException(
                "All input layers need to have the same projection.")
        a = gdal.Open(lay)
        array = a.GetRasterBand(1).ReadAsArray()
        proj_ar.append(array)
        if shape is None:
            shape = array.shape
        elif shape != array.shape:
            raise GeoAlgorithmExecutionException(
                "Input layers need to have the same extent and cellsize and nodata value."
            )
    a = r = None
    if len(ref_ar) != len(proj_ar):
        raise GeoAlgorithmExecutionException(
            "Need the same number of reference layers as projection layers."
        )
    progress.setConsoleInfo("Successfully loaded layers for calculation")
    func.updateProcessing(progress, 1, 5)
    # Take output metadata from the first reference layer
    a = gdal.Open(reference.split(";")[0])
    columns = a.RasterXSize
    rows = a.RasterYSize
    driver = a.GetDriver()
    data_type = a.GetRasterBand(1).DataType
    gt = a.GetGeoTransform()
    # Renamed from 'proj': the original clobbered the projection WKT with
    # the flattened projection array below.
    out_proj = a.GetProjection()
    nodata = a.GetRasterBand(1).GetNoDataValue()
    output = self.getOutputValue(self.RESULT)
    a = None
    # FIX: removed leftover debug code that overwrote the user-selected
    # inputs with hard-coded local file paths and nodata = -9999.
    # Flatten each stack to (n_layers, n_cells)
    progress.setConsoleInfo("Flattening Input layers to a single shape")
    func.updateProcessing(progress, 2, 5)
    r = numpy.array(ref_ar)  # Reference
    ref = r.reshape((r.shape[0], -1))
    r = numpy.array(proj_ar)  # Projection
    proj = r.reshape((r.shape[0], -1))
    # Average and covariance of the reference data
    progress.setConsoleInfo("Calculating Average and Covariance")
    func.updateProcessing(progress, 3, 5)
    ref = ref.astype(float)
    ref[ref == nodata] = numpy.nan  # treat nodata cells as NaN
    ref_avg = numpy.apply_along_axis(numpy.nanmean, 1, ref)
    m = numpy.ma.make_mask(numpy.isnan(ref))
    mask = numpy.ma.MaskedArray(ref, m)  # mask kicks out the NaN values
    ref_cov = numpy.ma.cov(ref, mask, rowvar=0, allow_masked=True)
    numpy.ma.reshape(ref_cov, (len(ref_avg), -1))
    try:
        inv_covariance_xy = numpy.linalg.inv(ref_cov.data)
    except numpy.linalg.LinAlgError:
        # No inverse exists for a singular covariance matrix
        self.progress.setConsoleInfo(
            "Singular matrix. Calculating (Moore-Penrose) pseudo-inverse matrix instead."
        )
        #inv_covariance_xy = numpy.linalg.pinv(ref_cov)
        raise GeoAlgorithmExecutionException(
            "Singular non-invertable covariance matrix. Looking for solutions for this."
        )
    # Mahalanobis distance ratios
    progress.setConsoleInfo(
        "Calculating Mahalanobis ratios between raster cells")
    func.updateProcessing(progress, 4, 5)
    try:
        from scipy.spatial.distance import mahalanobis
        # FIX: results now live in locals; the original assigned attributes
        # on an undefined name 'mah' (NameError) and used numpy.exp2 (2**x)
        # where squaring (D^2) was intended.
        # NOTE(review): scipy's mahalanobis expects 1-D vectors -- confirm
        # the intended per-cell shapes before relying on this.
        mah_ref = mahalanobis(ref, ref_avg, inv_covariance_xy)
        mah_ref = numpy.square(mah_ref)  # D^2
        mah_proj = mahalanobis(proj, ref_avg, inv_covariance_xy)
        mah_proj = numpy.square(mah_proj)  # D^2
        mah_max = numpy.max(mah_ref[numpy.isfinite(mah_ref)])
        # FIX: original used R-style 'result <- ...', which Python parses
        # as a comparison and discards -- 'result' was never assigned.
        result = numpy.divide(mah_proj, mah_max)
    except ImportError:
        # FIX: the previous manual fallback referenced undefined variables
        # ('y') and returned early, skipping the save step; fail explicitly
        # until a correct fallback is implemented.
        raise GeoAlgorithmExecutionException(
            "Scipy is required to calculate the Mahalanobis distance.")
    progress.setConsoleInfo("Saving results")
    func.updateProcessing(progress, 5, 5)
    # Create output raster; fall back to GTiff when necessary
    metadata = driver.GetMetadata()
    if not (gdal.DCAP_CREATE in metadata
            and metadata[gdal.DCAP_CREATE] == "YES"):
        progress.setConsoleInfo(
            "Creation of input Fileformat is not supported by gdal. Create GTiff by default."
        )
        driver = gdal.GetDriverByName("GTiff")
    try:
        outData = driver.Create(output, columns, rows, 1, data_type)
    except Exception as e:
        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,
                               "Output file could not be created!")
def processAlgorithm(self, progress):
    """Compute Mahalanobis-distance ratios between a reference raster
    stack and a projection raster stack.

    NOTE(review): this appears to be an unformatted duplicate of the
    Mahalanobis implementation elsewhere in this file; it still contains
    leftover debug code and several broken statements (flagged inline).
    """
    # Do the stuff, julie
    reference = self.getParameterValue(self.REFERENCE)
    projection = self.getParameterValue(self.PROJECTION)
    ref_ar = []
    proj_ar = []
    crs = None
    shape = None
    # Load all reference arrays into an dictonary
    for lay in reference.split(";"):
        r = Processing.getObject(lay) # QgsRasterLayer object
        if crs == None:
            crs = r.crs()
        else:
            if r.crs() != crs:
                raise GeoAlgorithmExecutionException("All input layers need to have the same projection.")
        a = gdal.Open( lay )
        array = a.GetRasterBand(1).ReadAsArray()
        ref_ar.append(array)
        if shape == None: # Fast check if array are unequal
            shape = array.shape
        else:
            if shape != array.shape:
                raise GeoAlgorithmExecutionException("Input layers need to have the same extent and cellsize.")
    a = r = None
    # Load all projection arrays into an dictonary
    for lay in projection.split(";"):
        r = Processing.getObject(lay) # QgsRasterLayer object
        if crs == None:
            crs = r.crs()
        else:
            if r.crs() != crs:
                raise GeoAlgorithmExecutionException("All input layers need to have the same projection.")
        a = gdal.Open( lay )
        array = a.GetRasterBand(1).ReadAsArray()
        proj_ar.append(array)
        if shape == None: # Fast check if array are unequal
            shape = array.shape
        else:
            if shape != array.shape:
                raise GeoAlgorithmExecutionException("Input layers need to have the same extent and cellsize and nodata value.")
    a = r = None
    if len( ref_ar ) != len( proj_ar ):
        raise GeoAlgorithmExecutionException("Need the same number of reference layers as projection layers.")
    progress.setConsoleInfo("Successfully loaded layers for calculation" )
    func.updateProcessing(progress,1,5)
    # Take Values for output from the first reference layer
    a = gdal.Open( reference.split(";")[0] )
    columns = a.RasterXSize
    rows = a.RasterYSize
    driver = a.GetDriver()
    data_type = a.GetRasterBand(1).DataType
    gt = a.GetGeoTransform()
    proj = a.GetProjection()
    nodata = a.GetRasterBand(1).GetNoDataValue()
    output = self.getOutputValue(self.RESULT)
    a = None
    # NOTE(review): leftover debug code -- the hard-coded local paths below
    # overwrite the user-selected inputs and the nodata value, and calling
    # .ravel() would only work because ref_ar/proj_ar were just replaced by
    # single arrays. Remove before release.
    # test
    a = "/home/martin/.qgis2/python/plugins/QSDM/sampledata/environment/cur/t_mean.asc"
    a = "/home/martin/.qgis2/python/plugins/QSDM/sampledata/environment/cur/elev_mean.asc"
    r = gdal.Open(a)
    ref_ar = r.GetRasterBand(1).ReadAsArray()
    proj_ar = r.GetRasterBand(1).ReadAsArray()
    nodata = -9999
    ref = ref_ar.ravel()
    proj = proj_ar.ravel()
    # Reshaping to a single shape
    progress.setConsoleInfo("Flattening Input layers to a single shape" )
    func.updateProcessing(progress,2,5)
    r = numpy.array(ref_ar)# Reference
    ref = r.reshape((r.shape[0], -1))
    r = numpy.array(proj_ar)# Projection
    # NOTE(review): this reassignment clobbers the projection WKT stored in
    # 'proj' above.
    proj = r.reshape((r.shape[0], -1))
    # Calculating Average and Covariance
    progress.setConsoleInfo("Calculating Average and Covariance" )
    func.updateProcessing(progress,3,5)
    ref = ref.astype(float)
    ref[ref == nodata] = numpy.nan # Replace values of nodata with NaN
    ref_avg = numpy.apply_along_axis(numpy.nanmean,1,ref)
    m = numpy.ma.make_mask((numpy.isnan(ref)))
    mask = numpy.ma.MaskedArray(ref,m) # use a mask to kickout the nan values
    ref_cov = numpy.ma.cov(ref,mask,rowvar=0,allow_masked=True)
    numpy.ma.reshape(ref_cov, (len(ref_avg),-1))
    try:
        inv_covariance_xy = numpy.linalg.inv(ref_cov.data)
    except numpy.linalg.LinAlgError:
        # There is no linear inverse matrix for given points
        self.progress.setConsoleInfo("Singular matrix. Calculating (Moore-Penrose) pseudo-inverse matrix instead.")
        #inv_covariance_xy = numpy.linalg.pinv(ref_cov)
        raise GeoAlgorithmExecutionException("Singular non-invertable covariance matrix. Looking for solutions for this.")
    # Calculate Mahalanobis distance ratios
    progress.setConsoleInfo("Calculating Mahalanobis ratios between raster cells")
    func.updateProcessing(progress,4,5)
    try:
        from scipy.spatial.distance import mahalanobis
        # NOTE(review): 'mah' is never defined -- these attribute
        # assignments raise NameError at runtime; also numpy.exp2 computes
        # 2**x, not the square the comments suggest.
        # Calculate for ref arrays
        mah.ref = mahalanobis(ref,ref_avg,inv_covariance_xy)
        mah.ref = numpy.exp2(mah.ref) # Calculate D^2 - squared
        # Calculate for proj arrays
        mah.proj = mahalanobis(proj,ref_avg,inv_covariance_xy)
        mah.proj = numpy.exp2(mah.proj) # Calculate D^2 - squared
        # Ratios
        mah.max = numpy.max( mah.ref[numpy.isfinite( mah.ref )] )
        # NOTE(review): R-style '<-' parses as a comparison in Python, so
        # 'result' is never actually assigned here.
        result <- numpy.divide(mah.proj,mah.max )
    except ImportError:
        self.progress.setConsoleInfo("Scipy not found. Calculating mahalanobis manually.")
        # Center each value by the mean
        x = ref_ar[0]
        xy_mean = ref_avg
        x_diff = numpy.array([x_i - xy_mean[0] for x_i in x])
        # NOTE(review): 'y' is undefined in this branch (NameError).
        y_diff = numpy.array([y_i - xy_mean[1] for y_i in y])
        diff_xy = numpy.transpose([x_diff, y_diff])
        # Formula for MahalanobisDist
        md = []
        for i in range(len(diff_xy)):
            md.append( numpy.sqrt(numpy.dot(numpy.dot(numpy.transpose(diff_xy[i]),inv_covariance_xy),diff_xy[i])) )
        # NOTE(review): this early return skips the saving step below.
        return md
    progress.setConsoleInfo("Saving results")
    func.updateProcessing(progress,5,5)
    # Create Output
    metadata = driver.GetMetadata()
    if metadata.has_key( gdal.DCAP_CREATE ) and metadata[ gdal.DCAP_CREATE ] == "YES":
        pass
    else:
        progress.setConsoleInfo("Creation of input Fileformat is not supported by gdal. Create GTiff by default.")
        driver = gdal.GetDriverByName("GTiff")
    try:
        outData = driver.Create(output, columns, rows, 1, data_type)
    except Exception, e:
        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"Output file could not be created!")
def processAlgorithm(self, progress):
    """Calculate pairwise niche-overlap statistics between rasters.

    Loads every selected raster (nodata cells zeroed), computes the chosen
    overlap metric for every unique pair of layers and writes the results
    to a CSV table (self.RESULTS).
    """
    metric = self.m[self.getParameterValue(self.METRIC)]
    envlayers = self.getParameterValue(self.ENV)
    names = []
    env = []
    shape = None
    # Load all arrays, enforcing a common extent/cellsize
    for lay in envlayers.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        name = str(r.name())
        names.append(name)
        a = gdal.Open(lay)
        array = a.GetRasterBand(1).ReadAsArray()
        # Neutralise nodata cells so they do not contribute to the metric
        NA = a.GetRasterBand(1).GetNoDataValue()
        if NA is not None:
            array[array == NA] = 0
        env.append(array)
        if shape is None:
            shape = array.shape  # first layer defines the reference shape
        elif shape != array.shape:
            raise GeoAlgorithmExecutionException(
                "Input layers need to have the same extent and cellsize."
            )
    a = r = None
    # FIX: the original tested ``len(env) == 1``, which let an empty
    # selection (len 0) slip past this guard.
    if len(env) < 2:
        raise GeoAlgorithmExecutionException(
            "You need at least two layers to calculate overlap statistics."
        )
    progress.setConsoleInfo("Loaded %s arrays for calculation" %
                            (str(len(names))))
    func.updateProcessing(progress, 1, 3)
    results = []
    func.updateProcessing(progress, 2, 3)
    if len(env) > 2:
        # All unique pairs (j, k) with j < k
        for j in range(0, len(env)):
            for k in range(j + 1, len(env)):
                progress.setConsoleInfo(
                    "Calculating Overlap of layers %s with %s" %
                    (names[j], names[k]))
                r = self.Overlap(metric, env[j], env[k])
                results.append((names[j], names[k], metric, r))
    else:
        # Exactly two input layers
        r = self.Overlap(metric, env[0], env[1])
        results.append((names[0], names[1], metric, r))
    progress.setConsoleInfo("Saving results")
    func.updateProcessing(progress, 3, 3)
    output = self.getOutputValue(self.RESULTS)
    titles = ['Layer1', 'Layer2', 'Metric', 'Overlap']
    # Save Output
    func.saveToCSV(results, titles, output)
def processAlgorithm(self, progress):
    """Classify range change between a baseline and a prediction raster.

    Output cell codes: 1 = range contraction, 2 = no change,
    3 = range expansion, derived from the (50-d, 50+d) percentile band of
    the baseline values. Nodata is carried through from the baseline.
    """
    b = self.getParameterValue(self.BASELINE)
    p = self.getParameterValue(self.PREDICTION)
    d = self.getParameterValue(self.QUANT)
    # Baseline raster
    r = Processing.getObject(b)
    baseline_name = str(r.name())
    a = gdal.Open(b)
    baseline = a.GetRasterBand(1).ReadAsArray()
    nodata = a.GetRasterBand(1).GetNoDataValue()
    if nodata is None:
        raise GeoAlgorithmExecutionException(
            "Please classify both layers with a valid nodata-value.")
    # Take output metadata from the baseline raster
    columns = a.RasterXSize
    rows = a.RasterYSize
    driver = a.GetDriver()
    data_type = a.GetRasterBand(1).DataType
    gt = a.GetGeoTransform()
    proj = a.GetProjection()
    output = self.getOutputValue(self.RESULTS)
    a = None
    # Prediction raster
    r = Processing.getObject(p)
    prediction_name = str(r.name())
    # BUG FIX: the original re-opened the baseline path (gdal.Open(b))
    # here, so 'prediction' was identical to 'baseline'.
    a = gdal.Open(p)
    prediction = a.GetRasterBand(1).ReadAsArray()
    NA = a.GetRasterBand(1).GetNoDataValue()
    if NA is None:
        raise GeoAlgorithmExecutionException(
            "Please classify both layers with a valid nodata-value.")
    a = None
    # Both rasters must share the same grid
    if baseline.shape != prediction.shape:
        raise GeoAlgorithmExecutionException(
            "Input layers need to have the same extent and cellsize.")
    progress.setConsoleInfo("Loaded layers for calculation")
    func.updateProcessing(progress, 1, 4)
    progress.setConsoleInfo("Starting processing")
    func.updateProcessing(progress, 2, 4)
    # Index values:
    # 1 = Range contraction, 2 = No Change, 3 = Range Expansion
    valid = baseline[baseline != nodata]
    low = numpy.percentile(valid, 50 - d)
    high = numpy.percentile(valid, 50 + d)
    res = numpy.zeros_like(baseline)
    res[numpy.where(numpy.logical_and(prediction >= low,
                                      prediction <= high))] = 2
    res[prediction < low] = 1
    res[prediction > high] = 3
    res[baseline == nodata] = nodata  # fill the rest with nodata
    result = numpy.copy(res)
    progress.setConsoleInfo("Saving results")
    func.updateProcessing(progress, 3, 4)
    # Create output raster; fall back to GTiff when necessary
    metadata = driver.GetMetadata()
    if not (gdal.DCAP_CREATE in metadata
            and metadata[gdal.DCAP_CREATE] == "YES"):
        progress.setConsoleInfo(
            "Creation of input Fileformat is not supported by gdal. Create GTiff by default."
        )
        driver = gdal.GetDriverByName("GTiff")
    try:
        outData = driver.Create(output, columns, rows, 1, data_type)
    except Exception as e:
        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,
                               "Output file could not be created!")
def processAlgorithm(self, progress):
    """Extend ("unify") all selected rasters to a common maximal extent.

    Each layer is written to <outdir>/<name>_warp.tif, loaded into QGIS
    and moved into a 'UnifiedLayers' legend group.
    """
    # Do the thing, Julie
    envlayers = self.getParameterValue(self.ENV)
    no_data = float(self.getParameterValue(self.NA))
    # FIX: the original converted to str() first, turning a missing
    # parameter into the literal string "None", so the tempdir fallback
    # never fired.
    outdir = self.getParameterValue(self.OUTDIR)
    if outdir is None or str(outdir) == "":
        import tempfile
        output = tempfile.gettempdir()
    else:
        output = str(outdir)
    # Collect the source paths of the selected raster layers
    layers = []
    for lay in envlayers.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        layers.append(r.source())
    # FIX: original tested len(layers) == 0 although the error message
    # (correctly) demands at least two layers.
    if len(layers) < 2:
        raise GeoAlgorithmExecutionException(
            "This function needs at least two input layers!")
    func.updateProcessing(progress, 1, 3)
    progress.setConsoleInfo("Starting unification")
    # Corner coordinates of the joint (maximal) extent
    fin_coordinates = func.finCoordinates(layers)
    r = gdal.Open(str(layers[0]))
    main_geo_transform = r.GetGeoTransform()
    proj = r.GetProjection()
    func.updateProcessing(progress, 2, 3)
    output_list = []
    outnames = []
    for lay in layers:
        raster = gdal.Open(str(lay))
        name = os.path.splitext(os.path.basename(lay))[0]
        out = output + os.sep + name + '_warp.tif'
        result = func.ExtendRaster(raster, fin_coordinates, out,
                                   main_geo_transform, proj, no_data)
        # NOTE(review): when ExtendRaster returns falsy the layer is
        # silently skipped -- behaviour kept from the original.
        if result:
            raster = None
            if not os.path.exists(out):
                raise GeoAlgorithmExecutionException(
                    "Unified layer could not be saved.")
            output_list.append(out)
            outnames.append(name + '_warp')
    func.updateProcessing(progress, 3, 3, "Finished")
    # Load the results into QGIS inside a dedicated legend group
    for o in output_list:
        func.rasterInQgis(o)
    canvas = QgsMapCanvas()
    canvas.freeze(True)  # avoid repeated redraws while regrouping
    groups = iface.legendInterface().groups()
    if 'UnifiedLayers' not in groups:
        idx = iface.legendInterface().addGroup("UnifiedLayers")
        groups = iface.legendInterface().groups()
    layerMap = QgsMapLayerRegistry.instance().mapLayers()
    for lyr in layerMap.itervalues():
        if lyr.name() in outnames:
            # Move the freshly loaded layers into the new group
            iface.legendInterface().moveLayer(lyr,
                                              groups.index("UnifiedLayers"))
    canvas.freeze(False)
    canvas.refresh()
def processAlgorithm(self, progress):
    """Rasterize species occurrences or ranges into a biodiversity grid.

    Methods: 'Species Richness', 'Weighted Endemism' and 'Corrected
    Weighted Endemism'. Point layers and polygon (range) layers are
    handled in separate branches; the grid is written as GTiff
    (self.GRID).
    """
    vector = self.getParameterValue(self.VECTOR)
    v = Processing.getObject(vector)
    scl = self.getParameterValue(self.SPEC_COL)
    what = self.m[self.getParameterValue(self.METHOD)]
    ext = self.getParameterValue(self.EXTENT)
    try:
        ext = string.split(ext, ",")  # split "xmin,xmax,ymin,ymax"
    except AttributeError:
        # Extent was empty
        raise GeoAlgorithmExecutionException(
            "Please set an extent for the generated raster")
    cs = self.getParameterValue(self.GRAIN_SIZE)
    output = self.getOutputValue(self.GRID)
    # Build the empty output grid from the requested extent and cellsize
    xmin = float(ext[0])
    xmax = float(ext[1])
    ymin = float(ext[2])
    ymax = float(ext[3])
    gt = (xmin, cs, 0.0, ymax, 0.0, -cs)
    nodata = -9999
    cols = int(abs((xmax - xmin) / gt[1]))
    rows = int(abs((ymax - ymin) / gt[5]))
    try:
        fin_array = numpy.zeros((rows, cols))
    except MemoryError:
        raise GeoAlgorithmExecutionException(
            "MemoryError: Resolution is too fine. Please choose a higher value.")
    # Point layers: presence/absence per cell; polygons: rasterized ranges
    if v.geometryType() == QGis.Point:
        progress.setConsoleInfo(
            "Using the point layers to calculate %s for resulting grid." %
            (what))
        progress.setConsoleInfo("---")
        # Pad the grid with a one-cell border to capture points that fall
        # just outside the extent.
        # NOTE(review): points are still written at the unshifted [y, x]
        # index, and the output keeps the original geotransform with the
        # enlarged rows/cols -- confirm the intended border handling.
        heightFP, widthFP = fin_array.shape
        withBorders = numpy.zeros((heightFP + 2, widthFP + 2))
        withBorders[1:heightFP + 1, 1:widthFP + 1] = fin_array
        fin_array = withBorders
        rows, cols = fin_array.shape
        noSpecies = func.getUniqueAttributeList(v, scl)
        progress.setConsoleInfo(
            "Processing %s number of different species" %
            (str(len(noSpecies))))
        ds = ogr.Open(vector)
        name = ds.GetLayer().GetName()
        proj = ds.GetLayer().GetSpatialRef()
        proj = proj.ExportToWkt()
        n = ds.GetLayer().GetFeatureCount()
        arrayDict = dict()
        k = 1
        for spec in noSpecies:
            # Fresh presence grid for this species
            work_array = numpy.zeros_like(fin_array)
            v_id = []
            # Subset the vector layer to this species
            request = QgsFeatureRequest()
            request.setFilterExpression(scl + " = " + "'" + spec + "'")
            for feature in v.getFeatures(request):
                v_id.append(feature.id())
                geom = feature.geometry().asPoint()
                mx = geom.x()
                my = geom.y()
                pp = func.world2Pixel(gt, mx, my)
                x = round(pp[0])
                y = round(pp[1])
                if x < 0 or y < 0 or x >= work_array.shape[1] or y >= work_array.shape[0]:
                    # BUG FIX: original used f.GetFID(), but 'f' is
                    # undefined in this branch (NameError).
                    progress.setConsoleInfo(
                        "Point %s outside given exent" %
                        (str(feature.id())))
                else:
                    work_array[y, x] = 1  # mark presence
            arrayDict[spec] = work_array  # keep per-species grid
            k += 1
        if what == 'Species Richness':
            # SR = K (the total number of species in a grid cell)
            for spec_ar in arrayDict.itervalues():
                fin_array = fin_array + spec_ar
        elif what == 'Weighted Endemism':
            # WE = sum(1/C); C = number of cells a species occurs in
            for spec_ar in arrayDict.itervalues():
                ncell = func.count_nonzero(spec_ar)
                out = numpy.divide(spec_ar.astype(float), ncell)
                fin_array = fin_array + out
        elif what == 'Corrected Weighted Endemism':
            # CWE = WE / K (K = species richness of the cell)
            nspec = numpy.zeros_like(fin_array).astype(float)
            for spec_ar in arrayDict.itervalues():
                ncell = func.count_nonzero(spec_ar)
                out = numpy.divide(spec_ar.astype(float), ncell)
                fin_array = fin_array + out  # accumulate WE
                nspec = nspec + spec_ar      # accumulate richness K
            fin_array = numpy.divide(fin_array, nspec)
    elif v.geometryType() == QGis.Polygon:
        progress.setConsoleInfo(
            "Using the range size polygons to calculate %s for resulting grid." %
            (what))
        progress.setConsoleInfo("---")
        noSpecies = func.getUniqueAttributeList(v, scl)
        progress.setConsoleInfo(
            "Processing %s number of different species" %
            (str(len(noSpecies))))
        ds = ogr.Open(vector)
        name = ds.GetLayer().GetName()
        proj = ds.GetLayer().GetSpatialRef().ExportToWkt()
        n = ds.GetLayer().GetFeatureCount()
        k = 1
        for spec in noSpecies:
            work_array = numpy.zeros_like(fin_array)
            layers = ds.ExecuteSQL("SELECT * FROM %s WHERE %s = '%s'" %
                                   (name, scl, spec))
            progress.setConsoleInfo("Gridding range of species %s " % (spec))
            # BUG FIX: original compared str(count) with the int 0, which
            # is always False, so this guard never fired.
            if layers.GetFeatureCount() == 0:
                raise GeoAlgorithmExecutionException(
                    "Species could not be queried from the point layer.")
            func.updateProcessing(progress, k, n)
            work2 = numpy.copy(work_array)  # temporary working array
            for i in range(0, layers.GetFeatureCount()):
                f = layers.GetFeature(i)
                geom = f.GetGeometryRef()
                res = self.clipArray(layers, gt, geom, work_array)
                # BUG FIX: test for failure BEFORE adding; the original
                # added first, raising TypeError when res is None.
                if res is None:
                    raise GeoAlgorithmExecutionException(
                        "Feature %s of species %s could not be rasterized. Possibly because it is a multipolygon. Split data beforehand." % (str(i), spec))
                work2 = work2 + res
            work_array = work2  # Set back
            # (removed: a check of an undefined 'err' variable that always
            # raised NameError)
            if work_array.shape != fin_array.shape:
                raise GeoAlgorithmExecutionException(
                    "Rasterized grids could not be merged together.")
            if what == 'Species Richness':
                fin_array = fin_array + work_array
            elif what == 'Weighted Endemism':
                # WE = sum(1/C); C = number of grid cells per endemic
                # TODO(review): unimplemented -- the original only computed
                # ncell and discarded it, so the result stays zero.
                ncell = func.count_nonzero(work_array)
            elif what == 'Corrected Weighted Endemism':
                # CWE = WE/K
                # TODO(review): unimplemented in the original.
                pass
    if func.count_nonzero(fin_array) == 0:
        ProcessingLog.addToLog(
            ProcessingLog.LOG_ERROR,
            "No values were rasterized. Check GeometryType and Vector Projection.")
    # Write the final grid
    func.createRaster(output, cols, rows, fin_array, nodata, gt, proj,
                      'GTiff')
def processAlgorithm(self, progress):
    """Fit a one-class SVM species distribution model and write outputs.

    Loads the species occurrence point layer and the stack of environmental
    raster coverages, extracts the predictor values at the occurrence
    points, standardizes them, fits ``svm.OneClassSVM`` and writes the
    resulting prediction grid (OUT_PRED) plus an AUC score table
    (OUT_PRED_RES).

    :param progress: Processing progress/console interface.
    :raises GeoAlgorithmExecutionException: on mismatched layer
        projections/resolutions, non-square cells, or missing no-data value.
    """
    # Set up the data as a sklearn Bunch (basically just a dictionary
    # with attribute-style access)
    data = Bunch()
    # Vector layer with the species occurrence points
    vector = self.getParameterValue(self.SPECIES)
    v = Processing.getObject(vector)
    v_crs = v.crs()
    # Environmental layers (";"-separated list of raster sources)
    envlayers = self.getParameterValue(self.ENV)
    if func.unificationNecessary(envlayers.split(";")):
        raise GeoAlgorithmExecutionException("All input environmental layers need to have the same resolution and extent. Use the Unify tool beforehand")
    #TODO: Enable option to do this automatically
    progress.setConsoleInfo("Loading Coverage Data")
    # Check Projection and Cellsize of every input raster
    for lay in envlayers.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        if r.crs() != v_crs:
            raise GeoAlgorithmExecutionException("All input layers need to have the same projection")
        if round(r.rasterUnitsPerPixelX()) != round(r.rasterUnitsPerPixelY()):
            raise GeoAlgorithmExecutionException("Grid Cell size values are not equal. Please be sure that grid cells are squares.")
    # Set coverage parameters from the first layer (all layers share
    # extent/resolution after the unification check above)
    r = Processing.getObject(envlayers.split(";")[0])  # QgsRasterLayer object
    ex = r.extent()
    data["grid_size"] = r.rasterUnitsPerPixelX()
    data["Nx"] = r.width()
    data["Ny"] = r.height()
    data["x_left_lower_corner"] = ex.xMinimum()
    data["y_left_lower_corner"] = ex.yMinimum()
    # Load in Coverage values
    coverage = []
    for lay in envlayers.split(";"):
        raster = gdal.Open(str(lay))
        if raster.RasterCount > 1:
            progress.setConsoleInfo("Warning: Multiple bands for layer detected. Using only first band.")
        array = raster.GetRasterBand(1).ReadAsArray()
        NA = raster.GetRasterBand(1).GetNoDataValue()
        # FIX: identity comparison instead of "== None"
        if NA is None:
            raise GeoAlgorithmExecutionException("Warning: Raster layer has no no-data value. Please specify a no-data value for this dataset.")
        else:
            array[array == NA] = -9999  # Replace nodata-values of array with -9999
        coverage.append(array)
    data["coverages"] = numpy.array(coverage)  # Load all the coverage values into the bunch
    # Setup parameters for output prediction
    a = gdal.Open(envlayers.split(";")[0])
    columns = a.RasterXSize
    rows = a.RasterYSize
    driver = a.GetDriver()
    NA = -9999
    gt = a.GetGeoTransform()
    proj = a.GetProjection()
    output = self.getOutputValue(self.OUT_PRED)
    # Set up the data grid
    xgrid, ygrid = construct_grids(data)
    # The grid in x,y coordinates
    X, Y = numpy.meshgrid(xgrid, ygrid[::-1])
    # Background points (grid coordinates) for evaluation.
    # Fixed seed -> reproducible background sample.
    numpy.random.seed(100)
    background_points = numpy.c_[
        numpy.random.randint(low=0, high=data.Ny, size=10000),
        numpy.random.randint(low=0, high=data.Nx, size=10000)].T
    # Use the first coverage to separate valid (land) cells from no-data.
    # FIXME: Assuming that all predictors have a similar distribution.
    # Might be violated
    land_reference = data.coverages[0]
    progress.setConsoleInfo("Loading Occurence Data and coverage")
    # Creating response: one (x, y) map coordinate per occurrence point
    train = []
    for feature in v.getFeatures():
        geom = feature.geometry().asPoint()
        mx = geom.x()
        my = geom.y()
        train.append((mx, my))
    data["train"] = numpy.array(train)  # Add to bunch as training dataset
    # Create species bunch holding the coverage values at the points
    sp_Bunch = Bunch(name="Species")
    points = dict(train=data.train)
    # .items() works on both Python 2 and 3 (original used iteritems)
    for label, pts in points.items():
        # Determine coverage values for each of the training points.
        # FIX: index the coordinate columns (pts[:, 0] / pts[:, 1]); the
        # original "pts[0]" took only the first point. Also store on
        # sp_Bunch — the original name "bunch" was undefined (NameError).
        ix = numpy.searchsorted(xgrid, pts[:, 0])
        iy = numpy.searchsorted(ygrid, pts[:, 1])
        sp_Bunch['cov_%s' % label] = data.coverages[:, -iy, ix].T
    progress.setConsoleInfo("Finished loading coverage data of environmental layers")
    # Starting modelling
    progress.setConsoleInfo("Finished preparing the data for the analysis")
    progress.setConsoleInfo("----")
    progress.setConsoleInfo("Starting Modelling with support of sklearn")
    # Standardize features
    #TODO: Enable different or no Standardization methods
    # FIX: the key written above is "cov_train"; "sp_Bunch.cov" never existed
    mean = sp_Bunch.cov_train.mean(axis=0)
    std = sp_Bunch.cov_train.std(axis=0)
    train_cover_std = (sp_Bunch.cov_train - mean) / std
    # Fit OneClassSVM
    progress.setConsoleInfo("Fitting Support Vector Machine")
    # TODO: Allow the user to vary the input
    clf = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.5)
    clf.fit(train_cover_std)
    progress.setConsoleInfo("Fitting done")
    # Predict species distribution using the training data
    Z = numpy.ones((data.Ny, data.Nx), dtype=numpy.float64)
    # We'll predict only for the land points.
    idx = numpy.where(land_reference > -9999)
    coverages_land = data.coverages[:, idx[0], idx[1]].T
    pred = clf.decision_function((coverages_land - mean) / std)[:, 0]
    Z *= pred.min()
    Z[idx[0], idx[1]] = pred
    levels = numpy.linspace(Z.min(), Z.max(), 25)
    Z[land_reference == -9999] = -9999
    result = Z  # save the final results scores
    # Compute AUC w.r.t. background points
    pred_background = Z[background_points[0], background_points[1]]
    # FIX: "species.cov_test" was an undefined name and no held-out test
    # set exists; evaluate against the training presences instead.
    # TODO: add a real train/test split for an unbiased AUC.
    pred_test = clf.decision_function((sp_Bunch.cov_train - mean) / std)[:, 0]
    scores = numpy.r_[pred_test, pred_background]
    y = numpy.r_[numpy.ones(pred_test.shape), numpy.zeros(pred_background.shape)]
    fpr, tpr, thresholds = metrics.roc_curve(y, scores)
    roc_auc = metrics.auc(fpr, tpr)  # Area under the ROC curve
    # TODO: Evaluate the availability of other metrics to compute on
    # (average mean error, etc.. )
    # Create Output Prediction File (CSV with the AUC score).
    # FIX: use a separate variable — the original overwrote "output" here
    # and then created the prediction raster at the CSV path below.
    output_res = self.getOutputValue(self.OUT_PRED_RES)
    titles = ['AUC']
    res_pred = [roc_auc]
    # Save Output
    func.saveToCSV(res_pred, titles, output_res)
    # Create Output for resulting prediction
    metadata = driver.GetMetadata()
    # FIX: "in" instead of the Python-2-only dict.has_key()
    if gdal.DCAP_CREATE in metadata and metadata[gdal.DCAP_CREATE] == "YES":
        pass
    else:
        progress.setConsoleInfo("Output creation of input Fileformat is not supported by gdal. Create GTiff by default.")
        driver = gdal.GetDriverByName("GTiff")
    # FIX: GDAL Driver.Create expects a GDT_* type code, not a numpy dtype
    # object ("result" is float64)
    data_type = gdal.GDT_Float64
    try:
        outData = driver.Create(output, columns, rows, 1, data_type)
    except Exception as e:  # FIX: Py3-compatible except syntax
        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR, "Output file could not be created!")
def processAlgorithm(self, progress):
    """Fit a one-class SVM species distribution model and write outputs.

    Loads the species occurrence point layer and the stack of environmental
    raster coverages, extracts predictor values at the occurrence points,
    standardizes them, fits ``svm.OneClassSVM`` and writes a prediction
    grid (OUT_PRED) plus an AUC score table (OUT_PRED_RES).

    NOTE(review): this is a duplicate of the implementation above and
    shares its defects — several names used below look undefined
    (``bunch``, ``sp_Bunch.cov``, ``species.cov_test``); flagged inline.
    """
    # Set up the data as sklearn bunch (basically just a dictionary with
    # specific attributes)
    data = Bunch()
    # Vector layer with the species occurrence points
    vector = self.getParameterValue(self.SPECIES)
    v = Processing.getObject(vector)
    v_crs = v.crs()
    # Environmental layers (";"-separated list of raster sources)
    envlayers = self.getParameterValue(self.ENV)
    if func.unificationNecessary(envlayers.split(";")):
        raise GeoAlgorithmExecutionException("All input environmental layers need to have the same resolution and extent. Use the Unify tool beforehand")
    #TODO: Enable option to do this automatically
    progress.setConsoleInfo("Loading Coverage Data")
    # Check Projection and Cellsize of every input raster
    for lay in envlayers.split(";"):
        r = Processing.getObject(lay)  # QgsRasterLayer object
        if r.crs() != v_crs:
            raise GeoAlgorithmExecutionException("All input layers need to have the same projection")
        if round(r.rasterUnitsPerPixelX()) != round(r.rasterUnitsPerPixelY()):
            raise GeoAlgorithmExecutionException("Grid Cell size values are not equal. Please be sure that grid cells are squares.")
    # Set coverage parameters from the first layer (all layers share
    # extent/resolution after the unification check above)
    r = Processing.getObject(envlayers.split(";")[0])  # QgsRasterLayer object
    ex = r.extent()
    data["grid_size"] = r.rasterUnitsPerPixelX()
    data["Nx"] = r.width()
    data["Ny"] = r.height()
    data["x_left_lower_corner"] = ex.xMinimum()
    data["y_left_lower_corner"] = ex.yMinimum()
    # Load in Coverage values
    coverage = []
    for lay in envlayers.split(";"):
        raster = gdal.Open(str(lay))
        if raster.RasterCount > 1:
            progress.setConsoleInfo("Warning: Multiple bands for layer detected. Using only first band.")
        array = raster.GetRasterBand(1).ReadAsArray()
        NA = raster.GetRasterBand(1).GetNoDataValue()
        if NA == None:
            raise GeoAlgorithmExecutionException("Warning: Raster layer has no no-data value. Please specify a no-data value for this dataset.")
        else:
            array[array==NA] = -9999 # Replace nodata-values of array with -9999
        coverage.append(array)
    data["coverages"] = numpy.array( coverage ) # Load all the coverage values into the bunch
    # Setup parameters for output prediction
    a = gdal.Open(envlayers.split(";")[0])
    columns = a.RasterXSize
    rows = a.RasterYSize
    driver = a.GetDriver()
    NA = -9999
    gt = a.GetGeoTransform()
    proj = a.GetProjection()
    output = self.getOutputValue(self.OUT_PRED)
    # Set up the data grid
    xgrid, ygrid = construct_grids(data)
    # The grid in x,y coordinates
    X, Y = numpy.meshgrid(xgrid, ygrid[::-1])
    # background points (grid coordinates) for evaluation; fixed seed
    # keeps the background sample reproducible
    numpy.random.seed(100)
    background_points = numpy.c_[numpy.random.randint(low=0, high=data.Ny, size=10000),
                                 numpy.random.randint(low=0, high=data.Nx, size=10000)].T
    # We'll make use of the fact that coverages[6] has measurements at all
    # land points. This will help us decide between land and water.
    # FIXME: Assuming that all predictors have a similar distribution.
    # Might be violated
    land_reference = data.coverages[0]
    progress.setConsoleInfo("Loading Occurence Data and coverage")
    # Creating response: one (x, y) map coordinate per occurrence point
    train = []
    for feature in v.getFeatures():
        geom = feature.geometry().asPoint()
        mx = geom.x()
        my = geom.y()
        train.append((mx,my))
    data["train"] = numpy.array(train) # Add to bunch as training dataset
    # create species bunch
    sp_Bunch = Bunch(name="Species")
    points = dict(train=data.train)
    for label, pts in points.iteritems():
        # determine coverage values for each of the training & testing points
        # NOTE(review): pts is an (n, 2) array, so pts[0] is the FIRST POINT,
        # not the x column — presumably pts[:, 0] / pts[:, 1] were intended;
        # verify against the sklearn species-distribution example.
        ix = numpy.searchsorted(xgrid, pts[0])
        iy = numpy.searchsorted(ygrid, pts[1])
        # NOTE(review): "bunch" is not defined anywhere in this function —
        # looks like it should be sp_Bunch (NameError at runtime).
        bunch['cov_%s' % label] = data.coverages[:, -iy, ix].T
    progress.setConsoleInfo("Finished loading coverage data of environmental layers")
    # Starting modelling
    progress.setConsoleInfo("Finished preparing the data for the analysis")
    progress.setConsoleInfo("----")
    progress.setConsoleInfo("Starting Modelling with support of sklearn")
    # Standardize features
    #TODO: Enable different or no Standardization methods
    # NOTE(review): the key written above is 'cov_train'; "sp_Bunch.cov"
    # does not exist (AttributeError at runtime).
    mean = sp_Bunch.cov.mean(axis=0)
    std = sp_Bunch.cov.std(axis=0)
    train_cover_std = (sp_Bunch.cov - mean) / std
    # Fit OneClassSVM
    progress.setConsoleInfo("Fitting Support Vector Machine")
    # TODO: Allow the user to vary the input
    clf = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.5)
    clf.fit(train_cover_std)
    progress.setConsoleInfo("Fitting done")
    # Predict species distribution using the training data
    Z = numpy.ones((data.Ny, data.Nx), dtype=numpy.float64)
    # We'll predict only for the land points.
    idx = numpy.where(land_reference > -9999)
    coverages_land = data.coverages[:, idx[0], idx[1]].T
    pred = clf.decision_function((coverages_land - mean) / std)[:, 0]
    Z *= pred.min()
    Z[idx[0], idx[1]] = pred
    levels = numpy.linspace(Z.min(), Z.max(), 25)
    Z[land_reference == -9999] = -9999
    result = Z # save the final results scores
    # Compute AUC w.r.t. background points
    pred_background = Z[background_points[0], background_points[1]]
    # NOTE(review): "species" is undefined and no test split is ever
    # created — this line would raise NameError; confirm intended source
    # of the held-out presences.
    pred_test = clf.decision_function((species.cov_test - mean) / std)[:, 0]
    scores = numpy.r_[pred_test, pred_background]
    y = numpy.r_[numpy.ones(pred_test.shape), numpy.zeros(pred_background.shape)]
    fpr, tpr, thresholds = metrics.roc_curve(y, scores)
    roc_auc = metrics.auc(fpr, tpr) # Area under the ROC curve
    # TODO: Evaluate the availability of other metrics to compute on
    # (average mean error, etc.. )
    # Create Output Prediction File
    # NOTE(review): "output" (raster path) is overwritten with the CSV path
    # here, yet driver.Create() below uses it — confirm intended paths.
    output = self.getOutputValue(self.OUT_PRED_RES)
    titles = ['AUC']
    res_pred = [roc_auc]
    # Save Output
    func.saveToCSV(res_pred, titles, output)
    # Create Output for resulting prediction
    metadata = driver.GetMetadata()
    # NOTE(review): dict.has_key and "except Exception, e" are Python-2-only.
    if metadata.has_key( gdal.DCAP_CREATE ) and metadata[ gdal.DCAP_CREATE ] == "YES":
        pass
    else:
        progress.setConsoleInfo("Output creation of input Fileformat is not supported by gdal. Create GTiff by default.")
        driver = gdal.GetDriverByName("GTiff")
    # NOTE(review): GDAL Create expects a GDT_* code; a numpy dtype here
    # presumably raises TypeError, which the except below then swallows.
    data_type = result.dtype
    try:
        outData = driver.Create(output, columns, rows, 1, data_type)
    except Exception, e:
        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR,"Output file could not be created!")
def createTest(text):
    """Generate unit-test source code for a recorded algorithm call.

    Parses a ``processing.runalg(...)`` command string, builds the body of
    a ``test_<algname>`` method that re-runs the algorithm and asserts the
    recorded output values (number/string values, raster checksums, vector
    fields/features/geometry), and shows the result in a ShowTestDialog.

    :param text: the full ``processing.runalg(...)`` call as a string.
    """
    s = ""
    tokens = text[len("processing.runalg("):-1].split(",")
    cmdname = tokens[0][1:-1]
    methodname = "test_" + cmdname.replace(":", "")
    s += "def " + methodname + "(self):\n"
    alg = Processing.getAlgorithm(cmdname)
    execcommand = "processing.runalg("
    i = 0
    for token in tokens:
        if i < alg.getVisibleParametersCount() + 1:
            # File arguments are replaced by a call to a data method named
            # after the file (minus its 4-character extension)
            if os.path.exists(token[1:-1]):
                token = os.path.basename(token[1:-1])[:-4] + "()"
            execcommand += token + ","
        else:
            execcommand += "None,"
        i += 1
    s += "\toutputs=" + execcommand[:-1] + ")\n"
    # The output values are the trailing tokens of the recorded call
    i = -1 * len(alg.outputs)
    for out in alg.outputs:
        filename = tokens[i][1:-1]
        if tokens[i] == str(None):
            # Temporary outputs cannot be re-checked later — abort
            QtGui.QMessageBox.critical(None, "Error", "Cannot create unit test for that algorithm execution.\nThe output cannot be a temporary file")
            return
        s += "\toutput=outputs['" + out.name + "']\n"
        if isinstance(out, (OutputNumber, OutputString)):
            # FIX: the missing leading "\t" broke the indentation of the
            # generated test method
            s += "\tself.assertTrue(" + str(out) + ", output.value)\n"
        if isinstance(out, OutputRaster):
            dataset = gdal.Open(filename, GA_ReadOnly)
            # Hash of the full raster content pins the expected output
            strhash = hash(str(dataset.ReadAsArray(0).tolist()))
            s += "\tself.assertTrue(os.path.isfile(output))\n"
            s += "\tdataset=gdal.Open(output, GA_ReadOnly)\n"
            s += "\tstrhash=hash(str(dataset.ReadAsArray(0).tolist()))\n"
            s += "\tself.assertEqual(strhash," + str(strhash) + ")\n"
        if isinstance(out, OutputVector):
            layer = Processing.getObject(filename)
            fields = layer.pendingFields()
            s += "\tlayer=QGisLayers.getObjectFromUri(output, True)\n"
            s += "\tfields=layer.pendingFields()\n"
            s += "\texpectednames=[" + ",".join(["'" + str(f.name()) + "'" for f in fields]) + "]\n"
            s += "\texpectedtypes=[" + ",".join(["'" + str(f.typeName()) + "'" for f in fields]) + "]\n"
            s += "\tnames=[str(f.name()) for f in fields]\n"
            s += "\ttypes=[str(f.typeName()) for f in fields]\n"
            s += "\tself.assertEqual(expectednames, names)\n"
            s += "\tself.assertEqual(expectedtypes, types)\n"
            features = QGisLayers.features(layer)
            numfeat = len(features)
            s += "\tfeatures=processing.getfeatures(layer)\n"
            s += "\tself.assertEqual(" + str(numfeat) + ", len(features))\n"
            if numfeat > 0:
                # Pin attributes and geometry of the first feature
                feature = features.next()
                attrs = feature.attributes()
                s += "\tfeature=features.next()\n"
                s += "\tattrs=feature.attributes()\n"
                s += "\texpectedvalues=[" + ",".join(['"' + str(attr) + '"' for attr in attrs]) + "]\n"
                s += "\tvalues=[str(attr) for attr in attrs]\n"
                s += "\tself.assertEqual(expectedvalues, values)\n"
                s += "\twkt='" + str(feature.geometry().exportToWkt()) + "'\n"
                s += "\tself.assertEqual(wkt, str(feature.geometry().exportToWkt()))"
        # FIX: advance to the next output token; without this every output
        # re-used the same recorded filename
        i += 1
    dlg = ShowTestDialog(s)
    dlg.exec_()
def processAlgorithm(self, progress):
    """Grid species occurrences into a species-richness raster.

    Builds an empty grid over the user-given extent at GRAIN_SIZE cell
    size, then for each distinct species (values of SPEC_COL) marks the
    cells containing at least one of its points as 1 and sums the
    per-species presence grids into a richness raster written to GRID.

    :param progress: Processing progress/console interface.
    :raises GeoAlgorithmExecutionException: when no extent is given.
    """
    # Do the stuff
    vector = self.getParameterValue(self.VECTOR)
    v = Processing.getObject(vector)
    scl = self.getParameterValue(self.SPEC_COL)
    ext = self.getParameterValue(self.EXTENT)
    try:
        ext = string.split(ext, ",")  # "xmin,xmax,ymin,ymax"
    except AttributeError:
        # Extent was empty, raise error
        raise GeoAlgorithmExecutionException("Please set an extent for the generated raster")
    cs = self.getParameterValue(self.GRAIN_SIZE)
    output = self.getOutputValue(self.GRID)
    # Create output layer
    xmin = float(ext[0])
    xmax = float(ext[1])
    ymin = float(ext[2])
    ymax = float(ext[3])
    # North-up geotransform: origin at the top-left corner, negative y step
    gt = (xmin, cs, 0.0, ymax, 0.0, -cs)
    nodata = -9999
    cols = int(abs((xmax - xmin) / gt[1]))
    rows = int(abs((ymax - ymin) / gt[5]))
    fin_array = numpy.zeros((rows, cols))  # Create empty grid
    # If vector is a point layer do the following, else calculate for
    # overlapping range sizes
    if v.geometryType() == QGis.Point:
        progress.setConsoleInfo("Using the point layers to calculate Species richness for resulting grid.")
        progress.setConsoleInfo("---")
        # Get the number of species
        noSpecies = func.getUniqueAttributeList(v, scl)
        progress.setConsoleInfo("Processing %s number of different species" % (str(len(noSpecies))))
        ds = ogr.Open(vector)
        name = ds.GetLayer().GetName()
        # FIX: pass the projection as WKT — func.createRaster is called
        # with ExportToWkt() by the sibling polygon implementation, not
        # with a raw osr.SpatialReference object
        proj = ds.GetLayer().GetSpatialRef().ExportToWkt()
        n = ds.GetLayer().GetFeatureCount()
        k = 1
        for spec in noSpecies:
            # Fresh per-species presence/absence grid
            work_array = numpy.zeros_like(fin_array)
            # Vector layer subsetting to the specific species
            layers = ds.ExecuteSQL("SELECT * FROM %s WHERE %s = '%s'" % (name, scl, spec))
            progress.setConsoleInfo("Gridding %s individual points of species %s " % (str(layers.GetFeatureCount()), spec))
            func.updateProcessing(progress, k, n)
            for i in range(0, layers.GetFeatureCount()):
                f = layers.GetFeature(i)
                geom = f.GetGeometryRef()
                mx, my = geom.GetX(), geom.GetY()  # coord in map units
                pp = func.world2Pixel(gt, mx, my)
                # FIX: cast to int — round() returns a float, which is not
                # a valid numpy array index
                x = int(round(pp[0]))
                y = int(round(pp[1]))
                if x < 0 or y < 0 or x >= work_array.shape[1] or y >= work_array.shape[0]:
                    progress.setConsoleInfo("Point %s outside given exent" % (str(f.GetFID())))
                else:
                    # Check if species was already added to grid cell
                    test = work_array[y, x]
                    if test != 1:
                        work_array[y, x] = 1
            k += 1
            # Add the working array's values to the final richness grid
            fin_array = numpy.add(work_array, fin_array)
    elif v.geometryType() == QGis.Polygon:
        progress.setConsoleInfo("Using the range size polygons to calculate Species richness for resulting grid.")
        # rasterization
        # NOTE(review): this branch is unfinished — ds/layers/proj are
        # never bound here, so createRaster and the del below would raise
        # NameError for polygon input; confirm intended behavior.
    if numpy.count_nonzero(fin_array) == 0:
        ProcessingLog.addToLog(ProcessingLog.LOG_ERROR, "No values were rasterized. Check GeometryType and Vector Projection.")
    # Create output raster
    func.createRaster(output, cols, rows, fin_array, nodata, gt, proj, 'GTiff')
    # And free up memory
    del (ds, layers)