Code Example #1
    def refreshView(self):
        ''' reload all QGIS layers in currently defined project '''
        if self._project is None:
            return

        # display layers if they exist
        if self._project.fp_file is not None and exists(self._project.fp_file):
            if self.map_layers[self.FOOTPRINT] is None or self.map_layers[
                    self.FOOTPRINT].source() != self._project.fp_file:
                self.showDataLayer(
                    self.FOOTPRINT,
                    load_shapefile(self._project.fp_file, 'footprint'))
        else:
            self.removeDataLayer(self.FOOTPRINT)

        if self._project.zone_file is not None and exists(
                self._project.zone_file):
            if self.map_layers[self.ZONES] is None or self.map_layers[
                    self.ZONES].source() != self._project.zone_file:
                self.showDataLayer(
                    self.ZONES, load_shapefile(self._project.zone_file,
                                               'zones'))
        else:
            self.removeDataLayer(self.ZONES)

        if self._project.survey_file is not None and exists(
                self._project.survey_file):
            if getattr(self._project, 'survey', None) is None:
                self._project.load_survey()
                self.showDataLayer(self.SURVEY, self._project.survey)
        else:
            self.removeDataLayer(self.SURVEY)

        if self._project.popgrid_file is not None and exists(
                self._project.popgrid_file):
            if getattr(self._project, 'popgrid', None) is None:
                self.showDataLayer(
                    self.POP_GRID,
                    load_shapefile(self._project.popgrid_file, 'popgrid'))
        else:
            self.removeDataLayer(self.POP_GRID)

        # set export options
        for idx, export_format in enumerate(self.EXPORT_FORMATS.values()):
            if export_format == self._project.export_type:
                self.ui.cb_export_format.setCurrentIndex(idx)
        self.ui.txt_export_select_path.setText(self._project.export_path)

        # refreshResult contains refresh call to update all layers currently loaded
        self.refreshResult()
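Note: every example on this page relies on the same load_shapefile(path, layer_name) contract — it returns a QGIS vector layer on success and a falsy value on failure, which the operators turn into an OperatorError. The minimal sketch below restates that calling pattern; it assumes load_shapefile has already been imported from the sidd helper module, and the path, layer name, and error handling are placeholders.

    # usage sketch -- assumes load_shapefile is already imported from the sidd helpers
    layer = load_shapefile('/tmp/zones.shp', 'zones')     # path and layer name are placeholders
    if not layer:
        # the operators in these examples raise OperatorError at this point
        raise RuntimeError('failed to load /tmp/zones.shp')
    print layer.featureCount()                            # QGIS 1.x / Python 2 API, as in the examples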
Code Example #2
    def test_VerifyExposure(self):
        logging.debug('test_VerifyExposure')

        exposure_path = self.test_data_dir + 'exposure3.shp'
        exposure = load_shapefile(exposure_path, 'exposure3')
        exposure_opdata = OperatorData(OperatorDataTypes.Exposure, exposure)

        fp_path = self.test_data_dir + 'footprints3.shp'
        fp = load_shapefile(fp_path, 'fp3')
        fp_opdata = OperatorData(OperatorDataTypes.Footprint, fp)

        # check fragmentation
        report = OperatorData(OperatorDataTypes.Report)

        frag_analyzer = ExposureFragmentationAnalyzer(self.operator_options)
        frag_analyzer.inputs = [exposure_opdata]
        frag_analyzer.outputs = [report]
        frag_analyzer.do_operation()
        #print report.value
        self.assertEquals(report.value['fraction_count'], 0)

        fp_cnt_analyzer = ExposureFootprintCountAnalyzer(self.operator_options)
        fp_cnt_analyzer.inputs = [exposure_opdata, fp_opdata]
        fp_cnt_analyzer.outputs = [report]
        fp_cnt_analyzer.do_operation()
        #print report.value
        self.assertEquals(report.value['total_source'],
                          report.value['total_exposure'])

        exposure_path = self.test_data_dir + 'exposure2.shp'
        exposure = load_shapefile(exposure_path, 'exposure2')
        exposure_opdata = OperatorData(OperatorDataTypes.Exposure, exposure)

        zone_path = self.test_data_dir + 'zones2.shp'
        zone = load_shapefile(zone_path, 'zones2')
        zone_opdata = OperatorData(OperatorDataTypes.Zone, zone)

        zone_cnt_analyzer = ExposureZoneCountAnalyzer(self.operator_options)
        zone_cnt_analyzer.inputs = [
            exposure_opdata, zone_opdata,
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone2_bldgcount_field)
        ]
        zone_cnt_analyzer.outputs = [report]
        zone_cnt_analyzer.do_operation()
        #print report.value
        self.assertEquals(report.value['total_source'],
                          report.value['total_exposure'])
Code Example #3
 def do_operation(self):
     """ perform export operation """        
     # input/output data checking already done during property set
     input_file = self.inputs[0].value
     output_file = self.inputs[1].value
             
     try:
         exp_layer = load_shapefile(input_file, 'exposure_%s' % get_unique_filename())
         # get field headers/types
         fields = exp_layer.dataProvider().fields()
         csvfile = open(output_file, 'wb')
         csvwriter = csv.writer(csvfile, delimiter=',',
                                quotechar='"', quoting=csv.QUOTE_NONNUMERIC)
         csvwriter.writerow([f.name() for f in fields.values()])
         for feature in layer_features(exp_layer):
             row = []
             for fidx, value in feature.attributeMap().iteritems():
                 # retrieve data according to field type
                 if fields[fidx].type() == QVariant.Int:
                     row.append(value.toInt()[0])
                 elif fields[fidx].type() == QVariant.Double:
                     row.append(value.toDouble()[0])
                 else:
                     row.append(str(value.toString()))
             csvwriter.writerow(row)
         csvfile.close()
     except Exception as err:
         raise OperatorError("error exporting CSV: %s" % err, self.__class__)
Code Example #4
File: grids.py Project: gem/sidd
    def _create_grid(self, grid_name, grid_file, x_min, y_min, x_max, y_max, x_off, y_off):
        x_off2, y_off2 = x_off / 2.0, y_off / 2.0
        x_min = floor(x_min / x_off) * x_off
        x_max = ceil(x_max / x_off) * x_off
        y_min = floor(y_min / y_off) * y_off
        y_max = ceil(y_max / y_off) * y_off

        xtotal = int((x_max - x_min) / x_off) + 1
        ytotal = int((y_max - y_min) / y_off) + 1

        logAPICall.log(
            "x_min %f x_max %f y_min %f y_max %f x_off %f y_off %f xtotal %d, ytotal %d"
            % (x_min, x_max, y_min, y_max, x_off, y_off, xtotal, ytotal),
            logAPICall.DEBUG_L2,
        )
        fields = {0: QgsField("GRID_GID", QVariant.String)}
        writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for x in range(xtotal):
            for y in range(ytotal):
                lon = x_min + (x * x_off) + (x_off2)
                lat = y_min + (y * y_off) + (y_off2)
                # out_geom = QgsGeometry.fromRect(QgsRectangle(lon-x_off2, lat-y_off2,
                #                                             lon+x_off2, lat+y_off2))
                f.setGeometry(self._outputGeometryFromLatLon(lat, lon))
                f.addAttribute(0, QVariant(latlon_to_grid(lat, lon)))
                writer.addFeature(f)
        del writer
        return load_shapefile(grid_file, grid_name)
Code Example #5
File: join.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value                
        fp_layer = self.inputs[2].value
        
        # merge with zone to get assignment
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()        
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,[zone_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(zone_field, QVariant.String),
        }
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        fp_layername = 'fpc_%s' % get_unique_filename()
        fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
        try:
            writer = QgsVectorFileWriter(fp_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(tmp_join_layer):                
                centroid = _f.geometry().centroid().asPoint()
                lon = centroid.x()
                lat = centroid.y()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))
                writer.addFeature(f)
            
            del writer
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            remove_shapefile(fp_file)
            raise OperatorError("error creating joined grid: %s" % err, self.__class__)
        
        # load shapefile as layer
        fp_layer = load_shapefile(fp_file, fp_layername)
        if not fp_layer:
            raise OperatorError('Error loading footprint centroid file %s' % fp_file, self.__class__)
                
        # clean up
        del tmp_join_layer        
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = fp_layer
        self.outputs[1].value = fp_file
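A note on load_shapefile_verify, used above and in Code Example #17: judging from these examples it behaves like load_shapefile but additionally asserts that the listed field names exist in the layer, raising AssertionError when one is missing (the operators convert that into an OperatorError). A small sketch of that calling pattern follows; the import, the path, and the field names are placeholders, not part of the project's documented API.

    # sketch only -- assumes load_shapefile_verify is already imported from the sidd helpers
    try:
        zones = load_shapefile_verify('/tmp/zones.shp', 'zones', ['ZONE', 'BLDG_CNT'])
    except AssertionError as err:
        # a required field is missing from the shapefile
        print 'verification failed: %s' % err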
Code Example #6
File: grids.py Project: gem/sidd
    def _create_grid(self, grid_name, grid_file, x_min, y_min, x_max, y_max, x_off, y_off):
        x_off2, y_off2 = x_off / 2.0, y_off / 2.0
        x_min = floor(x_min / x_off) * x_off
        x_max = ceil(x_max / x_off) * x_off
        y_min = floor(y_min / y_off) * y_off
        y_max = ceil(y_max / y_off) * y_off
        
        xtotal = int((x_max - x_min) / x_off)+1
        ytotal = int((y_max - y_min) / y_off)+1

        logAPICall.log('x_min %f x_max %f y_min %f y_max %f x_off %f y_off %f xtotal %d, ytotal %d'
                       % (x_min, x_max, y_min, y_max, x_off, y_off, xtotal, ytotal),
                       logAPICall.DEBUG_L2)
        fields = {
            0 : QgsField('GRID_GID', QVariant.String),            
        }
        writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for x in range(xtotal):
            for y in range(ytotal):
                lon = x_min + (x * x_off) + (x_off2)
                lat = y_min + (y * y_off) + (y_off2)                
                #out_geom = QgsGeometry.fromRect(QgsRectangle(lon-x_off2, lat-y_off2,
                #                                             lon+x_off2, lat+y_off2))                                
                f.setGeometry(self._outputGeometryFromLatLon(lat, lon))                
                f.addAttribute(0, QVariant(latlon_to_grid(lat, lon)))                
                writer.addFeature(f)
        del writer 
        return load_shapefile(grid_file, grid_name)        
Code Example #7
File: operator_unittest.py Project: gem/sidd
    def test_VerifyExposure(self):
        logging.debug("test_VerifyExposure")

        exposure_path = self.test_data_dir + "exposure3.shp"
        exposure = load_shapefile(exposure_path, "exposure3")
        exposure_opdata = OperatorData(OperatorDataTypes.Exposure, exposure)

        fp_path = self.test_data_dir + "footprints3.shp"
        fp = load_shapefile(fp_path, "fp3")
        fp_opdata = OperatorData(OperatorDataTypes.Footprint, fp)

        # check fragmentation
        report = OperatorData(OperatorDataTypes.Report)

        frag_analyzer = ExposureFragmentationAnalyzer(self.operator_options)
        frag_analyzer.inputs = [exposure_opdata]
        frag_analyzer.outputs = [report]
        frag_analyzer.do_operation()
        # print report.value
        self.assertEquals(report.value["fraction_count"], 0)

        fp_cnt_analyzer = ExposureFootprintCountAnalyzer(self.operator_options)
        fp_cnt_analyzer.inputs = [exposure_opdata, fp_opdata]
        fp_cnt_analyzer.outputs = [report]
        fp_cnt_analyzer.do_operation()
        # print report.value
        self.assertEquals(report.value["total_source"], report.value["total_exposure"])

        exposure_path = self.test_data_dir + "exposure2.shp"
        exposure = load_shapefile(exposure_path, "exposure2")
        exposure_opdata = OperatorData(OperatorDataTypes.Exposure, exposure)

        zone_path = self.test_data_dir + "zones2.shp"
        zone = load_shapefile(zone_path, "zones2")
        zone_opdata = OperatorData(OperatorDataTypes.Zone, zone)

        zone_cnt_analyzer = ExposureZoneCountAnalyzer(self.operator_options)
        zone_cnt_analyzer.inputs = [
            exposure_opdata,
            zone_opdata,
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_bldgcount_field),
        ]
        zone_cnt_analyzer.outputs = [report]
        zone_cnt_analyzer.do_operation()
        # print report.value
        self.assertEquals(report.value["total_source"], report.value["total_exposure"])
Code Example #8
    def test_ApplyMS(self):
        logging.debug('test_ApplyMS')

        # load zone with count
        zone_data = self.test_LoadZone2(True, 2)
        zone_stats = layer_field_stats(zone_data[0].value, self.zone2_field)
        # load ms
        ms_opdata = self.test_LoadMS(True)
        ms = ms_opdata[0].value
        stats = ms.get_assignment_by_name('ALL')
        for zone in zone_stats.keys():
            newZone = MappingSchemeZone(zone)
            ms.assign(newZone, stats)

        # apply mapping scheme
        ms_applier = ZoneMSApplier(self.operator_options)

        ms_applier.inputs = [
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_field),
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone2_bldgcount_field),
            ms_opdata[0],
        ]
        ms_applier.outputs = [
            OperatorData(OperatorDataTypes.Exposure),
            OperatorData(OperatorDataTypes.Shapefile),
        ]
        ms_applier.do_operation()
        self.assertTrue(os.path.exists(ms_applier.outputs[1].value))

        # cleanup
        self._clean_layer(zone_data)
        self._clean_layer(ms_applier.outputs)
        del ms_applier

        # testing apply MS on grid
        ############################

        ms_applier = GridMSApplier(self.operator_options)
        ms_applier.inputs = [
            OperatorData(OperatorDataTypes.Grid,
                         load_shapefile(self.grid2_path, 'test_input_grid')),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_field),
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone2_bldgcount_field),
            ms_opdata[0],
        ]
        ms_applier.outputs = [
            OperatorData(OperatorDataTypes.Exposure),
            OperatorData(OperatorDataTypes.Shapefile),
        ]
        ms_applier.do_operation()
        self.assertTrue(os.path.exists(ms_applier.outputs[1].value))

        # cleanup
        self._clean_layer(ms_applier.outputs)
        del ms_applier
Code Example #9
File: wdg_result.py Project: ImageCatInc/sidd
 
 def refreshView(self):
     ''' reload all QGIS layers in currently defined project '''
     if self._project is None:
         return
     
     # display layers if they exist
     if self._project.fp_file is not None and exists(self._project.fp_file):
         if self.map_layers[self.FOOTPRINT] is None or self.map_layers[self.FOOTPRINT].source() != self._project.fp_file:                            
             self.showDataLayer(self.FOOTPRINT, load_shapefile(self._project.fp_file, 'footprint'))
     else:            
         self.removeDataLayer(self.FOOTPRINT)
     
     if self._project.zone_file is not None and exists(self._project.zone_file):
         if self.map_layers[self.ZONES] is None or self.map_layers[self.ZONES].source() != self._project.zone_file:
             self.showDataLayer(self.ZONES, load_shapefile(self._project.zone_file, 'zones'))
     else:            
         self.removeDataLayer(self.ZONES)
         
     if self._project.survey_file is not None and exists(self._project.survey_file):
         if getattr(self._project, 'survey', None) is None:
             self._project.load_survey()
             self.showDataLayer(self.SURVEY, self._project.survey)
     else:            
         self.removeDataLayer(self.SURVEY)
     
     if self._project.popgrid_file is not None and exists(self._project.popgrid_file):
         if getattr(self._project, 'popgrid', None) is None:
             self.showDataLayer(self.POP_GRID, load_shapefile(self._project.popgrid_file, 'popgrid'))
     else:            
         self.removeDataLayer(self.POP_GRID)
     
     # set export options
     for idx, export_format in enumerate(self.EXPORT_FORMATS.values()):
         if export_format == self._project.export_type:
             self.ui.cb_export_format.setCurrentIndex(idx)
     self.ui.txt_export_select_path.setText(self._project.export_path)
     
     # refreshResult contains refresh call to update all layers currently loaded
     self.refreshResult()
Code Example #10
    def do_operation(self):
        """ perform export operation """        
        # input/output data checking already done during property set
        input_file = self.inputs[0].value
        output_file = self.inputs[1].value
        output_dbf = '%s_attr.dbf' % output_file[:-3]
        try:
            exp_layer = load_shapefile(input_file, 'exposure_%s' % get_unique_filename())
            
            # store id of distinct features            
            total_features = exp_layer.dataProvider().featureCount()
            if total_features > MAX_FEATURES_IN_MEMORY:
                # use bsddb to store id in case number of features is too large
                tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
                db = bsddb.btopen(tmp_db_file, 'c')
                use_db = True
            else:
                # in memory dictionary, should be much faster, but could fail
                # if memory is limited
                db = {}
                use_db = False
                        
            # get field index for GID
            gid_idx = layer_field_index(exp_layer, GID_FIELD_NAME)
            fields = {
                0: QgsField(GID_FIELD_NAME, QVariant.Int),
            }            
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, 
                                         exp_layer.dataProvider().geometryType(), 
                                         exp_layer.crs(), "ESRI Shapefile")
            out_feature = QgsFeature()
            for feature in layer_features(exp_layer):
                gid = str(feature.attributeMap()[gid_idx].toString())
                # only write out once 
                if not db.has_key(gid):
                    db[gid]= '1'    # bsddb only accepts string 
                    out_feature.addAttribute(0, gid)
                    out_feature.setGeometry(feature.geometry())
                    writer.addFeature(out_feature)
                    
            # clean up
            del writer                
            if use_db:
                db.close()
                os.remove(tmp_db_file)

            # copy associated attribute file            
            copy_shapefile(input_file, output_dbf, extensions=['.dbf'])
        except Exception as err:
            raise OperatorError("error creating shapefile: %s" % err, self.__class__)
Code Example #11
File: operator_unittest.py Project: gem/sidd
    def test_ApplyMS(self):
        logging.debug("test_ApplyMS")

        # load zone with count
        zone_data = self.test_LoadZone2(True, 2)
        zone_stats = layer_field_stats(zone_data[0].value, self.zone2_field)
        # load ms
        ms_opdata = self.test_LoadMS(True)
        ms = ms_opdata[0].value
        stats = ms.get_assignment_by_name("ALL")
        for zone in zone_stats.keys():
            newZone = MappingSchemeZone(zone)
            ms.assign(newZone, stats)

        # apply mapping scheme
        ms_applier = ZoneMSApplier(self.operator_options)

        ms_applier.inputs = [
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_field),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_bldgcount_field),
            ms_opdata[0],
        ]
        ms_applier.outputs = [OperatorData(OperatorDataTypes.Exposure), OperatorData(OperatorDataTypes.Shapefile)]
        ms_applier.do_operation()
        self.assertTrue(os.path.exists(ms_applier.outputs[1].value))

        # cleanup
        self._clean_layer(zone_data)
        self._clean_layer(ms_applier.outputs)
        del ms_applier

        # testing apply MS on grid
        ############################

        ms_applier = GridMSApplier(self.operator_options)
        ms_applier.inputs = [
            OperatorData(OperatorDataTypes.Grid, load_shapefile(self.grid2_path, "test_input_grid")),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_field),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_bldgcount_field),
            ms_opdata[0],
        ]
        ms_applier.outputs = [OperatorData(OperatorDataTypes.Exposure), OperatorData(OperatorDataTypes.Shapefile)]
        ms_applier.do_operation()
        self.assertTrue(os.path.exists(ms_applier.outputs[1].value))

        # cleanup
        self._clean_layer(ms_applier.outputs)
        del ms_applier
Code Example #12
    def do_operation(self):
        """ perform footprint load operation """
        grid_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(grid_layer)

        grid_fields = grid_layer.dataProvider().fields()

        output_layername = 'grid_%s' % get_unique_filename()
        output_file = self._tmp_dir + output_layername + '.shp'

        half_grid = DEFAULT_GRID_SIZE / 2.0
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8",
                                         grid_fields, QGis.WKBPolygon,
                                         grid_layer.crs(), "ESRI Shapefile")
            out_f = QgsFeature()
            for in_f in layer_features(grid_layer):
                in_point = in_f.geometry().asPoint()
                out_geom = QgsGeometry.fromRect(
                    QgsRectangle(in_point.x() - half_grid,
                                 in_point.y() - half_grid,
                                 in_point.x() + half_grid,
                                 in_point.y() + half_grid))
                out_f.setGeometry(out_geom)
                out_f.setAttributeMap(in_f.attributeMap())
                writer.addFeature(out_f)
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid: %s' % err,
                                self.__class__)

        # load shapefile as layer
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError(
                'Error loading generated file %s' % (output_file),
                self.__class__)

        # store data in output
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
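The loop above builds a square cell around each grid point with QgsRectangle and QgsGeometry.fromRect. The standalone sketch below isolates that construction using the QGIS 1.x-era Python API used throughout these examples; the 0.5-degree cell size stands in for DEFAULT_GRID_SIZE, whose actual value is not shown here.

    # minimal sketch of the cell-around-a-point construction (QGIS 1.x API)
    from qgis.core import QgsGeometry, QgsPoint, QgsRectangle

    half = 0.5 / 2.0                       # placeholder for DEFAULT_GRID_SIZE / 2.0
    pt = QgsPoint(10.0, 45.0)              # placeholder centre point
    cell = QgsGeometry.fromRect(QgsRectangle(pt.x() - half, pt.y() - half,
                                             pt.x() + half, pt.y() + half))
    print cell.exportToWkt()               # a 0.5 x 0.5 degree polygon around the point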
Code Example #13
File: grid.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        grid_layer = self.inputs[0].value
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(grid_layer)
                
        grid_fields = grid_layer.dataProvider().fields()
        
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = self._tmp_dir + output_layername + '.shp'        
        
        half_grid = DEFAULT_GRID_SIZE / 2.0
        try:            
            writer = QgsVectorFileWriter(output_file, "utf-8", grid_fields,
                                         QGis.WKBPolygon, grid_layer.crs(), "ESRI Shapefile")
            out_f = QgsFeature()
            for in_f in layer_features(grid_layer):
                in_point = in_f.geometry().asPoint()                
                out_geom = QgsGeometry.fromRect(QgsRectangle(in_point.x()-half_grid, in_point.y()-half_grid,
                                                             in_point.x()+half_grid, in_point.y()+half_grid))            
                out_f.setGeometry(out_geom)
                out_f.setAttributeMap(in_f.attributeMap())
                writer.addFeature(out_f)
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid: %s' % err, self.__class__)

        # load shapefile as layer        
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:            
            raise OperatorError('Error loading generated file %s' % (output_file), self.__class__)        

        # store data in output
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Code Example #14
File: grid.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set
        grid_layername = 'grid_%s' % get_unique_filename()
        output_file = self._tmp_dir + grid_layername + '.shp'
        
        [x_min, y_min, x_max, y_max] = [x.value for x in self._inputs]
        try:
            self._write_grid_shapefile(output_file, 
                                       x_min, y_min, x_max, y_max,
                                       self._x_off, self._y_off)
        except:
            remove_shapefile(output_file)
            raise OperatorError('error creating grid', self.__class__)

        
        grid_layer = load_shapefile(output_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % output_file, self.__class__)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = output_file
Code Example #15
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        grid_layername = 'grid_%s' % get_unique_filename()
        output_file = self._tmp_dir + grid_layername + '.shp'

        [x_min, y_min, x_max, y_max] = [x.value for x in self._inputs]
        try:
            self._write_grid_shapefile(output_file, x_min, y_min, x_max, y_max,
                                       self._x_off, self._y_off)
        except:
            remove_shapefile(output_file)
            raise OperatorError('error creating grid', self.__class__)

        grid_layer = load_shapefile(output_file, grid_layername)
        if not grid_layer:
            raise OperatorError(
                'Error loading result grid file %s' % output_file,
                self.__class__)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = output_file
Code Example #16
File: ms_create.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field
        
        logAPICall.log('survey %s, taxfield %s, zone %s, zone_field %s' % (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
                       logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        
        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()        
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)
        
        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)
        
        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)
        
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())            
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional = {StatisticNode.AverageSize: _area} 
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                # add to the existing dict so both average size and unit cost are kept
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str) , logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(_tax_str, self._parse_order, self._parse_modifiers, additional)
            except TaxonomyParseError as perr:
                logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING)
        
        # store data in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up        
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = ms
Code Example #17
File: popgrid.py Project: gem/sidd
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        # load and verify
        popgrid_file = self.inputs[0].value
        pop_field = self.inputs[1].value

        popgrid_layername = 'zone_%s' % get_unique_filename()
        try:
            tmp_popgrid_layer = load_shapefile_verify(popgrid_file,
                                                      popgrid_layername,
                                                      [pop_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        logAPICall.log(
            'tmp_fp_layer.crs().epsg() %s ' % tmp_popgrid_layer.crs().epsg(),
            logAPICall.DEBUG)
        if tmp_popgrid_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_popgrid_layer.crs(),
                                               self._crs)
            transform_required = True
        else:
            transform_required = False

        # output grid
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        pop_idx = layer_field_index(tmp_popgrid_layer, pop_field)
        output_file = '%spop_grid_%s.shp' % (self._tmp_dir,
                                             get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_popgrid_layer):
                # NOTE: geom.transform does projection in place to underlying C object

                # 1. get geometry
                geom = _f.geometry()
                # 2. change projection if required
                if transform_required:
                    geom = transform.transform(geom)

                # 3. write to file
                gid += 1
                f.setGeometry(geom)
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, _f.attributeMap()[pop_idx])
                writer.addFeature(f)
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err,
                                self.__class__)

        popgrid_layername = 'popgrid_%s' % get_unique_filename()
        popgrid_layer = load_shapefile(output_file, popgrid_layername)
        if not popgrid_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % output_file,
                self.__class__)

        # clean up
        del tmp_popgrid_layer

        # store data in output
        self.outputs[0].value = popgrid_layer
        self.outputs[1].value = output_file
Code Example #18
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """    
        
        # validate inputs 
        fp_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        count_field = self.inputs[3].value
        area_field = self.inputs[4].value 

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(fp_layer)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required        
        # if count field is not defined, then generate building count from footprints        
        # area_field is not required
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        cnt_idx = ToGrid.STAT_COUNT_IDX
        
        zone_names, zone_stat, zone_stat2, zone_totals = {}, {}, {}, {}
        
        # 1. find building count and total area for each zone
        # project geometry into mercator and get area in m2
        mercator_crs = QgsCoordinateReferenceSystem()        
        mercator_crs.createFromEpsg(3395)        
        mercator_transform = QgsCoordinateTransform(zone_layer.crs(), mercator_crs)
        
        try:
            # use zone geometry area 
            self._create_zone_statistics(zone_layer, zone_field, count_field, 
                     zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
        tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file, \
                                          x_min, y_min, x_max, y_max, \
                                          DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)            

        # tally total building area if the area field is defined
        bldg_area_idx = layer_field_index(zone_layer, area_field)
        zone_area = {}
        zone_has_area = False        
        if bldg_area_idx > 0:
            zone_has_area = True
            zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
            for _f in layer_features(zone_layer):            
                gid = _f.attributeMap()[zone_gid_idx].toString()            
                area = _f.attributeMap()[bldg_area_idx].toDouble()[0]            
                if zone_area.has_key(gid):
                    zone_area[gid] = float(zone_area[gid]) + area   # accumulate per-zone area
                else: 
                    zone_area[gid] = area
        
        # 3. intersect grids and zones to obtain polygons with 
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area) 
        # apply ratio to zone building count to obtain count assigned to polygon                  
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        for _f in layer_features(tmp_join_layer):
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()
            
            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]            
            # calculate count/area as proportion of total zone area
            area_ratio = (area/stat[area_idx])
            if bldg_cnt_idx > 0:
                bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * area_ratio
            else:
                bldg_cnt = 0
            if zone_has_area: 
                area = zone_area[zone_gid] * area_ratio
            else:
                area = stat[area_idx] * area_ratio                 
            self._update_stat(zone_stat2, '%s|%s'%(grid_gid, zone_gid), bldg_cnt, area)
        
        # 4. find total buildings in each zone based on footprint
        # - simply join the files and tally count and total area 
        tmp_join1 = 'joined_%s' % get_unique_filename()
        tmp_join1_file = '%s%s.shp' % (self._tmp_dir, tmp_join1)        
        try:
            # do intersection
            analyzer.intersection(fp_layer, tmp_join_layer, tmp_join1_file)
            tmp_join1_layer = load_shapefile(tmp_join1_file, tmp_join1)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally
        zone_fp_stat = {}
        zone_gid_idx = layer_field_index(tmp_join1_layer, '%s_'% GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join1_layer, "GRID_GID")        
        fp_area_idx = layer_field_index(tmp_join1_layer, AREA_FIELD_NAME)
        fp_ht_idx = layer_field_index(tmp_join1_layer, HT_FIELD_NAME)
        fp_has_height = False
        for _f in layer_features(tmp_join1_layer):
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            area = _f.attributeMap()[fp_area_idx].toDouble()[0] # area comes from geometry, always exists
            ht = _f.attributeMap()[fp_ht_idx].toDouble()[0]
            if ht > 0:
                fp_has_height = True
                area *= ht      # this is actual area to be aggregated at the end
            self._update_stat(zone_fp_stat, '%s|%s'%(grid_gid, zone_gid), 1, area)
            self._update_stat(zone_totals, zone_gid, 1, area)
        
        # 5. generate grid with adjusted building counts
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for key in zone_stat2.keys():
            (grid_gid, zone_gid) = str(key).split("|")
            s_zone = zone_stat[QString(zone_gid)]           # overall statistics for the zone from zone file (always exists)
            s_zone_grid = zone_stat2[key]                   # grid specific statistic from from zone file    (always exists)            
            if zone_totals.has_key(QString(zone_gid)):      # overall statistics for the zone from footprints
                s_total = zone_totals[QString(zone_gid)]       
            else:
                s_total = [0,0] # set to zero if missing
            if zone_fp_stat.has_key(key):                   # grid specific statistic from from footprint
                s_fp = zone_fp_stat[key]                        
            else:
                s_fp = [0, 0]   # set to zero if missing

            zone_leftover_count = s_zone[cnt_idx] - s_total[cnt_idx]   
            if zone_has_area:
                zone_leftover_area = zone_area[QString(zone_gid)] - s_total[area_idx]
            else:
                zone_leftover_area = s_zone[area_idx] - s_total[area_idx]
            if zone_leftover_count > 0:
                # there are still building not accounted for
                # distribute to grid based on ratio of grid leftover area over zone leftover area
                # (leftover area is area of zone after subtracting footprint areas)
                grid_leftover_count = zone_leftover_count * ((s_zone_grid[area_idx]-s_fp[area_idx])/zone_leftover_area)
                grid_count = s_fp[cnt_idx] + grid_leftover_count
            else:
                grid_count = s_fp[cnt_idx]
            
            if fp_has_height:
                # area can be actual area based on footprint area * height
                area = s_fp[area_idx]
            elif zone_has_area:
                area = s_zone_grid[area_idx]
            else:
                # no area defined
                area = 0 # max(s_zone_grid[area_idx], s_fp[area_idx])
                
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, grid_count)
            f.addAttribute(3, area)
            writer.addFeature(f)
        del writer
        
        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        del tmp_join1_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join1_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
Code Example #19
File: grids.py Project: gem/sidd
 def _load_output(self, output_file, output_layername):
     output_layer = load_shapefile(output_file, output_layername)
     if not output_layer:
         raise OperatorError('Error loading grid file %s' % output_file, self.__class__)
     self.outputs[0].value = output_layer
     self.outputs[1].value = output_file
Code Example #20
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mappin """
        # validate inputs
        popgrid_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(popgrid_layer)
        self._test_layer_field_exists(popgrid_layer, CNT_FIELD_NAME)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required
        # if count field is not defined, then generate building count from footprints

        # local variables
        analyzer = QgsOverlayAnalyzer()

        # intersect grids and zones to obtain polygons with
        # - population and zone_id
        # - apply ratio to population to obtain building count
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # generate grid with  building counts
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        pop_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        for _f in layer_features(tmp_join_layer):
            pop_count = _f.attributeMap()[pop_idx].toDouble()[0]
            zone = _f.attributeMap()[zone_idx].toString()

            # 1. get geometry
            geom = _f.geometry()
            # 2. get original centroid point
            centroid = geom.centroid().asPoint()
            grid_gid = latlon_to_grid(centroid.y(), centroid.x())
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone)
            f.addAttribute(2, pop_count / pop_to_bldg)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        self._load_output(output_file, output_layername)
Code Example #21
File: grids.py Project: gem/sidd
    def do_operation(self):
        # validate inputs
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        area_field = self.inputs[3].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        self._test_layer_field_exists(zone_layer, count_field)

        # local variables
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        # cnt_idx = ToGrid.STAT_COUNT_IDX

        # 1. find building count and total area for each zone
        zone_names, zone_stat = {}, {}
        try:
            self._create_zone_statistics(zone_layer, zone_field, count_field, zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone
        tmp_grid1 = "grid_" + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + ".shp"
        try:
            extent = zone_layer.extent()
            [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
            tmp_grid_lyr1 = self._create_grid(
                tmp_grid1, tmp_grid1_file, x_min, y_min, x_max, y_max, DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE
            )
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 3. intersect grids and zones to obtain polygons with
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area)
        # apply ratio to zone building count to obtain count assigned to polygon
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        bldg_area_idx = layer_field_index(tmp_join_layer, area_field)
        mercator_transform = QgsCoordinateTransform(tmp_join_layer.crs(), self.mercator_crs)

        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(tmp_join_layer):
            # get area of polygon
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]

            # calculate count/area as proportion of total zone area
            bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * (area / stat[area_idx])
            if bldg_area_idx > 0:
                bldg_area = _f.attributeMap()[bldg_area_idx].toDouble()[0] * (area / stat[area_idx])
            else:
                bldg_area = 0

            # create output record
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, bldg_cnt)
            f.addAttribute(3, bldg_area)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)

        # store data in output
        self._load_output(output_file, output_layername)
Code Example #22
File: join.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        zone_count_field = self.inputs[2].value
        fp_layer = self.inputs[3].value

        # merge with zone 
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # count footprint in each zone
        gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        stats = {}
        for _feature in layer_features(tmp_join_layer):
            gid = _feature.attributeMap()[gid_idx].toString()
            if ht_idx > 0:      
                ht = _feature.attributeMap()[ht_idx].toDouble()[0]
            else:
                ht = 0                        
            # if height is not defined, it is set to 0,
            # which causes the generated area to be ignored without having to
            # remove the field
            area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
            if not stats.has_key(gid):
                stats[gid] = (1, area)
            else:
                stat = stats[gid] 
                stats[gid] = (stat[0]+1, stat[1]+area)
            
        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)
        try:            
            fields ={
                0 : QgsField(GID_FIELD_NAME, QVariant.Int),
                1 : QgsField(zone_field, QVariant.String),
                2 : QgsField(CNT_FIELD_NAME, QVariant.Int),
                3 : QgsField(AREA_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")                     
            f = QgsFeature()            
            for _f in layer_features(zone_layer):
                
                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])
                
                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    stat = stats[gid]
                    bldg_count = stat[0]
                    area = stat[1]
                except:
                    bldg_count, area = 0, 0
                f.addAttribute(2, QVariant(bldg_count))
                f.addAttribute(3, QVariant(area))
                writer.addFeature(f)
            
            del writer, f
        except Exception as err:            
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err, self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError('Error loading footprint centroid file %s' % output_file, self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Code Example #23
    def do_operation(self):
        """ perform apply mapping scheme operation """

        # input/output data checking already done during property set
        src_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        ms = self.inputs[3].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(src_layer)
        self._test_layer_field_exists(src_layer, zone_field)
        self._test_layer_field_exists(src_layer, count_field)

        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%sexp_%s.shp' % (self._tmp_dir, exposure_layername)

        # loop through all input features
        provider = src_layer.dataProvider()
        if provider is None:
            raise OperatorError("input layer not correctly loaded",
                                self.__class__)
        zone_idx = layer_field_index(src_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % zone_field,
                self.__class__)
        count_idx = layer_field_index(src_layer, count_field)
        if count_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % count_field,
                self.__class__)
        gid_idx = layer_field_index(src_layer, GID_FIELD_NAME)
        if gid_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % GID_FIELD_NAME,
                self.__class__)
        area_idx = layer_field_index(src_layer, AREA_FIELD_NAME)

        provider.select(provider.attributeIndexes(), provider.extent())
        provider.rewind()

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields,
                                         provider.geometryType(), self._crs,
                                         "ESRI Shapefile")
            out_feature = QgsFeature()

            gid = 0
            for in_feature in layer_features(src_layer):
                geom = in_feature.geometry()
                centroid = geom.centroid().asPoint()
                gid = in_feature.attributeMap()[gid_idx]
                zone_str = str(in_feature.attributeMap()[zone_idx].toString())
                count = in_feature.attributeMap()[count_idx].toDouble()[0]
                if area_idx > 0:
                    area = in_feature.attributeMap()[area_idx].toDouble()[0]
                else:
                    area = 0

                count = int(count + 0.5)
                if count == 0:
                    continue

                stats = ms.get_assignment_by_name(zone_str)

                # use default stats if missing
                if stats is None:
                    raise Exception("no mapping scheme found for zone %s" %
                                    zone_str)

                for _sample in stats.get_samples(count,
                                                 self._extrapolationOption):
                    # write out if there are structures assigned
                    _type = _sample[0]
                    _cnt = _sample[1]

                    if area > 0:
                        # use area provided by footprint/zone if defined
                        _size = area * (float(_sample[1]) / count)
                        if _sample[3] > 0 and _sample[2] > 0:
                            _cost = (_sample[3] / _sample[2]) * area
                        else:
                            _cost = 0
                    else:
                        # use mapping scheme generic area otherwise
                        _size = _sample[2]
                        _cost = _sample[3]

                    if _cnt > 0:
                        out_feature.setGeometry(geom)
                        #out_feature.addAttribute(0, QVariant(gid))
                        out_feature.addAttribute(0, gid)
                        out_feature.addAttribute(1, QVariant(centroid.x()))
                        out_feature.addAttribute(2, QVariant(centroid.y()))
                        out_feature.addAttribute(3, QVariant(_type))
                        out_feature.addAttribute(4, QVariant(zone_str))
                        out_feature.addAttribute(5, QVariant(_cnt))
                        out_feature.addAttribute(6, QVariant(_size))
                        out_feature.addAttribute(7, QVariant(_cost))
                        writer.addFeature(out_feature)
            del writer, out_feature
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err,
                                self.__class__)

        del src_layer

        # load shapefile as layer
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:
            raise OperatorError(
                'Error loading exposure file %s' % exposure_file,
                self.__class__)

        # store data in output
        self.outputs[0].value = exposure_layer
        self.outputs[1].value = exposure_file
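
The loop above asks the mapping scheme assigned to the feature's zone for (type, count, size, cost) samples that together account for the feature's building count, then rescales size and cost whenever an observed footprint/zone area is available. A rough sketch of that per-sample scaling rule, with a plain tuple standing in for the values returned by get_samples() (the tuple layout is an assumption based on the indices used above):

def scale_sample(sample, total_count, observed_area):
    """Return (type, count, size, cost) for one mapping-scheme sample.

    `sample` is (type, count, generic_size, generic_cost). When an
    observed area is known, the sample's size becomes its share of that
    area and the cost is re-derived from the generic cost per area;
    otherwise the generic values are kept.
    """
    _type, _cnt, _size, _cost = sample
    if observed_area > 0:
        size = observed_area * (float(_cnt) / total_count)
        cost = (_cost / _size) * observed_area if _size > 0 and _cost > 0 else 0
        return _type, _cnt, size, cost
    return _type, _cnt, _size, _cost

print(scale_sample(('RC1', 5, 200.0, 1000.0), 10, 600.0))
# ('RC1', 5, 300.0, 3000.0)
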
Code example #24
0
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        svy_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(svy_layer)

        total_features = svy_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to store temporary lat/lon
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')
        else:
            db = {}

        # tally statistics for each grid_id/building type combination
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        for f in layer_features(svy_layer):
            geom = f.geometry()
            centroid = geom.centroid().asPoint()
            grid_id = latlon_to_grid(centroid.y(), centroid.x())
            tax_str = str(f.attributeMap()[tax_idx].toString())

            key = '%s %s' % (tax_str, grid_id)
            if db.has_key(key):
                db[key] = str(int(db[key]) +
                              1)  # value as string required by bsddb
            else:
                db[key] = '1'  # value as string required by bsddb

        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields,
                                         self._outputGeometryType(), self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for key, val in db.iteritems():
                (tax_str, grid_id) = key.split(' ')
                lon, lat = grid_to_latlon(int(grid_id))

                f.setGeometry(self._outputGeometryFromGridId(grid_id))
                f.addAttribute(0, QVariant(grid_id))
                f.addAttribute(1, QVariant(lon))
                f.addAttribute(2, QVariant(lat))
                f.addAttribute(3, QVariant(tax_str))
                f.addAttribute(4, QVariant(''))
                f.addAttribute(5, QVariant(val))
                writer.addFeature(f)
                gid += 1
            del writer, f
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err,
                                self.__class__)

        # load shapefile as layer
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:
            raise OperatorError(
                'Error loading exposure file %s' % (exposure_file),
                self.__class__)

        # store data in output
        self.outputs[0].value = exposure_layer
        self.outputs[1].value = exposure_file
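
The aggregation above buckets every survey point under a composite "taxonomy grid_id" key and keeps the running count as a string, because the bsddb b-tree used for large inputs only stores strings (small inputs fall back to an in-memory dict). A small sketch of the same tally with a plain dict, where latlon_to_grid is replaced by a hypothetical half-degree cell id:

def tally_survey(points, to_grid_id):
    """Count survey points per (taxonomy, grid cell) combination.

    `points` is an iterable of (lat, lon, taxonomy) tuples; counts are
    stored as strings so the same code could be backed by a bsddb
    b-tree, which only accepts string values.
    """
    db = {}
    for lat, lon, tax in points:
        key = '%s %s' % (tax, to_grid_id(lat, lon))
        db[key] = str(int(db.get(key, '0')) + 1)
    return db

# hypothetical grid id: lower-left corner of a half-degree cell
grid_id = lambda lat, lon: '%.1f_%.1f' % (lat - lat % 0.5, lon - lon % 0.5)
print(tally_survey([(10.1, 20.2, 'RC1'), (10.3, 20.4, 'RC1')], grid_id))
# {'RC1 10.0_20.0': '2'}
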
Code example #25
0
File: footprint.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set        
        # load and verify
        infile = self.inputs[0].value
        
        tmp_fp_layername = 'fp_%s' % get_unique_filename()
        tmp_fp_layer = load_shapefile(infile, tmp_fp_layername)
        if not tmp_fp_layer:
            raise OperatorError('Error loading footprint file %s' % infile, self.__class__)

        if self._fp_ht_field is not None:
            ht_idx = layer_field_index(tmp_fp_layer, self._fp_ht_field)
        else:
            ht_idx = -1
        logAPICall.log('tmp_fp_layer.crs().epsg() %s ' % tmp_fp_layer.crs().epsg(),
                       logAPICall.DEBUG)
        if tmp_fp_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_fp_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False
        
        mercator_crs = QgsCoordinateReferenceSystem()
        #mercator_crs.createFromProj4("+proj=merc +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
        mercator_crs.createFromEpsg(3395)        
        mercator_transform = QgsCoordinateTransform(tmp_fp_layer.crs(), mercator_crs)
        
        # output grid
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.Int),
            1 : QgsField(LON_FIELD_NAME, QVariant.Double),
            2 : QgsField(LAT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
            4 : QgsField(HT_FIELD_NAME, QVariant.Int),
        }
        output_file = '%sfpc_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)        
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_fp_layer):
                # NOTE: geom.transform does the projection in place on the
                #       underlying C object; for some reason multiple projections
                #       do not work correctly, so the following is a work-around
                 
                # 1. get geometry
                geom = _f.geometry()
                # 2. get original centroid point and project is required
                centroid  = geom.centroid().asPoint()
                if transform_required:
                    t_centroid = transform.transform(centroid)
                else:
                    t_centroid = centroid
                
                # 3. project into mercator and get area in m2
                geom.transform(mercator_transform)
                area = geom.area()
                
                # write to file
                gid += 1
                f.setGeometry(QgsGeometry.fromPoint(t_centroid))
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, QVariant(t_centroid.x()))
                f.addAttribute(2, QVariant(t_centroid.y()))
                f.addAttribute(3, QVariant(area))
                if ht_idx != -1:
                    f.addAttribute(4, _f.attributeMap()[ht_idx])
                else:
                    f.addAttribute(4, QVariant(0))
                writer.addFeature(f)            
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err, self.__class__)

        fp_layer = load_shapefile(output_file, tmp_fp_layername)
        if not fp_layer:
            raise OperatorError('Error loading footprint centroid file %s' % output_file, self.__class__)
        
        # clean up
        del tmp_fp_layer
        
        # store data in output
        self.outputs[0].value = fp_layer
        self.outputs[1].value = output_file
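
The operator above collapses each footprint polygon to a centroid point that carries the footprint's area in square metres, computed after projecting the geometry to World Mercator (EPSG:3395). As a library-free illustration of the per-polygon reduction only, here is a shoelace-formula sketch that assumes the ring is already in a planar metric coordinate system (no reprojection is attempted):

def polygon_area_and_centroid(ring):
    """Shoelace area and area-weighted centroid of a polygon ring.

    `ring` is a list of (x, y) tuples in a planar metric CRS; the ring
    may be given open or closed.
    """
    if ring[0] != ring[-1]:
        ring = ring + [ring[0]]
    a2 = cx = cy = 0.0
    for (x0, y0), (x1, y1) in zip(ring, ring[1:]):
        cross = x0 * y1 - x1 * y0
        a2 += cross
        cx += (x0 + x1) * cross
        cy += (y0 + y1) * cross
    area = a2 / 2.0
    cx /= 6.0 * area
    cy /= 6.0 * area
    return abs(area), (cx, cy)

# a 20 m x 10 m rectangular footprint
print(polygon_area_and_centroid([(0, 0), (20, 0), (20, 10), (0, 10)]))
# (200.0, (10.0, 5.0))
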
Code example #26
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        grid_layer = self.inputs[3].value

        zone_stats = {}
        zone_count_stats = {}
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        count_idx = layer_field_index(zone_layer, count_field)
        for _f in layer_features(zone_layer):
            gid = _f.attributeMap()[gid_idx].toString()
            zone_stats[gid] = 0
            zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]

        # create storage for temporary output data
        use_grid_db = grid_layer.dataProvider().featureCount(
        ) > MAX_FEATURES_IN_MEMORY
        if use_grid_db:
            tmp_grid_db_file = '%sdb_%s.db' % (self._tmp_dir,
                                               get_unique_filename())
            grid_points = bsddb.btopen(tmp_grid_db_file, 'c')
        else:
            grid_points = {}

        # merge to create stats
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                                   [zone_field, count_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        stats = layer_multifields_stats(tmp_join_layer,
                                        [zone_field, count_field])
        if stats == False:
            raise OperatorError(
                "error creating statistic based on input files",
                self.__class__)

        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        count_idx = layer_field_index(tmp_join_layer, count_field)
        lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
        lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
        gid_idx = layer_field_index(tmp_join_layer, self._gid_field)

        try:
            for _f in layer_features(tmp_join_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                # update stats
                zone_stats[gid] += 1
                grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
        except Exception as err:
            raise OperatorError("error processing joined layer: " % err,
                                self.__class__)

        # test for zones without a grid point assigned
        count_idx = layer_field_index(zone_layer, count_field)
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        zone_idx = layer_field_index(zone_layer, zone_field)
        _x_off, _y_off = self._x_off / 2.0, self._y_off / 2.0
        try:
            for _f in layer_features(zone_layer):
                centroid = _f.geometry().centroid().asPoint()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                if zone_stats[gid] == 0:
                    # get lower left corner
                    lon = int(centroid.x() /
                              DEFAULT_GRID_SIZE) * self._x_off + _x_off
                    lat = int(
                        centroid.y() / self._y_off) * self._y_off + _y_off

                    #self._write_feature(writer, f, lon, lat, zone_str, count_val)
                    zone_stats[gid] += 1
                    grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
        except Exception as err:
            raise OperatorError("error processing missing points: " % err,
                                self.__class__)

        # output result
        fields = {
            0: QgsField(self._lon_field, QVariant.Double),
            1: QgsField(self._lat_field, QVariant.Double),
            2: QgsField(zone_field, QVariant.String),
            3: QgsField(count_field, QVariant.Double)
        }
        grid_layername = 'grid_%s' % (get_unique_filename())
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            f = QgsFeature()
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            for key, value in grid_points.iteritems():
                [zone, zone_gid, lon, lat] = self._parse_key(key)
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                """                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))            
                f.addAttribute(3, QVariant(count_val / total_features))
                writer.addFeature(f)
                """
                value = float(
                    value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
                #grid_points[key] = value
                self._write_feature(writer, f, lon, lat, zone, value)
            del writer
        except Exception as err:
            raise OperatorError("error creating joined grid file: " % err,
                                self.__class__)

        # load result layer
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading joined grid file %s' % grid_file,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
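
In short, the operator above spreads each zone's total building count evenly over the grid points that intersect the zone (count / number of points), inserting a point at the zone centroid's grid cell as a fallback when no grid point falls inside the zone. A minimal sketch of that allocation step, assuming the per-zone counts and point keys have already been collected:

def allocate_zone_counts(zone_counts, points_per_zone):
    """Spread each zone's building count over its grid points.

    `zone_counts` maps zone gid -> total building count; `points_per_zone`
    maps zone gid -> list of grid point keys. Every point gets an equal
    share; a zone without any point keeps its count on a single fallback
    key, mirroring the centroid fallback above.
    """
    allocation = {}
    for gid, count in zone_counts.items():
        points = points_per_zone.get(gid) or ['fallback_%s' % gid]
        share = float(count) / len(points)
        for point in points:
            allocation[point] = share
    return allocation

print(allocate_zone_counts({'z1': 90, 'z2': 10},
                           {'z1': ['a', 'b', 'c'], 'z2': []}))
# {'a': 30.0, 'b': 30.0, 'c': 30.0, 'fallback_z2': 10.0}
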
Code example #27
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        popgrid_layer = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # merge with zone
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # count footprint in each zone
        stats = {}
        _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        for _f in layer_features(tmp_join_layer):
            # retrieve count from statistic
            _gid = _f.attributeMap()[_gid_idx].toString()
            _count = _f.attributeMap()[_cnt_idx].toString()
            if stats.has_key(_gid):
                stats[_gid] += float(_count) / pop_to_bldg
            else:
                stats[_gid] = float(_count) / pop_to_bldg

        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            fields = {
                0: QgsField(GID_FIELD_NAME, QVariant.Int),
                1: QgsField(zone_field, QVariant.String),
                2: QgsField(CNT_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPolygon, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(zone_layer):

                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])

                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    bldg_count = stats[gid]
                except:
                    bldg_count = 0
                f.addAttribute(2, QVariant(bldg_count))
                writer.addFeature(f)

            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError(
                'Error loading zone output file %s' % output_file,
                self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
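
The conversion above amounts to: building count of a zone = sum of the population of the grid cells intersecting the zone, divided by a people-per-building ratio. A minimal sketch under that assumption, with the per-cell population values already extracted:

def buildings_from_population(cell_population_by_zone, pop_to_bldg):
    """Estimate building counts per zone from a population grid.

    `cell_population_by_zone` maps zone gid -> list of population counts
    of the grid cells intersecting that zone; `pop_to_bldg` is the
    assumed number of people per building.
    """
    return {gid: sum(cells) / float(pop_to_bldg)
            for gid, cells in cell_population_by_zone.items()}

print(buildings_from_population({'z1': [1200, 800], 'z2': [300]}, 4.0))
# {'z1': 500.0, 'z2': 75.0}
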
Code example #28
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        zone_count_field = self.inputs[2].value
        fp_layer = self.inputs[3].value

        # merge with zone
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # count footprint in each zone
        gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        stats = {}
        for _feature in layer_features(tmp_join_layer):
            gid = _feature.attributeMap()[gid_idx].toString()
            if ht_idx > 0:
                ht = _feature.attributeMap()[ht_idx].toDouble()[0]
            else:
                ht = 0
            # if height is not defined, it is set to 0
            # this zeroes the derived area, so the area can be ignored
            # without having to remove the field
            area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
            if not stats.has_key(gid):
                stats[gid] = (1, area)
            else:
                stat = stats[gid]
                stats[gid] = (stat[0] + 1, stat[1] + area)

        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            fields = {
                0: QgsField(GID_FIELD_NAME, QVariant.Int),
                1: QgsField(zone_field, QVariant.String),
                2: QgsField(CNT_FIELD_NAME, QVariant.Int),
                3: QgsField(AREA_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPolygon, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(zone_layer):

                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])

                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    stat = stats[gid]
                    bldg_count = stat[0]
                    area = stat[1]
                except:
                    bldg_count, area = 0, 0
                f.addAttribute(2, QVariant(bldg_count))
                f.addAttribute(3, QVariant(area))
                writer.addFeature(f)

            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError(
                'Error loading zone output file %s' % output_file,
                self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Code example #29
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        fp_layer = self.inputs[2].value

        # merge with zone to get assignment
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                                   [zone_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        fields = {
            0: QgsField(self._lon_field, QVariant.Double),
            1: QgsField(self._lat_field, QVariant.Double),
            2: QgsField(zone_field, QVariant.String),
        }
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        fp_layername = 'fpc_%s' % get_unique_filename()
        fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
        try:
            writer = QgsVectorFileWriter(fp_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(tmp_join_layer):
                centroid = _f.geometry().centroid().asPoint()
                lon = centroid.x()
                lat = centroid.y()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))
                writer.addFeature(f)

            del writer
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            remove_shapefile(fp_file)
            raise OperatorError("error creating joined grid: %s" % err,
                                self.__class__)

        # load shapefile as layer
        fp_layer = load_shapefile(fp_file, fp_layername)
        if not fp_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % fp_file,
                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = fp_layer
        self.outputs[1].value = fp_file
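
The operator above leans on QgsOverlayAnalyzer().intersection to attach the zone attribute to each footprint before taking centroids; conceptually this is a point-in-polygon assignment. A library-free sketch of that assignment for a single centroid, using a ray-casting test (the zone rings here are hypothetical planar coordinates):

def point_in_ring(point, ring):
    """Ray-casting point-in-polygon test for planar coordinates.

    `ring` is a list of (x, y) vertices; the polygon is implicitly
    closed. Points exactly on an edge may be classified either way.
    """
    x, y = point
    inside = False
    n = len(ring)
    for i in range(n):
        x0, y0 = ring[i]
        x1, y1 = ring[(i + 1) % n]
        if (y0 > y) != (y1 > y):
            x_cross = x0 + (y - y0) * (x1 - x0) / float(y1 - y0)
            if x < x_cross:
                inside = not inside
    return inside

def tag_centroid(centroid, zones):
    """Return the label of the first zone polygon containing the centroid."""
    for label, ring in zones:
        if point_in_ring(centroid, ring):
            return label
    return None

zones = [('RES', [(0, 0), (10, 0), (10, 10), (0, 10)])]
print(tag_centroid((4.0, 5.0), zones))  # 'RES'
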
Code example #30
0
    def do_operation(self):
        # input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value

        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (area_field, fp_layer.name()), self.__class__)
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" % (ht_field, fp_layer.name()),
                self.__class__)
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (HT_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (TAX_FIELD_NAME, svy_layer.name()))

        # load zone classes
        # the operations below must be performed for each zone
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)

        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (TAX_FIELD_NAME, svy_layer.name()))

        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {}
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {}
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0

        # associate group to ratio value
        for _rec in layer_features(tmp_join_layer):
            _zone = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone], _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone], _samp_grp, _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err,
                               logAPICall.WARNING)

        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups",
                                    self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value == -1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError(
                        "Survey groups must have same number of samples",
                        self.__class__)
            # sort by stories
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][
                    group_stories_for_sort[key]] = self.weights[idx]

        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            group_weight = _zone_group_weight[_zone_str]
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str,
                                    group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str,
                                    _sample_size * group_weight[_sample_grp])
                self.increment_dict(
                    _zone_a_exp[_zone_str], _tax_str,
                    _sample_size * _sample_ht * group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log(
                    "error processing sample with building type: %s" %
                    _tax_str, logAPICall.WARNING)
                pass

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)

        zone_idx = layer_field_index(tmp_join_layer2, zone_field)
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht

        # calculate building ratios for each zone
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a)
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())
            # E[A] estimated total building area for zone
            e_at_total = _zone_total_area[_zone] * _zone_total_ht[
                _zone] / _zone_total_count[_zone]

            # calculate expected values
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]
                if e_at_cluster == 0 or e_at_total == 0:
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building
                    e_gt_cluster = e_at_cluster / e_nt_cluster

                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster

                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster

        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)

            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str,
                               self._parse_order,
                               self._parse_modifiers,
                               add_times=int(_e_exp * 1000))
            # finalize call is required
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        # assign output
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp
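
The ratio calculation above can be read as: estimate the zone's total built-up area E[A] from the footprints (sum of footprint areas times mean height), take each surveyed type's share of the sampled built-up area E[f(t)], and divide the type's share of E[A] by its average building size E[G(t)] to obtain an expected building count E[N(t)]. A worked sketch of that per-type step (the fallback to count fractions when areas are missing is omitted; the inputs are hypothetical weighted survey totals):

def expected_counts(n_by_type, a_by_type, total_fp_area, total_fp_ht, fp_count):
    """E[N(t)] per building type, following the derivation above.

    n_by_type / a_by_type hold the weighted sampled count and built-up
    area per type; the zone's total built-up area is approximated as
    sum(footprint area) * mean(footprint height).
    """
    e_at_total = total_fp_area * total_fp_ht / float(fp_count)
    a_total = sum(a_by_type.values())
    result = {}
    for t, e_at_cluster in a_by_type.items():
        e_nt_cluster = n_by_type[t]
        e_ft = e_at_cluster / a_total         # E[f(t)] sampled area fraction
        e_gt = e_at_cluster / e_nt_cluster    # E[G(t)] average area per building
        result[t] = e_at_total * e_ft / e_gt  # E[N(t)] expected building count
    return result

# 10,000 m2 of footprints, 50 footprints, mean height 3 storeys (sum 150)
print(expected_counts({'RC1': 10.0, 'W1': 20.0},
                      {'RC1': 4000.0, 'W1': 2000.0},
                      10000.0, 150.0, 50))
# approximately {'RC1': 50.0, 'W1': 100.0}
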
Code example #31
0
File: ms_create.py Project: ImageCatInc/sidd
    def do_operation(self):
        # input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value
        
        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(area_field, fp_layer.name()), self.__class__)        
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(ht_field, fp_layer.name()), self.__class__)        
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(HT_FIELD_NAME, svy_layer.name()), self.__class__)        
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(TAX_FIELD_NAME, svy_layer.name()))
        
        # load zone classes
        # the operations below must be performed for each zone 
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones        
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'        
        analyzer = QgsOverlayAnalyzer()        
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)        
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)
        
        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        
        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(TAX_FIELD_NAME, svy_layer.name()))
        
        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {} 
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {} 
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0

        # associate group to ratio value
        for _rec in layer_features(tmp_join_layer):
            _zone = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone], _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone], _samp_grp, _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err, logAPICall.WARNING)
            
        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups", self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value==-1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError("Survey groups must have same number of samples", self.__class__)
            # sort by stories        
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight 
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][group_stories_for_sort[key]] = self.weights[idx]
                
        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())            
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            group_weight = _zone_group_weight[_zone_str]
            try:
                self._taxonomy.parse(_tax_str)            
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str, group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str, _sample_size*group_weight[_sample_grp])
                self.increment_dict(_zone_a_exp[_zone_str], _tax_str, _sample_size*_sample_ht*group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log("error processing sample with building type: %s" % _tax_str, logAPICall.WARNING)
                pass              

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'        
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)        
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)
        
        zone_idx = layer_field_index(tmp_join_layer2, zone_field)        
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)        
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht
        
        # calculate building ratios for each zone        
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a) 
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())            
            # E[A] estimated total building area for zone
            e_at_total = _zone_total_area[_zone] * _zone_total_ht[_zone]/_zone_total_count[_zone]
            
            # calculate expected values  
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]         
                if e_at_cluster == 0 or e_at_total == 0: 
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area 
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building 
                    e_gt_cluster = e_at_cluster / e_nt_cluster

                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster
                                        
                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster
        
        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)

            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str, self._parse_order, self._parse_modifiers, add_times=int(_e_exp*1000))                                            
            # finalize call is required 
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)            
        
        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)
        
        # assign output        
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp    
Code example #32
0
File: grids.py Project: gem/sidd
    def _load_output(self, output_file, output_layername):
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError("Error loading grid file %s" % output_file, self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Code example #33
0
File: join.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        popgrid_layer = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)
        
        # merge with zone 
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # count footprint in each zone
        stats = {}
        _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")        
        _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        for _f in layer_features(tmp_join_layer):
            # retrieve count from statistic
            _gid = _f.attributeMap()[_gid_idx].toString()
            _count = _f.attributeMap()[_cnt_idx].toString()
            if stats.has_key(_gid):
                stats[_gid]+=float(_count) / pop_to_bldg
            else:
                stats[_gid]=float(_count)  / pop_to_bldg          
        
        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)
        try:            
            fields ={
                0 : QgsField(GID_FIELD_NAME, QVariant.Int),
                1 : QgsField(zone_field, QVariant.String),
                2 : QgsField(CNT_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")                     
            f = QgsFeature()            
            for _f in layer_features(zone_layer):
                
                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])                
                
                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    bldg_count = stats[gid]
                except:
                    bldg_count = 0
                f.addAttribute(2, QVariant(bldg_count))
                writer.addFeature(f)
            
            del writer, f
        except Exception as err:            
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err, self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError('Error loading zone output file %s' % output_file, self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Code example #34
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # validate inputs
        fp_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        count_field = self.inputs[3].value
        area_field = self.inputs[4].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(fp_layer)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required
        # if count field is not defined, then generate building count from footprints
        # area_field is not required

        # local variables
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        cnt_idx = ToGrid.STAT_COUNT_IDX

        zone_names, zone_stat, zone_stat2, zone_totals = {}, {}, {}, {}

        # 1. find building count and total area for each zone
        # project geometry into mercator and get area in m2
        mercator_crs = QgsCoordinateReferenceSystem()
        mercator_crs.createFromEpsg(3395)
        mercator_transform = QgsCoordinateTransform(zone_layer.crs(), mercator_crs)

        try:
            # use zone geometry area
            self._create_zone_statistics(zone_layer, zone_field, count_field, zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone
        tmp_grid1 = "grid_" + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + ".shp"
        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
        tmp_grid_lyr1 = self._create_grid(
            tmp_grid1, tmp_grid1_file, x_min, y_min, x_max, y_max, DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE
        )

        # tally total building area if an area field is defined
        bldg_area_idx = layer_field_index(zone_layer, area_field)
        zone_area = {}
        zone_has_area = False
        if bldg_area_idx > 0:
            zone_has_area = True
            zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
            for _f in layer_features(zone_layer):
                gid = _f.attributeMap()[zone_gid_idx].toString()
                area = _f.attributeMap()[bldg_area_idx].toDouble()[0]
                if zone_area.has_key(gid):
                    zone_area[gid] = float(zone_area[gid]) + area
                else:
                    zone_area[gid] = area

        # 3. intersect grids and zones to obtain polygons with
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area)
        # apply ratio to zone building count to obtain count assigned to polygon
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        for _f in layer_features(tmp_join_layer):
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]
            # calculate count/area as proportion of total zone area
            area_ratio = area / stat[area_idx]
            if bldg_cnt_idx > 0:
                bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * area_ratio
            else:
                bldg_cnt = 0
            if zone_has_area:
                area = zone_area[zone_gid] * area_ratio
            else:
                area = stat[area_idx] * area_ratio
            self._update_stat(zone_stat2, "%s|%s" % (grid_gid, zone_gid), bldg_cnt, area)

        # 4. find total buildings in each zone based on footprint
        # - simply join the files and tally count and total area
        tmp_join1 = "joined_%s" % get_unique_filename()
        tmp_join1_file = "%s%s.shp" % (self._tmp_dir, tmp_join1)
        try:
            # do intersection
            analyzer.intersection(fp_layer, tmp_join_layer, tmp_join1_file)
            tmp_join1_layer = load_shapefile(tmp_join1_file, tmp_join1)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_fp_stat = {}
        zone_gid_idx = layer_field_index(tmp_join1_layer, "%s_" % GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join1_layer, "GRID_GID")
        fp_area_idx = layer_field_index(tmp_join1_layer, AREA_FIELD_NAME)
        fp_ht_idx = layer_field_index(tmp_join1_layer, HT_FIELD_NAME)
        fp_has_height = False
        for _f in layer_features(tmp_join1_layer):
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            area = _f.attributeMap()[fp_area_idx].toDouble()[0]  # area comes from geometry, always exists
            ht = _f.attributeMap()[fp_ht_idx].toDouble()[0]
            if ht > 0:
                fp_has_height = True
                area *= ht  # this is actual area to be aggregated at the end
            self._update_stat(zone_fp_stat, "%s|%s" % (grid_gid, zone_gid), 1, area)
            self._update_stat(zone_totals, zone_gid, 1, area)

        # 5. generate grid with adjusted building counts
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for key in zone_stat2.keys():
            (grid_gid, zone_gid) = str(key).split("|")
            s_zone = zone_stat[QString(zone_gid)]  # overall statistics for the zone from zone file (always exists)
            s_zone_grid = zone_stat2[key]  # grid-specific statistic from the zone file (always exists)
            if zone_totals.has_key(QString(zone_gid)):  # overall statistics for the zone from footprints
                s_total = zone_totals[QString(zone_gid)]
            else:
                s_total = [0, 0]  # set to zero if missing
            if zone_fp_stat.has_key(key):  # grid-specific statistic from the footprints
                s_fp = zone_fp_stat[key]
            else:
                s_fp = [0, 0]  # set to zero if missing

            zone_leftover_count = s_zone[cnt_idx] - s_total[cnt_idx]
            if zone_has_area:
                zone_leftover_area = zone_area[QString(zone_gid)] - s_total[area_idx]
            else:
                zone_leftover_area = s_zone[area_idx] - s_total[area_idx]
            if zone_leftover_count > 0:
                # there are still buildings not accounted for
                # distribute to the grid cell based on the ratio of its leftover area to the zone leftover area
                # (leftover area is the zone area after subtracting footprint areas);
                # a standalone sketch of this rule follows this example
                grid_leftover_count = zone_leftover_count * (
                    (s_zone_grid[area_idx] - s_fp[area_idx]) / zone_leftover_area
                )
                grid_count = s_fp[cnt_idx] + grid_leftover_count
            else:
                grid_count = s_fp[cnt_idx]

            if fp_has_height:
                # area can be actual area based on footprint area * height
                area = s_fp[area_idx]
            elif zone_has_area:
                area = s_zone_grid[area_idx]
            else:
                # no area defined
                area = 0  # max(s_zone_grid[area_idx], s_fp[area_idx])

            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, grid_count)
            f.addAttribute(3, area)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        del tmp_join1_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join1_file)

        # store data in output
        self._load_output(output_file, output_layername)
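The core of example #34 is step 5: buildings already counted from footprints stay in their grid cell, and any remainder implied by the zone total is spread over cells in proportion to the zone area in each cell that footprints do not already cover. A minimal standalone sketch of that rule in plain Python (function and argument names are illustrative, not taken from the SIDD code base):

def distribute_grid_count(zone_count, zone_fp_count, zone_leftover_area,
                          cell_fp_count, cell_zone_area, cell_fp_area):
    """Return the building count assigned to one grid cell.

    zone_count         -- total count reported for the zone
    zone_fp_count      -- buildings counted from footprints in the whole zone
    zone_leftover_area -- zone area minus total footprint area in the zone
    cell_fp_count      -- footprint buildings falling in this cell
    cell_zone_area     -- zone area falling in this cell
    cell_fp_area       -- footprint area falling in this cell
    """
    leftover_count = zone_count - zone_fp_count
    if leftover_count <= 0 or zone_leftover_area <= 0:
        # footprints already account for the whole zone total
        return cell_fp_count
    share = (cell_zone_area - cell_fp_area) / float(zone_leftover_area)
    return cell_fp_count + leftover_count * share

For instance, a zone reporting 100 buildings with 40 seen in footprints and 1.0e6 m2 of uncovered area gives distribute_grid_count(100, 40, 1.0e6, 10, 2.5e5, 5.0e4) == 10 + 60 * 0.2 == 22.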
コード例 #35
0
ファイル: exposure.py プロジェクト: ImageCatInc/sidd
    @logAPICall
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        svy_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(svy_layer)
        
        total_features = svy_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to keep the temporary tally on disk
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')            
        else:
            db = {}

        # tally statistics for each grid_id/building type combination
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        for f in layer_features(svy_layer):
            geom = f.geometry()
            centroid  = geom.centroid().asPoint()
            grid_id = latlon_to_grid(centroid.y(), centroid.x())                        
            tax_str = str(f.attributeMap()[tax_idx].toString())

            key = '%s %s' % (tax_str, grid_id)
            if db.has_key(key):
                db[key] = str(int(db[key]) + 1) # value as string required by bsddb
            else:
                db[key] = '1'                   # value as string required by bsddb

        # write the tallied counts out as the exposure layer
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", 
                                         self._fields, self._outputGeometryType(), self._crs, 
                                         "ESRI Shapefile")
            f = QgsFeature()            
            gid = 0
            for key, val in db.iteritems():
                (tax_str, grid_id) = key.split(' ')
                lon, lat = grid_to_latlon(int(grid_id))
                
                f.setGeometry(self._outputGeometryFromGridId(grid_id))
                f.addAttribute(0, QVariant(grid_id))
                f.addAttribute(1, QVariant(lon))
                f.addAttribute(2, QVariant(lat))
                f.addAttribute(3, QVariant(tax_str))
                f.addAttribute(4, QVariant(''))
                f.addAttribute(5, QVariant(val))
                writer.addFeature(f)
                gid += 1
            del writer, f
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err, self.__class__)
        
        # load shapefile as layer        
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:            
            raise OperatorError('Error loading exposure file %s' % (exposure_file), self.__class__)
        
        # store data in output
        self.outputs[0].value = exposure_layer
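Example #35 keys its tally on a 'taxonomy grid_id' string and keeps the running count as a string, so the same code path works with either a plain dict or an on-disk bsddb B-tree when the survey is too large to hold in memory. A simplified sketch of that pattern (the threshold value and helper names are illustrative):

import bsddb  # Python 2 module, as used in the examples above

def open_count_store(total_features, tmp_db_file, max_in_memory=100000):
    # fall back to an on-disk B-tree when the layer is large
    if total_features > max_in_memory:
        return bsddb.btopen(tmp_db_file, 'c')
    return {}

def tally(store, tax_str, grid_id):
    key = '%s %s' % (tax_str, grid_id)
    if store.has_key(key):
        store[key] = str(int(store[key]) + 1)  # bsddb values must be strings
    else:
        store[key] = '1'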
コード例 #36
0
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        fp_layer = self.inputs[0].value
        zone_field = self.inputs[1].value

        # aggregate footprint into grids
        logAPICall.log('aggregate statistic for grid ...', logAPICall.DEBUG)
        total_features = fp_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to keep the temporary tally on disk
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')
            use_db = True
        else:
            db = {}
            use_db = False

        zone_idx = layer_field_index(fp_layer, zone_field)
        for f in layer_features(fp_layer):
            geom = f.geometry()
            zone_str = str(f.attributeMap()[zone_idx].toString())
            centroid  = geom.centroid().asPoint()
            # use floor, this truncates all points within grid to grid's
            # bottom-left corner                        
            x = math.floor(centroid.x() / DEFAULT_GRID_SIZE)
            y = math.floor(centroid.y() / DEFAULT_GRID_SIZE)
            key = '%s %d %d' % (zone_str, x,y)
            if db.has_key(key):
                db[key] = str(int(db[key]) + 1)
            else:
                db[key] = '1'
        
        # output grid
        logAPICall.log('create grid ...', logAPICall.DEBUG)
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(zone_field, QVariant.String),
        }
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPoint , self._crs, "ESRI Shapefile")
            f = QgsFeature()
            for key, val in db.iteritems():
                (zone_str, x, y) = key.split(' ')
                # points were aggregated to the grid's bottom-left corner
                # add half grid size to place point at center of grid
                point = QgsPoint(int(x)*DEFAULT_GRID_SIZE+(DEFAULT_GRID_SIZE/2.0), 
                                 int(y)*DEFAULT_GRID_SIZE+(DEFAULT_GRID_SIZE/2.0))
                f.setGeometry(QgsGeometry.fromPoint(point))
                f.addAttribute(0, QVariant(point.x()))
                f.addAttribute(1, QVariant(point.y()))
                f.addAttribute(2, QVariant(val))
                f.addAttribute(3, QVariant(zone_str))
                writer.addFeature(f)
            del writer
        except Exception as err:
            remove_shapefile(grid_file)
            raise OperatorError("error creating joined grid: " % err, self.__class__)
        
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading created grid file %s' % grid_file, self.__class__)
                
        # clean up                
        if use_db:
            db.close()
            os.remove(tmp_db_file)
            
        # done
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
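Example #36 bins each footprint centroid by flooring its coordinates to the bottom-left corner of a grid cell, then shifts the aggregated point back to the cell centre before writing it out. Isolated into a small helper (the cell size here is only a placeholder; the real value comes from the SIDD DEFAULT_GRID_SIZE constant):

import math

GRID_SIZE = 1 / 120.0  # placeholder cell size in decimal degrees

def cell_center(x, y, grid_size=GRID_SIZE):
    """Snap a point to its grid cell and return the cell centre."""
    cell_x = math.floor(x / grid_size)
    cell_y = math.floor(y / grid_size)
    return (cell_x * grid_size + grid_size / 2.0,
            cell_y * grid_size + grid_size / 2.0)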
コード例 #37
0
ファイル: exposure.py プロジェクト: ImageCatInc/sidd
    @logAPICall
    def do_operation(self):
        """ perform apply mapping scheme operation """
        
        # input/output data checking already done during property set
        src_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        ms = self.inputs[3].value
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(src_layer)
        self._test_layer_field_exists(src_layer, zone_field)
        self._test_layer_field_exists(src_layer, count_field)
        
        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%sexp_%s.shp' % (self._tmp_dir, exposure_layername)

        # loop through all input features
        provider = src_layer.dataProvider()
        if provider is None:
            raise OperatorError("input layer not correctly loaded", self.__class__)
        zone_idx = layer_field_index(src_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError("field %s not found in input layer" % zone_field, self.__class__)
        count_idx = layer_field_index(src_layer, count_field)
        if count_idx == -1:
            raise OperatorError("field %s not found in input layer" % count_field, self.__class__)
        gid_idx = layer_field_index(src_layer, GID_FIELD_NAME)
        if gid_idx == -1:
            raise OperatorError("field %s not found in input layer" % GID_FIELD_NAME, self.__class__)
        area_idx = layer_field_index(src_layer, AREA_FIELD_NAME)
        
        provider.select(provider.attributeIndexes(), provider.extent())
        provider.rewind()

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields, provider.geometryType(), self._crs, "ESRI Shapefile")
            out_feature = QgsFeature()
            
            gid = 0
            for in_feature in layer_features(src_layer):
                geom = in_feature.geometry()
                centroid = geom.centroid().asPoint ()
                gid = in_feature.attributeMap()[gid_idx]
                zone_str = str(in_feature.attributeMap()[zone_idx].toString())
                count = in_feature.attributeMap()[count_idx].toDouble()[0]
                if area_idx > 0:
                    area = in_feature.attributeMap()[area_idx].toDouble()[0]
                else:
                    area = 0
                
                count = int(count+0.5)
                if count == 0:
                    continue                            
                
                stats = ms.get_assignment_by_name(zone_str)
                
                # fail if no mapping scheme is defined for the zone
                if stats is None:
                    raise Exception("no mapping scheme found for zone %s" % zone_str)
                
                for _sample in stats.get_samples(count, self._extrapolationOption):
                    # write out if there are structures assigned
                    _type = _sample[0]
                    _cnt = _sample[1]
                    
                    if area > 0:
                        # use area provided by footprint/zone if defined
                        _size = area * ( float(_sample[1]) / count )
                        if _sample[3] > 0 and _sample[2] > 0:
                            _cost = (_sample[3] / _sample[2]) * area
                        else:
                            _cost = 0
                    else:
                        # use mapping scheme generic area otherwise
                        _size = _sample[2]
                        _cost = _sample[3]
                    
                    if _cnt > 0:
                        out_feature.setGeometry(geom)
                        #out_feature.addAttribute(0, QVariant(gid))
                        out_feature.addAttribute(0, gid)
                        out_feature.addAttribute(1, QVariant(centroid.x()))
                        out_feature.addAttribute(2, QVariant(centroid.y()))
                        out_feature.addAttribute(3, QVariant(_type))
                        out_feature.addAttribute(4, QVariant(zone_str))
                        out_feature.addAttribute(5, QVariant(_cnt))
                        out_feature.addAttribute(6, QVariant(_size))
                        out_feature.addAttribute(7, QVariant(_cost))
                        writer.addFeature(out_feature)
            del writer, out_feature
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err, self.__class__)
            
        del src_layer
        
        # load shapefile as layer        
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:            
            raise OperatorError('Error loading exposure file %s' % exposure_file, self.__class__)
        
        # store data in output
        self.outputs[0].value = exposure_layer
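Example #37 leans on MappingScheme.get_samples to split each zone's building count across structural types; that implementation is not shown in these snippets. The sketch below is only a conceptual stand-in that turns a count plus a fraction table into the (type, count) pairs the writer loop expects, handing the rounding remainder to the most frequent type:

def split_count_by_fraction(total_count, type_fractions):
    """type_fractions: list of (type_string, fraction) pairs summing to ~1.0."""
    counts = [(t, int(total_count * frac)) for t, frac in type_fractions]
    assigned = sum(c for _t, c in counts)
    if counts and assigned != total_count:
        # give the rounding remainder to the largest fraction
        idx = max(range(len(type_fractions)), key=lambda i: type_fractions[i][1])
        t, c = counts[idx]
        counts[idx] = (t, c + (total_count - assigned))
    return [(t, c) for t, c in counts if c > 0]

For example, split_count_by_fraction(10, [('RC', 0.62), ('W', 0.38)]) returns [('RC', 7), ('W', 3)].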
コード例 #38
0
ファイル: grids.py プロジェクト: gem/sidd
    def do_operation(self):
        # validate inputs        
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        area_field = self.inputs[3].value
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)        
        self._test_layer_field_exists(zone_layer, count_field)
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        #cnt_idx = ToGrid.STAT_COUNT_IDX
        
        # 1. find building count and total area for each zone
        zone_names, zone_stat= {}, {}
        try:
            self._create_zone_statistics(zone_layer, zone_field, count_field, 
                                         zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)    
    
        # 2. create grids around extent of zone 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
        try:
            extent = zone_layer.extent()
            [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
            tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file, \
                                              x_min, y_min, x_max, y_max, \
                                              DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)            
        except Exception as err:
            raise OperatorError(str(err), self.__class__)    
        
        # 3. intersect grids and zones to obtain polygons with
        # - grid_id and zone_id
        # - fraction of the zone falling in each polygon (polygon area / zone area)
        # apply that fraction to the zone building count to obtain the count assigned to the polygon
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally        
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        bldg_area_idx = layer_field_index(tmp_join_layer, area_field)
        mercator_transform = QgsCoordinateTransform(tmp_join_layer.crs(),
                                                    self.mercator_crs)          

        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
        }    
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature() 
        for _f in layer_features(tmp_join_layer):
            # get area of polygon            
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]
            
            # calculate count/area as proportion of total zone area
            bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * (area/stat[area_idx])
            if bldg_area_idx > 0:
                bldg_area = _f.attributeMap()[bldg_area_idx].toDouble()[0] * (area/stat[area_idx])                
            else:
                bldg_area = 0 

            # create output record
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, bldg_cnt)
            f.addAttribute(3, bldg_area)
            writer.addFeature(f)        
        del writer    

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
コード例 #39
0
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        zone_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)

        x_off = self._x_off
        y_off = self._y_off

        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [
            extent.xMinimum(),
            extent.yMinimum(),
            extent.xMaximum(),
            extent.yMaximum()
        ]

        # create grid based on extent of given region
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'

        try:
            self._write_grid_shapefile(tmp_grid1_file, x_min, y_min, x_max,
                                       y_max, x_off, y_off)
        except:
            remove_shapefile(tmp_grid1_file)
            raise OperatorError('error creating temporary grid',
                                self.__class__)

        tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)

        # temporary grid for joined shape with all grid points not within region removed
        tmp_grid2 = 'grid_' + get_unique_filename()
        tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
        tmp_grid2_layer = None
        try:
            analyzer = QgsOverlayAnalyzer()
            analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
            tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
        except:
            raise OperatorError('error creating grid', self.__class__)

        # create result layer
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = self._tmp_dir + grid_layername + '.shp'
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
            lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)
            for _f in layer_features(tmp_grid2_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                writer.addFeature(f)
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid', self.__class__)

        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % grid_file,
                                self.__class__)

        # clean up
        del analyzer, tmp_grid1_layer, tmp_grid2_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_grid2_file)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
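Example #39 first lays a regular point grid over the zone extent (via self._write_grid_shapefile) and then keeps only the points that intersect the zone. The grid-generation step itself is just two nested sweeps over the bounding box; a plain-Python sketch independent of QGIS, where x_off/y_off are the point spacings used above:

def grid_points(x_min, y_min, x_max, y_max, x_off, y_off):
    """Yield (lon, lat) points covering the bounding box at the given spacing."""
    y = y_min
    while y <= y_max:
        x = x_min
        while x <= x_max:
            yield (x, y)
            x += x_off
        y += y_off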
コード例 #40
0
ファイル: grids.py プロジェクト: gem/sidd
    def do_operation(self):
        """ perform create mappin """
        # validate inputs 
        popgrid_layer = self.inputs[0].value        
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(popgrid_layer)
        self._test_layer_field_exists(popgrid_layer, CNT_FIELD_NAME)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, zone_field)        
        # count_field is not required        
        # if count field is not defined, then generate building count from footprints
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()

        # intersect grids and zones to obtain polygons with 
        # - population and zone_id
        # - apply ratio to population to obtain building count                  
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # generate grid with building counts
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        pop_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        zone_idx = layer_field_index(tmp_join_layer, zone_field) 
        for _f in layer_features(tmp_join_layer):
            pop_count = _f.attributeMap()[pop_idx].toDouble()[0]
            zone = _f.attributeMap()[zone_idx].toString()
            
            # 1. get geometry
            geom = _f.geometry()
            # 2. get original centroid point and project if required
            centroid  = geom.centroid().asPoint()
            grid_gid = latlon_to_grid(centroid.y(), centroid.x())
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone)
            f.addAttribute(2, pop_count / pop_to_bldg)
            writer.addFeature(f)
        del writer
        
        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
コード例 #41
0
ファイル: join.py プロジェクト: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value        
        count_field = self.inputs[2].value
        grid_layer = self.inputs[3].value        

        zone_stats = {}
        zone_count_stats = {}
        gid_idx = layer_field_index(zone_layer, self._gid_field)         
        count_idx = layer_field_index(zone_layer, count_field)
        for _f in layer_features(zone_layer):
            gid = _f.attributeMap()[gid_idx].toString()
            zone_stats[gid] = 0
            zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]
        
        # create storage for temporary output data
        use_grid_db = grid_layer.dataProvider().featureCount() > MAX_FEATURES_IN_MEMORY
        if use_grid_db:
            tmp_grid_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            grid_points = bsddb.btopen(tmp_grid_db_file, 'c')
        else:
            grid_points = {}
        
        # merge to create stats
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()        
        try:
            analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,[zone_field, count_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        stats = layer_multifields_stats(tmp_join_layer, [zone_field, count_field])
        if stats == False:
            raise OperatorError("error creating statistic based on input files",
                                self.__class__)

        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        count_idx = layer_field_index(tmp_join_layer, count_field)
        lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
        lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
        gid_idx = layer_field_index(tmp_join_layer, self._gid_field)        
        
        try:        
            for _f in layer_features(tmp_join_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                # update stats
                zone_stats[gid] += 1
                grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
        except Exception as err:
            raise OperatorError("error processing joined layer: " % err, self.__class__)

        # test for zones without a grid point assigned
        count_idx = layer_field_index(zone_layer, count_field)
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        zone_idx = layer_field_index(zone_layer, zone_field)
        _x_off, _y_off = self._x_off / 2.0,  self._y_off / 2.0
        try:
            for _f in layer_features(zone_layer):
                centroid = _f.geometry().centroid().asPoint()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()
                
                if zone_stats[gid] == 0:
                    # snap the centroid to its cell's lower-left corner, then shift to the cell centre
                    lon = int(centroid.x()/self._x_off)*self._x_off + _x_off
                    lat = int(centroid.y()/self._y_off)*self._y_off + _y_off

                    #self._write_feature(writer, f, lon, lat, zone_str, count_val)
                    zone_stats[gid] += 1                                        
                    grid_points[self._make_key(zone_str, gid, lon, lat)] = 1                             
        except Exception as err:
            raise OperatorError("error processing missing points: " % err, self.__class__)

        # output result
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(zone_field, QVariant.String),
            3 : QgsField(count_field, QVariant.Double)
        }
        grid_layername = 'grid_%s' % (get_unique_filename())
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            f = QgsFeature()
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            for key, value in grid_points.iteritems():                
                [zone, zone_gid, lon, lat] = self._parse_key(key)                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                """                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))            
                f.addAttribute(3, QVariant(count_val / total_features))
                writer.addFeature(f)
                """
                value = float(value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
                #grid_points[key] = value 
                self._write_feature(writer, f, lon, lat, zone, value)
            del writer
        except Exception as err:
            raise OperatorError("error creating joined grid file: " % err, self.__class__)
            
        # load result layer
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading joined grid file %s' % grid_file, self.__class__)
        
        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
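Example #41 counts how many joined grid points fall inside each zone (zone_stats) and records the zone's reported building count (zone_count_stats); every point then receives an equal share of the zone total, and zones that received no point at all get a single synthetic point near their centroid. The redistribution step reduces to the arithmetic below (keys and names are illustrative; the operator packs zone, gid, lon and lat into a string key instead):

def redistribute(grid_points, zone_stats, zone_count_stats):
    """grid_points maps (zone_gid, lon, lat) keys to 1; returns per-point counts."""
    result = {}
    for key, value in grid_points.items():
        zone_gid = key[0]
        result[key] = float(value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
    return result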
コード例 #42
0
ファイル: grid.py プロジェクト: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set         
        zone_layer = self.inputs[0].value        
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
                
        x_off = self._x_off
        y_off = self._y_off

        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]

        # create grid based on extent of given region 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'

        try:
            self._write_grid_shapefile(tmp_grid1_file,
                                       x_min, y_min, x_max, y_max,
                                       x_off, y_off)
        except:
            remove_shapefile(tmp_grid1_file)
            raise OperatorError('error creating temporary grid', self.__class__)        
        
        tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)
        
        # temporary grid for joined shape with all grid points not within region removed 
        tmp_grid2 = 'grid_' + get_unique_filename()
        tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
        tmp_grid2_layer = None
        try:
            analyzer = QgsOverlayAnalyzer()        
            analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
            tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
        except:
            raise OperatorError('error creating grid', self.__class__)

        # create result layer
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = self._tmp_dir + grid_layername + '.shp'
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                         QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
            lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)        
            for _f in layer_features(tmp_grid2_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                writer.addFeature(f)                
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid', self.__class__)

        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % grid_file, self.__class__)
        
        # clean up
        del analyzer, tmp_grid1_layer, tmp_grid2_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_grid2_file)
        
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
コード例 #43
0
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field

        logAPICall.log(
            'survey %s, taxfield %s, zone %s, zone_field %s' %
            (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
            logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)

        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)

        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional = {StatisticNode.AverageSize: _area}
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str),
                           logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(
                    _tax_str, self._parse_order, self._parse_modifiers,
                    additional)
            except TaxonomyParseError as perr:
                logAPICall.log(
                    "error parsing case %s, %s" % (str(_tax_str), str(perr)),
                    logAPICall.WARNING)

        # finalize statistics for each zone before storing in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = ms
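Example #43 pushes every surveyed building's taxonomy string into the Statistics object of its zone, so the resulting mapping scheme is, at its core, a per-zone frequency table over taxonomy strings. A dict-based stand-in that ignores the SIDD parse order, modifiers and the average-size/unit-cost attributes (all names here are illustrative):

from collections import defaultdict

def build_frequency_tables(joined_records):
    """joined_records: iterable of (zone_str, tax_str) from the intersected layer."""
    tables = defaultdict(lambda: defaultdict(int))
    for zone_str, tax_str in joined_records:
        tables[zone_str][tax_str] += 1
    return tables

def to_fractions(table):
    """Normalise one zone's raw counts into the fractions a mapping scheme stores."""
    total = float(sum(table.values()))
    return dict((tax, cnt / total) for tax, cnt in table.items())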
コード例 #44
0
ファイル: popgrid.py プロジェクト: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set        
        # load and verify
        popgrid_file = self.inputs[0].value
        pop_field = self.inputs[1].value
        
        popgrid_layername = 'zone_%s' % get_unique_filename()
        try:
            tmp_popgrid_layer = load_shapefile_verify(popgrid_file, popgrid_layername,
                                                   [pop_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        
        logAPICall.log('tmp_popgrid_layer.crs().epsg() %s ' % tmp_popgrid_layer.crs().epsg(),
                       logAPICall.DEBUG)
        if tmp_popgrid_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_popgrid_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False
        
        # output grid
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.Int),
            1 : QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        pop_idx = layer_field_index(tmp_popgrid_layer, pop_field)
        output_file = '%spop_grid_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)        
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_popgrid_layer):
                # NOTE: geom.transform does projection in place to underlying C object
                 
                # 1. get geometry
                geom = _f.geometry()                
                # 2. reproject if required
                if transform_required:
                    geom = transform.transform(geom)
                
                # 3. write to file
                gid += 1
                f.setGeometry(geom)
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, _f.attributeMap()[pop_idx])
                writer.addFeature(f)            
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err, self.__class__)

        popgrid_layername = 'popgrid_%s' % get_unique_filename()
        popgrid_layer = load_shapefile(output_file, popgrid_layername)
        if not popgrid_layer:
            raise OperatorError('Error loading population grid file %s' % output_file, self.__class__)
        
        # clean up
        del tmp_popgrid_layer
        
        # store data in output
        self.outputs[0].value = popgrid_layer
        self.outputs[1].value = output_file
コード例 #45
0
ファイル: footprint.py プロジェクト: gem/sidd
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        # load and verify
        infile = self.inputs[0].value

        tmp_fp_layername = 'fp_%s' % get_unique_filename()
        tmp_fp_layer = load_shapefile(infile, tmp_fp_layername)
        if not tmp_fp_layer:
            raise OperatorError('Error loading footprint file %s' % infile,
                                self.__class__)

        if self._fp_ht_field is not None:
            ht_idx = layer_field_index(tmp_fp_layer, self._fp_ht_field)
        else:
            ht_idx = -1
        logAPICall.log(
            'tmp_fp_layer.crs().epsg() %s ' % tmp_fp_layer.crs().epsg(),
            logAPICall.DEBUG)
        if tmp_fp_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_fp_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False

        mercator_crs = QgsCoordinateReferenceSystem()
        #mercator_crs.createFromProj4("+proj=merc +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
        mercator_crs.createFromEpsg(3395)
        mercator_transform = QgsCoordinateTransform(tmp_fp_layer.crs(),
                                                    mercator_crs)

        # output grid
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(LON_FIELD_NAME, QVariant.Double),
            2: QgsField(LAT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
            4: QgsField(HT_FIELD_NAME, QVariant.Int),
        }
        output_file = '%sfpc_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_fp_layer):
                # NOTE: geom.transform does projection in place to underlying
                #       C object, for some reason, multiple projection does not
                #       work correctly. following is a work-around

                # 1. get geometry
                geom = _f.geometry()
                # 2. get original centroid point and project if required
                centroid = geom.centroid().asPoint()
                if transform_required:
                    t_centroid = transform.transform(centroid)
                else:
                    t_centroid = centroid

                # 3. project into mercator and get area in m2
                geom.transform(mercator_transform)
                area = geom.area()

                # write to file
                gid += 1
                f.setGeometry(QgsGeometry.fromPoint(t_centroid))
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, QVariant(t_centroid.x()))
                f.addAttribute(2, QVariant(t_centroid.y()))
                f.addAttribute(3, QVariant(area))
                if ht_idx != -1:
                    f.addAttribute(4, _f.attributeMap()[ht_idx])
                else:
                    f.addAttribute(4, QVariant(0))
                writer.addFeature(f)
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err,
                                self.__class__)

        fp_layer = load_shapefile(output_file, tmp_fp_layername)
        if not fp_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % output_file,
                self.__class__)

        # clean up
        del tmp_fp_layer

        # store data in output
        self.outputs[0].value = fp_layer
        self.outputs[1].value = output_file
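Example #45 (like #34 and #38) measures footprint areas by reprojecting each geometry into World Mercator (EPSG:3395) before calling area(), because areas computed directly in geographic coordinates would come out in square degrees. A minimal sketch of that step, assuming the same QGIS 1.x API these snippets already use; note that Mercator areas grow increasingly exaggerated away from the equator, which these operators accept as an approximation:

from qgis.core import QgsCoordinateReferenceSystem, QgsCoordinateTransform

def mercator_area(geom, source_crs):
    """Return the area of geom after projecting it to World Mercator.

    NOTE: QgsGeometry.transform() projects in place, so pass a throwaway
    geometry (e.g. one fetched from the feature just for this call), as the
    examples above do.
    """
    mercator_crs = QgsCoordinateReferenceSystem()
    mercator_crs.createFromEpsg(3395)  # World Mercator
    transform = QgsCoordinateTransform(source_crs, mercator_crs)
    geom.transform(transform)
    return geom.area()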