Example #1
 def do_operation(self):
     """ perform create mapping scheme operation """
     
     # input/output verification already performed during set input/output
     survey_layer = self.inputs[0].value
     tax_field = self._tax_field
     
     # merge to create stats
     ms = MappingScheme(self._taxonomy)
     stats = Statistics(self._taxonomy)
     ms.assign(MappingSchemeZone('ALL'), stats)
     
     # loop through all input features
     tax_idx = layer_field_index(survey_layer, tax_field)
     area_idx = layer_field_index(survey_layer, AREA_FIELD_NAME)
     cost_idx = layer_field_index(survey_layer, COST_FIELD_NAME)
     
     for _f in layer_features(survey_layer):
         _tax_str = str(_f.attributeMap()[tax_idx].toString())
         additional = {}
         _area = _f.attributeMap()[area_idx].toDouble()[0]
         if _area > 0:
             additional = {StatisticNode.AverageSize: _area} 
         _cost = _f.attributeMap()[cost_idx].toDouble()[0]
         if _cost > 0:
             additional = {StatisticNode.UnitCost: _cost}                            
         try:
             stats.add_case(_tax_str, self._parse_order, self._parse_modifiers, additional)
         except TaxonomyParseError as perr:
             logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING)
     
     # store data in output
     stats.finalize()        
     
     self.outputs[0].value = ms
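A note on the attribute-access idiom used throughout these examples: in the PyQt4/QGIS 1.x API they target, attributeMap() returns QVariant values, and QVariant.toDouble() returns a (value, ok) pair, which is why every numeric read ends in [0]. A minimal pure-Python sketch of that convention, using a stand-in class instead of the real QVariant:

class FakeVariant(object):
    """Stand-in for PyQt4's QVariant, for illustration only."""
    def __init__(self, raw):
        self._raw = raw

    def toDouble(self):
        # mirror QVariant.toDouble(), which returns (value, ok)
        try:
            return float(self._raw), True
        except (TypeError, ValueError):
            return 0.0, False

area_attr = FakeVariant('125.5')
area = area_attr.toDouble()[0]    # take the value, ignore the ok flag
print(area)                       # 125.5
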
Example #2
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value                
        fp_layer = self.inputs[2].value
        
        # merge with zone to get assignment
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()        
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,[zone_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(zone_field, QVariant.String),
        }
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        fp_layername = 'fpc_%s' % get_unique_filename()
        fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
        try:
            writer = QgsVectorFileWriter(fp_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(tmp_join_layer):                
                centroid = _f.geometry().centroid().asPoint()
                lon = centroid.x()
                lat = centroid.y()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))
                writer.addFeature(f)
            
            del writer
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            remove_shapefile(fp_file)
            raise OperatorError("error creating joined grid: %s" % err, self.__class__)
        
        # load shapefile as layer
        fp_layer = load_shapefile(fp_file, fp_layername)
        if not fp_layer:
            raise OperatorError('Error loading footprint centroid file %s' % fp_file, self.__class__)
                
        # clean up
        del tmp_join_layer        
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = fp_layer
        self.outputs[1].value = fp_file
Example #3
    def test_LoadGEMDBSurvey(self, skipTest=False):
        logging.debug('test_LoadSurvey %s' % skipTest)

        loader = GEMDBSurveyLoader(self.operator_options)
        loader.inputs = [
            OperatorData(OperatorDataTypes.File, self.gemdb3_path),
            OperatorData(OperatorDataTypes.StringAttribute, 'GEMDB'),
            OperatorData(OperatorDataTypes.StringAttribute, None),
        ]
        loader.outputs = [
            OperatorData(OperatorDataTypes.Survey),
            OperatorData(OperatorDataTypes.Shapefile),
        ]
        loader.do_operation()
        if skipTest:
            return loader.outputs

        # perform test
        survey_layer = loader.outputs[0].value
        self.assertEqual(survey_layer.dataProvider().featureCount(), 24)

        grp_idx = layer_field_index(survey_layer, "GROUP")
        groups = {}
        for svy in layer_features(survey_layer):
            group = str(svy.attributeMap()[grp_idx].toString())
            if not groups.has_key(group):
                groups[group] = 1
            else:
                groups[group] += 1
        self.assertEqual(len(groups), 3)
        self.assertEqual(groups.values(), [8, 8, 8])

        # clean up
        self._clean_layer(loader.outputs)
Example #4
    def test_LoadGEMDBSurvey(self, skipTest=False):
        logging.debug("test_LoadSurvey %s" % skipTest)

        loader = GEMDBSurveyLoader(self.operator_options)
        loader.inputs = [
            OperatorData(OperatorDataTypes.File, self.gemdb3_path),
            OperatorData(OperatorDataTypes.StringAttribute, "GEMDB"),
            OperatorData(OperatorDataTypes.StringAttribute, None),
        ]
        loader.outputs = [OperatorData(OperatorDataTypes.Survey), OperatorData(OperatorDataTypes.Shapefile)]
        loader.do_operation()
        if skipTest:
            return loader.outputs

        # perform test
        survey_layer = loader.outputs[0].value
        self.assertEqual(survey_layer.dataProvider().featureCount(), 24)

        grp_idx = layer_field_index(survey_layer, "GROUP")
        groups = {}
        for svy in layer_features(survey_layer):
            group = str(svy.attributeMap()[grp_idx].toString())
            if not groups.has_key(group):
                groups[group] = 1
            else:
                groups[group] += 1
        self.assertEqual(len(groups), 3)
        self.assertEqual(groups.values(), [8, 8, 8])

        # clean up
        self._clean_layer(loader.outputs)
Example #5
 def findFeatureExtentByAttribute(self, layer, field, value):
     """ 
     find extent of all objects in QGIS layer matching condition "field=value"         
     """
     fidx = layer_field_index(layer, field)
     if fidx == -1:
         return None
     xmin, xmax, ymin, ymax = 180, -180, 90, -90
     extent = QgsRectangle(xmin, ymin, xmax, ymax)
     need_transform = layer.crs() != self.canvas.mapRenderer().destinationCrs()
     if need_transform:
         transform = QgsCoordinateTransform(
             layer.crs(),
             self.canvas.mapRenderer().destinationCrs())
     for feature in layer_features(layer):
         if str(value) == feature.attributeMap()[fidx].toString():
             f_extent = feature.geometry().boundingBox()
             if need_transform:
                 f_extent = transform.transform(f_extent)
             xmin = min(f_extent.xMinimum(), xmin)
             xmax = max(f_extent.xMaximum(), xmax)
             ymin = min(f_extent.yMinimum(), ymin)
             ymax = max(f_extent.yMaximum(), ymax)
     extent.set(xmin, ymin, xmax, ymax)
     return extent
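The loop above is a plain min/max fold over feature bounding boxes, starting from deliberately inverted world bounds so the first matching feature always tightens them. A minimal pure-Python sketch of the same arithmetic with hypothetical boxes given as (xmin, ymin, xmax, ymax) tuples:

# hypothetical feature bounding boxes
boxes = [(10.1, 45.0, 10.4, 45.2),
         (10.3, 44.8, 10.9, 45.1)]

# start from inverted bounds; any real box will replace them
xmin, xmax, ymin, ymax = 180, -180, 90, -90
for bxmin, bymin, bxmax, bymax in boxes:
    xmin = min(bxmin, xmin)
    xmax = max(bxmax, xmax)
    ymin = min(bymin, ymin)
    ymax = max(bymax, ymax)

print(xmin, ymin, xmax, ymax)   # 10.1 44.8 10.9 45.2
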
Example #6
    def test_LoadZone2(self, skipTest=False, zone=2):
        logging.debug('test_LoadZoneCount %s' % skipTest)

        if zone == 2:
            zone_path = self.zone2_path
            zone_field = self.zone2_field
            zone_count_field = self.zone2_bldgcount_field
            zone_area_field = self.zone2_bldgarea_field
        elif zone == 3:
            zone_path = self.zone3_path
            zone_field = self.zone3_field
            zone_count_field = self.zone3_bldgcount_field
            zone_area_field = self.zone3_bldgarea_field
        else:
            raise Exception("zone not supported")

        loader = ZoneCountLoader(self.operator_options)
        loader.inputs = [
            OperatorData(OperatorDataTypes.Shapefile, zone_path),
            OperatorData(OperatorDataTypes.StringAttribute, zone_field),
            OperatorData(OperatorDataTypes.StringAttribute, zone_count_field),
            OperatorData(OperatorDataTypes.StringAttribute, zone_area_field),
        ]
        loader.outputs = [
            OperatorData(OperatorDataTypes.Zone),
            OperatorData(OperatorDataTypes.Shapefile)
        ]
        loader.do_operation()
        if skipTest:
            return loader.outputs

        zones = loader.outputs[0].value
        self.assertEquals(zones.featureCount(), self.zone2_feature_count)
        bldg_cnt_idx = layer_field_index(zones, zone_count_field)
        bldg_area_idx = layer_field_index(zones, zone_area_field)
        total_bldg_cnt, total_bldg_area = 0, 0
        for feature in layer_features(zones):
            total_bldg_cnt += feature.attributeMap()[bldg_cnt_idx].toDouble()[0]
            total_bldg_area += feature.attributeMap()[bldg_area_idx].toDouble()[0]
        self.assertEquals(total_bldg_cnt, self.zone2_total_bldg_cnt)
        self.assertEquals(total_bldg_area, self.zone2_total_bldg_area)

        # clean up
        del zones
        self._clean_layer(loader.outputs)
Example #7
    def test_ZoneToGridJoin(self, skipTest=False):
        logging.debug('test_ZoneFootprintJoin %s' % skipTest)

        # load data
        zone_data = self.test_LoadZone2(True, 2)

        # test 1
        merger = ZoneToGrid(self.operator_options)
        merger.inputs = [
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_field),
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone2_bldgcount_field),
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone2_bldgarea_field),
        ]
        merger.outputs = [
            OperatorData(OperatorDataTypes.Grid),
            OperatorData(OperatorDataTypes.Shapefile)
        ]
        merger.do_operation()

        if skipTest:
            # clean up intermediate data
            self._clean_layer(zone_data)
            return merger.outputs

        self.assertTrue(os.path.exists(merger.outputs[1].value))
        cnt_idx = layer_field_index(merger.outputs[0].value, CNT_FIELD_NAME)
        area_idx = layer_field_index(merger.outputs[0].value, AREA_FIELD_NAME)
        total_cnt, total_sqmt = 0, 0
        for _f in layer_features(merger.outputs[0].value):
            cnt = _f.attributeMap()[cnt_idx].toDouble()[0]
            area = _f.attributeMap()[area_idx].toDouble()[0]
            total_cnt += cnt
            total_sqmt += area

        # sum(count)=292377  sum(sqmt)=71582303
        self.assertAlmostEqual(total_cnt, self.zone2_total_bldg_cnt, places=-2)
        self.assertAlmostEqual(total_sqmt,
                               self.zone2_total_bldg_area,
                               places=-2)

        # cleanup
        self._clean_layer(zone_data)
        self._clean_layer(merger.outputs)
Example #8
 def _get_exposure_total(self, exposure, cnt_field):
     total_exposure = 0
     try:
         cnt_idx = layer_field_index(exposure, cnt_field)
         for f in layer_features(exposure):
             total_exposure += f.attributeMap()[cnt_idx].toDouble()[0]
     except Exception as err:
         raise OperatorError("error reading count from exposure: %s" % err, self.__class__)
     return total_exposure
Example #9
 def _get_exposure_total(self, exposure, cnt_field):
     total_exposure = 0
     try:
         cnt_idx = layer_field_index(exposure, cnt_field)
         for f in layer_features(exposure):
             total_exposure += f.attributeMap()[cnt_idx].toDouble()[0]                
     except Exception as err:
         raise OperatorError("error reading count from exposure: %s" % err, self.__class__)
     return total_exposure
Example #10
    def test_LoadZone2(self, skipTest=False, zone=2):
        logging.debug("test_LoadZoneCount %s" % skipTest)

        if zone == 2:
            zone_path = self.zone2_path
            zone_field = self.zone2_field
            zone_count_field = self.zone2_bldgcount_field
            zone_area_field = self.zone2_bldgarea_field
        elif zone == 3:
            zone_path = self.zone3_path
            zone_field = self.zone3_field
            zone_count_field = self.zone3_bldgcount_field
            zone_area_field = self.zone3_bldgarea_field
        else:
            raise Exception("zone not supported")

        loader = ZoneCountLoader(self.operator_options)
        loader.inputs = [
            OperatorData(OperatorDataTypes.Shapefile, zone_path),
            OperatorData(OperatorDataTypes.StringAttribute, zone_field),
            OperatorData(OperatorDataTypes.StringAttribute, zone_count_field),
            OperatorData(OperatorDataTypes.StringAttribute, zone_area_field),
        ]
        loader.outputs = [OperatorData(OperatorDataTypes.Zone), OperatorData(OperatorDataTypes.Shapefile)]
        loader.do_operation()
        if skipTest:
            return loader.outputs

        zones = loader.outputs[0].value
        self.assertEquals(zones.featureCount(), self.zone2_feature_count)
        bldg_cnt_idx = layer_field_index(zones, zone_count_field)
        bldg_area_idx = layer_field_index(zones, zone_area_field)
        total_bldg_cnt, total_bldg_area = 0, 0
        for feature in layer_features(zones):
            total_bldg_cnt += feature.attributeMap()[bldg_cnt_idx].toDouble()[0]
            total_bldg_area += feature.attributeMap()[bldg_area_idx].toDouble()[0]
        self.assertEquals(total_bldg_cnt, self.zone2_total_bldg_cnt)
        self.assertEquals(total_bldg_area, self.zone2_total_bldg_area)

        # clean up
        del zones
        self._clean_layer(loader.outputs)
Example #11
File: grids.py Project: gem/sidd
    def _create_zone_statistics(self, zone_layer, zone_field, count_field, zone_stat, zone_names):
        # project geometry into mercator and get area in m2
        mercator_transform = QgsCoordinateTransform(zone_layer.crs(), self.mercator_crs)
        zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
        zone_field_idx = layer_field_index(zone_layer, zone_field)
        count_field_idx = layer_field_index(zone_layer, count_field)
        for _f in layer_features(zone_layer):
            # project into mercator and get area in m2
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            gid = _f.attributeMap()[zone_gid_idx].toString()
            # if count field is not defined, then set count to 0
            if count_field_idx >= 0:
                count = _f.attributeMap()[count_field_idx].toDouble()[0]
            else:
                count = 0

            self._update_stat(zone_stat, gid, count, area)
            zone_names[gid] = _f.attributeMap()[zone_field_idx]
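Neither _update_stat nor the STAT_*_IDX constants appear in this listing; judging from how the results are read back later (stat[cnt_idx], stat[area_idx]), a plausible minimal version accumulates a [count, area] pair per key. The sketch below is an assumption for illustration, not the project's actual implementation:

STAT_COUNT_IDX = 0   # assumed index layout, mirroring ToGrid.STAT_COUNT_IDX
STAT_AREA_IDX = 1    # and ToGrid.STAT_AREA_IDX used in the examples

def update_stat(stats, key, count, area):
    """Accumulate (count, area) totals per key (hypothetical helper)."""
    if key not in stats:
        stats[key] = [0.0, 0.0]
    stats[key][STAT_COUNT_IDX] += count
    stats[key][STAT_AREA_IDX] += area

zone_stat = {}
update_stat(zone_stat, 'Z1', 120, 5400.0)
update_stat(zone_stat, 'Z1', 80, 3600.0)
print(zone_stat['Z1'])   # [200.0, 9000.0]
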
Example #12
    def test_ZoneToGridJoin(self, skipTest=False):
        logging.debug("test_ZoneFootprintJoin %s" % skipTest)

        # load data
        zone_data = self.test_LoadZone2(True, 2)

        # test 1
        merger = ZoneToGrid(self.operator_options)
        merger.inputs = [
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_field),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_bldgcount_field),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone2_bldgarea_field),
        ]
        merger.outputs = [OperatorData(OperatorDataTypes.Grid), OperatorData(OperatorDataTypes.Shapefile)]
        merger.do_operation()

        if skipTest:
            # clean up intermediate data
            self._clean_layer(zone_data)
            return merger.outputs

        self.assertTrue(os.path.exists(merger.outputs[1].value))
        cnt_idx = layer_field_index(merger.outputs[0].value, CNT_FIELD_NAME)
        area_idx = layer_field_index(merger.outputs[0].value, AREA_FIELD_NAME)
        total_cnt, total_sqmt = 0, 0
        for _f in layer_features(merger.outputs[0].value):
            cnt = _f.attributeMap()[cnt_idx].toDouble()[0]
            area = _f.attributeMap()[area_idx].toDouble()[0]
            total_cnt += cnt
            total_sqmt += area

        # sum(count)=292377  sum(sqmt)=71582303
        self.assertAlmostEqual(total_cnt, self.zone2_total_bldg_cnt, places=-2)
        self.assertAlmostEqual(total_sqmt, self.zone2_total_bldg_area, places=-2)

        # cleanup
        self._clean_layer(zone_data)
        self._clean_layer(merger.outputs)
Example #13
File: grids.py Project: gem/sidd
 def _create_zone_statistics(self, zone_layer, zone_field, count_field, zone_stat, zone_names):
     # project geometry into mercator and get area in m2
     mercator_transform = QgsCoordinateTransform(zone_layer.crs(),
                                                 self.mercator_crs)    
     zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
     zone_field_idx = layer_field_index(zone_layer, zone_field)
     count_field_idx = layer_field_index(zone_layer, count_field)
     for _f in layer_features(zone_layer):            
         # project into mercator and get area in m2
         geom = _f.geometry()
         geom.transform(mercator_transform)
         area = geom.area()
             
         gid = _f.attributeMap()[zone_gid_idx].toString()            
         # if count field is not defined, then set count to 0
         if count_field_idx >= 0:
             count = _f.attributeMap()[count_field_idx].toDouble()[0]
         else:
             count = 0            
         
         self._update_stat(zone_stat, gid, count, area)
         zone_names[gid] = _f.attributeMap()[zone_field_idx]
Example #14
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        survey_layer = self.inputs[0].value
        tax_field = self._tax_field

        # merge to create stats
        ms = MappingScheme(self._taxonomy)
        stats = Statistics(self._taxonomy)
        ms.assign(MappingSchemeZone('ALL'), stats)

        # loop through all input features
        tax_idx = layer_field_index(survey_layer, tax_field)
        area_idx = layer_field_index(survey_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(survey_layer, COST_FIELD_NAME)

        for _f in layer_features(survey_layer):
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional = {StatisticNode.AverageSize: _area}
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                additional = {StatisticNode.UnitCost: _cost}
            try:
                stats.add_case(_tax_str, self._parse_order,
                               self._parse_modifiers, additional)
            except TaxonomyParseError as perr:
                logAPICall.log(
                    "error parsing case %s, %s" % (str(_tax_str), str(perr)),
                    logAPICall.WARNING)

        # store data in output
        stats.finalize()

        self.outputs[0].value = ms
Example #15
    def do_operation(self):
        """ perform export operation """        
        # input/output data checking already done during property set
        input_file = self.inputs[0].value
        output_file = self.inputs[1].value
        output_dbf = '%s_attr.dbf' % output_file[:-3]
        try:
            exp_layer = load_shapefile(input_file, 'exposure_%s' % get_unique_filename())
            
            # store id of distinct features            
            total_features = exp_layer.dataProvider().featureCount()
            if total_features > MAX_FEATURES_IN_MEMORY:
                # use bsddb to store id in case number of features is too large
                tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
                db = bsddb.btopen(tmp_db_file, 'c')
                use_db = True
            else:
                # in memory dictionary, should be much faster, but could fail
                # if memory is limited
                db = {}
                use_db = False
                        
            # get field index for GID
            gid_idx = layer_field_index(exp_layer, GID_FIELD_NAME)
            fields = {
                0: QgsField(GID_FIELD_NAME, QVariant.Int),
            }            
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, 
                                         exp_layer.dataProvider().geometryType(), 
                                         exp_layer.crs(), "ESRI Shapefile")
            out_feature = QgsFeature()
            for feature in layer_features(exp_layer):
                gid = str(feature.attributeMap()[gid_idx].toString())
                # only write out once 
                if not db.has_key(gid):
                    db[gid]= '1'    # bsddb only accepts string 
                    out_feature.addAttribute(0, gid)
                    out_feature.setGeometry(feature.geometry())
                    writer.addFeature(out_feature)
                    
            # clean up
            del writer                
            if use_db:
                db.close()
                os.remove(tmp_db_file)

            # copy associated attribute file            
            copy_shapefile(input_file, output_dbf, extensions=['.dbf'])
        except Exception as err:
            raise OperatorError("error creating shapefile: %s" % err, self.__class__)
Example #16
    def do_operation(self):
        exposure = self.inputs[0].value

        frac_count, rec_count = 0, 0
        try:
            cnt_idx = layer_field_index(exposure, CNT_FIELD_NAME)
            for f in layer_features(exposure):
                rec_count += 1
                count = f.attributeMap()[cnt_idx].toDouble()[0]
                if count < 1:
                    frac_count += 1
        except Exception as err:
            raise OperatorError("error reading exposure: %s" % err, self.__class__)

        self.outputs[0].value = {"record_count": rec_count, "fraction_count": frac_count}
Example #17
 def do_operation(self):
     exposure = self.inputs[0].value
     
     frac_count, rec_count = 0, 0
     try:
         cnt_idx = layer_field_index(exposure, CNT_FIELD_NAME)
         for f in layer_features(exposure):
             rec_count += 1
             count = f.attributeMap()[cnt_idx].toDouble()[0]
             if count < 1:
                 frac_count += 1
     except Exception as err:
         raise OperatorError("error reading exposure: %s" % err, self.__class__)
     
     self.outputs[0].value = {'record_count':rec_count, 'fraction_count':frac_count}  
Example #18
    def do_operation(self):
        exposure = self.inputs[0].value
        zone_layer = self.inputs[1].value
        cnt_field = self.inputs[2].value

        try:
            # get total building count from exposure
            total_exposure = self._get_exposure_total(exposure, CNT_FIELD_NAME)

            # get total building count from zone
            total_zone = 0

            cnt_idx = layer_field_index(zone_layer, cnt_field)
            for f in layer_features(zone_layer):
                total_zone += f.attributeMap()[cnt_idx].toDouble()[0]
        except Exception as err:
            raise OperatorError("error reading count from zone: %s" % err, self.__class__)

        self.outputs[0].value = {"total_exposure": total_exposure, "total_source": total_zone}
Example #19
    def do_operation(self):
        exposure = self.inputs[0].value
        zone_layer = self.inputs[1].value
        cnt_field = self.inputs[2].value

        try:
            # get total building count from exposure
            total_exposure = self._get_exposure_total(exposure, CNT_FIELD_NAME)
            
            # get total building count from zone
            total_zone = 0 
            
            cnt_idx = layer_field_index(zone_layer, cnt_field)
            for f in layer_features(zone_layer):                
                total_zone += f.attributeMap()[cnt_idx].toDouble()[0]
        except Exception as err:
            raise OperatorError("error reading count from zone: %s" % err, self.__class__)        

        self.outputs[0].value = {"total_exposure":total_exposure, 
                                 "total_source":total_zone}
Example #20
    def findFeatureExtentByAttribute(self, layer, field, value):
        """ 
        find extent of all objects in QGIS layer matching condition "field=value"         
        """
        fidx = layer_field_index(layer, field)
        if fidx == -1:
            return None
        xmin, xmax, ymin, ymax = 180, -180, 90, -90
        extent = QgsRectangle(xmin, ymin, xmax, ymax)
        need_transform = layer.crs() != self.canvas.mapRenderer().destinationCrs()
        if need_transform:
            transform = QgsCoordinateTransform(layer.crs(), self.canvas.mapRenderer().destinationCrs())
        for feature in layer_features(layer):
            if str(value) == feature.attributeMap()[fidx].toString():
                f_extent = feature.geometry().boundingBox()
                if need_transform:
                    f_extent = transform.transform(f_extent)
                xmin = min(f_extent.xMinimum(), xmin)
                xmax = max(f_extent.xMaximum(), xmax)
                ymin = min(f_extent.yMinimum(), ymin)
                ymax = max(f_extent.yMaximum(), ymax)
        extent.set(xmin, ymin, xmax, ymax)
        return extent
Example #21
File: grids.py Project: gem/sidd
    def do_operation(self):
        # validate inputs        
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        area_field = self.inputs[3].value
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)        
        self._test_layer_field_exists(zone_layer, count_field)
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        #cnt_idx = ToGrid.STAT_COUNT_IDX
        
        # 1. find building count and total area for each zone
        zone_names, zone_stat = {}, {}
        try:
            self._create_zone_statistics(zone_layer, zone_field, count_field, 
                                         zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)    
    
        # 2. create grids around extent of zone 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
        try:
            extent = zone_layer.extent()
            [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
            tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file, \
                                              x_min, y_min, x_max, y_max, \
                                              DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)            
        except Exception as err:
            raise OperatorError(str(err), self.__class__)    
        
        # 3. intersect grids and zones to obtain polygons with 
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area) 
        # apply ratio to zone building count to obtain count assigned to polygon                  
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally        
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        bldg_area_idx = layer_field_index(tmp_join_layer, area_field)
        mercator_transform = QgsCoordinateTransform(tmp_join_layer.crs(),
                                                    self.mercator_crs)          

        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
        }    
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature() 
        for _f in layer_features(tmp_join_layer):
            # get area of polygon            
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]
            
            # calculate count/area as proportion of total zone area
            bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * (area/stat[area_idx])
            if bldg_area_idx > 0:
                bldg_area = _f.attributeMap()[bldg_area_idx].toDouble()[0] * (area/stat[area_idx])                
            else:
                bldg_area = 0 

            # create output record
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, bldg_cnt)
            f.addAttribute(3, bldg_area)
            writer.addFeature(f)        
        del writer    

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
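The tally in this operator is straightforward proportional allocation: each grid/zone intersection polygon receives the zone's building count scaled by the share of the zone's area that the polygon covers, so the per-zone totals are preserved. A worked pure-Python example with hypothetical numbers:

zone_bldg_count = 1000.0      # hypothetical building count of one zone
zone_total_area = 250000.0    # hypothetical zone area in m2 (stat[area_idx])

# hypothetical areas of the grid-cell/zone intersection polygons, in m2
piece_areas = [100000.0, 90000.0, 60000.0]

piece_counts = [zone_bldg_count * (a / zone_total_area) for a in piece_areas]
print(piece_counts)       # [400.0, 360.0, 240.0]
print(sum(piece_counts))  # 1000.0 -- the zone total is preserved
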
Example #22
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mappin """
        # validate inputs 
        popgrid_layer = self.inputs[0].value        
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(popgrid_layer)
        self._test_layer_field_exists(popgrid_layer, CNT_FIELD_NAME)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, zone_field)        
        # count_field is not required        
        # if count field is not defined, then generate building count from footprints
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()

        # intersect grids and zones to obtain polygons with 
        # - population and zone_id
        # - apply ratio to population to obtain building count                  
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # generate grid with  building counts
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        pop_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        zone_idx = layer_field_index(tmp_join_layer, zone_field) 
        for _f in layer_features(tmp_join_layer):
            pop_count = _f.attributeMap()[pop_idx].toDouble()[0]
            zone = _f.attributeMap()[zone_idx].toString()
            
            # 1. get geometry
            geom = _f.geometry()
            # 2. get original centroid point and project if required
            centroid  = geom.centroid().asPoint()
            grid_gid = latlon_to_grid(centroid.y(), centroid.x())
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone)
            f.addAttribute(2, pop_count / pop_to_bldg)
            writer.addFeature(f)
        del writer
        
        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
Example #23
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # validate inputs
        fp_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        count_field = self.inputs[3].value
        area_field = self.inputs[4].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(fp_layer)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required
        # if count field is not defined, then generate building count from footprints
        # area_field is not required

        # local variables
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        cnt_idx = ToGrid.STAT_COUNT_IDX

        zone_names, zone_stat, zone_stat2, zone_totals = {}, {}, {}, {}

        # 1. find building count and total area for each zone
        # project geometry into mercator and get area in m2
        mercator_crs = QgsCoordinateReferenceSystem()
        mercator_crs.createFromEpsg(3395)
        mercator_transform = QgsCoordinateTransform(zone_layer.crs(), mercator_crs)

        try:
            # use zone geometry area
            self._create_zone_statistics(zone_layer, zone_field, count_field, zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone
        tmp_grid1 = "grid_" + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + ".shp"
        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
        tmp_grid_lyr1 = self._create_grid(
            tmp_grid1, tmp_grid1_file, x_min, y_min, x_max, y_max, DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE
        )

        # tally total building area if the area field is defined
        bldg_area_idx = layer_field_index(zone_layer, area_field)
        zone_area = {}
        zone_has_area = False
        if bldg_area_idx > 0:
            zone_has_area = True
            zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
            for _f in layer_features(zone_layer):
                gid = _f.attributeMap()[zone_gid_idx].toString()
                area = _f.attributeMap()[bldg_area_idx].toDouble()[0]
                if zone_area.has_key(gid):
                    zone_area[gid] = float(zone_area[gid]) + area
                else:
                    zone_area[gid] = area

        # 3. intersect grids and zones to obtain polygons with
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area)
        # apply ratio to zone building count to obtain count assigned to polygon
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        for _f in layer_features(tmp_join_layer):
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]
            # calculate count/area as proportion of total zone area
            area_ratio = area / stat[area_idx]
            if bldg_cnt_idx > 0:
                bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * area_ratio
            else:
                bldg_cnt = 0
            if zone_has_area:
                area = zone_area[zone_gid] * area_ratio
            else:
                area = stat[area_idx] * area_ratio
            self._update_stat(zone_stat2, "%s|%s" % (grid_gid, zone_gid), bldg_cnt, area)

        # 4. find total buildings in each zone based on footprint
        # - simply join the files and tally count and total area
        tmp_join1 = "joined_%s" % get_unique_filename()
        tmp_join1_file = "%s%s.shp" % (self._tmp_dir, tmp_join1)
        try:
            # do intersection
            analyzer.intersection(fp_layer, tmp_join_layer, tmp_join1_file)
            tmp_join1_layer = load_shapefile(tmp_join1_file, tmp_join1)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_fp_stat = {}
        zone_gid_idx = layer_field_index(tmp_join1_layer, "%s_" % GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join1_layer, "GRID_GID")
        fp_area_idx = layer_field_index(tmp_join1_layer, AREA_FIELD_NAME)
        fp_ht_idx = layer_field_index(tmp_join1_layer, HT_FIELD_NAME)
        fp_has_height = False
        for _f in layer_features(tmp_join1_layer):
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            area = _f.attributeMap()[fp_area_idx].toDouble()[0]  # area comes from geometry, always exists
            ht = _f.attributeMap()[fp_ht_idx].toDouble()[0]
            if ht > 0:
                fp_has_height = True
                area *= ht  # this is actual area to be aggregated at the end
            self._update_stat(zone_fp_stat, "%s|%s" % (grid_gid, zone_gid), 1, area)
            self._update_stat(zone_totals, zone_gid, 1, area)

        # 5. generate grid with adjusted building counts
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for key in zone_stat2.keys():
            (grid_gid, zone_gid) = str(key).split("|")
            s_zone = zone_stat[QString(zone_gid)]  # overall statistics for the zone from zone file (always exists)
            s_zone_grid = zone_stat2[key]  # grid specific statistic from zone file (always exists)
            if zone_totals.has_key(QString(zone_gid)):  # overall statistics for the zone from footprints
                s_total = zone_totals[QString(zone_gid)]
            else:
                s_total = [0, 0]  # set to zero if missing
            if zone_fp_stat.has_key(key):  # grid specific statistic from footprint
                s_fp = zone_fp_stat[key]
            else:
                s_fp = [0, 0]  # set to zero if missing

            zone_leftover_count = s_zone[cnt_idx] - s_total[cnt_idx]
            if zone_has_area:
                zone_leftover_area = zone_area[QString(zone_gid)] - s_total[area_idx]
            else:
                zone_leftover_area = s_zone[area_idx] - s_total[area_idx]
            if zone_leftover_count > 0:
                # there are still building not accounted for
                # distribute to grid based on ratio of grid leftover area over zone leftover area
                # (leftover area is the zone area after subtracting footprint areas)
                grid_leftover_count = zone_leftover_count * (
                    (s_zone_grid[area_idx] - s_fp[area_idx]) / zone_leftover_area
                )
                grid_count = s_fp[cnt_idx] + grid_leftover_count
            else:
                grid_count = s_fp[cnt_idx]

            if fp_has_height:
                # area can be actual area based on footprint area * height
                area = s_fp[area_idx]
            elif zone_has_area:
                area = s_zone_grid[area_idx]
            else:
                # no area defined
                area = 0  # max(s_zone_grid[area_idx], s_fp[area_idx])

            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, grid_count)
            f.addAttribute(3, area)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        del tmp_join1_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join1_file)

        # store data in output
        self._load_output(output_file, output_layername)
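Step 5 of this operator reconciles the footprint tally with the zone totals: buildings the footprints did not account for (the zone "leftover") are spread across grid cells in proportion to each cell's leftover area. A worked sketch of that arithmetic with hypothetical numbers:

# hypothetical totals for one zone
zone_count_from_zone_file = 500.0    # s_zone[cnt_idx]
zone_count_from_footprints = 300.0   # s_total[cnt_idx]
zone_area_from_zone_file = 80000.0   # s_zone[area_idx]
zone_area_from_footprints = 30000.0  # s_total[area_idx]

# hypothetical values for one grid cell within that zone
cell_area_from_zone_file = 20000.0   # s_zone_grid[area_idx]
cell_count_from_footprints = 60.0    # s_fp[cnt_idx]
cell_area_from_footprints = 5000.0   # s_fp[area_idx]

zone_leftover_count = zone_count_from_zone_file - zone_count_from_footprints  # 200.0
zone_leftover_area = zone_area_from_zone_file - zone_area_from_footprints     # 50000.0
if zone_leftover_count > 0:
    cell_leftover = zone_leftover_count * (
        (cell_area_from_zone_file - cell_area_from_footprints) / zone_leftover_area)
    grid_count = cell_count_from_footprints + cell_leftover
else:
    grid_count = cell_count_from_footprints
print(grid_count)   # 60 + 200 * (15000 / 50000) = 120.0
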
Example #24
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        fp_layer = self.inputs[0].value
        zone_field = self.inputs[1].value

        # aggregate footprint into grids
        logAPICall.log('aggregate statistic for grid ...', logAPICall.DEBUG)
        total_features = fp_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to store temporary lat/lon
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')
            use_db = True
        else:
            db = {}
            use_db = False

        zone_idx = layer_field_index(fp_layer, zone_field)
        for f in layer_features(fp_layer):
            geom = f.geometry()
            zone_str = str(f.attributeMap()[zone_idx].toString())
            centroid  = geom.centroid().asPoint()
            # use floor, this truncates all points within grid to grid's
            # bottom-left corner                        
            x = math.floor(centroid.x() / DEFAULT_GRID_SIZE)
            y = math.floor(centroid.y() / DEFAULT_GRID_SIZE)
            key = '%s %d %d' % (zone_str, x,y)
            if db.has_key(key):
                db[key] = str(int(db[key]) + 1)
            else:
                db[key] = '1'
        
        # output grid
        logAPICall.log('create grid ...', logAPICall.DEBUG)
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(zone_field, QVariant.String),
        }
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPoint , self._crs, "ESRI Shapefile")
            f = QgsFeature()
            for key, val in db.iteritems():
                (zone_str, x, y) = key.split(' ')
                # points were aggregated to the grid's bottom-left corner
                # add half grid size to place point at center of grid
                point = QgsPoint(int(x)*DEFAULT_GRID_SIZE+(DEFAULT_GRID_SIZE/2.0), 
                                 int(y)*DEFAULT_GRID_SIZE+(DEFAULT_GRID_SIZE/2.0))
                f.setGeometry(QgsGeometry.fromPoint(point))
                f.addAttribute(0, QVariant(point.x()))
                f.addAttribute(1, QVariant(point.y()))
                f.addAttribute(2, QVariant(val))
                f.addAttribute(3, QVariant(zone_str))
                writer.addFeature(f)
            del writer
        except Exception as err:
            remove_shapefile(grid_file)
            raise OperatorError("error creating joined grid: " % err, self.__class__)
        
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading created grid file %s' % grid_file, self.__class__)
                
        # clean up                
        if use_db:
            db.close()
            os.remove(tmp_db_file)
            
        # done
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
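The aggregation above snaps each footprint centroid to a grid cell by flooring its coordinates to multiples of the grid size, counts per (zone, cell) key, and later re-centres the cell by adding half a grid size. A minimal pure-Python sketch of that bucketing; the grid size here is a hypothetical value, not the project's DEFAULT_GRID_SIZE:

import math

GRID_SIZE = 1.0 / 120.0   # hypothetical 30 arc-second grid

def grid_key(zone, lon, lat, grid_size=GRID_SIZE):
    # truncate to the cell's bottom-left corner
    x = math.floor(lon / grid_size)
    y = math.floor(lat / grid_size)
    return '%s %d %d' % (zone, x, y)

def cell_center(x, y, grid_size=GRID_SIZE):
    # move from the bottom-left corner back to the cell centre
    return (x * grid_size + grid_size / 2.0,
            y * grid_size + grid_size / 2.0)

counts = {}
for zone, lon, lat in [('R1', 10.0010, 45.0020),
                       ('R1', 10.0030, 45.0010),
                       ('R2', 10.2000, 45.3000)]:
    key = grid_key(zone, lon, lat)
    counts[key] = counts.get(key, 0) + 1
print(counts)                      # both 'R1' points fall into the same cell
print(cell_center(1200, 5400))     # centre of the cell containing the 'R1' points
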
Example #25
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        popgrid_layer = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)
        
        # merge with zone 
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # count footprint in each zone
        stats = {}
        _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")        
        _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        for _f in layer_features(tmp_join_layer):
            # retrieve count from statistic
            _gid = _f.attributeMap()[_gid_idx].toString()
            _count = _f.attributeMap()[_cnt_idx].toString()
            if stats.has_key(_gid):
                stats[_gid] += float(_count) / pop_to_bldg
            else:
                stats[_gid] = float(_count) / pop_to_bldg
        
        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)
        try:            
            fields = {
                0 : QgsField(GID_FIELD_NAME, QVariant.Int),
                1 : QgsField(zone_field, QVariant.String),
                2 : QgsField(CNT_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")                     
            f = QgsFeature()            
            for _f in layer_features(zone_layer):
                
                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])                
                
                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    bldg_count = stats[gid]
                except:
                    bldg_count = 0
                f.addAttribute(2, QVariant(bldg_count))
                writer.addFeature(f)
            
            del writer, f
        except Exception as err:            
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err, self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError('Error loading footprint centroid file %s' % output_file, self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
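The count accumulation in this operator converts population to buildings by dividing each intersected population value by the people-per-building ratio and summing per zone GID. A tiny pure-Python sketch of that accumulation with hypothetical values:

pop_to_bldg = 4.0   # hypothetical people-per-building ratio

# hypothetical (zone GID, population) pairs from the intersected layer
rows = [('Z1', 120.0), ('Z1', 80.0), ('Z2', 40.0)]

stats = {}
for gid, pop in rows:
    stats[gid] = stats.get(gid, 0.0) + pop / pop_to_bldg
print(stats['Z1'], stats['Z2'])   # 50.0 10.0
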
Example #26
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        # load and verify
        popgrid_file = self.inputs[0].value
        pop_field = self.inputs[1].value

        popgrid_layername = 'zone_%s' % get_unique_filename()
        try:
            tmp_popgrid_layer = load_shapefile_verify(popgrid_file,
                                                      popgrid_layername,
                                                      [pop_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        logAPICall.log(
            'tmp_fp_layer.crs().epsg() %s ' % tmp_popgrid_layer.crs().epsg(),
            logAPICall.DEBUG)
        if tmp_popgrid_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_popgrid_layer.crs(),
                                               self._crs)
            transform_required = True
        else:
            transform_required = False

        # output grid
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        pop_idx = layer_field_index(tmp_popgrid_layer, pop_field)
        output_file = '%spop_grid_%s.shp' % (self._tmp_dir,
                                             get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_popgrid_layer):
                # NOTE: geom.transform does projection in place to underlying C object

                # 1. get geometry
                geom = _f.geometry()
                # 2. change project if required
                if transform_required:
                    geom = transform.transform(geom)

                # 3. write to file
                gid += 1
                f.setGeometry(geom)
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, _f.attributeMap()[pop_idx])
                writer.addFeature(f)
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err,
                                self.__class__)

        popgrid_layername = 'popgrid_%s' % get_unique_filename()
        popgrid_layer = load_shapefile(output_file, popgrid_layername)
        if not popgrid_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % output_file,
                self.__class__)

        # clean up
        del tmp_popgrid_layer

        # store data in output
        self.outputs[0].value = popgrid_layer
        self.outputs[1].value = output_file
Example #27
 def _test_layer_field_exists(self, layer, field):
     idx = layer_field_index(layer, field) 
     if idx == -1:
         raise OperatorDataError("field %s not found in input layer" % field)
     return idx
Example #28
    def do_operation(self):
        # input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value

        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (area_field, fp_layer.name()), self.__class__)
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" % (ht_field, fp_layer.name()),
                self.__class__)
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (HT_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (TAX_FIELD_NAME, svy_layer.name()))

        # load zone classes
        # the operations below must be performed for each zone
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)

        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (TAX_FIELD_NAME, svy_layer.name()))

        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {}
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {}
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0
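        # increment_dict (used below) is assumed to be a small helper on this
        # operator that accumulates a value under a key, roughly
        #     target[key] = target.get(key, 0) + value
        # (sketch only; the actual helper is defined elsewhere in the project)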

        # associate group to ratio value
        for _rec in layer_features(tmp_join_layer):
            # use the zone of each joined record rather than the loop variable
            # left over from the initialization loop above
            _zone = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone], _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone], _samp_grp, _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err,
                               logAPICall.WARNING)

        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups",
                                    self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value == -1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError(
                        "Survey groups must have same number of samples",
                        self.__class__)
            # sort by stories
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][
                    group_stories_for_sort[key]] = self.weights[idx]
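        # at this point each zone maps its three sampling groups to a weight
        # from self.weights, ordered by total surveyed stories (the group with
        # the fewest stories gets self.weights[0]); the weight values
        # themselves are defined elsewhere on this operator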

        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            # height comes from the survey height field, not the size field
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            try:
                self._taxonomy.parse(_tax_str)
                # weights were assigned per zone above
                group_weight = _zone_group_weight[_zone_str]
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str,
                                    group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str,
                                    _sample_size * group_weight[_sample_grp])
                self.increment_dict(
                    _zone_a_exp[_zone_str], _tax_str,
                    _sample_size * _sample_ht * group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log(
                    "error processing sample with building type: %s" %
                    _tax_str, logAPICall.WARNING)

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)

        zone_idx = layer_field_index(tmp_join_layer2, zone_field)
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht

        # calculate building ratios for each zone
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a)
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())
            # E[A] estimated total building area for zone
            e_at_total = _zone_total_area[_zone] * _zone_total_ht[
                _zone] / _zone_total_count[_zone]

            # calculate expected values
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]
                if e_at_cluster == 0 or e_at_total == 0:
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building
                    e_gt_cluster = e_at_cluster / e_nt_cluster

                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster

                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster
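        # in short, for each building type t the zone-wide count estimate is
        #     E[N(t)] = E[A] * E[f(t)] / E[G(t)]
        # with E[A] derived from the footprint totals above; a standalone
        # restatement of this calculation follows after this example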

        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)

            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str,
                               self._parse_order,
                               self._parse_modifiers,
                               add_times=int(_e_exp * 1000))
            # finalize call is required
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        # assign output
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp
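
The estimator embedded in the loops above can be restated as a small standalone function. The sketch below is illustrative only (the function and argument names are not part of the project API): it takes the weighted per-type cluster sums and the footprint-derived zone totals and returns the zone-wide count estimate for each building type.

    def estimate_zone_counts(n_cluster, a_cluster, total_area, total_ht, total_count):
        """ return {building type: estimated zone-wide count} (illustrative sketch) """
        e_nt_cluster_total = float(sum(n_cluster.values()))
        e_at_cluster_total = float(sum(a_cluster.values()))
        # E[A]: estimated total built area for the zone, from footprints
        e_at_total = total_area * total_ht / float(total_count)
        estimates = {}
        for t, e_at_cluster in a_cluster.items():
            e_nt_cluster = n_cluster[t]
            if e_at_cluster == 0 or e_at_total == 0:
                # no usable area information: fall back to the count ratio
                estimates[t] = e_nt_cluster / e_nt_cluster_total
            else:
                e_ft_cluster = e_at_cluster / e_at_cluster_total         # E[f(t)]
                e_gt_cluster = e_at_cluster / float(e_nt_cluster)        # E[G(t)]
                estimates[t] = e_at_total * e_ft_cluster / e_gt_cluster  # E[N(t)]
        return estimates
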
Example #29
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        zone_count_field = self.inputs[2].value
        fp_layer = self.inputs[3].value

        # merge with zone
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # count footprint in each zone
        gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        stats = {}
        for _feature in layer_features(tmp_join_layer):
            gid = _feature.attributeMap()[gid_idx].toString()
            if ht_idx != -1:
                ht = _feature.attributeMap()[ht_idx].toDouble()[0]
            else:
                ht = 0
            # if the height field is not present, ht is 0; the generated area
            # then becomes 0, so the system effectively ignores it without
            # having to remove the field
            area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
            if not stats.has_key(gid):
                stats[gid] = (1, area)
            else:
                stat = stats[gid]
                stats[gid] = (stat[0] + 1, stat[1] + area)
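        # stats now maps each zone gid to a tuple of
        # (footprint count, accumulated area * height) for that zone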

        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            fields = {
                0: QgsField(GID_FIELD_NAME, QVariant.Int),
                1: QgsField(zone_field, QVariant.String),
                2: QgsField(CNT_FIELD_NAME, QVariant.Int),
                3: QgsField(AREA_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPolygon, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(zone_layer):

                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])

                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    stat = stats[gid]
                    bldg_count = stat[0]
                    area = stat[1]
                except:
                    bldg_count, area = 0, 0
                f.addAttribute(2, QVariant(bldg_count))
                f.addAttribute(3, QVariant(area))
                writer.addFeature(f)

            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError(
                'Error loading zone output file %s' % output_file,
                self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Example #30
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        fp_layer = self.inputs[2].value

        # merge with zone to get assignment
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                                   [zone_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        fields = {
            0: QgsField(self._lon_field, QVariant.Double),
            1: QgsField(self._lat_field, QVariant.Double),
            2: QgsField(zone_field, QVariant.String),
        }
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        fp_layername = 'fpc_%s' % get_unique_filename()
        fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
        try:
            writer = QgsVectorFileWriter(fp_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(tmp_join_layer):
                centroid = _f.geometry().centroid().asPoint()
                lon = centroid.x()
                lat = centroid.y()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))
                writer.addFeature(f)

            del writer
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            remove_shapefile(fp_file)
            raise OperatorError("error creating joined grid: %s" % err,
                                self.__class__)

        # load shapefile as layer
        fp_layer = load_shapefile(fp_file, fp_layername)
        if not fp_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % fp_file,
                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = fp_layer
        self.outputs[1].value = fp_file
Example #31
0
    def do_operation(self):
        """ perform apply mapping scheme operation """

        # input/output data checking already done during property set
        src_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        ms = self.inputs[3].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(src_layer)
        self._test_layer_field_exists(src_layer, zone_field)
        self._test_layer_field_exists(src_layer, count_field)

        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%sexp_%s.shp' % (self._tmp_dir, exposure_layername)

        # loop through all input features
        provider = src_layer.dataProvider()
        if provider is None:
            raise OperatorError("input layer not correctly loaded",
                                self.__class__)
        zone_idx = layer_field_index(src_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % zone_field,
                self.__class__)
        count_idx = layer_field_index(src_layer, count_field)
        if count_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % count_field,
                self.__class__)
        gid_idx = layer_field_index(src_layer, GID_FIELD_NAME)
        if gid_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % GID_FIELD_NAME,
                self.__class__)
        area_idx = layer_field_index(src_layer, AREA_FIELD_NAME)

        provider.select(provider.attributeIndexes(), provider.extent())
        provider.rewind()

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields,
                                         provider.geometryType(), self._crs,
                                         "ESRI Shapefile")
            out_feature = QgsFeature()

            gid = 0
            for in_feature in layer_features(src_layer):
                geom = in_feature.geometry()
                centroid = geom.centroid().asPoint()
                gid = in_feature.attributeMap()[gid_idx]
                zone_str = str(in_feature.attributeMap()[zone_idx].toString())
                count = in_feature.attributeMap()[count_idx].toDouble()[0]
                if area_idx != -1:
                    area = in_feature.attributeMap()[area_idx].toDouble()[0]
                else:
                    area = 0

                count = int(count + 0.5)
                if count == 0:
                    continue

                stats = ms.get_assignment_by_name(zone_str)

                # use default stats if missing
                if stats is None:
                    raise Exception("no mapping scheme found for zone %s" %
                                    zone_str)

                for _sample in stats.get_samples(count,
                                                 self._extrapolationOption):
                    # write out if there are structures assigned
                    _type = _sample[0]
                    _cnt = _sample[1]
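                    # each _sample is assumed to be a tuple of
                    # (taxonomy string, count, size, cost) as returned by
                    # Statistics.get_samples, so indices 2 and 3 are read as
                    # the sampled size and cost below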

                    if area > 0:
                        # use area provided by footprint/zone if defined
                        _size = area * (float(_sample[1]) / count)
                        if _sample[3] > 0 and _sample[2] > 0:
                            _cost = (_sample[3] / _sample[2]) * area
                        else:
                            _cost = 0
                    else:
                        # use mapping scheme generic area otherwise
                        _size = _sample[2]
                        _cost = _sample[3]

                    if _cnt > 0:
                        out_feature.setGeometry(geom)
                        #out_feature.addAttribute(0, QVariant(gid))
                        out_feature.addAttribute(0, gid)
                        out_feature.addAttribute(1, QVariant(centroid.x()))
                        out_feature.addAttribute(2, QVariant(centroid.y()))
                        out_feature.addAttribute(3, QVariant(_type))
                        out_feature.addAttribute(4, QVariant(zone_str))
                        out_feature.addAttribute(5, QVariant(_cnt))
                        out_feature.addAttribute(6, QVariant(_size))
                        out_feature.addAttribute(7, QVariant(_cost))
                        writer.addFeature(out_feature)
            del writer, out_feature
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err,
                                self.__class__)

        del src_layer

        # load shapefile as layer
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:
            raise OperatorError(
                'Error loading exposure file %s' % exposure_file,
                self.__class__)

        # store data in output
        self.outputs[0].value = exposure_layer
        self.outputs[1].value = exposure_file
Example #32
0
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        svy_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(svy_layer)

        total_features = svy_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to store temporary lat/lon
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')
        else:
            db = {}
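        # bsddb stores keys and values as strings, which is why the tallies
        # below are kept as string-encoded integers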

        # tally statistics for each grid_id/building type combination
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        for f in layer_features(svy_layer):
            geom = f.geometry()
            centroid = geom.centroid().asPoint()
            grid_id = latlon_to_grid(centroid.y(), centroid.x())
            tax_str = str(f.attributeMap()[tax_idx].toString())

            key = '%s %s' % (tax_str, grid_id)
            if db.has_key(key):
                db[key] = str(int(db[key]) +
                              1)  # value as string required by bsddb
            else:
                db[key] = '1'  # value as string required by bsddb

        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields,
                                         self._outputGeometryType(), self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for key, val in db.iteritems():
                (tax_str, grid_id) = key.split(' ')
                lon, lat = grid_to_latlon(int(grid_id))

                f.setGeometry(self._outputGeometryFromGridId(grid_id))
                f.addAttribute(0, QVariant(grid_id))
                f.addAttribute(1, QVariant(lon))
                f.addAttribute(2, QVariant(lat))
                f.addAttribute(3, QVariant(tax_str))
                f.addAttribute(4, QVariant(''))
                f.addAttribute(5, QVariant(val))
                writer.addFeature(f)
                gid += 1
            del writer, f
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err,
                                self.__class__)

        # load shapefile as layer
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:
            raise OperatorError(
                'Error loading exposure file %s' % (exposure_file),
                self.__class__)

        # store data in output
        self.outputs[0].value = exposure_layer
        self.outputs[1].value = exposure_file
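
The grid ids used above come from the project's latlon_to_grid / grid_to_latlon helpers, which are not shown in these examples. A minimal sketch of what such a pair might look like, assuming a fixed cell size in decimal degrees and a simple row/column integer encoding (the real helpers may use a different scheme):

    DEFAULT_GRID_SIZE = 0.001  # assumed cell size in decimal degrees

    def latlon_to_grid(lat, lon):
        """ encode a lat/lon pair into a single integer cell id (sketch) """
        row = int((lat + 90.0) / DEFAULT_GRID_SIZE)
        col = int((lon + 180.0) / DEFAULT_GRID_SIZE)
        return row * 1000000 + col

    def grid_to_latlon(grid_id):
        """ decode a cell id back to the lon/lat of the cell origin (sketch) """
        row, col = divmod(int(grid_id), 1000000)
        return col * DEFAULT_GRID_SIZE - 180.0, row * DEFAULT_GRID_SIZE - 90.0
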
Example #33
0
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set        
        # load and verify
        infile = self.inputs[0].value
        
        tmp_fp_layername = 'fp_%s' % get_unique_filename()
        tmp_fp_layer = load_shapefile(infile, tmp_fp_layername)
        if not tmp_fp_layer:
            raise OperatorError('Error loading footprint file %s' % infile, self.__class__)

        if self._fp_ht_field is not None:
            ht_idx = layer_field_index(tmp_fp_layer, self._fp_ht_field)
        else:
            ht_idx = -1
        logAPICall.log('tmp_fp_layer.crs().epsg() %s ' % tmp_fp_layer.crs().epsg(),
                       logAPICall.DEBUG)
        if tmp_fp_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_fp_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False
        
        mercator_crs = QgsCoordinateReferenceSystem()
        #mercator_crs.createFromProj4("+proj=merc +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
        mercator_crs.createFromEpsg(3395)        
        mercator_transform = QgsCoordinateTransform(tmp_fp_layer.crs(), mercator_crs)
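        # EPSG:3395 (World Mercator) is used so geom.area() below returns
        # square metres; Mercator inflates areas away from the equator, so
        # these figures are approximate at high latitudes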
        
        # output grid
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.Int),
            1 : QgsField(LON_FIELD_NAME, QVariant.Double),
            2 : QgsField(LAT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
            4 : QgsField(HT_FIELD_NAME, QVariant.Int),
        }
        output_file = '%sfpc_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)        
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_fp_layer):
                # NOTE: geom.transform does projection in place to underlying
                #       C object, for some reason, multiple projection does not
                #       work correctly. following is a work-around
                 
                # 1. get geometry
                geom = _f.geometry()
                # 2. get original centroid point and project if required
                centroid  = geom.centroid().asPoint()
                if transform_required:
                    t_centroid = transform.transform(centroid)
                else:
                    t_centroid = centroid
                
                # 3. project into mercator and get area in m2
                geom.transform(mercator_transform)
                area = geom.area()
                
                # write to file
                gid += 1
                f.setGeometry(QgsGeometry.fromPoint(t_centroid))
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, QVariant(t_centroid.x()))
                f.addAttribute(2, QVariant(t_centroid.y()))
                f.addAttribute(3, QVariant(area))
                if ht_idx != -1:
                    f.addAttribute(4, _f.attributeMap()[ht_idx])
                else:
                    f.addAttribute(4, QVariant(0))
                writer.addFeature(f)            
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err, self.__class__)

        fp_layer = load_shapefile(output_file, tmp_fp_layername)
        if not fp_layer:
            raise OperatorError('Error loading footprint centroid file %s' % output_file, self.__class__)
        
        # clean up
        del tmp_fp_layer
        
        # store data in output
        self.outputs[0].value = fp_layer
        self.outputs[1].value = output_file
Example #34
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        grid_layer = self.inputs[3].value

        zone_stats = {}
        zone_count_stats = {}
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        count_idx = layer_field_index(zone_layer, count_field)
        for _f in layer_features(zone_layer):
            gid = _f.attributeMap()[gid_idx].toString()
            zone_stats[gid] = 0
            zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]

        # create storage for temporary output data
        use_grid_db = grid_layer.dataProvider().featureCount(
        ) > MAX_FEATURES_IN_MEMORY
        if use_grid_db:
            tmp_grid_db_file = '%sdb_%s.db' % (self._tmp_dir,
                                               get_unique_filename())
            grid_points = bsddb.btopen(tmp_grid_db_file, 'c')
        else:
            grid_points = {}

        # merge to create stats
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                                   [zone_field, count_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        stats = layer_multifields_stats(tmp_join_layer,
                                        [zone_field, count_field])
        if stats == False:
            raise OperatorError(
                "error creating statistic based on input files",
                self.__class__)

        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        count_idx = layer_field_index(tmp_join_layer, count_field)
        lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
        lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
        gid_idx = layer_field_index(tmp_join_layer, self._gid_field)

        try:
            for _f in layer_features(tmp_join_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                # update stats
                zone_stats[gid] += 1
                grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
        except Exception as err:
            raise OperatorError("error processing joined layer: " % err,
                                self.__class__)
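        # _make_key / _parse_key are assumed to pack and unpack
        # (zone, gid, lon, lat) into a single string key so the grid points
        # can be held in a plain dict or a bsddb table; the exact encoding is
        # an implementation detail of this operator and is not shown here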

        # test for zones without a grid point assigned
        count_idx = layer_field_index(zone_layer, count_field)
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        zone_idx = layer_field_index(zone_layer, zone_field)
        _x_off, _y_off = self._x_off / 2.0, self._y_off / 2.0
        try:
            for _f in layer_features(zone_layer):
                centroid = _f.geometry().centroid().asPoint()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                if zone_stats[gid] == 0:
                    # get lower left corner
                    lon = int(centroid.x() /
                              DEFAULT_GRID_SIZE) * self._x_off + _x_off
                    lat = int(
                        centroid.y() / self._y_off) * self._y_off + _y_off

                    #self._write_feature(writer, f, lon, lat, zone_str, count_val)
                    zone_stats[gid] += 1
                    grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
        except Exception as err:
            raise OperatorError("error processing missing points: " % err,
                                self.__class__)

        # output result
        fields = {
            0: QgsField(self._lon_field, QVariant.Double),
            1: QgsField(self._lat_field, QVariant.Double),
            2: QgsField(zone_field, QVariant.String),
            3: QgsField(count_field, QVariant.Double)
        }
        grid_layername = 'grid_%s' % (get_unique_filename())
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            f = QgsFeature()
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            for key, value in grid_points.iteritems():
                [zone, zone_gid, lon, lat] = self._parse_key(key)
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                """                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))            
                f.addAttribute(3, QVariant(count_val / total_features))
                writer.addFeature(f)
                """
                value = float(
                    value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
                #grid_points[key] = value
                self._write_feature(writer, f, lon, lat, zone, value)
            del writer
        except Exception as err:
            raise OperatorError("error creating joined grid file: " % err,
                                self.__class__)

        # load result layer
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading joined grid file %s' % grid_file,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
Example #35
0
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        # load and verify
        infile = self.inputs[0].value

        tmp_fp_layername = 'fp_%s' % get_unique_filename()
        tmp_fp_layer = load_shapefile(infile, tmp_fp_layername)
        if not tmp_fp_layer:
            raise OperatorError('Error loading footprint file %s' % infile,
                                self.__class__)

        if self._fp_ht_field is not None:
            ht_idx = layer_field_index(tmp_fp_layer, self._fp_ht_field)
        else:
            ht_idx = -1
        logAPICall.log(
            'tmp_fp_layer.crs().epsg() %s ' % tmp_fp_layer.crs().epsg(),
            logAPICall.DEBUG)
        if tmp_fp_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_fp_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False

        mercator_crs = QgsCoordinateReferenceSystem()
        #mercator_crs.createFromProj4("+proj=merc +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
        mercator_crs.createFromEpsg(3395)
        mercator_transform = QgsCoordinateTransform(tmp_fp_layer.crs(),
                                                    mercator_crs)

        # output grid
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(LON_FIELD_NAME, QVariant.Double),
            2: QgsField(LAT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
            4: QgsField(HT_FIELD_NAME, QVariant.Int),
        }
        output_file = '%sfpc_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_fp_layer):
                # NOTE: geom.transform does projection in place to underlying
                #       C object, for some reason, multiple projection does not
                #       work correctly. following is a work-around

                # 1. get geometry
                geom = _f.geometry()
                # 2. get original centroid point and project if required
                centroid = geom.centroid().asPoint()
                if transform_required:
                    t_centroid = transform.transform(centroid)
                else:
                    t_centroid = centroid

                # 3. project into mercator and get area in m2
                geom.transform(mercator_transform)
                area = geom.area()

                # write to file
                gid += 1
                f.setGeometry(QgsGeometry.fromPoint(t_centroid))
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, QVariant(t_centroid.x()))
                f.addAttribute(2, QVariant(t_centroid.y()))
                f.addAttribute(3, QVariant(area))
                if ht_idx != -1:
                    f.addAttribute(4, _f.attributeMap()[ht_idx])
                else:
                    f.addAttribute(4, QVariant(0))
                writer.addFeature(f)
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err,
                                self.__class__)

        fp_layer = load_shapefile(output_file, tmp_fp_layername)
        if not fp_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % output_file,
                self.__class__)

        # clean up
        del tmp_fp_layer

        # store data in output
        self.outputs[0].value = fp_layer
        self.outputs[1].value = output_file
Example #36
0
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set        
        # load and verify
        popgrid_file = self.inputs[0].value
        pop_field = self.inputs[1].value
        
        popgrid_layername = 'zone_%s' % get_unique_filename()
        try:
            tmp_popgrid_layer = load_shapefile_verify(popgrid_file, popgrid_layername,
                                                   [pop_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        
        logAPICall.log('tmp_popgrid_layer.crs().epsg() %s ' % tmp_popgrid_layer.crs().epsg(),
                       logAPICall.DEBUG)
        if tmp_popgrid_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_popgrid_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False
        
        # output grid
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.Int),
            1 : QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        pop_idx = layer_field_index(tmp_popgrid_layer, pop_field)
        output_file = '%spop_grid_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)        
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_popgrid_layer):
                # NOTE: geom.transform does projection in place to underlying C object
                 
                # 1. get geometry
                geom = _f.geometry()                
                # 2. change projection if required
                if transform_required:
                    geom = transform.transform(geom)
                
                # 3. write to file
                gid += 1
                f.setGeometry(geom)
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, _f.attributeMap()[pop_idx])
                writer.addFeature(f)            
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err, self.__class__)

        popgrid_layername = 'popgrid_%s' % get_unique_filename()
        popgrid_layer = load_shapefile(output_file, popgrid_layername)
        if not popgrid_layer:
            raise OperatorError('Error loading population grid file %s' % output_file, self.__class__)
        
        # clean up
        del tmp_popgrid_layer
        
        # store data in output
        self.outputs[0].value = popgrid_layer
        self.outputs[1].value = output_file
Example #37
0
    def do_operation(self):
        # input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value
        
        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(area_field, fp_layer.name()), self.__class__)        
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(ht_field, fp_layer.name()), self.__class__)        
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(HT_FIELD_NAME, svy_layer.name()), self.__class__)        
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(TAX_FIELD_NAME, svy_layer.name()))
        
        # load zone classes
        # the operations below must be performed for each zone 
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones        
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'        
        analyzer = QgsOverlayAnalyzer()        
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)        
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)
        
        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        
        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(TAX_FIELD_NAME, svy_layer.name()))
        
        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {} 
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {} 
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0

        # associate group to ratio value
        for _rec in layer_features(tmp_join_layer):
            # use the zone of each joined record rather than the loop variable
            # left over from the initialization loop above
            _zone = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone], _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone], _samp_grp, _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err, logAPICall.WARNING)
            
        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups", self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value==-1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError("Survey groups must have same number of samples", self.__class__)
            # sort by stories        
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight 
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][group_stories_for_sort[key]] = self.weights[idx]
                
        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            # height comes from the survey height field, not the size field
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            try:
                self._taxonomy.parse(_tax_str)
                # weights were assigned per zone above
                group_weight = _zone_group_weight[_zone_str]
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str, group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str, _sample_size*group_weight[_sample_grp])
                self.increment_dict(_zone_a_exp[_zone_str], _tax_str, _sample_size*_sample_ht*group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log("error processing sample with building type: %s" % _tax_str, logAPICall.WARNING)

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'        
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)        
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)
        
        zone_idx = layer_field_index(tmp_join_layer2, zone_field)        
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)        
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht
        
        # calculate building ratios for each zone        
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a) 
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())            
            # E[A] estimated total building area for zone
            e_at_total = _zone_total_area[_zone] * _zone_total_ht[_zone]/_zone_total_count[_zone]
            
            # calculate expected values  
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]         
                if e_at_cluster == 0 or e_at_total == 0: 
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area 
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building 
                    e_gt_cluster = e_at_cluster / e_nt_cluster

                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster
                                        
                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster
        
        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)

            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str, self._parse_order, self._parse_modifiers, add_times=int(_e_exp*1000))                                            
            # finalize call is required 
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)            
        
        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)
        
        # assign output        
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp    
Example #38
0
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value        
        count_field = self.inputs[2].value
        grid_layer = self.inputs[3].value        

        zone_stats = {}
        zone_count_stats = {}
        gid_idx = layer_field_index(zone_layer, self._gid_field)         
        count_idx = layer_field_index(zone_layer, count_field)
        for _f in layer_features(zone_layer):
            gid = _f.attributeMap()[gid_idx].toString()
            zone_stats[gid] = 0
            zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]
        
        # create storage for temporary output data
        use_grid_db = grid_layer.dataProvider().featureCount() > MAX_FEATURES_IN_MEMORY
        if use_grid_db:
            tmp_grid_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            grid_points = bsddb.btopen(tmp_grid_db_file, 'c')
        else:
            grid_points = {}
        
        # merge to create stats
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()        
        try:
            analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,[zone_field, count_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        stats = layer_multifields_stats(tmp_join_layer, [zone_field, count_field])
        if stats == False:
            raise OperatorError("error creating statistic based on input files",
                                self.__class__)

        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        count_idx = layer_field_index(tmp_join_layer, count_field)
        lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
        lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
        gid_idx = layer_field_index(tmp_join_layer, self._gid_field)        
        
        try:        
            for _f in layer_features(tmp_join_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                # update stats
                zone_stats[gid] += 1
                grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
        except Exception as err:
            raise OperatorError("error processing joined layer: " % err, self.__class__)

        # test for zones without a grid point assigned
        count_idx = layer_field_index(zone_layer, count_field)
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        zone_idx = layer_field_index(zone_layer, zone_field)
        _x_off, _y_off = self._x_off / 2.0,  self._y_off / 2.0
        try:
            for _f in layer_features(zone_layer):
                centroid = _f.geometry().centroid().asPoint()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()
                
                if zone_stats[gid] == 0:
                    # get lower left corner
                    lon = int(centroid.x()/DEFAULT_GRID_SIZE)*self._x_off + _x_off
                    lat = int(centroid.y()/self._y_off)*self._y_off + _y_off

                    #self._write_feature(writer, f, lon, lat, zone_str, count_val)
                    zone_stats[gid] += 1                                        
                    grid_points[self._make_key(zone_str, gid, lon, lat)] = 1                             
        except Exception as err:
            raise OperatorError("error processing missing points: " % err, self.__class__)

        # output result
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(zone_field, QVariant.String),
            3 : QgsField(count_field, QVariant.Double)
        }
        grid_layername = 'grid_%s' % (get_unique_filename())
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            f = QgsFeature()
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            for key, value in grid_points.iteritems():                
                [zone, zone_gid, lon, lat] = self._parse_key(key)                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                # each grid point receives an equal share of the zone's total
                # building count, so the per-zone totals are preserved
                value = float(value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
                self._write_feature(writer, f, lon, lat, zone, value)
            del writer
        except Exception as err:
            raise OperatorError("error creating joined grid file: " % err, self.__class__)
            
        # load result layer
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading joined grid file %s' % grid_file, self.__class__)
        
        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
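
The helpers _make_key/_parse_key and the zone_stats/zone_count_stats dictionaries used above are set up earlier in the operator and are not shown in this snippet. A minimal standalone sketch, assuming the key simply packs zone name, zone GID and the point coordinates into a delimited string, illustrates the key round trip and the even redistribution of a zone's building count across its grid points (all names and numbers below are made up):

# hypothetical sketch, not the project's actual helpers
def _make_key(zone_str, gid, lon, lat):
    return '%s|%s|%.6f|%.6f' % (zone_str, gid, lon, lat)

def _parse_key(key):
    zone_str, gid, lon, lat = key.split('|')
    return [zone_str, gid, float(lon), float(lat)]

print _parse_key(_make_key('URBAN', '12', 10.5, 45.25))
# ['URBAN', '12', 10.5, 45.25]

# each grid point gets an equal share of the zone total, so with 4 points
# and a zone count of 100 every point is written out with a value of 25.0
points_in_zone, zone_total_count = 4, 100.0
print 1.0 / points_in_zone * zone_total_count   # 25.0
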
Beispiel #39
0
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/ouput        
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field
        
        logAPICall.log('survey %s, taxfield %s, zone %s, zone_field %s' % (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
                       logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        
        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()        
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)
        
        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)
        
        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)
        
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())            
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional[StatisticNode.AverageSize] = _area
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str) , logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(_tax_str, self._parse_order, self._parse_modifiers, additional)
            except TaxonomyParseError as perr:
                logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING)
        
        # store data in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up        
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = ms
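
Conceptually, the per-zone Statistics objects above accumulate a count of each taxonomy string observed inside the zone. A rough standalone sketch of that tally using plain dicts (not the sidd Statistics API; the zone and taxonomy strings are made up):

from collections import defaultdict

def tally_cases(records):
    """ records: iterable of (zone_str, tax_str) pairs """
    counts = defaultdict(lambda: defaultdict(int))
    for zone_str, tax_str in records:
        counts[zone_str][tax_str] += 1
    return counts

cases = [('URBAN', 'MUR+ADO'), ('URBAN', 'CR+PC'), ('RURAL', 'MUR+ADO')]
for zone, dist in tally_cases(cases).iteritems():
    print zone, dict(dist)
# URBAN {'MUR+ADO': 1, 'CR+PC': 1}
# RURAL {'MUR+ADO': 1}
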
Beispiel #40
0
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/ouput
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        zone_count_field = self.inputs[2].value
        fp_layer = self.inputs[3].value

        # merge with zone 
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # count footprint in each zone
        gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        stats = {}
        for _feature in layer_features(tmp_join_layer):
            gid = _feature.attributeMap()[gid_idx].toString()
            if ht_idx > 0:      
                ht = _feature.attributeMap()[ht_idx].toDouble()[0]
            else:
                ht = 0                        
            # if height is not defined, it is set to 0; the weighted area
            # (footprint area * height) is then also 0, so undefined heights
            # are effectively ignored without having to remove the field
            area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
            if not stats.has_key(gid):
                stats[gid] = (1, area)
            else:
                stat = stats[gid] 
                stats[gid] = (stat[0]+1, stat[1]+area)
            
        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)
        try:            
            fields ={
                0 : QgsField(GID_FIELD_NAME, QVariant.Int),
                1 : QgsField(zone_field, QVariant.String),
                2 : QgsField(CNT_FIELD_NAME, QVariant.Int),
                3 : QgsField(AREA_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")                     
            f = QgsFeature()            
            for _f in layer_features(zone_layer):
                
                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])
                
                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    stat = stats[gid]
                    bldg_count = stat[0]
                    area = stat[1]
                except:
                    bldg_count, area = 0, 0
                f.addAttribute(2, QVariant(bldg_count))
                f.addAttribute(3, QVariant(area))
                writer.addFeature(f)
            
            del writer, f
        except Exception as err:            
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err, self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError('Error loading footprint centroid file %s' % output_file, self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
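
The per-zone tally above keeps a (count, total_area) tuple per GID, where area is weighted by height, so footprints with no height contribute to the count but not to the area. A self-contained sketch of the same accumulation, with made-up sample values:

def accumulate(footprints):
    """ footprints: iterable of (gid, area, height) tuples """
    stats = {}
    for gid, area, height in footprints:
        weighted_area = area * height
        if not stats.has_key(gid):
            stats[gid] = (1, weighted_area)
        else:
            count, total = stats[gid]
            stats[gid] = (count + 1, total + weighted_area)
    return stats

print accumulate([('1', 120.0, 2), ('1', 80.0, 0), ('2', 50.0, 3)])
# {'1': (2, 240.0), '2': (1, 150.0)}
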
Beispiel #41
0
    def test_ZoneFootprintToGridJoin(self, skipTest=False):
        logging.debug('test_ZoneFootprintToGridJoin %s' % skipTest)

        # test 1
        # area from footprint
        zone_data = self.test_LoadZone2(True, 3)
        fp_opdata = self.test_LoadFootprintHT(skipTest=True)

        # test 1
        merger = FootprintZoneToGrid(self.operator_options)
        merger.inputs = [
            fp_opdata[0],
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_field),
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone3_bldgcount_field),
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone3_bldgarea_field),
        ]
        merger.outputs = [
            OperatorData(OperatorDataTypes.Grid),
            OperatorData(OperatorDataTypes.Shapefile)
        ]
        merger.do_operation()

        if skipTest:
            # clean up intermediate data
            self._clean_layer(fp_opdata)
            self._clean_layer(zone_data)
            return merger.outputs

        self.assertTrue(os.path.exists(merger.outputs[1].value))
        cnt_idx = layer_field_index(merger.outputs[0].value, CNT_FIELD_NAME)
        area_idx = layer_field_index(merger.outputs[0].value, AREA_FIELD_NAME)
        total_cnt, total_sqmt = 0, 0
        for _f in layer_features(merger.outputs[0].value):
            cnt = _f.attributeMap()[cnt_idx].toDouble()[0]
            area = _f.attributeMap()[area_idx].toDouble()[0]
            total_cnt += cnt
            total_sqmt += area
        self.assertAlmostEqual(total_cnt, self.zone3_total_bldg_cnt, places=2)
        self.assertAlmostEqual(total_sqmt, self.fp3_total_area, places=-2)
        self._clean_layer(merger.outputs)

        # load data
        self._clean_layer(fp_opdata)
        fp_opdata = self.test_LoadFootprint(True, 3)

        # test 2
        # area from zone
        merger = FootprintZoneToGrid(self.operator_options)
        merger.inputs = [
            fp_opdata[0],
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_field),
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone3_bldgcount_field),
            OperatorData(OperatorDataTypes.StringAttribute,
                         self.zone3_bldgarea_field),
        ]
        merger.outputs = [
            OperatorData(OperatorDataTypes.Grid),
            OperatorData(OperatorDataTypes.Shapefile)
        ]
        merger.do_operation()

        self.assertTrue(os.path.exists(merger.outputs[1].value))
        cnt_idx = layer_field_index(merger.outputs[0].value, CNT_FIELD_NAME)
        area_idx = layer_field_index(merger.outputs[0].value, AREA_FIELD_NAME)
        total_cnt, total_sqmt = 0, 0
        for _f in layer_features(merger.outputs[0].value):
            cnt = _f.attributeMap()[cnt_idx].toDouble()[0]
            area = _f.attributeMap()[area_idx].toDouble()[0]
            total_cnt += cnt
            total_sqmt += area
        self.assertAlmostEqual(total_cnt, self.zone3_total_bldg_cnt, places=2)
        self.assertAlmostEqual(total_sqmt,
                               self.zone3_total_bldg_area,
                               places=-2)
        self._clean_layer(merger.outputs)

        # test 3
        # no area
        merger = FootprintZoneToGrid(self.operator_options)
        merger.inputs = [
            fp_opdata[0],
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_field),
            OperatorData(OperatorDataTypes.StringAttribute),
            OperatorData(OperatorDataTypes.StringAttribute),
        ]
        merger.outputs = [
            OperatorData(OperatorDataTypes.Grid),
            OperatorData(OperatorDataTypes.Shapefile)
        ]
        merger.do_operation()
        self.assertTrue(os.path.exists(merger.outputs[1].value))
        cnt_idx = layer_field_index(merger.outputs[0].value, CNT_FIELD_NAME)
        total_cnt = 0
        for _f in layer_features(merger.outputs[0].value):
            cnt = _f.attributeMap()[cnt_idx].toDouble()[0]
            total_cnt += cnt
        self.assertAlmostEqual(total_cnt, self.fp3_feature_count, places=2)

        # cleanup
        self._clean_layer(fp_opdata)
        self._clean_layer(zone_data)
        self._clean_layer(merger.outputs)
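
A note on the assertions above: assertAlmostEqual(a, b, places=-2) passes when round(a - b, -2) == 0, i.e. when the two totals agree once their difference is rounded to the nearest hundred. A tiny illustration:

import unittest

class PlacesDemo(unittest.TestCase):
    def test_negative_places(self):
        # difference of 49 rounds to 0 at the hundreds, so this passes
        self.assertAlmostEqual(100049.0, 100000.0, places=-2)

if __name__ == '__main__':
    unittest.main()
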
Beispiel #42
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        # validate inputs
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        area_field = self.inputs[3].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        self._test_layer_field_exists(zone_layer, count_field)

        # local variables
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        # cnt_idx = ToGrid.STAT_COUNT_IDX

        # 1. find building count and total area for each zone
        zone_names, zone_stat = {}, {}
        try:
            self._create_zone_statistics(zone_layer, zone_field, count_field, zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone
        tmp_grid1 = "grid_" + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + ".shp"
        try:
            extent = zone_layer.extent()
            [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
            tmp_grid_lyr1 = self._create_grid(
                tmp_grid1, tmp_grid1_file, x_min, y_min, x_max, y_max, DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE
            )
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 3. intersect grids and zones to obtain polygons with
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area)
        # apply ratio to zone building count to obtain count assigned to polygon
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        bldg_area_idx = layer_field_index(tmp_join_layer, area_field)
        mercator_transform = QgsCoordinateTransform(tmp_join_layer.crs(), self.mercator_crs)

        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(tmp_join_layer):
            # get area of polygon
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]

            # calculate count/area as proportion of total zone area
            bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * (area / stat[area_idx])
            if bldg_area_idx > 0:
                bldg_area = _f.attributeMap()[bldg_area_idx].toDouble()[0] * (area / stat[area_idx])
            else:
                bldg_area = 0

            # create output record
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, bldg_cnt)
            f.addAttribute(3, bldg_area)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)

        # store data in output
        self._load_output(output_file, output_layername)
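
The allocation step above splits each zone's building count (and area) among the grid/zone intersection polygons in proportion to polygon area over total zone area, so the per-zone totals are preserved. A minimal sketch of that proportional split, with made-up numbers:

def allocate(zone_count, polygon_areas):
    """ split zone_count across polygons proportionally to their areas """
    total_area = float(sum(polygon_areas))
    return [zone_count * (area / total_area) for area in polygon_areas]

counts = allocate(1000, [250.0, 250.0, 500.0])
print counts        # [250.0, 250.0, 500.0]
print sum(counts)   # 1000.0
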
Beispiel #43
0
    @logAPICall
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/ouput
        svy_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(svy_layer)
        
        total_features = svy_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to store temporary lat/lon
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')            
        else:
            db = {}

        # tally statistics for each grid_id/building type combination
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        for f in layer_features(svy_layer):
            geom = f.geometry()
            centroid  = geom.centroid().asPoint()
            grid_id = latlon_to_grid(centroid.y(), centroid.x())                        
            tax_str = str(f.attributeMap()[tax_idx].toString())

            key = '%s %s' % (tax_str, grid_id)
            if db.has_key(key):
                db[key] = str(int(db[key]) + 1) # value as string required by bsddb
            else:
                db[key] = '1'                   # value as string required by bsddb

        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", 
                                         self._fields, self._outputGeometryType(), self._crs, 
                                         "ESRI Shapefile")
            f = QgsFeature()            
            gid = 0
            for key, val in db.iteritems():
                (tax_str, grid_id) = key.split(' ')
                lon, lat = grid_to_latlon(int(grid_id))
                
                f.setGeometry(self._outputGeometryFromGridId(grid_id))
                f.addAttribute(0, QVariant(grid_id))
                f.addAttribute(1, QVariant(lon))
                f.addAttribute(2, QVariant(lat))
                f.addAttribute(3, QVariant(tax_str))
                f.addAttribute(4, QVariant(''))
                f.addAttribute(5, QVariant(val))
                writer.addFeature(f)
                gid += 1
            del writer, f
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err, self.__class__)
        
        # load shapefile as layer        
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:            
            raise OperatorError('Error loading exposure file %s' % (exposure_file), self.__class__)
        
        # store data in output
        self.outputs[0].value = exposure_layer
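
The tally above keys on a combined 'taxonomy grid_id' string and stores the counts as strings, because bsddb can only hold byte strings; the same code works unchanged against a plain dict when the feature count is small. A standalone sketch of that pattern (the taxonomy string and grid id below are made up):

def add_case(db, tax_str, grid_id):
    key = '%s %s' % (tax_str, grid_id)
    if db.has_key(key):
        db[key] = str(int(db[key]) + 1)   # value kept as string for bsddb
    else:
        db[key] = '1'

db = {}
add_case(db, 'MUR+ADO', 123456)
add_case(db, 'MUR+ADO', 123456)
print db                                  # {'MUR+ADO 123456': '2'}
tax_str, grid_id = db.keys()[0].split(' ')
print tax_str, int(grid_id)               # MUR+ADO 123456
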
Beispiel #44
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mappin """
        # validate inputs
        popgrid_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(popgrid_layer)
        self._test_layer_field_exists(popgrid_layer, CNT_FIELD_NAME)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required
        # if count field is not defined, then generate building count from footprints

        # local variables
        analyzer = QgsOverlayAnalyzer()

        # intersect grids and zones to obtain polygons with
        # - population and zone_id
        # - apply ratio to population to obtain building count
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # generate grid with  building counts
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        pop_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        for _f in layer_features(tmp_join_layer):
            pop_count = _f.attributeMap()[pop_idx].toDouble()[0]
            zone = _f.attributeMap()[zone_idx].toString()

            # 1. get geometry
            geom = _f.geometry()
            # 2. get original centroid point and project if required
            centroid = geom.centroid().asPoint()
            grid_gid = latlon_to_grid(centroid.y(), centroid.x())
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone)
            f.addAttribute(2, pop_count / pop_to_bldg)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        self._load_output(output_file, output_layername)
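
The conversion above is a straight ratio: each grid cell's population divided by pop_to_bldg (people per building) gives the building count written to the output grid. A short sketch with made-up values:

def population_to_buildings(pop_counts, pop_to_bldg):
    """ pop_counts: dict of grid id -> population """
    return dict((gid, pop / float(pop_to_bldg)) for gid, pop in pop_counts.iteritems())

print population_to_buildings({'g1': 1250.0, 'g2': 400.0}, 5.0)
# {'g2': 80.0, 'g1': 250.0}
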
Beispiel #45
0
    @logAPICall
    def do_operation(self):
        """ perform apply mapping scheme operation """
        
        # input/output data checking already done during property set
        src_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        ms = self.inputs[3].value
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(src_layer)
        self._test_layer_field_exists(src_layer, zone_field)
        self._test_layer_field_exists(src_layer, count_field)
        
        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)

        # loop through all input features
        provider = src_layer.dataProvider()
        if provider is None:
            raise OperatorError("input layer not correctly loaded", self.__class__)
        zone_idx = layer_field_index(src_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError("field %s not found in input layer" % zone_field, self.__class__)
        count_idx = layer_field_index(src_layer, count_field)
        if count_idx == -1:
            raise OperatorError("field %s not found in input layer" % count_field, self.__class__)
        gid_idx = layer_field_index(src_layer, GID_FIELD_NAME)
        if gid_idx == -1:
            raise OperatorError("field %s not found in input layer" % GID_FIELD_NAME, self.__class__)
        area_idx = layer_field_index(src_layer, AREA_FIELD_NAME)
        
        provider.select(provider.attributeIndexes(), provider.extent())
        provider.rewind()

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields, provider.geometryType(), self._crs, "ESRI Shapefile")
            out_feature = QgsFeature()
            
            gid = 0
            for in_feature in layer_features(src_layer):
                geom = in_feature.geometry()
                centroid = geom.centroid().asPoint()
                gid = in_feature.attributeMap()[gid_idx]
                zone_str = str(in_feature.attributeMap()[zone_idx].toString())
                count = in_feature.attributeMap()[count_idx].toDouble()[0]
                if area_idx > 0:
                    area = in_feature.attributeMap()[area_idx].toDouble()[0]
                else:
                    area = 0
                
                count = int(count+0.5)
                if count == 0:
                    continue                            
                
                stats = ms.get_assignment_by_name(zone_str)
                
                # use default stats if missing
                if stats is None:
                    raise Exception("no mapping scheme found for zone %s" % zone_str)
                
                for _sample in stats.get_samples(count, self._extrapolationOption):
                    # write out if there are structures assigned
                    _type = _sample[0]
                    _cnt = _sample[1]
                    
                    if area > 0:
                        # use area provided by footprint/zone if defined
                        _size = area * ( float(_sample[1]) / count )
                        if _sample[3] > 0 and _sample[2] > 0:
                            _cost = (_sample[3] / _sample[2]) * area
                        else:
                            _cost = 0
                    else:
                        # use mapping scheme generic area otherwise
                        _size = _sample[2]
                        _cost = _sample[3]
                    
                    if _cnt > 0:
                        out_feature.setGeometry(geom)
                        #out_feature.addAttribute(0, QVariant(gid))
                        out_feature.addAttribute(0, gid)
                        out_feature.addAttribute(1, QVariant(centroid.x()))
                        out_feature.addAttribute(2, QVariant(centroid.y()))
                        out_feature.addAttribute(3, QVariant(_type))
                        out_feature.addAttribute(4, QVariant(zone_str))
                        out_feature.addAttribute(5, QVariant(_cnt))
                        out_feature.addAttribute(6, QVariant(_size))
                        out_feature.addAttribute(7, QVariant(_cost))
                        writer.addFeature(out_feature)
            del writer, out_feature
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err, self.__class__)
            
        del src_layer
        
        # load shapefile as layer        
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:            
            raise OperatorError('Error loading exposure file %s' % exposure_file, self.__class__)
        
        # store data in output
        self.outputs[0].value = exposure_layer
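
The sample handling above implies that stats.get_samples() yields tuples of roughly (building_type, count, size, cost); when the source layer carries its own area, size and cost are rescaled to that area, otherwise the mapping scheme's generic values are used. A standalone sketch of that bookkeeping (the tuple layout and all sample values below are assumptions for illustration, not the sidd API):

def expand_samples(samples, total_count, area):
    """ samples: assumed iterable of (building_type, count, size, cost) tuples """
    rows = []
    for _type, _cnt, _size, _cost in samples:
        if _cnt <= 0:
            continue
        if area > 0:
            # rescale to the area provided by footprint/zone
            size = area * (float(_cnt) / total_count)
            if _cost > 0 and _size > 0:
                cost = (_cost / _size) * area
            else:
                cost = 0
        else:
            # fall back to the mapping scheme's generic size/cost
            size, cost = _size, _cost
        rows.append((_type, _cnt, size, cost))
    return rows

print expand_samples([('MUR+ADO', 6, 80.0, 40000.0), ('CR+PC', 4, 120.0, 90000.0)],
                     total_count=10, area=1500.0)
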
Beispiel #46
0
    def test_ZoneFootprintToGridJoin(self, skipTest=False):
        logging.debug("test_ZoneFootprintJoin %s" % skipTest)

        # test 1
        # area from footprint
        zone_data = self.test_LoadZone2(True, 3)
        fp_opdata = self.test_LoadFootprintHT(skipTest=True)

        # test 1
        merger = FootprintZoneToGrid(self.operator_options)
        merger.inputs = [
            fp_opdata[0],
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_field),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_bldgcount_field),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_bldgarea_field),
        ]
        merger.outputs = [OperatorData(OperatorDataTypes.Grid), OperatorData(OperatorDataTypes.Shapefile)]
        merger.do_operation()

        if skipTest:
            # clean up intermediate data
            self._clean_layer(fp_opdata)
            self._clean_layer(zone_data)
            return merger.outputs

        self.assertTrue(os.path.exists(merger.outputs[1].value))
        cnt_idx = layer_field_index(merger.outputs[0].value, CNT_FIELD_NAME)
        area_idx = layer_field_index(merger.outputs[0].value, AREA_FIELD_NAME)
        total_cnt, total_sqmt = 0, 0
        for _f in layer_features(merger.outputs[0].value):
            cnt = _f.attributeMap()[cnt_idx].toDouble()[0]
            area = _f.attributeMap()[area_idx].toDouble()[0]
            total_cnt += cnt
            total_sqmt += area
        self.assertAlmostEqual(total_cnt, self.zone3_total_bldg_cnt, places=2)
        self.assertAlmostEqual(total_sqmt, self.fp3_total_area, places=-2)
        self._clean_layer(merger.outputs)

        # load data
        self._clean_layer(fp_opdata)
        fp_opdata = self.test_LoadFootprint(True, 3)

        # test 2
        # area from zone
        merger = FootprintZoneToGrid(self.operator_options)
        merger.inputs = [
            fp_opdata[0],
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_field),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_bldgcount_field),
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_bldgarea_field),
        ]
        merger.outputs = [OperatorData(OperatorDataTypes.Grid), OperatorData(OperatorDataTypes.Shapefile)]
        merger.do_operation()

        self.assertTrue(os.path.exists(merger.outputs[1].value))
        cnt_idx = layer_field_index(merger.outputs[0].value, CNT_FIELD_NAME)
        area_idx = layer_field_index(merger.outputs[0].value, AREA_FIELD_NAME)
        total_cnt, total_sqmt = 0, 0
        for _f in layer_features(merger.outputs[0].value):
            cnt = _f.attributeMap()[cnt_idx].toDouble()[0]
            area = _f.attributeMap()[area_idx].toDouble()[0]
            total_cnt += cnt
            total_sqmt += area
        self.assertAlmostEqual(total_cnt, self.zone3_total_bldg_cnt, places=2)
        self.assertAlmostEqual(total_sqmt, self.zone3_total_bldg_area, places=-2)
        self._clean_layer(merger.outputs)

        # test 3
        # no area
        merger = FootprintZoneToGrid(self.operator_options)
        merger.inputs = [
            fp_opdata[0],
            zone_data[0],
            OperatorData(OperatorDataTypes.StringAttribute, self.zone3_field),
            OperatorData(OperatorDataTypes.StringAttribute),
            OperatorData(OperatorDataTypes.StringAttribute),
        ]
        merger.outputs = [OperatorData(OperatorDataTypes.Grid), OperatorData(OperatorDataTypes.Shapefile)]
        merger.do_operation()
        self.assertTrue(os.path.exists(merger.outputs[1].value))
        cnt_idx = layer_field_index(merger.outputs[0].value, CNT_FIELD_NAME)
        total_cnt = 0
        for _f in layer_features(merger.outputs[0].value):
            cnt = _f.attributeMap()[cnt_idx].toDouble()[0]
            total_cnt += cnt
        self.assertAlmostEqual(total_cnt, self.fp3_feature_count, places=2)

        # cleanup
        self._clean_layer(fp_opdata)
        self._clean_layer(zone_data)
        self._clean_layer(merger.outputs)
Beispiel #47
0
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        zone_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)

        x_off = self._x_off
        y_off = self._y_off

        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [
            extent.xMinimum(),
            extent.yMinimum(),
            extent.xMaximum(),
            extent.yMaximum()
        ]

        # create grid based on extent of given region
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'

        try:
            self._write_grid_shapefile(tmp_grid1_file, x_min, y_min, x_max,
                                       y_max, x_off, y_off)
        except:
            remove_shapefile(tmp_grid1_file)
            raise OperatorError('error creating temporary grid',
                                self.__class__)

        tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)

        # temporary grid for joined shape with all grid points not within region removed
        tmp_grid2 = 'grid_' + get_unique_filename()
        tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
        tmp_grid2_layer = None
        try:
            analyzer = QgsOverlayAnalyzer()
            analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
            tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
        except:
            raise OperatorError('error creating grid', self.__class__)

        # create result layer
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = self._tmp_dir + grid_layername + '.shp'
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
            lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)
            for _f in layer_features(tmp_grid2_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                writer.addFeature(f)
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid', self.__class__)

        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % grid_file,
                                self.__class__)

        # clean up
        del analyzer, tmp_grid1_layer, tmp_grid2_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_grid2_file)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
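
_write_grid_shapefile is part of the operator and is not shown here; from the usage above it presumably lays out one point per x_off/y_off step across the zone layer's bounding box, and the intersection afterwards drops the points that fall outside the zones. A minimal pure-Python sketch of that point layout (no QGIS, names are illustrative only):

def grid_points(x_min, y_min, x_max, y_max, x_off, y_off):
    points = []
    y = y_min
    while y <= y_max:
        x = x_min
        while x <= x_max:
            points.append((x, y))
            x += x_off
        y += y_off
    return points

print len(grid_points(0.0, 0.0, 1.0, 1.0, 0.5, 0.5))   # 9 points (3 x 3 grid)
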
Beispiel #48
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """    
        
        # validate inputs 
        fp_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        count_field = self.inputs[3].value
        area_field = self.inputs[4].value 

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(fp_layer)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required        
        # if count field is not defined, then generate building count from footprints        
        # area_field is not required
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        cnt_idx = ToGrid.STAT_COUNT_IDX
        
        zone_names, zone_stat, zone_stat2, zone_totals = {}, {}, {}, {}
        
        # 1. find building count and total area for each zone
        # project geometry into mercator and get area in m2
        mercator_crs = QgsCoordinateReferenceSystem()        
        mercator_crs.createFromEpsg(3395)        
        mercator_transform = QgsCoordinateTransform(zone_layer.crs(), mercator_crs)
        
        try:
            # use zone geometry area 
            self._create_zone_statistics(zone_layer, zone_field, count_field, 
                     zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
        tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file, \
                                          x_min, y_min, x_max, y_max, \
                                          DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)            

        # tally total building area if the area field is defined
        bldg_area_idx = layer_field_index(zone_layer, area_field)
        zone_area = {}
        zone_has_area = False        
        if bldg_area_idx > 0:
            zone_has_area = True
            zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
            for _f in layer_features(zone_layer):            
                gid = _f.attributeMap()[zone_gid_idx].toString()            
                area = _f.attributeMap()[bldg_area_idx].toDouble()[0]            
                if zone_area.has_key(gid):
                    zone_area[gid] += area
                else: 
                    zone_area[gid] = area
        
        # 3. intersect grids and zones to obtain polygons with 
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area) 
        # apply ratio to zone building count to obtain count assigned to polygon                  
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        for _f in layer_features(tmp_join_layer):
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()
            
            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]            
            # calculate count/area as proportion of total zone area
            area_ratio = (area/stat[area_idx])
            if bldg_cnt_idx > 0:
                bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * area_ratio
            else:
                bldg_cnt = 0
            if zone_has_area: 
                area = zone_area[zone_gid] * area_ratio
            else:
                area = stat[area_idx] * area_ratio                 
            self._update_stat(zone_stat2, '%s|%s'%(grid_gid, zone_gid), bldg_cnt, area)
        
        # 4. find total buildings in each zone based on footprint
        # - simply join the files and tally count and total area 
        tmp_join1 = 'joined_%s' % get_unique_filename()
        tmp_join1_file = '%s%s.shp' % (self._tmp_dir, tmp_join1)        
        try:
            # do intersection
            analyzer.intersection(fp_layer, tmp_join_layer, tmp_join1_file)
            tmp_join1_layer = load_shapefile(tmp_join1_file, tmp_join1)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally
        zone_fp_stat = {}
        zone_gid_idx = layer_field_index(tmp_join1_layer, '%s_'% GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join1_layer, "GRID_GID")        
        fp_area_idx = layer_field_index(tmp_join1_layer, AREA_FIELD_NAME)
        fp_ht_idx = layer_field_index(tmp_join1_layer, HT_FIELD_NAME)
        fp_has_height = False
        for _f in layer_features(tmp_join1_layer):
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            area = _f.attributeMap()[fp_area_idx].toDouble()[0] # area comes from geometry, always exists
            ht = _f.attributeMap()[fp_ht_idx].toDouble()[0]
            if ht > 0:
                fp_has_height = True
                area *= ht      # this is actual area to be aggregated at the end
            self._update_stat(zone_fp_stat, '%s|%s'%(grid_gid, zone_gid), 1, area)
            self._update_stat(zone_totals, zone_gid, 1, area)
        
        # 5. generate grid with adjusted building counts
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for key in zone_stat2.keys():
            (grid_gid, zone_gid) = str(key).split("|")
            s_zone = zone_stat[QString(zone_gid)]           # overall statistics for the zone from zone file (always exists)
            s_zone_grid = zone_stat2[key]                   # grid specific statistic from the zone file (always exists)
            if zone_totals.has_key(QString(zone_gid)):      # overall statistics for the zone from footprints
                s_total = zone_totals[QString(zone_gid)]       
            else:
                s_total = [0,0] # set to zero if missing
            if zone_fp_stat.has_key(key):                   # grid specific statistic from the footprints
                s_fp = zone_fp_stat[key]                        
            else:
                s_fp = [0, 0]   # set to zero if missing

            zone_leftover_count = s_zone[cnt_idx] - s_total[cnt_idx]   
            if zone_has_area:
                zone_leftover_area = zone_area[QString(zone_gid)] - s_total[area_idx]
            else:
                zone_leftover_area = s_zone[area_idx] - s_total[area_idx]
            if zone_leftover_count > 0:
                # there are still buildings not accounted for;
                # distribute them to grid cells based on the ratio of grid leftover area to zone leftover area
                # (leftover area is the zone area after subtracting footprint areas)
                grid_leftover_count = zone_leftover_count * ((s_zone_grid[area_idx]-s_fp[area_idx])/zone_leftover_area)
                grid_count = s_fp[cnt_idx] + grid_leftover_count
            else:
                grid_count = s_fp[cnt_idx]
            
            if fp_has_height:
                # area can be actual area based on footprint area * height
                area = s_fp[area_idx]
            elif zone_has_area:
                area = s_zone_grid[area_idx]
            else:
                # no area defined
                area = 0 # max(s_zone_grid[area_idx], s_fp[area_idx])
                
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, grid_count)
            f.addAttribute(3, area)
            writer.addFeature(f)
        del writer
        
        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        del tmp_join1_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join1_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
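
_update_stat and the ToGrid.STAT_* indices are defined elsewhere in the project; from the usage above they appear to accumulate a [count, area] pair per key. The leftover buildings (zone count minus footprint count) are then spread over grid cells in proportion to each cell's share of the zone's non-footprint area. A hypothetical sketch of both pieces (index layout and all numbers are assumptions):

STAT_COUNT_IDX, STAT_AREA_IDX = 0, 1   # assumed layout of the stat pairs

def _update_stat(stats, key, count, area):
    if stats.has_key(key):
        stats[key][STAT_COUNT_IDX] += count
        stats[key][STAT_AREA_IDX] += area
    else:
        stats[key] = [count, area]

def grid_leftover_count(zone_leftover_cnt, cell_zone_area, cell_fp_area, zone_leftover_area):
    # share of the zone's unaccounted buildings assigned to one grid cell
    return zone_leftover_cnt * ((cell_zone_area - cell_fp_area) / zone_leftover_area)

stats = {}
_update_stat(stats, 'G1|Z1', 1, 150.0)
_update_stat(stats, 'G1|Z1', 1, 200.0)
print stats                                               # {'G1|Z1': [2, 350.0]}
print grid_leftover_count(100, 4000.0, 1000.0, 30000.0)   # 10.0
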
Beispiel #49
0
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/ouput
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field

        logAPICall.log(
            'survey %s, taxfield %s, zone %s, zone_field %s' %
            (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
            logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)

        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)

        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional[StatisticNode.AverageSize] = _area
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str),
                           logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(
                    _tax_str, self._parse_order, self._parse_modifiers,
                    additional)
            except TaxonomyParseError as perr:
                logAPICall.log(
                    "error parsing case %s, %s" % (str(_tax_str), str(perr)),
                    logAPICall.WARNING)

        # store data in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = ms
Beispiel #50
0
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set         
        zone_layer = self.inputs[0].value        
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
                
        x_off = self._x_off
        y_off = self._y_off

        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]

        # create grid based on extent of given region 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'

        try:
            self._write_grid_shapefile(tmp_grid1_file,
                                       x_min, y_min, x_max, y_max,
                                       x_off, y_off)
        except:
            remove_shapefile(tmp_grid1_file)
            raise OperatorError('error creating temporary grid', self.__class__)        
        
        tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)
        
        # temporary grid for joined shape with all grid points not within region removed 
        tmp_grid2 = 'grid_' + get_unique_filename()
        tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
        tmp_grid2_layer = None
        try:
            analyzer = QgsOverlayAnalyzer()        
            analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
            tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
        except:
            raise OperatorError('error creating grid', self.__class__)

        # create result layer
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = self._tmp_dir + grid_layername + '.shp'
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                         QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
            lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)        
            for _f in layer_features(tmp_grid2_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                writer.addFeature(f)                
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid', self.__class__)

        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % grid_file, self.__class__)
        
        # clean up
        del analyzer, tmp_grid1_layer, tmp_grid2_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_grid2_file)
        
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
Beispiel #51
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/ouput
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        popgrid_layer = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # merge with zone
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # count footprint in each zone
        stats = {}
        _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        for _f in layer_features(tmp_join_layer):
            # retrieve count from statistic
            _gid = _f.attributeMap()[_gid_idx].toString()
            _count = _f.attributeMap()[_cnt_idx].toString()
            if stats.has_key(_gid):
                stats[_gid] += float(_count) / pop_to_bldg
            else:
                stats[_gid] = float(_count) / pop_to_bldg

        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            fields = {
                0: QgsField(GID_FIELD_NAME, QVariant.Int),
                1: QgsField(zone_field, QVariant.String),
                2: QgsField(CNT_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPolygon, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(zone_layer):

                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])

                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    bldg_count = stats[gid]
                except:
                    bldg_count = 0
                f.addAttribute(2, QVariant(bldg_count))
                writer.addFeature(f)

            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % output_file,
                self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file