Example #1
    def do_operation(self):
        """ perform survey data loading """
        # input/output data checking already done during property set
        survey = self.inputs[0].value
        project = self.inputs[2].value

        tmp_survey_file = '%ssurvey_%s.shp' % (self._tmp_dir,
                                               get_unique_filename())
        # load survey
        try:
            self._loadSurvey(survey, tmp_survey_file, project)

        except Exception as err:
            remove_shapefile(tmp_survey_file)
            raise OperatorError("Error Loading Survey\n%s" % err,
                                self.__class__)
        try:
            # store loaded data
            tmp_survey_layername = 'survey_%s' % get_unique_filename()
            tmp_survey_layer = load_shapefile_verify(
                tmp_survey_file, tmp_survey_layername,
                [self._lon_field, self._lat_field, self._tax_field])
        except Exception as err:
            raise OperatorError("Error Loading Survey\n%s" % err,
                                self.__class__)

        self.outputs[0].value = tmp_survey_layer
        self.outputs[1].value = tmp_survey_file
Example #2
File: project.py Project: ImageCatInc/sidd
    def build_exposure_steps(self):
        """ building exposure database from workflow """
        if not self.workflow.ready:
            raise SIDDException('Cannot create exposure with current datasets. Please revise input')

        if not self.ms.is_valid:
            raise SIDDException('Current mapping scheme is not valid')

        for zone in self.ms.zones:
            zone.stats.refresh_leaves(with_modifier=True, order_attributes=True)

        if getattr(self, 'exposure', None) is not None:
            del self.exposure
            remove_shapefile(self.exposure_file)

        for op in self.workflow.nextstep():
            yield op

        # when all steps are completed, set resulting exposure
        self.exposure = self.workflow.operator_data['exposure'].value
        self.exposure_file = self.workflow.operator_data['exposure_file'].value
        if self.workflow.operator_data.has_key('exposure_grid'):
            self.exposure_grid = self.workflow.operator_data['exposure_grid'].value

        logAPICall.log('exposure data created %s' % self.exposure_file, logAPICall.INFO)
Example #3
    def build_exposure_steps(self):
        """ building exposure database from workflow """
        if not self.workflow.ready:
            raise SIDDException(
                'Cannot create exposure with current datasets. Please revise input'
            )

        if not self.ms.is_valid:
            raise SIDDException('Current mapping scheme is not valid')

        for zone in self.ms.zones:
            zone.stats.refresh_leaves(with_modifier=True,
                                      order_attributes=True)

        if getattr(self, 'exposure', None) is not None:
            del self.exposure
            remove_shapefile(self.exposure_file)

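        # note: this method is a generator; it yields each workflow operator so the steps can be run and monitored one at a time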
        for op in self.workflow.nextstep():
            yield op

        # when all steps are completed, set resulting exposure
        self.exposure = self.workflow.operator_data['exposure'].value
        self.exposure_file = self.workflow.operator_data['exposure_file'].value
        if self.workflow.operator_data.has_key('exposure_grid'):
            self.exposure_grid = self.workflow.operator_data[
                'exposure_grid'].value

        logAPICall.log('exposure data created %s' % self.exposure_file,
                       logAPICall.INFO)
Example #4
File: join.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value                
        fp_layer = self.inputs[2].value
        
        # merge with zone to get assignment
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()        
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,[zone_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(zone_field, QVariant.String),
        }
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        fp_layername = 'fpc_%s' % get_unique_filename()
        fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
        try:
            writer = QgsVectorFileWriter(fp_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(tmp_join_layer):                
                centroid = _f.geometry().centroid().asPoint()
                lon = centroid.x()
                lat = centroid.y()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))
                writer.addFeature(f)
            
            del writer
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            remove_shapefile(fp_file)
            raise OperatorError("error creating joined grid: %s" % err, self.__class__)
        
        # load shapefile as layer
        fp_layer = load_shapefile(fp_file, fp_layername)
        if not fp_layer:
            raise OperatorError('Error loading footprint centroid file %s' % fp_file, self.__class__)
                
        # clean up
        del tmp_join_layer        
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = fp_layer
        self.outputs[1].value = fp_file
Example #5
File: survey.py Project: gem/sidd
    def do_operation(self):
        """ perform survey data loading """
        # input/output data checking already done during property set
        survey = self.inputs[0].value
        project = self.inputs[2].value

        tmp_survey_file = "%ssurvey_%s.shp" % (self._tmp_dir, get_unique_filename())
        # load survey
        try:
            self._loadSurvey(survey, tmp_survey_file, project)

        except Exception as err:
            remove_shapefile(tmp_survey_file)
            raise OperatorError("Error Loading Survey\n%s" % err, self.__class__)
        try:
            # store loaded data
            tmp_survey_layername = "survey_%s" % get_unique_filename()
            tmp_survey_layer = load_shapefile_verify(
                tmp_survey_file, tmp_survey_layername, [self._lon_field, self._lat_field, self._tax_field]
            )
        except Exception as err:
            raise OperatorError("Error Loading Survey\n%s" % err, self.__class__)

        self.outputs[0].value = tmp_survey_layer
        self.outputs[1].value = tmp_survey_file
Example #6
File: grid.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set
        grid_layername = 'grid_%s' % get_unique_filename()
        output_file = self._tmp_dir + grid_layername + '.shp'
        
        [x_min, y_min, x_max, y_max] = [x.value for x in self._inputs]
        try:
            self._write_grid_shapefile(output_file, 
                                       x_min, y_min, x_max, y_max,
                                       self._x_off, self._y_off)
        except:
            remove_shapefile(output_file)
            raise OperatorError('error creating grid', self.__class__)

        
        grid_layer = load_shapefile(output_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % output_file, self.__class__)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = output_file
Example #7
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        grid_layername = 'grid_%s' % get_unique_filename()
        output_file = self._tmp_dir + grid_layername + '.shp'

        [x_min, y_min, x_max, y_max] = [x.value for x in self._inputs]
        try:
            self._write_grid_shapefile(output_file, x_min, y_min, x_max, y_max,
                                       self._x_off, self._y_off)
        except:
            remove_shapefile(output_file)
            raise OperatorError('error creating grid', self.__class__)

        grid_layer = load_shapefile(output_file, grid_layername)
        if not grid_layer:
            raise OperatorError(
                'Error loading result grid file %s' % output_file,
                self.__class__)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = output_file
Example #8
    @logAPICall
    def do_operation(self):
        """ perform apply mapping scheme operation """
        
        # input/output data checking already done during property set
        src_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        ms = self.inputs[3].value
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(src_layer)
        self._test_layer_field_exists(src_layer, zone_field)
        self._test_layer_field_exists(src_layer, count_field)
        
        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%sexp_%s.shp' % (self._tmp_dir, exposure_layername)

        # loop through all input features
        provider = src_layer.dataProvider()
        if provider is None:
            raise OperatorError("input layer not correctly loaded", self.__class__)
        zone_idx = layer_field_index(src_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError("field %s not found in input layer" % zone_field, self.__class__)
        count_idx = layer_field_index(src_layer, count_field)
        if count_idx == -1:
            raise OperatorError("field %s not found in input layer" % count_field, self.__class__)
        gid_idx = layer_field_index(src_layer, GID_FIELD_NAME)
        if gid_idx == -1:
            raise OperatorError("field %s not found in input layer" % GID_FIELD_NAME, self.__class__)
        area_idx = layer_field_index(src_layer, AREA_FIELD_NAME)
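        # the AREA field is optional; unlike the fields checked above, no error is raised when it is missing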
        
        provider.select(provider.attributeIndexes(), provider.extent())
        provider.rewind()

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields, provider.geometryType(), self._crs, "ESRI Shapefile")
            out_feature = QgsFeature()
            
            gid = 0
            for in_feature in layer_features(src_layer):
                geom = in_feature.geometry()
                centroid = geom.centroid().asPoint ()
                gid = in_feature.attributeMap()[gid_idx]
                zone_str = str(in_feature.attributeMap()[zone_idx].toString())
                count = in_feature.attributeMap()[count_idx].toDouble()[0]
                if area_idx > 0:
                    area = in_feature.attributeMap()[area_idx].toDouble()[0]
                else:
                    area = 0
                
                count = int(count+0.5)
                if count == 0:
                    continue                            
                
                stats = ms.get_assignment_by_name(zone_str)
                
                # use default stats if missing
                if stats is None:
                    raise Exception("no mapping scheme found for zone %s" % zone_str)
                
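                # each _sample from get_samples() appears to be a (building type, count, size, cost) tuple, judging from the indexing below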
                for _sample in stats.get_samples(count, self._extrapolationOption):
                    # write out if there are structures assigned
                    _type = _sample[0]
                    _cnt = _sample[1]
                    
                    if area > 0:
                        # use area provided by footprint/zone if defined
                        _size = area * ( float(_sample[1]) / count )
                        if _sample[3] > 0 and _sample[2] > 0:
                            _cost = (_sample[3] / _sample[2]) * area
                        else:
                            _cost = 0
                    else:
                        # use mapping scheme generic area otherwise
                        _size = _sample[2]
                        _cost = _sample[3]
                    
                    if _cnt > 0:
                        out_feature.setGeometry(geom)
                        #out_feature.addAttribute(0, QVariant(gid))
                        out_feature.addAttribute(0, gid)
                        out_feature.addAttribute(1, QVariant(centroid.x()))
                        out_feature.addAttribute(2, QVariant(centroid.y()))
                        out_feature.addAttribute(3, QVariant(_type))
                        out_feature.addAttribute(4, QVariant(zone_str))
                        out_feature.addAttribute(5, QVariant(_cnt))
                        out_feature.addAttribute(6, QVariant(_size))
                        out_feature.addAttribute(7, QVariant(_cost))
                        writer.addFeature(out_feature)
            del writer, out_feature
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err, self.__class__)
            
        del src_layer
        
        # load shapefile as layer        
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:            
            raise OperatorError('Error loading exposure file %s' % exposure_file, self.__class__)
        
        # store data in output
        self.outputs[0].value = exposure_layer
Example #9
    @logAPICall
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        svy_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(svy_layer)
        
        total_features = svy_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to store temporary lat/lon
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')            
        else:
            db = {}
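            # small result sets are simply tallied in an in-memory dict instead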

        # tally statistics for each grid_id/building type combination
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        for f in layer_features(svy_layer):
            geom = f.geometry()
            centroid  = geom.centroid().asPoint()
            grid_id = latlon_to_grid(centroid.y(), centroid.x())                        
            tax_str = str(f.attributeMap()[tax_idx].toString())

            key = '%s %s' % (tax_str, grid_id)
            if db.has_key(key):
                db[key] = str(int(db[key]) + 1) # value as string required by bsddb
            else:
                db[key] = '1'                   # value as string required by bsddb

        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", 
                                         self._fields, self._outputGeometryType(), self._crs, 
                                         "ESRI Shapefile")
            f = QgsFeature()            
            gid = 0
            for key, val in db.iteritems():
                (tax_str, grid_id) = key.split(' ')
                lon, lat = grid_to_latlon(int(grid_id))
                
                f.setGeometry(self._outputGeometryFromGridId(grid_id))
                f.addAttribute(0, QVariant(grid_id))
                f.addAttribute(1, QVariant(lon))
                f.addAttribute(2, QVariant(lat))
                f.addAttribute(3, QVariant(tax_str))
                f.addAttribute(4, QVariant(''))
                f.addAttribute(5, QVariant(val))
                writer.addFeature(f)
                gid += 1
            del writer, f
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err, self.__class__)
        
        # load shapefile as layer        
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:            
            raise OperatorError('Error loading exposure file %s' % (exposure_file), self.__class__)
        
        # store data in output
        self.outputs[0].value = exposure_layer
Example #10
File: popgrid.py Project: gem/sidd
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        # load and verify
        popgrid_file = self.inputs[0].value
        pop_field = self.inputs[1].value

        popgrid_layername = 'zone_%s' % get_unique_filename()
        try:
            tmp_popgrid_layer = load_shapefile_verify(popgrid_file,
                                                      popgrid_layername,
                                                      [pop_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        logAPICall.log(
            'tmp_popgrid_layer.crs().epsg() %s ' % tmp_popgrid_layer.crs().epsg(),
            logAPICall.DEBUG)
        if tmp_popgrid_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_popgrid_layer.crs(),
                                               self._crs)
            transform_required = True
        else:
            transform_required = False

        # output grid
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        pop_idx = layer_field_index(tmp_popgrid_layer, pop_field)
        output_file = '%spop_grid_%s.shp' % (self._tmp_dir,
                                             get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_popgrid_layer):
                # NOTE: geom.transform does projection in place to underlying C object

                # 1. get geometry
                geom = _f.geometry()
                # 2. change projection if required
                if transform_required:
                    geom = transform.transform(geom)

                # 3. write to file
                gid += 1
                f.setGeometry(geom)
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, _f.attributeMap()[pop_idx])
                writer.addFeature(f)
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err,
                                self.__class__)

        popgrid_layername = 'popgrid_%s' % get_unique_filename()
        popgrid_layer = load_shapefile(output_file, popgrid_layername)
        if not popgrid_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % output_file,
                self.__class__)

        # clean up
        del tmp_popgrid_layer

        # store data in output
        self.outputs[0].value = popgrid_layer
        self.outputs[1].value = output_file
Example #11
File: grid.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set         
        zone_layer = self.inputs[0].value        
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
                
        x_off = self._x_off
        y_off = self._y_off

        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]

        # create grid based on extent of given region 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'

        try:
            self._write_grid_shapefile(tmp_grid1_file,
                                       x_min, y_min, x_max, y_max,
                                       x_off, y_off)
        except:
            remove_shapefile(tmp_grid1_file)
            raise OperatorError('error creating temporary grid', self.__class__)        
        
        tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)
        
        # temporary grid for joined shape with all grid points not within region removed 
        tmp_grid2 = 'grid_' + get_unique_filename()
        tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
        tmp_grid2_layer = None
        try:
            analyzer = QgsOverlayAnalyzer()        
            analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
            tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
        except:
            raise OperatorError('error creating grid', self.__class__)

        # create result layer
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = self._tmp_dir + grid_layername + '.shp'
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                         QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
            lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)        
            for _f in layer_features(tmp_grid2_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                writer.addFeature(f)                
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid', self.__class__)

        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % grid_file, self.__class__)
        
        # clean up
        del analyzer, tmp_grid1_layer, tmp_grid2_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_grid2_file)
        
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
Example #12
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """    
        
        # validate inputs 
        fp_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        count_field = self.inputs[3].value
        area_field = self.inputs[4].value 

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(fp_layer)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required        
        # if count field is not defined, then generate building count from footprints        
        # area_field is not required
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        cnt_idx = ToGrid.STAT_COUNT_IDX
        
        zone_names, zone_stat, zone_stat2, zone_totals = {}, {}, {}, {}
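        # zone_names: zone gid -> zone name; zone_stat: per-zone count/area from the zone layer
        # zone_stat2: count/area per grid-cell/zone intersection; zone_totals: per-zone tallies from the footprints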
        
        # 1. find building count and total area for each zone
        # project geometry into mercator and get area in m2
        mercator_crs = QgsCoordinateReferenceSystem()        
        mercator_crs.createFromEpsg(3395)        
        mercator_transform = QgsCoordinateTransform(zone_layer.crs(), mercator_crs)
        
        try:
            # use zone geometry area 
            self._create_zone_statistics(zone_layer, zone_field, count_field, 
                     zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
        tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file, \
                                          x_min, y_min, x_max, y_max, \
                                          DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)            

        # tally total building area if the area field is defined
        bldg_area_idx = layer_field_index(zone_layer, area_field)
        zone_area = {}
        zone_has_area = False        
        if bldg_area_idx > 0:
            zone_has_area = True
            zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
            for _f in layer_features(zone_layer):            
                gid = _f.attributeMap()[zone_gid_idx].toString()            
                area = _f.attributeMap()[bldg_area_idx].toDouble()[0]            
                if zone_area.has_key(gid):
                    zone_area[gid] += area
                else: 
                    zone_area[gid] = area
        
        # 3. intersect grids and zones to obtain polygons with 
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area) 
        # apply ratio to zone building count to obtain count assigned to polygon                  
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        for _f in layer_features(tmp_join_layer):
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()
            
            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]            
            # calculate count/area as proportion of total zone area
            area_ratio = (area/stat[area_idx])
            if bldg_cnt_idx > 0:
                bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * area_ratio
            else:
                bldg_cnt = 0
            if zone_has_area: 
                area = zone_area[zone_gid] * area_ratio
            else:
                area = stat[area_idx] * area_ratio                 
            self._update_stat(zone_stat2, '%s|%s'%(grid_gid, zone_gid), bldg_cnt, area)
        
        # 4. find total buildings in each zone based on footprint
        # - simply join the files and tally count and total area 
        tmp_join1 = 'joined_%s' % get_unique_filename()
        tmp_join1_file = '%s%s.shp' % (self._tmp_dir, tmp_join1)        
        try:
            # do intersection
            analyzer.intersection(fp_layer, tmp_join_layer, tmp_join1_file)
            tmp_join1_layer = load_shapefile(tmp_join1_file, tmp_join1)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally
        zone_fp_stat = {}
        zone_gid_idx = layer_field_index(tmp_join1_layer, '%s_'% GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join1_layer, "GRID_GID")        
        fp_area_idx = layer_field_index(tmp_join1_layer, AREA_FIELD_NAME)
        fp_ht_idx = layer_field_index(tmp_join1_layer, HT_FIELD_NAME)
        fp_has_height = False
        for _f in layer_features(tmp_join1_layer):
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            area = _f.attributeMap()[fp_area_idx].toDouble()[0] # area comes from geometry, always exists
            ht = _f.attributeMap()[fp_ht_idx].toDouble()[0]
            if ht > 0:
                fp_has_height = True
                area *= ht      # this is actual area to be aggregated at the end
            self._update_stat(zone_fp_stat, '%s|%s'%(grid_gid, zone_gid), 1, area)
            self._update_stat(zone_totals, zone_gid, 1, area)
        
        # 5. generate grid with adjusted building counts
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for key in zone_stat2.keys():
            (grid_gid, zone_gid) = str(key).split("|")
            s_zone = zone_stat[QString(zone_gid)]           # overall statistics for the zone from zone file (always exists)
            s_zone_grid = zone_stat2[key]                   # grid specific statistic from the zone file (always exists)
            if zone_totals.has_key(QString(zone_gid)):      # overall statistics for the zone from footprints
                s_total = zone_totals[QString(zone_gid)]       
            else:
                s_total = [0,0] # set to zero if missing
            if zone_fp_stat.has_key(key):                   # grid specific statistic from the footprints
                s_fp = zone_fp_stat[key]                        
            else:
                s_fp = [0, 0]   # set to zero if missing

            zone_leftover_count = s_zone[cnt_idx] - s_total[cnt_idx]   
            if zone_has_area:
                zone_leftover_area = zone_area[QString(zone_gid)] - s_total[area_idx]
            else:
                zone_leftover_area = s_zone[area_idx] - s_total[area_idx]
            if zone_leftover_count > 0:
                # there are still building not accounted for
                # distribute to grid based on ratio of grid leftover area over zone leftover area
                # (leftover area is the zone area after subtracting footprint areas)
                grid_leftover_count = zone_leftover_count * ((s_zone_grid[area_idx]-s_fp[area_idx])/zone_leftover_area)
                grid_count = s_fp[cnt_idx] + grid_leftover_count
            else:
                grid_count = s_fp[cnt_idx]
            
            if fp_has_height:
                # area can be actual area based on footprint area * height
                area = s_fp[area_idx]
            elif zone_has_area:
                area = s_zone_grid[area_idx]
            else:
                # no area defined
                area = 0 # max(s_zone_grid[area_idx], s_fp[area_idx])
                
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, grid_count)
            f.addAttribute(3, area)
            writer.addFeature(f)
        del writer
        
        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        del tmp_join1_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join1_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
Example #13
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field
        
        logAPICall.log('survey %s, taxfield %s, zone %s, zone_field %s' % (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
                       logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        
        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()        
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)
        
        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)
        
        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)
        
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())            
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional = {StatisticNode.AverageSize: _area} 
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str) , logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(_tax_str, self._parse_order, self._parse_modifiers, additional)
            except TaxonomyParseError as perr:
                logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING)
        
        # store data in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up        
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = ms
Example #14
File: join.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        zone_count_field = self.inputs[2].value
        fp_layer = self.inputs[3].value

        # merge with zone 
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # count footprint in each zone
        gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        stats = {}
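        # stats: zone gid -> (footprint count, sum of footprint area * height)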
        for _feature in layer_features(tmp_join_layer):
            gid = _feature.attributeMap()[gid_idx].toString()
            if ht_idx > 0:      
                ht = _feature.attributeMap()[ht_idx].toDouble()[0]
            else:
                ht = 0                        
            # if height is not defined, it is set to 0
            # this will cause the system to ignore the generated area without having to
            # remove the field
            area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
            if not stats.has_key(gid):
                stats[gid] = (1, area)
            else:
                stat = stats[gid] 
                stats[gid] = (stat[0]+1, stat[1]+area)
            
        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)
        try:            
            fields ={
                0 : QgsField(GID_FIELD_NAME, QVariant.Int),
                1 : QgsField(zone_field, QVariant.String),
                2 : QgsField(CNT_FIELD_NAME, QVariant.Int),
                3 : QgsField(AREA_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")                     
            f = QgsFeature()            
            for _f in layer_features(zone_layer):
                
                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])
                
                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    stat = stats[gid]
                    bldg_count = stat[0]
                    area = stat[1]
                except:
                    bldg_count, area = 0, 0
                f.addAttribute(2, QVariant(bldg_count))
                f.addAttribute(3, QVariant(area))
                writer.addFeature(f)
            
            del writer, f
        except Exception as err:            
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err, self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError('Error loading footprint centroid file %s' % output_file, self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Example #15
File: join.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        popgrid_layer = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)
        
        # merge with zone 
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # count footprint in each zone
        stats = {}
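        # stats: zone gid -> estimated building count (population count / pop_to_bldg)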
        _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")        
        _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        for _f in layer_features(tmp_join_layer):
            # retrieve count from statistic
            _gid = _f.attributeMap()[_gid_idx].toString()
            _count = _f.attributeMap()[_cnt_idx].toString()
            if stats.has_key(_gid):
                stats[_gid]+=float(_count) / pop_to_bldg
            else:
                stats[_gid]=float(_count)  / pop_to_bldg          
        
        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)
        try:            
            fields ={
                0 : QgsField(GID_FIELD_NAME, QVariant.Int),
                1 : QgsField(zone_field, QVariant.String),
                2 : QgsField(CNT_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")                     
            f = QgsFeature()            
            for _f in layer_features(zone_layer):
                
                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])                
                
                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    bldg_count = stats[gid]
                except:
                    bldg_count = 0
                f.addAttribute(2, QVariant(bldg_count))
                writer.addFeature(f)
            
            del writer, f
        except Exception as err:            
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err, self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError('Error loading footprint centroid file %s' % output_file, self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Example #16
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field

        logAPICall.log(
            'survey %s, taxfield %s, zone %s, zone_field %s' %
            (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
            logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)

        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)

        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional = {StatisticNode.AverageSize: _area}
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str),
                           logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(
                    _tax_str, self._parse_order, self._parse_modifiers,
                    additional)
            except TaxonomyParseError as perr:
                logAPICall.log(
                    "error parsing case %s, %s" % (str(_tax_str), str(perr)),
                    logAPICall.WARNING)

        # store data in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = ms
Example #17
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        fp_layer = self.inputs[2].value

        # merge with zone to get assignment
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                                   [zone_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        fields = {
            0: QgsField(self._lon_field, QVariant.Double),
            1: QgsField(self._lat_field, QVariant.Double),
            2: QgsField(zone_field, QVariant.String),
        }
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        fp_layername = 'fpc_%s' % get_unique_filename()
        fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
        try:
            writer = QgsVectorFileWriter(fp_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(tmp_join_layer):
                centroid = _f.geometry().centroid().asPoint()
                lon = centroid.x()
                lat = centroid.y()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))
                writer.addFeature(f)

            del writer
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            remove_shapefile(fp_file)
            raise OperatorError("error creating joined grid: %s" % err,
                                self.__class__)

        # load shapefile as layer
        fp_layer = load_shapefile(fp_file, fp_layername)
        if not fp_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % fp_file,
                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = fp_layer
        self.outputs[1].value = fp_file
Example #18
File: join.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value        
        count_field = self.inputs[2].value
        grid_layer = self.inputs[3].value        

        zone_stats = {}
        zone_count_stats = {}
        gid_idx = layer_field_index(zone_layer, self._gid_field)         
        count_idx = layer_field_index(zone_layer, count_field)
        for _f in layer_features(zone_layer):
            gid = _f.attributeMap()[gid_idx].toString()
            zone_stats[gid] = 0
            zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]
        
        # create storage for temporary output data
        use_grid_db = grid_layer.dataProvider().featureCount() > MAX_FEATURES_IN_MEMORY
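        # NOTE: the disk-backed (bsddb) store is disabled below; an in-memory dict is always used regardless of use_grid_db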
        if False:
            tmp_grid_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            grid_points = bsddb.btopen(tmp_grid_db_file, 'c')
        else:
            grid_points = {}
        
        # merge to create stats
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()        
        try:
            analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,[zone_field, count_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        stats = layer_multifields_stats(tmp_join_layer, [zone_field, count_field])
        if stats == False:
            raise OperatorError("error creating statistic based on input files",
                                self.__class__)

        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        count_idx = layer_field_index(tmp_join_layer, count_field)
        lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
        lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
        gid_idx = layer_field_index(tmp_join_layer, self._gid_field)        
        
        try:        
            for _f in layer_features(tmp_join_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                # update stats
                zone_stats[gid] += 1
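                # grid_points: encoded (zone, zone gid, lon, lat) key -> tally; decoded later with self._parse_key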
                grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
        except Exception as err:
            raise OperatorError("error processing joined layer: " % err, self.__class__)

        # test for zones without a grid point assigned
        count_idx = layer_field_index(zone_layer, count_field)
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        zone_idx = layer_field_index(zone_layer, zone_field)
        _x_off, _y_off = self._x_off / 2.0,  self._y_off / 2.0
        try:
            for _f in layer_features(zone_layer):
                centroid = _f.geometry().centroid().asPoint()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()
                
                if zone_stats[gid] == 0:
                    # get lower left corner
                    lon = int(centroid.x()/DEFAULT_GRID_SIZE)*self._x_off + _x_off
                    lat = int(centroid.y()/self._y_off)*self._y_off + _y_off

                    #self._write_feature(writer, f, lon, lat, zone_str, count_val)
                    zone_stats[gid] += 1                                        
                    grid_points[self._make_key(zone_str, gid, lon, lat)] = 1                             
        except Exception as err:
            raise OperatorError("error processing missing points: " % err, self.__class__)

        # output result
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(zone_field, QVariant.String),
            3 : QgsField(count_field, QVariant.Double)
        }
        grid_layername = 'grid_%s' % (get_unique_filename())
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            f = QgsFeature()
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            for key, value in grid_points.iteritems():                
                [zone, zone_gid, lon, lat] = self._parse_key(key)                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                """                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))            
                f.addAttribute(3, QVariant(count_val / total_features))
                writer.addFeature(f)
                """
                value = float(value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
                #grid_points[key] = value 
                self._write_feature(writer, f, lon, lat, zone, value)
            del writer
        except Exception as err:
            raise OperatorError("error creating joined grid file: " % err, self.__class__)
            
        # load result layer
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading joined grid file %s' % grid_file, self.__class__)
        
        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
Example #19
File: footprint.py Project: gem/sidd
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        # load and verify
        infile = self.inputs[0].value

        tmp_fp_layername = 'fp_%s' % get_unique_filename()
        tmp_fp_layer = load_shapefile(infile, tmp_fp_layername)
        if not tmp_fp_layer:
            raise OperatorError('Error loading footprint file %s' % infile,
                                self.__class__)

        if self._fp_ht_field is not None:
            ht_idx = layer_field_index(tmp_fp_layer, self._fp_ht_field)
        else:
            ht_idx = -1
        logAPICall.log(
            'tmp_fp_layer.crs().epsg() %s ' % tmp_fp_layer.crs().epsg(),
            logAPICall.DEBUG)
        if tmp_fp_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_fp_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False

        mercator_crs = QgsCoordinateReferenceSystem()
        #mercator_crs.createFromProj4("+proj=merc +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
        mercator_crs.createFromEpsg(3395)
        mercator_transform = QgsCoordinateTransform(tmp_fp_layer.crs(),
                                                    mercator_crs)

        # output grid
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(LON_FIELD_NAME, QVariant.Double),
            2: QgsField(LAT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
            4: QgsField(HT_FIELD_NAME, QVariant.Int),
        }
        output_file = '%sfpc_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_fp_layer):
                # NOTE: geom.transform projects in place on the underlying
                #       C object; for some reason multiple projections do not
                #       work correctly. The following is a workaround.

                # 1. get geometry
                geom = _f.geometry()
                # 2. get original centroid point and project if required
                centroid = geom.centroid().asPoint()
                if transform_required:
                    t_centroid = transform.transform(centroid)
                else:
                    t_centroid = centroid

                # 3. project into mercator and get area in m2
                geom.transform(mercator_transform)
                area = geom.area()

                # write to file
                gid += 1
                f.setGeometry(QgsGeometry.fromPoint(t_centroid))
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, QVariant(t_centroid.x()))
                f.addAttribute(2, QVariant(t_centroid.y()))
                f.addAttribute(3, QVariant(area))
                if ht_idx != -1:
                    f.addAttribute(4, _f.attributeMap()[ht_idx])
                else:
                    f.addAttribute(4, QVariant(0))
                writer.addFeature(f)
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err,
                                self.__class__)

        fp_layer = load_shapefile(output_file, tmp_fp_layername)
        if not fp_layer:
            raise OperatorError(
                'Error loading footprint centroid file %s' % output_file,
                self.__class__)

        # clean up
        del tmp_fp_layer

        # store data in output
        self.outputs[0].value = fp_layer
        self.outputs[1].value = output_file
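
The operator above keeps the centroid in the output CRS but lets geom.area() run on a copy reprojected to World Mercator (EPSG:3395), so the result is in square metres. The standalone sketch below only illustrates why lon/lat footprints need a metric projection (or an equivalent scaling) before measuring area; it uses a rough local equirectangular approximation, not the operator's Mercator reprojection:

import math

def approx_area_m2(lonlat_ring):
    """Shoelace area of a lon/lat ring, scaled to m^2 with a local
    equirectangular approximation around the ring's mean latitude."""
    mean_lat = sum(lat for _, lat in lonlat_ring) / len(lonlat_ring)
    m_per_deg = 111320.0                                  # metres per degree (approx.)
    k_x = m_per_deg * math.cos(math.radians(mean_lat))    # E-W shrinks with latitude
    k_y = m_per_deg
    pts = [(lon * k_x, lat * k_y) for lon, lat in lonlat_ring]
    area = 0.0
    for (x1, y1), (x2, y2) in zip(pts, pts[1:] + pts[:1]):
        area += x1 * y2 - x2 * y1
    return abs(area) / 2.0

# ~100 m x ~100 m square near the equator -> roughly 10,000 m^2
square = [(0.0, 0.0), (0.0009, 0.0), (0.0009, 0.0009), (0.0, 0.0009)]
print(approx_area_m2(square))
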
Example #20
0
    def do_operation(self):
        # input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value
        
        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(area_field, fp_layer.name()), self.__class__)        
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(ht_field, fp_layer.name()), self.__class__)        
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:        
            raise OperatorError("Field %s does not exist in %s" %(zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(HT_FIELD_NAME, svy_layer.name()), self.__class__)        
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(TAX_FIELD_NAME, svy_layer.name()))
        
        # load zone classes
        # the operations below must be performed for each zone 
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones        
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'        
        analyzer = QgsOverlayAnalyzer()        
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)        
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)
        
        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        
        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(TAX_FIELD_NAME, svy_layer.name()))
        
        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {} 
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {} 
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0

        # associate group to ratio value
        for _rec in layer_features(tmp_join_layer):
            _zone_str = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone_str], _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone_str], _samp_grp, _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err, logAPICall.WARNING)
            
        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups", self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value==-1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError("Survey groups must have same number of samples", self.__class__)
            # sort by stories        
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight 
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][group_stories_for_sort[key]] = self.weights[idx]
                
        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())            
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            group_weight = _zone_group_weight[_zone_str]
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str, group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str, _sample_size*group_weight[_sample_grp])
                self.increment_dict(_zone_a_exp[_zone_str], _tax_str, _sample_size*_sample_ht*group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log("error processing sample with building type: %s" % _tax_str, logAPICall.WARNING)
                pass              

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'        
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)        
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)
        
        zone_idx = layer_field_index(tmp_join_layer2, zone_field)        
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)        
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht
        
        # calculate building ratios for each zone        
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a) 
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())            
            # E[A] estimated total building area for zone
            e_at_total = _zone_total_area[_zone] * _zone_total_ht[_zone]/_zone_total_count[_zone]
            
            # calculate expected values  
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]         
                if e_at_cluster == 0 or e_at_total == 0: 
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area 
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building 
                    e_gt_cluster = e_at_cluster / e_nt_cluster

                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster
                                        
                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster
        
        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)

            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str, self._parse_order, self._parse_modifiers, add_times=int(_e_exp*1000))                                            
            # finalize call is required 
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)            
        
        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)
        
        # assign output        
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp    
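
The expected-value block above can be hard to follow in isolation. A standalone numeric walk-through of the same formulas, using made-up values (not taken from any dataset):

# One zone, one building type t, illustrative numbers only.
e_at_cluster       = 5000.0     # weighted sampled floor area for type t (a)
e_at_cluster_total = 20000.0    # weighted sampled floor area, all types
e_nt_cluster       = 25.0       # weighted sample count for type t (n)
zone_fp_area       = 2.0e6      # total footprint area in the zone
avg_stories        = 3.0        # zone total height / zone building count

e_at_total = zone_fp_area * avg_stories   # E[A]: estimated built floor area in zone
e_ft = e_at_cluster / e_at_cluster_total  # E[f(t)]: fraction of sampled area for type t
e_gt = e_at_cluster / e_nt_cluster        # E[G(t)]: average floor area per building of type t
e_at = e_at_total * e_ft                  # E[A(t)]: floor area of type t zone-wide
e_nt = e_at / e_gt                        # E[N(t)]: estimated number of buildings of type t
print(e_nt)                               # 7500.0
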
Example #21
0
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        zone_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)

        x_off = self._x_off
        y_off = self._y_off

        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [
            extent.xMinimum(),
            extent.yMinimum(),
            extent.xMaximum(),
            extent.yMaximum()
        ]

        # create grid based on extent of given region
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'

        try:
            self._write_grid_shapefile(tmp_grid1_file, x_min, y_min, x_max,
                                       y_max, x_off, y_off)
        except:
            remove_shapefile(tmp_grid1_file)
            raise OperatorError('error creating temporary grid',
                                self.__class__)

        tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)

        # temporary grid for joined shape with all grid points not within region removed
        tmp_grid2 = 'grid_' + get_unique_filename()
        tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
        tmp_grid2_layer = None
        try:
            analyzer = QgsOverlayAnalyzer()
            analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
            tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
        except:
            raise OperatorError('error creating grid', self.__class__)

        # create result layer
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = self._tmp_dir + grid_layername + '.shp'
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
            lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)
            for _f in layer_features(tmp_grid2_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                writer.addFeature(f)
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid', self.__class__)

        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % grid_file,
                                self.__class__)

        # clean up
        del analyzer, tmp_grid1_layer, tmp_grid2_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_grid2_file)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
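
_write_grid_shapefile itself is not shown in this listing; presumably it writes a regular lattice of points spaced x_off by y_off over the zone extent. A minimal sketch of that lattice under this assumption (the helper below is hypothetical, not the project's implementation):

def make_grid_points(x_min, y_min, x_max, y_max, x_off, y_off):
    """Return (lon, lat) pairs covering the bounding box at the given spacing."""
    points = []
    y = y_min
    while y <= y_max:
        x = x_min
        while x <= x_max:
            points.append((x, y))
            x += x_off
        y += y_off
    return points

# 1 x 1 degree extent with 0.5 degree spacing -> 3 x 3 = 9 points
print(len(make_grid_points(0.0, 0.0, 1.0, 1.0, 0.5, 0.5)))
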
Example #22
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # validate inputs
        fp_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        count_field = self.inputs[3].value
        area_field = self.inputs[4].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(fp_layer)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required
        # if count field is not defined, then generate building count from footprints
        # area_field is not required

        # local variables
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        cnt_idx = ToGrid.STAT_COUNT_IDX

        zone_names, zone_stat, zone_stat2, zone_totals = {}, {}, {}, {}

        # 1. find building count and total area for each zone
        # project geometry into mercator and get area in m2
        mercator_crs = QgsCoordinateReferenceSystem()
        mercator_crs.createFromEpsg(3395)
        mercator_transform = QgsCoordinateTransform(zone_layer.crs(), mercator_crs)

        try:
            # use zone geometry area
            self._create_zone_statistics(zone_layer, zone_field, count_field, zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone
        tmp_grid1 = "grid_" + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + ".shp"
        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
        tmp_grid_lyr1 = self._create_grid(
            tmp_grid1, tmp_grid1_file, x_min, y_min, x_max, y_max, DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE
        )

        # tally total building area if one is defined
        bldg_area_idx = layer_field_index(zone_layer, area_field)
        zone_area = {}
        zone_has_area = False
        if bldg_area_idx > 0:
            zone_has_area = True
            zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
            for _f in layer_features(zone_layer):
                gid = _f.attributeMap()[zone_gid_idx].toString()
                area = _f.attributeMap()[bldg_area_idx].toDouble()[0]
                if zone_area.has_key(gid):
                    zone_area[gid] = float(zone_area[gid]) + area
                else:
                    zone_area[gid] = area

        # 3. intersect grids and zones to obtain polygons with
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area)
        # apply ratio to zone building count to obtain count assigned to polygon
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        for _f in layer_features(tmp_join_layer):
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]
            # calculate count/area as proportion of total zone area
            area_ratio = area / stat[area_idx]
            if bldg_cnt_idx > 0:
                bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * area_ratio
            else:
                bldg_cnt = 0
            if zone_has_area:
                area = zone_area[zone_gid] * area_ratio
            else:
                area = stat[area_idx] * area_ratio
            self._update_stat(zone_stat2, "%s|%s" % (grid_gid, zone_gid), bldg_cnt, area)

        # 4. find total buildings in each zone based on footprint
        # - simply join the files and tally count and total area
        tmp_join1 = "joined_%s" % get_unique_filename()
        tmp_join1_file = "%s%s.shp" % (self._tmp_dir, tmp_join1)
        try:
            # do intersection
            analyzer.intersection(fp_layer, tmp_join_layer, tmp_join1_file)
            tmp_join1_layer = load_shapefile(tmp_join1_file, tmp_join1)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_fp_stat = {}
        zone_gid_idx = layer_field_index(tmp_join1_layer, "%s_" % GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join1_layer, "GRID_GID")
        fp_area_idx = layer_field_index(tmp_join1_layer, AREA_FIELD_NAME)
        fp_ht_idx = layer_field_index(tmp_join1_layer, HT_FIELD_NAME)
        fp_has_height = False
        for _f in layer_features(tmp_join1_layer):
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            area = _f.attributeMap()[fp_area_idx].toDouble()[0]  # area comes from geometry, always exists
            ht = _f.attributeMap()[fp_ht_idx].toDouble()[0]
            if ht > 0:
                fp_has_height = True
                area *= ht  # this is actual area to be aggregated at the end
            self._update_stat(zone_fp_stat, "%s|%s" % (grid_gid, zone_gid), 1, area)
            self._update_stat(zone_totals, zone_gid, 1, area)

        # 5. generate grid with adjusted building counts
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for key in zone_stat2.keys():
            (grid_gid, zone_gid) = str(key).split("|")
            s_zone = zone_stat[QString(zone_gid)]  # overall statistics for the zone from zone file (always exists)
            s_zone_grid = zone_stat2[key]  # grid specific statistic from zone file (always exists)
            if zone_totals.has_key(QString(zone_gid)):  # overall statistics for the zone from footprints
                s_total = zone_totals[QString(zone_gid)]
            else:
                s_total = [0, 0]  # set to zero if missing
            if zone_fp_stat.has_key(key):  # grid specific statistic from footprint
                s_fp = zone_fp_stat[key]
            else:
                s_fp = [0, 0]  # set to zero if missing

            zone_leftover_count = s_zone[cnt_idx] - s_total[cnt_idx]
            if zone_has_area:
                zone_leftover_area = zone_area[QString(zone_gid)] - s_total[area_idx]
            else:
                zone_leftover_area = s_zone[area_idx] - s_total[area_idx]
            if zone_leftover_count > 0:
                # there are still building not accounted for
                # distribute to grid based on ratio of grid leftover area over zone leftover area
                # (leftover area is the area of the zone after subtracting footprint areas)
                grid_leftover_count = zone_leftover_count * (
                    (s_zone_grid[area_idx] - s_fp[area_idx]) / zone_leftover_area
                )
                grid_count = s_fp[cnt_idx] + grid_leftover_count
            else:
                grid_count = s_fp[cnt_idx]

            if fp_has_height:
                # area can be actual area based on footprint area * height
                area = s_fp[area_idx]
            elif zone_has_area:
                area = s_zone_grid[area_idx]
            else:
                # no area defined
                area = 0  # max(s_zone_grid[area_idx], s_fp[area_idx])

            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, grid_count)
            f.addAttribute(3, area)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        del tmp_join1_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join1_file)

        # store data in output
        self._load_output(output_file, output_layername)
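
Step 5's leftover allocation is the subtle part: footprint counts are kept as-is, and any zone count not accounted for by footprints is spread over the grid cells in proportion to their share of the zone's non-footprint area. A standalone numeric illustration with made-up values:

# Illustrative values for one zone and one grid/zone piece.
zone_count, zone_area = 1000.0, 4.0e6          # zone totals from the zone layer
fp_count_zone, fp_area_zone = 600.0, 1.0e6     # footprint totals inside the zone
cell_area, fp_count_cell, fp_area_cell = 1.0e6, 100.0, 2.0e5

zone_leftover_count = zone_count - fp_count_zone    # 400 buildings unaccounted for
zone_leftover_area  = zone_area - fp_area_zone      # 3.0e6 m^2 without footprints
cell_leftover_area  = cell_area - fp_area_cell      # 8.0e5 m^2 in this cell
grid_leftover_count = zone_leftover_count * (cell_leftover_area / zone_leftover_area)
grid_count = fp_count_cell + grid_leftover_count
print(grid_count)    # 100 + 400 * (8e5 / 3e6) ~= 206.7
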
Example #23
0
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        fp_layer = self.inputs[0].value
        zone_field = self.inputs[1].value

        # aggregate footprint into grids
        logAPICall.log('aggregate statistic for grid ...', logAPICall.DEBUG)
        total_features = fp_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to store temporary lat/lon
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')
            use_db = True
        else:
            db = {}
            use_db = False

        zone_idx = layer_field_index(fp_layer, zone_field)
        for f in layer_features(fp_layer):
            geom = f.geometry()
            zone_str = str(f.attributeMap()[zone_idx].toString())
            centroid  = geom.centroid().asPoint()
            # use floor, this truncates all points within grid to grid's
            # bottom-left corner                        
            x = math.floor(centroid.x() / DEFAULT_GRID_SIZE)
            y = math.floor(centroid.y() / DEFAULT_GRID_SIZE)
            key = '%s %d %d' % (zone_str, x,y)
            if db.has_key(key):
                db[key] = str(int(db[key]) + 1)
            else:
                db[key] = '1'
        
        # output grid
        logAPICall.log('create grid ...', logAPICall.DEBUG)
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(zone_field, QVariant.String),
        }
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPoint , self._crs, "ESRI Shapefile")
            f = QgsFeature()
            for key, val in db.iteritems():
                (zone_str, x, y) = key.split(' ')
                # points were aggregated to the grid's bottom-left corner
                # add half grid size to place point at center of grid
                point = QgsPoint(int(x)*DEFAULT_GRID_SIZE+(DEFAULT_GRID_SIZE/2.0), 
                                 int(y)*DEFAULT_GRID_SIZE+(DEFAULT_GRID_SIZE/2.0))
                f.setGeometry(QgsGeometry.fromPoint(point))
                f.addAttribute(0, QVariant(point.x()))
                f.addAttribute(1, QVariant(point.y()))
                f.addAttribute(2, QVariant(val))
                f.addAttribute(3, QVariant(zone_str))
                writer.addFeature(f)
            del writer
        except Exception as err:
            remove_shapefile(grid_file)
            raise OperatorError("error creating joined grid: " % err, self.__class__)
        
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading created grid file %s' % grid_file, self.__class__)
                
        # clean up                
        if use_db:
            db.close()
            os.remove(tmp_db_file)
            
        # done
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
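
The aggregation above truncates each footprint centroid to the bottom-left corner of its grid cell for counting, then shifts the written point by half a cell to the cell centre. A standalone sketch of the same binning (GRID_SIZE below is illustrative):

import math

GRID_SIZE = 0.01

def cell_key(zone, lon, lat):
    # truncate to the bottom-left corner of the containing cell
    x = math.floor(lon / GRID_SIZE)
    y = math.floor(lat / GRID_SIZE)
    return '%s %d %d' % (zone, x, y)

def cell_center(x, y):
    # shift by half a cell so the output point sits at the cell centre
    return (x * GRID_SIZE + GRID_SIZE / 2.0,
            y * GRID_SIZE + GRID_SIZE / 2.0)

counts = {}
for lon, lat in [(10.001, 20.002), (10.004, 20.009), (10.011, 20.002)]:
    key = cell_key('ZONE1', lon, lat)
    counts[key] = counts.get(key, 0) + 1
print(counts)                    # first two points share the cell 'ZONE1 1000 2000'
print(cell_center(1000, 2000))   # ~ (10.005, 20.005)
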
Example #24
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        zone_count_field = self.inputs[2].value
        fp_layer = self.inputs[3].value

        # merge with zone
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # count footprint in each zone
        gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        stats = {}
        for _feature in layer_features(tmp_join_layer):
            gid = _feature.attributeMap()[gid_idx].toString()
            if ht_idx > 0:
                ht = _feature.attributeMap()[ht_idx].toDouble()[0]
            else:
                ht = 0
            # if height is not defined, it is set to 0; this causes the
            # generated area to be ignored without having to remove the field
            area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
            if not stats.has_key(gid):
                stats[gid] = (1, area)
            else:
                stat = stats[gid]
                stats[gid] = (stat[0] + 1, stat[1] + area)

        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            fields = {
                0: QgsField(GID_FIELD_NAME, QVariant.Int),
                1: QgsField(zone_field, QVariant.String),
                2: QgsField(CNT_FIELD_NAME, QVariant.Int),
                3: QgsField(AREA_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPolygon, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(zone_layer):

                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])

                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    stat = stats[gid]
                    bldg_count = stat[0]
                    area = stat[1]
                except:
                    bldg_count, area = 0, 0
                f.addAttribute(2, QVariant(bldg_count))
                f.addAttribute(3, QVariant(area))
                writer.addFeature(f)

            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError(
                'Error loading zone file %s' % output_file,
                self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Example #25
0
File: popgrid.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set        
        # load and verify
        popgrid_file = self.inputs[0].value
        pop_field = self.inputs[1].value
        
        popgrid_layername = 'zone_%s' % get_unique_filename()
        try:
            tmp_popgrid_layer = load_shapefile_verify(popgrid_file, popgrid_layername,
                                                   [pop_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        
        logAPICall.log('tmp_popgrid_layer.crs().epsg() %s ' % tmp_popgrid_layer.crs().epsg(),
                       logAPICall.DEBUG)
        if tmp_popgrid_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_popgrid_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False
        
        # output grid
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.Int),
            1 : QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        pop_idx = layer_field_index(tmp_popgrid_layer, pop_field)
        output_file = '%spop_grid_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)        
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_popgrid_layer):
                # NOTE: geom.transform does projection in place to underlying C object
                 
                # 1. get geometry
                geom = _f.geometry()                
                # 2. reproject if required
                if transform_required:
                    geom.transform(transform)
                
                # 3. write to file
                gid += 1
                f.setGeometry(geom)
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, _f.attributeMap()[pop_idx])
                writer.addFeature(f)            
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err, self.__class__)

        popgrid_layername = 'popgrid_%s' % get_unique_filename()
        popgrid_layer = load_shapefile(output_file, popgrid_layername)
        if not popgrid_layer:
            raise OperatorError('Error loading population grid file %s' % output_file, self.__class__)
        
        # clean up
        del tmp_popgrid_layer
        
        # store data in output
        self.outputs[0].value = popgrid_layer
        self.outputs[1].value = output_file
Example #26
0
    def do_operation(self):
        # input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value

        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (area_field, fp_layer.name()), self.__class__)
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" % (ht_field, fp_layer.name()),
                self.__class__)
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (HT_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (TAX_FIELD_NAME, svy_layer.name()))

        # load zone classes
        # the operations below must be performed for each zone
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)

        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %
                                (TAX_FIELD_NAME, svy_layer.name()))

        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {}
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {}
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0

        # associate group to ratio value
        for _rec in layer_features(tmp_join_layer):
            _zone_str = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone_str], _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone_str], _samp_grp, _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err,
                               logAPICall.WARNING)

        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups",
                                    self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value == -1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError(
                        "Survey groups must have same number of samples",
                        self.__class__)
            # sort by stories
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][
                    group_stories_for_sort[key]] = self.weights[idx]

        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            group_weight = _zone_group_weight[_zone_str]
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str,
                                    group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str,
                                    _sample_size * group_weight[_sample_grp])
                self.increment_dict(
                    _zone_a_exp[_zone_str], _tax_str,
                    _sample_size * _sample_ht * group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log(
                    "error processing sample with building type: %s" %
                    _tax_str, logAPICall.WARNING)
                pass

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)

        zone_idx = layer_field_index(tmp_join_layer2, zone_field)
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht

        # calculate building ratios for each zone
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a)
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())
            # E[A] estimated total building area for zone
            e_at_total = _zone_total_area[_zone] * _zone_total_ht[
                _zone] / _zone_total_count[_zone]

            # calculate expected values
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]
                if e_at_cluster == 0 or e_at_total == 0:
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building
                    e_gt_cluster = e_at_cluster / e_nt_cluster

                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster

                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster

        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)

            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str,
                               self._parse_order,
                               self._parse_modifiers,
                               add_times=int(_e_exp * 1000))
            # finalize call is required
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        # assign output
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp
Example #27
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        popgrid_layer = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # merge with zone
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # estimate building count in each zone from population
        stats = {}
        _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
        _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        for _f in layer_features(tmp_join_layer):
            # retrieve count from statistic
            _gid = _f.attributeMap()[_gid_idx].toString()
            _count = _f.attributeMap()[_cnt_idx].toString()
            if stats.has_key(_gid):
                stats[_gid] += float(_count) / pop_to_bldg
            else:
                stats[_gid] = float(_count) / pop_to_bldg

        output_layername = 'zone_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
        logAPICall.log('create outputfile %s ... ' % output_file,
                       logAPICall.DEBUG)
        try:
            fields = {
                0: QgsField(GID_FIELD_NAME, QVariant.Int),
                1: QgsField(zone_field, QVariant.String),
                2: QgsField(CNT_FIELD_NAME, QVariant.Int),
            }
            writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                         QGis.WKBPolygon, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(zone_layer):

                # write to file
                f.setGeometry(_f.geometry())
                f.addAttribute(0, _f.attributeMap()[0])
                f.addAttribute(1, _f.attributeMap()[1])

                # retrieve count from statistic
                try:
                    gid = _f.attributeMap()[0].toString()
                    bldg_count = stats[gid]
                except:
                    bldg_count = 0
                f.addAttribute(2, QVariant(bldg_count))
                writer.addFeature(f)

            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating zone: %s" % err,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:
            raise OperatorError(
                'Error loading zone file %s' % output_file,
                self.__class__)
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
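
The conversion above is a straight division: every intersected population cell contributes population / pop_to_bldg buildings to its zone. A standalone sketch with made-up values:

# Illustrative values only: assumed persons per building and per-cell population.
pop_to_bldg = 5.0
cells = {'zone_1': [200.0, 350.0], 'zone_2': [125.0]}   # population per intersected cell

bldg_counts = {}
for gid, pops in cells.items():
    bldg_counts[gid] = sum(p / pop_to_bldg for p in pops)
print(bldg_counts)   # {'zone_1': 110.0, 'zone_2': 25.0}
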
Example #28
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        # validate inputs
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        area_field = self.inputs[3].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)
        self._test_layer_field_exists(zone_layer, count_field)

        # local variables
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        # cnt_idx = ToGrid.STAT_COUNT_IDX

        # 1. find building count and total area for each zone
        zone_names, zone_stat = {}, {}
        try:
            self._create_zone_statistics(zone_layer, zone_field, count_field, zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 2. create grids around extent of zone
        tmp_grid1 = "grid_" + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + ".shp"
        try:
            extent = zone_layer.extent()
            [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
            tmp_grid_lyr1 = self._create_grid(
                tmp_grid1, tmp_grid1_file, x_min, y_min, x_max, y_max, DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE
            )
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # 3. intersect grids and zones to obtain polygons with
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area)
        # apply ratio to zone building count to obtain count assigned to polygon
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # do tally
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        bldg_area_idx = layer_field_index(tmp_join_layer, area_field)
        mercator_transform = QgsCoordinateTransform(tmp_join_layer.crs(), self.mercator_crs)

        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
            3: QgsField(AREA_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(tmp_join_layer):
            # get area of polygon
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]

            # calculate count/area as proportion of total zone area
            bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * (area / stat[area_idx])
            if bldg_area_idx > 0:
                bldg_area = _f.attributeMap()[bldg_area_idx].toDouble()[0] * (area / stat[area_idx])
            else:
                bldg_area = 0

            # create output record
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, bldg_cnt)
            f.addAttribute(3, bldg_area)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)

        # store data in output
        self._load_output(output_file, output_layername)
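
Without footprints, this operator apportions each zone's building count to the grid pieces cut from it purely by area ratio, so the pieces sum back to the zone total. A standalone numeric illustration with made-up values:

# One zone split into three grid pieces; illustrative numbers only.
zone_count, zone_area = 800.0, 4.0e6
piece_areas = [2.0e6, 1.5e6, 0.5e6]      # areas of the grid pieces cut from the zone

piece_counts = [zone_count * (a / zone_area) for a in piece_areas]
print(piece_counts)                      # [400.0, 300.0, 100.0] -- sums back to 800
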
Example #29
0
File: join.py Project: gem/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        grid_layer = self.inputs[3].value

        zone_stats = {}
        zone_count_stats = {}
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        count_idx = layer_field_index(zone_layer, count_field)
        for _f in layer_features(zone_layer):
            gid = _f.attributeMap()[gid_idx].toString()
            zone_stats[gid] = 0
            zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]

        # create storage for temporary output data
        use_grid_db = grid_layer.dataProvider().featureCount() > MAX_FEATURES_IN_MEMORY
        if use_grid_db:
            tmp_grid_db_file = '%sdb_%s.db' % (self._tmp_dir,
                                               get_unique_filename())
            grid_points = bsddb.btopen(tmp_grid_db_file, 'c')
        else:
            grid_points = {}

        # merge to create stats
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        analyzer = QgsOverlayAnalyzer()
        try:
            analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                                   [zone_field, count_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        stats = layer_multifields_stats(tmp_join_layer,
                                        [zone_field, count_field])
        if stats is False:
            raise OperatorError(
                "error creating statistic based on input files",
                self.__class__)

        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        count_idx = layer_field_index(tmp_join_layer, count_field)
        lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
        lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
        gid_idx = layer_field_index(tmp_join_layer, self._gid_field)

        try:
            for _f in layer_features(tmp_join_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                # update stats
                zone_stats[gid] += 1
                grid_points[self._make_key(zone_str, gid, lon, lat)] = '1'  # string value required by bsddb
        except Exception as err:
            raise OperatorError("error processing joined layer: " % err,
                                self.__class__)

        # test for zones without a grid point assigned
        count_idx = layer_field_index(zone_layer, count_field)
        gid_idx = layer_field_index(zone_layer, self._gid_field)
        zone_idx = layer_field_index(zone_layer, zone_field)
        _x_off, _y_off = self._x_off / 2.0, self._y_off / 2.0
        try:
            for _f in layer_features(zone_layer):
                centroid = _f.geometry().centroid().asPoint()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
                count_val = _f.attributeMap()[count_idx].toDouble()[0]
                gid = _f.attributeMap()[gid_idx].toString()

                if zone_stats[gid] == 0:
                    # get lower left corner
                    lon = int(centroid.x() / self._x_off) * self._x_off + _x_off
                    lat = int(centroid.y() / self._y_off) * self._y_off + _y_off

                    #self._write_feature(writer, f, lon, lat, zone_str, count_val)
                    zone_stats[gid] += 1
                    grid_points[self._make_key(zone_str, gid, lon, lat)] = '1'  # string value required by bsddb
        except Exception as err:
            raise OperatorError("error processing missing points: " % err,
                                self.__class__)

        # output result
        fields = {
            0: QgsField(self._lon_field, QVariant.Double),
            1: QgsField(self._lat_field, QVariant.Double),
            2: QgsField(zone_field, QVariant.String),
            3: QgsField(count_field, QVariant.Double)
        }
        grid_layername = 'grid_%s' % (get_unique_filename())
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            f = QgsFeature()
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            for key, value in grid_points.iteritems():
                [zone, zone_gid, lon, lat] = self._parse_key(key)
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                """                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))            
                f.addAttribute(3, QVariant(count_val / total_features))
                writer.addFeature(f)
                """
                value = float(
                    value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
                #grid_points[key] = value
                self._write_feature(writer, f, lon, lat, zone, value)
            del writer
        except Exception as err:
            raise OperatorError("error creating joined grid file: " % err,
                                self.__class__)

        # load result layer
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading joined grid file %s' % grid_file,
                                self.__class__)

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
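_make_key and _parse_key are not shown in this listing; since bsddb keys must be plain strings, a plausible stand-in packs the tuple into a delimited string. The functions below (make_key_demo, parse_key_demo) and the key format are assumptions, but the per-point weighting mirrors the output loop above: each grid point in a zone receives an equal share of that zone's building count.

# Hypothetical stand-ins for _make_key/_parse_key (the real key format is not
# shown here); bsddb accepts only string keys, hence the packed string.
def make_key_demo(zone, zone_gid, lon, lat):
    return '%s|%s|%.6f|%.6f' % (zone, zone_gid, lon, lat)

def parse_key_demo(key):
    zone, zone_gid, lon, lat = key.split('|')
    return zone, zone_gid, float(lon), float(lat)

# Weighting as in the output loop: equal share of the zone total per grid point.
zone_stats = {'Z1': 4}             # grid points that fell inside zone Z1
zone_count_stats = {'Z1': 1000.0}  # building count recorded for zone Z1

zone, zone_gid, lon, lat = parse_key_demo(make_key_demo('RES', 'Z1', 12.3456, 45.6789))
share = 1.0 / zone_stats[zone_gid] * zone_count_stats[zone_gid]   # 250.0 buildings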
Example #30
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mappin """
        # validate inputs
        popgrid_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(popgrid_layer)
        self._test_layer_field_exists(popgrid_layer, CNT_FIELD_NAME)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, zone_field)
        # count_field is not required
        # if count field is not defined, then generate building count from footprints

        # local variables
        analyzer = QgsOverlayAnalyzer()

        # intersect grids and zones to obtain polygons with
        # - population and zone_id
        # - apply ratio to population to obtain building count
        tmp_join = "joined_%s" % get_unique_filename()
        tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # generate grid with building counts
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.String),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        output_layername = "grid_%s" % get_unique_filename()
        output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        pop_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        for _f in layer_features(tmp_join_layer):
            pop_count = _f.attributeMap()[pop_idx].toDouble()[0]
            zone = _f.attributeMap()[zone_idx].toString()

            # 1. get geometry
            geom = _f.geometry()
            # 2. get original centroid point and project if required
            centroid = geom.centroid().asPoint()
            grid_gid = latlon_to_grid(centroid.y(), centroid.x())
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone)
            f.addAttribute(2, pop_count / pop_to_bldg)
            writer.addFeature(f)
        del writer

        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)

        # store data in output
        self._load_output(output_file, output_layername)
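The conversion in the loop above is a single division per intersected cell; a tiny illustrative version with invented figures (pop_to_bldg is the assumed average number of occupants per building):

# Illustrative: turn a population grid into building counts with a fixed
# occupants-per-building ratio, as done feature-by-feature above.
pop_to_bldg = 4.0                              # assumed occupants per building
cells = {'cell_a': 1200.0, 'cell_b': 80.0}     # population per grid cell

bldg_counts = dict((gid, pop / pop_to_bldg) for gid, pop in cells.items())
# {'cell_a': 300.0, 'cell_b': 20.0}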
Example #31
0
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set        
        # load and verify
        infile = self.inputs[0].value
        
        tmp_fp_layername = 'fp_%s' % get_unique_filename()
        tmp_fp_layer = load_shapefile(infile, tmp_fp_layername)
        if not tmp_fp_layer:
            raise OperatorError('Error loading footprint file %s' % infile, self.__class__)

        if self._fp_ht_field is not None:
            ht_idx = layer_field_index(tmp_fp_layer, self._fp_ht_field)
        else:
            ht_idx = -1
        logAPICall.log('tmp_fp_layer.crs().epsg() %s ' % tmp_fp_layer.crs().epsg(),
                       logAPICall.DEBUG)
        if tmp_fp_layer.crs().epsg() != self._crs.epsg():
            transform = QgsCoordinateTransform(tmp_fp_layer.crs(), self._crs)
            transform_required = True
        else:
            transform_required = False
        
        mercator_crs = QgsCoordinateReferenceSystem()
        #mercator_crs.createFromProj4("+proj=merc +lon_0=0 +k=1 +x_0=0 +y_0=0 +ellps=WGS84 +datum=WGS84 +units=m +no_defs")
        mercator_crs.createFromEpsg(3395)        
        mercator_transform = QgsCoordinateTransform(tmp_fp_layer.crs(), mercator_crs)
        
        # output grid
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.Int),
            1 : QgsField(LON_FIELD_NAME, QVariant.Double),
            2 : QgsField(LAT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
            4 : QgsField(HT_FIELD_NAME, QVariant.Int),
        }
        output_file = '%sfpc_%s.shp' % (self._tmp_dir, get_unique_filename())
        logAPICall.log('create output file %s ... ' % output_file, logAPICall.DEBUG)
        try:
            writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for _f in layer_features(tmp_fp_layer):
                # NOTE: geom.transform does projection in place to underlying
                #       C object, for some reason, multiple projection does not
                #       work correctly. following is a work-around
                 
                # 1. get geometry
                geom = _f.geometry()
                # 2. get original centroid point and project if required
                centroid = geom.centroid().asPoint()
                if transform_required:
                    t_centroid = transform.transform(centroid)
                else:
                    t_centroid = centroid
                
                # 3. project into mercator and get area in m2
                geom.transform(mercator_transform)
                area = geom.area()
                
                # write to file
                gid += 1
                f.setGeometry(QgsGeometry.fromPoint(t_centroid))
                f.addAttribute(0, QVariant(gid))
                f.addAttribute(1, QVariant(t_centroid.x()))
                f.addAttribute(2, QVariant(t_centroid.y()))
                f.addAttribute(3, QVariant(area))
                if ht_idx != -1:
                    f.addAttribute(4, _f.attributeMap()[ht_idx])
                else:
                    f.addAttribute(4, QVariant(0))
                writer.addFeature(f)            
            del writer, f
        except Exception as err:
            remove_shapefile(output_file)
            raise OperatorError("error creating footprint centroids: %s" % err, self.__class__)

        fp_layer = load_shapefile(output_file, tmp_fp_layername)
        if not fp_layer:
            raise OperatorError('Error loading footprint centroid file %s' % output_file, self.__class__)
        
        # clean up
        del tmp_fp_layer
        
        # store data in output
        self.outputs[0].value = fp_layer
        self.outputs[1].value = output_file
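The detour through EPSG:3395 exists because a planar area is only meaningful in a projected CRS. Outside QGIS, the same centroid-plus-area step could be sketched with shapely and pyproj (both assumed available; neither appears in this listing):

# Sketch only: keep the centroid in the source lon/lat CRS and measure area on
# a copy projected to World Mercator (EPSG:3395), mirroring the work-around above.
from shapely.geometry import Polygon
from shapely.ops import transform
from pyproj import Transformer

footprint = Polygon([(28.97, 41.01), (28.98, 41.01), (28.98, 41.02), (28.97, 41.02)])
centroid = footprint.centroid                      # stays in lon/lat

to_mercator = Transformer.from_crs("EPSG:4326", "EPSG:3395", always_xy=True).transform
area_m2 = transform(to_mercator, footprint).area   # planar area in EPSG:3395 metres
print(centroid.x, centroid.y, area_m2)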
Example #32
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        # validate inputs        
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        area_field = self.inputs[3].value
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
        self._test_layer_field_exists(zone_layer, zone_field)        
        self._test_layer_field_exists(zone_layer, count_field)
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()
        area_idx = ToGrid.STAT_AREA_IDX
        #cnt_idx = ToGrid.STAT_COUNT_IDX
        
        # 1. find building count and total area for each zone
        zone_names, zone_stat = {}, {}
        try:
            self._create_zone_statistics(zone_layer, zone_field, count_field, 
                                         zone_stat, zone_names)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)    
    
        # 2. create grids around extent of zone 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
        try:
            extent = zone_layer.extent()
            [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]
            tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file, \
                                              x_min, y_min, x_max, y_max, \
                                              DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)            
        except Exception as err:
            raise OperatorError(str(err), self.__class__)    
        
        # 3. intersect grids and zones to obtain polygons with 
        # - grid_id and zone_id
        # - ratio of grid covered by zone (polygon area / zone area) 
        # apply ratio to zone building count to obtain count assigned to polygon                  
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        # do tally        
        zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
        grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
        bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
        bldg_area_idx = layer_field_index(tmp_join_layer, area_field)
        mercator_transform = QgsCoordinateTransform(tmp_join_layer.crs(),
                                                    self.mercator_crs)          

        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
        }    
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature() 
        for _f in layer_features(tmp_join_layer):
            # get area of polygon            
            geom = _f.geometry()
            geom.transform(mercator_transform)
            area = geom.area()

            # generate all stats of interest
            zone_gid = _f.attributeMap()[zone_gid_idx].toString()
            grid_gid = _f.attributeMap()[grid_gid_idx].toString()
            stat = zone_stat[zone_gid]
            
            # calculate count/area as proportion of total zone area
            bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * (area / stat[area_idx])
            if bldg_area_idx > 0:
                bldg_area = _f.attributeMap()[bldg_area_idx].toDouble()[0] * (area / stat[area_idx])
            else:
                bldg_area = 0 

            # create output record
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone_names[QString(zone_gid)])
            f.addAttribute(2, bldg_cnt)
            f.addAttribute(3, bldg_area)
            writer.addFeature(f)        
        del writer    

        # clean up
        del tmp_grid_lyr1
        del tmp_join_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_join_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
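This example repeats the grid-allocation logic shown earlier, so a useful companion is a check that the allocated counts sum back to the zone totals. A purely illustrative version with invented rows:

# Illustrative sanity check: per-cell counts should add back up to each zone's
# original building total (up to floating-point rounding).
zone_totals = {'Z1': 1000.0, 'Z2': 250.0}
allocated = [('Z1', 600.0), ('Z1', 400.0), ('Z2', 250.0)]   # (zone_gid, bldg_cnt)

sums = {}
for zone_gid, cnt in allocated:
    sums[zone_gid] = sums.get(zone_gid, 0.0) + cnt

for zone_gid, total in zone_totals.items():
    assert abs(sums.get(zone_gid, 0.0) - total) < 1e-6, zone_gid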
Example #33
0
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        svy_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(svy_layer)

        total_features = svy_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to store temporary lat/lon
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')
        else:
            db = {}

        # tally statistics for each grid_id/building type combination
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        for f in layer_features(svy_layer):
            geom = f.geometry()
            centroid = geom.centroid().asPoint()
            grid_id = latlon_to_grid(centroid.y(), centroid.x())
            tax_str = str(f.attributeMap()[tax_idx].toString())

            key = '%s %s' % (tax_str, grid_id)
            if db.has_key(key):
                db[key] = str(int(db[key]) +
                              1)  # value as string required by bsddb
            else:
                db[key] = '1'  # value as string required by bsddb

        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields,
                                         self._outputGeometryType(), self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            gid = 0
            for key, val in db.iteritems():
                (tax_str, grid_id) = key.split(' ')
                lon, lat = grid_to_latlon(int(grid_id))

                f.setGeometry(self._outputGeometryFromGridId(grid_id))
                f.addAttribute(0, QVariant(grid_id))
                f.addAttribute(1, QVariant(lon))
                f.addAttribute(2, QVariant(lat))
                f.addAttribute(3, QVariant(tax_str))
                f.addAttribute(4, QVariant(''))
                f.addAttribute(5, QVariant(val))
                writer.addFeature(f)
                gid += 1
            del writer, f
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err,
                                self.__class__)

        # load shapefile as layer
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:
            raise OperatorError(
                'Error loading exposure file %s' % (exposure_file),
                self.__class__)

        # store data in output
        self.outputs[0].value = exposure_layer
        self.outputs[1].value = exposure_file
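The dict-or-bsddb switch keeps small surveys in memory and spills large ones to disk; bsddb stores values as strings, which is why the tally increments go through str/int conversions. A stripped-down, Python 2 sketch of that pattern (the threshold, path and taxonomy string are placeholders):

# Sketch of the in-memory vs. on-disk tally used above.  bsddb (Python 2
# stdlib) accepts only string keys/values, so counts are stored as strings.
import bsddb

MAX_FEATURES_IN_MEMORY = 100000        # placeholder threshold
total_features = 250000                # e.g. layer.dataProvider().featureCount()

if total_features > MAX_FEATURES_IN_MEMORY:
    db = bsddb.btopen('/tmp/tally_demo.db', 'c')   # placeholder path
else:
    db = {}

key = '%s %s' % ('MUR+ADO', '123456')  # placeholder taxonomy string + grid id
if db.has_key(key):
    db[key] = str(int(db[key]) + 1)
else:
    db[key] = '1'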
Example #34
0
File: grids.py Project: gem/sidd
    def do_operation(self):
        """ perform create mappin """
        # validate inputs 
        popgrid_layer = self.inputs[0].value        
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        pop_to_bldg = float(self.inputs[3].value)

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(popgrid_layer)
        self._test_layer_field_exists(popgrid_layer, CNT_FIELD_NAME)
        self._test_layer_loaded(zone_layer)
        self._test_layer_field_exists(zone_layer, zone_field)        
        # count_field is not required        
        # if count field is not defined, then generate building count from footprints
        
        # local variables 
        analyzer = QgsOverlayAnalyzer()

        # intersect grids and zones to obtain polygons with 
        # - population and zone_id
        # - apply ratio to population to obtain building count                  
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
        try:
            # do intersection
            analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)

        # generate grid with building counts
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.String),            
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
        }
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = '%s%s.shp' % (self._tmp_dir, output_layername)                
        writer = QgsVectorFileWriter(output_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        pop_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
        zone_idx = layer_field_index(tmp_join_layer, zone_field) 
        for _f in layer_features(tmp_join_layer):
            pop_count = _f.attributeMap()[pop_idx].toDouble()[0]
            zone = _f.attributeMap()[zone_idx].toString()
            
            # 1. get geometry
            geom = _f.geometry()
            # 2. get original centroid point and project if required
            centroid = geom.centroid().asPoint()
            grid_gid = latlon_to_grid(centroid.y(), centroid.x())
            f.setGeometry(self._outputGeometryFromGridId(grid_gid))
            f.addAttribute(0, grid_gid)
            f.addAttribute(1, zone)
            f.addAttribute(2, pop_count / pop_to_bldg)
            writer.addFeature(f)
        del writer
        
        # clean up
        del tmp_join_layer
        remove_shapefile(tmp_join_file)
                
        # store data in output
        self._load_output(output_file, output_layername)
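latlon_to_grid and _outputGeometryFromGridId (used above) map a centroid to a cell identifier and back to a cell geometry, but their encoding is not shown in this listing. The sketch below is only an illustrative uniform 0.01-degree grid with an invented packing scheme, not SIDD's actual one:

# Illustrative stand-ins for latlon_to_grid/grid_to_latlon: snap a point to a
# regular lon/lat grid and recover the cell's lower-left corner.  The grid size
# and the row/col packing are assumptions made for this demo.
GRID_SIZE = 0.01

def latlon_to_grid_demo(lat, lon):
    row = int((lat + 90.0) / GRID_SIZE)
    col = int((lon + 180.0) / GRID_SIZE)
    return row * 100000 + col            # pack row/col into a single integer id

def grid_to_latlon_demo(grid_id):
    row, col = divmod(grid_id, 100000)
    return row * GRID_SIZE - 90.0, col * GRID_SIZE - 180.0

gid = latlon_to_grid_demo(41.015, 28.975)
print(gid, grid_to_latlon_demo(gid))     # cell id and its lower-left corner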
Example #35
0
    def do_operation(self):
        """ perform apply mapping scheme operation """

        # input/output data checking already done during property set
        src_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        count_field = self.inputs[2].value
        ms = self.inputs[3].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(src_layer)
        self._test_layer_field_exists(src_layer, zone_field)
        self._test_layer_field_exists(src_layer, count_field)

        # loop through all zones and assign mapping scheme
        # outputs
        exposure_layername = 'exp_%s' % get_unique_filename()
        exposure_file = '%s%s.shp' % (self._tmp_dir, exposure_layername)

        # loop through all input features
        provider = src_layer.dataProvider()
        if provider is None:
            raise OperatorError("input layer not correctly loaded",
                                self.__class__)
        zone_idx = layer_field_index(src_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % zone_field,
                self.__class__)
        count_idx = layer_field_index(src_layer, count_field)
        if count_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % count_field,
                self.__class__)
        gid_idx = layer_field_index(src_layer, GID_FIELD_NAME)
        if gid_idx == -1:
            raise OperatorError(
                "field %s not found in input layer" % GID_FIELD_NAME,
                self.__class__)
        area_idx = layer_field_index(src_layer, AREA_FIELD_NAME)

        provider.select(provider.attributeIndexes(), provider.extent())
        provider.rewind()

        try:
            writer = QgsVectorFileWriter(exposure_file, "utf-8", self._fields,
                                         provider.geometryType(), self._crs,
                                         "ESRI Shapefile")
            out_feature = QgsFeature()

            gid = 0
            for in_feature in layer_features(src_layer):
                geom = in_feature.geometry()
                centroid = geom.centroid().asPoint()
                gid = in_feature.attributeMap()[gid_idx]
                zone_str = str(in_feature.attributeMap()[zone_idx].toString())
                count = in_feature.attributeMap()[count_idx].toDouble()[0]
                if area_idx > 0:
                    area = in_feature.attributeMap()[area_idx].toDouble()[0]
                else:
                    area = 0

                count = int(count + 0.5)
                if count == 0:
                    continue

                stats = ms.get_assignment_by_name(zone_str)

                # use default stats if missing
                if stats is None:
                    raise Exception("no mapping scheme found for zone %s" %
                                    zone_str)

                for _sample in stats.get_samples(count,
                                                 self._extrapolationOption):
                    # write out if there are structures assigned
                    _type = _sample[0]
                    _cnt = _sample[1]

                    if area > 0:
                        # use area provided by footprint/zone if defined
                        _size = area * (float(_sample[1]) / count)
                        if _sample[3] > 0 and _sample[2] > 0:
                            _cost = (_sample[3] / _sample[2]) * area
                        else:
                            _cost = 0
                    else:
                        # use mapping scheme generic area otherwise
                        _size = _sample[2]
                        _cost = _sample[3]

                    if _cnt > 0:
                        out_feature.setGeometry(geom)
                        #out_feature.addAttribute(0, QVariant(gid))
                        out_feature.addAttribute(0, gid)
                        out_feature.addAttribute(1, QVariant(centroid.x()))
                        out_feature.addAttribute(2, QVariant(centroid.y()))
                        out_feature.addAttribute(3, QVariant(_type))
                        out_feature.addAttribute(4, QVariant(zone_str))
                        out_feature.addAttribute(5, QVariant(_cnt))
                        out_feature.addAttribute(6, QVariant(_size))
                        out_feature.addAttribute(7, QVariant(_cost))
                        writer.addFeature(out_feature)
            del writer, out_feature
        except Exception as err:
            remove_shapefile(exposure_file)
            raise OperatorError("error creating exposure file: %s" % err,
                                self.__class__)

        del src_layer

        # load shapefile as layer
        exposure_layer = load_shapefile(exposure_file, exposure_layername)
        if not exposure_layer:
            raise OperatorError(
                'Error loading exposure file %s' % exposure_file,
                self.__class__)

        # store data in output
        self.outputs[0].value = exposure_layer
        self.outputs[1].value = exposure_file
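ms.get_assignment_by_name and stats.get_samples are not part of this listing; conceptually the samples distribute a cell's building count across the mapping scheme's taxonomy branches, and the loop then scales size and cost when a footprint/zone area is available. A hedged illustration of that distribution with invented types, weights and figures:

# Illustrative only: split a cell's building count across taxonomy fractions,
# then derive a floor-area share the way the loop above does when an area is
# available.  get_samples' real logic is not shown in this listing.
def distribute(count, weights):
    return [(t, int(round(count * w))) for t, w in weights]

weights = [('MUR', 0.5), ('RC', 0.3), ('W', 0.2)]
count, area = 200, 15000.0                 # buildings and total area in the cell

for _type, _cnt in distribute(count, weights):
    if _cnt == 0:
        continue
    _size = area * (float(_cnt) / count)   # area share, as in the loop above
    print(_type, _cnt, _size)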
Example #36
0
    def _clean_layer(self, output):
        """ release the output layer and remove its temporary shapefile """
        del output[0].value
        remove_shapefile(output[1].value)
Example #37
0
    def _clean_layer(self, output):
        """ release the output layer and remove its temporary shapefile """
        del output[0].value
        remove_shapefile(output[1].value)