def do_operation(self):
    """ Create footprint-centroid points tagged with their zone.

    inputs:  [0] zone layer, [1] zone field name, [2] footprint layer
    outputs: [0] centroid point layer, [1] path to its shapefile
    raises:  OperatorError on intersection / load / write failure
    """
    # input/output verification already performed during set input/output
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    fp_layer = self.inputs[2].value

    # merge with zone to get assignment
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                               [zone_field])
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    fields = {
        0: QgsField(self._lon_field, QVariant.Double),
        1: QgsField(self._lat_field, QVariant.Double),
        2: QgsField(zone_field, QVariant.String),
    }
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    fp_layername = 'fpc_%s' % get_unique_filename()
    fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
    try:
        writer = QgsVectorFileWriter(fp_file, "utf-8", fields,
                                     QGis.WKBPoint, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(tmp_join_layer):
            centroid = _f.geometry().centroid().asPoint()
            lon = centroid.x()
            lat = centroid.y()
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
            f.addAttribute(0, QVariant(lon))
            f.addAttribute(1, QVariant(lat))
            f.addAttribute(2, QVariant(zone_str))
            writer.addFeature(f)
        del writer
    except Exception as err:
        logAPICall.log(err, logAPICall.ERROR)
        remove_shapefile(fp_file)
        raise OperatorError("error creating joined grid: %s" % err,
                            self.__class__)

    # load shapefile as layer
    fp_layer = load_shapefile(fp_file, fp_layername)
    if not fp_layer:
        # FIX: original message had no %s placeholder, so the "%" raised
        # TypeError instead of the intended OperatorError
        raise OperatorError('Error loading footprint centroid file %s' % fp_file,
                            self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    self.outputs[0].value = fp_layer
    self.outputs[1].value = fp_file
def intersect_shapefiles(shape1, shape2, output_path):
    """ Intersect two shapefiles and write the result to disk

    :param shape1: First shapefile
    :type shape1: QgsVectorLayer
    :param shape2: Second shapefile
    :type shape2: QgsVectorLayer
    :param output_path: The place where the intersection shall be stored
    :type output_path: str
    :return: If the layers were intersected successfully
    :rtype: bool
    """
    try:
        if shape1.isValid() and shape2.isValid():
            analyser = QgsOverlayAnalyzer()
            return analyser.intersection(shape1, shape2, output_path)
    # FIX: use "except ... as ..." (old "except AttributeError, Error"
    # comma syntax is Python-2-only and removed in Python 3)
    except AttributeError as error:
        print(error)
        return False
    # FIX: explicitly return False for invalid layers; previously the
    # function fell through and returned None despite documenting bool
    return False
def do_operation(self):
    """ Distribute zone building counts/areas onto a regular grid,
    adjusting with footprint counts and (optional) heights.

    inputs:  [0] footprint layer, [1] zone layer, [2] zone field,
             [3] count field (optional), [4] area field (optional)
    output:  grid shapefile with GID, zone, building count and area
    raises:  OperatorError on any intersection / load / statistics failure
    """
    # validate inputs
    fp_layer = self.inputs[0].value
    zone_layer = self.inputs[1].value
    zone_field = self.inputs[2].value
    count_field = self.inputs[3].value
    area_field = self.inputs[4].value

    # make sure input is correct
    # NOTE: these checks cannot be performed at set input time
    #       because the data layer maybe is not loaded yet
    self._test_layer_loaded(fp_layer)
    self._test_layer_loaded(zone_layer)
    self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
    self._test_layer_field_exists(zone_layer, zone_field)
    # count_field is not required
    # if count field is not defined, then generate building count from footprints
    # area_field is not required

    # local variables
    analyzer = QgsOverlayAnalyzer()
    area_idx = ToGrid.STAT_AREA_IDX
    cnt_idx = ToGrid.STAT_COUNT_IDX
    zone_names, zone_stat, zone_stat2, zone_totals = {}, {}, {}, {}

    # 1. find building count and total area for each zone
    #    project geometry into mercator (EPSG:3395) so area is in m2
    mercator_crs = QgsCoordinateReferenceSystem()
    mercator_crs.createFromEpsg(3395)
    mercator_transform = QgsCoordinateTransform(zone_layer.crs(), mercator_crs)
    try:
        # use zone geometry area
        self._create_zone_statistics(zone_layer, zone_field, count_field,
                                     zone_stat, zone_names)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # 2. create grids around extent of zone
    tmp_grid1 = "grid_" + get_unique_filename()
    tmp_grid1_file = self._tmp_dir + tmp_grid1 + ".shp"
    extent = zone_layer.extent()
    [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(),
                                    extent.xMaximum(), extent.yMaximum()]
    tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file,
                                      x_min, y_min, x_max, y_max,
                                      DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)

    # tally total building area if there is one defined
    bldg_area_idx = layer_field_index(zone_layer, area_field)
    zone_area = {}
    zone_has_area = False
    if bldg_area_idx >= 0:  # FIX: 0 is a valid field index (was "> 0")
        zone_has_area = True
        zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
        for _f in layer_features(zone_layer):
            gid = _f.attributeMap()[zone_gid_idx].toString()
            area = _f.attributeMap()[bldg_area_idx].toDouble()[0]
            if gid in zone_area:
                # FIX: accumulate numerically; original did
                # str(float(...)) + area, i.e. str + float -> TypeError
                zone_area[gid] = float(zone_area[gid]) + area
            else:
                zone_area[gid] = area

    # 3. intersect grids and zones to obtain polygons with
    #    - grid_id and zone_id
    #    - ratio of grid covered by zone (polygon area / zone area)
    #    apply ratio to zone building count to obtain count assigned to polygon
    tmp_join = "joined_%s" % get_unique_filename()
    tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
    try:
        # do intersection
        analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # do tally
    zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
    grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
    bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
    for _f in layer_features(tmp_join_layer):
        geom = _f.geometry()
        geom.transform(mercator_transform)
        area = geom.area()
        # generate all stats of interest
        zone_gid = _f.attributeMap()[zone_gid_idx].toString()
        grid_gid = _f.attributeMap()[grid_gid_idx].toString()
        stat = zone_stat[zone_gid]
        # calculate count/area as proportion of total zone area
        area_ratio = (area / stat[area_idx])
        if bldg_cnt_idx >= 0:  # FIX: 0 is a valid field index (was "> 0")
            bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * area_ratio
        else:
            bldg_cnt = 0
        if zone_has_area:
            area = zone_area[zone_gid] * area_ratio
        else:
            area = stat[area_idx] * area_ratio
        self._update_stat(zone_stat2, "%s|%s" % (grid_gid, zone_gid),
                          bldg_cnt, area)

    # 4. find total buildings in each zone based on footprint
    #    - simply join the files and tally count and total area
    tmp_join1 = "joined_%s" % get_unique_filename()
    tmp_join1_file = "%s%s.shp" % (self._tmp_dir, tmp_join1)
    try:
        # do intersection
        analyzer.intersection(fp_layer, tmp_join_layer, tmp_join1_file)
        tmp_join1_layer = load_shapefile(tmp_join1_file, tmp_join1)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # do tally
    zone_fp_stat = {}
    zone_gid_idx = layer_field_index(tmp_join1_layer, "%s_" % GID_FIELD_NAME)
    grid_gid_idx = layer_field_index(tmp_join1_layer, "GRID_GID")
    fp_area_idx = layer_field_index(tmp_join1_layer, AREA_FIELD_NAME)
    fp_ht_idx = layer_field_index(tmp_join1_layer, HT_FIELD_NAME)
    fp_has_height = False
    for _f in layer_features(tmp_join1_layer):
        zone_gid = _f.attributeMap()[zone_gid_idx].toString()
        grid_gid = _f.attributeMap()[grid_gid_idx].toString()
        # area comes from geometry, always exists
        area = _f.attributeMap()[fp_area_idx].toDouble()[0]
        ht = _f.attributeMap()[fp_ht_idx].toDouble()[0]
        if ht > 0:
            fp_has_height = True
            area *= ht  # this is actual area to be aggregated at the end
        self._update_stat(zone_fp_stat, "%s|%s" % (grid_gid, zone_gid), 1, area)
        self._update_stat(zone_totals, zone_gid, 1, area)

    # 5. generate grid with adjusted building counts
    fields = {
        0: QgsField(GID_FIELD_NAME, QVariant.String),
        1: QgsField(zone_field, QVariant.String),
        2: QgsField(CNT_FIELD_NAME, QVariant.Double),
        3: QgsField(AREA_FIELD_NAME, QVariant.Double),
    }
    output_layername = "grid_%s" % get_unique_filename()
    output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
    writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                 QGis.WKBPolygon, self._crs, "ESRI Shapefile")
    f = QgsFeature()
    for key in zone_stat2.keys():
        (grid_gid, zone_gid) = str(key).split("|")
        # overall statistics for the zone from zone file (always exists)
        s_zone = zone_stat[QString(zone_gid)]
        # grid specific statistic from zone file (always exists)
        s_zone_grid = zone_stat2[key]
        if QString(zone_gid) in zone_totals:
            # overall statistics for the zone from footprints
            s_total = zone_totals[QString(zone_gid)]
        else:
            s_total = [0, 0]  # set to zero if missing
        if key in zone_fp_stat:
            # grid specific statistic from footprint
            s_fp = zone_fp_stat[key]
        else:
            s_fp = [0, 0]  # set to zero if missing

        zone_leftover_count = s_zone[cnt_idx] - s_total[cnt_idx]
        if zone_has_area:
            zone_leftover_area = zone_area[QString(zone_gid)] - s_total[area_idx]
        else:
            zone_leftover_area = s_zone[area_idx] - s_total[area_idx]
        if zone_leftover_count > 0:
            # there are still buildings not accounted for
            # distribute to grid based on ratio of grid leftover area over
            # zone leftover area (leftover area is area of zone after
            # subtracting footprint areas)
            grid_leftover_count = zone_leftover_count * \
                ((s_zone_grid[area_idx] - s_fp[area_idx]) / zone_leftover_area)
            grid_count = s_fp[cnt_idx] + grid_leftover_count
        else:
            grid_count = s_fp[cnt_idx]

        if fp_has_height:
            # area can be actual area based on footprint area * height
            area = s_fp[area_idx]
        elif zone_has_area:
            area = s_zone_grid[area_idx]
        else:
            # no area defined
            area = 0  # max(s_zone_grid[area_idx], s_fp[area_idx])

        f.setGeometry(self._outputGeometryFromGridId(grid_gid))
        f.addAttribute(0, grid_gid)
        f.addAttribute(1, zone_names[QString(zone_gid)])
        f.addAttribute(2, grid_count)
        f.addAttribute(3, area)
        writer.addFeature(f)
    del writer

    # clean up
    del tmp_grid_lyr1
    del tmp_join_layer
    del tmp_join1_layer
    remove_shapefile(tmp_grid1_file)
    remove_shapefile(tmp_join_file)
    remove_shapefile(tmp_join1_file)

    # store data in output
    self._load_output(output_file, output_layername)
def do_operation(self):
    """ Convert a population grid into building counts per grid cell.

    inputs:  [0] population grid layer, [1] zone layer, [2] zone field,
             [3] population-per-building ratio
    output:  grid shapefile with GID, zone and building count
    raises:  OperatorError on intersection / load failure or zero ratio
    """
    # validate inputs
    popgrid_layer = self.inputs[0].value
    zone_layer = self.inputs[1].value
    zone_field = self.inputs[2].value
    pop_to_bldg = float(self.inputs[3].value)
    # FIX: guard the later "pop_count / pop_to_bldg" against a zero ratio,
    # which previously surfaced as an unhandled ZeroDivisionError
    if pop_to_bldg == 0:
        raise OperatorError("population to building ratio cannot be zero",
                            self.__class__)

    # make sure input is correct
    # NOTE: these checks cannot be performed at set input time
    #       because the data layer maybe is not loaded yet
    self._test_layer_loaded(popgrid_layer)
    self._test_layer_field_exists(popgrid_layer, CNT_FIELD_NAME)
    self._test_layer_loaded(zone_layer)
    self._test_layer_field_exists(zone_layer, zone_field)
    # count_field is not required
    # if count field is not defined, then generate building count from footprints

    # local variables
    analyzer = QgsOverlayAnalyzer()

    # intersect grids and zones to obtain polygons with
    # - population and zone_id
    # - apply ratio to population to obtain building count
    tmp_join = "joined_%s" % get_unique_filename()
    tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
    try:
        # do intersection
        analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # generate grid with building counts
    fields = {
        0: QgsField(GID_FIELD_NAME, QVariant.String),
        1: QgsField(zone_field, QVariant.String),
        2: QgsField(CNT_FIELD_NAME, QVariant.Double),
    }
    output_layername = "grid_%s" % get_unique_filename()
    output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
    writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                 QGis.WKBPolygon, self._crs, "ESRI Shapefile")
    f = QgsFeature()
    pop_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    for _f in layer_features(tmp_join_layer):
        pop_count = _f.attributeMap()[pop_idx].toDouble()[0]
        zone = _f.attributeMap()[zone_idx].toString()
        # 1. get geometry
        geom = _f.geometry()
        # 2. get original centroid point and project if required
        centroid = geom.centroid().asPoint()
        grid_gid = latlon_to_grid(centroid.y(), centroid.x())
        f.setGeometry(self._outputGeometryFromGridId(grid_gid))
        f.addAttribute(0, grid_gid)
        f.addAttribute(1, zone)
        f.addAttribute(2, pop_count / pop_to_bldg)
        writer.addFeature(f)
    del writer

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    # store data in output
    self._load_output(output_file, output_layername)
def do_operation(self):
    """ Build a mapping scheme from a stratified survey joined with zones,
    then adjust building ratios using footprint height/area statistics.

    inputs:  [0] footprint layer, [1] footprint area field,
             [2] footprint height field, [3] zone layer, [4] zone field,
             [5] survey layer
    outputs: [0] MappingScheme, [1] per-zone area fractions by taxonomy
    raises:  OperatorError on missing fields or malformed survey groups
    """
    # input/output verification not performed yet
    fp_layer = self.inputs[0].value
    area_field = self.inputs[1].value
    ht_field = self.inputs[2].value
    zone_layer = self.inputs[3].value
    zone_field = self.inputs[4].value
    svy_layer = self.inputs[5].value

    # make sure required data fields are populated
    area_idx = layer_field_index(fp_layer, area_field)
    if area_idx == -1:
        raise OperatorError("Field %s does not exist in %s" %
                            (area_field, fp_layer.name()), self.__class__)
    ht_idx = layer_field_index(fp_layer, ht_field)
    if ht_idx == -1:
        raise OperatorError("Field %s does not exist in %s" %
                            (ht_field, fp_layer.name()), self.__class__)
    zone_idx = layer_field_index(zone_layer, zone_field)
    if zone_idx == -1:
        raise OperatorError("Field %s does not exist in %s" %
                            (zone_field, zone_layer.name()), self.__class__)
    svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
    if svy_samp_idx == -1:
        raise OperatorError("Field %s does not exist in %s" %
                            (GRP_FIELD_NAME, svy_layer.name()), self.__class__)
    svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
    if svy_ht_idx == -1:
        raise OperatorError("Field %s does not exist in %s" %
                            (HT_FIELD_NAME, svy_layer.name()), self.__class__)
    svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
    if svy_size_idx == -1:
        # FIX: pass self.__class__ for consistency with the other checks
        raise OperatorError("Field %s does not exist in %s" %
                            (AREA_FIELD_NAME, svy_layer.name()), self.__class__)
    tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
    if tax_idx == -1:
        raise OperatorError("Field %s does not exist in %s" %
                            (TAX_FIELD_NAME, svy_layer.name()), self.__class__)

    # load zone classes
    # the operations below must be performed for each zone
    try:
        zone_classes = layer_field_stats(zone_layer, zone_field)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)

    # join survey with zones
    logAPICall.log('merge survey & zone', logAPICall.DEBUG)
    tmp_join_layername = 'join_%s' % get_unique_filename()
    tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'
    analyzer = QgsOverlayAnalyzer()
    analyzer.intersection(svy_layer, zone_layer, tmp_join_file)
    tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

    logAPICall.log('compile zone statistics', logAPICall.DEBUG)
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
    svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
    svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
    if svy_size_idx == -1:
        raise OperatorError("Field %s does not exist in %s" %
                            (AREA_FIELD_NAME, svy_layer.name()), self.__class__)
    tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
    if tax_idx == -1:
        raise OperatorError("Field %s does not exist in %s" %
                            (TAX_FIELD_NAME, svy_layer.name()), self.__class__)

    # empty fields for holding the stats
    _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
    _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
    _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {}
    for _zone in zone_classes.iterkeys():
        _zone_n_exp[_zone] = {}
        _zone_p_exp[_zone] = {}
        _zone_a_exp[_zone] = {}
        _zone_e_exp[_zone] = {}
        _zone_group_counts[_zone] = {}
        _zone_group_stories[_zone] = {}
        _zone_group_weight[_zone] = {}
        _zone_total_area[_zone] = 0
        _zone_total_count[_zone] = 0
        _zone_total_ht[_zone] = 0

    # associate group to ratio value
    for _rec in layer_features(tmp_join_layer):
        # FIX: tally into the record's own zone; original used the stale
        # "_zone" loop variable left over from the initialization loop,
        # so every record was counted against the last zone only
        _zone_str = str(_rec.attributeMap()[zone_idx].toString())
        _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
        _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
        _tax_str = str(_rec.attributeMap()[tax_idx].toString())
        try:
            self._taxonomy.parse(_tax_str)
            self.increment_dict(_zone_group_counts[_zone_str], _samp_grp, 1)
            self.increment_dict(_zone_group_stories[_zone_str], _samp_grp, _ht)
        except Exception as err:
            logAPICall.log("Error processing record %s" % err,
                           logAPICall.WARNING)

    for _zone in zone_classes.iterkeys():
        if len(_zone_group_counts[_zone]) != 3:
            raise OperatorError("Survey must have 3 sampling groups",
                                self.__class__)
        cmp_value = -1
        for _grp, _count in _zone_group_counts[_zone].iteritems():
            if cmp_value == -1:
                cmp_value = _count
            if cmp_value != _count:
                raise OperatorError(
                    "Survey groups must have same number of samples",
                    self.__class__)
        # sort by stories
        group_stories_for_sort = {}
        for _grp, _ht in _zone_group_stories[_zone].iteritems():
            group_stories_for_sort[_ht] = _grp
        sorted_keys = group_stories_for_sort.keys()
        sorted_keys.sort()
        # assign group to weight
        for idx, key in enumerate(sorted_keys):
            _zone_group_weight[_zone][group_stories_for_sort[key]] = \
                self.weights[idx]

    # aggregate values from survey for each building type
    # - count (n)
    # - floor area (p)
    # - total area (a)
    for _f in layer_features(tmp_join_layer):
        _zone_str = str(_f.attributeMap()[zone_idx].toString())
        _tax_str = str(_f.attributeMap()[tax_idx].toString())
        _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
        _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
        # FIX: read the sample height from the height field; original read
        # svy_size_idx (area) again
        _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
        # FIX: look up weights for this record's zone; original indexed
        # with the stale "_zone" loop variable
        group_weight = _zone_group_weight[_zone_str]
        try:
            self._taxonomy.parse(_tax_str)
            self.increment_dict(_zone_n_exp[_zone_str], _tax_str,
                                group_weight[_sample_grp])
            self.increment_dict(_zone_p_exp[_zone_str], _tax_str,
                                _sample_size * group_weight[_sample_grp])
            # FIX: use _sample_ht; original multiplied by the stale "_ht"
            # from an earlier loop
            self.increment_dict(_zone_a_exp[_zone_str], _tax_str,
                                _sample_size * _sample_ht * group_weight[_sample_grp])
            self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
        except Exception as err:
            logAPICall.log("error processing sample with building type: %s"
                           % _tax_str, logAPICall.WARNING)
            pass

    # adjust ratio using footprint ht/area
    tmp_join_layername2 = 'join_%s' % get_unique_filename()
    tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'
    analyzer = QgsOverlayAnalyzer()
    analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)
    # FIX: load with the new layer name (original reused tmp_join_layername)
    tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)

    zone_idx = layer_field_index(tmp_join_layer2, zone_field)
    area_idx = layer_field_index(tmp_join_layer2, area_field)
    ht_idx = layer_field_index(tmp_join_layer2, ht_field)
    for _f in layer_features(tmp_join_layer2):
        _zone_str = str(_f.attributeMap()[zone_idx].toString())
        _area = _f.attributeMap()[area_idx].toDouble()[0]
        _ht = _f.attributeMap()[ht_idx].toDouble()[0]
        _zone_total_area[_zone_str] += _area
        _zone_total_count[_zone_str] += 1
        _zone_total_ht[_zone_str] += _ht

    # calculate building ratios for each zone
    for _zone in zone_classes.iterkeys():
        # for total count (n) and area (a)
        e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
        e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())
        # E[A] estimated total building area for zone
        if _zone_total_count[_zone] > 0:
            e_at_total = _zone_total_area[_zone] * _zone_total_ht[_zone] \
                / _zone_total_count[_zone]
        else:
            # FIX: no footprints intersect this zone; fall back to the
            # count-based branch below instead of raising ZeroDivisionError
            e_at_total = 0
        # calculate expected values
        for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
            e_nt_cluster = _zone_n_exp[_zone][t]
            if e_at_cluster == 0 or e_at_total == 0:
                # area is missing, use count instead
                _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                _zone_a_exp[_zone][t] = 0
            else:
                # use ratio of area over total area
                # E[f(t)] building fraction based on sampled area
                e_ft_cluster = e_at_cluster / e_at_cluster_total
                # E[G(t)] average area per building
                e_gt_cluster = e_at_cluster / e_nt_cluster
                # E[A(t)] estimated total building area for zone for building type
                e_at = e_at_total * e_ft_cluster
                # E[N(t)] estimated total number of buildings zone-wide by type
                e_nt = e_at / e_gt_cluster
                _zone_e_exp[_zone][t] = e_nt
                _zone_a_exp[_zone][t] = e_ft_cluster

    # convert the building ratios
    logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
    ms = MappingScheme(self._taxonomy)
    for _zone in zone_classes.iterkeys():
        # create mapping scheme for zone
        stats = Statistics(self._taxonomy)
        # use building ratio to create statistic
        for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
            stats.add_case(_tax_str, self._parse_order, self._parse_modifiers,
                           add_times=int(_e_exp * 1000))
        # finalize call is required
        stats.finalize()
        ms.assign(MappingSchemeZone(_zone), stats)

    # clean up
    # FIX: also release and delete the second join (original leaked
    # tmp_join_layer2 / tmp_join_file2 in the temp directory)
    del tmp_join_layer, tmp_join_layer2, analyzer
    remove_shapefile(tmp_join_file)
    remove_shapefile(tmp_join_file2)

    # assign output
    self.outputs[0].value = ms
    self.outputs[1].value = _zone_a_exp
def do_operation(self):
    """ Distribute zone building counts (and optional areas) onto a
    regular grid in proportion to each grid/zone overlap area.

    inputs:  [0] zone layer, [1] zone field, [2] count field,
             [3] area field (optional)
    output:  grid shapefile with GID, zone, building count and area
    raises:  OperatorError on statistics / grid / intersection failure
    """
    # validate inputs
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    count_field = self.inputs[2].value
    area_field = self.inputs[3].value

    # make sure input is correct
    # NOTE: these checks cannot be performed at set input time
    #       because the data layer maybe is not loaded yet
    self._test_layer_loaded(zone_layer)
    self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
    self._test_layer_field_exists(zone_layer, zone_field)
    self._test_layer_field_exists(zone_layer, count_field)

    # local variables
    analyzer = QgsOverlayAnalyzer()
    area_idx = ToGrid.STAT_AREA_IDX
    # cnt_idx = ToGrid.STAT_COUNT_IDX

    # 1. find building count and total area for each zone
    zone_names, zone_stat = {}, {}
    try:
        self._create_zone_statistics(zone_layer, zone_field, count_field,
                                     zone_stat, zone_names)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # 2. create grids around extent of zone
    tmp_grid1 = "grid_" + get_unique_filename()
    tmp_grid1_file = self._tmp_dir + tmp_grid1 + ".shp"
    try:
        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(),
                                        extent.xMaximum(), extent.yMaximum()]
        tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file,
                                          x_min, y_min, x_max, y_max,
                                          DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # 3. intersect grids and zones to obtain polygons with
    #    - grid_id and zone_id
    #    - ratio of grid covered by zone (polygon area / zone area)
    #    apply ratio to zone building count to obtain count assigned to polygon
    tmp_join = "joined_%s" % get_unique_filename()
    tmp_join_file = "%s%s.shp" % (self._tmp_dir, tmp_join)
    try:
        # do intersection
        analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # do tally
    zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
    grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
    bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
    bldg_area_idx = layer_field_index(tmp_join_layer, area_field)
    mercator_transform = QgsCoordinateTransform(tmp_join_layer.crs(),
                                                self.mercator_crs)

    fields = {
        0: QgsField(GID_FIELD_NAME, QVariant.String),
        1: QgsField(zone_field, QVariant.String),
        2: QgsField(CNT_FIELD_NAME, QVariant.Double),
        3: QgsField(AREA_FIELD_NAME, QVariant.Double),
    }
    output_layername = "grid_%s" % get_unique_filename()
    output_file = "%s%s.shp" % (self._tmp_dir, output_layername)
    writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                 QGis.WKBPolygon, self._crs, "ESRI Shapefile")
    f = QgsFeature()
    for _f in layer_features(tmp_join_layer):
        # get area of polygon (projected to mercator so area is in m2)
        geom = _f.geometry()
        geom.transform(mercator_transform)
        area = geom.area()
        # generate all stats of interest
        zone_gid = _f.attributeMap()[zone_gid_idx].toString()
        grid_gid = _f.attributeMap()[grid_gid_idx].toString()
        stat = zone_stat[zone_gid]
        # calculate count/area as proportion of total zone area
        bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] \
            * (area / stat[area_idx])
        if bldg_area_idx >= 0:  # FIX: 0 is a valid field index (was "> 0")
            bldg_area = _f.attributeMap()[bldg_area_idx].toDouble()[0] \
                * (area / stat[area_idx])
        else:
            bldg_area = 0
        # create output record
        f.setGeometry(self._outputGeometryFromGridId(grid_gid))
        f.addAttribute(0, grid_gid)
        f.addAttribute(1, zone_names[QString(zone_gid)])
        f.addAttribute(2, bldg_cnt)
        f.addAttribute(3, bldg_area)
        writer.addFeature(f)
    del writer

    # clean up
    del tmp_grid_lyr1
    del tmp_join_layer
    remove_shapefile(tmp_grid1_file)
    remove_shapefile(tmp_join_file)

    # store data in output
    self._load_output(output_file, output_layername)
def do_operation(self):
    """ Build grid points covering all zones and distribute each zone's
    building count evenly over its grid points.

    inputs:  [0] zone layer, [1] zone field, [2] count field, [3] grid layer
    outputs: [0] grid point layer, [1] path to its shapefile
    raises:  OperatorError on intersection / statistics / write failure
    """
    # input/output verification already performed during set input/output
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    count_field = self.inputs[2].value
    grid_layer = self.inputs[3].value

    zone_stats = {}
    zone_count_stats = {}
    gid_idx = layer_field_index(zone_layer, self._gid_field)
    count_idx = layer_field_index(zone_layer, count_field)
    for _f in layer_features(zone_layer):
        gid = _f.attributeMap()[gid_idx].toString()
        zone_stats[gid] = 0
        zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]

    # create storage for temporary output data
    use_grid_db = grid_layer.dataProvider().featureCount() > MAX_FEATURES_IN_MEMORY
    # NOTE(review): the on-disk (bsddb) branch is deliberately disabled —
    # use_grid_db is computed but never consulted; confirm before enabling
    if False:
        tmp_grid_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
        grid_points = bsddb.btopen(tmp_grid_db_file, 'c')
    else:
        grid_points = {}

    # merge to create stats
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                               [zone_field, count_field])
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    stats = layer_multifields_stats(tmp_join_layer, [zone_field, count_field])
    if stats == False:
        raise OperatorError("error creating statistic based on input files",
                            self.__class__)

    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    count_idx = layer_field_index(tmp_join_layer, count_field)
    lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
    lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
    gid_idx = layer_field_index(tmp_join_layer, self._gid_field)
    try:
        for _f in layer_features(tmp_join_layer):
            lon = _f.attributeMap()[lon_idx].toDouble()[0]
            lat = _f.attributeMap()[lat_idx].toDouble()[0]
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            gid = _f.attributeMap()[gid_idx].toString()
            # update stats
            zone_stats[gid] += 1
            grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
    except Exception as err:
        # FIX: original message lacked a %s placeholder, so the "%" raised
        # TypeError and masked the real error (same fix applied below)
        raise OperatorError("error processing joined layer: %s" % err,
                            self.__class__)

    # test for zones without a grid point assigned
    count_idx = layer_field_index(zone_layer, count_field)
    gid_idx = layer_field_index(zone_layer, self._gid_field)
    zone_idx = layer_field_index(zone_layer, zone_field)
    _x_off, _y_off = self._x_off / 2.0, self._y_off / 2.0
    try:
        for _f in layer_features(zone_layer):
            centroid = _f.geometry().centroid().asPoint()
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            gid = _f.attributeMap()[gid_idx].toString()
            if zone_stats[gid] == 0:
                # get lower left corner
                # FIX: snap longitude using self._x_off, mirroring the
                # latitude computation (original divided by
                # DEFAULT_GRID_SIZE but multiplied by self._x_off)
                lon = int(centroid.x() / self._x_off) * self._x_off + _x_off
                lat = int(centroid.y() / self._y_off) * self._y_off + _y_off
                zone_stats[gid] += 1
                grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
    except Exception as err:
        raise OperatorError("error processing missing points: %s" % err,
                            self.__class__)

    # output result
    fields = {
        0: QgsField(self._lon_field, QVariant.Double),
        1: QgsField(self._lat_field, QVariant.Double),
        2: QgsField(zone_field, QVariant.String),
        3: QgsField(count_field, QVariant.Double),
    }
    grid_layername = 'grid_%s' % (get_unique_filename())
    grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
    try:
        f = QgsFeature()
        writer = QgsVectorFileWriter(grid_file, "utf-8", fields,
                                     QGis.WKBPoint, self._crs, "ESRI Shapefile")
        for key, value in grid_points.iteritems():
            [zone, zone_gid, lon, lat] = self._parse_key(key)
            f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
            # each point receives an equal share of the zone's total count
            value = float(value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
            self._write_feature(writer, f, lon, lat, zone, value)
        del writer
    except Exception as err:
        raise OperatorError("error creating joined grid file: %s" % err,
                            self.__class__)

    # load result layer
    grid_layer = load_shapefile(grid_file, grid_layername)
    if not grid_layer:
        raise OperatorError('Error loading joined grid file %s' % grid_file,
                            self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    self.outputs[0].value = grid_layer
    self.outputs[1].value = grid_file
def do_operation(self):
    """ Build a mapping scheme directly from a survey joined with zones.

    inputs:  [0] survey layer, [1] zone layer, [2] zone field
    output:  [0] MappingScheme with one statistic tree per zone
    raises:  OperatorError when zone classes cannot be computed
    """
    # input/output verification already performed during set input/output
    survey_layer = self.inputs[0].value
    zone_layer = self.inputs[1].value
    zone_field = self.inputs[2].value
    tax_field = self._tax_field

    logAPICall.log('survey %s, taxfield %s, zone %s, zone_field, %s'
                   % (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
                   logAPICall.DEBUG)
    tmp_join_layername = 'join_%s' % get_unique_filename()
    tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

    # load zone classes
    try:
        zone_classes = layer_field_stats(zone_layer, zone_field)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)

    # merge to create stats
    logAPICall.log('merge survey & zone', logAPICall.DEBUG)
    analyzer = QgsOverlayAnalyzer()
    analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
    tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

    logAPICall.log('create mapping schemes', logAPICall.DEBUG)
    ms = MappingScheme(self._taxonomy)
    for _zone, _count in zone_classes.iteritems():
        stats = Statistics(self._taxonomy)
        ms.assign(MappingSchemeZone(_zone), stats)

    # loop through all input features
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    tax_idx = layer_field_index(tmp_join_layer, tax_field)
    area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
    cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)
    for _f in layer_features(tmp_join_layer):
        _zone_str = str(_f.attributeMap()[zone_idx].toString())
        _tax_str = str(_f.attributeMap()[tax_idx].toString())
        additional = {}
        _area = _f.attributeMap()[area_idx].toDouble()[0]
        if _area > 0:
            additional = {StatisticNode.AverageSize: _area}
        _cost = _f.attributeMap()[cost_idx].toDouble()[0]
        if _cost > 0:
            # FIX: add cost to the existing dict; original rebound
            # "additional", discarding the AverageSize entry whenever
            # both area and cost were positive
            additional[StatisticNode.UnitCost] = _cost
        logAPICall.log('zone %s => %s' % (_zone_str, _tax_str),
                       logAPICall.DEBUG_L2)
        try:
            ms.get_assignment_by_name(_zone_str).add_case(
                _tax_str, self._parse_order, self._parse_modifiers, additional)
        except TaxonomyParseError as perr:
            logAPICall.log("error parsing case %s, %s"
                           % (str(_tax_str), str(perr)), logAPICall.WARNING)

    # store data in output
    for _zone, _stats in ms.assignments():
        _stats.finalize()
        _stats.get_tree().value = _zone.name

    # clean up
    del tmp_join_layer, analyzer
    remove_shapefile(tmp_join_file)

    self.outputs[0].value = ms
def do_operation(self):
    """
    Apportion zone-level building counts onto grid points.

    inputs:  [0] zone layer, [1] zone field, [2] count field, [3] grid point layer
    outputs: [0] grid point layer with per-point counts, [1] path to its shapefile

    Each grid point falling inside a zone receives an equal share of that
    zone's total count; zones covering no grid point get one synthetic point
    at their centroid cell. Raises OperatorError on any processing failure.
    """
    # input/output verification already performed during set input/output
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    count_field = self.inputs[2].value
    grid_layer = self.inputs[3].value

    # per-zone tallies: number of grid points assigned, and zone total count
    zone_stats = {}
    zone_count_stats = {}
    gid_idx = layer_field_index(zone_layer, self._gid_field)
    count_idx = layer_field_index(zone_layer, count_field)
    for _f in layer_features(zone_layer):
        gid = _f.attributeMap()[gid_idx].toString()
        zone_stats[gid] = 0
        zone_count_stats[gid] = _f.attributeMap()[count_idx].toDouble()[0]

    # NOTE: a bsddb-backed spill (for grids larger than MAX_FEATURES_IN_MEMORY)
    # used to exist here but was disabled (`if False:`); the in-memory dict is
    # the only live path, so the dead branch has been removed
    grid_points = {}

    # intersect grid with zones so each point inherits the zone attributes
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(grid_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                               [zone_field, count_field])
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    stats = layer_multifields_stats(tmp_join_layer, [zone_field, count_field])
    if stats == False:
        raise OperatorError("error creating statistic based on input files",
                            self.__class__)

    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    count_idx = layer_field_index(tmp_join_layer, count_field)
    lon_idx = layer_field_index(tmp_join_layer, self._lon_field)
    lat_idx = layer_field_index(tmp_join_layer, self._lat_field)
    gid_idx = layer_field_index(tmp_join_layer, self._gid_field)
    try:
        for _f in layer_features(tmp_join_layer):
            lon = _f.attributeMap()[lon_idx].toDouble()[0]
            lat = _f.attributeMap()[lat_idx].toDouble()[0]
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            gid = _f.attributeMap()[gid_idx].toString()
            # update stats
            zone_stats[gid] += 1
            grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
    except Exception as err:
        # BUG FIX: message had no %s placeholder, so the raise itself
        # failed with TypeError instead of reporting the real error
        raise OperatorError("error processing joined layer: %s" % err,
                            self.__class__)

    # test for zones without a grid point assigned: give them one synthetic
    # point snapped to the grid cell containing the zone centroid
    count_idx = layer_field_index(zone_layer, count_field)
    gid_idx = layer_field_index(zone_layer, self._gid_field)
    zone_idx = layer_field_index(zone_layer, zone_field)
    _x_off, _y_off = self._x_off / 2.0, self._y_off / 2.0
    try:
        for _f in layer_features(zone_layer):
            centroid = _f.geometry().centroid().asPoint()
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            gid = _f.attributeMap()[gid_idx].toString()
            if zone_stats[gid] == 0:
                # lower-left corner of the enclosing cell, plus half-cell offset
                lon = int(centroid.x() / DEFAULT_GRID_SIZE) * self._x_off + _x_off
                lat = int(centroid.y() / self._y_off) * self._y_off + _y_off
                zone_stats[gid] += 1
                grid_points[self._make_key(zone_str, gid, lon, lat)] = 1
    except Exception as err:
        # BUG FIX: same missing %s placeholder as above
        raise OperatorError("error processing missing points: %s" % err,
                            self.__class__)

    # output result
    fields = {
        0: QgsField(self._lon_field, QVariant.Double),
        1: QgsField(self._lat_field, QVariant.Double),
        2: QgsField(zone_field, QVariant.String),
        3: QgsField(count_field, QVariant.Double),
    }
    grid_layername = 'grid_%s' % (get_unique_filename())
    grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
    try:
        f = QgsFeature()
        writer = QgsVectorFileWriter(grid_file, "utf-8", fields,
                                     QGis.WKBPoint, self._crs, "ESRI Shapefile")
        for key, value in grid_points.iteritems():
            [zone, zone_gid, lon, lat] = self._parse_key(key)
            f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
            # share = point weight / points in zone * zone total count
            value = float(value) / zone_stats[zone_gid] * zone_count_stats[zone_gid]
            self._write_feature(writer, f, lon, lat, zone, value)
        del writer
    except Exception as err:
        # BUG FIX: missing %s placeholder
        raise OperatorError("error creating joined grid file: %s" % err,
                            self.__class__)

    # load result layer
    grid_layer = load_shapefile(grid_file, grid_layername)
    if not grid_layer:
        # BUG FIX: missing %s placeholder
        raise OperatorError('Error loading joined grid file %s' % grid_file,
                            self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    self.outputs[0].value = grid_layer
    self.outputs[1].value = grid_file
def do_operation(self):
    """
    Convert a population grid into a building-count grid.

    inputs:  [0] population grid layer, [1] zone layer, [2] zone field,
             [3] population-per-building ratio
    output:  grid polygon layer with GID, zone and building count
             (stored via self._load_output)
    """
    # validate inputs
    popgrid_layer = self.inputs[0].value
    zone_layer = self.inputs[1].value
    zone_field = self.inputs[2].value
    pop_to_bldg = float(self.inputs[3].value)

    # make sure input is correct
    # NOTE: these checks cannot be performed at set input time
    # because the data layer maybe is not loaded yet
    self._test_layer_loaded(popgrid_layer)
    self._test_layer_field_exists(popgrid_layer, CNT_FIELD_NAME)
    self._test_layer_loaded(zone_layer)
    self._test_layer_field_exists(zone_layer, zone_field)
    # count_field is not required
    # if count field is not defined, then generate building count from footprints

    # local variables
    analyzer = QgsOverlayAnalyzer()

    # intersect grids and zones to obtain polygons with
    # - population and zone_id
    # - apply ratio to population to obtain building count
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    try:
        # do intersection
        analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # generate grid with building counts
    fields = {
        0 : QgsField(GID_FIELD_NAME, QVariant.String),
        1 : QgsField(zone_field, QVariant.String),
        2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
    }
    output_layername = 'grid_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                 QGis.WKBPolygon, self._crs, "ESRI Shapefile")
    f = QgsFeature()
    pop_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    for _f in layer_features(tmp_join_layer):
        pop_count = _f.attributeMap()[pop_idx].toDouble()[0]
        zone = _f.attributeMap()[zone_idx].toString()

        # 1. get geometry
        geom = _f.geometry()
        # 2. get original centroid point and project is required
        centroid = geom.centroid().asPoint()
        # derive the grid cell id containing the centroid and emit the
        # cell polygon with the scaled building count
        grid_gid = latlon_to_grid(centroid.y(), centroid.x())
        f.setGeometry(self._outputGeometryFromGridId(grid_gid))
        f.addAttribute(0, grid_gid)
        f.addAttribute(1, zone)
        f.addAttribute(2, pop_count / pop_to_bldg)
        writer.addFeature(f)
    # closing the writer flushes the shapefile to disk
    del writer

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    # store data in output
    self._load_output(output_file, output_layername)
def do_operation(self):
    """
    Aggregate footprint count and total floor area per zone polygon.

    inputs:  [0] zone layer, [1] zone field, [2] zone count field (unused here),
             [3] footprint layer
    outputs: [0] zone layer with CNT/AREA fields, [1] path to its shapefile

    Raises OperatorError if the intersection or the output write fails.
    """
    # input/output verification already performed during set input/output
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    zone_count_field = self.inputs[2].value
    fp_layer = self.inputs[3].value

    # merge with zone
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # count footprint in each zone; the intersection suffixes the zone's
    # GID field with "_"
    gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
    area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
    ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
    stats = {}
    for _feature in layer_features(tmp_join_layer):
        gid = _feature.attributeMap()[gid_idx].toString()
        # NOTE(review): `ht_idx > 0` treats a height field at index 0 as
        # missing; project convention appears to reserve index 0 — confirm
        if ht_idx > 0:
            ht = _feature.attributeMap()[ht_idx].toDouble()[0]
        else:
            # if height is not defined, it is set to 0
            # this will cause the system to ignore area generated without
            # having to remove the field
            ht = 0
        area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
        # stats[gid] = (footprint count, accumulated floor area)
        if gid not in stats:
            stats[gid] = (1, area)
        else:
            stat = stats[gid]
            stats[gid] = (stat[0] + 1, stat[1] + area)

    output_layername = 'zone_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    logAPICall.log('create outputfile %s ...' % output_file, logAPICall.DEBUG)
    try:
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Int),
            3: QgsField(AREA_FIELD_NAME, QVariant.Int),
        }
        writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                     QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(zone_layer):
            # write to file: copy zone geometry and first two attributes
            f.setGeometry(_f.geometry())
            f.addAttribute(0, _f.attributeMap()[0])
            f.addAttribute(1, _f.attributeMap()[1])

            # retrieve count/area from statistic; zones with no footprint
            # default to zero
            try:
                gid = _f.attributeMap()[0].toString()
                stat = stats[gid]
                bldg_count = stat[0]
                area = stat[1]
            except KeyError:  # was a bare except: narrowed to the real case
                bldg_count, area = 0, 0
            f.addAttribute(2, QVariant(bldg_count))
            f.addAttribute(3, QVariant(area))
            writer.addFeature(f)
        del writer, f
    except Exception as err:
        remove_shapefile(output_file)
        raise OperatorError("error creating zone: %s" % err, self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    # store data in output
    output_layer = load_shapefile(output_file, output_layername)
    if not output_layer:
        # BUG FIX: message had no %s placeholder, so the raise itself
        # failed with TypeError instead of reporting the file path
        raise OperatorError('Error loading zone output file %s' % output_file,
                            self.__class__)

    self.outputs[0].value = output_layer
    self.outputs[1].value = output_file
def do_operation(self):
    """
    Derive a building count per zone from an intersected population grid.

    inputs:  [0] zone layer, [1] zone field, [2] population grid layer,
             [3] population-per-building ratio
    outputs: [0] zone layer with CNT field, [1] path to its shapefile

    Raises OperatorError if the intersection or the output write fails.
    """
    # input/output verification already performed during set input/output
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    popgrid_layer = self.inputs[2].value
    pop_to_bldg = float(self.inputs[3].value)

    # merge with zone
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # accumulate population / ratio per zone GID (intersection suffixes the
    # zone's GID field with "_")
    stats = {}
    _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
    _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
    for _f in layer_features(tmp_join_layer):
        _gid = _f.attributeMap()[_gid_idx].toString()
        _count = _f.attributeMap()[_cnt_idx].toString()
        if _gid in stats:
            stats[_gid] += float(_count) / pop_to_bldg
        else:
            stats[_gid] = float(_count) / pop_to_bldg

    output_layername = 'zone_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    logAPICall.log('create outputfile %s ...' % output_file, logAPICall.DEBUG)
    try:
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Int),
        }
        writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                     QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(zone_layer):
            # write to file: copy zone geometry and first two attributes
            f.setGeometry(_f.geometry())
            f.addAttribute(0, _f.attributeMap()[0])
            f.addAttribute(1, _f.attributeMap()[1])

            # retrieve count from statistic; zones with no grid point
            # default to zero
            try:
                gid = _f.attributeMap()[0].toString()
                bldg_count = stats[gid]
            except KeyError:  # was a bare except: narrowed to the real case
                bldg_count = 0
            f.addAttribute(2, QVariant(bldg_count))
            writer.addFeature(f)
        del writer, f
    except Exception as err:
        remove_shapefile(output_file)
        raise OperatorError("error creating zone: %s" % err, self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    # store data in output
    output_layer = load_shapefile(output_file, output_layername)
    if not output_layer:
        # BUG FIX: message had no %s placeholder, so the raise itself
        # failed with TypeError instead of reporting the file path
        raise OperatorError('Error loading zone output file %s' % output_file,
                            self.__class__)

    self.outputs[0].value = output_layer
    self.outputs[1].value = output_file
def do_operation(self):
    """
    Create a footprint-centroid point layer tagged with its zone.

    inputs:  [0] zone layer, [1] zone field, [2] footprint layer
    outputs: [0] centroid point layer (lon/lat/zone), [1] path to its shapefile

    Raises OperatorError if the intersection or the output write fails.
    """
    # input/output verification already performed during set input/output
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    fp_layer = self.inputs[2].value

    # merge with zone to get assignment
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join,
                                               [zone_field])
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    fields = {
        0: QgsField(self._lon_field, QVariant.Double),
        1: QgsField(self._lat_field, QVariant.Double),
        2: QgsField(zone_field, QVariant.String),
    }
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    fp_layername = 'fpc_%s' % get_unique_filename()
    fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
    try:
        writer = QgsVectorFileWriter(fp_file, "utf-8", fields,
                                     QGis.WKBPoint, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(tmp_join_layer):
            # one output point per footprint: its centroid plus zone label
            centroid = _f.geometry().centroid().asPoint()
            lon = centroid.x()
            lat = centroid.y()
            zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()
            f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
            f.addAttribute(0, QVariant(lon))
            f.addAttribute(1, QVariant(lat))
            f.addAttribute(2, QVariant(zone_str))
            writer.addFeature(f)
        del writer
    except Exception as err:
        logAPICall.log(err, logAPICall.ERROR)
        remove_shapefile(fp_file)
        raise OperatorError("error creating joined grid: %s" % err,
                            self.__class__)

    # load shapefile as layer
    fp_layer = load_shapefile(fp_file, fp_layername)
    if not fp_layer:
        # BUG FIX: message had no %s placeholder, so the raise itself
        # failed with TypeError instead of reporting the file path
        raise OperatorError('Error loading footprint centroid file %s' % fp_file,
                            self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    self.outputs[0].value = fp_layer
    self.outputs[1].value = fp_file
def do_operation(self):
    """
    Build a MappingScheme from a stratified (3-group) survey sample,
    weighting samples by group and scaling by footprint area/height.

    inputs:  [0] footprint layer, [1] area field, [2] height field,
             [3] zone layer, [4] zone field, [5] survey layer
    outputs: [0] MappingScheme, [1] per-zone building-fraction dict
    """
    # input/output verification not performed yet
    fp_layer = self.inputs[0].value
    area_field = self.inputs[1].value
    ht_field = self.inputs[2].value
    zone_layer = self.inputs[3].value
    zone_field = self.inputs[4].value
    svy_layer = self.inputs[5].value

    # make sure required data fields are populated
    area_idx = layer_field_index(fp_layer, area_field)
    if area_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (area_field, fp_layer.name()), self.__class__)
    ht_idx = layer_field_index(fp_layer, ht_field)
    if ht_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (ht_field, fp_layer.name()), self.__class__)
    zone_idx = layer_field_index(zone_layer, zone_field)
    if zone_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (zone_field, zone_layer.name()), self.__class__)
    svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
    if svy_samp_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (GRP_FIELD_NAME, svy_layer.name()), self.__class__)
    svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
    if svy_ht_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (HT_FIELD_NAME, svy_layer.name()), self.__class__)
    svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
    # NOTE(review): the two raises below omit self.__class__, unlike the
    # checks above — inconsistent but unchanged here
    if svy_size_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (AREA_FIELD_NAME, svy_layer.name()))
    tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
    if tax_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (TAX_FIELD_NAME, svy_layer.name()))

    # load zone classes
    # the operations below must be performed for each zone
    try:
        zone_classes = layer_field_stats(zone_layer, zone_field)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)

    # join survey with zones
    logAPICall.log('merge survey & zone', logAPICall.DEBUG)
    tmp_join_layername = 'join_%s' % get_unique_filename()
    tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'
    analyzer = QgsOverlayAnalyzer()
    analyzer.intersection(svy_layer, zone_layer, tmp_join_file)
    tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

    logAPICall.log('compile zone statistics', logAPICall.DEBUG)
    # field indices must be re-resolved on the joined layer
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
    svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
    svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
    if svy_size_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (AREA_FIELD_NAME, svy_layer.name()))
    tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
    if tax_idx == -1:
        raise OperatorError("Field %s does not exist in %s" % (TAX_FIELD_NAME, svy_layer.name()))

    # empty fields for holding the stats
    _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
    _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
    _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {}
    for _zone in zone_classes.iterkeys():
        _zone_n_exp[_zone] = {}
        _zone_p_exp[_zone] = {}
        _zone_a_exp[_zone] = {}
        _zone_e_exp[_zone] = {}
        _zone_group_counts[_zone] = {}
        _zone_group_stories[_zone] = {}
        _zone_group_weight[_zone] = {}
        _zone_total_area[_zone] = 0
        _zone_total_count[_zone] = 0
        _zone_total_ht[_zone] = 0

    # associate group to ratio value
    # NOTE(review): `_zone` here is the leftover loop variable from the
    # initialization loop above, so ALL records are tallied under the last
    # zone rather than the record's own zone (zone_idx is never read here)
    # — confirm whether single-zone input is assumed
    for _rec in layer_features(tmp_join_layer):
        _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
        _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
        _tax_str = str(_rec.attributeMap()[tax_idx].toString())
        try:
            self._taxonomy.parse(_tax_str)
            self.increment_dict(_zone_group_counts[_zone], _samp_grp, 1)
            self.increment_dict(_zone_group_stories[_zone], _samp_grp, _ht)
        except Exception as err:
            logAPICall.log("Error processing record %s" % err, logAPICall.WARNING)

    # validate the sampling design: exactly 3 equally sized groups per zone
    for _zone in zone_classes.iterkeys():
        if len(_zone_group_counts[_zone]) != 3:
            raise OperatorError("Survey must have 3 sampling groups", self.__class__)
        cmp_value = -1
        for _grp, _count in _zone_group_counts[_zone].iteritems():
            if cmp_value == -1:
                cmp_value = _count
            if cmp_value != _count:
                raise OperatorError("Survey groups must have same number of samples", self.__class__)
        # sort by stories
        group_stories_for_sort = {}
        for _grp, _ht in _zone_group_stories[_zone].iteritems():
            group_stories_for_sort[_ht] = _grp
        sorted_keys = group_stories_for_sort.keys()
        sorted_keys.sort()
        # assign group to weight (groups ordered by total stories)
        for idx, key in enumerate(sorted_keys):
            _zone_group_weight[_zone][group_stories_for_sort[key]] = self.weights[idx]

    # aggregate values from survey for each building type
    # - count (n)
    # - floor area (p)
    # - total area (a)
    for _f in layer_features(tmp_join_layer):
        _zone_str = str(_f.attributeMap()[zone_idx].toString())
        _tax_str = str(_f.attributeMap()[tax_idx].toString())
        _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
        _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
        # NOTE(review): _sample_ht reads svy_size_idx (not svy_ht_idx) and is
        # never used; the a-exp line below uses `_ht`, which is stale from the
        # earlier loops — looks like a copy-paste bug, confirm intent
        _sample_ht = _f.attributeMap()[svy_size_idx].toDouble()[0]
        # NOTE(review): `_zone` is again the stale loop variable; weights are
        # taken from the last zone regardless of _zone_str
        group_weight = _zone_group_weight[_zone]
        try:
            self._taxonomy.parse(_tax_str)
            self.increment_dict(_zone_n_exp[_zone_str], _tax_str, group_weight[_sample_grp])
            self.increment_dict(_zone_p_exp[_zone_str], _tax_str, _sample_size*group_weight[_sample_grp])
            self.increment_dict(_zone_a_exp[_zone_str], _tax_str, _sample_size*_ht*group_weight[_sample_grp])
            self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
        except Exception as err:
            logAPICall.log("error processing sample with building type: %s" % _tax_str,
                           logAPICall.WARNING)
            pass

    # adjust ratio using footprint ht/area
    tmp_join_layername2 = 'join_%s' % get_unique_filename()
    tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'
    analyzer = QgsOverlayAnalyzer()
    analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)
    # NOTE(review): loads file2 under the FIRST join's layer name
    # (tmp_join_layername, not tmp_join_layername2) — likely a typo; also
    # tmp_join_layer2 / tmp_join_file2 are never cleaned up below
    tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername)

    zone_idx = layer_field_index(tmp_join_layer2, zone_field)
    area_idx = layer_field_index(tmp_join_layer2, area_field)
    ht_idx = layer_field_index(tmp_join_layer2, ht_field)
    for _f in layer_features(tmp_join_layer2):
        _zone_str = str(_f.attributeMap()[zone_idx].toString())
        _area = _f.attributeMap()[area_idx].toDouble()[0]
        _ht = _f.attributeMap()[ht_idx].toDouble()[0]
        _zone_total_area[_zone_str] += _area
        _zone_total_count[_zone_str] += 1
        _zone_total_ht[_zone_str] += _ht

    # calculate building ratios for each zone
    for _zone in zone_classes.iterkeys():
        # for total count (n) and area (a)
        e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
        e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())

        # E[A] estimated total building area for zone
        # NOTE(review): ZeroDivisionError if a zone has no footprint
        # (_zone_total_count == 0) — confirm inputs guarantee coverage
        e_at_total = _zone_total_area[_zone] * _zone_total_ht[_zone]/_zone_total_count[_zone]

        # calculate expected values
        for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
            e_nt_cluster = _zone_n_exp[_zone][t]
            if e_at_cluster == 0 or e_at_total == 0:
                # area is missing, use count instead
                _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                _zone_a_exp[_zone][t] = 0
            else:
                # use ratio of area over total area
                # E[f(t)] building fraction based on sampled area
                e_ft_cluster = e_at_cluster / e_at_cluster_total
                # E[G(t)] average area per building
                e_gt_cluster = e_at_cluster / e_nt_cluster
                # E[A(t)] estimated total building area for zone for building type
                e_at = e_at_total * e_ft_cluster
                # E[N(t)] estimated total number of buildings zone-wide by type
                e_nt = e_at / e_gt_cluster
                _zone_e_exp[_zone][t] = e_nt
                _zone_a_exp[_zone][t] = e_ft_cluster

    # convert the building ratios
    logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
    ms = MappingScheme(self._taxonomy)
    for _zone in zone_classes.iterkeys():
        # create mapping scheme for zone
        stats = Statistics(self._taxonomy)
        # use building ratio to create statistic
        for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
            stats.add_case(_tax_str, self._parse_order, self._parse_modifiers,
                           add_times=int(_e_exp*1000))
        # finalize call is required
        stats.finalize()
        ms.assign(MappingSchemeZone(_zone), stats)

    # clean up
    del tmp_join_layer, analyzer
    remove_shapefile(tmp_join_file)

    # assign output
    self.outputs[0].value = ms
    self.outputs[1].value = _zone_a_exp
def do_operation(self):
    """
    Build a MappingScheme from a survey layer joined with a zone layer.

    inputs:  [0] survey layer, [1] zone layer, [2] zone field name
    outputs: [0] MappingScheme with one Statistics tree per zone class

    Raises OperatorError when the zone statistics cannot be computed.
    """
    # input/output verification already performed during set input/output
    survey_layer = self.inputs[0].value
    zone_layer = self.inputs[1].value
    zone_field = self.inputs[2].value
    tax_field = self._tax_field
    logAPICall.log('survey %s, taxfield %s, zone %s, zone_field, %s' %
                   (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
                   logAPICall.DEBUG)
    tmp_join_layername = 'join_%s' % get_unique_filename()
    tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

    # load zone classes
    try:
        zone_classes = layer_field_stats(zone_layer, zone_field)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)

    # merge to create stats
    logAPICall.log('merge survey & zone', logAPICall.DEBUG)
    analyzer = QgsOverlayAnalyzer()
    analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
    tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

    # create one empty statistic tree per zone class
    logAPICall.log('create mapping schemes', logAPICall.DEBUG)
    ms = MappingScheme(self._taxonomy)
    for _zone, _count in zone_classes.iteritems():
        stats = Statistics(self._taxonomy)
        ms.assign(MappingSchemeZone(_zone), stats)

    # loop through all joined features and accumulate taxonomy cases
    zone_idx = layer_field_index(tmp_join_layer, zone_field)
    tax_idx = layer_field_index(tmp_join_layer, tax_field)
    area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
    cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)
    for _f in layer_features(tmp_join_layer):
        _zone_str = str(_f.attributeMap()[zone_idx].toString())
        _tax_str = str(_f.attributeMap()[tax_idx].toString())

        # optional per-record attributes (only stored when positive)
        additional = {}
        _area = _f.attributeMap()[area_idx].toDouble()[0]
        if _area > 0:
            additional = {StatisticNode.AverageSize: _area}
        _cost = _f.attributeMap()[cost_idx].toDouble()[0]
        if _cost > 0:
            # BUG FIX: previously re-assigned the whole dict, discarding the
            # AverageSize entry when both area and cost were present
            additional[StatisticNode.UnitCost] = _cost

        logAPICall.log('zone %s => %s' % (_zone_str, _tax_str),
                       logAPICall.DEBUG_L2)
        try:
            ms.get_assignment_by_name(_zone_str).add_case(
                _tax_str, self._parse_order, self._parse_modifiers, additional)
        except TaxonomyParseError as perr:
            # a malformed taxonomy string skips only that record
            logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)),
                           logAPICall.WARNING)

    # finalize each tree and label it with its zone name
    for _zone, _stats in ms.assignments():
        _stats.finalize()
        _stats.get_tree().value = _zone.name

    # clean up
    del tmp_join_layer, analyzer
    remove_shapefile(tmp_join_file)

    self.outputs[0].value = ms
def do_operation(self):
    """
    Spread zone building count/area onto a regular grid, proportional to
    the area of each grid/zone intersection polygon.

    inputs:  [0] zone layer, [1] zone field, [2] count field, [3] area field
    output:  grid polygon layer with GID, zone name, count and area
             (stored via self._load_output)
    """
    # validate inputs
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    count_field = self.inputs[2].value
    area_field = self.inputs[3].value

    # make sure input is correct
    # NOTE: these checks cannot be performed at set input time
    # because the data layer maybe is not loaded yet
    self._test_layer_loaded(zone_layer)
    self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
    self._test_layer_field_exists(zone_layer, zone_field)
    self._test_layer_field_exists(zone_layer, count_field)

    # local variables
    analyzer = QgsOverlayAnalyzer()
    area_idx = ToGrid.STAT_AREA_IDX
    #cnt_idx = ToGrid.STAT_COUNT_IDX

    # 1. find building count and total area for each zone
    zone_names, zone_stat = {}, {}
    try:
        self._create_zone_statistics(zone_layer, zone_field, count_field,
                                     zone_stat, zone_names)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # 2. create grids around extent of zone
    tmp_grid1 = 'grid_' + get_unique_filename()
    tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
    try:
        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(),
                                        extent.xMaximum(), extent.yMaximum()]
        tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file, \
                                          x_min, y_min, x_max, y_max, \
                                          DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # 3. intersect grids and zones to obtain polygons with
    # - grid_id and zone_id
    # - ratio of grid covered by zone (polygon area / zone area)
    # apply ratio to zone building count to obtain count assigned to polygon
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    try:
        # do intersection
        analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # do tally
    zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
    grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
    bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
    bldg_area_idx = layer_field_index(tmp_join_layer, area_field)
    # project intersection polygons to mercator so geom.area() is in m2
    mercator_transform = QgsCoordinateTransform(tmp_join_layer.crs(),
                                                self.mercator_crs)

    fields = {
        0 : QgsField(GID_FIELD_NAME, QVariant.String),
        1 : QgsField(zone_field, QVariant.String),
        2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
        3 : QgsField(AREA_FIELD_NAME, QVariant.Double),
    }
    output_layername = 'grid_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                 QGis.WKBPolygon, self._crs, "ESRI Shapefile")
    f = QgsFeature()
    for _f in layer_features(tmp_join_layer):
        # get area of polygon
        geom = _f.geometry()
        geom.transform(mercator_transform)
        area = geom.area()

        # generate all stats of interest
        zone_gid = _f.attributeMap()[zone_gid_idx].toString()
        grid_gid = _f.attributeMap()[grid_gid_idx].toString()
        stat = zone_stat[zone_gid]

        # calculate count/area as proportion of total zone area
        bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * (area/stat[area_idx])
        # NOTE(review): `> 0` treats an area field at index 0 as missing;
        # layer_field_index returns -1 for missing fields elsewhere — confirm
        if bldg_area_idx > 0:
            bldg_area = _f.attributeMap()[bldg_area_idx].toDouble()[0] * (area/stat[area_idx])
        else:
            bldg_area = 0

        # create output record
        f.setGeometry(self._outputGeometryFromGridId(grid_gid))
        f.addAttribute(0, grid_gid)
        # zone_stat/zone_names are keyed by QString GIDs
        f.addAttribute(1, zone_names[QString(zone_gid)])
        f.addAttribute(2, bldg_cnt)
        f.addAttribute(3, bldg_area)
        writer.addFeature(f)
    # closing the writer flushes the shapefile to disk
    del writer

    # clean up
    del tmp_grid_lyr1
    del tmp_join_layer
    remove_shapefile(tmp_grid1_file)
    remove_shapefile(tmp_join_file)

    # store data in output
    self._load_output(output_file, output_layername)
def do_operation(self):
    """
    Aggregate footprint count and total floor area per zone polygon.

    inputs:  [0] zone layer, [1] zone field, [2] zone count field (unused here),
             [3] footprint layer
    outputs: [0] zone layer with CNT/AREA fields, [1] path to its shapefile

    Raises OperatorError if the intersection or the output write fails.
    """
    # input/output verification already performed during set input/output
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    zone_count_field = self.inputs[2].value
    fp_layer = self.inputs[3].value

    # merge with zone
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # count footprint in each zone; the intersection suffixes the zone's
    # GID field with "_"
    gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
    area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
    ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
    stats = {}
    for _feature in layer_features(tmp_join_layer):
        gid = _feature.attributeMap()[gid_idx].toString()
        # NOTE(review): `ht_idx > 0` treats a height field at index 0 as
        # missing; project convention appears to reserve index 0 — confirm
        if ht_idx > 0:
            ht = _feature.attributeMap()[ht_idx].toDouble()[0]
        else:
            # if height is not defined, it is set to 0
            # this will cause the system to ignore area generated without
            # having to remove the field
            ht = 0
        area = _feature.attributeMap()[area_idx].toDouble()[0] * ht
        # stats[gid] = (footprint count, accumulated floor area)
        if gid not in stats:
            stats[gid] = (1, area)
        else:
            stat = stats[gid]
            stats[gid] = (stat[0] + 1, stat[1] + area)

    output_layername = 'zone_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    logAPICall.log('create outputfile %s ...' % output_file, logAPICall.DEBUG)
    try:
        fields = {
            0 : QgsField(GID_FIELD_NAME, QVariant.Int),
            1 : QgsField(zone_field, QVariant.String),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Int),
            3 : QgsField(AREA_FIELD_NAME, QVariant.Int),
        }
        writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                     QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(zone_layer):
            # write to file: copy zone geometry and first two attributes
            f.setGeometry(_f.geometry())
            f.addAttribute(0, _f.attributeMap()[0])
            f.addAttribute(1, _f.attributeMap()[1])

            # retrieve count/area from statistic; zones with no footprint
            # default to zero
            try:
                gid = _f.attributeMap()[0].toString()
                stat = stats[gid]
                bldg_count = stat[0]
                area = stat[1]
            except KeyError:  # was a bare except: narrowed to the real case
                bldg_count, area = 0, 0
            f.addAttribute(2, QVariant(bldg_count))
            f.addAttribute(3, QVariant(area))
            writer.addFeature(f)
        del writer, f
    except Exception as err:
        remove_shapefile(output_file)
        raise OperatorError("error creating zone: %s" % err, self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    # store data in output
    output_layer = load_shapefile(output_file, output_layername)
    if not output_layer:
        # BUG FIX: message had no %s placeholder, so the raise itself
        # failed with TypeError instead of reporting the file path
        raise OperatorError('Error loading zone output file %s' % output_file,
                            self.__class__)

    self.outputs[0].value = output_layer
    self.outputs[1].value = output_file
def do_operation(self):
    """ perform create mapping scheme operation

    Distributes per-zone building counts/areas onto a regular grid:
      1. compute per-zone statistics (count, geometric area)
      2. build a grid covering the zone layer's extent
      3. intersect grid x zones; apportion zone count/area to each
         grid-cell/zone polygon by its share of the zone area
      4. intersect footprints with the grid/zone polygons and tally the
         footprint-derived count/area per cell and per zone
      5. write an output grid whose counts combine actual footprints with
         the zone leftover distributed by remaining area

    inputs (via self.inputs):
        0 - footprint layer, 1 - zone layer, 2 - zone field name,
        3 - building count field name (optional), 4 - area field name (optional)
    output is stored via self._load_output.
    raises OperatorError on any failure in statistics or intersections.
    """
    # validate inputs
    fp_layer = self.inputs[0].value
    zone_layer = self.inputs[1].value
    zone_field = self.inputs[2].value
    count_field = self.inputs[3].value
    area_field = self.inputs[4].value

    # make sure input is correct
    # NOTE: these checks cannot be performed at set input time
    # because the data layer may not be loaded yet
    self._test_layer_loaded(fp_layer)
    self._test_layer_loaded(zone_layer)
    self._test_layer_field_exists(zone_layer, GID_FIELD_NAME)
    self._test_layer_field_exists(zone_layer, zone_field)
    # count_field is not required
    # if count field is not defined, then generate building count from footprints
    # area_field is not required

    # local variables
    analyzer = QgsOverlayAnalyzer()
    area_idx = ToGrid.STAT_AREA_IDX
    cnt_idx = ToGrid.STAT_COUNT_IDX
    zone_names, zone_stat, zone_stat2, zone_totals = {}, {}, {}, {}

    # 1. find building count and total area for each zone
    # project geometry into mercator (EPSG:3395) so polygon areas are in m2
    mercator_crs = QgsCoordinateReferenceSystem()
    mercator_crs.createFromEpsg(3395)
    mercator_transform = QgsCoordinateTransform(zone_layer.crs(), mercator_crs)
    try:
        # use zone geometry area
        self._create_zone_statistics(zone_layer, zone_field, count_field,
                                     zone_stat, zone_names)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # 2. create grids around extent of zone
    tmp_grid1 = 'grid_' + get_unique_filename()
    tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
    extent = zone_layer.extent()
    [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(),
                                    extent.xMaximum(), extent.yMaximum()]
    tmp_grid_lyr1 = self._create_grid(tmp_grid1, tmp_grid1_file,
                                      x_min, y_min, x_max, y_max,
                                      DEFAULT_GRID_SIZE, DEFAULT_GRID_SIZE)

    # tally total building area per zone if an area field is defined
    bldg_area_idx = layer_field_index(zone_layer, area_field)
    zone_area = {}
    zone_has_area = False
    if bldg_area_idx > 0:
        zone_has_area = True
        zone_gid_idx = layer_field_index(zone_layer, GID_FIELD_NAME)
        for _f in layer_features(zone_layer):
            gid = _f.attributeMap()[zone_gid_idx].toString()
            area = _f.attributeMap()[bldg_area_idx].toDouble()[0]
            if gid in zone_area:
                # FIX: original was str(float(zone_area[gid]))+area, which
                # adds a str to a float and raised TypeError whenever a
                # zone GID appeared more than once; numeric accumulation
                # was clearly intended
                zone_area[gid] = float(zone_area[gid]) + area
            else:
                zone_area[gid] = area

    # 3. intersect grids and zones to obtain polygons with
    # - grid_id and zone_id
    # - ratio of grid covered by zone (polygon area / zone area)
    # apply ratio to zone building count to obtain count assigned to polygon
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    try:
        # do intersection
        analyzer.intersection(tmp_grid_lyr1, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # do tally
    zone_gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME)
    grid_gid_idx = layer_field_index(tmp_join_layer, "GRID_GID")
    bldg_cnt_idx = layer_field_index(tmp_join_layer, count_field)
    for _f in layer_features(tmp_join_layer):
        geom = _f.geometry()
        geom.transform(mercator_transform)
        area = geom.area()
        # generate all stats of interest
        zone_gid = _f.attributeMap()[zone_gid_idx].toString()
        grid_gid = _f.attributeMap()[grid_gid_idx].toString()
        stat = zone_stat[zone_gid]
        # calculate count/area as proportion of total zone area
        area_ratio = (area / stat[area_idx])
        if bldg_cnt_idx > 0:
            bldg_cnt = _f.attributeMap()[bldg_cnt_idx].toDouble()[0] * area_ratio
        else:
            bldg_cnt = 0
        if zone_has_area:
            area = zone_area[zone_gid] * area_ratio
        else:
            area = stat[area_idx] * area_ratio
        self._update_stat(zone_stat2, '%s|%s' % (grid_gid, zone_gid), bldg_cnt, area)

    # 4. find total buildings in each zone based on footprint
    # - simply join the files and tally count and total area
    tmp_join1 = 'joined_%s' % get_unique_filename()
    tmp_join1_file = '%s%s.shp' % (self._tmp_dir, tmp_join1)
    try:
        # do intersection
        analyzer.intersection(fp_layer, tmp_join_layer, tmp_join1_file)
        tmp_join1_layer = load_shapefile(tmp_join1_file, tmp_join1)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # do tally
    zone_fp_stat = {}
    zone_gid_idx = layer_field_index(tmp_join1_layer, '%s_' % GID_FIELD_NAME)
    grid_gid_idx = layer_field_index(tmp_join1_layer, "GRID_GID")
    fp_area_idx = layer_field_index(tmp_join1_layer, AREA_FIELD_NAME)
    fp_ht_idx = layer_field_index(tmp_join1_layer, HT_FIELD_NAME)
    fp_has_height = False
    for _f in layer_features(tmp_join1_layer):
        zone_gid = _f.attributeMap()[zone_gid_idx].toString()
        grid_gid = _f.attributeMap()[grid_gid_idx].toString()
        # area comes from geometry, always exists
        area = _f.attributeMap()[fp_area_idx].toDouble()[0]
        ht = _f.attributeMap()[fp_ht_idx].toDouble()[0]
        if ht > 0:
            fp_has_height = True
        area *= ht  # this is the actual area to be aggregated at the end
        self._update_stat(zone_fp_stat, '%s|%s' % (grid_gid, zone_gid), 1, area)
        self._update_stat(zone_totals, zone_gid, 1, area)

    # 5. generate grid with adjusted building counts
    fields = {
        0: QgsField(GID_FIELD_NAME, QVariant.String),
        1: QgsField(zone_field, QVariant.String),
        2: QgsField(CNT_FIELD_NAME, QVariant.Double),
        3: QgsField(AREA_FIELD_NAME, QVariant.Double),
    }
    output_layername = 'grid_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                 QGis.WKBPolygon, self._crs, "ESRI Shapefile")
    f = QgsFeature()
    for key in zone_stat2.keys():
        (grid_gid, zone_gid) = str(key).split("|")
        # overall statistics for the zone from zone file (always exists)
        s_zone = zone_stat[QString(zone_gid)]
        # grid specific statistic from zone file (always exists)
        s_zone_grid = zone_stat2[key]
        if QString(zone_gid) in zone_totals:
            # overall statistics for the zone from footprints
            s_total = zone_totals[QString(zone_gid)]
        else:
            s_total = [0, 0]  # set to zero if missing
        if key in zone_fp_stat:
            # grid specific statistic from footprint
            s_fp = zone_fp_stat[key]
        else:
            s_fp = [0, 0]  # set to zero if missing

        zone_leftover_count = s_zone[cnt_idx] - s_total[cnt_idx]
        if zone_has_area:
            zone_leftover_area = zone_area[QString(zone_gid)] - s_total[area_idx]
        else:
            zone_leftover_area = s_zone[area_idx] - s_total[area_idx]
        if zone_leftover_count > 0:
            # there are still buildings not accounted for;
            # distribute to grid based on ratio of grid leftover area over
            # zone leftover area (leftover area is area of zone after
            # subtracting footprint areas)
            grid_leftover_count = zone_leftover_count * \
                ((s_zone_grid[area_idx] - s_fp[area_idx]) / zone_leftover_area)
            grid_count = s_fp[cnt_idx] + grid_leftover_count
        else:
            grid_count = s_fp[cnt_idx]

        if fp_has_height:
            # area can be actual area based on footprint area * height
            area = s_fp[area_idx]
        elif zone_has_area:
            area = s_zone_grid[area_idx]
        else:
            # no area defined
            area = 0  # max(s_zone_grid[area_idx], s_fp[area_idx])

        f.setGeometry(self._outputGeometryFromGridId(grid_gid))
        f.addAttribute(0, grid_gid)
        f.addAttribute(1, zone_names[QString(zone_gid)])
        f.addAttribute(2, grid_count)
        f.addAttribute(3, area)
        writer.addFeature(f)
    del writer

    # clean up
    del tmp_grid_lyr1
    del tmp_join_layer
    del tmp_join1_layer
    remove_shapefile(tmp_grid1_file)
    remove_shapefile(tmp_join_file)
    remove_shapefile(tmp_join1_file)

    # store data in output
    self._load_output(output_file, output_layername)
def do_operation(self):
    """ generate a point grid covering the zone layer

    Builds a regular lon/lat point grid over the extent of the input
    zone layer (spacing self._x_off / self._y_off), removes the points
    falling outside the zones via intersection, and writes the surviving
    points to a new shapefile.

    inputs (via self.inputs):
        0 - zone layer (vector layer)
    outputs (via self.outputs):
        0 - resulting point-grid layer
        1 - path to the shapefile backing output 0
    raises OperatorError on grid creation, intersection, or write failure.
    (NOTE(review): original docstring said "footprint load operation",
    which does not match what this method does.)
    """
    # input/output data checking already done during property set
    zone_layer = self.inputs[0].value

    # make sure input is correct
    # NOTE: these checks cannot be performed at set input time
    # because the data layer may not be loaded yet
    self._test_layer_loaded(zone_layer)

    x_off = self._x_off
    y_off = self._y_off
    extent = zone_layer.extent()
    [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(),
                                    extent.xMaximum(), extent.yMaximum()]

    # create grid based on extent of given region
    tmp_grid1 = 'grid_' + get_unique_filename()
    tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
    try:
        self._write_grid_shapefile(tmp_grid1_file,
                                   x_min, y_min, x_max, y_max,
                                   x_off, y_off)
    except Exception:
        # FIX: was a bare "except:", which also swallowed
        # SystemExit/KeyboardInterrupt
        remove_shapefile(tmp_grid1_file)
        raise OperatorError('error creating temporary grid', self.__class__)
    tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)

    # temporary grid for joined shape with all grid points not within
    # the region removed
    tmp_grid2 = 'grid_' + get_unique_filename()
    tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
    tmp_grid2_layer = None
    try:
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
        tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
    except Exception:
        raise OperatorError('error creating grid', self.__class__)

    # create result layer
    grid_layername = 'grid_%s' % get_unique_filename()
    grid_file = self._tmp_dir + grid_layername + '.shp'
    try:
        writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                     QGis.WKBPoint, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
        lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)
        for _f in layer_features(tmp_grid2_layer):
            lon = _f.attributeMap()[lon_idx].toDouble()[0]
            lat = _f.attributeMap()[lat_idx].toDouble()[0]
            f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
            f.addAttribute(0, QVariant(lon))
            f.addAttribute(1, QVariant(lat))
            writer.addFeature(f)
        del writer
    except Exception as err:
        logAPICall.log(str(err), logAPICall.ERROR)
        raise OperatorError('error writing out grid', self.__class__)

    grid_layer = load_shapefile(grid_file, grid_layername)
    if not grid_layer:
        # FIX: original message had no %s placeholder, so the "%" operator
        # raised TypeError instead of the intended OperatorError
        raise OperatorError('Error loading result grid file %s' % grid_file,
                            self.__class__)

    # clean up
    del analyzer, tmp_grid1_layer, tmp_grid2_layer
    remove_shapefile(tmp_grid1_file)
    remove_shapefile(tmp_grid2_file)

    self.outputs[0].value = grid_layer
    self.outputs[1].value = grid_file
def do_operation(self):
    """ generate a point grid covering the zone layer

    Builds a regular lon/lat point grid over the extent of the input
    zone layer (spacing self._x_off / self._y_off), removes the points
    falling outside the zones via intersection, and writes the surviving
    points to a new shapefile.

    inputs (via self.inputs):
        0 - zone layer (vector layer)
    outputs (via self.outputs):
        0 - resulting point-grid layer
        1 - path to the shapefile backing output 0
    raises OperatorError on grid creation, intersection, or write failure.
    (NOTE(review): original docstring said "footprint load operation",
    which does not match what this method does.)
    """
    # input/output data checking already done during property set
    zone_layer = self.inputs[0].value

    # make sure input is correct
    # NOTE: these checks cannot be performed at set input time
    # because the data layer may not be loaded yet
    self._test_layer_loaded(zone_layer)

    x_off = self._x_off
    y_off = self._y_off
    extent = zone_layer.extent()
    [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(),
                                    extent.xMaximum(), extent.yMaximum()]

    # create grid based on extent of given region
    tmp_grid1 = 'grid_' + get_unique_filename()
    tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'
    try:
        self._write_grid_shapefile(tmp_grid1_file,
                                   x_min, y_min, x_max, y_max,
                                   x_off, y_off)
    except Exception:
        # FIX: was a bare "except:", which also swallowed
        # SystemExit/KeyboardInterrupt
        remove_shapefile(tmp_grid1_file)
        raise OperatorError('error creating temporary grid', self.__class__)
    tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)

    # temporary grid for joined shape with all grid points not within
    # the region removed
    tmp_grid2 = 'grid_' + get_unique_filename()
    tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
    tmp_grid2_layer = None
    try:
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
        tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
    except Exception:
        raise OperatorError('error creating grid', self.__class__)

    # create result layer
    grid_layername = 'grid_%s' % get_unique_filename()
    grid_file = self._tmp_dir + grid_layername + '.shp'
    try:
        writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                     QGis.WKBPoint, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
        lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)
        for _f in layer_features(tmp_grid2_layer):
            lon = _f.attributeMap()[lon_idx].toDouble()[0]
            lat = _f.attributeMap()[lat_idx].toDouble()[0]
            f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
            f.addAttribute(0, QVariant(lon))
            f.addAttribute(1, QVariant(lat))
            writer.addFeature(f)
        del writer
    except Exception as err:
        logAPICall.log(str(err), logAPICall.ERROR)
        raise OperatorError('error writing out grid', self.__class__)

    grid_layer = load_shapefile(grid_file, grid_layername)
    if not grid_layer:
        # FIX: original message had no %s placeholder, so the "%" operator
        # raised TypeError instead of the intended OperatorError
        raise OperatorError('Error loading result grid file %s' % grid_file,
                            self.__class__)

    # clean up
    del analyzer, tmp_grid1_layer, tmp_grid2_layer
    remove_shapefile(tmp_grid1_file)
    remove_shapefile(tmp_grid2_file)

    self.outputs[0].value = grid_layer
    self.outputs[1].value = grid_file
def do_operation(self):
    """ perform create mapping scheme operation

    Intersects a population grid with the zone layer, converts each grid
    cell's population count into a building count using the pop_to_bldg
    ratio (persons per building), aggregates the estimates per zone, and
    writes a new zone shapefile with the estimated building count.

    inputs (via self.inputs):
        0 - zone layer (vector layer)
        1 - zone field name (str)
        2 - population grid layer (vector layer)
        3 - population-to-building ratio (numeric)
    outputs (via self.outputs):
        0 - resulting zone layer with building-count attribute
        1 - path to the shapefile backing output 0
    raises OperatorError if the intersection, write, or reload fails.
    """
    # input/output verification already performed during set input/output
    zone_layer = self.inputs[0].value
    zone_field = self.inputs[1].value
    popgrid_layer = self.inputs[2].value
    pop_to_bldg = float(self.inputs[3].value)

    # merge population grid with zones
    tmp_join = 'joined_%s' % get_unique_filename()
    tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)
    analyzer = QgsOverlayAnalyzer()
    try:
        analyzer.intersection(popgrid_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join)
    except AssertionError as err:
        raise OperatorError(str(err), self.__class__)
    except Exception as err:
        raise OperatorError(str(err), self.__class__)

    # tally estimated building count in each zone
    stats = {}
    _gid_idx = layer_field_index(tmp_join_layer, GID_FIELD_NAME + "_")
    _cnt_idx = layer_field_index(tmp_join_layer, CNT_FIELD_NAME)
    for _f in layer_features(tmp_join_layer):
        # retrieve population count and accumulate the building estimate
        _gid = _f.attributeMap()[_gid_idx].toString()
        _count = _f.attributeMap()[_cnt_idx].toString()
        if _gid in stats:
            stats[_gid] += float(_count) / pop_to_bldg
        else:
            stats[_gid] = float(_count) / pop_to_bldg

    output_layername = 'zone_%s' % get_unique_filename()
    output_file = '%s%s.shp' % (self._tmp_dir, output_layername)
    logAPICall.log('create outputfile %s ... ' % output_file, logAPICall.DEBUG)
    try:
        fields = {
            0: QgsField(GID_FIELD_NAME, QVariant.Int),
            1: QgsField(zone_field, QVariant.String),
            2: QgsField(CNT_FIELD_NAME, QVariant.Int),
        }
        writer = QgsVectorFileWriter(output_file, "utf-8", fields,
                                     QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for _f in layer_features(zone_layer):
            # copy zone geometry / GID / zone name, then attach the estimate
            f.setGeometry(_f.geometry())
            f.addAttribute(0, _f.attributeMap()[0])
            f.addAttribute(1, _f.attributeMap()[1])
            # retrieve count from statistic; zones without population data
            # fall back to zero
            try:
                gid = _f.attributeMap()[0].toString()
                bldg_count = stats[gid]
            except KeyError:
                bldg_count = 0
            f.addAttribute(2, QVariant(bldg_count))
            writer.addFeature(f)
        del writer, f
    except Exception as err:
        remove_shapefile(output_file)
        raise OperatorError("error creating zone: %s" % err, self.__class__)

    # clean up
    del tmp_join_layer
    remove_shapefile(tmp_join_file)

    # store data in output
    output_layer = load_shapefile(output_file, output_layername)
    if not output_layer:
        # FIX: original message had no %s placeholder, so the "%" operator
        # raised TypeError instead of the intended OperatorError (and the
        # message itself was copy-pasted from the centroid operator)
        raise OperatorError('Error loading zone output file %s' % output_file,
                            self.__class__)
    self.outputs[0].value = output_layer
    self.outputs[1].value = output_file