Example #1
 def do_operation(self):
     """ perform create mapping scheme operation """
     
     # input/output verification already performed during set input/output
     survey_layer = self.inputs[0].value
     tax_field = self._tax_field
     
     # merge to create stats
     ms = MappingScheme(self._taxonomy)
     stats = Statistics(self._taxonomy)
     ms.assign(MappingSchemeZone('ALL'), stats)
     
     # loop through all input features
     tax_idx = layer_field_index(survey_layer, tax_field)
     area_idx = layer_field_index(survey_layer, AREA_FIELD_NAME)
     cost_idx = layer_field_index(survey_layer, COST_FIELD_NAME)
     
     for _f in layer_features(survey_layer):
         _tax_str = str(_f.attributeMap()[tax_idx].toString())
         additional = {}
         _area = _f.attributeMap()[area_idx].toDouble()[0]
         if _area > 0:
             additional[StatisticNode.AverageSize] = _area
         _cost = _f.attributeMap()[cost_idx].toDouble()[0]
         if _cost > 0:
             # update the dict rather than rebuilding it, so a feature with
             # both area and cost keeps its AverageSize entry
             additional[StatisticNode.UnitCost] = _cost
         try:
             stats.add_case(_tax_str, self._parse_order, self._parse_modifiers, additional)
         except TaxonomyParseError as perr:
             logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING)
     
     # store data in output
     stats.finalize()        
     
     self.outputs[0].value = ms
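A hedged sketch of how an operator like this is driven, based on the inputs/outputs wiring in the workflow-builder examples below; the OperatorDataTypes.Survey constant and running an operator outside a Workflow are assumptions, not confirmed API:

# Sketch only: SurveyOnlyMSCreator, OperatorData and the inputs/outputs
# pattern appear in the workflow-builder examples below; the Survey data
# type constant and the standalone invocation are assumptions.
op = SurveyOnlyMSCreator(operator_options)
op.inputs = [OperatorData(OperatorDataTypes.Survey, survey_layer)]
op.outputs = [OperatorData(OperatorDataTypes.MappingScheme)]
op.do_operation()               # runs the method shown above
ms = op.outputs[0].value        # the resulting MappingScheme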
Example #2
File: grids.py Project: gem/sidd
    def _create_grid(self, grid_name, grid_file, x_min, y_min, x_max, y_max, x_off, y_off):
        x_off2, y_off2 = x_off / 2.0, y_off / 2.0
        x_min = floor(x_min / x_off) * x_off
        x_max = ceil(x_max / x_off) * x_off
        y_min = floor(y_min / y_off) * y_off
        y_max = ceil(y_max / y_off) * y_off
        
        xtotal = int((x_max - x_min) / x_off)+1
        ytotal = int((y_max - y_min) / y_off)+1

        logAPICall.log('x_min %f x_max %f y_min %f y_max %f x_off %f y_off %f xtotal %d, ytotal %d'
                       % (x_min, x_max, y_min, y_max, x_off, y_off, xtotal, ytotal),
                       logAPICall.DEBUG_L2)
        fields = {
            0 : QgsField('GRID_GID', QVariant.String),            
        }
        writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPolygon, self._crs, "ESRI Shapefile")
        f = QgsFeature()
        for x in range(xtotal):
            for y in range(ytotal):
                lon = x_min + (x * x_off) + (x_off2)
                lat = y_min + (y * y_off) + (y_off2)                
                #out_geom = QgsGeometry.fromRect(QgsRectangle(lon-x_off2, lat-y_off2,
                #                                             lon+x_off2, lat+y_off2))                                
                f.setGeometry(self._outputGeometryFromLatLon(lat, lon))                
                f.addAttribute(0, QVariant(latlon_to_grid(lat, lon)))                
                writer.addFeature(f)
        del writer 
        return load_shapefile(grid_file, grid_name)        
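The floor/ceil arithmetic above snaps the bounding box outward to whole grid cells before counting cell centers; a small worked example in plain Python:

# Pure-Python illustration of the snapping in _create_grid (no QGIS needed).
from math import floor, ceil
x_min, x_max, x_off = 20.13, 21.87, 0.5
x_min = floor(x_min / x_off) * x_off        # 20.0 (snapped down)
x_max = ceil(x_max / x_off) * x_off         # 22.0 (snapped up)
xtotal = int((x_max - x_min) / x_off) + 1   # 5 columns, both edges inclusive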
Example #3
 def build_exposure_steps(self):
     """ building exposure database from workflow """
     if not self.workflow.ready:
         raise SIDDException('Cannot create exposure with current datasets. Please revise input')
     
     if not self.ms.is_valid:
         raise SIDDException('Current mapping scheme is not valid')
     
     for zone in self.ms.zones:
         zone.stats.refresh_leaves(with_modifier=True, order_attributes=True)
     
     if getattr(self, 'exposure', None) is not None:
         del self.exposure
         remove_shapefile(self.exposure_file)
     
     for op in self.workflow.nextstep():
         yield op
     
     # when all steps are completed, set resulting exposure
     self.exposure = self.workflow.operator_data['exposure'].value
     self.exposure_file = self.workflow.operator_data['exposure_file'].value
     if self.workflow.operator_data.has_key('exposure_grid'):
         self.exposure_grid = self.workflow.operator_data['exposure_grid'].value
     
     logAPICall.log('exposure data created %s' % self.exposure_file, logAPICall.INFO)    
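Note that build_exposure_steps is a generator: the assignments after the loop only run once the caller has exhausted it. A minimal driver sketch, assuming a hypothetical project object and that each yielded step is an operator with the do_operation method seen in the other examples:

# Sketch: the caller must execute each yielded step, otherwise the code
# after the yield loop in build_exposure_steps never runs.
for op in project.build_exposure_steps():
    op.do_operation()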
Example #4
    def verify_result(self):
        """
        run data quality tests 
        """
        builder = WorkflowBuilder(self.operator_options)
        try:
            verify_workflow = builder.build_verify_result_workflow(self)
        except WorkflowException as err:
            raise SIDDException(
                "error creating workflow for result verification\n%s" % err)
        # process workflow
        for step in verify_workflow.nextstep():
            try:
                step.do_operation()
            except Exception as err:
                logAPICall.log(err, logAPICall.WARNING)
                pass

        self.quality_reports = {}
        if verify_workflow.operator_data.has_key('frag_report'):
            self.quality_reports[
                'fragmentation'] = verify_workflow.operator_data[
                    'frag_report'].value
        if verify_workflow.operator_data.has_key('count_report'):
            self.quality_reports['count'] = verify_workflow.operator_data[
                'count_report'].value
            try:
                if self.zone_type == ZonesTypes.LanduseCount and self.output_type == OutputTypes.Grid:
                    self.quality_reports['count']['_note'] = ''
            except:
                pass

        logAPICall.log('result verification completed', logAPICall.INFO)
Example #5
 def get_project_data(self, attrib):        
     if self.db.has_key(attrib):
         logAPICall.log('read from db %s => %s ' % (attrib, str(self.db[attrib])[0:25]), logAPICall.DEBUG_L2)
         return self.db[attrib]
     else:
         logAPICall.log('%s does not exist in db' % attrib, logAPICall.DEBUG_L2)
         return None
Example #6
File: workflow.py Project: gem/sidd
    def build_processing_chain(self, project, workflow):
        for [data, out_type, func] in self._process_chains:
            # match all inputs
            input_matches = True
            logAPICall.log(
                '\tcheck data input chain %s %s=> %s' %
                (data, out_type, func.__name__), logAPICall.DEBUG_L2)
            for data_item in data:
                if not workflow.operator_data.has_key(data_item):
                    input_matches = False
                    break
            if not input_matches:
                continue

            # match output type
            if out_type != project.output_type:
                continue

            # build process chain
            func(project, workflow)

            # done
            return

        # no match for input and output type
        # cannot do anything with given data
        raise WorkflowException(WorkflowErrors.NoActionDefined)
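Each entry in self._process_chains is a [required input keys, output type, builder] triple; a hypothetical registration matching the shape the loop above consumes (the key names and builder method are illustrative, not from the source):

# Hypothetical entry; only the triple's shape is taken from the loop above.
self._process_chains.append(
    [['zone', 'zone_field'],         # keys that must exist in operator_data
     OutputTypes.Grid,               # project.output_type to match
     self._build_zone_grid_chain])   # invoked as func(project, workflow)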
Example #7
File: join.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value                
        fp_layer = self.inputs[2].value
        
        # merge with zone to get assignment
        tmp_join = 'joined_%s' % get_unique_filename()
        tmp_join_file = '%s%s.shp' % (self._tmp_dir, tmp_join)        
        analyzer = QgsOverlayAnalyzer()        
        try:
            analyzer.intersection(fp_layer, zone_layer, tmp_join_file)
            tmp_join_layer = load_shapefile_verify(tmp_join_file, tmp_join, [zone_field])
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        except Exception as err:
            raise OperatorError(str(err), self.__class__)
        
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(zone_field, QVariant.String),
        }
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        fp_layername = 'fpc_%s' % get_unique_filename()
        fp_file = '%s%s.shp' % (self._tmp_dir, fp_layername)
        try:
            writer = QgsVectorFileWriter(fp_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            for _f in layer_features(tmp_join_layer):                
                centroid = _f.geometry().centroid().asPoint()
                lon = centroid.x()
                lat = centroid.y()
                zone_str = str(_f.attributeMap()[zone_idx].toString()).upper()

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                f.addAttribute(2, QVariant(zone_str))
                writer.addFeature(f)
            
            del writer
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            remove_shapefile(fp_file)
            raise OperatorError("error creating joined grid: %s" % err, self.__class__)
        
        # load shapefile as layer
        fp_layer = load_shapefile(fp_file, fp_layername)
        if not fp_layer:
            raise OperatorError('Error loading footprint centroid file %s' % fp_file, self.__class__)
                
        # clean up
        del tmp_join_layer        
        remove_shapefile(tmp_join_file)
        
        self.outputs[0].value = fp_layer
        self.outputs[1].value = fp_file
Example #8
File: node.py Project: gem/sidd
 def from_xml(self, xmlnode):
     """ construct node and children from XML """  
     self.name = get_node_attrib(xmlnode, 'attribute')      
     self.value = get_node_attrib(xmlnode, 'value')
     self.level = int(get_node_attrib(xmlnode, 'level'))
     self.weight = float(get_node_attrib(xmlnode, 'weight'))
     self.count = self.weight
     self.is_default = str(get_node_attrib(xmlnode, 'is_default')).upper()=='TRUE'
     self.is_skipped = str(get_node_attrib(xmlnode, 'is_skipped')).upper()=='TRUE'
     
     for add_node in xmlnode.findall('additional'):
         for idx, label in enumerate(self.label_additional):
             add_value = get_node_attrib(add_node, label)
             if add_value != '':
                 self.additional[idx] = add_value
     
     for mod_node in xmlnode.findall('modifiers/modifier'):
         mod = StatisticModifier()
         mod.from_xml(mod_node)
         self.modifiers.append(mod)
         
     for childnode in xmlnode.findall('children/node'):
         logAPICall.log('created new child with xmlnode %s' % childnode, logAPICall.DEBUG_L2)
         node = StatisticNode(self)
         node.from_xml(childnode)
         self.children.append(node)
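The element shape from_xml consumes can be read off the accesses above; a hedged illustration with invented values (the attributes of the additional element depend on self.label_additional):

# Hedged illustration of the XML from_xml parses; tag and attribute names
# are taken from the reads above, the values are invented.
SAMPLE_NODE_XML = """
<node attribute="HEIGHT" value="3" level="1" weight="40.0"
      is_default="false" is_skipped="false">
    <additional/>
    <modifiers>
        <modifier/>
    </modifiers>
    <children/>
</node>
"""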
Example #9
File: grid.py Project: ImageCatInc/sidd
 def _write_grid_shapefile(self, path, x_min, y_min, x_max, y_max, x_off, y_off):
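     # NOTE: the x_off/y_off arguments are immediately superseded by the
     #       instance offsets below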
     x_off = self._x_off        
     y_off = self._y_off        
     x_min = floor(x_min / x_off) * x_off
     x_max = ceil(x_max / x_off) * x_off
     y_min = floor(y_min / y_off) * y_off
     y_max = ceil(y_max / y_off) * y_off
     
     xtotal = int((x_max - x_min) / x_off)
     ytotal = int((y_max - y_min) / y_off)
     
     logAPICall.log('x_min %f x_max %f y_min %f y_max %f x_off %f y_off %f xtotal %d, ytotal %d'
                    % (x_min, x_max, y_min, y_max, x_off, y_off, xtotal, ytotal),
                    logAPICall.DEBUG_L2)
     
     writer = QgsVectorFileWriter(path, "utf-8", self._fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
     f = QgsFeature()
     for x in range(xtotal):
         for y in range(ytotal):
             lon = x_min + (x * x_off) + (x_off/2.0)
             lat = y_min + (y * y_off) + (y_off/2.0)
             f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
             f.addAttribute(0, QVariant(lon))
             f.addAttribute(1, QVariant(lat))
             writer.addFeature(f)
     del writer
Example #10
 def data(self, index, role):
     """ return data to be displayed in a cell """
     if role == Qt.DisplayRole:
         logAPICall.log('row %s column %s ' % (index.row(), index.column()),
                        logAPICall.DEBUG_L2)
         return QString("%s" % self.selected[index.row()][index.column()])
     else:
         return QVariant()
Example #11
 def index(self, row, column, parent):
     """ provide index to data given a cell """
     logAPICall.log('index row %s col %s parent %s' % (row, column, parent), logAPICall.DEBUG_L2)
     mod = self.get_modifier(row)
     if mod is not None:
         return self.createIndex(row, column, mod)
     else:
         return QModelIndex()
Example #12
 def save_project_data(self, attrib, value):
     if value is None:
         # delete
         logAPICall.log('delete from db %s ' % (attrib), logAPICall.DEBUG_L2)
         if self.db.has_key(attrib):
             del self.db[attrib]
     else:
         logAPICall.log('save to db %s => %s ' % (attrib, str(value)[0:25]), logAPICall.DEBUG_L2)
         self.db[attrib] = str(value)
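Together with get_project_data in Example #5, this forms a small string-keyed persistence API over the bsddb store; a hedged usage sketch (the project object and key name are illustrative):

# Values are persisted via str(value), so round-trips come back as strings.
project.save_project_data('zone_field', 'LANDUSE')
project.get_project_data('zone_field')           # -> 'LANDUSE'
project.save_project_data('zone_field', None)    # deletes the key
project.get_project_data('zone_field')           # -> None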
Example #13
File: workflow.py Project: gem/sidd
    def build_ms_workflow(self, project, isEmpty=False):
        """ create mapping scheme creating workflow """
        workflow = Workflow()

        logAPICall.log('creating survey loader ...', logAPICall.DEBUG_L2)
        if not isEmpty:
            self.load_survey(project, workflow, False)

        if project.zone_type == ZonesTypes.None:
            if isEmpty:
                logAPICall.log('creating empty mapping scheme ...',
                               logAPICall.DEBUG_L2)
                ms_creator = EmptyMSCreator(self.operator_options)
                ms_creator.inputs = []
            else:
                logAPICall.log('creating mapping scheme from survey only ...',
                               logAPICall.DEBUG_L2)
                ms_creator = SurveyOnlyMSCreator(self.operator_options)
                ms_creator.inputs = [
                    workflow.operator_data['survey'],
                ]
        else:
            self.load_zone(project, workflow, False)
            if isEmpty:
                logAPICall.log('creating empty mapping scheme from zones ...',
                               logAPICall.DEBUG_L2)
                ms_creator = EmptyZonesMSCreator(self.operator_options)
                ms_creator.inputs = [
                    workflow.operator_data['zone'],
                    workflow.operator_data['zone_field'],
                ]
            else:
                logAPICall.log(
                    'creating mapping scheme from survey and zones ...',
                    logAPICall.DEBUG_L2)
                workflow.operator_data['zone_field'] = OperatorData(
                    OperatorDataTypes.StringAttribute, project.zone_field)
                ms_creator = SurveyZonesMSCreator(self.operator_options)
                ms_creator.inputs = [
                    workflow.operator_data['survey'],
                    workflow.operator_data['zone'],
                    workflow.operator_data['zone_field'],
                ]

        workflow.operator_data['ms'] = OperatorData(
            OperatorDataTypes.MappingScheme)
        ms_creator.outputs = [
            workflow.operator_data['ms'],
        ]
        workflow.operators.append(ms_creator)
        workflow.ready = True

        return workflow
Example #14
    def verify_data(self):
        """ verify existing data and create workflow """
        # build workflow based on current data
        builder = WorkflowBuilder(self.operator_options)
        self.workflow = builder.build_workflow(self)

        if self.workflow.ready:
            self.status = ProjectStatus.ReadyForExposure
        else:
            self.status = ProjectStatus.ReadyForMS
        self.errors = self.workflow.errors
        self.exposure = None
        logAPICall.log('input verification completed', logAPICall.INFO)
Example #15
 def set_project_path(self, project_file):
     try:
         if (not os.path.exists(project_file)):
             shutil.copyfile(FILE_PROJ_TEMPLATE, project_file)
         self.db = bsddb.btopen(project_file, 'c')
         self.version_major = self.get_project_data('version_major')
         self.version_minor = self.get_project_data('version_minor')
         logAPICall.log('opening project file version %s.%s' % (self.version_major, self.version_minor),
                        logAPICall.INFO)
         self.project_file = project_file
         self.require_save = True
     except:
         raise SIDDProjectException(ProjectErrors.FileFormatError)
Example #16
 def export_data(self):
     """ export exposure data """
     builder = WorkflowBuilder(self.operator_options)
     try:
         export_workflow = builder.build_export_workflow(self)
     except WorkflowException as err:
         raise SIDDException("error creating workflow for exporting data\n%s" % err)
     try:
         # process workflow 
         export_workflow.process()
         logAPICall.log('data export completed', logAPICall.INFO)            
     except Exception as err:
         raise SIDDException("error exporting data\n" % err)
Example #17
File: node.py Project: gem/sidd
    def add(self, attr_vals, parse_order, level, additional_data={}):
        """ 
        recursively update statistic @ node and @ child nodes
        using attr_val, defaults, skips at idx
        """
        # increment count of current node
        self.count+=1
        
        # the ending condition for the recursive call
        # NOTE: is_leaf is not used here; this process should work on an empty tree
        if (len(parse_order) <= level):
            # leaf nodes also aggregate additional data
            self.increment_additonal(self.AverageSize, additional_data)            
            self.increment_additonal(self.UnitCost, additional_data)
            return
        
        logAPICall.log('processing %d %s' %(level, parse_order[level]), logAPICall.DEBUG)

        # get value to add/update children
        # NOTE: value for current node is already set by its parent
        # all processing/parsing is to work on its children        
        attr_name = parse_order[level]
        value = None
        for val in attr_vals:
            if val.attribute.name == attr_name:
                value = val
                break

        # handle default cases
        is_default = False                
        if value is None:
            is_default = True 
        elif value is not None and (str(value) == value.attribute.default or str(value) == value.attribute.group.default):
            value = None
            is_default = True                       
        
        logAPICall.log('\tnode:%s' %(value), logAPICall.DEBUG_L2)
        
        child_found = False
        # find children and add value/modifier
        for child in self.children:
            if (child.value is None and value is None) or str(child.value) == str(value):
                logAPICall.log('found child with %s' % value, logAPICall.DEBUG_L2)
                child_found = True                
                # recursive call to process next level
                child.add(attr_vals, parse_order, level+1, additional_data)
                return 

        # if no children found, then add new node for value and add modifier
        if not child_found:
            logAPICall.log('create new child with %s' % value, logAPICall.DEBUG_L2)
            child = StatisticNode(self, attr_name, value, self.level+1, is_default, False)
            self.children.append(child)
            # recursive call to process next level
            child.add(attr_vals, parse_order, level+1, additional_data)
        return        
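A hedged sketch of how one case enters this recursion; StatisticNode(None) as the root constructor and the parse_order names are assumptions, and attr_vals would be parsed taxonomy values whose .attribute.name matches entries of parse_order:

# Sketch only: pushes a single case down from the root, creating one child
# per parse_order attribute as the recursion descends.
root = StatisticNode(None)                        # assumed root constructor
root.add(attr_vals, ['material', 'height'], 0)    # start at level 0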
Example #18
 def clean_up(self):
     """ cleanup """
     try:            
         logAPICall.log('attempt to delete project temp dir %s' % self.temp_dir, logAPICall.DEBUG)
         if os.path.exists(self.temp_dir):                
             del self.workflow
             if self.exposure is not None:
                 del self.exposure   # must delete QGIS layer, otherwise exposure_file becomes locked
                                     # and will generate error on shutil.rmtree
             shutil.rmtree(self.temp_dir)
     except Exception as err:            
         logAPICall.log('failed to delete temporary directory: %s' % str(err), logAPICall.WARNING)
     try:
         if self.project_file is not None and self.db is not None:
             self.db.close()
     except Exception:
         pass
Example #19
File: workflow.py Project: gem/sidd
    def build_sampling_ms_workflow(self, project):
        """ create workflow for mapping scheme using stratified sampling methodology """
        workflow = Workflow()

        logAPICall.log('creating survey loader ...', logAPICall.DEBUG_L2)

        # test to make sure all necessary data is available
        if project.fp_type != FootprintTypes.FootprintHt:
            workflow.add_error(WorkflowErrors.NeedsHeight)
        if project.zone_type == ZonesTypes.None:
            workflow.add_error(WorkflowErrors.NeedsZone)
        if project.survey_type == SurveyTypes.None:
            workflow.add_error(WorkflowErrors.NeedSurvey)

        if workflow.has_error():
            return workflow

        # data loading
        self.load_footprint(project, workflow, True)
        self.load_survey(project, workflow, False)
        self.load_zone(project, workflow, False)

        # create operator for mapping scheme
        workflow.operator_data['zone_field'] = OperatorData(
            OperatorDataTypes.StringAttribute, project.zone_field)
        ms_creator = StratifiedMSCreator(self.operator_options)
        ms_creator.inputs = [
            workflow.operator_data['fp'],
            OperatorData(OperatorDataTypes.StringAttribute, AREA_FIELD_NAME),
            OperatorData(OperatorDataTypes.StringAttribute, HT_FIELD_NAME),
            workflow.operator_data['zone'],
            workflow.operator_data['zone_field'],
            workflow.operator_data['survey'],
        ]

        workflow.operator_data['ms'] = OperatorData(
            OperatorDataTypes.MappingScheme)
        workflow.operator_data['zone_stats'] = OperatorData(
            OperatorDataTypes.ZoneStatistic)
        ms_creator.outputs = [
            workflow.operator_data['ms'], workflow.operator_data['zone_stats']
        ]
        workflow.operators.append(ms_creator)
        workflow.ready = True
        return workflow
Example #20
 def data(self, index, role):
     """ data for cells """
     col, row = index.column(), index.row()
     logAPICall.log('data col %s row %s' % (row, col), logAPICall.DEBUG_L2)
     
     if role == Qt.DisplayRole:
         # construct data for display in table
         _mod = self.get_modifier(row)
         _idx = row - _mod[self.STR_INDEX]
         if (col < self.STR_INDEX):
             # for first 4 columns, only first row in new modifier
             # need to show the headings
             if (_idx == 0):
                 return QVariant(_mod[col])
             else:
                 return QVariant()
         else:
             # for last 2 columns, show modifier value and associated percentage
             for _key in sorted(_mod[self.MOD_INDEX].keys()):
                 if (_idx == 0):
                     if (col == self.STR_INDEX):
                         return QVariant(_key)
                     else:
                         return QVariant("%.2f" %_mod[self.MOD_INDEX].value(_key))
                 else:
                     _idx -=1
     elif role == Qt.ToolTipRole:
         # construct data for display in tooltip
         _mod = self.get_modifier(row)
         _idx = row - _mod[self.STR_INDEX]
         if col == 1:
             if (_idx == 0):                    
                 return build_attribute_tooltip(self.valid_codes, self.ms.taxonomy.parse(_mod[col]))
             else:
                 return QVariant()
         elif col == 2:
             _key = sorted(_mod[self.MOD_INDEX].keys())[_idx]
             if _key is not None:
                 return build_attribute_tooltip(self.valid_codes, self.ms.taxonomy.parse(_key))
         else:
             return QVariant("")
     else:
         return QVariant()
示例#32
0
 def clean_up(self):
     """ cleanup """
     try:
         logAPICall.log(
             'attempt to delete project temp dir %s' % self.temp_dir,
             logAPICall.DEBUG)
         if os.path.exists(self.temp_dir):
             del self.workflow
             if self.exposure is not None:
                 del self.exposure  # must delete QGIS layer, otherwise exposure_file becomes locked
                 # and will generate error on shutil.rmtree
             shutil.rmtree(self.temp_dir)
     except Exception as err:
         logAPICall.log(
             'failed to delete temporary directory: %s' % str(err),
             logAPICall.WARNING)
     try:
         if self.project_file is not None and self.db is not None:
             self.db.close()
     except Exception:
         pass
Example #21
 def export_ms(self, path, export_format):
     """ 
     export mapping scheme according to given format
     see constants.MSExportTypes for type supported
     """
     if self.ms is None:
         raise SIDDException('Mapping Scheme is required for this action')
     
     builder = WorkflowBuilder(self.operator_options)
     try:
         if export_format == MSExportTypes.XML:
             export_workflow = builder.build_export_ms_workflow(self, path)
         else:
             export_workflow = builder.build_export_distribution_workflow(self, path)
         export_workflow.process()
         logAPICall.log('data export completed', logAPICall.INFO)
         return True  # success; the handlers below return False
     except WorkflowException:
         return False
     except Exception as err:
         logAPICall.log(err, logAPICall.ERROR)
         return False
示例#34
0
    def build_ms_workflow(self, project, isEmpty=False):
        """ create mapping scheme creating workflow """
        workflow = Workflow()
        
        logAPICall.log('creating survey loader ...', logAPICall.DEBUG_L2)
        if not isEmpty:
            self.load_survey(project, workflow, False)

        if project.zone_type == ZonesTypes.None:
            if isEmpty:
                logAPICall.log('creating empty mapping scheme ...', logAPICall.DEBUG_L2)
                ms_creator = EmptyMSCreator(self.operator_options)
                ms_creator.inputs = []
            else:
                logAPICall.log('creating mapping scheme from survey only ...', logAPICall.DEBUG_L2)            
                ms_creator = SurveyOnlyMSCreator(self.operator_options)
                ms_creator.inputs = [workflow.operator_data['survey'],]
        else:
            self.load_zone(project, workflow, False)
            if isEmpty:                
                logAPICall.log('creating empty mapping scheme from zones ...', logAPICall.DEBUG_L2)
                ms_creator = EmptyZonesMSCreator(self.operator_options)
                ms_creator.inputs = [
                    workflow.operator_data['zone'],
                    workflow.operator_data['zone_field'],]
            else:
                logAPICall.log('creating mapping scheme from survey and zones ...', logAPICall.DEBUG_L2)
                workflow.operator_data['zone_field'] = OperatorData(OperatorDataTypes.StringAttribute, project.zone_field)
                ms_creator = SurveyZonesMSCreator(self.operator_options)
                ms_creator.inputs = [
                    workflow.operator_data['survey'],
                    workflow.operator_data['zone'],
                    workflow.operator_data['zone_field'],]
        
        workflow.operator_data['ms'] = OperatorData(OperatorDataTypes.MappingScheme)
        ms_creator.outputs = [workflow.operator_data['ms'],]
        workflow.operators.append(ms_creator)
        workflow.ready=True
        
        return workflow
Example #22
File: ms.py Project: gem/sidd
    def append_branch(self, node, branch):
        """ append a branch (from library) to a node in a mapping scheme tree """

        stat_tree = None
        if type(node) == MappingSchemeZone:
            # selected node is zone
            # retrieve root node from stats tree
            stat_tree = self.get_assignment(node)
            node_to_attach = node.stats.get_tree()
        else:
            node_to_attach = node
            stat_tree = self.get_assignment_by_node(node_to_attach)

        if stat_tree is None:
            raise SIDDException(
                'selected node does not belong to mapping scheme')

        # add branch as child
        if type(branch) == MappingSchemeZone:
            # branch starts from zone node, so it is a full stats tree
            # add only the child nodes
            logAPICall.log('branch is zone, add children', logAPICall.DEBUG_L2)

            # test to make sure append is valid
            # exception will be thrown in case of error
            for child in branch.stats.get_tree().children:
                stat_tree.test_repeated_attribute(node_to_attach, child)
                stat_tree.test_repeated_value(node_to_attach, child)

            for child in branch.stats.get_tree().children:
                stat_tree.add_branch(node_to_attach,
                                     child,
                                     test_repeating=False,
                                     update_stats=False)
            node_to_attach.balance_weights()
        else:
            # branch is from a tree
            # add branch as child node
            logAPICall.log('branch is node, add branch', logAPICall.DEBUG_L2)
            stat_tree.add_branch(node_to_attach, branch)
Example #23
 def load_data(self, input_param, layer, output_file):
     input_file = getattr(self, input_param, None)
     if input_file is not None:
         builder = WorkflowBuilder(self.operator_options)
         # create workflow
         if input_param == 'fp_file':
             workflow = builder.build_load_fp_workflow(self)
         elif input_param == 'zone_file':
             workflow = builder.build_load_zones_workflow(self)
         elif input_param == 'survey_file':
             workflow = builder.build_load_survey_workflow(self)
         elif input_param == 'popgrid_file':
             workflow = builder.build_load_popgrid_workflow(self)
         else:
             raise Exception('Data Type Not Recognized %s' % input_param)
         
         if not workflow.ready:
             raise Exception('Cannot load data with %s' % input_param)
         workflow.process()
         
         logAPICall.log('data file %s loaded' % input_file, logAPICall.INFO)
         return workflow.operator_data[layer].value, workflow.operator_data[output_file].value
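A hedged call sketch; the 'zone' operator_data key mirrors the workflow examples above, while 'zone_file' as an output key and the project object are assumptions:

zone_layer, zone_path = project.load_data('zone_file', 'zone', 'zone_file')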
Example #24
File: grid.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        grid_layer = self.inputs[0].value
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(grid_layer)
                
        grid_fields = grid_layer.dataProvider().fields()
        
        output_layername = 'grid_%s' % get_unique_filename()
        output_file = self._tmp_dir + output_layername + '.shp'        
        
        half_grid = DEFAULT_GRID_SIZE / 2.0
        try:            
            writer = QgsVectorFileWriter(output_file, "utf-8", grid_fields,
                                         QGis.WKBPolygon, grid_layer.crs(), "ESRI Shapefile")
            out_f = QgsFeature()
            for in_f in layer_features(grid_layer):
                in_point = in_f.geometry().asPoint()                
                out_geom = QgsGeometry.fromRect(QgsRectangle(in_point.x()-half_grid, in_point.y()-half_grid,
                                                             in_point.x()+half_grid, in_point.y()+half_grid))            
                out_f.setGeometry(out_geom)
                out_f.setAttributeMap(in_f.attributeMap())
                writer.addFeature(out_f)
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid: %s' % err, self.__class__)

        # load shapefile as layer        
        output_layer = load_shapefile(output_file, output_layername)
        if not output_layer:            
            raise OperatorError('Error loading generated file %s' % (output_file), self.__class__)        

        # store data in output
        self.outputs[0].value = output_layer
        self.outputs[1].value = output_file
Example #25
    def do_build_ms(self, isEmpty=False, useSampling=False):
        """ create mapping scheme """
        builder = WorkflowBuilder(self.operator_options)
        # force reload existing survey
        self.survey = None

        # create workflow
        if useSampling:
            ms_workflow = builder.build_sampling_ms_workflow(self)
        else:
            ms_workflow = builder.build_ms_workflow(self, isEmpty)
        if not ms_workflow.ready:
            raise SIDDException(ms_workflow.errors)

        # process workflow
        ms_workflow.process()
        self.ms = ms_workflow.operator_data['ms'].value
        if useSampling:
            self.zone_stats = ms_workflow.operator_data['zone_stats'].value
        for zone, stats in self.ms.assignments():
            stats.refresh_leaves()

        logAPICall.log('mapping scheme created', logAPICall.INFO)
        self.require_save = True
Example #26
File: grid.py Project: ImageCatInc/sidd
    def do_operation(self):
        """ perform footprint load operation """
        
        # input/output data checking already done during property set         
        zone_layer = self.inputs[0].value        
        
        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)
                
        x_off = self._x_off
        y_off = self._y_off

        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [extent.xMinimum(), extent.yMinimum(), extent.xMaximum(), extent.yMaximum()]

        # create grid based on extent of given region 
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'

        try:
            self._write_grid_shapefile(tmp_grid1_file,
                                       x_min, y_min, x_max, y_max,
                                       x_off, y_off)
        except:
            remove_shapefile(tmp_grid1_file)
            raise OperatorError('error creating temporary grid', self.__class__)        
        
        tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)
        
        # temporary grid for joined shape with all grid points not within region removed 
        tmp_grid2 = 'grid_' + get_unique_filename()
        tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
        tmp_grid2_layer = None
        try:
            analyzer = QgsOverlayAnalyzer()        
            analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
            tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
        except:
            raise OperatorError('error creating grid', self.__class__)

        # create result layer
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = self._tmp_dir + grid_layername + '.shp'
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                         QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
            lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)        
            for _f in layer_features(tmp_grid2_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]
                
                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                writer.addFeature(f)                
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid', self.__class__)

        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % grid_file, self.__class__)
        
        # clean up
        del analyzer, tmp_grid1_layer, tmp_grid2_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_grid2_file)
        
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
Example #27
    def do_operation(self):
        """ perform create mapping scheme operation """
        
        # input/output verification already performed during set input/output
        fp_layer = self.inputs[0].value
        zone_field = self.inputs[1].value

        # aggregate footprint into grids
        logAPICall.log('aggregate statistic for grid ...', logAPICall.DEBUG)
        total_features = fp_layer.dataProvider().featureCount()
        if total_features > MAX_FEATURES_IN_MEMORY:
            # use bsddb to store temporary lat/lon
            tmp_db_file = '%sdb_%s.db' % (self._tmp_dir, get_unique_filename())
            db = bsddb.btopen(tmp_db_file, 'c')
            use_db = True
        else:
            db = {}
            use_db = False

        zone_idx = layer_field_index(fp_layer, zone_field)
        for f in layer_features(fp_layer):
            geom = f.geometry()
            zone_str = str(f.attributeMap()[zone_idx].toString())
            centroid = geom.centroid().asPoint()
            # use floor, this truncates all points within grid to grid's
            # bottom-left corner                        
            x = math.floor(centroid.x() / DEFAULT_GRID_SIZE)
            y = math.floor(centroid.y() / DEFAULT_GRID_SIZE)
            key = '%s %d %d' % (zone_str, x,y)
            if db.has_key(key):
                db[key] = str(int(db[key]) + 1)
            else:
                db[key] = '1'
        
        # output grid
        logAPICall.log('create grid ...', logAPICall.DEBUG)
        fields = {
            0 : QgsField(self._lon_field, QVariant.Double),
            1 : QgsField(self._lat_field, QVariant.Double),
            2 : QgsField(CNT_FIELD_NAME, QVariant.Double),
            3 : QgsField(zone_field, QVariant.String),
        }
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = '%s%s.shp' % (self._tmp_dir, grid_layername)
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", fields, QGis.WKBPoint, self._crs, "ESRI Shapefile")
            f = QgsFeature()
            for key, val in db.iteritems():
                (zone_str, x, y) = key.split(' ')
                # points were aggregated to the grid's bottom-left corner;
                # add half grid size to place point at center of grid
                point = QgsPoint(int(x)*DEFAULT_GRID_SIZE+(DEFAULT_GRID_SIZE/2.0), 
                                 int(y)*DEFAULT_GRID_SIZE+(DEFAULT_GRID_SIZE/2.0))
                f.setGeometry(QgsGeometry.fromPoint(point))
                f.addAttribute(0, QVariant(point.x()))
                f.addAttribute(1, QVariant(point.y()))
                f.addAttribute(2, QVariant(val))
                f.addAttribute(3, QVariant(zone_str))
                writer.addFeature(f)
            del writer
        except Exception as err:
            remove_shapefile(grid_file)
            raise OperatorError("error creating joined grid: " % err, self.__class__)
        
        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading created grid file %s' % grid_file, self.__class__)
                
        # clean up                
        if use_db:
            db.close()
            os.remove(tmp_db_file)
            
        # done
        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
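
The core of the aggregation above does not depend on QGIS: each centroid is binned by flooring its coordinates to a cell index, counted under a composite key, and later re-emitted at the cell centre. A minimal sketch of that binning, assuming a hypothetical GRID_SIZE constant and plain (zone, x, y) tuples in place of the feature loop:

import math

GRID_SIZE = 0.01  # hypothetical stand-in for DEFAULT_GRID_SIZE

def aggregate_points(points):
    """ count (zone_str, x, y) tuples per grid cell """
    counts = {}
    for zone_str, x, y in points:
        # floor truncates each point to its cell's bottom-left corner
        gx = int(math.floor(x / GRID_SIZE))
        gy = int(math.floor(y / GRID_SIZE))
        key = (zone_str, gx, gy)
        counts[key] = counts.get(key, 0) + 1
    return counts

def cell_center(gx, gy):
    # add half a cell to move from the corner to the centre
    return (gx * GRID_SIZE + GRID_SIZE / 2.0,
            gy * GRID_SIZE + GRID_SIZE / 2.0)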
Example #47
File: data.py Project: gem/sidd
    def __eq__(self, other):
        """ compare two operator data objects """
        logAPICall.log(
            'comparing self.type(%s) and other.type(%s)' %
            (self.type, other.type), logAPICall.DEBUG_L2)
        return self.type == other.type
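
Equality here is intentionally shallow: two OperatorData objects compare equal whenever their type fields match, regardless of the values they carry. A usage sketch, assuming the OperatorData(type, value) constructor used in Example #53, with ms_one and ms_two as placeholder mapping schemes:

a = OperatorData(OperatorDataTypes.MappingScheme, ms_one)
b = OperatorData(OperatorDataTypes.MappingScheme, ms_two)
assert a == b  # True even if ms_one and ms_two hold different schemes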
Example #48
    def buildExposure(self):        
        """ build exposure """
        self.ui.statusbar.showMessage(get_ui_string("app.status.ms.processing"))
                
        # verify current dataset to make sure can process exposure
        self.project.verify_data()
        
        # can not proceed if project is not ready for exposure
        if self.project.status != ProjectStatus.ReadyForExposure:
            logUICall.log(get_ui_string("project.error.NotEnoughData"), logUICall.WARNING)

            # show result 
            self.tab_datainput.showVerificationResults()
            self.ui.mainTabs.setCurrentIndex(self.TAB_DATA)
            return
        
        # close current results
        self.tab_result.closeResult()
        self.ui.mainTabs.setTabEnabled(self.TAB_RESULT, True)

        # reset progress dialog
        self.progress.setVisible(True)
        self.progress.ui.pb_progress.setRange(0, self.project.build_exposure_total_steps())        
        self.progress.ui.txt_progress.clear()
        self.progress.ui.txt_progress.appendPlainText(get_ui_string("app.status.processing"))        
        self.qtapp.processEvents()
        
        cancelled = False
        error_occurred = False
        error_message = ""
        curStep = 0
        try:
            for step in self.project.build_exposure_steps():
                if cancelled or error_occurred:
                    break
                
                # use introspection to get operator class                           
                class_name = str(step.__class__)
                # result of above call has format 
                # <class '...'> where ... is the class name of interest
                class_name = class_name[class_name.find("'")+1:class_name.rfind("'")]
                
                # update UI
                logAPICall.log('\t %s' % step.name, logAPICall.DEBUG)
                self.progress.ui.txt_progress.appendPlainText(get_ui_string('message.%s' % class_name))
                self.progress.ui.pb_progress.setValue(curStep)                        
                self.qtapp.processEvents()
                sleep(0.5)
                
                # perform operation
                step.do_operation()
                if not self.progress.isVisible():
                    cancelled = True
                
                # operation successful
                curStep += 1
        except Exception as err:
            # exceptions are thrown if data is not ready for exposure
            error_message = err.message
            error_occurred = True
            self.progress.setVisible(False)
            
        if error_occurred:
            # an error occurred during processing
            logUICall.log(error_message, logUICall.WARNING)
            self.ui.statusbar.showMessage(get_ui_string("app.status.cancelled"))
        elif cancelled:
            # processing cancelled
            logUICall.log(get_ui_string("app.status.cancelled"), logUICall.WARNING)
            self.ui.statusbar.showMessage(get_ui_string("app.status.cancelled"))
        else:
            # processing completed            
            self.project.verify_result()
            self.progress.setVisible(False)
        
            # show result
            self.tab_result.refreshResult()
            self.ui.mainTabs.setTabEnabled(self.TAB_RESULT, True)
            self.ui.mainTabs.setCurrentIndex(self.TAB_RESULT)
            self.ui.statusbar.showMessage(get_ui_string("app.status.exposure.created"))       
Example #49
File: taxonomy.py Project: gem/sidd
    def __initialize(self, db_path):
        """
        prepare parser
        - load attributes and codes from underlying db
        """
        if self.__initialized:
            return

        logAPICall.log('initialize taxonomy from database %s' % db_path,
                       logAPICall.DEBUG)

        # load attributes / code from DB for parsing
        conn = sqlite3.connect(db_path)

        c = conn.cursor()

        # load attribute groups
        # attribute group default value for fill-in to missing groups
        sql = """
            select a.attribute, a.default_value from dic_gem_attributes g inner join dic_gem_attribute_levels a on g.attribute=a.attribute where level=1 and order_in_basic <> ''
        """
        c.execute(sql)
        default_values = {}
        for row in c:
            default_values[str(row[0])] = str(row[1])

        sql = """
            select order_in_extended, attribute from dic_gem_attributes g order by order_in_extended asc
        """
        c.execute(sql)
        output_str = []
        for row in c:
            output_str.append(str(row[1]))
        self._output_str = output_str[:3] + output_str
        sql = """
            select g.order_in_basic,  g.attribute, max(a.level) levels, g.format
            from dic_gem_attributes g
            inner join dic_gem_attribute_levels a on g.attribute=a.attribute 
            where order_in_basic <> ''
            group by g.order_in_basic, g.attribute, g.format
            order by order_in_basic
        """
        c.execute(sql)
        self.__attrGroups = []
        for row in c:
            grp = GemTaxonomyAttributeGroup(
                str(row[1]).strip(), int(row[0]), int(row[2]),
                default_values[str(row[1])], int(row[3]))
            self.__attrGroups.append(grp)

        # load attributes
        sql = """
            select a.name, a.level, g.attribute, a.format, a.lookup_table
            from dic_gem_attributes g
            inner join dic_gem_attribute_levels a on g.attribute=a.attribute 
            where order_in_basic <> '' 
            group by a.name, a.level, g.attribute, a.lookup_table, a.format
            order by g.order_in_basic, a.level
        """
        c.execute(sql)
        self.__attrs = []
        for row in c:
            grp = self.get_attribute_group_by_name(str(row[2]).strip())
            attr = GemTaxonomyAttribute(
                str(row[0]).strip(), grp, int(row[1]), None, int(row[3]))
            attr.lookup_table = str(row[4])
            grp.add_attribute(attr)
            self.__attrs.append(attr)

        # load codes
        sql = """select code, description, scope from %s"""
        self.__codes = {}
        for attr in self.__attrs:
            if attr.lookup_table == "":
                continue
            c.execute(sql % attr.lookup_table)
            for row in c:
                code_value = str(row[0]).strip()
                code = TaxonomyAttributeCode(attr, code_value,
                                             str(row[1]).strip(),
                                             str(row[2]).strip())
                self.__codes[code_value] = code
                attr.add_code(code)

        # load rules
        sql = """ select parent_table, child_table, parent_code, child_code from GEM_RULES """
        c.execute(sql)
        self.rules = {}
        for row in c:
            rule_key = '%s|%s' % (str(row[0]).strip(), str(row[1]).strip())
            parent_code = self.__codes[str(row[2]).strip()]
            child_code = self.__codes[str(row[3]).strip()]
            if not self.rules.has_key(rule_key):
                self.rules[rule_key] = {}
            rule = self.rules[rule_key]
            if not rule.has_key(parent_code):
                rule[parent_code] = []
            rule[parent_code].append(child_code)

        conn.close()
        self.__initialized = True
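
The rules table ends up as a two-level dictionary: a 'parent_table|child_table' string selects a rule, and each parent code (the TaxonomyAttributeCode object used as key above) maps to the list of child codes it permits. A lookup helper under that assumption (get_valid_children is a hypothetical name, not part of the class):

    def get_valid_children(self, parent_table, child_table, parent_code):
        """ child codes permitted under parent_code, or [] if no rule """
        rule_key = '%s|%s' % (parent_table, child_table)
        rule = self.rules.get(rule_key, {})
        return rule.get(parent_code, [])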
Example #50
    def do_operation(self):
        """ perform footprint load operation """

        # input/output data checking already done during property set
        zone_layer = self.inputs[0].value

        # make sure input is correct
        # NOTE: these checks cannot be performed at set input time
        #       because the data layer may not be loaded yet
        self._test_layer_loaded(zone_layer)

        x_off = self._x_off
        y_off = self._y_off

        extent = zone_layer.extent()
        [x_min, y_min, x_max, y_max] = [
            extent.xMinimum(),
            extent.yMinimum(),
            extent.xMaximum(),
            extent.yMaximum()
        ]

        # create grid based on extent of given region
        tmp_grid1 = 'grid_' + get_unique_filename()
        tmp_grid1_file = self._tmp_dir + tmp_grid1 + '.shp'

        try:
            self._write_grid_shapefile(tmp_grid1_file, x_min, y_min, x_max,
                                       y_max, x_off, y_off)
        except:
            remove_shapefile(tmp_grid1_file)
            raise OperatorError('error creating temporary grid',
                                self.__class__)

        tmp_grid1_layer = load_shapefile(tmp_grid1_file, tmp_grid1)

        # temporary grid for joined shape with all grid points not within region removed
        tmp_grid2 = 'grid_' + get_unique_filename()
        tmp_grid2_file = self._tmp_dir + tmp_grid2 + '.shp'
        tmp_grid2_layer = None
        try:
            analyzer = QgsOverlayAnalyzer()
            analyzer.intersection(tmp_grid1_layer, zone_layer, tmp_grid2_file)
            tmp_grid2_layer = load_shapefile(tmp_grid2_file, tmp_grid2)
        except:
            raise OperatorError('error creating grid', self.__class__)

        # create result layer
        grid_layername = 'grid_%s' % get_unique_filename()
        grid_file = self._tmp_dir + grid_layername + '.shp'
        try:
            writer = QgsVectorFileWriter(grid_file, "utf-8", self._fields,
                                         QGis.WKBPoint, self._crs,
                                         "ESRI Shapefile")
            f = QgsFeature()
            lon_idx = layer_field_index(tmp_grid2_layer, self._lon_field)
            lat_idx = layer_field_index(tmp_grid2_layer, self._lat_field)
            for _f in layer_features(tmp_grid2_layer):
                lon = _f.attributeMap()[lon_idx].toDouble()[0]
                lat = _f.attributeMap()[lat_idx].toDouble()[0]

                f.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                f.addAttribute(0, QVariant(lon))
                f.addAttribute(1, QVariant(lat))
                writer.addFeature(f)
            del writer
        except Exception as err:
            logAPICall.log(str(err), logAPICall.ERROR)
            raise OperatorError('error writing out grid', self.__class__)

        grid_layer = load_shapefile(grid_file, grid_layername)
        if not grid_layer:
            raise OperatorError('Error loading result grid file %s' % grid_file,
                                self.__class__)

        # clean up
        del analyzer, tmp_grid1_layer, tmp_grid2_layer
        remove_shapefile(tmp_grid1_file)
        remove_shapefile(tmp_grid2_file)

        self.outputs[0].value = grid_layer
        self.outputs[1].value = grid_file
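
Stripped of the shapefile plumbing, the operation reduces to generating cell-centre points over the extent and keeping those that fall inside the region. A sketch where a caller-supplied inside() predicate stands in for the QgsOverlayAnalyzer intersection:

def grid_points(x_min, y_min, x_max, y_max, x_off, y_off, inside):
    """ collect cell-centre points that satisfy inside(x, y) """
    points = []
    x = x_min + x_off / 2.0
    while x < x_max:
        y = y_min + y_off / 2.0
        while y < y_max:
            if inside(x, y):  # stands in for the zone intersection
                points.append((x, y))
            y += y_off
        x += x_off
    return points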
Example #51
    def __initialize(self, db_path):
        """
        prepare parser
        - load attributes and codes from underlying db
        """        
        if self.__initialized:
            return
        
        logAPICall.log('initialize taxonomy from database %s' % db_path, logAPICall.DEBUG)
        
        # load attributes / code from DB for parsing
        conn = sqlite3.connect(db_path)

        c = conn.cursor()
        
        # load attribute groups
        # attribute group default value for fill-in to missing groups                 
        sql = """
            select a.attribute, a.default_value from dic_gem_attributes g inner join dic_gem_attribute_levels a on g.attribute=a.attribute where level=1 and order_in_basic <> ''
        """     
        c.execute(sql)
        default_values = {}
        for row in c:
            default_values[str(row[0])] = str(row[1])

        sql = """
            select order_in_extended, attribute from dic_gem_attributes g order by order_in_extended asc
        """     
        c.execute(sql)
        output_str = []
        for row in c:
            output_str.append(str(row[1]))
        self._output_str = output_str[:3]+output_str
        sql = """
            select g.order_in_basic,  g.attribute, max(a.level) levels, g.format
            from dic_gem_attributes g
            inner join dic_gem_attribute_levels a on g.attribute=a.attribute 
            where order_in_basic <> ''
            group by g.order_in_basic, g.attribute, g.format
            order by order_in_basic
        """
        c.execute(sql)
        self.__attrGroups = []
        for row in c:            
            grp = GemTaxonomyAttributeGroup(str(row[1]).strip(), int(row[0]), int(row[2]), 
                                            default_values[str(row[1])], int(row[3]))
            self.__attrGroups.append(grp)
            
        # load attributes
        sql = """
            select a.name, a.level, g.attribute, a.format, a.lookup_table, a.default_value
            from dic_gem_attributes g
            inner join dic_gem_attribute_levels a on g.attribute=a.attribute 
            where order_in_basic <> '' 
            group by a.name, a.level, g.attribute, a.lookup_table, a.format, a.default_value
            order by g.order_in_basic, a.level
        """
        c.execute(sql)
        self.__attrs = []
        for row in c:
            grp = self.get_attribute_group_by_name(str(row[2]).strip())
            attr = GemTaxonomyAttribute(str(row[0]).strip(), grp, 
                                        int(row[1]), str(row[5]), int(row[3]))
            attr.lookup_table = str(row[4])
            grp.add_attribute(attr)
            self.__attrs.append(attr)

        # load codes
        sql = """select code, description, scope from %s"""
        self.__codes = {}
        for attr in self.__attrs:
            if attr.lookup_table == "":
                continue
            c.execute(sql % attr.lookup_table)
            for row in c: 
                code_value = str(row[0]).strip()
                code = TaxonomyAttributeCode(attr, 
                                             code_value, str(row[1]).strip(), str(row[2]).strip())
                self.__codes[code_value] = code
                attr.add_code(code)
        
        # load rules
        sql = """ select parent_table, child_table, parent_code, child_code from GEM_RULES """
        c.execute(sql)
        self.rules = {}
        for row in c:
            rule_key = '%s|%s' % (str(row[0]).strip(), str(row[1]).strip())
            parent_code = self.__codes[str(row[2]).strip()]            
            child_code = self.__codes[str(row[3]).strip()]
            if not self.rules.has_key(rule_key):
                self.rules[rule_key] = {}
            rule = self.rules[rule_key]
            if not rule.has_key(parent_code):
                rule[parent_code] = []
            rule[parent_code].append(child_code)
                        
        conn.close()
        self.__initialized = True
Example #52
    def do_operation(self):
        """ perform create mapping scheme operation """

        # input/output verification already performed during set input/output
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field

        logAPICall.log(
            'survey %s, tax_field %s, zone %s, zone_field %s' %
            (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
            logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)

        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)

        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional[StatisticNode.AverageSize] = _area
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str),
                           logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(
                    _tax_str, self._parse_order, self._parse_modifiers,
                    additional)
            except TaxonomyParseError as perr:
                logAPICall.log(
                    "error parsing case %s, %s" % (str(_tax_str), str(perr)),
                    logAPICall.WARNING)

        # store data in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = ms
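
Per-zone case collection is the heart of the loop above: each joined feature contributes its taxonomy string to the statistics tree of its own zone. The same bookkeeping with plain dictionaries, ignoring modifiers and the size/cost attributes:

from collections import defaultdict

def cases_by_zone(records):
    """ records: iterable of (zone_str, tax_str) pairs """
    cases = defaultdict(lambda: defaultdict(int))
    for zone_str, tax_str in records:
        cases[zone_str][tax_str] += 1
    return cases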
Example #53
File: workflow.py Project: gem/sidd
    def build_workflow(self, project):
        """ create workflow for creating exposure """
        workflow = Workflow()
        still_needs_count = True
        still_needs_zone = True
        still_needs_ms = True

        # footprint loading
        logAPICall.log('checking footprint data', logAPICall.DEBUG)
        if project.fp_type == FootprintTypes.FootprintHt:
            self.load_footprint(project, workflow, True)
            still_needs_count = False
        elif project.fp_type == FootprintTypes.Footprint:
            self.load_footprint(project, workflow, False)
            still_needs_count = False

        # population grid loading
        if project.popgrid_type == PopGridTypes.Grid:
            self.load_popgrid(project, workflow)
            still_needs_count = False

        # survey loading
        logAPICall.log('checking survey data', logAPICall.DEBUG)
        if project.survey_type == SurveyTypes.CompleteSurvey:
            self.load_survey(project, workflow, True)
            still_needs_count = False
            still_needs_zone = False
            still_needs_ms = False
        elif project.survey_type == SurveyTypes.SampledSurvey:
            self.load_survey(project, workflow, False)

        # zones loading
        logAPICall.log('checking zone data', logAPICall.DEBUG)
        if project.zone_type == ZonesTypes.LanduseCount:
            self.load_zone(project, workflow, True)
            still_needs_count = False
            still_needs_zone = False

        elif project.zone_type == ZonesTypes.Landuse:
            self.load_zone(project, workflow, False)
            still_needs_zone = False

        # need to load MS
        logAPICall.log('checking ms', logAPICall.DEBUG)
        if isinstance(getattr(project, 'ms', None), MappingScheme):
            workflow.operator_data['ms'] = OperatorData(
                OperatorDataTypes.MappingScheme, project.ms)
            still_needs_ms = False

        logAPICall.log('checking if dataset is complete', logAPICall.DEBUG)

        if still_needs_zone:
            workflow.add_error(WorkflowErrors.NeedsZone)
        if still_needs_count:
            workflow.add_error(WorkflowErrors.NeedsCount)
        if still_needs_ms:
            workflow.add_error(WorkflowErrors.NeedsMS)

        # if data set is not complete
        # return a workflow that is not ready
        if workflow.has_error():
            return workflow

        logAPICall.log('add exposure building operation', logAPICall.DEBUG)

        # select appropriate operator(s) to apply mapping scheme
        try:
            self.build_processing_chain(project, workflow)
            workflow.ready = True
        except WorkflowException as err:
            # error building processing chain
            # workflow is not ready
            workflow.add_error(err.error)

        return workflow
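
The still_needs_* flags encode a simple completeness check: every data source that can supply building counts, zone boundaries, or a mapping scheme clears its flag, and any flag still set becomes a workflow error. A minimal stand-in for the Workflow error API used above (a sketch, not the real class):

class Workflow(object):
    """ minimal sketch of the error-tracking surface """
    def __init__(self):
        self.ready = False
        self.operator_data = {}
        self._errors = []

    def add_error(self, error):
        self._errors.append(error)

    def has_error(self):
        return len(self._errors) > 0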
Example #54
    def do_operation(self):
        # input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value

        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (area_field, fp_layer.name()), self.__class__)
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" % (ht_field, fp_layer.name()),
                self.__class__)
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (HT_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (AREA_FIELD_NAME, svy_layer.name()), self.__class__)
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (TAX_FIELD_NAME, svy_layer.name()), self.__class__)

        # load zone classes
        # the operations below must be performed for each zone
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)

        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (AREA_FIELD_NAME, svy_layer.name()), self.__class__)
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (TAX_FIELD_NAME, svy_layer.name()), self.__class__)

        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {}
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {}
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0

        # associate group to ratio value
        for _rec in layer_features(tmp_join_layer):
            _zone_str = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone_str], _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone_str], _samp_grp,
                                    _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err,
                               logAPICall.WARNING)

        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups",
                                    self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value == -1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError(
                        "Survey groups must have same number of samples",
                        self.__class__)
            # sort by stories
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][
                    group_stories_for_sort[key]] = self.weights[idx]

        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            group_weight = _zone_group_weight[_zone_str]
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str,
                                    group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str,
                                    _sample_size * group_weight[_sample_grp])
                self.increment_dict(
                    _zone_a_exp[_zone_str], _tax_str,
                    _sample_size * _sample_ht * group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log(
                    "error processing sample with building type: %s" %
                    _tax_str, logAPICall.WARNING)

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)

        zone_idx = layer_field_index(tmp_join_layer2, zone_field)
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht

        # calculate building ratios for each zone
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a)
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())
            # E[A] estimated total building area for zone
            e_at_total = _zone_total_area[_zone] * _zone_total_ht[
                _zone] / _zone_total_count[_zone]

            # calculate expected values
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]
                if e_at_cluster == 0 or e_at_total == 0:
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building
                    e_gt_cluster = e_at_cluster / e_nt_cluster

                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster

                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster

        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)

            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str,
                               self._parse_order,
                               self._parse_modifiers,
                               add_times=int(_e_exp * 1000))
            # finalize call is required
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)

        # clean up
        del tmp_join_layer, tmp_join_layer2, analyzer
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join_file2)

        # assign output
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp
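
A quick numeric check of the estimator in the ratio loop, with made-up figures: 200 footprints covering 50,000 m2 of ground area and 600 storeys in total give E[A] = 50,000 * 600 / 200 = 150,000 m2 of floor area for the zone; each sampled type then takes its share of that total by area fraction and converts back to a count through its average building size:

# hypothetical weighted survey totals per building type
e_n = {'RC': 10.0, 'W': 20.0}        # weighted counts
e_a = {'RC': 30000.0, 'W': 20000.0}  # weighted floor areas

e_at_total = 50000.0 * 600.0 / 200.0    # E[A] = 150000.0
e_at_cluster_total = sum(e_a.values())  # 50000.0

for t in e_a:
    e_ft = e_a[t] / e_at_cluster_total  # area fraction of type t
    e_gt = e_a[t] / e_n[t]              # average area per building
    e_nt = e_at_total * e_ft / e_gt     # estimated building count
    print('%s: %.1f buildings' % (t, e_nt))  # RC: 30.0, W: 60.0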