示例#1
0
 def do_operation(self):
     """
     Perform create-mapping-scheme operation: build a single-zone ('ALL')
     mapping scheme by accumulating a statistic case for every feature in
     the input survey layer.
     Input/output verification was already performed during set input/ouput.
     """
     survey_layer = self.inputs[0].value
     tax_field = self._tax_field
     
     # empty mapping scheme with one 'ALL' zone backed by a statistic tree
     ms = MappingScheme(self._taxonomy)
     stats = Statistics(self._taxonomy)
     ms.assign(MappingSchemeZone('ALL'), stats)
     
     # resolve attribute indices once, outside the feature loop
     tax_idx = layer_field_index(survey_layer, tax_field)
     area_idx = layer_field_index(survey_layer, AREA_FIELD_NAME)
     cost_idx = layer_field_index(survey_layer, COST_FIELD_NAME)
     
     for _f in layer_features(survey_layer):
         _tax_str = str(_f.attributeMap()[tax_idx].toString())
         additional = {}
         _area = _f.attributeMap()[area_idx].toDouble()[0]
         if _area > 0:
             additional[StatisticNode.AverageSize] = _area
         _cost = _f.attributeMap()[cost_idx].toDouble()[0]
         if _cost > 0:
             # BUGFIX: update the dict instead of rebuilding it, so a feature
             # with both area and cost keeps its AverageSize entry
             additional[StatisticNode.UnitCost] = _cost
         try:
             stats.add_case(_tax_str, self._parse_order, self._parse_modifiers, additional)
         except TaxonomyParseError as perr:
             # skip unparsable taxonomy strings, but log them for diagnostics
             logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING)
     
     # store data in output
     stats.finalize()
     
     self.outputs[0].value = ms
示例#2
0
    def test_BuildMS(self, skipTest=False):
        """
        Build a mapping scheme from the survey CSV and check that it matches
        the mapping scheme previously stored in self.ms_file (compared via
        serialized XML length). With skipTest=True, only build and return it.
        """
        import csv
        # BUGFIX: use a context manager so the CSV file handle is always closed
        with open(self.survey_file, 'r') as survey_fh:
            survey = csv.reader(survey_fh, delimiter=',', quotechar='"')
            # skip header row
            next(survey)

            stats = Statistics(self.taxonomy)
            for row in survey:
                tax_string = row[2]
                stats.add_case(tax_string, parse_order=self.ms_parse_order)
        stats.finalize()

        ms = MappingScheme(self.taxonomy)
        ms_zone = MappingSchemeZone('ALL')
        ms.assign(ms_zone, stats)
        #ms.save(self.ms_file)
        if skipTest:
            return ms

        ms2 = MappingScheme(self.taxonomy)
        ms2.read(self.ms_file)

        # compare serialized statistic trees of the 'ALL' zone
        self.assertEqual(
            len(ms.get_assignment_by_name("ALL").to_xml().strip()),
            len(ms2.get_assignment_by_name("ALL").to_xml().strip()))
示例#3
0
    def do_operation(self):
        """ create a mapping scheme containing a single, empty 'ALL' zone """

        # inputs/outputs were already verified when they were set
        scheme = MappingScheme(self._taxonomy)
        all_zone = MappingSchemeZone('ALL')
        empty_stats = Statistics(self._taxonomy)
        empty_stats.finalize()
        empty_stats.get_tree().value = all_zone.name
        scheme.assign(all_zone, empty_stats)

        self.outputs[0].value = scheme
示例#4
0
 def do_operation(self):
     """ create a mapping scheme containing a single, empty 'ALL' zone """
     
     # inputs/outputs were already verified when they were set
     result_ms = MappingScheme(self._taxonomy)
     zone_all = MappingSchemeZone('ALL')
     blank_stats = Statistics(self._taxonomy)
     blank_stats.finalize()
     blank_stats.get_tree().value = zone_all.name
     result_ms.assign(zone_all, blank_stats)
     
     self.outputs[0].value = result_ms
示例#5
0
 def test_LoadMS(self, skipTest=False, statsOnly=True):
     """Load a mapping scheme from XML and verify its attribute list."""
     scheme = MappingScheme(self.taxonomy)
     scheme.read(self.ms_file)
     
     if skipTest:
         # return either just the 'ALL' statistics or the whole scheme
         return scheme.get_assignment_by_name("ALL") if statsOnly else scheme
     
     all_stats = scheme.get_assignment_by_name("ALL")
     found_attributes = all_stats.get_attributes(all_stats.get_tree())
     self.assertEqual(sorted(found_attributes), sorted(self.ms_parse_order))
示例#6
0
    def test_BuildMS(self, skipTest=False):
        """
        Build a mapping scheme from the survey CSV and check that it matches
        the mapping scheme previously stored in self.ms_file (compared via
        serialized XML length). With skipTest=True, only build and return it.
        """
        import csv
        # BUGFIX: use a context manager so the CSV file handle is always closed
        with open(self.survey_file, 'r') as survey_fh:
            survey = csv.reader(survey_fh, delimiter=',', quotechar='"')
            # skip header row
            next(survey)

            stats = Statistics(self.taxonomy)
            for row in survey:
                tax_string = row[2]
                stats.add_case(tax_string, parse_order=self.ms_parse_order)
        stats.finalize()
        
        ms = MappingScheme(self.taxonomy)
        ms_zone = MappingSchemeZone('ALL')
        ms.assign(ms_zone, stats)
        #ms.save(self.ms_file)
        if skipTest:
            return ms
        
        ms2 = MappingScheme(self.taxonomy)
        ms2.read(self.ms_file)
        
        # compare serialized statistic trees of the 'ALL' zone
        self.assertEqual(
            len(ms.get_assignment_by_name("ALL").to_xml().strip()),
            len(ms2.get_assignment_by_name("ALL").to_xml().strip())
        )
示例#7
0
文件: dlg_ms_branch.py 项目: gem/sidd
 def saveMSBranch(self):
     """ 
     event handler for btn_save
     - open "Save mapping scheme" dialog box to save current set of values/weights
       as a single level mapping scheme
     """
     # build a one-level mapping scheme from the dialog's value/weight lists
     ms = MappingScheme(self.taxonomy)
     stats = Statistics(self.taxonomy)
     root = stats.get_tree()
     # Python 2 idiom: map(None, a, b) pairs the two lists like
     # itertools.izip_longest (pads the shorter list with None)
     for v, w in map(None, self.levelModel.values, self.levelModel.weights):
         node = StatisticNode(root, '', v)
         node.weight = w
         root.children.append(node)
     # tree was assembled by hand, so mark the stats finalized directly
     # instead of calling stats.finalize()
     stats.finalized = True
     ms.assign(MappingSchemeZone('ALL'), stats)
     
     # hand the scheme to the save dialog and show it modally
     self.dlgSave.setMS(ms, True)
     self.dlgSave.exec_()
示例#8
0
    def do_operation(self):
        """
        Perform create-mapping-scheme operation: build a mapping scheme with
        one empty statistic tree per zone class found in the input zone layer.
        Raises OperatorError when the zone layer/field cannot be read.
        """
        # input/output verification already performed during set input/ouput        
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value
        
        # load zone classes; layer_field_stats signals bad input by assertion
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)
        
        # create one empty zone/statistics pair per zone class
        # (the per-class counts are unused here, so iterate keys only)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes:
            stats = Statistics(self._taxonomy)
            stats.finalize()
            stats.get_tree().value = _zone
            ms.assign(MappingSchemeZone(_zone), stats)

        self.outputs[0].value = ms
示例#9
0
    def do_operation(self):
        """
        Perform create-mapping-scheme operation: build a mapping scheme with
        one empty statistic tree per zone class found in the input zone layer.
        Raises OperatorError when the zone layer/field cannot be read.
        """
        # input/output verification already performed during set input/ouput
        zone_layer = self.inputs[0].value
        zone_field = self.inputs[1].value

        # load zone classes; layer_field_stats signals bad input by assertion
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # create one empty zone/statistics pair per zone class
        # (the per-class counts are unused here, so iterate keys only)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes:
            stats = Statistics(self._taxonomy)
            stats.finalize()
            stats.get_tree().value = _zone
            ms.assign(MappingSchemeZone(_zone), stats)

        self.outputs[0].value = ms
示例#10
0
    def do_operation(self):
        """
        Perform create-mapping-scheme operation: build a single-zone ('ALL')
        mapping scheme by accumulating a statistic case for every feature in
        the input survey layer.
        Input/output verification was already performed during set input/ouput.
        """
        survey_layer = self.inputs[0].value
        tax_field = self._tax_field

        # empty mapping scheme with one 'ALL' zone backed by a statistic tree
        ms = MappingScheme(self._taxonomy)
        stats = Statistics(self._taxonomy)
        ms.assign(MappingSchemeZone('ALL'), stats)

        # resolve attribute indices once, outside the feature loop
        tax_idx = layer_field_index(survey_layer, tax_field)
        area_idx = layer_field_index(survey_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(survey_layer, COST_FIELD_NAME)

        for _f in layer_features(survey_layer):
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional[StatisticNode.AverageSize] = _area
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                # BUGFIX: update the dict instead of rebuilding it, so a
                # feature with both area and cost keeps its AverageSize entry
                additional[StatisticNode.UnitCost] = _cost
            try:
                stats.add_case(_tax_str, self._parse_order,
                               self._parse_modifiers, additional)
            except TaxonomyParseError as perr:
                # skip unparsable taxonomy strings, but log for diagnostics
                logAPICall.log(
                    "error parsing case %s, %s" % (str(_tax_str), str(perr)),
                    logAPICall.WARNING)

        # store data in output
        stats.finalize()

        self.outputs[0].value = ms
示例#11
0
    def test_LoadMS(self, skipTest=False, statsOnly=True):
        """Load a mapping scheme from XML and verify its attribute list."""
        scheme = MappingScheme(self.taxonomy)
        scheme.read(self.ms_file)

        if skipTest:
            # return either just the 'ALL' statistics or the whole scheme
            return scheme.get_assignment_by_name("ALL") if statsOnly else scheme

        all_stats = scheme.get_assignment_by_name("ALL")
        found_attributes = all_stats.get_attributes(all_stats.get_tree())
        self.assertEqual(sorted(found_attributes), sorted(self.ms_parse_order))
示例#12
0
class Project (object):
    """
    SIDD project contains data and operators necessary to create an exposure
    database from given dataset
    """
    # constructor / destructor
    ##################################

    def __init__(self, app_config, taxonomy):
        """ constructor """
        # per-project scratch directory; removed again in clean_up()
        self.temp_dir = get_temp_dir('tmp%s'%get_random_name())
        self.app_config = app_config
        # options shared by every workflow operator built for this project
        self.operator_options = {
            'tmp_dir': self.temp_dir,
            'taxonomy':taxonomy,    
            'parse_modifiers':app_config.get('options', 'parse_modifier', True, bool),        
        }
        self.reset()

        # project database (bsddb) is attached later via set_project_path()
        self.project_file = None
        self.db = None
        self.require_save = False
    
    def __del__(self):
        """ 
        destructor that performs cleanup
        NOTE: based on python behavior, there is no guarantee this method will ever be called
        """
        self.clean_up()
        
    def clean_up(self):
        """ cleanup: remove the temp directory and close the project db """
        try:            
            logAPICall.log('attempt to delete project temp dir %s' % self.temp_dir, logAPICall.DEBUG)
            if os.path.exists(self.temp_dir):                
                del self.workflow
                if self.exposure is not None:
                    del self.exposure   # must delete QGIS layer, otherwise exposure_file becomes locked
                                        # and will generate error on shutil.rmtree
                shutil.rmtree(self.temp_dir)
        except Exception as err:            
            # best-effort cleanup: log and continue so db close still runs
            logAPICall.log('failed to delete temporary directory: %s' % str(err), logAPICall.WARNING)
        try:
            # close the project database if one was opened
            if self.project_file is not None and self.db is not None:
                self.db.close()
        except Exception:
            pass
    
    # data setter methods
    ##################################
    @logAPICall
    def set_project_path(self, project_file):
        """
        Attach the project to the given bsddb file, creating it from the
        bundled template when it does not exist yet.
        Raises SIDDProjectException(ProjectErrors.FileFormatError) when the
        file cannot be opened as a project database.
        """
        try:
            if not os.path.exists(project_file):
                # new project: start from the empty project template
                shutil.copyfile(FILE_PROJ_TEMPLATE, project_file)
            self.db = bsddb.btopen(project_file, 'c')
            self.version_major = self.get_project_data('version_major')
            self.version_minor = self.get_project_data('version_minor')
            logAPICall.log('opening project file version %s.%s' %(self.version_major, self.version_minor),
                           logAPICall.INFO)
            self.project_file = project_file
            self.require_save = True
        except Exception as err:
            # BUGFIX: narrow the bare except (which also caught SystemExit/
            # KeyboardInterrupt) and keep the original cause in the log
            logAPICall.log('failed to open project file: %s' % str(err), logAPICall.WARNING)
            raise SIDDProjectException(ProjectErrors.FileFormatError)
    
    @logAPICall
    def set_footprint(self, fp_type, fp_file='', ht_field=''):
        """ set footprint data source (type, file and optional height field) """
        self.fp_file = fp_file
        self.fp_type = fp_type
        self.fp_ht_field = ht_field
        # mark project dirty so the change is written on next sync
        self.require_save = True

    @logAPICall
    def set_zones(self, zone_type, zone_file='', zone_field='', zone_count_field='', zone_area_field=''):
        """ load zone data """
        # store zone data source settings; actual loading happens in load_zones()
        self.zone_file = zone_file
        self.zone_type = zone_type
        self.zone_field = zone_field
        self.zone_count_field = zone_count_field
        self.zone_area_field = zone_area_field
        # mark project dirty so the change is written on next sync
        self.require_save = True        

    @logAPICall
    def set_survey(self, survey_type, survey_file='', survey_format='GEMDB'):
        """ load survey data """
        # store survey data source settings; actual loading happens in load_survey()
        self.survey_file = survey_file
        self.survey_type = survey_type
        self.survey_format = survey_format
        # mark project dirty so the change is written on next sync
        self.require_save = True

    @logAPICall
    def set_popgrid(self, popgrid_type, popgrid_file='', pop_field='', pop_to_bldg=1):
        """ set population grid data source """
        self.popgrid_type = popgrid_type
        self.popgrid_file = popgrid_file
        self.pop_field = pop_field
        self.pop_to_bldg = pop_to_bldg
        # CONSISTENCY FIX: flag unsaved changes like every other data setter
        # (set_footprint / set_zones / set_survey / set_export all do this)
        self.require_save = True

    @logAPICall
    def set_output_type(self, output_type):
        """ set exposure output type (see OutputTypes) """
        self.output_type = output_type
        # mark project dirty so the change is written on next sync
        self.require_save = True

    @logAPICall
    def set_export(self, export_type, export_path):
        """ set export format and destination path """
        self.export_type = export_type
        self.export_path = export_path
        # mark project dirty so the change is written on next sync
        self.require_save = True

    @logAPICall
    def reset(self, sync=False):
        """
        reset project to default values, with option to also clear underlying db
        """
        # footprint: none configured
        self.fp_type = FootprintTypes.None
        self.fp_file = ''
        self.fp_ht_field = ''
        
        # survey: none configured; GEMDB is the default file format
        self.survey_type = SurveyTypes.None
        self.survey_file = ''
        self.survey_format = 'GEMDB' #'CSV'
        
        # zones: none configured
        self.zone_type = ZonesTypes.None
        self.zone_file = ''
        self.zone_field = '' 
        self.zone_count_field = ''
        self.zone_area_field = ''

        # population grid: none configured
        self.popgrid_type= PopGridTypes.None
        self.popgrid_file = ''
        self.pop_field = ''
        self.pop_to_bldg = 1
        
        # no mapping scheme / exposure yet; default output is grid-based
        self.ms = None
        self.output_type = OutputTypes.Grid

        self.exposure = None
        
        self.export_type = ExportTypes.Shapefile
        self.export_path = ''

        # empty workflow
        self.workflow = Workflow()
        
        # clear status
        self.status = ProjectStatus.NotVerified
        self.errors = []
        
        self.require_save = True
        if sync:
            # also persist the cleared state to the project db
            self.sync(SyncModes.Write)

    # exposure processing methods
    ##################################
    @logAPICall
    def load_footprint(self):
        # only load if all required fields exists
        if self.fp_type == FootprintTypes.None:
            return
        if self.fp_file == '':
            return
        if self.fp_type == FootprintTypes.FootprintHt and self.fp_ht_field == '':
            return            
        
        self.fp, self.fp_tmp_file = self.load_data('fp_file', 'fp', 'fp_file')
        return

    @logAPICall
    def load_zones(self):
        # only load if all required fields exists
        if self.zone_type == ZonesTypes.None:
            return
        if self.zone_file == '' or self.zone_field == '':
            return
        if self.fp_type ==  ZonesTypes.LanduseCount and self.zone_count_field == '':
            return
                
        self.zone, self.zone_tmp_file = self.load_data('zone_file', 'zone', 'zone_file') 
        return 

    @logAPICall
    def load_survey(self):
        """ load survey data via the generic load_data helper """
        self.survey, self.survey_tmp_file = self.load_data('survey_file', 'survey', 'survey_file') 
        return 
    
    @logAPICall
    def load_popgrid(self):
        if self.popgrid_type == PopGridTypes.None:
            return
                
        self.survey, self.survey_tmp_file = self.load_data('popgrid_file', 'popgrid', 'popgrid_file') 
        return 
    
    @logAPICall
    def verify_data(self):
        """ verify existing data and create workflow """
        # build workflow based on current data
        builder = WorkflowBuilder(self.operator_options)
        self.workflow = builder.build_workflow(self)    
        
        # workflow.ready means all inputs required for exposure are present
        if self.workflow.ready:
            self.status = ProjectStatus.ReadyForExposure
        else:
            self.status = ProjectStatus.ReadyForMS
        self.errors = self.workflow.errors
        # any previously built exposure is invalidated by re-verification
        self.exposure = None
        logAPICall.log('input verification completed', logAPICall.INFO)
        
    @logAPICall
    def build_exposure(self):
        """ building exposure database from workflow """
        # run every step to completion; build_exposure_steps() performs the
        # precondition checks and yields the workflow operators one by one
        for step in self.build_exposure_steps():
            step.do_operation()
    
    @logAPICall
    def build_exposure_total_steps(self):
        """
        return the exposure workflow's steps
        (presumably the step count, given the method name — confirm against
        Workflow.steps); raises SIDDException when the workflow is not ready
        """
        if not self.workflow.ready:
            raise SIDDException('exposure workflow not complete')
        return self.workflow.steps()

    @logAPICall
    def build_exposure_steps(self):
        """
        building exposure database from workflow
        generator: validates preconditions, yields each workflow operator so
        the caller can drive step-by-step progress, then stores the resulting
        exposure layer/file on the project once all steps have run
        """
        if not self.workflow.ready:
            raise SIDDException('Cannot create exposure with current datasets. Please revise input')
        
        if not self.ms.is_valid:
            raise SIDDException('Current mapping scheme is not valid')
        
        # make sure leaf distributions are up-to-date before processing
        for zone in self.ms.zones:
            zone.stats.refresh_leaves(with_modifier=True, order_attributes=True)
        
        # drop any previous exposure; the layer object must be deleted before
        # its backing shapefile can be removed (file lock, cf. clean_up)
        if getattr(self, 'exposure', None) is not None:
            del self.exposure
            remove_shapefile(self.exposure_file)
        
        for op in self.workflow.nextstep():
            yield op
        
        # when all steps are completed, set resulting exposure
        self.exposure = self.workflow.operator_data['exposure'].value
        self.exposure_file = self.workflow.operator_data['exposure_file'].value
        if self.workflow.operator_data.has_key('exposure_grid'):
            self.exposure_grid = self.workflow.operator_data['exposure_grid'].value
        
        logAPICall.log('exposure data created %s' % self.exposure_file, logAPICall.INFO)    

    @logAPICall
    def build_ms(self):
        """ build mapping scheme from survey data """
        # make sure survey exists
        if (self.survey_type == SurveyTypes.None):
            raise SIDDException('survey is required for creating mapping scheme')        
        # try to create ms using random
        try: 
            use_sampling = self.operator_options['stratified.sampling']
            return self.do_build_ms(isEmpty=False, useSampling=use_sampling)
        except Exception as err:
            self.create_empty_ms()
            raise SIDDException('Unable to create Mapping Scheme:%s' % str(err))

    @logAPICall
    def create_empty_ms(self):
        """ create an empty mapping scheme """
        # build mapping scheme with no survey input
        return self.do_build_ms(isEmpty=True)

    @logAPICall
    def load_ms(self, path):
        """ load mapping scheme from XML """
        if self.zone_type != ZonesTypes.None:
            self.create_empty_ms()
        builder= WorkflowBuilder(self.operator_options)
        ms_workflow = builder.build_load_ms_workflow(self, path)        
        ms_workflow.process()
        ms = ms_workflow.operator_data['ms'].value  
        if self.ms is not None:              
            # include existing zones from current ms
            new_zones = [zone.name for zone in ms.zones]    
            for existing_zone in self.ms.zones:
                try:
                    new_zones.index(existing_zone.name)
                except:
                    # not found
                    statistics = Statistics(self.ms.taxonomy)
                    zone = MappingSchemeZone(existing_zone.name)
                    ms.assign(zone, statistics)
        self.ms = ms
    
    @logAPICall
    def export_ms(self, path, export_format):
        """ 
        export mapping scheme according to given format
        see constants.MSExportTypes for type supported
        NOTE(review): returns False on failure but None (not True) on
        success, so callers must not test the return value truthily —
        confirm the intended contract before relying on it
        """
        if self.ms is None:
            raise SIDDException('Mapping Scheme is required for this action')
        
        builder= WorkflowBuilder(self.operator_options)
        try:
            if export_format == MSExportTypes.XML:
                export_workflow = builder.build_export_ms_workflow(self, path)
            else:
                export_workflow = builder.build_export_distribution_workflow(self, path)
            export_workflow.process()
            logAPICall.log('data export completed', logAPICall.INFO)
        except WorkflowException:
            # workflow could not be built/processed; treated as soft failure
            return False
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            return False
    
    @logAPICall
    def verify_result(self):
        """
        run data quality tests 
        builds the verification workflow, runs each step best-effort
        (failures are logged, not raised) and collects the resulting
        reports into self.quality_reports
        """
        builder = WorkflowBuilder(self.operator_options)
        try:
            verify_workflow = builder.build_verify_result_workflow(self)
        except WorkflowException as err:
            raise SIDDException("error creating workflow for result verification\n%s" % err)
        # process workflow
        for step in verify_workflow.nextstep():
            try:
                step.do_operation()
            except Exception as err:
                # best-effort: one failing check must not abort verification
                logAPICall.log(err, logAPICall.WARNING)
                pass                

        self.quality_reports={}
        if verify_workflow.operator_data.has_key('frag_report'):
            self.quality_reports['fragmentation'] = verify_workflow.operator_data['frag_report'].value
        if verify_workflow.operator_data.has_key('count_report'):
            self.quality_reports['count'] = verify_workflow.operator_data['count_report'].value
            try:
                # clears the report's note for this combination — presumably a
                # caveat that does not apply here; confirm against report format
                if self.zone_type == ZonesTypes.LanduseCount and self.output_type == OutputTypes.Grid:
                    self.quality_reports['count']['_note'] = ''
            except:
                pass
                
        logAPICall.log('result verification completed', logAPICall.INFO)
    
    @logAPICall
    def export_data(self):
        """ export exposure data """
        builder = WorkflowBuilder(self.operator_options)
        try:
            export_workflow = builder.build_export_workflow(self)
        except WorkflowException as err:
            raise SIDDException("error creating workflow for exporting data\n%s" % err)
        try:
            # process workflow 
            export_workflow.process()
            logAPICall.log('data export completed', logAPICall.INFO)            
        except Exception as err:
            # BUGFIX: the original format string "error exporting data\n" % err
            # had no %s placeholder, so it raised TypeError instead of the
            # intended SIDDException carrying the cause
            raise SIDDException("error exporting data\n%s" % err)
    
    # project database access methods
    ##################################
    
    @logAPICall
    def sync(self, direction=SyncModes.Read):
        """
        synchronize data with DB
        SyncModes.Read loads project state from the bsddb project file;
        any other direction writes the current state back and flushes it
        """
        if self.project_file is None or self.db is None:
            raise SIDDProjectException(ProjectErrors.FileNotSet)
        
        if (direction == SyncModes.Read):
            logAPICall.log("reading existing datasets from DB", logAPICall.DEBUG)
            
            # load footprint
            fp_type = self.get_project_data('data.footprint')
            if fp_type is None:
                self.footprint = None
                self.fp_file = None
                self.fp_type = FootprintTypes.None
            else:
                # db stores the enum as string, so compare its str() form
                if (fp_type == str(FootprintTypes.FootprintHt)):
                    self.set_footprint(FootprintTypes.FootprintHt,
                                       self.get_project_data('data.footprint.file'),
                                       self.get_project_data('data.footprint.ht_field'))
                else:
                    self.set_footprint(FootprintTypes.Footprint,
                                       self.get_project_data('data.footprint.file'))
            # load survey
            survey_type = self.get_project_data('data.survey')
            if survey_type is None:
                self.survey = None
                self.survey_file = None
                self.survey_type = SurveyTypes.None
            else:                
                if self.get_project_data('data.survey.is_complete') == 'True':
                    self.set_survey(SurveyTypes.CompleteSurvey,
                                    self.get_project_data('data.survey.file'))
                else:
                    self.set_survey(SurveyTypes.SampledSurvey,
                                    self.get_project_data('data.survey.file'))
            
            # load zone
            zone_type = self.get_project_data('data.zones')
            if zone_type is None:
                self.zones = None
                self.zone_file = None                
                self.zone_type = ZonesTypes.None
            else:
                if zone_type == str(ZonesTypes.Landuse):                    
                    self.set_zones(ZonesTypes.Landuse,
                                   self.get_project_data('data.zones.file'),
                                   self.get_project_data('data.zones.class_field'))
                else:
                    self.set_zones(ZonesTypes.LanduseCount,
                                   self.get_project_data('data.zones.file'),
                                   self.get_project_data('data.zones.class_field'),
                                   self.get_project_data('data.zones.count_field'),
                                   self.get_project_data('data.zones.area_field'))
                    
            # load popgrid
            pop_type = self.get_project_data('data.popgrid')
            if pop_type is None:
                self.popgrid =None
                self.popgrid_type = PopGridTypes.None
                self.popgrid_file = None
                self.pop_field = ''
            else:
                self.set_popgrid(PopGridTypes.Grid,
                                 self.get_project_data('data.popgrid.file'),
                                 self.get_project_data('data.popgrid.pop_field'),
                                 self.get_project_data('data.popgrid.pop_to_bldg')) 
            
            # load output type
            output_type = self.get_project_data('data.output')
            if output_type == "Zone":
                self.output_type = OutputTypes.Zone
            else:
                self.output_type = OutputTypes.Grid
            
            # load mapping scheme (stored as serialized text)
            ms_str = self.get_project_data('data.ms')
            if ms_str is not None:
                self.ms = MappingScheme(None)
                self.ms.from_text(ms_str)

            # db stores booleans as strings, so compare against "True"
            use_sampling = self.get_project_data('stratified.sampling')
            if use_sampling is None:
                self.operator_options['stratified.sampling']= False # default to not use sampling method
            else:
                self.operator_options['stratified.sampling']= (use_sampling == "True")
                
            # load taxonomy related options
            attr_order = self.get_project_data('attribute.order')
            if attr_order is not None:
                self.operator_options['attribute.order'] = json.loads(attr_order)                
            for attr in self.operator_options['taxonomy'].attributes:
                # per-attribute options are stored under the attribute name
                attr_options = self.get_project_data(attr.name)
                if attr_options is not None:
                    self.operator_options[attr.name] = json.loads(attr_options)
               
            extrapolation = self.get_project_data("proc.extrapolation")
            if extrapolation is not None:
                # NOTE: converting extrapolation to enum is required
                #       because comparison of str vs. enum is not valid            
                self.operator_options["proc.extrapolation"] = makeEnum(ExtrapolateOptions, extrapolation)
            else:
                self.operator_options["proc.extrapolation"] = ExtrapolateOptions.Fraction
            
            # load export settings 
            export_type = self.get_project_data('export.type')
            if export_type is not None:
                self.export_type = makeEnum(ExportTypes, export_type)
            export_path = self.get_project_data('export.path')
            if export_path is not None:
                self.export_path = export_path
            
        else:
            logAPICall.log("store existing datasets into DB", logAPICall.DEBUG)            
            # store footprint (save_project_data deletes the key when None)
            if self.fp_type == FootprintTypes.None:
                self.save_project_data('data.footprint', None)
                self.save_project_data('data.footprint.file', None)
                self.save_project_data('data.footprint.ht_field', None)
            else:
                self.save_project_data('data.footprint', self.fp_type)
                self.save_project_data('data.footprint.file', self.fp_file)
                if self.fp_type == FootprintTypes.FootprintHt:
                    self.save_project_data('data.footprint.ht_field', self.fp_ht_field)
                else:
                    self.save_project_data('data.footprint.ht_field', None)
                
            # store survey
            if self.survey_type == SurveyTypes.None:
                self.save_project_data('data.survey', None)
                self.save_project_data('data.survey.file', None)
            else:
                self.save_project_data('data.survey', self.survey_type)
                self.save_project_data('data.survey.file', self.survey_file)
                self.save_project_data('data.survey.is_complete', (self.survey_type == SurveyTypes.CompleteSurvey))

            # store zone
            if self.zone_type == ZonesTypes.None:
                self.save_project_data('data.zones', None)
                self.save_project_data('data.zones.file', None)
                self.save_project_data('data.zones.class_field', None)
                self.save_project_data('data.zones.count_field', None)
            else:
                self.save_project_data('data.zones', self.zone_type)
                self.save_project_data('data.zones.file', self.zone_file)
                self.save_project_data('data.zones.class_field', self.zone_field)
                if self.zone_type == ZonesTypes.LanduseCount:
                    self.save_project_data('data.zones.count_field', self.zone_count_field)
                    self.save_project_data('data.zones.area_field', self.zone_area_field)
                else:
                    self.save_project_data('data.zones.count_field', None)
                    self.save_project_data('data.zones.area_field', None)
            
            # store popgrid
            if self.popgrid_type == PopGridTypes.None:
                self.save_project_data('data.popgrid', None)
                self.save_project_data('data.popgrid.file', None)
                self.save_project_data('data.popgrid.pop_field', None)
                self.save_project_data('data.popgrid.pop_to_bldg', None)
            else:
                self.save_project_data('data.popgrid', self.popgrid_type)
                self.save_project_data('data.popgrid.file', self.popgrid_file)
                self.save_project_data('data.popgrid.pop_field', self.pop_field)
                self.save_project_data('data.popgrid.pop_to_bldg', self.pop_to_bldg)
            
            # store output type
            self.save_project_data('data.output', self.output_type)
            
            # store mapping scheme (serialized as XML text)
            if self.ms is None:
                self.save_project_data('data.ms', None)
            else:
                self.save_project_data('data.ms', self.ms.to_xml())
            
            if self.operator_options.has_key('stratified.sampling'):
                self.save_project_data('stratified.sampling',  self.operator_options['stratified.sampling'])            

            # save taxonomy order 
            if self.operator_options.has_key('attribute.order'):
                self.save_project_data('attribute.order',  json.dumps(self.operator_options['attribute.order']))
            for attr in self.operator_options['taxonomy'].attributes:
                if self.operator_options.has_key(attr.name):
                    self.save_project_data(attr.name, json.dumps(self.operator_options[attr.name]))
            
            # save processing attributes
            if self.operator_options.has_key("proc.extrapolation"):
                self.save_project_data("proc.extrapolation", self.operator_options["proc.extrapolation"])
            
            # save export settings
            self.save_project_data('export.type', getattr(self, 'export_type', None))
            self.save_project_data('export.path', getattr(self, 'export_path', None))
            
            # flush to disk
            self.db.sync()
        
        # after each sync 
        # project is same as db, so save no longer required
        self.require_save = False

    # bsddb help functions
    ##################################    
    def get_project_data(self, attrib):
        """ return the value stored in the project db under key attrib, or None if absent """
        if not self.db.has_key(attrib):
            logAPICall.log('%s does not exist in db' % attrib, logAPICall.DEBUG_L2)
            return None
        value = self.db[attrib]
        # log only a prefix of the value to keep debug output short
        logAPICall.log('read from db %s => %s ' % (attrib, str(value)[0:25]), logAPICall.DEBUG_L2)
        return value

    def save_project_data(self, attrib, value):
        """ store value (stringified) under key attrib; a None value deletes the key """
        if value is not None:
            logAPICall.log('save to db %s => %s ' % (attrib, str(value)[0:25]), logAPICall.DEBUG_L2)
            self.db[attrib] = str(value)
            return
        # None means remove the entry entirely
        logAPICall.log('delete from db %s ' % (attrib), logAPICall.DEBUG_L2)
        if self.db.has_key(attrib):
            del self.db[attrib]

    # protected helper functions
    ##################################
    
    def load_data(self, input_param, layer, output_file):
        """
        load one project dataset (footprint / zones / survey / popgrid) by
        running the matching load workflow.
        input_param: project attribute holding the source file path
        layer / output_file: operator_data keys for the loaded layer and temp file
        returns (layer_value, temp_file_path), or (None, None) when no file
        is configured.
        raises Exception when input_param is unknown or the workflow is not ready.
        """
        input_file = getattr(self, input_param, None)
        if input_file is None:
            # FIX: original returned bare None here, which made callers that
            # tuple-unpack the result fail with TypeError; keep the shape stable
            return None, None

        builder = WorkflowBuilder(self.operator_options)
        # create workflow appropriate for the requested dataset
        if input_param == 'fp_file':
            workflow = builder.build_load_fp_workflow(self)
        elif input_param == 'zone_file':
            workflow = builder.build_load_zones_workflow(self)
        elif input_param == 'survey_file':
            workflow = builder.build_load_survey_workflow(self)
        elif input_param == 'popgrid_file':
            workflow = builder.build_load_popgrid_workflow(self)
        else:
            raise Exception('Data Type Not Recognized %s' % input_param)

        if not workflow.ready:
            raise Exception('Cannot load data with %s' % input_param)
        workflow.process()

        logAPICall.log('data file %s loaded' % input_file, logAPICall.INFO)
        return workflow.operator_data[layer].value, workflow.operator_data[output_file].value

    def do_build_ms(self, isEmpty=False, useSampling=False):
        """ create mapping scheme """
        workflow_builder = WorkflowBuilder(self.operator_options)
        # force reload existing survey
        self.survey = None

        # pick the workflow flavor: stratified sampling vs. regular (or empty)
        if useSampling:
            workflow = workflow_builder.build_sampling_ms_workflow(self)
        else:
            workflow = workflow_builder.build_ms_workflow(self, isEmpty)
        if not workflow.ready:
            raise SIDDException(workflow.errors)

        # run workflow and pull results out of operator_data
        workflow.process()
        self.ms = workflow.operator_data['ms'].value
        if useSampling:
            self.zone_stats = workflow.operator_data['zone_stats'].value
        for _zone, _stats in self.ms.assignments():
            _stats.refresh_leaves()

        logAPICall.log('mapping scheme created', logAPICall.INFO)
        self.require_save = True
示例#13
0
    def do_operation(self):
        """
        Create a mapping scheme by combining a stratified sampled survey
        with building footprints and zones.

        inputs (in order): footprint layer, footprint area field, footprint
        height field, zone layer, zone field, survey layer
        outputs: [0] MappingScheme, [1] per-zone building-fraction dict

        NOTE(review): assumes the survey contains exactly 3 sampling groups
        per zone, matching len(self.weights) -- confirm with upstream data.
        """
        # input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value

        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(area_field, fp_layer.name()), self.__class__)
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(ht_field, fp_layer.name()), self.__class__)
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(HT_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(TAX_FIELD_NAME, svy_layer.name()))

        # load zone classes
        # the operations below must be performed for each zone
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        # field indices must be re-resolved on the joined layer
        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)
        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(AREA_FIELD_NAME, svy_layer.name()))
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError("Field %s does not exist in %s" %(TAX_FIELD_NAME, svy_layer.name()))

        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {}
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {}
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0

        # count samples and accumulate stories per sampling group, per zone
        for _rec in layer_features(tmp_join_layer):
            # FIX: zone must be read from each record; the original used the
            # leftover _zone from the initialization loop above, crediting all
            # samples to a single arbitrary zone
            _zone_str = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone_str], _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone_str], _samp_grp, _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err, logAPICall.WARNING)

        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups", self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value == -1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError("Survey groups must have same number of samples", self.__class__)
            # sort groups by accumulated stories; group with more stories
            # receives the next weight in self.weights
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][group_stories_for_sort[key]] = self.weights[idx]

        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            # FIX: height was read with the size field index (svy_size_idx)
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            try:
                # FIX: weights must come from this record's zone (_zone_str),
                # not the stale _zone left over from the previous loop
                group_weight = _zone_group_weight[_zone_str]
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str, group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str, _sample_size*group_weight[_sample_grp])
                # FIX: use this record's height (_sample_ht); _ht was stale
                self.increment_dict(_zone_a_exp[_zone_str], _tax_str, _sample_size*_sample_ht*group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log("error processing sample with building type: %s" % _tax_str, logAPICall.WARNING)

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)
        # FIX: second join was loaded under the first join's layer name
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)

        zone_idx = layer_field_index(tmp_join_layer2, zone_field)
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht

        # calculate building ratios for each zone
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a)
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())
            # E[A] estimated total building area for zone
            # FIX: guard against zones with no footprints (division by zero);
            # e_at_total == 0 makes the count-based fallback below kick in
            if _zone_total_count[_zone] > 0:
                e_at_total = _zone_total_area[_zone] * _zone_total_ht[_zone] / _zone_total_count[_zone]
            else:
                e_at_total = 0

            # calculate expected values
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]
                if e_at_cluster == 0 or e_at_total == 0:
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building
                    e_gt_cluster = e_at_cluster / e_nt_cluster
                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster
                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster

        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)
            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str, self._parse_order, self._parse_modifiers, add_times=int(_e_exp*1000))
            # finalize call is required before the stats tree can be used
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)

        # clean up
        # FIX: also release and remove the second temporary join
        del tmp_join_layer, tmp_join_layer2, analyzer
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join_file2)

        # assign output
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp
示例#14
0
class Project(object):
    """
    SIDD project contains data and operators necessary to create an exposure
    database from given dataset
    """

    # constructor / destructor
    ##################################

    def __init__(self, app_config, taxonomy):
        """ constructor """
        # private temp directory for all intermediate files of this project
        self.temp_dir = get_temp_dir('tmp%s' % get_random_name())
        self.app_config = app_config
        parse_modifiers = app_config.get('options', 'parse_modifier', True, bool)
        # options shared by every operator created for this project
        self.operator_options = {
            'tmp_dir': self.temp_dir,
            'taxonomy': taxonomy,
            'parse_modifiers': parse_modifiers,
        }
        self.reset()

        # no backing db until set_project_path is called
        self.project_file = None
        self.db = None
        self.require_save = False

    def __del__(self):
        """
        destructor: best-effort cleanup of temp files and project db
        NOTE: Python gives no guarantee this method is ever invoked
        """
        self.clean_up()

    def clean_up(self):
        """
        Best-effort cleanup: remove the project temp directory and close the
        project db. All failures are logged and swallowed so cleanup never
        propagates errors (this is also called from __del__).
        """
        try:
            logAPICall.log(
                'attempt to delete project temp dir %s' % self.temp_dir,
                logAPICall.DEBUG)
            if os.path.exists(self.temp_dir):
                # release workflow first; it may hold references into temp files
                del self.workflow
                if self.exposure is not None:
                    del self.exposure  # must delete QGIS layer, otherwise exposure_file becomes locked
                    # and will generate error on shutil.rmtree
                shutil.rmtree(self.temp_dir)
        except Exception as err:
            # deliberate broad catch: cleanup must not raise
            logAPICall.log(
                'failed to delete temporary directory: %s' % str(err),
                logAPICall.WARNING)
        try:
            # close the backing db only if a project file was ever opened
            if self.project_file is not None and self.db is not None:
                self.db.close()
        except Exception:
            pass

    # data setter methods
    ##################################
    @logAPICall
    def set_project_path(self, project_file):
        """
        open (or create from the bundled template) the bsddb project file
        at the given path and read its version information.
        raises SIDDProjectException(FileFormatError) when the file cannot
        be opened or read.
        """
        try:
            if (not os.path.exists(project_file)):
                # new project: start from template file
                shutil.copyfile(FILE_PROJ_TEMPLATE, project_file)
            self.db = bsddb.btopen(project_file, 'c')
            self.version_major = self.get_project_data('version_major')
            self.version_minor = self.get_project_data('version_minor')
            logAPICall.log(
                'opening project file version %s.%s' %
                (self.version_major, self.version_minor), logAPICall.INFO)
            self.project_file = project_file
            self.require_save = True
        except Exception as err:
            # FIX: was a bare except, which also swallowed SystemExit and
            # KeyboardInterrupt and discarded the underlying cause
            logAPICall.log('failed to open project file: %s' % str(err),
                           logAPICall.WARNING)
            raise SIDDProjectException(ProjectErrors.FileFormatError)

    @logAPICall
    def set_footprint(self, fp_type, fp_file='', ht_field=''):
        """ record footprint dataset selection and mark project dirty """
        self.fp_type = fp_type
        self.fp_file = fp_file
        self.fp_ht_field = ht_field
        self.require_save = True

    @logAPICall
    def set_zones(self,
                  zone_type,
                  zone_file='',
                  zone_field='',
                  zone_count_field='',
                  zone_area_field=''):
        """ record zone dataset selection and mark project dirty """
        self.zone_type = zone_type
        self.zone_file = zone_file
        self.zone_field = zone_field
        self.zone_count_field = zone_count_field
        self.zone_area_field = zone_area_field
        self.require_save = True

    @logAPICall
    def set_survey(self, survey_type, survey_file='', survey_format='GEMDB'):
        """ record survey dataset selection and mark project dirty """
        self.survey_type = survey_type
        self.survey_file = survey_file
        self.survey_format = survey_format
        self.require_save = True

    @logAPICall
    def set_popgrid(self,
                    popgrid_type,
                    popgrid_file='',
                    pop_field='',
                    pop_to_bldg=1):
        """
        record population-grid dataset selection and mark project dirty
        pop_to_bldg: persons-per-building factor used for conversion
        """
        self.popgrid_type = popgrid_type
        self.popgrid_file = popgrid_file
        self.pop_field = pop_field
        self.pop_to_bldg = pop_to_bldg
        # FIX: every other data setter flags the project as dirty;
        # this one was missing it, so popgrid changes could be lost on sync
        self.require_save = True

    @logAPICall
    def set_output_type(self, output_type):
        """ record desired exposure output type and mark project dirty """
        self.output_type = output_type
        self.require_save = True

    @logAPICall
    def set_export(self, export_type, export_path):
        """ record export format/destination and mark project dirty """
        self.export_type = export_type
        self.export_path = export_path
        self.require_save = True

    @logAPICall
    def reset(self, sync=False):
        """
        reset project to default values, with option to also clear underlying db
        """
        # footprint defaults
        self.fp_type = FootprintTypes.None
        self.fp_file = ''
        self.fp_ht_field = ''

        # survey defaults
        self.survey_type = SurveyTypes.None
        self.survey_file = ''
        self.survey_format = 'GEMDB'  #'CSV'

        # zone defaults
        self.zone_type = ZonesTypes.None
        self.zone_file = ''
        self.zone_field = ''
        self.zone_count_field = ''
        self.zone_area_field = ''

        # population-grid defaults
        self.popgrid_type = PopGridTypes.None
        self.popgrid_file = ''
        self.pop_field = ''
        self.pop_to_bldg = 1

        # no mapping scheme; default output as grid
        self.ms = None
        self.output_type = OutputTypes.Grid

        self.exposure = None

        # export defaults
        self.export_type = ExportTypes.Shapefile
        self.export_path = ''

        # empty workflow
        self.workflow = Workflow()

        # clear status
        self.status = ProjectStatus.NotVerified
        self.errors = []

        self.require_save = True
        if sync:
            # write the defaults through to the project db
            self.sync(SyncModes.Write)

    # exposure processing methods
    ##################################
    @logAPICall
    def load_footprint(self):
        # only load if all required fields exists
        if self.fp_type == FootprintTypes.None:
            return
        if self.fp_file == '':
            return
        if self.fp_type == FootprintTypes.FootprintHt and self.fp_ht_field == '':
            return

        self.fp, self.fp_tmp_file = self.load_data('fp_file', 'fp', 'fp_file')
        return

    @logAPICall
    def load_zones(self):
        # only load if all required fields exists
        if self.zone_type == ZonesTypes.None:
            return
        if self.zone_file == '' or self.zone_field == '':
            return
        if self.fp_type == ZonesTypes.LanduseCount and self.zone_count_field == '':
            return

        self.zone, self.zone_tmp_file = self.load_data('zone_file', 'zone',
                                                       'zone_file')
        return

    @logAPICall
    def load_survey(self):
        """ load survey dataset into the project """
        (self.survey,
         self.survey_tmp_file) = self.load_data('survey_file', 'survey', 'survey_file')

    @logAPICall
    def load_popgrid(self):
        if self.popgrid_type == PopGridTypes.None:
            return

        self.survey, self.survey_tmp_file = self.load_data(
            'popgrid_file', 'popgrid', 'popgrid_file')
        return

    @logAPICall
    def verify_data(self):
        """ verify existing data and create workflow """
        # rebuild workflow from the currently configured datasets
        builder = WorkflowBuilder(self.operator_options)
        self.workflow = builder.build_workflow(self)

        # a complete workflow means exposure can be generated directly
        self.status = (ProjectStatus.ReadyForExposure if self.workflow.ready
                       else ProjectStatus.ReadyForMS)
        self.errors = self.workflow.errors
        self.exposure = None
        logAPICall.log('input verification completed', logAPICall.INFO)

    @logAPICall
    def build_exposure(self):
        """ building exposure database from workflow """
        # drive every remaining workflow step to completion
        for operation in self.build_exposure_steps():
            operation.do_operation()

    @logAPICall
    def build_exposure_total_steps(self):
        """ return the number of steps in the exposure workflow """
        if self.workflow.ready:
            return self.workflow.steps()
        raise SIDDException('exposure workflow not complete')

    @logAPICall
    def build_exposure_steps(self):
        """
        Generator that yields each exposure-workflow operator for the caller
        to execute; after the last step it stores the resulting exposure
        layer/file on the project. Raises SIDDException when the workflow or
        mapping scheme is not ready.
        """
        if not self.workflow.ready:
            raise SIDDException(
                'Cannot create exposure with current datasets. Please revise input'
            )

        if not self.ms.is_valid:
            raise SIDDException('Current mapping scheme is not valid')

        # make sure statistics leaves reflect modifiers / attribute order
        for zone in self.ms.zones:
            zone.stats.refresh_leaves(with_modifier=True,
                                      order_attributes=True)

        # drop previous exposure so its shapefile can be replaced
        if getattr(self, 'exposure', None) is not None:
            del self.exposure
            remove_shapefile(self.exposure_file)

        # caller is expected to invoke do_operation() on each yielded step
        for op in self.workflow.nextstep():
            yield op

        # when all steps are completed, set resulting exposure
        self.exposure = self.workflow.operator_data['exposure'].value
        self.exposure_file = self.workflow.operator_data['exposure_file'].value
        if self.workflow.operator_data.has_key('exposure_grid'):
            self.exposure_grid = self.workflow.operator_data[
                'exposure_grid'].value

        logAPICall.log('exposure data created %s' % self.exposure_file,
                       logAPICall.INFO)

    @logAPICall
    def build_ms(self):
        """ build mapping scheme from survey data """
        # make sure survey exists
        if (self.survey_type == SurveyTypes.None):
            raise SIDDException(
                'survey is required for creating mapping scheme')
        # try to create ms using random
        try:
            use_sampling = self.operator_options['stratified.sampling']
            return self.do_build_ms(isEmpty=False, useSampling=use_sampling)
        except Exception as err:
            self.create_empty_ms()
            raise SIDDException('Unable to create Mapping Scheme:%s' %
                                str(err))

    @logAPICall
    def create_empty_ms(self):
        """ build and return an empty mapping scheme """
        return self.do_build_ms(isEmpty=True)

    @logAPICall
    def load_ms(self, path):
        """ load mapping scheme from XML """
        if self.zone_type != ZonesTypes.None:
            self.create_empty_ms()
        builder = WorkflowBuilder(self.operator_options)
        ms_workflow = builder.build_load_ms_workflow(self, path)
        ms_workflow.process()
        ms = ms_workflow.operator_data['ms'].value
        if self.ms is not None:
            # include existing zones from current ms
            new_zones = [zone.name for zone in ms.zones]
            for existing_zone in self.ms.zones:
                try:
                    new_zones.index(existing_zone.name)
                except:
                    # not found
                    statistics = Statistics(self.ms.taxonomy)
                    zone = MappingSchemeZone(existing_zone.name)
                    ms.assign(zone, statistics)
        self.ms = ms

    @logAPICall
    def export_ms(self, path, export_format):
        """ 
        export mapping scheme according to given format
        see constants.MSExportTypes for type supported
        returns True on success, False on failure
        raises SIDDException when no mapping scheme exists
        """
        if self.ms is None:
            raise SIDDException('Mapping Scheme is required for this action')

        builder = WorkflowBuilder(self.operator_options)
        try:
            if export_format == MSExportTypes.XML:
                export_workflow = builder.build_export_ms_workflow(self, path)
            else:
                export_workflow = builder.build_export_distribution_workflow(
                    self, path)
            export_workflow.process()
            logAPICall.log('data export completed', logAPICall.INFO)
            # FIX: original returned None on success and False on failure --
            # both falsy, so callers could not distinguish the two outcomes
            return True
        except WorkflowException:
            return False
        except Exception as err:
            logAPICall.log(err, logAPICall.ERROR)
            return False

    @logAPICall
    def verify_result(self):
        """
        run data quality tests on the generated exposure and collect the
        resulting reports into self.quality_reports (keys: 'fragmentation',
        'count' -- only those produced by the workflow)
        """
        builder = WorkflowBuilder(self.operator_options)
        try:
            verify_workflow = builder.build_verify_result_workflow(self)
        except WorkflowException as err:
            raise SIDDException(
                "error creating workflow for result verification\n%s" % err)
        # process workflow; individual step failures are logged, not fatal
        for step in verify_workflow.nextstep():
            try:
                step.do_operation()
            except Exception as err:
                logAPICall.log(err, logAPICall.WARNING)
                pass

        # collect whichever reports the workflow produced
        self.quality_reports = {}
        if verify_workflow.operator_data.has_key('frag_report'):
            self.quality_reports[
                'fragmentation'] = verify_workflow.operator_data[
                    'frag_report'].value
        if verify_workflow.operator_data.has_key('count_report'):
            self.quality_reports['count'] = verify_workflow.operator_data[
                'count_report'].value
            try:
                # suppress the count-report note for landuse-count zones with
                # grid output; best-effort -- report may not be a mapping
                if self.zone_type == ZonesTypes.LanduseCount and self.output_type == OutputTypes.Grid:
                    self.quality_reports['count']['_note'] = ''
            except:
                pass

        logAPICall.log('result verification completed', logAPICall.INFO)

    @logAPICall
    def export_data(self):
        """
        export exposure data using the configured export settings
        raises SIDDException when the export workflow cannot be built or fails
        """
        builder = WorkflowBuilder(self.operator_options)
        try:
            export_workflow = builder.build_export_workflow(self)
        except WorkflowException as err:
            raise SIDDException(
                "error creating workflow for exporting data\n%s" % err)
        try:
            # process workflow
            export_workflow.process()
            logAPICall.log('data export completed', logAPICall.INFO)
        except Exception as err:
            # FIX: format string was missing its %s placeholder, so this line
            # raised TypeError instead of the intended SIDDException
            raise SIDDException("error exporting data\n%s" % err)

    # project database access methods
    ##################################

    @logAPICall
    def sync(self, direction=SyncModes.Read):
        """ synchronize data with DB """
        if self.project_file is None or self.db is None:
            raise SIDDProjectException(ProjectErrors.FileNotSet)

        if (direction == SyncModes.Read):
            logAPICall.log("reading existing datasets from DB",
                           logAPICall.DEBUG)

            # load footprint
            fp_type = self.get_project_data('data.footprint')
            if fp_type is None:
                self.footprint = None
                self.fp_file = None
                self.fp_type = FootprintTypes.None
            else:
                if (fp_type == str(FootprintTypes.FootprintHt)):
                    self.set_footprint(
                        FootprintTypes.FootprintHt,
                        self.get_project_data('data.footprint.file'),
                        self.get_project_data('data.footprint.ht_field'))
                else:
                    self.set_footprint(
                        FootprintTypes.Footprint,
                        self.get_project_data('data.footprint.file'))
            # load survey
            survey_type = self.get_project_data('data.survey')
            if survey_type is None:
                self.survey = None
                self.survey_file = None
                self.survey_type = SurveyTypes.None
            else:
                if self.get_project_data('data.survey.is_complete') == 'True':
                    self.set_survey(SurveyTypes.CompleteSurvey,
                                    self.get_project_data('data.survey.file'))
                else:
                    self.set_survey(SurveyTypes.SampledSurvey,
                                    self.get_project_data('data.survey.file'))

            # load zone
            zone_type = self.get_project_data('data.zones')
            if zone_type is None:
                self.zones = None
                self.zone_file = None
                self.zone_type = ZonesTypes.None
            else:
                if zone_type == str(ZonesTypes.Landuse):
                    self.set_zones(
                        ZonesTypes.Landuse,
                        self.get_project_data('data.zones.file'),
                        self.get_project_data('data.zones.class_field'))
                else:
                    self.set_zones(
                        ZonesTypes.LanduseCount,
                        self.get_project_data('data.zones.file'),
                        self.get_project_data('data.zones.class_field'),
                        self.get_project_data('data.zones.count_field'),
                        self.get_project_data('data.zones.area_field'))

            # load popgrid
            pop_type = self.get_project_data('data.popgrid')
            if pop_type is None:
                self.popgrid = None
                self.popgrid_type = PopGridTypes.None
                self.popgrid_file = None
                self.pop_field = ''
            else:
                self.set_popgrid(
                    PopGridTypes.Grid,
                    self.get_project_data('data.popgrid.file'),
                    self.get_project_data('data.popgrid.pop_field'),
                    self.get_project_data('data.popgrid.pop_to_bldg'))

            # load output type
            output_type = self.get_project_data('data.output')
            if output_type == "Zone":
                self.output_type = OutputTypes.Zone
            else:
                self.output_type = OutputTypes.Grid

            # load mapping scheme
            ms_str = self.get_project_data('data.ms')
            if ms_str is not None:
                self.ms = MappingScheme(None)
                self.ms.from_text(ms_str)

            use_sampling = self.get_project_data('stratified.sampling')
            if use_sampling is None:
                self.operator_options[
                    'stratified.sampling'] = False  # default to not use sampling method
            else:
                self.operator_options['stratified.sampling'] = (
                    use_sampling == "True")

            # load taxonomy related options
            attr_order = self.get_project_data('attribute.order')
            if attr_order is not None:
                self.operator_options['attribute.order'] = json.loads(
                    attr_order)
            for attr in self.operator_options['taxonomy'].attributes:
                attr_options = self.get_project_data(attr.name)
                if attr_options is not None:
                    self.operator_options[attr.name] = json.loads(attr_options)

            extrapolation = self.get_project_data("proc.extrapolation")
            if extrapolation is not None:
                # NOTE: converting extrapolation to enum is required
                #       because comparison of str vs. enum is not valid
                self.operator_options["proc.extrapolation"] = makeEnum(
                    ExtrapolateOptions, extrapolation)
            else:
                self.operator_options[
                    "proc.extrapolation"] = ExtrapolateOptions.Fraction

            # load export settings
            export_type = self.get_project_data('export.type')
            if export_type is not None:
                self.export_type = makeEnum(ExportTypes, export_type)
            export_path = self.get_project_data('export.path')
            if export_path is not None:
                self.export_path = export_path

        else:
            logAPICall.log("store existing datasets into DB", logAPICall.DEBUG)
            # store footprint
            if self.fp_type == FootprintTypes.None:
                self.save_project_data('data.footprint', None)
                self.save_project_data('data.footprint.file', None)
                self.save_project_data('data.footprint.ht_field', None)
            else:
                self.save_project_data('data.footprint', self.fp_type)
                self.save_project_data('data.footprint.file', self.fp_file)
                if self.fp_type == FootprintTypes.FootprintHt:
                    self.save_project_data('data.footprint.ht_field',
                                           self.fp_ht_field)
                else:
                    self.save_project_data('data.footprint.ht_field', None)

            # store survey
            if self.survey_type == SurveyTypes.None:
                self.save_project_data('data.survey', None)
                self.save_project_data('data.survey.file', None)
            else:
                self.save_project_data('data.survey', self.survey_type)
                self.save_project_data('data.survey.file', self.survey_file)
                self.save_project_data(
                    'data.survey.is_complete',
                    (self.survey_type == SurveyTypes.CompleteSurvey))

            # store zone
            if self.zone_type == ZonesTypes.None:
                self.save_project_data('data.zones', None)
                self.save_project_data('data.zones.file', None)
                self.save_project_data('data.zones.class_field', None)
                self.save_project_data('data.zones.count_field', None)
            else:
                self.save_project_data('data.zones', self.zone_type)
                self.save_project_data('data.zones.file', self.zone_file)
                self.save_project_data('data.zones.class_field',
                                       self.zone_field)
                if self.zone_type == ZonesTypes.LanduseCount:
                    self.save_project_data('data.zones.count_field',
                                           self.zone_count_field)
                    self.save_project_data('data.zones.area_field',
                                           self.zone_area_field)
                else:
                    self.save_project_data('data.zones.count_field', None)
                    self.save_project_data('data.zones.area_field', None)

            # store popgrid
            if self.popgrid_type == PopGridTypes.None:
                self.save_project_data('data.popgrid', None)
                self.save_project_data('data.popgrid.file', None)
                self.save_project_data('data.popgrid.pop_field', None)
                self.save_project_data('data.popgrid.pop_to_bldg', None)
            else:
                self.save_project_data('data.popgrid', self.popgrid_type)
                self.save_project_data('data.popgrid.file', self.popgrid_file)
                self.save_project_data('data.popgrid.pop_field',
                                       self.pop_field)
                self.save_project_data('data.popgrid.pop_to_bldg',
                                       self.pop_to_bldg)

            # store output type
            self.save_project_data('data.output', self.output_type)

            # store mapping scheme
            if self.ms is None:
                self.save_project_data('data.ms', None)
            else:
                self.save_project_data('data.ms', self.ms.to_xml())

            if self.operator_options.has_key('stratified.sampling'):
                self.save_project_data(
                    'stratified.sampling',
                    self.operator_options['stratified.sampling'])

            # save taxonomy order
            if self.operator_options.has_key('attribute.order'):
                self.save_project_data(
                    'attribute.order',
                    json.dumps(self.operator_options['attribute.order']))
            for attr in self.operator_options['taxonomy'].attributes:
                if self.operator_options.has_key(attr.name):
                    self.save_project_data(
                        attr.name,
                        json.dumps(self.operator_options[attr.name]))

            # save processing attributes
            if self.operator_options.has_key("proc.extrapolation"):
                self.save_project_data(
                    "proc.extrapolation",
                    self.operator_options["proc.extrapolation"])

            # save export settings
            self.save_project_data('export.type',
                                   getattr(self, 'export_type', None))
            self.save_project_data('export.path',
                                   getattr(self, 'export_path', None))

            # flush to disk
            self.db.sync()

        # after each sync
        # project is same as db, so save no longer required
        self.require_save = False

    # bsddb help functions
    ##################################
    def get_project_data(self, attrib):
        """ return value stored in project DB for given key, or None if the
            key does not exist """
        if not self.db.has_key(attrib):
            logAPICall.log('%s does not exist in db' % attrib,
                           logAPICall.DEBUG_L2)
            return None
        logAPICall.log(
            'read from db %s => %s ' %
            (attrib, str(self.db[attrib])[0:25]), logAPICall.DEBUG_L2)
        return self.db[attrib]

    def save_project_data(self, attrib, value):
        """ store value (as string) in project DB under given key;
            a None value deletes the key instead """
        if value is not None:
            logAPICall.log('save to db %s => %s ' % (attrib, str(value)[0:25]),
                           logAPICall.DEBUG_L2)
            self.db[attrib] = str(value)
            return
        # None means remove the entry entirely
        logAPICall.log('delete from db %s ' % (attrib),
                       logAPICall.DEBUG_L2)
        if self.db.has_key(attrib):
            del self.db[attrib]

    # protected helper functions
    ##################################

    def load_data(self, input_param, layer, output_file):
        """ load the data file named by project attribute input_param through
            the matching workflow; returns (layer value, output file value),
            or None when no file is configured """
        input_file = getattr(self, input_param, None)
        if input_file is None:
            return

        builder = WorkflowBuilder(self.operator_options)
        # dispatch to the workflow builder matching the requested data type
        workflow_builders = {
            'fp_file': builder.build_load_fp_workflow,
            'zone_file': builder.build_load_zones_workflow,
            'survey_file': builder.build_load_survey_workflow,
            'popgrid_file': builder.build_load_popgrid_workflow,
        }
        try:
            workflow = workflow_builders[input_param](self)
        except KeyError:
            raise Exception('Data Type Not Recognized %s' % input_param)

        if not workflow.ready:
            raise Exception('Cannot load data with %s' % input_param)
        workflow.process()

        logAPICall.log('data file %s loaded' % input_file, logAPICall.INFO)
        return (workflow.operator_data[layer].value,
                workflow.operator_data[output_file].value)

    def do_build_ms(self, isEmpty=False, useSampling=False):
        """ create mapping scheme """
        builder = WorkflowBuilder(self.operator_options)
        # force reload existing survey
        self.survey = None

        # build the appropriate workflow and verify it can run
        workflow = (builder.build_sampling_ms_workflow(self)
                    if useSampling else
                    builder.build_ms_workflow(self, isEmpty))
        if not workflow.ready:
            raise SIDDException(workflow.errors)

        # run workflow and keep resulting mapping scheme
        workflow.process()
        self.ms = workflow.operator_data['ms'].value
        if useSampling:
            self.zone_stats = workflow.operator_data['zone_stats'].value
        for _zone, _stats in self.ms.assignments():
            _stats.refresh_leaves()

        logAPICall.log('mapping scheme created', logAPICall.INFO)
        self.require_save = True
示例#15
0
    def do_operation(self):
        """ Build a mapping scheme from a stratified (3-group) sampled survey.

        Survey samples are aggregated per zone with group weights, then
        scaled by total footprint area*height per zone to estimate zone-wide
        building counts per taxonomy string.

        Outputs:
            0 - MappingScheme with one statistic tree per zone
            1 - dict of per-zone {taxonomy: building area fraction}

        Raises OperatorError when required fields are missing or survey
        sampling groups are malformed.
        """
        # NOTE(review): input/output verification not performed yet
        fp_layer = self.inputs[0].value
        area_field = self.inputs[1].value
        ht_field = self.inputs[2].value
        zone_layer = self.inputs[3].value
        zone_field = self.inputs[4].value
        svy_layer = self.inputs[5].value

        # make sure required data fields are populated
        area_idx = layer_field_index(fp_layer, area_field)
        if area_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (area_field, fp_layer.name()), self.__class__)
        ht_idx = layer_field_index(fp_layer, ht_field)
        if ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" % (ht_field, fp_layer.name()),
                self.__class__)
        zone_idx = layer_field_index(zone_layer, zone_field)
        if zone_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (zone_field, zone_layer.name()), self.__class__)
        svy_samp_idx = layer_field_index(svy_layer, GRP_FIELD_NAME)
        if svy_samp_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (GRP_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_ht_idx = layer_field_index(svy_layer, HT_FIELD_NAME)
        if svy_ht_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (HT_FIELD_NAME, svy_layer.name()), self.__class__)
        svy_size_idx = layer_field_index(svy_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            # consistency fix: include originating class like sibling checks
            raise OperatorError(
                "Field %s does not exist in %s" %
                (AREA_FIELD_NAME, svy_layer.name()), self.__class__)
        tax_idx = layer_field_index(svy_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (TAX_FIELD_NAME, svy_layer.name()), self.__class__)

        # load zone classes
        # the operations below must be performed for each zone
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # join survey with zones
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(svy_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('compile zone statistics', logAPICall.DEBUG)
        # field indices change in the joined layer; look them up again
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        svy_samp_idx = layer_field_index(tmp_join_layer, GRP_FIELD_NAME)
        svy_ht_idx = layer_field_index(tmp_join_layer, HT_FIELD_NAME)

        svy_size_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        if svy_size_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (AREA_FIELD_NAME, svy_layer.name()), self.__class__)
        tax_idx = layer_field_index(tmp_join_layer, TAX_FIELD_NAME)
        if tax_idx == -1:
            raise OperatorError(
                "Field %s does not exist in %s" %
                (TAX_FIELD_NAME, svy_layer.name()), self.__class__)

        # empty fields for holding the stats
        _zone_n_exp, _zone_p_exp, _zone_a_exp, _zone_e_exp = {}, {}, {}, {}
        _zone_group_counts, _zone_group_stories, _zone_group_weight = {}, {}, {}
        _zone_total_area, _zone_total_count, _zone_total_ht = {}, {}, {}
        for _zone in zone_classes.iterkeys():
            _zone_n_exp[_zone] = {}
            _zone_p_exp[_zone] = {}
            _zone_a_exp[_zone] = {}
            _zone_e_exp[_zone] = {}
            _zone_group_counts[_zone] = {}
            _zone_group_stories[_zone] = {}
            _zone_group_weight[_zone] = {}
            _zone_total_area[_zone] = 0
            _zone_total_count[_zone] = 0
            _zone_total_ht[_zone] = 0

        # associate group to ratio value
        for _rec in layer_features(tmp_join_layer):
            # BUG FIX: original accumulated into _zone_group_counts[_zone]
            # using the stale loop variable from the init loop above; counts
            # must be keyed by the record's own zone
            _zone_str = str(_rec.attributeMap()[zone_idx].toString())
            _ht = _rec.attributeMap()[svy_ht_idx].toInt()[0]
            _samp_grp = str(_rec.attributeMap()[svy_samp_idx].toString())
            _tax_str = str(_rec.attributeMap()[tax_idx].toString())
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_group_counts[_zone_str],
                                    _samp_grp, 1)
                self.increment_dict(_zone_group_stories[_zone_str],
                                    _samp_grp, _ht)
            except Exception as err:
                logAPICall.log("Error processing record %s" % err,
                               logAPICall.WARNING)

        for _zone in zone_classes.iterkeys():
            if len(_zone_group_counts[_zone]) != 3:
                raise OperatorError("Survey must have 3 sampling groups",
                                    self.__class__)
            cmp_value = -1
            for _grp, _count in _zone_group_counts[_zone].iteritems():
                if cmp_value == -1:
                    cmp_value = _count
                if cmp_value != _count:
                    raise OperatorError(
                        "Survey groups must have same number of samples",
                        self.__class__)
            # sort by stories
            group_stories_for_sort = {}
            for _grp, _ht in _zone_group_stories[_zone].iteritems():
                group_stories_for_sort[_ht] = _grp
            sorted_keys = group_stories_for_sort.keys()
            sorted_keys.sort()
            # assign group to weight
            for idx, key in enumerate(sorted_keys):
                _zone_group_weight[_zone][
                    group_stories_for_sort[key]] = self.weights[idx]

        # aggregate values from survey for each building type
        # - count (n)
        # - floor area (p)
        # - total area (a)
        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            _sample_grp = str(_f.attributeMap()[svy_samp_idx].toString())
            _sample_size = _f.attributeMap()[svy_size_idx].toDouble()[0]
            # BUG FIX: sample height was read from svy_size_idx (the area
            # field) instead of svy_ht_idx
            _sample_ht = _f.attributeMap()[svy_ht_idx].toDouble()[0]
            # BUG FIX: weights were looked up with stale _zone instead of
            # this record's zone
            group_weight = _zone_group_weight[_zone_str]
            try:
                self._taxonomy.parse(_tax_str)
                self.increment_dict(_zone_n_exp[_zone_str], _tax_str,
                                    group_weight[_sample_grp])
                self.increment_dict(_zone_p_exp[_zone_str], _tax_str,
                                    _sample_size * group_weight[_sample_grp])
                # BUG FIX: total area used stale _ht from the previous loop;
                # use this sample's own height
                self.increment_dict(
                    _zone_a_exp[_zone_str], _tax_str,
                    _sample_size * _sample_ht * group_weight[_sample_grp])
                self.increment_dict(_zone_e_exp[_zone_str], _tax_str, 0)
            except Exception as err:
                logAPICall.log(
                    "error processing sample with building type: %s" %
                    _tax_str, logAPICall.WARNING)
                pass

        # adjust ratio using footprint ht/area
        tmp_join_layername2 = 'join_%s' % get_unique_filename()
        tmp_join_file2 = self._tmp_dir + tmp_join_layername2 + '.shp'
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(fp_layer, zone_layer, tmp_join_file2)
        # BUG FIX: second shapefile was loaded under the first join's
        # layer name (tmp_join_layername)
        tmp_join_layer2 = load_shapefile(tmp_join_file2, tmp_join_layername2)

        zone_idx = layer_field_index(tmp_join_layer2, zone_field)
        area_idx = layer_field_index(tmp_join_layer2, area_field)
        ht_idx = layer_field_index(tmp_join_layer2, ht_field)
        for _f in layer_features(tmp_join_layer2):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            _ht = _f.attributeMap()[ht_idx].toDouble()[0]

            _zone_total_area[_zone_str] += _area
            _zone_total_count[_zone_str] += 1
            _zone_total_ht[_zone_str] += _ht

        # calculate building ratios for each zone
        for _zone in zone_classes.iterkeys():
            # for total count (n) and area (a)
            e_nt_cluster_total = sum(_zone_n_exp[_zone].itervalues())
            e_at_cluster_total = sum(_zone_a_exp[_zone].itervalues())
            # E[A] estimated total building area for zone
            if _zone_total_count[_zone] > 0:
                e_at_total = _zone_total_area[_zone] * _zone_total_ht[
                    _zone] / _zone_total_count[_zone]
            else:
                # no footprint intersects the zone; fall back to count-based
                # ratios below (e_at_total == 0 path)
                e_at_total = 0

            # calculate expected values
            for t, e_at_cluster in _zone_a_exp[_zone].iteritems():
                e_nt_cluster = _zone_n_exp[_zone][t]
                if e_at_cluster == 0 or e_at_total == 0:
                    # area is missing, use count instead
                    _zone_e_exp[_zone][t] = e_nt_cluster / e_nt_cluster_total
                    _zone_a_exp[_zone][t] = 0
                else:
                    # use ratio of area over total area
                    # E[f(t)] building fraction based on sampled area
                    e_ft_cluster = e_at_cluster / e_at_cluster_total
                    # E[G(t)] average area per building
                    e_gt_cluster = e_at_cluster / e_nt_cluster

                    # E[A(t)] estimated total building area for zone for building type
                    e_at = e_at_total * e_ft_cluster
                    # E[N(t)] estimated total number of buildings zone-wide by type
                    e_nt = e_at / e_gt_cluster

                    _zone_e_exp[_zone][t] = e_nt
                    _zone_a_exp[_zone][t] = e_ft_cluster

        # convert the building ratios
        logAPICall.log('create mapping scheme for zones', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone in zone_classes.iterkeys():
            # create mapping scheme for zone
            stats = Statistics(self._taxonomy)

            # use building ratio to create statistic
            for _tax_str, _e_exp in _zone_e_exp[_zone].iteritems():
                stats.add_case(_tax_str,
                               self._parse_order,
                               self._parse_modifiers,
                               add_times=int(_e_exp * 1000))
            # finalize call is required
            stats.finalize()
            ms.assign(MappingSchemeZone(_zone), stats)

        # clean up (BUG FIX: second temp layer/shapefile was leaked)
        del tmp_join_layer, tmp_join_layer2, analyzer
        remove_shapefile(tmp_join_file)
        remove_shapefile(tmp_join_file2)

        # assign output
        self.outputs[0].value = ms
        self.outputs[1].value = _zone_a_exp
示例#16
0
    def do_operation(self):
        """ perform create mapping scheme operation

        Intersects the survey layer with the zone layer, then builds one
        Statistics tree per zone class from the taxonomy strings of the
        joined features. Output 0 receives the resulting MappingScheme.
        """

        # input/output verification already performed during set input/ouput
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field

        logAPICall.log(
            'survey %s, taxfield %s, zone %s, zone_field, %s' %
            (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
            logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)

        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)

        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            # collect optional per-case attributes when present
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional[StatisticNode.AverageSize] = _area
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                # BUG FIX: original replaced the dict here, dropping the
                # AverageSize entry whenever a record had both area and cost
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str),
                           logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(
                    _tax_str, self._parse_order, self._parse_modifiers,
                    additional)
            except TaxonomyParseError as perr:
                logAPICall.log(
                    "error parsing case %s, %s" % (str(_tax_str), str(perr)),
                    logAPICall.WARNING)

        # store data in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = ms
示例#17
0
    def do_operation(self):
        """ perform create mapping scheme operation

        Intersects the survey layer with the zone layer, then builds one
        Statistics tree per zone class from the taxonomy strings of the
        joined features. Output 0 receives the resulting MappingScheme.
        """

        # input/output verification already performed during set input/ouput
        survey_layer = self.inputs[0].value
        zone_layer = self.inputs[1].value
        zone_field = self.inputs[2].value
        tax_field = self._tax_field

        logAPICall.log('survey %s, taxfield %s, zone %s, zone_field, %s' % (survey_layer.name(), tax_field, zone_layer.name(), zone_field),
                       logAPICall.DEBUG)
        tmp_join_layername = 'join_%s' % get_unique_filename()
        tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp'

        # load zone classes
        try:
            zone_classes = layer_field_stats(zone_layer, zone_field)
        except AssertionError as err:
            raise OperatorError(str(err), self.__class__)

        # merge to create stats
        logAPICall.log('merge survey & zone', logAPICall.DEBUG)
        analyzer = QgsOverlayAnalyzer()
        analyzer.intersection(survey_layer, zone_layer, tmp_join_file)
        tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername)

        logAPICall.log('create mapping schemes', logAPICall.DEBUG)
        ms = MappingScheme(self._taxonomy)
        for _zone, _count in zone_classes.iteritems():
            stats = Statistics(self._taxonomy)
            ms.assign(MappingSchemeZone(_zone), stats)

        # loop through all input features
        zone_idx = layer_field_index(tmp_join_layer, zone_field)
        tax_idx = layer_field_index(tmp_join_layer, tax_field)
        area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME)
        cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME)

        for _f in layer_features(tmp_join_layer):
            _zone_str = str(_f.attributeMap()[zone_idx].toString())
            _tax_str = str(_f.attributeMap()[tax_idx].toString())
            # collect optional per-case attributes when present
            additional = {}
            _area = _f.attributeMap()[area_idx].toDouble()[0]
            if _area > 0:
                additional[StatisticNode.AverageSize] = _area
            _cost = _f.attributeMap()[cost_idx].toDouble()[0]
            if _cost > 0:
                # BUG FIX: original replaced the dict here, dropping the
                # AverageSize entry whenever a record had both area and cost
                additional[StatisticNode.UnitCost] = _cost
            logAPICall.log('zone %s => %s' % (_zone_str, _tax_str), logAPICall.DEBUG_L2)
            try:
                ms.get_assignment_by_name(_zone_str).add_case(_tax_str, self._parse_order, self._parse_modifiers, additional)
            except TaxonomyParseError as perr:
                logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING)

        # store data in output
        for _zone, _stats in ms.assignments():
            _stats.finalize()
            _stats.get_tree().value = _zone.name

        # clean up
        del tmp_join_layer, analyzer
        remove_shapefile(tmp_join_file)

        self.outputs[0].value = ms
示例#18
0
    def sync(self, direction=SyncModes.Read):
        """ synchronize data with DB """
        if self.project_file is None or self.db is None:
            raise SIDDProjectException(ProjectErrors.FileNotSet)

        if (direction == SyncModes.Read):
            logAPICall.log("reading existing datasets from DB",
                           logAPICall.DEBUG)

            # load footprint
            fp_type = self.get_project_data('data.footprint')
            if fp_type is None:
                self.footprint = None
                self.fp_file = None
                self.fp_type = FootprintTypes.None
            else:
                if (fp_type == str(FootprintTypes.FootprintHt)):
                    self.set_footprint(
                        FootprintTypes.FootprintHt,
                        self.get_project_data('data.footprint.file'),
                        self.get_project_data('data.footprint.ht_field'))
                else:
                    self.set_footprint(
                        FootprintTypes.Footprint,
                        self.get_project_data('data.footprint.file'))
            # load survey
            survey_type = self.get_project_data('data.survey')
            if survey_type is None:
                self.survey = None
                self.survey_file = None
                self.survey_type = SurveyTypes.None
            else:
                if self.get_project_data('data.survey.is_complete') == 'True':
                    self.set_survey(SurveyTypes.CompleteSurvey,
                                    self.get_project_data('data.survey.file'))
                else:
                    self.set_survey(SurveyTypes.SampledSurvey,
                                    self.get_project_data('data.survey.file'))

            # load zone
            zone_type = self.get_project_data('data.zones')
            if zone_type is None:
                self.zones = None
                self.zone_file = None
                self.zone_type = ZonesTypes.None
            else:
                if zone_type == str(ZonesTypes.Landuse):
                    self.set_zones(
                        ZonesTypes.Landuse,
                        self.get_project_data('data.zones.file'),
                        self.get_project_data('data.zones.class_field'))
                else:
                    self.set_zones(
                        ZonesTypes.LanduseCount,
                        self.get_project_data('data.zones.file'),
                        self.get_project_data('data.zones.class_field'),
                        self.get_project_data('data.zones.count_field'),
                        self.get_project_data('data.zones.area_field'))

            # load popgrid
            pop_type = self.get_project_data('data.popgrid')
            if pop_type is None:
                self.popgrid = None
                self.popgrid_type = PopGridTypes.None
                self.popgrid_file = None
                self.pop_field = ''
            else:
                self.set_popgrid(
                    PopGridTypes.Grid,
                    self.get_project_data('data.popgrid.file'),
                    self.get_project_data('data.popgrid.pop_field'),
                    self.get_project_data('data.popgrid.pop_to_bldg'))

            # load output type
            output_type = self.get_project_data('data.output')
            if output_type == "Zone":
                self.output_type = OutputTypes.Zone
            else:
                self.output_type = OutputTypes.Grid

            # load mapping scheme
            ms_str = self.get_project_data('data.ms')
            if ms_str is not None:
                self.ms = MappingScheme(None)
                self.ms.from_text(ms_str)

            use_sampling = self.get_project_data('stratified.sampling')
            if use_sampling is None:
                self.operator_options[
                    'stratified.sampling'] = False  # default to not use sampling method
            else:
                self.operator_options['stratified.sampling'] = (
                    use_sampling == "True")

            # load taxonomy related options
            attr_order = self.get_project_data('attribute.order')
            if attr_order is not None:
                self.operator_options['attribute.order'] = json.loads(
                    attr_order)
            for attr in self.operator_options['taxonomy'].attributes:
                attr_options = self.get_project_data(attr.name)
                if attr_options is not None:
                    self.operator_options[attr.name] = json.loads(attr_options)

            extrapolation = self.get_project_data("proc.extrapolation")
            if extrapolation is not None:
                # NOTE: converting extrapolation to enum is required
                #       because comparison of str vs. enum is not valid
                self.operator_options["proc.extrapolation"] = makeEnum(
                    ExtrapolateOptions, extrapolation)
            else:
                self.operator_options[
                    "proc.extrapolation"] = ExtrapolateOptions.Fraction

            # load export settings
            export_type = self.get_project_data('export.type')
            if export_type is not None:
                self.export_type = makeEnum(ExportTypes, export_type)
            export_path = self.get_project_data('export.path')
            if export_path is not None:
                self.export_path = export_path

        else:
            logAPICall.log("store existing datasets into DB", logAPICall.DEBUG)
            # store footprint
            if self.fp_type == FootprintTypes.None:
                self.save_project_data('data.footprint', None)
                self.save_project_data('data.footprint.file', None)
                self.save_project_data('data.footprint.ht_field', None)
            else:
                self.save_project_data('data.footprint', self.fp_type)
                self.save_project_data('data.footprint.file', self.fp_file)
                if self.fp_type == FootprintTypes.FootprintHt:
                    self.save_project_data('data.footprint.ht_field',
                                           self.fp_ht_field)
                else:
                    self.save_project_data('data.footprint.ht_field', None)

            # store survey
            if self.survey_type == SurveyTypes.None:
                self.save_project_data('data.survey', None)
                self.save_project_data('data.survey.file', None)
            else:
                self.save_project_data('data.survey', self.survey_type)
                self.save_project_data('data.survey.file', self.survey_file)
                self.save_project_data(
                    'data.survey.is_complete',
                    (self.survey_type == SurveyTypes.CompleteSurvey))

            # store zone
            if self.zone_type == ZonesTypes.None:
                self.save_project_data('data.zones', None)
                self.save_project_data('data.zones.file', None)
                self.save_project_data('data.zones.class_field', None)
                self.save_project_data('data.zones.count_field', None)
            else:
                self.save_project_data('data.zones', self.zone_type)
                self.save_project_data('data.zones.file', self.zone_file)
                self.save_project_data('data.zones.class_field',
                                       self.zone_field)
                if self.zone_type == ZonesTypes.LanduseCount:
                    self.save_project_data('data.zones.count_field',
                                           self.zone_count_field)
                    self.save_project_data('data.zones.area_field',
                                           self.zone_area_field)
                else:
                    self.save_project_data('data.zones.count_field', None)
                    self.save_project_data('data.zones.area_field', None)

            # store popgrid
            if self.popgrid_type == PopGridTypes.None:
                self.save_project_data('data.popgrid', None)
                self.save_project_data('data.popgrid.file', None)
                self.save_project_data('data.popgrid.pop_field', None)
                self.save_project_data('data.popgrid.pop_to_bldg', None)
            else:
                self.save_project_data('data.popgrid', self.popgrid_type)
                self.save_project_data('data.popgrid.file', self.popgrid_file)
                self.save_project_data('data.popgrid.pop_field',
                                       self.pop_field)
                self.save_project_data('data.popgrid.pop_to_bldg',
                                       self.pop_to_bldg)

            # store output type
            self.save_project_data('data.output', self.output_type)

            # store mapping scheme
            if self.ms is None:
                self.save_project_data('data.ms', None)
            else:
                self.save_project_data('data.ms', self.ms.to_xml())

            if self.operator_options.has_key('stratified.sampling'):
                self.save_project_data(
                    'stratified.sampling',
                    self.operator_options['stratified.sampling'])

            # save taxonomy order
            if self.operator_options.has_key('attribute.order'):
                self.save_project_data(
                    'attribute.order',
                    json.dumps(self.operator_options['attribute.order']))
            for attr in self.operator_options['taxonomy'].attributes:
                if self.operator_options.has_key(attr.name):
                    self.save_project_data(
                        attr.name,
                        json.dumps(self.operator_options[attr.name]))

            # save processing attributes
            if self.operator_options.has_key("proc.extrapolation"):
                self.save_project_data(
                    "proc.extrapolation",
                    self.operator_options["proc.extrapolation"])

            # save export settings
            self.save_project_data('export.type',
                                   getattr(self, 'export_type', None))
            self.save_project_data('export.path',
                                   getattr(self, 'export_path', None))

            # flush to disk
            self.db.sync()

        # after each sync
        # project is same as db, so save no longer required
        self.require_save = False
# ----- Example #19 (示例#19) -----
# 0
    def test_WorkflowBuilder(self):
        logging.debug('test_BuildWorkflow')
        
        def get_run_exception(func, param):
            try:
                func(param)
            except Exception as ex:
                import traceback
                traceback.print_exc() 
                return ex
            return None

        # empty proj/ms should be enough for testing
        (proj, proj_file) = self.test_CreateProject(True)
        ms = MappingScheme(self.taxonomy) 
        
        builder = WorkflowBuilder(self.operator_options)

        # test cases raising exception
        ###################
        # test case, empty project, should have errors NeedsZone, NeedsCount, NeedsMS
        workflow = builder.build_workflow(proj)
        self.assertTrue(not workflow.ready)
        self.assertEqual(len(workflow.errors), 3)
        self.assertListEqual(workflow.errors, [WorkflowErrors.NeedsZone, 
                                               WorkflowErrors.NeedsCount, 
                                               WorkflowErrors.NeedsMS])
        
        # test case, only zone, should raise exception need count
        proj.set_zones(ZonesTypes.Landuse, self.zone2_path, self.zone2_field)
        workflow = builder.build_workflow(proj)        
        self.assertTrue(not workflow.ready)
        self.assertEqual(len(workflow.errors), 2)
        self.assertListEqual(workflow.errors, [WorkflowErrors.NeedsCount, 
                                               WorkflowErrors.NeedsMS])
        
        # test case, zone / footprint, should raise exception need ms 
        proj.set_footprint(FootprintTypes.Footprint, self.fp_path)
        workflow = builder.build_workflow(proj)        
        self.assertTrue(not workflow.ready)
        self.assertEqual(len(workflow.errors), 1)
        self.assertListEqual(workflow.errors, [WorkflowErrors.NeedsMS])

        # complete footprint / zone / ms to zone, no exception
        proj.ms = ms 
        proj.set_output_type(OutputTypes.Zone)
        workflow = builder.build_workflow(proj)
        self.assertTrue(workflow.ready)
        self.assertEqual(len(workflow.errors), 0)
        
        # test cases no exception
        ###################

        # complete footprint / zone / ms to grid, no exception 
        proj.set_output_type(OutputTypes.Grid)
        workflow = builder.build_workflow(proj)
        self.assertTrue(workflow.ready)
        self.assertEqual(len(workflow.errors), 0)

        # test case, zonecount and ms to grid, no exception
        proj.set_footprint(FootprintTypes.None) # remove footprint
        proj.set_zones(ZonesTypes.LanduseCount, self.zone_path, self.zone_field, self.bldgcount_field)
        proj.ms = ms
        proj.set_output_type(OutputTypes.Grid)
        workflow = builder.build_workflow(proj)
        self.assertTrue(workflow.ready)
        self.assertEqual(len(workflow.errors), 0)
        
        # test case, zonecount and ms to zone, no exception        
        proj.set_output_type(OutputTypes.Zone)
        workflow = builder.build_workflow(proj)
        self.assertTrue(workflow.ready)
        self.assertEqual(len(workflow.errors), 0)

        # test case, complete survey, no exception
        proj.set_survey(SurveyTypes.CompleteSurvey, self.survey_path)
        workflow = builder.build_workflow(proj)
        self.assertTrue(workflow.ready)
        self.assertEqual(len(workflow.errors), 0)
        
        # clean up
        del proj
        os.remove(proj_file)
# ----- Example #20 (示例#20) -----
# 0
    def sync(self, direction=SyncModes.Read):
        """ synchronize project data with the backing project DB.

        direction -- SyncModes.Read loads all datasets/settings from the
                     DB into this project object; any other value stores
                     the project's current state into the DB and flushes
                     it to disk.
        Raises SIDDProjectException(ProjectErrors.FileNotSet) when no
        project file / DB is attached.
        """
        # a project must be attached to a file-backed DB before syncing
        if self.project_file is None or self.db is None:
            raise SIDDProjectException(ProjectErrors.FileNotSet)
        
        if (direction == SyncModes.Read):
            logAPICall.log("reading existing datasets from DB", logAPICall.DEBUG)
            
            # load footprint
            # a missing key means the dataset is not part of the project
            fp_type = self.get_project_data('data.footprint')
            if fp_type is None:
                self.footprint = None
                self.fp_file = None
                self.fp_type = FootprintTypes.None
            else:
                # NOTE(review): values read back from the DB appear to be
                # strings -- the stored enum is compared against str(enum)
                if (fp_type == str(FootprintTypes.FootprintHt)):
                    self.set_footprint(FootprintTypes.FootprintHt,
                                       self.get_project_data('data.footprint.file'),
                                       self.get_project_data('data.footprint.ht_field'))
                else:
                    self.set_footprint(FootprintTypes.Footprint,
                                       self.get_project_data('data.footprint.file'))
            # load survey
            survey_type = self.get_project_data('data.survey')
            if survey_type is None:
                self.survey = None
                self.survey_file = None
                self.survey_type = SurveyTypes.None
            else:                
                # NOTE(review): the write branch saves a bool for
                # 'data.survey.is_complete' but this compares against the
                # string 'True' -- implies the DB layer stringifies values;
                # confirm against save_project_data
                if self.get_project_data('data.survey.is_complete') == 'True':
                    self.set_survey(SurveyTypes.CompleteSurvey,
                                    self.get_project_data('data.survey.file'))
                else:
                    self.set_survey(SurveyTypes.SampledSurvey,
                                    self.get_project_data('data.survey.file'))
            
            # load zone
            zone_type = self.get_project_data('data.zones')
            if zone_type is None:
                self.zones = None
                self.zone_file = None                
                self.zone_type = ZonesTypes.None
            else:
                if zone_type == str(ZonesTypes.Landuse):                    
                    self.set_zones(ZonesTypes.Landuse,
                                   self.get_project_data('data.zones.file'),
                                   self.get_project_data('data.zones.class_field'))
                else:
                    # LanduseCount zones additionally carry building-count
                    # and area fields
                    self.set_zones(ZonesTypes.LanduseCount,
                                   self.get_project_data('data.zones.file'),
                                   self.get_project_data('data.zones.class_field'),
                                   self.get_project_data('data.zones.count_field'),
                                   self.get_project_data('data.zones.area_field'))
                    
            # load popgrid
            pop_type = self.get_project_data('data.popgrid')
            if pop_type is None:
                self.popgrid =None
                self.popgrid_type = PopGridTypes.None
                self.popgrid_file = None
                self.pop_field = ''
            else:
                # NOTE(review): the stored popgrid type is not inspected
                # beyond the None check -- Grid appears to be the only
                # non-None popgrid type; confirm
                self.set_popgrid(PopGridTypes.Grid,
                                 self.get_project_data('data.popgrid.file'),
                                 self.get_project_data('data.popgrid.pop_field'),
                                 self.get_project_data('data.popgrid.pop_to_bldg')) 
            
            # load output type
            # anything other than "Zone" (including a missing key)
            # defaults to grid output
            output_type = self.get_project_data('data.output')
            if output_type == "Zone":
                self.output_type = OutputTypes.Zone
            else:
                self.output_type = OutputTypes.Grid
            
            # load mapping scheme
            # the mapping scheme is persisted as serialized text and
            # re-parsed here
            ms_str = self.get_project_data('data.ms')
            if ms_str is not None:
                self.ms = MappingScheme(None)
                self.ms.from_text(ms_str)

            # load sampling-method flag (stored stringified, see NOTE above)
            use_sampling = self.get_project_data('stratified.sampling')
            if use_sampling is None:
                self.operator_options['stratified.sampling']= False # default to not use sampling method
            else:
                self.operator_options['stratified.sampling']= (use_sampling == "True")
                
            # load taxonomy related options
            # attribute order and per-attribute options are stored as JSON
            attr_order = self.get_project_data('attribute.order')
            if attr_order is not None:
                self.operator_options['attribute.order'] = json.loads(attr_order)                
            for attr in self.operator_options['taxonomy'].attributes:
                attr_options = self.get_project_data(attr.name)
                if attr_options is not None:
                    self.operator_options[attr.name] = json.loads(attr_options)
               
            extrapolation = self.get_project_data("proc.extrapolation")
            if extrapolation is not None:
                # NOTE: converting extrapolation to enum is required
                #       because comparison of str vs. enum is not valid            
                self.operator_options["proc.extrapolation"] = makeEnum(ExtrapolateOptions, extrapolation)
            else:
                # fall back to fraction-based extrapolation when unset
                self.operator_options["proc.extrapolation"] = ExtrapolateOptions.Fraction
            
            # load export settings 
            # export settings are optional; attributes are only set when
            # present in the DB (readers use getattr with a default)
            export_type = self.get_project_data('export.type')
            if export_type is not None:
                self.export_type = makeEnum(ExportTypes, export_type)
            export_path = self.get_project_data('export.path')
            if export_path is not None:
                self.export_path = export_path
            
        else:
            logAPICall.log("store existing datasets into DB", logAPICall.DEBUG)            
            # store footprint            
            # keys for absent datasets are explicitly cleared (set to None)
            # so stale values from a previous sync cannot survive
            if self.fp_type == FootprintTypes.None:
                self.save_project_data('data.footprint', None)
                self.save_project_data('data.footprint.file', None)
                self.save_project_data('data.footprint.ht_field', None)
            else:
                self.save_project_data('data.footprint', self.fp_type)
                self.save_project_data('data.footprint.file', self.fp_file)
                if self.fp_type == FootprintTypes.FootprintHt:
                    # only footprint-with-height carries a height field
                    self.save_project_data('data.footprint.ht_field', self.fp_ht_field)
                else:
                    self.save_project_data('data.footprint.ht_field', None)
                
            # store survey
            if self.survey_type == SurveyTypes.None:
                self.save_project_data('data.survey', None)
                self.save_project_data('data.survey.file', None)
            else:
                self.save_project_data('data.survey', self.survey_type)
                self.save_project_data('data.survey.file', self.survey_file)
                self.save_project_data('data.survey.is_complete', (self.survey_type == SurveyTypes.CompleteSurvey))

            # store zone
            if self.zone_type == ZonesTypes.None:
                self.save_project_data('data.zones', None)
                self.save_project_data('data.zones.file', None)
                self.save_project_data('data.zones.class_field', None)
                self.save_project_data('data.zones.count_field', None)
            else:
                self.save_project_data('data.zones', self.zone_type)
                self.save_project_data('data.zones.file', self.zone_file)
                self.save_project_data('data.zones.class_field', self.zone_field)
                if self.zone_type == ZonesTypes.LanduseCount:
                    self.save_project_data('data.zones.count_field', self.zone_count_field)
                    self.save_project_data('data.zones.area_field', self.zone_area_field)
                else:
                    self.save_project_data('data.zones.count_field', None)
                    self.save_project_data('data.zones.area_field', None)
            
            # store popgrid
            if self.popgrid_type == PopGridTypes.None:
                self.save_project_data('data.popgrid', None)
                self.save_project_data('data.popgrid.file', None)
                self.save_project_data('data.popgrid.pop_field', None)
                self.save_project_data('data.popgrid.pop_to_bldg', None)
            else:
                self.save_project_data('data.popgrid', self.popgrid_type)
                self.save_project_data('data.popgrid.file', self.popgrid_file)
                self.save_project_data('data.popgrid.pop_field', self.pop_field)
                self.save_project_data('data.popgrid.pop_to_bldg', self.pop_to_bldg)
            
            # store output type
            self.save_project_data('data.output', self.output_type)
            
            # store mapping scheme (serialized to XML text)
            if self.ms is None:
                self.save_project_data('data.ms', None)
            else:
                self.save_project_data('data.ms', self.ms.to_xml())
            
            if self.operator_options.has_key('stratified.sampling'):
                self.save_project_data('stratified.sampling',  self.operator_options['stratified.sampling'])            

            # save taxonomy order 
            # attribute order and per-attribute options are JSON-encoded
            if self.operator_options.has_key('attribute.order'):
                self.save_project_data('attribute.order',  json.dumps(self.operator_options['attribute.order']))
            for attr in self.operator_options['taxonomy'].attributes:
                if self.operator_options.has_key(attr.name):
                    self.save_project_data(attr.name, json.dumps(self.operator_options[attr.name]))
            
            # save processing attributes
            if self.operator_options.has_key("proc.extrapolation"):
                self.save_project_data("proc.extrapolation", self.operator_options["proc.extrapolation"])
            
            # save export settings
            # export attributes may never have been set on this project,
            # hence getattr with a None default
            self.save_project_data('export.type', getattr(self, 'export_type', None))
            self.save_project_data('export.path', getattr(self, 'export_path', None))
            
            # flush to disk
            self.db.sync()
        
        # after each sync the in-memory project matches the DB,
        # so an explicit save is no longer required
        self.require_save = False