def test_BuildMS(self, skipTest=False):
    import csv
    survey = csv.reader(open(self.survey_file, 'r'), delimiter=',', quotechar='"')
    # skip header, there is probably a better way to do this
    survey.next()

    # build statistic tree from the taxonomy strings in the survey
    stats = Statistics(self.taxonomy)
    _count = 0
    for row in survey:
        tax_string = row[2]
        stats.add_case(tax_string, parse_order=self.ms_parse_order)
    stats.finalize()

    # attach the statistics to a single 'ALL' zone
    ms = MappingScheme(self.taxonomy)
    ms_zone = MappingSchemeZone('ALL')
    ms.assign(ms_zone, stats)
    #ms.save(self.ms_file)

    if skipTest:
        return ms

    # compare against the mapping scheme stored on disk
    ms2 = MappingScheme(self.taxonomy)
    ms2.read(self.ms_file)
    self.assertEqual(
        len(ms.get_assignment_by_name("ALL").to_xml().strip()),
        len(ms2.get_assignment_by_name("ALL").to_xml().strip()))
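# For reference: test_BuildMS only reads column index 2 of each survey row (row[2])
# and treats it as the taxonomy string passed to stats.add_case. The rows below are
# a minimal sketch of that layout -- the column names, the other columns, and the
# placeholder taxonomy values are illustrative assumptions, not taken from the real
# survey file.
sample_survey_rows = [
    ['id', 'zone', 'taxonomy'],           # header row, skipped by survey.next()
    ['1', 'ALL', 'TAXONOMY_STRING_1'],    # row[2] is the string fed to add_case
    ['2', 'ALL', 'TAXONOMY_STRING_2'],
]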
def test_LoadMS(self, skipTest=False, statsOnly=True):
    ms = MappingScheme(self.taxonomy)
    ms.read(self.ms_file)

    if skipTest:
        if statsOnly:
            return ms.get_assignment_by_name("ALL")
        else:
            return ms

    # the loaded statistic tree should expose exactly the attributes
    # used as the parse order when it was built
    stats = ms.get_assignment_by_name("ALL")
    attributes = stats.get_attributes(stats.get_tree())
    self.assertEqual(sorted(attributes), sorted(self.ms_parse_order))
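# Both tests double as fixtures: with skipTest=True they return what they build
# instead of asserting on it. A hypothetical round-trip test sketched from the two
# helpers above -- the save() call is assumed from the commented-out
# ms.save(self.ms_file) line in test_BuildMS, and this method is not part of the
# original suite.
def test_RoundTripMS(self):
    ms = self.test_BuildMS(skipTest=True)          # build scheme from the survey CSV
    ms.save(self.ms_file)                          # assumed API (see commented-out call above)
    stats = self.test_LoadMS(skipTest=True)        # reload the 'ALL' assignment
    self.assertEqual(sorted(stats.get_attributes(stats.get_tree())),
                     sorted(self.ms_parse_order))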
def do_operation(self): """ perform create mapping scheme operation """ # input/output verification already performed during set input/ouput survey_layer = self.inputs[0].value zone_layer = self.inputs[1].value zone_field = self.inputs[2].value tax_field = self._tax_field logAPICall.log( 'survey %s, taxfield %s, zone %s, zone_field, %s' % (survey_layer.name(), tax_field, zone_layer.name(), zone_field), logAPICall.DEBUG) tmp_join_layername = 'join_%s' % get_unique_filename() tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp' # load zone classes try: zone_classes = layer_field_stats(zone_layer, zone_field) except AssertionError as err: raise OperatorError(str(err), self.__class__) # merge to create stats logAPICall.log('merge survey & zone', logAPICall.DEBUG) analyzer = QgsOverlayAnalyzer() analyzer.intersection(survey_layer, zone_layer, tmp_join_file) tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername) logAPICall.log('create mapping schemes', logAPICall.DEBUG) ms = MappingScheme(self._taxonomy) for _zone, _count in zone_classes.iteritems(): stats = Statistics(self._taxonomy) ms.assign(MappingSchemeZone(_zone), stats) # loop through all input features zone_idx = layer_field_index(tmp_join_layer, zone_field) tax_idx = layer_field_index(tmp_join_layer, tax_field) area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME) cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME) for _f in layer_features(tmp_join_layer): _zone_str = str(_f.attributeMap()[zone_idx].toString()) _tax_str = str(_f.attributeMap()[tax_idx].toString()) additional = {} _area = _f.attributeMap()[area_idx].toDouble()[0] if _area > 0: additional = {StatisticNode.AverageSize: _area} _cost = _f.attributeMap()[cost_idx].toDouble()[0] if _cost > 0: additional = {StatisticNode.UnitCost: _cost} logAPICall.log('zone %s => %s' % (_zone_str, _tax_str), logAPICall.DEBUG_L2) try: ms.get_assignment_by_name(_zone_str).add_case( _tax_str, self._parse_order, self._parse_modifiers, additional) except TaxonomyParseError as perr: logAPICall.log( "error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING) # store data in output for _zone, _stats in ms.assignments(): _stats.finalize() _stats.get_tree().value = _zone.name # clean up del tmp_join_layer, analyzer remove_shapefile(tmp_join_file) self.outputs[0].value = ms
def do_operation(self): """ perform create mapping scheme operation """ # input/output verification already performed during set input/ouput survey_layer = self.inputs[0].value zone_layer = self.inputs[1].value zone_field = self.inputs[2].value tax_field = self._tax_field logAPICall.log('survey %s, taxfield %s, zone %s, zone_field, %s' % (survey_layer.name(), tax_field, zone_layer.name(), zone_field), logAPICall.DEBUG) tmp_join_layername = 'join_%s' % get_unique_filename() tmp_join_file = self._tmp_dir + tmp_join_layername + '.shp' # load zone classes try: zone_classes = layer_field_stats(zone_layer, zone_field) except AssertionError as err: raise OperatorError(str(err), self.__class__) # merge to create stats logAPICall.log('merge survey & zone', logAPICall.DEBUG) analyzer = QgsOverlayAnalyzer() analyzer.intersection(survey_layer, zone_layer, tmp_join_file) tmp_join_layer = load_shapefile(tmp_join_file, tmp_join_layername) logAPICall.log('create mapping schemes', logAPICall.DEBUG) ms = MappingScheme(self._taxonomy) for _zone, _count in zone_classes.iteritems(): stats = Statistics(self._taxonomy) ms.assign(MappingSchemeZone(_zone), stats) # loop through all input features zone_idx = layer_field_index(tmp_join_layer, zone_field) tax_idx = layer_field_index(tmp_join_layer, tax_field) area_idx = layer_field_index(tmp_join_layer, AREA_FIELD_NAME) cost_idx = layer_field_index(tmp_join_layer, COST_FIELD_NAME) for _f in layer_features(tmp_join_layer): _zone_str = str(_f.attributeMap()[zone_idx].toString()) _tax_str = str(_f.attributeMap()[tax_idx].toString()) additional = {} _area = _f.attributeMap()[area_idx].toDouble()[0] if _area > 0: additional = {StatisticNode.AverageSize: _area} _cost = _f.attributeMap()[cost_idx].toDouble()[0] if _cost > 0: additional = {StatisticNode.UnitCost: _cost} logAPICall.log('zone %s => %s' % (_zone_str, _tax_str) , logAPICall.DEBUG_L2) try: ms.get_assignment_by_name(_zone_str).add_case(_tax_str, self._parse_order, self._parse_modifiers, additional) except TaxonomyParseError as perr: logAPICall.log("error parsing case %s, %s" % (str(_tax_str), str(perr)), logAPICall.WARNING) # store data in output for _zone, _stats in ms.assignments(): _stats.finalize() _stats.get_tree().value = _zone.name # clean up del tmp_join_layer, analyzer remove_shapefile(tmp_join_file) self.outputs[0].value = ms