def accepted(self):
    """Dialog accept handler: persist settings and load the layers the user selected.

    Reads ``self.selected_items_dict`` (populated by update_selected_items),
    builds a {display text: {name, geometry}} dict and hands it to
    ``self.qgis_utils.get_layers`` for loading into QGIS.
    """
    self.save_settings()
    self.update_selected_items()  # Take latest selection into account
    if len(self.selected_items_dict):  # Load selected layers
        layers_dict = {}
        for item_text, data in self.selected_items_dict.items():
            # data[GEOMETRY_TYPE] holds a WKB type name (parseType turns it into
            # a QgsWkbTypes.Type; geometryType reduces it to point/line/polygon).
            # None is used for geometry-less tables.
            layers_dict[item_text] = {
                'name': data[TABLE_NAME],
                'geometry': QgsWkbTypes().geometryType(QgsWkbTypes().parseType(data[GEOMETRY_TYPE])) if data[GEOMETRY_TYPE] else None
            }
        self.selected_items_dict = dict()  # Reset
        self.qgis_utils.get_layers(self._db, layers_dict, load=True)
def set_style():
    """Populate the style combo box with the qml files that match the
    geometry type (Point/Polygon) of the currently selected layer."""
    # NOTE(review): no `self` parameter -- presumably a closure over an
    # enclosing method's `self`; confirm against the surrounding scope.
    layer = self.comboBox_JGfinv.currentLayer()
    if not isinstance(layer, QgsVectorLayer):
        return
    geom_name = QgsWkbTypes().displayString(layer.wkbType())
    # pick the qml sub-directory matching this geometry type (None if neither)
    subdir = next((nm for nm in ['Point', 'Polygon'] if nm in geom_name), None)
    if isinstance(subdir, str):
        search_dir = os.path.join(self.pars_dir, 'qmls', subdir)
        assert os.path.exists(
            search_dir), 'requested qml search dir doesnt exist: %s' % search_dir
        # keep the subdir prefix on each entry for easy loading later
        options = [os.path.join(subdir, fn) for fn in os.listdir(search_dir)]
    else:
        options = []
    options.append('')  # add the empty (no style) selection
    self.setup_comboBox(self.comboBox_JG_style, options)
def __init__(self, canvas):
    """Rectangle-drawing map tool: attach to *canvas* and prepare a red,
    patterned rubber band for drawing the selection rectangle."""
    self.canvas = canvas
    QgsMapToolEmitPoint.__init__(self, self.canvas)
    # GeometryType(2) == polygon geometry for the rubber band
    self.rubberBand = QgsRubberBand(self.canvas, QgsWkbTypes().GeometryType(2))
    self.rubberBand.setColor(Qt.red)
    self.rubberBand.setBrushStyle(Qt.Dense6Pattern)
    self.rubberBand.setWidth(1)
    self.reset()  # clears start/end points and the band itself
def getListOfPoints(qgs_feature_storage):
    """Return the geometries of *qgs_feature_storage* as a list of points.

    qgs_feature_storage can be any vectorLayer /
    QgsProcessingParameterFeatureSource / etc.

    Raises:
        Qneat3GeometryException: if the storage is not of Point geometry.
    """
    # Original mixed an instance call (QgsWkbTypes().displayString) with a
    # static call and a magic number 1; displayString is static and the
    # expected type is spelled out as QgsWkbTypes.Point for clarity.
    given_geom_type = QgsWkbTypes.displayString(qgs_feature_storage.wkbType())
    expected_geom_type = QgsWkbTypes.displayString(QgsWkbTypes.Point)  # 'Point'
    if given_geom_type == expected_geom_type:
        qgsfeatureiterator = getFeaturesFromQgsIterable(qgs_feature_storage)
        return [f.geometry().asPoint() for f in qgsfeatureiterator]
    else:
        raise Qneat3GeometryException(given_geom_type, expected_geom_type)
def _check_ifz(self,  # typical checks for an ifz poly layer
               vlay):
    """Validate an ifz polygon layer: vector type, Polygon geometry,
    non-empty, and carrying the required id field (self.ifidN)."""
    assert isinstance(vlay, QgsVectorLayer), type(vlay)
    provider = vlay.dataProvider()
    assert 'Polygon' in QgsWkbTypes().displayString(vlay.wkbType())
    assert provider.featureCount() > 0
    # required fields must all be present on the layer
    field_names = [f.name() for f in vlay.fields()]
    missing = {self.ifidN}.difference(field_names)
    assert len(missing) == 0, 'missing some fields: %s' % missing
    return True
def showRect(self, startPoint, endPoint):
    """Redraw the rubber band as the rectangle spanned by the two corners."""
    self.rubberBand.reset(QgsWkbTypes().GeometryType(2))  # polygon geometry
    # degenerate rectangle (zero width or height): draw nothing
    if startPoint.x() == endPoint.x() or startPoint.y() == endPoint.y():
        return
    corners = [
        QgsPointXY(startPoint.x(), startPoint.y()),
        QgsPointXY(startPoint.x(), endPoint.y()),
        QgsPointXY(endPoint.x(), endPoint.y()),
        QgsPointXY(endPoint.x(), startPoint.y()),
    ]
    last = len(corners) - 1
    for idx, corner in enumerate(corners):
        # only the final point passes True, triggering the canvas update
        self.rubberBand.addPoint(corner, idx == last)
    self.rubberBand.show()
def check_z_in_geometry(self):
    """Enable or disable the elevation widgets depending on whether the
    current point layer's geometry carries Z (3D) values."""
    self.target_layer = self.qgis_utils.get_layer(self._db,
                                                  self.current_point_name(),
                                                  load=True)
    has_z = QgsWkbTypes().hasZ(self.target_layer.wkbType())
    if has_z:
        tooltip = ""
    else:
        tooltip = QCoreApplication.translate("CreatePointsCadastreWizard",
                                             "The current model does not support 3D geometries")
    # same enabled state and tooltip for both the label and the combo box
    for widget in (self.labelZ, self.cbo_elevation):
        widget.setEnabled(has_z)
        widget.setToolTip(tooltip)
def run(self,  # sample conditional probability polygon 'lfield' values with finv geometry
        finv,  # inventory layer
        lpol_d,  # {event name: likelihood polygon layer}
        cid=None,  # index field name on finv
        lfield='p_fail',  # field with likelihood value
        event_rels=None,  # ev calculation method
        # WARNING: not necessarily the same as the parameter used by RiskModel
        #    mutEx: assume each event is mutually exclusive (only one can happen)
        #        lower bound
        #    indep: assume each event is independent (failure of one does not influence the other)
        #        upper bound
        ):
    """Sample conditional-probability polygon values onto the inventory.

    For each event's likelihood polygon layer, spatially join the *lfield*
    values onto the (cleaned) finv features, then resolve multiple hits per
    asset according to *event_rels* ('indep': union probability,
    'mutEx': sum).

    Returns a pandas DataFrame indexed by *cid*, one column per event,
    non-intersecting assets filled with 0.0.
    """
    #=======================================================================
    # defaults
    #=======================================================================
    log = self.logger.getChild('run')
    if cid is None:
        cid = self.cid
    if event_rels is None:
        event_rels = self.event_rels
    self.event_rels = event_rels  # reset for plotting
    assert isinstance(lpol_d, dict)
    #======================================================================
    # check/load the data
    #======================================================================
    # check lpols
    for ename, vlay in lpol_d.items():
        if not isinstance(vlay, QgsVectorLayer):
            raise Error('bad type on %s layer: %s' % (ename, type(vlay)))
        assert 'Polygon' in QgsWkbTypes().displayString(vlay.wkbType()), \
            'unexpected geometry: %s' % QgsWkbTypes().displayString(vlay.wkbType())
        # BUGFIX: message previously had an unfilled '%s' placeholder
        assert lfield in [field.name() for field in vlay.fields()], \
            'specified lfield \"%s\' not on layer' % lfield
        assert vlay.isValid()
        assert vlay.crs() == self.qproj.crs(), 'crs mismatch on %s' % vlay.name()
        #==================================================================
        # check values
        #==================================================================
        chk_df = vlay_get_fdf(vlay, logger=log)
        chk_ser = chk_df.loc[:, lfield]
        assert not chk_ser.isna().any(), 'got nulls on %s' % ename
        # values must satisfy 0 <= fval <= 1.
        # BUGFIX: was np.logical_and (a value can never be both < 0 AND > 1,
        # so the check never fired); out-of-range means < 0 OR > 1.
        boolidx = np.logical_or(  # checking for fails
            chk_ser < 0,
            chk_ser > 1,
        )
        if boolidx.any():
            raise Error('%s.%s got %i (of %i) values out of range: \n%s' %
                        (ename, lfield, boolidx.sum(), len(boolidx), chk_ser[boolidx]))
    # check finv
    assert isinstance(finv, QgsVectorLayer), 'bad type on finv'
    assert finv.isValid(), 'invalid finv'
    assert cid in [field.name() for field in finv.fields()], 'missing cid \'%s\'' % cid
    assert finv.crs() == self.crs, 'crs mismatch on %s' % finv.name()
    #======================================================================
    # build finv
    #======================================================================
    # clean out finv: keep only the cid column
    fc_vlay = self.deletecolumn(finv, [cid], invert=True, layname='fclean')
    self.createspatialindex(fc_vlay, logger=log)
    self.fc_vlay = fc_vlay  # set this for vectorize()
    # get cid list
    fdf = vlay_get_fdf(fc_vlay, logger=log)
    cid_l = fdf[cid].tolist()
    #======================================================================
    # sample values------
    #======================================================================
    log.info('sampling %i lpols w/ %i finvs and event_rels=\'%s\'' % (
        len(lpol_d), len(cid_l), event_rels))
    en_c_sval_d = dict()  # container for samples {event name: sample data}
    for ename, lp_vlay in lpol_d.items():
        log = self.logger.getChild('run.%s' % ename)
        log.debug('sampling %s from %s to %s w/ %i atts' % (
            lfield, lp_vlay.name(), fc_vlay.name(), len(fdf)))
        #===================================================================
        # sample values from polygons
        #===================================================================
        svlay, new_fns, jcnt = self.joinattributesbylocation(
            fc_vlay, lp_vlay, [lfield],
            method=0,  # one-to-many
            logger=log,
            expect_j_overlap=True,
            allow_none=True,
        )
        if jcnt == 0:
            log.warning('no assets intersect failure polygons!')
            # set a dummy entry
            en_c_sval_d[ename] = {k: [] for k in cid_l}
            continue
        # extract raw sampling data
        sdf_raw = vlay_get_fdf(svlay, logger=log)  # df w/ columns = [cid, lfield]
        #==================================================================
        # do some checks
        #==================================================================
        # check columns
        miss_l = set(sdf_raw.columns).symmetric_difference([lfield, cid])
        assert len(miss_l) == 0, 'bad columns on the reuslts'
        # make sure all the cids made it
        miss_l = set(cid_l).difference(sdf_raw[cid].unique().tolist())
        assert len(miss_l) == 0, 'failed to get %i assets in the smaple' % len(miss_l)
        #==================================================================
        # clean it
        #==================================================================
        # log misses
        boolidx = sdf_raw[lfield].isna()
        log.debug('got %i (of %i) misses. dropping these' % (boolidx.sum(), len(boolidx)))
        # drop misses
        sdf = sdf_raw.dropna(subset=[lfield], axis=0, how='any')
        #==================================================================
        # pivot to {cid:[sample values]}
        #==================================================================
        # drop down to cid groups (pvali values)
        d = {k: csdf[lfield].to_list() for k, csdf in sdf.groupby(cid)}
        # add dummy empty list for any missing cids
        # (not very elegant... doing this to fit in with previous methods;
        #  would be better to just use open joins)
        cid_samp_d = {**d, **{k: [] for k in cid_l if not k in d}}
        # wrap event loop
        en_c_sval_d[ename] = cid_samp_d  # add to results
        log.debug('collected sample values on %i assets' % len(cid_samp_d))
    #======================================================================
    # resolve multiple events------
    #======================================================================
    log = self.logger.getChild('run')
    log.info('collected sample values for %i events and %i assets' % (
        len(en_c_sval_d), len(cid_l)))
    res_df = None  # build results container
    # loop and resolve
    log.debug('resolving %i events' % len(en_c_sval_d))
    for ename, cid_samp_d in en_c_sval_d.items():
        log.info('resolving \"%s\'' % ename)
        #===================================================================
        # loop through each asset and resolve sample values
        #===================================================================
        # TODO: Parallel process this
        cid_res_d = dict()  # harmonized likelihood results
        for cval, pvals in cid_samp_d.items():
            # simple unitaries
            if len(pvals) == 1:
                cid_res_d[cval] = pvals[0]
            elif len(pvals) == 0:
                cid_res_d[cval] = np.nan
            # multi value: calc union probability for multi predictions
            else:
                if event_rels == 'indep':
                    cid_res_d[cval] = self.union_probabilities(pvals, logger=log)
                elif event_rels == 'mutEx':
                    cid_res_d[cval] = sum(pvals)
                else:
                    raise Error('bad event_rels: \'%s\'' % event_rels)
        #===================================================================
        # update results
        #===================================================================
        res_ser = pd.Series(cid_res_d, name=ename).sort_index()
        if res_df is None:
            res_df = res_ser.to_frame()
            res_df.index.name = cid
        else:
            res_df = res_df.join(res_ser, how='left')
        #===================================================================
        # check
        #===================================================================
        bx = res_ser > 1.0
        if bx.any():
            log.debug(res_ser[bx])
            raise Error('%s got %i (of %i) resolved P > 1.0.. check logger' % (
                ename, bx.sum(), (len(bx))))
    #======================================================================
    # wrap-------
    #======================================================================
    log = self.logger.getChild('run')
    #=======================================================================
    # nulls  (2021-01-12: moved null handling from the model to here)
    #=======================================================================
    # BUGFIX: the all-null check and the no-intersect warnings must run
    # BEFORE NaNs are filled with 0.0 (the original filled first, so none of
    # them could ever fire); the unreachable 'return res_df' that preceded
    # live code was removed.
    if res_df.isna().all().all():
        raise Error('no intersections with any events!')
    # all-null columns/rows (no intersect) -- warn while NaNs are still visible
    bc = res_df.isna().all(axis=0)
    if bc.any():
        log.warning('%i (of %i) events have no intersect!\n    %s' % (
            bc.sum(), len(bc), res_df.columns[bc].tolist()))
    bx = res_df.isna().all(axis=1)
    if bx.any():
        log.warning('%i (of %i) assets have no intersect!' % (bx.sum(), len(bx)))
    res_df = res_df.fillna(0.0)
    res_df = res_df.round(self.prec)
    #======================================================================
    # post checks
    #======================================================================
    miss_l = set(lpol_d.keys()).symmetric_difference(res_df.columns)
    assert len(miss_l) == 0, 'failed to match columns to events'
    # bounds
    if not res_df.max().max() <= 1.0:
        raise Error('bad max: %.2f' % res_df.max().max())
    assert res_df.min().min() >= 0.0, 'bad min'
    miss_l = set(res_df.index).symmetric_difference(cid_l)
    assert len(miss_l) == 0, 'missed some cids'
    #======================================================================
    # close
    #======================================================================
    try:  # fancy reporting; best-effort only
        log.debug(
            'results stats: \nmeans: \n    %s\nnulls \n    %s \nmaxes: \n    %s \nmins: \n    %s\n\n' %
            (res_df.mean().to_dict(), res_df.isna().sum().to_dict(),
             res_df.max().to_dict(), res_df.min().to_dict()))
        log.info('finished w/ %s event_rels = \'%s\'.. see log' % (
            str(res_df.shape), event_rels))
    except Exception:  # was a bare except; keep best-effort behavior
        log.error('logging error')
    self.res_df = res_df
    return self.res_df  # will have 0.0 where there is no intersect
def reset(self):
    """Clear the stored rectangle corners and wipe the rubber band."""
    self.startPoint = self.endPoint = None
    self.isEmittingPoint = False
    # GeometryType(2): polygon geometry for the rubber band
    self.rubberBand.reset(QgsWkbTypes().GeometryType(2))
def testDatenmodell(iface):
    """Compare the layers of the current QGIS project against the GZP data
    model schema (schemaVorgabe.json) and report mismatches via message boxes.

    :param iface: QGIS interface object (unused in the visible body).
    """
    ## -------------------------
    # Open dictionary containing schema of GZP Datenmodell
    # locate dictionary and define relative path
    try:
        filename = inspect.getframeinfo(inspect.currentframe()).filename
        JPath = os.path.dirname(os.path.abspath(filename))
        SchemaPath = os.path.join(JPath, r'schemaVorgabe.json')
        #
        # open dictionary
        with open(SchemaPath, 'r') as f:
            schema = json.load(f)
    except FileNotFoundError:
        # NOTE(review): QMessageBox.information() returns a button code, not an
        # exception -- `raise` on it will itself fail with a TypeError. Likely
        # the intent was to show the box and then raise/return; confirm.
        raise QMessageBox.information(
            None, "FEHLERMELDUNG",
            "Datei schemaVorlage.json nicht gefunden")
    ## -------------------------
    # Create schema file from layers in qgz
    # Create dictionary of layer schemas
    schemaIN = {}
    legend = [
        tree_layer.layer()
        for tree_layer in QgsProject.instance().layerTreeRoot().findLayers()
    ]
    for layer in legend:
        # skip raster layers and DOM_* lookup layers
        if layer.type() != QgsMapLayer.RasterLayer and not layer.name(
        ).startswith('DOM_'):
            dsName = layer.source().split('|layername=')[1].split('|')[0]
            dsFields = layer.fields()
            dsshapeType = QgsWkbTypes().displayString(int(layer.wkbType()))
            lFields = []
            # Add information of fields
            for field in dsFields:
                # translate type
                # NOTE(review): `unicode` is Python 2 only -- under Python 3 /
                # QGIS 3 this raises NameError; presumably should be str().
                if field.typeName() == u'Integer64':
                    ftype = unicode('INTEGER')
                elif field.typeName() == u'String':
                    ftype = unicode('TEXT')
                elif field.typeName() == u'Date':
                    ftype = unicode('DATE')
                elif field.typeName() == u'Real':
                    ftype = unicode('REAL')
                else:
                    ftype = field.typeName()
                # translate defaultValue
                if field.defaultValueDefinition().expression() == '':
                    fdefault = None
                else:
                    fdefault = field.defaultValueDefinition().expression()
                # NOTE(review): ConstraintNotNull here is the enum constant
                # itself, not whether the constraint is set on this field --
                # only tuple position [0] (the type) is compared later, so
                # this may be intentional; verify.
                tfield = (field.name(), ftype,
                          field.constraints().ConstraintNotNull, fdefault)
                lFields.append(tfield)
            # {field name: (type, not-null flag, default)}
            schemaIN[dsName] = {a: (b, c, d) for a, b, c, d in lFields}
            # Add information on shape type (normalize to multi-part names)
            if dsshapeType == 'Polygon':
                shType = 'MultiPolygon'
            elif dsshapeType == 'LineString':
                shType = 'MultiLineString'
            else:
                shType = dsshapeType
            schemaIN[dsName]['SHAPE'] = (shType, 0, None)
    ## compare schemas and write messages
    # Set up counter and message content
    ErrorCount = 0
    MessageDict = {}
    for keyDM in schema.keys():
        if not keyDM.startswith('DOM_'):
            MessageDict[keyDM] = {}
            # TEST 1 - table exists
            if keyDM in schemaIN.keys():
                # TEST 2 - Field exists
                MessageDict[keyDM]["Feld nicht gefunden"] = []
                MessageDict[keyDM]["Data Typ inkorrekt in Feld"] = []
                for fname in schema[keyDM].keys():
                    if fname in schemaIN[keyDM].keys():
                        pass
                        # TEST 3 - Field has correct attributes
                        # data type
                        if schema[keyDM][fname][0] == schemaIN[keyDM][fname][
                                0]:
                            pass
                        else:
                            ErrorCount += 1
                            MessageDict[
                                keyDM]["Data Typ inkorrekt in Feld"].append(
                                    str(fname))
                    else:
                        ErrorCount += 1
                        MessageDict[keyDM]["Feld nicht gefunden"].append(
                            str(fname))
            else:
                ErrorCount += 1
                MessageDict[keyDM]["Layer/Tabelle nicht gefunden"] = 'NA'
    # Create interim message dictionary, shortening content
    interimMessContent = {}
    for key1 in MessageDict.keys():
        interimMessContent[key1] = []
        for key2, val2 in MessageDict[key1].items():
            if val2 == 'NA':
                interimMessContent[key1].append("\n\t{}.".format(key2))
            if len(val2) != 0 and val2 != 'NA':
                valStr = ""
                for val in val2:
                    valStr += ", " + val
                # valStr[2:] strips the leading ", "
                interimMessContent[key1].append("\n\t{}: {}".format(
                    key2, valStr[2:]))
    # Define Message Content
    MessageContent = ""
    for key, val in interimMessContent.items():
        if len(val) != 0:
            MessageContent += "\n{}:".format(key)
            for i in val:
                MessageContent += i
    # Define Message
    if ErrorCount == 0:
        QMessageBox.information(
            None, "INFORMATION",
            "Prüfung erfolgreich abgeschlossen. \nAlle Layer und Tabellen entsprechen dem vorgegebenen Datenschema."
        )
    else:
        MessageFinal = "ACHTUNG:\n\nFolgende Layer und Tabellen entsprechen nicht dem vorgegebenen Datenschema:\n" + MessageContent + "\n\nBitte korrigieren! \n"
        QMessageBox.information(None, "FEHLERMELDUNG", MessageFinal)
def update_available_layers(self):
    """Rebuild the layer tree widget (self.trw_layers) from self.models_tree,
    honoring the domain/structure/association filter checkboxes and the search
    text, restoring prior selection and expansion state."""
    self.trw_layers.setUpdatesEnabled(
        False)  # Don't render until we're ready
    # Grab some context data
    show_domains = self.chk_show_domains.isChecked()
    show_structures = self.chk_show_structures.isChecked()
    show_associations = self.chk_show_associations.isChecked()
    top_level_items_expanded_info = []
    for i in range(self.trw_layers.topLevelItemCount()):
        top_level_items_expanded_info.append(
            self.trw_layers.topLevelItem(i).isExpanded())
    # Save selection
    self.update_selected_items()
    # Iterate models adding children
    self.trw_layers.blockSignals(
        True)  # We don't want to get itemSelectionChanged here
    self.trw_layers.clear()
    self.trw_layers.blockSignals(False)
    sorted_models = sorted(self.models_tree.keys())
    for model in sorted_models:
        children = []
        model_item = QTreeWidgetItem([model])
        # Filter by search text
        list_tables = self.filter_tables_by_search_text(
            self.models_tree[model].keys(), self.txt_search_text.text())
        sorted_tables = sorted(list_tables)
        for table in sorted_tables:
            current_table_info = self.models_tree[model][table]
            # Skip kinds the user has filtered out
            if current_table_info[QueryNames.KIND_SETTINGS_MODEL_BAKER] == ILI2DBNames.TABLE_PROP_DOMAIN and not show_domains \
                    or current_table_info[QueryNames.KIND_SETTINGS_MODEL_BAKER] == ILI2DBNames.TABLE_PROP_STRUCTURE and not show_structures \
                    or current_table_info[QueryNames.KIND_SETTINGS_MODEL_BAKER] == ILI2DBNames.TABLE_PROP_ASSOCIATION and not show_associations:
                continue
            table_item = QTreeWidgetItem([table])
            table_item.setData(0, Qt.UserRole, self.models_tree[model][table])
            # WKB type name -> QgsWkbTypes.GeometryType, None for plain tables
            geometry_type = QgsWkbTypes().geometryType(
                QgsWkbTypes().parseType(current_table_info[
                    QueryNames.GEOMETRY_TYPE_MODEL_BAKER])
            ) if current_table_info[
                QueryNames.GEOMETRY_TYPE_MODEL_BAKER] else None
            # icon index 3 is the geometry-less default
            icon_name = self.icon_names[
                3 if geometry_type is None else geometry_type]
            # Is the layer already loaded in canvas?
            if self.app.core.get_ladm_layer_from_qgis(
                    self._db,
                    current_table_info[QueryNames.TABLE_NAME_MODEL_BAKER],
                    EnumLayerRegistryType.IN_LAYER_TREE) is not None:
                table_item.setText(
                    0, table + QCoreApplication.translate(
                        "LoadLayersDialog", " [already loaded]"))
                table_item.setData(0, Qt.ForegroundRole, QBrush(Qt.lightGray))
                table_item.setFlags(Qt.ItemIsEnabled)  # Not selectable
            else:  # Layer not in QGIS Layer Tree
                if not current_table_info[
                        QueryNames.
                        KIND_SETTINGS_MODEL_BAKER]:  # This is a class
                    font = QFont()
                    font.setBold(True)
                    table_item.setData(0, Qt.FontRole, font)
                # kind-specific icon overrides
                if current_table_info[
                        QueryNames.
                        KIND_SETTINGS_MODEL_BAKER] == ILI2DBNames.TABLE_PROP_DOMAIN:
                    icon_name = self.icon_names[4]
                elif current_table_info[
                        QueryNames.
                        KIND_SETTINGS_MODEL_BAKER] == ILI2DBNames.TABLE_PROP_STRUCTURE:
                    if geometry_type is None:
                        icon_name = self.icon_names[5]
                elif current_table_info[
                        QueryNames.
                        KIND_SETTINGS_MODEL_BAKER] == ILI2DBNames.TABLE_PROP_ASSOCIATION:
                    icon_name = self.icon_names[6]
            icon = QIcon(":/Asistente-LADM-COL/resources/images/{}".format(
                icon_name))
            table_item.setData(0, Qt.DecorationRole, icon)
            children.append(table_item)
        model_item.addChildren(children)
        self.trw_layers.addTopLevelItem(model_item)
    # Set selection
    iterator = QTreeWidgetItemIterator(self.trw_layers,
                                       QTreeWidgetItemIterator.Selectable)
    self.trw_layers.blockSignals(
        True)  # We don't want to get itemSelectionChanged here
    while iterator.value():
        item = iterator.value()
        if item.text(0) in self.selected_items_dict:
            item.setSelected(True)
        iterator += 1
    self.trw_layers.blockSignals(False)
    # Make model items non selectable
    # Set expand taking previous states into account
    for i in range(self.trw_layers.topLevelItemCount()):
        self.trw_layers.topLevelItem(i).setFlags(
            Qt.ItemIsEnabled)  # Not selectable
        self.trw_layers.topLevelItem(i).setExpanded(
            top_level_items_expanded_info[i]
            if top_level_items_expanded_info else True)
    self.trw_layers.setUpdatesEnabled(True)  # Now render!
def processAlgorithm(self, parameters, context, feedback):
    """Copy features from the INPUT source into the existing OUTPUT vector
    layer, mapping attributes by field name and converting geometries to the
    target type (including adding/dropping Z and dropping M coordinates).

    Returns {self.OUTPUT: target} on success, {self.OUTPUT: None} on error.
    """
    source = self.parameterAsSource(parameters, self.INPUT, context)
    target = self.parameterAsVectorLayer(parameters, self.OUTPUT, context)
    target.dataProvider().clearErrors()
    editable_before = False
    if target.isEditable():
        editable_before = True
        feedback.reportError(
            "\nWARNING: You need to close the edit session on layer '{}' before running this algorithm."
            .format(target.name()))
        return {self.OUTPUT: None}
    # Define a mapping between source and target layer (by field name)
    mapping = dict()
    for target_idx in target.fields().allAttributesList():
        target_field = target.fields().field(target_idx)
        source_idx = source.fields().indexOf(target_field.name())
        if source_idx != -1:
            mapping[target_idx] = source_idx
    # Copy and Paste
    total = 100.0 / source.featureCount() if source.featureCount() else 0
    features = source.getFeatures()
    destType = target.geometryType()
    destIsMulti = QgsWkbTypes.isMultiType(target.wkbType())
    # Check if layer has Z or M values.
    drop_coordinates = list()
    add_coordinates = list()
    if QgsWkbTypes().hasM(source.wkbType()):
        # In ladm we don't use M values, so drop them if present
        drop_coordinates.append("M")
    if not QgsWkbTypes().hasZ(source.wkbType()) and QgsWkbTypes().hasZ(
            target.wkbType()):
        add_coordinates.append("Z")
    if QgsWkbTypes().hasZ(
            source.wkbType()) and not QgsWkbTypes().hasZ(target.wkbType()):
        drop_coordinates.append("Z")
    new_features = []
    display_target_geometry = QgsWkbTypes.displayString(target.wkbType())
    display_source_geometry = QgsWkbTypes.displayString(source.wkbType())
    for current, in_feature in enumerate(features):
        if feedback.isCanceled():
            break
        attrs = {
            target_idx: in_feature[source_idx]
            for target_idx, source_idx in mapping.items()
        }
        geom = QgsGeometry()
        if in_feature.hasGeometry() and target.isSpatial():
            # Convert geometry to match destination layer
            # Adapted from QGIS qgisapp.cpp, pasteFromClipboard()
            geom = in_feature.geometry()
            if destType != QgsWkbTypes.UnknownGeometry:
                newGeometry = geom.convertToType(destType, destIsMulti)
                if newGeometry.isNull():
                    feedback.reportError(
                        "\nERROR: Geometry type from the source layer ('{}') could not be converted to '{}'."
                        .format(display_source_geometry,
                                display_target_geometry))
                    return {self.OUTPUT: None}
                newGeometry = self.transform_geom(newGeometry,
                                                  drop_coordinates,
                                                  add_coordinates)
                geom = newGeometry
            # Avoid intersection if enabled in digitize settings
            geom.avoidIntersections(
                QgsProject.instance().avoidIntersectionsLayers())
        new_feature = QgsVectorLayerUtils().createFeature(
            target, geom, attrs)
        new_features.append(new_feature)
        feedback.setProgress(int(current * total))
    try:
        # This might print error messages... But, hey! That's what we want!
        res = target.dataProvider().addFeatures(new_features)
    except QgsEditError as e:
        if not editable_before:
            # Let's close the edit session to prepare for a next run
            target.rollBack()
        feedback.reportError(
            "\nERROR: No features could be copied into '{}', because of the following error:\n{}\n"
            .format(target.name(), repr(e)))
        return {self.OUTPUT: None}
    if res[0]:
        feedback.pushInfo(
            "\nSUCCESS: {} out of {} features from input layer were successfully copied into '{}'!"
            .format(len(new_features), source.featureCount(),
                    target.name()))
    else:
        if target.dataProvider().hasErrors():
            feedback.reportError(
                "\nERROR: The data could not be copied! Details: {}.".
                format(target.dataProvider().errors()[0]))
        else:
            feedback.reportError(
                "\nERROR: The data could not be copied! No more details from the provider."
            )
    return {self.OUTPUT: target}
def prep_dike(self,  # do some pre-calcs on the dike layer
              vlay_raw,
              dikeID=None,  # dike identifier field
              segID=None,  # segment identifier field
              cbfn=None,  # crest buffer fieldname
              logger=None):
    """Pre-process the raw dike line layer: validate fields/geometry, add a
    segment-length column, and build a unique global segment id (self.sid)
    by concatenating zero-padded dikeID and segID values.

    Stores the result on self.dike_vlay / self.dike_df and returns the new
    vector layer.
    """
    """
    not sure it makes sense to have this separate from get_dike_expo anymore
    """
    #=======================================================================
    # defaults
    #=======================================================================
    if logger is None: logger = self.logger
    log = logger.getChild('load_dikes')
    if dikeID is None: dikeID = self.dikeID
    if segID is None: segID = self.segID
    if cbfn is None: cbfn = self.cbfn
    mstore = QgsMapLayerStore()  # build a new map store
    #=======================================================================
    # precheck
    #=======================================================================
    fnl = [f.name() for f in vlay_raw.fields()]
    #jcolns = [self.sid, 'f0_dtag', self.cbfn, self.segln]
    # required fields must all be on the raw layer
    miss_l = set([dikeID, segID, 'f0_dtag', cbfn,
                  self.ifidN]).difference(fnl)
    assert len(
        miss_l) == 0, 'missing expected columns on dike layer: %s' % miss_l
    # 'length' would collide with the geometry column added below
    assert not 'length' in [
        s.lower() for s in fnl
    ], '\'length\' field not allowed on dikes layer'
    """try forcing
    assert 'int' in df[segID].dtype.name, 'bad dtype on dike layer %s'%segID
    assert 'int' in df[dikeID].dtype.name, 'bad dtype on dike layer %s'%dikeID"""
    # geometry must be line type
    assert 'Line' in QgsWkbTypes().displayString(
        vlay_raw.wkbType()), 'bad vector type on dike'
    #=======================================================================
    # add geometry data
    #=======================================================================
    d = {'CALC_METHOD': 0, 'INPUT': vlay_raw, 'OUTPUT': 'TEMPORARY_OUTPUT'}
    vlay = processing.run('qgis:exportaddgeometrycolumns',
                          d,
                          feedback=self.feedback)['OUTPUT']
    mstore.addMapLayer(vlay)
    """
    view(vlay)
    """
    # rename the field added by the algorithm to the project's segment-length name
    vlay = vlay_rename_fields(vlay, {'length': self.segln})
    mstore.addMapLayer(vlay)
    #=======================================================================
    # pull data
    #=======================================================================
    df = vlay_get_fdf(vlay, logger=log)
    #=======================================================================
    # build global segment ids
    #=======================================================================
    # type forcing: identifiers must be integers before padding
    for coln in [dikeID, segID]:
        try:
            df[coln] = df[coln].astype(int)
        except Exception as e:
            raise Error('failed to type set dike column \'%s\' w/ \n%s' %
                        (coln, e))
    # global id = <dikeID zero-padded to 3><segID zero-padded to 2>
    s1 = df[dikeID].astype(str).str.pad(width=3, side='left', fillchar='0')
    s2 = df[segID].astype(str).str.pad(width=2, side='left', fillchar='0')
    df[self.sid] = s1.str.cat(others=s2).astype(int)
    assert df[
        self.
        sid].is_unique, 'failed to get unique global segment ids... check your dikeID and segID'
    # bundle back into vectorlayer
    geo_d = vlay_get_fdata(vlay, geo_obj=True, logger=log)
    res_vlay = self.vlay_new_df2(df,
                                 geo_d=geo_d,
                                 logger=log,
                                 layname='%s_dike_%s' %
                                 (self.tag, vlay_raw.name()))
    #=======================================================================
    # wrap
    #=======================================================================
    dp = res_vlay.dataProvider()
    log.info('loaded dike layer \'%s\' w/ %i segments' %
             (vlay.name(), dp.featureCount()))
    self.dike_vlay = res_vlay
    self.dike_df = df
    """attaching this again in case the user passes new values"""
    self.dikeID, self.segID, self.cbfn = dikeID, segID, cbfn  # done during init
    self.sid_vals = df[self.sid].unique().tolist()
    mstore.removeAllMapLayers()
    return self.dike_vlay