def _get_dataset_subtypes(self, dataset_name, dataset_type):
    """Get subtypes information for single dataset."""
    if dataset_type == DATASET_TYPE_FC:
        dataset = FeatureClass(os.path.join(self.gdb.path, dataset_name))
    elif dataset_type == DATASET_TYPE_TABLE:
        dataset = Table(os.path.join(self.gdb.path, dataset_name))

    subtypes = dataset.get_subtypes()
    if subtypes:
        df_subtypes = self._map_boolean(pd.DataFrame.from_dict(subtypes))
        return df_subtypes
def _get_dataset_indexes(self, dataset_name, dataset_type):
    """Get indexes information for single dataset."""
    if dataset_type == DATASET_TYPE_FC:
        dataset = FeatureClass(os.path.join(self.gdb.path, dataset_name))
    elif dataset_type == DATASET_TYPE_TABLE:
        dataset = Table(os.path.join(self.gdb.path, dataset_name))

    indexes = dataset.get_indexes()
    if indexes:
        df_indexes = pd.DataFrame.from_dict(indexes).sort_values(by='Name')
        return df_indexes
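# A minimal sketch (not part of the original module) showing how the two
# per-dataset helpers above could be driven when assembling a report.
# The method name `_collect_dataset_details` and the `datasets` iterable of
# (name, type) pairs are assumptions made for illustration only.
def _collect_dataset_details(self, datasets):
    """Collect subtypes/indexes DataFrames keyed by dataset name (sketch)."""
    details = {}
    for dataset_name, dataset_type in datasets:
        details[dataset_name] = {
            # each helper returns a pandas DataFrame, or None when the
            # dataset has no subtypes/indexes defined
            'subtypes': self._get_dataset_subtypes(dataset_name, dataset_type),
            'indexes': self._get_dataset_indexes(dataset_name, dataset_type),
        }
    return details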
def get_tables(self):
    """Get geodatabase tables as `Table` class instances."""
    tables = []
    if self.arcpy_found:
        arcpy.env.workspace = self.path
        for tbl in arcpy.ListTables():
            try:
                logging.debug("Table: {}".format(tbl))
                tbl_instance = Table(arcpy.Describe(tbl).catalogPath)
                if tbl_instance.OIDFieldName == 'ATTACHMENTID':
                    continue
                od = OrderedDict()
                for k, v in GDB_TABLE_PROPS.items():
                    od[v] = getattr(tbl_instance, k, '')

                # custom props
                od['Row count'] = tbl_instance.get_row_count()
                num_attachments = tbl_instance.get_attachments_count()
                if num_attachments is not None:
                    od['Attachments enabled'] = True
                    od['Attachments count'] = num_attachments
                else:
                    od['Attachments enabled'] = False
                    od['Attachments count'] = ''
                tables.append(od)
            except Exception as e:
                logging.error('Error. Could not read table {}'.format(tbl))
                logging.error(str(e.args[0]))
                tb = sys.exc_info()[2]
                tbinfo = traceback.format_tb(tb)[0]
                logging.error(tbinfo)
    else:
        table_names = [
            self.ds.GetLayerByIndex(i).GetName()
            for i in range(0, self.ds.GetLayerCount())
            if not self.ds.GetLayerByIndex(i).GetGeometryColumn()
        ]
        for table_name in table_names:
            try:
                tbl_instance = TableOgr(self, table_name)
                od = OrderedDict()
                for k, v in GDB_TABLE_PROPS.items():
                    od[v] = getattr(tbl_instance, k, '')

                # custom props
                od['Row count'] = tbl_instance.get_row_count()
                tables.append(od)
            except Exception as e:
                logging.error(e)
    return tables
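# A usage sketch, assuming the enclosing geodatabase object is available as
# `gdb` (its class name and constructor are not shown in this section):
# get_tables() returns a list of OrderedDicts keyed by the display names from
# GDB_TABLE_PROPS, so it converts cleanly into a pandas DataFrame for report
# output. The helper name `_tables_to_html` is illustrative only.
def _tables_to_html(gdb):
    """Render the get_tables() output as an HTML table (sketch)."""
    tables_info = gdb.get_tables()
    if not tables_info:
        return ''
    df_tables = pd.DataFrame(tables_info)
    return df_tables.to_html(index=False, na_rep='')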
def _get_dataset_fields(self, dataset_name, dataset_type):
    """Get fields information for single dataset."""
    if self.arcpy_found:
        if dataset_type == DATASET_TYPE_FC:
            dataset = FeatureClass(os.path.join(self.gdb.path, dataset_name))
        elif dataset_type == DATASET_TYPE_TABLE:
            dataset = Table(os.path.join(self.gdb.path, dataset_name))
    else:
        if dataset_type == DATASET_TYPE_FC:
            dataset = FeatureClassOgr(self.gdb, dataset_name)
        elif dataset_type == DATASET_TYPE_TABLE:
            dataset = TableOgr(self.gdb, dataset_name)

    df_fields = self._map_boolean(
        pd.DataFrame.from_dict(dataset.get_fields()))
    # when there is a dataset with no fields
    if not df_fields.empty:
        df_fields['Default value'].fillna(value='', inplace=True)
    else:
        df_fields = None
    return df_fields
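# `_map_boolean` is used above but not defined in this section. A minimal
# sketch of what such a helper could look like, assuming its purpose is to
# replace boolean values with 'Yes'/'No' strings for report display (an
# assumption, not the module's actual implementation):
def _map_boolean_sketch(df):
    """Replace True/False with 'Yes'/'No' in a copy of the DataFrame (sketch)."""
    return df.replace({True: 'Yes', False: 'No'})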