def RemoveGpHistory_fc(out_xml_dir):
    """Strip geoprocessing history from the CMLRS feature class metadata.

    Recreates ``out_xml_dir`` as a scratch folder, exports the metadata of
    ``gdb_93_CMLRS`` through the removeGeoprocessingHistory XSLT, then
    imports the cleaned XML back onto the feature class.  On any failure
    the error is logged to the script-status table and re-raised so the
    run is reported as a failure.

    :param out_xml_dir: Scratch folder for the intermediate metadata XML.
        Deleted and recreated on every call.
    :raises Exception: Re-raises whatever the conversion tools raised,
        after logging, so no success message is recorded downstream.
    """
    remove_gp_history_xslt = r"C:\GIS\metadataremoval\removeGeoprocessingHistory.xslt"
    print("Trying to remove out_xml_dir/metadataTempFolder...")
    # Recreate the scratch folder so stale XML from a previous run is gone.
    if Exists(out_xml_dir):
        Delete_management(out_xml_dir)
    os.mkdir(out_xml_dir)
    env.workspace = out_xml_dir
    ClearWorkspaceCache_management()
    try:
        print("Starting xml conversion.")
        name_xml = "CMLRS_LAM.xml"
        # Process: XSLT Transformation -- exports metadata minus GP history.
        XSLTransform_conversion(gdb_93_CMLRS, remove_gp_history_xslt,
                                name_xml, "")
        print("Completed xml conversion on %s" % gdb_93_CMLRS)
        # Process: Metadata Importer -- writes the cleaned metadata back.
        MetadataImporter_conversion(name_xml, gdb_93_CMLRS)
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit are
    # not logged to the status table as a conversion failure.
    except Exception:
        print("Could not complete xml conversion on %s" % gdb_93_CMLRS)
        endTime = datetime.datetime.now()
        ScriptStatusLogging('Cansys_CMLRS_Transfer',
                            'SharedSDEProd.gdb\SHARED_CANSYS_CMLRS',
                            scriptFailure, startTime, endTime,
                            "Could not complete xml conversion on " + gdb_93_CMLRS,
                            pythonLogTable)
        # Reraise the error to stop execution and prevent a success message
        # from being inserted into the table.
        raise
def RemoveGpHistory_fc(out_xml_dir):
    """Strip geoprocessing history from the KanDrive Conditions metadata.

    Recreates ``out_xml_dir`` as a scratch folder, exports the metadata of
    ``kanDriveSpatialConditions`` through the removeGeoprocessingHistory
    XSLT, then imports the cleaned XML back onto the feature class.  On
    any failure the error is logged to the script-status table and
    re-raised so the run is reported as a failure.

    :param out_xml_dir: Scratch folder for the intermediate metadata XML.
        Deleted and recreated on every call.
    :raises Exception: Re-raises whatever the conversion tools raised,
        after logging, so no success message is recorded downstream.
    """
    remove_gp_history_xslt = r"D:\kandrive\harvesters\scheduled-tasks\metadataremoval\removeGeoprocessingHistory.xslt"
    print("Trying to remove out_xml_dir/metadataTempFolder...")
    # Recreate the scratch folder so stale XML from a previous run is gone.
    if Exists(out_xml_dir):
        Delete_management(out_xml_dir)
    os.mkdir(out_xml_dir)
    env.workspace = out_xml_dir
    ClearWorkspaceCache_management()
    try:
        print("Starting xml conversion.")
        name_xml = "RCRS_LAM.xml"
        # Process: XSLT Transformation -- exports metadata minus GP history.
        XSLTransform_conversion(kanDriveSpatialConditions,
                                remove_gp_history_xslt, name_xml, "")
        print("Completed xml conversion on %s" % kanDriveSpatialConditions)
        # Process: Metadata Importer -- writes the cleaned metadata back.
        MetadataImporter_conversion(name_xml, kanDriveSpatialConditions)
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit are
    # not logged to the status table as a conversion failure.
    except Exception:
        print("Could not complete xml conversion on %s" % kanDriveSpatialConditions)
        endTime = datetime.datetime.now()
        ScriptStatusLogging(
            'KanDrive_Spatial_Conditions_Update',
            'kandrive_spatial.DBO.Conditions',
            scriptFailure, startTime, endTime,
            "Could not complete xml conversion on " + kanDriveSpatialConditions,
            pythonLogTable)
        # Reraise the error to stop execution and prevent a success message
        # from being inserted into the table.
        raise
def edit_version(self, connection_file: str):
    """Apply FACILITYID edits to this feature inside a versioned edit session.

    Pulls edit records from ``self._edit()``.  If any exist, they are
    written to a per-feature CSV, edit metadata is added, and — when a
    connection file is supplied — each row's new FACILITYID is written to
    the versioned workspace, matched by GLOBALID, inside an arc edit
    session.  Every batch of edits is also appended to a cumulative
    "all edits ever" CSV.

    :param connection_file: Path to the versioned .sde connection file.
        Falsy value skips the versioned-edit step (records are still
        logged to CSV).
    """
    records = self._edit()
    if not records:
        log.info("No edits were necessary...")
        return

    log.debug("Writing edited rows to a csv...")
    csv_file = f'.\\facilityid\\log\\{self.feature_name}_Edits.csv'
    write_to_csv(csv_file, records)
    self.add_edit_metadata()

    # Map each edited row's GLOBALID to its replacement FACILITYID.
    guid_facid = {x['GLOBALID']: x["NEWFACILITYID"] for x in records}

    if connection_file:
        edit_conn = os.path.join(connection_file, *self.tuple_path[1:])
        try:
            # Start an arc edit session (undo disabled, multiuser mode).
            log.debug("Entering an arc edit session...")
            editor = Editor(connection_file)
            editor.startEditing(False, True)
            editor.startOperation()

            log.debug("Filtering the table to editted records only...")
            # Query only the entries that need editing.
            guids = ", ".join(f"'{x}'" for x in guid_facid)
            query = f"GLOBALID IN ({guids})"

            log.debug("Opening an update cursor to perform edits...")
            fields = ["GLOBALID", "FACILITYID"]
            with UpdateCursor(edit_conn, fields, query) as cursor:
                for row in cursor:
                    row[1] = guid_facid[row[0]]
                    cursor.updateRow(row)

            # Stop the edit operation and save the edits.
            log.debug("Closing the edit session...")
            editor.stopOperation()
            editor.stopEditing(True)
            del editor
            ClearWorkspaceCache_management()
            log.info(("Successfully performed versioned edits on "
                      f"{self.feature_name}..."))

            # Reset the aprx connection to the versioned connection.
            self.aprx_connection = edit_conn
            # BUG FIX: the original used .strip(".sde"), which removes any
            # of the characters '.', 's', 'd', 'e' from BOTH ends of the
            # name (e.g. "sde_edits.sde" -> "_edit"), not the extension.
            # splitext removes only the trailing extension.
            self.version_name = os.path.splitext(
                os.path.basename(connection_file))[0]
            self.add_to_aprx()
        except RuntimeError:
            log.exception(("Could not perform versioned edits "
                           f"on {self.feature_name}..."))

    log.debug("Logging edits to csv file containing all edits ever...")
    # BUG FIX: the original r'.\\facilityid\\...' raw string kept the
    # doubled backslashes literally, inconsistent with csv_file above.
    all_edits = '.\\facilityid\\log\\AllEditsEver.csv'
    write_to_csv(all_edits, records)
def RemoveGpHistory_fc(sdeconn, remove_gp_history_xslt, out_xml):
    """Strip geoprocessing history from every feature class in a workspace.

    Walks all feature classes in ``sdeconn``, exports each one's metadata
    through the history-removing XSLT into ``out_xml``, then imports the
    cleaned XML back onto the feature class.

    :param sdeconn: Workspace (SDE connection) whose feature classes are
        processed.
    :param remove_gp_history_xslt: Path to the XSLT that removes
        geoprocessing history from metadata.
    :param out_xml: Folder that receives the intermediate XML files.
    """
    ClearWorkspaceCache_management()
    env.workspace = sdeconn
    for fx in ListFeatureClasses():
        name_xml = out_xml + os.sep + str(fx) + ".xml"
        # Process: XSLT Transformation -- exports metadata minus GP history.
        XSLTransform_conversion(sdeconn + os.sep + fx, remove_gp_history_xslt,
                                name_xml, "")
        # Typo fix: message read "coversion" in the original.
        print("Completed xml conversion on {0}".format(fx))
        # Process: Metadata Importer -- writes the cleaned metadata back.
        MetadataImporter_conversion(name_xml, sdeconn + os.sep + fx)
        print("Imported XML on {0}".format(fx))
def RemoveGpHistory_fc(remove_gp_history_xslt, out_xml, OpEnvironment):
    """Strip geoprocessing history from every feature class in a workspace.

    Uses ``OpEnvironment.OpRunOut`` as both the arc workspace and the SDE
    connection, then runs each feature class through the history-removing
    XSLT and re-imports the cleaned metadata.  Failures on individual
    feature classes are reported and skipped so the rest still process.

    :param remove_gp_history_xslt: Path to the XSLT that removes
        geoprocessing history from metadata.
    :param out_xml: Folder that receives the intermediate XML files.
    :param OpEnvironment: Object whose ``OpRunOut`` attribute is the
        target workspace path.
    """
    env.workspace = OpEnvironment.OpRunOut
    sdeconn = OpEnvironment.OpRunOut
    env.overwriteOutput = True
    print(sdeconn)
    ClearWorkspaceCache_management()
    for fx in ListFeatureClasses():
        try:
            name_xml = out_xml + os.sep + str(fx) + ".xml"
            # Process: XSLT Transformation -- exports metadata minus history.
            XSLTransform_conversion(sdeconn + os.sep + fx,
                                    remove_gp_history_xslt, name_xml, "")
            print("Completed xml conversion on {0}".format(fx))
            # Process: Metadata Importer -- writes the cleaned metadata back.
            MetadataImporter_conversion(name_xml, sdeconn + os.sep + fx)
            print("Imported XML on {0}".format(fx))
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still abort the run; per-feature tool errors stay best-effort.
        except Exception:
            print("could not complete xml conversion on {0}".format(fx))
def RemoveGpHistory_fc(out_xml):
    """Strip geoprocessing history from the KanDrive Construction metadata.

    Recreates ``out_xml`` as a scratch folder, exports the metadata of
    ``sdeKandriveConstruction`` through the removeGeoprocessingHistory
    XSLT, then imports the cleaned XML back onto the feature class.
    Conversion failures are reported and swallowed (best-effort).

    :param out_xml: Scratch folder for the intermediate metadata XML.
        Deleted and recreated on every call.
    """
    remove_gp_history_xslt = r"D:\kandrive\harvesters\scheduled-tasks\metadataremoval\removeGeoprocessingHistory.xslt"
    # NOTE(review): the workspace is pointed at out_xml BEFORE the folder
    # is deleted and recreated below -- confirm this ordering is intended.
    env.workspace = out_xml
    ClearWorkspaceCache_management()
    # Recreate the scratch folder so stale XML from a previous run is gone.
    if Exists(out_xml):
        Delete_management(out_xml, "Folder")
    from os import mkdir
    mkdir(out_xml)
    try:
        name_xml = "Kandrive_Construction_Update.xml"
        # Process: XSLT Transformation -- exports metadata minus GP history.
        XSLTransform_conversion(sdeKandriveConstruction,
                                remove_gp_history_xslt, name_xml, "")
        # Process: Metadata Importer -- writes the cleaned metadata back.
        MetadataImporter_conversion(name_xml, sdeKandriveConstruction)
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit are
    # not silently swallowed; tool errors stay best-effort.
    except Exception:
        print("could not complete xml conversion on {0}".format(sdeKandriveConstruction))
def RemoveGpHistory_fc(out_xml_dir):
    """Strip geoprocessing history from the KanDrive Incidents metadata.

    Recreates ``out_xml_dir`` as a scratch folder, exports the metadata of
    ``kanDriveSpatialIncidents`` through the removeGeoprocessingHistory
    XSLT, then imports the cleaned XML back onto the feature class.  On
    any failure the error is logged to the script-status table and
    re-raised so the run is reported as a failure.

    :param out_xml_dir: Scratch folder for the intermediate metadata XML.
        Deleted and recreated on every call.
    :raises Exception: Re-raises whatever the conversion tools raised,
        after logging, so no success message is recorded downstream.
    """
    remove_gp_history_xslt = r"D:\kandrive\harvesters\scheduled-tasks\metadataremoval\removeGeoprocessingHistory.xslt"
    print("Trying to remove out_xml_dir/metadataTempFolder...")
    # TODO: change this to only affect the xml file and not the directory.
    # Tried that previously and had some issues with it not deleting, but
    # use the python OS tools and not arcpy for it this time. Might work
    # better.
    if Exists(out_xml_dir):
        Delete_management(out_xml_dir)
    os.mkdir(out_xml_dir)
    env.workspace = out_xml_dir
    ClearWorkspaceCache_management()
    try:
        print("Starting xml conversion.")
        name_xml = "kanDriveSpatialIncidents.xml"
        # Process: XSLT Transformation -- exports metadata minus GP history.
        XSLTransform_conversion(kanDriveSpatialIncidents,
                                remove_gp_history_xslt, name_xml, "")
        print("Completed xml conversion on %s" % kanDriveSpatialIncidents)
        # Process: Metadata Importer -- writes the cleaned metadata back.
        MetadataImporter_conversion(name_xml, kanDriveSpatialIncidents)
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit are
    # not logged to the status table as a conversion failure.
    except Exception:
        print("Could not complete xml conversion on %s" % kanDriveSpatialIncidents)
        endTime = datetime.datetime.now()
        ScriptStatusLogging(
            'KanDrive_Spatial_Incidents_Update',
            'kandrive_spatial.DBO.Incidents',
            scriptFailure, startTime, endTime,
            "Could not complete xml conversion on " + kanDriveSpatialIncidents,
            pythonLogTable)
        # Reraise the error to stop execution and prevent a success message
        # from being inserted into the table.
        raise