def crs10_prod_db_refresh(args):
    # parameters
    prepSPREPORTpath = args[0]
    prodSPREPORTpath = args[1]
    prepSPOWNpath = args[2]
    prodSPOWNpath = args[3]
    # log function
    log_msg('calling {}'.format(script_name))
    err_message = None
    try:
        ##### Truncate data in production SDEs and then append from preprod
        ### SPREPORT
        for fc in spreportFClist:
            # Assign locations
            inFC = os.path.join(prepSPREPORTpath, fc)
            prodFC = os.path.join(prodSPREPORTpath, fc)
            # Count input number of records
            infcRowCount = arcpy.GetCount_management(inFC).getOutput(0)
            log_msg('INPUT {0}: {1} records'.format(inFC, infcRowCount))
            # Truncate
            arcpy.TruncateTable_management(prodFC)
            # Append
            arcpy.Append_management(inFC, prodFC, "TEST")
            #arcpy.Append_management(inFC, prodFC, "NO_TEST")
            # Count number of records in prod
            prodfcRowCount = arcpy.GetCount_management(prodFC).getOutput(0)
            log_msg('PRODUCTION {0}: {1} records'.format(prodFC, prodfcRowCount))
        ### SPOWN
        for fc in spownFClist:
            # Assign locations
            inFC = os.path.join(prepSPOWNpath, fc)
            prodFC = os.path.join(prodSPOWNpath, fc)
            # Count input number of records
            infcRowCount = arcpy.GetCount_management(inFC).getOutput(0)
            log_msg('INPUT {0}: {1} records'.format(inFC, infcRowCount))
            # Truncate
            arcpy.TruncateTable_management(prodFC)
            #fieldMap = "TP_PROPERTY_LINK_PROPERTY_ID \"TP_PROPERTY_LINK_PROPERTY_ID\" true false false 8 Double 0 0 ,First,#," + inFC + ",PROPERTY_ID,-1,-1;OBJECTID_1 \"OBJECTID_1\" true false false 4 Long 0 0 ,First,#;PROPERTY_ID \"PROPERTY_ID\" true false false 8 Double 0 0 ,First,#," + inFC + ",PROPERTY_ID,-1,-1;PARCEL_ID \"PARCEL_ID\" true false false 8 Double 0 0 ,First,#," + inFC + ",PROPERTY_ID,-1,-1;COUNT_PARCEL_ID \"COUNT_PARCEL_ID\" true true false 2 Short 0 0 ,First,#," + inFC + ",COUNT_PARCEL_ID,-1,-1;SHAPE_STArea__ \"SHAPE_STArea__\" true false true 8 Double 0 0 ,First,#;SHAPE_STLength__ \"SHAPE_STLength__\" true false true 8 Double 0 0 ,First,#;SHAPE_Length \"SHAPE_Length\" false true true 8 Double 0 0 ,First,#;SHAPE_Area \"SHAPE_Area\" false true true 8 Double 0 0 ,First,#"
            # Append
            arcpy.Append_management(inFC, prodFC, "TEST")
            #arcpy.Append_management(inFC, prodFC, "NO_TEST", fieldMap)
            # Count number of records in prod
            prodfcRowCount = arcpy.GetCount_management(prodFC).getOutput(0)
            log_msg('PRODUCTION {0}: {1} records'.format(prodFC, prodfcRowCount))
        log_msg("Process time: %s \n" % str(datetime.datetime.now() - starttime))
    except Exception as e:
        print("ERROR: {}".format(e))
        err_message = "ERROR while running {0}: {1}".format(script_name, e)
    return err_message, log_messages

def emptyTable(self, table):
    atttable = str(table) + '__ATTACH'
    if arcpy.Exists(table):
        log('\tEmptying table ' + table)
        arcpy.TruncateTable_management(table)
    if arcpy.Exists(atttable):
        log('\t\tEmptying attachments table ' + str(atttable))
        arcpy.TruncateTable_management(atttable)

def TruncateTable(Table):
    '''
    Takes a single ESRI table stored in SQL Server and removes all of its rows
    (truncating the table). Table schema and metadata are left intact.
    '''
    DatasetVersioned = arcpy.Describe(Table).isVersioned
    if DatasetVersioned:
        arcpy.UnregisterAsVersioned_management(Table)
        arcpy.TruncateTable_management(Table)
    else:
        arcpy.TruncateTable_management(Table)

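# A minimal usage sketch for the TruncateTable helper above. The connection-file
# path and table name below are hypothetical, not taken from the original script.
production_table = r"Database Connections\prod.sde\GIS.DBO.Inspections"
TruncateTable(production_table)  # unregisters as versioned first when required, then truncates
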
def FC_Exist(FCname, DatasetPath, Template):
    FCpath = os.path.join(DatasetPath, FCname)
    FCtype = arcpy.Describe(Template).shapeType
    if arcpy.Exists(FCpath):
        if Compare_Fields(FCpath, Template):
            arcpy.AddMessage("Feature class, {}, already exists. Clearing records.......".format(FCname))
            try:
                arcpy.TruncateTable_management(FCpath)
            except:
                arcpy.DeleteRows_management(FCpath)
        else:
            arcpy.AddMessage("Additional fields have been added since the Feature class, {}, was created. "
                             "Recreating Feature class.......".format(FCname))
            arcpy.Delete_management(FCpath)
            return arcpy.CreateFeatureclass_management(DatasetPath, FCname, FCtype, Template,
                                                       "SAME_AS_TEMPLATE", "SAME_AS_TEMPLATE", Template)
    else:
        arcpy.AddMessage("Feature class, {}, does not exist. Creating now.......".format(FCname))
        return arcpy.CreateFeatureclass_management(DatasetPath, FCname, FCtype, Template,
                                                   "SAME_AS_TEMPLATE", "SAME_AS_TEMPLATE", Template)

def TruncateAttachmentTable(AttachmentTable):
    '''
    Function that removes all of the rows from the attachment table to ensure
    the database does not become bloated with reports that have been uploaded
    to SharePoint
    '''
    arcpy.TruncateTable_management(AttachmentTable)

def updateData(isTable):
    try:
        # validate that there was not a schema change
        arcpy.env.workspace = fgdb
        layer = sdeFC + '_Layer'
        if not isTable:
            arcpy.MakeFeatureLayer_management(sdeFC, layer, '1 = 2')
        else:
            arcpy.MakeTableView_management(sdeFC, layer, '1 = 2')
        try:
            arcpy.Append_management(layer, f, 'TEST')
            log('schema test passed')
            passed = True
        except arcpy.ExecuteError as e:
            if '000466' in e.message:
                log(e.message)
                msg = 'schema change detected'
                msg += '\n\n{0}'.format(getFieldDifferences(sdeFC, f))
                errors.append('{}: {}'.format(f, msg))
                log(msg)
                passed = False
                return passed
            else:
                raise e
        arcpy.Delete_management(layer)

        log('checking for changes...')
        if checkForChanges(f, sdeFC, isTable) and passed:
            log('updating data...')
            arcpy.TruncateTable_management(f)

            # edit session required for data that participates in relationships
            editSession = arcpy.da.Editor(fgdb)
            editSession.startEditing(False, False)
            editSession.startOperation()

            fields = [fld.name for fld in arcpy.ListFields(f)]
            fields = filter_fields(fields)
            if not isTable:
                fields.append('SHAPE@')
                outputSR = arcpy.Describe(f).spatialReference
            else:
                outputSR = None
            with arcpy.da.InsertCursor(f, fields) as icursor, \
                    arcpy.da.SearchCursor(sdeFC, fields,
                                          sql_clause=(None, 'ORDER BY OBJECTID'),
                                          spatial_reference=outputSR) as cursor:
                for row in cursor:
                    icursor.insertRow(row)

            editSession.stopOperation()
            editSession.stopEditing(True)
            changes.append(f.upper())
        else:
            log('no changes found')
    except:
        errors.append('Error updating: {}'.format(f))
        if logger:
            logger.logError()

def model_catalog_test_data_cleanup(self):
    if self.config.test_flag == "TEST":
        feature_class_list = [
            self.config.model_tracking_sde_path, self.config.model_alt_bc_sde_path,
            self.config.model_alt_hydraulic_sde_path, self.config.model_alt_hydrologic_sde_path,
            self.config.project_type_sde_path, self.config.simulation_sde_path,
            self.config.geometry_nodes_sde_path, self.config.geometry_areas_sde_path,
            self.config.geometry_links_sde_path, self.config.results_area_sde_path,
            self.config.results_link_sde_path, self.config.results_node_sde_path,
            self.config.results_node_flooding_sde_path, self.config.storage_sde_path,
            self.config.director_sde_path
        ]
        for feature_class in feature_class_list:
            try:
                arcpy.TruncateTable_management(feature_class)
            except:
                print("unable to truncate, using Delete Rows")
                arcpy.DeleteRows_management(feature_class)
    else:
        print("Config set to other than TEST, data will not be deleted")

def truncate_tables(self, creds):
    sde = self.make_absolute(['connections', creds['sde_connection_path']])

    with open(self.make_absolute(['data', 'sql', 'truncate.sql']), 'r') as f:
        sql = f.read()

    self.logger.info('truncating tabular tables')
    c = None
    try:
        c = arcpy.ArcSDESQLExecute(sde)
        c.execute(sql)
    except Exception as e:
        raise e
    finally:
        if c is not None:
            del c

    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = sde

    self.logger.info('truncating spatial tables')
    try:
        arcpy.TruncateTable_management('CrashLocation')
    except arcpy.ExecuteError as e:
        self.logger.info(e)

def oracleXY2oracle(sourceLyr, tempName, targetLyr, X, Y):
    now = datetime.datetime.now()
    arcpy.AddMessage('--- Processing Oracle load [' + now.strftime("%Y/%m/%d %H:%M:%S") + '] ---')
    inputs = [oracleConnector + "\\" + sourceLyr]
    temps = [myPath + "geofizykaGDB.gdb\\" + tempName]
    targets = [oracleGISPIG2Connector + "\\" + targetLyr]
    fieldX = [X]
    fieldY = [Y]
    events = ["tempLyr"]
    i = 0
    for n in inputs:
        arcpy.AddMessage(' --> Copying table tempTable' + tempName)
        arcpy.TableToTable_conversion(n, myPath + "geofizykaGDB.gdb", "tempTable" + tempName)
        arcpy.AddMessage(' --> Creating spatial layer ' + temps[i])
        arcpy.MakeXYEventLayer_management(myPath + "geofizykaGDB.gdb\\tempTable" + tempName,
                                          fieldX[i], fieldY[i], events[i], spatialRef, "")
        arcpy.FeatureClassToFeatureClass_conversion(events[i], myPath + "geofizykaGDB.gdb\\", tempName)
        arcpy.AddMessage(' --> Deleting data from ' + targets[i])
        #arcpy.DeleteRows_management(targets[i])  # for some reason Oracle does not accept this tool
        arcpy.TruncateTable_management(targets[i])
        arcpy.AddMessage(' --> Loading data into ' + targets[i])
        arcpy.Append_management(temps[i], targets[i], "NO_TEST", "", "")
        i = i + 1

def truncateSdeStaging():
    try:
        arcpy.TruncateTable_management(stage_fc)
    except Exception as e:
        print('Error truncating sde.encounter_stage: ' + str(e))
    finally:
        print('...Done')

def truncateTable():
    #######################
    # delete all features
    #######################
    try:
        deleteRows = arcpy.SearchCursor(sdeLayer)
        for delrow in deleteRows:
            RouteName = delrow.getValue("RouteName")
            arcpy.AddMessage("Deleting: RouteName-" + RouteName)
        del deleteRows
        arcpy.TruncateTable_management(sdeLayer)
        # DeleteFeatures was replaced with TruncateTable -- to eliminate database transactions
        #arcpy.DeleteFeatures_management(sdeLayer)
        arcpy.AddMessage("Deleted All Construction features")
        logMessage(eventId, timeStamp, 'DEBUG', 'Deleted all features')
    except:
        ex = getExceptionInfo()
        arcpy.AddMessage("Failed to delete Construction features. " + ex)
        #logMessage(eventId, timeStamp, 'DEBUG', 'Failed to delete features. Reason: ' + ex)
        raise Exception('Failed to delete features. Reason: ' + ex)
    finally:
        if 'deleteRows' in locals():
            del deleteRows

def truncateSdeEncounter():
    try:
        arcpy.TruncateTable_management(out_fc)
    except Exception as e:
        print('Error truncating sde.encounter: ' + str(e))
    finally:
        print('...Done')

def main():
    try:
        import time
        import os, sys
        print("Started at " + time.strftime("%Y/%m/%d %H.%M.%S", time.localtime()))
        import arcpy
        xFlag = 0

        # Get parameters from config file
        sys.path.append("//ccgisfiles01m/gisdata/prdba/crupdates/CCPythonLib/Appl/")
        import getConfig
        arcpy.env.workspace = getConfig.main('user', 'crscl', 'path', 'workspace') + 'scl_exports.gdb'
        sdeWorkspace = getConfig.main('globalPath', 'gismoLoad')
        outputStep = "start"

        # List the feature classes in Scl_Exports.gdb
        xStep = 'list Scl_Exports feature classes'
        fcList = arcpy.ListFeatureClasses()

        # Loop through the list
        for featureClass in fcList:
            # Copy to SDE
            if featureClass == "scl_l" or featureClass == "sclrte_l" or featureClass == "scl_n":
                # In order to keep address locators current without recreating them, we need to
                # delete the features and reload them
                outputStep = "truncate"
                arcpy.TruncateTable_management(sdeWorkspace + featureClass)
                outputStep = "append"
                arcpy.Append_management(featureClass, sdeWorkspace + featureClass, "TEST", "", "")
            else:
                if arcpy.Exists(sdeWorkspace + featureClass):
                    outputStep = "delete sde"
                    arcpy.Delete_management(sdeWorkspace + featureClass)
                outputStep = "copy to sde"
                arcpy.FeatureClassToFeatureClass_conversion(featureClass, sdeWorkspace, featureClass)

        print("Completed at " + time.strftime("%Y/%m/%d %H.%M.%S", time.localtime()))
    except:
        xFlag = 1
        ex = sys.exc_info()[1]
        eMsg = ex.args[0]
        xStep = '{0} at {1}: {2}'.format(featureClass, outputStep, eMsg)
        arcpy.AddMessage("There was a problem deleting or converting " + featureClass +
                         " at step " + outputStep + ".")
        arcpy.GetMessages()
    finally:
        return (xStep, xFlag)

def DME_master_hybrid_data_cleanup(self):
    feature_class_list = [self.config.DME_master_hybrid_sde_path]
    for feature_class in feature_class_list:
        try:
            arcpy.TruncateTable_management(feature_class)
        except:
            print(" unable to truncate, using Delete Rows")
            arcpy.DeleteRows_management(feature_class)

def resetMobileFeatures(features):
    """Clears all records from a list of datasets"""
    for feature in features:
        try:
            arcpy.TruncateTable_management(feature)
        except:
            return False
    return True

def Truncate(outputFC):
    # Delete all data in output feature class
    try:
        arcpy.TruncateTable_management(outputFC)
        return (True, 'Succeeded')
    except Exception as e:
        error = "Failed: truncate output feature class table. " + str(e)
        return (False, error)

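# Hedged usage sketch for the Truncate wrapper above; the feature class path is
# hypothetical, and the (success, message) tuple is unpacked as the function returns it.
succeeded, status = Truncate(r"C:\data\work.gdb\OutputSites")
if not succeeded:
    arcpy.AddError(status)
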
def main(INP_EINDRESULTAAT, GEODYN_EINDRESULTAAT):
    """Import the final result into Oracle"""
    print_log("\ttruncate Oracle table '{}'...".format(GEODYN_EINDRESULTAAT), "i")
    arcpy.TruncateTable_management(GEODYN_EINDRESULTAAT)
    print_log("\tfill Oracle table '{}'...".format(GEODYN_EINDRESULTAAT), "i")
    arcpy.Append_management(INP_EINDRESULTAAT, GEODYN_EINDRESULTAAT, "NO_TEST")

def truncate(self, fc):
    """
    Truncate the table

    Parameters
    ----------
    fc : str
        the table to truncate
    """
    arcpy.TruncateTable_management(in_table=fc)

def update_fgdb(fgdb, data_file, table):
    arcpy.env.workspace = fgdb
    logging.info('Truncating {0}'.format(table))
    arcpy.TruncateTable_management(table)
    logging.info('Appending records')
    arcpy.Append_management(data_file, coronavirus_table, "TEST")
    logging.info('Compacting fgdb')
    arcpy.Compact_management(fgdb)

def crearPoligonos(fc, salida):
    if not arcpy.Exists(scr + os.path.sep + salida):
        if eco:
            imprimir("Creating ..." + salida)
        arcpy.CreateFeatureclass_management(scr, salida, 'POLYGON', template=fc, spatial_reference=fc)
    else:
        if eco:
            imprimir("Truncating ..." + salida)
        arcpy.TruncateTable_management(scr + os.path.sep + salida)

def TruncateProductionTables(Tables):
    '''
    Remove all entries from the production tables but keep the schema intact.
    '''
    env.workspace = r"Database Connections/IAMUW-FS_CEO.sde"
    for Table in Tables:
        # Tables must be unregistered as versioned before they can be truncated
        arcpy.UnregisterAsVersioned_management(Table, "NO_KEEP_EDIT", "COMPRESS_DEFAULT")
        arcpy.TruncateTable_management(Table)
        # Re-register as versioned after successfully truncating table
        arcpy.RegisterAsVersioned_management(Table, "NO_EDITS_TO_BASE")

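# A sketch (an assumption, not part of the original script) of the same
# unregister/truncate/re-register sequence wrapped in try/finally, so a failed
# truncate does not leave the table unregistered as versioned.
def truncate_versioned_table(table):
    arcpy.UnregisterAsVersioned_management(table, "NO_KEEP_EDIT", "COMPRESS_DEFAULT")
    try:
        arcpy.TruncateTable_management(table)
    finally:
        arcpy.RegisterAsVersioned_management(table, "NO_EDITS_TO_BASE")
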
def crearPoligonos(fc, salida, fromScratch=False):
    #------------------------------------------
    if arcpy.Exists(salida) and fromScratch:
        arcpy.Delete_management(salida)
    if not arcpy.Exists(salida):
        if eco:
            imprimir("Creating ..." + salida)
        arcpy.CreateFeatureclass_management(arcpy.env.workspace, salida, 'POLYGON',
                                            template=fc, spatial_reference=fc)
    else:
        if eco:
            imprimir("Truncating ..." + salida)
        arcpy.TruncateTable_management(salida)

def truncar_data_dataset(table):
    """Truncates the data of a table within the dataset."""
    try:
        arcpy.AddMessage("Clearing layer " + table + "...")
        fc = os.path.join(arcpy.env.workspace, dataset, table)
        print('fc: ', fc)
        # Truncate a feature class if it exists
        if arcpy.Exists(fc):
            print('exists: ', table)
            arcpy.TruncateTable_management(fc)
    except:
        print("Failed truncar_data_dataset (%s)" % traceback.format_exc())
        error_log("Failed truncar_data_dataset (%s)" % traceback.format_exc())

def updateIASTables(tableList):
    for inTable in tableList:
        if inTable.split(".")[1] == 'iasworld':
            workspace = r"C:\MaintenanceScripts\Connections\[email protected]"
            outTable = inTable + '_STATIC'
        else:
            workspace = r"C:\MaintenanceScripts\Connections\[email protected]"
            outTable = 'Parcels_Testing'
        arcpy.env.workspace = workspace
        print("Truncating " + outTable + "...")
        arcpy.TruncateTable_management(outTable)
        print("Truncated " + outTable + ".")
        print("Appending view to " + outTable + "...")
        arcpy.Append_management(inTable, outTable)
        print("Appended view to " + outTable + ".")

def update_fgdb(fgdb, data_file, table):
    arcpy.env.workspace = fgdb
    logging.info('Truncating {0}'.format(table))
    arcpy.TruncateTable_management(table)
    logging.info('Appending records')
    arcpy.Append_management(
        data_file, "Coronavirus_Cases", "NO_TEST",
        r'State "State" true true false 8000 Text 0 0,First,#,data_file,State,0,8000;'
        r'Country "Country" true true false 8000 Text 0 0,First,#,data_file,Country,0,8000;'
        r'County_Name "County Name" true true false 50 Text 0 0,First,#,data_file,County Name,0,8000;'
        r'Full_County_Name "Full County Name" true true false 8000 Text 0 0,First,#,data_file,Full County Name,0,8000;'
        r'Cases "Cases" true true false 4 Long 0 0,First,#,data_file,Cases,-1,-1;'
        r'Update_Time "Update_Time" true true false 8 Date 0 0,First,#,data_file,Update Time,-1,-1;'
        r'UVA_URL "Source" true true false 255 Text 0 0,First,#,data_file,UVA URL,0,8000;'
        r'Harvard_URL "Harvard_URL" true true false 255 Text 0 0,First,#,data_file,Harvard URL,0,8000',
        '', '')
    logging.info('Compacting fgdb')
    arcpy.Compact_management(fgdb)

def _create_fc_layer(self, file_geodb, fc_name):
    try:
        arcpy.CreateFeatureclass_management(
            file_geodb, fc_name, "POLYLINE", "", "DISABLED", "DISABLED",
            "PROJCS['NZGD_2000_New_Zealand_Transverse_Mercator',GEOGCS['GCS_NZGD_2000',DATUM['D_NZGD_2000',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',1600000.0],PARAMETER['False_Northing',10000000.0],PARAMETER['Central_Meridian',173.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]];-4020900 1900 10000;-100000 10000;-100000 10000;0.001;0.001;0.001;IsHighPrecision"
        )
    except Exception as e:
        # feature class already exists.
        self.errorlog(e.message)
        try:
            arcpy.TruncateTable_management("%s/%s" % (file_geodb, fc_name))
        except Exception as e:
            self.errorlog(e.message)
    for k, v in getattr(self._config, fc_name).items():
        try:
            arcpy.AddField_management("%s/%s" % (file_geodb, fc_name), k, v)
        except Exception as e:
            self.errorlog(e.message)
            raise

def PrepareWorkspace():
    """
    Prepare file geodatabase workspace
    Returns:
        File Geodatabase: local file gdb to keep taxi density data
        taxi_feature_class: feature class with taxi point locations
    """
    # define expected file paths for file gdb folder, fgdb, taxi feature class
    fgdb_folder = constants.fgdb_folder
    fgdb_name = constants.taxi_fgdb_name
    file_gdb = os.path.join(fgdb_folder, fgdb_name)
    taxi_feature_class_name = "TaxiLocations"
    taxi_feature_class = os.path.join(file_gdb, taxi_feature_class_name)
    out_coordinate_system = arcpy.SpatialReference('WGS 1984')  # define output spatial reference

    if not os.path.exists(fgdb_folder):  # if file gdb folder has not been created
        os.mkdir(fgdb_folder)  # create the folder
    if not arcpy.Exists(file_gdb):  # if file gdb has not been created
        arcpy.CreateFileGDB_management(fgdb_folder, fgdb_name)  # create the file gdb
    if not arcpy.Exists(taxi_feature_class):  # if the taxi feature class does not exist
        # create the point feature class in WGS84 spatial reference
        arcpy.CreateFeatureclass_management(
            file_gdb, taxi_feature_class_name, "Point",
            spatial_reference=out_coordinate_system)  # create a point feature class with defined coordinate system
    arcpy.TruncateTable_management(taxi_feature_class)  # delete existing features in the feature class
    return file_gdb, taxi_feature_class  # return fgdb and feature class path to main

def process(self):
    dogm_admin = join(self.garage, 'dogm', 'DOGMADMIN@[email protected]')
    dogm_wells = join(self.garage, 'dogm', 'OilGas@[email protected]')

    for crate in self.get_crates():
        if crate.result[0] in [Crate.UPDATED, Crate.CREATED]:
            if crate.destination_name == WELLS_DESTINATION_NAME:
                sde_destination = join(dogm_wells, 'DOGM.OILGAS.{}'.format(crate.destination_name))
            else:
                sde_destination = join(dogm_admin, 'DOGM.DOGMADMIN.{}'.format(crate.destination_name))

            self.log.info('truncating and appending {}'.format(sde_destination))
            arcpy.TruncateTable_management(sde_destination)
            arcpy.Append_management(crate.destination, sde_destination, 'NO_TEST')

capa_fuente = r"%s" % FolderEntrada + "\\Partes\\" + str(1) + "\\bd" + str( 1) + ".gdb\\cuadrox_" + str(1) + "_Final" no_existen, existen, i = [], [], 1 # inicializa las variables que almacenarán las secciones procesadas y aquellas que no se pudieron procesar for capa in arreglo_features: # si la sección existe o fue procesada se almacena en el arreglo de existen sino en el de no existen if arcpy.Exists(capa): existen.append(i) else: no_existen.append(i) i += 1 if len( no_existen ) == 0: # si el arreglo de no existen no tiene ningún elemento procesa la unificación arreglo_features = listaanidada(arreglo_features, ";") arcpy.CopyFeatures_management(capa_fuente, output) # nuevo arcpy.TruncateTable_management(output) # nuevo arcpy.AddMessage(arreglo_features) arcpy.AddMessage(output) arcpy.Append_management(inputs=arreglo_features, target=output, schema_type="NO_TEST") # nuevo layer_salida = arcpy.MakeFeatureLayer_management(output, nombreSalida) arcpy.SetParameter(8, layer_salida) else: # en caso de que existan secciones que no pudieron ser procesadas se muestra el siguiente texto arcpy.AddError("no se pudieron procesar las secciones: " + str(no_existen)) print "Proceso de unificacion terminado" print "Eliminate Polygon MultiExtent Completado en %s segundos." % ( time.clock() - t_inicio)
print(theStartTime)
file = open("C:/Users/bgoggin/Dropbox/SF Planning/Tim_Updates/Logs/" + myStartDate + "test" + ".txt", "w")
file.write(theStartTime + "\n")
when = datetime.date.today()
theDate = when.strftime("%d")
theDay = when.strftime("%A")
print(theDay)

try:
    mitigation_layer = root + "Staging_Data/sfmta_commitments.gdb/Commitments"
    mitigation_projected = root + "Staging_Data/sfmta_commitments.gdb/Commitments_proj"
    mitigation_buffer = root + "Staging_Data/sfmta_commitments.gdb/Commitments_buffer"
    arcpy.TruncateTable_management(mitigation_layer)
    file.write("Deleted old feature class table" + "\n")
    cursor = arcpy.da.InsertCursor(mitigation_layer, ['Title', 'Description', 'SHAPE@XY'])
    with open('Raw_Data/mitigation.csv', 'r') as f:
        reader = csv.DictReader(f)
        #reader = [('Title1', 'The project is the construction of a 36-story 262', (-122.4248302, 37.7856142)),
        #          ('Title2', 'The proposed project would demolish the existing', (-122.4248302, 37.7856142))]
        for row in reader:
            cursor.insertRow((str(row['Title2']), str(row['Short Description']),
                              (float(row['Longitude']), float(row['Latitude']))))
    #clean up