def execute(self, parameters, messages):
    # create the file GDB
    folder = parameters[0].valueAsText
    name = parameters[1].valueAsText
    arcpy.CreateFileGDB_management(folder, name)
    gdb_path = os.path.join(folder, name)

    # create the garages feature class from the XY table and add it to the GDB
    garage_location = parameters[3].valueAsText
    garage_shp_name = parameters[4].valueAsText
    garages = arcpy.MakeXYEventLayer_management(garage_location, 'X', 'Y', garage_shp_name)
    arcpy.FeatureClassToGeodatabase_conversion(garages, gdb_path)
    garage_path = os.path.join(gdb_path, garage_shp_name)

    # copy the Structures feature class from the Campus GDB
    campus_gdb_path = parameters[2].valueAsText
    structures = os.path.join(campus_gdb_path, 'Structures')
    campus_buildings = os.path.join(gdb_path, 'campus_buildings')
    arcpy.Copy_management(structures, campus_buildings)

    # reproject garages to the spatial reference of campus buildings
    projection = arcpy.Describe(campus_buildings).spatialReference
    garage_projected = os.path.join(gdb_path, 'garage_projected')
    arcpy.Project_management(garage_path, garage_projected, projection)

    # get the garage to buffer and the buffer distance
    garage_selection = parameters[5].valueAsText
    buffer_distance = float(parameters[6].valueAsText)

    # make sure the selected garage exists
    where = "Name = '%s'" % garage_selection
    shouldProceed = False
    with arcpy.da.SearchCursor(garage_projected, ['Name'], where_clause=where) as cursor:
        for row in cursor:
            if row[0] == garage_selection:
                shouldProceed = True

    if shouldProceed:
        # generate the name for the buffer layer (periods are not valid in GDB names)
        buff_label = str(buffer_distance).replace('.', '_')
        garage_buff = os.path.join(gdb_path, 'garage_%s_buffed_%s' % (garage_selection, buff_label))
        # get a reference to the selected garage
        garageFeature = arcpy.Select_analysis(
            garage_projected, os.path.join(gdb_path, 'building_%s' % garage_selection), where)
        # buffer the selected garage
        arcpy.Buffer_analysis(garageFeature, garage_buff, buffer_distance)
        # intersect the garage buffer with the campus buildings
        intersection = os.path.join(gdb_path, 'garage_building_intersection')
        arcpy.Intersect_analysis([garage_buff, campus_buildings], intersection, 'ALL')
        # export the intersection table to CSV
        arcpy.TableToTable_conversion(intersection,
                                      r'C:\Users\Eileen\Documents\lab 5',
                                      'nearbyBuildings.csv')
    else:
        messages.addErrorMessage('garage not found')
        raise arcpy.ExecuteError
    return
def ARCGIS_create_database(gdbpath, time_ymdhms, datatype):
    SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
    authorization_file = os.path.join(SCRIPT_DIR, u"server10.2.ecp")
    database_name = datatype + time_ymdhms
    # create the enterprise geodatabase on the local PostgreSQL instance
    arcpy.CreateEnterpriseGeodatabase_management(
        database_platform=u"PostgreSQL",
        instance_name=u"localhost",
        database_name=database_name,
        account_authentication=u"DATABASE_AUTH",
        database_admin=u"postgres",
        database_admin_password=u"Lantucx2018",
        sde_schema=u"SDE_SCHEMA",
        gdb_admin_name=u"sde",
        gdb_admin_password=u"sde",
        tablespace_name=u"#",
        authorization_file=authorization_file)

    # create the .sde connection file next to this script
    connsdepath = SCRIPT_DIR
    connsde = database_name + u".sde"
    conn = {}
    conn[u"out_folder_path"] = connsdepath
    conn[u"out_name"] = connsde
    conn[u"database_platform"] = u"PostgreSQL"
    conn[u"instance"] = u"localhost"
    conn[u"account_authentication"] = u"DATABASE_AUTH"
    conn[u"database"] = database_name
    conn[u"username"] = u"sde"
    conn[u"password"] = u"sde"
    conn[u"save_user_pass"] = u"SAVE_USERNAME"
    arcpy.CreateDatabaseConnection_management(**conn)

    # copy every feature dataset, then every standalone feature class,
    # from the file GDB into the new enterprise geodatabase
    arcpy.env.workspace = gdbpath
    sdepath = os.path.join(SCRIPT_DIR, connsde)
    for ds in arcpy.ListDatasets(feature_type=u'feature') + [u'']:
        if ds != u'':
            dspath = os.path.join(gdbpath, ds)
            sdedspath = os.path.join(sdepath, ds)
            arcpy.Copy_management(dspath, sdedspath)
        else:
            for fc in arcpy.ListFeatureClasses(feature_dataset=ds):
                fcpath = os.path.join(gdbpath, ds, fc)
                sdedspath = os.path.join(sdepath, ds, fc)
                arcpy.Copy_management(fcpath, sdedspath)
    print u'Spatial database created successfully'
    return datatype + time_ymdhms + ".sde"
def BatchExtractValuesToPoints(Input_Landsat_dir, gdb, src_featureClass, outputTable_dir):
    # file names look like <sensor>_<product>_<pathrow>_<yyyymmdd>...tif;
    # group 2 captures the acquisition date
    PATTERN = re.compile(r'^[0-9a-zA-Z]*_[0-9a-zA-Z]*_(\d*)_(\d*)\w*\.tif$')
    if not os.path.exists(outputTable_dir):
        os.makedirs(outputTable_dir)
    YEARS_DIR = os.listdir(Input_Landsat_dir)
    for year_dir in YEARS_DIR:
        year_path = os.path.join(Input_Landsat_dir, year_dir)
        file_dict = {}
        if os.path.isdir(year_path):
            print("processing " + Input_Landsat_dir + ": year " + year_dir)
            files = os.listdir(year_path)
            for fname in files:
                fpath = os.path.join(year_path, fname)
                if os.path.isfile(fpath) and fpath.endswith(".tif"):
                    mo = PATTERN.match(fname)
                    if mo is None:  # skip files that do not match the naming pattern
                        continue
                    ymd = mo.group(2)
                    if ymd not in file_dict:
                        file_dict[ymd] = []
                    file_dict[ymd].append(fname)

            # build the [raster, field name] list sorted by date
            keys = sorted(file_dict.keys())
            inRasterList = []
            for k in keys:
                f_list = sorted(file_dict[k])
                for f in f_list:
                    inRasterList.append([f, k])

            # Local variables:
            Zjt_org = gdb + "/" + src_featureClass
            featureClasses = src_featureClass + "_" + str(year_dir)
            inPointFeatures = gdb + "/" + featureClasses

            # Process: Copy the source points, then extract one value per raster
            arcpy.Copy_management(Zjt_org, inPointFeatures, "FeatureClass")
            arcpy.env.workspace = year_path
            arcpy.sa.ExtractMultiValuesToPoints(inPointFeatures, inRasterList, "NONE")

            # Process: Table To Excel
            zjt_xls = os.path.join(outputTable_dir, featureClasses + ".xls")
            arcpy.TableToExcel_conversion(inPointFeatures, zjt_xls, "ALIAS", "CODE")
def update(self):
    """ Updates the second, 'sandbox' geodatabase that users can mess with """
    message("**************************************************")
    message("Updating Sandbox Geodatabase".center(50))
    message("**************************************************")
    env.workspace = SANDBOX

    # clean up leftovers from any previous run
    old_fcs = [
        item for item in arcpy.ListFeatureClasses()
        if item.endswith("_old") or item.endswith("_new")
    ]
    for item in old_fcs:
        try:
            arcpy.Delete_management(os.path.join(SANDBOX, item))
        except Exception as e:
            message("~ Unable to delete {}. Please check.\n{}".format(item, str(e)))
            # raise

    # copy each feature class in as <fc>_new, shuffle the names, and keep
    # the previous copy around as <fc>_old
    for fc in self.fc_list:
        concerto_path = os.path.join(MAIN_PATH, fc)
        sandbox_path = os.path.join(SANDBOX, fc)
        new_sandbox_path = "{}_new".format(sandbox_path)
        old_sandbox_path = "{}_old".format(sandbox_path)
        try:
            arcpy.Copy_management(concerto_path, new_sandbox_path)
            message("Copied Concerto\\{} to Sandbox\\{}".format(
                fc, os.path.basename(new_sandbox_path)))
            try:
                arcpy.Rename_management(sandbox_path, old_sandbox_path)
                message("Renamed Sandbox\\{} to Sandbox\\{}".format(
                    fc, os.path.basename(old_sandbox_path)))
                try:
                    arcpy.Rename_management(new_sandbox_path, sandbox_path)
                    message("Renamed Sandbox\\{} to Sandbox\\{}".format(
                        os.path.basename(new_sandbox_path), fc))
                except Exception as e:
                    message("~ Unable to rename Sandbox\\{} to Sandbox\\{}.\n{}".format(
                        os.path.basename(new_sandbox_path), fc, str(e)))
                    # raise
            except Exception as e:
                message("~ Unable to rename Sandbox\\{} to Sandbox\\{}.\n{}".format(
                    fc, os.path.basename(old_sandbox_path), str(e)))
                # raise
        except Exception as e:
            message(("~ Unable to copy Concerto\\{0} to Sandbox\\{0} - User may "
                     "have map open.\n{1}").format(fc, str(e)))
            # raise

    env.workspace = MAIN_PATH
    message("**************************************************")
    message("Finished Updating Sandbox GeoDatabase".center(50))
    message("**************************************************")
def rastervalues_to_points(inPointFC, inRasterList, newFieldNameList, outPointFC):
    """
    Extract values from a list of rasters to new fields in a point Feature Class.

    :Parameters:
    ------------
    inPointFC : String
        Path and name of ArcGIS Point Feature Class or Shapefile defining the
        point locations, e.g. rain gauges, wind energy plants.
        The input file is not altered by this function.
    inRasterList : List of Strings
        Paths and names of all rasters to extract values from.
    newFieldNameList : List of Strings
        Names for the new fields which are created in the output Feature
        Class. Values from the rasters are written into the new fields in
        corresponding order.
    outPointFC : String
        Path and name of the output point Feature Class to be created.

    :Returns:
    ---------
    ResObj : arcpy Result Object of the output point Feature Class
    """
    i = 0
    # avoid errors if only a string for one raster is specified
    if not isinstance(inRasterList, list):
        inRasterList = [inRasterList]
    if not isinstance(newFieldNameList, list):
        newFieldNameList = [newFieldNameList]

    for inRaster, newField in zip(inRasterList, newFieldNameList):
        # for the first raster in the list, the function can be executed normally
        if i == 0:
            ResObj = arcpy.sa.ExtractValuesToPoints(inPointFC, inRaster,
                                                    outPointFC, "", "")
        # for every following raster, a copy of the first output point feature
        # class has to be made, since it is input and output at the same time;
        # the copy is deleted afterwards
        else:
            if outPointFC.endswith(".shp"):
                temp = os.path.join(os.path.split(outPointFC)[0], "temp.shp")
            else:
                temp = os.path.join(os.path.split(outPointFC)[0], "temp")
            arcpy.Copy_management(in_data=outPointFC, out_data=temp)
            ResObj = arcpy.sa.ExtractValuesToPoints(in_point_features=temp,
                                                    in_raster=inRaster,
                                                    out_point_features=outPointFC)
            arcpy.Delete_management(in_data=temp)
        # add the field, copy the raster values into it and delete the
        # RASTERVALU field. Otherwise the next loop run would fail.
        i += 1
        arcpy.AddField_management(ResObj, newField, "DOUBLE")
        arcpy.CalculateField_management(in_table=ResObj, field=newField,
                                        expression="!RASTERVALU!",
                                        expression_type="PYTHON")
        arcpy.DeleteField_management(in_table=ResObj, drop_field="RASTERVALU")
    return ResObj
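# Hedged usage sketch for rastervalues_to_points: all paths and field names
# below are hypothetical placeholders, and arcpy.sa.ExtractValuesToPoints
# requires the Spatial Analyst extension to be licensed.
def _demo_rastervalues_to_points():
    arcpy.CheckOutExtension("Spatial")
    result = rastervalues_to_points(
        r"C:\data\rain_gauges.shp",                  # hypothetical input points
        [r"C:\data\dem.tif", r"C:\data\slope.tif"],  # rasters to sample
        ["ELEV", "SLOPE"],                           # one new field per raster
        r"C:\data\gauges_with_values.shp")           # output feature class
    arcpy.CheckInExtension("Spatial")
    return result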
def createLocator():
    '''
    Copies locators from \\lojic-files.msd.louky.local\Data\Locators that are
    needed to create a composite locator for Jefferson County addresses. The
    locators needed to create the composite locator have to be accessible to
    this script.
    '''
    try:
        locPath = r"\\lojic-files.msd.louky.local\Data\Locators"
        JeffAdds = os.path.join(locPath, "JeffAdds")
        JeffStreets = os.path.join(locPath, "JeffStreets")
        JeffNames = os.path.join(locPath, "JeffNames")
        Jeff_Composite = os.path.join(workspace, "Jeff_Composite")
        JeffAdds_copy = os.path.join(workspace, "JeffAdds")
        JeffStreets_copy = os.path.join(workspace, "JeffStreets")
        JeffNames_copy = os.path.join(workspace, "JeffNames")
        inLocators = "{0} JeffAdds;{1} JeffStreets;{2} JeffNames".format(
            JeffAdds_copy, JeffStreets_copy, JeffNames_copy)
        inFieldMap = ("Street \"Street or Intersection\" true true true 100 Text 0 0 "
                      ",First,#,{0},Street,0,0,{1},Street,0,0,{2},Street,0,0").format(
                          JeffAdds_copy, JeffStreets_copy, JeffNames_copy)

        # Process: Copy the three source locators into the workspace
        ScriptUtils.AddMsgAndPrint("\tCopying address locators...", 0)
        arcpy.Copy_management(JeffAdds, JeffAdds_copy, "AddressLocator")
        arcpy.Copy_management(JeffStreets, JeffStreets_copy, "AddressLocator")
        arcpy.Copy_management(JeffNames, JeffNames_copy, "AddressLocator")

        # Process: Create Composite Address Locator
        ScriptUtils.AddMsgAndPrint("\tCreating the composite address locator...", 0)
        arcpy.CreateCompositeAddressLocator_geocoding(
            inLocators, inFieldMap, "JeffAdds #;JeffStreets #;JeffNames #",
            Jeff_Composite)
        return Jeff_Composite
    except:
        # Return any Python specific errors
        errType, errValue, tb = sys.exc_info()
        tbinfo = traceback.format_tb(tb)[0]
        pymsg = "PYTHON ERRORS:\n  Traceback Info:\n{0}\n  Error Info:\n    {1}: {2}\n".format(
            tbinfo, str(errType), str(errValue))
        tmRun = time.strftime("%X", time.localtime())
        endTime = time.time()
        prodInfo = "\tScript errored after running for {0} seconds.".format(
            str(round(endTime - startTime)))
        msg = "\n\n\tScript error at: {0}\n{1}\n\n{2}".format(tmRun, prodInfo, pymsg)
        ScriptUtils.AddMsgAndPrint(msg, 2)
        return ""
def _add_calculate_field(base, value):
    # copy the input into a scratch feature class; Copy_management fails for
    # some inputs (e.g. layers), so fall back to CopyFeatures_management
    scratch = arcpy.CreateScratchName("temp", data_type="FeatureClass",
                                      workspace=arcpy.env.scratchWorkspace)
    try:
        arcpy.Copy_management(_get_file_name(base), scratch)
    except arcpy.ExecuteError:
        arcpy.Delete_management(scratch)
        arcpy.CopyFeatures_management(_get_file_name(base), scratch)
    # add a Prospect field and set it to the given value on every feature
    arcpy.AddField_management(scratch, "Prospect", "LONG", "", "", "", "",
                              "NULLABLE", "NON_REQUIRED", "")
    arcpy.CalculateField_management(scratch, "Prospect", str(value), "PYTHON", "")
    return scratch
def test_schema_changes(self):
    arcpy.Copy_management(check_for_changes_gdb, test_gdb)
    # a crate pointing at a source with a different field length should fail
    with self.assertRaises(ValidationException):
        core.check_schema(
            Crate('ZipCodes', test_gdb, check_for_changes_gdb, 'FieldLength'))
    # identical schemas should validate
    result = core.check_schema(
        Crate('ZipCodes', test_gdb, check_for_changes_gdb, 'ZipCodes'))
    self.assertEqual(result, True)
def _create_temporary_copy(self, workspace=''):
    """
    make a copy of a project FGDB in the given temporary workspace
    """
    project_db = self.folders.get_db(workspace=workspace)
    temp_db = self.folders.get_temporary_db(workspace=workspace, check=False)
    if arcpy.Exists(temp_db):
        arcpy.Delete_management(temp_db)
    # deactivate adding of temp. gdbs to the table of contents
    with ArcpyEnv(addOutputsToMap=False):
        arcpy.Copy_management(project_db, temp_db)
def copy_img_to_ftp(fromfolder, ftp_folder, filename):
    # group files into subfolders named after characters 4-9 of the file name
    outFolderpath = os.path.join(ftp_folder, filename[4:10])
    if not os.path.isdir(outFolderpath):
        os.mkdir(outFolderpath)
    try:
        arcpy.Copy_management(os.path.join(fromfolder, filename),
                              os.path.join(outFolderpath, filename))
        print(filename + " was copied to " + outFolderpath + ".")
    except Exception:
        print("Error! Unable to copy " + filename + " to the ftp folder!")
def tabarea(fc):
    import arcpy
    # note: 'raster' and 'outfolder' are module-level globals
    name = os.path.splitext(os.path.basename(fc))[0]
    inraster = Raster(raster)
    mem = "in_memory"
    # tabulate the area of each raster value within each input feature
    tatable = TabulateArea(fc, "FID", inraster, "Value",
                           os.path.join(mem, name + "areas"))
    arcpy.Copy_management(tatable, os.path.join(outfolder, name + ".dbf"))
    arcpy.Delete_management(os.path.join(mem, name + "areas"))
    del tatable
    del name
def copyBackupPublishGDBtoStaging():
    arcpy.Copy_management(backupPublishGDB, publishGDB)
    # sort the gum count table by date, newest first, then drop the unsorted copy
    sort_fields = [["GUM_COUNT_DATE", "DESCENDING"]]
    table = publishGDB + "/UnsortedWorldwideGumCountByDate"
    sortTable = publishGDB + "/WorldwideGumCountByDate"
    arcpy.Sort_management(table, sortTable, sort_fields)
    arcpy.Delete_management(table)
def copyData(self, base, to, addPhoto, basePhoto, objectIDLabel):
    log('\tCopying ' + str(base) + ' to ' + str(to))
    # copy the dataset if the target does not exist yet, otherwise append to it
    if not arcpy.Exists(to):
        try:
            arcpy.Copy_management(base, to)
        except BaseException as e:
            log(e)
    else:
        arcpy.Append_management(base, to, "TEST", "", "")
    # optionally attach photos to the copied features
    if addPhoto and basePhoto and basePhoto != '':
        arcpy.EnableAttachments_management(to)
        arcpy.AddAttachments_management(to, objectIDLabel, basePhoto, 'objid',
                                        "img", "#")
def copy_template_gdb(gdb, name, out_ws):
    out_path = os.path.join(out_ws, name + '.gdb')
    message = 'Copying %s to %s' % (name, out_ws)
    say(message)
    # remove any existing copy before overwriting
    if arcpy.Exists(out_path):
        message = "%s already exists. Deleting." % out_path
        say(message)
        arcpy.Delete_management(out_path)
    arcpy.Copy_management(gdb, out_path)
    return out_path
def write_temperature_table(aligned_args, site_shp, save_as):
    """Make a table of max and min monthly temperature for points."""
    tempdir = tempfile.mkdtemp()
    # make a temporary copy of the point shapefile to append worldclim values
    source_shp = site_shp
    point_shp = os.path.join(tempdir, 'points.shp')
    arcpy.Copy_management(source_shp, point_shp)

    # collect the calendar months covered by the simulation period
    temperature_month_set = set()
    starting_month = int(aligned_args['starting_month'])
    for month_index in xrange(int(aligned_args['n_months'])):
        month_i = (starting_month + month_index - 1) % 12 + 1
        temperature_month_set.add(month_i)

    # build the list of rasters and matching field names
    raster_files = []
    field_list = []
    for substring in ['min', 'max']:
        for month_i in temperature_month_set:
            monthly_temp_path = aligned_args[
                '%s_temp_path_pattern' % substring].replace(
                    '<month>', '%.2d' % month_i)
            raster_files.append(monthly_temp_path)
            field_list.append('{}_{}'.format(substring, month_i))
    ex_list = zip(raster_files, field_list)
    arcpy.sa.ExtractMultiValuesToPoints(point_shp, ex_list)

    # read from the shapefile into a newly formatted table
    field_list.insert(0, 'site_id')
    temp_dict = {'site': [], 'month': [], 'tmin': [], 'tmax': []}
    with arcpy.da.SearchCursor(point_shp, field_list) as cursor:
        for row in cursor:
            site = row[0]
            temp_dict['site'].extend([site] * len(temperature_month_set))
            for f_idx in range(1, len(field_list)):
                field = field_list[f_idx]
                if field.startswith('min'):
                    temp_dict['tmin'].append(row[f_idx])
                elif field.startswith('max'):
                    temp_dict['tmax'].append(row[f_idx])
                else:
                    raise ValueError("value not recognized")
            temp_dict['month'].extend(temperature_month_set)
    for key in temp_dict.keys():
        if len(temp_dict[key]) == 0:
            del temp_dict[key]
    temp_df = pandas.DataFrame.from_dict(temp_dict)
    # check units: values larger than 100 indicate degrees C * 10
    while max(temp_df['tmax']) > 100:
        temp_df['tmax'] = temp_df['tmax'] / 10.
        temp_df['tmin'] = temp_df['tmin'] / 10.
    temp_df.to_csv(save_as, index=False)
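# Hedged usage sketch for write_temperature_table: the dictionary keys shown
# are the ones the function actually reads from aligned_args; the paths are
# hypothetical, and the '<month>' token in each pattern is replaced with a
# zero-padded month number at runtime.
def _demo_write_temperature_table():
    aligned_args = {
        'starting_month': 1,
        'n_months': 12,
        'min_temp_path_pattern': r"C:\data\worldclim\tmin_<month>.tif",
        'max_temp_path_pattern': r"C:\data\worldclim\tmax_<month>.tif",
    }
    # the point shapefile must contain a 'site_id' field
    write_temperature_table(aligned_args,
                            r"C:\data\sites.shp",
                            r"C:\data\temperature_table.csv")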
def AddNearAttributes(poly_shp, nameField, outFile, fieldName, fieldLength):
    # spatially join the polygons to a copy of themselves to collect the
    # names of intersecting neighbours
    arcpy.Copy_management(poly_shp, "temp_poly.shp")
    fieldmappings = arcpy.FieldMappings()
    CalFieldMappings(poly_shp, "temp_poly.shp", nameField, fieldmappings,
                     fieldName, fieldLength)
    arcpy.SpatialJoin_analysis(poly_shp, "temp_poly.shp", outFile,
                               "JOIN_ONE_TO_ONE", "KEEP_ALL", fieldmappings,
                               "INTERSECT", "", "")
    arcpy.DeleteField_management(outFile, ["Join_Count", "TARGET_FID"])
    # strip each polygon's own name (and the adjoining comma) from the
    # concatenated neighbour list
    arcpy.CalculateField_management(outFile, fieldName,
        "Replace([" + fieldName + "],[" + nameField + "]+\",\",\"\")", "VB")
    arcpy.CalculateField_management(outFile, fieldName,
        "Replace([" + fieldName + "],\",\"+[" + nameField + "],\"\")", "VB")
    arcpy.CalculateField_management(outFile, fieldName,
        "Replace([" + fieldName + "],[" + nameField + "],\"\")", "VB")
##    Python equivalents:
##    arcpy.CalculateField_management(outFile,"NearPoly","string.replace(!"+fieldName+"!,!"+nameField+"!+',','')","PYTHON")
##    arcpy.CalculateField_management(outFile,"NearPoly","string.replace(!"+fieldName+"!,','+!"+nameField+"!,'')","PYTHON")
##    arcpy.CalculateField_management(outFile,"NearPoly","string.replace(!"+fieldName+"!,!"+nameField+"!,'')","PYTHON")
    arcpy.Delete_management("temp_poly.shp")
def sbdd_ProcessCAI(myFD, myFL):
    arcpy.AddMessage("     Beginning CAI Processing")
    if arcpy.Exists("CAI"):
        arcpy.Delete_management("CAI")
    if int(arcpy.GetCount_management(myFD + "/" + myFL).getOutput(0)) > 0:
        arcpy.Copy_management(myFD + "/" + myFL, "CAI")
        arcpy.AddMessage("     Repairing geometry ...")
        arcpy.RepairGeometry_management("CAI")
    else:
        arcpy.AddMessage("     Nothing to do ...")
    sbdd_ExportToShape("CAI")
    del myFD, myFL
    return
def createFieldsAndFeatureClass():
    input_fc = config["input_feature_class"]
    output_fc = config["output_feature_class"]
    # Add fields to input (for calculation)
    fieldList = [['time', 'Date'],
                 ['air_temperature_2m', 'Double'],
                 ['cloud_area_fraction', 'Double'],
                 ['relative_humidity_2m', 'Double'],
                 ['low_type_cloud_area_fraction', 'Double'],
                 ['high_type_cloud_area_fraction', 'Double'],
                 ['precipitation_amount_acc', 'Double']]
    for props in fieldList:
        arcpy.AddField_management(input_fc, *props)
    # Copy to a new feature class, then truncate it
    arcpy.Copy_management(input_fc, output_fc)
    arcpy.DeleteFeatures_management(output_fc)
def main():
    logFileName = "T:/getIntRds.log"
    logFile = open(logFileName, "w")
    tsaNums = sys.argv[1]
    root = sys.argv[2]
    year = str(sys.argv[3])
    inRds = sys.argv[4]
    tsas = []
    fileListToArray(tsas, tsaNums)
    arcpy.env.overwriteOutput = True

    # get the path of the GDB containing the input FC
    parts = inRds.split("\\")[0:-1]
    delim = "\\"
    rdsGDB = delim.join(parts)
    tempGDB = "t:\\tempRDS12345.gdb"
    gdbName = "tempRDS12345.gdb"
    tempRDS = "t:\\tempRDS12345.gdb\\tempRDs"
    src = tempGDB + "\\" + inRds.split("\\")[-1]
    srcLyr = "srcLyr"
    if arcpy.Exists(tempGDB):
        arcpy.Delete_management(tempGDB)

    # Copy the file geodatabase to the T drive
    if not arcpy.Exists(tempGDB):
        arcpy.Copy_management(rdsGDB, tempGDB)
    arcpy.MakeFeatureLayer_management(src, srcLyr)

    for tsa in tsas:
        rootTSAgdb = root + "\\" + tsa + "\\" + tsa + "_" + year + ".gdb"
        rootTSAgdbRds = rootTSAgdb + "\\src\\IntegratedRoadsBuffers"
        bnd = rootTSAgdb + "\\wrk\\bnd"

        # Delete the existing FC
        delFC(rootTSAgdbRds)

        arcpy.SelectLayerByLocation_management(srcLyr, "INTERSECT", bnd)
        arcpy.CopyFeatures_management(srcLyr, tempRDS)
        arcpy.RepairGeometry_management(tempRDS)

        # Clip and copy the fc to the Units directory
        arcpy.AddMessage("Clipping...")
        print "output is " + rootTSAgdbRds
        arcpy.Clip_analysis(tempRDS, bnd, rootTSAgdbRds)

    print "Elapsed time: %d seconds" % (time.clock())
    logFile.close()
def CopyFeatures():
    LogMessage(" Copy feature datasets...")
##    arcpy.Copy_management(SewerSystem, thisWorkspace + "/SewerSystem", "FeatureDataset")
##    LogMessage("     Sewer copied.")
##    arcpy.Copy_management(StormWater, thisWorkspace + "/Stormwater", "FeatureDataset")
##    LogMessage("     Storm copied.")
    arcpy.Copy_management(WaterSystem, thisWorkspace + "/Water", "FeatureDataset")
    LogMessage("     Water copied.")
##    arcpy.Copy_management(Impervious, thisWorkspace + "/ImperviousArea", "FeatureClass")
##    LogMessage("     Impervious copied.")
    return
def summarizeActualAndPredictedGumCountForCity():
    setupEnvironment()
    fc = "Sites"
    table = "CityGumCount"
    # sum actual and predicted gum counts across all sites
    arcpy.Statistics_analysis(
        fc, table,
        [["GUM_COUNT_ACTUAL", "Sum"], ["GUM_COUNT_PREDICTED", "Sum"]], None)
    outTable = publishGDB + "/" + table
    arcpy.Copy_management(table, outTable)
def write_worldclim_precip_table(site_shp, save_as):
    """Write precipitation table from Worldclim average precipitation.

    Worldclim average precipitation should be used for spin-up simulations.
    """
    worldclim_pattern = os.path.join(
        r"E:\GIS_local_archive\General_useful_data\Worldclim_2.0\worldclim_precip",
        "wc2.0_30s_prec_<month>.tif")

    tempdir = tempfile.mkdtemp()
    # make a temporary copy of the point shapefile to append worldclim values
    source_shp = site_shp
    point_shp = os.path.join(tempdir, 'points.shp')
    arcpy.Copy_management(source_shp, point_shp)

    # find the monthly precipitation rasters matching the pattern
    precip_dir = os.path.dirname(worldclim_pattern)
    precip_basename = os.path.basename(worldclim_pattern)
    files = [f for f in os.listdir(precip_dir)
             if os.path.isfile(os.path.join(precip_dir, f))]
    precip_regex = re.compile(precip_basename.replace('<month>', r'(\d+)'))
    precip_matches = [m for m in [precip_regex.search(f) for f in files]
                      if m is not None]
    month_list = set([int(m.group(1)) for m in precip_matches])

    raster_files = []
    for month_i in month_list:
        precip_path = worldclim_pattern.replace('<month>', '%02d' % month_i)
        raster_files.append(precip_path)
    ex_list = zip(raster_files, month_list)
    arcpy.sa.ExtractMultiValuesToPoints(point_shp, ex_list)

    # read from the shapefile into a newly formatted table
    field_list = [str(m) for m in month_list]
    field_list.insert(0, 'site_id')
    prec_dict = {'site': [], 'month': [], 'prec': []}
    with arcpy.da.SearchCursor(point_shp, field_list) as cursor:
        for row in cursor:
            site = row[0]
            prec_dict['site'].extend([site] * len(month_list))
            for f_idx in range(1, len(field_list)):
                prec_dict['prec'].append(row[f_idx])
            prec_dict['month'].extend(month_list)
    prec_df = pandas.DataFrame.from_dict(prec_dict)
    # divide raw Worldclim precip by 10
    prec_df['prec'] = prec_df['prec'] / 10.
    prec_df.to_csv(save_as, index=False)
def tableTo_primaryOutput(input_object):
    log("Copy result table to primary output gdb")
    desc = arcpy.Describe(input_object)
    if desc.dataElementType != 'DETable':
        # if not a table - convert the fc to a table
        table_view = arcpy.MakeTableView_management(input_object, desc.basename)
        # move the table to the primary output gdb
        full_output_name = os.path.join(config.primary_output, desc.basename)
        arcpy.TableToGeodatabase_conversion(table_view, config.primary_output)
    else:
        # if already a table - copy it to the primary output gdb
        full_output_name = os.path.join(config.primary_output, desc.basename)
        arcpy.Copy_management(input_object, full_output_name)
def copy_table_one(self, src_table, src_table_sde, tar_table, tar_table_sde,
                   over_write=False):
    """
    copy one table from the source SDE database to the target SDE database

    remark: single-table copy;
            if over_write is True, the source table overwrites the target table
    :param src_table: the copied source table
    :param src_table_sde: the sde file of the source database
    :param tar_table: the target table
    :param tar_table_sde: the sde file of the target database
    :param over_write: whether or not to overwrite the target table
    :return: True or False
    such as:
        src_table = "sde.DBO.test"
        src_table_sde = "D:\config\SQL_SERVER_localhost_sde_source.sde"
        tar_table = "ccsde.DBO.test"
        tar_table_sde = "D:\config\SQL_SERVER_localhost_sde_source.sde"
        over_write = True
    """
    try:
        assert isinstance(src_table, basestring)
        assert isinstance(tar_table, basestring)
    except AssertionError as e:
        emsg = "SDEOpr copy_table_one parameter type is error: %s" % e.message
        raise Exception(emsg)
    if src_table_sde == tar_table_sde:
        # tables only exist in the database itself, not in a dataset
        emsg = "SDEOpr copy_table_one source sde and target sde are equal"
        raise Exception(emsg)
    self.isexist_sde(src_table_sde)
    self.isexist_sde(tar_table_sde)
    src_sde_table = os.path.abspath(src_table_sde + "\\" + src_table)
    tar_sde_table = os.path.abspath(tar_table_sde + "\\" + tar_table)
    if over_write is True:
        try:
            self.del_element(tar_sde_table)
        except Exception as e:
            # a failed delete (e.g. the target does not exist yet) is ignored
            emsg = "SDEOpr copy_table_one del_element is failure: %s" % e.message
    try:
        arcpy.Copy_management(in_data=src_sde_table, out_data=tar_sde_table,
                              data_type="Table")
    except Exception as e:
        return False
    else:
        return True
def copy_sde_table(self, src_sde, tar_sde, over_write=False):
    """
    copy the tables of the source SDE database to the target SDE database

    remark: only tables that live directly in the database (not in a dataset)
            are handled;
            if over_write is True, the source tables overwrite the target tables
    :param src_sde: the source sde file
    :param tar_sde: the target sde file
    :param over_write: whether or not to overwrite existing tables
    :return: suclist is the list of tables copied successfully
             failist is the list of tables that failed to copy
    such as:
        src_sde = "D:\config\SQL_SERVER_localhost_sde_source.sde"
        tar_sde = "D:\config\SQL_SERVER_localhost_sde_source.sde"
        over_write = False
    """
    try:
        assert isinstance(over_write, bool)
    except Exception as e:
        emsg = "SDEOpr copy_table parameter type is error: %s" % e.message
        raise Exception(emsg)
    if src_sde == tar_sde:
        emsg = "SDEOpr copy_table source sde and target sde are equal"
        raise Exception(emsg)
    self.isexist_sde(src_sde)
    self.isexist_sde(tar_sde)
    suclist = []
    failist = []
    arcpy.env.workspace = tar_sde
    tar_tables = list(arcpy.ListTables())
    arcpy.env.workspace = src_sde
    for src_table in arcpy.ListTables():
        src_sde_table = os.path.abspath(src_sde + "\\" + src_table)
        tar_sde_table = os.path.abspath(tar_sde + "\\" + src_table)
        src_table_name = src_table.split(".")[-1]
        # if the table already exists in the target, optionally delete it first
        for tar_table in tar_tables:
            tar_table_name = tar_table.split(".")[-1]
            if (str(src_table_name) == str(tar_table_name)) and (over_write is True):
                del_tar_sde_table = os.path.abspath(tar_sde + "\\" + tar_table)
                self.del_element(del_tar_sde_table)
        try:
            arcpy.Copy_management(in_data=src_sde_table,
                                  out_data=tar_sde_table,
                                  data_type="Table")
            suclist.append(src_table_name)
        except:
            failist.append(src_table_name)
    return suclist, failist
def update_tfl_overview():
    """Makes a backup copy of TFL Overview in staging then deletes the previous
    backup. Takes any TFL Poly features tagged as current view, addition or
    replacement, merges them, and adds them to the new TFL overview in staging.
    Updates the required fields on the new features"""
    # create a feature layer that merges the addition and current view polygons -
    # dissolve to a single feature
    where_clause = "(Poly_Type='Addition') OR (Poly_Type='Current_View') OR (Poly_Type='Replacement')"
    boundary_fl = arcpy.MakeFeatureLayer_management(tfl_poly, 'TFL_Boundary_all',
                                                    where_clause)
    arcpy.Dissolve_management('TFL_Boundary_all', 'TFL_Boundary_dissolve',
                              'FOREST_FILE_ID')
    arcpy.Delete_management(boundary_fl)

    # make a backup copy of the staging overview dataset - delete the old backup first
    arcpy.Delete_management(staging_overview + '_BACKUP')
    arcpy.Copy_management(staging_overview, staging_overview + '_BACKUP')

    # delete the features for the current TFL from the overview
    overview_fl = arcpy.MakeFeatureLayer_management(
        staging_overview, 'TFL_Overview_FL',
        "FOREST_FILE_ID = '" + forest_file_id + "'")
    arcpy.DeleteFeatures_management('TFL_Overview_FL')

    # append the new overview
    arcpy.Append_management('TFL_Boundary_dissolve', staging_overview, 'NO_TEST')

    # update the licensee in the overview - use the lookup table to find it
    licensee_lookup = r'\\spatialfiles.bcgov\ilmb\dss\projects\Mflnro\FADM_Tree_Farm_Licences\TFL_templates\data\TFL_Lookup_Tables.gdb\Licensee_Lookup'
    where_clause = "FOREST_FILE_ID = '" + forest_file_id + "'"
    fields = ['FOREST_FILE_ID', 'LICENCEE']
    with arcpy.da.SearchCursor(licensee_lookup, fields, where_clause) as cursor:
        for row in cursor:
            licensee = row[1]

    fields = ['FOREST_FILE_ID', 'LICENCEE', 'TFL_TYPE', 'FEATURE_CLASS_SKEY',
              'WHEN_UPDATED', 'WHO_UPDATED']
    with arcpy.da.UpdateCursor(staging_overview, fields, where_clause) as cursor:
        for row in cursor:
            row[1] = licensee
            row[2] = 'See Licence'
            row[3] = 830
            row[4] = submitted_timestamp
            row[5] = 'GeoBC'
            cursor.updateRow(row)

    arcpy.Delete_management(input_gdb + os.sep + 'TFL_Boundary_dissolve')
    arcpy.Delete_management(overview_fl)
    arcpy.AddMessage('Updated TFL Overview in Staging folder')
    print('Updated TFL Overview in Staging folder')
def copyInputs(inputFolder, historicBankfull, modernBankfull, modernCenterline,
               historicCenterline, reachBreak):
    """
    Puts the inputs in the proper folder structure
    :param inputFolder: Where to put everything
    :param historicBankfull: A polygon with the historic bankfull value
    :param modernBankfull: A polygon with the modern bankfull value
    :param modernCenterline: The centerline of the modern bankfull polygon
    :param historicCenterline: The centerline of the historic bankfull polygon
    :param reachBreak: A series of lines that tell us where to break the reaches
    :return: A tuple with the paths to the copies of the inputs
    """
    historicBankfullFolder = makeFolder(inputFolder, "01_HistoricBankfullSegmented")
    historicBankfullCopy = os.path.join(historicBankfullFolder,
                                        os.path.basename(historicBankfull))
    arcpy.Copy_management(historicBankfull, historicBankfullCopy)

    modernBankfullFolder = makeFolder(inputFolder, "02_ModernBankfullSegmented")
    modernBankfullCopy = os.path.join(modernBankfullFolder,
                                      os.path.basename(modernBankfull))
    arcpy.Copy_management(modernBankfull, modernBankfullCopy)

    modernCenterlineFolder = makeFolder(inputFolder, "03_ModernCenterline")
    modernCenterlineCopy = os.path.join(modernCenterlineFolder,
                                        os.path.basename(modernCenterline))
    arcpy.Copy_management(modernCenterline, modernCenterlineCopy)

    historicCenterlineFolder = makeFolder(inputFolder, "04_HistoricCenterline")
    historicCenterlineCopy = os.path.join(historicCenterlineFolder,
                                          os.path.basename(historicCenterline))
    arcpy.Copy_management(historicCenterline, historicCenterlineCopy)

    reachBreakCopy = None
    if reachBreak:
        reachBreakFolder = makeFolder(inputFolder, "05_ReachBreaks")
        reachBreakCopy = os.path.join(reachBreakFolder, os.path.basename(reachBreak))
        # polygons are converted to their outlines; lines are copied as-is
        if arcpy.Describe(reachBreak).shapeType == "Polygon":
            arcpy.FeatureToLine_management(reachBreak, reachBreakCopy)
        else:
            arcpy.Copy_management(reachBreak, reachBreakCopy)

    return (historicBankfullCopy, modernBankfullCopy, modernCenterlineCopy,
            historicCenterlineCopy, reachBreakCopy)
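# Hedged usage sketch for copyInputs: all paths below are hypothetical.
# reachBreak may be passed as None, in which case the fifth element of the
# returned tuple is also None.
def _demo_copyInputs():
    copies = copyInputs(r"C:\project\Inputs",
                        r"C:\data\historic_bankfull.shp",
                        r"C:\data\modern_bankfull.shp",
                        r"C:\data\modern_centerline.shp",
                        r"C:\data\historic_centerline.shp",
                        None)  # no reach breaks
    (historicBankfull, modernBankfull, modernCenterline,
     historicCenterline, reachBreaks) = copies
    return copies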
def main(in_data, out_data):
    # Overwrite the output if it already exists
    env.overwriteOutput = True
    env.workspace = in_data
    for dataset in arcpy.ListDatasets():
        try:
            message("Copying dataset " + "\"" + dataset + "\"" + " to " + out_data, 0)
            arcpy.Copy_management(dataset, out_data + os.sep + dataset)
        except arcpy.ExecuteError:
            message(arcpy.GetMessages(2), 2)
    for fc in arcpy.ListFeatureClasses():
        try:
            message("Copying feature class " + "\"" + fc + "\"" + " to " + out_data, 0)
            arcpy.Copy_management(fc, out_data + os.sep + fc)
        except arcpy.ExecuteError:
            message(arcpy.GetMessages(2), 2)
    for table in arcpy.ListTables():
        try:
            message("Copying table " + "\"" + table + "\"" + " to " + out_data, 0)
            arcpy.Copy_management(table, out_data + os.sep + table)
        except arcpy.ExecuteError:
            message(arcpy.GetMessages(2), 2)
    # loop variables may be undefined if a list was empty
    try:
        del dataset, fc, table
    except:
        pass
def main(in_file, out_file):
    temp_file = get_basename(in_file)[0] + '_2shp.shp'
    if arcpy.Exists(temp_file):
        arcpy.Delete_management(temp_file)
    field = "VALUE"
    # Execute RasterToPolygon
    arcpy.RasterToPolygon_conversion(in_file, temp_file, "NO_SIMPLIFY", field)
    if arcpy.Exists(out_file):
        arcpy.Delete_management(out_file)
    # work around spaces in the output path
    arcpy.Copy_management(temp_file, out_file)
def arcgis_zstat_poly_analysis(self, logger=defaultLogger):
    self.string_args['ext'] = 'dbf'
    buffer_file = (BUFFERS_FOLDER + BUFFER_FILE).format(**self.string_args)
    field1 = 'NID'
    for [band_name, band_folder] in self.band_parameters:
        self.string_args['band'] = band_name
        output_all = (BANDS_DBF_FOLDER + BANDS_FILE_ALL).format(**self.string_args)
        output_final = (BANDS_DBF_FOLDER + BANDS_FILE).format(**self.string_args)
        field2 = band_name
        # skip bands that have already been processed
        if (not os.path.exists(output_final) and
                not os.path.exists(output_final.replace('dbf', 'csv'))):
            print('Processing band {} of scene {scene}'.format(band_name,
                                                               **self.string_args))
            # write a marker file so concurrent runs do not pick up this band
            with open(PROCESS_FILE.format(**self.string_args), 'w') as f:
                f.write(output_final)
            arcpy.gp.ZonalStatisticsAsTable_sa(buffer_file, field1, band_folder,
                                               output_all, "DATA", 'ALL')
            arcpy.Copy_management(output_all, output_final, "")
            # store the zonal mean in a field named after the band
            arcpy.AddField_management(output_final, field2, 'DOUBLE', "", "", "",
                                      "", "NULLABLE", "NON_REQUIRED", "")
            arcpy.CalculateField_management(output_final, field2, '[MEAN]', "VB", "")
            fields_to_drop = (
                "AREA;MIN;MAX;MEAN;SUM;FID_CARBBA;FID_SAMPLE;Year;Month;Day;" +
                "Source;Date_freef;Latitude_s;Longitude_;SubSite;Lat_number;" +
                "LAKENAME_1;LAKENAME_2;SHAPE_LENG;SHAPE_AREA;ID;GRIDCODE;AREA_1;" +
                "NID_1;ID_1;GRIDCODE_1;NID_12;ZONE-CODE")
            arcpy.DeleteField_management(output_final, fields_to_drop)
            os.remove(PROCESS_FILE.format(**self.string_args))
    # remove any leftover marker file
    if os.path.exists(PROCESS_FILE.format(**self.string_args)):
        os.remove(PROCESS_FILE.format(**self.string_args))
    return None