def download(stringDateNow, stringTimeNow, paramFN, paramDL):
    '''Download NetCDF data for the given date/time and load it into the
    Operational Weather and Wind mosaic datasets.

    stringDateNow -- date string used to build output paths and the source URL
    stringTimeNow -- time string (only echoed in debug output here)
    paramFN       -- filename parameter forwarded to makeOutputFilePath
    paramDL       -- download parameter forwarded to makeSourceURLPath

    Relies on module-level configuration: DEBUG, topFolder, tls, gdb,
    NetCDFData, opVariables, windVariables, geoExtent, timeDimension,
    REMOVE_EXISTING_RASTERS.
    '''
    if DEBUG:
        print ("datetime to use: %s, %s" % (stringDateNow, stringTimeNow))
    # Import required Multidimensional tools (provides OPeNDAPtoNetCDF_mds)
    tbxMST = os.path.join(topFolder, tls, r"MultidimensionSupplementalTools\Multidimension Supplemental Tools.pyt")
    if DEBUG:
        print ("Importing %s" % tbxMST)
    arcpy.ImportToolbox(tbxMST)
    # Get target NetCDF data file names; delete stale copies so the
    # OPeNDAP tool can write fresh output.
    outputOpDataFile, outputWindDataFile = makeOutputFilePath(topFolder, NetCDFData, stringDateNow, paramFN)
    if os.path.exists(outputOpDataFile):
        print("removing existing %s" % outputOpDataFile)
        os.remove(outputOpDataFile)
    if os.path.exists(outputWindDataFile):
        print("removing existing %s" % outputWindDataFile)
        os.remove(outputWindDataFile)
    # Get source URL path
    in_url = makeSourceURLPath(stringDateNow, paramDL)
    # Run OPeNDAP to NetCDF tool for the operational weather variables
    if DEBUG:
        print("in_url: %s" % in_url)
        print("variable: %s" % opVariables)
        print("dimension: %s" % timeDimension )
    print ("OPeNDAP Tool run for Operational Weather variables...")
    arcpy.OPeNDAPtoNetCDF_mds(in_url, opVariables, outputOpDataFile, geoExtent, timeDimension, "BY_VALUE")
    # Run OPeNDAP to NetCDF tool for the wind variables
    print ("OPeNDAP Tool run for Wind variables...")
    arcpy.OPeNDAPtoNetCDF_mds(in_url, windVariables, outputWindDataFile, geoExtent, timeDimension, "BY_VALUE")
    targetOpDataMosaic = os.path.join(topFolder, gdb, r"OperationalWeather.gdb\OperationalData")
    targetWindDataMosaic = os.path.join(topFolder, gdb, r"OperationalWeather.gdb\OperationalWind")
    # Remove Rasters From Mosaic Dataset
    if REMOVE_EXISTING_RASTERS:
        print ("Removing existing rasters from Operational Weather...")
        # NOTE(review): this call keeps boundary/overviews/cell sizes intact
        # (NO_* flags) while the Wind call below updates/deletes them —
        # confirm the asymmetry is intentional.
        arcpy.RemoveRastersFromMosaicDataset_management(targetOpDataMosaic, "OBJECTID >=0", "NO_BOUNDARY", "NO_MARK_OVERVIEW_ITEMS", "NO_DELETE_OVERVIEW_IMAGES", "NO_DELETE_ITEM_CACHE", "REMOVE_MOSAICDATASET_ITEMS", "NO_CELL_SIZES")
        print ("Removing existing rasters from Wind...")
        arcpy.RemoveRastersFromMosaicDataset_management(targetWindDataMosaic, "OBJECTID >= 0", "UPDATE_BOUNDARY", "MARK_OVERVIEW_ITEMS", "DELETE_OVERVIEW_IMAGES", "DELETE_ITEM_CACHE", "REMOVE_MOSAICDATASET_ITEMS", "UPDATE_CELL_SIZES")
    # Add Rasters To Mosaic Dataset (only *.nc files, duplicates allowed)
    print ("Adding new rasters from Operational Weather...")
    arcpy.AddRastersToMosaicDataset_management(targetOpDataMosaic, "NetCDF", outputOpDataFile, "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS", "", "0", "1500", "", "*.nc", "SUBFOLDERS", "ALLOW_DUPLICATES", "NO_PYRAMIDS", "NO_STATISTICS", "NO_THUMBNAILS", "", "NO_FORCE_SPATIAL_REFERENCE")
    print ("Adding new rasters from Wind...")
    arcpy.AddRastersToMosaicDataset_management(targetWindDataMosaic, "NetCDF", outputWindDataFile, "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS", "", "0", "1500", "", "*.nc", "SUBFOLDERS", "ALLOW_DUPLICATES", "NO_PYRAMIDS", "NO_STATISTICS", "NO_THUMBNAILS", "", "NO_FORCE_SPATIAL_REFERENCE")
    return
def _addRasterToMosaicDataset(self, in_raster):
    """Load *in_raster* into the configured mosaic dataset and log the tool status."""
    cfg = self.add_raster_to_mosaic_config
    # Optional tool parameters, in the positional order the tool expects;
    # a missing config key falls back to '' (tool default).
    optional_keys = (
        'update_cellsize_ranges', 'update_boundary', 'update_overviews',
        'maximum_pyramid_levels', 'maximum_cell_size', 'minimum_dimension',
        'spatial_reference', 'filter', 'sub_folder', 'duplicate_items_action',
        'build_pyramids', 'calculate_statistics', 'build_thumbnails',
        'operation_description',
    )
    optional_args = [cfg.get(key, '') for key in optional_keys]
    result = arcpy.AddRastersToMosaicDataset_management(
        self.raster_mosaic_dataset.fullpath,
        cfg['raster_type'],
        in_raster,
        *optional_args
    )
    self.debug_logger("AddRastersToMosaicDataset_management status", result.status)
def make_mosaic_from_tiles(dem_tiles_folder, mosaic_name, geodatabase, coordinate_system, make_gdb=True, export_to_raster=False):
    """Build a single-band mosaic dataset from DEM tiles.

    Creates the file GDB when missing (if make_gdb), creates the mosaic
    dataset, loads the tiles with overview updating, and optionally exports
    the mosaic to a 32-bit float raster named "<mosaic_name>_export".
    """
    if not arcpy.Exists(geodatabase) and make_gdb is True:
        gdb_folder, gdb_name = os.path.split(geodatabase)
        arcpy.CreateFileGDB_management(gdb_folder, gdb_name)

    print("Making Mosaic Dataset")
    arcpy.CreateMosaicDataset_management(geodatabase, mosaic_name, coordinate_system, num_bands=1)
    target_dataset = os.path.join(geodatabase, mosaic_name)

    print("Adding Tiles to Dataset")
    arcpy.AddRastersToMosaicDataset_management(
        target_dataset,
        "Raster Dataset",
        dem_tiles_folder,
        update_overviews="UPDATE_OVERVIEWS")

    if export_to_raster:
        arcpy.MosaicToNewRaster_management(
            target_dataset,
            geodatabase,
            "{}_export".format(mosaic_name),
            pixel_type="32_BIT_FLOAT",
            number_of_bands=1)
def add_raster_to_MDS(self, mds, folder):
    """Add every raster dataset under *folder* (subfolders included) to mosaic *mds*.

    Duplicates are overwritten; no overviews, pyramids, statistics or
    thumbnails are built.
    """
    tool_args = (
        mds, "Raster Dataset", folder,
        "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS",
        "", "0", "1500", "", "",
        "SUBFOLDERS", "OVERWRITE_DUPLICATES",
        "NO_PYRAMIDS", "NO_STATISTICS", "NO_THUMBNAILS",
        "", "NO_FORCE_SPATIAL_REFERENCE",
    )
    arcpy.AddRastersToMosaicDataset_management(*tool_args)
    return None
def main(): # Parse commandline arguments parser = argparse.ArgumentParser(description='make mosaics') parser.add_argument( '--geodatabase', '-g', required=True, help='path to geodatabase where mosaics will be created') args = parser.parse_args() for mosaic in ['tcd', 'area', 'loss', 'biomass']: print "building mosaic for {}".format(mosaic) path_dict = { 'tcd': r'S:\treecoverdensity_2000', 'area': r'S:\area_tiles', 'loss': r'S:\lossdata_2001_2014', 'biomass': r'S:\biomass' } out_cs = arcpy.SpatialReference(4326) arcpy.CreateMosaicDataset_management(args.geodatabase, mosaic, out_cs) # add rasters mosaic_name = os.path.join(args.geodatabase, mosaic) rastype = "Raster Dataset" path_to_files = path_dict[mosaic] arcpy.AddRastersToMosaicDataset_management(mosaic_name, rastype, path_to_files)
def create_mosaics():
    """Create the file GDB and a Hansen loss mosaic for Amazonia, then clip it.

    Relies on module-level names: loss_tile, dir, gdb, gdb_path,
    Hansen_mosaic, Hansen_loss_tile_path, legal_Amazon_loss_dir.
    """
    print "Creating geodatabase for loss and fire mosaics in Amazonia..."
    # Match the mosaic's coordinate system to the source loss tile.
    out_coor_system = arcpy.Describe(loss_tile).spatialReference
    print " Creating gdb..."
    arcpy.CreateFileGDB_management(dir, gdb)
    print " Creating Hansen loss mosaic..."
    arcpy.CreateMosaicDataset_management("{}.gdb".format(gdb_path), Hansen_mosaic, out_coor_system, num_bands="1", pixel_type="8_BIT_UNSIGNED")
    print " Adding Hansen loss tiles to mosaic..."
    arcpy.AddRastersToMosaicDataset_management(
        "{0}.gdb/{1}".format(gdb_path, Hansen_mosaic),
        raster_type="Raster Dataset",
        input_path=Hansen_loss_tile_path)
    # I haven't actually tested this code out. It's based on the Python snippet from manually clipping, so it might
    # not work as I've modified it here. At least, the rectangle arguments need to change.
    print " Clipping Hansen loss to Brazil boundary..."
    # Clip to the PRODES template geometry; 256 marks NoData in the 8-bit output.
    arcpy.Clip_management(
        in_raster="{0}.gdb/{1}".format(gdb_path, Hansen_mosaic),
        rectangle= "-73.9783164486978 -18.0406669808439 -43.9135843925793 5.27136996674568",
        out_raster=legal_Amazon_loss_dir,
        in_template_dataset="prodes_full_extent_reproj",
        nodata_value="256",
        clipping_geometry="ClippingGeometry",
        maintain_clipping_extent="NO_MAINTAIN_EXTENT")
def addRastersToMosaicDataset(gdbPath, pluginName, prjFile, tiffFileName):
    """Store a raster in a mosaic dataset (creating GDB/mosaic as needed).

    gdbPath      -- folder/path of the file geodatabase
    pluginName   -- mosaic dataset name inside the GDB
    prjFile      -- projection file used when creating the mosaic dataset
    tiffFileName -- raster file to load into the mosaic

    Best-effort: any failure is logged and swallowed (the original used a
    bare ``except: return`` that hid all errors; the contract of never
    raising is preserved, but the traceback is now printed).
    """
    try:
        createFileGDB(gdbPath)
        createMosaicDataset(gdbPath, pluginName, prjFile)
        mdname = gdbPath + "/" + pluginName
        rastype = "Raster Dataset"
        inpath = tiffFileName
        updatecs = "UPDATE_CELL_SIZES"
        updatebnd = "UPDATE_BOUNDARY"
        updateovr = "NO_OVERVIEWS"
        maxlevel = "#"
        maxcs = "0"
        maxdim = "1500"
        spatialref = "#"
        inputdatafilter = ""
        subfolder = "SUBFOLDERS"
        duplicate = "ALLOW_DUPLICATES"
        buildpy = "NO_PYRAMIDS"
        calcstats = "NO_STATISTICS"
        buildthumb = "NO_THUMBNAILS"
        comments = "#"
        forcesr = "#"
        # Add the raster to the mosaic dataset
        arcpy.AddRastersToMosaicDataset_management(
            mdname, rastype, inpath, updatecs, updatebnd, updateovr,
            maxlevel, maxcs, maxdim, spatialref, inputdatafilter, subfolder,
            duplicate, buildpy, calcstats, buildthumb, comments, forcesr)
    except Exception:
        # Record what went wrong instead of silently discarding it.
        import traceback
        traceback.print_exc()
        return
def extract_loss(scratch, lossyearmosaic, country_shapefile_int, iso, scratch_gdb):
    """Extract the loss-year mosaic for one country into per-feature tifs and
    collect them in a new "<iso>_extract" mosaic dataset.

    scratch               -- scratch folder for intermediate tifs
    lossyearmosaic        -- source loss-year raster/mosaic to mask
    country_shapefile_int -- polygon features carrying an ISO attribute
    iso                   -- country code selecting which features to extract
    scratch_gdb           -- file GDB in which the result mosaic is created
    Returns the CreateMosaicDataset result object for the new mosaic.
    """
    arcpy.CheckOutExtension("Spatial")
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = scratch_gdb
    arcpy.env.scratchWorkspace = scratch
    mosaic_name = iso + "_extract"
    sr = 4326  # WGS84
    iso_loss_mosaic = arcpy.CreateMosaicDataset_management(scratch_gdb, mosaic_name, sr)
    fields = ['ISO', 'SHAPE@']
    shapecount = 0
    with arcpy.da.SearchCursor(country_shapefile_int, fields) as cursor:
        for row in cursor:
            # shapecount numbers every cursor row (not only matches) so the
            # extracted tif names are unique per input feature.
            shapecount += 1
            iso_row = row[0]
            geometry = row[1]
            if iso_row == iso:
                country_loss_30tcd = arcpy.sa.ExtractByMask(lossyearmosaic, geometry)
                extracted_folder = os.path.join(scratch, "extracted_tifs")
                if not os.path.exists(extracted_folder):
                    os.mkdir(extracted_folder)
                extracted_tif = os.path.join(extracted_folder, "{0}_{1}.tif".format(iso, shapecount))
                country_loss_30tcd.save(extracted_tif)
                # Re-write with an explicit nodata value of 0 via gdal_translate.
                extracted_tif_nd = extracted_tif.replace(".tif", "_nd.tif")
                cmd = ["gdal_translate", "-a_nodata", "0", extracted_tif, extracted_tif_nd]
                subprocess.check_call(cmd)
    # arc having trouble deleting pre-nodata files, so just adding nd to mosaic
    # NOTE(review): extracted_folder is only bound when at least one feature
    # matched iso — confirm callers guarantee a match.
    nd_tifs = glob.glob(os.path.join(extracted_folder, "{}*nd*".format(iso)))
    for tif in nd_tifs:
        arcpy.AddRastersToMosaicDataset_management(iso_loss_mosaic, "Raster Dataset", tif)
    return iso_loss_mosaic
def mosaic_ndwi(self, gcs):
    """For each configured file extension, mosaic NDWI rasters into a new
    file GDB and export the mosaic as a 32-bit float .tif copy.

    gcs -- geographic coordinate system applied to every mosaic dataset.
    Reads instance config: main_dir, subfolder_1, subfolder_3, file_extension.
    """
    self.gcs = gcs
    for ext in self.file_extension:
        filters = ext
        filters_new = "*" + filters  # wildcard filter passed to the Add tool
        # Creates a file geodatabase (name derived from the extension stem)
        geodatabase_file_name = 'NDWI_GDB' + '_' + filters[:-4] + '.gdb'
        print(geodatabase_file_name)
        # Creates a raster dataset with .tif extension
        mosaiced_raster_name = 'NDWI_Mosaiced' + '_' + filters[:-4] + '.tif'
        print(mosaiced_raster_name)
        # 1) Creates a file geodatabase in a folder
        print('Creating Geodatabase file: {} of {} ... ^_^'.format(
            ext, self.file_extension))
        output_dir = os.path.join(self.main_dir, self.subfolder_1, self.subfolder_3)
        # output_dir_serach = os.path.split(output_dir)[0]
        print('Output_directory {}:'.format(output_dir))
        arcpy.CreateFileGDB_management(
            output_dir, geodatabase_file_name)  # Creates gdb file
        print('Done!^_^')
        # 2) Creates an empty mosaic dataset in a file geodatabase
        in_workspace = os.path.join(
            output_dir, geodatabase_file_name)  # Path to the geodatabase
        in_mosaicdataset_name = 'NDWI_RasterDataset' + '_' + filters[:-4]
        NumberOfBand = "1"
        PixelType = "32_BIT_FLOAT"  # Pixel type can be changed
        product_definition = "NONE"
        Wavelength = ""
        print('Creating an empty mosaic dataset: {} of {} ... ^_^'.format(
            ext, self.file_extension))
        arcpy.CreateMosaicDataset_management(in_workspace, in_mosaicdataset_name,
                                             self.gcs, NumberOfBand, PixelType,
                                             product_definition, Wavelength)
        print('Done!^_^')
        # 3) Add raster dataset to a mosaic dataset from many sources, including a file, folder, raster catalog, table, or web service.
        in_mosaic_dataset = os.path.join(in_workspace, in_mosaicdataset_name)
        print(
            'Adding rasters to an empty mosaic dataset: {} of {} ... ^_^'.
            format(ext, self.file_extension))
        arcpy.AddRastersToMosaicDataset_management(in_mosaic_dataset, "Raster Dataset", output_dir, \
            "UPDATE_CELL_SIZES","UPDATE_BOUNDARY","NO_OVERVIEWS","2","#","#",'#', filters_new, "SUBFOLDERS",\
            "EXCLUDE_DUPLICATES","NO_PYRAMIDS","NO_STATISTICS","NO_THUMBNAILS","#","NO_FORCE_SPATIAL_REFERENCE")
        print('Done! ^_^')
        # 4) Creates a folder to copy NDWI mosaiced dataset
        in_raster = os.path.join(in_workspace, in_mosaicdataset_name)
        ndwi = os.path.join(output_dir, 'NDWI_Mosaic' + '_' + filters[:-4])
        if not os.path.exists(ndwi):
            os.makedirs(ndwi)
        print('Copying raster: {} of {} ... ^_^'.format(
            ext, self.file_extension))
        out_raster = os.path.join(ndwi, mosaiced_raster_name)
        # Export the GDB mosaic to a standalone 32-bit float tif.
        arcpy.CopyRaster_management(in_raster, out_raster, "#", "0", "0",
                                    "NONE", "NONE", "32_BIT_FLOAT", "NONE",
                                    "NONE")
        print('Done! ^_^ ^_^ ^_^')
def create_mosaic(country_loss_30tcd, scratch_gdb):
    """Create a WGS84 mosaic dataset in *scratch_gdb* and load the
    country-loss raster into it; return the mosaic's full path."""
    spatial_ref = arcpy.SpatialReference(4326)
    name = "mosaic_country_loss_30tcd"
    full_path = os.path.join(scratch_gdb, name)
    arcpy.CreateMosaicDataset_management(scratch_gdb, name, spatial_ref)
    arcpy.AddRastersToMosaicDataset_management(full_path, "Raster Dataset", country_loss_30tcd)
    return full_path
def addtable(self, stable, exclude_overviews=False, **kwargs):
    """Add the rows of *stable* to this mosaic dataset via the "Table" raster type.

    stable            -- source table-like object; its .fname names the table
    exclude_overviews -- when stable is a MosaicDataset, restrict the add to
                         primary rasters (Category = 1) so overview rasters
                         are not duplicated into this mosaic
    kwargs            -- forwarded to AddRastersToMosaicDataset_management
    """
    tablename = stable.fname
    # isinstance instead of `type(...) is` so subclasses of MosaicDataset
    # are recognized too; plain truthiness instead of `== True`.
    if isinstance(stable, MosaicDataset):
        if exclude_overviews:
            # Category = 1 selects primary rasters only (excludes overviews),
            # and the selection layer's name becomes the source table.
            stable.selectbyattribute("NEW_SELECTION", "Category = 1")
            tablename = stable.mlayer.lname
    arcpy.AddRastersToMosaicDataset_management(self.fname, "Table", tablename, **kwargs)
    DisplayMessages()
def mosaic_dem_cal_slp(self, gdb_name, folder2save_mosaic_ras, mosaiced_dem_ras_name,
                       projection, folder2save_slp_ras, slp_ras_name, z_factor):
    """Mosaic DEM tiles into a GDB dataset, export the mosaic, then derive
    and resample a slope raster from it.

    gdb_name               -- name of the file GDB to create
    folder2save_mosaic_ras -- subfolder name for the exported DEM raster
    mosaiced_dem_ras_name  -- filename for the exported DEM raster
    projection             -- spatial reference for the mosaic (also forced
                              onto the added rasters)
    folder2save_slp_ras    -- subfolder name for the slope outputs
    slp_ras_name           -- filename for the slope raster
    z_factor               -- z factor passed to the Slope tool
    """
    self.gdb_name = gdb_name
    self.folder2save_mosaic_ras = folder2save_mosaic_ras
    self.mosaiced_dem_ras_name = mosaiced_dem_ras_name
    self.projection = projection
    self.folder2save_slp_ras = folder2save_slp_ras
    self.slp_ras_name = slp_ras_name
    self.z_factor = z_factor
    file_path = os.path.join(self.main_dir, self.subfolder_1)
    gdb_file_path = os.path.join(self.main_dir, self.subfolder_1, self.subfolder_3)
    print('Creating geodatabase file... ^__^')
    arcpy.CreateFileGDB_management(gdb_file_path, self.gdb_name)
    print('Creating an empty raster dataset inside geodatabse... ^__^')
    mosaic_dataset_name = 'My_RasterDataset'
    mosaicgdb = os.path.join(gdb_file_path, self.gdb_name)
    print("mosaicgdb", mosaicgdb)
    NumberOfBand = "1"
    PixelType = "16_BIT_SIGNED"  # Pixel type can be changed
    ProductDefinition = "NONE"
    Wavelength = ""
    arcpy.CreateMosaicDataset_management(mosaicgdb, mosaic_dataset_name,
                                         self.projection, NumberOfBand,
                                         PixelType, ProductDefinition,
                                         Wavelength)
    print('Adding DEM rasters into an empty raster dataset... ^__^')
    path_and_nameof_mosaic_dataset = os.path.join(mosaicgdb, mosaic_dataset_name)
    # Load every '*<file_extension>' raster under file_path, forcing the
    # target spatial reference onto the inputs.
    arcpy.AddRastersToMosaicDataset_management(path_and_nameof_mosaic_dataset, "Raster Dataset", file_path,
        "UPDATE_CELL_SIZES","UPDATE_BOUNDARY","NO_OVERVIEWS","2","#","#", self.projection,
        '*'+self.file_extension, "SUBFOLDERS","EXCLUDE_DUPLICATES","NO_PYRAMIDS","NO_STATISTICS",
        "NO_THUMBNAILS","#","FORCE_SPATIAL_REFERENCE")
    in_raster = os.path.join(mosaicgdb, mosaic_dataset_name)
    create_folder = os.path.join(self.main_dir, self.subfolder_1, self.subfolder_3, self.folder2save_mosaic_ras)
    if not os.path.exists(create_folder):
        os.makedirs(create_folder)
    out_raster = os.path.join(create_folder, self.mosaiced_dem_ras_name)
    # NOTE(review): the mosaic is 16_BIT_SIGNED but the export is written as
    # 16_BIT_UNSIGNED — confirm negative elevations cannot occur here.
    arcpy.CopyRaster_management(in_raster, out_raster,
                                "#","#","#","NONE","NONE","16_BIT_UNSIGNED","NONE","NONE")

    def slope_cal():
        # Derive slope (degrees) from the exported DEM, then resample it.
        print('Reading DEM! ^_^')
        read_dem = arcpy.Raster(out_raster)
        print('Caculating slope... ^__^')
        arcpy.CheckOutExtension("spatial")
        slope_raster = Slope(read_dem, "DEGREE", self.z_factor)
        arcpy.CheckInExtension("spatial")
        slope_cal_path = os.path.join(self.main_dir, self.subfolder_1, self.subfolder_3, self.folder2save_slp_ras)
        if not os.path.exists(slope_cal_path):
            os.makedirs(slope_cal_path)
        slope_file = os.path.join(slope_cal_path, self.slp_ras_name)
        print('Writing slope raster... ^__^')
        slope_raster.save(slope_file)
        print('Done! ^__^')
        print('Resampling Slope raster...')
        arcpy.CheckOutExtension("spatial")
        in_ras_resample = arcpy.Raster(slope_file)
        resampled_slope_file = os.path.join(slope_cal_path, 'Resam_' + self.slp_ras_name)
        # Cell size is in decimal degrees (presumably ~10 m at the equator —
        # confirm); BILINEAR suits continuous slope data.
        arcpy.Resample_management(in_ras_resample, resampled_slope_file,
                                  "8.9831528e-05", "BILINEAR")
        arcpy.CheckInExtension("spatial")
        print('Done! ^__^')

    slope_cal()
def consolidate_elevation(folder_path, mosaic):
    """Stage raster datasets found under *folder_path* as .tif copies in a
    fixed elevation folder, then register that folder with the mosaic dataset.

    folder_path -- workspace walked (recursively) for raster datasets
    mosaic      -- path of the mosaic dataset to update
    Returns a job-summary dict: copied/error file lists and mosaic status.
    """
    workspace = folder_path
    out_directory = r'C:\data\elevation'  # staging folder for copied tifs
    mosaic_dataset = mosaic
    walk = arcpy.da.Walk(workspace, topdown=True, datatype="RasterDataset")
    app.logger.info("Discovering Items...")
    copied_files = []
    error_files = []
    for dirpath, dirnames, filenames in walk:
        for filename in filenames:
            # Skip thumbnail rasters; stage everything else.
            if "thumb" not in filename:
                try:
                    in_file = os.path.join(dirpath, filename)
                    out_file = os.path.join(out_directory, filename + ".tif")
                    # Idiomatic truthiness (was `== False`).
                    if not arcpy.Exists(out_file):
                        app.logger.info("Moving {}".format(filename))
                        arcpy.CopyRaster_management(in_file, out_file)
                        copied_files.append(filename)
                        app.logger.info(
                            "Successfully moved {}".format(filename))
                    else:
                        arcpy.AddMessage(
                            " {0} already exists in {1}, passing".format(
                                filename, out_directory))
                except Exception as e:
                    app.logger.error(str(e))
                    error_files.append(filename)
    app.logger.info(
        'Completed copy of data, beginning update of Mosaic Dataset.')
    try:
        # NOTE(review): this argument list carries more positional values
        # than the documented AddRastersToMosaicDataset signature (e.g.
        # "NO_STATISTICS" appears twice) — verify against the installed
        # arcpy version before relying on the trailing options.
        arcpy.AddRastersToMosaicDataset_management(
            mosaic_dataset, "Raster Dataset", out_directory,
            "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "UPDATE_OVERVIEWS", "2",
            "#", "#", "#", "*.tif", "SUBFOLDERS", "EXCLUDE_DUPLICATES",
            "NO_PYRAMIDS", "NO_STATISTICS", "BUILD_THUMBNAILS", "", "",
            "NO_STATISTICS", "", "USE_PIXEL_CACHE")
        mosaic_updated = True
    except Exception as e:
        app.logger.error(str(e))
        mosaic_updated = False
    app.logger.info("Completed update of Mosaic Dataset")
    return {
        "job-type": "upload elevation data",
        "copied-files": copied_files,
        "error-files": error_files,
        "mosaic-path": mosaic,
        "mosaic-updated": mosaic_updated
    }
def addrasters(mdpath, outputws, itematt):
    """Add a downloaded raster to the mosaic dataset and copy image-service
    item attributes onto the newly added row.

    mdpath   -- mosaic dataset path
    outputws -- workspace/folder containing the downloaded raster(s)
    itematt  -- dict of field name -> value from the image service item;
                mutated here (Name/Shape_* keys are removed)
    Failures are reported through arcpy.AddError; no exception escapes.
    """
    try:
        arcpy.AddMessage("Adding the download raster to mosaic dataset...")
        rastype = "Raster Dataset"
        # Add downloaded raster to mosaic dataset, exclude duplicated item
        arcpy.AddRastersToMosaicDataset_management(mdpath, rastype, outputws,
            "", "", "", "", "", "", "", "", "", "EXCLUDE_DUPLICATES")
        # recover field values
        arcpy.AddMessage(
            "Copying field values from Image Service to Mosaic Dataset...")
        # no need to recover name and shape
        del itematt["Name"]
        del itematt["Shape_Length"]
        del itematt["Shape_Area"]
        # find the last row in the mosaic dataset to update
        # NOTE(review): assumes the newly added item has the highest OBJECTID
        # and that GetCount equals that id (no gaps from prior deletes) —
        # confirm.
        whereclause = """OBJECTID=%s""" % arcpy.GetCount_management(
            mdpath).getOutput(0)
        # get date field names (lower-cased for case-insensitive matching)
        mdfields = arcpy.ListFields(mdpath)
        datefieldsName = []
        for mdfield in mdfields:
            if mdfield.type == "Date":
                datefieldsName.append(mdfield.name.lower())
        cursor = arcpy.da.UpdateCursor(mdpath, "*", whereclause)
        # create mosaic dataset field name list to search the index
        mdfieldNamesUpper = cursor.fields
        mdfieldNames = [x.lower() for x in mdfieldNamesUpper]
        lastrow = cursor.next()
        for fieldName in itematt:
            fieldVal = itematt[fieldName]
            # Save the OBJECTID to the OOID field
            if fieldName.lower() == "objectid":
                findex = mdfieldNames.index("ooid")
                lastrow[findex] = fieldVal
            else:
                findex = mdfieldNames.index(fieldName.lower())
                # Recover date field (service dates are epoch milliseconds)
                if fieldName.lower() in datefieldsName:
                    dateval = datetime.datetime.utcfromtimestamp(fieldVal / 1000)
                    lastrow[findex] = dateval
                else:
                    lastrow[findex] = fieldVal
        cursor.updateRow(lastrow)
    except:
        arcpy.AddError("ERROR: Failure in adding output to mosaic dataset")
def add_rasters_to_mosaic(fgdb, mosaic, rasters):
    """Load *rasters* into the mosaic dataset named *mosaic* inside *fgdb*."""
    target = os.path.join(fgdb, mosaic)
    arcpy.AddRastersToMosaicDataset_management(
        in_mosaic_dataset=target,
        raster_type="Raster Dataset",
        input_path=rasters,
        update_overviews="NO_OVERVIEWS",
    )
def update_mosaic(mosaic):
    """Re-scan the QAQC watersheds folder for JPGs whose names contain this
    mosaic's basename and add them (duplicates excluded, overviews updated)."""
    name = path.basename(mosaic)
    source_folder = r'C:\Users\smithn78\Dropbox\CL_HUB_GEO\QAQC\Watersheds\Watersheds_COPY'
    arcpy.AddRastersToMosaicDataset_management(
        mosaic,
        'Raster Dataset',
        source_folder,
        filter='*{}*jpg'.format(name),
        update_overviews='UPDATE_OVERVIEWS',
        sub_folder='SUBFOLDERS',
        duplicate_items_action='EXCLUDE_DUPLICATES')
def add_raster_emd(in_mosaic_dataset, input_path):
    """Add rasters to an empty mosaic dataset.

    Rasters under *input_path* (subfolders included) matching the
    module-level ``filter_ext`` pattern are added; duplicates are excluded.
    Returns the tool result object.
    """
    result = arcpy.AddRastersToMosaicDataset_management(
        in_mosaic_dataset,
        "Raster Dataset",
        input_path,
        "UPDATE_CELL_SIZES",
        "UPDATE_BOUNDARY",
        "NO_OVERVIEWS",
        "2", "#", "#", '#',
        filter_ext,  # module-level filename filter
        "SUBFOLDERS",
        "EXCLUDE_DUPLICATES",
        "NO_PYRAMIDS",
        "NO_STATISTICS",
        "NO_THUMBNAILS",
        "#",
        "NO_FORCE_SPATIAL_REFERENCE")
    return result
def addMosaics(data, dfile):
    ''' Add data to mosaic dataset '''
    print("Adding new data to mosaic")
    try:
        arcpy.AddRastersToMosaicDataset_management(
            data, Raster_Type, dfile,
            "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS",
            "", "0", "1500", "", "*.nc",
            "SUBFOLDERS", "ALLOW_DUPLICATES",
            "NO_PYRAMIDS", "NO_STATISTICS", "NO_THUMBNAILS",
            "", "NO_FORCE_SPATIAL_REFERENCE")
    except NAMDownloadException as e:
        # Implicitly returns None on failure, True on success.
        print("Error in addMosaics: \n" + str(e.tb))
    else:
        return True
def CreateMosaicDataset(self):
    """Create a mosaic dataset and load rasters into it.

    NOTE(review): every input (workspace, mosaic_name, coordinate_sys,
    mosaic_dataset, path_to_files, imagery_spatial_ref) is read from
    enclosing/module scope rather than from self — confirm those names are
    defined before this method runs.
    """
    arcpy.CreateMosaicDataset_management(workspace, mosaic_name,
                                         coordinate_sys, num_bands="",
                                         pixel_type="",
                                         product_definition="NONE",
                                         product_band_definitions="")
    # Load rasters with duplicates allowed; no overviews/pyramids/stats.
    arcpy.AddRastersToMosaicDataset_management(
        mosaic_dataset, "Raster Dataset", path_to_files,
        "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS", "", "0",
        "1500", imagery_spatial_ref, "#", "SUBFOLDERS", "ALLOW_DUPLICATES",
        "NO_PYRAMIDS", "NO_STATISTICS", "NO_THUMBNAILS", "#",
        "NO_FORCE_SPATIAL_REFERENCE")
def create_mosaic_dataset(self):
    """Create the output GDB and mosaic dataset if missing, then load the
    composite rasters into the mosaic.

    Uses instance config: output_gdb, output_mosaic, mosaic_spatial_ref,
    numBands, composite_path, imagery_spatial_ref.
    """
    if not arcpy.Exists(self.output_gdb):
        # BUG FIX: was `os.path.dirnam(output_gdb)` — a misspelled function
        # (AttributeError) with bare names missing `self.` (NameError), so
        # the GDB could never be created.
        arcpy.CreateFileGDB_management(os.path.dirname(self.output_gdb),
                                       os.path.basename(self.output_gdb))
    if not arcpy.Exists(join(self.output_gdb, self.output_mosaic)):
        arcpy.CreateMosaicDataset_management(self.output_gdb,
                                             self.output_mosaic,
                                             self.mosaic_spatial_ref,
                                             num_bands=self.numBands,
                                             pixel_type="",
                                             product_definition="NONE",
                                             product_band_definitions="")
    arcpy.AddRastersToMosaicDataset_management(
        join(self.output_gdb, self.output_mosaic),
        "Raster Dataset",
        self.composite_path,
        "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "UPDATE_OVERVIEWS",
        "", "0", "1500",
        self.imagery_spatial_ref, "#",
        "SUBFOLDERS", "ALLOW_DUPLICATES",
        "NO_PYRAMIDS", "NO_STATISTICS", "NO_THUMBNAILS",
        "#", "NO_FORCE_SPATIAL_REFERENCE")
def add_to_mosaic_dataset(imgfile):
    """Add *imgfile* to the global Mosaic Dataset and stamp its date field.

    The raster whose Name matches the file's basename (extension stripped)
    gets its date field set from the date embedded in that name.

    Required:
        imgfile -- the path to the raster to be added
    Output:
        None
    """
    arcpy.AddRastersToMosaicDataset_management(
        MOSAICDS,
        "Raster Dataset",
        imgfile,
        duplicate_items_action="OVERWRITE_DUPLICATES",
        build_pyramids=True,
        calculate_statistics=True,
    )
    # Loop-invariant: the expected Name value for the raster just added.
    raster_name = os.path.splitext(os.path.basename(imgfile))[0]
    with arcpy.da.UpdateCursor(MOSAICDS, MOSAIC_FIELDS) as cursor:
        for record in cursor:
            if record[0] == raster_name:
                record[1] = strip_raster_date_from_name(record[0])
                cursor.updateRow(record)
def LoadRasters(temp_workspace):
    """Extract non-negative values from every raster in *temp_workspace*,
    save the results to the configured output folder, add them to the
    module-level mosaic dataset, and stamp each row's dateObtained from the
    date embedded in the file name.

    Python 2 code (``except Exception, e``). Uses module-level: myConfig,
    mosaicDS, logging.
    NOTE(review): the outer ``try:`` has no matching except/finally visible
    in this chunk — likely lost in extraction; confirm against the full file.
    """
    try:
        arcpy.CheckOutExtension("Spatial")
        inSQLClause = "VALUE >= 0"  # keep only non-negative cell values
        arcpy.env.workspace = temp_workspace  # E:\Temp\SMAP_Extract\
        arcpy.env.overwriteOutput = True  # Lance added...
        rasters = arcpy.ListRasters()
        for raster in rasters:
            # In case the file is not a valid raster file... add a try / catch exception
            try:
                logging.info('Processing file: %s' % raster)
                print ('Processing file: %s' % raster)
                extract = arcpy.sa.ExtractByAttributes(raster, inSQLClause)
                finalRaster = os.path.join(myConfig['finalTranslateFolder'], raster)
                extract.save(finalRaster)
                arcpy.AddRastersToMosaicDataset_management(mosaicDS, "Raster Dataset", finalRaster,
                                                           "UPDATE_CELL_SIZES", "NO_BOUNDARY", "NO_OVERVIEWS",
                                                           "2", "#", "#", "#", "#", "NO_SUBFOLDERS",
                                                           "EXCLUDE_DUPLICATES", "BUILD_PYRAMIDS",
                                                           "CALCULATE_STATISTICS", "NO_THUMBNAILS",
                                                           "Add Raster Datasets", "#")
                theName = os.path.splitext(raster)[0]
                Expression = "Name= '" + theName + "'"
                rows = arcpy.UpdateCursor(mosaicDS, Expression)  # Establish r/w access to data in the query expression.
                # Date is embedded in the file name at fixed positions.
                year = theName[5:9]  # Lance added...
                month = theName[9:11]
                day = theName[11:13]
                theStartDate = year + "/" + month + "/" + day
                dt_obj = theStartDate  # dt_str.strptime('%Y/%m/%d')
                for r in rows:
                    r.dateObtained = dt_obj  # here the value is being set in the proper field
                    rows.updateRow(r)  # update the values
                extract = None
                del extract, rows
            except Exception, e:
                logging.warning('### Error processing file! SKIPPING! ###, %s' % e)
                print ('### Error processing file! - Skipping... ###')
        del rasters
def mosaicCreator(_sourceGDB, _mosaic, _simulationSteps, _frameInterval):
    """Load the simulation "FrameN" rasters into a mosaic dataset and add a
    TimeStamp field computed from each raster's name.

    _sourceGDB       -- GDB containing the Frame0..FrameN-1 rasters and the mosaic
    _mosaic          -- name of the (pre-existing) mosaic dataset
    _simulationSteps -- total simulation steps (string or int)
    _frameInterval   -- steps per frame; frame count = steps // interval
    Relies on the module-level `getTime` code block for CalculateField.
    """
    mosaicPath = os.path.join(_sourceGDB, _mosaic)
    # BUG FIX: the raster list was built by repeated `+ ';' +` concatenation,
    # producing a spurious leading ';' in the multi-value string; join the
    # frame paths with the ';' separator arcpy expects instead.
    frame_count = int(_simulationSteps) // int(_frameInterval)
    rasters = ';'.join(os.path.join(_sourceGDB, "Frame" + str(i))
                       for i in range(frame_count))
    arcpy.AddMessage("Adding Rasters to Mosaic")
    arcpy.AddRastersToMosaicDataset_management(
        mosaicPath, "Raster Dataset", rasters,
        calculate_statistics="CALCULATE_STATISTICS",
        estimate_statistics="ESTIMATE_STATISTICS")
    arcpy.AddMessage("Adding Time Stamps")
    # Derive each row's timestamp from its raster Name via getTime().
    arcpy.AddField_management(mosaicPath, "TimeStamp", "LONG")
    arcpy.CalculateField_management(mosaicPath, "TimeStamp", "getTime(!Name!)", code_block=getTime)
def ReloadMD():
    """Empty and reload the forecast mosaic dataset, then refresh statistics
    and recalculate its time fields.

    Uses module-level names: inputMD, forecastGDBPath,
    dateForecastImportedField, dateForecastEffectiveFromField,
    dateForecastEffectiveToField, weatherName.
    """
    # Empty the mosaic dataset prior to reloading it
    arcpy.AddMessage("Removing previous forecast data from mosaic dataset...")
    arcpy.RemoveRastersFromMosaicDataset_management(inputMD, "1=1")
    # Add the rasters to the mosaic dataset
    arcpy.AddMessage("Adding new forecast data to mosaic dataset...")
    arcpy.AddRastersToMosaicDataset_management(inputMD, "Raster Dataset", forecastGDBPath)
    # Check something was imported
    result = int(arcpy.GetCount_management(inputMD).getOutput(0))
    if result > 0:
        # Re-calculate statistics on the mosaic dataset
        arcpy.AddMessage("Calculating statistics on the newly loaded mosaic dataset")
        arcpy.CalculateStatistics_management(inputMD)
        # Re-build overviews on the mosaic dataset
        #arcpy.AddMessage("Building overviews on the mosaic dataset")
        #arcpy.BuildOverviews_management(inputMD)
        # Calculate the time fields on the mosaic dataset
        arcpy.AddMessage("Calculating the time fields on the mosaic dataset")
        # Locale-aware %c formatting is used for the date field values.
        locale.setlocale(locale.LC_TIME, '')
        mdLayer = "mdLayer"
        arcpy.MakeMosaicLayer_management(inputMD, mdLayer, "Category = 1")  # Leave out overviews - only calculate fields on primary rasters
        # Imported = "now"; EffectiveFrom is parsed from the raster Name,
        # which must look like "<weatherName>YYYYMMDDTHHMM"; EffectiveTo
        # copies EffectiveFrom.
        arcpy.CalculateField_management(mdLayer, dateForecastImportedField, """time.strftime("%c")""", "PYTHON","#")
        arcpy.CalculateField_management(mdLayer, dateForecastEffectiveFromField, """time.strftime("%c", time.strptime(!Name!,""" + "\"" + weatherName + """%Y%m%dT%H%M"))""", "PYTHON", "#")
        arcpy.CalculateField_management(mdLayer, dateForecastEffectiveToField, "!" + dateForecastEffectiveFromField + "!", "PYTHON", "#")
# ModelBuilder-style script body: build a CTX mosaic from ASU JP2 tiles on a
# Mars equidistant-cylindrical projection, derive footprints, then set the
# mosaic display properties.
# NOTE(review): the aliases below chain names (Footprints2 -> Footprints1 ->
# CTX_Mosaic_NoDATA); CTX_Mosaic_NoDATA, CTX_Mosaic_Rasters, OUTPUT_FOLDER
# and MosaicName must be defined earlier in the file — confirm.
CTX_Mosaic_Footprints1 = CTX_Mosaic_NoDATA
CTX_Mosaic_Footprints2 = CTX_Mosaic_Footprints1
CTX_Final = CTX_Mosaic_Footprints2
CTXmosaic_GBD_gdb = OUTPUT_FOLDER
CTX_Mosaic_Init = CTXmosaic_GBD_gdb
CompleteDownload = "true"
# Expression evaluated at the end to report download completeness.
ExpressionTF = "CompleteDown = \"false\""

# Process: Create File GDB
arcpy.CreateFileGDB_management(OUTPUT_FOLDER, MosaicName, "CURRENT")

# Process: Create Mosaic Dataset (Mars_Equi180, single 8-bit band)
arcpy.CreateMosaicDataset_management(CTXmosaic_GBD_gdb, MosaicName, "PROJCS['Mars_Equi180',GEOGCS['GCS_Mars_2000_Sphere',DATUM['D_Mars_2000_Sphere',SPHEROID['Mars_2000_Sphere_IAU_IAG',3396190.0,0.0]],PRIMEM['Reference_Meridian',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Equidistant_Cylindrical'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',180.0],UNIT['Meter',1.0]];-10669600 -16102700 10000;-100000 10000;-100000 10000;0.001;0.001;0.001;IsHighPrecision", "1", "8_BIT_UNSIGNED", "NONE", "")

# Process: Add Rasters To Mosaic Dataset (only *.JP2 files from the fixed
# download folder; statistics calculated, duplicates excluded)
arcpy.AddRastersToMosaicDataset_management(CTX_Mosaic_Init, "Raster Dataset", "C:\\Users\\thare\\Documents\\GIT\\ArcGIS_Plugins\\CTX_CreateMosaic_fromASU", "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS", "", "0", "1500", "", "*.JP2", "NO_SUBFOLDERS", "EXCLUDE_DUPLICATES", "NO_PYRAMIDS", "CALCULATE_STATISTICS", "NO_THUMBNAILS", "", "NO_FORCE_SPATIAL_REFERENCE")

# Process: Define Mosaic Dataset NoData (band 1 value 0)
arcpy.DefineMosaicDatasetNoData_management(CTX_Mosaic_Rasters, "1", "BAND_1 0", "", "", "NO_COMPOSITE_NODATA")

# Process: BuildFootprints_1 — radiometry-based footprints, values 1-255
arcpy.BuildFootprints_management(CTX_Mosaic_NoDATA, "", "RADIOMETRY", "1", "255", "10", "500", "NO_MAINTAIN_EDGES", "SKIP_DERIVED_IMAGES", "NO_BOUNDARY", "2000", "100", "NONE", "", "20", "0.05")

# Process: BuildFootprints_2 — second pass, updates the boundary
arcpy.BuildFootprints_management(CTX_Mosaic_Footprints1, "", "NONE", "1", "254", "10", "5000", "NO_MAINTAIN_EDGES", "SKIP_DERIVED_IMAGES", "UPDATE_BOUNDARY", "2000", "100", "NONE", "", "20", "0.05")

# Process: Set Mosaic Dataset Properties (seamline mosaicking, blended)
arcpy.SetMosaicDatasetProperties_management(CTX_Mosaic_Footprints2, "4100", "15000", "None;LZ77;JPEG;LERC", "None", "75", "0", "BILINEAR", "CLIP", "FOOTPRINTS_MAY_CONTAIN_NODATA", "CLIP", "NOT_APPLY", "", "NONE", "Center;NorthWest;LockRaster;ByAttribute;Nadir;Viewpoint;Seamline;None", "Seamline", "", "", "ASCENDING", "BLEND", "10", "600", "300", "1000", "0.8", "", "FULL", "", "DISABLED", "", "", "", "", "20", "1000", "GENERIC", "1")

# Process: Calculate Value
arcpy.CalculateValue_management(ExpressionTF, "", "Boolean")
def AddRasters(self):
    """Add source rasters to every mosaic dataset described in self.sMdNameList.

    For each (sourceID -> config) entry this method:
      1. verifies the target mosaic dataset exists under self.m_base.m_geoPath,
      2. optionally rewrites the ART (raster type) file's workspace/dataset
         values when self.m_base.m_art_apply_changes is set,
      3. calls arcpy.AddRastersToMosaicDataset_management once per entry in
         the config's 'addraster' list,
      4. invokes the registered callback functions after each successful add.

    Returns:
        True on success; False when the mosaic dataset path is missing, a
        callback returns False, or no new items were added overall.
    """
    self.log("Adding rasters:", self.const_general_text)
    if not self.sMdNameList:
        # Nothing configured: the original code would hit a NameError on the
        # final MDName check below; treat "nothing to do" as success instead.
        return True
    for sourceID in self.sMdNameList:
        MDName = self.sMdNameList[sourceID]['md']
        fullPath = os.path.join(self.m_base.m_geoPath, MDName)
        if not arcpy.Exists(fullPath):
            self.log("Path doesn't exist: %s" % (fullPath), self.const_critical_text)
            return False
        # Highest OBJECTID before any adds for this mosaic dataset; used by
        # the final "did anything get added at all" check.
        self.m_base.m_last_AT_ObjectID = self.getLastObjectID(self.m_base.m_geoPath, MDName)
        for hshAddRaster in self.sMdNameList[sourceID]['addraster']:
            try:
                self.log("\tUsing mosaic dataset/ID:" + MDName + '/' +
                         hshAddRaster['dataset_id'], self.const_general_text)
                rasterType = 'Raster Dataset'
                if 'art' in hshAddRaster:  # was dict.has_key() — removed in Python 3
                    rasterType = hshAddRaster['art']
                    self.log("\tUsing ART for " + MDName.upper() + ': ' + rasterType,
                             self.const_general_text)
                    if self.m_base.m_art_apply_changes:
                        art_doc = minidom.parse(rasterType)
                        if self.m_base.updateART(art_doc, self.m_base.m_art_ws, self.m_base.m_art_ds):
                            self.log("\tUpdating ART (Workspace, RasterDataset) values with (%s, %s) respectively."
                                     % (self.m_base.m_art_ws, self.m_base.m_art_ds), self.const_general_text)
                            # 'with' guarantees the ART file is closed even if
                            # the write raises (original leaked the handle).
                            with open(rasterType, "w") as art_file:
                                art_file.write(art_doc.toxml())
                set_filter = hshAddRaster.get('filter', '')
                if set_filter == '*':
                    set_filter = ''  # '*' means "no filter" to the GP tool
                # Record pre-add state so callbacks can compute what was added.
                objID = self.getLastObjectID(self.m_base.m_geoPath, MDName)
                self.sMdNameList[sourceID]['pre_AddRasters_record_count'] = objID
                self.sMdNameList[sourceID]['Dataset_ID'] = hshAddRaster['dataset_id']
                self.log('Adding items..')
                arcpy.AddRastersToMosaicDataset_management(
                    fullPath, rasterType, hshAddRaster['data_path'],
                    self.GetValue(hshAddRaster, 'update_cellsize_ranges'),
                    self.GetValue(hshAddRaster, 'update_boundary'),
                    self.GetValue(hshAddRaster, 'update_overviews'),
                    self.GetValue(hshAddRaster, 'maximum_pyramid_levels'),
                    self.GetValue(hshAddRaster, 'maximum_cell_size'),
                    self.GetValue(hshAddRaster, 'minimum_dimension'),
                    self.GetValue(hshAddRaster, 'spatial_reference'),
                    set_filter,
                    self.GetValue(hshAddRaster, 'sub_folder'),
                    self.GetValue(hshAddRaster, 'duplicate_items_action'),
                    self.GetValue(hshAddRaster, 'build_pyramids'),
                    self.GetValue(hshAddRaster, 'calculate_statistics'),
                    self.GetValue(hshAddRaster, 'build_thumbnails'),
                    self.GetValue(hshAddRaster, 'operation_description'),
                    self.GetValue(hshAddRaster, 'force_spatial_reference'))
                newObjID = self.getLastObjectID(self.m_base.m_geoPath, MDName)
                if newObjID <= objID:
                    self.log('No new mosaic dataset item was added for Dataset ID (%s)' % (hshAddRaster['dataset_id']))
                    continue
                for callback_fn in self.callback_functions:
                    # Deliberately compare to False (not falsy): a callback
                    # returning None counts as success.
                    if callback_fn(self.m_base.m_geoPath, sourceID, self.sMdNameList[sourceID]) == False:
                        return False
            except Exception as inst:
                # Best-effort per 'addraster' entry: log and continue with the
                # next one (overall failure is decided by the final check).
                self.log(str(inst), self.const_warning_text)
                self.log(arcpy.GetMessages(), self.const_warning_text)
    # Final sanity check. NOTE(review): MDName here is from the *last* loop
    # iteration, mirroring the original behavior.
    newObjID = self.getLastObjectID(self.m_base.m_geoPath, MDName)
    if newObjID <= self.m_base.m_last_AT_ObjectID:
        self.log('No new mosaic dataset items added to dataset (%s). Verify the input data path/raster type is correct' % (MDName), self.const_critical_text)
        self.log(arcpy.GetMessages(), self.const_critical_text)
        return False
    return True
arcpy.AddMessage("File geodatabase created!") # Process: Create Mosaic Dataset arcpy.CreateMosaicDataset_management(gdb_path + "\\" + gdb_name, mosaic_dataset, mosaic_cs, "1", "32_BIT_FLOAT", "NONE", "") arcpy.AddMessage("Mosaic dataset created!") arcpy.AddMessage("Adding rasters to mosaic dataset") # Process: Add Rasters To Mosaic Dataset arcpy.env.workspace = dem_in dem_list = arcpy.ListRasters() for raster in dem_list: arcpy.AddMessage("Adding " + raster + " to mosaic dataset") arcpy.AddRastersToMosaicDataset_management( gdb_path + "\\" + gdb_name + "\\" + mosaic_dataset, "Raster Dataset", raster) # Process: Clip DEM arcpy.AddMessage("Clipping DEM...") arcpy.Clip_management(gdb_path + "\\" + gdb_name + "\\" + mosaic_dataset, "#", dem_out, clip_shp, "#", "ClippingGeometry", "NO_MAINTAIN_EXTENT") # Process: Hillshade arcpy.AddMessage("Creating hillshade and clipping to extent...") arcpy.gp.HillShade_sa(dem_out, hill_out, "315", "45", "NO_SHADOWS", "1") arcpy.AddMessage("Processing complete!")
coordSys = arcpy.Describe(inFprints).spatialReference # Process: Create Mosaic Dataset # This creates the blank (empty) mosaic dataset arcpy.AddMessage('Creating empty mosaic dataset...') md = 'md_' + mosaicName solarMosaic = arcpy.CreateMosaicDataset_management(scratchGDB, md, coordSys) # Loop through the geodatabases and add rasters to the mosaic dataset for gdb in inGDB.split(';'): try: # Process: Add Rasters To Mosaic Dataset arcpy.AddMessage('Adding rasters from %s to mosaic dataset...' % gdb) arcpy.AddRastersToMosaicDataset_management( solarMosaic, "Raster Dataset", gdb, "UPDATE_CELL_SIZES", "UPDATE_BOUNDARY", "NO_OVERVIEWS", "", "0", "1500", "", "", "SUBFOLDERS", "OVERWRITE_DUPLICATES", "NO_PYRAMIDS", "NO_STATISTICS", "NO_THUMBNAILS", "", "NO_FORCE_SPATIAL_REFERENCE") Log.write('Successfully added rasters from %s to mosaic dataset. \n' % gdb) except: arcpy.AddWarning('Failed to add rasters from %s.' % gdb) Log.write('Failed to add rasters from %s to mosaic dataset. \n' % gdb) # Process: Import Mosaic Dataset Geometry arcpy.AddMessage('Importing footprint geometry...') arcpy.ImportMosaicDatasetGeometry_management(solarMosaic, "FOOTPRINT", "Name", inFprints, joinFld) # Process: Copy final output raster dataset try:
def _folderContainsTif(input_folder):
    """Return True as soon as one .tif file is found under input_folder.

    The original counted every raster before checking `count <= 0`; only
    existence matters, so we stop at the first match.
    """
    for root, dirs, files in os.walk(input_folder):  # @UnusedVariable
        for f in files:
            if f.upper().endswith(".TIF"):
                return True
    return False


def createQARasterMosaicDataset(md_name, gdb_path, spatial_reference, input_folder, mxd, footprint_path=None, lasd_boundary_path=None):
    """Create (if absent) and populate a QA raster mosaic dataset.

    Scans input_folder recursively for .tif rasters; when at least one
    exists, creates mosaic dataset md_name in gdb_path with the given
    spatial_reference, applies the QA serving properties, adds the rasters
    (with pyramids/statistics/thumbnails), and imports footprint/boundary
    geometries when paths are provided.

    Failures are reported via arcpy.AddWarning rather than raised.
    'mxd' is accepted for signature compatibility but is not used here.

    Returns:
        [md_path, md_name] in all cases.
    """
    md_path = os.path.join(gdb_path, md_name)
    a = datetime.datetime.now()
    if arcpy.Exists(md_path):
        arcpy.AddMessage("\tMD Exists: {}".format(md_path))
        return [md_path, md_name]
    try:
        if not _folderContainsTif(input_folder):
            arcpy.AddMessage("\t\tNo rasters to add to {} in folder {}".format(md_path, input_folder))
        else:
            arcpy.AddMessage("\t\tLooking for rasters to add to {} in folder {}".format(md_path, input_folder))
            # Create a MD in same SR as LAS Dataset
            arcpy.CreateMosaicDataset_management(in_workspace=gdb_path,
                                                 in_mosaicdataset_name=md_name,
                                                 coordinate_system=spatial_reference,
                                                 num_bands="",
                                                 pixel_type="",
                                                 product_definition="NONE",
                                                 product_band_definitions="")
            arcpy.SetMosaicDatasetProperties_management(in_mosaic_dataset=md_path, rows_maximum_imagesize="4100", columns_maximum_imagesize="15000", allowed_compressions="None;JPEG;LZ77;LERC", default_compression_type="LERC", JPEG_quality="75", LERC_Tolerance="0.01", resampling_type="CUBIC", clip_to_footprints="NOT_CLIP", footprints_may_contain_nodata="FOOTPRINTS_DO_NOT_CONTAIN_NODATA", clip_to_boundary="CLIP", color_correction="NOT_APPLY", allowed_mensuration_capabilities="Basic", default_mensuration_capabilities="Basic", allowed_mosaic_methods="NorthWest;Center;LockRaster;ByAttribute;Nadir;Viewpoint;Seamline;None", default_mosaic_method="NorthWest", order_field="", order_base="", sorting_order="ASCENDING", mosaic_operator="FIRST", blend_width="10", view_point_x="600", view_point_y="300", max_num_per_mosaic="2000", cell_size_tolerance="0.8", cell_size="10 10", metadata_level="BASIC", transmission_fields="Name;MinPS;MaxPS;LowPS;HighPS;Tag;GroupName;ProductName;CenterX;CenterY;ZOrder;Shape_Length;Shape_Area;Thumbnail", use_time="DISABLED", start_time_field="", end_time_field="", time_format="", geographic_transform="", max_num_of_download_items="20", max_num_of_records_returned="1000", data_source_type="GENERIC", minimum_pixel_contribution="1", processing_templates="None", default_processing_template="None", time_interval="", time_interval_units="")
            a = doTime(a, "\t\tCreated MD {}".format(md_name))
            arcpy.AddRastersToMosaicDataset_management(in_mosaic_dataset=md_path, raster_type="Raster Dataset", input_path=input_folder, update_cellsize_ranges="UPDATE_CELL_SIZES", update_boundary="UPDATE_BOUNDARY", update_overviews="NO_OVERVIEWS", maximum_pyramid_levels="", maximum_cell_size="0", minimum_dimension="150", spatial_reference="", filter="#", sub_folder="SUBFOLDERS", duplicate_items_action="EXCLUDE_DUPLICATES", build_pyramids="BUILD_PYRAMIDS", calculate_statistics="CALCULATE_STATISTICS", build_thumbnails="BUILD_THUMBNAILS", operation_description="#", force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE", estimate_statistics="ESTIMATE_STATISTICS", aux_inputs="")
            total_rows = int(arcpy.GetCount_management(md_path).getOutput(0))
            if total_rows <= 0:
                # Empty MD is useless: warn and remove it again.
                arcpy.AddWarning("No rasters found for '{}'".format(md_path))
                deleteFileIfExists(md_path, True)
            else:
                try:
                    importMosaicDatasetGeometries(md_path, footprint_path, lasd_boundary_path)
                except Exception:  # was bare except: keep best-effort, but let KeyboardInterrupt through
                    arcpy.AddWarning("Failed to update MD boundaries for '{}'".format(md_path))
            a = doTime(a, "\tAdded Rasters to MD {}".format(md_name))
    except Exception:  # was bare except: narrowed so Ctrl-C still aborts
        arcpy.AddWarning("Failed to create MD for QA Raster Layer '{}'. Please remove any locks and delete related intermediate files".format(md_path))
    return [md_path, md_name]
import arcpy arcpy.AddRastersToMosaicDataset_management(in_mosaic_dataset="/data/sharedData/mdcs-py/0MD/Master.gdb/GPM_2019", raster_type="Table", input_path="/data/sharedData/mdcs-py/0MD/inputtable.gdb/gpm_2019", update_cellsize_ranges="UPDATE_CELL_SIZES", update_boundary="UPDATE_BOUNDARY", update_overviews="NO_OVERVIEWS", maximum_pyramid_levels="", maximum_cell_size="0", minimum_dimension="1500", spatial_reference="", filter="#", sub_folder="SUBFOLDERS", duplicate_items_action="ALLOW_DUPLICATES", build_pyramids="NO_PYRAMIDS", calculate_statistics="NO_STATISTICS", build_thumbnails="NO_THUMBNAILS", operation_description="#", force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE", estimate_statistics="NO_STATISTICS", aux_inputs="")