# ===== Example #1 (aggregated snippet separator) =====
else:
    if all_files_in_directory == 1:
        FCE_Spp = arcpy.ListRasters("FCE*", "tif")
        CCE_Spp = arcpy.ListRasters("CCE*", "tif")
        if len(subset_of_CEs) > 0:
            FCE_Spp = FCE_Spp[subset_of_CEs[0]:subset_of_CEs[1]]
            CCE_Spp = CCE_Spp[subset_of_CEs[0]:subset_of_CEs[1]]
    else:
        CCE_Spp = ['CCE0220.tif']  #for lanai cce0003, cce0055
        FCE_Spp = ['FCE0220.tif']

# Filter CO points to the selected island only.
# NOTE(review): `island`, `landscape_factor_dir` and `CAO_data_dir` are
# defined earlier in the script (not visible in this chunk) — confirm upstream.
if island != 'all':
    # Per-island extent raster used as a mask: <dir><island>/DEM/<island>_extent.tif
    island_mask = "%s%s/DEM/%s_extent.tif" % (landscape_factor_dir, island,
                                              island)
    island_mask = arcpy.Raster(island_mask)

# LOAD AUXILIARY LAYERS
#veg_zone_layer="%sVegetation Zones1.tif" %(landscape_factor_dir)
veg_zone_layer = "%sveg_zones2" % (landscape_factor_dir)
veg_zone_layer = arcpy.Raster(veg_zone_layer)

# Reclassified LANDFIRE vegetation types (8-bit, UTM projection).
veg_types_layer = "%slandfire_reclass_wetland_coastal_UTM_8b.tif" % (
    landscape_factor_dir)
veg_types_layer = arcpy.Raster(veg_types_layer)

#CO_data=r"%scorrected_CO_data2_merged_and_filtered.shp" %(CAO_data_dir) #corrected_CO_dataXY
CO_data = r"%scorrected_CO_data4_merged_and_filtered.shp" % (
    CAO_data_dir)  #corrected_CO_dataXY

# Expose the CO observation points as an in-memory feature layer for later selections.
arcpy.MakeFeatureLayer_management(CO_data, "CO_lyr")
def execute_RunSim_prev_2var(str_zonefolder, str_simfolder, str_lisfloodfolder, str_csvq, str_csvz, voutput, simtime, channelmanning, r_zbed, str_log, messages):
    """Run steady-state LISFLOOD-FP simulations for every combination of
    discharge scenario (rows of *str_csvq*) and downstream water level
    (rows of *str_csvz*), zone by zone from downstream to upstream.

    Parameters:
        str_zonefolder: folder with the zone rasters plus inbci.shp /
            outbci.shp / envelopezones.shp produced by earlier steps.
        str_simfolder: folder where .par files and per-simulation
            subfolders are written.
        str_lisfloodfolder: folder containing lisflood_intelRelease_double.exe.
        str_csvq: CSV with columns "nom" and "q" (one discharge per row).
        str_csvz: CSV with columns "nom" and "z" (one downstream level per row).
        voutput: if truthy, request hazard/velocity outputs from LISFLOOD.
        simtime: simulated time span, in seconds.
        channelmanning: Manning's n for the sub-grid channel (SGCn).
        r_zbed: bed-elevation raster (arcpy.Raster) for the downstream limit.
        str_log: path of the text log file written by this run.
        messages: ArcGIS messages object (warnings are forwarded to it).

    Returns:
        None. Results are written to disk ("res_<sim>" mosaics); problems
        are recorded in *str_log* and as geoprocessing warnings.

    Fixes vs. previous revision: csvfileq was reopened on every outer-loop
    iteration and never closed; csvfilez and filelog were never closed.
    """

    str_inbci = str_zonefolder + "\\inbci.shp"
    str_outbci = str_zonefolder + "\\outbci.shp"
    zbed = RasterIO(r_zbed)

    # Group the inflow boundary-condition points by zone id.
    bcipointcursor = arcpy.da.SearchCursor(str_inbci, ["SHAPE@", "zoneid", "flowacc", "type", "fpid"])
    dictsegmentsin = {}

    for point in bcipointcursor:
        if point[1] not in dictsegmentsin:
            dictsegmentsin[point[1]] = []
        dictsegmentsin[point[1]].append(point)

    allzones = list(dictsegmentsin.keys())
    allzones.sort()

    # Group zones by flow-path id (field "fpid") so that each flow path is
    # processed from downstream to upstream (zone ids sorted in reverse).
    dictzones_fp = {}

    for zone in allzones:
        if dictsegmentsin[zone][0][4] not in dictzones_fp:
            dictzones_fp[dictsegmentsin[zone][0][4]] = []

        dictzones_fp[dictsegmentsin[zone][0][4]].append(zone)

    sortedzones = []
    listfp = list(dictzones_fp.keys())
    listfp.sort()
    for fp in listfp:
        listzones_fp = dictzones_fp[fp]
        listzones_fp.sort(reverse=True)
        sortedzones.extend(listzones_fp)

    # Add the information from the outbci.shp file (outlet point of each zone).
    listzonesout = {}
    bcipointcursor = arcpy.da.SearchCursor(str_outbci, ["zoneid", "side", "lim1", "lim2", "side2", "lim3", "lim4", "SHAPE@"])
    for point in bcipointcursor:
        listzonesout[point[0]] = point

    zones = str_zonefolder + "\\envelopezones.shp"
    # Retrieve the lake boundary condition (zones carrying a valid Lake_ID).
    # NOTE(review): lakeid_byzone is built but not used below — confirm intent.
    zonesscursor = arcpy.da.SearchCursor(zones, ["GRID_CODE", "SHAPE@", "Lake_ID"])
    lakeid_byzone = {}
    for zoneshp in zonesscursor:

        if zoneshp[2] != -999:
            lakeid_byzone[zoneshp[0]] = zoneshp[2]

    filelog = open(str_log, 'w')

    csvfilez = open(str_csvz)
    csv_readerz = csv.DictReader(csvfilez)

    ref_raster = None

    # Outer loop: one downstream water level per row of str_csvz.
    for csvzrow in csv_readerz:
        zname = csvzrow["nom"]
        hfix = float(csvzrow["z"])

        csvfileq = open(str_csvq)
        csv_readerq = csv.DictReader(csvfileq)

        # Inner loop: one discharge scenario per row of str_csvq.
        for csvqrow in csv_readerq:
            simname = csvqrow["nom"] + "_" + zname
            currentsimfolder = str_simfolder + "\\" + simname
            currentresult = str_simfolder + "\\res_" + simname
            simq = float(csvqrow["q"])

            if not os.path.isdir(currentsimfolder):
                os.makedirs(currentsimfolder)

            for zone in sortedzones:
                segment = dictsegmentsin[zone]
                # Points sorted by flow accumulation; the "main" inflow is
                # assumed to come first (NOTE(review): lateral points before
                # the first "main" point would break the bdy writer below).
                for point in sorted(segment, key=lambda q: q[2]):

                    if point[3]=="main":
                        try:
                            if not arcpy.Exists(currentsimfolder + "\\elev_zone" + str(point[1])):

                                if ref_raster is None:
                                    ref_raster = arcpy.Raster(str_zonefolder + "\\zone" + str(point[1]))


                                outpointshape = listzonesout[point[1]][7].firstPoint

                                if arcpy.Exists(currentresult):
                                    # A valid result file already exists: use its
                                    # value at the outlet point as the downstream limit.
                                    if arcpy.Exists(currentsimfolder + "\\tmp_zone" + str(point[1])):
                                        arcpy.Delete_management(currentsimfolder + "\\tmp_zone" + str(point[1]))
                                    arcpy.Copy_management(currentresult, currentsimfolder + "\\tmp_zone" + str(point[1]))
                                    res_downstream = RasterIO(arcpy.Raster(currentsimfolder + "\\tmp_zone" + str(point[1])))
                                    hfix_raster = res_downstream.getValue(res_downstream.YtoRow(outpointshape.Y),
                                                                   res_downstream.XtoCol(outpointshape.X))
                                    if hfix_raster != res_downstream.nodata:
                                        hfix_sim = hfix_raster
                                    else:
                                        hfix_sim = hfix
                                    arcpy.Delete_management(currentsimfolder + "\\tmp_zone" + str(point[1]))
                                else:
                                    hfix_sim = hfix

                                # Write the LISFLOOD .par control file.

                                newfile = str_simfolder + "\\zone" + str(point[1]) + ".par"
                                if os.path.isfile(newfile):
                                    os.remove(newfile)

                                filepar = open(newfile, 'w')
                                filepar.write("DEMfile\tzone" + str(point[1]) + ".txt\n")
                                filepar.write("resroot\tzone" + str(point[1]) + "\n")
                                filepar.write("dirroot\t" + simname + "\n")
                                filepar.write("manningfile\tnzone" + str(point[1]) + ".txt\n")
                                filepar.write("bcifile\tzone" + str(point[1]) + ".bci" + "\n")
                                filepar.write("sim_time\t" + str(simtime) + "\n")
                                filepar.write("saveint\t" + str(simtime) + "\n")
                                filepar.write("bdyfile\t"+ simname +"\\zone" + str(point[1]) + ".bdy" + "\n")
                                filepar.write("SGCwidth\twzone" + str(point[1]) + ".txt\n")
                                filepar.write("SGCbank\tzone" + str(point[1]) + ".txt\n")
                                filepar.write("SGCbed\tdzone" + str(point[1]) + ".txt\n")
                                filepar.write("SGCn\t" + str(channelmanning) + "\n")

                                filepar.write("chanmask\tmzone" + str(point[1]) + ".txt\n")

                                # Flow velocities (optional hazard/velocity outputs).
                                if voutput:
                                    filepar.write("hazard\n")
                                    filepar.write("qoutput\n")
                                filepar.write("cfl\t0.3\n")
                                filepar.write("max_Froude\t1\n")
                                # filepar.write("debug\n")
                                filepar.close()

                                # Write the .bdy boundary file.
                                newfilebdy = currentsimfolder + "\\zone" + str(point[1]) + ".bdy"

                                for point2 in sorted(segment, key=lambda q: q[2]):

                                    # Convert flow accumulation (cells) to a discharge
                                    # via the cell area, scaled by the scenario flow.
                                    q_value = point2[2]*simq*(ref_raster.meanCellHeight*ref_raster.meanCellWidth)/1000000.

                                    if point2[3] == "main":
                                        # Create the bdy file (main inflow first).


                                        pointdischarge = q_value / (
                                        (ref_raster.meanCellHeight + ref_raster.meanCellWidth) / 2)
                                        lastdischarge = q_value
                                        latnum = 0
                                        filebdy = open(newfilebdy, 'w')
                                        filebdy.write("zone"+str(point[1]) + ".bdy\n")
                                        filebdy.write("zone"+str(point[1]) + "\n")
                                        filebdy.write("3\tseconds\n")
                                        filebdy.write("0\t0\n")
                                        filebdy.write("{0:.3f}".format(pointdischarge) + "\t50000\n")
                                        filebdy.write("{0:.3f}".format(pointdischarge) + "\t" + str(simtime))
                                        filebdy.close()
                                    else:
                                        # Lateral inflow: only the increment over the
                                        # previously written discharge is added.
                                        latnum += 1
                                        pointdischarge = (q_value - lastdischarge) / (
                                        (ref_raster.meanCellHeight + ref_raster.meanCellWidth) / 2)
                                        lastdischarge = q_value
                                        filebdy = open(newfilebdy, 'a')
                                        filebdy.write("\nzone" + str(point[1]) + "_" + str(latnum) + "\n")
                                        filebdy.write("3\tseconds\n")
                                        filebdy.write("0\t0\n")
                                        filebdy.write("{0:.3f}".format(pointdischarge) + "\t50000\n")
                                        filebdy.write("{0:.3f}".format(pointdischarge) + "\t" + str(simtime))
                                        filebdy.close()

                                # Downstream condition: start 30 cm above the bed,
                                # then relax to the fixed downstream level.
                                zdep = min(zbed.getValue(zbed.YtoRow(outpointshape.Y),
                                                         zbed.XtoCol(outpointshape.X)) + 0.3, hfix_sim)
                                filebdy = open(newfilebdy, 'a')
                                filebdy.write("\nhvar\n")
                                filebdy.write("4\tseconds\n")
                                filebdy.write("{0:.2f}".format(zdep) + "\t0\n")
                                filebdy.write("{0:.2f}".format(zdep) + "\t50000\n")
                                filebdy.write("{0:.2f}".format(hfix_sim) + "\t55000\n")
                                filebdy.write("{0:.2f}".format(hfix_sim) + "\t" + str(simtime))

                                filebdy.close()

                                # -steadytol value: divide the discharge by 200
                                # and keep a single significant digit.
                                steadytol = str(
                                    round(lastdischarge / 200., - int(math.floor(math.log10(abs(lastdischarge / 200.))))))

                                subprocess.check_call([str_lisfloodfolder + "\\lisflood_intelRelease_double.exe", "-steady", "-steadytol", steadytol, str_simfolder + "\\zone" + str(point[1]) + ".par"], shell=True, cwd=str_simfolder)




                                # Convert the LISFLOOD output files.

                                # Rename the created files (required for the
                                # ASCII-to-raster conversion tool to accept them).
                                zonename = "zone"+str(point[1])

                                if os.path.exists(currentsimfolder + "\\"  + zonename + "elev.txt"):
                                    os.remove(currentsimfolder + "\\"   + zonename + "elev.txt")

                                if os.path.exists(currentsimfolder   + "\\" + zonename + "-9999.elev"):
                                    os.rename(currentsimfolder  + "\\"  + zonename + "-9999.elev",
                                              currentsimfolder + "\\" + zonename + "elev.txt")
                                else:
                                    # "-0001" output means LISFLOOD stopped before
                                    # reaching steady state: keep it, but warn.
                                    os.rename(currentsimfolder  + "\\" + zonename + "-0001.elev",
                                              currentsimfolder + "\\" + zonename + "elev.txt")
                                    filelog.write("Steady state not reached : " + zonename + ", sim " + simname)
                                    messages.addWarningMessage("Steady state not reached : " + zonename + ", sim " + simname)

                                if os.path.exists(currentsimfolder + "\\" + zonename + "-9999.Vx") or os.path.exists(currentsimfolder + "\\"  + zonename + "-0001.Vx"):
                                    if os.path.exists(currentsimfolder + "\\" + zonename + "Vx.txt"):
                                        os.remove(currentsimfolder + "\\" + zonename + "Vx.txt")

                                    if os.path.exists(currentsimfolder + "\\" + zonename + "Vy.txt"):
                                        os.remove(currentsimfolder + "\\" + zonename + "Vy.txt")
                                    if os.path.exists(currentsimfolder  + "\\" + zonename + "-9999.Vx"):
                                        os.rename(currentsimfolder  + "\\" + zonename + "-9999.Vx",
                                                  currentsimfolder+ "\\" + zonename + "Vx.txt")
                                        os.rename(currentsimfolder  + "\\"  + zonename + "-9999.Vy",
                                                  currentsimfolder  + "\\" + zonename + "Vy.txt")
                                    else:
                                        os.rename(currentsimfolder  + "\\" + zonename + "-0001.Vx",
                                                  currentsimfolder  + "\\" + zonename + "Vx.txt")
                                        os.rename(currentsimfolder  + "\\" + zonename + "-0001.Vy",
                                                  currentsimfolder  + "\\" + zonename + "Vy.txt")
                                    arcpy.ASCIIToRaster_conversion(currentsimfolder  + "\\" + zonename + "Vx.txt",
                                                                   currentsimfolder + "\\Vx_" + zonename,
                                                               "FLOAT")
                                    arcpy.ASCIIToRaster_conversion(currentsimfolder  + "\\" + zonename + "Vy.txt",
                                                                   currentsimfolder + "\\Vy_" + zonename,
                                                               "FLOAT")
                                    arcpy.DefineProjection_management(currentsimfolder + "\\Vx_" + zonename, ref_raster.spatialReference)
                                    arcpy.DefineProjection_management(currentsimfolder + "\\Vy_" + zonename, ref_raster.spatialReference)


                                # Convert the elevation output to an ArcGIS raster.
                                str_elev = currentsimfolder + "\\elev_" + zonename
                                arcpy.ASCIIToRaster_conversion(currentsimfolder + "\\"  + zonename + "elev.txt", str_elev, "FLOAT")


                                # Set the projection (ASCII grids carry none).
                                arcpy.DefineProjection_management(str_elev, ref_raster.spatialReference)



                            # Accumulate the per-zone result into the simulation
                            # mosaic, keeping the maximum water level per cell.
                            if not arcpy.Exists(currentresult):

                                arcpy.Copy_management(currentsimfolder + "\\elev_" + "zone"+str(point[1]), currentresult)
                            else:

                                arcpy.Mosaic_management(currentsimfolder + "\\elev_" + "zone"+str(point[1]), currentresult, mosaic_type="MAXIMUM")

                        except BaseException as e:
                            # NOTE(review): BaseException also swallows
                            # KeyboardInterrupt — intentional best-effort skip.
                            filelog.write("ERREUR in " + simname + ": sim aborded during zone "+ str(point[1]) + "\n")
                            messages.addWarningMessage("Some simulations skipped. See log file.")

        # fix: close the discharge CSV opened for this downstream level
        # (was previously leaked on every outer-loop iteration).
        csvfileq.close()

    csvfilez.close()
    filelog.close()
    return
# ===== Example #2/#3 (aggregated snippet separator) =====
# Feb 2, 2017 by nmtarr
# Code to run analyses on the importance of southeastern woody wetlands
# for wildlife.  (Opening quotes of the original module docstring were lost
# during aggregation; preserved here as comments.)
import sys
sys.path.append('P:/Proj3/USGap/Scripts/SE_Woody_Wetlands')
# NOTE(review): execfile and the bare "print group" below are Python 2 only.
execfile("T:/Scripts/AppendPaths27.py")
import arcpy
import pandas as pd
import SEWWConfig as floodconfig
pd.set_option('display.width', 1000)
arcpy.CheckOutExtension("Spatial")
arcpy.env.overwriteOutput = True

#############################################  Mask the richness with the study region
######################################################################################
for group in floodconfig.richnessPathsCONUS:
    print group
    SE = arcpy.sa.ExtractByMask(
        arcpy.Raster(floodconfig.richnessPathsCONUS[group]), floodconfig.AOI)
    SE.save(floodconfig.richnessPathsSE[group])

############################  Mask the richness with the SE woody wetlands (SEWW) layer
######################################################################################
for group in floodconfig.richnessPathsCONUS:
    print group
    MU = arcpy.sa.ExtractByMask(
        arcpy.Raster(floodconfig.richnessPathsCONUS[group]),
        arcpy.Raster(floodconfig.SEWW))
    MU.save(floodconfig.richnessPathsFlood[group])
# ===== Example #4 (aggregated snippet separator) =====
def long_task(self):
    """Background task that runs a long function with progress reports.

    Classifies a 3-band raster with a pre-trained decision tree (per-pixel),
    writes the classified GeoTIFF, then emits Celery-style progress updates
    via self.update_state() and returns a completion dict.

    NOTE(review): input/model/output paths are hard-coded below; `logging`,
    `random` and `time` are assumed to be imported at module level — confirm.

    Fix vs. previous revision: removed a duplicate `import pandas as pd`
    and dead commented-out subprocess code.
    """
    import arcpy
    import pandas as pd
    import numpy as np
    # Hard-coded I/O paths (consider promoting these to parameters).
    dataPath = "C:/Prog/banghendrik/Combinasi_654_Jabo_Lapan_modified.tif"
    modelPath = "C:/Prog/banghendrik/DataTest_decisionTree.pkl"
    outputPath = "C:/Prog/banghendrik/Combinasi_654_Jabo_Lapan_modified_clf.tif"
    rasterarray = arcpy.RasterToNumPyArray(dataPath)

    # Stack the first three bands as feature columns: one row per pixel.
    data = np.array([rasterarray[0].ravel(), rasterarray[1].ravel(), rasterarray[2].ravel()])
    data = data.transpose()

    print("Change to dataframe format")
    logging.info('Change to dataframe format')
    columns = ['band1','band2', 'band3']
    df = pd.DataFrame(data, columns=columns)

    # Predict in 20 chunks to bound peak memory during clf.predict.
    print("Split data to 20 chunks")
    logging.info('Split data to 20 chunks')
    df_arr = np.array_split(df, 20)
    # NOTE(review): sklearn.externals.joblib was removed in scikit-learn 0.23;
    # modern environments need "import joblib" instead.
    from sklearn.externals import joblib
    clf = joblib.load(modelPath)
    kelasAll = []
    for i in range(len(df_arr)):

        print ("predicting data chunk-"+str(i))
        logging.info("predicting data chunk-"+str(i))
        kelas = clf.predict(df_arr[i])
        dat = pd.DataFrame()
        dat['kel'] = kelas
        print ("mapping to integer class")
        logging.info("mapping to integer class")
        # Map string class labels (cloud/water/soil/vegetation) to int codes.
        mymap = {'awan':1, 'air':2, 'tanah':3, 'vegetasi':4}
        dat['kel'] = dat['kel'].map(mymap)

        band1Array = dat['kel'].values
        print ("extend to list")
        logging.info("extend to list")
        kelasAll.extend(band1Array.tolist())

    # Release large intermediates before allocating the output array.
    del df_arr
    del clf
    del kelas
    del dat
    del band1Array
    del data

    print ("change list to np array")
    logging.info("change list to np array")
    kelasAllArray = np.array(kelasAll, dtype=np.uint8)

    print ("reshaping np array")
    logging.info("reshaping np array")
    # Reshape the flat prediction vector back to the raster's row width.
    band1 = np.reshape(kelasAllArray, (-1, rasterarray[0][0].size))
    band1 = band1.astype(np.uint8)

    raster = arcpy.Raster(dataPath)
    inputRaster = dataPath

    # Georeferencing info needed to rebuild a raster from the numpy array.
    spatialref = arcpy.Describe(inputRaster).spatialReference
    cellsize1  = raster.meanCellHeight
    cellsize2  = raster.meanCellWidth
    extent     = arcpy.Describe(inputRaster).Extent
    pnt        = arcpy.Point(extent.XMin,extent.YMin)

    del raster

    # Save the classified raster and restore the projection.
    print ("numpy array to raster ..")
    logging.info("numpy array to raster ..")
    out_ras = arcpy.NumPyArrayToRaster(band1, pnt, cellsize1, cellsize2)
    print ("define projection ..")
    logging.info ("define projection ..")
    arcpy.CopyRaster_management(out_ras, outputPath)
    arcpy.DefineProjection_management(outputPath, spatialref)

    # Emit randomized progress messages, one per second.
    verb = ['Starting up', 'Booting', 'Repairing', 'Loading', 'Checking']
    adjective = ['master', 'radiant', 'silent', 'harmonic', 'fast']
    noun = ['solar array', 'particle reshaper', 'cosmic ray', 'orbiter', 'bit']
    message = ''
    total = random.randint(10, 50)
    for i in range(total):
        if not message or random.random() < 0.25:
            message = '{0} {1} {2}...'.format(random.choice(verb),
                                              random.choice(adjective),
                                              random.choice(noun))
        self.update_state(state='PROGRESS',
                          meta={'current': i, 'total': total,
                                'status': message})
        time.sleep(1)
    return {'current': 100, 'total': 100, 'status': 'Task completed!',
            'result': 42}
# ===== Example #5 (aggregated snippet separator; fragment below is cut at both ends) =====
	if desc.workspaceType == 'LocalDatabase':
		saveInGDB = True
	else:
		saveInGDB = False
	
	# Define the summary table (created later if necessary)
	if saveInGDB:
		summaryTableName = 'SLBL_results'
	else:
		summaryTableName = 'SLBL_results.dbf'
		
	summaryTable = os.path.join(ws,summaryTableName)
	
	arcpy.env.workspace = ws

	grid_dem_file = arcpy.Raster(grid_dem_file)
	#grid_dem_lyr = os.path.basename(grid_dem_file)
	#arcpy.MakeRasterLayer_management (grid_dem_file, grid_dem_lyr, "", "", "1")

	# Convert the polygon features to a raster mask
	try:
		arcpy.RecalculateFeatureClassExtent_management(mask_file)
	except:
		pass
	mask_desc = arcpy.Describe(mask_file)
	try:
		# Retrieve the selected polygons (works if the input is a layer)
		Set = mask_desc.FIDSet
	except:
		#makes a layer first if the input is a file
		if arcpy.GetInstallInfo()['ProductName'] == 'Desktop':
# ===== Example #6 (aggregated snippet separator) =====
#https://community.esri.com/thread/139164
import arcpy

# reference your raster
ras = r"D:\Thesis\Data\Afg\ESA\ESACCI-LC-L4-LCCS-Map-300m-P5Y-2010-v1.6.1.tif"
raster = arcpy.Raster(ras)

# determine number of pixels
cnt_pix = raster.height * raster.width

# determine if raster has no data values
if int(arcpy.GetRasterProperties_management(ras,
                                            "ANYNODATA").getOutput(0)) == 1:

    # determine if raster has all data values
    if int(
            arcpy.GetRasterProperties_management(
                ras, "ALLNODATA").getOutput(0)) == 1:
        print "All cells of raster are NoData"
        print "Data pixels  : {0} ({1}%)".format(0, 0.0)
        print "Nodata pixels: {0} ({1}%)".format(cnt_pix, 100.0)

    else:
        # handle integer different from float
        if raster.isInteger and raster.hasRAT:
            print "Integer raster with RAT"
            lst_cnt = [r.COUNT for r in arcpy.SearchCursor(raster)]
            cnt_data = sum(lst_cnt)
            cnt_nodata = cnt_pix - cnt_data

        else:
import sys
#from dbfpy import dbf #module for r/w dbf files with py available at http://dbfpy.sourceforge.net/
from arcpy import env
from random import randrange

# NOTE(review): `wd` (working directory) and `arcpy` itself must be defined
# earlier in the full script — not visible in this chunk.
jnk = randrange(10000)  # random suffix once used for a scratch GDB (see commented lines)
arcpy.env.overwriteOutput = True
arcpy.env.workspace = wd
arcpy.env.compression = "LZW"
#arcpy.CreateFileGDB_management("D:/temp/arcgis/", "scratchoutput"+str(jnk)+".gdb")
#arcpy.env.scratchWorkspace = "D:/temp/arcgis/scratchoutput"+str(jnk)+".gdb"

if arcpy.CheckExtension("Spatial") == "Available":
    arcpy.CheckOutExtension("Spatial")

# Inputs: rasterized TIGER/Line roads, and habitat quality (good quality = 3).
roads_raster = arcpy.Raster(wd + "Roads_TigerLine_raster.tif")
habqual_raster = arcpy.Raster(wd + "habqual_v3_4.tif")
goodqual_raster = arcpy.sa.Con(habqual_raster, 1, 0, "Value=3")
notgoodqual_raster = arcpy.sa.Con(habqual_raster, 1, 0, "Value<3")
notgoodqual_raster = arcpy.sa.SetNull(notgoodqual_raster, 1, "Value=0")

# 100 m Euclidean-distance surface around roads; NoData (beyond 100 m) -> 101.
roads_raster_buffer = arcpy.sa.EucDistance(roads_raster, 100, "", "")
roads_raster_buffer = arcpy.sa.Con(arcpy.sa.IsNull(roads_raster_buffer), 101,
                                   roads_raster_buffer)
roads_raster_buffer = arcpy.sa.Int(
    roads_raster_buffer
)  #must convert to integer to use conditional statement
# NOTE(review): CalculateStatistics_management returns a Result object, not a
# raster; reassigning roads_raster_buffer here is suspicious — the Con() below
# may only work via the Result's implicit conversion to a dataset path. Verify.
roads_raster_buffer = arcpy.CalculateStatistics_management(roads_raster_buffer)
roads_raster_buffer_core = arcpy.sa.Con(roads_raster_buffer, 1, 0, "Value>100")
notgoodqual_raster_buffer = arcpy.sa.EucDistance(notgoodqual_raster, 100, "",
def getband_count(input_raster_path):
    """Print and return the number of bands of the raster at *input_raster_path*."""
    band_total = arcpy.Raster(input_raster_path).bandCount
    print(u"获取的波段数:" + str(band_total))
    return band_total
# ===== Example #9 (aggregated snippet separator; fragment below is cut at both ends) =====
        #pFolderP = apwrutils.Utils.getcwd()
        pFolderPRunName = os.path.join(pFolderP, runName)
        pFolderPS = os.path.join(pFolderPRunName, "WKSP")
        if (os.path.exists(pFolderPS) == False):
            apwrutils.Utils.makeSureDirExists(pFolderPS)

        ddt = time.clock()
        arcpy.env.overwriteOutput = True
        #..Create GDB with the runName, copy the inStream, inPoints etc to that GDB.
        lDHs = sDH.split(";")
        if (len(lDHs) > 0):
            if (nProcessors == 0):
                arcpy.AddMessage("It is required that nProcessors >  0")
                pass
            else:
                inRaster = arcpy.Raster(inDemRaster)
                arcpy.env.cellSize = inRaster.meanCellWidth
                oDesc = arcpy.Describe(inCatchment)
                sName = oDesc.name
                oidFld = oDesc.oidFieldName
                pWorkspace = apwrutils.Utils.getWorkspace(inCatchment)
                pStatTable = os.path.join(pWorkspace, "{}_Stats".format(sName))
                arcpy.Statistics_analysis(inCatchment, pStatTable,
                                          [[oidFld, "MIN"], [oidFld, "MAX"]])
                with arcpy.da.SearchCursor(
                        pStatTable,
                    ["MIN_{}".format(oidFld), "MAX_{}".format(oidFld)
                     ]) as rows:
                    for row in rows:
                        nMin = row[0]
                        nMax = row[1]
# ===== Example #10 (aggregated snippet separator) =====
"""
License is Apache 2.0
"""


def rescale(raster, out_min=0, out_max=1):
    """Linearly map *raster* values from [raster.minimum, raster.maximum]
    onto [out_min, out_max] and return the transformed raster.
    """
    in_min = raster.minimum
    in_max = raster.maximum
    # y = slope * x + intercept, chosen so in_min -> out_min and in_max -> out_max.
    slope = (out_max - out_min) / (in_max - in_min)
    intercept = out_min - slope * in_min
    return slope * raster + intercept


if __name__ == '__main__':
    import arcpy

    # Resolve the script-tool parameters: input raster, output range, output path.
    input_raster = arcpy.Raster(arcpy.Describe(arcpy.GetParameter(0)).catalogPath)
    lower_bound = arcpy.GetParameter(1)
    upper_bound = arcpy.GetParameter(2)
    output_path = arcpy.GetParameterAsText(3)

    # Rescale and persist the result.
    rescaled = rescale(input_raster, lower_bound, upper_bound)
    rescaled.save(output_path)
# ===== Example #11 (aggregated snippet separator) =====
def main(best_plant_dir=str(),
         lf_dir=str(),
         crit_lf=float(),
         prj_name=str(),
         unit=str(),
         version=str()):
    """ derive and draw stabilizing features for vegetation plantings

    best_plant_dir = directory with best-plant lifespan rasters (logging only)
    lf_dir = directory containing the bioengineering lifespan rasters
    crit_lf = 2.5               # years of minimum plant survival without stabilization
    prj_name = "TBR"             # corresponding to folder name
    unit = "us" or "si"
    version = "v10"             # type() =  3-char str: vII

    Returns -1 on failure; falls through (None) on success.  Results: a
    plant_stab.tif raster, a Plant_stab.shp shapefile, a plant_stab.txt
    table, and area quantities written into the project xlsx workbook.

    Fix vs. previous revision: the lf_wood.tif lookup was logged with
    best_plant_dir although the raster is read from lf_dir.
    """
    logger = logging.getLogger("logfile")
    logger.info("STABILIZING PLANTS ----- ----- ----- -----")
    if unit == "us":
        area_units = "SQUARE_FEET_US"
        ft2_to_acres = config.ft2ac
    else:
        area_units = "SQUARE_METERS"
        ft2_to_acres = 1.0

    arcpy.CheckOutExtension('Spatial')
    arcpy.gp.overwriteOutput = True

    dir2pp = config.dir2pm + prj_name + "_" + version + "\\"

    # folder settings
    ras_dir = dir2pp + "Geodata\\Rasters\\"
    shp_dir = dir2pp + "Geodata\\Shapefiles\\"
    quant_dir = dir2pp + "Quantities\\"

    # file and variable settings
    xlsx_target = dir2pp + prj_name + "_assessment_" + version + ".xlsx"
    # Feature-name -> integer grid code used in the output raster/shapefile.
    feature_dict = {
        "Large wood": 211,
        "ELJs (plantings)": 212,
        "Bioengineering (veget.)": 213,
        "Bioengineering (mineral)": 214,
        "Angular boulders (instream)": 215
    }

    # LOOK UP INPUT RASTERS
    try:
        logger.info("Looking up maximum lifespan rasters ...")
        max_lf_plants = arcpy.Raster(ras_dir + "max_lf_pl_c.tif")
        logger.info(" >> Vegetation plantings OK.")
        logger.info(" -- OK (MaxLifespan raster read)\n")
    except:
        logger.info("ERROR: Could not find max. lifespan Rasters.")
        return -1

    logger.info("Looking up specific bioengineering lifespan rasters ...")
    # fix: log the path that is actually read below (lf_dir, not best_plant_dir).
    logger.info(lf_dir + "lf_wood.tif")
    try:
        lf_wood = arcpy.Raster(lf_dir + "lf_wood.tif")
        logger.info(" >> Added Streamwood.")
    except:
        # Fall back to a zero lifespan raster if the file is missing.
        lf_wood = Float(0)
        logger.info(
            "WARNING: Could not find Lifespan Raster (%slf_wood.tif)." %
            lf_dir)
        logger.info(
            "         > Go to the Lifespan Tab and create lifespan rasters for the Bioengineering feature group."
        )
        logger.info("         > Applying 0-lifespans instead.")

    try:
        lf_bio = arcpy.Raster(lf_dir + "lf_bio_v_bio.tif")
        logger.info(" >> Added Other bioengineering.")
    except:
        lf_bio = Float(0)
        logger.info(
            "WARNING: Could not find Lifespan Raster (%slf_bio_v_bio.tif)." %
            lf_dir)
        logger.info(
            "         > Go to the Lifespan Tab and create lifespan rasters for the Bioengineering feature group."
        )
        logger.info("         > Applying 0-lifespans instead.")
    logger.info(" -- OK (Bioengineering raster read)")

    # EVALUATE BEST STABILIZATION FEATURES
    try:
        logger.info("Assessing best features for plant stabilization.")
        arcpy.env.extent = max_lf_plants.extent
        # Where plantings alone don't survive long enough (<= crit_lf), pick a
        # stabilizing feature: wood/ELJs if a wood lifespan exists, otherwise
        # bioengineering, otherwise angular boulders.
        best_stab = Con(
            max_lf_plants <= crit_lf,
            Con(
                ~IsNull(lf_wood),
                Con(lf_wood > crit_lf, Int(feature_dict["Large wood"]),
                    Int(feature_dict["ELJs (plantings)"])),
                Con(
                    ~IsNull(lf_bio),
                    Con(lf_bio > crit_lf,
                        Int(feature_dict["Bioengineering (veget.)"]),
                        Int(feature_dict["Bioengineering (mineral)"])),
                    Int(feature_dict["Angular boulders (instream)"]))))
        logger.info(" -- OK (Stabilization assessment.)\n")
    except:
        logger.info("ERROR: Best stabilization assessment failed.")
        return -1

    # SAVE RASTERS
    try:
        logger.info("Saving results raster as " + ras_dir + "plant_stab.tif")
        best_stab.save(ras_dir + "plant_stab.tif")
        logger.info(" -- OK (Raster saved.)\n")
    except:
        logger.info("ERROR: Result geofile saving failed.")
        return -1

    # SHAPEFILE CONVERSION AND STATS
    try:
        logger.info("Extracting quantities from geodata ...")
        logger.info(" >> Converting results raster to polygon shapefile ...")
        p_stab_shp = shp_dir + "Plant_stab.shp"
        try:
            arcpy.RasterToPolygon_conversion(Int(best_stab), p_stab_shp,
                                             "NO_SIMPLIFY")
            if not fGl.verify_shp_file(p_stab_shp):
                logger.info(
                    "NO STABILIZATION MEASURE IDENTIFIED (EMPTY: %s)." %
                    p_stab_shp)
                logger.info(fGl.open_file(xlsx_target))
                return -1
        except:
            logger.info(
                "NOTHING TO DO. Consider to increase the critical lifespan threshold."
            )
        logger.info(" >> Calculating area statistics ... ")
        try:
            arcpy.AddField_management(p_stab_shp, "F_AREA", "FLOAT", 9)
        except:
            logger.info(
                "    * field F_AREA already exists or the dataset is opened by another software."
            )
        try:
            arcpy.CalculateGeometryAttributes_management(
                p_stab_shp,
                geometry_property=[["F_AREA", "AREA"]],
                area_unit=area_units)
        except:
            logger.info("    * no plant stabilization applicable ")

        logger.info(" >> Adding field (stabilizing feature) ... ")
        try:
            arcpy.AddField_management(p_stab_shp, "Stab_feat", "TEXT")
        except:
            logger.info("    * field Stab_feat already exists ")
        logger.info(" >> Evaluating field (stabilizing feature) ... ")
        # Reverse lookup (grid code -> feature name) for the field calculator.
        inv_feature_dict = {v: k for k, v in feature_dict.items()}
        code_block = "inv_feature_dict = " + str(inv_feature_dict)
        try:
            arcpy.CalculateField_management(p_stab_shp, "Stab_feat",
                                            "inv_feature_dict[!gridcode!]",
                                            "PYTHON", code_block)
        except:
            logger.info("    * no plant stabilization added ... ")
        logger.info(" >> Exporting tables ...")
        arcpy.TableToTable_conversion(p_stab_shp, quant_dir, "plant_stab.txt")
        logger.info(" -- OK (Quantity export)\n")
    except:
        logger.info("ERROR: Shapefile operations failed.")
        return -1

    # PREPARE AREA DATA (QUANTITIES)
    logger.info("Processing table statistics ...")
    write_dict = {}
    for k in feature_dict.keys():
        write_dict.update({k: 0.0})  # set to zero for surface count

    stat_data = fGl.read_txt(quant_dir + "plant_stab.txt")
    logger.info(" >> Extracting relevant area sizes ...")

    # Sum the F_AREA column per feature (row[0] = grid code, row[1] = area).
    for row in stat_data:
        try:
            write_dict[inv_feature_dict[int(row[0])]] += row[1]
        except:
            logger.info("      --- Unknown key: " + str(int(row[0])))

    if unit == "us":
        logger.info(" >> Converting ft2 to acres ...")
        for k in write_dict.keys():
            write_dict[k] = write_dict[k] * float(ft2_to_acres)
    logger.info(" -- OK (Area extraction finished)\n")

    # WRITE AREA DATA TO EXCEL FILE
    logger.info("Writing results to costs workbook (sheet: from_geodata) ...")
    fGl.write_dict2xlsx(write_dict, xlsx_target, "B", "C", 12)

    # CLEAN UP useless shapefiles
    logger.info("Cleaning up redundant shapefiles ...")
    arcpy.env.workspace = shp_dir
    all_shps = arcpy.ListFeatureClasses()
    for shp in all_shps:
        if "_del" in str(shp):
            try:
                arcpy.Delete_management(shp)
            except:
                logger.info(
                    str(shp) +
                    " is locked. Remove manually to avoid confusion.")
    arcpy.env.workspace = dir2pp + "Geodata\\"
    logger.info(" -- OK (Clean up)\n")
# ===== Example #12 (aggregated snippet separator) =====
# Prepares the sample data for the development of the models.

import arcpy
import numpy

# Workspace root and the folder holding the climate raster maps.
myworkspace = "D:/CCW20/GIS"
cmapsdir = myworkspace + "/climatemaps"
# 25 m digital height model used as the elevation predictor.
dhmraster = arcpy.Raster("D:/CCW20/GIS/dhm25.tif")
arcpy.env.overwriteOutput = True
arcpy.CheckOutExtension('Spatial')
# Read the positive sample dataset: presence sample feature classes,
# northern (n) and southern (s) sets for the 1961-1990 normal period.
samples_n_fc = myworkspace + "/samples.gdb/samples_n_6190"
samples_s_fc = myworkspace + "/samples.gdb/samples_s_6190"
arcpy.env.workspace = cmapsdir
# [raster file name, short variable name] pairs of predictor layers:
# elevation, lithology, and temperature rasters (presumably tyy* = yearly
# and toct/tjul/tjan/tapr* = monthly min/mean/max normals for 1961-1990,
# tabs* = absolute extremes -- TODO confirm naming against data source).
inrasterlist = [["dhm25.tif", "dhm25"], ["litho7g.tif", "litho7g"],
                ["tyymin6190.tif", "tyymin6190"],
                ["tyymean6190.tif", "tyymean6190"],
                ["tyymax6190.tif", "tyymax6190"],
                ["toctmin6190.tif", "toctmin6190"],
                ["toctmean6190.tif", "toctmean6190"],
                ["toctmax6190.tif", "toctmax6190"],
                ["tjulmin6190.tif", "tjulmin6190"],
                ["tjulmean6190.tif", "tjulmean6190"],
                ["tjulmax6190.tif", "tjulmax6190"],
                ["tjanmin6190.tif", "tjanmin6190"],
                ["tjanmean6190.tif", "tjanmean6190"],
                ["tjanmax6190.tif", "tjanmax6190"],
                ["taprmin6190.tif", "taprmin6190"],
                ["taprmean6190.tif", "taprmean6190"],
                ["taprmax6190.tif", "taprmax6190"], ["tabsmin.tif", "tabsmin"],
                ["tabsmax.tif", "tabsmax"], ["rnormyy6190.tif", "rnormyy6190"],
示例#13
0
def RevalueRaster(f_path, elev_type, raster_props, target_path, publish_path, minZ, maxZ, bound_path, spatial_ref=None):
    """Derive and publish a validated copy of one elevation raster.

    For intensity rasters (``elev_type == INT``) the source is linearly
    rescaled into the 0-255 range; for float elevation rasters, values
    outside ``[minZ, maxZ]`` are set to NoData. The derived raster is then
    optionally re-projected, given statistics, copied to TIFF, and clipped
    to ``bound_path`` for publishing. Rasters that are not single-band,
    not Float32/Float64 (unless intensity), or in an unsupported format
    are skipped with a message.

    Args:
        f_path: Path to the source raster.
        elev_type: Elevation product type; INT marks intensity imagery.
        raster_props: Mapping with BAND_COUNT, PIXEL_TYPE and FORMAT entries.
        target_path: Root folder for derived rasters.
        publish_path: Root folder for published (clipped) rasters.
        minZ, maxZ: Valid Z range; forced to 0-255 for intensity data.
        bound_path: Boundary dataset used to clip the published copy.
        spatial_ref: Optional spatial reference object or .prj file path.
    """
    arcpy.AddMessage("RevalueRaster {} {}: ZRange({},{})".format(elev_type, f_path,minZ,maxZ))
    Utility.setArcpyEnv(is_overwrite_output=True)
    a = datetime.now()
    nodata = RasterConfig.NODATA_DEFAULT
    isInt = (elev_type == INT)
    if isInt:
        # Intensity images are rescaled into 8-bit range regardless of the
        # caller-supplied Z range.
        minZ, maxZ = 0, 255
        arcpy.AddMessage("RevalueRaster type {} is intensity {}: ZRange({},{})".format(elev_type, f_path,minZ,maxZ))

    f_name, target_f_path, publish_f_path, stat_out_folder, stat_file_path, bound_out_folder, vector_bound_path = getFilePaths(f_path, elev_type, target_path, publish_path)  # @UnusedVariable

    # Temporary name for the not-yet-clipped published copy.
    publish_f_left, publish_f_right = os.path.splitext(publish_f_path)
    publish1_f_path = "{}1{}".format(publish_f_left, publish_f_right)

    if raster_props[BAND_COUNT] != 1:
        arcpy.AddMessage("Skipping Raster {}, not 1 band image.".format(f_path))
    else:
        # Intensity may be another type
        if not isInt and not (raster_props[PIXEL_TYPE] == PIXEL_TYPE_F32 or raster_props[PIXEL_TYPE] == PIXEL_TYPE_D64):
            arcpy.AddMessage("Skipping Raster '{}', '{}' not Float32 type image.".format(f_path, raster_props[PIXEL_TYPE]))
        else:
            if not (raster_props[FORMAT] == "TIFF" or raster_props[FORMAT] == "GRID" or raster_props[FORMAT] == "IMAGINE Image" or raster_props[FORMAT] == "FGDBR"):
                arcpy.AddMessage("Skipping Raster '{}', '{}' not supported image format.".format(f_path, raster_props[FORMAT]))
            else:

                if arcpy.Exists(target_f_path):
                    arcpy.AddMessage("\tDerived Raster exists: {}".format(target_f_path))
                else:
                    deleteFileIfExists(target_f_path, True)
                    arcpy.AddMessage("\tSaving derived raster to {}".format(target_f_path))

                    # Compression isn't being applied properly so results are uncompressed
                    rasterObject = arcpy.Raster(f_path)
                    if isInt:
                        # Rescale intensity to [minZ, maxZ], clipping at
                        # mean + 2 standard deviations.
                        mean = rasterObject.mean
                        stdDev = rasterObject.standardDeviation
                        maximumPixel = mean + (stdDev * 2)
                        linearTransform = arcpy.sa.TfLinear(maximum=maximumPixel, upperThreshold=maximumPixel)
                        outRescale = arcpy.sa.RescaleByFunction(rasterObject, linearTransform, minZ, maxZ)
                        outRescale.save(target_f_path)
                        del outRescale, rasterObject
                    else:
                        # Keep only cells inside the valid Z range; all
                        # other cells become NoData.
                        outSetNull = arcpy.sa.Con(((rasterObject >= (float(minZ))) & (rasterObject <= (float(maxZ)))), f_path)  # @UndefinedVariable
                        outSetNull.save(target_f_path)
                        del outSetNull, rasterObject

                    if spatial_ref is not None:
                        arcpy.AddMessage("Applying projection to raster '{}' {}".format(target_f_path, spatial_ref))
                        if str(spatial_ref).lower().endswith(".prj"):
                            arcpy.AddMessage("loading spatial reference from prj file '{}'".format(spatial_ref))
                            spatial_ref = arcpy.SpatialReference(spatial_ref)
                            arcpy.AddMessage("loaded spatial reference from prj file '{}'".format(spatial_ref))
                        # 3/22/18 - Handle UTF-8 Encoding - 'u\u2013' From MI Delta
                        try:
                            arcpy.AddMessage("Applying projection '{}'".format( spatial_ref))
                            arcpy.AddMessage("Applying string projection '{}'".format( spatial_ref.exportToString()))
                            arcpy.AddMessage("Applying encoded projection '{}'".format( spatial_ref.exportToString().encode('utf-8')))
                        except Exception as e:
                            arcpy.AddMessage('Error: {}'.format(e))

                        arcpy.DefineProjection_management(in_dataset=target_f_path, coor_system=spatial_ref)

                    # Set the no data default value on the input raster
                    arcpy.SetRasterProperties_management(in_raster=target_f_path, data_type="ELEVATION", nodata="1 {}".format(nodata))
                    arcpy.CalculateStatistics_management(in_raster_dataset=target_f_path, x_skip_factor="1", y_skip_factor="1", ignore_values="", skip_existing="OVERWRITE", area_of_interest="Feature Set")

                    # Make sure we make a new published copy of this.
                    if arcpy.Exists(publish_f_path):
                        arcpy.Delete_management(publish_f_path)

                    a = doTime(a, "\tCopied '{}' to '{}' with valid values between {} and {}".format(f_path, target_f_path, minZ, maxZ))


                if arcpy.Exists(publish_f_path):
                    arcpy.AddMessage("\tPublish Raster exists: {}".format(publish_f_path))
                else:
                    arcpy.AddMessage("\tCopy and clip published raster from {} to {}".format(target_f_path, publish1_f_path))
                    a = datetime.now()

                    # Copy the derived raster to a temporary TIFF, then clip
                    # that temp copy to the boundary to produce the final
                    # published raster.
                    deleteFileIfExists(publish1_f_path, True)
                    deleteFileIfExists(publish_f_path, True)
                    arcpy.CopyRaster_management(in_raster=target_f_path, out_rasterdataset=publish1_f_path, config_keyword="", background_value="", nodata_value=nodata, onebit_to_eightbit="NONE", colormap_to_RGB="NONE", pixel_type="32_BIT_FLOAT", scale_pixel_value="NONE", RGB_to_Colormap="NONE", format="TIFF", transform="NONE")

                    arcpy.AddMessage("\tCliping temp raster {} to {}".format(publish1_f_path, publish_f_path))
                    arcpy.Clip_management(in_raster=publish1_f_path, out_raster=publish_f_path, in_template_dataset=bound_path, nodata_value=nodata, clipping_geometry="ClippingGeometry", maintain_clipping_extent="NO_MAINTAIN_EXTENT")

                    deleteFileIfExists(publish1_f_path, True)

                    arcpy.SetRasterProperties_management(in_raster=publish_f_path, data_type="ELEVATION", nodata="1 {}".format(nodata))
                    arcpy.CalculateStatistics_management(in_raster_dataset=publish_f_path, x_skip_factor="1", y_skip_factor="1", ignore_values="", skip_existing="OVERWRITE", area_of_interest="Feature Set")

                    a = doTime(a, "\tCopied '{}' to '{}'".format(target_f_path, publish_f_path))
示例#14
0
def main():
    print "load modules..."
    import arcpy
    arcpy.env.overwriteOutput = True

    import numpy as np
    template1 = r'C:\GeoNet\Correlation\Parameter1\r{0}_NPP.TIF'
    template2 = r'C:\GeoNet\Correlation\Parameter2\r{0}_WUE.TIF'
    nodata = -3.4028235e+38
    out_ras = r'C:\GeoNet\Correlation\correlation.TIF'

    print "create nested numpy array list..."
    lst_np_ras = []
    for i in range(1, 14):
        ras_path1 = template1.format("%03d" % (i, ))
        print " - ", ras_path1
        ras_np1 = arcpy.RasterToNumPyArray(ras_path1)
        ras_path2 = template2.format("%03d" % (i, ))
        print " - ", ras_path2
        ras_np2 = arcpy.RasterToNumPyArray(ras_path2)
        lst_np_ras.append([ras_np1, ras_np2])

    print "read props numpy raster..."
    ras_np = lst_np_ras[0][0]  # take first numpy array from list
    rows = ras_np.shape[0]
    cols = ras_np.shape[1]
    print " - rows:", rows
    print " - cols:", cols

    print "create output numpy array..."
    ras_path = template1.format("%03d" % (1, ))
    raster = arcpy.Raster(ras_path)
    ras_np_res = np.ndarray((rows, cols))
    print " - out rows:", ras_np_res.shape[0]
    print " - out cols:", ras_np_res.shape[1]

    print "loop through pixels..."
    pix_cnt = 0
    for row in range(rows):
        for col in range(cols):
            pix_cnt += 1
            if pix_cnt % 5000 == 0:
                print " - row:", row, "  col:", col, "  pixel:", pix_cnt
            lst_vals1 = []
            lst_vals2 = []
            try:
                for lst_pars in lst_np_ras:
                    lst_vals1.append(lst_pars[0][row, col])
                    lst_vals2.append(lst_pars[1][row, col])
                lst_vals1 = ReplaceNoData(lst_vals1, nodata)
                lst_vals2 = ReplaceNoData(lst_vals2, nodata)
                # perform calculation on list
                correlation = CalculateCorrelation(lst_vals1, lst_vals2,
                                                   nodata)
                ras_np_res[row, col] = correlation
            except Exception as e:
                print "ERR:", e
                print " - row:", row, "  col:", col, "  pixel:", pix_cnt
                print " - lst_vals1:", lst_vals1
                print " - lst_vals2:", lst_vals2

    pnt = arcpy.Point(raster.extent.XMin,
                      raster.extent.YMin)  #  - raster.meanCellHeight
    xcellsize = raster.meanCellWidth
    ycellsize = raster.meanCellHeight

    print "Write output raster..."
    print " - ", out_ras
    ras_res = arcpy.NumPyArrayToRaster(ras_np_res,
                                       lower_left_corner=pnt,
                                       x_cell_size=xcellsize,
                                       y_cell_size=ycellsize,
                                       value_to_nodata=nodata)
    ras_res.save(out_ras)
    arcpy.DefineProjection_management(
        in_dataset=out_ras,
        coor_system=
        "GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]]"
    )
示例#15
0
        (0.2 * in_band_nir - in_band_red)) / (0.2 * in_band_nir + in_band_red)
    return WDRVI_result


# Check
def WVVI(in_band_nir, in_band_red, out_WVVI=None):
    """Return the WVVI vegetation index: (2*NIR - RED) / (2*NIR + RED).

    Works on scalars or raster/array operands that support arithmetic.
    ``out_WVVI`` is accepted for interface parity but is not used here.
    """
    doubled_nir = in_band_nir * 2
    return (doubled_nir - in_band_red) / (doubled_nir + in_band_red)


if __name__ == "__main__":

    arcpy.env.overwriteOutput = True
    # Landsat 8 scene bands: B5 = NIR, B4 = red, B2 = blue, B3 = green.
    band_nir = arcpy.Raster(
        r"D:\Python\Test\Baza\ImaginiT\LC08_L1TP_183029_20190821_20190903_01_T1\LC08_L1TP_183029_20190821_20190903_01_T1_B5.TIF"
    )
    band_red = arcpy.Raster(
        r"D:\Python\Test\Baza\ImaginiT\LC08_L1TP_183029_20190821_20190903_01_T1\LC08_L1TP_183029_20190821_20190903_01_T1_B4.TIF"
    )
    band_blue = arcpy.Raster(
        r"D:\Python\Test\Baza\ImaginiT\LC08_L1TP_183029_20190821_20190903_01_T1\LC08_L1TP_183029_20190821_20190903_01_T1_B2.TIF"
    )
    band_green = arcpy.Raster(
        r"D:\Python\Test\Baza\ImaginiT\LC08_L1TP_183029_20190821_20190903_01_T1\LC08_L1TP_183029_20190821_20190903_01_T1_B3.TIF"
    )

    # Output folder for the index rasters.
    output = r"D:\Python\Test\Baza\ImaginiT\Rezultate_functii"
    # Median of the wavelength
    # 29 indices should be produced
    # EVI, GARI, LAI, MNLI, NLI come out without values
示例#16
0
def zonalStatistics(in_shp_path, in_dem):
    """Attach DEM zonal statistics and depth/volume fields to a polygon shapefile.

    Rasterizes ``in_shp_path`` (one zone per FID) at the DEM's cell size,
    runs ZonalStatisticsAsTable against ``in_dem``, joins the statistics
    back onto the shapefile, then derives ``volume`` and ``mean_depth``
    fields (each polygon's MAX elevation is treated as the spill level).
    Temporary dbf/tif scratch folders are removed on completion.

    Args:
        in_shp_path: Full path to the polygon shapefile. Assumed to carry
            an ``AREA`` field (used by the volume expression) and an
            ``ID`` field -- TODO confirm against caller.
        in_dem: Path to the DEM raster used for the statistics.

    Returns:
        True on completion.
    """
    in_shp_dir, in_shp_name = os.path.split(in_shp_path)
    arcpy.env.workspace = in_shp_dir
    arcpy.env.overwriteOutput = True
    arcpy.env.snapRaster = in_dem

    # Scratch folders; tolerate leftovers from a previously aborted run
    # (plain os.mkdir would raise OSError if the folder already exists).
    dbf_dir = os.path.join(in_shp_dir, "dbf")
    tif_dir = os.path.join(in_shp_dir, "tif")
    for scratch_dir in (dbf_dir, tif_dir):
        if not os.path.exists(scratch_dir):
            os.mkdir(scratch_dir)

    dem = arcpy.Raster(in_dem)
    cell_size = dem.meanCellHeight

    # Process only if the named shapefile actually exists in its folder.
    if in_shp_name.endswith(".shp") and in_shp_name in os.listdir(in_shp_dir):
        shp_path = os.path.join(in_shp_dir, in_shp_name)
        # Swap only the file extension; str.replace("shp", "dbf") would
        # also corrupt names that contain "shp" elsewhere.
        base_name = os.path.splitext(in_shp_name)[0]
        dbf_path = os.path.join(dbf_dir, "zonal_" + base_name + ".dbf")
        tif_path = os.path.join(tif_dir, base_name + ".tif")
        arcpy.PolygonToRaster_conversion(
            shp_path,
            value_field="FID",
            out_rasterdataset=tif_path,
            cell_assignment="CELL_CENTER",
            priority_field="NONE",
            cellsize=cell_size,
        )
        arcpy.sa.ZonalStatisticsAsTable(tif_path, "Value", in_dem,
                                        dbf_path, "DATA", "ALL")
        arcpy.JoinField_management(
            shp_path,
            in_field="FID",
            join_table=dbf_path,
            join_field="Value",
            fields="COUNT;MIN;MAX;RANGE;MEAN;STD;SUM",
        )
        arcpy.AddField_management(shp_path,
                                  field_name="volume",
                                  field_type="FLOAT")
        arcpy.AddField_management(shp_path,
                                  field_name="mean_depth",
                                  field_type="FLOAT")
        # Volume below the spill level: (cells * max - sum) * cell area.
        arcpy.CalculateField_management(
            shp_path,
            field="volume",
            expression="( !COUNT! * !MAX! - !SUM!) * ( !AREA! / !COUNT! )",
            expression_type="PYTHON_9.3",
        )
        arcpy.CalculateField_management(
            shp_path,
            field="mean_depth",
            expression="!volume! / !AREA!",
            expression_type="PYTHON_9.3",
        )
        # 1-based ID derived from the 0-based FID.
        arcpy.CalculateField_management(
            shp_path,
            field="ID",
            expression="!FID! + 1",
            expression_type="PYTHON_9.3",
        )
        arcpy.DeleteField_management(shp_path, drop_field="GRIDCODE")

    # Remove the scratch folders and their contents.
    if os.path.exists(dbf_dir):
        shutil.rmtree(dbf_dir)
    if os.path.exists(tif_dir):
        shutil.rmtree(tif_dir)
    return True
示例#17
0
def main():
    """
    Calculate water depth from a flood extent polygon (e.g. from remote sensing analysis) based on
    an underlying DEM (or HAND).

    Program procedure:
        1. Flood extent polygon to polyline
        2. Polyline to Raster - DEM extent and resolution (Env)
        3. Con - DEM values to Raster
        4. Euclidean Allocation - assign boundary cell elevation to nearest domain cells
        5. Calculate water depth by deducting DEM by Euclidean Allocation
        6. Run low-pass Filter

    Created by Sagy Cohen and Austin Raney, Surface Dynamics Modeling Lab, University of Alabama
    email: [email protected]; [email protected]
    web: http://sdml.ua.edu
    """
    ####################
    #   INPUT/OUTPUT   #
    ####################

    # Tool parameters: 0 = workspace, 1 = inundation polygon, 2 = DEM,
    # 3 = output water-depth name, 4 = optional pre-clipped DEM,
    # 5 = optional cost raster.
    # Set the Workspace and Scratch space to the first param provided
    ws = arcpy.env.workspace = script.GetParameterAsText(0)
    arcpy.env.scratchWorkspace = script.GetParameterAsText(0)

    # Set Input DEM, Inundation Polygon.
    dem_name = os.path.basename(script.GetParameterAsText(2))

    # Create raster object from DEM
    dem = arcpy.Raster(dem_name)

    inund_polygon = os.path.basename(script.GetParameterAsText(1))

    # If this is not provided, the clip_dem will be calculated with the Clip_management function
    clip_dem = script.GetParameterAsText(4)

    # Check if optional Cost Raster was provided
    if script.GetParameterAsText(5):
        cost_raster = script.GetParameterAsText(5)
    else:
        # Default cost surface: cells at or below elevation 0 cost 1000,
        # all other cells cost 1, steering the allocation through land.
        cost_raster = (((dem <= 0)*999)+1)
        # Raw string: '\c' happens not to be an escape sequence, but the
        # unescaped backslash is fragile and warns on newer Pythons.
        cost_raster.save(ws + r'\cost_raster')

    ####################
    # END INPUT/OUTPUT #
    ####################


    # Set overriding within the Workspace to True
    arcpy.env.overwriteOutput = True

    # Cell size here would be the x or y distance resolution from the raster
    # i.e., a 30 meter dem would have a cell size of 30
    cell_size = dem.meanCellHeight

    # Generate raster line. See CalculateBoundary docstring for more info
    boundary = CalculateBoundary(dem, inund_polygon, cell_size, ws)

    # Proper string representation of dem extent to be accepted by Clip_management method
    extent = '{} {} {} {}'.format(dem.extent.XMin, dem.extent.YMin, dem.extent.XMax, dem.extent.YMax)

    # If optional clip dem not provided then create a clipping dem cut out from the flood inundation polygon
    if not clip_dem:
        clip_dem = 'Clip_DEM'
        arcpy.Clip_management(dem_name, extent, clip_dem, inund_polygon,
                              cell_size, 'ClippingGeometry', 'NO_MAINTAIN_EXTENT')

    arcpy.env.extent = arcpy.Extent(dem.extent.XMin, dem.extent.YMin, dem.extent.XMax, dem.extent.YMax)

    print('Convert boundary to integer')

    # Must convert boundary, i.e., raster line to int for cost allocation function. It only takes int rasters.
    # The multiplier preserves four decimal places of elevation through the
    # integer round-trip.
    MULTIPLIER = 10000
    boundary_int = Int(boundary * MULTIPLIER)
    boundary_int.save('boundary_int')

    print('Running cost allocation')
    cost_alloc = CostAllocation(boundary_int, cost_raster, '#', '#', 'Value')
    cost_alloc.save('CostAlloc_int')

    # Divide the result from the cost allocation function using the same constant used to create the integer
    # representation of the boundary
    cost_alloc = Float(cost_alloc) / MULTIPLIER
    cost_alloc.save('cost_alloc')

    print('Calculating estimated water depth')

    # Raster calculator cost_alloc - clip_dem
    water_depth = Minus(cost_alloc, clip_dem)

    # Remove estimated water depths below 0 and change them to 0
    water_depth = Con(water_depth <= 0, 0, water_depth)
    water_depth.save(os.path.basename(script.GetParameterAsText(3)))

    print('Calculating low pass filter')

    water_depth_filtered = Filter(water_depth, 'LOW', 'DATA')

    # Keep filtered depths only where the clipped DEM has positive values.
    waterDepthFilter2 = Con(clip_dem, water_depth_filtered, '#', 'VALUE > 0')
    waterDepthFilter2.save(os.path.basename(script.GetParameterAsText(3))+'_filtered')
    print('Done')
示例#18
0
Args:
    inRas (raster) -- Input filled digital elevation model.
    Flow_Direction (raster) -- Output flow direction grid.
    
Returns:
    FlowDirGrid (raster) -- Flow direction grid.
"""

import csv
import random
import matplotlib
import arcpy
import numpy

# Get user to select input raster surface.
inRas = arcpy.Raster(arcpy.GetParameterAsText(0))

# Lower-left coordinate of the input raster (needed to georeference any
# array written back out as a raster).
lowerLeft = arcpy.Point(inRas.extent.XMin, inRas.extent.YMin)

# Find cell size of input raster.
cellSize = inRas.meanCellWidth

# Convert raster to numpy array.
land = arcpy.RasterToNumPyArray(inRas)

# Get user to name output flow direction raster.
Flow_Direction = arcpy.GetParameterAsText(1)

# Interior working dimensions, excluding a one-cell border.
# NOTE(review): both come from len(land) (the ROW count), so the width is
# wrong for a non-square raster -- confirm whether this is intentional.
w = len(land) - 2  # Set width to len(land) - 2.
h = len(land) - 2  # Set height to len(land) - 2.
示例#19
0
print "Loading arcpy"
# import library packages
import arcpy, os, sys, numpy
from bmpFlowModFast import *

print "Checking inputs"
# get parameters (input and output datasets, filenames, etc)
# Flow_Direction = Raster(arcpy.getParameterAsText(0))
# BMP_Points     = Raster(arcpy.getParameterAsText(1))
# Output         = arcpy.getParameterAsText(2)
# Hard-coded inputs (the commented lines above show the original
# tool-parameter versions): a filled flow-direction grid and a BMP point
# raster from the same file geodatabase.
Flow_Direction = arcpy.Raster(
    "C:/Users/csomerlot/Desktop/Lab05Data/Lab05Geodatabase.gdb/FlowDir_Fill1")
BMP_Points = arcpy.Raster(
    "C:/Users/csomerlot/Desktop/Lab05Data/Lab05Geodatabase.gdb/BMP_Points_PointToRaster"
)
Output = "C:/Users/csomerlot/Desktop/Lab05Data/Lab05Geodatabase.gdb/output"

# set environment

# create variables to hold input and output datasets
flowdirData = arcpy.RasterToNumPyArray(Flow_Direction)
lowerLeft = arcpy.Point(Flow_Direction.extent.XMin, Flow_Direction.extent.YMin)
cellSize = Flow_Direction.meanCellWidth
height = len(flowdirData)
width = len(flowdirData[0])

bmppointData = arcpy.RasterToNumPyArray(BMP_Points)
# Both rasters must share the same extent so their numpy arrays align
# cell-for-cell.
if BMP_Points.extent.XMin != Flow_Direction.extent.XMin:
    print BMP_Points.extent.XMin, Flow_Direction.extent.XMin
    raise Exception("Xmin of extents not the same")
if BMP_Points.extent.YMin != Flow_Direction.extent.YMin:
示例#20
0
# CROP: a raster mask is provided in this repo. It is a GRID file named mask_data
# MODIS DATA: https://lpdaac.usgs.gov/dataset_discovery/modis/modis_products_table/mod11a2

import arcpy
import os

# Check out any necessary licenses
arcpy.CheckOutExtension("spatial")

# 'parent_dir' is the pathway for the folder containing raw HDF_EOS (MOD11A2 product) files
parent_dir = "your_pathway\\to_the\\folder\\with\\modis\\data "  # Note the "\\".

#'path2mask' is the pathway for the mask data (shapefile or raster)
# I provided a mask for Kaparao National Park in MODIS_CLIMATOLOGIES repo.
path2mask = "your_path_to_mask\\mask_data"  # Note the "\\"
mask_data = arcpy.Raster(path2mask)

arcpy.env.workspace = parent_dir
os.chdir(parent_dir)

# Output folder paths must be defined BEFORE the mkdir calls: the original
# code called os.mkdir(hdf2raster) before assigning hdf2raster, which
# raised a NameError on every run.
hdf2raster = parent_dir + '\\hdf2raster'
prodata = parent_dir + '\\pro_data'
final = parent_dir + '\\final'
os.mkdir(hdf2raster)
os.mkdir(prodata)
os.mkdir(final)

lista = arcpy.ListDatasets()
hdf_files = [x for x in lista if '.hdf' in x]

# Presumably the MOD11A2 sub-dataset name fragments to extract
# (e.g. day/night bands) -- verify against the processing loop below.
fn_vars = ['a14', 'da', 'ni']
示例#21
0
# Substitute placeholders for missing inputs: the sentinel "99999" marks a
# layer the user did not supply. Its weight VAR_n is zeroed and a constant
# raster (via create_raster) stands in for the layer so the weighted sum
# below stays well-formed.
if IN_JY == "99999":
    VAR_1 = "0"
    IN_JY = create_raster(5, IN_CLIP, VAR_FBL, "tihuan.tif")
if IN_GW == "99999":
    VAR_2 = "0"
    IN_GW = create_raster(5, IN_CLIP, VAR_FBL, "tihuan1.tif")
if IN_DW == "99999":
    VAR_3 = "0"
    IN_DW = create_raster(5, IN_CLIP, VAR_FBL, "tihuan2.tif")
if IN_DF == "99999":
    VAR_4 = "0"
    IN_DF = create_raster(5, IN_CLIP, VAR_FBL, "tihuan3.tif")
if IN_GH == "99999":
    VAR_5 = "0"
    IN_GH = create_raster(5, IN_CLIP, VAR_FBL, "tihuan4.tif")
# Weighted overlay: sum of the five factor rasters scaled by their weights.
f = float(VAR_1) * arcpy.Raster(IN_JY) + float(VAR_2) * arcpy.Raster(
    IN_GW) + float(VAR_3) * arcpy.Raster(IN_DW) + float(VAR_4) * arcpy.Raster(
        IN_DF) + float(VAR_5) * arcpy.Raster(IN_GH)
f.save("ff.tif")

#################################
# Reclassify the weighted sum into five classes using the user-supplied
# break values VAR_ZH1..VAR_ZH6, then persist the result with an
# attribute table.
outReclass2 = arcpy.sa.Reclassify(
    "ff.tif", "Value",
    arcpy.sa.RemapRange([[float(VAR_ZH1), float(VAR_ZH2), 1],
                         [float(VAR_ZH2), float(VAR_ZH3), 2],
                         [float(VAR_ZH3), float(VAR_ZH4), 3],
                         [float(VAR_ZH4), float(VAR_ZH5), 4],
                         [float(VAR_ZH5), float(VAR_ZH6), 5]]), 'NODATA')
outReclass2.save(OUTPUTPATH + OUTPUTNAME + ".tif")
arcpy.BuildRasterAttributeTable_management(OUTPUTPATH + OUTPUTNAME + ".tif",
                                           "Overwrite")
示例#22
0
#-------------------------------------------------------------------------------
# DESCRIBE RASTER
#-------------------------------------------------------------------------------
import arcpy

# Set the current workspace
arcpy.env.workspace = "C:\Users\laboratorios\ELVLC\DATA\sagunto"

desc = arcpy.Describe(arcpy.env.workspace)

# Get and print a list of GRIDs from the workspace
rasters = arcpy.ListRasters("*", "ALL")

for raster in rasters:
    i = arcpy.Raster(raster)
    print(raster)
    print "Name: ", i.name
    if hasattr(desc, "name"):
        print "Name:        " + desc.name
    if hasattr(desc, "dataType"):
        print "DataType:    " + desc.dataType
    if hasattr(desc, "catalogPath"):
        print "CatalogPath: " + desc.catalogPath

#-------------------------------------------------------------------------------
# Extract features to a new feature class based on a Location and an attribute query
#-------------------------------------------------------------------------------
# Import arcpy and set path to data
import arcpy
arcpy.env.workspace = r"C:\Users\laboratorios\ELVLC\DATA\castilla-leon"
示例#23
0
def create_input_nc(start_date,
                    years,
                    cellsize,
                    basin_shp,
                    p_path,
                    et_path,
                    eto_path,
                    lai_path,
                    swi_path,
                    swio_path,
                    swix_path,
                    qratio_path,
                    rainydays_path,
                    thetasat_ras,
                    rootdepth_ras,
                    input_nc,
                    epsg=4326,
                    bbox=None):
    """
    Creates the input netcdf file required to run waterpix
    """
    # Script parameters
    print "Variable\tRaster"
    arcpy.CheckOutExtension('spatial')
    if bbox:
        latlim = [bbox[1], bbox[3]]
        lonlim = [bbox[0], bbox[2]]
    else:
        shp_extent = arcpy.Describe(basin_shp).extent
        latlim = [shp_extent.YMin, shp_extent.YMax]
        lonlim = [shp_extent.XMin, shp_extent.XMax]
    arcpy.env.extent = arcpy.Extent(lonlim[0], latlim[0], lonlim[1], latlim[1])
    arcpy.env.cellSize = cellsize

    time_range = pd.date_range(start_date, periods=12 * years, freq='MS')
    time_ls = [d.strftime('%Y%m') for d in time_range]
    time_dt = [pd.to_datetime(i, format='%Y%m') for i in time_ls]

    time_n = len(time_ls)

    years_ls = set()
    years_ls = [
        i.year for i in time_dt
        if i.year not in years_ls and not years_ls.add(i.year)
    ]

    time_indeces = {}

    for j, item in enumerate(years_ls):
        temp_ls = [
            int(i.strftime('%Y%m')) for i in pd.date_range(
                str(item) + '0101', str(item) + '1231', freq='MS')
        ]
        time_indeces[item] = [time_ls.index(str(i)) for i in temp_ls]

    for key in time_indeces.keys():
        if time_indeces[key] != range(time_indeces[key][0],
                                      time_indeces[key][-1] + 1):
            raise Exception('The year {0} in the netcdf file is incomplete'
                            ' or the dates are non-consecutive')

    all_paths = {
        'p': p_path,
        'et': et_path,
        'eto': eto_path,
        'lai': lai_path,
        'swi': swi_path,
        'swio': swio_path,
        'swix': swix_path,
        'qratio': qratio_path,
        'rainydays': rainydays_path
    }

    # Latitude and longitude
    lat_ls = pd.np.arange(latlim[0] + 0.5 * cellsize,
                          latlim[1] + 0.5 * cellsize, cellsize)
    lat_ls = lat_ls[::-1]  # ArcGIS numpy
    lon_ls = pd.np.arange(lonlim[0] + 0.5 * cellsize,
                          lonlim[1] + 0.5 * cellsize, cellsize)
    lat_n = len(lat_ls)
    lon_n = len(lon_ls)
    spa_ref = arcpy.SpatialReference(epsg)
    projection = spa_ref.exportToString()
    ll_corner = arcpy.Point(lonlim[0], latlim[0])

    # Snap raster
    temp_ras = arcpy.NumPyArrayToRaster(pd.np.zeros((lat_n, lon_n)), ll_corner,
                                        cellsize, cellsize)
    scratch_ras = arcpy.CreateScratchName('ras_', '.tif', '',
                                          arcpy.env.scratchFolder)
    temp_ras.save(scratch_ras)
    arcpy.management.DefineProjection(scratch_ras, spa_ref)
    arcpy.env.snapRaster = scratch_ras

    # Basin mask
    basin_ras = arcpy.CreateScratchName('bas_', '.tif', '',
                                        arcpy.env.scratchFolder)
    buff_shp = arcpy.CreateScratchName('bas_', '.shp', '',
                                       arcpy.env.scratchFolder)
    arcpy.analysis.Buffer(basin_shp, buff_shp, 2 * cellsize, 'FULL', 'ROUND',
                          'NONE', '#', 'PLANAR')
    arcpy.conversion.FeatureToRaster(buff_shp, "FID", basin_ras, cellsize)

    # Create NetCDF file
    nc_file = netCDF4.Dataset(input_nc, 'w', format="NETCDF4")
    nc_file.set_fill_on()

    # Create dimensions
    lat_dim = nc_file.createDimension('latitude', lat_n)
    lon_dim = nc_file.createDimension('longitude', lon_n)
    month_dim = nc_file.createDimension('time_yyyymm', time_n)
    year_dim = nc_file.createDimension('time_yyyy', len(years_ls))

    # Create NetCDF variables
    crs_var = nc_file.createVariable('crs', 'i', (), fill_value=-9999)
    crs_var.standard_name = 'crs'
    crs_var.grid_mapping_name = 'latitude_longitude'
    crs_var.crs_wkt = projection

    lat_var = nc_file.createVariable('latitude',
                                     'f8', ('latitude'),
                                     fill_value=-9999)
    lat_var.units = 'degrees_north'
    lat_var.standard_name = 'latitude'

    lon_var = nc_file.createVariable('longitude',
                                     'f8', ('longitude'),
                                     fill_value=-9999)
    lon_var.units = 'degrees_east'
    lon_var.standard_name = 'longitude'

    month_var = nc_file.createVariable('time_yyyymm',
                                       'l', ('time_yyyymm'),
                                       fill_value=-9999)
    month_var.standard_name = 'time'
    month_var.format = 'YYYYMM'

    year_var = nc_file.createVariable('time_yyyy',
                                      'l', ('time_yyyy'),
                                      fill_value=-9999)
    year_var.standard_name = 'time'
    year_var.format = 'YYYY'

    # Variables
    p_var = nc_file.createVariable('Precipitation_M',
                                   'f8',
                                   ('time_yyyymm', 'latitude', 'longitude'),
                                   fill_value=-9999)
    p_var.long_name = 'Precipitation'
    p_var.units = 'mm/month'

    py_var = nc_file.createVariable('Precipitation_Y',
                                    'f8',
                                    ('time_yyyy', 'latitude', 'longitude'),
                                    fill_value=-9999)
    py_var.long_name = 'Precipitation'
    py_var.units = 'mm/year'

    et_var = nc_file.createVariable('Evapotranspiration_M',
                                    'f8',
                                    ('time_yyyymm', 'latitude', 'longitude'),
                                    fill_value=-9999)
    et_var.long_name = 'Evapotranspiration'
    et_var.units = 'mm/month'

    ety_var = nc_file.createVariable('Evapotranspiration_Y',
                                     'f8',
                                     ('time_yyyy', 'latitude', 'longitude'),
                                     fill_value=-9999)
    ety_var.long_name = 'Evapotranspiration'
    ety_var.units = 'mm/year'

    eto_var = nc_file.createVariable('ReferenceET_M',
                                     'f8',
                                     ('time_yyyymm', 'latitude', 'longitude'),
                                     fill_value=-9999)
    eto_var.long_name = 'Reference Evapotranspiration'
    eto_var.units = 'mm/month'

    lai_var = nc_file.createVariable('LeafAreaIndex_M',
                                     'f8',
                                     ('time_yyyymm', 'latitude', 'longitude'),
                                     fill_value=-9999)
    lai_var.long_name = 'Leaf Area Index'
    lai_var.units = 'm2/m2'

    swi_var = nc_file.createVariable('SWI_M',
                                     'f8',
                                     ('time_yyyymm', 'latitude', 'longitude'),
                                     fill_value=-9999)
    swi_var.long_name = 'Soil Water Index - Monthly mean'
    swi_var.units = '%'

    swio_var = nc_file.createVariable('SWIo_M',
                                      'f8',
                                      ('time_yyyymm', 'latitude', 'longitude'),
                                      fill_value=-9999)
    swio_var.long_name = 'Soil water index - First day of the month'
    swio_var.units = '%'

    swix_var = nc_file.createVariable('SWIx_M',
                                      'f8',
                                      ('time_yyyymm', 'latitude', 'longitude'),
                                      fill_value=-9999)
    swix_var.long_name = 'Soil water index - Last day of the month'
    swix_var.units = '%'

    qratio_var = nc_file.createVariable('RunoffRatio_Y',
                                        'f8',
                                        ('time_yyyy', 'latitude', 'longitude'),
                                        fill_value=-9999)
    qratio_var.long_name = 'Runoff ratio'
    qratio_var.units = '-'

    rainydays_var = nc_file.createVariable(
        'RainyDays_M',
        'f8', ('time_yyyymm', 'latitude', 'longitude'),
        fill_value=-9999)
    rainydays_var.long_name = 'Number of rainy days per month'
    rainydays_var.units = 'No. rainy days/month'

    thetasat_var = nc_file.createVariable('SaturatedWaterContent',
                                          'f8', ('latitude', 'longitude'),
                                          fill_value=-9999)
    thetasat_var.long_name = 'Saturated water content (top soil)'
    thetasat_var.units = 'cm3/cm3'

    rootdepth_var = nc_file.createVariable('RootDepth',
                                           'f8', ('latitude', 'longitude'),
                                           fill_value=-9999)
    rootdepth_var.long_name = 'Root depth'
    rootdepth_var.units = 'mm'

    basinmask_var = nc_file.createVariable('BasinBuffer',
                                           'l', ('latitude', 'longitude'),
                                           fill_value=0)
    basinmask_var.long_name = 'Basin buffer'

    # Load data
    lat_var[:] = lat_ls
    lon_var[:] = lon_ls
    month_var[:] = time_ls
    year_var[:] = years_ls

    # Static variables
    temp_dir = tempfile.mkdtemp()

    # Theta sat
    print "{0}\t{1}".format('thetasat', thetasat_ras)
    thetasat_temp = os.path.join(temp_dir, 'thetasat.tif')
    arcpy.management.Resample(thetasat_ras, thetasat_temp, cellsize)
    inp_ras = arcpy.Raster(thetasat_temp)

    array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999)
    thetasat_var[:, :] = array[:, :]

    # Root depth
    print "{0}\t{1}".format('rootdepth', rootdepth_ras)
    rootdepth_temp = os.path.join(temp_dir, 'rootdepth.tif')
    arcpy.management.Resample(rootdepth_ras, rootdepth_temp, cellsize)
    inp_ras = arcpy.Raster(rootdepth_temp)

    array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999)
    rootdepth_var[:, :] = array[:, :]

    # Basin mask
    inp_ras = arcpy.sa.Con(arcpy.sa.IsNull(arcpy.Raster(basin_ras)), 0, 1)

    array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n, -9999)
    basinmask_var[:, :] = array[:, :]

    # Dynamic variables
    for var in ['p', 'et', 'eto', 'lai', 'swi', 'swio', 'swix', 'rainydays']:
        # Make temp directory
        temp_dir2 = tempfile.mkdtemp()
        for yyyymm in time_ls:
            yyyy = yyyymm[:4]
            mm = yyyymm[-2:]
            ras = all_paths[var].format(yyyy=yyyy, mm=mm)
            print "{0}\t{1}".format(var, ras)
            arcpy.management.Resample(
                ras, os.path.join(temp_dir2, os.path.basename(ras)), cellsize,
                'NEAREST')
            inp_ras = arcpy.Raster(
                os.path.join(temp_dir2, os.path.basename(ras)))
            array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n,
                                             pd.np.nan)
            t_index = time_ls.index(yyyymm)
            exec('{0}_var[t_index, :, :] = array[:, :]'.format(var))
    # Runoff ratio
    temp_dir2 = tempfile.mkdtemp()
    for yyyy in years_ls:
        ras = all_paths['qratio'].format(yyyy=yyyy)
        print "{0}\t{1}".format('qratio', ras)
        arcpy.management.Resample(
            ras, os.path.join(temp_dir2, os.path.basename(ras)), cellsize,
            'NEAREST')
        inp_ras = arcpy.Raster(os.path.join(temp_dir2, os.path.basename(ras)))
        array = arcpy.RasterToNumPyArray(inp_ras, ll_corner, lon_n, lat_n,
                                         pd.np.nan)
        y_index = years_ls.index(yyyy)
        qratio_var[y_index, :, :] = array[:, :]
    # Calculate yearly rasters
    for yyyy in years_ls:
        yyyyi = years_ls.index(yyyy)
        ti1 = time_indeces[yyyy][0]
        ti2 = time_indeces[yyyy][-1] + 1

        py_var[yyyyi, :, :] = pd.np.sum(p_var[ti1:ti2, :, :], axis=0)
        ety_var[yyyyi, :, :] = pd.np.sum(et_var[ti1:ti2, :, :], axis=0)

    # Close file
    arcpy.env.extent = None
    arcpy.env.snapRaster = None
    arcpy.env.cellSize = 'MAXOF'
    nc_file.close()

    # Return
    return input_nc
示例#24
0
def MFRC(option,pic,pathIMERG, path3Mean, pathOutput, pathGWR,fileName):
    """Multifractal random-cascade downscaling of an IMERG precipitation raster.

    Analyses the multifractal scaling of the input field (moments, tau(q),
    f(alpha), D(q)), estimates the cascade parameters beta/sigma, runs m
    random cascade realisations of n levels each, restores heterogeneity,
    and writes a 1 km resampled result raster.

    :param option: 'MF', 'MFn' or 'MFn-GWR' -- which cascade variant to use
    :param pic: 'Y' or 'N' -- whether to produce the diagnostic plots
    :param pathIMERG: path of the coarse IMERG raster
    :param path3Mean: path of the climatological-mean raster
    :param pathOutput: output folder (expects 'Rasters/' and 'Analysis/' subdirs)
    :param pathGWR: path of the GWR raster (used only for 'MFn-GWR')
    :param fileName: base name used for outputs
    """

    timeOfprocess = time.clock()

    # option='MF' / 'MFn' / 'MFn-GWR' -- selects the multifractal variant
    # pic='Y' / 'N' -- controls whether diagnostic figures are produced

    dimension = 2
    branch = 2
    m = 50  # number of random cascade realisations
    n = 4   # cascade levels per realisation
    # frequently tuned parameters

    # NOTE(review): the next three self-assignments are no-ops -- confirm
    # they are not leftovers from renamed parameters.
    fileName = fileName
    # path = pathIMERG
    pathIMERG= pathIMERG
    path3Mean= path3Mean

    pathResultRaster=pathOutput+'Rasters/'
    pathAnalysis=pathOutput+'Analysis/'
    # placeholder for GWR-data handling
    if (option=='MFn-GWR'):
        pathGWR=''

    q = np.linspace(-10, 10, 21)  # range of moment orders q


    # read input rasters
    rasIMERG = arcpy.Raster(pathIMERG)
    ras3Mean = arcpy.Raster(path3Mean)

    # Set environmental variables for output
    arcpy.env.overwriteOutput = True
    arcpy.env.outputCoordinateSystem = rasIMERG
    outResolution = 1000
    # Output cell size: 1/16 of the input (2**n with n=4 cascade levels).
    cellWidth = rasIMERG.meanCellWidth/16.0
    cellHeight = rasIMERG.meanCellHeight/16.0
    lowerLeft = arcpy.Point(rasIMERG.extent.XMin, rasIMERG.extent.YMin)

    # Convert Raster to numpy array (negative values clamped to zero)
    arrIMERG = arcpy.RasterToNumPyArray(rasIMERG)
    arrIMERG = np.maximum(arrIMERG, 0)
    arr3Mean = arcpy.RasterToNumPyArray(ras3Mean)
    arr3Mean = np.maximum(arr3Mean, 0)

    if (option=='MFn-GWR'):
        rasGWR = arcpy.Raster(pathGWR)
        arrGWR = arcpy.RasterToNumPyArray(rasGWR)
        arrGWR = np.maximum(arrGWR, 0)

    # print arrIMERG,'\n\n\n',arr3Mean  #arrGWR

    # homogenisation: scale the mean field so it averages to 1
    row = arrIMERG.shape[0]
    col = arrIMERG.shape[1]

    # NOTE(review): np.sum(arr3Mean) is recomputed while arr3Mean is being
    # mutated in place, so later cells are divided by an already-modified
    # total -- confirm this is intended.
    for i in range(0, row):
        for j in range(0, col):
            arr3Mean[i,j]=arr3Mean[i,j]/np.sum(arr3Mean)*(row*col)

# print row,col
    # field: IMERG normalised by the (homogenised) mean field
    field = np.empty((row, col), np.float)
    for i in range(0, row):
        for j in range(0, col):
            if (arr3Mean[i, j] > 0):
                field[i, j] = arrIMERG[i, j] / arr3Mean[i, j]
            else:
                field[i, j] = 0
    print("field:", np.mean(arrIMERG))
    print("field:", np.mean(field))

    # field=arrIMERG

    # normalisation (disabled)
    # sumField = np.sum(field)
    # if (sumField > 0):
    #     field = field / sumField
    # print field

    fieldSize = field.shape[0]
    # layers+1 is the number of upward aggregation levels analysed; scales is
    # the cell-size multiplier (relative to the initial 0.1 degree) per level
    layers = np.arange(0, int(math.log(fieldSize, branch)))
    scales = branch ** layers
    # print("layers:", layers, "scales:", scales)

    # statistical moments
    # d1: first derivative of tau(q) (for singularity alpha and parameter beta);
    # d2: second derivative of tau(q) (for parameter sigma)
    # d3: used to compute D(q)
    moment = np.zeros((len(layers), len(q)))
    d1 = np.zeros((len(layers), len(q)))
    d2 = np.zeros((len(layers), len(q)))
    d3 = np.zeros((len(layers), len(q)))
    for i in range(0, len(layers)):
        # NOTE(review): field.shape is a tuple, and tuple // int raises
        # TypeError -- the trailing comment suggests the intended expression
        # was [x // scales[i] for x in field.shape]; confirm with coarseGraining.
        distrib = coarseGraining(field, field.shape // scales[i])  ##[x // scales[i] for x infield.shape]
        positiveDist = distrib[distrib > 0]
        for j in range(0, len(q)):
            qmass = positiveDist ** q[j]
            moment[i, j] = np.sum(qmass)
            # print"distrib",distrib
            # print "q[j]",q[j]
            # print "moment[i,j]",moment[i,j]
            d1[i, j] = np.sum(qmass * np.log(positiveDist)) / np.sum(qmass)
            d2[i, j] = np.sum(qmass * np.log(positiveDist) ** 2) / np.sum(qmass) - d1[i, j] ** 2
            if (q[j] != 1):
                d3[i, j] = np.log(np.sum(qmass)) / (q[j] - 1)
            else:
                d3[i, j] = d1[i, j]

    # scale ratios lambda for each analysed layer
    lambd = 1.0/ branch ** (4-layers)
    print lambd
    logMoment = np.log(moment) / np.log(2)
    logLambd = np.log(lambd) / np.log(2)
    # verify power-law scaling; tau(q) is the slope of the log-log fit
    k = np.zeros(len(q))  # slopes of the log-log fits
    b = np.zeros(len(q))  # intercepts
    # R_Squared = np.zeros(len(q))  # R-squared values

    # x = np.log(lambd) / np.log(2)  # base-2 log of lambda, OLS X input
    # X = sm.add_constant(X.T)  # add intercept term
    for i in range(0, len(q)):
        # y = np.log(moment[:, i]) / np.log(2)  # base-2 log of moment
        # results = sm.OLS(Y, X).fit()  # linear fit of log-moment vs log-lambda
        line = np.polyfit(logLambd, logMoment[:, i], 1)
        k[i] = line[0]  # slope
        b[i] = line[1]  # intercept
        # R_Squared[i] = results.rsquared
        # print("k:", k[i], "b:", b[i], "Rsquared:", R_Squared[i])

    # In the multifractal literature tau(q) is the slope above; this differs
    # from the tau(q) used in cascade downscaling.
    taoq = -k
    # verify multifractality via the spectrum f(alpha)
    alpha = np.zeros(len(q))
    f_alpha = np.zeros(len(q))
    for j in range(0, len(q)):
        line = np.polyfit(np.log(lambd), d1[:, j], 1)
        alpha[j] = line[0]
        f_alpha[j] = alpha[j] * q[j] -k[j]

    # verify multifractality via the generalised dimensions D(q)
    D_q = np.zeros((len(q)))
    for j in range(0, len(q)):
        line = np.polyfit(np.log(lambd), d3[:, j], 1)
        D_q[j] = line[0]

    # Second derivative -> beta and sigma; the value at q=1 is used.
    # In the cascade-downscaling papers the minus sign is moved onto tau(q),
    # so tau(q) there picks up an extra sign from lambda.
    # scales = scales[::-1]
    # print (scales)

    d1taoq  = -alpha
    d2taoq = np.zeros(len(q))
    for j in range(0, len(q)):
        d2[:, j] = d2[::, j]        # minus sign deliberately not applied here
        line = np.polyfit(np.log(lambd), d2[:, j], 1)
        d2taoq[j] = -line[0]
    # print 'taoq的一阶导:',d1taoq,'\n','taoq的二阶导:',d2taoq

    # e, v: mean/variance of log2 of the data (identity values for 'MF')
    e = 0
    v = 1
    for i in range(0, len(q)):
        # print q[i]
        if (q[i] >= 1):
            if (option == "MF"):
                # X ~ standard normal
                sigma = math.sqrt(d2taoq[i] / (dimension * np.log(branch**2)))
                beta = 1 + d1taoq[i] / dimension - sigma ** 2 * np.log(branch ** 2) * (q[i] - 0.5)
            else:
                # X ~ non-standard normal
                # requires the mean and variance of the original data
                data = np.array(arrIMERG).reshape(row * col, 1)
                d = []
                for j in range(0, row * col):
                    if data[j, 0] != 0:
                        d.append(data[j, 0])
                e = np.sum(np.log(d) / np.log(2)) / len(d)
                v = np.sum((np.log(d) / np.log(2) - e) ** 2) / len(d)
                # print("e:", e, "v:", v)
                sigma = math.sqrt(d2taoq[i] / (v*dimension * np.log(branch ** 2)))
                beta = 1 + d1taoq[i] / dimension - sigma ** 2 * np.log(branch ** 2) * (q[i] - 0.5)
            break
    print (beta, sigma, e, v)
    exportPlot(logLambd,logMoment,k,b,q,alpha,f_alpha,taoq,D_q,pathAnalysis,fileName,pic,beta,sigma,e,v)

    timeOfprocess=time.clock() - timeOfprocess
    print "多重分形特征分析及参数计算耗时=", timeOfprocess, "秒"

    # perform the actual downscaling
    fieldAll = []
    cascade = []
    for i in range(0, n + 1):
        cascade.append(np.zeros((branch ** i, branch ** i), np.double))
    # print ("cascade:",cascade)

    gamma = beta - sigma * e - v * sigma ** 2 * np.log(branch**2) / 2

    # repeat m times
    # NOTE(review): the loop variable k shadows the slope array k computed
    # above (the array is no longer used past this point).
    for k in range(0, m):
        for i in range(row):
            for j in range(col):
                # Seed the cascade with one coarse cell, then split each cell
                # into branch x branch children with random weights w.
                cascade[0][0][0] = field[i, j]
                for x in range(1, n + 1):
                    for y in range(0, branch ** (x - 1)):
                        for z in range(0, branch ** (x - 1)):
                            w=np.zeros((4,1),float)
                            if (random.uniform(0,1)<=(branch**2)**(-beta)):
                                w[0]=(branch**2) ** (gamma + sigma * random.gauss(e, v))
                            else:
                                w[0]=0
                            if (random.uniform(0,1)<=(branch**2)**(-beta)):
                                w[1]=(branch**2) ** (gamma + sigma * random.gauss(e, v))
                            else:
                                w[1]=0
                            if (random.uniform(0,1)<=(branch**2)**(-beta)):
                                w[2]=(branch**2) ** (gamma + sigma * random.gauss(e, v))
                            else:
                                w[2]=0
                            # fourth weight closes the mass balance to 4
                            w[3]=4-w[0]-w[1]-w[2]
                            # print w

                            cascade[x][y * 2][z * 2] = cascade[x - 1][y][z] * w[0]
                            cascade[x][y * 2][z * 2 + 1] = cascade[x - 1][y][z] * w[1]
                            cascade[x][y * 2 + 1][z * 2] = cascade[x - 1][y][z] * w[2]
                            cascade[x][y * 2 + 1][z * 2 + 1] = cascade[x - 1][y][z] * w[3]
                # simfield[:,.  :] = coarseGraining(cascade[n], (32, 32))
                # print("simfield:",simfield)
                # Tile the finest cascade level into a full-resolution row/grid.
                if (j == 0):
                    fieldRow = cascade[n].copy()
                else:
                    fieldRow = np.hstack((fieldRow, cascade[n].copy()))
            if (i == 0):
                fieldMatrix = fieldRow.copy()
            else:
                fieldMatrix = np.vstack((fieldMatrix, fieldRow.copy()))
        # np.savetxt('F:/Test/OUT/'+"fieldAll"+str(k)+""+".txt",fieldMatrix,fmt = '%.8f')
        fieldAll.append(fieldMatrix)

    # average over the m realisations
    fieldAve = np.zeros((row * 2 ** n, col * 2 ** n), np.double)
    for k in range(0, m):
        fieldAve = fieldAve + fieldAll[k]
    fieldAve = fieldAve / m
    # np.savetxt('F:/Test/OUT/'+"fieldAve"+"ave"+".txt", fieldAve,fmt = '%.8f')

    # restore heterogeneity: re-impose the mean field (or GWR field) and
    # conserve each coarse cell's total precipitation
    fieldHeter = np.zeros((row * 2 ** n, col * 2 ** n), np.double)
    for i in range(0, row):
        for j in range(0, col):
            if option == 'MFn-GWR':
                temp = arrGWR[i * 2 ** n:(i + 1) * 2 ** n, j * 2 ** n:(j + 1) * 2 ** n] \
                       * fieldAve[ i * 2 ** n:(i + 1) * 2 ** n, j * 2 ** n:(j + 1) * 2 ** n]
            else:
                temp = arr3Mean[i, j] * fieldAve[i * 2 ** n:(i + 1) * 2 ** n, j * 2 ** n:(j + 1) * 2 ** n]
            if (np.sum(temp) != 0):
                temp = temp / np.sum(temp)
            else:
                temp = 0
            fieldHeter[i * 2 ** n:(i + 1) * 2 ** n, j * 2 ** n:(j + 1) * 2 ** n] = temp * arrIMERG[i, j] * (
                    2 ** n * 2 ** n)
    # result = np.array(result).reshape(row * 2 ** n * col * 2 ** n, 1)
    # np.savetxt(path + 'out/' + "r" + option + ".txt", result, fmt='%.8f')

    # write the result raster
    tempRaster = arcpy.NumPyArrayToRaster(fieldHeter,lowerLeft,cellWidth,cellHeight)
    onekmRaster=pathResultRaster + 'r'+fileName+".tif"
    arcpy.Resample_management(tempRaster, onekmRaster, outResolution, "BILINEAR")   # resample to 1 km

    timeOfprocess=time.clock() - timeOfprocess
    print "降尺度计算耗时=", timeOfprocess, "秒"
示例#25
0
# Script parameters; DTM and river_network are read from earlier
# GetParameterAsText calls above this chunk.
pour_point = arcpy.GetParameterAsText(5)
arcpy.AddMessage("-------------------------")
arcpy.AddMessage("Loaded datasets")
arcpy.AddMessage("-------------------------")

# Calculate the cell size of the DTM
DTM_cell_size = arcpy.GetRasterProperties_management(DTM, "CELLSIZEX")
#Get the elevation standard deviation value from geoprocessing result object
# NOTE(review): the comment above looks copied from elsewhere -- the value
# fetched here is CELLSIZEX, not an elevation standard deviation.
cell_size = DTM_cell_size.getOutput(0)
arcpy.AddMessage("Calculated operating cell size")
arcpy.AddMessage("-------------------------")

# Clip the river network to the extent to the same as the DTM
# First calculate the extent of the DTM
pnt_array = arcpy.Array()
extent = arcpy.Raster(DTM).extent
pnt_array.add(extent.lowerLeft)
pnt_array.add(extent.lowerRight)
pnt_array.add(extent.upperRight)
pnt_array.add(extent.upperLeft)

# Turn the extents of the raster into a polygon to clip the river_network
clipping_polygon = arcpy.Polygon(pnt_array)

# Clip the river network to the same as the digital terrain model
river_network = arcpy.Clip_analysis(river_network, clipping_polygon,
                                    "out_river")

# Add fields to the river network polyline
arcpy.AddField_management(river_network, "river_depth", "SHORT")
arcpy.AddField_management(river_network, "river_cell_size", "SHORT")
示例#26
0
    def calculate_det(self, path2h_ras, path2dem_ras):
        """Create a detrended DEM (depth-to-Thalweg raster).

        Reads a flow-depth raster and a DEM, builds a Thalweg elevation
        surface from the wetted cells (via point conversion and a
        nearest-neighbour spatial join), and saves the difference
        DEM - Thalweg elevation to <out_dir>\\dem_detrend.tif.

        :param path2h_ras: STR full path of a flow-depth raster
        :param path2dem_ras: STR full path of a DEM raster
        :return: True on failure, False on success (error-code convention)
        """
        try:
            arcpy.CheckOutExtension('Spatial')  # check out license
            arcpy.gp.overwriteOutput = True
            arcpy.env.workspace = self.cache

            try:
                self.logger.info(" * Reading input rasters ...")
                ras_h = arcpy.Raster(path2h_ras)
                ras_dem = arcpy.Raster(path2dem_ras)
                arcpy.env.extent = ras_dem.extent
                self.logger.info(" * OK")
            # BUGFIX: narrowed from a bare except so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            except Exception:
                self.logger.info(
                    "ERROR: Could not find / access input rasters.")
                return True

            try:
                self.logger.info(" * Making Thalweg elevation raster ...")
                # DEM elevation where water is present (wetted cells only)
                ras_hmin = Con((ras_h > 0.0), Float(ras_dem))
                ras_h_with_null = Con((Float(ras_h) > 0.0), Float(ras_h))
                # DEM restricted to dry cells (where no depth is defined)
                temp_dem = Con(((ras_dem > 0) & IsNull(ras_h_with_null)),
                               Float(ras_dem))
                ras_dem = temp_dem
                self.logger.info(" * OK")
            except Exception:
                self.logger.info("ERROR: Input Rasters contain invalid data.")
                return True

            try:
                self.logger.info(" * Converting Thalweg raster to points ...")
                pts_hmin = arcpy.RasterToPoint_conversion(
                    ras_hmin, self.cache + "pts_hmin.shp")
                self.logger.info(" * OK")
                self.logger.info(" * Converting DEM raster to points ...")
                pts_dem = arcpy.RasterToPoint_conversion(
                    ras_dem, self.cache + "pts_dem.shp")
                self.logger.info(" * OK")
            except arcpy.ExecuteError:
                self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
                return True
            except Exception as e:
                self.logger.info(arcpy.GetMessages(2))
                return True

            base_join = self.out_dir + '\\spat_join_det.shp'
            try:
                self.logger.info(" * Spatial join analysis ...")
                # Attach the closest Thalweg point to every DEM point.
                base_join = arcpy.SpatialJoin_analysis(
                    target_features=pts_dem,
                    join_features=pts_hmin,
                    out_feature_class=base_join,
                    join_operation='JOIN_ONE_TO_MANY',
                    join_type='KEEP_ALL',
                    match_option='CLOSEST')
                self.logger.info(" * OK")
            except arcpy.ExecuteError:
                self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
                return True
            except Exception as e:
                self.logger.info(arcpy.GetMessages(2))
                return True

            try:
                self.logger.info(
                    " * Converting relative Thalweg dem to raster ...")
                arcpy.PointToRaster_conversion(in_features=base_join,
                                               value_field="grid_cod_1",
                                               out_rasterdataset=self.cache +
                                               "ras_hmin_dem",
                                               cell_assignment="MEAN",
                                               cellsize=5)
                self.logger.info(" * OK")
            except arcpy.ExecuteError:
                self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
                return True
            except Exception as e:
                self.logger.info(arcpy.GetMessages(2))
                return True

            try:
                self.logger.info(
                    " * Calculating depth to Thalweg raster (detrended DEM) ..."
                )
                ras_hmin_dem = arcpy.Raster(self.cache + "ras_hmin_dem")
                ras_det = Con((ras_hmin_dem > 0), (ras_dem - ras_hmin_dem))
                self.logger.info(" * OK")
            except arcpy.ExecuteError:
                self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
                return True
            except Exception as e:
                self.logger.info(arcpy.GetMessages(2))
                return True

            try:
                self.logger.info(
                    " * Saving depth to Thalweg raster (detrended DEM) to:\n%s"
                    % self.out_dir + "\\dem_detrend.tif")
                ras_det.save(self.out_dir + "\\dem_detrend.tif")
                self.logger.info(" * OK")
            except arcpy.ExecuteError:
                self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
                return True
            except Exception as e:
                self.logger.info(arcpy.GetMessages(2))
                return True

            arcpy.CheckInExtension('Spatial')
        except arcpy.ExecuteError:
            self.logger.info("ExecuteERROR: (arcpy).")
            self.logger.info(arcpy.GetMessages(2))
            return True
        except Exception as e:
            self.logger.info("ExceptionERROR: (arcpy).")
            self.logger.info(e.args[0])
            return True
        # Best-effort cleanup of intermediate objects; failures are ignored.
        try:
            del ras_det, ras_hmin, base_join, pts_dem, pts_hmin, ras_hmin_dem
        except Exception:
            pass

        try:
            self.clean_up()
        except Exception:
            pass
        # return False if successful
        return False
示例#27
0
                nb += 1


if __name__ == "__main__":

    # Retrieve the parameters from the GUI
    SlblFile = arcpy.GetParameterAsText(0)
    DepthFile = arcpy.GetParameterAsText(1)
    ErosionFile = arcpy.GetParameterAsText(2)
    outFolder = arcpy.GetParameterAsText(3)
    cellFactor = float(arcpy.GetParameterAsText(4))

    str_message = 'Saving DAN3D input files...'
    arcpy.AddMessage(str_message)

    # Load the inputs as rasters; the erosion raster is optional.
    SlblFile = arcpy.Raster(SlblFile)
    DepthFile = arcpy.Raster(DepthFile)
    if len(ErosionFile) > 0:
        Erosion = True
        ErosionFile = arcpy.Raster(ErosionFile)
    else:
        Erosion = False

    # Optionally coarsen the rasters by cellFactor before export.
    IncellSize = SlblFile.meanCellWidth
    if cellFactor != 1:
        cellSize = IncellSize * cellFactor
        str_message = "Inputs rasters are being aggreagated. Output cell size will be {}m".format(
            cellSize)
        arcpy.AddMessage(str_message)
        # NOTE(review): only SlblFile is aggregated here -- presumably
        # DepthFile/ErosionFile are handled the same way; confirm.
        SlblFile = arcpy.sa.Aggregate(SlblFile, cellFactor, "MEDIAN", "EXPAND",
                                      "DATA")
示例#28
0
def make_sub_condition(source_condition, target_condition, base_geo_name):
    """Crop all GeoTIFFs of a source condition to a boundary raster's footprint.

    Every *.tif in source_condition is clipped to the non-NoData cells of the
    boundary raster and saved under the same name in target_condition.

    :param source_condition: STR of a source condition ("D:\\...\\RiverArchitect\\01_Conditions\\2017\\")
    :param target_condition: STR of a target condition ("D:\\...\\RiverArchitect\\01_Conditions\\2017_confinement\\")
    :param base_geo_name: STR of a full path and name of a raster that is used for limiting raster extents ("D:\\...\\Rasters\\projectarea")
    :return: True on setup failure, False after the copy loop completes
             (error-code convention; per-raster failures are logged and skipped)
    """

    logger = logging.getLogger('logfile.log')
    logger.info(" * Setting arcpy environment ...")

    try:
        arcpy.CheckOutExtension('Spatial')
        arcpy.env.overwriteOutput = True
        arcpy.env.workspace = config.dir2ra
    # BUGFIX: narrowed from bare except so SystemExit / KeyboardInterrupt
    # are no longer swallowed (applies to all handlers below too).
    except Exception:
        logger.info(
            "ERROR: Could not set arcpy environment (permissions and licenses?)."
        )
        return True

    try:
        logger.info(" * Verifying provided boundary files ...")
        base_geo_name = chk_geo_type(base_geo_name, target_condition)
    except Exception:
        logger.info("ERROR: The provided boundary file (%s) is invalid." %
                    base_geo_name)
        return True

    try:
        logger.info(" * Loading boundary Raster ...")
        # Strip a possible '.aux' suffix to get the raster's own name.
        base_ras = arcpy.Raster(base_geo_name.split('.aux')[0])
        arcpy.env.extent = base_ras.extent
    except Exception:
        logger.info("ERROR: Could not load boundary Raster: " +
                    str(base_geo_name).split('.aux')[0])
        return True

    logger.info(" * Looking for source GeoTIFFs in %s ..." % source_condition)
    geotiff_names = [
        i for i in os.listdir(source_condition) if i.endswith('.tif')
    ]

    for gtiff_full_name in geotiff_names:
        logger.info("  > Loading " + str(gtiff_full_name) + " ... ")
        try:
            gtiff_full = arcpy.Raster(source_condition + gtiff_full_name)
        except Exception:
            logger.info("ERROR: Could not read source file (%s)." %
                        str(source_condition + gtiff_full_name))
            continue
        logger.info("     * Cropping raster ... ")
        try:
            # Keep cells only where the boundary raster has data.
            gtiff_cropped = Con(~IsNull(base_ras), Float(gtiff_full))
        except Exception:
            logger.info("ERROR: Could not crop " + str(gtiff_full_name))
            continue
        logger.info("     * Saving cropped raster as " + target_condition +
                    gtiff_full_name + " ... ")
        try:
            gtiff_cropped.save(target_condition + gtiff_full_name)
            logger.info("     * OK")
        except Exception:
            logger.info("ERROR: Could not save " +
                        str(target_condition + gtiff_full_name))

    arcpy.CheckInExtension('Spatial')
    logger.info(" * Spatial Sub-Condition creation complete..")

    # return Error=False after successful execution
    return False
示例#29
0
# The only rule is, you should run your script once, and generate the NVDI for ALL MONTHS provided. 
# As part of your code submission, you should also provide a visualization document (e.g. an ArcMap layout), 
# showing the patterns for an area of RI that you find interesting.

import arcpy
from arcpy.sa import Float

# Compute NDVI = (NIR - Red) / (NIR + Red) from Landsat 8 bands
# (B5 = near-infrared, B4 = red) for the February 2015 scene.
arcpy.env.workspace = r"C:\Users\17065\OneDrive\Desktop\NRS 528\06_Cheating\ALL_FILES_lfs\Step_3_data_lfs\201502"

# To allow overwriting outputs change overwriteOutput option to True.
arcpy.env.overwriteOutput = True

# Check out any necessary licenses.
arcpy.CheckOutExtension("spatial")
arcpy.CheckOutExtension("ImageAnalyst")

nir_band = arcpy.Raster("LC08_L1TP_012031_20150201_20170301_01_T1_B5.tif")  # B5 = NIR
red_band = arcpy.Raster("LC08_L1TP_012031_20150201_20170301_01_T1_B4.tif")  # B4 = red

# Output path for the February NDVI raster.
ndvi_output = "C:\\Users\\17065\\OneDrive\\Desktop\\NRS 528\\NDVI\\ndvi02"

# BUG FIX: the Landsat band rasters are integer-typed, so
# (B5 - B4) / (B5 + B4) performed integer map-algebra division and
# collapsed NDVI to 0/±1.  Cast to Float first so the ratio is
# computed in floating point (NDVI range is -1.0 .. 1.0).
ndvi = (Float(nir_band) - Float(red_band)) / (Float(nir_band) + Float(red_band))
ndvi.save(ndvi_output)


# Switch the workspace to the April 2015 scene for the next NDVI computation.
arcpy.env.workspace = r"C:\Users\17065\OneDrive\Desktop\NRS 528\06_Cheating\ALL_FILES_lfs\Step_3_data_lfs\201504"

# To allow overwriting outputs change overwriteOutput option to True.
arcpy.env.overwriteOutput = True
示例#30
0
    def save(self):
        """Write the in-memory block dictionary out as a single raster at
        ``self.fileout``.

        Each block of the raster grid is processed by a separately spawned
        ``RasterIO.py`` child process (one per (x, y) block), which writes a
        temporary raster into ``arcpy.env.scratchWorkspace``.  The temporary
        block rasters are then mosaicked (or copied, if there is only one)
        into ``self.fileout``, the temporaries are deleted, and ``self.raster``
        / ``self.dtype`` are refreshed from the saved result.

        NOTE(review): the ``str + bytes`` concatenations below
        (``sys.path[0].encode('utf-8') + ...``) only work on Python 2; on
        Python 3 they would raise TypeError — confirm the target interpreter.
        """
        # Set environmental variables for output
        # NOTE(review): cellSize is assigned the catalogPath (a dataset path);
        # arcpy accepts a dataset path here and derives the cell size from it.
        arcpy.env.outputCoordinateSystem = self.rasterlike.catalogPath
        arcpy.env.cellSize = self.rasterlike.catalogPath

        # Loop over data blocks
        filelist = []
        blockno = 0
        # Serialize the block dictionary once to a randomly named pickle file
        # in the scratch workspace; every child process reads this same file.
        randomname = binascii.hexlify(os.urandom(6))
        picklefilename = arcpy.env.scratchWorkspace + "\\" + randomname + ".pkl"
        pickledict = open(picklefilename, 'wb')
        pickle.dump(self.dict, pickledict)
        pickledict.close()

        # Walk the raster extent in blocksize steps, spawning one worker
        # process per block.
        for x in range(0, self.rasterlike.width, self.blocksize):
            for y in range(0, self.rasterlike.height, self.blocksize):

                # Save on disk with a random name
                randomname = binascii.hexlify(os.urandom(6))
                filetemp = arcpy.env.scratchWorkspace + "\\" + randomname
                # Build the command line for RasterIO.py; the -raster argument
                # is only passed when an existing source raster is attached.
                if self.raster is not None:
                    startcmd = "python.exe \""+sys.path[0].encode('utf-8')+"\\RasterIO.py\"" \
                               + " -rasterlike \"" + self.rasterlike.catalogPath.encode('utf-8') + "\""\
                               + " -x " + str(x) \
                               + " -y " + str(y) \
                               + " -blocksize " + str(self.blocksize) \
                               + " -blockname \"" + str(filetemp) + "\""\
                               + " -nodata " + str(self.nodata) \
                               + " -dtype " + str(self.dtype.__name__) \
                               + " -pickledict \"" + picklefilename.encode('utf-8') + "\""\
                               + " -raster \"" + self.raster.catalogPath.encode('utf-8') + "\""
                else:
                    startcmd = "python.exe \""+sys.path[0].encode('utf-8')+"\\RasterIO.py\"" \
                               + " -rasterlike \"" + self.rasterlike.catalogPath.encode('utf-8') + "\""\
                               + " -x " + str(x) \
                               + " -y " + str(y) \
                               + " -blocksize " + str(self.blocksize) \
                               + " -blockname \"" + str(filetemp) + "\""\
                               + " -nodata " + str(self.nodata) \
                               + " -dtype " + str(self.dtype.__name__) \
                               + " -pickledict \"" + picklefilename.encode('utf-8') + "\""

                # Run the worker synchronously with a hidden console window
                # (Windows-only STARTUPINFO flags), discarding its output.
                FNULL = open(os.devnull, 'w')
                si = subprocess.STARTUPINFO()
                si.dwFlags = subprocess.STARTF_USESHOWWINDOW
                si.wShowWindow = subprocess.SW_HIDE
                subprocess.check_call(startcmd, startupinfo=si, stdout=FNULL, stderr=subprocess.STDOUT)

                # Maintain a list of saved temporary files
                filelist.append(filetemp)
                blockno += 1

        # All workers have finished; the shared pickle is no longer needed.
        os.remove(picklefilename)

        # Replace any pre-existing output dataset.
        if arcpy.Exists(self.fileout):
            arcpy.Delete_management(self.fileout)

        if len(filelist) > 1:
            # Mosaic temporary files
            # Map arcpy Raster.pixelType codes to the pixel-type strings
            # expected by MosaicToNewRaster_management.
            rastertype = {"U1": "1_BIT",
                          "U2": "2_BIT",
                          "U4": "4_BIT",
                          "U8": "8_BIT_UNSIGNED",
                          "S8": "8_BIT_SIGNED",
                          "U16": "16_BIT_UNSIGNED",
                          "S16": "16_BIT_SIGNED",
                          "U32": "32_BIT_UNSIGNED",
                          "S32": "32_BIT_SIGNED",
                          "F32": "32_BIT_FLOAT",
                          "F64": "64_BIT_FLOAT"}

            # Pixel type is taken from the first block raster; all blocks are
            # assumed to share it since they came from the same worker script.
            arcpy.MosaicToNewRaster_management(';'.join(filelist), os.path.dirname(os.path.abspath(self.fileout)),
                                               os.path.basename(self.fileout),
                                               pixel_type=rastertype[arcpy.Raster(filelist[0]).pixelType],
                                               number_of_bands=1)


        else:
            # Single block: no mosaic needed, just copy it to the output.
            arcpy.Copy_management(filelist[0], self.fileout)

        # Remove temporary files
        for fileitem in filelist:
            if arcpy.Exists(fileitem):
                arcpy.Delete_management(fileitem)


        # Reload the saved raster and derive the Python dtype from its
        # pixel type (1-bit -> bool, float types -> float, everything else
        # -> int).
        self.raster = arcpy.Raster(self.fileout)
        if self.raster.pixelType == "U1":
            self.dtype = bool
        elif self.raster.pixelType == "F32" or self.raster.pixelType == "F64":
            self.dtype = float
        else:
            self.dtype = int

        # Stamp the output with the template raster's spatial reference.
        arcpy.DefineProjection_management(self.raster, self.rasterlike.spatialReference)