#Path to output csv
#NOTE(review): hard-coded absolute Windows path; the handle is opened here
#without a context manager and is presumably written to and closed further
#down the script — confirm.
out = open(
    "C:/Users/hgleason/Dropbox/Git_Snow_MODIS/Data/MODIS/Derived/SD_Random_Sampls.csv",
    "w")

#For each annual snow metric GeoTiff
#NOTE(review): `files` is defined elsewhere in the file — assumed to be a
#list of GeoTiff paths.
for tile in files:

    #Open the raster dataset with GDAL
    ds = gdal.Open(tile)

    #Get the affine geotransform (origin, pixel size, rotation terms)
    geoTrans = ds.GetGeoTransform()

    #Convert the GeoTiff to a 3D NumPy array, reading bands from index 1
    img_cube = grast.bands_to_np_cube(ds, 1)

    #Get the current year from image path string
    #NOTE(review): relies on a fixed-length absolute path (characters 90-94
    #holding the 4-digit year); this breaks if the directory layout changes —
    #consider parsing the filename instead.
    year = str(tile)[90:94]

    print(year, str(tile))

    #For each uniqe EE sample
    for samp in range(len(samples)):

        #Get sample details
        #NOTE(review): assumed column meanings — col 2 = unique id,
        #col 0 = latitude, col 1 = longitude; verify against the code
        #that builds `samples`. The loop body appears to continue beyond
        #this chunk (uid/lat/lon are not yet used here).
        uid = samples[samp, 2]

        lat = samples[samp, 0]

        lon = samples[samp, 1]
def process_tile(tile_pth, tile_dir, tile_name, strt_idx, frac, ndsi_strt,
                 ndsi_end, min_obs, EPSG):
    """Compute annual snow-duration metrics for one NDSI GeoTiff tile.

    Reads the per-pixel NDSI time series from `tile_pth`, derives snow
    metrics with `ndsi_lowess_interp`, and writes a 4-band GeoTiff named
    "<tile_name>_out.tif" into `tile_dir`.

    Parameters
    ----------
    tile_pth : str
        Full path to the input multi-band NDSI GeoTiff.
    tile_dir : str
        Directory in which the output GeoTiff is written.
    tile_name : str
        File name of the tile including a 4-character extension
        (e.g. ".tif"), which is stripped to build the output name.
    strt_idx : int
        First band index forwarded to grast.bands_to_np_cube.
    frac, ndsi_strt, ndsi_end, min_obs
        Parameters forwarded unchanged to ndsi_lowess_interp.
    EPSG : int
        EPSG code of the output spatial reference.

    Output bands: 0 = snow start, 1 = snow end, 2 = snow duration,
    3 = observation count. Pixels with no valid NDSI are set to -9.
    """
    #Update user
    print("Starting: Process " + tile_pth + " @ " + str(time.time()))

    #Open the GeoTiff with gdal
    data = gdal.Open(tile_pth)

    #Get the geo-transformation
    GeoTran = data.GetGeoTransform()

    #Get origin of raster
    RastOrigin = (GeoTran[0], GeoTran[3])

    #Get pixel dimensions
    PixWidth = GeoTran[1]
    PixHeight = GeoTran[5]

    #Strip the 4-character extension (e.g. ".tif") from the tile name
    tile_name = tile_name[:-4]

    #Establish output path from directory and current tile
    RastOut = tile_dir + "/" + tile_name + "_out.tif"

    #Commit the GeoTiff to memory as a 3D NumPy array (band, row, col)
    #NOTE(review): a path string is passed here, while another call site in
    #this file passes an open dataset — confirm bands_to_np_cube accepts both.
    time_cube = grast.bands_to_np_cube(tile_pth, strt_idx)

    #Get data dimensions
    rows = len(time_cube[0])
    cols = len(time_cube[0][0])

    #Initialize the 4-band output pre-filled with the missing-data value, so
    #pixels with no valid NDSI need no explicit else-branch. (Replaces an
    #np.arange(...).reshape(...) initializer that wastefully filled the array
    #with meaningless sequential values; np.full(-9) keeps the same platform
    #integer dtype.)
    snow_stack = np.full((4, rows, cols), -9)

    #Progress keeper: report roughly every 10% of rows
    step = rows * 0.1
    thsh = step

    #For each row
    for row in range(rows):
        #Update user on progress
        if row >= thsh:
            print("Finished " + str(round((float(row) / float(rows)) * 100)) +
                  "% of " + tile_pth)
            thsh = thsh + step
        #For each col
        for col in range(cols):

            #Get the NDSI time series from the 'time_cube' @ row and col
            ndsi_vect = time_cube[:, row, col]

            #Skip pixels where every observation is missing (-9) or zero;
            #those keep the -9 fill value assigned above
            if np.any(ndsi_vect > 0):

                #Get snow duration metrics
                metrics = ndsi_lowess_interp(ndsi_vect, frac, ndsi_strt,
                                             ndsi_end, min_obs)

                #Snow start / end / duration / observation-count bands
                snow_stack[0, row, col] = metrics[0]
                snow_stack[1, row, col] = metrics[1]
                snow_stack[2, row, col] = metrics[2]
                snow_stack[3, row, col] = metrics[3]

    #Convert the 3D NumPy annual stack array to multi-band GeoTiff
    grast.array2gtiff(RastOut, RastOrigin, PixWidth, PixHeight, snow_stack,
                      EPSG)

    #Update user
    print("Finished: Process " + tile_pth + " @ " + str(time.time()))