Example #1
def RetrieveData(args):
    """
    This function retrieves JRC data for a given date from the
    http://storage.googleapis.com/global-surface-water/downloads/ server.

    Keyword arguments:
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [output_folder, Names_to_download, lonlim, latlim] = args

    # Collect the data from the JRC webpage; this returns the data and the lat and lon extent of those tiles
    try:
        Collect_data(Names_to_download, output_folder)
    except:
        print("Was not able to download the file")

    # Clip the data to the user's extent
    if len(Names_to_download) == 1:
        trash_folder = os.path.join(output_folder, "Trash")
        data_in = os.path.join(trash_folder, Names_to_download[0])
        data_end, geo_end = RC.clip_data(data_in, latlim, lonlim)
    else:

        data_end = np.zeros([int((latlim[1] - latlim[0])/0.00025), int((lonlim[1] - lonlim[0])/0.00025)])

        for Name_to_merge in Names_to_download:
            trash_folder = os.path.join(output_folder, "Trash")
            data_in = os.path.join(trash_folder, Name_to_merge)
            geo_out, proj, size_X, size_Y = RC.Open_array_info(data_in)
            lat_min_merge = np.maximum(latlim[0], geo_out[3] + size_Y * geo_out[5])
            lat_max_merge = np.minimum(latlim[1], geo_out[3])
            lon_min_merge = np.maximum(lonlim[0], geo_out[0])
            lon_max_merge = np.minimum(lonlim[1], geo_out[0] + size_X * geo_out[1])

            lonmerge = [lon_min_merge, lon_max_merge]
            latmerge = [lat_min_merge, lat_max_merge]
            data_one, geo_one = RC.clip_data(data_in, latmerge, lonmerge)

            Ystart = int((geo_one[3] - latlim[1])/geo_one[5])
            Yend = int(Ystart + np.shape(data_one)[0])
            Xstart = int((geo_one[0] - lonlim[0])/geo_one[1])
            Xend = int(Xstart + np.shape(data_one)[1])

            data_end[Ystart:Yend, Xstart:Xend] = data_one

        geo_end = tuple([lonlim[0], geo_one[1], 0, latlim[1], 0, geo_one[5]])

    # Save results as Gtiff
    fileName_out = os.path.join(output_folder, 'JRC_Occurrence_percent.tif')
    DC.Save_as_tiff(name=fileName_out, data=data_end, geo=geo_end, projection='WGS84')
    shutil.rmtree(trash_folder)
    return True
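
A minimal call sketch for the function above, assuming RetrieveData and its Collect_data, RC and DC helpers are importable from the same collection module; the folder path, tile names and extent below are placeholders.

# Hypothetical example: mosaic and clip two JRC occurrence tiles to a small extent
output_folder = r"C:/data/JRC"                     # placeholder output directory
Names_to_download = ["occurrence_30E_20N.tif",
                     "occurrence_40E_20N.tif"]     # placeholder tile names
lonlim = [38.0, 42.0]                              # [xmin, xmax] in degrees
latlim = [11.0, 13.0]                              # [ymin, ymax] in degrees

args = [output_folder, Names_to_download, lonlim, latlim]
RetrieveData(args)   # writes JRC_Occurrence_percent.tif in output_folder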
Example #2
def DownloadData(Dir, latlim, lonlim, Waitbar):
    """
    This function downloads the ESA CCI land cover map (300 m, 2015, v2.0.7) and clips it to the given extent

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    latlim -- [ymin, ymax]
    lonlim -- [xmin, xmax]
    Waitbar -- not used by this function
    """

    # Define the output name
    output_filename = os.path.join(Dir, 'LU_ESACCI.tif')

    # Set the url of the server
    url = r"https://storage.googleapis.com/cci-lc-v207/ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7.zip"

    # Create a Trash folder
    Dir_trash = os.path.join(Dir, "Trash")
    if not os.path.exists(Dir_trash):
        os.makedirs(Dir_trash)

    # Define location of download
    filename_out = os.path.join(
        Dir_trash, "ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7.zip")

    # Download data
    urllib.request.urlretrieve(url, filename=filename_out)

    # Extract data
    DC.Extract_Data(filename_out, Dir_trash)

    # Define input of the world tiff file
    filename_world = os.path.join(
        Dir_trash, "product", "ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7.tif")

    try:
        # Clip the data to the user's extent
        data, Geo_out = RC.clip_data(filename_world, latlim, lonlim)

        # Save data of clipped array
        DC.Save_as_tiff(output_filename, data, Geo_out, 4326)

    except:

        # Fall back to clipping the world file directly with GDAL (assumed
        # call signature: input raster, output raster, latlim, lonlim)
        RC.Clip_Dataset_GDAL(filename_world, output_filename, latlim, lonlim)

    # Remove trash folder
    shutil.rmtree(Dir_trash)

    return ()
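
A minimal call sketch for the land cover downloader above, assuming the DC and RC helpers from the same package are importable; the directory and extent are placeholders.

# Hypothetical example: download the ESA CCI land cover map and clip it to an extent
Dir = r"C:/data/ESACCI"      # placeholder directory
latlim = [29.0, 32.0]        # [ymin, ymax]
lonlim = [29.5, 33.0]        # [xmin, xmax]

DownloadData(Dir, latlim, lonlim, Waitbar=0)   # writes LU_ESACCI.tif in Dir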
Example #3
def RetrieveData(Date, args):
    """
    This function retrieves MOD16 ET data for a given date from the
    ftp://ftp.ntsg.umt.edu/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [
        output_folder, TilesVertical, TilesHorizontal, latlim, lonlim,
        timestep, hdf_library, Size_pix
    ] = args

    if timestep == 'monthly':
        ETfileName = os.path.join(
            output_folder, 'ET_MOD16A2_mm-month-1_monthly_' +
            Date.strftime('%Y') + '.' + Date.strftime('%m') + '.01.tif')
    elif timestep == '8-daily':
        ETfileName = os.path.join(
            output_folder,
            'ET_MOD16A2_mm-8days-1_8-daily_' + Date.strftime('%Y') + '.' +
            Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')

    if not os.path.exists(ETfileName):

        # Collect the data from the MODIS webpage; this returns the data and the lat and lon (in meters) of those tiles
        try:
            Collect_data(TilesHorizontal, TilesVertical, Date, output_folder,
                         timestep, hdf_library, Size_pix)
        except:
            print("Was not able to download the file")
        try:
            # Define the output name of the collect data function
            name_collect = os.path.join(output_folder, 'Merged.tif')

            # Reproject the MODIS product to epsg_to
            epsg_to = '4326'
            name_reprojected = RC.reproject_MODIS(name_collect, epsg_to)

            # Clip the data to the user's extent
            data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

            DC.Save_as_tiff(name=ETfileName,
                            data=data,
                            geo=geo,
                            projection='WGS84')

            # remove the side products
            os.remove(os.path.join(output_folder, name_collect))
            os.remove(os.path.join(output_folder, name_reprojected))
        except:
            print("Failed for date: %s" % Date)

    return ()
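
A minimal call sketch, assuming RetrieveData is imported together with its Collect_data, RC and DC helpers; the tile numbers, pixel size and path below are placeholders that would normally be prepared by DownloadData.

import pandas as pd

# Hypothetical example: fetch the monthly MOD16 ET mosaic for March 2015
output_folder = r"C:/data/MOD16"
TilesVertical = [7, 7]        # only MODIS row v07
TilesHorizontal = [21, 22]    # MODIS columns h21-h22
latlim = [5.0, 10.0]
lonlim = [33.0, 40.0]

args = [output_folder, TilesVertical, TilesHorizontal, latlim, lonlim,
        'monthly', None, 0.0025]   # timestep, hdf_library, Size_pix (placeholders)
RetrieveData(pd.Timestamp('2015-03-01'), args)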
Example #4
def RetrieveData(Date, args):
    """
    This function retrieves MOD15 FPAR data for a given date from the
    http://e4ftl01.cr.usgs.gov/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [
        output_folder, TilesVertical, TilesHorizontal, lonlim, latlim, unit,
        dataset, nameDownload, hdf_library
    ] = args

    FPARfileName = os.path.join(
        output_folder,
        '%s_MOD15_%s_8-daily_' % (dataset, unit) + Date.strftime('%Y') + '.' +
        Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')

    if not os.path.exists(FPARfileName):

        # Collect the data from the MODIS webpage; this returns the data and the lat and lon (in meters) of those tiles
        try:
            Collect_data(TilesHorizontal, TilesVertical, Date, output_folder,
                         nameDownload, hdf_library)
        except:
            print("Was not able to download the file")

        try:
            # Define the output name of the collect data function
            name_collect = os.path.join(output_folder, 'Merged.tif')

            # Reproject the MODIS product to epsg_to
            epsg_to = '4326'
            name_reprojected = RC.reproject_MODIS(name_collect, epsg_to)

            # Clip the data to the user's extent
            data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

            # Save the file as tiff
            DC.Save_as_tiff(name=FPARfileName,
                            data=data,
                            geo=geo,
                            projection='WGS84')

            # remove the side products
            os.remove(os.path.join(output_folder, name_collect))
            os.remove(os.path.join(output_folder, name_reprojected))

        except:
            print("Failed for date: %s" % Date)

    return True
Example #5
def RetrieveData(Date, args):
    """
    This function retrieves MOD9 Reflectance data for a given date from the
    http://e4ftl01.cr.usgs.gov/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [
        output_folder, TilesVertical, TilesHorizontal, lonlim, latlim, band,
        resolution, hdf_library
    ] = args

    ReffileName = os.path.join(
        output_folder,
        'ReflectanceBand%d_MOD09GQ_-_daily_' % band + Date.strftime('%Y') +
        '.' + Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')

    if not os.path.exists(ReffileName):
        # Collect the data from the MODIS webpage; this returns the data and the lat and lon (in meters) of those tiles
        try:
            Collect_data(TilesHorizontal, TilesVertical, Date, output_folder,
                         band, resolution, hdf_library)
        except:
            print("Was not able to download the file")

        # Define the output name of the collect data function
        name_collect = os.path.join(output_folder, 'Merged.tif')
        try:
            # Reproject the MODIS product to epsg_to
            epsg_to = '4326'
            name_reprojected = RC.reproject_MODIS(name_collect, epsg_to)

            # Clip the data to the user's extent
            data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

            # Save results as Gtiff
            DC.Save_as_tiff(name=ReffileName,
                            data=data,
                            geo=geo,
                            projection='WGS84')

            # remove the side products
            os.remove(os.path.join(output_folder, name_collect))
            os.remove(os.path.join(output_folder, name_reprojected))
        except:
            print('data for %02d-%02d-%d is not available' %
                  (Date.day, Date.month, Date.year))

    return True
Example #6
def Download_GWF_from_WA_FTP(output_folder, filename_Out, lonlim, latlim):
    """
    This function retrieves GWF data for a given date from the
    ftp.wateraccounting.unesco-ihe.org server.

    Keyword arguments:
    output_folder -- directory in which the downloaded data is stored
    filename_Out -- name of the output file
    latlim -- [ymin, ymax] (values must be between -60 and 70)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """

    try:
        # Collect account and FTP information
        username, password = WebAccounts.Accounts(Type='FTP_WA')
        ftpserver = "ftp.wateraccounting.unesco-ihe.org"

        # Set the file names and directories
        filename = "Gray_Water_Footprint.tif"
        local_filename = os.path.join(output_folder, filename)

        # Download data from FTP
        ftp = FTP(ftpserver)
        ftp.login(username, password)
        directory = "/WaterAccounting_Guest/Static_WA_Datasets/"
        ftp.cwd(directory)
        lf = open(local_filename, "wb")
        ftp.retrbinary("RETR " + filename, lf.write)
        lf.close()

        # Clip the extent out of the world dataset
        dataset, Geo_out = RC.clip_data(local_filename, latlim, lonlim)

        # make geotiff file
        DC.Save_as_tiff(name=filename_Out,
                        data=dataset,
                        geo=Geo_out,
                        projection="WGS84")

        # delete old tif file
        os.remove(local_filename)

    except:
        print("file not exists")

    return
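
A minimal call sketch for the FTP download above, assuming a valid 'FTP_WA' account is configured in WebAccounts and that the RC and DC helpers are importable; the paths and extent are placeholders.

import os

# Hypothetical example: download the gray water footprint map and clip it to a basin
output_folder = r"C:/data/GWF"
filename_Out = os.path.join(output_folder, "Gray_Water_Footprint_clipped.tif")
latlim = [8.0, 14.0]    # [ymin, ymax]
lonlim = [33.0, 40.0]   # [xmin, xmax]

Download_GWF_from_WA_FTP(output_folder, filename_Out, lonlim, latlim)
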
def DownloadData(output_folder, latlim, lonlim):
    """
    This function downloads DEM data from SRTM

    Keyword arguments:
    output_folder -- directory of the result
    latlim -- [ymin, ymax] (values must be between -60 and 60)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    """
    # Check the latitude and longitude and, if out of bounds, clamp them to the allowed extent
    if latlim[0] < -60 or latlim[1] > 60:
        print(
            'Latitude above 60N or below 60S is not possible. Value set to maximum'
        )
        latlim[0] = np.maximum(latlim[0], -60)
        latlim[1] = np.minimum(latlim[1], 60)
    if lonlim[0] < -180 or lonlim[1] > 180:
        print(
            'Longitude must be between 180E and 180W. Now value is set to maximum'
        )
        lonlim[0] = np.maximum(lonlim[0], -180)
        lonlim[1] = np.minimum(lonlim[1], 180)

    # converts the latlim and lonlim into names of the tiles which must be
    # downloaded
    name, rangeLon, rangeLat = Find_Document_Names(latlim, lonlim)

    # Memory for the map x and y shape (starts with zero)
    size_X_tot = 0
    size_Y_tot = 0

    nameResults = []

    # Create a temporary folder for processing
    output_folder_trash = os.path.join(output_folder, "Temp")
    if not os.path.exists(output_folder_trash):
        os.makedirs(output_folder_trash)

    # Download, extract, and convert all the files to tiff files
    for nameFile in name:

        try:
            # Download the data from
            # http://earlywarning.usgs.gov/hydrodata/
            output_file, file_name = Download_Data(nameFile,
                                                   output_folder_trash)

            # extract zip data
            DC.Extract_Data(output_file, output_folder_trash)

            # The inputs are the file name and the directory in which the data must be stored
            file_name_tiff = file_name.replace(".zip", ".tif")
            output_tiff = os.path.join(output_folder_trash, file_name_tiff)

            # Open the tiff file and read its geotransform and size
            dest_SRTM = gdal.Open(output_tiff)
            geo_out = dest_SRTM.GetGeoTransform()
            size_X = dest_SRTM.RasterXSize
            size_Y = dest_SRTM.RasterYSize

            if (int(size_X) != int(6001) or int(size_Y) != int(6001)):
                data = np.ones((6001, 6001)) * -9999

                # Create the latitude bound
                Vfile = nameFile.split("_")[2][0:2]
                Bound2 = 60 - 5 * (int(Vfile) - 1)

                # Create the longitude bound
                Hfile = nameFile.split("_")[1]
                Bound1 = -180 + 5 * (int(Hfile) - 1)

                Expected_X_min = Bound1
                Expected_Y_max = Bound2

                Xid_start = int(
                    np.round((geo_out[0] - Expected_X_min) / geo_out[1]))
                Xid_end = int(
                    np.round(
                        ((geo_out[0] + size_X * geo_out[1]) - Expected_X_min) /
                        geo_out[1]))
                Yid_start = int(
                    np.round((Expected_Y_max - geo_out[3]) / (-geo_out[5])))
                Yid_end = int(
                    np.round((Expected_Y_max - (geo_out[3] +
                                                (size_Y * geo_out[5]))) /
                             (-geo_out[5])))

                data_SRTM = dest_SRTM.GetRasterBand(1).ReadAsArray()

                data[Yid_start:Yid_end, Xid_start:Xid_end] = data_SRTM
                if np.max(data) == 255:
                    data[data == 255] = -9999
                data[data < -9999] = -9999

                geo_in = [
                    Bound1 - 0.5 * 0.00083333333333333, 0.00083333333333333,
                    0.0, Bound2 + 0.5 * 0.00083333333333333, 0.0,
                    -0.0008333333333333333333
                ]

                # save chunk as tiff file
                destDEM = DC.Save_as_MEM(data=data,
                                         geo=geo_in,
                                         projection="WGS84")

                dest_SRTM = None

            else:
                # The tile already has the expected 6001 x 6001 size, so the
                # downloaded tiff itself is used for clipping (RC.clip_data is
                # called with both file paths and in-memory datasets elsewhere
                # in this function)
                destDEM = output_tiff
                dest_SRTM = None

        except:

            # If the tile does not exist, create a replacement nodata tile (sea tiles)
            file_name = nameFile
            file_name_tiff = file_name.replace(".zip", ".tif")
            output_tiff = os.path.join(output_folder_trash, file_name_tiff)
            data = np.ones((6001, 6001)) * -9999
            data = data.astype(np.float32)

            # Create the latitude bound
            Vfile = nameFile.split("_")[2][0:2]
            Bound2 = 60 - 5 * (int(Vfile) - 1)

            # Create the longitude bound
            Hfile = nameFile.split("_")[1]
            Bound1 = -180 + 5 * (int(Hfile) - 1)

            # Geospatial data for the tile
            geo_in = [
                Bound1 - 0.5 * 0.00083333333333333, 0.00083333333333333, 0.0,
                Bound2 + 0.5 * 0.00083333333333333, 0.0,
                -0.0008333333333333333333
            ]

            # save chunk as tiff file
            destDEM = DC.Save_as_MEM(data=data, geo=geo_in, projection="WGS84")

        # clip data
        Data, Geo_data = RC.clip_data(destDEM, latlim, lonlim)
        size_Y_out = int(np.shape(Data)[0])
        size_X_out = int(np.shape(Data)[1])

        # Total size of the product so far
        size_Y_tot = int(size_Y_tot + size_Y_out)
        size_X_tot = int(size_X_tot + size_X_out)

        if nameFile == name[0]:
            Geo_x_end = Geo_data[0]
            Geo_y_end = Geo_data[3]
        else:
            Geo_x_end = np.min([Geo_x_end, Geo_data[0]])
            Geo_y_end = np.max([Geo_y_end, Geo_data[3]])

        # create name for chunk
        FileNameEnd = "%s_temporary.tif" % (file_name)
        nameForEnd = os.path.join(output_folder_trash, FileNameEnd)
        nameResults.append(str(nameForEnd))

        # save chunk as tiff file
        DC.Save_as_tiff(name=nameForEnd,
                        data=Data,
                        geo=Geo_data,
                        projection="WGS84")

    size_X_end = int(size_X_tot / len(rangeLat))  # + 1 !!!
    size_Y_end = int(size_Y_tot / len(rangeLon))  # + 1 !!!

    # Define the georeference of the end matrix
    geo_out = [Geo_x_end, Geo_data[1], 0, Geo_y_end, 0, Geo_data[5]]

    latlim_out = [geo_out[3] + geo_out[5] * size_Y_end, geo_out[3]]
    lonlim_out = [geo_out[0], geo_out[0] + geo_out[1] * size_X_end]

    # merge chunk together resulting in 1 tiff map
    datasetTot = Merge_DEM(latlim_out, lonlim_out, nameResults, size_Y_end,
                           size_X_end)

    datasetTot[datasetTot < -9999] = -9999

    # name of the end result
    output_DEM_name = "DEM_SRTM_m_3s.tif"

    Save_name = os.path.join(output_folder, output_DEM_name)

    # Make geotiff file
    DC.Save_as_tiff(name=Save_name,
                    data=datasetTot,
                    geo=geo_out,
                    projection="WGS84")
    os.chdir(output_folder)
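
The index arithmetic above pastes a partially delivered SRTM tile into a full 6001 x 6001 nodata grid. A small worked sketch of that computation with made-up numbers (tile column 10 and row 3, so Bound1 = -135 and Bound2 = 50, and a download that only covers a 4 x 4 degree portion):

import numpy as np

pixel = 0.00083333333333333                      # ~3 arc-seconds in degrees
Expected_X_min, Expected_Y_max = -135.0, 50.0    # expected tile bounds

# Assumed geotransform of the partial download: upper-left corner at (-134.0, 49.0),
# 4800 columns x 4800 rows
geo_out = (-134.0, pixel, 0.0, 49.0, 0.0, -pixel)
size_X, size_Y = 4800, 4800

Xid_start = int(np.round((geo_out[0] - Expected_X_min) / geo_out[1]))
Xid_end = int(np.round(((geo_out[0] + size_X * geo_out[1]) - Expected_X_min) / geo_out[1]))
Yid_start = int(np.round((Expected_Y_max - geo_out[3]) / (-geo_out[5])))
Yid_end = int(np.round((Expected_Y_max - (geo_out[3] + size_Y * geo_out[5])) / (-geo_out[5])))

print(Xid_start, Xid_end, Yid_start, Yid_end)    # 1200 6000 1200 6000
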
def DownloadData(output_folder, latlim, lonlim, parameter, resolution):
    """
    This function downloads DEM or flow direction (DIR) data from HydroSHEDS

    Keyword arguments:
    output_folder -- directory of the result
    latlim -- [ymin, ymax] (values must be between -50 and 50)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    parameter -- product to download: 'dem_3s', 'dir_3s', 'dem_15s',
                 'dir_15s', 'dem_30s' or 'dir_30s'
    resolution -- '3s', '15s' or '30s' (overwritten based on the parameter
                  name)
    """
    # Define parameter dependent variables
    if parameter == "dir_3s":
        para_name = "DIR"
        unit = "-"
        resolution = '3s'
        parameter = 'dir'

    if parameter == "dem_3s":
        para_name = "DEM"
        unit = "m"
        resolution = '3s'
        parameter = 'dem'

    if parameter == "dir_15s":
        para_name = "DIR"
        unit = "-"
        resolution = '15s'
        parameter = 'dir'

    if parameter == "dem_15s":
        para_name = "DEM"
        unit = "m"
        resolution = '15s'
        parameter = 'dem'

    if parameter == "dir_30s":
        para_name = "DIR"
        unit = "-"
        resolution = '30s'
        parameter = 'dir'

    if parameter == "dem_30s":
        para_name = "DEM"
        unit = "m"
        resolution = '30s'
        parameter = 'dem'

    # Convert the latlim and lonlim into names of the tiles which must be
    # downloaded
    if resolution == '3s':

        name, rangeLon, rangeLat = Find_Document_Names(latlim, lonlim,
                                                       parameter)

        # Memory for the map x and y shape (starts with zero)
        size_X_tot = 0
        size_Y_tot = 0

    if resolution == '15s' or resolution == '30s':
        name = Find_Document_names_15s_30s(latlim, lonlim, parameter,
                                           resolution)

    nameResults = []
    # Create a temporary folder for processing
    output_folder_trash = os.path.join(output_folder, "Temp")
    if not os.path.exists(output_folder_trash):
        os.makedirs(output_folder_trash)

    # Download, extract, and convert all the files to tiff files
    for nameFile in name:

        try:
            # Download the data from
            # http://earlywarning.usgs.gov/hydrodata/
            output_file, file_name = Download_Data(nameFile,
                                                   output_folder_trash,
                                                   parameter, para_name,
                                                   resolution)

            # extract zip data
            DC.Extract_Data(output_file, output_folder_trash)

            # Convert the data with an adf extension to a tiff extension.
            # The inputs are the file name and the directory in which the data must be stored
            file_name_tiff = file_name.split('.')[0] + '_trans_temporary.tif'
            file_name_extract = file_name.split('_')[0:3]
            if resolution == '3s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1]

            if resolution == '15s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1] + '_15s'

            if resolution == '30s':
                file_name_extract2 = file_name_extract[
                    0] + '_' + file_name_extract[1] + '_30s'

            output_tiff = os.path.join(output_folder_trash, file_name_tiff)

            # convert data from adf to a tiff file
            if (resolution == "15s" or resolution == "3s"):

                input_adf = os.path.join(output_folder_trash,
                                         file_name_extract2,
                                         file_name_extract2, 'hdr.adf')
                output_tiff = DC.Convert_adf_to_tiff(input_adf, output_tiff)

            # convert data from adf to a tiff file
            if resolution == "30s":

                input_bil = os.path.join(output_folder_trash,
                                         '%s.bil' % file_name_extract2)
                output_tiff = DC.Convert_bil_to_tiff(input_bil, output_tiff)

            geo_out, proj, size_X, size_Y = RC.Open_array_info(output_tiff)
            if (resolution == "3s" and
                (int(size_X) != int(6000) or int(size_Y) != int(6000))):
                data = np.ones((6000, 6000)) * -9999

                # Create the latitude bound
                Vfile = str(nameFile)[1:3]
                SignV = str(nameFile)[0]
                SignVer = 1

                # If the sign before the filename is a south sign, then the latitude is negative
                if SignV == "s":
                    SignVer = -1
                Bound2 = int(SignVer) * int(Vfile)

                # Create the longitude bound
                Hfile = str(nameFile)[4:7]
                SignH = str(nameFile)[3]
                SignHor = 1
                # If the sign before the filename is a west sign, then the longitude is negative
                if SignH == "w":
                    SignHor = -1
                Bound1 = int(SignHor) * int(Hfile)

                Expected_X_min = Bound1
                Expected_Y_max = Bound2 + 5

                Xid_start = int(
                    np.round((geo_out[0] - Expected_X_min) / geo_out[1]))
                Xid_end = int(
                    np.round(
                        ((geo_out[0] + size_X * geo_out[1]) - Expected_X_min) /
                        geo_out[1]))
                Yid_start = int(
                    np.round((Expected_Y_max - geo_out[3]) / (-geo_out[5])))
                Yid_end = int(
                    np.round((Expected_Y_max - (geo_out[3] +
                                                (size_Y * geo_out[5]))) /
                             (-geo_out[5])))

                data[Yid_start:Yid_end,
                     Xid_start:Xid_end] = RC.Open_tiff_array(output_tiff)
                if np.max(data) == 255:
                    data[data == 255] = -9999
                data[data < -9999] = -9999

                geo_in = [
                    Bound1, 0.00083333333333333, 0.0,
                    int(Bound2 + 5), 0.0, -0.0008333333333333333333
                ]

                # save chunk as tiff file
                DC.Save_as_tiff(name=output_tiff,
                                data=data,
                                geo=geo_in,
                                projection="WGS84")

        except:

            if resolution == '3s':
                # If the tile does not exist, create a replacement nodata tile (sea tiles)
                output = nameFile.split('.')[0] + "_trans_temporary.tif"
                output_tiff = os.path.join(output_folder_trash, output)
                file_name = nameFile
                data = np.ones((6000, 6000)) * -9999
                data = data.astype(np.float32)

                # Create the latitude bound
                Vfile = str(file_name)[1:3]
                SignV = str(file_name)[0]
                SignVer = 1
                # If the sign before the filename is a south sign, then the latitude is negative
                if SignV == "s":
                    SignVer = -1
                Bound2 = int(SignVer) * int(Vfile)

                # Create the longitude bound
                Hfile = str(file_name)[4:7]
                SignH = str(file_name)[3]
                SignHor = 1
                # If the sign before the filename is a west sign, then the longitude is negative
                if SignH == "w":
                    SignHor = -1
                Bound1 = int(SignHor) * int(Hfile)

                # Geospatial data for the tile
                geo_in = [
                    Bound1, 0.00083333333333333, 0.0,
                    int(Bound2 + 5), 0.0, -0.0008333333333333333333
                ]

                # save chunk as tiff file
                DC.Save_as_tiff(name=output_tiff,
                                data=data,
                                geo=geo_in,
                                projection="WGS84")

            if resolution == '15s':

                print('No 15s data available in the dataset')

        if resolution == '3s':

            # clip data
            Data, Geo_data = RC.clip_data(output_tiff, latlim, lonlim)
            size_Y_out = int(np.shape(Data)[0])
            size_X_out = int(np.shape(Data)[1])

            # Total size of the product so far
            size_Y_tot = int(size_Y_tot + size_Y_out)
            size_X_tot = int(size_X_tot + size_X_out)

            if nameFile == name[0]:
                Geo_x_end = Geo_data[0]
                Geo_y_end = Geo_data[3]
            else:
                Geo_x_end = np.min([Geo_x_end, Geo_data[0]])
                Geo_y_end = np.max([Geo_y_end, Geo_data[3]])

            # create name for chunk
            FileNameEnd = "%s_temporary.tif" % (nameFile)
            nameForEnd = os.path.join(output_folder_trash, FileNameEnd)
            nameResults.append(str(nameForEnd))

            # save chunk as tiff file
            DC.Save_as_tiff(name=nameForEnd,
                            data=Data,
                            geo=Geo_data,
                            projection="WGS84")

    if resolution == '3s':
        #size_X_end = int(size_X_tot) #!
        #size_Y_end = int(size_Y_tot) #!

        size_X_end = int(size_X_tot / len(rangeLat)) + 1  #!
        size_Y_end = int(size_Y_tot / len(rangeLon)) + 1  #!

        # Define the georeference of the end matrix
        geo_out = [Geo_x_end, Geo_data[1], 0, Geo_y_end, 0, Geo_data[5]]

        latlim_out = [geo_out[3] + geo_out[5] * size_Y_end, geo_out[3]]
        lonlim_out = [geo_out[0], geo_out[0] + geo_out[1] * size_X_end]

        # merge chunk together resulting in 1 tiff map
        datasetTot = Merge_DEM(latlim_out, lonlim_out, nameResults, size_Y_end,
                               size_X_end)

        datasetTot[datasetTot < -9999] = -9999

    if resolution == '15s':
        output_file_merged = os.path.join(output_folder_trash, 'merged.tif')
        datasetTot, geo_out = Merge_DEM_15s_30s(output_folder_trash,
                                                output_file_merged, latlim,
                                                lonlim, resolution)

    if resolution == '30s':
        output_file_merged = os.path.join(output_folder_trash, 'merged.tif')
        datasetTot, geo_out = Merge_DEM_15s_30s(output_folder_trash,
                                                output_file_merged, latlim,
                                                lonlim, resolution)

    # name of the end result
    output_DEM_name = "%s_HydroShed_%s_%s.tif" % (para_name, unit, resolution)

    Save_name = os.path.join(output_folder, output_DEM_name)

    # Make geotiff file
    DC.Save_as_tiff(name=Save_name,
                    data=datasetTot,
                    geo=geo_out,
                    projection="WGS84")
    os.chdir(output_folder)

    # Delete the temporary folder
    shutil.rmtree(output_folder_trash)
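
A minimal call sketch for the HydroSHEDS downloader above, assuming the Find_Document_names_15s_30s, Download_Data, Merge_DEM_15s_30s, RC and DC helpers are importable from the same module; the directory and extent are placeholders.

# Hypothetical example: download the 15 arc-second HydroSHEDS DEM for a small extent
output_folder = r"C:/data/HydroSHEDS"
latlim = [8.0, 13.0]    # [ymin, ymax]
lonlim = [34.0, 40.0]   # [xmin, xmax]

# resolution is re-derived from the parameter name inside the function
DownloadData(output_folder, latlim, lonlim, parameter="dem_15s", resolution="15s")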
Example #9
def RetrieveData(Date, args):
    """
    This function retrieves MOD11 LST data for a given date from the
    https://e4ftl01.cr.usgs.gov/ server.

    Keyword arguments:
    Date -- 'yyyy-mm-dd'
    args -- A list of parameters defined in the DownloadData function.
    """
    # Argument
    [
        output_folder, TilesVertical, TilesHorizontal, lonlim, latlim,
        TimeStep, hdf_library
    ] = args

    if TimeStep == 8:
        LSTfileNamePart = os.path.join(
            output_folder, 'LST_MOD11A2_K_8-daily_' + Date.strftime('%Y') +
            '.' + Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')
    if TimeStep == 1:
        LSTfileNamePart = os.path.join(
            output_folder, 'LST_MOD11A1_K_daily_' + Date.strftime('%Y') + '.' +
            Date.strftime('%m') + '.' + Date.strftime('%d') + '.*.tif')

    filesMOD = glob.glob(LSTfileNamePart)

    if not len(filesMOD) == 1:

        # Collect the data from the MODIS webpage; this returns the data and the lat and lon (in meters) of those tiles
        try:
            Collect_data(TilesHorizontal, TilesVertical, Date, output_folder,
                         TimeStep, hdf_library)
        except:
            print("Was not able to download the file")
        try:
            # Define the output name of the collect data function
            name_collect = os.path.join(output_folder, 'Merged.tif')

            # Reproject the MODIS product to epsg_to
            epsg_to = '4326'
            name_reprojected = RC.reproject_MODIS(name_collect, epsg_to)

            # Clip the data to the user's extent
            data, geo = RC.clip_data(name_reprojected, latlim, lonlim)

            # Save results as Gtiff
            if TimeStep == 8:
                LSTfileName = os.path.join(
                    output_folder,
                    'LST_MOD11A2_K_8-daily_' + Date.strftime('%Y') + '.' +
                    Date.strftime('%m') + '.' + Date.strftime('%d') + '.tif')
            if TimeStep == 1:
                name_collect_time = os.path.join(output_folder,
                                                 'Merged_Time.tif')
                name_reprojected_time = RC.reproject_MODIS(
                    name_collect_time, epsg_to)
                data_time, geo = RC.clip_data(name_reprojected_time, latlim,
                                              lonlim)
                data_time[data_time == 25.5] = np.nan
                data_time_ave = np.nanmean(data_time)
                try:
                    hour_GMT = int(np.floor(data_time_ave))
                    minutes_GMT = int(
                        (data_time_ave - np.floor(data_time_ave)) * 60)
                except:
                    hour_GMT = int(12)
                    minutes_GMT = int(0)
                LSTfileName = os.path.join(
                    output_folder,
                    'LST_MOD11A1_K_daily_' + Date.strftime('%Y') + '.' +
                    Date.strftime('%m') + '.' + Date.strftime('%d') +
                    '.%02d%02d.tif' % (hour_GMT, minutes_GMT))
                os.remove(name_collect_time)
                os.remove(name_reprojected_time)

            data[data == 0.] = -9999
            DC.Save_as_tiff(name=LSTfileName,
                            data=data,
                            geo=geo,
                            projection='WGS84')

            # remove the side products
            os.remove(os.path.join(output_folder, name_collect))
            os.remove(os.path.join(output_folder, name_reprojected))

        except:
            print("Failed for date: %s" % Date)

    return True
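
A small worked sketch (with made-up numbers) of the overpass-time handling above: the clipped 'Merged_Time' layer holds view times in decimal hours with 25.5 as the fill value, and their mean is split into the hour/minute pair used in the daily filename.

import numpy as np

data_time = np.array([[10.25, 10.5], [25.5, 10.75]])   # 25.5 is the fill value
data_time[data_time == 25.5] = np.nan
data_time_ave = np.nanmean(data_time)                  # 10.5

hour_GMT = int(np.floor(data_time_ave))                              # 10
minutes_GMT = int((data_time_ave - np.floor(data_time_ave)) * 60)    # 30
print('.%02d%02d.tif' % (hour_GMT, minutes_GMT))                     # .1030.tif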
Example #10
def DownloadData(Dir, Startdate, Enddate, latlim, lonlim, Waitbar, version, Product):
    """
    This script downloads SSEBop ET data from the UNESCO-IHE FTP server ('FTP' version) or from the web ('V4' version).
    The output files display the total ET in mm for a period of one month.
    The name of the file corresponds to the first day of the month.

    Keyword arguments:
    Dir -- 'C:/file/to/path/'
    Startdate -- 'yyyy-mm-dd'
    Enddate -- 'yyyy-mm-dd'
    latlim -- [ymin, ymax] (values must be between -90 and 90)
    lonlim -- [xmin, xmax] (values must be between -180 and 180)
    Waitbar -- 1 to show a progress bar, 0 to hide it
    version -- 'FTP' or 'V4'
    Product -- 'ETact' or 'ETpot'
    """

    if version == "FTP":
        # Check the latitude and longitude and, if out of bounds, clamp them to the allowed extent
        if latlim[0] < -59.2 or latlim[1] > 80:
            print('Latitude above 80N or below 59.2S is not possible. Value set to maximum')
            latlim[0] = np.maximum(latlim[0], -59.2)
            latlim[1] = np.minimum(latlim[1], 80)
        if lonlim[0] < -180 or lonlim[1] > 180:
            print('Longitude must be between 180E and 180W. Now value is set to maximum')
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            Enddate = pd.Timestamp('2014-10-31')

    if version == "V4":
        # Check the latitude and longitude and, if out of bounds, clamp them to the allowed extent
        if latlim[0] < -60 or latlim[1] > 80.0022588483988670:
            print('Latitude above 80N or below 60S is not possible. Value set to maximum')
            latlim[0] = np.maximum(latlim[0], -60)
            latlim[1] = np.minimum(latlim[1], 80.0022588483988670)
        if lonlim[0] < -180 or lonlim[1] > 180.0002930387853439:
            print('Longitude must be between 180E and 180W. Now value is set to maximum')
            lonlim[0] = np.maximum(lonlim[0], -180)
            lonlim[1] = np.minimum(lonlim[1], 180.0002930387853439)

        # Check Startdate and Enddate
        if not Startdate:
            Startdate = pd.Timestamp('2003-01-01')
        if not Enddate:
            import datetime
            Enddate = pd.Timestamp(datetime.datetime.now())

    # Define the output directory and create it if it does not exist
    if Product == "ETact":
        output_folder = os.path.join(Dir, 'Evaporation', 'SSEBop', 'Monthly')
        freq_use = "MS"
    if Product == "ETpot":
        output_folder = os.path.join(Dir, 'Potential_Evapotranspiration', 'FEWS', 'Daily')        
        freq_use = "D"

    if not os.path.exists(output_folder):
        os.makedirs(output_folder)

    # Creates dates library
    Dates = pd.date_range(Startdate, Enddate, freq = freq_use)

    # Create Waitbar
    if Waitbar == 1:
        import watertools.Functions.Random.WaitbarConsole as WaitbarConsole
        total_amount = len(Dates)
        amount = 0
        WaitbarConsole.printWaitBar(amount, total_amount, prefix = 'Progress:', suffix = 'Complete', length = 50)

    # Loop over the dates
    for Date in Dates:

        # Define year and month
        year = Date.year
        month = Date.month
        day = Date.day
        
        if version == "FTP":

            # Date as printed in filename
            Filename_out= os.path.join(output_folder,'ETa_SSEBop_FTP_mm-month-1_monthly_%s.%02s.%02s.tif' %(Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))

            # Define end filename
            Filename_dir = os.path.join("%s" %year, "m%s%02d.tif" %(str(year)[2:], month))
            Filename_only = "m%s%02d.tif" %(str(year)[2:], month)

        if version == "V4":

            # Date as printed in filename
            if Product == "ETpot":
                Filename_out= os.path.join(output_folder,'ETpot_FEWS_mm-day-1_daily_%s.%02s.%02s.tif' %(Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = 'et%02s%02d%02d.tar.gz' %(str(year)[2:], month, day)
                # The end file name after downloading and unzipping
                Filename_only = "et%02s%02d%02d.bil" %(str(year)[2:], month, day)
                # Create bin folder
                temp_folder = os.path.join(output_folder, "Temp")
                if not os.path.exists(temp_folder):
                    os.makedirs(temp_folder)
                local_filename = os.path.join(temp_folder, Filename_only)

            if Product == "ETact":
                Filename_out= os.path.join(output_folder,'ETa_SSEBop_V4_mm-month-1_monthly_%s.%02s.%02s.tif' %(Date.strftime('%Y'), Date.strftime('%m'), Date.strftime('%d')))
                # Define the downloaded zip file
                Filename_only_zip = "m%s%02d.zip" %(str(year), month)
                # The end file name after downloading and unzipping
                Filename_only = "m%s%02d_modisSSEBopETv4_actual_mm.tif" %(str(year), month)

                # Temporary filename for the downloaded global file
                local_filename = os.path.join(output_folder, Filename_only)

        # Download the data from FTP server if the file not exists
        if not os.path.exists(Filename_out):
            try:

                if version == "FTP":
                    Download_SSEBop_from_WA_FTP(local_filename, Filename_dir)
                if version == "V4":
                    if Product == "ETpot":
                        Download_SSEBop_from_Web(temp_folder, Filename_only_zip, Product)
                    if Product == "ETact":
                        Download_SSEBop_from_Web(output_folder, Filename_only_zip, Product)
                        
                if Product == "ETpot":
                    Array_ETpot = RC.Open_bil_array(local_filename)
                    Array_ETpot = Array_ETpot/100
                    Geo_out = tuple([-180.5, 1, 0, 90.5, 0, -1])
                    dest = DC.Save_as_MEM(Array_ETpot, Geo_out, "WGS84")
                    data, Geo_out = RC.clip_data(dest, latlim, lonlim)
                    DC.Save_as_tiff(Filename_out, data, Geo_out, "WGS84")
                    
                if Product == "ETact":    
                    # Clip dataset
                    data, Geo_out = RC.clip_data(local_filename, latlim, lonlim)
                    data[data<-9999] = -9999
                    DC.Save_as_tiff(Filename_out, data, Geo_out, "WGS84")
                    os.remove(local_filename)

            except:
                print("Was not able to download file with date %s" %Date)

        # Adjust waitbar
        if Waitbar == 1:
            amount += 1
            WaitbarConsole.printWaitBar(amount, total_amount, prefix = 'Progress:', suffix = 'Complete', length = 50)

    if version == "V4":
        import glob
        os.chdir(output_folder)
        if Product == "ETact":
            zipfiles = glob.glob("*.zip")
            for zipfile in zipfiles:
                os.remove(os.path.join(output_folder, zipfile))
            xmlfiles = glob.glob("*.xml")
            for xmlfile in xmlfiles:
                os.remove(os.path.join(output_folder, xmlfile))
        if Product == "ETpot":  
            import shutil
            Temp_dir = os.path.join(output_folder, "Temp")
            shutil.rmtree(Temp_dir)
            
    return
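
A minimal call sketch for the SSEBop downloader above, assuming the Download_SSEBop_from_Web helper, the RC and DC modules and the watertools waitbar are importable; the directory and extent are placeholders.

# Hypothetical example: download monthly SSEBop actual ET (version V4) for 2016
Dir = r"C:/data/SSEBop"
DownloadData(Dir, Startdate='2016-01-01', Enddate='2016-12-31',
             latlim=[7.5, 12.5], lonlim=[38.0, 43.5],
             Waitbar=1, version="V4", Product="ETact")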