def build(self, variable, ftp):
        """Build initial dataset."""
        # We need to build these three files
        paths = self.get_paths(variable)

        # Retrieve rasters
        self.get_rasters(variable, ftp)

        # Reproject for area calculations
        for file in glob(os.path.join(self.tif_folder, "*tif")):
            self.reproject(file)

        # Convert originals to NetCDF file
        files = glob(os.path.join(self.tif_folder, variable + "*tif"))
        toNetCDF(tfiles=files, ncfiles=None, savepath=paths["original"],
                 index=variable, proj=4326, year1=1895, month1=1,
                 year2=TODAYS_DATE.year - 2, month2=12, wmode="w",
                 percentiles=False)

        # Convert projected files to NetCDF file
        files = glob(os.path.join(self.tif_folder, "proj_*tif"))
        toNetCDFAlbers(tfiles=files, ncfiles=None, savepath=paths["albers"],
                       index=variable, proj=PROJ, year1=1895, month1=1,
                       year2=TODAYS_DATE.year - 2, month2=12, wmode="w",
                       percentiles=False)

        # Create a percentile dataset
        toNetCDFPercentile(paths["original"], paths["percentile"])

        # Clear temp folder
        self._clear_tif()
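
The reproject helper called in build is not shown in this snippet. A minimal sketch of what it might look like, assuming gdal.Warp and the module-level PROJ Albers CRS passed to toNetCDFAlbers (the proj_ output prefix matches the glob used for the projected files above):

import os
from osgeo import gdal

def reproject(self, file):
    """Warp a WGS84 raster into the Albers projection (sketch only)."""
    # The output name matches the "proj_*tif" glob in build()
    dst = os.path.join(self.tif_folder, "proj_" + os.path.basename(file))
    gdal.Warp(dst, file, dstSRS=PROJ)  # PROJ assumed to be a CRS string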
Example #2
                del base_data
                del base_data_proj

                # Write changes to file and close
                times[n] = days
                times_proj[n] = days
                values[n] = array
                values_proj[n] = array_proj
                old.close()
                old_proj.close()

            # Now recreate the entire percentile data set
            print('Reranking percentiles...')
            pc_path = os.path.join(pc_folder, index + '.nc')
            os.remove(pc_path)
            toNetCDFPercentile(original_path, pc_path)

    ############## If we need to start over ###################################
    else:
        print(original_path + " not detected, building new dataset...\n")

        # Get all of the last-day-of-month files for the index
        ftp_years = ftp.nlst()
        ftp_years = [f for f in ftp_years if isInt(f)]

        # First Date
        ftp.cwd(
            os.path.join('/Projects/EDDI/CONUS_archive/data/', ftp_years[0]))
        ftp_files = ftp.nlst()
        ftp_files = [
            f for f in ftp_files if f[-17:-13] == "{:02d}mn".format(scale)
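
The slice in the filter above pulls the timescale token out of each EDDI file name. A quick check, assuming PSL's EDDI naming convention (the example file name is hypothetical):

scale = 2
fname = "EDDI_ETrs_02mn_19800131.asc"  # hypothetical EDDI file name
assert fname[-17:-13] == "{:02d}mn".format(scale)  # -> "02mn"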
Example #3
                    # Write changes to file and close
                    times[n] = days
                    times_p[n] = days
                    values[n] = array
                    values_p[n] = array_p
                    old.close()
                    old_p.close()

                # Now recreate the entire percentile data set
                print('Reranking percentiles...')
                pc_path = os.path.join(
                    data_path, 'data/droughtindices/netcdfs/percentiles',
                    index_map[index] + '.nc')
                os.remove(pc_path)
                toNetCDFPercentile(nc_path, pc_path)

    else:
        ############## If we need to start over #######################
        print(nc_path + " not detected, building new data set...")

        # Get the data from wwdt
        print("Downloading the 12 netcdf files for " + index + "...")
        for i in tqdm(range(1, 13), position=0):
            # These come in monthly files, e.g. all Januaries in one file
            file_name = '{}_{}_PRISM.nc'.format(index, i)
            target_url = wwdt_url + '/' + index + '/' + file_name
            temp_path = os.path.join(local_path, 'temp_{}.nc'.format(i))

            # Download
            try:
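
The snippet is cut off at the download step. One plausible body for the try block, assuming a plain urllib fetch rather than whatever the original source actually uses:

import urllib.request

try:
    urllib.request.urlretrieve(target_url, temp_path)
except Exception as e:
    print("Download failed for {}: {}".format(target_url, e))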
Example #4
                 year2=todays_date.year,
                 month2=12,
                 wmode='w',
                 percentiles=False)
        toNetCDFAlbers(tfiles=tfiles_proj,
                       ncfiles=None,
                       savepath=albers_path,
                       index=variable,
                       proj=proj,
                       year1=1895,
                       month1=1,
                       year2=todays_date.year,
                       month2=12,
                       wmode='w',
                       percentiles=False)
        toNetCDFPercentile(original_path, percentile_path)

        # Empty tif folder
        for t in glob(os.path.join(tif_folder, '*')):
            os.remove(t)

# One last thing: we only have min and max vapor pressure deficit, so
# derive the mean from the two
meanNC(minsrc='data/droughtindices/netcdfs/albers/vpdmin.nc',
       maxsrc='data/droughtindices/netcdfs/albers/vpdmax.nc',
       dst='data/droughtindices/netcdfs/albers/vpdmean.nc')
meanNC(minsrc='data/droughtindices/netcdfs/percentiles/vpdmin.nc',
       maxsrc='data/droughtindices/netcdfs/percentiles/vpdmax.nc',
       dst='data/droughtindices/netcdfs/percentiles/vpdmean.nc')
meanNC(minsrc='data/droughtindices/netcdfs/vpdmin.nc',
       maxsrc='data/droughtindices/netcdfs/vpdmax.nc',
       dst='data/droughtindices/netcdfs/vpdmean.nc')
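
meanNC is a project helper that averages the min and max rasters into a mean dataset. A minimal sketch of the same idea with xarray (the function name and variable handling here are assumptions, not the project's implementation):

import xarray as xr

def mean_nc_sketch(minsrc, maxsrc, dst):
    """Element-wise mean of two NetCDF files with matching variables."""
    vmin = xr.open_dataset(minsrc)
    vmax = xr.open_dataset(maxsrc)
    ((vmin + vmax) / 2).to_netcdf(dst)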
Example #5
        # Save another projected version
        toNetCDFAlbers(tfiles=tfiles_proj,
                       ncfiles=None,
                       savepath=ncdir_proj,
                       index=index,
                       year1=1980,
                       month1=1,
                       year2=todays_date.year,
                       month2=todays_date.month,
                       proj=proj,
                       percentiles=False,
                       wmode='w')

        # Now let's get the percentile values
        pc_path = os.path.join(data_path,
                               "data/droughtindices/netcdfs/percentiles",
                               index + ".nc")
        toNetCDFPercentile(ncdir, pc_path)

# Close connection with FTP server
ftp.quit()

print("Update Complete.")
print("####################################################")
print("#######################################")
print("#######################")
print("############")
print("#####")
print("##")
Example #6
    def update(self, variable, ftp):
        """Update dataset."""
        # We need to update these three files
        paths = self.get_paths(variable)

        # Find the missing files
        needed_dates = self.needed_dates(variable, ftp)

        # Download needed files
        for year, months in needed_dates.items():
            # Get all files available for this year
            ftp.cwd("/monthly/" + variable + "/" + year)
            rfiles = ftp.nlst()

            # Drop files whose name contains the bare year (the annual
            # aggregate); keep the monthly files
            rfiles = [f for f in rfiles if "_" + year + "_" not in f]
            rfiles.sort()
            for rfile in rfiles:
                for month in months:
                    # The month token sits at [-10:-8] in the file name
                    if rfile[-10:-8] == month:
                        # Update the WGS file
                        self._clear_temp("*bil")
                        self._retrieve(variable, rfile, year, ftp)
                        lfile = os.path.join(self.tif_folder,
                                             f"{variable}_{year}{month}.tif")
                        with Dataset(paths["original"], "r+") as old:
                            times = old.variables["time"]
                            values = old.variables["value"]
                            n = times.shape[0]
                            base_data = gdal.Open(lfile)
                            array = base_data.ReadAsArray()
                            del base_data
        
                            # Stamp the new slice mid-month, as days since
                            # 1900-01-01
                            date = dt.datetime(int(year), int(month), day=15)
                            days = date - dt.datetime(1900, 1, 1)
                            days = np.float64(days.days)

                            # Write the new time step and values; the context
                            # manager closes the file
                            times[n] = days
                            values[n] = array
        
                        # Update the Albers file
                        self.reproject(lfile)
                        pfile = os.path.join(
                            self.tif_folder,
                            f"proj_{variable}_{year}{month}.tif"
                        )
                        with Dataset(paths["albers"], "r+") as old:
                            times = old.variables["time"]
                            values = old.variables["value"]
                            n = times.shape[0]
                            base_data = gdal.Open(pfile)
                            array = base_data.ReadAsArray()
                            del base_data
        
                            # Stamp the new slice mid-month, as days since
                            # 1900-01-01
                            date = dt.datetime(int(year), int(month), day=15)
                            days = date - dt.datetime(1900, 1, 1)
                            days = np.float64(days.days)

                            # Write the new time step and values; the context
                            # manager closes the file
                            times[n] = days
                            values[n] = array

        # Reset the percentiles file
        toNetCDFPercentile(paths["original"], paths["percentile"])