Example #1
def reclassify_raster(infile,
                      outfile,
                      operator,
                      threshold,
                      pixel_value=100,
                      frmt="GTiff",
                      silence=True):
    """
    Reclassify a raster into a single value according to a threshold
    :param infile: path to the input
    :param outfile: path to the output
    :param operator: string, possible values ">","<",">=","<="
    :param threshold: threshold value
    :param pixel_value: the output pixel value for pixels that pass the threshold
    :param frmt: the output format, default "GTiff"
    :param silence: set to False to print progress/debug messages (default True)
    :return: None
    """
    '''using this instead of gdal_calc because gdal_calc didn't work
    scriptpath = os.path.join(os.path.dirname(__file__), '../', "pysdss/utility/gdal_calc.py")
    scriptpath = os.path.normpath(scriptpath)
    params = [scriptpath,"-A",path+input,"--outfile="+path+output,"--calc='(A>0.5)'", "--debug"]
    print("Reclassify raster")
    out,err = utils.run_script(params, callpy=["python3"])
    print("output" + out)
    if err:
        print("ERROR:" + err)
        raise Exception("gdal grid failed")'''

    if operator not in [">", "<", ">=", "<="]:
        raise Exception("unknown operator")

    band1 = None
    ds1 = None
    bandOut = None
    dsOut = None

    try:
        if not silence:
            print("opening input")

        # Open the dataset
        ds1 = gdal.Open(infile, gdct.GA_ReadOnly)
        band1 = ds1.GetRasterBand(1)
        # Read the data into numpy arrays
        data1 = gdar.BandReadAsArray(band1)
        # get the nodata value (assumed to be set on the input band)
        nodata = band1.GetNoDataValue()

        if not silence:
            print("filtering input")

        # The actual calculation; the expression is built from the validated
        # operator and a numeric threshold before being passed to eval()
        expr = "data1" + operator + str(threshold)
        data1[eval(expr)] = pixel_value
        data1[data1 != pixel_value] = nodata

        if not silence:
            print("output result")

        # Write the out file
        driver = gdal.GetDriverByName(frmt)
        dsOut = driver.Create(outfile, ds1.RasterXSize, ds1.RasterYSize, 1,
                              band1.DataType)
        gdar.CopyDatasetInfo(ds1, dsOut)
        bandOut = dsOut.GetRasterBand(1)
        bandOut.SetNoDataValue(nodata)
        gdar.BandWriteArray(bandOut, data1)


    finally:
        # Close the datasets by dropping the references
        band1 = None
        ds1 = None
        bandOut = None
        dsOut = None
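A minimal usage sketch for the function above (the file names and threshold are hypothetical): pixels above 0.5 become 100, everything else becomes the input's nodata value.

# Hypothetical call: file names and threshold are placeholders.
reclassify_raster("ndvi.tif", "ndvi_binary.tif",
                  operator=">", threshold=0.5,
                  pixel_value=100, frmt="GTiff", silence=False)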
Example #2
def main(start_dt, end_dt, netcdf_ws, ancillary_ws, output_ws,
         variables=['prcp'], extent_path=None, output_extent=None,
         stats_flag=True, overwrite_flag=False):
    """Extract DAYMET temperature

    Parameters
    ----------
    start_dt : datetime
        Start date.
    end_dt : datetime
        End date.
    netcdf_ws : str
        Folder of DAYMET netcdf files.
    ancillary_ws : str
        Folder of ancillary rasters.
    output_ws : str
        Folder of output rasters.
    variables : list, optional
        DAYMET variables to download (the default is ['prcp']).
        Choices: 'prcp', 'srad', 'vp', 'tmmn', 'tmmx', 'all'.
        Set as ['all'] to process all variables.
    extent_path : str, optional
        File path defining the output extent.
    output_extent : list, optional
        Decimal degrees values defining output extent.
    stats_flag : bool, optional
        If True, compute raster statistics (the default is True).
    overwrite_flag : bool, optional
        If True, overwrite existing files (the default is False).

    Returns
    -------
    None

    """
    logging.info('\nExtracting DAYMET variables')
    logging.debug('  Start date: {}'.format(start_dt))
    logging.debug('  End date:   {}'.format(end_dt))

    # Get DAYMET spatial reference from an ancillary raster
    mask_raster = os.path.join(ancillary_ws, 'daymet_mask.img')

    daymet_re = re.compile(r'daymet_v3_(?P<VAR>\w+)_(?P<YEAR>\d{4})_na\.nc4$')

    # DAYMET rasters to extract
    var_full_list = ['prcp', 'srad', 'vp', 'tmmn', 'tmmx']
    if not variables:
        logging.error('\nERROR: variables parameter is empty\n')
        sys.exit()
    elif not isinstance(variables, list):
        # DEADBEEF - I could try converting comma separated strings to lists?
        logging.error('\nERROR: variables parameter must be a list\n')
        sys.exit()
    elif 'all' in variables:
        logging.info('\nDownloading all variables\n  {}'.format(
            ','.join(var_full_list)))
        var_list = var_full_list[:]
    elif not set(variables).issubset(set(var_full_list)):
        logging.error('\nERROR: variables parameter is invalid\n  {}'.format(
            variables))
        sys.exit()
    else:
        var_list = variables[:]

    # DAYMET band name dictionary
    # daymet_band_dict = dict()
    # daymet_band_dict['prcp'] = 'precipitation_amount'
    # daymet_band_dict['srad'] = 'surface_downwelling_shortwave_flux_in_air'
    # daymet_band_dict['sph'] = 'specific_humidity'
    # daymet_band_dict['tmin'] = 'air_temperature'
    # daymet_band_dict['tmax'] = 'air_temperature'

    # Get extent/geo from mask raster
    daymet_ds = gdal.Open(mask_raster)
    daymet_osr = drigo.raster_ds_osr(daymet_ds)
    daymet_proj = drigo.osr_proj(daymet_osr)
    daymet_cs = drigo.raster_ds_cellsize(daymet_ds, x_only=True)
    daymet_extent = drigo.raster_ds_extent(daymet_ds)
    daymet_geo = daymet_extent.geo(daymet_cs)
    daymet_x, daymet_y = daymet_extent.origin()
    daymet_ds = None
    logging.debug('  Projection: {}'.format(daymet_proj))
    logging.debug('  Cellsize: {}'.format(daymet_cs))
    logging.debug('  Geo: {}'.format(daymet_geo))
    logging.debug('  Extent: {}'.format(daymet_extent))
    logging.debug('  Origin: {} {}'.format(daymet_x, daymet_y))

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug('  Extent: {}'.format(output_extent))
        # Assume input extent is in decimal degrees
        output_extent = drigo.project_extent(
            drigo.Extent(output_extent), drigo.epsg_osr(4326), daymet_osr, 0.001)
        output_extent = drigo.intersect_extents([daymet_extent, output_extent])
        output_extent.adjust_to_snap('EXPAND', daymet_x, daymet_y, daymet_cs)
        output_geo = output_extent.geo(daymet_cs)
        logging.debug('  Geo: {}'.format(output_geo))
        logging.debug('  Extent: {}'.format(output_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if not os.path.isfile(extent_path):
            logging.error(
                '\nThe extent file does not exist, exiting\n'
                '  {}'.format(extent_path))
            return False
        elif extent_path.lower().endswith('.shp'):
            output_extent = drigo.feature_path_extent(extent_path)
            extent_osr = drigo.feature_path_osr(extent_path)
            extent_cs = None
        else:
            output_extent = drigo.raster_path_extent(extent_path)
            extent_osr = drigo.raster_path_osr(extent_path)
            extent_cs = drigo.raster_path_cellsize(extent_path, x_only=True)
        output_extent = drigo.project_extent(
            output_extent, extent_osr, daymet_osr, extent_cs)
        output_extent = drigo.intersect_extents([daymet_extent, output_extent])
        output_extent.adjust_to_snap('EXPAND', daymet_x, daymet_y, daymet_cs)
        output_geo = output_extent.geo(daymet_cs)
        logging.debug('  Geo: {}'.format(output_geo))
        logging.debug('  Extent: {}'.format(output_extent))
    else:
        output_extent = daymet_extent.copy()
        output_geo = daymet_geo[:]
    # output_shape = output_extent.shape(cs=daymet_cs)
    xi, yi = drigo.array_geo_offsets(daymet_geo, output_geo, daymet_cs)
    output_rows, output_cols = output_extent.shape(daymet_cs)
    logging.debug('  Shape: {} {}'.format(output_rows, output_cols))
    logging.debug('  Offsets: {} {} (x y)'.format(xi, yi))

    # Process each variable
    for input_var in var_list:
        logging.info("\nVariable: {}".format(input_var))

        # Rename variables to match CIMIS
        if input_var == 'prcp':
            output_var = 'ppt'
        else:
            output_var = input_var

        # Build output folder
        var_ws = os.path.join(output_ws, output_var)
        if not os.path.isdir(var_ws):
            os.makedirs(var_ws)

        # Process each file in the input workspace
        for input_name in sorted(os.listdir(netcdf_ws)):
            logging.debug("{}".format(input_name))
            input_match = daymet_re.match(input_name)
            if not input_match:
                logging.debug('  Regular expression didn\'t match, skipping')
                continue
            elif input_match.group('VAR') != input_var:
                logging.debug('  Variable didn\'t match, skipping')
                continue
            year_str = input_match.group('YEAR')
            logging.info("  Year: {}".format(year_str))
            year_int = int(year_str)
            year_days = int(dt.datetime(year_int, 12, 31).strftime('%j'))
            if start_dt is not None and year_int < start_dt.year:
                logging.debug('    Before start date, skipping')
                continue
            elif end_dt is not None and year_int > end_dt.year:
                logging.debug('    After end date, skipping')
                continue

            # Build input file path
            input_raster = os.path.join(netcdf_ws, input_name)
            # if not os.path.isfile(input_raster):
            #     logging.debug(
            #         '    Input raster doesn\'t exist, skipping    {}'.format(
            #             input_raster))
            #     continue

            # Build output folder
            output_year_ws = os.path.join(var_ws, year_str)
            if not os.path.isdir(output_year_ws):
                os.makedirs(output_year_ws)

            # Read in the DAYMET NetCDF file
            input_nc_f = netCDF4.Dataset(input_raster, 'r')
            # logging.debug(input_nc_f.variables)

            # Check all valid dates in the year
            year_dates = _utils.date_range(
                dt.datetime(year_int, 1, 1), dt.datetime(year_int + 1, 1, 1))
            for date_dt in year_dates:
                if start_dt is not None and date_dt < start_dt:
                    logging.debug('  {} - before start date, skipping'.format(
                        date_dt.date()))
                    continue
                elif end_dt is not None and date_dt > end_dt:
                    logging.debug('  {} - after end date, skipping'.format(
                        date_dt.date()))
                    continue
                else:
                    logging.info('  {}'.format(date_dt.date()))

                output_path = os.path.join(
                    output_year_ws, '{}_{}_daymet.img'.format(
                        output_var, date_dt.strftime('%Y%m%d')))
                if os.path.isfile(output_path):
                    logging.debug('    {}'.format(output_path))
                    if not overwrite_flag:
                        logging.debug('    File already exists, skipping')
                        continue
                    else:
                        logging.debug('    File already exists, removing existing')
                        os.remove(output_path)

                doy = int(date_dt.strftime('%j'))
                doy_i = range(1, year_days + 1).index(doy)

                # Arrays are read as masked arrays with a fill value of -9999;
                # convert them to plain numpy arrays with NaN values
                try:
                    input_ma = input_nc_f.variables[input_var][
                        doy_i, yi: yi + output_rows, xi: xi + output_cols]
                except IndexError:
                    logging.info('    date not in netcdf, skipping')
                    continue
                input_nodata = float(input_ma.fill_value)
                output_array = input_ma.data.astype(np.float32)
                output_array[output_array == input_nodata] = np.nan

                # Convert Kelvin to Celsius (note: 'tmax'/'tmin' never appear in
                # var_full_list, which uses 'tmmx'/'tmmn', so this branch does
                # not fire for the variables processed here)
                if input_var in ['tmax', 'tmin']:
                    output_array -= 273.15

                # Save the array as 32-bit floats
                drigo.array_to_raster(
                    output_array.astype(np.float32), output_path,
                    output_geo=output_geo, output_proj=daymet_proj,
                    stats_flag=stats_flag)
                del input_ma, output_array
            input_nc_f.close()
            del input_nc_f

    logging.debug('\nScript Complete')
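A hedged call sketch for the function above (all folder paths are hypothetical; the ancillary folder must contain daymet_mask.img):

import datetime as dt
# Hypothetical workspace layout for a one-year precipitation extraction.
main(start_dt=dt.datetime(2015, 1, 1),
     end_dt=dt.datetime(2015, 12, 31),
     netcdf_ws='daymet/netcdf',
     ancillary_ws='daymet/ancillary',
     output_ws='daymet/rasters',
     variables=['prcp'])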
Example #3
    def get_band(self, path):
        obj_gdal = gdal.Open(path)
        return obj_gdal.GetRasterBand(1).ReadAsArray()
Example #4
def create_nodata_mask(mhhw_path,
                       output_path,
                       desired_nodata=99,
                       driver="HFA",
                       blocksize=(600, 600)):
    """
  This function takes the data from the merged dataset created using ArcGIS
  and restablishes the nodata field in GDAL.
  """

    logger.info('input: {0}'.format(mhhw_path))
    logger.info('output: {0}'.format(output_path))

    # Open the MHHW tile as read-only and get the driver GDAL is using to access the data
    mhhw_fh = gdal.Open(mhhw_path, gdal.GA_ReadOnly)
    input_driver = mhhw_fh.GetDriver()

    # Pull Metadata associated with the LIDAR tile so we can create the output raster later
    geotransform = mhhw_fh.GetGeoTransform()
    projection = mhhw_fh.GetProjection()
    cols = mhhw_fh.RasterXSize  # Get the number of columns
    rows = mhhw_fh.RasterYSize  # Get the number of rows
    logger.info("  cols: {0}".format(cols))
    logger.info("  rows: {0}".format(rows))
    mhhw_data = mhhw_fh.GetRasterBand(1)  # Get the raster band
    mhhw_original_nodata = mhhw_data.GetNoDataValue(
    )  # Get the NoData value so we can set our mask

    logger.info("  original nodata value: {0}".format(mhhw_original_nodata))
    logger.info("  desired nodata value: {0}".format(desired_nodata))

    # Get block size from parameters
    xBlockSize = blocksize[0]
    yBlockSize = blocksize[1]

    num_block_cols = cols // xBlockSize
    num_block_rows = rows // yBlockSize

    logger.info("  col blocks: {0}".format(num_block_cols))
    logger.info("  row blocks: {0}".format(num_block_rows))

    # Create a new raster using the input tile as a template.
    logger.info("  Creating new raster...")
    output_driver = gdal.GetDriverByName(driver)  # Setup the output driver
    output_fh = output_driver.Create(output_path, cols, rows, 1,
                                     gdal.GDT_Int32)
    output_fh.SetGeoTransform(geotransform)
    output_fh.SetProjection(projection)
    output_band = output_fh.GetRasterBand(1)
    output_band.SetNoDataValue(desired_nodata)
    logger.info("    done.")

    logger.info("  Processing data...")

    #  Building Jobs
    job_n = 0
    for i in range(0, rows, yBlockSize):  # Loop through row blocks
        if i + yBlockSize < rows:
            numRows = yBlockSize
        else:
            numRows = rows - i

        for j in range(0, cols, xBlockSize):  # Loop through col blocks
            if j + xBlockSize < cols:
                numCols = xBlockSize
            else:
                numCols = cols - j
            # Build job here
            #logger.info("    Working on block offset ({0},{1}); cols {2}; rows: {3}...".format(j,i, numCols, numRows))
            mhhw_np = mhhw_data.ReadAsArray(j, i, numCols, numRows)
            mhhw_wp_interp_mask = np.greater(
                mhhw_np, mhhw_original_nodata
            )  # This marks each cell as 0/1. 0 means we want to interpolate.

            # Write the array to the raster
            output_band.WriteArray(mhhw_wp_interp_mask, j, i)

            # Clean Up
            output_fh.FlushCache()
            mhhw_wp_interp_mask = None
            mhhw_np = None
    # Done looping through blocks

    logger.info("   done.")

    # Compute Statistics before closing out the dataset
    logger.info("  Computing stats...")
    output_band.ComputeStatistics(False)
    logger.info("    done.")

    logger.info("  Building blocks...")
    output_fh.BuildOverviews(overviewlist=[2, 4, 8, 16, 32, 64, 128])
    logger.info("    done.")

    logger.info("  Flushing the cache...")
    output_fh.FlushCache()
    logger.info("    done.")

    # Clean up the dataset file handlers
    logger.info("  Closing the dataset...")
    output_band = None
    mhhw_fh = None
    output_fh = None
    logger.info("    done.")

    return output_path
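A usage sketch, assuming an HFA tile produced by the ArcGIS merge step (the paths are placeholders):

# Hypothetical call: writes a 0/1 mask where 0 marks cells to interpolate.
mask_path = create_nodata_mask('mhhw_merged.img', 'mhhw_interp_mask.img',
                               desired_nodata=99, driver='HFA',
                               blocksize=(600, 600))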
Example #5
def fix_nodata(input_path,
               output_path,
               desired_nodata=-9999,
               driver="HFA",
               blocksize=(600, 600)):
    """
  This function takes the data from the grid dataset and restablishes the nodata field in GDAL.
  """

    # Filepaths
    logger.info('input: {0}'.format(input_path))
    logger.info('output: {0}'.format(output_path))

    # Open the VDatum grid as read-only and get the driver GDAL is using to access the data
    input_fh = gdal.Open(input_path, gdal.GA_ReadOnly)
    input_driver = input_fh.GetDriver()

    # Pull Metadata associated with the LIDAR tile so we can create the output raster later
    geotransform = input_fh.GetGeoTransform()
    projection = input_fh.GetProjection()
    cols = input_fh.RasterXSize  # Get the number of columns
    rows = input_fh.RasterYSize  # Get the number of rows
    logger.info("  cols: {0}".format(cols))
    logger.info("  rows: {0}".format(rows))
    grid_data = input_fh.GetRasterBand(1)  # Get the raster band
    grid_datatype = grid_data.DataType
    grid_original_nodata = grid_data.GetNoDataValue(
    )  # Get the NoData value so we can set our mask
    logger.info("  original nodata value: {0}".format(grid_original_nodata))
    logger.info("  desired nodata value: {0}".format(desired_nodata))

    if grid_original_nodata is None:
        logger.warning(
            "  Using a nodata value of -88.8 because the src file didn't define a nodata value."
        )
        grid_original_nodata = float(-88.8)

    # Get block size from parameters
    xBlockSize = blocksize[0]
    yBlockSize = blocksize[1]

    num_block_cols = cols // xBlockSize
    num_block_rows = rows // yBlockSize

    logger.info("  col blocks: {0}".format(num_block_cols))
    logger.info("  row blocks: {0}".format(num_block_rows))

    # Create a new raster using the input tile as a template.
    logger.info("  Creating new raster...")
    output_driver = gdal.GetDriverByName(driver)  # Setup the output driver
    output_fh = output_driver.Create(output_path, cols, rows, 1, grid_datatype)
    output_fh.SetGeoTransform(geotransform)
    output_fh.SetProjection(projection)
    output_band = output_fh.GetRasterBand(1)
    output_band.SetNoDataValue(desired_nodata)
    logger.info("    done.")

    logger.info("  Processing data...")

    #  Building Jobs
    job_n = 0
    for i in range(0, rows, yBlockSize):  # Loop through row blocks
        if i + yBlockSize < rows:
            numRows = yBlockSize
        else:
            numRows = rows - i
        for j in range(0, cols, xBlockSize):  # Loop through col blocks
            if j + xBlockSize < cols:
                numCols = xBlockSize
            else:
                numCols = cols - j
            # Build job here
            #logger.info("    Working on block offset ({0},{1}); cols {2}; rows: {3}...".format(j,i, numCols, numRows))
            grid_np = grid_data.ReadAsArray(j, i, numCols, numRows)
            grid_np_masked = np.ma.masked_less_equal(
                grid_np, grid_original_nodata,
                copy=False).filled(np.nan)  # Replace masked (nodata) cells with NaN

            # Write the array to the raster
            output_band.WriteArray(grid_np_masked, j, i)

            # Clean Up
            output_fh.FlushCache()
            grid_np_masked = None
            grid_np = None
    # Done looping through blocks

    logger.info("   done.")

    # Compute Statistics before closing out the dataset
    logger.info("  Computing stats...")
    output_band.ComputeStatistics(False)
    logger.info("    done.")

    logger.info("  Building blocks...")
    output_fh.BuildOverviews(overviewlist=[2, 4, 8, 16, 32, 64, 128])
    logger.info("    done.")

    logger.info("  Flushing the cache...")
    output_fh.FlushCache()
    logger.info("    done.")

    # Clean up the dataset file handlers
    logger.info("  Closing the dataset...")
    output_band = None
    input_fh = None
    output_fh = None
    logger.info("    done.")

    return output_path
Example #6
OutputDir = os.path.join(HomeDir, Project, cDataDir, Sensor, cOutputDir, cTile)
PriorDataDir = os.path.join(HomeDir, Project, cDataDir, Sensor, cTile,
                            cPriorDataDir)

print(time.strftime("Processing starting at: %d/%m/%Y %H:%M:%S"))
start = time.time()

# Get list of files to process
FileList, Year, DoY, JulianDoY, Weigth = GetFileList(strDoY, DataDir,
                                                     int(strYear))

# From the first file get dimensions
TotalRows, TotalCols = GetDimensions(FileList[0])

# From the first file get projection information
tmp_d = gdal.Open(FileList[0])
gt = tmp_d.GetGeoTransform()
proj = tmp_d.GetProjection()

# Create arrays
Parameters = shm.empty(
    (TotalRows, TotalCols, NumberOfBands, NumberOfParameters), np.float32)
ParametersVariance = shm.empty(
    (TotalRows, TotalCols, NumberOfBands, NumberOfParameters), np.float32)
GoodnessOfFit = shm.empty((TotalRows, TotalCols), np.float32)
NumberOfSamples = shm.empty((TotalRows, TotalCols), np.int16)

# Depending on the processing system, the composite could be created by storing ALL
# datasets in RAM; for prototyping, tile-based processing is implemented instead.
# 100 tiles is the default setting.
#NumberOfTiles = 12
Example #7
def GetPrior(PriorDataDir,
             strYear,
             strDoY,
             InitRow,
             InitCol,
             rows,
             cols,
             type='MCD43',
             PriorScaleFactor=10.0):

    NumberOfBands = 5
    NumberOfParameters = 3

    # Create matrices
    Prior = np.zeros((rows, cols, NumberOfBands * NumberOfParameters),
                     np.float32)
    PriorVariance = np.zeros((rows, cols, NumberOfBands * NumberOfParameters),
                             np.float32)
    Mask = np.zeros((rows, cols), np.int8)
    NSamples = np.zeros((rows, cols), np.int8)

    C = np.zeros((rows, cols, NumberOfBands * NumberOfParameters,
                  NumberOfBands * NumberOfParameters), np.float32)
    Cinv = np.zeros((rows, cols, NumberOfBands * NumberOfParameters,
                     NumberOfBands * NumberOfParameters), np.float32)
    CinvF = np.zeros((rows, cols, NumberOfBands * NumberOfParameters),
                     np.float32)  # Matrix to store C^-1 * Fpr

    if type == 'MCD43':
        if int(strDoY) == 1:
            strDoY = "361"
        else:
            strDoY = '%03d' % ((int(strDoY) - 8))

        PriorFile = 'MCD43A1.Prior.%s.img' % (strDoY)
        PriorFile = os.path.join(PriorDataDir, PriorFile)
        print("Opening prior", PriorFile, "with scaling factor",
              PriorScaleFactor)
    else:
        # Previous DoY
        Year = int(strYear)
        if int(strDoY) == 1:
            Year = Year - 1
            strYear = str(Year)
            strDoY = "361"
        else:
            strDoY = '%03d' % ((int(strDoY) - 8))

        PriorFile = 'BRDF_Parameters.%s%s.tif' % (strYear, strDoY)
        PriorFile = os.path.join(PriorDataDir, PriorFile)
        # Check if the file exists
        if glob.glob(PriorFile):
            print("Opening prior", PriorFile, "with scaling factor",
                  PriorScaleFactor)
        else:
            return ReturnPriorData(Cinv, CinvF, Prior, PriorVariance, Mask,
                                   NSamples)

    dataset = gdal.Open(PriorFile, GA_ReadOnly)
    for i in range(NumberOfBands * NumberOfParameters):
        BandData = dataset.GetRasterBand(i + 1).ReadAsArray(
            InitCol, InitRow, cols, rows)
        Prior[:, :, i] = BandData

        BandData = dataset.GetRasterBand(i +
                                         (NumberOfBands * NumberOfParameters) +
                                         1).ReadAsArray(
                                             InitCol, InitRow, cols, rows)
        # The covariance can be 0 or very small even where samples exist; set the variance to 1 in that case
        PriorVariance[:, :, i] = np.where(BandData[:, :] <= 1.0e-8, 1.0,
                                          BandData[:, :])
        C[:, :, i, i] = PriorVariance[:, :, i] * PriorScaleFactor
        C[:, :, i, i] = np.where(C[:, :, i, i] > 1.0, 1.0, C[:, :, i, i])

    # Number of Samples
    nsamples_band = (NumberOfBands * NumberOfParameters * 2) + 1
    NSamples[:, :] = dataset.GetRasterBand(nsamples_band).ReadAsArray(
        InitCol, InitRow, cols, rows)

    BandData = dataset = None

    # Calculate C inverse
    for j in range(0, cols):
        for i in range(0, rows):
            # Check that at least the isotropic parameters have values
            if np.where((Prior[i, j, [0, 3, 6]] > 0.0)
                        & (Prior[i, j, [0, 3, 6]] <= 1.0))[0].shape[0] == 3:

                try:
                    Cinv[i, j, :, :] = np.linalg.inv(C[i, j, :, :])
                except np.linalg.LinAlgError:
                    indices = np.where(PriorVariance[i, j, :] == 0.0)[0]
                    PriorVariance[i, j, indices] = 1.0
                    C[i, j, indices, indices] = 1.0
                    Cinv[i, j, :, :] = np.linalg.inv(C[i, j, :, :])

                for k in range(NumberOfBands * NumberOfParameters):
                    CinvF[i, j, k] = Cinv[i, j, k, k] * Prior[i, j, k]

                Mask[i, j] = 1

    return ReturnPriorData(Cinv, CinvF, Prior, PriorVariance, Mask, NSamples)
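Since C is filled only on its diagonal, the per-pixel 15x15 inversion reduces to an elementwise reciprocal. A self-contained sketch of that shortcut on small synthetic arrays (the shapes, scale factor, and clipping mirror the loop above, but the array values here are made up):

import numpy as np

# Synthetic stand-in arrays for illustration only.
rows, cols = 4, 4
nb = 15  # NumberOfBands * NumberOfParameters
Prior = np.random.uniform(0.0, 1.0, (rows, cols, nb)).astype(np.float32)
PriorVariance = np.random.uniform(0.01, 0.5, (rows, cols, nb)).astype(np.float32)

# C is diagonal, so C^-1 is just the elementwise reciprocal of its diagonal;
# no per-pixel np.linalg.inv loop is required.
var = np.clip(PriorVariance * 10.0, None, 1.0)  # scale factor, capped at 1.0
var = np.where(var == 0.0, 1.0, var)            # guard against singular entries
CinvDiag = 1.0 / var                            # diagonal of C^-1
CinvF = CinvDiag * Prior                        # C^-1 * Fpr, elementwise

# Valid pixels: all three isotropic parameters (indices 0, 3, 6) in (0, 1]
iso = Prior[:, :, [0, 3, 6]]
Mask = np.all((iso > 0.0) & (iso <= 1.0), axis=2).astype(np.int8)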
Example #8
def test_mem_9():

    # Test IRasterIO(GF_Read,)
    src_ds = gdal.Open('data/rgbsmall.tif')
    drv = gdal.GetDriverByName('MEM')

    for interleave in ['BAND', 'PIXEL']:
        out_ds = drv.CreateCopy('',
                                src_ds,
                                options=['INTERLEAVE=%s' % interleave])
        ref_data = src_ds.GetRasterBand(2).ReadRaster(20, 8, 4, 5)
        got_data = out_ds.GetRasterBand(2).ReadRaster(20, 8, 4, 5)
        if ref_data != got_data:
            import struct
            print(struct.unpack('B' * 4 * 5, ref_data))
            print(struct.unpack('B' * 4 * 5, got_data))
            pytest.fail(interleave)

        ref_data = src_ds.GetRasterBand(2).ReadRaster(20,
                                                      8,
                                                      4,
                                                      5,
                                                      buf_pixel_space=3,
                                                      buf_line_space=100)
        got_data = out_ds.GetRasterBand(2).ReadRaster(20,
                                                      8,
                                                      4,
                                                      5,
                                                      buf_pixel_space=3,
                                                      buf_line_space=100)
        assert ref_data == got_data, interleave

        ref_data = src_ds.ReadRaster(20, 8, 4, 5)
        got_data = out_ds.ReadRaster(20, 8, 4, 5)
        assert ref_data == got_data, interleave

        ref_data = src_ds.ReadRaster(20,
                                     8,
                                     4,
                                     5,
                                     buf_pixel_space=3,
                                     buf_band_space=1)
        got_data = out_ds.ReadRaster(20,
                                     8,
                                     4,
                                     5,
                                     buf_pixel_space=3,
                                     buf_band_space=1)
        assert ref_data == got_data, interleave

        out_ds.WriteRaster(20,
                           8,
                           4,
                           5,
                           got_data,
                           buf_pixel_space=3,
                           buf_band_space=1)
        got_data = out_ds.ReadRaster(20,
                                     8,
                                     4,
                                     5,
                                     buf_pixel_space=3,
                                     buf_band_space=1)
        assert ref_data == got_data, interleave

        ref_data = src_ds.ReadRaster(20,
                                     8,
                                     4,
                                     5,
                                     buf_pixel_space=3,
                                     buf_line_space=100,
                                     buf_band_space=1)
        got_data = out_ds.ReadRaster(20,
                                     8,
                                     4,
                                     5,
                                     buf_pixel_space=3,
                                     buf_line_space=100,
                                     buf_band_space=1)
        assert ref_data == got_data, interleave

        ref_data = src_ds.ReadRaster(20,
                                     20,
                                     4,
                                     5,
                                     buf_type=gdal.GDT_Int32,
                                     buf_pixel_space=12,
                                     buf_band_space=4)
        got_data = out_ds.ReadRaster(20,
                                     20,
                                     4,
                                     5,
                                     buf_type=gdal.GDT_Int32,
                                     buf_pixel_space=12,
                                     buf_band_space=4)
        assert ref_data == got_data, interleave
        out_ds.WriteRaster(20,
                           20,
                           4,
                           5,
                           got_data,
                           buf_type=gdal.GDT_Int32,
                           buf_pixel_space=12,
                           buf_band_space=4)
        got_data = out_ds.ReadRaster(20,
                                     20,
                                     4,
                                     5,
                                     buf_type=gdal.GDT_Int32,
                                     buf_pixel_space=12,
                                     buf_band_space=4)
        assert ref_data == got_data, interleave

        # Test IReadBlock
        ref_data = src_ds.GetRasterBand(1).ReadRaster(0, 10,
                                                      src_ds.RasterXSize, 1)
        # This is a bit nasty to have to do that. We should fix the core
        # to make that unnecessary
        out_ds.FlushCache()
        got_data = out_ds.GetRasterBand(1).ReadBlock(0, 10)
        assert ref_data == got_data, interleave

        # Test IRasterIO(GF_Write,)
        ref_data = src_ds.GetRasterBand(1).ReadRaster(2, 3, 4, 5)
        out_ds.GetRasterBand(1).WriteRaster(6, 7, 4, 5, ref_data)
        got_data = out_ds.GetRasterBand(1).ReadRaster(6, 7, 4, 5)
        assert ref_data == got_data

        # Test IRasterIO(GF_Write, change data type) + IWriteBlock() + IRasterIO(GF_Read, change data type)
        ref_data = src_ds.GetRasterBand(1).ReadRaster(10,
                                                      11,
                                                      4,
                                                      5,
                                                      buf_type=gdal.GDT_Int32)
        out_ds.GetRasterBand(1).WriteRaster(10,
                                            11,
                                            4,
                                            5,
                                            ref_data,
                                            buf_type=gdal.GDT_Int32)
        got_data = out_ds.GetRasterBand(1).ReadRaster(10,
                                                      11,
                                                      4,
                                                      5,
                                                      buf_type=gdal.GDT_Int32)
        assert ref_data == got_data, interleave

        ref_data = src_ds.GetRasterBand(1).ReadRaster(10, 11, 4, 5)
        got_data = out_ds.GetRasterBand(1).ReadRaster(10, 11, 4, 5)
        assert ref_data == got_data, interleave

        # Test IRasterIO(GF_Write, resampling) + IWriteBlock() + IRasterIO(GF_Read, resampling)
        ref_data = src_ds.GetRasterBand(1).ReadRaster(10, 11, 4, 5)
        ref_data_zoomed = src_ds.GetRasterBand(1).ReadRaster(
            10, 11, 4, 5, 8, 10)
        out_ds.GetRasterBand(1).WriteRaster(10, 11, 8, 10, ref_data, 4, 5)
        got_data = out_ds.GetRasterBand(1).ReadRaster(10, 11, 8, 10)
        assert ref_data_zoomed == got_data, interleave

        got_data = out_ds.GetRasterBand(1).ReadRaster(10, 11, 8, 10, 4, 5)
        assert ref_data == got_data, interleave
Example #9
def test_mem_10():

    # Error case: building overview on a 0 band dataset
    ds = gdal.GetDriverByName('MEM').Create('', 1, 1, 0)
    with gdaltest.error_handler():
        ds.BuildOverviews('NEAR', [2])

    # Requesting overviews when none have been built
    ds = gdal.GetDriverByName('MEM').Create('', 1, 1)
    assert ds.GetRasterBand(1).GetOverviewCount() == 0
    assert ds.GetRasterBand(1).GetOverview(-1) is None
    assert ds.GetRasterBand(1).GetOverview(0) is None

    # Single band case
    ds = gdal.GetDriverByName('MEM').CreateCopy('', gdal.Open('data/byte.tif'))
    for _ in range(2):
        ret = ds.BuildOverviews('NEAR', [2])
        assert ret == 0
        assert ds.GetRasterBand(1).GetOverviewCount() == 1
        cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
        assert cs == 1087

    ret = ds.BuildOverviews('NEAR', [4])
    assert ret == 0
    assert ds.GetRasterBand(1).GetOverviewCount() == 2
    cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert cs == 1087
    cs = ds.GetRasterBand(1).GetOverview(1).Checksum()
    assert cs == 328

    ret = ds.BuildOverviews('NEAR', [2, 4])
    assert ret == 0
    assert ds.GetRasterBand(1).GetOverviewCount() == 2
    cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert cs == 1087
    cs = ds.GetRasterBand(1).GetOverview(1).Checksum()
    assert cs == 328

    # Test that average in one or several steps give the same result
    ds.GetRasterBand(1).GetOverview(0).Fill(0)
    ds.GetRasterBand(1).GetOverview(1).Fill(0)

    ret = ds.BuildOverviews('AVERAGE', [2, 4])
    assert ret == 0
    assert ds.GetRasterBand(1).GetOverviewCount() == 2
    cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert cs == 1152
    cs = ds.GetRasterBand(1).GetOverview(1).Checksum()
    assert cs == 240

    ds.GetRasterBand(1).GetOverview(0).Fill(0)
    ds.GetRasterBand(1).GetOverview(1).Fill(0)

    ret = ds.BuildOverviews('AVERAGE', [2])
    ret = ds.BuildOverviews('AVERAGE', [4])
    assert ret == 0
    assert ds.GetRasterBand(1).GetOverviewCount() == 2
    cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert cs == 1152
    cs = ds.GetRasterBand(1).GetOverview(1).Checksum()
    assert cs == 240

    ds = None

    # Multiple band case
    ds = gdal.GetDriverByName('MEM').CreateCopy('',
                                                gdal.Open('data/rgbsmall.tif'))
    ret = ds.BuildOverviews('NEAR', [2])
    assert ret == 0
    cs = ds.GetRasterBand(1).GetOverview(0).Checksum()
    assert cs == 5057
    cs = ds.GetRasterBand(2).GetOverview(0).Checksum()
    assert cs == 5304
    cs = ds.GetRasterBand(3).GetOverview(0).Checksum()
    assert cs == 5304
    ds = None

    # Clean overviews
    ds = gdal.GetDriverByName('MEM').CreateCopy('', gdal.Open('data/byte.tif'))
    ret = ds.BuildOverviews('NEAR', [2])
    assert ret == 0
    ret = ds.BuildOverviews('NONE', [])
    assert ret == 0
    assert ds.GetRasterBand(1).GetOverviewCount() == 0
    ds = None
Example #10
##  Decimal hour of acquisition
dd = date.hour + (date.minute / 60) + (date.second / (60 * 60))
##  Julian day (absolute day of the year)
dy = date.timetuple().tm_yday
##  Hour angle for the solar elevation calculation
h = mt.radians(abs((dd - 12) * 15))
##  Declination (delta) for the solar zenith angle calculation
##  (the sine argument is in degrees, so convert it to radians first)
delta = mt.radians(23.45 * mt.sin(mt.radians((360 / 365) * (dy - 80))))
##  Solar zenith angle calculation
theta_s = mt.degrees(
    mt.acos((mt.sin(lat) * mt.sin(delta)) +
            (mt.cos(lat) * mt.cos(delta) * mt.cos(h))))
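A quick sanity check of the zenith formula (illustrative values; note that `lat` must already be in radians): at local solar noon on the equinox the zenith angle should equal the absolute latitude.

import math as mt
# Illustrative check with a hypothetical site latitude.
lat = mt.radians(-19.0)
dd, dy = 12.0, 80                    # solar noon, day ~80 (equinox)
h = mt.radians(abs((dd - 12) * 15))  # hour angle = 0 at solar noon
delta = mt.radians(23.45 * mt.sin(mt.radians((360 / 365) * (dy - 80))))  # ~0
theta_s = mt.degrees(mt.acos((mt.sin(lat) * mt.sin(delta)) +
                             (mt.cos(lat) * mt.cos(delta) * mt.cos(h))))
print(theta_s)  # ~19.0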

##
ref_band = r'.\00_Dados\Imagens\utm_mask_T23KMV_20190701T131251_B01.tif'
dataset_de_referencia = gdal.Open(ref_band, GA_ReadOnly)

##  Applying the QAA v6 model to the loaded images
kd_msi_b1 = Qaa_v6_model_msi(b1, 443, b1, b2, b3, b4, b5, 560, aw, bbw,
                             theta_s)
salvar_banda(kd_msi_b1, r'.\03_Results_image\QAAv6_msi_2019_60m_443nm.tif',
             dataset_de_referencia)
del kd_msi_b1
gc.collect()

kd_msi_b2 = Qaa_v6_model_msi(b2, 492, b1, b2, b3, b4, b5, 560, aw, bbw,
                             theta_s)
salvar_banda(kd_msi_b2, r'.\03_Results_image\QAAv6_msi_2019_60m_492nm.tif',
             dataset_de_referencia)
del kd_msi_b2
gc.collect()
Example #11
def test_mem_2():

    gdal.PushErrorHandler('CPLQuietErrorHandler')
    ds = gdal.Open('MEM:::')
    gdal.PopErrorHandler()
    assert ds is None, 'opening MEM dataset should have failed.'

    try:
        import ctypes
    except ImportError:
        pytest.skip()

    for libname in ['msvcrt', 'libc.so.6']:
        try:
            crt = ctypes.CDLL(libname)
        except OSError:
            crt = None
        if crt is not None:
            break

    if crt is None:
        pytest.skip()

    malloc = crt.malloc
    malloc.argtypes = [ctypes.c_size_t]
    malloc.restype = ctypes.c_void_p

    free = crt.free
    free.argtypes = [ctypes.c_void_p]
    free.restype = None

    # allocate band data array.
    width = 50
    height = 3
    p = malloc(width * height * 4)
    if p is None:
        pytest.skip()
    float_p = ctypes.cast(p, ctypes.POINTER(ctypes.c_float))

    # build ds name.
    dsnames = [
        'MEM:::DATAPOINTER=0x%X,PIXELS=%d,LINES=%d,BANDS=1,DATATYPE=Float32,PIXELOFFSET=4,LINEOFFSET=%d,BANDOFFSET=0'
        % (p, width, height, width * 4),
        'MEM:::DATAPOINTER=0x%X,PIXELS=%d,LINES=%d,DATATYPE=Float32' %
        (p, width, height)
    ]

    for dsname in dsnames:

        for i in range(width * height):
            float_p[i] = 5.0

        dsro = gdal.Open(dsname)
        if dsro is None:
            free(p)
            pytest.fail('opening MEM dataset failed in read only mode.')

        chksum = dsro.GetRasterBand(1).Checksum()
        if chksum != 750:
            print(chksum)
            free(p)
            pytest.fail('checksum failed.')
        dsro = None

        dsup = gdal.Open(dsname, gdal.GA_Update)
        if dsup is None:
            free(p)
            pytest.fail('opening MEM dataset failed in update mode.')

        dsup.GetRasterBand(1).Fill(100.0)
        dsup.FlushCache()

        if float_p[0] != 100.0:
            print(float_p[0])
            free(p)
            pytest.fail('fill seems to have failed.')

        dsup = None

    free(p)
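For reference, the same array-backed behaviour is usually easier to get through gdal_array, which wraps a NumPy array as a MEM dataset without ctypes (a sketch, assuming a GDAL Python build with NumPy bindings):

import numpy as np
from osgeo import gdal_array

arr = np.full((3, 50), 5.0, dtype=np.float32)  # same 3x50 Float32 layout as above
ds = gdal_array.OpenArray(arr)                 # MEM dataset backed by the array
ds.GetRasterBand(1).Fill(100.0)
ds.FlushCache()
print(arr[0, 0])                               # 100.0 -- writes land in the array
ds = None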
Example #12
#!/usr/bin/env python
# return the Median value of a Geotiff image (only containing 1 band)

import sys
import numpy as np
from osgeo import gdal

filename = sys.argv[1]
gtif = gdal.Open(filename)
image = gtif.GetRasterBand(1)
value_arr = image.ReadAsArray()
print(np.median(value_arr[~np.isnan(value_arr)]))
Example #13
    def test_reproject_raster():

        ndviname = "2003.tif"

        inp = None
        out = None
        try:
            # create dataset
            inp = gdal.Open(ndviname)

            # reproject
            print("reprojecting to etrs1989...")
            out = reproject_raster(inp,
                                   epsg_from=None,
                                   epsg_to=etrs1989,
                                   fltr=gdal.GRA_Bilinear)
            # save to disk
            print("saving to disk...")
            inp.GetDriver().CreateCopy(ndviname + "_3035.tif", out)

            # reproject
            print("reprojecting to etrs1989...")
            out = reproject_raster_1(inp,
                                     pixel_spacing=None,
                                     epsg_from=None,
                                     epsg_to=etrs1989,
                                     fltr=gdal.GRA_Bilinear)
            # save to disk
            print("saving to disk...")
            inp.GetDriver().CreateCopy(ndviname + "_3035_1.tif", out)

            # reproject
            print("reprojecting to 4326...")
            out = reproject_raster(inp,
                                   epsg_from=None,
                                   epsg_to=4326,
                                   fltr=gdal.GRA_Bilinear)
            # save to disk
            print("saving to disk...")
            inp.GetDriver().CreateCopy(ndviname + "_4326.tif", out)

            # reproject back
            print("reprojecting from 4326 to rdnew...")

            inp = gdal.Open(ndviname + "_4326.tif")
            out = reproject_raster(inp,
                                   epsg_from=None,
                                   epsg_to=28992,
                                   fltr=gdal.GRA_NearestNeighbour)
            # save to disk
            print("saving to disk...")
            inp.GetDriver().CreateCopy(ndviname + "_28992.tif", out)


        finally:
            # close datasets
            if inp:
                inp = None
            if out:
                out = None
Example #14
def sieve_raster(infile,
                 threshold,
                 connectedness=4,
                 outfile=None,
                 mask=None,
                 frmt="GTiff",
                 silence=True):
    """    
    Removes raster polygons smaller than a provided threshold size (in pixels) and replaces 
    them with the pixel value of the largest neighbour polygon. The result can be written back 
    to the existing raster band, or copied into a new file.
    
    ###NOTE: Polygons smaller than the threshold with no neighbours that are as large as the threshold will not be altered. 
    Polygons surrounded by nodata areas will therefore not be altered.####

    The input dataset is read as integer data which means that floating point values are rounded 
    to integers. Re-scaling source data may be necessary in some cases 
    (e.g. 32-bit floating point data with min=0 and max=1).
      
    The algorithm makes three passes over the input file to enumerate the polygons and collect limited information about them. Memory use is proportional to the number of polygons (roughly 24 bytes per polygon), but is not directly related to the size of the raster. So very large raster files can be processed effectively if there aren't too many polygons. 
    But extremely noisy rasters with many one pixel polygons will end up being expensive (in memory) to process.

    More info:
    http://www.gdal.org/gdal__alg_8h.html#a33309c0a316b223bd33ae5753cc7f616
    http://www.gdal.org/gdal_sieve.html

    :param infile: path to input file
    :param threshold:  size threshold in pixels. Only raster polygons smaller than this size will be removed.
    :param connectedness: 
     4 Four connectedness should be used when determining polygons. That is diagonal pixels are not 
     considered directly connected. This is the default.

    8  Eight connectedness should be used when determining polygons. That is diagonal pixels are 
    considered directly connected
    
    :param outfile: path to output file, pass None to change the input in place
    :param mask: an optional path to a mask band. All pixels in the mask band with a value other than zero 
    will be considered suitable for inclusion in polygons.
    Pass "default" to use a mask included in the input file
    :param frmt: the output format, default "GTiff"
    :param silence: pass any value to enable debug info
    :return: None
    """

    if connectedness != 4:
        connectedness = 8

    src_ds = None
    dst_ds = None
    mask_ds = None

    try:
        #open source file and mask
        if not outfile:
            src_ds = gdal.Open(infile, gdal.GA_Update)
        else:
            src_ds = gdal.Open(infile, gdal.GA_ReadOnly)
        srcband = src_ds.GetRasterBand(1)

        if mask:
            if mask == 'default':
                maskband = srcband.GetMaskBand()
            else:
                mask_ds = gdal.Open(mask)
                maskband = mask_ds.GetRasterBand(1)
        else:
            maskband = None

        # define the output, copy properties from the input
        if outfile:
            drv = gdal.GetDriverByName(frmt)
            dst_ds = drv.Create(outfile, src_ds.RasterXSize,
                                src_ds.RasterYSize, 1, srcband.DataType)
            wkt = src_ds.GetProjection()
            if wkt != '':
                dst_ds.SetProjection(wkt)
            dst_ds.SetGeoTransform(src_ds.GetGeoTransform())
            dstband = dst_ds.GetRasterBand(1)
            # set the nodata value (if the source band defines one)
            src_nodata = srcband.GetNoDataValue()
            if src_nodata is not None:
                dstband.SetNoDataValue(src_nodata)
        else:
            dstband = srcband

        if silence:
            prog_func = None
        else:
            prog_func = gdal.TermProgress

        gdal.SieveFilter(srcband,
                         maskband,
                         dstband,
                         threshold,
                         connectedness,
                         callback=prog_func)

    finally:
        # Close the datasets; this also flushes any pending writes to disk
        src_ds = None
        dst_ds = None
        mask_ds = None
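A hedged usage sketch (the file names are placeholders):

# Hypothetical call: remove patches smaller than 10 pixels, diagonals connected.
sieve_raster('classified.tif', threshold=10, connectedness=8,
             outfile='classified_sieved.tif', frmt='GTiff', silence=False)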
Example #15
def s3netcdf2other(input_dir,
                   output_image,
                   instrument='OLCI',
                   outformat='GTiff',
                   to_utm=True,
                   outres=300):
    tempdir = os.path.join(input_dir, 'tmp')
    if os.path.exists(tempdir):
        shutil.rmtree(tempdir)
    os.mkdir(tempdir)
    print('Retrieving coordinates...')
    if instrument == 'OLCI':
        BANDNAMES = [
            'Oa{0}_radiance'.format(str(i).zfill(2)) for i in range(1, 22)
        ]  # range(1, 22)
        nc_coords = os.path.join(input_dir, 'geo_coordinates.nc')
        ds_nc = Dataset(nc_coords, 'r')
        coords = (ds_nc.variables['latitude'], ds_nc.variables['longitude'])
    elif instrument == 'SLSTR':
        BANDNAMES = [
            'S{0}_radiance_an'.format(str(i).zfill(1)) for i in range(1, 7)
        ]  # range(1, 10)
        BANDNAMES = BANDNAMES + [
            'S{0}_BT_in'.format(str(i).zfill(1)) for i in range(7, 10)
        ]  # range(1, 10)
        BANDNAMES = BANDNAMES + [
            'F{0}_BT_in'.format(str(i).zfill(1)) for i in range(1, 3)
        ]  # range(1, 10)
        nc_coords = os.path.join(input_dir, 'geodetic_an.nc')
        ds_nc = Dataset(nc_coords, 'r')
        coords = (ds_nc.variables['latitude_an'],
                  ds_nc.variables['longitude_an'])
    else:
        print('Wrong instrument indicator! Must be either "OLCI" or "SLSTR"!')
        return
    lat_tif = os.path.join(tempdir, coords[0].name + '.tif')
    lon_tif = os.path.join(tempdir, coords[1].name + '.tif')
    rad_tifs = []
    nc_paths = [os.path.join(input_dir, band + '.nc') for band in BANDNAMES]
    # get lat/lon
    for v, var in enumerate(coords):
        nodata = var._FillValue
        scale = var.scale_factor
        var_vrt = os.path.join(tempdir, var.name + '.vrt')
        var_tif = os.path.join(tempdir, var.name + '.tif')
        cmd = [
            'gdalbuildvrt', '-sd',
            str(1 + var._varid), '-separate', '-overwrite', var_vrt, nc_coords
        ]
        sub.call(cmd)
        # edit vrt
        with open(var_vrt, 'r') as f:
            xml = f.readlines()
        for line in xml:
            if '<VRTRasterBand ' in line:
                head_index = xml.index(line) + 1
            if '<DstRect xOff' in line:
                tail_index = xml.index(line) + 1
        xml.insert(head_index,
                   '    <NoDataValue>{nd}</NoDataValue>\n'.format(nd=nodata))
        xml.insert(head_index + 1,
                   '    <Scale>{sc}</Scale>\n'.format(sc=scale))
        tail_index = tail_index + 2
        xml.insert(tail_index,
                   '      <NODATA>{nd}</NODATA>\n'.format(nd=nodata))
        xml.insert(tail_index + 2, '    <Offset>0.0</Offset>\n')
        xml.insert(tail_index + 3,
                   '    <Scale>{sc}</Scale>\n'.format(sc=scale))
        xml = [line.replace('="Int32', '="Float32') for line in xml]
        with open(var_vrt, 'w') as f:
            f.writelines(xml)
        # write to temporary tif
        cmd = ['gdal_translate', '-unscale', var_vrt, var_tif]
        sub.call(cmd)
    ds_nc.close()
    # single bands to vrt, then to tif
    print('Converting all {n} bands...'.format(n=len(BANDNAMES)))
    for n, nc in enumerate(nc_paths):
        print('\t... BAND {b}'.format(b=n + 1))
        ds_nc = Dataset(nc, 'r')
        var = ds_nc.variables[os.path.basename(nc)[:-3]]
        nodata = var._FillValue
        offset = var.add_offset
        rows = var.shape[0]
        scale = var.scale_factor
        ds_nc.close()
        data_vrt = os.path.join(tempdir, 'data.vrt')
        data_vrt_tif = data_vrt.replace('.vrt', '.tif')
        out_vrt = os.path.join(tempdir, os.path.basename(nc)[:-3] + '.vrt')
        out_tif = out_vrt.replace('.vrt', '.tif')
        if instrument == 'OLCI':
            cmd = [
                'gdalbuildvrt', '-sd', '1', '-separate', '-overwrite',
                data_vrt, nc
            ]
        else:
            if os.path.basename(nc).endswith('BT_in.nc'):
                cmd = [
                    'gdalbuildvrt', '-sd', '1', '-separate', '-overwrite',
                    data_vrt, nc
                ]
            else:
                cmd = [
                    'gdalbuildvrt', '-sd', '3', '-separate', '-overwrite',
                    data_vrt, nc
                ]
        sub.call(cmd)
        # edit vrt
        with open(data_vrt, 'r') as f:
            xml = f.readlines()
        for line in xml:
            if '<VRTRasterBand ' in line:
                head_index = xml.index(line)
            if '<DstRect xOff' in line:
                tail_index = xml.index(line) + 1
        xml[head_index] = '  <VRTRasterBand dataType="Float32" band="1">\n'
        xml.insert(head_index + 1,
                   '    <NoDataValue>{nd}</NoDataValue>\n'.format(nd=nodata))
        xml[head_index + 2] = '    <ComplexSource>\n'
        xml[head_index + 5] = xml[head_index + 5].replace(
            'DataType="UInt16"', 'DataType="Float32"')
        tail_index = tail_index + 1
        xml.insert(tail_index,
                   '      <NODATA>{nd}</NODATA>\n'.format(nd=nodata))
        xml[tail_index + 1] = '    </ComplexSource>\n'
        xml.insert(tail_index + 2,
                   '    <Offset>{off}</Offset>\n'.format(off=offset))
        xml.insert(tail_index + 3,
                   '    <Scale>{sc}</Scale>\n'.format(sc=scale))
        with open(data_vrt, 'w') as f:
            f.writelines(xml)
        # write to temporary tif, then build a new vrt
        cmd = ['gdal_translate', '-unscale', data_vrt, data_vrt_tif]
        sub.call(cmd)
        # update GeoTransform
        ds = gdal.Open(data_vrt_tif, gdal.GA_Update)
        ds.SetGeoTransform((0.0, 1.0, 0.0, float(rows), 0.0, -1.0))
        ds.FlushCache()
        # build new vrt
        cmd = [
            'gdalbuildvrt', '-sd', '1', '-separate', '-overwrite', out_vrt,
            data_vrt_tif
        ]
        sub.call(cmd)
        # edit vrt
        with open(out_vrt, 'r') as f:
            xml = f.readlines()
        for line in xml:
            if '<VRTRasterBand ' in line:
                head_index = xml.index(line)
                break
        xml[head_index] = '  <VRTRasterBand dataType="Float32" band="1">\n'
        xml.insert(
            -1, '''  <metadata domain="GEOLOCATION">
    <mdi key="X_DATASET">{lon}</mdi>
    <mdi key="X_BAND">1</mdi>
    <mdi key="Y_DATASET">{lat}</mdi>
    <mdi key="Y_BAND">1</mdi>
    <mdi key="PIXEL_OFFSET">0</mdi>
    <mdi key="LINE_OFFSET">0</mdi>
    <mdi key="PIXEL_STEP">1</mdi>
    <mdi key="LINE_STEP">1</mdi>
  </metadata>\n'''.format(lon=lon_tif, lat=lat_tif))
        for line in xml:
            if os.sep in line:
                xml[xml.index(line)] = line.replace(os.sep, '/')
        with open(out_vrt, 'w') as f:
            f.writelines(xml)
        # convert to tif
        cmd = [
            'gdalwarp', '-t_srs', 'epsg:4326', '-geoloc', '-srcnodata',
            str(nodata), '-dstnodata', '-9999', out_vrt, out_tif
        ]
        sub.call(cmd)
        # remove temp files safely
        os.remove(out_vrt)
        ds = gdal.Open(data_vrt_tif, gdal.GA_ReadOnly)
        ds = None
        os.remove(data_vrt_tif)
        rad_tifs.append(out_tif)
    # stack together
    print('Stacking bands...')
    if sys.platform.startswith('win'):
        gdal_path = r'c:\Program Files\GDAL'
    else:
        gdal_path = r'/usr/bin'
    gdal_merge = os.path.join(gdal_path, 'gdal_merge.py')
    stack = os.path.join(tempdir, 'stack.tif')
    cmd = ['python', gdal_merge, '-separate', '-n', '-9999', '-o', stack]
    for r in rad_tifs:
        cmd.append(r)
    if os.path.exists(output_image):
        drv = gdal.GetDriverByName(outformat)
        drv.Delete(output_image)
    sub.call(cmd)
    if to_utm is True:
        epsg = calculateUtmZone(stack)
        print('Reprojecting to UTM (EPSG: {e})'.format(e=epsg))
        cmd = [
            'gdalwarp', '-of', outformat, '-srcnodata', '-9999', '-dstnodata',
            '-9999', '-overwrite', '-t_srs', 'epsg:{e}'.format(e=str(epsg)),
            '-tr',
            str(outres),
            str(outres), stack, output_image
        ]
        sub.call(cmd)
    else:
        if not outformat == 'GTiff':
            print('Converting to {of}...'.format(of=outformat))
            cmd = [
                'gdal_translate', '-of', outformat, '-a_nodata', '-9999',
                stack, output_image
            ]
            if outformat == 'ENVI':
                cmd.append('-co')
                cmd.append('interleave=bil')
            elif outformat == 'GTiff':
                cmd.append('-co')
                cmd.append('compress=lzw')
            sub.call(cmd)  # run the conversion for every output format
    print('Cleaning up...')
    shutil.rmtree(tempdir)
    print('Done!')
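A call sketch for the converter above (the unpacked Sentinel-3 product folder and output name are hypothetical):

# Hypothetical call: convert an unpacked OLCI Level-1 folder to a UTM GeoTIFF.
s3netcdf2other('S3A_OL_1_EFR_product.SEN3', 'olci_stack_utm.tif',
               instrument='OLCI', outformat='GTiff', to_utm=True, outres=300)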
Example #16
def test_gdal_rasterize_1():

    if test_cli_utilities.get_gdal_rasterize_path() is None:
        return 'skip'

    # Setup working spatial reference
    #sr_wkt = 'LOCAL_CS["arbitrary"]'
    #sr = osr.SpatialReference( sr_wkt )
    sr = osr.SpatialReference()
    sr.ImportFromEPSG(32631)
    sr_wkt = sr.ExportToWkt()

    # Create a raster to rasterize into.

    target_ds = gdal.GetDriverByName('GTiff').Create( 'tmp/rast1.tif', 100, 100, 3,
                                                    gdal.GDT_Byte )
    target_ds.SetGeoTransform( (1000,1,0,1100,0,-1) )
    target_ds.SetProjection( sr_wkt )

    # Close TIF file
    target_ds = None

    # Create a layer to rasterize from.

    rast_ogr_ds = \
              ogr.GetDriverByName('MapInfo File').CreateDataSource( 'tmp/rast1.tab' )
    rast_lyr = rast_ogr_ds.CreateLayer( 'rast1', srs=sr )

    rast_lyr.GetLayerDefn()
    field_defn = ogr.FieldDefn('foo')
    rast_lyr.CreateField(field_defn)

    # Add a polygon.

    wkt_geom = 'POLYGON((1020 1030,1020 1045,1050 1045,1050 1030,1020 1030))'

    feat = ogr.Feature( rast_lyr.GetLayerDefn() )
    feat.SetGeometryDirectly( ogr.Geometry(wkt = wkt_geom) )

    rast_lyr.CreateFeature( feat )

    # Add feature without geometry to test fix for #3310
    feat = ogr.Feature( rast_lyr.GetLayerDefn() )
    rast_lyr.CreateFeature( feat )

    # Add a linestring.

    wkt_geom = 'LINESTRING(1000 1000, 1100 1050)'

    feat = ogr.Feature( rast_lyr.GetLayerDefn() )
    feat.SetGeometryDirectly( ogr.Geometry(wkt = wkt_geom) )

    rast_lyr.CreateFeature( feat )

    # Close file
    rast_ogr_ds.Destroy()


    # Run the algorithm.
    (out, err) = gdaltest.runexternal_out_and_err(test_cli_utilities.get_gdal_rasterize_path() + ' -b 3 -b 2 -b 1 -burn 200 -burn 220 -burn 240 -l rast1 tmp/rast1.tab tmp/rast1.tif')
    if not (err is None or err == ''):
        gdaltest.post_reason('got error/warning')
        print(err)
        return 'fail'

    # Check results.

    target_ds = gdal.Open('tmp/rast1.tif')
    expected = 6452
    checksum = target_ds.GetRasterBand(2).Checksum()
    if checksum != expected:
        print(checksum)
        gdaltest.post_reason( 'Did not get expected image checksum' )

        return 'fail'

    target_ds = None

    return 'success'
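For comparison, the same burn can be performed through the Python bindings instead of the external utility; a minimal sketch (not part of the original test), reusing the files created above:

target_ds = gdal.Open('tmp/rast1.tif', gdal.GA_Update)
vector_ds = ogr.Open('tmp/rast1.tab')
# burn 200/220/240 into bands 3/2/1, mirroring the CLI flags used above
err = gdal.RasterizeLayer(target_ds, [3, 2, 1], vector_ds.GetLayer(0),
                          burn_values=[200, 220, 240])
target_ds = None  # flush to disk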
Example #17
        i += 1
        out_format = sys.argv[i]
    elif sys.argv[i][0] == '-':
        Usage()
    elif in_dataset is None:
        in_dataset = sys.argv[i]
    elif out_dataset is None:
        out_dataset = sys.argv[i]
    else:
        Usage()
    i += 1

if out_dataset is None:
    Usage()

ds = gdal.Open(in_dataset)
out_ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(out_dataset)
sr = None
wkt = ds.GetGCPProjection()
if wkt != '':
    sr = osr.SpatialReference(wkt)
out_lyr = out_ds.CreateLayer('gcps', geom_type=ogr.wkbPoint, srs=sr)
out_lyr.CreateField(ogr.FieldDefn('Id', ogr.OFTString))
out_lyr.CreateField(ogr.FieldDefn('Info', ogr.OFTString))
out_lyr.CreateField(ogr.FieldDefn('X', ogr.OFTReal))
out_lyr.CreateField(ogr.FieldDefn('Y', ogr.OFTReal))
gcps = ds.GetGCPs()
for gcp in gcps:
    f = ogr.Feature(out_lyr.GetLayerDefn())
    f.SetField('Id', gcp.Id)
    f.SetField('Info', gcp.Info)
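    # Hedged completion sketch: the snippet is truncated here. A plausible
    # finish (assuming the ground coordinates become the point geometry and
    # the pixel/line location fills the X/Y attribute fields) would be:
    f.SetField('X', gcp.GCPPixel)
    f.SetField('Y', gcp.GCPLine)
    pt = ogr.Geometry(ogr.wkbPoint)
    pt.SetPoint_2D(0, gcp.GCPX, gcp.GCPY)
    f.SetGeometry(pt)
    out_lyr.CreateFeature(f)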
Example #18
        filename = arg

    else:
        Usage()

    i = i + 1

if longitude is None:
    Usage()
if latitude is None:
    Usage()
if filename is None:
    Usage()

# Open input dataset
ds = gdal.Open(filename, gdal.GA_ReadOnly)
if ds is None:
    print('Cannot open %s' % filename)
    sys.exit(1)

# Build Spatial Reference object based on coordinate system, fetched from the
# opened dataset
if coordtype_georef:
    X = longitude
    Y = latitude
else:
    srs = osr.SpatialReference()
    srs.ImportFromWkt(ds.GetProjection())

    srsLatLong = srs.CloneGeogCS()
    # Convert from (longitude,latitude) to projected coordinates
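    # Hedged completion sketch: the snippet is truncated here. The usual next
    # step builds a transformation and projects the input coordinate:
    ct = osr.CoordinateTransformation(srsLatLong, srs)
    (X, Y, height) = ct.TransformPoint(longitude, latitude)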
Example #19
def GetReflectances(ReflectancesFile, KernelsFile, CloudMaskFile, Weight,
                    InitRow, InitCol, rows, cols, ProcessSnow):

    ReflScaleFactor = 10000.0
    NumberOfBands = 5
    NumberOfParameters = 3

    # BBDR matrix size -> rows x columns x NumberOfBands x 1
    Reflectance = np.zeros((rows, cols, NumberOfBands, 1), np.float32)
    ReflectanceSD = np.zeros((rows, cols, NumberOfBands), np.float32)
    NumberOfSamples = np.zeros((rows, cols), np.float32)

    # Open dataset
    dataset = gdal.Open(ReflectancesFile, GA_ReadOnly)
    band_files = dataset.GetFileList()[1::]

    #SnowMask = dataset.GetRasterBand( ( NumberOfBands * 2 ) + 1 ).ReadAsArray(InitCol,InitRow, cols ,rows)
    # Mask based on ProcessSnow
    #if ProcessSnow == 1:
    #    SnowMask = np.where( SnowMask == 1, 1, 0)
    #else:
    #    SnowMask = np.where( SnowMask == 1, 0, 1)

    # Cloud mask
    d_cloud_mask = gdal.Open(CloudMaskFile)
    CloudMask = d_cloud_mask.ReadAsArray(InitCol, InitRow, cols,
                                         rows).astype(bool)

    # Output band 5 corresponds to MODIS band 7; bands 5 and 6 are not processed
    refl_SD_noise_estimates = {
        1: 0.004,
        2: 0.015,
        3: 0.003,
        4: 0.004,
        5: 0.006
    }

    for i in range(NumberOfBands):
        dataset = gdal.Open(band_files[i])
        BandData = dataset.GetRasterBand(1).ReadAsArray(
            InitCol, InitRow, cols, rows)
        BandData[BandData == -9999] = 0.0
        #BandData = dataset.GetRasterBand(i+1).ReadAsArray(InitCol,InitRow, cols ,rows)
        #Reflectance[:,:,i,0] = ( BandData / ReflScaleFactor ) * SnowMask * CloudMask
        Reflectance[:, :, i, 0] = (BandData / ReflScaleFactor) * CloudMask

        #BandData = dataset.GetRasterBand(i+NumberOfBands+1).ReadAsArray(InitCol,InitRow, cols ,rows)
        #ReflectanceSD[:,:,i] = ( BandData / ReflScaleFactor ) * SnowMask * CloudMask
        ReflectanceSD[:, :, i] = refl_SD_noise_estimates[i + 1] * CloudMask

    dataset = None

    #-------------------------------------
    # Build C -- covariance matrix for obs
    #-------------------------------------
    # C is a symmetric matrix of size NumberOfBands x NumberOfBands
    C = np.zeros((rows, cols, NumberOfBands, NumberOfBands), np.float32)
    Cinv = np.zeros((rows, cols, NumberOfBands, NumberOfBands), np.float32)

    for i in range(NumberOfBands):
        C[:, :, i, i] = ReflectanceSD[:, :, i] * ReflectanceSD[:, :, i]

    # Create matrices: M, V and E
    # M = Kernels^T C^-1 Kernels
    # V = Kernels^T C^-1 Reflectance
    M = np.zeros((rows, cols, NumberOfBands * NumberOfParameters,
                  NumberOfBands * NumberOfParameters), np.float32)
    V = np.zeros((rows, cols, NumberOfBands * NumberOfParameters), np.float32)

    # Get Kernels
    Kernels = GetKernels(KernelsFile, InitRow, InitCol, rows, cols)

    for j in range(0, cols):
        for i in range(0, rows):
            if np.where( (Reflectance[i,j,:]>0.0) & (Reflectance[i,j,:]<=1.0) )[0].shape[0] >= 5 and \
               np.where( (ReflectanceSD[i,j,:]>=0.0) & (ReflectanceSD[i,j,:]<=1.0) )[0].shape[0] >= 5 :

                Cinv[i, j, :, :] = np.matrix(C[i, j, :, :]).I
                M[i,j,:,:] = np.matrix(Kernels[i,j,:,:]).T * np.matrix(Cinv[i,j,:,:]) * \
                                       np.matrix(Kernels[i,j,:,:])
                # Multiply only using lead diagonal of Cinv, additionally transpose
                # the result to store V as a 1 x 9 vector
                V[i, j, :] = (np.matrix(Kernels[i, j, :, :]).T *
                              np.diagflat(np.diagonal(Cinv[i, j, :, :])) *
                              Reflectance[i, j, :, :]).T
                NumberOfSamples[i, j] = Weight

    return ReturnGetReflectances(Reflectance, ReflectanceSD, M, V,
                                 NumberOfSamples, Kernels[:, :, 0, 0:3])
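For context, the per-pixel system assembled above (M = Kernels^T C^-1 Kernels, V = Kernels^T C^-1 Reflectance) is normally solved downstream for the kernel weights. A minimal sketch, assuming M and V come from GetReflectances and the pixel was actually processed (M is singular otherwise):

import numpy as np

def solve_kernel_weights(M_ij, V_ij):
    # Solve M f = V for one pixel; f holds the BRDF model parameters
    # (NumberOfBands * NumberOfParameters values).
    return np.linalg.solve(M_ij, V_ij)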
Example #20
def raster_mask2(reference_filename, \
                target_vector_file = "data/world.shp",\
                attribute_filter = 0):

    #burn_value = 1

    # First, open the file that we'll be taking as a reference
    # We will need to glean the size in pixels, as well as projection
    # and geotransform.

    vector_ds = ogr.Open(target_vector_file)

    source_ds = ogr.GetDriverByName("Memory").CopyDataSource(vector_ds, "")
    source_layer = source_ds.GetLayer(0)
    source_srs = source_layer.GetSpatialRef()
    wkt = source_srs.ExportToWkt()

    lyr = vector_ds.GetLayer(0)
    #lyr.SetAttributeFilter( attribute_filter )
    # Get a field definition from the original vector file.
    # We don't need much more detail here
    poly = lyr.GetFeature(attribute_filter)

    geom = poly.GetGeometryRef()
    pts = geom.GetGeometryRef(0)

    # MODIS reference raster
    raster = gdal.Open(reference_filename)
    # get the WKT projection string
    modisWKT = raster.GetProjectionRef()

    oSRS = osr.SpatialReference()
    oSRSop = osr.SpatialReference()

    oSRSop.ImportFromWkt(modisWKT)
    # wkt, from above, is the projection string read from the shapefile
    oSRS.ImportFromWkt(wkt)
    # now make sure we have the shapefile geom

    geom = poly.GetGeometryRef()
    pts = geom.GetGeometryRef(0)

    # pts is the polygon of interest
    pts.AssignSpatialReference(oSRS)
    # so transform it to the MODIS geometry
    pts.TransformTo(oSRSop)

    pnts = np.array([(pts.GetX(p), pts.GetY(p))
                     for p in range(pts.GetPointCount())]).transpose()

    geo_t = raster.GetGeoTransform()

    pixel, line = world2Pixel(geo_t, pnts[0], pnts[1])

    rasterPoly = Image.new("L", (raster.RasterXSize, raster.RasterYSize), 1)
    rasterize = ImageDraw.Draw(rasterPoly)
    # PIL's polygon() needs a list of (x, y) tuples
    listdata = list(
        tuple(pixel) for pixel in np.array((pixel, line)).T.tolist())
    rasterize.polygon(listdata, outline=0, fill=0)
    mask = np.array(rasterPoly).astype(bool)
    return mask
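A minimal usage sketch (file names are placeholders); the returned mask is True outside the polygon, which matches numpy's masked-array convention:

import numpy as np
from osgeo import gdal

mask = raster_mask2('modis_scene.tif', target_vector_file='data/world.shp',
                    attribute_filter=0)
band = gdal.Open('modis_scene.tif').ReadAsArray()
inside_only = np.ma.array(band, mask=mask)  # masks everything outside the polygon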
Example #21
__author__ = 'singhdj2'

# GDAL-based program to access image and GeoTIFF EXIF information.
# Only works for some JPEGs and TIFFs.
from osgeo import gdal
gjpeg = gdal.Open( "/Users/singhdj2/Desktop/IMG_4887.JPG" )

meta_dict = gjpeg.GetMetadata()

#print(type(meta_dict))
#print(dir(meta_dict))

for tag, value in meta_dict.items():
    print(tag, ':', value)
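Hedged follow-up: with GDAL's JPEG driver, EXIF tags appear in the default metadata domain with an EXIF_ prefix, so a single tag can be pulled like this (tag availability depends on the file):

date_taken = meta_dict.get('EXIF_DateTime')
if date_taken:
    print('taken:', date_taken)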



Example #22
def raster_mask(reference_filename, \
                target_vector_file = "data/world.shp",\
                attribute_filter = "NAME = 'IRELAND'"):

    burn_value = 1

    # First, open the file that we'll be taking as a reference
    # We will need to glean the size in pixels, as well as projection
    # and geotransform.

    g = gdal.Open(reference_filename)

    # We now create an in-memory raster, with the appropriate dimensions
    drv = gdal.GetDriverByName('MEM')
    target_ds = drv.Create('', g.RasterXSize, g.RasterYSize, 1, gdal.GDT_Byte)
    target_ds.SetGeoTransform(g.GetGeoTransform())

    # We set up a transform object as we saw in the previous notebook.
    # This goes from WGS84 to the projection in the reference datasets

    wgs84 = osr.SpatialReference()  # Define a SpatialReference object
    wgs84.ImportFromEPSG(4326)  # And set it to WGS84 using the EPSG code

    # Now for the target projection: take it directly from the reference raster
    to_proj = osr.SpatialReference()  # define the SpatialReference object
    # In this case, we get the projection from the reference dataset's WKT
    # (a Proj4 string would work too, via ImportFromProj4)
    to_proj.ImportFromWkt(g.GetProjectionRef())
    target_ds.SetProjection(to_proj.ExportToWkt())
    # Now, we define a coordinate transformation object, *from* WGS84 *to* the target projection
    tx = osr.CoordinateTransformation(wgs84, to_proj)

    # We define an output in-memory OGR dataset
    # You could also select a driver here, e.g. "ESRI Shapefile",
    # and give the datasource a better name than 'out'!

    drv = ogr.GetDriverByName('Memory')
    dst_ds = drv.CreateDataSource('out')
    # This is a single layer dataset. The layer needs to be of polygons
    # and needs to have the target files' projection
    dst_layer = dst_ds.CreateLayer('', srs=to_proj, geom_type=ogr.wkbPolygon)

    # Open the original shapefile, get the first layer, and filter by attribute
    vector_ds = ogr.Open(target_vector_file)
    lyr = vector_ds.GetLayer(0)
    lyr.SetAttributeFilter(attribute_filter)

    # Get a field definition from the original vector file.
    # We don't need much more detail here
    feature = lyr.GetFeature(0)
    field = feature.GetFieldDefnRef(0)
    # Apply the field definition from the original to the output
    dst_layer.CreateField(field)
    feature_defn = dst_layer.GetLayerDefn()
    # Reset the original layer so we can read all features
    lyr.ResetReading()
    for feat in lyr:
        # For each feature, get the geometry
        geom = feat.GetGeometryRef()
        # transform it to the reference projection
        geom.Transform(tx)
        # Create an output feature
        out_geom = ogr.Feature(feature_defn)
        # Set the geometry to be the reprojected/transformed geometry
        out_geom.SetGeometry(geom)
        # Add the feature with its geometry to the output layer
        dst_layer.CreateFeature(out_geom)
        # Release the output feature (the original accessed .Destroy without
        # calling it, which was a no-op)
        out_geom.Destroy()
    # Done adding geometries
    # Reset the output layer to the 0th geometry
    dst_layer.ResetReading()

    # Now, we rasterize the output vector in-memory file
    # into the in-memory output raster file

    err = gdal.RasterizeLayer(target_ds, [1],
                              dst_layer,
                              burn_values=[burn_value])
    if err != 0:
        print("error:", err)

    # Read the data from the raster, this is your mask
    data = target_ds.ReadAsArray()

    # return False for the desired area
    # and True elsewhere

    return ~data.astype(bool)
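A minimal usage sketch (file name is a placeholder), masking everything outside Ireland:

import numpy as np
from osgeo import gdal

mask = raster_mask('modis_reference.tif',
                   target_vector_file='data/world.shp',
                   attribute_filter="NAME = 'IRELAND'")
data = gdal.Open('modis_reference.tif').ReadAsArray()
ireland = np.ma.array(data, mask=mask)  # mask is False inside the polygon, so kept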
Example #23
def fill_nodata(input_path,
                mask_path,
                output_path,
                max_distance=0,
                smoothing_iterations=0,
                options=[],
                driver="HFA",
                desired_nodata=-9999,
                quiet=False):
    """
  This function mimicks the gdal_fillnodata.py script because it's heavily based on it.
  Basically I just added more logging to fit it into this project.
  """

    logger.info('input: {0}'.format(input_path))
    logger.info('mask: {0}'.format(mask_path))
    logger.info('output: {0}'.format(output_path))

    # Open the MHHW tile as read-only and get the driver GDAL is using to access the data
    input_fh = gdal.Open(input_path, gdal.GA_ReadOnly)
    input_driver = input_fh.GetDriver()

    # Open the mask tile as read-only and get the driver GDAL is using to access the data
    mask_fh = gdal.Open(mask_path, gdal.GA_ReadOnly)
    mask_driver = mask_fh.GetDriver()
    mask_band = mask_fh.GetRasterBand(1)  # Get the raster band

    # Pull Metadata associated with the LIDAR tile so we can create the output raster later
    geotransform = input_fh.GetGeoTransform()
    projection = input_fh.GetProjection()
    cols = input_fh.RasterXSize  # Get the number of columns
    rows = input_fh.RasterYSize  # Get the number of rows
    logger.info("  cols: {0}".format(cols))
    logger.info("  rows: {0}".format(rows))
    input_data = input_fh.GetRasterBand(1)  # Get the raster band
    original_nodata = input_data.GetNoDataValue()  # NoData value, used for the mask
    logger.info("  original nodata value: {0}".format(original_nodata))

    # Create a copy of the data using in the input tile as an example.
    logger.info("  Creating new raster...")
    output_driver = gdal.GetDriverByName(driver)  # Setup the output driver
    output_fh = output_driver.Create(output_path, cols, rows, 1,
                                     gdal.GDT_CFloat32)
    output_fh.SetGeoTransform(geotransform)
    output_fh.SetProjection(projection)
    output_band = output_fh.GetRasterBand(1)
    output_band.SetNoDataValue(desired_nodata)
    logger.info("    done.")

    logger.info("  copying band to destination file...")
    gm_gdal.CopyBand(input_data, output_band)
    logger.info("    done.")

    # Suppress progress report if we ask for quiet behavior
    if quiet:
        prog_func = None
    else:
        prog_func = gdal.TermProgress

    logger.info("  Running FillNodata()...")
    result = gdal.FillNodata(output_band,
                             mask_band,
                             max_distance,
                             smoothing_iterations,
                             options,
                             callback=prog_func)
    logger.info("    done.")

    # Compute Statistics before closing out the dataset
    logger.info("  Computing stats...")
    try:
        output_band.ComputeStatistics(False)
    except RuntimeError:
        logger.warning("    Cannot compute statistics.")
    logger.info("    done.")

    logger.info("    cleanning up band...")
    output_band = None
    logger.info("    done.")

    logger.info("  Building blocks...")
    output_fh.BuildOverviews(overviewlist=[2, 4, 8, 16, 32, 64, 128])
    logger.info("    done.")

    logger.info("  Flushing the cache...")
    output_fh.FlushCache()
    logger.info("    done.")

    logger.info("  closing mask bands and all file handlers...")
    output_fh = None
    mask_band = None
    input_fh = None
    logger.info("      done.")

    return result
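A minimal usage sketch (paths are placeholders; assumes the module-level logger is configured):

result = fill_nodata('tiles/mhhw_input.img',
                     'tiles/mhhw_mask.img',
                     'tiles/mhhw_filled.img',
                     max_distance=100,
                     smoothing_iterations=2,
                     quiet=True)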
Example #24
    def processAlgorithm(self, parameters, context, feedback):
        raster_layer = self.parameterAsRasterLayer(parameters, self.INPUT_DEM,
                                                   context)
        target_crs = raster_layer.crs()
        rasterPath = raster_layer.source()

        source = self.parameterAsSource(parameters, self.BOUNDARY_LAYER,
                                        context)
        if source is None:
            raise QgsProcessingException(
                self.invalidSourceError(parameters, self.BOUNDARY_LAYER))

        step = self.parameterAsDouble(parameters, self.STEP, context)
        percentage = self.parameterAsBool(parameters, self.USE_PERCENTAGE,
                                          context)

        outputPath = self.parameterAsString(parameters, self.OUTPUT_DIRECTORY,
                                            context)

        rasterDS = gdal.Open(rasterPath, gdal.GA_ReadOnly)
        geoTransform = rasterDS.GetGeoTransform()
        rasterBand = rasterDS.GetRasterBand(1)
        noData = rasterBand.GetNoDataValue()

        cellXSize = abs(geoTransform[1])
        cellYSize = abs(geoTransform[5])
        rasterXSize = rasterDS.RasterXSize
        rasterYSize = rasterDS.RasterYSize

        rasterBBox = QgsRectangle(geoTransform[0],
                                  geoTransform[3] - cellYSize * rasterYSize,
                                  geoTransform[0] + cellXSize * rasterXSize,
                                  geoTransform[3])
        rasterGeom = QgsGeometry.fromRect(rasterBBox)

        crs = osr.SpatialReference()
        crs.ImportFromProj4(str(target_crs.toProj4()))

        memVectorDriver = ogr.GetDriverByName('Memory')
        memRasterDriver = gdal.GetDriverByName('MEM')

        features = source.getFeatures(QgsFeatureRequest().setDestinationCrs(
            target_crs, context.transformContext()))
        total = 100.0 / source.featureCount() if source.featureCount() else 0

        for current, f in enumerate(features):
            if not f.hasGeometry():
                continue

            if feedback.isCanceled():
                break

            geom = f.geometry()
            intersectedGeom = rasterGeom.intersection(geom)

            if intersectedGeom.isEmpty():
                feedback.pushInfo(
                    self.tr('Feature {0} does not intersect raster or '
                            'entirely located in NODATA area').format(f.id()))
                continue

            fName = os.path.join(
                outputPath,
                'histogram_%s_%s.csv' % (source.sourceName(), f.id()))

            ogrGeom = ogr.CreateGeometryFromWkt(intersectedGeom.asWkt())
            bbox = intersectedGeom.boundingBox()
            xMin = bbox.xMinimum()
            xMax = bbox.xMaximum()
            yMin = bbox.yMinimum()
            yMax = bbox.yMaximum()

            (startColumn,
             startRow) = raster.mapToPixel(xMin, yMax, geoTransform)
            (endColumn, endRow) = raster.mapToPixel(xMax, yMin, geoTransform)

            width = endColumn - startColumn
            height = endRow - startRow

            srcOffset = (startColumn, startRow, width, height)

            # Skip degenerate windows before reading any pixels
            if srcOffset[2] == 0 or srcOffset[3] == 0:
                feedback.pushInfo(
                    self.tr('Feature {0} is smaller than raster '
                            'cell size').format(f.id()))
                continue

            srcArray = rasterBand.ReadAsArray(*srcOffset)

            newGeoTransform = (geoTransform[0] +
                               srcOffset[0] * geoTransform[1], geoTransform[1],
                               0.0, geoTransform[3] +
                               srcOffset[1] * geoTransform[5], 0.0,
                               geoTransform[5])

            memVDS = memVectorDriver.CreateDataSource('out')
            memLayer = memVDS.CreateLayer('poly', crs, ogr.wkbPolygon)

            ft = ogr.Feature(memLayer.GetLayerDefn())
            ft.SetGeometry(ogrGeom)
            memLayer.CreateFeature(ft)
            ft.Destroy()

            rasterizedDS = memRasterDriver.Create('', srcOffset[2],
                                                  srcOffset[3], 1,
                                                  gdal.GDT_Byte)
            rasterizedDS.SetGeoTransform(newGeoTransform)
            gdal.RasterizeLayer(rasterizedDS, [1], memLayer, burn_values=[1])
            rasterizedArray = rasterizedDS.ReadAsArray()

            srcArray = numpy.nan_to_num(srcArray)
            masked = numpy.ma.MaskedArray(
                srcArray,
                mask=numpy.logical_or(srcArray == noData,
                                      numpy.logical_not(rasterizedArray)))

            self.calculateHypsometry(f.id(), fName, feedback, masked,
                                     cellXSize, cellYSize, percentage, step)

            memVDS = None
            rasterizedDS = None
            feedback.setProgress(int(current * total))

        rasterDS = None

        return {self.OUTPUT_DIRECTORY: outputPath}
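The raster.mapToPixel helper used above is not shown in this snippet; under the usual assumption of a north-up geotransform (no rotation terms), it reduces to the inverse affine below:

def mapToPixel(mX, mY, geoTransform):
    # Invert a north-up affine geotransform (rotation terms assumed zero)
    col = int((mX - geoTransform[0]) / geoTransform[1])
    row = int((mY - geoTransform[3]) / geoTransform[5])
    return col, row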
def validate(ds, check_tiled=True):
    """Check if a file is a (Geo)TIFF with cloud optimized compatible structure.

    Args:
      ds: GDAL Dataset for the file to inspect.
      check_tiled: Set to False to ignore missing tiling.

    Returns:
      A tuple, whose first element is an array of error messages
      (empty if there is no error), and the second element, a dictionary
      with the structure of the GeoTIFF file.

    Raises:
      ValidateCloudOptimizedGeoTIFFException: Unable to open the file or the
        file is not a Tiff.
    """

    if int(gdal.VersionInfo('VERSION_NUM')) < 2020000:
        raise ValidateCloudOptimizedGeoTIFFException(
            'GDAL 2.2 or above required')

    unicode_type = type(''.encode('utf-8').decode('utf-8'))
    if isinstance(ds, str) or isinstance(ds, unicode_type):
        gdal.PushErrorHandler()
        ds = gdal.Open(ds)
        gdal.PopErrorHandler()
        if ds is None:
            raise ValidateCloudOptimizedGeoTIFFException(
                'Invalid file : %s' % gdal.GetLastErrorMsg())
        if ds.GetDriver().ShortName != 'GTiff':
            raise ValidateCloudOptimizedGeoTIFFException(
                'The file is not a GeoTIFF')

    details = {}
    errors = []
    filename = ds.GetDescription()
    main_band = ds.GetRasterBand(1)
    ovr_count = main_band.GetOverviewCount()
    filelist = ds.GetFileList()
    if filelist is not None and filename + '.ovr' in filelist:
        errors += [
            'Overviews found in external .ovr file. They should be internal']

    if main_band.XSize >= 512 or main_band.YSize >= 512:
        if check_tiled:
            block_size = main_band.GetBlockSize()
            if block_size[0] == main_band.XSize and block_size[0] > 1024:
                errors += [
                    'The file is greater than 512xH or Wx512, but is not tiled']

        if ovr_count == 0:
            errors += [
                'The file is greater than 512xH or Wx512, but has no overviews']

    ifd_offset = int(main_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
    ifd_offsets = [ifd_offset]
    if ifd_offset not in (8, 16):
        errors += [
            'The offset of the main IFD should be 8 for ClassicTIFF '
            'or 16 for BigTIFF. It is %d instead' % ifd_offsets[0]]
    details['ifd_offsets'] = {}
    details['ifd_offsets']['main'] = ifd_offset

    for i in range(ovr_count):
        # Check that overviews are by descending sizes
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        if i == 0:
            if (ovr_band.XSize > main_band.XSize or
                    ovr_band.YSize > main_band.YSize):
                errors += [
                    'First overview has larger dimension than main band']
        else:
            prev_ovr_band = ds.GetRasterBand(1).GetOverview(i - 1)
            if (ovr_band.XSize > prev_ovr_band.XSize or
                    ovr_band.YSize > prev_ovr_band.YSize):
                errors += [
                    'Overview of index %d has larger dimension than '
                    'overview of index %d' % (i, i - 1)]

        if check_tiled:
            block_size = ovr_band.GetBlockSize()
            if block_size[0] == ovr_band.XSize and block_size[0] > 1024:
                errors += [
                    'Overview of index %d is not tiled' % i]

        # Check that the IFD of descending overviews are sorted by increasing
        # offsets
        ifd_offset = int(ovr_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
        ifd_offsets.append(ifd_offset)
        details['ifd_offsets']['overview_%d' % i] = ifd_offset
        if ifd_offsets[-1] < ifd_offsets[-2]:
            if i == 0:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of the main '
                    'image, which is at byte %d' %
                    (i, ifd_offsets[-1], ifd_offsets[-2])]
            else:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of index %d, '
                    'which is at byte %d' %
                    (i, ifd_offsets[-1], i - 1, ifd_offsets[-2])]

    # Check that the imagery starts by the smallest overview and ends with
    # the main resolution dataset
    block_offset = main_band.GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF')
    if not block_offset:
        errors += ['Missing BLOCK_OFFSET_0_0']
    data_offset = int(block_offset) if block_offset else None
    data_offsets = [data_offset]
    details['data_offsets'] = {}
    details['data_offsets']['main'] = data_offset
    for i in range(ovr_count):
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        data_offset = int(ovr_band.GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF'))
        data_offsets.append(data_offset)
        details['data_offsets']['overview_%d' % i] = data_offset

    if data_offsets[-1] < ifd_offsets[-1]:
        if ovr_count > 0:
            errors += [
                'The offset of the first block of the smallest overview '
                'should be after its IFD']
        else:
            errors += [
                'The offset of the first block of the image should '
                'be after its IFD']
    for i in range(len(data_offsets) - 2, 0, -1):
        if data_offsets[i] < data_offsets[i + 1]:
            errors += [
                'The offset of the first block of overview of index %d should '
                'be after the one of the overview of index %d' %
                (i - 1, i)]
    if len(data_offsets) >= 2 and data_offsets[0] < data_offsets[1]:
        errors += [
            'The offset of the first block of the main resolution image '
            'should be after the one of the overview of index %d' %
            (ovr_count - 1)]

    return errors, details
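A minimal usage sketch (the file name is a placeholder; assumes the module's ValidateCloudOptimizedGeoTIFFException is in scope):

try:
    errors, details = validate('example_cog.tif')
    if errors:
        print('Not a valid cloud optimized GeoTIFF:')
        for msg in errors:
            print(' -', msg)
    else:
        print('Valid COG; main IFD at byte', details['ifd_offsets']['main'])
except ValidateCloudOptimizedGeoTIFFException as e:
    print('Validation failed:', e)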
def main(img_ws=os.getcwd(),
         ancillary_ws=os.getcwd(),
         output_ws=os.getcwd(),
         etr_flag=False,
         eto_flag=False,
         start_date=None,
         end_date=None,
         extent_path=None,
         output_extent=None,
         stats_flag=True,
         overwrite_flag=False,
         use_cimis_eto_flag=False):
    """Compute daily ETr/ETo from CIMIS data

    Args:
        img_ws (str): root folder of GRIDMET data
        ancillary_ws (str): folder of ancillary rasters
        output_ws (str): folder of output rasters
        etr_flag (bool): if True, compute alfalfa reference ET (ETr)
        eto_flag (bool): if True, compute grass reference ET (ETo)
        start_date (str): ISO format date (YYYY-MM-DD)
        end_date (str): ISO format date (YYYY-MM-DD)
        extent_path (str): file path defining the output extent
        output_extent (list): decimal degrees values defining output extent
        stats_flag (bool): if True, compute raster statistics.
            Default is True.
        overwrite_flag (bool): If True, overwrite existing files
        use_cimis_eto_flag (bool): if True, use CIMIS ETo raster if one of
            the component rasters is missing and ETo/ETr cannot be computed

    Returns:
        None
    """
    logging.info('\nComputing CIMIS ETo/ETr')
    np.seterr(invalid='ignore')

    # Use CIMIS ETo raster directly instead of computing from components
    # Currently this will only be applied if one of the inputs is missing
    use_cimis_eto_flag = True

    # Compute ETr and/or ETo
    if not etr_flag and not eto_flag:
        logging.info('  ETo/ETr flag(s) not set, defaulting to ETr')
        etr_flag = True

    # If a date is not set, process 2017
    try:
        start_dt = dt.datetime.strptime(start_date, '%Y-%m-%d')
        logging.debug('  Start date: {}'.format(start_dt))
    except (TypeError, ValueError):
        start_dt = dt.datetime(2017, 1, 1)
        logging.info('  Start date: {}'.format(start_dt))
    try:
        end_dt = dt.datetime.strptime(end_date, '%Y-%m-%d')
        logging.debug('  End date:   {}'.format(end_dt))
    except (TypeError, ValueError):
        end_dt = dt.datetime(2017, 12, 31)
        logging.info('  End date:   {}'.format(end_dt))

    etr_folder = 'etr'
    eto_folder = 'eto'
    etr_fmt = 'etr_{}_daily_cimis.img'
    eto_fmt = 'eto_{}_daily_cimis.img'

    # DEM for air pressure calculation
    mask_raster = os.path.join(ancillary_ws, 'cimis_mask.img')
    dem_raster = os.path.join(ancillary_ws, 'cimis_elev.img')
    lat_raster = os.path.join(ancillary_ws, 'cimis_lat.img')
    # lon_raster = os.path.join(ancillary_ws, 'cimis_lon.img')

    # Interpolate zero windspeed pixels
    # interpolate_zero_u2_flag = False

    # Interpolate edge and coastal cells
    # interpolate_edge_flag = False

    # Resample type
    # 0 = GRA_NearestNeighbour, Nearest neighbour (select on one input pixel)
    # 1 = GRA_Bilinear,Bilinear (2x2 kernel)
    # 2 = GRA_Cubic, Cubic Convolution Approximation (4x4 kernel)
    # 3 = GRA_CubicSpline, Cubic B-Spline Approximation (4x4 kernel)
    # 4 = GRA_Lanczos, Lanczos windowed sinc interpolation (6x6 kernel)
    # 5 = GRA_Average, Average (computes the average of all non-NODATA contributing pixels)
    # 6 = GRA_Mode, Mode (selects the value which appears most often of all the sampled points)
    resample_type = gdal.GRA_CubicSpline
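    # (Hedged note: resample_type is consumed later, outside this excerpt,
    # when grids are warped, e.g. via
    # gdal.ReprojectImage(src_ds, dst_ds, src_wkt, dst_wkt, resample_type).)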

    # Wind speed is measured at 2m
    zw = 2

    # Output workspaces
    etr_ws = os.path.join(output_ws, etr_folder)
    eto_ws = os.path.join(output_ws, eto_folder)
    if etr_flag and not os.path.isdir(etr_ws):
        os.makedirs(etr_ws)
    if eto_flag and not os.path.isdir(eto_ws):
        os.makedirs(eto_ws)

    # Check ETr/ETo functions
    test_flag = False

    # Check that the daily_refet_func produces the correct values
    if test_flag:
        doy_test = 245
        elev_test = 1050.0
        lat_test = 39.9396 * math.pi / 180
        tmin_test = 11.07
        tmax_test = 34.69
        rs_test = 22.38
        u2_test = 1.94
        zw_test = 2.5
        tdew_test = -3.22
        ea_test = et_common.saturation_vapor_pressure_func(tdew_test)
        # FAO-56 Eq. 7 uses a 293 K reference temperature
        pair_test = 101.3 * np.power((293 - 0.0065 * elev_test) / 293, 5.26)
        q_test = 0.622 * ea_test / (pair_test - (0.378 * ea_test))
        etr = float(
            et_common.daily_refet_func(tmin_test, tmax_test, q_test, rs_test,
                                       u2_test, zw_test, elev_test, doy_test,
                                       lat_test, 'ETR'))
        eto = float(
            et_common.daily_refet_func(tmin_test, tmax_test, q_test, rs_test,
                                       u2_test, zw_test, elev_test, doy_test,
                                       lat_test, 'ETO'))
        print('ETr: 8.89', etr)
        print('ETo: 6.16', eto)
        sys.exit()

    # Get CIMIS grid properties from mask
    cimis_mask_ds = gdal.Open(mask_raster)
    cimis_osr = gdc.raster_ds_osr(cimis_mask_ds)
    cimis_proj = gdc.osr_proj(cimis_osr)
    cimis_cs = gdc.raster_ds_cellsize(cimis_mask_ds, x_only=True)
    cimis_extent = gdc.raster_ds_extent(cimis_mask_ds)
    cimis_full_geo = cimis_extent.geo(cimis_cs)
    cimis_x, cimis_y = cimis_extent.origin()
    cimis_mask_ds = None
    logging.debug('  Projection: {}'.format(cimis_proj))
    logging.debug('  Cellsize: {}'.format(cimis_cs))
    logging.debug('  Geo: {}'.format(cimis_full_geo))
    logging.debug('  Extent: {}'.format(cimis_extent))

    # Manually set CIMIS grid properties
    # cimis_extent = gdc.Extent((-400000, -650000, 600000, 454000))
    # cimis_cs = 2000
    # cimis_geo = gdc.extent_geo(cimis_extent, cellsize)
    # cimis_epsg = 3310  # NAD_1983_California_Teale_Albers
    # cimis_x, cimis_y = (0,0)

    # Subset data to a smaller extent
    if output_extent is not None:
        logging.info('\nComputing subset extent & geo')
        logging.debug('  Extent: {}'.format(output_extent))
        cimis_extent = gdc.Extent(output_extent)
        cimis_extent.adjust_to_snap('EXPAND', cimis_x, cimis_y, cimis_cs)
        cimis_geo = cimis_extent.geo(cimis_cs)
        logging.debug('  Geo: {}'.format(cimis_geo))
        logging.debug('  Extent: {}'.format(output_extent))
    elif extent_path is not None:
        logging.info('\nComputing subset extent & geo')
        if extent_path.lower().endswith('.shp'):
            cimis_extent = gdc.feature_path_extent(extent_path)
            extent_osr = gdc.feature_path_osr(extent_path)
            extent_cs = None
        else:
            cimis_extent = gdc.raster_path_extent(extent_path)
            extent_osr = gdc.raster_path_osr(extent_path)
            extent_cs = gdc.raster_path_cellsize(extent_path, x_only=True)
        cimis_extent = gdc.project_extent(cimis_extent, extent_osr, cimis_osr,
                                          extent_cs)
        cimis_extent.adjust_to_snap('EXPAND', cimis_x, cimis_y, cimis_cs)
        cimis_geo = cimis_extent.geo(cimis_cs)
        logging.debug('  Geo: {}'.format(cimis_geo))
        logging.debug('  Extent: {}'.format(cimis_extent))
    else:
        cimis_geo = cimis_full_geo

    # Latitude
    lat_array = gdc.raster_to_array(lat_raster,
                                    mask_extent=cimis_extent,
                                    return_nodata=False)
    lat_array = lat_array.astype(np.float32)
    lat_array *= math.pi / 180

    # Elevation data
    elev_array = gdc.raster_to_array(dem_raster,
                                     mask_extent=cimis_extent,
                                     return_nodata=False)
    elev_array = elev_array.astype(np.float32)

    # Process each year in the input workspace
    logging.info("")
    for year_str in sorted(os.listdir(img_ws)):
        logging.debug('{}'.format(year_str))
        if not re.match(r'^\d{4}$', year_str):
            logging.debug('  Not a 4 digit year folder, skipping')
            continue
        year_ws = os.path.join(img_ws, year_str)
        year_int = int(year_str)
        # year_days = int(dt.datetime(year_int, 12, 31).strftime('%j'))
        if start_dt is not None and year_int < start_dt.year:
            logging.debug('  Before start date, skipping')
            continue
        elif end_dt is not None and year_int > end_dt.year:
            logging.debug('  After end date, skipping')
            continue
        logging.info('{}'.format(year_str))

        # Output paths
        etr_raster = os.path.join(etr_ws, etr_fmt.format(year_str))
        eto_raster = os.path.join(eto_ws, eto_fmt.format(year_str))
        if etr_flag and (overwrite_flag or not os.path.isfile(etr_raster)):
            logging.debug('  {}'.format(etr_raster))
            gdc.build_empty_raster(etr_raster,
                                   band_cnt=366,
                                   output_dtype=np.float32,
                                   output_proj=cimis_proj,
                                   output_cs=cimis_cs,
                                   output_extent=cimis_extent,
                                   output_fill_flag=True)
        if eto_flag and (overwrite_flag or not os.path.isfile(eto_raster)):
            logging.debug('  {}'.format(eto_raster))
            gdc.build_empty_raster(eto_raster,
                                   band_cnt=366,
                                   output_dtype=np.float32,
                                   output_proj=cimis_proj,
                                   output_cs=cimis_cs,
                                   output_extent=cimis_extent,
                                   output_fill_flag=True)

        # Process each date in the year
        for date_str in sorted(os.listdir(year_ws)):
            logging.debug('{}'.format(date_str))
            try:
                date_dt = dt.datetime.strptime(date_str, '%Y_%m_%d')
            except ValueError:
                logging.debug(
                    '  Invalid folder date format (YYYY_MM_DD), skipping')
                continue
            if start_dt is not None and date_dt < start_dt:
                logging.debug('  Before start date, skipping')
                continue
            elif end_dt is not None and date_dt > end_dt:
                logging.debug('  After end date, skipping')
                continue
            logging.info(date_str)
            date_ws = os.path.join(year_ws, date_str)
            doy = int(date_dt.strftime('%j'))

            # Set file paths
            tmax_path = os.path.join(date_ws, 'Tx.img')
            tmin_path = os.path.join(date_ws, 'Tn.img')
            tdew_path = os.path.join(date_ws, 'Tdew.img')
            rso_path = os.path.join(date_ws, 'Rso.img')
            rs_path = os.path.join(date_ws, 'Rs.img')
            u2_path = os.path.join(date_ws, 'U2.img')
            eto_path = os.path.join(date_ws, 'ETo.img')
            # k_path = os.path.join(date_ws, 'K.img')
            # rnl_path = os.path.join(date_ws, 'Rnl.img')
            input_list = [
                tmin_path, tmax_path, tdew_path, u2_path, rs_path, rso_path
            ]

            # If any input raster is missing, skip the day
            #   Unless ETo is present (and use_cimis_eto_flag is True)
            day_skip_flag = False
            for t_path in input_list:
                if not os.path.isfile(t_path):
                    logging.info('    {} is missing'.format(t_path))
                    day_skip_flag = True

            if (day_skip_flag and use_cimis_eto_flag
                    and os.path.isfile(eto_path)):
                logging.info('    Using CIMIS ETo directly')
                eto_array = gdc.raster_to_array(eto_path,
                                                1,
                                                cimis_extent,
                                                return_nodata=False)
                eto_array = eto_array.astype(np.float32)
                if not np.any(eto_array):
                    logging.info('    {} is empty or missing'.format(eto_path))
                    logging.info('    Skipping date')
                    continue
                # ETr
                if etr_flag:
                    gdc.array_to_comp_raster(1.2 * eto_array,
                                             etr_raster,
                                             band=doy,
                                             stats_flag=False)
                    # gdc.array_to_raster(
                    #     1.2 * eto_array, etr_raster,
                    #     output_geo=cimis_geo, output_proj=cimis_proj,
                    #     stats_flag=stats_flag)
                # ETo
                if eto_flag:
                    gdc.array_to_comp_raster(eto_array,
                                             eto_raster,
                                             band=doy,
                                             stats_flag=False)
                    # gdc.array_to_raster(
                    #     eto_array, eto_raster,
                    #     output_geo=cimis_geo, output_proj=cimis_proj,
                    #     stats_flag=stats_flag)
                del eto_array
                continue
            elif not day_skip_flag:
                # Read in rasters
                # DEADBEEF - Read with extent since some arrays are too big
                # i.e. 2012-03-21, 2013-03-20, 2014-02-27
                tmin_array = gdc.raster_to_array(tmin_path,
                                                 1,
                                                 cimis_extent,
                                                 return_nodata=False)
                tmax_array = gdc.raster_to_array(tmax_path,
                                                 1,
                                                 cimis_extent,
                                                 return_nodata=False)
                tdew_array = gdc.raster_to_array(tdew_path,
                                                 1,
                                                 cimis_extent,
                                                 return_nodata=False)
                rso_array = gdc.raster_to_array(rso_path,
                                                1,
                                                cimis_extent,
                                                return_nodata=False)
                rs_array = gdc.raster_to_array(rs_path,
                                               1,
                                               cimis_extent,
                                               return_nodata=False)
                u2_array = gdc.raster_to_array(u2_path,
                                               1,
                                               cimis_extent,
                                               return_nodata=False)
                # k_array = gdc.raster_to_array(
                #     k_path, 1, cimis_extent, return_nodata=False)
                # rnl_array = gdc.raster_to_array(
                #     rnl_path, 1, cimis_extent, return_nodata=False)

                # Check that all input arrays have data
                for t_name, t_array in [[tmin_path, tmin_array],
                                        [tmax_path, tmax_array],
                                        [tdew_path, tdew_array],
                                        [u2_path, u2_array],
                                        [rs_path, rs_array]]:
                    if not np.any(t_array):
                        logging.warning(
                            '    {} is empty or missing'.format(t_name))
                        day_skip_flag = True
                if day_skip_flag:
                    logging.warning('    Skipping date')
                    continue

                # DEADBEEF - Some arrays have a 500m cellsize
                # i.e. 2011-07-25, 2010-01-01 -> 2010-07-27
                tmin_array = rescale_array_func(tmin_array, elev_array, 'tmin')
                tmax_array = rescale_array_func(tmax_array, elev_array, 'tmax')
                tdew_array = rescale_array_func(tdew_array, elev_array, 'tdew')
                rso_array = rescale_array_func(rso_array, elev_array, 'rso')
                rs_array = rescale_array_func(rs_array, elev_array, 'rs')
                u2_array = rescale_array_func(u2_array, elev_array, 'u2')
                # k_array = rescale_array_func(k_array, elev_array, 'k')
                # rnl_array = rescale_array_func(rnl_array, elev_array, 'rnl')

                # Back calculate q from tdew by first calculating ea from tdew
                es_array = et_common.saturation_vapor_pressure_func(tdew_array)
                pair_array = et_common.air_pressure_func(elev_array)
                q_array = 0.622 * es_array / (pair_array - (0.378 * es_array))
                del es_array, pair_array, tdew_array

                # Back calculate rhmin/rhmax from tdew
                # ea_tmax = et_common.saturation_vapor_pressure_func(tmax_array)
                # ea_tmin = et_common.saturation_vapor_pressure_func(tmin_array)
                # rhmin = ea_tdew * 2 / (ea_tmax + ea_tmin);
                # rhmax = ea_tdew * 2 / (ea_tmax + ea_tmin);
                # del ea_tmax, ea_tmin

                # ETr
                if etr_flag:
                    etr_array = et_common.refet_daily_func(tmin_array,
                                                           tmax_array,
                                                           q_array,
                                                           rs_array,
                                                           u2_array,
                                                           zw,
                                                           elev_array,
                                                           lat_array,
                                                           doy,
                                                           ref_type='ETR',
                                                           rso_type='ARRAY',
                                                           rso=rso_array)
                    gdc.array_to_comp_raster(etr_array.astype(np.float32),
                                             etr_raster,
                                             band=doy,
                                             stats_flag=False)
                    # gdc.array_to_raster(
                    #     etr_array.astype(np.float32), etr_raster,
                    #     output_geo=cimis_geo, output_proj=cimis_proj,
                    #     stats_flag=stats_flag)
                    del etr_array
                # ETo
                if eto_flag:
                    eto_array = et_common.refet_daily_func(tmin_array,
                                                           tmax_array,
                                                           q_array,
                                                           rs_array,
                                                           u2_array,
                                                           zw,
                                                           elev_array,
                                                           lat_array,
                                                           doy,
                                                           ref_type='ETO',
                                                           rso_type='ARRAY',
                                                           rso=rso_array)
                    gdc.array_to_comp_raster(eto_array.astype(np.float32),
                                             eto_raster,
                                             band=doy,
                                             stats_flag=False)
                    # gdc.array_to_raster(
                    #     eto_array.astype(np.float32), eto_raster,
                    #     output_geo=cimis_geo, output_proj=cimis_proj,
                    #     stats_flag=stats_flag)
                    del eto_array
                # Cleanup
                del tmin_array, tmax_array, u2_array, rs_array, q_array
                # del rnl, rs, rso
            else:
                logging.info('    Skipping date')
                continue

        if stats_flag and etr_flag:
            gdc.raster_statistics(etr_raster)
        if stats_flag and eto_flag:
            gdc.raster_statistics(eto_raster)

    logging.debug('\nScript Complete')
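A minimal invocation sketch (folder paths are placeholders):

main(img_ws='cimis/input', ancillary_ws='cimis/ancillary',
     output_ws='cimis/output', eto_flag=True, etr_flag=True,
     start_date='2017-01-01', end_date='2017-12-31')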
    def get_band_swir_2(self):
        obj_gdal = gdal.Open(self.pattern_path + "B7.TIF")
        return obj_gdal.GetRasterBand(1).ReadAsArray()
#
# Apr 4, 2019
# Read slope data along with lithology, vegetation and earthquake data
#
import math
from osgeo import gdal, osr, ogr

# shapefile
drv = ogr.GetDriverByName('ESRI Shapefile')
ds_in = drv.Open("/home/btpbatch3/Downloads/shp_files/kobe_Uchida_and_others_2004/Uchida_and_others_2004.shp")    #Get the contents of the shape file
lyr_in = ds_in.GetLayer(0)    #Get the shape file's first layer

# soil type
geo = gdal.Open('/home/btpbatch3/Downloads/soiltype_hartmann-moosdorf_2012/glim_wgs84_0point5deg.txt.asc')
arr = geo.ReadAsArray()


#slope
slope_geo = gdal.Open('/home/btpbatch3/Downloads/final_slope_data/slope_deg/w001001.adf')
band = slope_geo.GetRasterBand(1)
slope_arr = band.ReadAsArray()
geotransform = slope_geo.GetGeoTransform() 
originX = geotransform[0]
originY = geotransform[3]
pixelWidth = geotransform[1]
pixelHeight = geotransform[5]

# vegetation
vegi_data=gdal.Open('HDF4_EOS:EOS_GRID:"/home/btpbatch3/Downloads/vegetation_MODIS_28th_March_data/MCD12C1.A2017001.006.2018257171411.hdf":MOD12C1:Majority_Land_Cover_Type_1')
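With the geotransform components extracted above, a longitude/latitude can be converted to array indices. A minimal sketch (hypothetical helper; assumes a north-up raster):

def lonlat_to_rowcol(lon, lat):
    # pixelHeight is negative for north-up rasters
    col = int((lon - originX) / pixelWidth)
    row = int((lat - originY) / pixelHeight)
    return row, col

# e.g. sample the slope grid near Kobe (135.2E, 34.7N)
r, c = lonlat_to_rowcol(135.2, 34.7)
slope_value = slope_arr[r, c]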
Example #29
def rasterio_5():

    ds = gdal.Open('data/byte.tif')

    # Just check that absurd window sizes error out without crashing
    for obj in [ds, ds.GetRasterBand(1)]:
        obj.ReadRaster(0, 0, -2000000000, 1, 1, 1)
        obj.ReadRaster(0, 0, 1, -2000000000, 1, 1)

    for band_number in [-1,0,2]:
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = ds.ReadRaster(0,0,1,1,band_list=[band_number])
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if res is not None:
            gdaltest.post_reason('expected None')
            return 'fail'
        if error_msg.find('this band does not exist on dataset') == -1:
            gdaltest.post_reason('did not get expected error msg')
            print(error_msg)
            return 'fail'

    res = ds.ReadRaster(0,0,1,1,band_list=[1,1])
    if res is None:
        gdaltest.post_reason('expected non None')
        return 'fail'

    for obj in [ds, ds.GetRasterBand(1)]:
        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = obj.ReadRaster(0,0,21,21)
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if res is not None:
            gdaltest.post_reason('expected None')
            return 'fail'
        if error_msg.find('Access window out of range in RasterIO()') == -1:
            gdaltest.post_reason('did not get expected error msg (1)')
            print(error_msg)
            return 'fail'

        # This should only fail on a 32bit build
        try:
            maxsize = sys.maxint
        except:
            maxsize = sys.maxsize

        # On win64, maxsize == 2147483647 and ReadRaster()
        # fails because of out of memory condition, not
        # because of integer overflow. I'm not sure on how
        # to detect win64 better.
        if maxsize == 2147483647 and sys.platform != 'win32':
            gdal.ErrorReset()
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            res = obj.ReadRaster(0,0,1,1,1000000,1000000)
            gdal.PopErrorHandler()
            error_msg = gdal.GetLastErrorMsg()
            if res is not None:
                gdaltest.post_reason('expected None')
                return 'fail'
            if error_msg.find('Integer overflow') == -1:
                gdaltest.post_reason('did not get expected error msg (2)')
                print(error_msg)
                return 'fail'

        gdal.ErrorReset()
        gdal.PushErrorHandler('CPLQuietErrorHandler')
        res = obj.ReadRaster(0,0,0,1)
        gdal.PopErrorHandler()
        error_msg = gdal.GetLastErrorMsg()
        if res is not None:
            gdaltest.post_reason('expected None')
            return 'fail'
        if error_msg.find('Illegal values for buffer size') == -1:
            gdaltest.post_reason('did not get expected error msg (3)')
            print(error_msg)
            return 'fail'

    ds = None

    return 'success'
Example #30
def main(argv=None):

    global verbose, quiet
    verbose = 0
    quiet = 0
    names = []
    format = None
    out_file = 'out.tif'

    ulx = None
    psize_x = None
    separate = 0
    copy_pct = 0
    nodata = None
    a_nodata = None
    create_options = []
    pre_init = []
    band_type = None
    createonly = 0
    bTargetAlignedPixels = False
    start_time = time.time()

    gdal.AllRegister()
    if argv is None:
        argv = sys.argv
    argv = gdal.GeneralCmdLineProcessor(argv)
    if argv is None:
        sys.exit(0)

    # Parse command line arguments.
    i = 1
    while i < len(argv):
        arg = argv[i]

        if arg == '-o':
            i = i + 1
            out_file = argv[i]

        elif arg == '-v':
            verbose = 1

        elif arg == '-q' or arg == '-quiet':
            quiet = 1

        elif arg == '-createonly':
            createonly = 1

        elif arg == '-separate' or arg == '-seperate':
            # '-seperate' kept as a tolerated misspelling
            separate = 1

        elif arg == '-pct':
            copy_pct = 1

        elif arg == '-ot':
            i = i + 1
            band_type = gdal.GetDataTypeByName(argv[i])
            if band_type == gdal.GDT_Unknown:
                print('Unknown GDAL data type: %s' % argv[i])
                sys.exit(1)

        elif arg == '-init':
            i = i + 1
            str_pre_init = argv[i].split()
            for x in str_pre_init:
                pre_init.append(float(x))

        elif arg == '-n':
            i = i + 1
            nodata = float(argv[i])

        elif arg == '-a_nodata':
            i = i + 1
            a_nodata = float(argv[i])

        elif arg == '-f' or arg == '-of':
            i = i + 1
            format = argv[i]

        elif arg == '-co':
            i = i + 1
            create_options.append(argv[i])

        elif arg == '-ps':
            psize_x = float(argv[i + 1])
            psize_y = -1 * abs(float(argv[i + 2]))
            i = i + 2

        elif arg == '-tap':
            bTargetAlignedPixels = True

        elif arg == '-ul_lr':
            ulx = float(argv[i + 1])
            uly = float(argv[i + 2])
            lrx = float(argv[i + 3])
            lry = float(argv[i + 4])
            i = i + 4

        elif arg[:1] == '-':
            print('Unrecognized command option: %s' % arg)
            Usage()
            sys.exit(1)

        else:
            names.append(arg)

        i = i + 1

    if len(names) == 0:
        print('No input files selected.')
        Usage()
        sys.exit(1)

    if format is None:
        format = GetOutputDriverFor(out_file)

    Driver = gdal.GetDriverByName(format)
    if Driver is None:
        print('Format driver %s not found, pick a supported driver.' % format)
        sys.exit(1)

    DriverMD = Driver.GetMetadata()
    if 'DCAP_CREATE' not in DriverMD:
        print(
            'Format driver %s does not support creation and piecewise writing.\nPlease select a format that does, such as GTiff (the default) or HFA (Erdas Imagine).'
            % format)
        sys.exit(1)

    # Collect information on all the source files.
    file_infos = names_to_fileinfos(names)

    if ulx is None:
        ulx = file_infos[0].ulx
        uly = file_infos[0].uly
        lrx = file_infos[0].lrx
        lry = file_infos[0].lry

        for fi in file_infos:
            ulx = min(ulx, fi.ulx)
            uly = max(uly, fi.uly)
            lrx = max(lrx, fi.lrx)
            lry = min(lry, fi.lry)

    if psize_x is None:
        psize_x = file_infos[0].geotransform[1]
        psize_y = file_infos[0].geotransform[5]

    if band_type is None:
        band_type = file_infos[0].band_type

    # Try opening as an existing file.
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    t_fh = gdal.Open(out_file, gdal.GA_Update)
    gdal.PopErrorHandler()

    # Create output file if it does not already exist.
    if t_fh is None:

        if bTargetAlignedPixels:
            ulx = math.floor(ulx / psize_x) * psize_x
            lrx = math.ceil(lrx / psize_x) * psize_x
            lry = math.floor(lry / -psize_y) * -psize_y
            uly = math.ceil(uly / -psize_y) * -psize_y

        geotransform = [ulx, psize_x, 0, uly, 0, psize_y]

        xsize = int((lrx - ulx) / geotransform[1] + 0.5)
        ysize = int((lry - uly) / geotransform[5] + 0.5)

        if separate != 0:
            bands = 0

            for fi in file_infos:
                bands = bands + fi.bands
        else:
            bands = file_infos[0].bands

        t_fh = Driver.Create(out_file, xsize, ysize, bands, band_type,
                             create_options)
        if t_fh is None:
            print('Creation failed, terminating gdal_merge.')
            sys.exit(1)

        t_fh.SetGeoTransform(geotransform)
        t_fh.SetProjection(file_infos[0].projection)

        if copy_pct:
            t_fh.GetRasterBand(1).SetRasterColorTable(file_infos[0].ct)
    else:
        if separate != 0:
            bands = 0
            for fi in file_infos:
                bands = bands + fi.bands
            if t_fh.RasterCount < bands:
                print(
                    'Existing output file has fewer bands than the input '
                    'files. You should delete it first. Terminating gdal_merge.'
                )
                sys.exit(1)
        else:
            bands = min(file_infos[0].bands, t_fh.RasterCount)

    # Do we need to set nodata value ?
    if a_nodata is not None:
        for i in range(t_fh.RasterCount):
            t_fh.GetRasterBand(i + 1).SetNoDataValue(a_nodata)

    # Do we need to pre-initialize the whole mosaic file to some value?
    if pre_init is not None:
        if t_fh.RasterCount <= len(pre_init):
            for i in range(t_fh.RasterCount):
                t_fh.GetRasterBand(i + 1).Fill(pre_init[i])
        elif len(pre_init) == 1:
            for i in range(t_fh.RasterCount):
                t_fh.GetRasterBand(i + 1).Fill(pre_init[0])

    # Copy data from source files into output file.
    t_band = 1

    if quiet == 0 and verbose == 0:
        progress(0.0)
    fi_processed = 0

    for fi in file_infos:
        if createonly != 0:
            continue

        if verbose != 0:
            print("")
            print(
                "Processing file %5d of %5d, %6.3f%% completed in %d minutes."
                %
                (fi_processed + 1, len(file_infos), fi_processed * 100.0 /
                 len(file_infos), int(round(
                     (time.time() - start_time) / 60.0))))
            fi.report()

        if separate == 0:
            for band in range(1, bands + 1):
                fi.copy_into(t_fh, band, band, nodata)
        else:
            for band in range(1, fi.bands + 1):
                fi.copy_into(t_fh, band, t_band, nodata)
                t_band = t_band + 1

        fi_processed = fi_processed + 1
        if quiet == 0 and verbose == 0:
            progress(fi_processed / float(len(file_infos)))

    # Force file to be closed.
    t_fh = None
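For reference, a hedged invocation sketch using the options parsed above (file names are placeholders):

#   python gdal_merge.py -o mosaic.tif -of GTiff -n -9999 -init 0 \
#       -ps 30 30 input_a.tif input_b.tif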