def mask_23():

    drv = gdal.GetDriverByName('GTiff')
    md = drv.GetMetadata()
    if md['DMD_CREATIONOPTIONLIST'].find('JPEG') == -1:
        return 'skip'

    src_ds = drv.Create('tmp/mask_23_src.tif', 3000, 2000, 3, options=['TILED=YES', 'SPARSE_OK=YES'])
    src_ds.CreateMaskBand(gdal.GMF_PER_DATASET)

    gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK', 'YES')
    old_val = gdal.GetCacheMax()
    gdal.SetCacheMax(15000000)
    gdal.ErrorReset()
    ds = drv.CreateCopy('tmp/mask_23_dst.tif', src_ds, options=['TILED=YES', 'COMPRESS=JPEG'])
    gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK', 'NO')
    gdal.SetCacheMax(old_val)

    del ds
    error_msg = gdal.GetLastErrorMsg()
    src_ds = None

    drv.Delete('tmp/mask_23_src.tif')
    drv.Delete('tmp/mask_23_dst.tif')

    # 'ERROR 1: TIFFRewriteDirectory:Error fetching directory count' was triggered before
    if error_msg != '':
        return 'fail'

    return 'success'
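The save-set-restore dance around GDAL_TIFF_INTERNAL_MASK and the cache limit above is easy to get wrong when an error escapes mid-test. A minimal sketch of wrapping the cache half of the pattern in a context manager (the helper name is illustrative, not part of GDAL):

from contextlib import contextmanager
from osgeo import gdal

@contextmanager
def cache_max(num_bytes):
    # Save the current block-cache limit, apply the new one, and
    # restore the old value even if the body raises.
    old_val = gdal.GetCacheMax()
    gdal.SetCacheMax(num_bytes)
    try:
        yield
    finally:
        gdal.SetCacheMax(old_val)

With this, the CreateCopy call above could run under `with cache_max(15000000):` and the limit is restored automatically. Newer GDAL Python bindings also ship a gdal.config_option() context manager for the config-option half of the pattern (availability depends on the GDAL version).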
Example #2
def test_misc_9():

    old_val = gdal.GetCacheMax()
    gdal.SetCacheMax(3000000000)
    ret_val = gdal.GetCacheMax()
    gdal.SetCacheMax(old_val)

    assert ret_val == 3000000000, 'did not get expected value'
Example #3
def misc_9():

    old_val = gdal.GetCacheMax()
    gdal.SetCacheMax(3000000000)
    ret_val = gdal.GetCacheMax()
    gdal.SetCacheMax(old_val)

    if ret_val != 3000000000:
        gdaltest.post_reason('did not get expected value')
        print(ret_val)
        return 'fail'

    return 'success'
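SetCacheMax takes a byte count, and 3000000000 deliberately exceeds 2**31 to exercise the 64-bit code path. The same limit can also be supplied before GDAL allocates its cache via the GDAL_CACHEMAX configuration option; a hedged sketch (exact parsing rules vary by GDAL version):

from osgeo import gdal

gdal.SetCacheMax(3000000000)                  # explicit byte count via the API
gdal.SetConfigOption('GDAL_CACHEMAX', '512')  # small values are read as megabytes
# Recent GDAL releases also accept a share of RAM for GDAL_CACHEMAX, e.g. '30%'.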
Example #4
def test_jpeg_18():
    height = 1024
    width = 1024
    src_ds = gdal.GetDriverByName('GTiff').Create('/vsimem/jpeg_18.tif', width,
                                                  height, 1)
    for i in range(height):
        data = struct.pack('B' * 1, int(i / (height / 256)))
        src_ds.WriteRaster(0, i, width, 1, data, 1, 1)

    ds = gdal.GetDriverByName('JPEG').CreateCopy('/vsimem/jpeg_18.jpg',
                                                 src_ds,
                                                 options=['QUALITY=99'])
    src_ds = None
    gdal.Unlink('/vsimem/jpeg_18.tif')

    oldSize = gdal.GetCacheMax()
    gdal.SetCacheMax(0)

    line0 = ds.GetRasterBand(1).ReadRaster(0, 0, width, 1)
    data = struct.unpack('B' * width, line0)
    assert abs(data[0] - 0) <= 10
    line1023 = ds.GetRasterBand(1).ReadRaster(0, height - 1, width, 1)
    data = struct.unpack('B' * width, line1023)
    assert abs(data[0] - 255) <= 10
    line0_ovr1 = ds.GetRasterBand(1).GetOverview(1).ReadRaster(
        0, 0, int(width / 4), 1)
    data = struct.unpack('B' * (int(width / 4)), line0_ovr1)
    assert abs(data[0] - 0) <= 10
    line1023_bis = ds.GetRasterBand(1).ReadRaster(0, height - 1, width, 1)
    assert line1023_bis != line0 and line1023 == line1023_bis
    line0_bis = ds.GetRasterBand(1).ReadRaster(0, 0, width, 1)
    assert line0 == line0_bis
    line255_ovr1 = ds.GetRasterBand(1).GetOverview(1).ReadRaster(
        0,
        int(height / 4) - 1, int(width / 4), 1)
    data = struct.unpack('B' * int(width / 4), line255_ovr1)
    assert abs(data[0] - 255) <= 10
    line0_bis = ds.GetRasterBand(1).ReadRaster(0, 0, width, 1)
    assert line0 == line0_bis
    line0_ovr1_bis = ds.GetRasterBand(1).GetOverview(1).ReadRaster(
        0, 0, int(width / 4), 1)
    assert line0_ovr1 == line0_ovr1_bis
    line255_ovr1_bis = ds.GetRasterBand(1).GetOverview(1).ReadRaster(
        0,
        int(height / 4) - 1, int(width / 4), 1)
    assert line255_ovr1 == line255_ovr1_bis

    gdal.SetCacheMax(oldSize)

    ds = None
    gdal.Unlink('/vsimem/jpeg_18.jpg')
Example #5
    def calib_L1A(self, filein, QualifyValue, CalibValue):
        '''
        Radiometric calibration of L1A imagery.
        '''
        gdal.SetCacheMax(2**30)
        dataset = gdal.Open(filein)
        data = dataset.ReadAsArray(0,
                                   0,
                                   dataset.RasterXSize,
                                   dataset.RasterYSize,
                                   buf_type='float32')
        description = dataset.GetDriver().GetDescription()
        geotrans = dataset.GetGeoTransform()
        projection = dataset.GetProjection()
        dcal = data[0]**2 + data[1]**2
        del data

        Qtmp = (QualifyValue / 32767)**2
        dcal *= Qtmp
        dcal = np.log10(dcal)
        dcal *= 10
        dcal -= CalibValue
        dcal[np.isnan(dcal)] = 0
        dcal[np.isinf(dcal)] = 0

        fileout = filein[:-5] + '_Calib.tiff'
        _op_.writeTiff(dcal, fileout, description, geotrans, projection)
        del dataset
        del dcal
        return 0
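The calibration in calib_L1A reduces to sigma0 = 10 * log10((I**2 + Q**2) * (QualifyValue / 32767)**2) - CalibValue applied to the complex (I, Q) channel pair. A standalone sketch of the same arithmetic (the function name is illustrative):

import numpy as np

def calibrate_db(i_band, q_band, qualify_value, calib_value):
    # Power from the complex (I, Q) pair, scaled by the qualify factor, in dB.
    power = i_band.astype('float32')**2 + q_band.astype('float32')**2
    power *= (qualify_value / 32767.0)**2
    db = 10.0 * np.log10(power) - calib_value
    db[~np.isfinite(db)] = 0  # zero out NaN/Inf, as the method above does
    return db
Example #6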
def rpc_orth(in_list):
    in_files = in_list[:-2]
    dem_file = in_list[-2]
    out_dir = in_list[-1]

    # total physical memory, in bytes
    total_memory = psutil.virtual_memory().total

    # Note: "/ 1 * 2" requests twice the physical RAM as block cache;
    # a fraction such as total_memory // 3 * 2 was more likely intended.
    gdal.SetCacheMax(int(total_memory / 1 * 2))
    tif_driver = gdal.GetDriverByName("GTiff")

    for in_file in in_files:

        input_name = os.path.splitext(os.path.basename(in_file))[0]

        out_file = os.path.join(out_dir, '%s.tif' % input_name)

        if os.path.exists(out_file):
            tif_driver.Delete(out_file)

        in_xml_file = os.path.splitext(in_file)[0] + '.xml'

        if os.path.exists(in_xml_file):
            gdal.Warp(out_file,
                      in_file,
                      rpc=True,
                      multithread=True,
                      errorThreshold=0.0,
                      resampleAlg=gdal.GRIORA_Bilinear,
                      transformerOptions=['RPC_DEM=%s' % dem_file])

            out_xml_file = os.path.join(
                os.path.dirname(out_file),
                '%s.xml' % os.path.splitext(os.path.basename(out_file))[0])
            shutil.copy(in_xml_file, out_xml_file)

        json_file = search_file(os.path.dirname(in_file), '.json')

        if json_file != []:
            if search_file(os.path.dirname(in_file), '.txt') == []:

                gdal.Warp(out_file,
                          in_file,
                          multithread=True,
                          errorThreshold=0.0,
                          resampleAlg=gdal.GRIORA_Bilinear,
                          transformerOptions=['RPC_DEM=%s' % dem_file])
            else:
                gdal.Warp(out_file,
                          in_file,
                          rpc=True,
                          multithread=True,
                          errorThreshold=0.0,
                          resampleAlg=gdal.GRIORA_Bilinear,
                          transformerOptions=['RPC_DEM=%s' % dem_file])
            out_json_file = os.path.join(
                os.path.dirname(out_file),
                '%s.json' % os.path.splitext(os.path.basename(out_file))[0])
            shutil.copy(json_file[0], out_json_file)
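rpc_orth packs everything into a single list — input rasters first, then the DEM, then the output directory — presumably so the whole job can be handed to a map-style multiprocessing API. A hypothetical call (all paths illustrative):

rpc_orth(['GF3_scene1.tiff', 'GF3_scene2.tiff', 'srtm_dem.tif', '/data/ortho_out'])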
Example #7
def gdal_write(src_arr,
               dst_gdal,
               ds_config,
               dst_fmt='GTiff',
               max_cache=False,
               verbose=False):
    """write src_arr to gdal file dst_gdal using src_config

    returns [output-gdal, status-code]
    """

    driver = gdal.GetDriverByName(dst_fmt)
    if os.path.exists(dst_gdal):
        try:
            driver.Delete(dst_gdal)
        except Exception as e:
            echo_error_msg(e)
            remove_glob(dst_gdal)

    if max_cache:
        gdal.SetCacheMax(2**30)

    if ds_config['dt'] == 5:
        ds = driver.Create(dst_gdal,
                           ds_config['nx'],
                           ds_config['ny'],
                           1,
                           ds_config['dt'],
                           options=['COMPRESS=DEFLATE', 'TILED=YES'])
    else:
        ds = driver.Create(
            dst_gdal,
            ds_config['nx'],
            ds_config['ny'],
            1,
            ds_config['dt'],
            options=['COMPRESS=DEFLATE', 'TILED=YES', 'PREDICTOR=3'])

    if ds is not None:
        ds.SetGeoTransform(ds_config['geoT'])
        try:
            ds.SetProjection(ds_config['proj'])
        except Exception:
            if verbose:
                echo_warning_msg('could not set projection {}'.format(
                    ds_config['proj']))
        ds.GetRasterBand(1).SetNoDataValue(ds_config['ndv'])
        ds.GetRasterBand(1).WriteArray(src_arr)
        ds = None
        return (dst_gdal, 0)

    else:
        return (None, -1)
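The ds_config dictionary gdal_write expects can be read off the keys it accesses: nx/ny (raster size), dt (a GDAL data type code; 5 is GDT_Int32, which skips the floating-point PREDICTOR=3 creation option), geoT (geotransform), proj (projection WKT), and ndv (nodata). A hedged usage sketch with illustrative values:

import numpy as np
from osgeo import gdal, osr

srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
ds_config = {
    'nx': 100, 'ny': 100,
    'dt': gdal.GDT_Float32,  # == 6, so the PREDICTOR=3 branch is taken
    'geoT': (-180.0, 0.1, 0.0, 90.0, 0.0, -0.1),
    'proj': srs.ExportToWkt(),
    'ndv': -9999.0,
}
arr = np.full((100, 100), -9999.0, dtype=np.float32)
out_file, status = gdal_write(arr, '/tmp/out.tif', ds_config, max_cache=True)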
Example #8
def rpc_process(in_file, out_file, dem_file):
    # total physical memory, in bytes
    total_memory = psutil.virtual_memory().total

    # Note: "/ 1 * 2" requests twice the physical RAM as block cache;
    # a fraction such as total_memory // 3 * 2 was more likely intended.
    gdal.SetCacheMax(int(total_memory / 1 * 2))
    tif_driver = gdal.GetDriverByName("GTiff")

    if os.path.exists(out_file):
        tif_driver.Delete(out_file)
    in_xml_file = os.path.splitext(in_file)[0] + '.xml'

    if os.path.exists(in_xml_file):
        gdal.Warp(out_file,
                  in_file,
                  rpc=True,
                  multithread=True,
                  errorThreshold=0.0,
                  resampleAlg=gdal.GRIORA_Bilinear,
                  callback=progress,
                  transformerOptions=['RPC_DEM=%s' % dem_file])

        out_xml_file = os.path.join(
            os.path.dirname(out_file),
            '%s.xml' % os.path.splitext(os.path.basename(out_file))[0])
        shutil.copy(in_xml_file, out_xml_file)

    json_file = searchfiles(os.path.dirname(in_file), '*.json')

    if json_file != []:
        if searchfiles(os.path.dirname(in_file), '*.txt') == []:

            gdal.Warp(out_file,
                      in_file,
                      multithread=True,
                      errorThreshold=0.0,
                      resampleAlg=gdal.GRIORA_Bilinear,
                      callback=progress,
                      transformerOptions=['RPC_DEM=%s' % dem_file])
        else:
            gdal.Warp(out_file,
                      in_file,
                      rpc=True,
                      multithread=True,
                      errorThreshold=0.0,
                      resampleAlg=gdal.GRIORA_Bilinear,
                      callback=progress,
                      transformerOptions=['RPC_DEM=%s' % dem_file])
        out_json_file = os.path.join(
            os.path.dirname(out_file),
            '%s.json' % os.path.splitext(os.path.basename(out_file))[0])
        shutil.copy(json_file[0], out_json_file)
    return None
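A hypothetical call; whether the RPC path is taken depends on the sidecar .xml/.json/.txt files found next to the input (paths illustrative):

rpc_process('GF3_scene.tiff', '/data/ortho/GF3_scene_ortho.tif', 'srtm_dem.tif')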
Example #9
def clip_raster(in_file, shapefile, srcnodata, dstnodata, out_file):
    # total physical memory, in bytes
    total_memory = psutil.virtual_memory().total

    gdal.SetConfigOption('GDALWARP_IGNORE_BAD_CUTLINE', 'YES')
    gdal.PushErrorHandler('CPLQuietErrorHandler')
    gdal.SetCacheMax(int(total_memory))

    tiff_driver = gdal.GetDriverByName("GTiff")
    if os.path.exists(out_file):
        tiff_driver.Delete(out_file)

    gdal.Warp(out_file, in_file, cutlineDSName=shapefile, cropToCutline=True,
              srcNodata=srcnodata, dstNodata=dstnodata, multithread=True)
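A hypothetical call, clipping a mosaic to a shapefile cutline while remapping nodata 0 to -9999 (paths illustrative):

clip_raster('mosaic.tif', 'aoi.shp', 0, -9999, '/tmp/mosaic_clip.tif')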
Example #10
    def loadGDALSettings(self, settings):
        logger = self._app.logger

        settings.beginGroup('gdal')
        try:
            cachesize = settings.value('GDAL_CACHEMAX')
            if cachesize is not None:
                cachesize = int(cachesize)
                gdal.SetCacheMax(cachesize)
                logger.debug('GDAL cache size set to %d' % cachesize)

            value = settings.value('GDAL_DATA')
            if value:
                value = os.path.expanduser(os.path.expandvars(value))
                gdal.SetConfigOption('GDAL_DATA', value)
                logger.debug('GDAL_DATA directory set to "%s"' % value)

            for optname in ('GDAL_SKIP', 'GDAL_DRIVER_PATH',
                            'OGR_DRIVER_PATH'):
                value = settings.value(optname)
                if value is not None:
                    value = os.path.expanduser(os.path.expandvars(value))
                    # @NOTE: type of arg 2 of SetConfigOption must be str,
                    #        not unicode
                    gdal.SetConfigOption(optname, str(value))
                    logger.debug('%s set to "%s"' % (optname, value))

            gdal.AllRegister()
            logger.debug('run "gdal.AllRegister()"')

            # update the about dialog
            tabWidget = self._app.aboutdialog.tabWidget
            for index in range(tabWidget.count()):
                if tabWidget.tabText(index) == 'GDAL':
                    gdalinfowidget = tabWidget.widget(index)
                    gdalinfowidget.setGdalDriversTab()
                    break
            else:
                logger.debug('GDAL page not found in the about dialog')
                return
        finally:
            settings.endGroup()
Example #11
def main(in_file, out_file, shapefile, srcnodata=None, dstnodata=None):
    # srcnodata and dstnodata were undefined names in the original snippet;
    # accept them as parameters so the Warp call below resolves.
    # total physical memory, in bytes
    total_memory = psutil.virtual_memory().total

    gdal.SetConfigOption('GDALWARP_IGNORE_BAD_CUTLINE', 'YES')
    gdal.SetCacheMax(int(total_memory))
    tiff_driver = gdal.GetDriverByName("GTiff")
    if os.path.exists(out_file):
        tiff_driver.Delete(out_file)

    # gdal.Warp(out_file, in_file, cutlineDSName = shapefile, cropToCutline = True,
    #           srcNodata = srcnodata, dstNodata = dstnodata, multithread = True,
    #           warpMemoryLimit = int(total_memory / 4 * 3),
    #           callback = progress, resampleAlg = gdal.GRIORA_Bilinear)

    gdal.Warp(out_file,
              in_file,
              cutlineDSName=shapefile,
              cropToCutline=True,
              srcNodata=srcnodata,
              dstNodata=dstnodata,
              multithread=True,
              callback=progress,
              resampleAlg=gdal.GRIORA_Bilinear)
Example #12
"""
import os
from os.path import join
import pickle as cp
import numpy as np
from osgeo import gdal
import subprocess
from pathlib import Path

from pyrate.core import shared, ifgconstants as ifc, mpiops, config as cf
from pyrate.core.shared import PrereadIfg
from pyrate.constants import REF_COLOR_MAP_PATH
from pyrate.core.config import OBS_DIR, OUT_DIR, ConfigException
from pyrate.core.logger import pyratelogger as log

gdal.SetCacheMax(64)

# Constants
MASTER_PROCESS = 0


def main(params):
    """
    PyRate merge main function. Assembles product tiles in to
    single geotiff files
    """
    # setup paths
    rows, cols = params["rows"], params["cols"]
    _merge_stack(rows, cols, params)

    if params[cf.TIME_SERIES_CAL]:
Example #13
def jpeg_18():
    height = 1024
    width = 1024
    src_ds = gdal.GetDriverByName('GTiff').Create('/vsimem/jpeg_18.tif', width,
                                                  height, 1)
    for i in range(height):
        data = struct.pack('B' * 1, int(i / (height / 256)))
        src_ds.WriteRaster(0, i, width, 1, data, 1, 1)

    ds = gdal.GetDriverByName('JPEG').CreateCopy('/vsimem/jpeg_18.jpg',
                                                 src_ds,
                                                 options=['QUALITY=99'])
    src_ds = None
    gdal.Unlink('/vsimem/jpeg_18.tif')

    oldSize = gdal.GetCacheMax()
    gdal.SetCacheMax(0)

    line0 = ds.GetRasterBand(1).ReadRaster(0, 0, width, 1)
    data = struct.unpack('B' * width, line0)
    if abs(data[0] - 0) > 10:
        return 'fail'
    line1023 = ds.GetRasterBand(1).ReadRaster(0, height - 1, width, 1)
    data = struct.unpack('B' * width, line1023)
    if abs(data[0] - 255) > 10:
        return 'fail'
    line0_ovr1 = ds.GetRasterBand(1).GetOverview(1).ReadRaster(
        0, 0, int(width / 4), 1)
    data = struct.unpack('B' * (int(width / 4)), line0_ovr1)
    if abs(data[0] - 0) > 10:
        return 'fail'
    line1023_bis = ds.GetRasterBand(1).ReadRaster(0, height - 1, width, 1)
    if line1023_bis == line0 or line1023 != line1023_bis:
        gdaltest.post_reason('fail')
        return 'fail'
    line0_bis = ds.GetRasterBand(1).ReadRaster(0, 0, width, 1)
    if line0 != line0_bis:
        gdaltest.post_reason('fail')
        return 'fail'
    line255_ovr1 = ds.GetRasterBand(1).GetOverview(1).ReadRaster(
        0,
        int(height / 4) - 1, int(width / 4), 1)
    data = struct.unpack('B' * int(width / 4), line255_ovr1)
    if abs(data[0] - 255) > 10:
        return 'fail'
    line0_bis = ds.GetRasterBand(1).ReadRaster(0, 0, width, 1)
    if line0 != line0_bis:
        gdaltest.post_reason('fail')
        return 'fail'
    line0_ovr1_bis = ds.GetRasterBand(1).GetOverview(1).ReadRaster(
        0, 0, int(width / 4), 1)
    if line0_ovr1 != line0_ovr1_bis:
        gdaltest.post_reason('fail')
        return 'fail'
    line255_ovr1_bis = ds.GetRasterBand(1).GetOverview(1).ReadRaster(
        0,
        int(height / 4) - 1, int(width / 4), 1)
    if line255_ovr1 != line255_ovr1_bis:
        gdaltest.post_reason('fail')
        return 'fail'

    gdal.SetCacheMax(oldSize)

    ds = None
    gdal.Unlink('/vsimem/jpeg_18.jpg')

    return 'success'
Example #14
# Import Numpy
import numpy as np

# Import GDAL et al.
from osgeo import gdal
from osgeo import ogr
from osgeo import osr

# Fix osgeo error reporting
gdal.UseExceptions()
ogr.UseExceptions()
osr.UseExceptions()

# Set gdal configuration parameters
gdal.SetCacheMax(
    2147483648
)  # 2 GB. Sets the GDAL raster block cache size; a larger cache can speed up reads and writes.
gdal.SetConfigOption('HFA_USE_RRD', 'YES')  # Write Erdas-style .rrd external overviews

import gmtools.geospatial as gm_geo


def reproject_window_offset(src_dataset,
                            topleft_x,
                            topleft_y,
                            window_xrange,
                            window_yrange,
                            desired_cellsize_x,
                            desired_cellsize_y,
                            band=1,
                            epsg_from=None,
Example #15
def main(argv):
    # Setup logging
    duallog.setup(Path(FLAGS.data_directory) / 'logs')
    logging.set_verbosity(
        FLAGS.logging_verbosity
    )  # Must be called after duallog.setup() to function properly

    # Configure GDAL
    gdal.SetCacheMax(8 * 1000000000)

    # Create absolute paths (either use full path provided as argument or use data dir in the project folder)
    data_dir = Path(FLAGS.data_directory) if os.path.isabs(
        FLAGS.data_directory) else Path.cwd() / FLAGS.data_directory

    # Ensure filename on geojson file
    geojson_path = FLAGS.geojson if FLAGS.geojson.endswith(
        '.geojson') else FLAGS.geojson + '.geojson'

    # If no order_id from previous order is provided, then download the data requested for this order
    order_id = FLAGS.order_id
    if order_id == 'Empty':
        order_id = 'order_' + datetime.datetime.today().strftime(
            '%Y%m%d-%H%M%S')

        logging.info("####################################")
        logging.info("# Initializing Sentinel downloader #")
        logging.info("####################################")
        logging.info("Order id: " + order_id)
        downloader = Downloader(username=FLAGS.username,
                                password=FLAGS.password,
                                satellite=FLAGS.satellite,
                                order_id=order_id,
                                directory=data_dir)

        # Load the geojson file (check whether the filename was included in the provided name)
        if 'denmark_without_bornholm' in str(geojson_path):
            # Load the default geojson (denmark_without_bornholm), which is included in the project code
            footprint = geojson_to_wkt(
                read_geojson(
                    Path('data') / 'geojson' /
                    'denmark_without_bornholm.geojson'))
        else:
            # Load the provided geojson file from the data directory
            footprint = geojson_to_wkt(
                read_geojson(data_dir / 'geojson' /
                             geojson_path))  # Load from data directory

        # Query the data (multiple footprints can be used, but it is recommended to stick to a single footprint)
        downloader.query(footprint, FLAGS.startdate, FLAGS.enddate)

        # Following code can be used if several geojson files are to be queried
        # footprint = geojson_to_wkt(read_geojson('data/geojson/bornholm.geojson'))
        # downloader.query(footprint, FLAGS.startdate, FLAGS.enddate)

        # Print the number of products and size of all products to be downloaded
        downloader.print_num_and_size_of_products()
        downloader.save_queried_products(
        )  # Save a geojson containing all products to be downloaded
        logging.info("")

        if FLAGS.download:
            logging.info("####################")
            logging.info("# Downloading data #")
            logging.info("####################")
            downloader.download_zipfiles()
            logging.info("")

    if FLAGS.process_tiles:
        # Load products to be processed (always load from file to ensure modularity for the downloader and processor)
        queried_products_path = (data_dir / 'orders' /
                                 order_id).with_suffix('.pkl')
        products_df = pd.read_pickle(queried_products_path)

        logging.info("###################")
        logging.info("# Processing data #")
        logging.info("###################")
        processpipeliner = ProcessPipeliner(products_df=products_df,
                                            directory=data_dir)
        processpipeliner.process_products()
Example #16
def test_mask_14():

    src_ds = gdal.Open('data/byte.tif')

    assert src_ds is not None, 'Failed to open test dataset.'

    drv = gdal.GetDriverByName('GTiff')
    with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK_TO_8BIT', 'FALSE'):
        ds = drv.CreateCopy('tmp/byte_with_mask.tif', src_ds)
    src_ds = None

    # The only flag value supported for internal mask is GMF_PER_DATASET
    with gdaltest.error_handler():
        with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
            ret = ds.CreateMaskBand(0)
    assert ret != 0, 'Error expected'

    with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
        ret = ds.CreateMaskBand(gdal.GMF_PER_DATASET)
    assert ret == 0, 'Creation failed'

    cs = ds.GetRasterBand(1).GetMaskBand().Checksum()
    assert cs == 0, 'Got wrong checksum for the mask (1)'

    ds.GetRasterBand(1).GetMaskBand().Fill(1)

    cs = ds.GetRasterBand(1).GetMaskBand().Checksum()
    assert cs == 400, 'Got wrong checksum for the mask (2)'

    # This TIFF dataset has already an internal mask band
    with gdaltest.error_handler():
        with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
            ret = ds.CreateMaskBand(gdal.GMF_PER_DATASET)
    assert ret != 0, 'Error expected'

    # This TIFF dataset has already an internal mask band
    with gdaltest.error_handler():
        with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
            ret = ds.GetRasterBand(1).CreateMaskBand(gdal.GMF_PER_DATASET)
    assert ret != 0, 'Error expected'

    ds = None

    # tmp/byte_with_mask.tif.msk should not exist (the `message` kwarg
    # was removed from pytest.raises in modern pytest)
    with pytest.raises(OSError):
        os.stat('tmp/byte_with_mask.tif.msk')

    with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK_TO_8BIT', 'FALSE'):
        ds = gdal.Open('tmp/byte_with_mask.tif')

        assert ds.GetRasterBand(1).GetMaskFlags() == gdal.GMF_PER_DATASET, \
          'wrong mask flags'

    cs = ds.GetRasterBand(1).GetMaskBand().Checksum()
    assert cs == 400, 'Got wrong checksum for the mask (3)'

    # Test fix for #5884
    old_val = gdal.GetCacheMax()
    gdal.SetCacheMax(0)
    with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
        out_ds = drv.CreateCopy('/vsimem/byte_with_mask.tif',
                                ds,
                                options=['COMPRESS=JPEG'])
    gdal.SetCacheMax(old_val)
    assert out_ds.GetRasterBand(1).Checksum() != 0
    cs = ds.GetRasterBand(1).GetMaskBand().Checksum()
    assert cs == 400, 'Got wrong checksum for the mask (4)'
    out_ds = None
    drv.Delete('/vsimem/byte_with_mask.tif')

    ds = None

    drv.Delete('tmp/byte_with_mask.tif')
Example #17
def main(args):
    starttime = Timer.starttimer()
    #Cache thrashing is common when working with large files;
    # we help alleviate misses by setting a larger than normal cache (1 GB).

    gdal.SetCacheMax(1073741824)

    #Get stretch type
    stretch = OptParse.argget_stretch(args)

    #Get some info about the machine for mp
    cores = args['ncores']
    if cores is None:
        cores = mp.cpu_count()

    #Load the input dataset using the GdalIO class and get / set the output datatype.
    dataset = OpenDataSet(args['input'])
    raster = dataset.load()
    xsize, ysize, nbands, projection, geotransform = dataset.info(raster)

    #Get band information
    bands = [raster.GetRasterBand(b) for b in range(1, nbands + 1)]
    bandstats = [Stats.get_band_stats(b) for b in bands]
    b = bands[0]
    banddtype = b.DataType
    blocksize = b.GetBlockSize()
    xblocksize = blocksize[0]
    yblocksize = blocksize[1]

    output = create_output(args['outputformat'], args['output'], xsize, ysize,
                           len(bands), projection, geotransform,
                           gdal.GetDataTypeByName(args['dtype']))

    #Intelligently segment the image based upon number of cores and intrinsic block size
    if args['byline'] is True:
        segments = segment_image(xsize, ysize, 1, ysize)
        args['statsper'] = True
    elif args['bycolumn'] is True:
        segments = segment_image(xsize, ysize, xsize, 1)
        args['statsper'] = True
    elif args['horizontal_segments'] is not None or args[
            'vertical_segments'] is not None:
        #The user is defining the segmentation
        segments = segment_image(xsize, ysize, args['vertical_segments'],
                                 args['horizontal_segments'])
    else:
        segments = [(0, 0, xsize, ysize)]

    carray_dtype = _gdal_to_ctypes[banddtype]

    #Preallocate a sharedmem array of the correct size
    ctypesxsize, ctypesysize = segments[0][2:]
    if args['byline'] is True:
        ctypesysize = cores
    elif args['bycolumn'] is True:
        ctypesxsize = cores
    carray = mp.RawArray(carray_dtype, ctypesxsize * ctypesysize)
    glb.sharedarray = np.frombuffer(carray,
                                    dtype=_gdal_to_numpy[banddtype]).reshape(
                                        ctypesysize, ctypesxsize)

    pool = mp.Pool(processes=cores,
                   initializer=glb.init,
                   initargs=(glb.sharedarray, ))

    #A conscious decision to iterate over the bands in serial - an I/O bottleneck anyway
    for j, band in enumerate(bands):

        stats = bandstats[j]
        bandmin = stats['minimum']
        bandmax = stats['maximum']
        ndv = stats['ndv']
        userndv = args['ndv']
        args.update(stats)

        if args['byline'] is True:
            for y in range(0, ysize, cores):
                xstart, ystart, intervalx, intervaly = 0, y, xsize, cores
                if ystart + intervaly > ysize:
                    intervaly = ysize - ystart
                #print ystart, ystart + intervaly
                #print y, ystart, ystart+ intervaly, intervaly
                glb.sharedarray[:intervaly, :intervalx] = band.ReadAsArray(
                    xstart, ystart, intervalx, intervaly)
                #If the input has an NDV - mask it.
                if stats['ndv'] != None:
                    glb.sharedarray = np.ma.masked_equal(glb.sharedarray,
                                                         stats['ndv'],
                                                         copy=False)
                    mask = np.ma.getmask(glb.sharedarray)
                #if args['statsper'] is True:
                #args.update(Stats.get_array_stats(glb.sharedarray, stretch))
                for i in range(cores):
                    res = pool.apply(stretch, args=(slice(i, i + 1), args))

                if args['ndv'] != None:
                    glb.sharedarray[glb.sharedarray == ndv] = args['ndv']
                    output.GetRasterBand(j + 1).SetNoDataValue(float(userndv))
                if args['scale'] is not None:
                    #Scale the data before writing to disk
                    scale(args['scale'][0], args['scale'][1], bandmin, bandmax)
                output.GetRasterBand(j + 1).WriteArray(
                    glb.sharedarray[:intervaly, :intervalx], xstart, ystart)

                if args['quiet']:
                    print "Processed {} or {} lines \r".format(y, ysize),
                    sys.stdout.flush()
        elif args['bycolumn'] is True:
            for x in range(0, xsize, cores):
                xstart, ystart, intervalx, intervaly = x, 0, cores, ysize
                if xstart + intervalx > xsize:
                    intervalx = xsize - xstart

                glb.sharedarray[:intervaly, :intervalx] = band.ReadAsArray(
                    xstart, ystart, intervalx, intervaly)
                #If the input has an NDV - mask it.
                if stats['ndv'] != None:
                    glb.sharedarray = np.ma.masked_equal(glb.sharedarray,
                                                         stats['ndv'],
                                                         copy=False)
                    mask = np.ma.getmask(glb.sharedarray)
                if args['statsper'] is True:
                    args.update(Stats.get_array_stats(glb.sharedarray,
                                                      stretch))
                for i in range(cores):
                    res = pool.apply(stretch, args=(slice(i, i + 1), args))

                if args['ndv'] != None:
                    glb.sharedarray[glb.sharedarray == ndv] = args['ndv']
                    output.GetRasterBand(j + 1).SetNoDataValue(float(userndv))
                if args['scale'] is not None:
                    scale(args['scale'][0], args['scale'][1], bandmin, bandmax)
                output.GetRasterBand(j + 1).WriteArray(
                    glb.sharedarray[:intervaly, :intervalx], xstart, ystart)

                if args['quiet']:
                    print "Processed {} or {} lines \r".format(x, xsize),
                    sys.stdout.flush()
        #If not processing line by line, distribute the block over available cores
        else:
            for i, chunk in enumerate(segments):
                xstart, ystart, intervalx, intervaly = chunk
                #Read the array into the buffer
                glb.sharedarray[:intervaly, :intervalx] = band.ReadAsArray(
                    xstart, ystart, intervalx, intervaly)

                #If the input has an NDV - mask it.
                if stats['ndv'] != None:
                    glb.sharedarray = np.ma.masked_equal(glb.sharedarray,
                                                         stats['ndv'],
                                                         copy=False)
                    mask = np.ma.getmask(glb.sharedarray)
                if args['statsper'] is True:
                    args.update(Stats.get_array_stats(glb.sharedarray,
                                                      stretch))

                #Determine the decomposition for each core

                step = intervaly // cores

                starts = range(0, intervaly + 1, step)
                stops = starts[1:]
                stops.append(intervaly + 1)
                offsets = zip(starts, stops)
                for o in offsets:
                    res = pool.apply(stretch, args=(slice(o[0], o[1]), args))

            if args['ndv'] != None:
                glb.sharedarray[glb.sharedarray == ndv] = args['ndv']
                output.GetRasterBand(j + 1).SetNoDataValue(float(userndv))
            if args['scale'] is not None:
                #Scale the data before writing to disk
                scale(args['scale'][0], args['scale'][1], bandmin, bandmax)
            output.GetRasterBand(j + 1).WriteArray(
                glb.sharedarray[:intervaly, :intervalx], xstart, ystart)

    Timer.totaltime(starttime)

    #Close up
    dataset = None
    output = None
    pool.close()
    pool.join()
Example #18
from osgeo import gdal, osr, gdal_array
from typing import Tuple, NoReturn, Callable, List
import numpy as np

from .tiling import get_tiles, Tile

gdal.SetCacheMax(1000000000)


def _default_progress_callback(progress):
    ''' Default progress callback function. Prints % complete to stdout.
    '''
    print(f"Progress = {progress * 100:.1f}")


class GridTransformer:
    ''' Converts raster data into the format expected by MBES Grid Checks and Finder Grid Checks.
    This process takes three separate files, each containing a single band, and outputs a three
    band raster including density, depth, and uncertainty (all 32-bit floats).
    '''
    def __init__(self):
        self.output_datatype = gdal.GDT_Float32
        # set output nodata to -3.4028235e+38
        self.output_nodata = np.finfo(np.float32).min.item()
        # data is chunked by tiles with this size
        self.error_messages = []
        self.warning_messages = []

    def _validate_sizes(self, density: gdal.Dataset, depth: gdal.Dataset,
                        uncertainty: gdal.Dataset) -> bool:
        if ((density.RasterXSize == depth.RasterXSize ==
Example #19
def setCacheMax(cache_in_bytes: int):
    os.environ['GDAL_CACHEMAX'] = '%d' % int(cache_in_bytes / 1024 / 1024)
    gdal.SetCacheMax(cache_in_bytes)
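This helper keeps the GDAL_CACHEMAX environment variable (in megabytes) and the in-process limit (in bytes) in sync, which matters when child processes inherit the environment; it assumes `import os` and `from osgeo import gdal` are in scope. A hypothetical call:

setCacheMax(1024 * 1024 * 1024)  # 1 GiB: exports GDAL_CACHEMAX=1024 and sets the API limit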
Example #20
def main(options, args):
    starttime = Timer.starttimer()
    #Cache thrashing is common when working with large files, we help alleviate misses by setting a larger than normal cache.  1GB
    gdal.SetCacheMax(1073741824)

    #Check for input
    if not args:
        print "\nERROR: You must supply an input data set.\n"
        sys.exit(0)

    #Get stretch type
    stretch = OptParse.get_stretch(options)

    #Get some info about the machine for multiprocessing
    cores = multiprocessing.cpu_count()
    cores *= 2
    print "Processing on %i cores." % cores

    #Load the input dataset using the GdalIO class and get / set the output datatype.
    dataset = GdalIO.GdalIO(args[0])
    raster = dataset.load()

    #Default is none, unless user specified
    if options['dtype'] == None:
        dtype = gdal.GetDataTypeName(raster.GetRasterBand(1).DataType)
    else:
        dtype = options['dtype']

    #Create an output if the stretch is written to disk
    xsize, ysize, bands, projection, geotransform = dataset.info(raster)
    output = dataset.create_output("", options['output'], xsize, ysize, bands,
                                   projection, geotransform,
                                   gdal.GetDataTypeByName(dtype))

    #Segment the image to handle either RAM constraints or selective processing
    segments = Segment.segment_image(xsize, ysize, options['vint'],
                                     options['hint'])

    for b in xrange(bands):
        band = raster.GetRasterBand(b + 1)
        bandstats = Stats.get_band_stats(band)
        for key in bandstats.iterkeys():
            options[key] = bandstats[key]

        #Get the size of the segments to be manipulated
        piecenumber = 1
        for chunk in segments:

            print "Image segmented.  Processing segment %i of %i" % (
                piecenumber, len(segments))
            piecenumber += 1
            (xstart, ystart, intervalx, intervaly) = chunk

            array = band.ReadAsArray(xstart, ystart, intervalx,
                                     intervaly).astype(numpy.float32)

            if options['ndv_band'] != None:
                array = numpy.ma.masked_values(array,
                                               options['ndv_band'],
                                               copy=False)
            elif options['ndv'] != None:
                array = numpy.ma.masked_values(array,
                                               options['ndv'],
                                               copy=False)

            if 'stretch' in stretch.__name__:
                array = Stats.normalize(array, options['bandmin'],
                                        options['bandmax'], dtype)

            #If the user wants to calc stats per segment:
            if options['segment'] == True:
                stats = Stats.get_array_stats(array, stretch)
                for key in stats.iterkeys():
                    options[key] = stats[key]
            #Otherwise use the stats per band for each segment
            else:
                options['mean'] = options['bandmean']
                options['maximum'] = options['bandmax']
                options['minimum'] = options['bandmin']
                options['standard_deviation'] = options['bandstd']

            y, x = array.shape

            #Calculate the hist and cdf if we need it.  This way we do not calc it per core.
            if options['histequ_stretch'] == True:
                cdf, bins = Stats.gethist_cdf(array, options['num_bins'])
                options['cdf'] = cdf
                options['bins'] = bins

            #Fill the masked values with NaN to get to a shared array
            if options['ndv'] != None:
                array = array.filled(numpy.nan)

            #Create an ctypes array
            init(ArrayConvert.SharedMemArray(array))

            step = y // cores
            jobs = []
            if step != 0:
                for i in range(0, y, step):
                    p = multiprocessing.Process(target=stretch,
                                                args=(shared_arr,
                                                      slice(i, i + step)),
                                                kwargs=options)
                    jobs.append(p)

                for job in jobs:
                    job.start()
                    del job
                for job in jobs:
                    job.join()
                    del job

            #Return the array to the proper data range and write it out.  Scale if that is what the user wants
            if options['histequ_stretch'] or options['gamma_stretch'] == True:
                pass
            elif 'filter' in stretch.__name__:
                pass
            else:
                Stats.denorm(shared_arr.asarray(), dtype, kwargs=options)

            if options['scale'] != None:
                Stats.scale(shared_arr.asarray(), kwargs=options)

            #If there are NaNs in the array, replace them with the dataset nodata value
            Stats.setnodata(shared_arr, options['ndv'])

            #Write the output
            output.GetRasterBand(b + 1).WriteArray(shared_arr.asarray(),
                                                   xstart, ystart)

            #Manually cleanup to stop memory leaks.
            del array, jobs, shared_arr.data
            try:
                del stats
            except NameError:
                pass
            del globals()['shared_arr']
            gc.collect()

            if options['ndv'] != None:
                output.GetRasterBand(b + 1).SetNoDataValue(
                    float(options['ndv']))
            elif options['ndv_band'] != None:
                output.GetRasterBand(b + 1).SetNoDataValue(
                    float(options['ndv_band']))

    if options['visualize'] == True:
        Plot.show_hist(shared_arr.asarray())

    Timer.totaltime(starttime)

    #Close up
    dataset = None
    output = None
    gc.collect()
Example #21
def mask_14():

    src_ds = gdal.Open('data/byte.tif')

    if src_ds is None:
        gdaltest.post_reason('Failed to open test dataset.')
        return 'fail'

    drv = gdal.GetDriverByName('GTiff')
    gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK_TO_8BIT', 'FALSE')
    ds = drv.CreateCopy('tmp/byte_with_mask.tif', src_ds)
    gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK_TO_8BIT', 'TRUE')
    src_ds = None

    # The only flag value supported for internal mask is GMF_PER_DATASET
    with gdaltest.error_handler():
        with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
            ret = ds.CreateMaskBand(0)
    if ret == 0:
        gdaltest.post_reason('Error expected')
        return 'fail'

    with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
        ret = ds.CreateMaskBand(gdal.GMF_PER_DATASET)
    if ret != 0:
        gdaltest.post_reason('Creation failed')
        return 'fail'

    cs = ds.GetRasterBand(1).GetMaskBand().Checksum()
    if cs != 0:
        print(cs)
        gdaltest.post_reason('Got wrong checksum for the mask (1)')
        return 'fail'

    ds.GetRasterBand(1).GetMaskBand().Fill(1)

    cs = ds.GetRasterBand(1).GetMaskBand().Checksum()
    if cs != 400:
        print(cs)
        gdaltest.post_reason('Got wrong checksum for the mask (2)')
        return 'fail'

    # This TIFF dataset has already an internal mask band
    with gdaltest.error_handler():
        with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
            ret = ds.CreateMaskBand(gdal.GMF_PER_DATASET)
    if ret == 0:
        gdaltest.post_reason('Error expected')
        return 'fail'

    # This TIFF dataset has already an internal mask band
    with gdaltest.error_handler():
        with gdaltest.config_option('GDAL_TIFF_INTERNAL_MASK', 'YES'):
            ret = ds.GetRasterBand(1).CreateMaskBand(gdal.GMF_PER_DATASET)
    if ret == 0:
        gdaltest.post_reason('Error expected')
        return 'fail'

    ds = None

    try:
        os.stat('tmp/byte_with_mask.tif.msk')
        gdaltest.post_reason('tmp/byte_with_mask.tif.msk should not exist')
        return 'fail'
    except:
        pass

    gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK_TO_8BIT', 'FALSE')
    ds = gdal.Open('tmp/byte_with_mask.tif')

    if ds.GetRasterBand(1).GetMaskFlags() != gdal.GMF_PER_DATASET:
        gdaltest.post_reason('wrong mask flags')
        return 'fail'

    gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK_TO_8BIT', 'TRUE')

    cs = ds.GetRasterBand(1).GetMaskBand().Checksum()
    if cs != 400:
        print(cs)
        gdaltest.post_reason('Got wrong checksum for the mask (3)')
        return 'fail'

    # Test fix for #5884
    gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK', 'YES')
    old_val = gdal.GetCacheMax()
    gdal.SetCacheMax(0)
    out_ds = drv.CreateCopy('/vsimem/byte_with_mask.tif',
                            ds,
                            options=['COMPRESS=JPEG'])
    gdal.SetConfigOption('GDAL_TIFF_INTERNAL_MASK', None)
    gdal.SetCacheMax(old_val)
    if out_ds.GetRasterBand(1).Checksum() == 0:
        gdaltest.post_reason('failure')
        return 'fail'
    cs = ds.GetRasterBand(1).GetMaskBand().Checksum()
    if cs != 400:
        print(cs)
        gdaltest.post_reason('Got wrong checksum for the mask (4)')
        return 'fail'
    out_ds = None
    drv.Delete('/vsimem/byte_with_mask.tif')

    ds = None

    drv.Delete('tmp/byte_with_mask.tif')

    return 'success'
Example #22
"""Demo of how to use pandas to multiply one table by another."""
import argparse
import logging
import multiprocessing
import os
import sys

from osgeo import gdal
import pandas
import pygeoprocessing
import numpy
import taskgraph

gdal.SetCacheMax(2**30)

# treat this one column name as special for the y intercept
INTERCEPT_COLUMN_ID = 'intercept'
OPERATOR_FN = {
    '+': numpy.add,
    '*': numpy.multiply,
    '^': numpy.power,
}
N_CPUS = multiprocessing.cpu_count()

logging.basicConfig(
    level=logging.DEBUG,
    format=('%(asctime)s (%(relativeCreated)d) %(levelname)s %(name)s'
            ' [%(funcName)s:%(lineno)d] %(message)s'),
    stream=sys.stdout)

LOGGER = logging.getLogger(__name__)
Example #23
def _create_thumbnail(red_file,
                      green_file,
                      blue_file,
                      output_path,
                      x_constraint=None,
                      nodata=-999,
                      work_dir=None,
                      overwrite=True):
    """
    Create JPEG thumbnail image using individual R, G, B images.

    This method comes from the old ULA codebase.

    :param red_file: red band data file
    :param green_file: green band data file
    :param blue_file: blue band data file
    :param output_path: thumbnail file to write to.
    :param x_constraint: thumbnail width (if not full resolution)
    :param nodata: null/fill data value
    :param work_dir: temp/work directory to use.
    :param overwrite: overwrite existing thumbnail?

    Thumbnail height is adjusted automatically to match the aspect ratio
    of the input images.

    """
    nodata = int(nodata)

    # GDAL calls need absolute paths.
    thumbnail_path = pathlib.Path(output_path).absolute()

    if thumbnail_path.exists() and not overwrite:
        _LOG.warning('File already exists. Skipping creation of %s',
                     thumbnail_path)
        return None, None, None

    # thumbnail_image = os.path.abspath(thumbnail_image)

    out_directory = str(thumbnail_path.parent)
    work_dir = os.path.abspath(work_dir) if work_dir else tempfile.mkdtemp(
        prefix='.thumb-tmp', dir=out_directory)
    try:
        # working files
        file_to = os.path.join(work_dir, 'rgb.vrt')
        warp_to_file = os.path.join(work_dir, 'rgb-warped.vrt')
        outtif = os.path.join(work_dir, 'thumbnail.tif')

        # Build the RGB Virtual Raster at full resolution
        run_command([
            "gdalbuildvrt", "-overwrite", "-separate", file_to,
            str(red_file),
            str(green_file),
            str(blue_file)
        ], work_dir)
        assert os.path.exists(file_to), "VRT must exist"

        # Determine the pixel scaling to get the correct width thumbnail
        vrt = gdal.Open(file_to)
        intransform = vrt.GetGeoTransform()
        inpixelx = intransform[1]
        # inpixely = intransform[5]
        inrows = vrt.RasterYSize
        incols = vrt.RasterXSize

        # If a specific resolution is asked for.
        if x_constraint:
            outresx = inpixelx * incols / x_constraint
            _LOG.info('Input pixel res %r, output pixel res %r', inpixelx,
                      outresx)

            outrows = int(
                math.ceil((float(inrows) / float(incols)) * x_constraint))

            run_command([
                "gdalwarp", "--config", "GDAL_CACHEMAX",
                str(GDAL_CACHE_MAX_MB), "-of", "VRT", "-tr",
                str(outresx),
                str(outresx), "-r", "near", "-overwrite", file_to, warp_to_file
            ], work_dir)
        else:
            # Otherwise use a full resolution browse image.
            outrows = inrows
            x_constraint = incols
            warp_to_file = file_to
            outresx = inpixelx

        _LOG.debug('Current GDAL cache max %rMB. Setting to %rMB',
                   gdal.GetCacheMax() / 1024 / 1024, GDAL_CACHE_MAX_MB)
        gdal.SetCacheMax(GDAL_CACHE_MAX_MB * 1024 * 1024)

        # Open VRT file to array
        vrt = gdal.Open(warp_to_file)
        driver = gdal.GetDriverByName("GTiff")
        outdataset = driver.Create(outtif, x_constraint, outrows, 3,
                                   gdalconst.GDT_Byte)

        # Loop through bands and apply Scale and Offset
        for band_number in (1, 2, 3):
            band = vrt.GetRasterBand(band_number)

            scale, offset = _calculate_scale_offset(nodata, band)

            # Apply gain and offset
            outdataset.GetRasterBand(band_number).WriteArray(
                (numpy.ma.masked_less_equal(band.ReadAsArray(), nodata) *
                 scale) + offset)
            _LOG.debug('Scale %r, offset %r', scale, offset)

        # Must close datasets to flush to disk.
        # noinspection PyUnusedLocal
        outdataset = None
        # noinspection PyUnusedLocal
        vrt = None

        # GDAL Create doesn't support JPEG so we need to make a copy of the GeoTIFF
        run_command([
            "gdal_translate", "--config", "GDAL_CACHEMAX",
            str(GDAL_CACHE_MAX_MB), "-of", "JPEG", outtif,
            str(thumbnail_path)
        ], work_dir)

        _LOG.debug('Cleaning work files')
    finally:
        # Clean up work files
        if os.path.exists(work_dir):
            shutil.rmtree(work_dir)

    # Newer versions of GDAL create aux files due to the histogram. Clean them up.
    for f in (red_file, blue_file, green_file):
        f = pathlib.Path(f)
        aux_file = f.with_name(f.name + '.aux.xml')
        if aux_file.exists():
            _LOG.info('Cleaning aux: %s', aux_file)
            os.remove(str(aux_file.absolute()))

    return x_constraint, outrows, outresx
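A hedged usage sketch of the helper above; the paths, the 1024-pixel width constraint, and the nodata value are all illustrative:

width, height, res = _create_thumbnail(
    'scene_B4.tif', 'scene_B3.tif', 'scene_B2.tif',
    'thumbnail.jpg', x_constraint=1024, nodata=-999)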
Example #24
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
"""
This Python module contains bindings for the GDAL library
"""
# pylint: disable=too-many-arguments,R0914
from typing import Union, List, Tuple
from osgeo import gdal, gdalconst
from osgeo.gdal import Dataset
import numpy as np
import numexpr as ne
from pyrate.core import shared, ifgconstants as ifc
from pyrate.core.logger import pyratelogger as log

gdal.SetCacheMax(2**15)
GDAL_WARP_MEMORY_LIMIT = 2**10
LOW_FLOAT32 = np.finfo(np.float32).min * 1e-10
all_mlooked_types = [
    ifc.MLOOKED_COH_MASKED_IFG, ifc.MULTILOOKED, ifc.MULTILOOKED_COH,
    ifc.MLOOKED_DEM, ifc.MLOOKED_INC
]


def coherence_masking(input_gdal_dataset: Dataset, coherence_file_path: str,
                      coherence_thresh: float) -> None:
    """
    Perform coherence masking on raster in-place.

    Based on gdal_calc formula provided by Nahidul:
    gdal_calc.py -A 20151127-20151209_VV_8rlks_flat_eqa.cc.tif
Example #25
"""Demo of how to use pandas to multiply one table by another."""
import logging
import multiprocessing
import os
import re
import sys

from osgeo import gdal
import pandas
import pygeoprocessing
import numpy
import taskgraph

gdal.SetCacheMax(2**27)

# treat this one column name as special for the y intercept
INTERCEPT_COLUMN_ID = 'intercept'
OPERATOR_FN = {
    '+': numpy.add,
    '*': numpy.multiply,
    '^': numpy.power,
}
N_CPUS = multiprocessing.cpu_count()

logging.basicConfig(
    level=logging.DEBUG,
    format=('%(asctime)s (%(relativeCreated)d) %(levelname)s %(name)s'
            ' [%(funcName)s:%(lineno)d] %(message)s'),
    stream=sys.stdout)

LOGGER = logging.getLogger(__name__)
Example #26
    if isinstance(array_out, numpy.ma.core.MaskedArray):
        if args.newNDV == None:
            array_out = numpy.ma.filled(array_out, NDV)
        else:
            array_out = numpy.ma.filled(array_out, float(args.newNDV[0]))

    #Create the output
    if args.dtype == None:
        dtype = gdal.GetDataTypeName(raster.GetRasterBand(1).DataType)
    else:
        dtype = args.dtype
    output = dataset.create_output(xsize, ysize, name, 1, parseddtype(dtype))
    output.GetRasterBand(1).WriteArray(array_out)

    if NDV or args.newNDV:
        if args.newNDV == None:
            output.GetRasterBand(1).SetNoDataValue(NDV)
        else:
            output.GetRasterBand(1).SetNoDataValue(float(args.newNDV[0]))


if __name__ == '__main__':
    gdal.SetCacheMax(805306368)  # 768 MB
    gdal.UseExceptions()  # Allows try/except around gdal.Open to fail in a pythonic way.
    #gdal.SetConfigOption('CPL_DEBUG', 'ON')

    main()