Example #1
0
def basic_test_8():
    """Check gdal.VersionInfo('RELEASE_DATE') and the LICENSE output of
    running this script as an external process; return 'success' or 'fail'."""

    # RELEASE_DATE is expected to be a YYYYMMDD string (8 characters).
    release_date = gdal.VersionInfo('RELEASE_DATE')
    if len(release_date) != 8:
        gdaltest.post_reason('fail')
        print(release_date)
        return 'fail'

    python_exe = sys.executable
    if sys.platform == 'win32':
        python_exe = python_exe.replace('\\', '/')

    # Without a LICENSE.TXT override, the built-in license text is expected.
    out = gdaltest.runexternal(python_exe + ' basic_test.py LICENSE 0')
    if not out.startswith('GDAL/OGR is released under the MIT/X license') \
            and 'GDAL/OGR Licensing' not in out:
        gdaltest.post_reason('fail')
        print(out)
        return 'fail'

    # A LICENSE.TXT file should override the built-in license text.
    with open('tmp/LICENSE.TXT', 'wt') as license_file:
        license_file.write('fake_license')
    out = gdaltest.runexternal(python_exe + ' basic_test.py LICENSE 1')
    os.unlink('tmp/LICENSE.TXT')
    if not out.startswith('fake_license') \
            and 'GDAL/OGR Licensing' not in out:
        gdaltest.post_reason('fail')
        print(out)
        return 'fail'

    return 'success'
Example #2
0
def build_args(input_files, gfs_file_path, pg_source):
    """Build per-file ogr2ogr argument lists for loading GML into PostgreSQL.

    :param input_files: iterable of input file paths (.gz files are read
        through /vsigzip/)
    :param gfs_file_path: path to the GFS template describing the GML schema
    :param pg_source: PostgreSQL datasource string (e.g. "PG:dbname=...")
    :return: list of argument lists, one per input file; the first file
        overwrites the target tables, subsequent files append
    """
    all_args = []
    for i, input_file in enumerate(input_files):
        if input_file.lower().endswith('.gz'):
            input_file = '/vsigzip/' + input_file
        args = ['-f', 'PostgreSQL',
                '--config', 'PG_USE_COPY', 'YES',
                '--config', 'GML_GFS_TEMPLATE', gfs_file_path,
                pg_source, input_file]
        if str(gdal.VersionInfo()).startswith('2'):
            # -lyr_transaction added to negate
            # ERROR 1: ERROR: current transaction is aborted, commands ignored until end of transaction block
            #
            # ERROR 1: no COPY in progress
            args.insert(0, '-lyr_transaction')
        if i == 0:
            # First file: recreate the target tables.
            args.insert(0, '-overwrite')
            args.extend(['-lco', 'OVERWRITE=YES',
                         '-lco', 'SPATIAL_INDEX=OFF',
                         '-lco', 'PRECISION=NO'])
        else:
            args.insert(0, '-append')

        all_args.append(args)
    return all_args
    def SetzeVoreinstellungen(self):
        """Restore persisted dialog settings (window size and output
        checkboxes) from QSettings and show the GDAL release date."""

        s = QSettings("EZUSoft", fncProgKennung())

        try:
            SaveWidth = int(s.value("SaveWidth", "0"))
            SaveHeight = int(s.value("SaveHeight", "0"))
        except (TypeError, ValueError):
            # Stored values are unusable -> wipe the stale settings and
            # fall back to defaults (was a bare "except:").
            QSettings("EZUSoft", "ADXF2Shape").clear()
            SaveWidth = 0
            SaveHeight = 0

        # Only restore the stored size if it exceeds the dialog's minimum.
        if SaveWidth > self.minimumWidth() and SaveHeight > self.minimumHeight():
            self.resize(SaveWidth, SaveHeight)

        # Settings store German yes/no strings: "Ja" = yes, "Nein" = no.
        bGenSHP = s.value("bGenSHP", "Nein") == "Ja"
        self.chkSHP.setChecked(bGenSHP)
        bGenDXF = s.value("bGenDXF", "Nein") == "Ja"
        self.chkDXF.setChecked(bGenDXF)
        # Flur merging only makes sense when DXF output is generated.
        self.chkMergeFlur.setEnabled(bGenDXF)

        self.chkSave_clicked()

        try:
            self.lbGDAL.setText(gdal.VersionInfo("GDAL_RELEASE_DATE"))
        except Exception:
            # GDAL bindings unavailable -> show a placeholder.
            self.lbGDAL.setText("-")
def build_args(input_files, gfs_file_path, pg_source, ignore_fid):
    """Build per-file ogr2ogr argument lists for loading GML into PostgreSQL.

    :param input_files: iterable of input file paths (.gz files are read
        through /vsigzip/)
    :param gfs_file_path: path to the GFS template describing the GML schema
    :param pg_source: PostgreSQL datasource string (e.g. "PG:dbname=...")
    :param ignore_fid: when True, do not expose the GML fid attribute
    :return: list of argument lists, one per input file; the first file
        overwrites the target tables, subsequent files append
    """
    all_args = []
    for i, input_file in enumerate(input_files):
        if input_file.lower().endswith('.gz'):
            input_file = '/vsigzip/' + input_file
        args = ['-f', 'PostgreSQL',
                '--config', 'PG_USE_COPY', 'YES',
                '--config', 'GML_GFS_TEMPLATE', gfs_file_path,
                pg_source, input_file]
        if str(gdal.VersionInfo()).startswith('2'):
            # -lyr_transaction added to negate
            # ERROR 1: ERROR: current transaction is aborted, commands ignored until end of transaction block
            #
            # ERROR 1: no COPY in progress
            args.insert(0, '-lyr_transaction')
        if i == 0:
            # First file: recreate the target tables.
            args.insert(0, '-overwrite')
            args.extend(['-lco', 'OVERWRITE=YES',
                         '-lco', 'SPATIAL_INDEX=OFF',
                         '-lco', 'PRECISION=NO'])
        else:
            args.insert(0, '-append')

        if ignore_fid:
            # fixes ERROR 1: COPY statement failed. ERROR: null value in column "fid" violates not-null
            # https://github.com/lutraconsulting/ostranslator-ii/issues/18
            args.extend(['--config', 'GML_EXPOSE_FID', 'NO'])

        all_args.append(args)
    return all_args
def pytest_report_header(config):
    """Report QGIS, GDAL, Python and Qt versions in the pytest header."""
    lines = [
        'QGIS : {}'.format(Qgis.QGIS_VERSION_INT),
        'Python GDAL : {}'.format(gdal.VersionInfo('VERSION_NUM')),
        'Python : {}'.format(sys.version),
        # 'Python path : {}'.format(sys.path),
        'QT : {}'.format(Qt.QT_VERSION_STR),
    ]
    return '\n'.join(lines)
Example #6
0
def compute_distance(gdb_dir, shapefile):
    '''For every point in `shapefile' find the distance to the nearest
  feature (in our case a road) and save it in a new field.  The field
  has the same name as the layer in the feature file (perhaps truncated
  due to length).

  '''

    # Register all GDAL drivers so any supported format can be opened.
    gdal.AllRegister()

    # Use OGR specific exceptions
    ogr.UseExceptions()

    # Open DB directory
    db = open_db(gdb_dir)

    # Try to open the source shapefile; gdal.OF_VECTOR only exists in
    # GDAL >= 2.0 (VERSION_NUM > 1990000 covers the 2.0 series).
    if int(gdal.VersionInfo()) > 1990000:
        shape = ogr.Open(shapefile.name, gdal.OF_VECTOR)
    else:
        shape = ogr.Open(shapefile.name, 1)
    if shape is None:
        # BUG FIX: the original referenced the undefined name `in_shapefile`,
        # which raised NameError instead of reporting the failure.
        print('Unable to open shapefile', shapefile.name)
        sys.exit(1)

    layer = shape.GetLayer(0)
    # add new fields to the shapefile
    create_fields(layer, db)
    process(layer, db, False)

    # clean close
    del db
Example #7
0
    def test_project(self):
        """
        This test fails for gdal versions below 2.0. The warping is correct, but
        the void space around the original image is filled with fill_value in versions
        >= 2.0, else with 0. The tested function behaves like the more recent versions
        of GDAL
        """
        # Target EPSG codes to reproject each grid into.
        codes = (2062, 3857)

        # GDAL 1.x fills void space differently (see docstring) -> skip.
        if gdal.VersionInfo().startswith("1"):
            warnings.warn(
                "Skipping incompatible warp test on GDAL versions < 2",
                RuntimeWarning)
            return

        for fname, base in zip(self.fnames, self.grids):
            # break
            if base.proj:
                for epsg in codes:
                    # gdalwarp flips the warped image
                    proj = ga.project(grid=base[::-1],
                                      proj={"init": "epsg:{:}".format(epsg)},
                                      max_error=0)
                    # proj = base[::-1].warp({"init":"epsg:{:}".format(epsg)}, 0)
                    # Reference result: run the gdalwarp CLI into a temp file
                    # and compare data, mask and bounding box exactly.
                    with tempfile.NamedTemporaryFile(suffix=".tif") as tf:
                        subprocess.check_output(
                            "gdalwarp -r 'near' -et 0 -t_srs 'EPSG:{:}' {:} {:}"
                            .format(epsg, fname, tf.name),
                            shell=True)
                        compare = ga.fromfile(tf.name)
                        self.assertTrue(np.all(proj.data == compare.data))
                        self.assertTrue(np.all(proj.mask == compare.mask))
                        self.assertDictEqual(proj.bbox, compare.bbox)
            else:
                # NOTE(review): assertRaises without a callable or context
                # manager is a no-op — nothing is executed or asserted here.
                # Presumably projecting a grid without a projection should
                # raise AttributeError; confirm and wrap the intended call.
                self.assertRaises(AttributeError)
Example #8
0
    def __init__(self, projection, geotransform):
        """
        Parameters
        ----------
        projection : str
            Projection WKT, as returned by gdal.Dataset.GetProjection()
        geotransform : tuple
            Geo-transform, as returned by gdal.Dataset.GetGeoTransform();
            order: xoff, xdx, xdy, yoff, ydx, ydy (in meters)
        """
        # Build the source spatial reference from the WKT.
        source_ref = osr.SpatialReference()
        if int(gdal.VersionInfo()[0]) >= 3:
            # GDAL >= 3 changed the default axis order; keep the
            # traditional (x, y) GIS order for backwards compatibility.
            # TODO use new GIS order as standard
            source_ref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
        source_ref.ImportFromWkt(projection)

        # Target reference: the geographic CS underlying the source.
        target_ref = source_ref.CloneGeogCS()

        self.ct = osr.CoordinateTransformation(source_ref, target_ref)
        self.ct_rev = osr.CoordinateTransformation(target_ref, source_ref)
        self.gt = geotransform
        self.prj = projection
Example #9
0
    def __init__(self, fname):
        """
        :param fname: a GTif file
        """
        try:
            self.raster = gdal.Open(fname)
            spatial_ref = osr.SpatialReference(self.raster.GetProjection())

            # retrieve the WGS84 spatial reference
            wgs_ref = osr.SpatialReference()
            wgs_ref.ImportFromEPSG(4326)

            # Transform WGS84 lon/lat coordinates into the raster's CRS.
            self.coord_transf = osr.CoordinateTransformation(
                wgs_ref, spatial_ref)

            # Geo-transform and its inverse (pixel <-> georeferenced coords).
            self.raster_gt = self.raster.GetGeoTransform()
            self.raster_inv_gt = gdal.InvGeoTransform(self.raster_gt)

            # Handle error from Inverse function:
            # GDAL 1.x returns (success_flag, gt); GDAL 2+ returns gt or None.
            if gdal.VersionInfo()[0] == '1':
                if self.raster_inv_gt[0] == 1:
                    self.raster_inv_gt = self.raster_inv_gt[1]
                else:
                    raise RuntimeError('Inverse geotransform failed')
            elif self.raster_inv_gt is None:
                raise RuntimeError('Inverse geotransform failed')

        except RuntimeError as err:
            # Previously this swallowed the error silently, leaving the
            # object half-initialized; keep the best-effort behavior but
            # at least surface the problem.
            import warnings
            warnings.warn('Failed to initialize raster %r: %s' % (fname, err),
                          RuntimeWarning)
Example #10
0
 def about_dialog_system_versions(self):
     """Build an HTML fragment listing the Python version and the versions
     of optional dependencies for the About dialog. Each dependency is
     probed independently; missing or broken ones are simply omitted."""
     import sys
     major, minor, micro = sys.version_info[0:3]
     desc = f"Python {major}.{minor}.{micro}:\n<ul>"
     import wx
     desc += "<li>wxPython %s\n" % wx.version()
     try:
         import sawx
         desc += "<li>sawx %s\n" % sawx.__version__
     except Exception:
         pass
     try:
         import numpy
         desc += "<li>numpy %s\n" % numpy.version.version
     except Exception:
         pass
     try:
         import OpenGL
         import OpenGL.GL as gl
         desc += "<li>PyOpenGL %s\n" % OpenGL.__version__
         # BUG FIX: glGetString returns bytes in Python 3, so the original
         # .encode('utf-8') raised AttributeError and the whole OpenGL
         # section was silently dropped by the except below; decode instead.
         desc += "<li>OpenGL %s\n" % gl.glGetString(gl.GL_VERSION).decode('utf-8')
         desc += "<li>OpenGL Vendor: %s\n" % gl.glGetString(gl.GL_VENDOR).decode('utf-8')
         desc += "<li>OpenGL Renderer: %s\n" % gl.glGetString(gl.GL_RENDERER).decode('utf-8')
         desc += "<li>GLSL primary: %s\n" % gl.glGetString(gl.GL_SHADING_LANGUAGE_VERSION).decode('utf-8')
         num_glsl = gl.glGetInteger(gl.GL_NUM_SHADING_LANGUAGE_VERSIONS)
         desc += "<li>GLSL supported: "
         for i in range(num_glsl):
             # Same bytes-vs-str fix as above; decode so the += below works.
             v = gl.glGetStringi(gl.GL_SHADING_LANGUAGE_VERSION, i).decode('utf-8')
             desc += v + ", "
         desc += "\n"
     except Exception:
         pass
     try:
         import gdal
         desc += "<li>GDAL %s\n" % gdal.VersionInfo()
     except Exception:
         pass
     try:
         import pyproj
         desc += "<li>PyProj %s\n" % pyproj.__version__
     except Exception:
         pass
     try:
         import netCDF4
         desc += "<li>netCDF4 %s\n" % netCDF4.__version__
     except Exception:
         pass
     try:
         import shapely
         desc += "<li>Shapely %s\n" % shapely.__version__
     except Exception:
         pass
     try:
         import omnivore_framework
         desc += "<li>Omnivore %s\n" % omnivore_framework.__version__
     except Exception:
         pass
     desc += "</ul>"
     return desc
Example #11
0
    def SetzeVoreinstellungen(self):
        """Restore persisted dialog settings (window size, output options,
        color usage, character set) from QSettings and show the GDAL
        release date."""
        self.ManageTransformSettings()

        s = QSettings("EZUSoft", fncProgKennung())

        SaveWidth = int(s.value("SaveWidth", "0"))
        SaveHeight = int(s.value("SaveHeight", "0"))
        # Only restore the stored size if it exceeds the dialog's minimum.
        if SaveWidth > self.minimumWidth() and SaveHeight > self.minimumHeight(
        ):
            self.resize(SaveWidth, SaveHeight)

        # Settings store German yes/no strings: "Ja" = yes, "Nein" = no.
        bGenCol = True if s.value("bGenCol", "Nein") == "Ja" else False
        self.chkCol.setChecked(bGenCol)

        bGenLay = True if s.value("bGenLay", "Ja") == "Ja" else False
        self.chkLay.setChecked(bGenLay)

        bFormatText = True if s.value("bFormatText", "Ja") == "Ja" else False
        self.chkUseTextFormat.setChecked(bFormatText)

        bUseColor4Point = True if s.value("bUseColor4Point",
                                          "Ja") == "Ja" else False
        self.chkUseColor4Point.setChecked(bUseColor4Point)
        bUseColor4Line = True if s.value("bUseColor4Line",
                                         "Ja") == "Ja" else False
        self.chkUseColor4Line.setChecked(bUseColor4Line)
        bUseColor4Poly = True if s.value("bUseColor4Poly",
                                         "Nein") == "Ja" else False
        self.chkUseColor4Poly.setChecked(bUseColor4Poly)

        bGenSHP = True if s.value("bGenSHP", "Nein") == "Ja" else False
        self.chkSHP.setChecked(bGenSHP)
        self.chkSHP_clicked()

        bGen3D = True if s.value("bGen3D", "Nein") == "Ja" else False
        self.chk3D.setChecked(bGen3D)
        self.chk3D_clicked()

        # NOTE(review): GPKG support is hard-disabled via this constant
        # False; flip it to True to re-enable reading the stored setting
        # and showing the checkbox.
        if False:
            bGenGPKG = True if s.value("bGenGPKG", "Nein") == "Ja" else False
            self.chkGPKG.setVisible(True)

        else:
            bGenGPKG = False
            self.chkGPKG.setVisible(False)

        self.chkGPKG.setChecked(bGenGPKG)
        self.chkGPKG_clicked()

        iCodePage = s.value("iCodePage", 0)
        # Factor is reset to a fixed default rather than restored —
        # presumably intentional; TODO confirm.
        self.txtFaktor.setText('1.3')

        self.cbCharSet.addItems(self.charsetList)
        self.cbCharSet.setCurrentIndex(int(iCodePage))
        try:
            self.lbGDAL.setText(gdal.VersionInfo("GDAL_RELEASE_DATE"))
        except:
            # GDAL bindings unavailable -> show a placeholder.
            self.lbGDAL.setText("-")
Example #12
0
def unit_gdal_version():
    """Assert that the Python GDAL bindings are installed and at least 2.1.

    Raises:
        AssertionError: if GDAL is not importable or older than 2.1.
    """
    try:
        import gdal
    except ImportError:
        raise AssertionError('GDAL does not seem to be installed.')
    version_num = int(gdal.VersionInfo('VERSION_NUM'))
    if version_num < 2010000:
        # BUG FIX: the original passed a tuple of strings to AssertionError,
        # so the message rendered as a tuple repr; join into one string.
        raise AssertionError(
            'The version of GDAL should be at least 2.1.\n'
            'Recommended fix for Ubuntu 16.04:\n'
            'add-apt-repository -y ppa:ubuntugis/ppa\n'
            'apt-get update\n'
            'apt-get install gdal-bin libgdal-dev\n')
Example #13
0
def fix():
    """Patch Django's libgdal.py on Windows so it finds the installed GDAL.

    Adds the osgeo directory to PATH and inserts the detected GDAL DLL
    names (with and without the patch digit) into the lib_names list of
    django/contrib/gis/gdal/libgdal.py. Idempotent: a second run detects
    the already-patched file and does nothing. No-op on non-Windows.
    """
    import os
    if os.name != "nt":
        print("\n", "*" * 80, "\n", "You are not running MS Windows so this package is not relevant to you", "\n",
              "*" * 80)
        return

    try:
        # Requirements: locate site-packages, and have django + gdal importable.
        from distutils.sysconfig import get_python_lib
        from django.contrib import gis
        import gdal as gd

        os.environ["PATH"] += os.pathsep + get_python_lib() + "\\osgeo"
        target = os.path.join(get_python_lib(), "django\\contrib\\gis\\gdal\\libgdal.py")

        # Derive e.g. "gdal304" from the numeric version (e.g. 3040100).
        ver = gd.VersionInfo()
        ver_major = str(int(ver) // 10 ** 6)[-1]
        ver_minor = str(int(ver) // 10 ** 4)[-1]
        ver_patch = str(int(ver) // 10 ** 2)[-1]
        dll_name = f"gdal{ver_major}{ver_minor}{ver_patch}"

        # fix to create version with zero patch, i.e. GDAL v 3.0.1 also now looks for gdal300.dll
        dll_name_nopatch = f"gdal{ver_major}{ver_minor}0"

        # Read libgdal.py; bail out if our entry is already present so the
        # file is only ever patched ONCE.
        marker = "lib_names = ['" + dll_name + "', '" + dll_name_nopatch + "', "
        with open(target, "r") as handle:
            content = handle.read()
            if marker in content:
                return
        if not content:
            raise AssertionError("Nothing read!")

        # Find the list of DLLs and prepend our GDAL version entries.
        if "lib_names = [" in content:
            patched = content.replace("lib_names = [", marker)
            print(patched)

            # Replace libgdal.py with the updated version.
            with open(target, "w") as handle:
                handle.write(patched)

    except Exception as e:
        message = f"Something went wrong!\n {e}\n" + \
                  f" If you can't find a module such as django or gdal, make sure that this is installed."
        print("\n", "*" * 90, "\n", message, "\n", "*" * 90)
Example #14
0
    def get_gdal_version(self):
        """
        Check the version of gdal imported to ensure it meets the requirements of this class
        """

        version = gdal.VersionInfo()
        major = int(version[0])
        minor = int(version[1:3])
        bug = int(version[3:5])
        if not (major == 3 and minor >= 1):
            msg = 'The version of GDAL must be >= 3.1.  Version found: {}.{}.{}'.format(
                major, minor, bug)
            self.logger.error(msg)


#            raise ValueError(msg)
        self.gdal_version = version
def check_gdal_version():
    """
    Check the version of gdal imported to ensure it meets the requirements
    of this script (GDAL >= 3.1).

    Raises
    ------
    ValueError
        If the detected GDAL version is older than 3.1.

    Returns
    -------
    None.

    """
    version = gdal.VersionInfo()
    # VERSION_NUM layout "Mmmbbxx": char 0 = major, 1-2 = minor.
    major = int(version[0])
    minor = int(version[1:3])
    # BUG FIX: the original `major == 3 and minor >= 1` rejected any future
    # major (e.g. GDAL 4.0); compare as a (major, minor) tuple instead.
    if (major, minor) < (3, 1):
        msg = f'The version of GDAL must be >= 3.1.  Version found: {version}'
        LOGGER.error(msg)
        raise ValueError(msg)
Example #16
0
def main():
    """Convert every file listed in args.input_fn in parallel (do_work),
    writing a per-file result log to args.log_fn. Requires GDAL >= 3.1."""

    assert os.path.exists(args.input_fn)
    # GDAL >= 3.1 is required for COG output. BUG FIX: compare the full
    # VERSION_NUM integer — the old digit-slice check
    # (version[0:2] >= 30 and version[2:4] >= 10) misclassified versions
    # such as 4.0.
    version = gdal.VersionInfo()
    assert int(version) >= 3010000, "You must have GDAL version >= 3.1 to write COGS"

    #-----------------------------------
    with open(args.input_fn, "r") as f:
        fns = f.read().strip().split("\n")
    print("Found %d files" % (len(fns)))

    assert args.suffix != "", "The suffix can't be an empty string because you might lose data."

    p = Pool(args.num_processes)
    results = p.map(do_work, fns)

    with open(args.log_fn, "w") as f:
        f.write("input_fn,output_fn,result\n")
        # Each result is an (output_fn, status) pair for the matching input.
        for fn, (out_fn, status) in zip(fns, results):
            f.write("%s,%s,%d\n" % (fn, out_fn, status))
Example #17
0
def ogr_geom_build_from_edges_4():
    """Exercise ogr.BuildPolygonFromEdges with two rings (outer ring plus a
    hole), including empty linestrings in the input collection. Returns
    'success', 'fail' or 'skip'."""

    # BuildPolygonFromEdges crashed on GDAL < 1.9.
    if int(gdal.VersionInfo('VERSION_NUM')) < 1900:
        gdaltest.post_reason('would crash')
        return 'skip'

    if gdaltest.have_geos == 0:
        return 'skip'

    link_coll = ogr.Geometry(type=ogr.wkbGeometryCollection)

    wkt_array = [
        'LINESTRING EMPTY', 'LINESTRING (1 1,1 2)', 'LINESTRING EMPTY',
        'LINESTRING (1 2,2 2)', 'LINESTRING (2 2,2 1)', 'LINESTRING (2 1,1 1)',
        'LINESTRING (0 0,0 10)', 'LINESTRING (0 10,10 10)',
        'LINESTRING (10 10,10 0)', 'LINESTRING (10 0,0 0)'
    ]

    for wkt in wkt_array:
        geom = ogr.CreateGeometryFromWkt(wkt)
        #print "geom is",geom
        # AddGeometry clones, so the source geometry can be destroyed.
        link_coll.AddGeometry(geom)
        geom.Destroy()

    try:
        poly = ogr.BuildPolygonFromEdges(link_coll)
        if poly is None:
            return 'fail'
        wkt = poly.ExportToWkt()
        if wkt != 'POLYGON ((0 0,0 10,10 10,10 0,0 0),(1 1,1 2,2 2,2 1,1 1))':
            print(wkt)
            return 'fail'
        poly.Destroy()
    except Exception:
        # Was a bare "except:": keep catching OGR failures but let
        # KeyboardInterrupt/SystemExit propagate.
        return 'fail'

    return 'success'
Example #18
0
def cog_validate(ds, check_tiled=True):
    """Check if a file is a (Geo)TIFF with cloud optimized compatible structure.

    Args:
      ds: GDAL Dataset for the file to inspect, or a filename to open.
      check_tiled: Set to False to ignore missing tiling.

    Returns:
      A tuple, whose first element is an array of error messages
      (empty if there is no error), and the second element, a dictionary
      with the structure of the GeoTIFF file.

    Raises:
      ValidateCloudOptimizedGeoTIFFException: Unable to open the file or the
        file is not a Tiff.
    """

    if int(gdal.VersionInfo('VERSION_NUM')) < 2020000:
        raise ValidateCloudOptimizedGeoTIFFException(
            'GDAL 2.2 or above required')

    # Accept either an already-open Dataset or a filename (str/unicode).
    unicode_type = type(''.encode('utf-8').decode('utf-8'))
    if isinstance(ds, str) or isinstance(ds, unicode_type):
        gdal.PushErrorHandler()
        ds = gdal.Open(ds)
        gdal.PopErrorHandler()
        if ds is None:
            raise ValidateCloudOptimizedGeoTIFFException(
                'Invalid file : %s' % gdal.GetLastErrorMsg())
        if ds.GetDriver().ShortName != 'GTiff':
            raise ValidateCloudOptimizedGeoTIFFException(
                'The file is not a GeoTIFF')

    details = {}
    errors = []
    filename = ds.GetDescription()
    main_band = ds.GetRasterBand(1)
    ovr_count = main_band.GetOverviewCount()
    filelist = ds.GetFileList()
    if filelist is not None and filename + '.ovr' in filelist:
        errors += [
            'Overviews found in external .ovr file. They should be internal']

    if main_band.XSize >= 512 or main_band.YSize >= 512:
        if check_tiled:
            block_size = main_band.GetBlockSize()
            # A block as wide as the image indicates strip organization.
            if block_size[0] == main_band.XSize and block_size[0] > 1024:
                errors += [
                    'The file is greater than 512xH or Wx512, but is not tiled']

        if ovr_count == 0:
            errors += [
                'The file is greater than 512xH or Wx512, but has no overviews']

    # The main IFD must come first: byte 8 (ClassicTIFF) or 16 (BigTIFF).
    ifd_offset = int(main_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
    ifd_offsets = [ifd_offset]
    if ifd_offset not in (8, 16):
        errors += [
            'The offset of the main IFD should be 8 for ClassicTIFF '
            'or 16 for BigTIFF. It is %d instead' % ifd_offsets[0]]
    details['ifd_offsets'] = {}
    details['ifd_offsets']['main'] = ifd_offset

    for i in range(ovr_count):
        # Check that overviews are by descending sizes
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        if i == 0:
            if (ovr_band.XSize > main_band.XSize or
                    ovr_band.YSize > main_band.YSize):
                errors += [
                    'First overview has larger dimension than main band']
        else:
            prev_ovr_band = ds.GetRasterBand(1).GetOverview(i - 1)
            if (ovr_band.XSize > prev_ovr_band.XSize or
                    ovr_band.YSize > prev_ovr_band.YSize):
                errors += [
                    'Overview of index %d has larger dimension than '
                    'overview of index %d' % (i, i - 1)]

        if check_tiled:
            block_size = ovr_band.GetBlockSize()
            if block_size[0] == ovr_band.XSize and block_size[0] > 1024:
                errors += [
                    'Overview of index %d is not tiled' % i]

        # Check that the IFD of descending overviews are sorted by increasing
        # offsets
        ifd_offset = int(ovr_band.GetMetadataItem('IFD_OFFSET', 'TIFF'))
        ifd_offsets.append(ifd_offset)
        details['ifd_offsets']['overview_%d' % i] = ifd_offset
        if ifd_offsets[-1] < ifd_offsets[-2]:
            if i == 0:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of the main '
                    'image, which is at byte %d' %
                    (i, ifd_offsets[-1], ifd_offsets[-2])]
            else:
                errors += [
                    'The offset of the IFD for overview of index %d is %d, '
                    'whereas it should be greater than the one of index %d, '
                    'which is at byte %d' %
                    (i, ifd_offsets[-1], i - 1, ifd_offsets[-2])]

    # Check that the imagery starts by the smallest overview and ends with
    # the main resolution dataset
    block_offset = main_band.GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF')
    if not block_offset:
        errors += ['Missing BLOCK_OFFSET_0_0']
    data_offset = int(block_offset) if block_offset else None
    data_offsets = [data_offset]
    details['data_offsets'] = {}
    details['data_offsets']['main'] = data_offset
    for i in range(ovr_count):
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        data_offset = int(ovr_band.GetMetadataItem('BLOCK_OFFSET_0_0', 'TIFF'))
        data_offsets.append(data_offset)
        details['data_offsets']['overview_%d' % i] = data_offset

    if data_offsets[-1] < ifd_offsets[-1]:
        if ovr_count > 0:
            errors += [
                'The offset of the first block of the smallest overview '
                'should be after its IFD']
        else:
            errors += [
                'The offset of the first block of the image should '
                'be after its IFD']
    for i in range(len(data_offsets) - 2, 0, -1):
        if data_offsets[i] < data_offsets[i + 1]:
            errors += [
                'The offset of the first block of overview of index %d should '
                'be after the one of the overview of index %d' %
                (i - 1, i)]
    if len(data_offsets) >= 2 and data_offsets[0] < data_offsets[1]:
        errors += [
            # BUG FIX: the adjacent literals previously concatenated to
            # "...imageshould" (missing trailing space on the first one).
            'The offset of the first block of the main resolution image '
            'should be after the one of the overview of index %d' %
            (ovr_count - 1)]

    return errors, details
Example #19
0
def misc_12():
    """Stress-test CreateCopy to a non-existing path for every driver that
    advertises creation support, to detect crashes and (when
    DEBUG_VSIMALLOC_STATS is set) memory leaks. Returns 'success' or
    'skip'."""

    # CreateCopy to an invalid path crashed on GDAL < 1.9.
    if int(gdal.VersionInfo('VERSION_NUM')) < 1900:
        gdaltest.post_reason('would crash')
        return 'skip'

    import test_cli_utilities
    gdal_translate_path = test_cli_utilities.get_gdal_translate_path()

    for i in range(gdal.GetDriverCount()):
        drv = gdal.GetDriver(i)
        #if drv.ShortName == 'ECW' or drv.ShortName == 'JP2ECW':
        #    continue
        md = drv.GetMetadata()
        if 'DCAP_CREATECOPY' in md or 'DCAP_CREATE' in md:

            # Some drivers require a specific extension or file naming.
            ext = ''
            if drv.ShortName == 'GTX':
                ext = '.gtx'
            elif drv.ShortName == 'RST':
                ext = '.rst'
            elif drv.ShortName == 'SAGA':
                ext = '.sdat'
            elif drv.ShortName == 'ECW':
                ext = '.ecw'
            elif drv.ShortName == 'KMLSUPEROVERLAY':
                ext = '.kmz'
            elif drv.ShortName == 'ADRG':
                ext = '/ABCDEF01.GEN'
            elif drv.ShortName == 'SRTMHGT':
                ext = '/N48E002.HGT'

            # ... or a specific band count ...
            nbands = 1
            if drv.ShortName == 'WEBP' or drv.ShortName == 'ADRG':
                nbands = 3

            # ... or data type ...
            datatype = gdal.GDT_Byte
            if drv.ShortName == 'BT' or drv.ShortName == 'BLX':
                datatype = gdal.GDT_Int16
            elif drv.ShortName == 'GTX' or drv.ShortName == 'NTv2' or drv.ShortName == 'Leveller':
                datatype = gdal.GDT_Float32

            # ... or raster size.
            size = 1201
            if drv.ShortName == 'BLX':
                size = 128

            # In-memory georeferenced source dataset to copy from.
            src_ds = gdal.GetDriverByName('GTiff').Create(
                '/vsimem/misc_12_src.tif', size, size, nbands, datatype)
            set_gt = (2, 1.0 / size, 0, 49, 0, -1.0 / size)
            src_ds.SetGeoTransform(set_gt)
            src_ds.SetProjection(
                'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["degree",0.01745329251994328]]'
            )

            # Test to detect crashes
            gdal.PushErrorHandler('CPLQuietErrorHandler')
            ds = drv.CreateCopy('/nonexistingpath/nonexistingfile' + ext,
                                src_ds)
            gdal.PopErrorHandler()
            ds = None

            if gdal_translate_path is not None:
                # Test to detect memleaks
                ds = gdal.GetDriverByName('VRT').CreateCopy(
                    'tmp/misc_12.vrt', src_ds)
                (out, err) = gdaltest.runexternal_out_and_err(
                    gdal_translate_path + ' -of ' + drv.ShortName +
                    ' tmp/misc_12.vrt /nonexistingpath/nonexistingfile' + ext,
                    check_memleak=False)
                ds = None
                gdal.Unlink('tmp/misc_12.vrt')

                # If DEBUG_VSIMALLOC_STATS is defined, this is an easy way
                # to catch some memory leaks
                if out.find('VSIMalloc + VSICalloc - VSIFree') != -1 and \
                out.find('VSIMalloc + VSICalloc - VSIFree : 0') == -1:
                    # Rasterlite is known to keep exactly one allocation alive.
                    if drv.ShortName == 'Rasterlite' and out.find(
                            'VSIMalloc + VSICalloc - VSIFree : 1') != -1:
                        pass
                    else:
                        print('memleak detected for driver %s' % drv.ShortName)

            src_ds = None

            gdal.Unlink('/vsimem/misc_12_src.tif')

    return 'success'
Example #20
0
 def versionNum(self):
     """Return the running GDAL version as an integer (VERSION_NUM)."""
     version_string = gdal.VersionInfo("VERSION_NUM")
     return int(version_string)
Example #21
0
def validate(  # noqa pylint: disable=too-many-branches
        ds,
        check_tiled=True,
        full_check=False):
    """Check if a file is a (Geo)TIFF with cloud optimized compatible structure.

    Args:
      ds: GDAL Dataset (or a filename, which will be opened) for the file
        to inspect.
      check_tiled: Set to False to ignore missing tiling.
      full_check: Set to True to check tile/strip leader/trailer bytes.
        Might be slow on remote files.

    Returns:
      A 3-element tuple (warnings, errors, details): a list of warning
      messages, a list of error messages (both empty if there is no
      problem), and a dictionary with the structure of the GeoTIFF file
      (IFD and first-block offsets of the main image and each overview).

    Raises:
      ValidateCloudOptimizedGeoTIFFException: Unable to open the file or the
        file is not a Tiff.
    """

    # The IFD_OFFSET / BLOCK_OFFSET_x_y TIFF metadata items used below
    # require GDAL >= 2.2.
    if int(gdal.VersionInfo("VERSION_NUM")) < 2020000:
        raise ValidateCloudOptimizedGeoTIFFException(
            "GDAL 2.2 or above required")

    # Accept a filename as well as an already-open Dataset
    # (unicode_type keeps this working on both Python 2 and 3).
    unicode_type = type("".encode("utf-8").decode("utf-8"))
    if isinstance(ds, (str, unicode_type)):
        gdal.PushErrorHandler()
        ds = gdal.Open(ds)
        gdal.PopErrorHandler()
        if ds is None:
            raise ValidateCloudOptimizedGeoTIFFException(
                "Invalid file : %s" % gdal.GetLastErrorMsg())
        if ds.GetDriver().ShortName != "GTiff":
            raise ValidateCloudOptimizedGeoTIFFException(
                "The file is not a GeoTIFF")

    details = {}
    errors = []
    warnings = []
    filename = ds.GetDescription()
    main_band = ds.GetRasterBand(1)
    ovr_count = main_band.GetOverviewCount()
    filelist = ds.GetFileList()
    # COG requires overviews to live inside the main file, not in a .ovr.
    if filelist is not None and filename + ".ovr" in filelist:
        errors += [
            "Overviews found in external .ovr file. They should be internal"
        ]

    if main_band.XSize > 512 or main_band.YSize > 512:
        if check_tiled:
            block_size = main_band.GetBlockSize()
            # A block as wide as the image (and > 1024) means strips, not tiles.
            if block_size[0] == main_band.XSize and block_size[0] > 1024:
                errors += [
                    "The file is greater than 512xH or Wx512, but is not tiled"
                ]

        if ovr_count == 0:
            warnings += [
                "The file is greater than 512xH or Wx512, it is recommended "
                "to include internal overviews"
            ]

    ifd_offset = int(main_band.GetMetadataItem("IFD_OFFSET", "TIFF"))
    ifd_offsets = [ifd_offset]

    # Structural-metadata flags; only set when GDAL hidden metadata is found.
    block_order_row_major = False
    block_leader_size_as_uint4 = False
    block_trailer_last_4_bytes_repeated = False
    mask_interleaved_with_imagery = False

    # 8 (classic TIFF) / 16 (BigTIFF) are the positions right after the header.
    if ifd_offset not in (8, 16):

        # Check if there is GDAL hidden structural metadata
        f = gdal.VSIFOpenL(filename, "rb")
        if not f:
            raise ValidateCloudOptimizedGeoTIFFException("Cannot open file")
        signature = struct.unpack("B" * 4, gdal.VSIFReadL(4, 1, f))
        # 'II+\0' (little-endian) or 'MM\0+' (big-endian) => BigTIFF.
        bigtiff = signature in ((0x49, 0x49, 0x2B, 0x00), (0x4D, 0x4D, 0x00,
                                                           0x2B))
        if bigtiff:
            expected_ifd_pos = 16
        else:
            expected_ifd_pos = 8
        gdal.VSIFSeekL(f, expected_ifd_pos, 0)
        pattern = "GDAL_STRUCTURAL_METADATA_SIZE=%06d bytes\n" % 0
        got = gdal.VSIFReadL(len(pattern), 1, f).decode("LATIN1")
        if len(got) == len(pattern) and got.startswith(
                "GDAL_STRUCTURAL_METADATA_SIZE="):
            size = int(got[len("GDAL_STRUCTURAL_METADATA_SIZE="):][0:6])
            extra_md = gdal.VSIFReadL(size, 1, f).decode("LATIN1")
            block_order_row_major = "BLOCK_ORDER=ROW_MAJOR" in extra_md
            block_leader_size_as_uint4 = "BLOCK_LEADER=SIZE_AS_UINT4" in extra_md
            block_trailer_last_4_bytes_repeated = (
                "BLOCK_TRAILER=LAST_4_BYTES_REPEATED" in extra_md)
            mask_interleaved_with_imagery = (
                "MASK_INTERLEAVED_WITH_IMAGERY=YES" in extra_md)
            if "KNOWN_INCOMPATIBLE_EDITION=YES" in extra_md:
                errors += [
                    "KNOWN_INCOMPATIBLE_EDITION=YES is declared in the file"
                ]
            expected_ifd_pos += len(pattern) + size
            expected_ifd_pos += (expected_ifd_pos % 2
                                 )  # IFD offset starts on a 2-byte boundary
        gdal.VSIFCloseL(f)

        if expected_ifd_pos != ifd_offsets[0]:
            errors += [
                "The offset of the main IFD should be %d. It is %d instead" %
                (expected_ifd_pos, ifd_offsets[0])
            ]

    details["ifd_offsets"] = {}
    details["ifd_offsets"]["main"] = ifd_offset

    for i in range(ovr_count):
        # Check that overviews are by descending sizes
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        if i == 0:
            if ovr_band.XSize > main_band.XSize or ovr_band.YSize > main_band.YSize:
                errors += [
                    "First overview has larger dimension than main band"
                ]
        else:
            prev_ovr_band = ds.GetRasterBand(1).GetOverview(i - 1)
            if (ovr_band.XSize > prev_ovr_band.XSize
                    or ovr_band.YSize > prev_ovr_band.YSize):
                errors += [
                    "Overview of index %d has larger dimension than "
                    "overview of index %d" % (i, i - 1)
                ]

        if check_tiled:
            block_size = ovr_band.GetBlockSize()
            if block_size[0] == ovr_band.XSize and block_size[0] > 1024:
                errors += ["Overview of index %d is not tiled" % i]

        # Check that the IFD of descending overviews are sorted by increasing
        # offsets
        ifd_offset = int(ovr_band.GetMetadataItem("IFD_OFFSET", "TIFF"))
        ifd_offsets.append(ifd_offset)
        details["ifd_offsets"]["overview_%d" % i] = ifd_offset
        if ifd_offsets[-1] < ifd_offsets[-2]:
            if i == 0:
                errors += [
                    "The offset of the IFD for overview of index %d is %d, "
                    "whereas it should be greater than the one of the main "
                    "image, which is at byte %d" %
                    (i, ifd_offsets[-1], ifd_offsets[-2])
                ]
            else:
                errors += [
                    "The offset of the IFD for overview of index %d is %d, "
                    "whereas it should be greater than the one of index %d, "
                    "which is at byte %d" %
                    (i, ifd_offsets[-1], i - 1, ifd_offsets[-2])
                ]

    # Check that the imagery starts by the smallest overview and ends with
    # the main resolution dataset

    def get_block_offset(band):
        # Return the file offset of the first allocated block of `band`
        # (scanning blocks row by row), or 0 when no block is allocated.
        blockxsize, blockysize = band.GetBlockSize()
        for y in range(int((band.YSize + blockysize - 1) / blockysize)):
            for x in range(int((band.XSize + blockxsize - 1) / blockxsize)):
                block_offset = band.GetMetadataItem(
                    "BLOCK_OFFSET_%d_%d" % (x, y), "TIFF")
                if block_offset:
                    return int(block_offset)
        return 0

    block_offset = get_block_offset(main_band)
    data_offsets = [block_offset]
    details["data_offsets"] = {}
    details["data_offsets"]["main"] = block_offset
    for i in range(ovr_count):
        ovr_band = ds.GetRasterBand(1).GetOverview(i)
        block_offset = get_block_offset(ovr_band)
        data_offsets.append(block_offset)
        details["data_offsets"]["overview_%d" % i] = block_offset

    # data_offsets[-1] belongs to the smallest overview (or the main image
    # when there is no overview); 0 means "no allocated block", skip check.
    if data_offsets[-1] != 0 and data_offsets[-1] < ifd_offsets[-1]:
        if ovr_count > 0:
            errors += [
                "The offset of the first block of the smallest overview "
                "should be after its IFD"
            ]
        else:
            errors += [
                "The offset of the first block of the image should be after its IFD"
            ]
    for i in range(len(data_offsets) - 2, 0, -1):
        if data_offsets[i] != 0 and data_offsets[i] < data_offsets[i + 1]:
            errors += [
                "The offset of the first block of overview of index %d should "
                "be after the one of the overview of index %d" % (i - 1, i)
            ]
    if (len(data_offsets) >= 2 and data_offsets[0] != 0
            and data_offsets[0] < data_offsets[1]):
        errors += [
            "The offset of the first block of the main resolution image "
            "should be after the one of the overview of index %d" %
            (ovr_count - 1)
        ]

    # Optional deep check of leader/trailer bytes; only meaningful when the
    # structural metadata declared at least one of these layout features.
    if full_check and (block_order_row_major or block_leader_size_as_uint4
                       or block_trailer_last_4_bytes_repeated
                       or mask_interleaved_with_imagery):
        f = gdal.VSIFOpenL(filename, "rb")
        if not f:
            raise ValidateCloudOptimizedGeoTIFFException("Cannot open file")

        full_check_band(
            f,
            "Main resolution image",
            main_band,
            errors,
            block_order_row_major,
            block_leader_size_as_uint4,
            block_trailer_last_4_bytes_repeated,
            mask_interleaved_with_imagery,
        )
        # Only check the mask when it is internal (no sidecar .msk file).
        if (main_band.GetMaskFlags() == gdal.GMF_PER_DATASET
                and (filename + ".msk") not in ds.GetFileList()):
            full_check_band(
                f,
                "Mask band of main resolution image",
                main_band.GetMaskBand(),
                errors,
                block_order_row_major,
                block_leader_size_as_uint4,
                block_trailer_last_4_bytes_repeated,
                False,
            )
        for i in range(ovr_count):
            ovr_band = ds.GetRasterBand(1).GetOverview(i)
            full_check_band(
                f,
                "Overview %d" % i,
                ovr_band,
                errors,
                block_order_row_major,
                block_leader_size_as_uint4,
                block_trailer_last_4_bytes_repeated,
                mask_interleaved_with_imagery,
            )
            if (ovr_band.GetMaskFlags() == gdal.GMF_PER_DATASET
                    and (filename + ".msk") not in ds.GetFileList()):
                full_check_band(
                    f,
                    "Mask band of overview %d" % i,
                    ovr_band.GetMaskBand(),
                    errors,
                    block_order_row_major,
                    block_leader_size_as_uint4,
                    block_trailer_last_4_bytes_repeated,
                    False,
                )
        gdal.VSIFCloseL(f)

    return warnings, errors, details
def main():
    """Run or submit batch pansharpening tasks.

    Parses command-line arguments, locates multispectral/panchromatic image
    pairs under the source path, builds one task per pair whose output does
    not exist yet, then dispatches the queue to PBS, SLURM, a local parallel
    pool, or runs it serially.

    Fix vs. previous revision: the serial branch removed the per-task log
    file handler twice; the duplicate removeHandler call is gone.
    """

    #### Set Up Arguments
    parent_parser, pos_arg_keys = ortho_functions.buildParentArgumentParser()
    parser = argparse.ArgumentParser(
        parents=[parent_parser],
        description="Run/Submit batch pansharpening in parallel")

    parser.add_argument("--pbs",
                        action='store_true',
                        default=False,
                        help="submit tasks to PBS")
    parser.add_argument("--slurm",
                        action='store_true',
                        default=False,
                        help="submit tasks to SLURM")
    parser.add_argument(
        "--parallel-processes",
        type=int,
        default=1,
        help="number of parallel processes to spawn (default 1)")
    parser.add_argument(
        "--qsubscript",
        help=
        "submission script to use in PBS/SLURM submission (PBS default is qsub_pansharpen.sh, "
        "SLURM default is slurm_pansharpen.py, in script root folder)")
    parser.add_argument(
        "-l", help="PBS resources requested (mimicks qsub syntax, PBS only)")
    parser.add_argument("--dryrun",
                        action="store_true",
                        default=False,
                        help="print actions without executing")

    #### Parse Arguments
    args = parser.parse_args()
    scriptpath = os.path.abspath(sys.argv[0])
    src = os.path.abspath(args.src)
    dstdir = os.path.abspath(args.dst)

    #### Validate Required Arguments
    # src may be a directory, a .txt list of images, or a single image
    # (possibly named by its 'blu' band sibling).
    if os.path.isdir(src):
        srctype = 'dir'
    elif os.path.isfile(src) and os.path.splitext(src)[1].lower() == '.txt':
        srctype = 'textfile'
    elif os.path.isfile(src) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    elif os.path.isfile(src.replace('msi', 'blu')) and os.path.splitext(
            src)[1].lower() in ortho_functions.exts:
        srctype = 'image'
    else:
        parser.error(
            "Error arg1 is not a recognized file path or file type: {}".format(
                src))

    if not os.path.isdir(dstdir):
        parser.error("Error arg2 is not a valid file path: {}".format(dstdir))

    # Verify qsubscript
    if args.pbs or args.slurm:
        if args.qsubscript is None:
            if args.pbs:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'qsub_pansharpen.sh')
            if args.slurm:
                qsubpath = os.path.join(os.path.dirname(scriptpath),
                                        'slurm_pansharpen.sh')
        else:
            qsubpath = os.path.abspath(args.qsubscript)
        if not os.path.isfile(qsubpath):
            parser.error("qsub script path is not valid: {}".format(qsubpath))

    ### Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError as e:
        parser.error(e)

    ## Check GDAL version (2.1.0 minimum)
    gdal_version = gdal.VersionInfo()
    try:
        if int(gdal_version) < 2010000:
            parser.error(
                "gdal_pansharpen requires GDAL version 2.1.0 or higher")
    except ValueError:
        parser.error("Cannot parse GDAL version: {}".format(gdal_version))

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)

    #### Get args ready to pass to task handler
    arg_keys_to_remove = ('l', 'qsubscript', 'dryrun', 'pbs', 'slurm',
                          'parallel_processes')
    arg_str_base = taskhandler.convert_optional_args_to_string(
        args, pos_arg_keys, arg_keys_to_remove)

    ## Identify source images
    if srctype == 'dir':
        image_list1 = utils.find_images(src, False, ortho_functions.exts)
    elif srctype == 'textfile':
        image_list1 = utils.find_images(src, True, ortho_functions.exts)
    else:
        image_list1 = [src]

    pair_list = []
    for srcfp in image_list1:
        try:
            image_pair = ImagePair(srcfp, spatial_ref)
        except RuntimeError as e:
            logger.error(e)
        else:
            logger.info("Image: %s, Sensor: %s", image_pair.mul_srcfn,
                        image_pair.sensor)
            pair_list.append(image_pair)

    logger.info('Number of src image pairs: %i', len(pair_list))

    ## Build task queue, skipping pairs whose output already exists
    i = 0
    task_queue = []
    for image_pair in pair_list:

        bittype = utils.get_bit_depth(args.outtype)
        pansh_dstfp = os.path.join(
            dstdir, "{}_{}{}{}_pansh.tif".format(
                os.path.splitext(image_pair.mul_srcfn)[0], bittype,
                args.stretch, args.epsg))

        if not os.path.isfile(pansh_dstfp):
            i += 1
            task = taskhandler.Task(
                image_pair.mul_srcfn, 'Psh{:04g}'.format(i), 'python',
                '{} {} {} {}'.format(scriptpath, arg_str_base,
                                     image_pair.mul_srcfp, dstdir),
                exec_pansharpen, [image_pair, pansh_dstfp, args])
            task_queue.append(task)

    logger.info('Number of incomplete tasks: %i', i)

    ## Run tasks
    if len(task_queue) > 0:
        logger.info("Submitting Tasks")
        if args.pbs:
            l = "-l {}".format(args.l) if args.l else ""
            try:
                task_handler = taskhandler.PBSTaskHandler(qsubpath, l)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.slurm:
            try:
                task_handler = taskhandler.SLURMTaskHandler(qsubpath)
            except RuntimeError as e:
                logger.error(e)
            else:
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        elif args.parallel_processes > 1:
            try:
                task_handler = taskhandler.ParallelTaskHandler(
                    args.parallel_processes)
            except RuntimeError as e:
                logger.error(e)
            else:
                logger.info("Number of child processes to spawn: %i",
                            task_handler.num_processes)
                if not args.dryrun:
                    task_handler.run_tasks(task_queue)

        else:
            # Serial execution: each task logs to its own <dst>.log file.
            results = {}
            lfh = None
            for task in task_queue:

                src, dstfp, task_arg_obj = task.method_arg_list

                #### Set up processing log handler
                logfile = os.path.splitext(dstfp)[0] + ".log"
                lfh = logging.FileHandler(logfile)
                lfh.setLevel(logging.DEBUG)
                formatter = logging.Formatter(
                    '%(asctime)s %(levelname)s- %(message)s',
                    '%m-%d-%Y %H:%M:%S')
                lfh.setFormatter(formatter)
                logger.addHandler(lfh)

                if not args.dryrun:
                    results[task.name] = task.method(src, dstfp, task_arg_obj)

                #### remove existing file handler
                logger.removeHandler(lfh)

            #### Print Images with Errors
            for k, v in results.items():
                if v != 0:
                    logger.warning("Failed Image: %s", k)

        logger.info("Done")

    else:
        logger.info("No images found to process")
Exemple #23
0
 def version(self):
     """Return the GDAL release name wrapped in a Version object."""
     release_name = gdal.VersionInfo("RELEASE_NAME")
     return Version(release_name)
Exemple #24
0
 def test_travis(self):
     """Print the Django, Python and GDAL versions for the CI log."""
     environment = (
         ('Django', django.VERSION),
         ('Python', sys.version_info),
         ('GDAL', gdal.VersionInfo()),
     )
     for label, value in environment:
         print(label, value)
Exemple #25
0
    if gdaltest.msg != 'test':
        gdaltest.post_reason('fail')
        return 'fail'

    return 'success'


# Ordered list of test callables executed by gdaltest.run_tests() below.
gdaltest_list = [
    basic_test_1, basic_test_2, basic_test_3, basic_test_4, basic_test_5,
    basic_test_6, basic_test_7, basic_test_8, basic_test_9, basic_test_10
]

if __name__ == '__main__':

    # Re-entrant helper mode: basic_test_8 re-runs this script as
    # "basic_test.py LICENSE 0|1" to exercise gdal.VersionInfo('LICENSE')
    # with GDAL_DATA pointing at a bogus ('/foo') or local ('tmp') directory.
    if len(sys.argv) == 3 and sys.argv[1] == "LICENSE":
        if sys.argv[2] == '0':
            gdal.SetConfigOption('GDAL_DATA', '/foo')
        else:
            gdal.SetConfigOption('GDAL_DATA', 'tmp')
        gdal.VersionInfo('LICENSE')
        print(gdal.VersionInfo('LICENSE'))
        import testnonboundtoswig
        testnonboundtoswig.GDALDestroyDriverManager()
        sys.exit(0)

    gdaltest.setup_run('basic_test')

    gdaltest.run_tests(gdaltest_list)

    gdaltest.summarize()
Exemple #26
0
    import rasterio
    import boto3
    import urllib
    import botocore
    def PrintException():
        """Print file, line number, source text and value of the active exception."""
        _, exc_value, traceback_obj = sys.exc_info()
        frame = traceback_obj.tb_frame
        line_no = traceback_obj.tb_lineno
        src_file = frame.f_code.co_filename
        # Refresh linecache in case the source changed on disk.
        linecache.checkcache(src_file)
        src_line = linecache.getline(src_file, line_no, frame.f_globals)
        message = 'EXCEPTION IN ({}, LINE {} "{}"): {}'.format(
            src_file, line_no, src_line.strip(), exc_value)
        print(message)

    gdal.UseExceptions()
    print("GDAL version:" + str(int(gdal.VersionInfo('VERSION_NUM'))))
else:
    homepath = "/Ranch-Climate-Weather/"
    os.chdir(homepath)
    from flask_caching import Cache  # This works on Linux but not Windows :)

# In[]: Libraries
import copy
import dash
from dash.dependencies import Input, Output, State, Event
import datetime
import dash_core_components as dcc
import dash_html_components as html
import dash_table_experiments as dt
import gc
import glob
Exemple #27
0
# Report availability and version of each optional dependency.
# NOTE: Python 2 syntax (print statements) — this snippet is py2-only.
try:
    import pandas as pa
    print "pandas version: ", pa.__version__
except ImportError:
    print "pandas not installed"

try:
    import netCDF4 as nc
    print "netCDF4 version: ", nc.__version__
except ImportError:
    print "netCDF4 not installed"

try:
    import scipy
    print "scipy version: ", scipy.__version__
except ImportError:
    print "scipy not installed"

try:
    import gdal
    # gdal.VersionInfo() with no argument returns the version number string.
    print "gdal version: ", gdal.VersionInfo()
except ImportError:
    print "gdal not installed"

try:
    import statsmodels.api as sm
    print "statsmodels version: ", sm.__version__
except ImportError:
    print "statsmodels not installed"
Exemple #28
0
        gdal.CE_Warning: 'Warning',
        gdal.CE_Failure: 'Failure',
        gdal.CE_Fatal: 'Fatal'
    }
    err_msg = err_msg.replace('\n', ' ')
    err_class = errtype.get(err_class, 'None')
    print 'Error Number: %s' % (err_num)
    print 'Error Type: %s' % (err_class)
    print 'Error Message: %s' % (err_msg)


# Probe for GDAL/OGR >= 2.0: set the module flag has_ogr and install the
# custom error handler when available. NOTE: Python 2 print statements.
try:
    import ogr
    import osr
    import gdal
    # VERSION_NUM is e.g. 2000000 for GDAL 2.0.0
    version_num = int(gdal.VersionInfo('VERSION_NUM'))
    if version_num >= 2000000:
        has_ogr = True
        gdal.PushErrorHandler(gdal_error_handler)
    else:
        print 'GDAL version >= 2.0 is required:'
        has_ogr = False
except Exception as e:
    # Import (or version lookup) failed entirely — run without OGR support.
    print 'GDAL is not available:'
    print e
    has_ogr = False


class TestRun(unittest.TestCase):
    """Tests for (non-scalar) LagrangianArray"""
    def make_OceanDrift_object(self):
Exemple #29
0
def main(argv=None):
    """Build XYZ/TMS web tiles from a GeoTIFF with gdal2tiles.py.

    Chooses a max zoom level from the pixel spacing when --zmax is not
    given, adapts the gdal2tiles options to the installed GDAL version,
    then post-processes the output (leaflet2.html with GSIMaps base
    layers, layers.txt, optional Y-inversion for XYZ tiles on GDAL<3.1).

    Args:
      argv: argument list; defaults to sys.argv.

    Returns:
      0 on success/help, 2 on usage error (via the Usage exception).

    Fixes vs. previous revision: `argv == None` -> `argv is None`; the
    bare `except:` around os.sched_getaffinity now catches only
    AttributeError (the call is missing on non-Linux platforms).
    """

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = '1.0.2'
    date = 20201028
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    ### For paralell processing
    global files

    #%% Set default
    infile = []
    outdir = []
    zmin = 5
    zmax = []
    tms_flag = True
    try:
        n_para = len(os.sched_getaffinity(0))
    except AttributeError:
        # sched_getaffinity is not available on all platforms (Linux-only).
        n_para = multi.cpu_count()

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(
                argv[1:], "hi:o:",
                ["help", "zmin=", "zmax=", "xyz", "n_para="])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-i':
                infile = a
            elif o == '-o':
                outdir = a
            elif o == '--zmin':
                zmin = int(a)
            elif o == '--zmax':
                zmax = int(a)
            elif o == '--xyz':
                tms_flag = False
            elif o == '--n_para':
                n_para = int(a)

        if not infile:
            raise Usage('No input file given, -i is not optional!')
        elif not os.path.exists(infile):
            raise Usage('No {} exists!'.format(infile))
        elif gdal.Open(infile) is None:
            raise Usage('{} is not GeoTIFF!'.format(infile))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Parameter setting
    if not outdir:
        outdir = 'tiles_' + infile.replace('.tif', '').replace('.', '_')
    if os.path.exists(outdir):
        print('\n{} already exists. Remove and overwrite.'.format(outdir),
              flush=True)
        shutil.rmtree(outdir)

    print('\nOutput dir: {}'.format(outdir), flush=True)

    if not zmax:
        # Derive max zoom from the approximate ground pixel spacing.
        geotiff = gdal.Open(infile)
        lon_w, dlon, _, lat_n, _, dlat = geotiff.GetGeoTransform()

        ### Approx pixel spacing in meter
        dlat_m = abs(dlat * 40000000 / 360)  ## deg -> meter
        dlon_m = abs(dlon * 40000000 / 360 *
                     np.cos(np.deg2rad(lat_n)))  ## deg -> meter

        pixsp = dlat_m if dlat_m < dlon_m else dlon_m  ## Use smaller one

        if pixsp <= 5: zmax = 17
        elif pixsp <= 10: zmax = 16
        elif pixsp <= 20: zmax = 15
        elif pixsp <= 40: zmax = 14
        elif pixsp <= 80: zmax = 13
        elif pixsp <= 160: zmax = 12
        else: zmax = 11

    print('\nZoom levels: {} - {}'.format(zmin, zmax), flush=True)

    gdalver = gdal.VersionInfo()  ## e.g., 3.1.1 -> 3010100, str

    #%% gdal2ties
    call = [
        "gdal2tiles.py", "-z", "{}-{}".format(zmin, zmax), "--no-kml", "-w",
        "leaflet", infile, outdir
    ]

    if int(gdalver[0]) >= 3:
        ## -x option (Exclude transparent tiles) available in ver>=3
        call.insert(1, "-x")

    if int(gdalver[0:3]) >= 203:
        ## --processes option available in ver>=2.3
        call.insert(1, "--processes={}".format(n_para))

    if not tms_flag and int(gdalver[0:3]) >= 301:
        ## --xyz option available ver>=3.1
        call.insert(1, "--xyz")

    print('', flush=True)
    print(' '.join(call), flush=True)

    p = subp.Popen(call, stdout=subp.PIPE, stderr=subp.STDOUT)
    for line in iter(p.stdout.readline, b''):
        print(line.rstrip().decode("utf8"), flush=True)

    #%% Remove transparent tiles if gdal<3
    if int(gdalver[0]) < 3:
        print('\nRemove transparent tiles...', flush=True)
        call = ["find", outdir, "-size", "334c", "|", "xargs", "rm", "-f"]
        subp.run(' '.join(call), shell=True)

    #%% Invert y if XYZ tiles and gdal<3.1
    if not tms_flag and int(gdalver[0:3]) < 301:
        print('\nInvert Y with {} parallel processing...'.format(n_para),
              flush=True)
        files = glob.glob('{}/*/*/*.png'.format(outdir))
        p = multi.Pool(n_para)
        p.map(invert_y_wrapper, range(len(files)))
        p.close()

    #%% Edit leaflet.html
    with open(os.path.join(outdir, 'leaflet.html'), 'r') as f:
        lines = f.readlines()

    ### Add GSImaps
    gsi = '        //  .. GSIMaps\n        var gsi = L.tileLayer(\'https://cyberjapandata.gsi.go.jp/xyz/std/{z}/{x}/{y}.png\', {attribution: \'<a href="https://maps.gsi.go.jp/development/ichiran.html" target="_blank">地理院タイル</a>\'});\n'
    gsi2 = '"GSIMaps": gsi, '
    gsi_photo = '        //  .. GSIMaps photo\n        var gsi_photo = L.tileLayer(\'https://cyberjapandata.gsi.go.jp/xyz/seamlessphoto/{z}/{x}/{y}.jpg\', {attribution: \'<a href="https://maps.gsi.go.jp/development/ichiran.html" target="_blank">地理院タイル</a>\'});\n'
    gsi_photo2 = '"GSIMaps Photo": gsi_photo, '

    ### XYZ or TMS
    tms = 'true' if tms_flag else 'false'

    ### Replace
    lines2 = [
        s + gsi + gsi_photo if '// Base layers\n' in s else
        s.replace('= {', '= {' + gsi2 + gsi_photo2) if 'var basemaps =' in s
        else s.replace('true', tms) if 'tms: true' in s else s for s in lines
    ]

    with open(os.path.join(outdir, 'leaflet2.html'), 'w') as f:
        f.writelines(lines2)

    #%% Create layers.txt for GSIMaps
#    url = 'file://' + os.path.join(os.path.abspath(outdir), '{z}', '{x}', '{y}.png')
    url = os.path.join('http://', 'XXX', outdir, '{z}', '{x}', '{y}.png')
    with open(os.path.join(outdir, 'layers.txt'), 'w') as f:
        f.writelines(layers_txt(outdir, url, zmin, zmax, tms))

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minite = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minite, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output: {}'.format(outdir), flush=True)
    print('')
Exemple #30
0
    ### Verify processing options do not conflict
    if args.pbs and args.slurm:
        parser.error("Options --pbs and --slurm are mutually exclusive")
    if (args.pbs or args.slurm) and args.parallel_processes > 1:
        parser.error(
            "HPC Options (--pbs or --slurm) and --parallel-processes > 1 are mutually exclusive"
        )

    #### Verify EPSG
    try:
        spatial_ref = utils.SpatialRef(args.epsg)
    except RuntimeError, e:
        parser.error(e)

    ## Check GDAL version (2.1.0 minimum)
    gdal_version = gdal.VersionInfo()
    try:
        if int(gdal_version) < 2010000:
            parser.error(
                "gdal_pansharpen requires GDAL version 2.1.0 or higher")
    except ValueError, e:
        parser.error("Cannot parse GDAL version: {}".format(gdal_version))

    #### Set up console logging handler
    lso = logging.StreamHandler()
    lso.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s %(levelname)s- %(message)s',
                                  '%m-%d-%Y %H:%M:%S')
    lso.setFormatter(formatter)
    logger.addHandler(lso)