def test_dissolve_with_lists():
    assert anc.dissolve([[1, 2], [3, 4]]) == [1, 2, 3, 4]
    assert anc.dissolve([[[1]]]) == [1]
    assert anc.dissolve(((
        1,
        2,
    ), (3, 4))) == [1, 2, 3, 4]
    assert anc.dissolve(((1, 2), (1, 2))) == [1, 2, 1, 2]
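The assertions above exercise anc.dissolve, which flattens arbitrarily nested lists and tuples into one flat list while preserving element order. A minimal sketch of such a recursive flattener is shown below; it only illustrates the behaviour the tests expect, is not the library's actual implementation, and the name dissolve_sketch is made up for this example.

def dissolve_sketch(inlist):
    # recursively flatten nested lists/tuples into a single flat list
    out = []
    for item in inlist:
        if isinstance(item, (list, tuple)):
            out.extend(dissolve_sketch(item))
        else:
            out.append(item)
    return out

# e.g. dissolve_sketch([[1, 2], [3, 4]]) returns [1, 2, 3, 4]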
Example #2
def getAuxdata(datasets, scenes):
    auxDataPath = os.path.join(expanduser("~"), '.snap/auxdata')

    scenes = [identify(scene) if isinstance(scene, str) else scene for scene in scenes]
    sensors = list(set([scene.sensor for scene in scenes]))
    for dataset in datasets:
        if dataset == 'SRTM 1Sec HGT':
            files = [x.replace('hgt', 'SRTMGL1.hgt.zip') for x in
                     list(set(dissolve([scene.getHGT() for scene in scenes])))]
            for file in files:
                infile = os.path.join('http://step.esa.int/auxdata/dem/SRTMGL1', file)
                outfile = os.path.join(auxDataPath, 'dem/SRTM 1Sec HGT', file)
                if not os.path.isfile(outfile):
                    print(infile)
                    try:
                        remote = urlopen(infile)
                    except HTTPError:
                        print('-> not available')
                        continue
                    # write the downloaded tile to the local SNAP auxdata directory
                    with open(outfile, 'wb') as output:
                        output.write(remote.read())
                    remote.close()
        elif dataset == 'POEORB':
            for sensor in sensors:
                if re.search('S1[AB]', sensor):

                    dates = [(scene.start[:4], scene.start[4:6]) for scene in scenes]
                    years = list(set([x[0] for x in dates]))

                    remote_contentVersion = urlopen(
                        'http://step.esa.int/auxdata/orbits/Sentinel-1/POEORB/remote_contentVersion.txt')
                    versions_remote = getOrbitContentVersions(remote_contentVersion)

                    for year in years:
                        dir_orb = os.path.join(auxDataPath, 'Orbits/Sentinel-1/POEORB', year)

                        if not os.path.isdir(dir_orb):
                            os.makedirs(dir_orb)
                        contentVersionFile = os.path.join(dir_orb, 'contentVersion.txt')

                        if os.path.isfile(contentVersionFile):
                            contentVersion = open(contentVersionFile, 'r+')
                            versions_local = getOrbitContentVersions(contentVersion)
                        else:
                            contentVersion = open(contentVersionFile, 'w')
                            versions_local = {}

                        combine = dict(set(versions_local.items()) & set(versions_remote.items()))

                        dates_select = [x for x in dates if x[0] == year]
                        months = list(set([x[1] for x in dates_select]))

                        orb_ids = sorted(
                            [x for x in ['{}-{}.zip'.format(year, month) for month in months] if x not in combine])

                        if len(orb_ids) > 0:
                            contentVersion.write('#\n#{}\n'.format(strftime('%a %b %d %H:%M:%S %Z %Y', gmtime())))

                            for orb_id in orb_ids:
                                orb_remote = urlopen(
                                    'http://step.esa.int/auxdata/orbits/Sentinel-1/POEORB/{}'.format(orb_id))
                                orb_remote_stream = zf.ZipFile(BytesIO(orb_remote.read()), 'r')
                                orb_remote.close()

                                targets = [x for x in orb_remote_stream.namelist() if
                                           not os.path.isfile(os.path.join(dir_orb, x))]
                                orb_remote_stream.extractall(dir_orb, targets)
                                orb_remote_stream.close()

                                versions_local[orb_id] = versions_remote[orb_id]

                                for key, val in versions_local.items():
                                    contentVersion.write('{}={}\n'.format(key, val))

                        contentVersion.close()
                    remote_contentVersion.close()
                else:
                    print('not implemented yet')
        elif dataset == 'Delft Precise Orbits':
            path_server = 'dutlru2.lr.tudelft.nl'
            subdirs = {'ASAR': 'ODR.ENVISAT1/eigen-cg03c', 'ERS1': 'ODR.ERS-1/dgm-e04', 'ERS2': 'ODR.ERS-2/dgm-e04'}
            ftp = FTP(path_server)
            ftp.login()
            for sensor in sensors:
                if sensor in subdirs.keys():
                    path_target = os.path.join('pub/orbits', subdirs[sensor])
                    path_local = os.path.join(auxDataPath, 'Orbits/Delft Precise Orbits', subdirs[sensor])
                    if not os.path.isdir(path_local):
                        os.makedirs(path_local)
                    ftp.cwd(path_target)
                    for item in ftp.nlst():
                        with open(os.path.join(path_local, item), 'wb') as target_local:
                            ftp.retrbinary('RETR ' + item, target_local.write)
            ftp.quit()
        else:
            print('not implemented yet')
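This excerpt relies on module-level imports that are not shown here (os, re, urlopen/HTTPError, FTP, zipfile, BytesIO, strftime/gmtime, expanduser) as well as on package helpers such as identify, dissolve and getOrbitContentVersions. A hedged usage sketch, with a placeholder scene archive name:

# hypothetical invocation; the scene file name is a placeholder
scenes = ['S1A_IW_GRDH_scene.zip']
getAuxdata(['SRTM 1Sec HGT', 'POEORB'], scenes)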
Example #3
def test_dissolve_3_list_recursion(self):
    self.assertEqual(anc.dissolve([[[1]]]), [1])
Example #4
def test_dissolve_with_lists(self):
    self.assertEqual(anc.dissolve([[1, 2], [3, 4]]), [1, 2, 3, 4])
Example #5
def test_dissolve_tuple_recursion(self):
    self.assertEqual(anc.dissolve(((1, 2), (1, 2))), [1, 2, 1, 2])
Example #6
def test_dissolve_with_tuples(self):
    self.assertEqual(anc.dissolve(((
        1,
        2,
    ), (3, 4))), [1, 2, 3, 4])
Example #7
def stack(srcfiles,
          dstfile,
          resampling,
          targetres,
          srcnodata,
          dstnodata,
          shapefile=None,
          layernames=None,
          sortfun=None,
          separate=False,
          overwrite=False,
          compress=True,
          cores=4):
    """
    function for mosaicking, resampling and stacking of multiple raster files into a 3D data cube

    Parameters
    ----------
    srcfiles: list
        a list of file names or a list of lists; each sub-list is treated as a group of files to be mosaicked in the given order
    dstfile: str
        the destination file or, if separate is True, the directory for the individual GeoTiff files
    resampling: {near, bilinear, cubic, cubicspline, lanczos, average, mode, max, min, med, Q1, Q3}
        the resampling method; see documentation of gdalwarp
    targetres: tuple or list
        two entries for the x and y spatial resolution of the output
    srcnodata: int or float
        the nodata value of the source files
    dstnodata: int or float
        the nodata value of the destination file(s)
    shapefile: str or spatial.vector.Vector
        a shapefile for defining the area of the destination files
    layernames: list
        the names of the output layers; if None, the basenames of the input files are used
    sortfun: function
        a function for sorting the input files; this is needed for defining the mosaicking order
    separate: bool
        should the files be written to a single raster stack or to separate files? If True, each file is written to its own GeoTiff.
    overwrite: bool
        overwrite the file if it already exists?
    compress: bool
        compress the geotiff files?
    cores: int
        the number of CPU threads to use; this is only relevant if separate = True

    Returns
    -------
    """
    if len(dissolve(srcfiles)) == 0:
        raise IOError('no input files provided to function raster.stack')

    if layernames is not None:
        if len(layernames) != len(srcfiles):
            raise IOError(
                'mismatch between number of source file groups and layernames')

    if not isinstance(targetres, (list, tuple)) or len(targetres) != 2:
        raise RuntimeError(
            'targetres must be a list or tuple with two entries for x and y resolution'
        )

    if len(srcfiles) == 1 and not isinstance(srcfiles[0], list):
        raise IOError('only one file specified; nothing to be done')

    if resampling not in [
            'near', 'bilinear', 'cubic', 'cubicspline', 'lanczos', 'average',
            'mode', 'max', 'min', 'med', 'Q1', 'Q3'
    ]:
        raise IOError('resampling method not supported')

    projections = list()
    for x in dissolve(srcfiles):
        try:
            projection = Raster(x).projection
        except OSError as e:
            print('cannot read file: {}'.format(x))
            raise e
        projections.append(projection)

    projections = list(set(projections))
    if len(projections) > 1:
        raise IOError('raster projection mismatch')
    elif len(projections) == 0:
        raise RuntimeError(
            'could not retrieve the projection from any of the {} input images'
            .format(len(srcfiles)))
    else:
        srs = projections[0]

    # read shapefile bounding coordinates and reduce list of rasters to those overlapping with the shapefile
    if shapefile is not None:
        shp = shapefile if isinstance(shapefile, Vector) else Vector(shapefile)
        shp.reproject(srs)
        ext = shp.extent
        arg_ext = (ext['xmin'], ext['ymin'], ext['xmax'], ext['ymax'])
        for i in range(len(srcfiles)):
            group = sorted(srcfiles[i], key=sortfun) if isinstance(
                srcfiles[i], list) else [srcfiles[i]]
            group = [x for x in group if intersect(shp, Raster(x).bbox())]
            if len(group) > 1:
                srcfiles[i] = group
            elif len(group) == 1:
                srcfiles[i] = group[0]
            else:
                srcfiles[i] = None
        srcfiles = list(filter(None, srcfiles))
    else:
        arg_ext = None

    # create temporary directory for writing intermediate files
    dst_base = os.path.splitext(dstfile)[0]
    tmpdir = dst_base + '__tmp'
    if not os.path.isdir(tmpdir):
        os.makedirs(tmpdir)

    options_warp = {
        'options': ['-q'],
        'format': 'GTiff' if separate else 'ENVI',
        'outputBounds': arg_ext,
        'multithread': True,
        'srcNodata': srcnodata,
        'dstNodata': dstnodata,
        'xRes': targetres[0],
        'yRes': targetres[1],
        'resampleAlg': resampling
    }

    if overwrite:
        options_warp['options'] += ['-overwrite']

    if separate and compress:
        options_warp['options'] += [
            '-co', 'COMPRESS=DEFLATE', '-co', 'PREDICTOR=2'
        ]

    options_buildvrt = {'outputBounds': arg_ext, 'srcNodata': srcnodata}

    # create VRT files for mosaicing
    for i in range(len(srcfiles)):
        base = srcfiles[i][0] if isinstance(srcfiles[i], list) else srcfiles[i]
        vrt = os.path.join(
            tmpdir,
            os.path.splitext(os.path.basename(base))[0] + '.vrt')
        gdalbuildvrt(srcfiles[i], vrt, options_buildvrt)
        srcfiles[i] = vrt

    # if no specific layernames are defined and a sorting function is provided,
    # sort the VRT/raster files by that function before stacking
    if layernames is None and sortfun is not None:
        srcfiles = sorted(srcfiles, key=sortfun)

    bandnames = [os.path.splitext(os.path.basename(x))[0]
                 for x in srcfiles] if layernames is None else layernames

    if separate or len(srcfiles) == 1:
        if not os.path.isdir(dstfile):
            os.makedirs(dstfile)
        dstfiles = [os.path.join(dstfile, x) + '.tif' for x in bandnames]
        if overwrite:
            files = list(zip(srcfiles, dstfiles))
        else:
            files = [
                x for x in zip(srcfiles, dstfiles) if not os.path.isfile(x[1])
            ]
            if len(files) == 0:
                print(
                    'all target tiff files already exist, nothing to be done')
                shutil.rmtree(tmpdir)
                return
        srcfiles, dstfiles = map(list, zip(*files))

        multicore(gdalwarp,
                  cores=cores,
                  multiargs={
                      'src': srcfiles,
                      'dst': dstfiles
                  },
                  options=options_warp)
    else:
        # create VRT for stacking
        vrt = os.path.join(tmpdir, os.path.basename(dst_base) + '.vrt')
        options_buildvrt['options'] = ['-separate']
        gdalbuildvrt(srcfiles, vrt, options_buildvrt)

        # warp files
        gdalwarp(vrt, dstfile, options_warp)

        # edit ENVI HDR files to contain specific layer names
        par = envi.HDRobject(dstfile + '.hdr')
        par.band_names = bandnames
        envi.hdr(par)

    # remove temporary directory and files
    shutil.rmtree(tmpdir)
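A hedged usage sketch of stack: the input rasters, output name and parameter values below are placeholders for illustration only, and the function additionally depends on helpers from the surrounding package (Raster, Vector, gdalwarp, gdalbuildvrt, multicore, envi, dissolve, intersect) being importable.

# hypothetical call: mosaic and stack two (placeholder) rasters into a single ENVI cube
stack(srcfiles=['scene_a.tif', 'scene_b.tif'],  # placeholder input files
      dstfile='cube',                           # placeholder output base name
      resampling='bilinear',
      targetres=(20, 20),
      srcnodata=-99,
      dstnodata=-99,
      layernames=['scene_a', 'scene_b'],
      separate=False,
      overwrite=True)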