Example No. 1
def make_VRT(file, directory):
    sub_directories = [
        os.path.join(directory, name, file) for name in os.listdir(directory)
        if os.path.isdir(os.path.join(directory, name))
    ]
    vrt_options = gdal.BuildVRTOptions(VRTNodata="nan")
    gdal.BuildVRT(os.path.join(directory, f"{file[:-4]}.vrt"), sub_directories,
                  options=vrt_options)
Example No. 2
def get_tiles(indir, master_tiles, s):
    dname = os.path.basename(indir.strip(os.path.sep))  # get the actual granule/folder name
    fprint = get_aster_footprint(indir, master_tiles.crs, indir=indir, polyout=False)
    my_proj = pyproj.Proj(master_tiles.crs)
    if my_proj.is_latlong():
        buff = buffer_conversion(fprint, 1000)
    else:
        unit = [st.split('=')[-1] for st in my_proj.srs.split(' ') if 'units' in st]
        if unit[0] == 'm':
            buff = 1000
        else:
            buff = 1000 # have to figure out what to do with non-meter units...
    res = s.query(fprint.buffer(buff))
    # get only the results that intersect our buffered footprint
    intersects = [c for c in res if fprint.buffer(buff).intersection(c).area > 0]
    fnames = [master_tiles['filename'][master_tiles['geometry'] == c].values[0] for c in intersects]
    paths = [master_tiles['path'][master_tiles['geometry'] == c].values[0] for c in intersects]
    # subfolders = [master_tiles['subfolder'][master_tiles['geometry'] == c].values[0] for c in intersects]

    # tilelist = [os.path.sep.join([paths[i], subfolders[i], f]) for i, f in enumerate(fnames)]
    tilelist = [os.path.sep.join([paths[i], f]) for i, f in enumerate(fnames)]
    # create a temporary VRT from the tiles
    # print(os.path.sep.join([os.path.abspath(indir), 'tmp_{}.vrt'.format(dname)]))
    if len(tilelist)>0:
        gdal.BuildVRT(os.path.sep.join([indir, 'tmp_{}.vrt'.format(dname)]), tilelist, resampleAlg='bilinear')
        return os.path.sep.join([os.path.abspath(indir), 'tmp_{}.vrt'.format(dname)])
    else:
        return None
Example No. 3
def mosaic_timescan(burst_inventory, processing_dir, temp_dir, ard_parameters):

    product_list = [
        'BS.HH', 'BS.VV', 'BS.HV', 'BS.VH', 'coh.VV', 'coh.VH', 'Alpha',
        'Entropy', 'Anisotropy'
    ]
    metrics = ard_parameters['metrics']

    os.makedirs(opj(processing_dir, 'Mosaic', 'Timescan'), exist_ok=True)
    i, list_of_files = 0, []
    for product in itertools.product(product_list, metrics):  # ****

        filelist = ' '.join(
            glob.glob(
                opj(processing_dir, '*', 'Timescan',
                    '*{}.{}.tif'.format(product[0], product[1]))))

        if filelist:
            i += 1
            outfile = opj(processing_dir, 'Mosaic', 'Timescan',
                          '{}.{}.{}.tif'.format(i, product[0], product[1]))
            command = ('otbcli_Mosaic -il {} -comp.feather large -tmpdir {} '
                       '-progress 1 -out {} float'.format(
                           filelist, temp_dir, outfile))
            os.system(command)
            list_of_files.append(outfile)

    # create vrt
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(processing_dir, 'Mosaic', 'Timescan', 'Timescan.vrt'),
                  list_of_files,
                  options=vrt_options)
Example No. 4
def hdf2tif(hdf, reproject=True):
    """
    Converts hdf files to tiff files

    :param hdf: HDF file to be processed
    :param reproject: Will be reprojected by default
    :return: Path to the output GeoTIFF
    """

    dataset = gdal.Open(hdf, gdal.GA_ReadOnly)
    subdatasets = dataset.GetSubDatasets()
    data_dir = create_output_directory(hdf)
    convert_to_vrt(subdatasets, data_dir)
    vrt_options = gdal.BuildVRTOptions(separate=True)
    vrt_list = list_files(data_dir, 'vrt')
    vrt_output = hdf.replace('.hdf', '.vrt')
    gdal.BuildVRT(vrt_output, sorted(vrt_list), options=vrt_options)
    if reproject:
        proj = "+proj=sinu +R=6371007.181 +nadgrids=@null +wktext"
        warp_options = gdal.WarpOptions(srcSRS=proj, dstSRS="EPSG:3857")
    else:
        warp_options = ""
    output_tiff = vrt_output.replace(".vrt", "_reprojected.tif")

    if not os.path.exists(output_tiff):
        gdal.Warp(output_tiff, vrt_output, options=warp_options)

    clear_temp_files(data_dir, vrt_output)

    return os.path.join(DIRECTORY, output_tiff)
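A minimal usage sketch for hdf2tif, assuming the helpers it calls (create_output_directory, convert_to_vrt, list_files, clear_temp_files) and the DIRECTORY constant are defined in the same module; the granule name is a placeholder:

# reproject a (placeholder) MODIS HDF granule to a Web Mercator GeoTIFF
tif_path = hdf2tif("MCD43A4.A2020001.h17v03.006.hdf", reproject=True)
print(tif_path)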
Example No. 5
def generate_vrt_and_geotiff(input_file, planet, date):
    title_pattern = "_geo"
    aullr = ""
    srs = ""
    file_name, ext = os.path.splitext(input_file)
    output_file_location = "/data/" + planet + "/" + date + "/"
    output_geo_file = output_file_location + file_name + title_pattern + ext
    print("INFO:getting geo spatial co-ordinates for planet " + planet)

    if planet == "MARS":
        aullr = [-180, 90, 180, -90]
        srs = 'EPSG:4326'
        print("INFO:MARS geo spatial co-ordinates found")
    if planet == "EARTH":
        aullr = [-155.79295349, -80.91002823000001, 5.505262509999994, 80.29821777]
        srs = 'EPSG:4326'
        print("INFO:EARTH geo spatial co-ordinates found")
    if planet == "MARS" or planet == "EARTH":
        print("INFO:Loading input data set " + input_file)
        ds = gdal.Open(input_file)
        print(
            "INFO:Translating input data set to geo tiff data set with output bounds "
            + str(aullr) + " output srs " + srs)
        gdal.Translate(output_geo_file,
                       ds,
                       format='GTiff',
                       outputBounds=aullr,
                       outputSRS=srs)
        print("INFO:Building vrt for generated geo tiff data set")
        gdal.BuildVRT(output_file_location + file_name + ".vrt",
                      output_geo_file)
        create_info_file(output_file_location, file_name)
        print("INFO:Completed geo tiff, vrt and info files generation")
    else:
        print("DEBUG:Unknown planet")
Example No. 6
def create_tscan_vrt(timescan_dir, ard_params):
    # loop through all potential products
    # a products list
    product_list = [
        'TC.HH', 'TC.VV', 'TC.HV', 'TC.VH', 'BS.HH', 'BS.VV', 'BS.HV', 'BS.VH',
        'coh.VV', 'coh.VH', 'coh.HH', 'coh.HV', 'pol.Entropy',
        'pol.Anisotropy', 'pol.Alpha'
    ]

    i, outfiles = 0, []
    iteration = itertools.product(product_list, ard_params['metrics'])
    for product, metric in iteration:

        # get file and add number for outfile
        infile = opj(timescan_dir, '{}.{}.tif'.format(product, metric))

        # if there is no file, skip this iteration
        if not os.path.isfile(infile):
            continue

        # else
        i += 1
        outfile = opj(timescan_dir, '{}.{}.{}.tif'.format(i, product, metric))
        outfiles.append(outfile)
        # otherwise rename the file
        shutil.move(infile, outfile)

    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(timescan_dir, 'Timescan.vrt'),
                  outfiles,
                  options=vrt_options)
Example No. 7
def create_DEM_mosaic(DEM,
                      DEM_dir,
                      dstfile,
                      product="NED",
                      vrt_only=False,
                      format="GTiff"):
    """ Create a Mosaic from a list of DEM urls or NTS tiles. Missing tiles will
    be downloaded """

    files = download_multiple_DEM(DEM, DEM_dir, product)

    # build VRT
    VRT_path = path.join(path.dirname(dstfile), "tmp.VRT")
    VRT = gdal.BuildVRT(VRT_path, files)
    VRT.FlushCache()
    VRT = None

    # return VRT-only if desired
    if vrt_only:
        return (VRT_path)

    # convert the VRT to the requested output format

    ds = gdal.Translate(dstfile, VRT_path, format=format)

    ds.FlushCache()
    ds = None

    remove(VRT_path)
    return (dstfile)
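A usage sketch for create_DEM_mosaic, assuming download_multiple_DEM and the imports (gdal, os.path as path, os.remove as remove) are available; the tile IDs and paths are placeholders:

# mosaic a list of (placeholder) DEM tiles into a single GeoTIFF, downloading any missing ones
mosaic = create_DEM_mosaic(["075C", "075D"], "/tmp/DEM_tiles", "/tmp/dem_mosaic.tif")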
Example No. 8
def daily_vrt_jasmin(fnames_date, vrt_dir = None):
    temp1 = 'HDF4_EOS:EOS_GRID:"%s":MOD_Grid_BRDF:BRDF_Albedo_Band_Mandatory_Quality_%s'
    temp2 = 'HDF4_EOS:EOS_GRID:"%s":MOD_Grid_BRDF:BRDF_Albedo_Parameters_%s'

    spatialRef = osr.SpatialReference()
    spatialRef.ImportFromProj4('+proj=sinu +lon_0=0 +x_0=0 +y_0=0 +R=6371007.181 +units=m +no_defs')

    fnames, date = fnames_date
    fnames = list(map(os.path.abspath, fnames))
    all_files = fnames
    ''' this may be problematic when reading and writing at the same time
    all_files = []
    for fname in fnames:
        all_files += glob(os.path.dirname(fname) + '/MCD43A1.A%s.h??v??.006.*.hdf'%date)
    '''
    if vrt_dir is None:
        vrt_dir = './MCD43_VRT/' 
    if not os.path.exists(vrt_dir):
        os.mkdir(vrt_dir)
    d = datetime.strptime(date, '%Y%j').strftime('%Y-%m-%d')
    date_dir = vrt_dir + '/' + '%s/'%d
    if not os.path.exists(date_dir):                               
        os.mkdir(date_dir) 
    for temp in [temp1, temp2]:                                                      
        for band in ['Band1','Band2','Band3','Band4','Band5','Band6','Band7', 'vis', 'nir', 'shortwave']:
            bs = []                                                                  
            for fname in all_files:                                                     
                bs.append(temp%(fname, band))                                        
            gdal.BuildVRT(date_dir + '_'.join(['MCD43', date, bs[0].split(':')[-1]])+'.vrt', bs, outputSRS = spatialRef).FlushCache()
Example No. 9
def convert_to_vrt(subdatasets, data_dir):
    """
    Loops through the subdatasets and creates vrt files

    :param subdatasets: Subdataset of every HDF file
    :param data_dir: Result of create_output_directory method
    :return: None
    """

    # Loop through the subdatasets
    # Enumerate to keep the bands in order

    for index, subd in enumerate(subdatasets):

        # Generate output name from the band names
        output_name = os.path.join(
            data_dir, "Band_{}_{}.vrt".format(
                str(index + 1).zfill(2), subd[0].split(":")[4]))

        # Create the virtual raster
        gdal.BuildVRT(output_name, subd[0])

        # Check if scale and offset exists
        scale = get_metadata_item(subd[0], 'scale')

        modify_vrt(output_name, scale)
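A sketch of how convert_to_vrt is typically driven (compare hdf2tif above), assuming the companion helpers get_metadata_item and modify_vrt exist; the file name is a placeholder:

import os
from osgeo import gdal

os.makedirs("band_vrts", exist_ok=True)
ds = gdal.Open("MOD13Q1.A2020001.h18v04.006.hdf", gdal.GA_ReadOnly)
convert_to_vrt(ds.GetSubDatasets(), "band_vrts")  # writes Band_01_*.vrt, Band_02_*.vrt, ...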
Example No. 10
def daily_vrt(fnames_date, vrt_dir=None):
    temp1 = 'HDF4_EOS:EOS_GRID:"%s":MOD_Grid_BRDF:BRDF_Albedo_Band_Mandatory_Quality_%s'
    temp2 = 'HDF4_EOS:EOS_GRID:"%s":MOD_Grid_BRDF:BRDF_Albedo_Parameters_%s'

    fnames, date = fnames_date
    fnames = list(map(os.path.abspath, fnames))
    all_files = []
    for fname in fnames:
        all_files += glob(
            os.path.dirname(fname) + '/MCD43A1.A%s.h??v??.006.*.hdf' % date)
    if vrt_dir is None:
        vrt_dir = './MCD43_VRT/'
    if not os.path.exists(vrt_dir):
        os.mkdir(vrt_dir)
    d = datetime.strptime(date, '%Y%j').strftime('%Y-%m-%d')
    date_dir = vrt_dir + '/' + '%s/' % d
    if not os.path.exists(date_dir):
        os.mkdir(date_dir)
    for temp in [temp1, temp2]:
        for band in [
                'Band1', 'Band2', 'Band3', 'Band4', 'Band5', 'Band6', 'Band7',
                'vis', 'nir', 'shortwave'
        ]:
            bs = []
            for fname in all_files:
                bs.append(temp % (fname, band))
            gdal.BuildVRT(
                date_dir + '_'.join(['MCD43', date, bs[0].split(':')[-1]]) +
                '.vrt', bs).FlushCache()
Example No. 11
def copy_evaluation_reef_quads() -> None:
    _logger.info('Copying evaluation quads')
    dirs_reefs = sorted(os.listdir(paths.DIR_DATA_EVAL))
    dirs_data_variant = sorted(os.listdir(paths.DIR_DATA_TRAIN))
    for dir_reef in dirs_reefs:
        _logger.debug('Copying evaluation quads for reef {}'.format(dir_reef))
        feature = next(
            iter(
                fiona.open(
                    os.path.join(paths.DIR_DATA_EVAL, dir_reef,
                                 PATH_REEF_MULTIPOLY))))
        geometry = shapely.geometry.shape(feature['geometry'])
        quads_needed = mosaic_quads.determine_mosaic_quads_for_geometry(
            geometry)
        _logger.debug('Quads needed:  {}'.format(quads_needed))
        vrt_srcs = list()
        for dir_variant in dirs_data_variant:
            if dir_variant == 'tmp':
                continue
            for quad in quads_needed:
                filename_quad = quad + '_features.tif'
                filepath_src = os.path.join(paths.DIR_DATA_TRAIN, dir_variant,
                                            filename_quad)
                if not os.path.exists(filepath_src):
                    continue
                filepath_dest = os.path.join(paths.DIR_DATA_EVAL, dir_reef,
                                             dir_variant, filename_quad)
                if not os.path.exists(os.path.dirname(filepath_dest)):
                    os.makedirs(os.path.dirname(filepath_dest))
                shutil.copy(filepath_src, filepath_dest)
                vrt_srcs.append(filepath_dest)
            filepath_vrt = os.path.join(paths.DIR_DATA_EVAL, dir_reef,
                                        dir_variant, PATH_REEF_VRT)
            gdal.BuildVRT(filepath_vrt, vrt_srcs)
Example No. 12
def single_tif_to_stacked(tif_fold):
    '''
    This function takes a folder of single-band tifs and transforms them into:
    - first, a stacked .vrt
    - second, a stacked multiband .tif

    ----
    Parameters:
    tif_fold -> string, path to the folder containing the monthly tifs

    ----
    Returns:
    None (writes stacked.vrt and stacked.tif into tif_fold)
    '''
    # path of files location
    filenames = [
        '01.2007.tif', '02.2007.tif', '03.2007.tif', '04.2007.tif',
        '05.2007.tif', '06.2007.tif', '07.2007.tif', '08.2007.tif',
        '09.2007.tif', '10.2007.tif', '11.2007.tif', '12.2007.tif',
        '01.2008.tif', '02.2008.tif', '03.2008.tif', '04.2008.tif',
        '05.2008.tif', '06.2008.tif', '07.2008.tif', '08.2008.tif',
        '09.2008.tif', '10.2008.tif', '11.2008.tif', '12.2008.tif'
    ]
    # loop through each tif in the folder
    tifs = [os.path.join(tif_fold, road) for road in filenames]

    # output location for vrt file
    outvrt_fold = os.path.join(tif_fold, 'stacked.vrt')
    # stack list of tifs into vrt file
    outvrt = gdal.BuildVRT(outvrt_fold, tifs, separate=True)
    # output location for tif file
    outtif_fold = os.path.join(tif_fold, 'stacked.tif')
    # converts vrt into tif
    outtif = gdal.Translate(outtif_fold, outvrt)
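A usage sketch with a placeholder folder, which is expected to contain the 24 monthly files listed above:

single_tif_to_stacked('/data/monthly_2007_2008')  # writes stacked.vrt and stacked.tif into the folder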
Example No. 13
def get_tiles(bounds, master_tiles, s, name):
    res = s.query(bounds)
    # get only the results that intersect our buffered footprint more than 10% of its area
    intersects = [c for c in res if bounds.intersection(c).area / c.area > 0.1]
    fnames = [
        master_tiles['filename'][master_tiles['geometry'] == c].values[0]
        for c in intersects
    ]
    paths = [
        master_tiles['path'][master_tiles['geometry'] == c].values[0]
        for c in intersects
    ]
    subfolders = [
        master_tiles['subfolder'][master_tiles['geometry'] == c].values[0]
        for c in intersects
    ]

    tilelist = [
        os.path.sep.join([paths[i], subfolders[i], f])
        for i, f in enumerate(fnames)
    ]
    # create a temporary VRT from the tiles
    gdal.BuildVRT('{}_ref.vrt'.format(name), tilelist, resampleAlg='bilinear')
    # print(os.path.sep.join([os.path.abspath(indir), 'tmp_{}.vrt'.format(dname)]))
    return '{}_ref.vrt'.format(name)
Example No. 14
def create_timeseries_mosaic_vrt(list_of_args):
    ts_dir, product, outfiles = list_of_args

    gdal.BuildVRT(
        str(ts_dir.joinpath(f'{product}.Timeseries.vrt')),
        [str(outfile) for outfile in outfiles],
        options=gdal.BuildVRTOptions(srcNodata=0, separate=True)
    )
Example No. 15
def merge_hdf_tiles(tile_list):
    dirname = os.path.dirname(tile_list[0])
    target_name = os.path.join(dirname, "example.vrt")
    gdal.BuildVRT(target_name, tile_list)

    print(target_name)

    return target_name
Example No. 16
def mosaic_to_vrt(ts_dir, product, outfiles):
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    if type(outfiles) == str:
        outfiles = outfiles.replace("'", '').strip('][').split(', ')

    gdal.BuildVRT(opj(ts_dir, '{}.Timeseries.vrt'.format(product)),
                  outfiles,
                  options=vrt_options)
Example No. 17
def make_vrt(data_list, wkt_dst_srs, outputDir, output_vrt_file, outputBounds=None, resolution='average', resampling=gdal.GRA_Bilinear, srcNodata=0, error_threshold=0.125):

    if len(data_list) == 0:
        return 0

    output_tmp_file_list=[os.path.join(outputDir, os.path.basename(file)+".VRT") for file in data_list]

    for file, dst_file in zip(data_list, output_tmp_file_list):
        # Open source dataset and read source SRS
        gdal_data = gdal.Open(file)
        data_proj = gdal_data.GetProjection()
        if data_proj == '':
            d=dict(gdal.Info(file,format='json'))
            in_epsg=int(d['metadata']['GEOLOCATION']['SRS'].rsplit('"EPSG","')[-1].split('"')[0])
            src_srs =  osr.SpatialReference()
            src_srs.ImportFromEPSG(in_epsg)
            src_srs = src_srs.ExportToWkt()
        else:
            src_srs = None

        ## Call AutoCreateWarpedVRT()
        #tmp_ds = gdal.AutoCreateWarpedVRT(gdal_data, src_srs, wkt_dst_srs, resampling, error_threshold)

        ## Create the final warped raster
        #dst_ds = gdal.GetDriverByName('VRT').CreateCopy(dst_file, tmp_ds)

        # Warp to dst_srs
        dst_ds = gdal.Warp(dst_file, gdal_data, resampleAlg=resampling, srcNodata=srcNodata,
                           dstNodata=srcNodata, srcSRS=src_srs, dstSRS=wkt_dst_srs, errorThreshold=error_threshold)
        del dst_ds

    print(os.path.join(outputDir, output_vrt_file))
    if outputBounds:
        vrt_data=gdal.BuildVRT(output_vrt_file, output_tmp_file_list, separate=True, outputBounds=outputBounds,
                               resolution=resolution, srcNodata=srcNodata)
    else:
        vrt_data=gdal.BuildVRT(output_vrt_file, output_tmp_file_list, separate=True,
                               resolution=resolution, srcNodata=srcNodata)


    bands = vrt_data.RasterCount

    # to force gdal to write file
    del vrt_data

    return bands
Example No. 18
    def stack_tile(tile, tmp_ref_dir, tmp_aster_dir, tmp_setsm_dir, out_dir):

        lat, lon = SRTMGL1_naming_to_latlon(tile)
        epsg, utm = latlon_to_UTM(lat, lon)

        outfile = os.path.join(out_dir, utm, tile + '.nc')

        if not os.path.exists(outfile):

            print('Stacking tile: ' + tile + ' in UTM zone ' + utm)

            # reference DEM
            ref_utm_dir = os.path.join(tmp_ref_dir, utm)
            ref_vrt = os.path.join(ref_utm_dir, 'tmp_' + utm + '.vrt')
            ref_list = glob(os.path.join(ref_utm_dir, '**/*.tif'),
                            recursive=True)
            if not os.path.exists(ref_vrt):
                gdal.BuildVRT(ref_vrt, ref_list, resampleAlg='bilinear')

            # DEMs to stack
            flist1 = glob(os.path.join(tmp_aster_dir, '**/*_final.zip'),
                          recursive=True)
            if os.path.exists(tmp_setsm_dir):
                flist2 = glob(os.path.join(tmp_setsm_dir, '**/*.tif'),
                              recursive=True)
            else:
                flist2 = []

            flist = flist1 + flist2

            extent = niceextent_utm_latlontile(tile, utm, res)
            bobformat_extent = [extent[0], extent[2], extent[1], extent[3]]

            print('Nice extent is:')
            print(extent)
            if len(flist) > 0:
                nco = create_mmaster_stack(flist,
                                           extent=bobformat_extent,
                                           epsg=int(epsg),
                                           mst_tiles=ref_vrt,
                                           res=res,
                                           outfile=outfile,
                                           coreg=False,
                                           uncert=True,
                                           clobber=True,
                                           add_ref=True,
                                           add_corr=True,
                                           latlontile_nodata=tile,
                                           filt_mm_corr=False,
                                           l1a_zipped=True,
                                           y0=y0,
                                           tmptag=tile)
                nco.close()
            else:
                print('No DEM intersecting tile found. Skipping...')

        else:
            print('Tile ' + tile + ' already exists.')
Example No. 19
def stitchModisDate(year=2019,doy=1,sds='Lai_500m',timeout=None,\
              tile=['h17v03','h18v03'],verbose=False,\
              product='MCD15A3H'):
    '''
    function called stitchModisDate with arguments:
    
    year
    doy

    keywords/defaults:

        sds      : 'Lai_500m'
        tile     : ['h17v03','h18v03']
        product  : 'MCD15A3H'

    generates a stitched VRT file with the appropriate data,

    returns VRT filename for this dataset.
    
    Options:
    timeout : None
    verbose : False
    
    '''

    kwargs = {
        'product': product,
        'tile': tile,
        'year': year,
        'doys': [doy],
        'sds': [sds]
    }

    data = getModisFiles(verbose=verbose, timeout=timeout, **kwargs)

    ofiles = []

    for sds, sds_v in data.items():
        if verbose:
            print('sds', sds)
        for doy, doy_v in sds_v.items():
            if verbose:
                print('doy', doy)
            # build a VRT
            tiles = doy_v.keys()

            ofile = f"work/stitch_{sds}_{kwargs['year']}_{doy:03d}_{'Tiles_'+'_'.join(tiles)}.vrt"
            if verbose:
                print(f'saving to {ofile}')
            stitch_vrt = gdal.BuildVRT(ofile, list(doy_v.values()))
            del stitch_vrt
            ofiles.append(ofile)

    if len(ofiles):
        return ofiles[0]
    else:
        print(f'error in stitchModisDate: {data}\n{kwargs}')
        return None
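A usage sketch, assuming getModisFiles is available and that the work/ directory the VRT is written to already exists:

# stitch the LAI layer for 2019, day of year 9, over the two default tiles
vrt_file = stitchModisDate(year=2019, doy=9, sds='Lai_500m', verbose=True)
print(vrt_file)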
Example No. 20
def one_subset(supplierId, filename, polygon, tilepath, tifpath, size, sentinel=2):
    """
    Creates one subsetted image from the large sentinel tile

    :param supplierId: supplierId (also the name) of the Sentinel 2 tile (str)
    :param filename: Filename of output image (str)
    :param polygon: Shapely polygon of the desired area of interest
    :param tilepath: Path to Sentinel tiles
    :param tifpath: Path to output images
    :param size: Length of one side of the output image in pixels
    :param sentinel: Sentinel mission number (1 or 2)
    :return: none
    """

    # gdalwarp inputs polygons as a csv file. This code creates that csv file so that we can run gdalwarp
    csvData = [["", "WKT"], [str(1), polygon.wkt]]
    csvname = './%s.csv' % filename[:-4]
    with open(csvname, 'w') as csvFile:
        writer = csv.writer(csvFile)
        writer.writerows(csvData)

    # TODO: Change this in verbose mode
    #gdal.PushErrorHandler('CPLQuietErrorHandler')
    os.environ["PROJ_LIB"]="C:/Users/danie/Anaconda3/envs/oilrig/Library/share/proj"
    # Finds all the image bands as jp2 files and sorts them alphabetically to retain correct order
    dir = os.path.join(tilepath, supplierId)
    dir = os.path.abspath(dir)
    dir = dir.replace('\\', '/')

    if sentinel == 1:
        file: ZipFile = ZipFile(dir + '.zip', 'r')
        fulllist = sorted([name for name in file.namelist() if name.endswith('.tiff') or name.endswith('.dat')])
        for tif in fulllist:
            if not os.path.exists(dir+".SAFE"):
                file.extract(tif,path=tilepath)
        fulllist = [os.path.join(tilepath,tif).replace('\\', '/') for tif in fulllist]
    else:
        fulllist = sorted(glob.glob(dir + '/*.jp2'))
    # Builds VRT dataset to speed up conversion

    vrtname = './%s.vrt' % filename[:-4]
    buildvrt_options = gdal.BuildVRTOptions(separate=True, xRes=10, yRes=10)
    vrt_dataset = gdal.BuildVRT(destName=vrtname, srcDSOrSrcDSTab=fulllist, options=buildvrt_options)

    try:
        # Subsets image using gdalwarp
        warp_output = os.path.join(tifpath, filename + ".tif")
        warp_options = gdal.WarpOptions(cropToCutline=True, cutlineDSName=csvname, srcSRS="EPSG:4326", dstSRS="EPSG:4326", width=size,
                                        height=size,multithread=True)
        gdal.Warp(warp_output, vrt_dataset, options=warp_options)
    except SystemError as e:
        os.remove(csvname)
        return

    # removes the temporary csv file
    os.remove(csvname)
    return
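A usage sketch for one_subset with placeholder paths, a made-up Sentinel-2 granule name and a small WGS84 bounding box built with shapely:

from shapely.geometry import box

polygon = box(4.30, 51.90, 4.35, 51.95)  # lon/lat bounds of the area of interest
one_subset('S2A_MSIL1C_20200101T105441_N0208_R051_T31UET_20200101T121106',
           'site_001.tif', polygon, '/data/sentinel_tiles', '/data/chips',
           size=512, sentinel=2)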
Example No. 21
def make_vrt(data_list, wkt_dst_srs, outputDir, output_vrt_file, outputBounds=None, resolution='average', resampling=gdal.GRA_Bilinear, srcNodata=0, error_threshold=0.125):

    if len(data_list) == 0:
        return 0

    output_tmp_file_list=[os.path.join(outputDir, os.path.basename(file)+".VRT") for file in data_list]
    output_tmp_file_list2=[]
    for file, dst_file in zip(data_list, output_tmp_file_list):
        # Open source dataset and read source SRS
        gdal_data = gdal.Open(file)
        data_proj = gdal_data.GetProjection()
        if data_proj == '':
            print("Data without geoprojection info. DISCARDED!")
        else:
            # Warp to dst_srs
            dst_ds = gdal.Warp(dst_file, gdal_data, resampleAlg=resampling, srcNodata=srcNodata,
                               dstNodata=srcNodata, dstSRS=wkt_dst_srs, errorThreshold=error_threshold)
            del dst_ds
            output_tmp_file_list2.append(dst_file)

    print(os.path.join(outputDir, output_vrt_file))
    if outputBounds:
        vrt_data=gdal.BuildVRT(output_vrt_file, output_tmp_file_list2, separate=True, outputBounds=outputBounds,
                               resolution=resolution, srcNodata=srcNodata)
    else:
        vrt_data=gdal.BuildVRT(output_vrt_file, output_tmp_file_list2, separate=True,
                               resolution=resolution, srcNodata=srcNodata)


    bands = vrt_data.RasterCount
    if bands > 0:
        del vrt_data
        vrt_data = gdal.Open(output_vrt_file)
        xml = et.ElementTree(file=output_vrt_file)
        for band_num in range(1, bands+1):
            filename = xml.xpath('//VRTRasterBand[@band=%d]//SourceFilename/text()' % band_num)[0]
            band = vrt_data.GetRasterBand(band_num)
            band.SetDescription(filename)

    # to force gdal to write file
    del vrt_data

    return bands
Example No. 22
    def stack_bands(self, files, outpath):
        """
        Stacks a file list into a single file with multiple bands
        """
        outvrt = '/vsimem/stacked.vrt'
        outds = gdal.BuildVRT(outvrt, files, separate=True)
        outds = gdal.Translate(outpath, outds)
        outds = None

        return os.path.exists(outpath)
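A usage sketch, assuming an instance of the surrounding class (called processor here) and placeholder single-band GeoTIFFs:

band_files = ['B02.tif', 'B03.tif', 'B04.tif']
ok = processor.stack_bands(band_files, '/tmp/stacked.tif')  # True if the stacked file was written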
Example No. 23
def make_vrt(in_folder, patterns, out_name, opts=None):
    import gdal
    import glob
    from geotools.utils import glob_multipattern

    files = glob_multipattern(in_folder, patterns)
    vrt = gdal.BuildVRT(out_name, files, separate=False)
    vrt = None

    return None
Example No. 24
    def __build_vrts(self):
        """
        Generate VRT files to bind together the individual geotiffs so that we can reference them all together
        in xarrays

        :return:
        """
        # define universal options for the VRT files
        vrt_options = gdal.BuildVRTOptions(separate=True)

        for param in self.parameters:

            for var in ['', '_unc']:

                # define the name for the vrt file
                vrt_filename = f"{self.data_directory}/{param}{var}.vrt"

                # check to see whether the file already exists
                if not os.path.isfile(vrt_filename):

                    # Get an ordered list of all the filenames
                    # Todo test that the system is robust to these values not being sorted.
                    filenames = sorted(
                        glob.glob(
                            f'{self.data_directory}/{param}_A???????{var}.tif')
                    )

                    # Build the file
                    gd = gdal.BuildVRT(vrt_filename,
                                       filenames,
                                       options=vrt_options)
                    gd = None  # flush

                else:
                    # File exists already
                    pass

                # As a second step, produce warped VRTs
                vrt_filename_warp = f"{self.data_directory}/{param}{var}_warped.vrt"

                # check to see whether the file already exists
                if not os.path.isfile(vrt_filename_warp):

                    # convert to lat/lon
                    gd = gdal.Warp(
                        srcDSOrSrcDSTab=vrt_filename,
                        destNameOrDestDS=vrt_filename_warp,
                        format='VRT',
                        dstSRS='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
                    )  # remove this to use native coordinates
                    gd = None  # flush the file

                else:
                    # File exists already
                    pass
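The warp-to-VRT step used above, shown in isolation with placeholder filenames; format='VRT' keeps the reprojection as a lightweight wrapper that is only resampled when the data are read:

from osgeo import gdal

gdal.Warp('param_warped.vrt', 'param.vrt', format='VRT',
          dstSRS='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs')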
Example No. 25
def JP2totif(L2A10dirs, outdir):
    fold_10mdir = search_file(L2A10dirs, ".jp2")
    file10dir, firstname10 = get_10tif(fold_10mdir)
    filevrt_10m = os.path.join(outdir, firstname10 + ".vrt")
    gdal.BuildVRT(filevrt_10m, file10dir, separate=True)
    # copy the VRT into a GeoTIFF
    filename = os.path.split(firstname10)[1]
    driver = gdal.GetDriverByName("GTiff")
    file_10m = os.path.join(outdir, filename + "_layer.tif")
    outds10 = driver.CreateCopy(file_10m, gdal.Open(filevrt_10m))
    outds10 = None
Example No. 26
def mt_layover(filelist, outfile, extent):
    '''
    This function is usually used in the time-series workflow of OST. It
    combines a list of filepaths to layover/shadow masks into a single
    multi-temporal layover/shadow mask.

    :param filelist - list of files
    :param outfile - path to the output file
    :param extent - extent to which the output mask is clipped
    :return path to the multi-temporal layover/shadow mask file generated
    '''

    # get the start time for Info on processing time
    start = time.time()

    with TemporaryDirectory() as temp:
        # create path to out file
        ls_layer = opj(temp, os.path.basename(outfile))

        # create a vrt-stack out of the layover/shadow masks
        logger.debug('INFO: Creating common Layover/Shadow Mask')
        vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
        gdal.BuildVRT(opj(temp, 'ls.vrt'), filelist, options=vrt_options)

        with rasterio.open(opj(temp, 'ls.vrt')) as src:

            # get metadata
            meta = src.meta
            # update driver and reduced band count
            meta.update(driver='GTiff', count=1, dtype='uint8')

            # create outfiles
            with rasterio.open(ls_layer, 'w', **meta) as out_min:

                # loop through blocks
                for _, window in src.block_windows(1):

                    # read array with all bands
                    stack = src.read(range(1, src.count + 1), window=window)

                    # get stats
                    arr_max = np.nanmax(stack, axis=0)
                    arr = arr_max / arr_max

                    out_min.write(np.uint8(arr), window=window, indexes=1)

        ras.mask_by_shape(ls_layer,
                          outfile,
                          extent,
                          to_db=False,
                          datatype='uint8',
                          rescale=False,
                          ndv=0)
        # os.remove(ls_layer)
        h.timer(start)
    return outfile
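A usage sketch for mt_layover with placeholder inputs; the exact type expected for extent depends on ras.mask_by_shape (here assumed to be a vector file of the area of interest):

combined = mt_layover(['scene1_ls.tif', 'scene2_ls.tif'],
                      '/tmp/ls_combined.tif', '/data/aoi.gpkg')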
Example No. 27
def main(argv):
    # Reference files
    for f in os.listdir(SRC_FOLDER):
        if 'img' in f:
            bands[f.split("_")[3]].append(SRC_FOLDER + f)

    vrt_options = gdal.BuildVRTOptions(resolution='lowest')
    for b in bands:
        if (len(bands[b]) > 0):
            gdal.BuildVRT(SRC_FOLDER + 'sentinel2_' + b + '.vrt',
                          bands[b],
                          options=vrt_options)
Example No. 28
def mosaic_images(folderpath):

    imgs_list = glob.glob(os.path.join(folderpath, "*.tif"))
    mosvrt = '/vsimem/mosaic.vrt'  # /vsimem is a special in-memory virtual "directory"
    outtif = os.path.join(folderpath, 'clipped-mos.tif')
    outds = gdal.BuildVRT(mosvrt, imgs_list)
    gdal.Translate(outtif, outds)

    # Close datasets
    outds = None

    print("---------------------------------------------")
    print("Made mosaic")
Example No. 29
def main(argv):
    # Reference files
    src_dss = [f for f in os.listdir(SRC_FOLDER) if ".img" in f]

    for f in src_dss:
        bands[f.split(".")[0]].append(SRC_FOLDER + f)

    vrt_options = gdal.BuildVRTOptions(resolution='lowest')
    for b in bands:
        if (len(bands[b]) > 0):
            gdal.BuildVRT(SRC_FOLDER + 'sentinel1_' + b + '.vrt',
                          bands[b],
                          options=vrt_options)
Example No. 30
def create_tscan_vrt(timescan_dir, config_file):

    # load ard parameters
    if isinstance(config_file, dict):
        config_dict = config_file
    else:
        with open(config_file, 'r') as f:
            config_dict = json.load(f)

    ard_tscan = config_dict['processing']['time-scan_ARD']

    # loop through all potential products
    # a products list
    product_list = [
        'bs.HH', 'bs.VV', 'bs.HV', 'bs.VH', 'coh.VV', 'coh.VH', 'coh.HH',
        'coh.HV', 'pol.Entropy', 'pol.Anisotropy', 'pol.Alpha'
    ]

    metrics = ard_tscan['metrics']
    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])

    i, outfiles = 0, []
    iteration = itertools.product(product_list, metrics)
    for product, metric in iteration:

        # get file and add number for outfile
        infile = timescan_dir.joinpath(f'{product}.{metric}.tif')

        # if there is no file, skip this iteration
        if not infile.exists():
            continue

        i += 1
        # create namespace for output file and add to list for vrt creation
        outfile = timescan_dir.joinpath(f'{i:02d}.{product}.{metric}.tif')
        outfiles.append(str(outfile))

        # otherwise rename the file
        infile.replace(outfile)

    # build vrt
    gdal.BuildVRT(str(timescan_dir.joinpath('Timescan.vrt')),
                  outfiles,
                  options=gdal.BuildVRTOptions(srcNodata=0, separate=True))
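A usage sketch; timescan_dir must be a pathlib.Path, and config_file may be either a dict or a path to an OST processing JSON file (placeholder paths):

from pathlib import Path

create_tscan_vrt(Path('/data/project/Timescan'), '/data/project/processing.json')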