Code example #1
import itertools
import os
import shutil
from os.path import join as opj

from osgeo import gdal


def create_tscan_vrt(timescan_dir, ard_params):
    # loop through all potential products
    # a products list
    product_list = [
        'TC.HH', 'TC.VV', 'TC.HV', 'TC.VH', 'BS.HH', 'BS.VV', 'BS.HV', 'BS.VH',
        'coh.VV', 'coh.VH', 'coh.HH', 'coh.HV', 'pol.Entropy',
        'pol.Anisotropy', 'pol.Alpha'
    ]

    i, outfiles = 0, []
    iteration = itertools.product(product_list, ard_params['metrics'])
    for product, metric in iteration:

        # get file and add number for outfile
        infile = opj(timescan_dir, '{}.{}.tif'.format(product, metric))

        # if there is no file, skip this iteration
        if not os.path.isfile(infile):
            continue

        # else
        i += 1
        outfile = opj(timescan_dir, '{}.{}.{}.tif'.format(i, product, metric))
        outfiles.append(outfile)
        # otherwise rename the file
        shutil.move(infile, outfile)

    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(timescan_dir, 'Timescan.vrt'),
                  outfiles,
                  options=vrt_options)
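
Note: gdal.BuildVRT returns a Dataset object, and GDAL only guarantees the
VRT is flushed to disk once that object is released; the call above discards
the return value, which usually works but is not contractual. A defensive
general-purpose pattern (a sketch with placeholder file names, not part of
the original code):

from osgeo import gdal

vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
vrt = gdal.BuildVRT('Timescan.vrt', ['01.tif', '02.tif'], options=vrt_options)
vrt = None  # dereference to flush the VRT to disk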
Code example #2
import os

from osgeo import gdal


def make_VRT(file, directory):
    sub_directories = [
        os.path.join(directory, name, file) for name in os.listdir(directory)
        if os.path.isdir(os.path.join(directory, name))
    ]
    vrt_options = gdal.BuildVRTOptions(VRTNodata="nan")
    gdal.BuildVRT(os.path.join(directory, f"{file[:-4]}.vrt"), sub_directories,
                  options=vrt_options)
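
A hypothetical call, assuming a directory whose immediate subfolders each
contain a raster named B04.tif (all names here are placeholders):

make_VRT('B04.tif', '/data/tiles')  # writes /data/tiles/B04.vrt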
Code example #3
def hdf2tif(hdf, reproject=True):
    """
    Converts hdf files to tiff files

    :param hdf: HDF file to be processed
    :param reproject: Will be reprojected by default
    :return: path to the output GeoTIFF
    """

    dataset = gdal.Open(hdf, gdal.GA_ReadOnly)
    subdatasets = dataset.GetSubDatasets()
    data_dir = create_output_directory(hdf)
    convert_to_vrt(subdatasets, data_dir)
    vrt_options = gdal.BuildVRTOptions(separate=True)
    vrt_list = list_files(data_dir, 'vrt')
    vrt_output = hdf.replace('.hdf', '.vrt')
    gdal.BuildVRT(vrt_output, sorted(vrt_list), options=vrt_options)
    if reproject:
        proj = "+proj=sinu +R=6371007.181 +nadgrids=@null +wktext"
        warp_options = gdal.WarpOptions(srcSRS=proj, dstSRS="EPSG:3857")
    else:
        warp_options = ""
    output_tiff = vrt_output.replace(".vrt", "_reprojected.tif")

    if not os.path.exists(output_tiff):
        gdal.Warp(output_tiff, vrt_output, options=warp_options)

    clear_temp_files(data_dir, vrt_output)

    return os.path.join(DIRECTORY, output_tiff)
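
create_output_directory, convert_to_vrt, list_files and clear_temp_files are
project helpers that are not shown here. For orientation only, a minimal
stand-in for list_files consistent with how it is called above (an
assumption, not the project's actual code):

import glob
import os

def list_files(directory, extension):
    # collect all files in 'directory' with the given extension
    return glob.glob(os.path.join(directory, '*.{}'.format(extension)))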
Code example #4
def mosaic_timescan(burst_inventory, processing_dir, temp_dir, ard_parameters):

    product_list = [
        'BS.HH', 'BS.VV', 'BS.HV', 'BS.VH', 'coh.VV', 'coh.VH', 'Alpha',
        'Entropy', 'Anisotropy'
    ]
    metrics = ard_parameters['metrics']

    os.makedirs(opj(processing_dir, 'Mosaic', 'Timescan'), exist_ok=True)
    i, list_of_files = 0, []
    for product in itertools.product(product_list, metrics):  # ****

        # join matching rasters into a space-separated string for otbcli
        filelist = ' '.join(
            glob.glob(
                opj(processing_dir, '*', 'Timescan',
                    '*{}.{}.tif'.format(product[0], product[1]))))

        if filelist:
            i += 1
            outfile = opj(processing_dir, 'Mosaic', 'Timescan',
                          '{}.{}.{}.tif'.format(i, product[0], product[1]))
            command = ('otbcli_Mosaic -il {} -comp.feather large -tmpdir {} '
                       '-progress 1 -out {} float'.format(
                           filelist, temp_dir, outfile))
            os.system(command)
            list_of_files.append(outfile)

    # create vrt
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(processing_dir, 'Mosaic', 'Timescan', 'Timescan.vrt'),
                  list_of_files,
                  options=vrt_options)
Code example #5
from osgeo import gdal


def create_timeseries_mosaic_vrt(list_of_args):
    ts_dir, product, outfiles = list_of_args

    gdal.BuildVRT(
        str(ts_dir.joinpath(f'{product}.Timeseries.vrt')),
        [str(outfile) for outfile in outfiles],
        options=gdal.BuildVRTOptions(srcNodata=0, separate=True)
    )
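
The single-tuple signature suggests this function is meant to be mapped over
a pool of workers. A hypothetical direct invocation (all paths are
placeholders):

from pathlib import Path

args = (
    Path('/data/track_018/Timeseries'),                   # ts_dir
    'bs.VV',                                              # product
    [Path('/data/track_018/Timeseries/01.bs.VV.tif')],    # outfiles
)
create_timeseries_mosaic_vrt(args)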
Code example #6
from os.path import join as opj

from osgeo import gdal


def mosaic_to_vrt(ts_dir, product, outfiles):
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    # a stringified list may arrive via a subprocess or config; parse it back
    if isinstance(outfiles, str):
        outfiles = outfiles.replace("'", '').strip('][').split(', ')

    gdal.BuildVRT(opj(ts_dir, '{}.Timeseries.vrt'.format(product)),
                  outfiles,
                  options=vrt_options)
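
The isinstance branch recovers a Python list from its str() representation,
which happens when the file list arrives through a subprocess call or a
config file rather than as a live object. The round-trip it undoes:

outfiles = str(['a.tif', 'b.tif'])                          # "['a.tif', 'b.tif']"
parsed = outfiles.replace("'", '').strip('][').split(', ')  # ['a.tif', 'b.tif']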
Code example #7
def one_subset(supplierId, filename, polygon, tilepath, tifpath, size, sentinel=2):
    """
    Creates one subsetted image from the large Sentinel tile

    :param supplierId: supplierId (also the name) of the Sentinel 2 tile (str)
    :param filename: Filename of output image (str)
    :param polygon: Shapely polygon of the desired subset area
    :param tilepath: Path to Sentinel tiles
    :param tifpath: Path to output images
    :param size: Length of one side of the output image in pixels
    :param sentinel: Sentinel mission number (1 or 2)
    :return: None
    """

    # gdalwarp takes cutline polygons from an OGR-readable datasource such as
    # a CSV file; this code creates that CSV so that we can run gdalwarp
    csvData = [["", "WKT"], [str(1), polygon.wkt]]
    csvname = './%s.csv' % filename[:-4]
    with open(csvname, 'w') as csvFile:
        writer = csv.writer(csvFile)
        writer.writerows(csvData)

    # TODO: Change this in verbose mode
    #gdal.PushErrorHandler('CPLQuietErrorHandler')
    os.environ["PROJ_LIB"]="C:/Users/danie/Anaconda3/envs/oilrig/Library/share/proj"
    # Find all the image bands as jp2 files and sort them alphabetically
    # to retain the correct band order
    tile_dir = os.path.join(tilepath, supplierId)
    tile_dir = os.path.abspath(tile_dir)
    tile_dir = tile_dir.replace('\\', '/')

    if sentinel == 1:
        file: ZipFile = ZipFile(tile_dir + '.zip', 'r')
        fulllist = sorted([name for name in file.namelist()
                           if name.endswith('.tiff') or name.endswith('.dat')])
        for tif in fulllist:
            if not os.path.exists(tile_dir + ".SAFE"):
                file.extract(tif, path=tilepath)
        fulllist = [os.path.join(tilepath, tif).replace('\\', '/')
                    for tif in fulllist]
    else:
        fulllist = sorted(glob.glob(tile_dir + '/*.jp2'))
    # Builds VRT dataset to speed up conversion

    vrtname = './%s.vrt' % filename[:-4]
    buildvrt_options = gdal.BuildVRTOptions(separate=True, xRes=10, yRes=10)
    vrt_dataset = gdal.BuildVRT(destName=vrtname, srcDSOrSrcDSTab=fulllist, options=buildvrt_options)

    try:
        # Subsets image using gdalwarp
        warp_output = os.path.join(tifpath, filename + ".tif")
        warp_options = gdal.WarpOptions(cropToCutline=True, cutlineDSName=csvname,
                                        srcSRS="EPSG:4326", dstSRS="EPSG:4326",
                                        width=size, height=size, multithread=True)
        gdal.Warp(warp_output, vrt_dataset, options=warp_options)
    except SystemError:
        os.remove(csvname)
        return

    # removes the temporary csv file
    os.remove(csvname)
    return
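
gdalwarp accepts any OGR-readable datasource as a cutline; the CSV written
above works because OGR's CSV driver exposes the WKT column as geometry. To
see the WKT that ends up in that file (the polygon coordinates are made up):

from shapely.geometry import Polygon

polygon = Polygon([(30, 10), (40, 40), (20, 40), (10, 20)])
print(polygon.wkt)  # POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))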
Code example #8
    def __build_vrts(self):
        """
        Generate VRT files to bind together the individual geotiffs so that we can reference them all together
        in xarrays

        :return:
        """
        # define universal options for the VRT files
        vrt_options = gdal.BuildVRTOptions(separate=True)

        for param in self.parameters:

            for var in ['', '_unc']:

                # define the name for the vrt file
                vrt_filename = f"{self.data_directory}/{param}{var}.vrt"

                # check to see whether the file already exists
                if not os.path.isfile(vrt_filename):

                    # Get an ordered list of all the filenames
                    # TODO: test that the system is robust to these values not being sorted.
                    filenames = sorted(
                        glob.glob(
                            f'{self.data_directory}/{param}_A???????{var}.tif')
                    )

                    # Build the file
                    gd = gdal.BuildVRT(vrt_filename,
                                       filenames,
                                       options=vrt_options)
                    gd = None  # flush

                else:
                    # File exists already
                    pass

                # As a second step, produce warped VRTs
                vrt_filename_warp = f"{self.data_directory}/{param}{var}_warped.vrt"

                # check to see whether the file already exists
                if not os.path.isfile(vrt_filename_warp):

                    # convert to lat/lon
                    gd = gdal.Warp(
                        srcDSOrSrcDSTab=vrt_filename,
                        destNameOrDestDS=vrt_filename_warp,
                        format='VRT',
                        dstSRS='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs'
                    )  # remove this to use native coordinates
                    gd = None  # flush the file

                else:
                    # File exists already
                    pass
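
The docstring mentions binding the VRTs into xarrays; one common way to open
the finished warped VRT lazily is via rioxarray (an assumption about the
surrounding tooling, which this class does not show):

import rioxarray

# lazy, chunked read of the warped stack; the path is a placeholder
da = rioxarray.open_rasterio('/data/param_warped.vrt', chunks=True)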
Code example #9
def mt_layover(filelist, outfile, extent):
    '''
    This function is usually used in the time-series workflow of OST. It
    combines a list of layover/shadow mask filepaths into a common mask.

    :param filelist - list of layover/shadow mask files
    :param outfile - path of the output file
    :param extent - extent vector file to mask by
    :return path to the multi-temporal layover/shadow mask file generated
    '''

    # get the start time for Info on processing time
    start = time.time()

    with TemporaryDirectory() as temp:
        # create path to out file
        ls_layer = opj(temp, os.path.basename(outfile))

        # create a vrt stack out of the layover/shadow masks
        logger.debug('INFO: Creating common Layover/Shadow Mask')
        vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
        gdal.BuildVRT(opj(temp, 'ls.vrt'), filelist, options=vrt_options)

        with rasterio.open(opj(temp, 'ls.vrt')) as src:

            # get metadata
            meta = src.meta
            # update driver and reduced band count
            meta.update(driver='GTiff', count=1, dtype='uint8')

            # create outfiles
            with rasterio.open(ls_layer, 'w', **meta) as out_min:

                # loop through blocks
                for _, window in src.block_windows(1):

                    # read array with all bands
                    stack = src.read(range(1, src.count + 1), window=window)

                    # get stats: any pixel flagged in at least one scene
                    # becomes 1 (x/x is 1 for non-zero x, NaN for 0/0)
                    arr_max = np.nanmax(stack, axis=0)
                    arr = arr_max / arr_max

                    out_min.write(np.uint8(arr), window=window, indexes=1)

        ras.mask_by_shape(ls_layer,
                          outfile,
                          extent,
                          to_db=False,
                          datatype='uint8',
                          rescale=False,
                          ndv=0)
        # os.remove(ls_layer)
        h.timer(start)
    return outfile
Code example #10
def main(argv):
    # Reference files
    for f in os.listdir(SRC_FOLDER):
        if 'img' in f:
            bands[f.split("_")[3]].append(SRC_FOLDER + f)

    vrt_options = gdal.BuildVRTOptions(resolution='lowest')
    for b in bands:
        if (len(bands[b]) > 0):
            gdal.BuildVRT(SRC_FOLDER + 'sentinel2_' + b + '.vrt',
                          bands[b],
                          options=vrt_options)
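
Both this main and the Sentinel-1 variant below append to a bands mapping
that the snippets never define; for them to run, something like the
following module-level setup is assumed (names and path are placeholders):

from collections import defaultdict

SRC_FOLDER = '/data/imagery/'   # assumed; must end with a path separator
bands = defaultdict(list)       # band name -> list of source rasters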
Code example #11
def main(argv):
    # Reference files
    src_dss = [f for f in os.listdir(SRC_FOLDER) if ".img" in f]

    for f in src_dss:
        bands[f.split(".")[0]].append(SRC_FOLDER + f)

    vrt_options = gdal.BuildVRTOptions(resolution='lowest')
    for b in bands:
        if (len(bands[b]) > 0):
            gdal.BuildVRT(SRC_FOLDER + 'sentinel1_' + b + '.vrt',
                          bands[b],
                          options=vrt_options)
Code example #12
File: burst.py Project: cuulee/OpenSarToolkit
def mosaic_timeseries(burst_inventory, processing_dir, temp_dir,
                      ard_parameters):

    product_list = [
        'BS.HH', 'BS.VV', 'BS.HV', 'BS.VH', 'coh.VV', 'coh.VH',
        'ha_alpha.Alpha', 'ha_alpha.Entropy', 'ha_alpha.Anisotropy'
    ]

    os.makedirs(opj(processing_dir, 'Mosaic', 'Timeseries'), exist_ok=True)

    # determine the minimum number of timesteps per burst
    # (they should actually all be the same)
    length = 99999
    for burst in burst_inventory.bid.unique():

        length_of_burst = len(burst_inventory[burst_inventory.bid == burst])

        if length_of_burst < length:
            length = length_of_burst

    # now we loop through each timestep and product
    for product in product_list:  # ****
        list_of_files = []
        for i in range(length):

            filelist = glob.glob(
                opj(processing_dir, '*_IW*_*', 'Timeseries',
                    '{}.*{}.tif'.format(i + 1, product)))

            if filelist:
                print(' INFO: Creating timeseries mosaics for {}.'.format(
                    product))

                out_dir = opj(processing_dir, 'Mosaic', 'Timeseries')
                os.makedirs(out_dir, exist_ok=True)
                outfile = opj(out_dir, '{}.{}.tif'.format(i + 1, product))
                list_of_files.append(outfile)
                filelist = ' '.join(filelist)

                # the command
                command = ('otbcli_Mosaic -il {} -comp.feather large '
                           '-tmpdir {} -progress 1 -out {} float'.format(
                               filelist, temp_dir, outfile))
                os.system(command)

        # create vrt
        if list_of_files:
            vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
            gdal.BuildVRT(opj(out_dir, '{}.Timeseries.vrt'.format(product)),
                          list_of_files,
                          options=vrt_options)
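
The minimum-length loop above can be written more compactly with the min()
builtin over a generator; a behaviour-equivalent sketch for a non-empty
inventory:

length = min(
    len(burst_inventory[burst_inventory.bid == burst])
    for burst in burst_inventory.bid.unique()
)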
Code example #13
File: raster.py Project: ywg0212/OpenSarToolkit
def create_tscan_vrt(timescan_dir, config_file):

    # load ard parameters
    if isinstance(config_file, dict):
        config_dict = config_file
    else:
        with open(config_file, 'r') as file:
            config_dict = json.load(file)

    ard_tscan = config_dict['processing']['time-scan_ARD']

    # loop through all potential products
    # a products list
    product_list = [
        'bs.HH', 'bs.VV', 'bs.HV', 'bs.VH', 'coh.VV', 'coh.VH', 'coh.HH',
        'coh.HV', 'pol.Entropy', 'pol.Anisotropy', 'pol.Alpha'
    ]

    metrics = ard_tscan['metrics']
    if 'percentiles' in metrics:
        metrics.remove('percentiles')
        metrics.extend(['p95', 'p5'])

    if 'harmonics' in metrics:
        metrics.remove('harmonics')
        metrics.extend(['amplitude', 'phase', 'residuals'])

    i, outfiles = 0, []
    iteration = itertools.product(product_list, metrics)
    for product, metric in iteration:

        # get file and add number for outfile
        infile = timescan_dir.joinpath(f'{product}.{metric}.tif')

        # if there is no file, skip this iteration
        if not infile.exists():
            continue

        i += 1
        # create namespace for output file and add to list for vrt creation
        outfile = timescan_dir.joinpath(f'{i:02d}.{product}.{metric}.tif')
        outfiles.append(str(outfile))

        # otherwise rename the file
        infile.replace(outfile)

    # build vrt
    gdal.BuildVRT(str(timescan_dir.joinpath('Timescan.vrt')),
                  outfiles,
                  options=gdal.BuildVRTOptions(srcNodata=0, separate=True))
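
The f'{i:02d}' prefix zero-pads the index so that lexical file sorting
matches numeric band order for up to 99 bands, which the plain '{}' prefix
in the older variant (code example #1) does not guarantee:

print(sorted(['10.bs.VV.avg.tif', '2.bs.VV.max.tif']))   # '10...' before '2...'
print(sorted(['10.bs.VV.avg.tif', '02.bs.VV.max.tif']))  # '02...' before '10...'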
Code example #14
def mt_extent_old(list_of_scenes, config_file):

    with open(config_file) as file:
        config_dict = json.load(file)
        temp_dir = Path(config_dict['temp_dir'])
        aoi = config_dict['aoi']

    # get track/burst dir from first scene
    target_dir = Path(list_of_scenes[0]).parent.parent.parent
    out_file = target_dir.joinpath(f'{target_dir.name}.extent.gpkg')

    logger.info(f'Creating common extent mask for track {target_dir.name}.')
    # get out directory
    out_dir = out_file.parent

    temp_extent = out_dir.joinpath('extent.vrt')
    # build vrt stack from all scenes
    gdal.BuildVRT(
        str(temp_extent),
        list_of_scenes,
        options=gdal.BuildVRTOptions(srcNodata=0, separate=True)
    )

    with TemporaryDirectory(prefix=f'{temp_dir}/') as temp:

        # create namespace for temp file
        temp = Path(temp)
        image_bounds = temp.joinpath(out_file.name)
        exterior = temp.joinpath(out_file.name + '_ext')

        # create outline
        ras.outline(temp_extent, image_bounds, 0, False)

        # create exterior ring and write out
        vec.exterior(image_bounds, exterior, -0.0018)

        # intersect with aoi
        if config_dict['processing']['mosaic']['cut_to_aoi']:
            try:
                vec.aoi_intersection(aoi, exterior, out_file)
            except ValueError:
                shutil.move(exterior, out_file)
        else:
            shutil.move(exterior, out_file)

    return target_dir.name, list_of_scenes, out_file
Code example #15
def mt_extent(list_of_scenes, out_file, temp_dir, buffer=None):
    out_dir = os.path.dirname(out_file)
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)

    # build vrt stack from all scenes
    gdal.BuildVRT(opj(out_dir, 'extent.vrt'),
                  list_of_scenes,
                  options=vrt_options)

    start = time.time()

    outline_file = opj(temp_dir, os.path.basename(out_file))
    ras.outline(opj(out_dir, 'extent.vrt'), outline_file, 0, False)

    vec.exterior(outline_file, out_file, buffer)
    h.delete_shapefile(outline_file)

    os.remove(opj(out_dir, 'extent.vrt'))
    h.timer(start)
Code example #16
File: hdf2tif.py Project: OpenGeoscience/nex
def hdf2tif(hdf):
    """
    Converts hdf files to tiff files

    :param hdf: HDF file to be processed
    :return: None
    """

    dataset = gdal.Open(hdf, gdal.GA_ReadOnly)
    subdatasets = dataset.GetSubDatasets()
    data_dir = create_output_directory(hdf)
    convert_to_vrt(subdatasets, data_dir)
    vrt_options = gdal.BuildVRTOptions(separate=True)
    vrt_list = list_files(data_dir, 'vrt')
    vrt_output = hdf.replace('.hdf', '.vrt')
    gdal.BuildVRT(vrt_output, sorted(vrt_list), options=vrt_options)
    proj = "+proj=sinu +R=6371007.181 +nadgrids=@null +wktext"
    warp_options = gdal.WarpOptions(srcSRS=proj, dstSRS="EPSG:3857")
    gdal.Warp(vrt_output.replace(".vrt", "_ndvi_reprojected.tif"),
              vrt_output,
              options=warp_options)
Code example #17
def mt_extent(list_of_scenes, out_file, buffer=None):
    out_dir = os.path.dirname(out_file)
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)

    # build vrt stack from all scenes
    gdal.BuildVRT(opj(out_dir, 'extent.vrt'),
                  list_of_scenes,
                  options=vrt_options)

    logger.debug('INFO: Creating shapefile of common extent.')
    start = time.time()
    with TemporaryDirectory() as temp:
        outline_file = opj(temp, os.path.basename(out_file))
        ras.outline(opj(out_dir, 'extent.vrt'), outline_file, 0, False)

        vec.exterior(outline_file, out_file, buffer)
        h.delete_shapefile(outline_file)

        os.remove(opj(out_dir, 'extent.vrt'))
        h.timer(start)
    return out_file
Code example #18
File: raster.py Project: ywg0212/OpenSarToolkit
def image_bounds(data_dir):
    """Function to create a polygon of image boundary

    This function for all files within a dimap data directory

    :param data_dir:
    :return:
    """
    filelist = []
    for file in data_dir.glob('*img'):
        filelist.append(str(file))

    # build a vrt stack from all scenes (once, over the full file list)
    temp_extent = data_dir.joinpath(f'{data_dir.name}_bounds.vrt')
    gdal.BuildVRT(str(temp_extent),
                  filelist,
                  options=gdal.BuildVRTOptions(srcNodata=0, separate=True))

    file_id = '_'.join(data_dir.name.split('_')[:2])
    outline(temp_extent, data_dir.joinpath(f'{file_id}_bounds.json'))
    data_dir.joinpath(f'{data_dir.name}_bounds.vrt').unlink()
Code example #19
File: gdal_mosaic.py Project: derek-olson/Py-Scripts
def gdal_mosaic(raster_in_path, vrt_outname, raster_out_name):
    driver = gdal.GetDriverByName("VRT")

    vrt_options = gdal.BuildVRTOptions(resampleAlg='bilinear', addAlpha=False)
    vrt = gdal.BuildVRT(raster_in_path + vrt_outname,
                        glob.glob(raster_in_path + '*de.img'),
                        options=vrt_options)

    gdal.Info(vrt)

    driver.CreateCopy(raster_in_path + vrt_outname, vrt)
    gdalTranslate = r'C:\OSGeo4W64\bin\gdal_translate.exe'
    vrt = raster_in_path + vrt_outname
    dst = raster_in_path + raster_out_name

    #cmd = "-ot int16 -outsize 30 30"

    def youCanQuoteMe(item):
        return "\"" + item + "\""

    fullCmd = ' '.join([gdalTranslate, youCanQuoteMe(vrt), youCanQuoteMe(dst)])

    subprocess.call(fullCmd)
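
Shelling out to a hard-coded gdal_translate.exe is brittle; the same copy
can be done in-process with the GDAL Python bindings. A sketch reusing the
vrt and dst paths built above:

from osgeo import gdal

ds = gdal.Translate(dst, vrt)
ds = None  # dereference to flush the output to disk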
Code example #20
import os

from osgeo import gdal


def buildVRT():

    print('➡️  Beginning to create VRT')

    for r, d, f in os.walk('./masked'):

        pieceList = []
        for file in f:
            if file.split('.')[-1] != 'tif':
                print('× Skipping non-TIFF file {}'.format(file))

            else:
                pieceList.append('./masked/{}'.format(file))

        vrtOptions = gdal.BuildVRTOptions(resolution='highest',
                                          outputSRS='EPSG:3857',
                                          separate=False,
                                          srcNodata=0)

        gdal.BuildVRT('./mosaic.vrt', pieceList, options=vrtOptions)

        print(
            '🎉 Completed creating the VRT. You can now feed this directly to gdal2tiles!'
        )
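
Note that os.walk repeats the VRT build for every directory level under
./masked, overwriting ./mosaic.vrt each time. If only the top-level TIFFs
matter, a flat listing is simpler (a sketch, not the author's code):

import glob

from osgeo import gdal

pieceList = sorted(glob.glob('./masked/*.tif'))
vrtOptions = gdal.BuildVRTOptions(resolution='highest', outputSRS='EPSG:3857',
                                  separate=False, srcNodata=0)
gdal.BuildVRT('./mosaic.vrt', pieceList, options=vrtOptions)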
Code example #21
def convert_to_vrt(subdatasets, data_dir, bands):
    """
    Loops through the subdatasets and creates vrt files

    :param subdatasets: Subdataset of every HDF file
    :param data_dir: Result of create_output_directory method
    :return: None
    """
    data_list = []

    # 'bands' passed in from the user are indexed from 1;
    # decrement each one so that we access the 0-indexed
    # entries of the subdatasets list
    for band in [b - 1 for b in bands]:
        output_name = os.path.join(
            data_dir, "Band{}_{}.vrt".format(
                str(band + 1).zfill(2), subdatasets[band][0].split(":")[-1]))

        # Get the fill value
        fill_value = get_metadata_item(subdatasets[band][0], 'fillvalue')

        # Pass some options
        vrt_options = gdal.BuildVRTOptions(srcNodata=fill_value,
                                           VRTNodata=NO_DATA)

        # Create the virtual raster
        gdal.BuildVRT(output_name, subdatasets[band][0], options=vrt_options)

        # Check if scale and offset exists
        scale = get_metadata_item(subdatasets[band][0], 'scale')

        modify_vrt(output_name, scale)

        data_list.append(output_name)

    return data_list
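
get_metadata_item and modify_vrt are project helpers not shown here. A
plausible stand-in for get_metadata_item, assuming the fill value and scale
live in the subdataset's metadata (actual key names vary by product, so this
is an illustration only):

from osgeo import gdal

def get_metadata_item(subdataset_name, key):
    # open the subdataset read-only and look the key up in its metadata
    ds = gdal.Open(subdataset_name, gdal.GA_ReadOnly)
    for candidate, value in ds.GetMetadata().items():
        if key.lower() in candidate.lower():
            return value
    return None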
Code example #22
            os.chdir(path_modis + modis_folders[ifo] + subfolders[iyr] + '/' +
                     monthname[imo])
            hdf_files = sorted(glob.glob('*.hdf'))

            for idt in range(len(hdf_files)):
                ctd_file_path = path_modis_op + modis_folders[
                    ifo] + subfolders[iyr] + '/' + monthname[imo]
                hdf_subdataset_extraction(hdf_files[idt], ctd_file_path,
                                          subdataset_id[ifo], band_n[ifo])
                print(hdf_files[idt])  # Print the file being processed

# 1.2 Group the Geotiff files by dates from their names and
# build virtual dataset VRT files for mosaicking MODIS geotiff files in the list

vrt_options = gdal.BuildVRTOptions(resampleAlg='near',
                                   addAlpha=None,
                                   bandList=None)

for ifo in range(len(modis_folders)):  # MODIS LST and NDVI subfolders
    # os.chdir(path_modis_op + modis_folders[ifo])
    # # List subfolders under root directory
    # subfolders = os.listdir()
    # subfolders = [item for item in subfolders if not item.startswith('.')] # ignore '. and ._files'
    # subfolders = sorted(subfolders)

    for iyr in range(len(subfolders)):

        for imo in range(len(monthname)):

            os.chdir(path_modis_op + modis_folders[ifo] + subfolders[iyr] +
                     '/' + monthname[imo])
Code example #23
def ard_to_ts(list_of_files,
              processing_dir,
              track,
              ard_params,
              pol,
              product_suffix='TC',
              no_data=0.0):
    # get the track directory
    track_dir = opj(processing_dir, track)

    # check routine if timeseries has already been processed
    check_file = opj(track_dir, 'Timeseries',
                     '.{}.{}.processed'.format(product_suffix, pol))
    if os.path.isfile(check_file):
        logger.debug('INFO: Timeseries of {} for {} in {} polarisation already'
                     ' processed'.format(track, product_suffix, pol))
        return

    # get the dB scaling (coherence is never scaled to dB)
    to_db = ard_params['to_db_mt'] if product_suffix != 'coh' else False

    if ard_params['apply_ls_mask']:
        extent = opj(track_dir, '{}.extent.masked.shp'.format(track))
    else:
        extent = opj(track_dir, '{}.extent.shp'.format(track))

    # min max dict for stretching in case of 16 or 8 bit datatype
    mm_dict = {
        'TC': {
            'min': -30,
            'max': 5
        },
        'coh': {
            'min': 0.000001,
            'max': 1
        },
        'Alpha': {
            'min': 0.000001,
            'max': 90
        },
        'Anisotropy': {
            'min': 0.000001,
            'max': 1
        },
        'Entropy': {
            'min': 0.000001,
            'max': 1
        }
    }

    stretch = (pol if pol in ['Alpha', 'Anisotropy', 'Entropy']
               else product_suffix)

    # define out_dir for stacking routine
    out_dir = opj(processing_dir, '{}'.format(track), 'Timeseries')
    os.makedirs(out_dir, exist_ok=True)

    with TemporaryDirectory() as temp_dir:
        # create namespaces
        temp_stack = opj(temp_dir, '{}_{}_{}'.format(track, product_suffix,
                                                     pol))
        out_stack = opj(temp_dir, '{}_{}_{}_mt'.format(track, product_suffix,
                                                       pol))
        stack_log = opj(
            out_dir, '{}_{}_{}_stack.err_log'.format(track, product_suffix,
                                                     pol))

        # run stacking routines
        # convert list of files readable for snap
        list_of_files = '\'{}\''.format(','.join(list_of_files))

        if pol in ['Alpha', 'Anisotropy', 'Entropy']:
            logger.debug(
                'INFO: Creating multi-temporal stack of images of track {} for'
                ' the {} band of the polarimetric H-A-Alpha'
                ' decomposition.'.format(track, pol))
            create_stack(list_of_files, temp_stack, stack_log, pattern=pol)
        else:
            logger.debug(
                'INFO: Creating multi-temporal stack of images of track {} for'
                ' the {} product suffix in {} '
                'polarisation.'.format(track, product_suffix, pol))
            create_stack(list_of_files,
                         temp_stack,
                         stack_log,
                         polarisation=pol)

        # run mt speckle filter
        if ard_params['mt_speckle_filter'] is True:
            speckle_log = opj(
                out_dir,
                '{}_{}_{}_mt_speckle.err_log'.format(track, product_suffix,
                                                     pol))
            logger.debug('INFO: Applying multi-temporal speckle filter')
            mt_speckle_filter('{}.dim'.format(temp_stack), out_stack,
                              speckle_log)
        else:
            out_stack = temp_stack
        if product_suffix == 'coh':
            outfiles = _get_coh_ts(in_stack=out_stack,
                                   out_dir=out_dir,
                                   polarization=pol,
                                   product_suffix=product_suffix,
                                   extent=extent,
                                   to_db=to_db,
                                   out_dtype=ard_params['dtype_output'],
                                   min=mm_dict[stretch]['min'],
                                   max=mm_dict[stretch]['max'])
        else:
            outfiles = _get_regular_ts(in_stack=out_stack,
                                       out_dir=out_dir,
                                       polarization=pol,
                                       product_suffix=product_suffix,
                                       extent=extent,
                                       to_db=to_db,
                                       out_dtype=ard_params['dtype_output'],
                                       min=mm_dict[stretch]['min'],
                                       max=mm_dict[stretch]['max'],
                                       no_data=no_data)
        with open(str(check_file), 'w') as file:
            file.write('passed all tests \n')
        # build vrt of timeseries
        vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
        gdal.BuildVRT(opj(out_dir,
                          'Timeseries.{}.{}.vrt'.format(product_suffix, pol)),
                      outfiles,
                      options=vrt_options)
Code example #24
File: gdalwrapper.py Project: Guiming/DL4SDM
    def buildVRT(self, srcFilelist, outVrt):
        vrt_options = gdal.BuildVRTOptions(separate=True, VRTNodata=-9999)
        gdal.BuildVRT(outVrt, srcFilelist, options=vrt_options)
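
A hypothetical call, assuming an instance of the (unshown) wrapper class
and placeholder paths:

wrapper.buildVRT(['b02.tif', 'b03.tif', 'b04.tif'], 'stack.vrt')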
Code example #25
def _ard_to_ts(burst_inventory, processing_dir, temp_dir, burst, to_db,
               ls_mask_create, ls_mask_apply, mt_speckle_filter, datatype):
    burst_dir = opj(processing_dir, burst)

    # get common burst extent
    list_of_scenes = glob.glob(opj(burst_dir, '20*', '*data*', '*img'))
    list_of_scenes = [x for x in list_of_scenes if 'layover' not in x]
    extent = opj(burst_dir, '{}.extent.shp'.format(burst))
    timeseries.mt_extent(list_of_scenes, extent, buffer=-0.0018)

    # remove initial extent temp files
    for file in glob.glob(opj(burst_dir, 'tmp*')):
        os.remove(file)

    # layover/shadow mask
    if ls_mask_create is True:
        list_of_scenes = glob.glob(opj(burst_dir, '20*', '*data*', '*img'))
        list_of_layover = [x for x in list_of_scenes if 'layover' in x]
        out_ls = opj(burst_dir, '{}.ls_mask.tif'.format(burst))
        timeseries.mt_layover(list_of_layover, out_ls, extent=extent)
        logger.debug(
            'INFO: Our common layover mask is located at {}'.format(out_ls))

    if ls_mask_apply:
        # requires out_ls from the ls_mask_create step above
        logger.debug(
            'INFO: Calculating symmetrical difference of extent and ls_mask')
        ras.polygonize_raster(out_ls, '{}.shp'.format(out_ls[:-4]))
        extent_ls_masked = opj(burst_dir, '{}.extent.masked.shp'.format(burst))
        vec.difference(extent, '{}.shp'.format(out_ls[:-4]), extent_ls_masked)
        extent = extent_ls_masked

    list_of_product_types = {'BS': 'Gamma0', 'coh': 'coh', 'ha_alpha': 'Alpha'}

    # we loop through each possible product
    for p, product_name in list_of_product_types.items():
        # we loop through each polarisation
        for pol in ['VV', 'VH', 'HH', 'HV']:
            # see if there is actually any imagery
            list_of_ts_bursts = sorted(
                glob.glob(
                    opj(processing_dir, burst, '20*', '*data*',
                        '{}*{}*img'.format(product_name, pol))))
            if len(list_of_ts_bursts) > 1:
                # check for all datafiles of this product type
                list_of_ts_bursts = sorted(
                    glob.glob(
                        opj(processing_dir, burst, '20*/',
                            '*{}*dim'.format(p))))
                list_of_ts_bursts = '\'{}\''.format(
                    ','.join(list_of_ts_bursts))
                # define out_dir for stacking routine

                out_dir = opj(processing_dir, '{}/Timeseries'.format(burst))
                os.makedirs(out_dir, exist_ok=True)

                temp_stack = opj(temp_dir, '{}_{}_{}_mt'.format(burst, p, pol))

                out_stack = opj(out_dir, '{}_{}_{}_mt'.format(burst, p, pol))

                stack_log = opj(out_dir,
                                '{}_{}_{}_stack.err_log'.format(burst, p, pol))

                # run stacking routines
                ts.create_stack(list_of_ts_bursts,
                                temp_stack,
                                stack_log,
                                polarisation=pol)

                # run mt speckle filter
                if mt_speckle_filter is True:
                    speckle_log = opj(
                        out_dir,
                        '{}_{}_{}_mt_speckle.err_log'.format(burst, p, pol))

                    ts.mt_speckle_filter('{}.dim'.format(temp_stack),
                                         out_stack, speckle_log)
                    # remove tmp files
                    h.delete_dimap(temp_stack)
                else:
                    out_stack = temp_stack

                # convert to GeoTiffs
                if p == 'BS':
                    # get the dates of the files
                    dates = [
                        datetime.datetime.strptime(
                            x.split('_')[-1][:-4], '%d%b%Y')
                        for x in glob.glob(
                            opj('{}.data'.format(out_stack), '*img'))
                    ]
                    # sort them
                    dates.sort()
                    # write them back to string for following loop
                    sorted_dates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in dates
                    ]

                    i, outfiles = 1, []
                    for date in sorted_dates:
                        # restructure date to YYMMDD
                        in_date = datetime.datetime.strptime(date, '%d%b%Y')
                        out_date = datetime.datetime.strftime(
                            in_date, '%y%m%d')

                        infile = glob.glob(
                            opj('{}.data'.format(out_stack),
                                '*{}*{}*img'.format(pol, date)))[0]

                        # create outfile
                        outfile = opj(
                            out_dir,
                            '{}.{}.{}.{}.tif'.format(i, out_date, p, pol))

                        # mask by extent
                        ras.to_gtiff_clip_by_extend(infile,
                                                    outfile,
                                                    extent,
                                                    to_db=to_db,
                                                    out_dtype=datatype,
                                                    min_value=-30,
                                                    max_value=5,
                                                    no_data=0.0)
                        # add to a list for subsequent vrt creation
                        outfiles.append(outfile)

                        i += 1

                    # build vrt of timeseries
                    vrt_options = gdal.BuildVRTOptions(srcNodata=0,
                                                       separate=True)
                    gdal.BuildVRT(opj(out_dir,
                                      'Timeseries.{}.{}.vrt'.format(p, pol)),
                                  outfiles,
                                  options=vrt_options)

                if p == 'coh':
                    # get slave and master Date
                    mstDates = [
                        datetime.datetime.strptime(
                            os.path.basename(x).split('_')[3].split('.')[0],
                            '%d%b%Y') for x in glob.glob(
                                opj('{}.data'.format(out_stack), '*img'))
                    ]

                    slvDates = [
                        datetime.datetime.strptime(
                            os.path.basename(x).split('_')[4].split('.')[0],
                            '%d%b%Y') for x in glob.glob(
                                opj('{}.data'.format(out_stack), '*img'))
                    ]
                    # sort them
                    mstDates.sort()
                    slvDates.sort()
                    # write them back to string for following loop
                    sortedMstDates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in mstDates
                    ]
                    sortedSlvDates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in slvDates
                    ]

                    i, outfiles = 1, []
                    for mst, slv in zip(sortedMstDates, sortedSlvDates):

                        inMst = datetime.datetime.strptime(mst, '%d%b%Y')
                        inSlv = datetime.datetime.strptime(slv, '%d%b%Y')

                        outMst = datetime.datetime.strftime(inMst, '%y%m%d')
                        outSlv = datetime.datetime.strftime(inSlv, '%y%m%d')

                        infile = glob.glob(
                            opj('{}.data'.format(out_stack),
                                '*{}*{}_{}*img'.format(pol, mst, slv)))[0]
                        outfile = opj(
                            out_dir, '{}.{}.{}.{}.{}.tif'.format(
                                i, outMst, outSlv, p, pol))

                        ras.to_gtiff_clip_by_extend(infile,
                                                    outfile,
                                                    extent,
                                                    to_db=False,
                                                    out_dtype=datatype,
                                                    min_value=0.000001,
                                                    max_value=1,
                                                    no_data=0.0)

                        # add to a list for subsequent vrt creation
                        outfiles.append(outfile)

                        i += 1

                    # build vrt of timeseries
                    vrt_options = gdal.BuildVRTOptions(srcNodata=0,
                                                       separate=True)
                    gdal.BuildVRT(opj(out_dir,
                                      'Timeseries.{}.{}.vrt'.format(p, pol)),
                                  outfiles,
                                  options=vrt_options)

                # remove tmp files
                h.delete_dimap(out_stack)

    # note: 'p' still holds the last key of list_of_product_types
    # ('ha_alpha') from the loop above; the glob patterns rely on that
    for pol in ['Alpha', 'Entropy', 'Anisotropy']:
        list_of_ts_bursts = sorted(
            glob.glob(
                opj(processing_dir, burst, '20*', '*{}*'.format(p),
                    '*{}.img'.format(pol))))

        if len(list_of_ts_bursts) > 1:
            list_of_ts_bursts = sorted(
                glob.glob(
                    opj(processing_dir, burst, '20*/', '*{}*dim'.format(p))))
            list_of_ts_bursts = '\'{}\''.format(','.join(list_of_ts_bursts))

            # logger.debug(list_of_ts_bursts)

            out_dir = opj(processing_dir, '{}/Timeseries'.format(burst))
            os.makedirs(out_dir, exist_ok=True)

            temp_stack = opj(temp_dir, '{}_{}_mt'.format(burst, pol))
            out_stack = opj(out_dir, '{}_{}_mt'.format(burst, pol))

            stack_log = opj(out_dir, '{}_{}_stack.err_log'.format(burst, pol))
            # processing routines
            ts.create_stack(list_of_ts_bursts,
                            temp_stack,
                            stack_log,
                            pattern=pol)

            if mt_speckle_filter is True:
                speckle_log = opj(
                    out_dir, '{}_{}_mt_speckle.err_log'.format(burst, pol))
                ts.mt_speckle_filter('{}.dim'.format(temp_stack), out_stack,
                                     speckle_log)
                # remove tmp files
                h.delete_dimap(temp_stack)
            else:
                out_stack = temp_stack

            # get the dates of the files
            dates = [
                datetime.datetime.strptime(x.split('_')[-1][:-4], '%d%b%Y')
                for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
            ]
            # sort them
            dates.sort()
            # write them back to string for following loop
            sorted_dates = [
                datetime.datetime.strftime(ts, "%d%b%Y") for ts in dates
            ]

            i, outfiles = 1, []
            for date in sorted_dates:
                # restructure date to YYMMDD
                in_date = datetime.datetime.strptime(date, '%d%b%Y')
                out_date = datetime.datetime.strftime(in_date, '%y%m%d')

                infile = glob.glob(
                    opj('{}.data'.format(out_stack),
                        '*{}*{}*img'.format(pol, date)))[0]
                # create outfile
                outfile = opj(out_dir,
                              '{}.{}.{}.{}.tif'.format(i, out_date, p, pol))
                # mask by extent
                max_value = 90 if pol == 'Alpha' else 1
                ras.to_gtiff_clip_by_extend(infile,
                                            outfile,
                                            extent,
                                            to_db=False,
                                            out_dtype=datatype,
                                            min_value=0.000001,
                                            max_value=max_value,
                                            no_data=0)

                # add to a list for subsequent vrt creation
                outfiles.append(outfile)
                i += 1

            # build vrt of timeseries
            vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
            gdal.BuildVRT(opj(out_dir, 'Timeseries.{}.vrt'.format(pol)),
                          outfiles,
                          options=vrt_options)

            # remove tmp files
            h.delete_dimap(out_stack)
Code example #26
def _timeseries_to_timescan(burst_inventory, processing_dir, temp_dir,
                            burst_dir, to_db, metrics, outlier_removal):

    product_list = [
        'BS.HH', 'BS.VV', 'BS.HV', 'BS.VH', 'coh.VV', 'coh.VH', 'Alpha',
        'Entropy', 'Anisotropy'
    ]

    for product in product_list:
        # use a loop name that does not shadow the 'timeseries' module below
        for ts_file in glob.glob(
                opj(burst_dir, 'Timeseries', '*{}*vrt'.format(product))):

            logger.debug('INFO: Creating timescan for {}'.format(product))
            timescan_dir = opj(burst_dir, 'Timescan')
            os.makedirs(timescan_dir, exist_ok=True)

            # we get the name of the time-series parameter
            polarisation = ts_file.split('/')[-1].split('.')[2]
            if polarisation == 'vrt':
                timescan_prefix = opj(
                    '{}'.format(timescan_dir),
                    '{}'.format(ts_file.split('/')[-1].split('.')[1]))
            else:
                timescan_prefix = opj(
                    '{}'.format(timescan_dir), '{}.{}'.format(
                        ts_file.split('/')[-1].split('.')[1], polarisation))

            start = time.time()
            if 'BS.' in timescan_prefix:  # backscatter
                timeseries.mt_metrics(ts_file,
                                      timescan_prefix,
                                      metrics,
                                      rescale_to_datatype=True,
                                      to_power=to_db,
                                      outlier_removal=outlier_removal)
            else:  # non-backscatter
                timeseries.mt_metrics(ts_file,
                                      timescan_prefix,
                                      metrics,
                                      rescale_to_datatype=False,
                                      to_power=False,
                                      outlier_removal=outlier_removal)

            h.timer(start)

    # rename and create vrt
    # logger.debug('renaming')
    i, list_of_files = 0, []
    for product in itertools.product(product_list, metrics):

        file = glob.glob(
            opj(burst_dir, 'Timescan',
                '*{}.{}.tif'.format(product[0], product[1])))

        if file:
            i += 1
            outfile = opj(burst_dir, 'Timescan',
                          '{}.{}.{}.tif'.format(i, product[0], product[1]))
            shutil.move(file[0], outfile)
            list_of_files.append(outfile)

    # create vrt
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(burst_dir, 'Timescan', 'Timescan.vrt'),
                  list_of_files,
                  options=vrt_options)
Code example #27
            # build a vrt
            g = gdal.BuildVRT(clipped_file, [blank_file_tiff])

        if (g):
            del(g)
            bandNames.append(f'DOY {doy:03d}')
            allvrt.append(clipped_file)

        old_clip = clipped_file



    g = gdal.BuildVRT(f'{allopfile.as_posix()}.{d}.vrt', allvrt,
                      options=gdal.BuildVRTOptions(VRTNodata=255,
                                                   srcNodata=255,
                                                   allowProjectionDifference=True,
                                                   separate=True))
    if (g):
        # set band names
        for i in range(g.RasterCount):
            g.GetRasterBand(i+1).SetDescription(bandNames[i])

        # close and flush file
        del g
        print (f'{allopfile.as_posix()}.{d}.vrt')

import gdal
import numpy as np

destination_folder = Path('data')
year = 2017
Code example #28
File: utils.py Project: kssvrk/Sat2Feature
from osgeo import gdal


def StackBands(bands_list, vrtout):
    vrt_options = gdal.BuildVRTOptions(resampleAlg='cubic', separate=True)
    my_vrt = gdal.BuildVRT(vrtout, bands_list, options=vrt_options)
    my_vrt = None  # dereference to flush the VRT to disk
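
Setting my_vrt = None above is what actually flushes the VRT to disk. A
hypothetical call with placeholder band paths:

StackBands(['B02.tif', 'B03.tif', 'B04.tif'], 'stack.vrt')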
Code example #29
def mt_layover_old(list_of_files, config_file):
    """

    :param list_of_files:
    :param config_file:
    :return:
    """

    # this is a godale thing
    with open(config_file) as file:
        config_dict = json.load(file)
        temp_dir = Path(config_dict['temp_dir'])
        update_extent = (
            config_dict['processing']['time-series_ARD']['apply_ls_mask'])

    target_dir = Path(list_of_files[0]).parent.parent.parent
    outfile = target_dir.joinpath(f'{target_dir.name}.ls_mask.tif')
    extent = target_dir.joinpath(f'{target_dir.name}.extent.gpkg')
    burst_dir = Path(outfile).parent
    burst = burst_dir.name

    logger.info(
        f'Creating common Layover/Shadow mask for track {target_dir.name}.')

    with TemporaryDirectory(prefix=f'{temp_dir}/') as temp:

        # temp to Path object
        temp = Path(temp)

        # create path to temp file
        ls_layer = temp.joinpath(Path(outfile).name)

        # create a vrt stack out of the layover/shadow masks
        vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
        gdal.BuildVRT(str(temp.joinpath('ls.vrt')),
                      list_of_files,
                      options=vrt_options)

        with rasterio.open(temp.joinpath('ls.vrt')) as src:

            # get metadata
            meta = src.meta
            # update driver and reduced band count
            meta.update(driver='GTiff', count=1, dtype='uint8')

            # create outfiles
            with rasterio.open(ls_layer, 'w', **meta) as out_min:

                # loop through blocks
                for _, window in src.block_windows(1):

                    # read array with all bands
                    stack = src.read(range(1, src.count + 1), window=window)

                    # get stats
                    arr_max = np.nanmax(stack, axis=0)
                    arr = np.divide(arr_max, arr_max)

                    out_min.write(np.uint8(arr), window=window, indexes=1)

        ras.mask_by_shape(ls_layer,
                          outfile,
                          extent,
                          to_db=False,
                          datatype='uint8',
                          rescale=False,
                          ndv=0)

        ls_layer.unlink()

        extent_ls_masked = None
        if update_extent:

            logger.info(
                'Calculating symmetrical difference of extent and ls_mask')

            # polygonize the multi-temporal ls mask
            ras.polygonize_raster(outfile, f'{str(outfile)[:-4]}.gpkg')

            # create file for masked extent
            extent_ls_masked = burst_dir.joinpath(
                f'{burst}.extent.masked.gpkg')

            # calculate difference between burst extent
            # and ls mask, for masked extent
            try:
                vec.difference(extent, f'{str(outfile)[:-4]}.gpkg',
                               extent_ls_masked)
            except Exception:
                shutil.copy(extent, extent_ls_masked)

    return burst_dir, list_of_files, outfile, extent_ls_masked
Code example #30
def mt_layover(filelist, outfile, temp_dir, extent, update_extent=False):
    '''
    This function is usually used in the time-series workflow of OST. It
    combines a list of layover/shadow mask filepaths into a common mask.
    :param filelist - list of layover/shadow mask files
    :param outfile - path of the output file
    :param temp_dir - directory for intermediate files
    :param extent - extent vector file to mask by
    :param update_extent - whether to update the extent with the mask
    :return path to the multi-temporal layover/shadow mask file generated
    '''

    # get some info
    burst_dir = os.path.dirname(outfile)
    burst = os.path.basename(burst_dir)
    # note: this overrides the 'extent' argument with the burst extent file
    extent = opj(burst_dir, '{}.extent.shp'.format(burst))

    # get the start time for Info on processing time
    start = time.time()
    # create path to out file
    ls_layer = opj(temp_dir, os.path.basename(outfile))

    # create a vrt stack out of the layover/shadow masks
    print(' INFO: Creating common Layover/Shadow Mask')
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(temp_dir, 'ls.vrt'), filelist, options=vrt_options)

    with rasterio.open(opj(temp_dir, 'ls.vrt')) as src:

        # get metadata
        meta = src.meta
        # update driver and reduced band count
        meta.update(driver='GTiff', count=1, dtype='uint8')

        # create outfiles
        with rasterio.open(ls_layer, 'w', **meta) as out_min:

            # loop through blocks
            for _, window in src.block_windows(1):

                # read array with all bands
                stack = src.read(range(1, src.count + 1), window=window)

                # get stats
                arr_max = np.nanmax(stack, axis=0)
                arr = arr_max / arr_max

                out_min.write(np.uint8(arr), window=window, indexes=1)

    ras.mask_by_shape(ls_layer,
                      outfile,
                      extent,
                      to_db=False,
                      datatype='uint8',
                      rescale=False,
                      ndv=0)
    os.remove(ls_layer)
    h.timer(start)

    if update_extent:
        print(' INFO: Calculating symmetrical difference of extent and ls_mask')
        # polygonize the multi-temporal ls mask
        ras.polygonize_raster(outfile, '{}.shp'.format(outfile[:-4]))

        # create file for masked extent
        extent_ls_masked = opj(burst_dir, '{}.extent.masked.shp'.format(burst))

        # calculate difference between burst extent and ls mask, for masked extent
        vec.difference(extent, '{}.shp'.format(outfile[:-4]), extent_ls_masked)