Example #1
def mt_layover(filelist, outfile, extent):
    '''
    Create a common multi-temporal layover/shadow mask.

    This function is usually used in the time-series workflow of OST. It
    combines a list of single layover/shadow masks into one mask that is
    valid for the whole time-series stack.

    :param filelist: list of paths to the single layover/shadow mask files
    :param outfile: path to the output file
    :param extent: path to the vector file used to mask the output
    :return: path to the multi-temporal layover/shadow mask file generated
    '''

    # get the start time for Info on processing time
    start = time.time()

    with TemporaryDirectory() as temp:
        # create path to out file
        ls_layer = opj(temp, os.path.basename(outfile))

        # create a vrt-stack out of the single layover/shadow masks
        logger.debug('Creating common Layover/Shadow Mask')
        vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
        gdal.BuildVRT(opj(temp, 'ls.vrt'), filelist, options=vrt_options)

        with rasterio.open(opj(temp, 'ls.vrt')) as src:

            # get metadata
            meta = src.meta
            # update driver and reduced band count
            meta.update(driver='GTiff', count=1, dtype='uint8')

            # create outfiles
            with rasterio.open(ls_layer, 'w', **meta) as out_min:

                # loop through blocks
                for _, window in src.block_windows(1):

                    # read array with all bands
                    stack = src.read(range(1, src.count + 1), window=window)

                    # reduce over time: the maximum of all masks, divided
                    # by itself, is 1 wherever at least one acquisition
                    # flags layover/shadow
                    arr_max = np.nanmax(stack, axis=0)
                    arr = arr_max / arr_max

                    out_min.write(np.uint8(arr), window=window, indexes=1)

        ras.mask_by_shape(ls_layer,
                          outfile,
                          extent,
                          to_db=False,
                          datatype='uint8',
                          rescale=False,
                          ndv=0)
        h.timer(start)
    return outfile
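
A minimal usage sketch for the function above. The paths, the glob pattern and the burst layout are hypothetical, and it assumes the module-level imports of this example (gdal, rasterio, numpy as np, os, time, opj = os.path.join, TemporaryDirectory) plus the OST helpers ras and h:

from glob import glob

# collect the per-acquisition layover/shadow masks of one burst
# (hypothetical directory layout)
layover_files = glob('/data/burst_T001/20*/*data*/*layover*.img')

# combine them into a single multi-temporal mask, clipped to the AOI
ls_mask = mt_layover(
    layover_files,
    outfile='/data/burst_T001/burst_T001.ls_mask.tif',
    extent='/data/burst_T001/burst_T001.extent.gpkg'
)
print(f'Multi-temporal layover/shadow mask written to {ls_mask}')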
Example #2
def mt_layover_old(list_of_files, config_file):
    """

    :param list_of_files:
    :param config_file:
    :return:
    """

    # the config is passed as a file path, as required for godale-based
    # parallel execution
    with open(config_file) as file:
        config_dict = json.load(file)
        temp_dir = Path(config_dict['temp_dir'])
        update_extent = (
            config_dict['processing']['time-series_ARD']['apply_ls_mask'])

    target_dir = Path(list_of_files[0]).parent.parent.parent
    outfile = target_dir.joinpath(f'{target_dir.name}.ls_mask.tif')
    extent = target_dir.joinpath(f'{target_dir.name}.extent.gpkg')
    burst_dir = Path(outfile).parent
    burst = burst_dir.name

    logger.info(
        f'Creating common Layover/Shadow mask for track {target_dir.name}.')

    with TemporaryDirectory(prefix=f'{temp_dir}/') as temp:

        # temp to Path object
        temp = Path(temp)

        # create path to temp file
        ls_layer = temp.joinpath(Path(outfile).name)

        # create a vrt-stack out of the single layover/shadow masks
        vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
        gdal.BuildVRT(str(temp.joinpath('ls.vrt')),
                      list_of_files,
                      options=vrt_options)

        with rasterio.open(temp.joinpath('ls.vrt')) as src:

            # get metadata
            meta = src.meta
            # update driver and reduced band count
            meta.update(driver='GTiff', count=1, dtype='uint8')

            # create outfiles
            with rasterio.open(ls_layer, 'w', **meta) as out_min:

                # loop through blocks
                for _, window in src.block_windows(1):

                    # read array with all bands
                    stack = src.read(range(1, src.count + 1), window=window)

                    # reduce over time: the maximum of all masks, divided
                    # by itself, is 1 wherever at least one acquisition
                    # flags layover/shadow
                    arr_max = np.nanmax(stack, axis=0)
                    arr = np.divide(arr_max, arr_max)

                    out_min.write(np.uint8(arr), window=window, indexes=1)

        ras.mask_by_shape(ls_layer,
                          outfile,
                          extent,
                          to_db=False,
                          datatype='uint8',
                          rescale=False,
                          ndv=0)

        ls_layer.unlink()

        extent_ls_masked = None
        if update_extent:

            logger.info(
                'Calculating symmetrical difference of extent and ls_mask')

            # polygonize the multi-temporal ls mask
            ras.polygonize_raster(outfile, f'{str(outfile)[:-4]}.gpkg')

            # create file for masked extent
            extent_ls_masked = burst_dir.joinpath(
                f'{burst}.extent.masked.gpkg')

            # calculate difference between burst extent
            # and ls mask, for masked extent
            try:
                vec.difference(extent, f'{str(outfile)[:-4]}.gpkg',
                               extent_ls_masked)
            except Exception:
                shutil.copy(extent, extent_ls_masked)

    return burst_dir, list_of_files, outfile, extent_ls_masked
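
A usage sketch for this config-file variant. The directory layout is hypothetical, and the config shown contains only the two keys the function actually reads (temp_dir and processing/time-series_ARD/apply_ls_mask):

import json
from pathlib import Path

# write a minimal config with only the keys read above (assumption:
# the rest of the OST config is not needed for this function)
config = {
    'temp_dir': '/tmp/ost',
    'processing': {'time-series_ARD': {'apply_ls_mask': True}}
}
Path('/tmp/ost').mkdir(parents=True, exist_ok=True)
with open('/tmp/ost/config.json', 'w') as f:
    json.dump(config, f)

# the track directory is derived three levels above each input file
mask_files = [
    str(p) for p in Path('/data/track_T001').glob('20*/*data*/*layover*.img')
]
burst_dir, files, ls_mask, masked_extent = mt_layover_old(
    mask_files, '/tmp/ost/config.json'
)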
Example #3
def mt_layover(filelist, outfile, temp_dir, extent, update_extent=False):
    '''
    Create a common multi-temporal layover/shadow mask.

    This function is usually used in the time-series workflow of OST. It
    combines a list of single layover/shadow masks into one mask, which is
    written to outfile.

    :param filelist: list of paths to the single layover/shadow mask files
    :param outfile: path to the output file
    :param temp_dir: directory for intermediate files
    :param extent: path to the extent vector file (note: re-derived from
        the burst directory right below)
    :param update_extent: if True, also write an extent file with the
        layover/shadow mask subtracted
    '''

    # derive burst name and the extent file from the outfile path
    burst_dir = os.path.dirname(outfile)
    burst = os.path.basename(burst_dir)
    extent = opj(burst_dir, '{}.extent.shp'.format(burst))

    # get the start time for Info on processing time
    start = time.time()
    # create path to out file
    ls_layer = opj(temp_dir, os.path.basename(outfile))

    # create a vrt-stack out of the single layover/shadow masks
    print(' INFO: Creating common Layover/Shadow Mask')
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(temp_dir, 'ls.vrt'), filelist, options=vrt_options)

    with rasterio.open(opj(temp_dir, 'ls.vrt')) as src:

        # get metadata
        meta = src.meta
        # update driver and reduced band count
        meta.update(driver='GTiff', count=1, dtype='uint8')

        # create outfiles
        with rasterio.open(ls_layer, 'w', **meta) as out_min:

            # loop through blocks
            for _, window in src.block_windows(1):

                # read array with all bands
                stack = src.read(range(1, src.count + 1), window=window)

                # reduce over time: the maximum of all masks, divided
                # by itself, is 1 wherever at least one acquisition
                # flags layover/shadow
                arr_max = np.nanmax(stack, axis=0)
                arr = arr_max / arr_max

                out_min.write(np.uint8(arr), window=window, indexes=1)

    ras.mask_by_shape(ls_layer,
                      outfile,
                      extent,
                      to_db=False,
                      datatype='uint8',
                      rescale=False,
                      ndv=0)
    os.remove(ls_layer)
    h.timer(start)

    if update_extent:
        print(' INFO: Calculating symmetrical difference of extent and ls_mask')
        # polygonize the multi-temporal ls mask
        ras.polygonize_raster(outfile, '{}.shp'.format(outfile[:-4]))

        # create file for masked extent
        extent_ls_masked = opj(burst_dir, '{}.extent.masked.shp'.format(burst))

        # calculate difference between burst extent and ls mask, for masked extent
        vec.difference(extent, '{}.shp'.format(outfile[:-4]), extent_ls_masked)
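
A usage sketch for this older, temp_dir-based variant (hypothetical paths). Note that the extent argument is re-derived inside the function from the outfile location, so the burst directory must contain the matching .extent.shp:

import glob

layover_files = glob.glob('/data/burst_T001/20*/*data*/*layover*.img')

# writes /data/burst_T001/burst_T001.ls_mask.tif and, because
# update_extent=True, also /data/burst_T001/burst_T001.extent.masked.shp
mt_layover(
    layover_files,
    outfile='/data/burst_T001/burst_T001.ls_mask.tif',
    temp_dir='/tmp/ost',
    extent='/data/burst_T001/burst_T001.extent.shp',
    update_extent=True
)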
Example #4
def ard_to_ts(list_of_files, burst, product, pol, config_file):

    # -------------------------------------------
    # 1 unpack list of args
    # convert the list of files into a comma-separated string readable by SNAP
    list_of_files = f"\'{','.join(str(x) for x in list_of_files)}\'"

    # -------------------------------------------
    # 2 read config file
    with open(config_file, 'r') as file:
        config_dict = json.load(file)
        processing_dir = Path(config_dict['processing_dir'])
        ard = config_dict['processing']['single_ARD']
        ard_mt = config_dict['processing']['time-series_ARD']

    # -------------------------------------------
    # 3 get namespace of directories and check if already processed
    # get the burst directory
    burst_dir = processing_dir.joinpath(burst)

    # get timeseries directory and create it if it doesn't exist
    out_dir = burst_dir.joinpath('Timeseries')
    out_dir.mkdir(parents=True, exist_ok=True)

    # in case some processing has been done before, check if already processed
    check_file = out_dir.joinpath(f'.{product}.{pol}.processed')
    if check_file.exists():
        logger.info(
            f'Timeseries of {burst} for {product} in {pol} '
            f'polarisation already processed.'
        )

        out_files = 'already_processed'
        out_vrt = 'already_processed'

        return (
            burst, list_of_files, out_files, out_vrt, f'{product}.{pol}', None
        )

    # -------------------------------------------
    # 4 adjust processing parameters according to config
    # get the db scaling right
    to_db = ard['to_db']
    if to_db or product != 'bs':
        to_db = False
        logger.debug(f'Not converting to dB for {product}')
    else:
        to_db = ard_mt['to_db']
        logger.debug(f'Converting to dB for {product}')

    if ard_mt['apply_ls_mask']:
        extent = burst_dir.joinpath(f'{burst}.valid.json')
    else:
        extent = burst_dir.joinpath(f'{burst}.min_bounds.json')

    # -------------------------------------------
    # 5 SNAP processing
    with TemporaryDirectory(prefix=f"{config_dict['temp_dir']}/") as temp:

        # turn to Path object
        temp = Path(temp)

        # create namespaces
        temp_stack = temp.joinpath(f'{burst}_{product}_{pol}')
        out_stack = temp.joinpath(f'{burst}_{product}_{pol}_mt')
        stack_log = out_dir.joinpath(f'{burst}_{product}_{pol}_stack.err_log')

        # run stacking routine
        if pol in ['Alpha', 'Anisotropy', 'Entropy']:
            logger.info(
                f'Creating multi-temporal stack of images of burst/track '
                f'{burst} for the {pol} band of the polarimetric '
                f'H-A-Alpha decomposition.'
            )
            try:
                create_stack(
                    list_of_files, temp_stack, stack_log, config_dict,
                    pattern=pol
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, None, None, None, error
        else:
            logger.info(
                f'Creating multi-temporal stack of images of burst/track '
                f'{burst} for {product} product in {pol} polarization.'
            )
            try:
                create_stack(
                    list_of_files, temp_stack, stack_log, config_dict,
                    polarisation=pol
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, None, None, None, error

        # run mt speckle filter
        if ard_mt['remove_mt_speckle'] is True:

            speckle_log = out_dir.joinpath(
                f'{burst}_{product}_{pol}_mt_speckle.err_log'
            )

            logger.debug('Applying multi-temporal speckle filter')
            try:
                mt_speckle_filter(
                    temp_stack.with_suffix('.dim'), out_stack, speckle_log,
                    config_dict
                )
            except (GPTRuntimeError, NotValidFileError) as error:
                logger.info(error)
                return None, None, None, None, None, error

            # remove tmp files
            h.delete_dimap(temp_stack)
        else:
            out_stack = temp_stack

        # -----------------------------------------------
        # 6 Conversion to GeoTiff

        # min max dict for stretching in case of 16 or 8 bit datatype
        mm_dict = {'bs': {'min': -30, 'max': 5},
                   'coh': {'min': 0.000001, 'max': 1},
                   'Alpha': {'min': 0.000001, 'max': 90},
                   'Anisotropy': {'min': 0.000001, 'max': 1},
                   'Entropy': {'min': 0.000001, 'max': 1}
                   }
        stretch = pol if pol in ['Alpha', 'Anisotropy', 'Entropy'] else product

        if product == 'coh':

            # get slave and master dates from file names and sort them
            mst_dates = sorted([
                dt.strptime(
                    file.name.split('_')[3].split('.')[0], SNAP_DATEFORMAT
                )
                for file in list(out_stack.with_suffix('.data').glob('*.img'))
            ])

            slv_dates = sorted([
                dt.strptime(
                    file.name.split('_')[4].split('.')[0], SNAP_DATEFORMAT
                )
                for file in list(out_stack.with_suffix('.data').glob('*.img'))
            ])

            # write them back to string for following loop
            mst_dates = [dt.strftime(ts, SNAP_DATEFORMAT) for ts in mst_dates]
            slv_dates = [dt.strftime(ts, SNAP_DATEFORMAT) for ts in slv_dates]

            out_files = []
            for i, (mst, slv) in enumerate(zip(mst_dates, slv_dates)):

                # re-construct namespace for input file
                infile = list(
                    out_stack.with_suffix('.data').glob(
                        f'*{pol}*{mst}_{slv}*img'
                    )
                )[0]

                # rename dates to YYMMDD format
                mst = dt.strftime(dt.strptime(mst, SNAP_DATEFORMAT), '%y%m%d')
                slv = dt.strftime(dt.strptime(slv, SNAP_DATEFORMAT), '%y%m%d')

                # create namespace for output file with renamed dates
                outfile = out_dir.joinpath(
                    f'{i+1:02d}.{mst}.{slv}.{product}.{pol}.tif'
                )

                # produce the final output file,
                # including dtype conversion and ls mask
                ras.mask_by_shape(
                    infile, outfile, extent, to_db=to_db,
                    datatype=ard_mt['dtype_output'],
                    min_value=mm_dict[stretch]['min'],
                    max_value=mm_dict[stretch]['max'],
                    ndv=0.0, description=True)

                # add to a list for subsequent vrt creation
                out_files.append(str(outfile))

        else:
            # get the dates of the files
            dates = sorted([dt.strptime(
                file.name.split('_')[-1][:-4], SNAP_DATEFORMAT)
                for file in list(out_stack.with_suffix('.data').glob('*.img'))
            ])

            # write them back to string for the following loop
            dates = [dt.strftime(ts, SNAP_DATEFORMAT) for ts in dates]

            out_files = []
            for i, date in enumerate(dates):

                # re-construct namespace for input file
                infile = list(
                    out_stack.with_suffix('.data').glob(f'*{pol}*{date}*img')
                )[0]

                # restructure date to YYMMDD
                date = dt.strftime(
                    dt.strptime(date, SNAP_DATEFORMAT), '%y%m%d'
                )

                # create namespace for output file
                outfile = out_dir.joinpath(
                    f'{i+1:02d}.{date}.{product}.{pol}.tif'
                )

                # run conversion routine
                ras.mask_by_shape(infile, outfile, extent,
                                  to_db=to_db,
                                  datatype=ard_mt['dtype_output'],
                                  min_value=mm_dict[stretch]['min'],
                                  max_value=mm_dict[stretch]['max'],
                                  ndv=0.0)

                # add to a list for subsequent vrt creation
                out_files.append(str(outfile))

    # -----------------------------------------------
    # 7 Filechecks
    for file in out_files:
        return_code = h.check_out_tiff(file)
        if return_code != 0:

            # remove all products of the failed time-series,
            # including their metadata xmls
            for file_ in out_files:
                Path(file_).unlink()
                if Path(f'{file_}.xml').exists():
                    Path(f'{file_}.xml').unlink()

            return (
                burst, list_of_files, None, None,
                f'{product}.{pol}', return_code
            )

    # write file, so we know this ts has been successfully processed
    with open(str(check_file), 'w') as file:
        file.write('passed all tests \n')

    # -----------------------------------------------
    # 8 Create vrts
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    out_vrt = str(out_dir.joinpath(f'Timeseries.{product}.{pol}.vrt'))
    gdal.BuildVRT(
        out_vrt,
        out_files,
        options=vrt_options
    )

    return burst, list_of_files, out_files, out_vrt, f'{product}.{pol}', None
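
A usage sketch for the config-driven ard_to_ts. The file list, burst name and config path are hypothetical; the config file must provide at least processing_dir, temp_dir and the processing/single_ARD and processing/time-series_ARD blocks read above:

from pathlib import Path

# hypothetical list of single-ARD backscatter products of one burst
dim_files = sorted(Path('/data/processing/burst_T001').glob('20*/*bs*.dim'))

burst, infiles, out_files, out_vrt, suffix, error = ard_to_ts(
    dim_files,
    burst='burst_T001',
    product='bs',
    pol='VV',
    config_file='/data/processing/config.json'
)
if error is None:
    print(f'Time-series VRT: {out_vrt}')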
Example #5
def ard_to_ts(list_of_files,
              processing_dir,
              temp_dir,
              burst,
              proc_file,
              product,
              pol,
              ncores=os.cpu_count()):
    if isinstance(list_of_files, str):
        list_of_files = list_of_files.replace("'", '').strip('][').split(', ')

    # get the burst directory
    burst_dir = opj(processing_dir, burst)

    # check routine if timeseries has already been processed
    check_file = opj(burst_dir, 'Timeseries',
                     '.{}.{}.processed'.format(product, pol))
    if os.path.isfile(check_file):
        print(' INFO: Timeseries of {} for {} in {} polarisation already'
              ' processed'.format(burst, product, pol))
        return

    # load ard parameters
    with open(proc_file, 'r') as ard_file:
        ard_params = json.load(ard_file)['processing parameters']
        ard = ard_params['single ARD']
        ard_mt = ard_params['time-series ARD']
        if ard_mt['remove mt speckle'] is True:
            ard_mt_speck = ard_params['time-series ARD']['mt speckle filter']
    # get the db scaling right
    to_db = ard['to db']
    if to_db or product != 'bs':
        to_db = False
        print(' INFO: Not converting to dB for {}'.format(product))
    else:
        to_db = ard_mt['to db']
        print(' INFO: Converting to dB for {}'.format(product))

    if ard['apply ls mask']:
        extent = opj(burst_dir, '{}.extent.masked.shp'.format(burst))
    else:
        extent = opj(burst_dir, '{}.extent.shp'.format(burst))

    # min max dict for stretching in case of 16 or 8 bit datatype
    mm_dict = {
        'bs': {
            'min': -30,
            'max': 5
        },
        'coh': {
            'min': 0.000001,
            'max': 1
        },
        'Alpha': {
            'min': 0.000001,
            'max': 90
        },
        'Anisotropy': {
            'min': 0.000001,
            'max': 1
        },
        'Entropy': {
            'min': 0.000001,
            'max': 1
        }
    }

    stretch = pol if pol in ['Alpha', 'Anisotropy', 'Entropy'] else product

    # define out_dir for stacking routine
    out_dir = opj(processing_dir, '{}'.format(burst), 'Timeseries')
    os.makedirs(out_dir, exist_ok=True)

    # create namespaces
    temp_stack = opj(temp_dir, '{}_{}_{}'.format(burst, product, pol))
    out_stack = opj(temp_dir, '{}_{}_{}_mt'.format(burst, product, pol))
    stack_log = opj(out_dir,
                    '{}_{}_{}_stack.err_log'.format(burst, product, pol))

    # run stacking routines
    # convert the list of files into a comma-separated string readable by SNAP
    list_of_files = '\'{}\''.format(','.join(list_of_files))

    if pol in ['Alpha', 'Anisotropy', 'Entropy']:
        print(
            ' INFO: Creating multi-temporal stack of images of burst/track {} for'
            ' the {} band of the polarimetric H-A-Alpha'
            ' decomposition.'.format(burst, pol))
        create_stack(list_of_files, temp_stack, stack_log, pattern=pol)
    else:
        print(
            ' INFO: Creating multi-temporal stack of images of burst/track {} for'
            ' {} product in {} polarization.'.format(burst, product, pol))
        create_stack(list_of_files, temp_stack, stack_log, polarisation=pol)

    # run mt speckle filter
    if ard_mt['remove mt speckle'] is True:
        speckle_log = opj(
            out_dir, '{}_{}_{}_mt_speckle.err_log'.format(burst, product, pol))

        print(' INFO: Applying multi-temporal speckle filter')
        mt_speckle_filter('{}.dim'.format(temp_stack),
                          out_stack,
                          speckle_log,
                          speckle_dict=ard_mt_speck,
                          ncores=ncores)
        # remove tmp files
        h.delete_dimap(temp_stack)
    else:
        out_stack = temp_stack

    if product == 'coh':

        # get slave and master dates
        mstDates = [
            datetime.datetime.strptime(
                os.path.basename(x).split('_')[3].split('.')[0], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]

        slvDates = [
            datetime.datetime.strptime(
                os.path.basename(x).split('_')[4].split('.')[0], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]
        # sort them
        mstDates.sort()
        slvDates.sort()
        # write them back to string for following loop
        sortedMstDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in mstDates
        ]
        sortedSlvDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in slvDates
        ]

        i, outfiles = 1, []
        for mst, slv in zip(sortedMstDates, sortedSlvDates):

            inMst = datetime.datetime.strptime(mst, '%d%b%Y')
            inSlv = datetime.datetime.strptime(slv, '%d%b%Y')

            outMst = datetime.datetime.strftime(inMst, '%y%m%d')
            outSlv = datetime.datetime.strftime(inSlv, '%y%m%d')
            infile = glob.glob(
                opj('{}.data'.format(out_stack),
                    '*{}*{}_{}*img'.format(pol, mst, slv)))[0]

            outfile = opj(
                out_dir,
                '{:02d}.{}.{}.{}.{}.tif'.format(i, outMst, outSlv, product,
                                                pol))

            ras.mask_by_shape(infile,
                              outfile,
                              extent,
                              to_db=to_db,
                              datatype=ard_mt['dtype output'],
                              min_value=mm_dict[stretch]['min'],
                              max_value=mm_dict[stretch]['max'],
                              ndv=0.0,
                              description=True)
            # add to a list for subsequent vrt creation
            outfiles.append(outfile)
            i += 1

    else:
        # get the dates of the files
        dates = [
            datetime.datetime.strptime(x.split('_')[-1][:-4], '%d%b%Y')
            for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
        ]
        # sort them
        dates.sort()
        # write them back to string for following loop
        sortedDates = [
            datetime.datetime.strftime(ts, "%d%b%Y") for ts in dates
        ]

        i, outfiles = 1, []
        for date in sortedDates:

            # restructure date to YYMMDD
            inDate = datetime.datetime.strptime(date, '%d%b%Y')
            outDate = datetime.datetime.strftime(inDate, '%y%m%d')

            infile = glob.glob(
                opj('{}.data'.format(out_stack),
                    '*{}*{}*img'.format(pol, date)))[0]

            # create outfile
            outfile = opj(
                out_dir, '{:02d}.{}.{}.{}.tif'.format(i, outDate, product,
                                                      pol))

            ras.mask_by_shape(infile,
                              outfile,
                              extent,
                              to_db=to_db,
                              datatype=ard_mt['dtype output'],
                              min_value=mm_dict[stretch]['min'],
                              max_value=mm_dict[stretch]['max'],
                              ndv=0.0)

            # add to a list for subsequent vrt creation
            outfiles.append(outfile)
            i += 1

    for file in outfiles:
        return_code = h.check_out_tiff(file)
        if return_code != 0:
            h.remove_folder_content(temp_dir)
            os.remove(file)
            return return_code

    # write file, so we know this ts has been successfully processed
    # (any failed check has already returned above)
    with open(str(check_file), 'w') as file:
        file.write('passed all tests \n')

    # build vrt of timeseries
    vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
    gdal.BuildVRT(opj(out_dir, 'Timeseries.{}.{}.vrt'.format(product, pol)),
                  outfiles,
                  options=vrt_options)

    # remove tmp files
    h.delete_dimap(out_stack)
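
A usage sketch for this parameter-based signature (hypothetical paths; the proc_file JSON must contain the 'processing parameters' block with the 'single ARD' and 'time-series ARD' sections read above). On success the function returns None; a non-zero code signals a failed GeoTiff check:

import glob

dim_files = glob.glob('/data/processing/burst_T001/20*/*bs*.dim')

return_code = ard_to_ts(
    dim_files,
    processing_dir='/data/processing',
    temp_dir='/tmp/ost',
    burst='burst_T001',
    proc_file='/data/processing/processing.json',
    product='bs',
    pol='VV'
)  # ncores defaults to os.cpu_count()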
Example #6
def _ard_to_ts(burst_inventory, processing_dir, temp_dir, burst, to_db,
               ls_mask_create, ls_mask_apply, mt_speckle_filter, datatype):

    burst_dir = opj(processing_dir, burst)

    # get common burst extent
    list_of_scenes = glob.glob(opj(burst_dir, '20*', '*data*', '*img'))
    list_of_scenes = [x for x in list_of_scenes if 'layover' not in x]
    extent = opj(burst_dir, '{}.extent.shp'.format(burst))
    ts.mt_extent(list_of_scenes, extent, temp_dir, buffer=-0.0018)

    # remove initial extent files
    for file in glob.glob(opj(burst_dir, 'tmp*')):
        os.remove(file)

    # layover/shadow mask
    # define the mask path up front, since ls_mask_apply may be set
    # without ls_mask_create
    out_ls = opj(burst_dir, '{}.ls_mask.tif'.format(burst))
    if ls_mask_create is True:
        list_of_scenes = glob.glob(opj(burst_dir, '20*', '*data*', '*img'))
        list_of_layover = [x for x in list_of_scenes if 'layover' in x]
        ts.mt_layover(list_of_layover, out_ls, temp_dir, extent=extent)
        print(' INFO: Our common layover mask is located at {}'.format(out_ls))

    if ls_mask_apply:
        print(' INFO: Calculating symmetrical difference of extent and ls_mask')
        ras.polygonize_raster(out_ls, '{}.shp'.format(out_ls[:-4]))
        extent_ls_masked = opj(burst_dir, '{}.extent.masked.shp'.format(burst))
        vec.difference(extent, '{}.shp'.format(out_ls[:-4]), extent_ls_masked)
        extent = extent_ls_masked

    list_of_product_types = {'BS': 'Gamma0', 'coh': 'coh', 'ha_alpha': 'Alpha'}

    # we loop through each possible product
    for p, product_name in list_of_product_types.items():

        # we loop through each polarisation
        for pol in ['VV', 'VH', 'HH', 'HV']:

            # see if there is actually any imagery
            list_of_ts_bursts = sorted(
                glob.glob(
                    opj(processing_dir, burst, '20*', '*data*',
                        '{}*{}*img'.format(product_name, pol))))

            if len(list_of_ts_bursts) > 1:

                # check for all datafiles of this product type
                list_of_ts_bursts = sorted(
                    glob.glob(
                        opj(processing_dir, burst, '20*/',
                            '*{}*dim'.format(p))))
                list_of_ts_bursts = '\'{}\''.format(
                    ','.join(list_of_ts_bursts))

                # define out_dir for stacking routine

                out_dir = opj(processing_dir, '{}/Timeseries'.format(burst))
                os.makedirs(out_dir, exist_ok=True)

                # create namespaces

                temp_stack = opj(temp_dir, '{}_{}_{}_mt'.format(burst, p, pol))

                out_stack = opj(out_dir, '{}_{}_{}_mt'.format(burst, p, pol))

                stack_log = opj(out_dir,
                                '{}_{}_{}_stack.err_log'.format(burst, p, pol))

                # run stacking routines
                ts.create_stack(list_of_ts_bursts,
                                temp_stack,
                                stack_log,
                                polarisation=pol)

                # run mt speckle filter
                if mt_speckle_filter is True:
                    speckle_log = opj(
                        out_dir,
                        '{}_{}_{}_mt_speckle.err_log'.format(burst, p, pol))

                    ts.mt_speckle_filter('{}.dim'.format(temp_stack),
                                         out_stack, speckle_log)
                    # remove tmp files
                    h.delete_dimap(temp_stack)
                else:
                    out_stack = temp_stack

                # convert to GeoTiffs
                if p == 'BS':
                    # get the dates of the files
                    dates = [
                        datetime.datetime.strptime(
                            x.split('_')[-1][:-4], '%d%b%Y')
                        for x in glob.glob(
                            opj('{}.data'.format(out_stack), '*img'))
                    ]
                    # sort them
                    dates.sort()
                    # write them back to string for following loop
                    sortedDates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in dates
                    ]

                    i, outfiles = 1, []
                    for date in sortedDates:

                        # restructure date to YYMMDD
                        inDate = datetime.datetime.strptime(date, '%d%b%Y')
                        outDate = datetime.datetime.strftime(inDate, '%y%m%d')

                        infile = glob.glob(
                            opj('{}.data'.format(out_stack),
                                '*{}*{}*img'.format(pol, date)))[0]

                        # create outfile
                        outfile = opj(
                            out_dir,
                            '{}.{}.{}.{}.tif'.format(i, outDate, p, pol))

                        # mask by extent
                        ras.mask_by_shape(infile,
                                          outfile,
                                          extent,
                                          to_db=to_db,
                                          datatype=datatype,
                                          min_value=-30,
                                          max_value=5,
                                          ndv=0)
                        # add to a list for subsequent vrt creation
                        outfiles.append(outfile)

                        i += 1

                    # build vrt of timeseries
                    vrt_options = gdal.BuildVRTOptions(srcNodata=0,
                                                       separate=True)
                    gdal.BuildVRT(opj(out_dir,
                                      'Timeseries.{}.{}.vrt'.format(p, pol)),
                                  outfiles,
                                  options=vrt_options)

                if p == 'coh':

                    # get slave and master dates
                    mstDates = [
                        datetime.datetime.strptime(
                            os.path.basename(x).split('_')[3].split('.')[0],
                            '%d%b%Y') for x in glob.glob(
                                opj('{}.data'.format(out_stack), '*img'))
                    ]

                    slvDates = [
                        datetime.datetime.strptime(
                            os.path.basename(x).split('_')[4].split('.')[0],
                            '%d%b%Y') for x in glob.glob(
                                opj('{}.data'.format(out_stack), '*img'))
                    ]
                    # sort them
                    mstDates.sort()
                    slvDates.sort()
                    # write them back to string for following loop
                    sortedMstDates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in mstDates
                    ]
                    sortedSlvDates = [
                        datetime.datetime.strftime(ts, "%d%b%Y")
                        for ts in slvDates
                    ]

                    i, outfiles = 1, []
                    for mst, slv in zip(sortedMstDates, sortedSlvDates):

                        inMst = datetime.datetime.strptime(mst, '%d%b%Y')
                        inSlv = datetime.datetime.strptime(slv, '%d%b%Y')

                        outMst = datetime.datetime.strftime(inMst, '%y%m%d')
                        outSlv = datetime.datetime.strftime(inSlv, '%y%m%d')

                        infile = glob.glob(
                            opj('{}.data'.format(out_stack),
                                '*{}*{}_{}*img'.format(pol, mst, slv)))[0]
                        outfile = opj(
                            out_dir, '{}.{}.{}.{}.{}.tif'.format(
                                i, outMst, outSlv, p, pol))

                        ras.mask_by_shape(infile,
                                          outfile,
                                          extent,
                                          to_db=False,
                                          datatype=datatype,
                                          min_value=0.000001,
                                          max_value=1,
                                          ndv=0)

                        # add to a list for subsequent vrt creation
                        outfiles.append(outfile)

                        i += 1

                    # build vrt of timeseries
                    vrt_options = gdal.BuildVRTOptions(srcNodata=0,
                                                       separate=True)
                    gdal.BuildVRT(opj(out_dir,
                                      'Timeseries.{}.{}.vrt'.format(p, pol)),
                                  outfiles,
                                  options=vrt_options)

                # remove tmp files
                h.delete_dimap(out_stack)

    # the H-A-Alpha bands live under the ha_alpha product directories
    for pol in ['Alpha', 'Entropy', 'Anisotropy']:

        list_of_ts_bursts = sorted(
            glob.glob(
                opj(processing_dir, burst, '20*', '*ha_alpha*',
                    '*{}.img'.format(pol))))

        if len(list_of_ts_bursts) > 1:

            list_of_ts_bursts = sorted(
                glob.glob(
                    opj(processing_dir, burst, '20*/', '*ha_alpha*dim')))
            list_of_ts_bursts = '\'{}\''.format(','.join(list_of_ts_bursts))


            out_dir = opj(processing_dir, '{}/Timeseries'.format(burst))
            os.makedirs(out_dir, exist_ok=True)

            temp_stack = opj(temp_dir, '{}_{}_mt'.format(burst, pol))
            out_stack = opj(out_dir, '{}_{}_mt'.format(burst, pol))

            stack_log = opj(out_dir, '{}_{}_stack.err_log'.format(burst, pol))
            # processing routines
            ts.create_stack(list_of_ts_bursts,
                            temp_stack,
                            stack_log,
                            pattern=pol)

            if mt_speckle_filter is True:
                speckle_log = opj(
                    out_dir, '{}_{}_mt_speckle.err_log'.format(burst, pol))
                ts.mt_speckle_filter('{}.dim'.format(temp_stack), out_stack,
                                     speckle_log)
                # remove tmp files
                h.delete_dimap(temp_stack)
            else:
                out_stack = temp_stack

            # get the dates of the files
            dates = [
                datetime.datetime.strptime(x.split('_')[-1][:-4], '%d%b%Y')
                for x in glob.glob(opj('{}.data'.format(out_stack), '*img'))
            ]
            # sort them
            dates.sort()
            # write them back to string for following loop
            sortedDates = [
                datetime.datetime.strftime(ts, "%d%b%Y") for ts in dates
            ]

            i, outfiles = 1, []
            for date in sortedDates:

                # restructure date to YYMMDD
                inDate = datetime.datetime.strptime(date, '%d%b%Y')
                outDate = datetime.datetime.strftime(inDate, '%y%m%d')

                infile = glob.glob(
                    opj('{}.data'.format(out_stack),
                        '*{}*{}*img'.format(pol, date)))[0]
                # create outfile
                outfile = opj(out_dir,
                              '{}.{}.ha_alpha.{}.tif'.format(i, outDate, pol))
                # mask by extent
                max_value = 90 if pol == 'Alpha' else 1
                ras.mask_by_shape(infile,
                                  outfile,
                                  extent,
                                  to_db=False,
                                  datatype=datatype,
                                  min_value=0.000001,
                                  max_value=max_value,
                                  ndv=0)

                # add to a list for subsequent vrt creation
                outfiles.append(outfile)
                i += 1

            # build vrt of timeseries
            vrt_options = gdal.BuildVRTOptions(srcNodata=0, separate=True)
            gdal.BuildVRT(opj(out_dir, 'Timeseries.{}.vrt'.format(pol)),
                          outfiles,
                          options=vrt_options)

            # remove tmp files
            h.delete_dimap(out_stack)
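
A usage sketch for this internal helper (hypothetical paths; the flags mirror the keyword arguments of the signature above). Note that burst_inventory is accepted but never referenced in the function body:

_ard_to_ts(
    burst_inventory=None,  # accepted but unused in the body above
    processing_dir='/data/processing',
    temp_dir='/tmp/ost',
    burst='burst_T001',
    to_db=True,
    ls_mask_create=True,
    ls_mask_apply=True,
    mt_speckle_filter=True,
    datatype='float32'
)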