Example #1
def createBrowseImages(self):
    for name in self._listPng:
        '''
        if(name.count(self._corrections)):
            command = 'mdx.py -P ' + name + ' -cmap cmy -wrap 20'
            self.saveImage(command,name + '_20rad')
        '''
        if 'unw.geo' in name:
            command = 'mdx.py -P ' + name
            createImage(command, name)
            command = 'mdx.py -P ' + name + ' -wrap 20'
            createImage(command, name + '_20rad')
            parser = createFileParser('xml')
            #get the properties from one of the geo files
            prop, fac, misc = parser.parse(name + '.xml')
            coordinate1 = key_of_same_content('coordinate1', prop)[1]
            width = int(key_of_same_content('size', coordinate1)[1])
            command = 'mdx -P ' + name + ' -s ' + str(
                width) + ' -amp -r4 -rtlr ' + str(width * 4) + ' -CW'
            createImage(command, self._amplitude)
        elif 'cor.geo' in name:
            command = 'mdx.py -P ' + name
            createImage(command, name)
            parser = createFileParser('xml')
            #get the properties from one of the geo files
            prop, fac, misc = parser.parse(name + '.xml')
            coordinate1 = key_of_same_content('coordinate1', prop)[1]
            width = int(key_of_same_content('size', coordinate1)[1])
            command = 'mdx -P ' + name + ' -s ' + str(
                width) + ' -r4 -rhdr ' + str(
                    width * 4) + ' -cmap cmy -wrap 1.2'
            createImage(command,
                        name.replace('.cor.geo', '_ph_only.cor.geo'))
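
These examples call a createImage helper that is not shown here. A minimal sketch of what it might look like, assuming the out.ppm/ImageMagick convert workflow that appears in the commented-out browse code of Example #6 (the signature and the .browse.png naming are assumptions, not the confirmed implementation):

import os
import subprocess

def createImage(command, base_name):
    # hypothetical sketch: run an mdx command (which writes its output
    # to out.ppm), then build trimmed full-size and thumbnail browse PNGs
    browse_img = base_name + '.browse.png'
    browse_img_small = base_name + '.browse_small.png'
    subprocess.check_call(command, shell=True)
    subprocess.check_call('convert out.ppm -transparent black -trim {}'
                          .format(browse_img), shell=True)
    subprocess.check_call('convert -resize 250x250 {} {}'
                          .format(browse_img, browse_img_small), shell=True)
    if os.path.exists('out.ppm'):
        os.unlink('out.ppm')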
Example #2
def finalize(prdName, meta):
    import os
    import json
    import shutil
    with open('../valid.list') as fp:
        pair = fp.readlines()[0].split()[0]
    #extract velocity from results
    los = 'LOS_velocity.geo'
    command = 'extractVelocity.py -i Stack/TS-PARAMS.h5 -o ' + los + ' -x ../insar/' + pair + '/insarProc.xml'
    process = "extractVelocity"
    quickrun(process, command)
    exitV = 0
    dirName = prdName
    #create product dir
    try:
        os.mkdir(dirName)
    except OSError:
        exitV = 1
        toContext("runStack:finalize", exitV,
                  "Failed to create product directory")
    #create .met.json
    with open(os.path.join(dirName, prdName + '.met.json'), 'w') as fp:
        json.dump({'losVelocity': los, 'interferograms': 'ifg.list'},
                  fp, indent=4)

    #create png from velocity and move all the products into the product dir
    try:
        createImage('mdx.py -P ' + los, los)
        productList = ['ifg.list', '../' + meta]
        listFiles = os.listdir('./')
        for fl in listFiles:
            if '.geo' in fl:
                productList.append(fl)

        #just in case the default self._inputFile has been modified
        for fileName in productList:
            shutil.move(fileName, dirName)
    except Exception:
        exitV = 1
        toContext(
            "runStack:finalize", exitV,
            "Failed to create image or move products to the product directory")
    #move the product dir up
    try:
        shutil.move(dirName, '../')
    except Exception:
        toContext("runStack:finalize", exitV,
                  "Failed to move product directory")

    #move up
    shutil.move('context.json', '../')
    os.chdir('../')
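
finalize depends on toContext and quickrun helpers that are defined elsewhere. A plausible minimal sketch, assuming toContext records a status message for the workflow and quickrun wraps a shell call (the names match the usage above, but the JSON layout is an assumption):

import json
import subprocess

def toContext(process, exitV, message):
    # hypothetical sketch: record process status for the workflow context
    with open('context.json', 'w') as fp:
        json.dump({'process': process, 'exitValue': exitV,
                   'message': message}, fp, indent=4)

def quickrun(process, command):
    # hypothetical sketch: run a shell command, recording failures via toContext
    try:
        subprocess.check_call(command, shell=True)
    except subprocess.CalledProcessError:
        toContext(process, 1, 'Failed to run ' + process)
        raise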
Example #3
def main():
    #ret = json.load(open('sent_list.json'))
    if sys.argv[2].lower() == 'sentinel':
        sensor = 'SAR-C Sentinel1'
        to_app = 'merged'
    elif sys.argv[2].lower() == 'csk':
        sensor = sys.argv[2]
        to_app = ''
    else:
        raise ValueError("Unsupported sensor: {}".format(sys.argv[2]))

    ret, status = get_list(sys.argv[1], sensor)

    cor_file = "topophase.cor.geo"
    cor_xml = "topophase.cor.geo.xml"
    flist = [cor_file, cor_xml]
    for i, f in enumerate(flist):
        flist[i] = os.path.join(to_app, f)
    
    mdx_path = "{}/bin/mdx".format(os.environ['ISCE_HOME'])

    for l in ret:
        cwd = os.getcwd()
        ls = l['url'].split('/')
        ndir = ls[-2] + '_' + ls[-1]
        if not os.path.exists(ndir):
            os.mkdir(ndir)
        try: 
            os.chdir(ndir)
            #some failed to download
            if not os.path.exists('topophase_ph_only.cor.geo.browse.png'):
                localize_data(l['url'], flist)
                rt = parse(cor_xml)
                size = int(rt.xpath('.//component[@name="coordinate1"]/property[@name="size"]/value/text()')[0])
                rhdr = size * 4
                createImage("{} -P {} -s {} -r4 -rhdr {} -cmap cmy -wrap 1.2".format(mdx_path, cor_file, size, rhdr), "topophase_ph_only.cor.geo")
                os.remove(cor_file)
                os.remove(cor_xml)
        except Exception:
            print(l['url'], 'failed')
        os.chdir(cwd)
          
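
The inline XML query above (and the equivalent ones in Examples #4 and #6) reads the raster width from an ISCE .geo.xml file; parse is presumably lxml.etree.parse, since the expression uses XPath attribute predicates. A small standalone sketch of that step:

from lxml.etree import parse

def read_width(xml_file):
    # read the coordinate1 size (raster width in pixels) from ISCE metadata
    rt = parse(xml_file)
    return int(rt.xpath(
        './/component[@name="coordinate1"]/property[@name="size"]/value/text()')[0])

# e.g. rhdr = read_width('topophase.cor.geo.xml') * 4
# (4 bytes per pixel for the -r4 float raster handed to mdx)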
Example #4
def main():
    """HySDS PGE wrapper for TopsInSAR interferogram generation."""

    # save cwd (working directory)
    cwd = os.getcwd()

    # get context
    ctx_file = os.path.abspath('_context.json')
    if not os.path.exists(ctx_file):
        raise RuntimeError("Failed to find _context.json.")
    with open(ctx_file) as f:
        ctx = json.load(f)
    logger.info("ctx: {}".format(json.dumps(ctx, indent=2, sort_keys=True)))

    # get args
    project = ctx['project']
    direction = ctx.get('direction', 'along')
    extra_products = ctx.get('extra_products', [])
    filenames = ctx['filenames']
    outname = 'filt_topophase.unw.geo'

    # get id base
    id_base = ctx['id']
    logger.info("Product base ID: {}".format(id_base))
    
    # get dataset version and set dataset ID
    version = get_version()
    id = "{}-{}-{}".format(
        id_base, version,
        re.sub("[^a-zA-Z0-9_]", "_",
               ctx.get("context", {}).get("dataset_tag", "standard")))

    # get endpoint configurations
    uu = UrlUtils()
    es_url = uu.rest_url
    es_index = "{}_{}_s1-ifg-stitched".format(uu.grq_index_prefix, version)

    # check if interferogram already exists
    logger.info("GRQ url: {}".format(es_url))
    logger.info("GRQ index: {}".format(es_index))
    logger.info("Product ID for version {}: {}".format(version, id))
    if ifg_exists(es_url, es_index, id):
        logger.info("{} interferogram for {}".format(version, id_base) +
                    " was previously generated and exists in GRQ database.")

        # cleanup IFG dirs
        for i in [os.path.split(fname)[0] for swath_list in filenames for fname in swath_list]:
            logger.info("Removing {}.".format(i))
            try:
                shutil.rmtree(i)
            except OSError:
                pass
        return 0

    # create product directory
    dataset_dir = os.path.abspath(id)
    os.makedirs(dataset_dir, 0o755)

    # dump input file
    inp = {
        'direction': direction,
        'extra_products': extra_products,
        'filenames': filenames,
        'outname': outname,
    }
    ifg_stitch_file = os.path.join(dataset_dir, "ifg_stitch.json")
    with open(ifg_stitch_file, 'w') as f:
        json.dump(inp, f, indent=2)

    # run stitcher
    stc_cmd = [
        "python3", os.path.join(BASE_PATH, "ifg_stitcher.py"), ifg_stitch_file
    ]
    stc_cmd_line = " ".join(stc_cmd)
    logger.info("Calling ifg_stitcher.py: {}".format(stc_cmd_line))
    check_call(stc_cmd_line, shell=True)
        
    # generate GDAL (ENVI) headers and move to product directory
    raster_prods = [
        'filt_topophase.unw.geo',
        'filt_topophase.unw.conncomp.geo',
        'phsig.cor.geo',
    ]
    raster_prods.extend(extra_products)
    for j in raster_prods:
        if not os.path.exists(j):
            logger.warning("{} wasn't generated.".format(j))
            continue
        shutil.move(j, dataset_dir)
        # move the GDAL (ENVI) sidecar files along with the raster
        for ext in ('xml', 'hdr', 'vrt'):
            aux = "{}.{}".format(j, ext)
            if os.path.exists(aux):
                shutil.move(aux, dataset_dir)
            else:
                logger.warning("{} wasn't generated.".format(aux))

    # save other files to product directory
    shutil.copyfile("_context.json",
                    os.path.join(dataset_dir, "{}.context.json".format(id)))
    if os.path.exists('isce.log'):
        shutil.copyfile("isce.log", os.path.join(dataset_dir, "isce.log"))
    if os.path.exists('stitch_ifgs.log'):
        shutil.copyfile("stitch_ifgs.log", os.path.join(dataset_dir, "stitch_ifgs.log"))

    # create browse images
    os.chdir(dataset_dir)
    mdx_app_path = "{}/applications/mdx.py".format(os.environ['ISCE_HOME'])
    mdx_path = "{}/bin/mdx".format(os.environ['ISCE_HOME'])
    unw_file = "filt_topophase.unw.geo"

    #unwrapped image at different rates
    createImage("{} -P {}".format(mdx_app_path, unw_file), unw_file)
    #createImage("{} -P {} -wrap {}".format(mdx_app_path, unw_file, rad),unw_file + "_5cm")
    createImage("{} -P {} -wrap 20".format(mdx_app_path, unw_file), unw_file + "_20rad")

    #amplitude image
    unw_xml = "filt_topophase.unw.geo.xml"
    rt = parse(unw_xml)
    size = int(rt.xpath('.//component[@name="coordinate1"]/property[@name="size"]/value/text()')[0])
    rtlr = size * 4
    logger.info("rtlr value for amplitude browse is: {}".format(rtlr))
    createImage("{} -P {} -s {} -amp -r4 -rtlr {} -CW".format(mdx_path, unw_file, size, rtlr), 'amplitude.geo')

    #coherence image
    #top_file = "topophase.cor.geo"
    #createImage("{} -P {}".format(mdx_app_path, top_file),top_file)

    #should be the same size as unw but just in case
    #top_xml = "topophase.cor.geo.xml"
    #rt = parse(top_xml)
    #size = eval(rt.xpath('.//component[@name="coordinate1"]/property[@name="size"]/value/text()')[0])
    #rhdr = size * 4
    #createImage("{} -P {} -s {} -r4 -rhdr {} -cmap cmy -wrap 1.2".format(mdx_path, top_file,size,rhdr),"topophase_ph_only.cor.geo")

    # create unw KMZ
    unw_kml = "unw.geo.kml"
    unw_kmz = "{}.kmz".format(id)
    call_noerr("{} {} -kml {}".format(mdx_app_path, unw_file, unw_kml))
    call_noerr("{}/sentinel/create_kmz.py {} {}.png {}".format(BASE_PATH, unw_kml, unw_file, unw_kmz))

    # remove kml
    call_noerr("rm -f *.kml")

    # chdir back up to work directory
    os.chdir(cwd)

    # create displacement tile layer
    tiles_dir = "{}/tiles".format(dataset_dir)
    vrt_prod_file = "{}/filt_topophase.unw.geo.vrt".format(dataset_dir)
    tiler_cmd_path = os.path.abspath(os.path.join(BASE_PATH, '..', 'map_tiler'))
    dis_layer = "displacement"
    tiler_cmd_tmpl = "{}/create_tiles.py {} {}/{} -b 2 -m prism --nodata 0"
    check_call(tiler_cmd_tmpl.format(tiler_cmd_path, vrt_prod_file, tiles_dir, dis_layer), shell=True)

    # create amplitude tile layer
    amp_layer = "amplitude"
    tiler_cmd_tmpl = "{}/create_tiles.py {} {}/{} -b 1 -m gray --clim_min 10 --clim_max_pct 80 --nodata 0"
    check_call(tiler_cmd_tmpl.format(tiler_cmd_path, vrt_prod_file, tiles_dir, amp_layer), shell=True)

    # create COG (cloud optimized geotiff) with no_data set
    cog_prod_file = "{}/filt_topophase.unw.geo.tif".format(dataset_dir)
    cog_cmd_tmpl = "gdal_translate {} tmp.tif -co TILED=YES -co COMPRESS=DEFLATE -a_nodata 0"
    check_call(cog_cmd_tmpl.format(vrt_prod_file), shell=True)
    check_call("gdaladdo -r average tmp.tif 2 4 8 16 32", shell=True)
    cog_cmd_tmpl = "gdal_translate tmp.tif {} -co TILED=YES -co COPY_SRC_OVERVIEWS=YES -co BLOCKXSIZE=512 -co BLOCKYSIZE=512 --config GDAL_TIFF_OVR_BLOCKSIZE 512"
    check_call(cog_cmd_tmpl.format(cog_prod_file), shell=True)
    os.unlink("tmp.tif")

    # get list of dataset and met files
    dsets = []
    mets = []
    for i in [os.path.dirname(os.path.dirname(fname)) for swath_list in filenames for fname in swath_list]:
        dsets.append(os.path.join(i, "_{}.dataset.json".format(i)))
        mets.append(os.path.join(i, "_{}.met.json".format(i)))
    logger.info("Datasets: {}.".format(dsets))
    logger.info("Mets: {}.".format(mets))

    # create dataset json
    ds_json_file = os.path.join(dataset_dir, "{}.dataset.json".format(id))
    envelope, starttime, endtime = create_dataset_json(id, version, dsets, ds_json_file)

    # create met json
    met_json_file = os.path.join(dataset_dir, "{}.met.json".format(id))
    create_met_json(id, version, envelope, starttime, endtime, mets, met_json_file, direction)

    # cleanup IFG dirs
    for i in [os.path.split(fname)[0] for swath_list in filenames for fname in swath_list]:
        logger.info("Removing {}.".format(i))
        try:
            shutil.rmtree(i)
        except OSError:
            pass
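
ifg_exists is not defined in these examples. Since es_url and es_index point at a GRQ Elasticsearch catalog, a minimal sketch could query it by product ID; the endpoint layout and response handling below are assumptions, and the shape of hits.total differs across Elasticsearch versions:

import json
import requests

def ifg_exists(es_url, es_index, id):
    # hypothetical sketch: check whether a product with this ID is indexed
    query = {"query": {"term": {"_id": id}}}
    r = requests.post("{}/{}/_search".format(es_url, es_index),
                      data=json.dumps(query),
                      headers={"Content-Type": "application/json"})
    if r.status_code == 404:
        return False  # index not created yet
    r.raise_for_status()
    total = r.json()["hits"]["total"]
    # Elasticsearch >= 7 wraps the count as {"value": N, ...}
    count = total["value"] if isinstance(total, dict) else total
    return count > 0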
Example #5
def main():
    """HySDS PGE wrapper for TopsInSAR interferogram generation."""

    # save cwd (working directory)
    cwd = os.getcwd()

    # get context
    ctx_file = os.path.abspath('_context.json')
    if not os.path.exists(ctx_file):
        raise RuntimeError("Failed to find _context.json.")
    with open(ctx_file) as f:
        ctx = json.load(f)
    logger.info("ctx: {}".format(json.dumps(ctx, indent=2)))

    # unzip SAFE dirs
    master_safe_dirs = []
    for i in ctx['master_zip_file']:
        logger.info("Unzipping {}.".format(i))
        with ZipFile(i, 'r') as zf:
            zf.extractall()
        logger.info("Removing {}.".format(i))
        try:
            os.unlink(i)
        except OSError:
            pass
        master_safe_dirs.append(i.replace(".zip", ".SAFE"))
    slave_safe_dirs = []
    for i in ctx['slave_zip_file']:
        logger.info("Unzipping {}.".format(i))
        with ZipFile(i, 'r') as zf:
            zf.extractall()
        logger.info("Removing {}.".format(i))
        try:
            os.unlink(i)
        except OSError:
            pass
        slave_safe_dirs.append(i.replace(".zip", ".SAFE"))
    #bbox only needed to create dem and not if dem provided
    if 'dem_urls' not in ctx:
        # get union bbox
        logger.info("Determining envelope bbox from SLC swaths.")
        bbox_json = "bbox.json"
        bbox_cmd_tmpl = "{}/get_union_bbox.sh -o {} *.SAFE/annotation/s1?-iw{}-slc-{}-*.xml"
        check_call(bbox_cmd_tmpl.format(BASE_PATH, bbox_json, ctx['swathnum'],
                                        "hh"),
                   shell=True)
        with open(bbox_json) as f:
            bbox = json.load(f)['envelope']
        logger.info("bbox: {}".format(bbox))
    # get id base
    id_base = ctx['id']
    logger.info("Product base ID: {}".format(id_base))

    # get dataset version and set dataset ID
    version = get_version()
    id = "{}-{}-{}".format(
        id_base, version,
        re.sub("[^a-zA-Z0-9_]", "_",
               ctx.get("context", {}).get("dataset_tag", "standard")))

    # get endpoint configurations
    uu = UrlUtils()
    es_url = uu.rest_url
    es_index = "{}_{}_s1-ifg".format(uu.grq_index_prefix, version)

    # check if interferogram already exists
    logger.info("GRQ url: {}".format(es_url))
    logger.info("GRQ index: {}".format(es_index))
    logger.info("Product ID for version {}: {}".format(version, id))

    #this part needs to be adapted for offset products, probably just need to add
    #the dataset_tag to _context.json
    if ifg_exists(es_url, es_index, id):
        logger.info("{} interferogram for {}".format(version, id_base) +
                    " was previously generated and exists in GRQ database.")

        # cleanup SAFE dirs
        for i in chain(master_safe_dirs, slave_safe_dirs):
            logger.info("Removing {}.".format(i))
            try:
                shutil.rmtree(i)
            except OSError:
                pass
        return 0

    # get DEM configuration
    dem_type = ctx.get("context", {}).get("dem_type", "SRTM+v3")
    srtm_dem_url = uu.dem_url
    ned1_dem_url = uu.ned1_dem_url
    ned13_dem_url = uu.ned13_dem_url
    dem_user = uu.dem_u
    dem_pass = uu.dem_p

    # download project specific DEM
    if 'dem_urls' in ctx:
        s = requests.session()
        #assume that the first is the data and the second the metadata
        download_file(ctx['dem_urls'][0], session=s)
        download_file(ctx['dem_urls'][1], session=s)
        dem_file = os.path.basename(ctx['dem_urls'][0])
    else:
        # get DEM bbox
        dem_S, dem_N, dem_W, dem_E = bbox
        dem_S = int(math.floor(dem_S))
        dem_N = int(math.ceil(dem_N))
        dem_W = int(math.floor(dem_W))
        dem_E = int(math.ceil(dem_E))
        if dem_type == "SRTM+v3":
            dem_url = srtm_dem_url
            dem_cmd = [
                "{}/applications/dem.py".format(os.environ['ISCE_HOME']), "-a",
                "stitch", "-b", "{} {} {} {}".format(dem_S, dem_N, dem_W,
                                                     dem_E), "-r", "-s", "1",
                "-f", "-x", "-c", "-n", dem_user, "-w", dem_pass, "-u", dem_url
            ]
            dem_cmd_line = " ".join(dem_cmd)
            logger.info("Calling dem.py: {}".format(dem_cmd_line))
            check_call(dem_cmd_line, shell=True)
            dem_file = glob("*.dem.wgs84")[0]
        else:
            if dem_type == "NED1": dem_url = ned1_dem_url
            elif dem_type.startswith("NED13"): dem_url = ned13_dem_url
            else: raise RuntimeError("Unknown dem type %s." % dem_type)
            if dem_type == "NED13-downsampled": downsample_option = "-d 33%"
            else: downsample_option = ""
            dem_cmd = [
                "{}/ned_dem.py".format(BASE_PATH), "-a", "stitch", "-b",
                "{} {} {} {}".format(dem_S, dem_N, dem_W, dem_E),
                downsample_option, "-u", dem_user, "-p", dem_pass, dem_url
            ]
            dem_cmd_line = " ".join(dem_cmd)
            logger.info("Calling ned_dem.py: {}".format(dem_cmd_line))
            check_call(dem_cmd_line, shell=True)
            dem_file = "stitched.dem"
    logger.info("Using DEM file: {}".format(dem_file))

    # fix file path in DEM xml
    fix_cmd = [
        "{}/applications/fixImageXml.py".format(os.environ['ISCE_HOME']), "-i",
        dem_file, "--full"
    ]
    fix_cmd_line = " ".join(fix_cmd)
    logger.info("Calling fixImageXml.py: {}".format(fix_cmd_line))
    check_call(fix_cmd_line, shell=True)

    # download auxiliary calibration files
    aux_cmd = [
        #"{}/fetchCal.py".format(BASE_PATH), "-o", "aux_cal"
        "{}/fetchCalES.py".format(BASE_PATH),
        "-o",
        "aux_cal"
    ]
    aux_cmd_line = " ".join(aux_cmd)
    #logger.info("Calling fetchCal.py: {}".format(aux_cmd_line))
    logger.info("Calling fetchCalES.py: {}".format(aux_cmd_line))
    check_call(aux_cmd_line, shell=True)

    # create initial input xml
    xml_file = "topsApp.xml"
    create_input_xml_offset(
        os.path.join(BASE_PATH, 'topsApp_offset.xml.tmpl'), xml_file,
        str(master_safe_dirs), str(slave_safe_dirs), ctx['master_orbit_file'],
        ctx['slave_orbit_file'], dem_file, ctx['swathnum'],
        ctx['ampcor_skip_width'], ctx['ampcor_skip_height'],
        ctx['ampcor_src_win_width'], ctx['ampcor_src_win_height'],
        ctx['ampcor_src_width'], ctx['ampcor_src_height'])

    # run topsApp for offset
    topsapp_cmd = ["topsApp.py", "--steps"]
    topsapp_cmd_line = " ".join(topsapp_cmd)
    logger.info("Calling topsApp.py for offset: {}".format(topsapp_cmd_line))
    check_call(topsapp_cmd_line, shell=True)

    # create product directory
    prod_dir = id
    os.makedirs(prod_dir, 0o755)

    # create merged directory in product
    prod_merged_dir = os.path.join(prod_dir, 'merged')
    os.makedirs(prod_merged_dir, 0o755)

    # generate GDAL (ENVI) headers and move to product directory
    raster_prods = ('merged/dense_offsets.bil', 'merged/dense_offsets_snr.bil',
                    'merged/filt_dense_offsets.bil', 'merged/los.rdr')

    def move_raster(p):
        # generate GDAL (ENVI) headers, then move the raster and its
        # .xml/.hdr/.vrt sidecars into the product directory
        call_noerr("isce2gis.py envi -i {}".format(p))
        for f in (p, "{}.xml".format(p), "{}.hdr".format(p), "{}.vrt".format(p)):
            if os.path.exists(f):
                shutil.move(f, prod_merged_dir)
            else:
                logger.warning("{} wasn't generated.".format(f))

    for i in raster_prods:
        # radar-coded products
        move_raster(i)
        # geo-coded products
        j = "{}.geo".format(i)
        if os.path.exists(j):
            move_raster(j)

    # save other files to product directory
    shutil.copyfile("_context.json",
                    os.path.join(prod_dir, "{}.context.json".format(id)))
    shutil.copyfile("topsApp.xml", os.path.join(prod_dir, "topsApp.xml"))
    shutil.copyfile("fine_interferogram/IW{}.xml".format(ctx['swathnum']),
                    os.path.join(prod_dir, "fine_interferogram.xml"))
    shutil.copyfile("master/IW{}.xml".format(ctx['swathnum']),
                    os.path.join(prod_dir, "master.xml"))
    shutil.copyfile("slave/IW{}.xml".format(ctx['swathnum']),
                    os.path.join(prod_dir, "slave.xml"))
    if os.path.exists('topsProc.xml'):
        shutil.copyfile("topsProc.xml", os.path.join(prod_dir, "topsProc.xml"))
    if os.path.exists('isce.log'):
        shutil.copyfile("isce.log", os.path.join(prod_dir, "isce.log"))

    # move PICKLE to product directory
    shutil.move('PICKLE', prod_dir)

    # create browse images
    os.chdir(prod_merged_dir)
    mdx_path = "{}/bin/mdx".format(os.environ['ISCE_HOME'])
    from utils.createImage import createImage
    from isceobj.Image.Image import Image
    offset_file = "dense_offsets.bil.geo"
    snr_file = "dense_offsets_snr.bil.geo"

    im = Image()
    im.load(offset_file + '.xml')
    mdx_args = '-s %d -r4 -rtlr %d -cmap cmy -wrap 10' % (im.width,
                                                          4 * im.width)

    #azimuth offsets browse image
    createImage("{} -P {} {}".format(mdx_path, offset_file, mdx_args),
                'dense_offsets_az.bil.geo')

    mdx_args = '-s %d -r4 -rhdr %d -cmap cmy -wrap 10' % (im.width,
                                                          4 * im.width)
    #range offsets browse image
    createImage("{} -P {} {}".format(mdx_path, offset_file, mdx_args),
                'dense_offsets_rn.bil.geo')

    mdx_args = '-s %d -r4 -clpmin 0 -clpmax 10 -cmap cmy -wrap 12' % (
        im.width, )
    #offsets SNR browse image
    createImage("{} -P {} {}".format(mdx_path, snr_file, mdx_args), snr_file)

    # move all browse images to root of product directory
    call_noerr("mv -f *.png ..")

    # chdir back up to work directory
    os.chdir(cwd)

    # extract metadata from master
    met_file = os.path.join(prod_dir, "{}.met.json".format(id))
    extract_cmd_path = os.path.abspath(
        os.path.join(BASE_PATH, '..', '..', 'frameMetadata', 'sentinel'))
    extract_cmd_tmpl = "{}/extractMetadata_s1.sh -i {}/annotation/s1?-iw{}-slc-{}-*.xml -o {}"
    check_call(extract_cmd_tmpl.format(extract_cmd_path, master_safe_dirs[0],
                                       ctx['swathnum'], "hh", met_file),
               shell=True)

    # add master/slave ids and orbits to met JSON (per ASF request)
    master_ids = [i.replace(".zip", "") for i in ctx['master_zip_file']]
    slave_ids = [i.replace(".zip", "") for i in ctx['slave_zip_file']]
    master_rt = parse(os.path.join(prod_dir, "master.xml"))
    master_orbit_number = int(
        master_rt.xpath('.//property[@name="orbitnumber"]/value/text()')[0])
    slave_rt = parse(os.path.join(prod_dir, "slave.xml"))
    slave_orbit_number = int(
        slave_rt.xpath('.//property[@name="orbitnumber"]/value/text()')[0])
    with open(met_file) as f:
        md = json.load(f)
    md['master_scenes'] = master_ids
    md['slave_scenes'] = slave_ids
    md['orbitNumber'] = [master_orbit_number, slave_orbit_number]
    md['dataset_type'] = 'offset'
    #fix to make platform metadata consistent
    if 'platform' in md:
        if md['platform'] == 'S1A':
            md['platform'] = 'Sentinel-1A'
        if md['platform'] == 'S1B':
            md['platform'] = 'Sentinel-1B'
    if 'orbit' in md: del md['orbit']  #FIX FOR INVALID ORBIT METADATA
    if ctx.get('stitch_subswaths_xt', False): md['swath'] = [1, 2, 3]
    #md['esd_threshold'] = esd_coh_th if do_esd else -1.  # add ESD coherence threshold

    # add range_looks and azimuth_looks to metadata for stitching purposes
    md['azimuth_looks'] = int(ctx['azimuth_looks'])
    md['range_looks'] = int(ctx['range_looks'])

    # add dem_type
    md['dem_type'] = dem_type

    # write met json
    with open(met_file, 'w') as f:
        json.dump(md, f, indent=2)

    # generate dataset JSON
    ds_file = os.path.join(prod_dir, "{}.dataset.json".format(id))
    create_dataset_json(id, version, met_file, ds_file)

    # move merged products to root of product directory
    #call_noerr("mv -f {}/* {}".format(prod_merged_dir, prod_dir))
    #shutil.rmtree(prod_merged_dir)

    # write PROV-ES JSON
    #${BASE_PATH}/create_prov_es-create_interferogram.sh $id $project $master_orbit_file $slave_orbit_file \
    #                                                        ${dem_file}.xml $dem_file $WORK_DIR \
    #                                                        ${id}/${id}.prov_es.json > create_prov_es.log 2>&1

    # clean out SAFE directories and DEM files
    for i in chain(master_safe_dirs, slave_safe_dirs):
        shutil.rmtree(i)
    for i in glob("dem*"):
        os.unlink(i)
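
call_noerr is used throughout these wrappers but never defined; its name and its uses (browse-image generation and file shuffling that should not abort the PGE) suggest a wrapper that logs failures instead of raising. A minimal sketch under that assumption:

import logging
import subprocess

logger = logging.getLogger(__name__)

def call_noerr(cmd):
    # hypothetical sketch: run a shell command and log, but do not
    # propagate, any failure
    try:
        subprocess.check_call(cmd, shell=True)
    except Exception as e:
        logger.error("Got exception running {}: {}".format(cmd, str(e)))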
Example #6
def main():
    """HySDS PGE wrapper for TopsInSAR interferogram generation."""

    # save cwd (working directory)
    cwd = os.getcwd()

    # get context
    ctx_file = os.path.abspath('_context.json')
    if not os.path.exists(ctx_file):
        raise RuntimeError("Failed to find _context.json.")
    with open(ctx_file) as f:
        ctx = json.load(f)
    logger.info("ctx: {}".format(json.dumps(ctx, indent=2)))

    #Pull topsApp configs
    ctx['azimuth_looks'] = ctx.get("context", {}).get("azimuth_looks", 3)
    ctx['range_looks'] = ctx.get("context", {}).get("range_looks", 7)

    # stitch all subswaths?
    ctx['stitch_subswaths_xt'] = False
    if ctx['swathnum'] is None:
        ctx['stitch_subswaths_xt'] = True
        ctx['swathnum'] = 1
        # use default azimuth and range looks for cross-swath stitching
        ctx['azimuth_looks'] = ctx.get("context", {}).get("azimuth_looks", 7)
        ctx['range_looks'] = ctx.get("context", {}).get("range_looks", 19)
    logger.info("Using azimuth_looks of %d and range_looks of %d" %
                (ctx['azimuth_looks'], ctx['range_looks']))

    ctx['filter_strength'] = ctx.get("context", {}).get("filter_strength", 0.5)
    logger.info("Using filter_strength of %f" % ctx['filter_strength'])

    # unzip SAFE dirs
    master_safe_dirs = []
    for i in ctx['master_zip_file']:
        logger.info("Unzipping {}.".format(i))
        with ZipFile(i, 'r') as zf:
            zf.extractall()
        logger.info("Removing {}.".format(i))
        try:
            os.unlink(i)
        except OSError:
            pass
        master_safe_dirs.append(i.replace(".zip", ".SAFE"))
    slave_safe_dirs = []
    for i in ctx['slave_zip_file']:
        logger.info("Unzipping {}.".format(i))
        with ZipFile(i, 'r') as zf:
            zf.extractall()
        logger.info("Removing {}.".format(i))
        try:
            os.unlink(i)
        except OSError:
            pass
        slave_safe_dirs.append(i.replace(".zip", ".SAFE"))

    # get polarization values
    master_pol = get_polarization(master_safe_dirs[0])
    slave_pol = get_polarization(slave_safe_dirs[0])
    if master_pol == slave_pol:
        match_pol = master_pol
    else:
        match_pol = "{{{},{}}}".format(master_pol, slave_pol)

    # get union bbox
    logger.info("Determining envelope bbox from SLC swaths.")
    bbox_json = "bbox.json"
    bbox_cmd_tmpl = "{}/get_union_bbox.sh -o {} *.SAFE/annotation/s1?-iw{}-slc-{}-*.xml"
    check_call(bbox_cmd_tmpl.format(BASE_PATH, bbox_json, ctx['swathnum'],
                                    match_pol),
               shell=True)
    with open(bbox_json) as f:
        bbox = json.load(f)['envelope']
    logger.info("bbox: {}".format(bbox))

    # get id base
    id_base = ctx['id']
    logger.info("Product base ID: {}".format(id_base))

    # get dataset version and set dataset ID
    version = get_version()
    id = "{}-{}-{}".format(
        id_base, version,
        re.sub("[^a-zA-Z0-9_]", "_",
               ctx.get("context", {}).get("dataset_tag", "standard")))

    # get endpoint configurations
    uu = UrlUtils()
    es_url = uu.rest_url
    es_index = "{}_{}_s1-ifg".format(uu.grq_index_prefix, version)

    # check if interferogram already exists
    logger.info("GRQ url: {}".format(es_url))
    logger.info("GRQ index: {}".format(es_index))
    logger.info("Product ID for version {}: {}".format(version, id))
    if ifg_exists(es_url, es_index, id):
        logger.info("{} interferogram for {}".format(version, id_base) +
                    " was previously generated and exists in GRQ database.")

        # cleanup SAFE dirs
        for i in chain(master_safe_dirs, slave_safe_dirs):
            logger.info("Removing {}.".format(i))
            try:
                shutil.rmtree(i)
            except OSError:
                pass
        return 0

    # get DEM configuration
    dem_type = ctx.get("context", {}).get("dem_type", "SRTM+v3")
    srtm_dem_url = uu.dem_url
    ned1_dem_url = uu.ned1_dem_url
    ned13_dem_url = uu.ned13_dem_url
    dem_user = uu.dem_u
    dem_pass = uu.dem_p

    # download project specific DEM
    if 'kilauea' in ctx['project']:
        s = requests.session()
        s.auth = (dem_user, dem_pass)
        download_file(KILAUEA_DEM_XML, session=s)
        download_file(KILAUEA_DEM, session=s)
        dem_file = os.path.basename(KILAUEA_DEM)
    else:
        # get DEM bbox
        dem_S, dem_N, dem_W, dem_E = bbox
        dem_S = int(math.floor(dem_S))
        dem_N = int(math.ceil(dem_N))
        dem_W = int(math.floor(dem_W))
        dem_E = int(math.ceil(dem_E))
        if dem_type == "SRTM+v3":
            dem_url = srtm_dem_url
            dem_cmd = [
                "{}/applications/dem.py".format(os.environ['ISCE_HOME']), "-a",
                "stitch", "-b", "{} {} {} {}".format(dem_S, dem_N, dem_W,
                                                     dem_E), "-r", "-s", "1",
                "-f", "-x", "-c", "-n", dem_user, "-w", dem_pass, "-u", dem_url
            ]
            dem_cmd_line = " ".join(dem_cmd)
            logger.info("Calling dem.py: {}".format(dem_cmd_line))
            check_call(dem_cmd_line, shell=True)
            dem_file = glob("*.dem.wgs84")[0]
        else:
            if dem_type == "NED1": dem_url = ned1_dem_url
            elif dem_type.startswith("NED13"): dem_url = ned13_dem_url
            else: raise RuntimeError("Unknown dem type %s." % dem_type)
            if dem_type == "NED13-downsampled": downsample_option = "-d 33%"
            else: downsample_option = ""
            dem_cmd = [
                "{}/ned_dem.py".format(BASE_PATH), "-a", "stitch", "-b",
                "{} {} {} {}".format(dem_S, dem_N, dem_W, dem_E),
                downsample_option, "-u", dem_user, "-p", dem_pass, dem_url
            ]
            dem_cmd_line = " ".join(dem_cmd)
            logger.info("Calling ned_dem.py: {}".format(dem_cmd_line))
            check_call(dem_cmd_line, shell=True)
            dem_file = "stitched.dem"
    logger.info("Using DEM file: {}".format(dem_file))

    # fix file path in DEM xml
    fix_cmd = [
        "{}/applications/fixImageXml.py".format(os.environ['ISCE_HOME']), "-i",
        dem_file, "--full"
    ]
    fix_cmd_line = " ".join(fix_cmd)
    logger.info("Calling fixImageXml.py: {}".format(fix_cmd_line))
    check_call(fix_cmd_line, shell=True)

    # download auxiliary calibration files
    aux_cmd = [
        #"{}/fetchCal.py".format(BASE_PATH), "-o", "aux_cal"
        "{}/fetchCalES.py".format(BASE_PATH),
        "-o",
        "aux_cal"
    ]
    aux_cmd_line = " ".join(aux_cmd)
    #logger.info("Calling fetchCal.py: {}".format(aux_cmd_line))
    logger.info("Calling fetchCalES.py: {}".format(aux_cmd_line))
    check_call(aux_cmd_line, shell=True)

    # create initial input xml
    do_esd = True
    esd_coh_th = 0.85
    xml_file = "topsApp.xml"
    create_input_xml(
        os.path.join(BASE_PATH, 'topsApp.xml.tmpl'), xml_file,
        str(master_safe_dirs), str(slave_safe_dirs), ctx['master_orbit_file'],
        ctx['slave_orbit_file'], master_pol, slave_pol, dem_file,
        "1, 2, 3" if ctx['stitch_subswaths_xt'] else ctx['swathnum'],
        ctx['azimuth_looks'], ctx['range_looks'], ctx['filter_strength'],
        "{} {} {} {}".format(*bbox), "True", do_esd, esd_coh_th)

    # run topsApp to prepesd step
    topsapp_cmd = [
        "topsApp.py",
        "--steps",
        "--end=prepesd",
    ]
    topsapp_cmd_line = " ".join(topsapp_cmd)
    logger.info(
        "Calling topsApp.py to prepesd step: {}".format(topsapp_cmd_line))
    check_call(topsapp_cmd_line, shell=True)

    # iterate over ESD coherence thresholds
    esd_coh_increment = 0.05
    esd_coh_min = 0.5
    topsapp_cmd = [
        "topsApp.py",
        "--steps",
        "--dostep=esd",
    ]
    topsapp_cmd_line = " ".join(topsapp_cmd)
    while True:
        logger.info(
            "Calling topsApp.py on esd step with ESD coherence threshold: {}".
            format(esd_coh_th))
        try:
            check_call(topsapp_cmd_line, shell=True)
            break
        except CalledProcessError:
            logger.info(
                "ESD filtering failed with ESD coherence threshold: {}".format(
                    esd_coh_th))
            esd_coh_th = round(esd_coh_th - esd_coh_increment, 2)
            if esd_coh_th < esd_coh_min:
                logger.info("Disabling ESD filtering.")
                do_esd = False
                create_input_xml(
                    os.path.join(BASE_PATH, 'topsApp.xml.tmpl'), xml_file,
                    str(master_safe_dirs), str(slave_safe_dirs),
                    ctx['master_orbit_file'], ctx['slave_orbit_file'],
                    master_pol, slave_pol, dem_file, "1, 2, 3"
                    if ctx['stitch_subswaths_xt'] else ctx['swathnum'],
                    ctx['azimuth_looks'], ctx['range_looks'],
                    ctx['filter_strength'], "{} {} {} {}".format(*bbox),
                    "True", do_esd, esd_coh_th)
                break
            logger.info("Stepping down ESD coherence threshold to: {}".format(
                esd_coh_th))
            logger.info(
                "Creating topsApp.xml with ESD coherence threshold: {}".format(
                    esd_coh_th))
            create_input_xml(
                os.path.join(BASE_PATH, 'topsApp.xml.tmpl'), xml_file,
                str(master_safe_dirs), str(slave_safe_dirs),
                ctx['master_orbit_file'], ctx['slave_orbit_file'], master_pol,
                slave_pol, dem_file,
                "1, 2, 3" if ctx['stitch_subswaths_xt'] else ctx['swathnum'],
                ctx['azimuth_looks'], ctx['range_looks'],
                ctx['filter_strength'], "{} {} {} {}".format(*bbox), "True",
                do_esd, esd_coh_th)

    # run topsApp from rangecoreg to geocode
    topsapp_cmd = [
        "topsApp.py",
        "--steps",
        "--start=rangecoreg",
        "--end=geocode",
    ]
    topsapp_cmd_line = " ".join(topsapp_cmd)
    logger.info(
        "Calling topsApp.py to geocode step: {}".format(topsapp_cmd_line))
    check_call(topsapp_cmd_line, shell=True)

    # get radian value for 5-cm wrap
    rt = parse('master/IW{}.xml'.format(ctx['swathnum']))
    wv = float(rt.xpath('.//property[@name="radarwavelength"]/value/text()')[0])
    rad = 4 * np.pi * .05 / wv
    logger.info("Radian value for 5-cm wrap is: {}".format(rad))

    # create product directory
    prod_dir = id
    os.makedirs(prod_dir, 0o755)

    # create merged directory in product
    prod_merged_dir = os.path.join(prod_dir, 'merged')
    os.makedirs(prod_merged_dir, 0o755)

    # generate GDAL (ENVI) headers and move to product directory
    raster_prods = (
        'merged/topophase.cor',
        'merged/topophase.flat',
        'merged/filt_topophase.flat',
        'merged/filt_topophase.unw',
        'merged/filt_topophase.unw.conncomp',
        'merged/phsig.cor',
        'merged/los.rdr',
        'merged/dem.crop',
    )
    def move_raster(p):
        # generate GDAL (ENVI) headers, then move the raster and its
        # .xml/.hdr/.vrt sidecars into the product directory
        call_noerr("isce2gis.py envi -i {}".format(p))
        for f in (p, "{}.xml".format(p), "{}.hdr".format(p), "{}.vrt".format(p)):
            if os.path.exists(f):
                shutil.move(f, prod_merged_dir)
            else:
                logger.warning("{} wasn't generated.".format(f))

    for i in raster_prods:
        # radar-coded products
        move_raster(i)
        # geo-coded products
        j = "{}.geo".format(i)
        if os.path.exists(j):
            move_raster(j)

    # save other files to product directory
    shutil.copyfile("_context.json",
                    os.path.join(prod_dir, "{}.context.json".format(id)))
    shutil.copyfile("topsApp.xml", os.path.join(prod_dir, "topsApp.xml"))
    shutil.copyfile("fine_interferogram/IW{}.xml".format(ctx['swathnum']),
                    os.path.join(prod_dir, "fine_interferogram.xml"))
    shutil.copyfile("master/IW{}.xml".format(ctx['swathnum']),
                    os.path.join(prod_dir, "master.xml"))
    shutil.copyfile("slave/IW{}.xml".format(ctx['swathnum']),
                    os.path.join(prod_dir, "slave.xml"))
    if os.path.exists('topsProc.xml'):
        shutil.copyfile("topsProc.xml", os.path.join(prod_dir, "topsProc.xml"))
    if os.path.exists('isce.log'):
        shutil.copyfile("isce.log", os.path.join(prod_dir, "isce.log"))

    # move PICKLE to product directory
    shutil.move('PICKLE', prod_dir)

    # create browse images
    os.chdir(prod_merged_dir)
    mdx_app_path = "{}/applications/mdx.py".format(os.environ['ISCE_HOME'])
    mdx_path = "{}/bin/mdx".format(os.environ['ISCE_HOME'])
    from utils.createImage import createImage
    unw_file = "filt_topophase.unw.geo"
    #unwrapped image at different rates
    createImage("{} -P {}".format(mdx_app_path, unw_file), unw_file)
    createImage("{} -P {} -wrap {}".format(mdx_app_path, unw_file, rad),
                unw_file + "_5cm")
    createImage("{} -P {} -wrap 20".format(mdx_app_path, unw_file),
                unw_file + "_20rad")
    #amplitude image
    unw_xml = "filt_topophase.unw.geo.xml"
    rt = parse(unw_xml)
    size = int(
        rt.xpath(
            './/component[@name="coordinate1"]/property[@name="size"]/value/text()'
        )[0])
    rtlr = size * 4
    logger.info("rtlr value for amplitude browse is: {}".format(rtlr))
    createImage(
        "{} -P {} -s {} -amp -r4 -rtlr {} -CW".format(mdx_path, unw_file, size,
                                                      rtlr), 'amplitude.geo')
    #coherence image
    top_file = "topophase.cor.geo"
    createImage("{} -P {}".format(mdx_app_path, top_file), top_file)
    #should be the same size as unw but just in case
    top_xml = "topophase.cor.geo.xml"
    rt = parse(top_xml)
    size = int(
        rt.xpath(
            './/component[@name="coordinate1"]/property[@name="size"]/value/text()'
        )[0])
    rhdr = size * 4
    createImage(
        "{} -P {} -s {} -r4 -rhdr {} -cmap cmy -wrap 1.2".format(
            mdx_path, top_file, size, rhdr), "topophase_ph_only.cor.geo")
    '''
    # unw browse
    unw_file = "filt_topophase.unw.geo"
    unw_browse_img = unw_file + ".browse.png"
    unw_browse_img_small = unw_file + ".browse_small.png"
    call_noerr("{} -P {}".format(mdx_app_path, unw_file))
    call_noerr("convert out.ppm -transparent black -trim {}".format(unw_browse_img))
    call_noerr("convert -resize 250x250 {} {}".format(unw_browse_img, unw_browse_img_small))
    if os.path.exists('out.ppm'): os.unlink('out.ppm')

    # unw 5cm browse
    unw_5cm_browse_img = "unw.geo_5cm.browse.png"
    unw_5cm_browse_img_small = "unw.geo_5cm.browse_small.png"
    call_noerr("{} -P {} -wrap {}".format(mdx_app_path, unw_file, rad))
    call_noerr("convert out.ppm -transparent black -trim {}".format(unw_5cm_browse_img))
    call_noerr("convert -resize 250x250 {} {}".format(unw_5cm_browse_img, unw_5cm_browse_img_small))
    if os.path.exists('out.ppm'): os.unlink('out.ppm')

    # unw 20rad browse
    unw_20rad_browse_img = "unw.geo_20rad.browse.png"
    unw_20rad_browse_img_small = "unw.geo_20rad.browse_small.png"
    call_noerr("{} -P {} -wrap 20".format(mdx_app_path, unw_file))
    call_noerr("convert out.ppm -transparent black -trim {}".format(unw_20rad_browse_img))
    call_noerr("convert -resize 250x250 {} {}".format(unw_20rad_browse_img, unw_20rad_browse_img_small))
    if os.path.exists('out.ppm'): os.unlink('out.ppm')

    # amplitude browse
    unw_xml = "filt_topophase.unw.geo.xml"
    amplitude_browse_img = "amplitude.geo.browse.png"
    amplitude_browse_img_small = "amplitude.geo.browse_small.png"
    rt = parse(unw_xml)
    size = eval(rt.xpath('.//component[@name="coordinate1"]/property[@name="size"]/value/text()')[0])
    rtlr = size * 4
    logger.info("rtlr value for amplitude browse is: {}".format(rtlr))
    call_noerr("{} -P {} -s {} -amp -r4 -rtlr {} -CW".format(mdx_path, unw_file, size, rtlr))
    call_noerr("convert out.ppm -transparent black -trim {}".format(amplitude_browse_img))
    call_noerr("convert -resize 250x250 {} {}".format(amplitude_browse_img, amplitude_browse_img_small))
    if os.path.exists('out.ppm'): os.unlink('out.ppm')

    # topophase browse
    top_file = "topophase.cor.geo"
    top_browse_img = "top.geo.browse.png"
    top_browse_img_small = "top.geo.browse_small.png"
    call_noerr("{} -P {}".format(mdx_app_path, top_file))
    call_noerr("convert out.ppm -transparent black -trim {}".format(top_browse_img))
    call_noerr("convert -resize 250x250 {} {}".format(top_browse_img, top_browse_img_small))
    if os.path.exists('out.ppm'): os.unlink('out.ppm')
    '''
    # create unw KMZ
    unw_kml = "unw.geo.kml"
    unw_kmz = "{}.kmz".format(id)
    call_noerr("{} {} -kml {}".format(mdx_app_path, unw_file, unw_kml))
    call_noerr("{}/create_kmz.py {} {}.png {}".format(BASE_PATH, unw_kml,
                                                      unw_file, unw_kmz))

    # move all browse images to root of product directory
    call_noerr("mv -f *.png *.kmz ..")

    # remove kml
    call_noerr("rm -f *.kml")

    # chdir back up to work directory
    os.chdir(cwd)

    # create displacement tile layer
    tiles_dir = "{}/tiles".format(prod_dir)
    vrt_prod_file = "{}/merged/filt_topophase.unw.geo.vrt".format(prod_dir)
    tiler_cmd_path = os.path.abspath(
        os.path.join(BASE_PATH, '..', '..', 'map_tiler'))
    dis_layer = "displacement"
    tiler_cmd_tmpl = "{}/create_tiles.py {} {}/{} -b 2 -m prism --nodata 0"
    check_call(tiler_cmd_tmpl.format(tiler_cmd_path, vrt_prod_file, tiles_dir,
                                     dis_layer),
               shell=True)

    # create amplitude tile layer
    amp_layer = "amplitude"
    tiler_cmd_tmpl = "{}/create_tiles.py {} {}/{} -b 1 -m gray --clim_min 10 --clim_max_pct 80 --nodata 0"
    check_call(tiler_cmd_tmpl.format(tiler_cmd_path, vrt_prod_file, tiles_dir,
                                     amp_layer),
               shell=True)

    # create COG (cloud optimized geotiff) with no_data set
    cog_prod_file = "{}/merged/filt_topophase.unw.geo.tif".format(prod_dir)
    cog_cmd_tmpl = "gdal_translate {} tmp.tif -co TILED=YES -co COMPRESS=DEFLATE -a_nodata 0"
    check_call(cog_cmd_tmpl.format(vrt_prod_file), shell=True)
    check_call("gdaladdo -r average tmp.tif 2 4 8 16 32", shell=True)
    cog_cmd_tmpl = "gdal_translate tmp.tif {} -co TILED=YES -co COPY_SRC_OVERVIEWS=YES -co BLOCKXSIZE=512 -co BLOCKYSIZE=512 --config GDAL_TIFF_OVR_BLOCKSIZE 512"
    check_call(cog_cmd_tmpl.format(cog_prod_file), shell=True)
    os.unlink("tmp.tif")

    # extract metadata from master
    met_file = os.path.join(prod_dir, "{}.met.json".format(id))
    extract_cmd_path = os.path.abspath(
        os.path.join(BASE_PATH, '..', '..', 'frameMetadata', 'sentinel'))
    extract_cmd_tmpl = "{}/extractMetadata_s1.sh -i {}/annotation/s1?-iw{}-slc-{}-*.xml -o {}"
    check_call(extract_cmd_tmpl.format(extract_cmd_path, master_safe_dirs[0],
                                       ctx['swathnum'], master_pol, met_file),
               shell=True)

    # update met JSON
    if ('RESORB' in ctx['master_orbit_file']
            or 'RESORB' in ctx['slave_orbit_file']):
        orbit_type = 'resorb'
    else:
        orbit_type = 'poeorb'
    scene_count = min(len(master_safe_dirs), len(slave_safe_dirs))
    master_mission = MISSION_RE.search(master_safe_dirs[0]).group(1)
    slave_mission = MISSION_RE.search(slave_safe_dirs[0]).group(1)
    unw_vrt = "filt_topophase.unw.geo.vrt"
    fine_int_xml = "fine_interferogram.xml"
    update_met_cmd = "{}/update_met_json.py {} {} {} {} {} {}/{} {}/{} {}/{} {}/{} {}"
    check_call(update_met_cmd.format(BASE_PATH, orbit_type, scene_count,
                                     ctx['swathnum'], master_mission,
                                     slave_mission, prod_dir, 'PICKLE',
                                     prod_dir, fine_int_xml, prod_merged_dir,
                                     unw_vrt, prod_merged_dir, unw_xml,
                                     met_file),
               shell=True)

    # add master/slave ids and orbits to met JSON (per ASF request)
    master_ids = [i.replace(".zip", "") for i in ctx['master_zip_file']]
    slave_ids = [i.replace(".zip", "") for i in ctx['slave_zip_file']]
    master_rt = parse(os.path.join(prod_dir, "master.xml"))
    master_orbit_number = int(
        master_rt.xpath('.//property[@name="orbitnumber"]/value/text()')[0])
    slave_rt = parse(os.path.join(prod_dir, "slave.xml"))
    slave_orbit_number = int(
        slave_rt.xpath('.//property[@name="orbitnumber"]/value/text()')[0])
    with open(met_file) as f:
        md = json.load(f)
    md['master_scenes'] = master_ids
    md['slave_scenes'] = slave_ids
    md['orbitNumber'] = [master_orbit_number, slave_orbit_number]
    if ctx.get('stitch_subswaths_xt', False): md['swath'] = [1, 2, 3]
    md['esd_threshold'] = esd_coh_th if do_esd else -1.  # add ESD coherence threshold

    # add range_looks and azimuth_looks to metadata for stitching purposes
    md['azimuth_looks'] = int(ctx['azimuth_looks'])
    md['range_looks'] = int(ctx['range_looks'])

    # add filter strength
    md['filter_strength'] = float(ctx['filter_strength'])

    # add dem_type
    md['dem_type'] = dem_type

    # write met json
    with open(met_file, 'w') as f:
        json.dump(md, f, indent=2)

    # generate dataset JSON
    ds_file = os.path.join(prod_dir, "{}.dataset.json".format(id))
    create_dataset_json(id, version, met_file, ds_file)

    # move merged products to root of product directory
    #call_noerr("mv -f {}/* {}".format(prod_merged_dir, prod_dir))
    #shutil.rmtree(prod_merged_dir)

    # write PROV-ES JSON
    #${BASE_PATH}/create_prov_es-create_interferogram.sh $id $project $master_orbit_file $slave_orbit_file \
    #                                                        ${dem_file}.xml $dem_file $WORK_DIR \
    #                                                        ${id}/${id}.prov_es.json > create_prov_es.log 2>&1

    # clean out SAFE directories and DEM files
    for i in chain(master_safe_dirs, slave_safe_dirs):
        shutil.rmtree(i)
    for i in glob("dem*"):
        os.unlink(i)
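
For reference, the 5-cm wrap value computed in Example #6 follows from the two-way radar path: one interferometric fringe corresponds to lambda/2 of displacement, so a displacement d contributes a phase of 4*pi*d/wavelength radians. A worked example, assuming the nominal Sentinel-1 C-band wavelength:

import numpy as np

wv = 0.05546576        # assumed Sentinel-1 C-band wavelength in meters
rad = 4 * np.pi * 0.05 / wv
print(round(rad, 2))   # ~11.33 radians of phase correspond to 5 cm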