Example #1
def byteScale(fi, lower, upper):
    outfile = fi.replace('.tif', '%s_%s.tif' % (int(lower), int(upper)))
    gdal.Translate(outfile,
                   fi,
                   outputType=gdal.GDT_Byte,
                   scaleParams=[[lower, upper]],
                   noData=0)

    # Once again, I'm getting zeros in my files even though I have set
    # the output range to 1,255!  The following will fix the issue.
    (x, y, trans, proj, data) = saa.read_gdal_file(saa.open_gdal_file(fi))
    mask = np.isinf(data)
    data[mask] = 0
    mask = (data < 0).astype(bool)
    (x, y, trans, proj, data) = saa.read_gdal_file(saa.open_gdal_file(outfile))
    mask2 = (data > 0).astype(bool)
    saa.write_gdal_file_byte("mask2.tif",
                             trans,
                             proj,
                             mask.astype(np.byte),
                             nodata=0)
    mask3 = mask ^ mask2
    data[mask3] = 1
    saa.write_gdal_file_byte(outfile, trans, proj, data, nodata=0)

    return (outfile)
Example #2
def getCorners(fi):
    (x1, y1, t1, p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi))
    ullon1 = t1[0]
    ullat1 = t1[3]
    lrlon1 = t1[0] + x1 * t1[1]
    lrlat1 = t1[3] + y1 * t1[5]
    return (ullon1, ullat1, lrlon1, lrlat1)
Example #3
def resizeFiles(params):
    x, y, trans, proj = saa.read_gdal_file_geo(
        saa.open_gdal_file(params['pFile'][0]))
    if x > 4096 or y > 4096:
        if x > y:
            width = 4096
            height = 0
        else:
            width = 0
            height = 4096

        for i in range(len(params['mdate'])):
            outFile = params['pFile'][i].replace(".tif", "_resize.tif")
            logging.info("    processing file {} to create file {}".format(
                params['pFile'][i], outFile))
            gdal.Translate(outFile,
                           params['pFile'][i],
                           resampleAlg=GRIORA_Cubic,
                           width=width,
                           height=height)
            params['pFile'][i] = outFile

            outFile = params['cFile'][i].replace(".tif", "_resize.tif")
            logging.info("    processing file {} to create file {}".format(
                params['cFile'][i], outFile))
            gdal.Translate(outFile,
                           params['cFile'][i],
                           resampleAlg=GRIORA_Cubic,
                           width=width,
                           height=height)
            params['cFile'][i] = outFile
Example #4
def read_tif_header(fi):
    x, y, trans, proj = saa.read_gdal_file_geo(saa.open_gdal_file(fi))
    lat_max = trans[3]
    lat_min = trans[3] + y * trans[5]
    lon_min = trans[0]
    lon_max = trans[0] + x * trans[1]
    coords = [lon_min, lat_max, lon_max, lat_min]
    return (proj, trans, coords)
Example #5
def get2sigmacutoffs(fi):
    (x, y, trans, proj, data) = saa.read_gdal_file(saa.open_gdal_file(fi))
    top = np.percentile(data, 98)
    data[data > top] = top
    stddev = np.std(data)
    mean = np.mean(data)
    lo = mean - 2 * stddev
    hi = mean + 2 * stddev
    return lo, hi
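A minimal usage sketch (the file name is a placeholder, and it assumes byteScale from Example #1 is also in scope): the 2-sigma cutoffs are a natural source for the lower/upper scale bounds when producing an 8-bit browse image.

lo, hi = get2sigmacutoffs("scene.tif")      # "scene.tif" is a hypothetical input
browse = byteScale("scene.tif", lo, hi)     # byteScale from Example #1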
Example #6
def apply_speckle_filter(fi):

    outfile = fi.replace('.tif', '_sf.tif')
    looks = 4
    size = 7
    dampening_factor = 1
    (x, y, trans, proj, img) = saa.read_gdal_file(saa.open_gdal_file(fi))
    data = enh_lee(looks, size, dampening_factor, img)
    saa.write_gdal_file_float(outfile, trans, proj, data, nodata=0)
    return (outfile)
Example #7
def create_dB(fi):
    (x, y, trans, proj, data) = saa.read_gdal_file(saa.open_gdal_file(fi))

    # If your input data is amplitude data, use these 2 lines:
    #    pwrdata = data*data
    #    dBdata = 10 * np.log10(pwrdata)

    # If your input data is power data, use the following line:
    dBdata = 10 * np.log10(data)

    outfile = fi.replace('.tif', '_dB.tif')
    saa.write_gdal_file_float(outfile, trans, proj, dBdata, nodata=0)
    return (outfile)
Example #8
def gdal_interferogram(folder_in=None, name=''):
    if folder_in is None:
        folder_in = os.getcwd()

    qfile = glob.glob(os.path.join(folder_in, 'q_*.img'))[0]
    ifile = qfile.replace('q_', 'i_')
    (x, y, trans, proj, idata) = saa.read_gdal_file(saa.open_gdal_file(ifile))
    (x, y, trans, proj, qdata) = saa.read_gdal_file(saa.open_gdal_file(qfile))

    #amp = np.sqrt(np.sqrt(np.power(idata,2) + np.power(qdata,2)))
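    # Log-scale the fourth-root amplitude and stretch it to the 0-255 byte range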
    amp = np.log10(
        (np.sqrt(np.sqrt(np.power(idata, 2) + np.power(qdata, 2))) + 1) /
        100000000)
    amp = (amp - np.min(amp))
    ma = amp[amp != 0]
    amp = amp - np.min(ma)
    #amp = np.where(amp < 0, amp, 0)
    amp = amp / np.max(amp) * 255
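    # Map the wrapped phase from [-pi, pi] to the 0-255 byte range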
    phase = (np.arctan2(qdata, idata) + np.pi) / (2 * np.pi) * 255

    saa.write_gdal_file_byte(os.path.join(folder_in, f'amplitude{name}.tif'),
                             trans, proj, amp)
    saa.write_gdal_file_byte(os.path.join(folder_in, f'phase{name}.tif'),
                             trans, proj, phase)
Example #9
def makeGeotiffFiles(h5File, dataName, params):

    # Open up the HDF5 file
    source = h5py.File(h5File, "r")
    imgarray = source[dataName][()]
    maxband = imgarray.shape[0]
    logging.info("Found %s bands to process" % maxband)

    # Read a reference file for geolocation and size information
    os.chdir("../DATA")
    x, y, trans, proj = saa.read_gdal_file_geo(
        saa.open_gdal_file(params['pFile'][0]))
    os.chdir("../Stack")

    # Get the entire date range
    longList = np.unique(params['mdate'] + params['sdate'])
    dateList = []
    for i in range(len(longList)):
        dateList.append(longList[i][0:8])
    dateList = np.unique(dateList)
    dateList.sort()
    logging.debug("Datelist is {}".format(dateList))

    for cnt in range(maxband):
        logging.info("Processing band %s" % str(cnt + 1))
        if dataName == 'recons':
            if params['train']:
                outFile = "{}_trn_gnt_phase.raw".format(dateList[cnt])
            else:
                outFile = "{}_gnt_phase.raw".format(dateList[cnt])
        elif dataName == 'rawts':
            if params['train']:
                outFile = "{}_trn_raw_phase.raw".format(dateList[cnt])
            else:
                outFile = "{}_raw_phase.raw".format(dateList[cnt])
        elif dataName == 'error':
            if params['train']:
                outFile = "{}_trn_error_phase.raw".format(dateList[cnt])
            else:
                outFile = "{}_error_phase.raw".format(dateList[cnt])

        cmd = 'gdal_translate -b {} -of ENVI HDF5:"{}"://{} {}'.format(
            cnt + 1, h5File, dataName, outFile)
        execute(cmd, uselogging=True)
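        # Read the raw ENVI output back in and rewrite it as a georeferenced GeoTIFF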
        newdata = np.fromfile(outFile, dtype=np.float32, count=-1)
        img = np.reshape(newdata, (y, x))
        outFile = outFile.replace('.raw', '.tif')
        saa.write_gdal_file_float(outFile, trans, proj, img)
Example #10
def reprojectFiles(params):
    os.chdir("DATA")
    for i in range(len(params['mdate'])):
        x, y, trans, proj = saa.read_gdal_file_geo(
            saa.open_gdal_file(params['pFile'][i]))
        if "PROJCS" in proj:
            outFile = params['pFile'][i].replace(".tif", "_wgs84.tif")
            logging.info("    processing file {} to create file {}".format(
                params['pFile'][i], outFile))
            gdal.Warp(outFile, params['pFile'][i], dstSRS="EPSG:4326")
            params['pFile'][i] = outFile
            outFile = params['cFile'][i].replace(".tif", "_wgs84.tif")
            logging.info("    processing file {} to create file {}".format(
                params['cFile'][i], outFile))
            gdal.Warp(outFile, params['cFile'][i], dstSRS="EPSG:4326")
            params['cFile'][i] = outFile
    os.chdir("..")
Example #11
def getOverlap(coords, fi):
    (x1, y1, t1, p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi))

    ullon1 = t1[0]
    ullat1 = t1[3]
    lrlon1 = t1[0] + x1 * t1[1]
    lrlat1 = t1[3] + y1 * t1[5]

    ullon2 = coords[0]
    ullat2 = coords[1]
    lrlon2 = coords[2]
    lrlat2 = coords[3]

    ullat = min(ullat1, ullat2)
    ullon = max(ullon1, ullon2)
    lrlat = max(lrlat1, lrlat2)
    lrlon = min(lrlon1, lrlon2)

    return (ullon, ullat, lrlon, lrlat)
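A minimal usage sketch (file names are placeholders): the corners from getCorners in Example #2 can be intersected with a second scene, and the resulting overlap passed to gdal.Translate via projWin, which expects [ulx, uly, lrx, lry].

coords = getCorners("ref.tif")                 # corners of a hypothetical reference scene
ullon, ullat, lrlon, lrlat = getOverlap(coords, "sec.tif")
gdal.Translate("ref_clip.tif", "ref.tif", projWin=[ullon, ullat, lrlon, lrlat])
gdal.Translate("sec_clip.tif", "sec.tif", projWin=[ullon, ullat, lrlon, lrlat])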
Example #12
#!/usr/bin/env python

import re, sys, os
import numpy as np
import saa_func_lib as saa

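# Command-line arguments: paths to the in-phase (i) and quadrature (q) rasters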
i = sys.argv[1]
q = sys.argv[2]

(x, y, trans, proj, idata) = saa.read_gdal_file(saa.open_gdal_file(i))
(x, y, trans, proj, qdata) = saa.read_gdal_file(saa.open_gdal_file(q))

amp = np.sqrt(np.sqrt(np.power(idata, 2) + np.power(qdata, 2)))
phase = np.arctan2(qdata, idata)

saa.write_gdal_file_float('amplitude.tif', trans, proj, amp)
saa.write_gdal_file_float('phase.tif', trans, proj, phase)
Example #13
    for item in allFiles:
        if item[-4:] == '.tif':
            infiles.append(inDir+'/'+item)


#####################
#
#  Clip images based on image content. Note:  This is EXPERIMENTAL and may fail for 
# any number of reasons
#
#####################
outfiles = []
if clip:
    for i in range(len(infiles)):
        print('Reading clip data')
        (x1, y1, trans, proj, r) = saa.read_gdal_file(saa.open_gdal_file(infiles[i]), 1)
        (x1, y1, trans, proj, g) = saa.read_gdal_file(saa.open_gdal_file(infiles[i]), 2)
        (x1, y1, trans, proj, b) = saa.read_gdal_file(saa.open_gdal_file(infiles[i]), 3)

        print('Converting to float')
        r = r.astype(np.float32)
        g = g.astype(np.float32)
        b = b.astype(np.float32)

        print('Calculating Euclidean distance')
        d1 = np.sqrt(np.power(r, 2) + np.power(g, 2) + np.power(b, 2))

        print('Median filtering')
        #d2 = medfilt2d(d1,5)
        d2 = d1
Example #14
def getPixSize(fi):
    (x1, y1, t1, p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi))
    return (t1[1])
Example #15
def procS1StackGIANT(type,
                     output,
                     descFile=None,
                     rxy=None,
                     nvalid=0.8,
                     nsbas=False,
                     filt=0.1,
                     path=None,
                     utcTime=None,
                     heading=None,
                     leave=False,
                     train=False,
                     hyp=None,
                     rawFlag=False,
                     mm=None,
                     errorFlag=False):

    logging.info(
        "***********************************************************************************"
    )
    logging.info("                 STARTING RUN {}".format(output))
    logging.info(
        "***********************************************************************************"
    )
    logging.info("Type of run is {}".format(type))

    if path is not None:
        if os.path.isdir(path):
            if path[0] != "/":
                root = os.getcwd()
                path = os.path.join(root, path)
        else:
            logging.error("ERROR: path {} is not a directory!".format(path))
            exit(1)
        logging.info("Data path is {}".format(path))

    templateDir = os.path.abspath(
        os.path.join(os.path.dirname(__file__), os.pardir, "etc"))
    logging.debug("Looking for templates in %s" % templateDir)

    if type == 'hyp':
        descFile, hypDir = prepareHypFiles(path, hyp)
    elif type == 'custom':
        if train:
            logging.warning(
                "***********************************************************************************"
            )
            logging.warning(
                "WARNING: Unable to run TRAIN model on custom inputs")
            logging.warning("WARNING: Switching off TRAIN corrections")
            logging.warning(
                "***********************************************************************************"
            )
            train = False
        if descFile is None:
            logging.error(
                "ERROR: Must specify a descriptor file when using custom option"
            )
            exit(1)
        if utcTime is None:
            logging.error(
                "ERROR: Must specify a UTC time when using custom option")
            exit(1)
        if heading is None:
            logging.error(
                "ERROR: Must specify a heading when using custom option")
            exit(1)
    elif type == 'aria':
        descFile, utcTime = prepGIAnT(intdir=path)
        heading = 12.0
    else:
        logging.error("ERROR: Unknown processing type {}".format(type))
        exit(1)

    if not os.path.isfile(descFile):
        logging.error(
            "ERROR: Unable to find descriptor file {}".format(descFile))
        exit(1)

    params = getFileList(descFile)
    params['type'] = type
    params['rxy'] = rxy
    params['nvalid'] = float(nvalid)
    params['train'] = train
    params['filt'] = filt

    if utcTime is None:
        os.chdir(hypDir)
        txtFile = glob.glob("*/*20*_20*.txt")[0]
        utcTime = getParameter(txtFile, "UTCtime")
        os.chdir("..")
    params['utctime'] = utcTime

    if heading is None:
        os.chdir(hypDir)
        txtFile = glob.glob("*/*20*_20*.txt")[0]
        heading = getParameter(txtFile, "Heading")
        os.chdir("..")
    params['heading'] = heading

    logging.info("Examining list of files to process...")
    for i in range(len(params['mdate'])):
        logging.debug("    found: {} {} {} {}".format(params['mdate'][i],
                                                      params['sdate'][i],
                                                      params['pFile'][i],
                                                      params['cFile'][i]))

    if type == 'custom':
        prepareCustomFiles(params, path)

    checkFileExistence(params)
    root = os.getcwd()

    logging.info("Reprojecting files...")
    reprojectFiles(params)

    logging.info("Cutting files...")
    os.chdir("DATA")
    if type != 'aria':
        cutFiles(params['pFile'])
        cutFiles(params['cFile'])

        for i in range(len(params['mdate'])):
            params['pFile'][i] = params['pFile'][i].replace(
                ".tif", "_clip.tif")
            params['cFile'][i] = params['cFile'][i].replace(
                ".tif", "_clip.tif")

    logging.info("Resizing files...")
    resizeFiles(params)

    if train:
        logging.info(
            "***********************************************************************************"
        )
        logging.info(
            "          PREPARING TO RUN THE TRAIN MERRA2 WEATHER MODEL")
        logging.info(
            "***********************************************************************************"
        )
        createCleanDir("TRAIN")
        os.chdir("TRAIN")
        makeParmsAPS(params, root)
        prepareFilesForTrain(params)
        myfile = os.path.join(os.pardir, params['pFile'][0])
        aps_weather_model("merra2", 1, 4, myfile)
        os.chdir("..")
        fixFileNamesTrain(params)

    logging.info("Translating files to raw format...")
    for i in range(len(params['pFile'])):
        params['pFile'][i] = toRaw(params['pFile'][i])
        params['cFile'][i] = toRaw(params['cFile'][i])

    if not leave:
        for myfile in glob.glob("*_wgs84.tif"):
            os.remove(myfile)
        for myfile in glob.glob("*_clip.tif"):
            os.remove(myfile)
        for myfile in glob.glob("*_resize.tif"):
            os.remove(myfile)

    width, length, trans, proj = saa.read_gdal_file_geo(
        saa.open_gdal_file(params['pFile'][0]))
    params['width'] = width
    params['length'] = length
    os.chdir("..")

    createIfgList(params)
    createExampleRSC(params)
    fixPrepDataXml(params, templateDir)
    fixUserfnPy(params, templateDir)
    fixPrepBasXml(params, templateDir)
    renameFiles(params)

    execute("python prepdataxml.py", uselogging=True)
    execute("PrepIgramStack.py", uselogging=True)
    execute("python prepsbasxml.py", uselogging=True)

    if not nsbas:
        logging.info("Running SBAS inversion")
        if errorFlag:
            execute("SBASxval.py", uselogging=True)
            h5File = "LS-xval.h5"
        else:
            execute("SBASInvert.py", uselogging=True)
            h5File = "LS-PARAMS.h5"
    else:
        logging.info("Running NSBAS inversion")
        if errorFlag:
            h5File = "NSBAS-xval.h5"
            execute("NSBASxval.py -o {}".format(h5file), uselogging=True)
        else:
            execute("NSBASInvert.py", uselogging=True)
            h5File = "NSBAS-PARAMS.h5"

    os.chdir("Stack")
    filelist = makePNG.mkMovie(h5File, "recons", mm=mm)
    filelist.sort()

    if rawFlag:
        filelist2 = makePNG.mkMovie(h5File, "rawts", mm=mm)
        filelist2.sort()
    elif errorFlag:
        filelist2 = makePNG.mkMovie(h5File, "error", mm=mm)
        filelist2.sort()

    # Get the entire date range
    longList = np.unique(params['mdate'] + params['sdate'])
    dateList = []
    for i in range(len(longList)):
        dateList.append(longList[i][0:8])
    dateList = np.unique(dateList)
    dateList.sort()

    # Add annotations to files
    cnt = 0
    for myfile in filelist:
        execute(
            "convert {FILE} -gravity north  -annotate +0+5 '{DATE}' anno_{FILE}"
            .format(FILE=myfile, DATE=dateList[cnt]),
            uselogging=True)
        cnt = cnt + 1
    if train:
        name = "{}_train.gif".format(output)
    else:
        name = "{}.gif".format(output)
    # Make the animation
    execute("convert -delay 120 -loop 0 anno_*.png {}".format(name),
            uselogging=True)

    if rawFlag:
        for myfile in glob.glob("anno_*.png"):
            os.remove(myfile)
        cnt = 0
        for myfile in filelist2:
            execute(
                "convert {FILE} -gravity north  -annotate +0+5 '{DATE}' anno_{FILE}"
                .format(FILE=myfile, DATE=dateList[cnt]),
                uselogging=True)
            cnt = cnt + 1
        rawname = name.replace(".gif", "_rawts.gif")
        # Make the animation
        execute("convert -delay 120 -loop 0 anno_*.png {}".format(rawname),
                uselogging=True)
    elif errorFlag:
        for myfile in glob.glob("anno_*.png"):
            os.remove(myfile)
        cnt = 0
        for myfile in filelist2:
            execute(
                "convert {FILE} -gravity north  -annotate +0+5 '{DATE}' anno_{FILE}"
                .format(FILE=myfile, DATE=dateList[cnt]),
                uselogging=True)
            cnt = cnt + 1
        rawname = name.replace(".gif", "_error.gif")
        # Make the animation
        execute("convert -delay 120 -loop 0 anno_*.png {}".format(rawname),
                uselogging=True)

    # Get product directory ready
    os.chdir("..")
    prodDir = "PRODUCT_{}".format(output)
    createCleanDir(prodDir)

    os.chdir("Stack")

    shutil.move(name, "../{}".format(prodDir))
    if rawFlag or errorFlag:
        shutil.move(rawname, "../{}".format(prodDir))

    makeGeotiffFiles(h5File, "recons", params)
    if rawFlag:
        makeGeotiffFiles(h5File, "rawts", params)
    elif errorFlag:
        makeGeotiffFiles(h5File, "error", params)

    # Move files from Stack directory
    for myfile in glob.glob("*.tif"):
        shutil.move(myfile, "../{}".format(prodDir))
    shutil.move(h5File, "../{}/{}.h5".format(prodDir, output))
    os.chdir("..")

    shutil.copy(descFile, prodDir)

    if not leave:
        if type == 'hyp':
            shutil.rmtree(hypDir)
        shutil.rmtree("DATA")
        #        shutil.rmtree("LINKS")
        shutil.rmtree("Stack")
        shutil.rmtree("Figs")

        os.remove("data.xml")
        os.remove("userfn.pyc")
        os.remove("sbas.xml")
        os.remove("prepdataxml.py")
        os.remove("prepsbasxml.py")
        os.remove("userfn.py")
        os.remove("ifg.list")
        os.remove("example.rsc")

        if train:
            for myfile in glob.glob("merra/*/*.xyz"):
                os.remove(myfile)
    else:
        shutil.move("DATA", "DATA_{}".format(output))

    logging.info(
        "***********************************************************************************"
    )
    logging.info("                 END OF RUN {}".format(output))
    logging.info(
        "***********************************************************************************"
    )
Example #16
def amp2pwr(fi):
    x, y, trans, proj, data = saa.read_gdal_file(saa.open_gdal_file(fi))
    pwrdata = data * data
    outfile = fi.replace(".tif", "_pwr.tif")
    saa.write_gdal_file_float(outfile, trans, proj, pwrdata, nodata=0)
    return (outfile)
Example #17
def pwr2amp(fi):
    x, y, trans, proj, data = saa.read_gdal_file(saa.open_gdal_file(fi))
    ampdata = np.sqrt(data)
    outfile = fi.replace(".tif", "_amp.tif")
    saa.write_gdal_file_float(outfile, trans, proj, ampdata, nodata=0)
    return (outfile)
Example #18
        hdr.write('samples = '+str(cols)+'\n')
        hdr.write('lines = '+str(rows)+'\n')
        hdr.write('bands   = '+str(bands)+'\n')
        hdr.write('header offset = 0\n')
        hdr.write('file type = ENVI Standard\n')
        hdr.write('data type = '+str(data_type)+'\n')
        hdr.write('interleave = '+band_type+'\n')
        hdr.write('byte order = 0\n')
        hdr.write('band names = {\n')
        hdr.write('}\n')
        hdr.write('\n')
        hdr.close()
        
# Open grd file and read data
print('Processing files', file)
oh = sa.open_gdal_file(file)

if bands == 1:
        (ox, oy, trans, proj, odata) = sa.read_gdal_file(oh, 1)
        np.putmask(odata, odata > 5, 0)
elif bands == 2:
        (ox, oy, trans, proj, odata) = sa.read_gdal_file(oh, 1)
        np.putmask(odata, odata > 5, 0)
        (ox, oy, trans, proj, pdata) = sa.read_gdal_file(oh, 2)

# Create geotransform based on ann file and simple EPSG:4326 WKT for projection
otrans = [ul_lon,lon_step,0,ul_lat,0,lat_step]
oproj = 'GEOGCS[\"WGS 84\",DATUM[\"WGS_1984\",SPHEROID[\"WGS 84\",6378137,298.257223563,AUTHORITY[\"EPSG\",\"7030\"]],AUTHORITY[\"EPSG\",\"6326\"]],PRIMEM[\"Greenwich\",0,AUTHORITY[\"EPSG\",\"8901\"]],UNIT[\"degree\",0.01745329251994328,AUTHORITY[\"EPSG\",\"9122\"]],AUTHORITY[\"EPSG\",\"4326\"]]'

outfile = file.replace('grd','tif')
Example #19
    elFile = 'elevation-%s.tif ' % re.split('/', tdirs[0])[-1]
    coFile = 'coherence-%s.tif ' % re.split('/', tdirs[0])[-1]

    os.chdir(dDir)
    cmd = 'gdal_translate el*.img %s ' % elFile
    print(cmd)
    os.system(cmd)

    cmd = 'gdal_translate coh*.img %s ' % coFile
    print(cmd)
    os.system(cmd)

    if UTM:
        # Convert 4326 files to appropriate UTM zone
        (x, y, trans, proj) = saa.read_gdal_file_geo(
            saa.open_gdal_file('elevation-%s.tif' %
                               re.split('/', tdirs[0])[-1]))
        zone = calcUTMZone(x, y, trans)
        print('Converting products to UTM Zone %02d' % zone)
        cmd = 'gdalwarp -t_srs EPSG:326%02d %s temp.tif' % (zone, elFile)
        os.system(cmd)
        os.system('mv temp.tif %s' % elFile)
        cmd = 'gdalwarp -t_srs EPSG:326%02d %s temp.tif' % (zone, coFile)
        os.system(cmd)
        os.system('mv temp.tif %s' % coFile)

    #cmd = 'mv %s/*/*.tif geotiffs; rm -r %s' % (td2,td2)
    cmd = 'mv *.tif %s/geotiffs' % startDir
    print(cmd)
    os.system(cmd)
    os.chdir(startDir)
    # End of geotiff creation