Example #1
def getCorners(fi):
    (x1, y1, t1, p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi))
    ullon1 = t1[0]
    ullat1 = t1[3]
    lrlon1 = t1[0] + x1 * t1[1]
    lrlat1 = t1[3] + y1 * t1[5]
    return (ullon1, ullat1, lrlon1, lrlat1)
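For context, t1 here is a standard GDAL geotransform: t1[0]/t1[3] give the upper-left corner, t1[1] the pixel width, and t1[5] the (normally negative) pixel height. A minimal sketch of the same corner math with the plain osgeo.gdal API, assuming a hypothetical north-up GeoTIFF called scene.tif:

from osgeo import gdal

ds = gdal.Open("scene.tif")            # hypothetical input raster
gt = ds.GetGeoTransform()              # (ulx, xres, 0, uly, 0, yres) for north-up rasters
ulx, uly = gt[0], gt[3]
lrx = gt[0] + ds.RasterXSize * gt[1]
lry = gt[3] + ds.RasterYSize * gt[5]   # gt[5] is negative, so lry < uly
print(ulx, uly, lrx, lry)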
Example #2
def resizeFiles(params):
    x, y, trans, proj = saa.read_gdal_file_geo(
        saa.open_gdal_file(params['pFile'][0]))
    if x > 4096 or y > 4096:
        if x > y:
            width = 4096
            height = 0
        else:
            width = 0
            height = 4096

        for i in range(len(params['mdate'])):
            outFile = params['pFile'][i].replace(".tif", "_resize.tif")
            logging.info("    processing file {} to create file {}".format(
                params['pFile'][i], outFile))
            gdal.Translate(outFile,
                           params['pFile'][i],
                           resampleAlg=GRIORA_Cubic,
                           width=width,
                           height=height)
            params['pFile'][i] = outFile

            outFile = params['cFile'][i].replace(".tif", "_resize.tif")
            logging.info("    processing file {} to create file {}".format(
                params['cFile'][i], outFile))
            gdal.Translate(outFile,
                           params['cFile'][i],
                           resampleAlg=GRIORA_Cubic,
                           width=width,
                           height=height)
            params['cFile'][i] = outFile
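Passing 0 for width or height lets gdal.Translate derive the missing dimension from the source aspect ratio, which is why only the larger side is pinned to 4096 above. A minimal stand-alone sketch of the same call, using hypothetical file names and the string form of the cubic resampler:

from osgeo import gdal

# height=0 tells GDAL to compute the height that preserves the aspect ratio.
gdal.Translate("small.tif", "big.tif", width=4096, height=0, resampleAlg="cubic")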
Example #3
def read_tif_header(fi):
    x, y, trans, proj = saa.read_gdal_file_geo(saa.open_gdal_file(fi))
    lat_max = trans[3]
    lat_min = trans[3] + y * trans[5]
    lon_min = trans[0]
    lon_max = trans[0] + x * trans[1]
    coords = [lon_min, lat_max, lon_max, lat_min]
    return (proj, trans, coords)
Example #4
def makeGeotiffFiles(h5File, dataName, params):

    # Open up the HDF5 file
    source = h5py.File(h5File, "r")
    imgarray = source[dataName][()]
    maxband = imgarray.shape[0]
    logging.info("Found %s bands to process" % maxband)

    # Read a reference file for geolocation and size information
    os.chdir("../DATA")
    x, y, trans, proj = saa.read_gdal_file_geo(
        saa.open_gdal_file(params['pFile'][0]))
    os.chdir("../Stack")

    # Get the entire date range
    longList = np.unique(params['mdate'] + params['sdate'])
    dateList = []
    for i in range(len(longList)):
        dateList.append(longList[i][0:8])
    dateList = np.unique(dateList)
    dateList.sort()
    logging.debug("Datelist is {}".format(dateList))

    for cnt in range(maxband):
        logging.info("Processing band %s" % str(cnt + 1))
        if dataName == 'recons':
            if params['train']:
                outFile = "{}_trn_gnt_phase.raw".format(dateList[cnt])
            else:
                outFile = "{}_gnt_phase.raw".format(dateList[cnt])
        elif dataName == 'rawts':
            if params['train']:
                outFile = "{}_trn_raw_phase.raw".format(dateList[cnt])
            else:
                outFile = "{}_raw_phase.raw".format(dateList[cnt])
        elif dataName == 'error':
            if params['train']:
                outFile = "{}_trn_error_phase.raw".format(dateList[cnt])
            else:
                outFile = "{}_error_phase.raw".format(dateList[cnt])

        cmd = 'gdal_translate -b {} -of ENVI HDF5:"{}"://{} {}'.format(
            cnt + 1, h5File, dataName, outFile)
        execute(cmd, uselogging=True)
        newdata = np.fromfile(outFile, dtype=np.float32, count=-1)
        img = np.reshape(newdata, (y, x))
        outFile = outFile.replace('.raw', '.tif')
        saa.write_gdal_file_float(outFile, trans, proj, img)
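The shell command relies on GDAL's HDF5 subdataset syntax (HDF5:"file"://dataset) to pull one band out as a headerless ENVI raster that can then be reloaded with numpy. A minimal sketch of the same extraction through the Python bindings, with hypothetical file and dataset names:

from osgeo import gdal

src = 'HDF5:"stack.h5"://recons'                 # GDAL HDF5 subdataset name (hypothetical file)
gdal.Translate("band1.raw", src, format="ENVI", bandList=[1])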
Example #5
def reprojectFiles(params):
    os.chdir("DATA")
    for i in range(len(params['mdate'])):
        x, y, trans, proj = saa.read_gdal_file_geo(
            saa.open_gdal_file(params['pFile'][i]))
        if "PROJCS" in proj:
            outFile = params['pFile'][i].replace(".tif", "_wgs84.tif")
            logging.info("    processing file {} to create file {}".format(
                params['pFile'][i], outFile))
            gdal.Warp(outFile, params['pFile'][i], dstSRS="EPSG:4326")
            params['pFile'][i] = outFile
            outFile = params['cFile'][i].replace(".tif", "_wgs84.tif")
            logging.info("    processing file {} to create file {}".format(
                params['cFile'][i], outFile))
            gdal.Warp(outFile, params['cFile'][i], dstSRS="EPSG:4326")
            params['cFile'][i] = outFile
    os.chdir("..")
Example #6
def getOverlap(coords, fi):
    (x1, y1, t1, p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi))

    ullon1 = t1[0]
    ullat1 = t1[3]
    lrlon1 = t1[0] + x1 * t1[1]
    lrlat1 = t1[3] + y1 * t1[5]

    ullon2 = coords[0]
    ullat2 = coords[1]
    lrlon2 = coords[2]
    lrlat2 = coords[3]

    ullat = min(ullat1, ullat2)
    ullon = max(ullon1, ullon2)
    lrlat = max(lrlat1, lrlat2)
    lrlon = min(lrlon1, lrlon2)

    return (ullon, ullat, lrlon, lrlat)
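The max/min pairing above intersects the two bounding boxes: the tighter of each pair of edges wins. A tiny worked example with made-up corner coordinates:

ullon = max(-118.0, -117.6)   # -117.6, the easternmost of the two west edges
ullat = min(35.0, 34.8)       # 34.8, the southernmost of the two north edges
lrlon = min(-116.0, -116.4)   # -116.4, the westernmost of the two east edges
lrlat = max(33.0, 33.4)       # 33.4, the northernmost of the two south edges
print(ullon, ullat, lrlon, lrlat)   # the overlap box in UL/LR form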
Example #7
else:
    tfiles = infiles
    camount = 300

#####################
#
#  get minimum x/y size
#  The minimum values will be our starting point for resizing everything
#
#####################

xSize = np.zeros(len(tfiles))
ySize = np.zeros(len(tfiles))

for i in range(len(tfiles)):
    (xSize[i], ySize[i], trans, proj) = saa.read_gdal_file_geo(
        saa.open_gdal_file(tfiles[i]))

xmin = xSize.min()
ymin = ySize.min()

#####################
#
# Our size will be xmin and ymin minus 300 pixels (150 on either side)
# Since we are assuming these are all USGS high resolution scans, this is a fair value
#
#####################

xmin = int(xmin - camount)
ymin = int(ymin - camount)

for i in range(len(tfiles)):
Example #8
def getPixSize(fi):
    (x1, y1, t1, p1) = saa.read_gdal_file_geo(saa.open_gdal_file(fi))
    return t1[1]  # pixel spacing in x (geotransform element 1)
Example #9
    elFile = 'elevation-%s.tif ' % re.split('/', tdirs[0])[-1]
    coFile = 'coherence-%s.tif ' % re.split('/', tdirs[0])[-1]

    os.chdir(dDir)
    cmd = 'gdal_translate el*.img %s ' % elFile
    print(cmd)
    os.system(cmd)

    cmd = 'gdal_translate coh*.img %s ' % coFile
    print(cmd)
    os.system(cmd)

    if UTM:
        # Convert 4326 files to appropriate UTM zone
        (x, y, trans, proj) = saa.read_gdal_file_geo(
            saa.open_gdal_file('elevation-%s.tif' %
                               re.split('/', tdirs[0])[-1]))
        zone = calcUTMZone(x, y, trans)
        print('Converting products to UTM Zone %02d' % zone)
        cmd = 'gdalwarp -t_srs EPSG:326%02d %s temp.tif' % (zone, elFile)
        os.system(cmd)
        os.system('mv temp.tif %s' % elFile)
        cmd = 'gdalwarp -t_srs EPSG:326%02d %s temp.tif' % (zone, coFile)
        os.system(cmd)
        os.system('mv temp.tif %s' % coFile)

    #cmd = 'mv %s/*/*.tif geotiffs; rm -r %s' % (td2,td2)
    cmd = 'mv *.tif %s/geotiffs' % startDir
    print(cmd)
    os.system(cmd)
    os.chdir(startDir)
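The EPSG:326xx codes used here are the WGS84 / UTM northern-hemisphere zones, so the gdalwarp shell calls could equally be made through the Python bindings, as in this sketch (file names and zone number are hypothetical):

from osgeo import gdal

zone = 11                                                   # hypothetical UTM zone
gdal.Warp("dem_utm.tif", "dem.tif", dstSRS="EPSG:326%02d" % zone)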
Example #10
def procS1StackGIANT(type,
                     output,
                     descFile=None,
                     rxy=None,
                     nvalid=0.8,
                     nsbas=False,
                     filt=0.1,
                     path=None,
                     utcTime=None,
                     heading=None,
                     leave=False,
                     train=False,
                     hyp=None,
                     rawFlag=False,
                     mm=None,
                     errorFlag=False):

    logging.info(
        "***********************************************************************************"
    )
    logging.info("                 STARTING RUN {}".format(output))
    logging.info(
        "***********************************************************************************"
    )
    logging.info("Type of run is {}".format(type))

    if path is not None:
        if os.path.isdir(path):
            if path[0] != "/":
                root = os.getcwd()
                path = os.path.join(root, path)
        else:
            logging.error("ERROR: path {} is not a directory!".format(path))
            exit(1)
        logging.info("Data path is {}".format(path))

    templateDir = os.path.abspath(
        os.path.join(os.path.dirname(__file__), os.pardir, "etc"))
    logging.debug("Looking for templates in %s" % templateDir)

    if type == 'hyp':
        descFile, hypDir = prepareHypFiles(path, hyp)
    elif type == 'custom':
        if train:
            logging.warning(
                "***********************************************************************************"
            )
            logging.warning(
                "WARNING: Unable to run TRAIN model on custom inputs")
            logging.warning("WARNING: Switching off TRAIN corrections")
            logging.warning(
                "***********************************************************************************"
            )
            train = False
        if descFile is None:
            logging.error(
                "ERROR: Must specify a descriptor file when using custom option"
            )
            exit(1)
        if utcTime is None:
            logging.error(
                "ERROR: Must specify a UTC time when using custom option")
            exit(1)
        if heading is None:
            logging.error(
                "ERROR: Must specify a heading when using custom option")
            exit(1)
    elif type == 'aria':
        descFile, utcTime = prepGIAnT(intdir=path)
        heading = 12.0
    else:
        logging.error("ERROR: Unknown processing type {}".format(type))
        exit(1)

    if not os.path.isfile(descFile):
        logging.error(
            "ERROR: Unable to find descriptor file {}".format(descFile))
        exit(1)

    params = getFileList(descFile)
    params['type'] = type
    params['rxy'] = rxy
    params['nvalid'] = float(nvalid)
    params['train'] = train
    params['filt'] = filt

    if utcTime is None:
        os.chdir(hypDir)
        txtFile = glob.glob("*/*20*_20*.txt")[0]
        utcTime = getParameter(txtFile, "UTCtime")
        os.chdir("..")
    params['utctime'] = utcTime

    if heading is None:
        os.chdir(hypDir)
        txtFile = glob.glob("*/*20*_20*.txt")[0]
        heading = getParameter(txtFile, "Heading")
        os.chdir("..")
    params['heading'] = heading

    logging.info("Examining list of files to process...")
    for i in range(len(params['mdate'])):
        logging.debug("    found: {} {} {} {}".format(params['mdate'][i],
                                                      params['sdate'][i],
                                                      params['pFile'][i],
                                                      params['cFile'][i]))

    if type == 'custom':
        prepareCustomFiles(params, path)

    checkFileExistence(params)
    root = os.getcwd()

    logging.info("Reprojecting files...")
    reprojectFiles(params)

    logging.info("Cutting files...")
    os.chdir("DATA")
    if type != 'aria':
        cutFiles(params['pFile'])
        cutFiles(params['cFile'])

        for i in range(len(params['mdate'])):
            params['pFile'][i] = params['pFile'][i].replace(
                ".tif", "_clip.tif")
            params['cFile'][i] = params['cFile'][i].replace(
                ".tif", "_clip.tif")

    logging.info("Resizing files...")
    resizeFiles(params)

    if train:
        logging.info(
            "***********************************************************************************"
        )
        logging.info(
            "          PREPARING TO RUN THE TRAIN MERRA2 WEATHER MODEL")
        logging.info(
            "***********************************************************************************"
        )
        createCleanDir("TRAIN")
        os.chdir("TRAIN")
        makeParmsAPS(params, root)
        prepareFilesForTrain(params)
        myfile = os.path.join(os.pardir, params['pFile'][0])
        aps_weather_model("merra2", 1, 4, myfile)
        os.chdir("..")
        fixFileNamesTrain(params)

    logging.info("Translating files to raw format...")
    for i in range(len(params['pFile'])):
        params['pFile'][i] = toRaw(params['pFile'][i])
        params['cFile'][i] = toRaw(params['cFile'][i])

    if not leave:
        for myfile in glob.glob("*_wgs84.tif"):
            os.remove(myfile)
        for myfile in glob.glob("*_clip.tif"):
            os.remove(myfile)
        for myfile in glob.glob("*_resize.tif"):
            os.remove(myfile)

    width, length, trans, proj = saa.read_gdal_file_geo(
        saa.open_gdal_file(params['pFile'][0]))
    params['width'] = width
    params['length'] = length
    os.chdir("..")

    createIfgList(params)
    createExampleRSC(params)
    fixPrepDataXml(params, templateDir)
    fixUserfnPy(params, templateDir)
    fixPrepBasXml(params, templateDir)
    renameFiles(params)

    execute("python prepdataxml.py", uselogging=True)
    execute("PrepIgramStack.py", uselogging=True)
    execute("python prepsbasxml.py", uselogging=True)

    if not nsbas:
        logging.info("Running SBAS inversion")
        if errorFlag:
            execute("SBASxval.py", uselogging=True)
            h5File = "LS-xval.h5"
        else:
            execute("SBASInvert.py", uselogging=True)
            h5File = "LS-PARAMS.h5"
    else:
        logging.info("Running NSBAS inversion")
        if errorFlag:
            h5File = "NSBAS-xval.h5"
            execute("NSBASxval.py -o {}".format(h5file), uselogging=True)
        else:
            execute("NSBASInvert.py", uselogging=True)
            h5File = "NSBAS-PARAMS.h5"

    os.chdir("Stack")
    filelist = makePNG.mkMovie(h5File, "recons", mm=mm)
    filelist.sort()

    if rawFlag:
        filelist2 = makePNG.mkMovie(h5File, "rawts", mm=mm)
        filelist2.sort()
    elif errorFlag:
        filelist2 = makePNG.mkMovie(h5File, "error", mm=mm)
        filelist2.sort()

    # Get the entire date range
    longList = np.unique(params['mdate'] + params['sdate'])
    dateList = []
    for i in range(len(longList)):
        dateList.append(longList[i][0:8])
    dateList = np.unique(dateList)
    dateList.sort()

    # Add annotations to files
    cnt = 0
    for myfile in filelist:
        execute(
            "convert {FILE} -gravity north  -annotate +0+5 '{DATE}' anno_{FILE}"
            .format(FILE=myfile, DATE=dateList[cnt]),
            uselogging=True)
        cnt = cnt + 1
    if train:
        name = "{}_train.gif".format(output)
    else:
        name = "{}.gif".format(output)
    # Make the animation
    execute("convert -delay 120 -loop 0 anno_*.png {}".format(name),
            uselogging=True)

    if rawFlag:
        for myfile in glob.glob("anno_*.png"):
            os.remove(myfile)
        cnt = 0
        for myfile in filelist2:
            execute(
                "convert {FILE} -gravity north  -annotate +0+5 '{DATE}' anno_{FILE}"
                .format(FILE=myfile, DATE=dateList[cnt]),
                uselogging=True)
            cnt = cnt + 1
        rawname = name.replace(".gif", "_rawts.gif")
        # Make the animation
        execute("convert -delay 120 -loop 0 anno_*.png {}".format(rawname),
                uselogging=True)
    elif errorFlag:
        for myfile in glob.glob("anno_*.png"):
            os.remove(myfile)
        cnt = 0
        for myfile in filelist2:
            execute(
                "convert {FILE} -gravity north  -annotate +0+5 '{DATE}' anno_{FILE}"
                .format(FILE=myfile, DATE=dateList[cnt]),
                uselogging=True)
            cnt = cnt + 1
        rawname = name.replace(".gif", "_error.gif")
        # Make the animation
        execute("convert -delay 120 -loop 0 anno_*.png {}".format(rawname),
                uselogging=True)

    # Get product directory ready
    os.chdir("..")
    prodDir = "PRODUCT_{}".format(output)
    createCleanDir(prodDir)

    os.chdir("Stack")

    shutil.move(name, "../{}".format(prodDir))
    if rawFlag or errorFlag:
        shutil.move(rawname, "../{}".format(prodDir))

    makeGeotiffFiles(h5File, "recons", params)
    if rawFlag:
        makeGeotiffFiles(h5File, "rawts", params)
    elif errorFlag:
        makeGeotiffFiles(h5File, "error", params)

    # Move files from Stack directory
    for myfile in glob.glob("*.tif"):
        shutil.move(myfile, "../{}".format(prodDir))
    shutil.move(h5File, "../{}/{}.h5".format(prodDir, output))
    os.chdir("..")

    shutil.copy(descFile, prodDir)

    if not leave:
        if type == 'hyp':
            shutil.rmtree(hypDir)
        shutil.rmtree("DATA")
        #        shutil.rmtree("LINKS")
        shutil.rmtree("Stack")
        shutil.rmtree("Figs")

        os.remove("data.xml")
        os.remove("userfn.pyc")
        os.remove("sbas.xml")
        os.remove("prepdataxml.py")
        os.remove("prepsbasxml.py")
        os.remove("userfn.py")
        os.remove("ifg.list")
        os.remove("example.rsc")

        if train:
            for myfile in glob.glob("merra/*/*.xyz"):
                os.remove(myfile)
    else:
        shutil.move("DATA", "DATA_{}".format(output))

    logging.info(
        "***********************************************************************************"
    )
    logging.info("                 END OF RUN {}".format(output))
    logging.info(
        "***********************************************************************************"
    )
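Judging from the signature and the checks above, a 'custom' run must supply descFile, utcTime, and heading, while 'hyp' and 'aria' runs derive them. A hedged invocation sketch with entirely hypothetical values:

procS1StackGIANT("custom",
                 "my_run",                 # output name -> PRODUCT_my_run, my_run.gif
                 descFile="ifg_list.txt",  # hypothetical descriptor file
                 utcTime="14:32:16",       # hypothetical acquisition time
                 heading=347.0,            # hypothetical platform heading
                 nsbas=True,
                 nvalid=0.8)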