Example #1
from hydroDL import pathSMAP, master
import os
from hydroDL.data import dbCsv

optData = master.default.update(master.default.optDataSMAP,
                                rootDB=pathSMAP['DB_L3_Global'],
                                subset='Globalv4f1_NorthAmerica',
                                tRange=[20150401, 20160401],
                                varT=dbCsv.varForcingGlobal)
optModel = master.default.optLstm
optLoss = master.default.optLossSigma
optTrain = master.default.optTrainSMAP
out = os.path.join(pathSMAP['Out_L3_Global'], 'test')
masterDict = master.wrapMaster(out, optData, optModel, optLoss, optTrain)
master.train(masterDict)
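
# The option blocks above are ordinary dictionaries, so individual fields can be
# overridden with the same update() helper before wrapping. A minimal sketch,
# assuming 'hiddenSize' is a field of the default optLstm dictionary:
# optModel = master.default.update(master.default.optLstm, hiddenSize=256)
# masterDict = master.wrapMaster(out, optData, optModel, optLoss, optTrain)
# master.train(masterDict)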
Example #2
    else:
        cmd = "CUDA_VISIBLE_DEVICES={} screen -dmS {} python {} -F {} -M {}".format(
            cudaID, screen, codePath, "train", mFile)

    print(cmd)
    os.system(cmd)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-F", dest="func", type=str)
    parser.add_argument("-M", dest="mFile", type=str)
    args = parser.parse_args()
    if args.func == "train":
        mDict = master.readMasterFile(args.mFile)
        master.train(mDict)
        # out = mDict['out']
        # email.sendEmail(subject='Training Done', text=out)
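
# For reference, this launcher is normally driven from a training script through
# master.runTrain (see Example #5); a minimal sketch, assuming a configuration
# dict already wrapped with master.wrapMaster:
# master.runTrain(masterDict, cudaID=0, screen='test')
# which builds and runs a command of the form
# CUDA_VISIBLE_DEVICES=0 screen -dmS test python <codePath> -F train -M <mFile>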
Example #3
from hydroDL import pathSMAP, master
import os

cDir = os.path.dirname(os.path.abspath(__file__))
# cDir = r'/home/kxf227/work/GitHUB/pyRnnSMAP/example/'  # hard-coded path kept for reference; it would otherwise override the line above

# define training options
optData = master.updateOpt(master.default.optDataCsv,
                           path=os.path.join(cDir, 'data'),
                           subset='CONUSv4f1',
                           tRange=[20150401, 20160401])
optModel = master.default.optLstm
optLoss = master.default.optLoss
optTrain = master.default.optTrainSMAP
out = os.path.join(cDir, 'output', 'CONUSv4f1')
masterDict = master.wrapMaster(out, optData, optModel, optLoss, optTrain)

# train
master.train(masterDict, overwrite=True)
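
# A possible follow-up once training finishes: evaluate the model on the next
# year. A sketch, assuming the companion master.test interface used in the
# hydroDL demos (returning the data frame plus predicted and observed arrays):
# df, yp, yt = master.test(out, tRange=[20160401, 20170401], subset='CONUSv4f1')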
Example #4
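# The excerpt below assumes an argument-parsing header roughly like the following
# (a sketch: the flag name is taken from the shell commands at the end, and the
# hydroDL imports are inferred from the names used in the excerpt):
# import os
# import argparse
# from hydroDL import pathSMAP
# from hydroDL.data import dbCsv
# from hydroDL.master import default, wrapMaster, train
#
# parser = argparse.ArgumentParser()
# parser.add_argument('--rid', dest='regionId', type=int)
# args = parser.parse_args()
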
    regionId = args.regionId

# k = [7, 8, 13]
# regionId = 7  # manual override kept for reference; it would otherwise discard the --rid argument parsed above
for k in range(len(subsetLst)):
    kc = regionId - 1
    if k != kc:
        outName = 'ecoRegion{:02d}{:02d}_v2f1'.format(regionId,
                                                      k + 1) + '_Forcing'
        varLst = dbCsv.varForcing
        optData = default.update(default.optDataSMAP,
                                 rootDB=pathSMAP['DB_L3_NA'],
                                 tRange=[20150401, 20160401],
                                 varT=varLst)
        optData = default.forceUpdate(optData,
                                      subset=[subsetLst[kc], subsetLst[k]])
        optModel = default.optLstm
        optLoss = default.optLossRMSE
        optTrain = default.optTrainSMAP
        out = os.path.join(pathSMAP['Out_L3_NA'], 'ecoRegion', outName)
        masterDict = wrapMaster(out, optData, optModel, optLoss, optTrain)
        train(masterDict)
'''
source /home/kxf227/anaconda3/bin/activate
conda activate pytorch

CUDA_VISIBLE_DEVICES=0 python /home/kxf227/work/GitHUB/hydroDL-dev/app/region/trainEcoComb.py --rid 7
CUDA_VISIBLE_DEVICES=1 python /home/kxf227/work/GitHUB/hydroDL-dev/app/region/trainEcoComb.py --rid 8
CUDA_VISIBLE_DEVICES=2 python /home/kxf227/work/GitHUB/hydroDL-dev/app/region/trainEcoComb.py --rid 13
'''
Example #5
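# Excerpt: the lines below are the trailing arguments of a direct call to the
# model-training routine used by the explicit interface (in the hydroDL CAMELS
# demo, a call along the lines of model = train.trainModel(...)).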
            model,
            xTrain,
            yTrain,
            attrs,
            lossFun,
            nEpoch=EPOCH,
            miniBatch=[BATCH_SIZE, RHO],
            saveEpoch=saveEPOCH,
            saveFolder=out,
        )

    if interfaceOpt == 0:
        # Only the wrapped configuration dict 'masterDict' needs to be passed for training;
        # nx and ny will be updated automatically later
        masterDict = master.wrapMaster(out, optData, optModel, optLoss, optTrain)
        master.train(masterDict)

        ## Not used here.
        ## A potential way to run batch jobs simultaneously in the background across multiple GPUs and Linux screen sessions.
        ## To use this, manually set "pathCamels['DB']" in hydroDL/__init__.py to your own root path of the CAMELS data,
        ## then call master.runTrain() below instead of master.train() above.
        # master.runTrain(masterDict, cudaID=cid % gnum, screen='test-'+str(cid))
        # cid = cid + 1
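        ## A sketch of such a batch loop, assuming several wrapped configuration dicts are
        ## collected in a list 'masterLst' and 'gnum' GPUs are available:
        # cid = 0
        # for mDict in masterLst:
        #     master.runTrain(mDict, cudaID=cid % gnum, screen='job-' + str(cid))
        #     cid = cid + 1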

if 2 in Action:
    # Train the CNN-LSTM PUR model that integrates FDCs
    # LCrange defines the period from which the synthetic FDC is derived
    LCTstr = str(LCrange[0]) + "-" + str(LCrange[1])
    out = os.path.join(rootOut, save_path, hucdic, "Reg-85-95-Sub-Full-FDC" + LCTstr)
    # out = os.path.join(rootOut, save_path, hucdic, 'Reg-85-95-Sub-5attr-FDC' + LCTstr)
    # out = os.path.join(rootOut, save_path, hucdic, 'Reg-85-95-Sub-Noattr-FDC' + LCTstr)