Code example #1
File: pyWPS.py  Project: xigrug/WRF-Tools
def checkFileList(filelist, ListDir, okdates, depth):
    # N.B.: 'dataset', 'nlt', 'starts', and 'ends' are module-level globals in pyWPS.py
    depth += 1  # counter for recursion depth
     # N.B.: the recursion depth limit was introduced to prevent infinite recursions when circular links occur
     # loop over dates
     for filename in filelist:
         TmpDir = ListDir + '/' + filename
         if os.path.isdir(TmpDir):
             if dataset.checkSubDir(filename, starts[0], ends[0]):
                 # make list of contents and process recursively
                 if depth > 1:
                     print(
                         ' (skipping subfolders beyond recursion depth/level 1)'
                     )
                 else:
                     okdates = checkFileList(os.listdir(TmpDir), TmpDir,
                                             okdates, depth)
         else:
             # figure out time and date
             date = dataset.extractDate(filename)
             # collect valid dates
             if date:  # i.e. not 'None'
                 # check date for validity (only need to check first/master domain)
                 lok = nlt.checkDate(date, starts[0], ends[0])
                 # collect dates within range
                 if lok: okdates.append(date)
     return okdates
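
For orientation, here is a minimal sketch of how this scanner could be driven. Since 'dataset', 'nlt', 'starts', and 'ends' are module-level globals in pyWPS.py, the stub objects and date tuples below are hypothetical stand-ins rather than the project's real definitions:

import os

# hypothetical stand-ins for the pyWPS.py module globals used above
class _StubDataset(object):
    def checkSubDir(self, name, start, end): return True  # descend into every sub-folder
    def extractDate(self, name):
        return (2000, 1, 1, 0) if name.endswith('.nc') else None

class _StubNlt(object):
    def checkDate(self, date, start, end): return start <= date <= end

dataset, nlt = _StubDataset(), _StubNlt()
starts = [(2000, 1, 1, 0)]     # per-domain start dates (tuple format assumed)
ends = [(2000, 12, 31, 18)]    # per-domain end dates

root = '/data/source'          # hypothetical input folder
okdates = checkFileList(os.listdir(root), root, [], 0)  # recursion depth starts at 0
print(str(len(okdates)) + ' valid dates found')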
Code example #2
File: pyWPS.py  Project: camposdelano/WRF-Tools
def checkFileList(filelist, ListDir, okdates, depth):
   depth += 1 # counter for recursion depth
   # N.B.: the recursion depth limit was introduced to prevent infinite recursions when circular links occur
   # loop over dates
   for filename in filelist:
     TmpDir = ListDir + '/' + filename
     if os.path.isdir(TmpDir):
       if dataset.checkSubDir(filename, starts[0], ends[0]):          
         # make list of contents and process recursively
         if depth > 1: print(' (skipping subfolders beyond recursion depth/level 1)')
         else: okdates = checkFileList(os.listdir(TmpDir), TmpDir, okdates, depth)
     else:
       # figure out time and date
       date = dataset.extractDate(filename)
       # collect valid dates
       if date: # i.e. not 'None'
         # check date for validity (only need to check first/master domain)      
         lok = nlt.checkDate(date, starts[0], ends[0])
         # collect dates within range
         if lok: okdates.append(date)
   return okdates
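
The depth counter in both versions guards against infinite recursion through circular links. As a point of comparison, the standard library's os.walk gives the same protection, because it does not follow symlinked directories unless followlinks=True is passed. Here is a sketch of an equivalent scan, with the date helpers passed in explicitly instead of read from module globals:

import os

def collect_dates(root, extract_date, check_date, start, end, maxdepth=1):
    # collect valid dates from files up to 'maxdepth' levels below 'root'
    okdates = []
    for dirpath, dirnames, filenames in os.walk(root):  # symlinked dirs are not followed by default
        depth = dirpath[len(root):].count(os.sep)
        if depth >= maxdepth:
            dirnames[:] = []  # prune the walk: do not descend any further
        for filename in filenames:
            date = extract_date(filename)  # None for files without a parsable date
            if date and check_date(date, start, end):
                okdates.append(date)
    return okdates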
Code example #3
File: pyWPS.py  Project: camposdelano/WRF-Tools
def processTimesteps(myid, dates):
  # N.B.: the path templates and switches used below (pdir, Tmp, Meta, METGRID, ldisk, ldata, ...) are module-level globals in pyWPS.py
  
  # create process sub-folder
  mydir = pdir.format(myid)
  MyDir = Tmp + mydir
  mytag = '['+pname.format(myid)+']'
  if os.path.exists(mydir): 
    shutil.rmtree(mydir)
  os.mkdir(mydir)
  # copy namelist
  shutil.copy(nmlstwps, mydir)
  # change working directory to process sub-folder
  os.chdir(mydir)
  # link dataset specific files
  dataset.setup(src=Tmp, dst=MyDir, lsymlink=True)
  # link other source files
  os.symlink(Meta, meta[:-1]) # link to folder
  # link geogrid (data) and metgrid
  os.symlink(Tmp+metgrid_exe, metgrid_exe)
  for i in doms: # loop over all geogrid domains
    geoname = geopfx.format(i)+ncext
    os.symlink(Tmp+geoname, geoname)
  
  ## loop over (atmospheric) time steps
  if dates: print('\n '+mytag+' Looping over Time-steps:')
  else: print('\n '+mytag+' Nothing to do!')
  # loop over date-tuples
  for date in dates:
    
    # figure out sub-domains
    ldoms = [True,]*maxdom # first domain is always computed
    for i in range(1,maxdom): # check sub-domains
      ldoms[i] = nlt.checkDate(date, starts[i], ends[i])
    # update date string in namelist.wps
    # print(imform, date)
    imdate = imform.format(*date)    
    imfile = impfx+imdate
    nmldate = nmlform.format(*date) # also used by metgrid
    nlt.writeNamelist(nmlstwps, ldoms, nmldate, imd, isd, ied)
    
    # N.B.: in case the stack size limit causes segmentation faults, here are some workarounds
    # subprocess.call(r'ulimit -s unlimited; ./unccsm.exe', shell=True)
    # import resource
    # subprocess.call(['./unccsm.exe'],
    #                 preexec_fn=lambda: resource.setrlimit(resource.RLIMIT_STACK, (-1,-1)))
    # print(resource.getrlimit(resource.RLIMIT_STACK))
      
    ## prepare WPS processing 
    # run ungrib.exe or equivalent operation
    preimfile = dataset.ungrib(date, mytag) # need 'mytag' for status messages
    # rename intermediate file according to WPS convention (by date), if necessary
    if preimfile: os.rename(preimfile, imfile) # not the same as 'move'
    
    ## run WPS' metgrid.exe on intermediate file
    # run metgrid.exe
    print('\n  * '+mytag+' interpolating to WRF grid (metgrid.exe)')
    fmetgrid = open(metgrid_log, 'a') # metgrid.exe standard out and error log    
    subprocess.call([METGRID], stdout=fmetgrid, stderr=fmetgrid) # metgrid.exe writes a fairly detailed log file
    fmetgrid.close()
    
    ## finish time-step
    os.remove(MyDir+imfile) # remove intermediate file after metgrid.exe completes
    # copy/move data back to disk (one per domain) and/or keep in memory
    tmpstr = '\n '+mytag+' Writing output to disk: ' # gather output for later display
    for i in range(maxdom):
      metfile = metpfx.format(i+1)+nmldate+ncext
      if ldoms[i]:
        tmpstr += '\n                           '+metfile
        if ldisk: 
          shutil.copy(metfile,Disk+metfile)
        if ldata:
          shutil.move(metfile,Data+metfile)      
        else:
          os.remove(metfile)
      else:
        if os.path.exists(metfile): 
          os.remove(metfile) # metgrid.exe may create more files than needed
    # finish time-step
    tmpstr += '\n\n   ============================== finished '+imdate+' ==============================   \n'
    print(tmpstr)    
    
      
  ## clean up after all time-steps
  # remove links to other source files
  os.remove(meta[:-1]) # remove link to folder
  dataset.cleanup(tgt=MyDir)
  os.remove(metgrid_exe)
  for i in doms: # loop over all geogrid domains
    os.remove(geopfx.format(i)+ncext)
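
processTimesteps is written as a per-worker routine: each call receives a worker id and its own share of the date tuples and runs in its own sub-folder, so several workers can run side by side. The following is a minimal sketch of how such workers might be fanned out with the standard multiprocessing module; the round-robin split and the default of four workers are illustrative assumptions, not the scheduling pyWPS.py actually uses:

import multiprocessing

def run_workers(all_dates, nprocs=4):
    # round-robin split of the date tuples over 'nprocs' workers (illustrative)
    chunks = [all_dates[i::nprocs] for i in range(nprocs)]
    procs = [multiprocessing.Process(target=processTimesteps, args=(i, chunk))
             for i, chunk in enumerate(chunks)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()  # wait until every worker has finished its time steps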
Code example #4
File: pyWPS.py  Project: xigrug/WRF-Tools
def processTimesteps(myid, dates):

    # create process sub-folder
    mydir = pdir.format(myid)
    MyDir = Tmp + mydir
    mytag = '[' + pname.format(myid) + ']'
    if os.path.exists(mydir):
        shutil.rmtree(mydir)
    os.mkdir(mydir)
    # copy namelist
    shutil.copy(nmlstwps, mydir)
    # change working directory to process sub-folder
    os.chdir(mydir)
    # link dataset specific files
    dataset.setup(src=Tmp, dst=MyDir, lsymlink=True)
    # link other source files
    os.symlink(Meta, meta[:-1])  # link to folder
    # link geogrid (data) and metgrid
    os.symlink(Tmp + metgrid_exe, metgrid_exe)
    for i in doms:  # loop over all geogrid domains
        geoname = geopfx.format(i) + ncext
        os.symlink(Tmp + geoname, geoname)

    ## loop over (atmospheric) time steps
    if dates: print('\n ' + mytag + ' Looping over Time-steps:')
    else: print('\n ' + mytag + ' Nothing to do!')
    # loop over date-tuples
    for date in dates:

        # figure out sub-domains
        ldoms = [True,] * maxdom  # first domain is always computed
        for i in range(1, maxdom):  # check sub-domains
            ldoms[i] = nlt.checkDate(date, starts[i], ends[i])
        # update date string in namelist.wps
        # print(imform, date)
        imdate = imform.format(*date)
        imfile = impfx + imdate
        nmldate = nmlform.format(*date)  # also used by metgrid
        nlt.writeNamelist(nmlstwps, ldoms, nmldate, imd, isd, ied)

        # N.B.: in case the stack size limit causes segmentation faults, here are some workarounds
        # subprocess.call(r'ulimit -s unlimited; ./unccsm.exe', shell=True)
        # import resource
        # subprocess.call(['./unccsm.exe'],
        #                 preexec_fn=lambda: resource.setrlimit(resource.RLIMIT_STACK, (-1,-1)))
        # print(resource.getrlimit(resource.RLIMIT_STACK))

        ## prepare WPS processing
        # run ungrib.exe or equivalent operation
        preimfile = dataset.ungrib(date, mytag)  # need 'mytag' for status messages
        # rename intermediate file according to WPS convention (by date), if necessary
        if preimfile: os.rename(preimfile, imfile)  # not the same as 'move'

        ## run WPS' metgrid.exe on intermediate file
        # run metgrid.exe
        print('\n  * ' + mytag + ' interpolating to WRF grid (metgrid.exe)')
        fmetgrid = open(metgrid_log, 'a')  # metgrid.exe standard out and error log
        subprocess.call([METGRID], stdout=fmetgrid, stderr=fmetgrid)  # metgrid.exe writes a fairly detailed log file
        fmetgrid.close()

        ## finish time-step
        os.remove(MyDir + imfile)  # remove intermediate file after metgrid.exe completes
        # copy/move data back to disk (one per domain) and/or keep in memory
        tmpstr = '\n ' + mytag + ' Writing output to disk: '  # gather output for later display
        for i in range(maxdom):
            metfile = metpfx.format(i + 1) + nmldate + ncext
            if ldoms[i]:
                tmpstr += '\n                           ' + metfile
                if ldisk:
                    shutil.copy(metfile, Disk + metfile)
                if ldata:
                    shutil.move(metfile, Data + metfile)
                else:
                    os.remove(metfile)
            else:
                if os.path.exists(metfile):
                    os.remove(metfile)  # metgrid.exe may create more files than needed
        # finish time-step
        tmpstr += '\n\n   ============================== finished ' + imdate + ' ==============================   \n'
        print(tmpstr)

    ## clean up after all time-steps
    # remove links to other source files
    os.remove(meta[:-1])  # remove link to folder
    dataset.cleanup(tgt=MyDir)
    os.remove(metgrid_exe)
    for i in doms:  # loop over all geogrid domains
        os.remove(geopfx.format(i) + ncext)
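
One hardening note on the metgrid step in both versions: the explicit open/close pair around the log file leaves the handle open if subprocess.call raises. A with-block closes it in every case; the sketch below rewrites the same call that way, reusing the METGRID and metgrid_log names from the snippet:

import subprocess

def run_metgrid(exe, logfile):
    # append metgrid.exe's stdout and stderr to the shared log;
    # the file is closed even if the call raises
    with open(logfile, 'a') as log:
        return subprocess.call([exe], stdout=log, stderr=log)

# e.g.: rc = run_metgrid(METGRID, metgrid_log)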