예제 #1
0
파일: Timelist.py 프로젝트: inogs/bit.sea
    def fromfilenames(timeinterval, inputdir,searchstring, filtervar=None, prefix='ave.', dateformat="%Y%m%d-%H:%M:%S"):
        '''
        Generates a TimeList object by reading a directory.

        HYPOTHESES:
         - the input directory has files containing a date in their names
         - every file refers to a single time frame
         - the date in the file name can be considered as centered in its period

        The generated datetime list has all the files concerning the period
        indicated in timeinterval; some of these files can have the centered
        date out of that period (their time window still overlaps it).

        Example:

        INPUTDIR="/pico/scratch/userexternal/gbolzon0/Carbonatic/wrkdir/MODEL/AVE_FREQ_1/"
        Time_int = TimeInterval('20141201-00:00:00','20150701-00:00:00',"%Y%m%d-%H:%M:%S")
        TL = TimeList.fromfilenames(Time_int, INPUTDIR,"ave*N1p.nc")
        TL = TimeList.fromfilenames(Time_int, INPUTDIR,"ave*nc",filtervar="N1p")

        For Sat data
        TL = TimeList.fromfilenames(Time_int, INPUTDIR,"*nc", prefix='',dateformat='%Y%m%d')

        For physical forcings
        TL = TimeList.fromfilenames(Time_int, INPUTDIR,"T*.nc",prefix='T',dateformat='%Y%m%d')
        '''

        # filenamer knows where the date substring sits inside a file name
        IOname = IOnames.filenamer(prefix, dateformat)
        if not os.path.exists(inputdir):
            raise NameError("Not existing directory " + inputdir)

        inputdir = addsep(inputdir)
        filelist_ALL = glob.glob(inputdir + searchstring)
        if filtervar is not None:  # fix: PEP 8 idiom (was "not filtervar is None")
            filelist_ALL = [f for f in filelist_ALL if filtervar in f]
        # fix: give the assert a message so an empty match is diagnosable
        assert len(filelist_ALL) > 0, "No file matching " + inputdir + searchstring

        filenamelist = []
        datetimelist = []
        External_filelist = []  # files whose centered date falls outside timeinterval
        External_timelist = []
        for pathfile in filelist_ALL:
            filename   = os.path.basename(pathfile)
            datestr    = filename[IOname.date_startpos:IOname.date_endpos]
            actualtime = datetime.datetime.strptime(datestr, IOname.dateformat)
            if timeinterval.contains(actualtime):
                filenamelist.append(pathfile)
                datetimelist.append(actualtime)
            else:
                External_filelist.append(pathfile)
                External_timelist.append(actualtime)

        TimeListObj = TimeList(datetimelist)
        filenamelist.sort()
        TimeListObj.timeinterval = timeinterval
        TimeListObj.inputdir     = inputdir
        TimeListObj.searchstring = searchstring
        TimeListObj.prefix       = prefix
        TimeListObj.filtervar    = filtervar
        TimeListObj.filelist = filenamelist

        # Recover "external" files whose time window (centered on their file
        # date) still overlaps timeinterval; possible only once a frequency
        # could be inferred from the in-period files.
        if TimeListObj.inputFrequency is not None:
            for iFrame, t in enumerate(External_timelist):
                TimInt = computeTimeWindow(TimeListObj.inputFrequency, t)
                if TimeListObj.timeinterval.isInWindow(TimInt):
                    TimeListObj.filelist.append(External_filelist[iFrame])
                    TimeListObj.Timelist.append(External_timelist[iFrame])
            TimeListObj.filelist.sort()
            TimeListObj.Timelist.sort()
        TimeListObj.nTimes = len(TimeListObj.filelist)

        # Daily frames stamped at midnight are re-centered at noon.
        if TimeListObj.inputFrequency == 'daily':
            for iFrame, t in enumerate(TimeListObj.Timelist):
                if t.hour == 0:
                    newt = datetime.datetime(t.year, t.month, t.day, 12, 0, 0)
                    TimeListObj.Timelist[iFrame] = newt

        return TimeListObj
import datetime
import glob
import math, operator
import os

import numpy as np
import pylab as pl
from dateutil.relativedelta import relativedelta
from matplotlib.dates import date2num, num2date, YearLocator, MonthLocator, DateFormatter

from commons.utils import addsep


def movingaverage(interval, window_size):
    """Smooth *interval* with a centered boxcar of width *window_size*.

    Same-length convolution is used, so values near the edges are averaged
    against implicit zero padding.
    """
    kernel = np.ones(int(window_size)) / float(window_size)
    return np.convolve(interval, kernel, 'same')



# I/O directories from the CLI (argparse namespace built elsewhere in the file).
DATADIR = addsep(args.inputdir)
FIGDIR = addsep(args.outdir)
os.system('mkdir -p ' + FIGDIR)  # ensure the figure directory exists

pl.close('all')
# Variable to plot (presumably net primary production) over a fixed list of
# Mediterranean sub-basin codes; y-axis label is in gC/m2/yr.
var='ppn'
SUBlist_subset=['alb','sww','swe','nwm','tyr','adn','ads','ion','lev','med']
subreg_mod=SUBlist_subset
#TEXTylabel="mgC/m2/day"
TEXTylabel="gC/m2/yr"

# define dates
dd=[]
# All column-integral files in DATADIR, sorted chronologically by name.
#datelist=glob.glob(DATADIR + "ave." + in_yr + "*-12:00:00.col_integrals.nc")
datelist=glob.glob(DATADIR + "ave.*.col_integrals.nc")
datelist.sort()
예제 #3
0
    return parser.parse_args()

args = argument()  # parse CLI once at module level


from biofloats_ms_timeseries import timelistcontainer
from commons.time_interval import TimeInterval
import matplotlib
matplotlib.use('Agg')  # headless backend: must be selected before pylab import
import pylab as pl
import numpy as np
from commons.layer import Layer
from basins import V2 as OGS
from commons.utils import addsep

# Archive locations, normalized with a trailing separator.
ARCHIVEDIR       = addsep(args.archivedir)
ARCHIVE_PREV     = addsep(args.previous_archive)
VALID_WRKDIR     = addsep(args.validation_dir)
OUTFIG_DIR       = addsep(args.outdir)

# V1 covers a fixed historical window; V2 runs from 20160412 up to args.date.
TI_V1 = TimeInterval("20150608","20160101","%Y%m%d")
TI_V2 = TimeInterval('20160412', args.date,'%Y%m%d')

# Load both archives, then append the fresher validation working dir to V2C.
V4_data  = timelistcontainer(TI_V1,ARCHIVE_PREV,postfix_dir="")
V2C_data = timelistcontainer(TI_V2,ARCHIVEDIR  ,postfix_dir="POSTPROC/AVE_FREQ_1/validation/biofloats_ms/")
V2C_data.append_dir(VALID_WRKDIR)


def single_plot(longvar, var, sub, layer ):
    varV4 = var
    if var == 'P_l': varV4 = 'P_i'
예제 #4
0
파일: preproc.py 프로젝트: gbolzon/ogstm
from commons import timerequestors
from commons.Timelist import TimeList
from commons.layer import Layer
import pylab as pl
from commons.utils import addsep
import basins.OGS as OGS
from instruments import lovbio_float as bio_float
from instruments.var_conversions import LOVFLOATVARS
from commons.mask import Mask
from instruments.matchup_manager import Matchup_Manager
import sys
#import makeplots
import qualitycheck

# Input date (YYYYMMDD) and working directories from the CLI arguments.
idate0 = args.inputdate
BASEDIR = addsep(args.basedir)
INPUTDIR = addsep(args.inputdir)

# DATE INTERVAL
# Split idate0 positionally into year/month/day.
year = int(idate0[0:4])
month = int(idate0[4:6])
day = int(idate0[6:8])
# Weekly and daily time requestors built around the requested date.
idate1 = timerequestors.Weekly_req(year, month, day)
print idate1

idate2 = timerequestors.Daily_req(year, month, day)
print idate2

# Variable name
# Only chlorophyll (P_l) is processed here; adjusted values are read for it.
VARLIST = ['P_l']  #'N3n','O2o']
read_adjusted = [True]  #,False,False]
예제 #5
0
    
    for date in S:
        LINES.append("<date day=\"" + date + "\">\n")
        for filename in analysis_forecast_basenames:
            if filename[:8] == date:
                LINES.append("<float wmo=\"" + filename[9:-3] + "\"></float>\n")
        LINES.append("</date>\n")
    LINES.append("</root>\n")

    fid=open(filexml,'w')
    fid.writelines(LINES)
    fid.close()



# Matchup output directories: PREVIOUS holds analysis+forecast files,
# ACTUAL holds analysis-only files.
PREVIOUS_DIR= addsep(args.previous_dir) #"/pico/home/usera07ogs/a07ogs00/OPA/V2C-dev/wrkdir/2/POSTPROC/AVE_FREQ_1/online_validation/PREVIOUS/matchup_outputs/"
ACTUAL_DIR  = addsep(args.actual_dir)  #"/pico/home/usera07ogs/a07ogs00/OPA/V2C-dev/wrkdir/2/POSTPROC/AVE_FREQ_1/online_validation/ACTUAL/matchup_outputs/"
OUTDIR      = addsep(args.outdir)
xmlfile     = args.xmlfile

ANALYSIS_FORECAST_LIST = glob.glob(PREVIOUS_DIR + "*nc")
ONLY_ANALYSIS_LIST     = glob.glob(ACTUAL_DIR   + "*nc")

# Work on sorted basenames: dump_xml groups float files by their leading
# YYYYMMDD date and extracts the WMO code from the rest of the name.
analysis_forecast_basenames = [os.path.basename(filename) for filename in ANALYSIS_FORECAST_LIST]
analysis_forecast_basenames.sort()
only_analyis_basenames      = [os.path.basename(filename) for filename in ONLY_ANALYSIS_LIST]
only_analyis_basenames.sort()

print xmlfile
dump_xml(xmlfile)
예제 #6
0
    return parser.parse_args()

args = argument()  # parse CLI once at module level

from sat_timeseries import timelistcontainer
from commons.time_interval import TimeInterval
from commons.utils import addsep
import matplotlib
matplotlib.use('Agg')  # headless backend: select before pylab import
import pylab as pl
import matplotlib.dates as mdates
from basins import V2 as OGS

# V1: fixed historical window; V2: from 20160412 up to the requested date.
TI_V1 = TimeInterval('20150407','20160607','%Y%m%d')
TI_V2 = TimeInterval('20160412', args.date,'%Y%m%d')
ARCHIVE_DIR      = addsep(args.archivedir) #"/gpfs/work/IscrC_MYMEDBIO/COPERNICUS/V2-dev"
ARCHIVE_PREV     = addsep(args.previous_archive)   #r"/gpfs/work/IscrC_MYMEDBIO/COPERNICUS/V4"
VALID_WRKDIR     = addsep(args.validation_dir)
OUTFIG_DIR       = addsep(args.outdir) # "/pico/home/userexternal/gcossari/COPERNICUS/CATENA/FIG_VALIDATION_ONLINE"

# One container per forecast day (f0/f1/f2) for each of the two archives.
F0v1    = timelistcontainer(TI_V1,ARCHIVE_PREV,'f0', postfix_dir="")
F1v1    = timelistcontainer(TI_V1,ARCHIVE_PREV,'f1', postfix_dir="")
F2v1    = timelistcontainer(TI_V1,ARCHIVE_PREV,'f2', postfix_dir="")

F0v2 = timelistcontainer(TI_V2,ARCHIVE_DIR, 'f0', postfix_dir="POSTPROC/AVE_FREQ_1/validation/Sat/")
F1v2 = timelistcontainer(TI_V2,ARCHIVE_DIR, 'f1', postfix_dir="POSTPROC/AVE_FREQ_1/validation/Sat/")
F2v2 = timelistcontainer(TI_V2,ARCHIVE_DIR, 'f2', postfix_dir="POSTPROC/AVE_FREQ_1/validation/Sat/")

# Append the fresher validation working dir to every v2 container.
F0v2.append_dir(VALID_WRKDIR)
F1v2.append_dir(VALID_WRKDIR)
F2v2.append_dir(VALID_WRKDIR)  # fix: was F1v2 twice; F2v2 never got the wrkdir data
예제 #7
0
args = argument()
import matplotlib
matplotlib.use('Agg')
from commons.time_interval import TimeInterval
from commons.Timelist import TimeList
from instruments.matchup_manager import Matchup_Manager
import basins.OGS as OGS
from instruments import lovbio_float as bio_float
from commons.mask import Mask
from commons.utils import addsep
from datetime import timedelta

# Time span from the CLI; the end is extended by one day below so the last
# requested day is fully included.
starttime=args.starttime
end__time=args.endtime
INPUTDIR=addsep(args.inputdir)
BASEDIR=addsep(args.basedir)


TI=TimeInterval(starttime,end__time,'%Y%m%d')
TI.end_time = TI.end_time + timedelta(1)

TheMask=Mask(args.maskfile)

# All float profiles in the Mediterranean within TI, matched against the
# model ave files found in INPUTDIR.
Profilelist=bio_float.FloatSelector(None,TI,OGS.med)
TL = TimeList.fromfilenames(TI, INPUTDIR,"ave*.nc")
M = Matchup_Manager(Profilelist,TL,BASEDIR)

profilerscript = BASEDIR + 'jobProfiler.sh'
M.writefiles_for_profiling('VarDescriptor_valid_online.xml', profilerscript) # preparation of data for aveScan
M.dumpModelProfiles(profilerscript) # sequential launch of aveScan
예제 #8
0
파일: aveScan.py 프로젝트: inogs/bit.sea

args = argument()

import scipy.io.netcdf as NC
import glob
import os
import numpy as np
import read_descriptor
import IOnames as IOname
from maskload import *
import GB_lib
from commons.utils import addsep


# Directory layout for aveScan postprocessing (all CLI-provided).
INPUT_AVEDIR = addsep(args.inputdir)
AGGREGATE_AVEDIR = addsep(args.aggregatedir)
TMPDIR       = addsep(args.tmpdir)
BASEDIR      = addsep(args.outdir)
ionamesfile  = args.ionames
IOnames      = IOname.IOnames(ionamesfile)  # file-name layout descriptor
filtervar    = args.var

# Optional MPI parallelism: fall back to serial mode when mpi4py is missing.
try:
    from mpi4py import MPI
    comm  = MPI.COMM_WORLD
    rank  = comm.Get_rank()
    nranks =comm.size
    isParallel = True
except:
    # NOTE(review): the serial fallback appears truncated in this excerpt —
    # expect it to also set nranks = 1 and isParallel = False (cf. the
    # sibling script's identical try/except); confirm in the full file.
    rank   = 0
예제 #9
0


# Optional MPI setup: serial defaults (rank 0 of 1) when mpi4py is missing.
try :
    from mpi4py import MPI
    comm  = MPI.COMM_WORLD
    rank  = comm.Get_rank()
    nranks = comm.size 
except:
    rank   = 0
    nranks = 1

args = argument()
 

# Biology/physics input dirs; PATH_NAME matches every ave.* biology file.
BIOAVEDIR     = addsep(args.biodir)
PHYS_DIR      = addsep(args.physdir)
PATH_NAME = BIOAVEDIR + "ave.*nc"
if rank==0 : 
    print "BIO_INPUT_DIR =", BIOAVEDIR


TMPOUTdir  = addsep(args.tmpdir)
if rank==0 : print "TMPOUTDIR= ", TMPOUTdir
os.system("mkdir -p " + TMPOUTdir)

archived_filelist=glob.glob(PATH_NAME)
archived_filelist.sort()

# Round-robin distribution of the sorted file list across MPI ranks.
for filename in archived_filelist[rank::nranks]:
    dailyAve  = os.path.basename(filename)
예제 #10
0
from commons.mask import Mask
from commons.layer import Layer

from layer_integral.mapbuilder import MapBuilder
from layer_integral.mapplot import mapplot,pl
from commons.dataextractor import DataExtractor
from commons.time_averagers import TimeAverager3D
from layer_integral import coastline
import commons.timerequestors as requestors
from commons.utils import addsep

clon,clat = coastline.get()  # coastline lon/lat for map overlays
TheMask=Mask(args.maskfile)


INPUTDIR  = addsep(args.inputdir)
OUTPUTDIR = addsep(args.outdir)
var       = args.varname


# Layer definition file: one row per layer (top, bottom, map depth filter).
ldtype=[('top',np.float32),('bottom',np.float32),('mapdepthfilter',np.float32)]
LF = np.loadtxt(args.layerfile,ldtype,ndmin=1)  # ndmin=1 keeps 1-row files iterable

LAYERLIST=[ Layer(l['top'], l['bottom']) for l in LF ]

UNITS_DICT={
         'ppn' : 'gC/m^2/y',
         'N1p' : 'mmol /m^3',
         'N3n' : 'mmol /m^3',
         'PH'  : '',
         'pCO2': 'ppm',
예제 #11
0
from commons.submask import SubMask
from basins import V2 as OGS
import glob
import scipy.io.netcdf as NC

def weighted_mean(Conc, Weight):
    """Return the Weight-weighted average of Conc.

    Conc and Weight are array-likes of the same shape; the result is
    sum(Conc * Weight) / sum(Weight).
    """
    total_weight = Weight.sum()
    total_mass = (Conc * Weight).sum()
    return total_mass / total_weight



# CLI inputs: date tag, model forecast dir, satellite reference dir, output file.
date=args.date
MODELDIR=addsep(args.forecastdir)
REF_DIR=addsep(args.satdir)
outfile = args.outfile

TheMask=Mask(args.maskfile)


nSUB = len(OGS.P.basin_list)

jpk,jpj,jpi =TheMask.shape
mask200_2D = TheMask.mask_at_level(200.0)  # mask of points with depth > 200 m
# One boolean 2-D surface mask per sub-basin, packed in a structured array.
# fix: np.bool was a deprecated alias of the builtin and is removed in
# modern NumPy; plain bool is equivalent on every NumPy version.
dtype = [(sub.name, bool) for sub in OGS.P]
SUB = np.zeros((jpj,jpi),dtype=dtype)

for sub in OGS.Pred:
    SUB[sub.name]  = SubMask(sub,maskobject=TheMask).mask_at_level(0)
예제 #12
0
    parser.add_argument(   '--type', 
                                type = str,
                                choices = ['analysis','forecast'],
                                required = True)

    return parser.parse_args()

args = argument()

from commons.timeseries import TimeSeries
from commons.time_interval import TimeInterval
from commons.utils import addsep

# Extraction window and destination directory from the CLI.
starttime=args.starttime
end__time=args.endtime
LOC = addsep(args.outdir)
archive_dir= args.arcdir

TI=TimeInterval(starttime,end__time,'%Y%m%d')

# Analysis mode: pull biology and physics archives and unpack both locally.
if args.type=='analysis':
    T_bio = TimeSeries(TI, archive_dir,postfix_dir='POSTPROC/AVE_FREQ_1/ARCHIVE/',glob_pattern="ave*gz")
    T_phys= TimeSeries(TI, archive_dir,postfix_dir='OPAOPER_A/'          ,glob_pattern="*gz"   )
    
    T_bio.extract_analysis( LOC + 'output_bio/')
    T_phys.extract_analysis(LOC + 'output_phys/');

# Forecast mode (this branch continues beyond the visible excerpt).
if args.type =='forecast':
    
    T_bio = TimeSeries(TI, archive_dir,postfix_dir='POSTPROC/AVE_FREQ_1/ARCHIVE/',glob_pattern="ave*gz")
    T_phys_s= TimeSeries(TI, archive_dir,postfix_dir='OPAOPER_A/'          ,glob_pattern="*gz" )
예제 #13
0
from commons.mask import Mask
from commons.submask import SubMask
from basins import V2 as OGS
from commons.layer import Layer
from commons.utils import addsep
import pickle

def weighted_mean(Conc, Weight):
    """Weighted average of Conc with weights Weight:
    sum(Conc * Weight) / sum(Weight)."""
    return (Conc * Weight).sum() / Weight.sum()

# Mask and directories for the model-vs-satellite comparison.
TheMask=Mask(args.maskfile)
MODEL_DIR= addsep(args.inputmodeldir)
REF_DIR  = addsep(args.satdir)
outfile  = args.outfile


# Fixed comparison window.
Timestart="19990101"
Time__end="20150901"
TI    = TimeInterval(Timestart,Time__end,"%Y%m%d")

# Satellite files carry a bare YYYYMM date (no prefix); model files are
# the chlorophyll (*P_l.nc) aves with the default ave. naming.
sat_TL   = TimeList.fromfilenames(TI, REF_DIR  ,"*.nc", prefix="", dateformat="%Y%m")
model_TL = TimeList.fromfilenames(TI, MODEL_DIR,"*P_l.nc")

IOname = IOnames.IOnames('IOnames_sat_monthly.xml')

nFrames = model_TL.nTimes
nSUB = len(OGS.P.basin_list)