Example #1
def BuildFieldOLD(BuildType, TheVar, DirStr, InFileStr, TheStYr, TheEdYr,InFileOLD = ''):
    ''' Function for building complete reanalysis fields over the period specified.
        This can be very computationally expensive, so it is done one year at a time.
        It requires the initial reanalysis data to have been read in previously in 1-month chunks.

        INPUTS:
        BuildType - 'Update' or 'Build'
        TheVar - lower-case string variable name: q, rh, t, td, dpd, tw, e, msl, sp or ws (with 2m appended where appropriate)
        DirStr - string directory path of the input files
        InFileStr - string file-name suffix to read in
        TheStYr - integer start year of data - assumes Jan 1st (0101) start
        TheEdYr - integer end year of data - assumes Dec 31st (1231) end
        InFileOLD - optional string of the old data file to read in (required when BuildType is 'Update')
        OUTPUTS:
        TheNewData[:,:,:] - time, lat, long numpy array of the complete field at the new time resolution

        '''
    # Should just be able to read in the data and then append to build complete file
    nyrs = (TheEdYr - TheStYr) + 1

    if (BuildType == 'Build'):
    
        FuncStYr = TheStYr
        NewDataArr = np.array(()) # This will be set up on first read in - this has len() of 0!!!


    elif (BuildType == 'Update'):
    
        FuncStYr = TheEdYr

        # Now read in the old data to start array to append to
        NewDataArr,Latitudes,Longitudes = GetGrid4(InFileOLD,[TheVar],['latitude'],['longitude'])
    
    for y in np.arange(FuncStYr,TheEdYr+1):
     
        # Get the actual year we're working on
        print('Working Year: ',y)

        # Loop through each month
        for m in range(12):

            # string for the file name
            mm = '%02i' % (m+1)

            # Now read in the old data to start array to append to
            TmpDataArr,Latitudes,Longitudes = GetGrid4(DirStr+str(y)+mm+InFileStr,[TheVar],['latitude'],['longitude'])
	    
            if (len(NewDataArr) == 0):
	    
                # this is the start of the build
                NewDataArr = np.copy(TmpDataArr)
		
            else:
	    
                NewDataArr = np.append(NewDataArr,np.copy(TmpDataArr),0)

    #print('Check built array')
    #pdb.set_trace()
    
    return NewDataArr
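
For context, a minimal usage sketch (the directory path, file-name suffix and years are hypothetical; it assumes only the DirStr+YYYYMM+InFileStr naming that the function itself constructs):

# A minimal usage sketch - paths, suffix and years are illustrative only

# Full build: reads every YYYYMM monthly file from 1973 to 2019 and stacks along time
qdata = BuildFieldOLD('Build', 'q', '/data/ERA5/', '_q_monthly.nc', 1973, 2019)

# Update: reads the previously built field and appends only the final year
qdata = BuildFieldOLD('Update', 'q', '/data/ERA5/', '_q_monthly.nc', 1973, 2019,
                      InFileOLD='/data/ERA5/q_complete_19732018.nc')
print(qdata.shape)  # (ntimes, nlats, nlons)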
Example #2
def GetDaily(TheVar, DirStr, InFileStr, TheYr):
    ''' Function for building one complete year of reanalysis data from files previously saved in 1-month chunks.
        This can be very computationally expensive, so it is done one year at a time.

        INPUTS:
        TheVar - lower-case string variable name: q, rh, t, td, dpd, tw, e, msl, sp or ws (with 2m appended where appropriate)
        DirStr - string directory path of the input files
        InFileStr - string file-name suffix to read in
        TheYr - integer year of data to read - assumes Jan 1st (0101) start
        OUTPUTS:
        TheNewData[:,:,:] - time, lat, long numpy array of the complete field at the new time resolution

        '''
    NewDataArr = np.array(()) # This will be set up on first read in - this has len() of 0!!!

    # Loop through each month
    for m in range(12):

        # string for the file name
        mm = '%02i' % (m+1)

        # Now read in the old data to start array to append to
        TmpDataArr,Latitudes,Longitudes = GetGrid4(DirStr+str(TheYr)+mm+InFileStr,[TheVar],['latitude'],['longitude'])
	    
        if (len(NewDataArr) == 0):
	    
            # this is the start of the build
            NewDataArr = np.copy(TmpDataArr)
		
        else:
	    
            NewDataArr = np.append(NewDataArr,np.copy(TmpDataArr),0)
    
    return NewDataArr
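
The repeated np.append above reallocates the full array every month; a sketch of an equivalent pattern (not from the original code) that collects the twelve monthly chunks and concatenates once:

# Equivalent sketch: gather the monthly arrays in a list, concatenate once along time
import numpy as np

def GetDailyOnce(TheVar, DirStr, InFileStr, TheYr):
    MonthlyChunks = []
    for m in range(12):
        mm = '%02i' % (m + 1)
        # same GetGrid4 reader and file naming as GetDaily above
        TmpDataArr, Latitudes, Longitudes = GetGrid4(DirStr + str(TheYr) + mm + InFileStr,
                                                     [TheVar], ['latitude'], ['longitude'])
        MonthlyChunks.append(TmpDataArr)
    return np.concatenate(MonthlyChunks, axis=0)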
Example #3
def MaskLandS(TheDataArray,LandMaskFile,TheMDI):
    ''' This function takes in any field (anoms, clims, actuals etc.)
    and returns a sea field masked to gridboxes where lsm < 0.5 (we do not want inland lakes!) and a land field masked to gridboxes where lsm > 0 '''

    # Read in land mask file
    LSMArr,Latitudes,Longitudes = GetGrid4(LandMaskFile,['lsm'],['latitude'],['longitude'])
    LSMArr = np.reshape(LSMArr,(1,len(LSMArr[:,0]),len(LSMArr[0,:])))
      
    # loop through each time step and mask
    for tt in range(len(TheDataArray[:,0,0])):
    
        # create a temporary array for this time step with an added time dimension of 1
        TmpArr = np.reshape(TheDataArray[tt,:,:],(1,len(TheDataArray[0,:,0]),len(TheDataArray[0,0,:])))

        # Set up temporary MDI land and sea arrays
        LandTmp = np.empty_like(TmpArr,dtype = float)
        LandTmp[:,:,:] = TheMDI
    
        SeaTmp = np.empty_like(TmpArr,dtype = float)
        SeaTmp[:,:,:] = TheMDI
   
        # Fill the land and sea arrays - trying more inclusive approach
        LandTmp[np.where(LSMArr > 0.)] = TmpArr[np.where(LSMArr > 0.)]
        SeaTmp[np.where(LSMArr < 0.5)] = TmpArr[np.where(LSMArr < 0.5)]
#        LandTmp[np.where(LSMArr >= 0.25)] = TmpArr[np.where(LSMArr >= 0.25)]
#        SeaTmp[np.where(LSMArr <= 0.75)] = TmpArr[np.where(LSMArr <= 0.75)]
	
        # Now build complete array
        if (tt == 0):
	
            LandData = LandTmp
            SeaData = SeaTmp
	    
        else:
	
            LandData = np.append(LandData,np.copy(LandTmp),0)
            SeaData = np.append(SeaData,np.copy(SeaTmp),0)
	    
        #print('Test land sea mask')
        #pdb.set_trace()
	
    LSMArr = 0
	    
    return LandData, SeaData
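
Because the land/sea mask already carries a leading time dimension of 1, the per-timestep loop above can also be written as one broadcast operation; a sketch assuming the same GetGrid4 reader and MDI convention:

# Vectorised sketch of MaskLandS: the (1, nlat, nlon) mask broadcasts
# against the (ntime, nlat, nlon) data array, so no time loop is needed
import numpy as np

def MaskLandSVectorised(TheDataArray, LandMaskFile, TheMDI):
    LSMArr, Latitudes, Longitudes = GetGrid4(LandMaskFile, ['lsm'], ['latitude'], ['longitude'])
    LSMArr = np.reshape(LSMArr, (1, LSMArr.shape[-2], LSMArr.shape[-1]))

    LandData = np.full(TheDataArray.shape, TheMDI, dtype=float)
    SeaData = np.full(TheDataArray.shape, TheMDI, dtype=float)

    LandMask = np.broadcast_to(LSMArr > 0., TheDataArray.shape)  # any land at all
    SeaMask = np.broadcast_to(LSMArr < 0.5, TheDataArray.shape)  # mostly sea - skips inland lakes
    LandData[LandMask] = TheDataArray[LandMask]
    SeaData[SeaMask] = TheDataArray[SeaMask]
    return LandData, SeaData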
Example #4
def main(argv):
    # INPUT PARAMETERS AS STRINGS!!!!
    var = 'q'	    # 'q','rh','e','td','tw','t','dpd'
    mchoice = 0 # 0 to 11: either the single month to plot or the start of the month range to plot (can span 11-1)
    mchoiceend = '' # '' for a single month, or the end month of the range 0-11 (can span 11-1)
    mmult = False # False to plot an average over the range, True to plot each individual month of the range
    ychoice = 1973 # either the single year to plot or the start of the year range to plot - for an 11-1 month span give the year of month 11
    ychoiceend = '' # '' for a single year, or the end year of the range
    ymult = False # False to plot an average over the range, True to plot each individual year of the range
    
    try:
        opts, args = getopt.getopt(argv, "hi:",
	                           ["var=","mchoice=","mchoiceend=","mmult=","ychoice=","ychoiceend=","ymult="])
    except getopt.GetoptError:
        print('Usage (as strings) PlotAnyMap.py --var <q> --mchoice <0> --mchoiceend <11> --mmult False --ychoice <1973> --ychoiceend <''> --ymult True')
        sys.exit(2)

    for opt, arg in opts:
        if opt == "--var":
            try:
                Var = arg
            except:
                sys.exit("Failed: var not a string")
        elif opt == "--mchoice":
            try:
                Mchoice = arg
            except:
                sys.exit("Failed: mchoice not an integer")
        elif opt == "--mchoiceend":
            try:
                Mchoiceend = arg
            except:
                sys.exit("Failed: mchoiceend not an integer")
        elif opt == "--mmult":
            try:
                Mmult = arg
            except:
                sys.exit("Failed: mmult not a boolean")
        elif opt == "--ychoice":
            try:
                Ychoice = arg
            except:
                sys.exit("Failed: ychoice not an integer")
        elif opt == "--ychoiceend":
            try:
                Ychoiceend = arg
            except:
                sys.exit("Failed: ychoiceend not an integer")
        elif opt == "--ymult":
            try:
                Ymult = arg
            except:
                sys.exit("Failed: ymult not a boolean")

#    assert year1 != -999 and year2 != -999, "Year not specified."

    print(Var,Mchoice,Mchoiceend,Mmult,Ychoice,Ychoiceend,Ymult)
    # Set argparse so that key arguments can be passed to this program externally rather than being set above

    StYr = StartYr

    # Version
    if (Domain == 'land'):
        Version = lversion
        ExpType = lExpType
    elif (Domain == 'marine'):    
        Version = mversion
        ExpType = mExpType
    elif (Domain == 'blend'):
        Version = bversion
        ExpType = bExpType
    elif (Domain == 'ERA5'):
        Version = ''
        ExpType = ''

    # Select your month of choice, or a range for an average, or a range for looping through
    # 0...11 represent Jan..Dec, [2,4] for Mar-Apr-May average, [0,11] for annual average, [11,1] for Dec-Jan-Feb average
    # For month ranges that span 11 to 0, December will be taken from the first year of ChooseYr - this will NOT work for the last year!
    if (Mchoiceend != ''):
        ChooseMon = [int(Mchoice),int(Mchoiceend)]
    else:
        ChooseMon = [int(Mchoice)]
    if (Mmult == 'True'):
        PlotMonMultiple = True # False to plot an average over range, True to plot each individual month in range
    else:
        PlotMonMultiple = False # False to plot an average over range, True to plot each individual month in range
    
    # Select your year of choice, or a range for an average, or a range for looping through
    # 1973...2014 for individual years, [1973,1982] for decadal average etc
    if (Ychoiceend != ''):
        ChooseYr = [int(Ychoice),int(Ychoiceend)]
    else:
        ChooseYr = [int(Ychoice)]
    if (Ymult == 'True'):
        PlotYrMultiple = True # False to plot an average over range, True to plot each individual year in range
    else:
        PlotYrMultiple = False # False to plot an average over range, True to plot each individual year in range
        
    # Set up files and variable bundles:
    # For HadISDH the next bits are automatically populated; uncomment a bundle if it's a different dataset
    DataSetShort = 'HadISDH.'+Domain+VarDict[Var][0]+'.'+Version
    DataSet = DataSetShort+'_'+ExpType
    candidate = DataSet+'_anoms'+str(ChooseClimSt)[2:4]+str(ChooseClimEd)[2:4]
    NameOfVar = [Var+'_'+vartype]
    LatInfo = ['latitude'] # variable name or number of latitudes, start latitude
    LonInfo = ['longitude']  # variable name or number of longitudes, start longitude

    # Set up other variables
    VarName = VarDict[Var][2]
    Unit = VarDict[Var][1]  
    ColourMapChoice = VarDict[Var][3]
    PlotInfo = [RangeDict['MinVal'], RangeDict['MaxVal'], RangeDict['StepVal'], RangeDict['LetterVal']]
    NYrs = (EdYr+1)-StYr
    NMons = NYrs*12-(12-EdMon)
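    # NB: NYrs counts EdYr as a full year, so the (12-EdMon) term removes the
    # months not yet present in an incomplete final year (e.g. EdMon = 7 drops 5 months)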

    if (Domain == 'ERA5'):

        INDIRC = INDIRO
        candidate = Var+'2m_monthly_5by5_ERA5_clim'+str(ChooseClimSt)+'-'+str(ChooseClimEd)+'_'+str(StYrERA)+str(EdYr)
        DataSet = Domain+VarDict[Var][0]
        NameOfVar = [Var+'2m_'+vartype]
        StYr = StYrERA
        NYrs = (EdYr+1)-StYr
        NMons = NYrs*12-(12-EdMon)

    # read in trend maps
    MyFile = INDIRC+candidate+'.nc'
    #print(NameOfVar,LatInfo,LonInfo)
    TmpCandFields,LatList,LonList = GetGrid4(MyFile,NameOfVar,LatInfo,LonInfo)

    # If the data do not end in December then pad the file with missing data
    if (EdMon < 12):
        #pdb.set_trace()
        TmpCandFields = np.concatenate((TmpCandFields,np.reshape(np.repeat(mdi,((12-EdMon)*len(LonList)*len(LatList))),(12-EdMon,len(LatList),len(LonList)))))
        #pdb.set_trace()

    # force mdi to equal mdi without floating point errors
    print(len(TmpCandFields[TmpCandFields < mdi]))
    TmpCandFields[TmpCandFields < mdi] = mdi
    print(len(TmpCandFields[TmpCandFields < mdi]))
    #pdb.set_trace()

    # Do you need to renormalise (or create anomalies from actuals)?
    if (DoReClim):
        CandFields = GetAnomalies(TmpCandFields,StYr,EdYr,ChooseClimSt,ChooseClimEd,mdi,PctDataPresent)
    else:
        CandFields = TmpCandFields    

    # Now work on plots either singular or multiple
    # If we're looping through years then start loop
    if (PlotYrMultiple):

        for yy in range(ChooseYr[0],ChooseYr[1]+1): # needs extra to include last year within the range

            YrStr = str(yy)
	    
            # If we're looping through months then start loop
            if (PlotMonMultiple):
	
                for mm in range(ChooseMon[0],ChooseMon[1]+1): # needs extra to include last month within the range
	
                    MonStr = MonArr[mm]

                    # Extract chosen month
                    CandData = SelectSlice(CandFields,StYr,EdYr,[mm],[yy],mdi,PctDataPresent)

                    # pass to plotter
                    OUTPLOT = 'Map_'+DataSet+'_clim'+str(ChooseClimSt)+str(ChooseClimEd)+'_'+PlotType+MonStr+YrStr
                    Namey = DataSetShort+' '+PlotType+' '+MonStr+' '+YrStr
                    MyFile = OUTDIRP+OUTPLOT
                    PlotAnyMap(MyFile,LatList,LonList,CandData,Unit,Namey,ColourMapChoice,PlotType,VarName,mdi,PlotInfo)
                    if (SaveData):
                        WriteNetCDF(OUTDIRD+OUTPLOT+'.nc',CandData,mdi,Var,VarName,Unit,LatList,LonList)

            # If we're not then work on the individual or average
            else:
       
                if (len(ChooseMon) == 1):
               
                    MonStr = MonArr[ChooseMon[0]]
    
                else:
               
                    MonStr = MonArr[ChooseMon[0]]+'-'+MonArr[ChooseMon[1]]
		
                # Extract chosen month
                CandData = SelectSlice(CandFields,StYr,EdYr,ChooseMon,[yy],mdi,PctDataPresent)

                # pass to plotter
                OUTPLOT = 'Map_'+DataSet+'_clim'+str(ChooseClimSt)+str(ChooseClimEd)+'_'+PlotType+MonStr+YrStr
                Namey = DataSetShort+' '+PlotType+' '+MonStr+' '+YrStr
                MyFile = OUTDIRP+OUTPLOT
                PlotAnyMap(MyFile,LatList,LonList,CandData,Unit,Namey,ColourMapChoice,PlotType,VarName,mdi,PlotInfo)
                if (SaveData):
                    WriteNetCDF(OUTDIRD+OUTPLOT+'.nc',CandData,mdi,Var,VarName,Unit,LatList,LonList)
    
    
    # If we're not looping through years then check month multiples
    else:
        
        # If we're looping through months then start loop
        if (PlotMonMultiple):
	
            for mm in range(ChooseMon[0],ChooseMon[1]+1): # needs extra to include last month within the range
	
                MonStr = MonArr[mm]

                if (len(ChooseYr) == 1):
    
                    YrStr = str(ChooseYr[0])
   
                else:
    
                    YrStr = str(ChooseYr[0])+'-'+str(ChooseYr[1])

                # Extract chosen month
                CandData = SelectSlice(CandFields,StYr,EdYr,[mm],ChooseYr,mdi,PctDataPresent)

                # pass to plotter
                OUTPLOT = 'Map_'+DataSet+'_clim'+str(ChooseClimSt)+str(ChooseClimEd)+'_'+PlotType+MonStr+YrStr
                Namey = DataSetShort+' '+PlotType+' '+MonStr+' '+YrStr
                MyFile = OUTDIRP+OUTPLOT
                PlotAnyMap(MyFile,LatList,LonList,CandData,Unit,Namey,ColourMapChoice,PlotType,VarName,mdi,PlotInfo)
                if (SaveData):
                    WriteNetCDF(OUTDIRD+OUTPLOT+'.nc',CandData,mdi,Var,VarName,Unit,LatList,LonList)
		
        # If we're not then work on the individual or average
        else:
       
            if (len(ChooseMon) == 1):
            
                MonStr = MonArr[ChooseMon[0]]
    
            else:
               
                MonStr = MonArr[ChooseMon[0]]+'-'+MonArr[ChooseMon[1]]
		
            if (len(ChooseYr) == 1):
    
                YrStr = str(ChooseYr[0])
   
            else:
    
                YrStr = str(ChooseYr[0])+'-'+str(ChooseYr[1])

            # Extract chosen month
            CandData = SelectSlice(CandFields,StYr,EdYr,ChooseMon,ChooseYr,mdi,PctDataPresent)
            #pdb.set_trace()
            # pass to plotter
            OUTPLOT = 'Map_'+DataSet+'_clim'+str(ChooseClimSt)+str(ChooseClimEd)+'_'+PlotType+MonStr+YrStr
#            Namey = DataSetShort+' '+PlotType+' '+MonStr+' '+YrStr
            Namey = ''
            MyFile = OUTDIRP+OUTPLOT
            PlotAnyMap(MyFile,LatList,LonList,CandData,Unit,Namey,ColourMapChoice,PlotType,VarName,mdi,PlotInfo)
            if (SaveData):
                WriteNetCDF(OUTDIRD+OUTPLOT+'.nc',CandData,mdi,Var,VarName,Unit,LatList,LonList)

    #pdb.set_trace()
    
    print("And, we are done!")
Example #5
    # If its an update read in old pentad fields and old monthly fields to append to,
    # then read in year of daily data, process to pentad and monthly and then append
    # If its a total build then read in year by year, process to pentads and monthlies and append

    if (Freq == 'pentad'):

        if (ThisProg == 'Build'):
    
            PentadDataArr = np.array(()) # This will be set up on first read in - this has len() of 0!!!

        elif (ThisProg == 'Update'):
    
            # Now read in the old data to start array to append to
#            PentadDataArr,Latitudes,Longitudes = GetGrid4(InFileOLDPT,[TheVar],['latitude'],['longitude'])
            PentadDataArr,Latitudes,Longitudes = GetGrid4(OldERAStrPT,[Var+'2m'],['latitude'],['longitude'])

    else:
       
        if (ThisProg == 'Build'):
    
            MonthDataArr = np.array(()) # This will be set up on first read in - this has len() of 0!!!

        elif (ThisProg == 'Update'):
    
            # Now read in the old data to start array to append to
#            MonthDataArr,Latitudes,Longitudes = GetGrid4(InFileOLDMN,[TheVar],['latitude'],['longitude'])
            MonthDataArr,Latitudes,Longitudes = GetGrid4(OldERAStrMN,[Var+'2m'],['latitude'],['longitude'])
    
    
    # Loop through the years
Example #6
            outfile = outfile + '_marine'
    else:
        ReadInfo = [varname + '_anoms', 'time']
else:
    if (homogtype == 'ERA-Interim') | (homogtype == 'ERA5'):
        if (domain == 'land'):
            ReadInfo = [varname + '_land', 'time']
            outfile = outfile + '_land'
        if (domain == 'marine'):
            ReadInfo = [varname + '_ocean', 'time']
            outfile = outfile + '_marine'
    else:
        ReadInfo = [varname + '_abs', 'time']

print('Reading in the data for :', homogtype)
TmpVals, Latitudes, Longitudes = GetGrid4(infile, ReadInfo, LatInfo, LonInfo)

# Separate out data and times
TheData = TmpVals[0]
Times = TmpVals[1]
TmpVals = []

# Check the mdis - the IDL output netCDF differs from the Python output
bads = np.where(TheData < -10000)
if (len(bads[0]) > 0):
    TheData[bads] = mdi

# Now if we're masking then read in the mask for the time slice of ERA-Interim
if (mask == True):

    SliceInfo = dict([('TimeSlice', [mskstpt, mskedpt]),
Example #7
            edyr) + '.nc'

    # What are we working on?
    print('Working variable: ', Var)
    print('Working frequency: ', Freq)
    print('Climatology: ', ClimStart, ClimEnd)
    print('Reanalysis: ', ThisRean)

    # If its an update read in old pentad fields and old monthly fields to append to,
    # then read in year of daily data, process to pentad and monthly and then append
    # If its a total build then read in year by year, process to pentads and monthlies and append

    if (Freq == 'pentad'):

        # Read in the actuals and then the land only and marine only (for mask)
        PentadDataArr, Latitudes, Longitudes = GetGrid4(
            OldERAStrP1, [Var + '2m'], ['latitude'], ['longitude'])
        PentadDataLand, Latitudes, Longitudes = GetGrid4(
            OldERAStrP1, [Var + '2m_anoms_land'], ['latitude'], ['longitude'])
        PentadDataSea, Latitudes, Longitudes = GetGrid4(
            OldERAStrP1, [Var + '2m_anoms_ocean'], ['latitude'], ['longitude'])

        # Recalculate climatology and apply to create anomalies and climatological stdevs
        PentadAnoms, PentadClims, PentadStdevs = CreateAnoms(
            ClimStart, ClimEnd, styr, edyr, PentadDataArr, mdi)

        # Apply existing land and sea mask to anomalies
        PentadAnomsLand = np.copy(PentadAnoms)
        PentadAnomsLand[np.where(PentadDataLand == mdi)] = mdi
        PentadAnomsSea = np.copy(PentadAnoms)
        PentadAnomsSea[np.where(PentadDataSea == mdi)] = mdi
Example #8
#************************************************************
# MAIN
#************************************************************
# What are we working on?
print('Working variable: ', OutputVar)
print('Input Time and Grid: ', ReadInTime, ReadInGrid)
print('Output Time and Grid: ', OutputTime, OutputGrid)
print('Type of run: ', ThisProg, styr, edyr, MakeAnoms)
print('Reanalysis: ', ThisRean)

# For ThisProg = Convert or Update read in monthly or pentad 1by1 (to present or previous year)
if (ThisProg != 'Build'):

    ReadInfo = [OutputVar + '2m']
    FileName = workingdir + '/OTHERDATA/' + OldERAStr
    TheData, Latitudes, Longitudes = GetGrid4(FileName, ReadInfo, LatInfo,
                                              LonInfo)

    # For Update we also need to read in most recent year (or months) of data and convert to desired variable (BuildField)
    if (ThisProg == 'Update'):

        print('Creating Update')

        # Set up the desired output array
        if (OutputTime == 'monthly'):
            FullArray = np.empty((nmons, nlatsOut, nlonsOut), dtype=float)
        elif (OutputTime == 'pentad'):
            FullArray = np.empty((npts, nlatsOut, nlonsOut), dtype=float)
        FullArray.fill(mdi)

        # Build the most recent year
        if (OutputTime == 'monthly'):
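
The FullArray sizing above depends on the number of time steps per year; a sketch of how nmons and npts are presumably derived, assuming (consistently with CreateAnoms in the next example, where nclims is 12 or 73) 12 months or 73 pentads per year:

# Sketch only - nmons/npts are set elsewhere in the script
nyrs = (edyr - styr) + 1
nmons = nyrs * 12   # monthly time steps
npts = nyrs * 73    # pentad time steps: 365/5 = 73 five-day means per year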
Example #9
def CreateAnoms(TheInputGrid, TheOutputTime, TheClimSt, TheClimEd, TheStYr,
                TheEdYr, TheInData):
    '''
    This function takes any grid and any var, computes climatologies/stdevs over the given period and then anomalies.
    It also outputs land-only and ocean-only anomalies depending on the grid:
    if (TheInputGrid == '5by5'):
        LandMask = workingdir+'/OTHERDATA/HadCRUT.4.3.0.0.land_fraction.nc' # 0 = 100% sea, 1 = 100% land - no islands!, latitude, longitude, land_area_fraction, -87.5 to 87.5, -177.5 to 177.5
    elif (TheInputGrid == '1by1'):
        LandMask = workingdir+'/OTHERDATA/lsmask.nc' # 1 = sea, 0 = land - no islands! lat, lon, mask, 89.5 to -89.5 lat, 0.5 to 359.5 lon

    INPUTS:
    TheInputGrid - string of 1by1 or 5by5 to determine the land mask to use
    TheOutputTime - string of monthly or pentad
    TheClimSt - integer start year of climatology, always Jan start
    TheClimEd - integer end year of climatology, always Dec end
    TheStYr - integer start year of the data from which to find the climatology
    TheEdYr - integer end year of the data from which to find the climatology
    TheInData[:,:,:] - time, lat, lon array of actual values

    OUTPUTS:
    AllAnomsArr[:,:,:] - time, lat, lon array of anomalies
    LandAnomsArr[:,:,:] - time, lat, lon array of land anomalies
    OceanAnomsArr[:,:,:] - time, lat, lon array of ocean anomalies
    ClimsArr[:,:,:] - time, lat, lon array of climatologies
    StDevsArr[:,:,:] - time, lat, lon array of standard deviations

    '''

    # Set up for time
    if (TheOutputTime == 'monthly'):
        nclims = 12
    elif (TheOutputTime == 'pentad'):
        nclims = 73
    nyrs = (TheEdYr - TheStYr) + 1

    # Get land/sea mask and format accordingly
    if (TheInputGrid == '1by1'):
        MaskData, Lats, Longs = GetGrid4(LandMask, ['mask'], ['lat'], ['lon'])
        # Check shape and force to be 2d
        if (len(np.shape(MaskData)) == 3):
            MaskData = np.reshape(MaskData, (180, 360))
        # roll the longitudes
        MaskData = np.roll(MaskData[:, :], 180, axis=1)
        # swap the land/sea so that land = 1
        land = np.where(MaskData == 0)
        MaskData[np.where(MaskData == 1)] = 0
        MaskData[land] = 1
    elif (TheInputGrid == '5by5'):
        MaskData, Lats, Longs = GetGrid4(LandMask, ['land_area_fraction'],
                                         LatInfo, LonInfo)
        if (len(np.shape(MaskData)) == 3):
            MaskData = np.reshape(MaskData, (36, 72))

    # first create empty arrays
    AllAnomsArr = np.empty_like(TheInData)
    AllAnomsArr.fill(mdi)
    LandAnomsArr = np.copy(AllAnomsArr)
    OceanAnomsArr = np.copy(AllAnomsArr)
    ClimsArr = np.copy(AllAnomsArr[0:nclims, :, :])
    StDevsArr = np.copy(AllAnomsArr[0:nclims, :, :])

    # loop through gridboxes
    for lt in range(len(TheInData[0, :, 0])):
        for ln in range(len(TheInData[0, 0, :])):

            # pull out gridbox and reform to years by nclims (months or pentads)
            SingleSeries = np.reshape(
                TheInData[:, lt,
                          ln], (nyrs, nclims))  # nyrs rows, nclims columns

            # create an empty array to fill with anomalies
            NewSingleSeries = np.empty_like(SingleSeries)
            NewSingleSeries.fill(mdi)

            # loop through clims 1 to 12 or 73
            for m in range(nclims):

                # create, save and subtract climatological mean
                # THERE ARE NO MISSING DATA IN ERA INTERIM but sst is missing over land
                # test first time value only
                if (SingleSeries[0, m] > mdi):
                    ClimsArr[m, lt, ln] = np.mean(
                        SingleSeries[(TheClimSt -
                                      TheStYr):((TheClimEd - TheStYr) + 1), m])
                    StDevsArr[m, lt, ln] = np.std(
                        SingleSeries[(TheClimSt -
                                      TheStYr):((TheClimEd - TheStYr) + 1), m])
                    NewSingleSeries[:,
                                    m] = SingleSeries[:, m] - ClimsArr[m, lt,
                                                                       ln]

            # fill new arrays
            AllAnomsArr[:, lt, ln] = np.reshape(NewSingleSeries, nyrs * nclims)

            # is there any land?
            if (MaskData[lt, ln] > 0):
                LandAnomsArr[:, lt, ln] = np.reshape(NewSingleSeries,
                                                     nyrs * nclims)

            # is there any sea?
            if (MaskData[lt, ln] < 1):
                OceanAnomsArr[:, lt, ln] = np.reshape(NewSingleSeries,
                                                      nyrs * nclims)

    return AllAnomsArr, LandAnomsArr, OceanAnomsArr, ClimsArr, StDevsArr
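
The per-gridbox arithmetic above reduces to: reshape the series to (nyrs, nclims), average the base-period rows, and subtract. A condensed sketch of just that step (omitting the missing-data test, stdevs and land/sea split):

# Condensed sketch of the climatology/anomaly step for one gridbox series
import numpy as np

def AnomaliseSeries(SingleSeries1D, nyrs, nclims, TheClimSt, TheClimEd, TheStYr):
    # one row per year, one column per month (nclims=12) or pentad (nclims=73)
    Series = np.reshape(SingleSeries1D, (nyrs, nclims))
    # climatological mean of the base-period years, per column
    Clims = np.mean(Series[(TheClimSt - TheStYr):((TheClimEd - TheStYr) + 1), :], axis=0)
    # anomalies = values minus the matching climatology, flattened back to 1-D
    return np.reshape(Series - Clims, nyrs * nclims), Clims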
Example #10
    print('Reanalysis: ',ThisRean)

    # If its an update read in old pentad fields and old monthly fields to append to,
    # then read in year of daily data, process to pentad and monthly and then append
    # If its a total build then read in year by year, process to pentads and monthlies and append

    if (Freq == 'pentad'):

        if (ThisProg == 'Build'):
    
            PentadDataArr = np.array(()) # This will be set up on first read in - this has len() of 0!!!

        elif (ThisProg == 'Update'):
    
            # Now read in the old data to start array to append to
            PentadDataArr,Latitudes,Longitudes = GetGrid4(InFileOLDPT,[TheVar],['latitude'],['longitude'])

    else:
       
        if (ThisProg == 'Build'):
    
            MonthDataArr = np.array(()) # This will be set up on first read in - this has len() of 0!!!

        elif (ThisProg == 'Update'):
    
            # Now read in the old data to start array to append to
            MonthDataArr,Latitudes,Longitudes = GetGrid4(InFileOLDMN,[TheVar],['latitude'],['longitude'])
    
    
    # Loop through the years
    for years in range(styr,edyr+1):
Example #11
#    pctLPs=round((float(CountLargePos)/CountGoods)*100.,1)
    
    
#    plt.show()
#    stop()

    plt.savefig(TheFile+".eps")
    plt.savefig(TheFile+".png")
     
    return #PlotClimMap
    
#************************************************************************
# MAIN PROGRAM
#************************************************************************
# read in climatology fields
AllCandData,LatList,LonList=GetGrid4(MyFile,ReadInfo,LatInfo,LonInfo)
PctLand,LLatList,LLonList=GetGrid(INDIRO+incover+'.nc',['land_area_fraction'],['latitude'],['longitude'])

#ncf=netcdf.netcdf_file(INDIRO+incover+'.nc','r')
##var=ncf.variables['pct_land']
#var=ncf.variables['land_area_fraction']
#PctLand=np.array(var.data)
##PctLand=np.transpose(PctLand)
PctLand=np.flipud(PctLand)
PctLand=PctLand[0,:,:]
# If its marine data then swap to % ocean
if (IsLand == False):
    PctLand = 1. - PctLand
if (IsLand == None):
    PctLand[:,:] = 1.
#ncf.close()
Example #12
def main(argv):
    # INPUT PARAMETERS AS STRINGS!!!!
    var = 'q'	    # 'q','rh','e','td','tw','t','dpd'
    typee = 'LAND' # 'LAND','RAW','OTHER', 'BLEND', 'BLENDSHIP', 'MARINE', 'MARINESHIP' # domain does not need to be set correctly!!!
    # can also be 'ERA5','ERA5LAND','ERA5MARINE','ERA5MARINEMASK','ERA5LANDMASK'
    season = 'Monthly' # 'Annual', [0],[11,0,1] etc
    year1 = '1973' # Start year of trend
    year2 = '2018' # End year of trend
    
    try:
        opts, args = getopt.getopt(argv, "hi:",
	                           ["var=","typee=","season=","year1=","year2="])
    except getopt.GetoptError:
        print('Usage (as strings) MakeGridTrends.py --var <q> --typee <IDPHA> --season <Monthly> --year1 <1973> --year2 <2018>')
        sys.exit(2)

    for opt, arg in opts:
        if opt == "--var":
            try:
                var = arg
            except:
                sys.exit("Failed: var not a string")
        elif opt == "--typee":
            try:
                typee = arg
            except:
                sys.exit("Failed: typee not a string")
        elif opt == "--season":
            try:
                season = arg
            except:
                sys.exit("Failed: season not a string")
        elif opt == "--year1":
            try:
                year1 = arg
            except:
                sys.exit("Failed: year1 not an integer")
        elif opt == "--year2":
            try:
                year2 = arg
            except:
                sys.exit("Failed: year2 not an integer")

    assert year1 != -999 and year2 != -999, "Year not specified."

    # *** CHOOSE WHETHER TO WORK WITH ANOMALIES OR ACTUALS - COULD ADD RENORMALISATION IF DESIRED ***
    isanom = True # False for actual values, True for anomalies

    # tw_max_max and t_max_max can only be actual values
    if (var in ['tw_max_max', 't_max_max']):
        isanom = False

    print(season)

    if (season != 'Annual') & (season != 'Monthly'):
        season = season[1:-1].split(',')
        print(season)
        season = [int(i) for i in season]
        print(season)

    print(var,typee,season,year1, year2, 'Fitting to Anomalies = ',isanom)

    #****************** LONGER LIFE EDITABLES****************
    # TWEAK ME!!!!
    # Which trend type and confidence interval?
    TrendType = 'OLS' # this is in fact with AR(1) correction as in Santer et al., 2008
    #TrendType = 'MP'  # this is median pairwise as in Sen 1968

    ConfIntP = 0.9 # confidence interval p value	
    MissingDataThresh = 0.7 # Proportion of data values that must be present across the trend period

    # What domain?
    if (typee == 'MARINE') | (typee == 'MARINESHIP') | (typee == 'ERA5MARINE') | (typee == 'ERA5MARINEMASK'):
        domain = 'marine'
        version = mversion
    elif (typee == 'BLEND') | (typee == 'BLENDSHIP') | (typee == 'ERA5') | (typee == 'ERA5MASK'):
        domain = 'blend'
        version = bversion
    else:
        domain = 'land'
        version = lversion

    # Latitude and Longitude gridbox width and variable names
    latlg = 5.
    lonlg = 5.
    #latlg = 1.
    #lonlg = 1.
    LatInfo = ['latitude'] 
    LonInfo = ['longitude'] 

    # Time and dimension variables
    nyrs =     (edyr+1)-startyr
    nmons =    nyrs*12
    stlt =     -90+(latlg/2.)
    stln =     -180+(lonlg/2.)
    nlats =    int(180/latlg)
    nlons =    int(360/lonlg)

    lats = (np.arange(nlats)*latlg) + stlt
    lons = (np.arange(nlons)*lonlg) + stln

    MDI = -1e30 # missing data indicator

#    WORKINGDIR = '/scratch/hadkw/UPDATE20'+str(edyr)[2:4]
    WORKINGDIR = '/data/users/hadkw/WORKING_HADISDH_9120/UPDATE20'+str(edyr)[2:4]

    INDIR  = WORKINGDIR+'/STATISTICS/GRIDS/'
    OUTDIR = WORKINGDIR+'/STATISTICS/TRENDS/'
    
    # If we're working with ERA5 then set INDIR to OTHERDATA
    if (typee.find('ERA5') >= 0):

        INDIR  = WORKINGDIR+'/OTHERDATA/'
        INDIRH  = WORKINGDIR+'/STATISTICS/GRIDS/'

    # END OF EDITABLES**********************************************************

    # Dictionaries for filename and other things
    ParamDict = dict([('q',['q','q2m','g/kg']),
	      ('rh',['RH','rh2m','%rh']),
	      ('t',['T','t2m','deg C']),
	      ('td',['Td','td2m','deg C']),
	      ('tw',['Tw','tw2m','deg C']),
	      ('e',['e','e2m','hPa']),
	      ('dpd',['DPD','dpd2m','deg C']),
	      ('evap',['q','evap','cm w.e.']),
	      ('tw_max',['TwX','twmax','deg C']), # 'twmx'
	      ('tw_max_max',['TwXX','twmaxx','deg C']), # 'twmx'
	      ('tw_min',['TwN','twmin','deg C']), # 'twmx'
	      ('tw_max_90p',['TwX90p','twx90','%']), # 'twx90'
	      ('tw_mean_90p',['TwM90p','twm90','%']), # 'twm90'
	      ('tw_mean_10p',['TwM10p','twm10','%']), # 'twm90'
	      ('tw_min_10p',['TwN10p','twn10','%']), # 'twm90'
	      ('tw_max_ex25',['TwX25','tw25','%']), # 'tw25'
	      ('tw_max_ex27',['TwX27','tw27','%']), # 'tw27'
	      ('tw_max_ex29',['TwX29','tw29','%']), # 'tw29'
	      ('tw_max_ex31',['TwX31','tw31','%']), # 'tw31'
	      ('tw_max_ex33',['TwX33','tw33','%']), # 'tw33'
	      ('tw_max_ex35',['TwX35','tw35','%']), # 'tw35'
	      ('tw_max_sw25',['TwXD25','twd25','deg C']), # 'tw25'
	      ('tw_max_sw27',['TwXD27','twd27','deg C']), # 'tw27'
	      ('tw_max_sw29',['TwXD29','twd29','deg C']), # 'tw29'
	      ('tw_max_sw31',['TwXD31','twd31','deg C']), # 'tw31'
	      ('tw_max_sw33',['TwXD33','twd33','deg C']), # 'tw33'
	      ('tw_max_sw35',['TwXD35','twd35','deg C']), # 'tw35'
	      ('t_max',['TX','tmax','deg C']), # 'twmx'
	      ('t_max_max',['TXX','tmaxx','deg C']), # 'twmx'
	      ('t_min',['TN','tmin','deg C']), # 'twmx'
	      ('t_max_90p',['TX90p','tx90','%']), # 'twx90'
	      ('t_mean_90p',['TM90p','tm90','%']), # 'twm90'
	      ('t_mean_10p',['TM10p','tm10','%']), # 'twm90'
	      ('t_min_10p',['TN10p','tn10','%']), # 'twm90'
	      ('t_max_ex25',['TX25','t25','%']), # 'tw25'
	      ('t_max_ex30',['TX30','t30','%']), # 'tw27'
	      ('t_max_ex35',['TX35','t35','%']), # 'tw29'
	      ('t_max_ex40',['TX40','t40','%']), # 'tw31'
	      ('t_max_ex45',['TX45','t45','%']), # 'tw33'
	      ('t_max_ex50',['TX50','t50','%']), # 'tw35'
	      ('t_all_ex18',['TN18','t18','%'])]) # 'tw35'


    # Dictionary for looking up variable standard (not actually always standard!!!) names for netCDF output of variables
    NameDict = dict([('q',['specific_humidity',' decadal trend in specific humidity']), # anomaly (1981-2010 base period) added later for anomalies
	 ('rh',['relative_humidity',' decadal trend in relative humidity']),
	 ('e',['vapour_pressure',' decadal trend in vapour pressure']),
	 ('tw',['wetbulb_temperature',' decadal trend in wetbulb temperature']),
	 ('t',['drybulb_temperature',' decadal trend in dry bulb temperature']),
	 ('td',['dewpoint_temperature',' decadal trend in dew point temperature']),
	 ('dpd',['dewpoint depression',' decadal trend in dew point depression']),
	 ('evap',['evaporation',' decadal trend in evaporation']),
	 ('tw_max',['TwX',' decadal trend in monthly maximum 2m wetbulb temperature']), # 'twmx'
	 ('tw_max_max', ['TwXX',' decadal trend in maximum monthly maximum 2m wetbulb temperature']), # 'twmx'
	 ('tw_min',     ['TwN',' decadal trend in monthly minimum 2m wetbulb temperature']), # 'twmx'
	 ('tw_max_90p', ['TwX90p',' decadal trend in percentage of days per month maximum > 90 percentile maximum 2m wetbulb temperature']), # 'twx90'
	 ('tw_mean_90p',['TwM90p',' decadal trend in percentage of days per month mean > 90 percentile mean 2m wetbulb temperature']), # 'twm90'
	 ('tw_mean_10p',['TwM10p',' decadal trend in percentage of days per month mean < 10 percentile mean 2m wetbulb temperature']), # 'twm90'
	 ('tw_min_10p', ['TwN10p',' decadal trend in percentage of days per month minimum < 10 percentile mean 2m wetbulb temperature']), # 'twm90'
	 ('tw_max_ex25',['TwX25',' decadal trend in percentage of days per month >= 25 deg 2m maximum wetbulb temperature']), # 'tw25'
	 ('tw_max_ex27',['TwX27',' decadal trend in percentage of days per month >= 27 deg 2m maximum wetbulb temperature']), # 'tw27'
	 ('tw_max_ex29',['TwX29',' decadal trend in percentage of days per month >= 29 deg 2m maximum wetbulb temperature']), # 'tw29'
	 ('tw_max_ex31',['TwX31',' decadal trend in percentage of days per month >= 31 deg 2m maximum wetbulb temperature']), # 'tw31'
	 ('tw_max_ex33',['TwX33',' decadal trend in percentage of days per month >= 33 deg 2m maximum wetbulb temperature']), # 'tw33'
	 ('tw_max_ex35',['TwX35',' decadal trend in percentage of days per month >= 35 deg 2m maximum wetbulb temperature']), # 'tw35'
	 ('tw_max_sw25',['TwXD25',' decadal trend in degrees per month >= 25 deg 2m maximum wetbulb temperature']), # 'tw25'
	 ('tw_max_sw27',['TwXD27',' decadal trend in degrees per month >= 27 deg 2m maximum wetbulb temperature']), # 'tw27'
	 ('tw_max_sw29',['TwXD29',' decadal trend in degrees per month >= 29 deg 2m maximum wetbulb temperature']), # 'tw29'
	 ('tw_max_sw31',['TwXD31',' decadal trend in degrees per month >= 31 deg 2m maximum wetbulb temperature']), # 'tw31'
	 ('tw_max_sw33',['TwXD33',' decadal trend in degrees per month >= 33 deg 2m maximum wetbulb temperature']), # 'tw33'
	 ('tw_max_sw35',['TwXD35',' decadal trend in degrees per month >= 35 deg 2m maximum wetbulb temperature']), # 'tw35'
	 ('t_max',      ['TX',' decadal trend in monthly maximum 2m drybulb temperature']), # 'twmx'
	 ('t_max_max',  ['TXX',' decadal trend in maximum monthly maximum 2m drybulb temperature']), # 'twmx'
	 ('t_min',      ['TN',' decadal trend in monthly minimum 2m drybulb temperature']), # 'twmx'
	 ('t_max_90p',  ['TX90p',' decadal trend in percentage of days per month maximum > 90 percentile maximum 2m drybulb temperature']), # 'twx90'
	 ('t_mean_90p', ['TM90p',' decadal trend in percentage of days per month mean > 90 percentile mean 2m drybulb temperature']), # 'twm90'
	 ('t_mean_10p', ['TM10p',' decadal trend in percentage of days per month mean < 10 percentile mean 2m drybulb temperature']), # 'twm90'
	 ('t_min_10p',  ['TN10p',' decadal trend in percentage of days per month minimum < 10 percentile mean 2m drybulb temperature']), # 'twm90'
	 ('t_max_ex25', ['TX25',' decadal trend in percentage of days per month >= 25 deg 2m maximum drybulb temperature']), # 'tw25'
	 ('t_max_ex30', ['TX30',' decadal trend in percentage of days per month >= 30 deg 2m maximum drybulb temperature']), # 'tw27'
	 ('t_max_ex35', ['TX35',' decadal trend in percentage of days per month >= 35 deg 2m maximum drybulb temperature']), # 'tw29'
	 ('t_max_ex40', ['TX40',' decadal trend in percentage of days per month >= 40 deg 2m maximum drybulb temperature']), # 'tw31'
	 ('t_max_ex45', ['TX45',' decadal trend in percentage of days per month >= 45 deg 2m maximum drybulb temperature']), # 'tw33'
	 ('t_max_ex50', ['TX50',' decadal trend in percentage of days per month >= 50 deg 2m maximum drybulb temperature']), # 'tw35'
	 ('t_all_ex18', ['TN18',' decadal trend in percentage of days per month >= 18 deg 2m minimum drybulb temperature'])]) # 'tw35'

    # Set up the trend years
    sttrd = int(year1)
    edtrd = int(year2)

    if domain == 'land':
        fileblurb = 'FLATgridHOM5by5'
    elif domain == 'marine':
        if (typee == 'MARINE'):
            fileblurb = 'BClocal5by5both'
        elif (typee == 'MARINESHIP') | (typee == 'ERA5MARINEMASK') | (typee == 'ERA5MARINE'):
            fileblurb = 'BClocalSHIP5by5both'
    elif domain == 'blend':
        if (typee == 'BLEND'):
            fileblurb = 'FLATgridHOMBClocalboth5by5'
        elif (typee == 'BLENDSHIP') | (typee == 'ERA5MASK') | (typee == 'ERA5'):
            fileblurb = 'FLATgridHOMBClocalSHIPboth5by5'

#    if (typee == 'OTHER'):
#        INDIR  = WORKINGDIR+'/OTHERDATA/'
#        OUTDIR  = WORKINGDIR+'/OTHERDATA/'
#    elif (typee == 'MARINE'):
#        INDIR  = '/project/hadobs2/hadisdh/marine/ICOADS.3.0.0/'
#        OUTDIR = '/data/users/hadkw/WORKING_HADISDH/MARINE/DATA/'

    INFILE = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT
    OUTFILE = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT+'_'+TrendType+'trends_'+str(sttrd)+str(edtrd)	#70S-70N

    if (typee.find('ERA5') >= 0):

        INFILE = var+'2m_monthly_5by5_ERA5_1979'+str(edyr)
        OUTFILE = var+'2m_monthly_5by5_'+typee+'_'+climBIT+'_'+TrendType+'trends_'+str(sttrd)+str(edtrd)	#70S-70N

        INFILEH = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT

    # Get Data
    styr = startyr
    if (typee.find('ERA') >= 0):
        styr = stera
	
        if (domain == 'land'):
            ReadInfo = [var+'2m_anoms_land','time']
            OUTFILE = OUTFILE+'_land'
        if (domain == 'marine'):
            ReadInfo = [var+'2m_anoms_ocean','time']
            OUTFILE = OUTFILE+'_marine'
        if (domain == 'blend'):
            ReadInfo = [var+'2m_anoms','time']

        ReadInfoH = [var+'_anoms','time']
   
    else:
        if (isanom):
            ReadInfo = [var+'_anoms','time']
        else:
            ReadInfo = [var+'_abs','time']

    if (isanom == False):        
        OUTFILE = OUTFILE+'_ABS'

    # Filenames if working on seasonal data if not processing monthly
    if (season != 'Monthly'):
        if (season == 'Annual'):
            season = [season]
        seasstr = ''.join([Season_Names[i] for i in season])
        print('Test string: ',seasstr)
#        pdb.set_trace()
        OUTFILE = OUTFILE+'_'+seasstr

    print('Reading in the data for :',var,typee)
    TmpVals,Latitudes,Longitudes = GetGrid4(INDIR+INFILE+'.nc',ReadInfo,LatInfo,LonInfo)
    # Check that data have been read in
    #pdb.set_trace()
    
    # Separate out data and times
    TheData = TmpVals[0]
    Times = TmpVals[1]
    TmpVals = []

    # Check the mdis - the IDL output netCDF differs from the Python output
    TheData = TheData.astype(float)
    bads = np.where(TheData <= -999)
#    bads = np.where(TheData < -10000)
    if (len(bads[0]) > 0):
        TheData[bads] = MDI

    # If we're masking ERA then read in HadISDH
    if (typee.find('MASK') >= 0):
    
        print('Masking ERA5')
        OUTFILE = OUTFILE+'_mask'
        TmpValsH,LatitudesH,LongitudesH = GetGrid4(INDIRH+INFILEH+'.nc',ReadInfoH,LatInfo,LonInfo)

        # Separate out data and times
        TheDataH = TmpValsH[0]
        TimesH = TmpValsH[1]
        TmpValsH = []

        # Check the mdis - the IDL output netCDF differs from the Python output
        bads = np.where(TheDataH < -10000)
        if (len(bads[0]) > 0):
            TheDataH[bads] = MDI
            
        # Make HadISDH start in the same years
        TheDataH = TheDataH[(styr-styrh)*12:((edyr-styrh) + 1)*12,:,:]
            
        # Now mask the ERA data with HadISDH missing data
        TheData[np.where(TheDataH == MDI)] = MDI

    # Calculate trends
    # Set trend multiplier to get decadal values
    DecadalMultiplier = 120 # for monthly data
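    # 120 = months per decade, so the per-timestep OLS slope on monthly data
    # becomes a per-decade trend; for seasonal/annual series (one value per
    # year) the multiplier is reset to 10 below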
	
    # Normal version for non-extremes
    if (var in ['q','e','rh','t','tw','td','dpd', 'evap']):
        
        # If working with season averages then pre-process data
        if (season != 'Monthly'):
        
            DecadalMultiplier = 10
            TheData = GetSeasons(season,TheData,styr,edyr,MDI,MissingDataThresh)

        TrendGrids,LowBoundGrids,UpperBoundGrids,SE1sigGrids = GetTrends(TheData,styr,edyr,sttrd,edtrd,TrendType,ConfIntP,MDI,MissingDataThresh,DecadalMultiplier)

    # Version that uses the HQscores to filter data for the extremes
    else:
        print('Reading in the data for :',var,typee)
        ReadInfo = ['HQscore']
        TheHQData,Latitudes,Longitudes = GetGrid4(INDIR+INFILE+'.nc',ReadInfo,LatInfo,LonInfo)
        # Check that data have been read in
        print(np.shape(TheHQData))
        #pdb.set_trace()
    
        ## Separate out data and times
        #TheHQData = TmpVals[0]
        #TmpVals = []

        # Check the mdis - the IDL output netCDF differs from the Python output
        TheHQData = TheHQData.astype(float)
        bads = np.where(TheHQData <= -999)
        if (len(bads[0]) > 0):
            TheHQData[bads] = MDI

        # If working with season averages then pre-process data
        if (season != 'Monthly'):
        
            DecadalMultiplier = 10
            TheData, TheHQData = GetSeasonsExtremes(season,TheData,TheHQData,styr,edyr,MDI,MissingDataThresh)
        
        TrendGrids,LowBoundGrids,UpperBoundGrids,SE1sigGrids = GetTrendsExtremes(TheData,TheHQData,styr,edyr,sttrd,edtrd,TrendType,ConfIntP,MDI,MissingDataThresh,DecadalMultiplier)    
    
    # Write out files
    if (TrendType == 'OLS'):
    
        CommentText = TrendType+' decadal trend with p='+str(ConfIntP)+' confidence intervals [AR(1) correction following Santer et al., 2008] using a missing data threshold >= '+str(MissingDataThresh) 

    else:

        CommentText = TrendType+' decadal trend with p='+str(ConfIntP)+' confidence intervals following Sen 1968 using a missing data threshold >= '+str(MissingDataThresh)

    if (isanom == True):
        LongText = NameDict[var][1]+' anomaly ('+climST+' to '+climED+' base period)'
    else:
        LongText = NameDict[var][1]
       
    WriteNetCDF(OUTDIR+OUTFILE+'.nc',TrendGrids,LowBoundGrids,UpperBoundGrids,SE1sigGrids,MDI,CommentText,var,ParamDict[var][0],NameDict[var][0],str(sttrd)+' to '+str(edtrd)+LongText,ParamDict[var][2],lats,lons)

    print("And were done")
Example #13
def main(argv):
    # INPUT PARAMETERS AS STRINGS!!!!
    var = 'q'  # 'q','rh','e','td','tw','t','dpd'
    typee = 'LAND'  # 'LAND','RAW','OTHER', 'BLEND', 'BLENDSHIP', 'MARINE', 'MARINESHIP' # domain does not need to be set correctly!!!
    # can also be 'ERA5','ERA5LAND','ERA5MARINE','ERA5MARINEMASK','ERA5LANDMASK'
    region = 'All'  # All, Nhem, Trop, Shem, UK, China
    year1 = '1973'  # Start year of trend
    year2 = '2018'  # End year of trend

    try:
        opts, args = getopt.getopt(
            argv, "hi:", ["var=", "typee=", "region=", "year1=", "year2="])
    except getopt.GetoptError:
        print(
            'Usage (as strings) MakeGridTrends.py --var <q> --typee <IDPHA> --region <All> --year1 <1973> --year2 <2018>'
        )
        sys.exit(2)

    for opt, arg in opts:
        if opt == "--var":
            try:
                var = arg
            except:
                sys.exit("Failed: var not a string")
        elif opt == "--typee":
            try:
                typee = arg
            except:
                sys.exit("Failed: typee not a string")
        elif opt == "--region":
            try:
                region = arg
            except:
                sys.exit("Failed: region not a string")
        elif opt == "--year1":
            try:
                year1 = arg
            except:
                sys.exit("Failed: year1 not an integer")
        elif opt == "--year2":
            try:
                year2 = arg
            except:
                sys.exit("Failed: year2 not an integer")

    assert year1 != -999 and year2 != -999, "Year not specified."

    # *** CHOOSE WHETHER TO WORK WITH ANOMALIES OR ACTUALS - COULD ADD RENORMALISATION IF DESIRED ***
    isanom = True  # False for actual values, True for anomalies

    # tw_max_max and t_max_max can only be actual values
    if (var in ['tw_max_max', 't_max_max']):
        isanom = False

    print(var, typee, year1, year2, 'Fitting to Anomalies = ', isanom)

    # What domain?
    if (typee == 'MARINE') | (typee == 'MARINESHIP') | (
            typee == 'ERA5MARINE') | (typee == 'ERA5MARINEMASK'):
        domain = 'marine'
        version = mversion
    elif (typee == 'BLEND') | (typee == 'BLENDSHIP') | (typee == 'ERA5') | (
            typee == 'ERA5MASK'):
        domain = 'blend'
        version = bversion
    else:
        domain = 'land'
        version = lversion

    # Set up the trend years
    sttrd = int(year1)
    edtrd = int(year2)

    # Latitude and Longitude gridbox width and variable names
    latlg = 5.
    lonlg = 5.
    #latlg = 1.
    #lonlg = 1.
    LatInfo = ['latitude']
    LonInfo = ['longitude']

    #    # SEt up area average masks
    #    MaskDict = dict([('G',[-70.,70.]),
    #                 ('NH',[20.,70.]),
    #		 ('T',[-20.,20.]),
    #		 ('SH',[-70.,-20.])])

    # Time and dimension variables
    #    nyrs =     (edyr+1)-styr
    #    nmons =    nyrs*12
    nyrs = (edtrd + 1) - sttrd
    nmons = nyrs * 12
    stlt = -90 + (latlg / 2.)
    stln = -180 + (lonlg / 2.)
    nlats = int(180 / latlg)
    nlons = int(360 / lonlg)

    lats = (np.arange(nlats) * latlg) + stlt
    lons = (np.arange(nlons) * lonlg) + stln

    #    WORKINGDIR = '/scratch/hadkw/UPDATE20'+str(edyr)[2:4]
    WORKINGDIR = '/data/users/hadkw/WORKING_HADISDH_9120/UPDATE20' + str(
        edyr)[2:4]

    indir = WORKINGDIR + '/STATISTICS/GRIDS/'
    outdir = WORKINGDIR + '/STATISTICS/TIMESERIES/'

    # If we're working with ERA5 then set INDIR to OTHERDATA
    if (typee.find('ERA5') >= 0):

        indir = WORKINGDIR + '/OTHERDATA/'
        indirH = WORKINGDIR + '/STATISTICS/GRIDS/'

    # END OF EDITABLES**********************************************************

    # Dictionaries for filename and other things
    ParamDict = dict([
        ('q', ['q', 'q2m', 'g/kg']),
        ('rh', ['RH', 'rh2m', '%rh']),
        ('t', ['T', 't2m', 'deg C']),
        ('td', ['Td', 'td2m', 'deg C']),
        ('tw', ['Tw', 'tw2m', 'deg C']),
        ('e', ['e', 'e2m', 'hPa']),
        ('dpd', ['DPD', 'dpd2m', 'deg C']),
        ('evap', ['q', 'evap', 'cm w.e.']),
        ('tw_max', ['TwX', 'twmax', 'deg C']),  # 'twmx'
        ('tw_max_max', ['TwXX', 'twmaxx', 'deg C']),  # 'twmx'
        ('tw_min', ['TwN', 'twmin', 'deg C']),  # 'twmx'
        ('tw_max_90p', ['TwX90p', 'twx90', '%']),  # 'twx90'
        ('tw_mean_90p', ['TwM90p', 'twm90', '%']),  # 'twm90'
        ('tw_mean_10p', ['TwM10p', 'twm10', '%']),  # 'twm90'
        ('tw_min_10p', ['TwN10p', 'twn10', '%']),  # 'twm90'
        ('tw_max_ex25', ['TwX25', 'tw25', '%']),  # 'tw25'
        ('tw_max_ex27', ['TwX27', 'tw27', '%']),  # 'tw27'
        ('tw_max_ex29', ['TwX29', 'tw29', '%']),  # 'tw29'
        ('tw_max_ex31', ['TwX31', 'tw31', '%']),  # 'tw31'
        ('tw_max_ex33', ['TwX33', 'tw33', '%']),  # 'tw33'
        ('tw_max_ex35', ['TwX35', 'tw35', '%']),  # 'tw35'
        ('tw_max_sw25', ['TwXD25', 'twd25', 'deg C']),  # 'tw25'
        ('tw_max_sw27', ['TwXD27', 'twd27', 'deg C']),  # 'tw27'
        ('tw_max_sw29', ['TwXD29', 'twd29', 'deg C']),  # 'tw29'
        ('tw_max_sw31', ['TwXD31', 'twd31', 'deg C']),  # 'tw31'
        ('tw_max_sw33', ['TwXD33', 'twd33', 'deg C']),  # 'tw33'
        ('tw_max_sw35', ['TwXD35', 'twd35', 'deg C']),  # 'tw35'
        ('t_max', ['TX', 'tmax', 'deg C']),  # 'twmx'
        ('t_max_max', ['TXX', 'tmaxx', 'deg C']),  # 'twmx'
        ('t_min', ['TN', 'tmin', 'deg C']),  # 'twmx'
        ('t_max_90p', ['TX90p', 'tx90', '%']),  # 'twx90'
        ('t_mean_90p', ['TM90p', 'tm90', '%']),  # 'twm90'
        ('t_mean_10p', ['TM10p', 'tm10', '%']),  # 'twm90'
        ('t_min_10p', ['TN10p', 'tn10', '%']),  # 'twm90'
        ('t_max_ex25', ['TX25', 't25', '%']),  # 'tw25'
        ('t_max_ex30', ['TX30', 't30', '%']),  # 'tw27'
        ('t_max_ex35', ['TX35', 't35', '%']),  # 'tw29'
        ('t_max_ex40', ['TX40', 't40', '%']),  # 'tw31'
        ('t_max_ex45', ['TX45', 't45', '%']),  # 'tw33'
        ('t_max_ex50', ['TX50', 't50', '%']),  # 'tw35'
        ('t_all_ex18', ['TN18', 't18', '%'])
    ])  # 'tw35'

    # Dictionary for looking up variable standard (not actually always standard!!!) names for netCDF output of variables
    NameDict = dict([
        ('q', ['specific_humidity', ' specific humidity']),
        ('rh', ['relative_humidity', ' relative humidity']),
        ('e', ['vapour_pressure', ' vapour pressure']),
        ('tw', ['wetbulb_temperature', ' wetbulb temperature']),
        ('t', ['drybulb_temperature', ' dry bulb temperature']),
        ('td', ['dewpoint_temperature', ' dew point temperature']),
        ('dpd', ['dewpoint depression', ' dew point depression']),
        ('evap', ['evaporation', ' evaporation']),
        ('tw_max', ['TwX',
                    ' monthly maximum 2m wetbulb temperature']),  # 'twmx'
        ('tw_max_max',
         ['TwXX',
          ' maximum monthly maximum 2m wetbulb temperature']),  # 'twmx'
        ('tw_min', ['TwN',
                    ' monthly minimum 2m wetbulb temperature']),  # 'twmx'
        ('tw_max_90p', [
            'TwX90p',
            ' percentage of days per month maximum > 90 percentile maximum 2m wetbulb temperature'
        ]),  # 'twx90'
        ('tw_mean_90p', [
            'TwM90p',
            ' percentage of days per month mean > 90 percentile mean 2m wetbulb temperature'
        ]),  # 'twm90'
        ('tw_mean_10p', [
            'TwM10p',
            ' percentage of days per month mean < 10 percentile mean 2m wetbulb temperature'
        ]),  # 'twm90'
        ('tw_min_10p', [
            'TwN10p',
            ' percentage of days per month minimum < 10 percentile mean 2m wetbulb temperature'
        ]),  # 'twm90'
        ('tw_max_ex25', [
            'TwX25',
            ' percentage of days per month >= 25 deg 2m maximum wetbulb temperature'
        ]),  # 'tw25'
        ('tw_max_ex27', [
            'TwX27',
            ' percentage of days per month >= 27 deg 2m maximum wetbulb temperature'
        ]),  # 'tw27'
        ('tw_max_ex29', [
            'TwX29',
            ' percentage of days per month >= 29 deg 2m maximum wetbulb temperature'
        ]),  # 'tw29'
        ('tw_max_ex31', [
            'TwX31',
            ' percentage of days per month >= 31 deg 2m maximum wetbulb temperature'
        ]),  # 'tw31'
        ('tw_max_ex33', [
            'TwX33',
            ' percentage of days per month >= 33 deg 2m maximum wetbulb temperature'
        ]),  # 'tw33'
        ('tw_max_ex35', [
            'TwX35',
            ' percentage of days per month >= 35 deg 2m maximum wetbulb temperature'
        ]),  # 'tw35'
        ('tw_max_sw25', [
            'TwXD25',
            ' degrees per month >= 25 deg 2m maximum wetbulb temperature'
        ]),  # 'tw25'
        ('tw_max_sw27', [
            'TwXD27',
            ' degrees per month >= 27 deg 2m maximum wetbulb temperature'
        ]),  # 'tw27'
        ('tw_max_sw29', [
            'TwXD29',
            ' degrees per month >= 29 deg 2m maximum wetbulb temperature'
        ]),  # 'tw29'
        ('tw_max_sw31', [
            'TwXD31',
            ' degrees per month >= 31 deg 2m maximum wetbulb temperature'
        ]),  # 'tw31'
        ('tw_max_sw33', [
            'TwXD33',
            ' degrees per month >= 33 deg 2m maximum wetbulb temperature'
        ]),  # 'tw33'
        ('tw_max_sw35', [
            'TwXD35',
            ' degrees per month >= 35 deg 2m maximum wetbulb temperature'
        ]),  # 'tw35'
        ('t_max', ['TX', ' monthly maximum 2m drybulb temperature']),  # 'twmx'
        ('t_max_max',
         ['TXX', ' maximum monthly maximum 2m drybulb temperature']),  # 'twmx'
        ('t_min', ['TN', ' monthly minimum 2m drybulb temperature']),  # 'twmx'
        ('t_max_90p', [
            'TX90p',
            ' percentage of days per month maximum > 90 percentile maximum 2m drybulb temperature'
        ]),  # 'twx90'
        ('t_mean_90p', [
            'TM90p',
            ' percentage of days per month mean > 90 percentile mean 2m drybulb temperature'
        ]),  # 'twm90'
        ('t_mean_10p', [
            'TM10p',
            ' percentage of days per month mean < 10 percentile mean 2m drybulb temperature'
        ]),  # 'twm90'
        ('t_min_10p', [
            'TN10p',
            ' percentage of days per month minimum < 10 percentile mean 2m drybulb temperature'
        ]),  # 'twm90'
        ('t_max_ex25', [
            'TX25',
            ' percentage of days per month >= 25 deg 2m maximum drybulb temperature'
        ]),  # 'tw25'
        ('t_max_ex30', [
            'TX30',
            ' percentage of days per month >= 30 deg 2m maximum drybulb temperature'
        ]),  # 'tw27'
        ('t_max_ex35', [
            'TX35',
            ' percentage of days per month >= 35 deg 2m maximum drybulb temperature'
        ]),  # 'tw29'
        ('t_max_ex40', [
            'TX40',
            ' percentage of days per month >= 40 deg 2m maximum drybulb temperature'
        ]),  # 'tw31'
        ('t_max_ex45', [
            'TX45',
            ' percentage of days per month >= 45 deg 2m maximum drybulb temperature'
        ]),  # 'tw33'
        ('t_max_ex50', [
            'TX50',
            ' percentage of days per month >= 50 deg 2m maximum drybulb temperature'
        ]),  # 'tw35'
        ('t_all_ex18', [
            'TN18',
            ' percentage of days per month >= 18 deg 2m minimum drybulb temperature'
        ])
    ])  # 'tw35'

    #    unitees = ParamDict[param][2]
    #    varname = param
    unitees = ParamDict[var][2]
    varname = var

    if domain == 'land':
        DatTyp = 'IDPHA'
        if (var == 'dpd'):
            DatTyp = 'PHA'
        if (var == 'td'):
            DatTyp = 'PHADPD'
        fileblurb = 'FLATgridHOM5by5'
#        fileblurb = 'FLATgrid'+DatTyp+'5by5'
    elif domain == 'marine':
        if (typee == 'MARINE'):
            fileblurb = 'BClocal5by5both'
        elif (typee == 'MARINESHIP') | (typee == 'ERA5MARINEMASK') | (
                typee == 'ERA5MARINE'):
            fileblurb = 'BClocalSHIP5by5both'
    elif domain == 'blend':
        DatTyp = 'IDPHA'
        if (var == 'dpd'):
            DatTyp = 'PHA'
        if (var == 'td'):
            DatTyp = 'PHADPD'

        if (typee == 'BLEND'):
            fileblurb = 'FLATgridHOMBClocalboth5by5'
#            fileblurb = 'FLATgrid'+DatTyp+'BClocalboth5by5'
        elif (typee == 'BLENDSHIP') | (typee == 'ERA5MASK') | (typee == 'ERA5'):
            fileblurb = 'FLATgridHOMBClocalSHIPboth5by5'
#            fileblurb = 'FLATgrid'+DatTyp+'BClocalSHIPboth5by5'

    inlandcover = WORKINGDIR + '/OTHERDATA/HadCRUT.4.3.0.0.land_fraction.nc'

    infile = 'HadISDH.' + domain + ParamDict[var][
        0] + '.' + version + '_' + fileblurb + '_' + climBIT
    #    infile = 'HadISDH.'+domain+ParamDict[var][0]+'.'+version+'_'+fileblurb+'_'+climBIT+'_'+thenmon+thenyear+'_cf'
    outfile = 'HadISDH.' + domain + ParamDict[var][
        0] + '.' + version + '_' + fileblurb + '_' + climBIT + '_areaTS_' + str(
            sttrd) + str(edtrd)  #70S-70N
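    # e.g. with hypothetical values var = 'q' (assuming ParamDict['q'][0] == 'q'),
    # version = '4.2.0.2019f' and climBIT = 'anoms8110', infile would be
    # 'HadISDH.landq.4.2.0.2019f_FLATgridHOM5by5_anoms8110'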

    if (typee.find('ERA5') >= 0):

        infile = var + '2m_monthly_5by5_ERA5_1979' + str(edyr)
        outfile = var + '2m_monthly_5by5_ERA5_' + climBIT + '_areaTS_' + str(
            sttrd) + str(edtrd)  #70S-70N

        infileH = 'HadISDH.' + domain + ParamDict[var][
            0] + '.' + version + '_' + fileblurb + '_' + climBIT
        outfileH = 'HadISDH.' + domain + ParamDict[var][
            0] + '.' + version + '_' + fileblurb + '_' + climBIT + '_areaTS_' + str(
                sttrd) + str(edtrd)  #70S-70N

    if (isanom == False):
        outfile = outfile + '_ABS'

    # Add name of region if it's not 'All' ('All' includes G, NH, Tr and SH)
    if (region != 'All'):
        outfile = outfile + '_' + region

    # Get Data
    styr = startyr
    if (typee.find('ERA') >= 0):
        styr = 1979

        if (isanom == True):
            if (domain == 'land'):
                ReadInfo = [var + '2m_anoms_land', 'time']
                outfile = outfile + '_land'
            if (domain == 'marine'):
                ReadInfo = [var + '2m_anoms_ocean', 'time']
                outfile = outfile + '_marine'
            if (domain == 'blend'):
                ReadInfo = [var + '2m_anoms', 'time']

            ReadInfoH = [var + '_anoms', 'time']

        else:

            ReadInfo = [var + '2m', 'time']
            ReadInfoH = [var + '_abs', 'time']

    else:

        if (isanom == True):
            ReadInfo = [var + '_anoms', 'time']

        else:
            ReadInfo = [var + '_abs', 'time']

    print('Reading in the data for :', typee)
    TmpVals, Latitudes, Longitudes = GetGrid4(indir + infile + '.nc', ReadInfo,
                                              LatInfo, LonInfo)

    # Separate out data and times
    TheData = TmpVals[0]
    print(np.shape(TheData))
    Times = TmpVals[1]
    TmpVals = []

    # Check the mdis - IDL-written netCDF differs from Python output
    TheData = TheData.astype(float)
    bads = np.where(TheData <= -999)
    #    bads = np.where(TheData < -10000)
    if (len(bads[0]) > 0):
        TheData[bads] = mdi

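    # For the extremes indices (any var outside the base humidity/temperature set), also read and apply the HQ score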
    if (var not in ['q', 'e', 'rh', 't', 'tw', 'td', 'dpd', 'evap']):
        print('Reading in the data for :', var, typee)
        ReadInfo = ['HQscore']
        TheHQData, Latitudes, Longitudes = GetGrid4(indir + infile + '.nc',
                                                    ReadInfo, LatInfo, LonInfo)
        # Check that data have been read in
        print(np.shape(TheHQData))
        #pdb.set_trace()

        # Check the mdis - IDL-written netCDF differs from Python output
        TheHQData = TheHQData.astype(float)
        bads = np.where(TheHQData <= -999)
        if (len(bads[0]) > 0):
            TheHQData[bads] = mdi

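        # Blank out gridboxes with an HQ score of 10 or more (quality screening)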
        TheData[np.where(TheHQData >= 10)] = mdi
        print(np.shape(TheData))

    # If we're masking ERA then read in HadISDH
    if (typee.find('MASK') >= 0):

        print('Masking ERA5')
        outfile = outfile + '_mask'
        TmpValsH, LatitudesH, LongitudesH = GetGrid4(indirH + infileH + '.nc',
                                                     ReadInfoH, LatInfo,
                                                     LonInfo)

        # Separate out data and times
        TheDataH = TmpValsH[0]
        TimesH = TmpValsH[1]
        TmpValsH = []

        # Check the mdis - IDL-written netCDF differs from Python output
        bads = np.where(TheDataH < -10000)
        if (len(bads[0]) > 0):
            TheDataH[bads] = mdi

        # Make HadISDH start in the same years
        TheDataH = TheDataH[(styr - styrh) * 12:((edyr - styrh) + 1) *
                            12, :, :]
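        # e.g. with styr = 1979 and a hypothetical styrh = 1973, this drops the first 72 months of HadISDH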

        # Now mask the ERA data with HadISDH missing data
        TheData[np.where(TheDataH == mdi)] = mdi


#************************************** NEW
# make spatial area masks
    global_mask = np.zeros((nlats, nlons), dtype=float)
    global_mask.fill(mdi)

    Nreg = len(MaskList[region])
    print(region, Nreg)
    #    pdb.set_trace()

    # lists of masks and time series to append to
    reg_masks = []
    avg_ts = np.empty((len(MaskList[region]), nmons))
    #    pdb.set_trace()

    for rr in range(Nreg):

        reg_masks.append(np.copy(global_mask))

        for ln in range(nlons):
            for lt in range(nlats):
                if (lons[ln] > MaskDict[MaskList[region][rr]][2]) & (lons[ln] < MaskDict[MaskList[region][rr]][3]) & \
                   (lats[lt] > MaskDict[MaskList[region][rr]][0]) & (lats[lt] < MaskDict[MaskList[region][rr]][1]):
                    reg_masks[rr][lt, ln] = 1.

        reg_masks[rr] = np.repeat(reg_masks[rr][np.newaxis, :, :],
                                  nmons,
                                  axis=0)
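        # reg_masks[rr] is now (nmons, nlats, nlons), matching TheData for the area averaging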

        print(np.shape(reg_masks[rr]))
        avg_ts[rr, :] = AreaMean(TheData, lats, reg_masks[rr])
        print(len(avg_ts[rr, :]), np.max(avg_ts[rr, :]), np.min(avg_ts[rr, :]))
        #        pdb.set_trace()

        # Flag any series with missing months - at these large scales there should be none
        if (len(np.where(avg_ts[rr, :] <= mdi)[0]) > 0):

            print('Missing months for ' + MaskList[region][rr] + ' average: ',
                  len(np.where(avg_ts[rr, :] <= mdi)[0]))
            pdb.set_trace()

    # save to file as netCDF and .dat

    if (isanom == True):
        LongText = NameDict[var][
            1] + ' anomaly (' + climST + ' to ' + climED + ' base period)'
    else:
        LongText = NameDict[var][1]

    WriteNetCDF(outdir + outfile, avg_ts, Times, styr, edyr, climST, climED,
                ParamDict[var][0], NameDict[var][0], LongText, unitees,
                MaskList[region])
    WriteText(outdir + outfile, avg_ts, MaskList[region], Times, styr, edyr)

    print('And we are done!')
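
# A minimal sketch (an assumption - the real AreaMean is defined elsewhere) of
# a cosine(latitude)-weighted area mean matching the call signature
# AreaMean(TheData, lats, reg_masks[rr]) used in the loop above:
import numpy as np  # already imported in these examples; repeated for self-containment

def AreaMeanSketch(DataArr, LatArr, MaskArr, TheMDI=-1e30):
    ''' DataArr - (ntims, nlats, nlons) field with TheMDI where missing
        LatArr  - (nlats,) gridbox centre latitudes in degrees
        MaskArr - (ntims, nlats, nlons) mask, 1. where a gridbox is included
        Returns an (ntims,) series of weighted means, TheMDI where no data '''
    ntims, nlats, nlons = np.shape(DataArr)
    # 2D cos(latitude) weight field, identical for every timestep
    Weights = np.repeat(
        np.cos(np.radians(LatArr))[:, np.newaxis], nlons, axis=1)
    Series = np.full(ntims, TheMDI)
    for tt in range(ntims):
        Good = np.where((MaskArr[tt] == 1.) & (DataArr[tt] > TheMDI))
        if (len(Good[0]) > 0):
            Series[tt] = np.average(DataArr[tt][Good], weights=Weights[Good])
    return Series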
Example #14
0
def main():
    # Set up this run's files, directories and dates/clims: years, months, ship or all
    # Read in
    # Variables - short names
    # Quantities to be blended
    var_loop_lower = ['t', 'td', 'q', 'e', 'rh', 'tw', 'dpd']
    #    homogtype = ['IDPHA','PHADPD','IDPHA','IDPHA','IDPHA','IDPHA','PHA']
    var_loop = ['T', 'Td', 'q', 'e', 'RH', 'Tw', 'DPD']
    var_loop_full = [
        'Air Temperature', 'Dew Point Temperature', 'Specific Humidity',
        'Vapor Pressure', 'Relative Humidity', 'Wet Bulb Temperature',
        'Dew Point Depression'
    ]  # This is the ReadInfo

    # INput land variables
    InLVarList = [
        'mean_n_stations', 'actual_n_stations', '_anoms', '_abs', '_clims',
        '_std', '_obserr', '_samplingerr', '_combinederr'
    ]
    # start point and end point of Land vars with variable prefixes
    InLPointers = [2, 9]
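    # i.e. InLVarList[2:9] ('_anoms' through '_combinederr') take the variable prefix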

    # INput marine variables
    InMVarList = [
        '_n_grids', '_n_obs', '_clims_n_grids', '_clims_n_obs',
        '_clim_std_n_grids', '_clim_std_n_obs', '_uSAMP_n_grids',
        '_usbarSQ_n_grids', '_anoms', '_abs', '_clims', '_clim_std',
        '_anoms_uOBS', '_abs_uOBS', '_anoms_uSAMP', '_abs_uSAMP',
        '_anoms_uFULL', '_abs_uFULL'
    ]
    # start point and end point of Marine vars with variable prefixes
    InMPointers = [0, 18]

    # Shared output quantities to be blended, land only and marine only
    OutBVarList = [
        '_anoms', '_abs', '_clims', '_anoms_obserr', '_abs_obserr',
        '_anoms_samplingerr', '_abs_samplingerr', '_anoms_combinederr',
        '_abs_combinederr'
    ]
    OutLVarList = [
        'land_mean_n_stations', 'land_actual_n_stations', '_land_std'
    ]
    OutMVarList = [
        'marine_n_grids', 'marine_n_obs', 'marine_mean_pseudo_stations',
        'marine_actual_pseudo_stations', 'marine_clim_n_obs',
        'marine_clim_n_grids', 'marine_climstd_n_obs',
        'marine_climstd_n_grids', '_marine_clims_std'
    ]

    # Output variable list elements beginning with '_' have var_loop_lower[v] prefixed
    OutBVarLong = [
        'monthly mean anomalies', 'monthly mean actuals',
        'monthly mean climatologies',
        'monthly mean anomaly observation uncertainty (2 sigma)',
        'monthly mean actual observation uncertainty (2 sigma)',
        'monthly mean anomaly sampling uncertainty',
        'monthly mean actual sampling uncertainty',
        'monthly mean anomaly full uncertainty',
        'monthly mean actual full uncertainty'
    ]
    OutLVarLong = [
        'mean number of stations over land',
        'actual number of stations over land',
        'land monthly mean standard deviations'
    ]
    OutMVarLong = [
        'number of 1x1 daily grids with data over ocean',
        'number of marine observations',
        'mean number of pseudo stations over ocean',
        'actual number of pseudo stations over ocean',
        'number of marine observations in the climatology',
        'number of 1x1 daily grids in the climatology over ocean',
        'number of marine observations in the climatological standard deviation',
        'number of 1x1 daily grids in the climatological standard deviation over ocean',
        'marine monthly mean climatological standard deviation'
    ]

    OutBVarStandard = [
        '_anoms', '_abs', '_clims', '_anoms_obserr', '_abs_obserr',
        '_anoms_samplingerr', '_abs_samplingerr', '_anoms_combinederr',
        '_abs_combinederr'
    ]
    OutLVarStandard = [
        'land_mean_n_stations', 'land_actual_n_stations', '_land_std'
    ]
    OutMVarStandard = [
        'marine_n_grids', 'marine_n_obs', 'marine_mean_pseudo_stations',
        'marine_actual_pseudo_stations', 'marine_clim_n_obs',
        'marine_clim_n_grids', 'marine_climstd_n_obs',
        'marine_climstd_n_grids', '_marine_clims_std'
    ]

    units_loop = [
        'degrees C', 'degrees C', 'g/kg', 'hPa', '%rh', 'degrees C',
        'degrees C', 'standard'
    ]

    # Missing Data Indicator
    MDI = -1e30

    # Input and Output directory:
    WorkingDir = '/scratch/hadkw/UPDATE' + str(EdYr)
    #    WorkingDir = '/data/users/hadkw/WORKING_HADISDH/UPDATE'+str(EdYr)
    DataDir = '/STATISTICS/GRIDS/'
    OtherDataDir = '/OTHERDATA/'

    # Input Files
    InFilLandBit1 = WorkingDir + DataDir + 'HadISDH.land'  # append to var+InFilLandBit2+homogtype+InFilBit3
    InFilLandBit2 = '.' + LandVersion + '_FLATgridHOM5by5_anoms' + ClimPeriod + '.nc'

    InFilMarineBit1 = WorkingDir + DataDir + 'HadISDH.marine'  # append to var+InFilMarineBit2
    InFilMarineBit2 = '.' + MarineVersion + '_BClocal' + PT + '5by5both_anoms' + ClimPeriod + '.nc'

    # Output Files
    OutFilBit1 = WorkingDir + DataDir + 'HadISDH.blend'  # append to var+OutFilBit2+homogtype+InFilBit3
    OutFilBit2 = '.' + BlendVersion + '_FLATgridHOMBClocal' + PT + 'both5by5_anoms' + ClimPeriod + '.nc'

    # Set up necessary dates - output times are just counts of months from 0 to Ntims-1
    Ntims = ((EdYr + 1) - StYr) * 12
    TimesArr = np.arange(Ntims)  # months since January 1973
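    # e.g. with hypothetical StYr = 1973 and EdYr = 2019, Ntims = (2020 - 1973) * 12 = 564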

    # Read in the pct land data
    Filee = WorkingDir + OtherDataDir + 'HadCRUT.4.3.0.0.land_fraction.nc'
    LatInfo = ['latitude']
    LonInfo = ['longitude']
    PctLand, LatList, LonList = GetGrid(Filee, ['land_area_fraction'], LatInfo,
                                        LonInfo)

    # Clip PctLand to a minimum of 0.25 and a maximum of 0.75
    # We only use it when both land and marine are present so it doesn't matter in land only or marine only cases
    PctLand[np.where(PctLand < 0.25)] = 0.25
    PctLand[np.where(PctLand > 0.75)] = 0.75
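    # (equivalent to: PctLand = np.clip(PctLand, 0.25, 0.75))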
    # Now make PctLand have a time dimension the same length as the data
    # PctLand is already np.shape(1,26,72) so no need for np.newaxis in the first dimension
    FullPctLand = np.repeat(PctLand[:, :, :], Ntims, axis=0)
    ##Test It
    #pdb.set_trace()

    #########
    # Loop through each variable to create the blended file

    # Which variable to loop through?
    for v, var in enumerate(var_loop_lower):

        ## If I haven't finished the land Td yet, skip it
        #if (var == 'td'):
        #    continue

        print('Working on: ', var, v)

        # Loop through quantities to be blended and build list
        BlendedList = []
        LandList = []
        LandDataList = []
        MarineList = []
        MarineDataList = []

        # For this var read in all land elements
        Filee = InFilLandBit1 + var_loop[v] + InFilLandBit2
        LatInfo = ['latitude']
        LonInfo = ['longitude']
        TmpVarList = []
        #        pdb.set_trace()
        TmpVarList = InLVarList.copy()
        TmpVarList[InLPointers[0]:InLPointers[1]] = [
            var + i for i in TmpVarList[InLPointers[0]:InLPointers[1]]
        ]
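        # e.g. for var = 'q' this turns '_anoms' into 'q_anoms', '_abs' into 'q_abs', etc.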
        TmpDataList, LatList, LonList = GetGrid4(Filee, TmpVarList, LatInfo,
                                                 LonInfo)
        #        pdb.set_trace()
        # This comes out as:
        # TmpDataList: a list of np arrays (times, lats [87.5N to 87.5S], lons [-177.5W to 177.5E])
        # LatList: an NLats np array of lat centres (87.5N to 87.5S)
        # LonList: an NLons np array of lon centres (-177.5W to 177.5E)

        # Get lat and lon counts
        NLats = len(LatList)
        #        print(LatList)
        NLons = len(LonList)

        # change MDI for counts to -1
        TmpDataList[0][np.where(TmpDataList[0] <= 0)] = -1
        TmpDataList[1][np.where(TmpDataList[1] <= 0)] = -1

        # Fill in lists
        LandList.append(np.copy(TmpDataList[0]))  # mean_n_stations
        LandList.append(np.copy(TmpDataList[1]))  # actual_n_stations
        LandList.append(np.copy(TmpDataList[5]))  # _std
        LandDataList.append(np.copy(TmpDataList[2]))  # _anoms
        LandDataList.append(np.copy(TmpDataList[3]))  # _abs
        LandDataList.append(np.copy(TmpDataList[4]))  # _clims
        LandDataList.append(np.copy(TmpDataList[6]))  # _obserr
        LandDataList.append(np.copy(TmpDataList[6]))  # _abs_obserr
        LandDataList.append(np.copy(TmpDataList[7]))  # _samplingerr
        LandDataList.append(np.copy(TmpDataList[7]))  # _abs_samplingerr
        LandDataList.append(np.copy(TmpDataList[8]))  # _combinederr
        LandDataList.append(np.copy(TmpDataList[8]))  # _abs_combinederr

        # For this var read in all marine elements
        Filee = InFilMarineBit1 + var_loop[v] + InFilMarineBit2
        LatInfo = ['latitude']
        LonInfo = ['longitude']
        TmpVarList = []
        TmpVarList = InMVarList.copy()
        #        pdb.set_trace()
        TmpVarList[InMPointers[0]:InMPointers[1]] = [
            var + i for i in TmpVarList[InMPointers[0]:InMPointers[1]]
        ]
        #        print(TmpVarList)
        TmpDataList, LatList, LonList = GetGrid4(Filee, TmpVarList, LatInfo,
                                                 LonInfo)
        #        pdb.set_trace()
        # This comes out as:
        # TmpDataList: a list of np arrays (times, lats [87.5N to 87.5S], lons [-177.5W to 177.5E])
        # LatList: an NLats np array of lat centres (87.5N to 87.5S)
        # LonList: an NLons np array of lon centres (-177.5W to 177.5E)

        # Fill in lists

        # NOT SURE THIS FLIPPING IS THE CASE ANYMORE
        #	# NOTE: ALL MARINE ARRAYS NEED TO HAVE THEIR LATITUDES FLIPPED!!!
        #        LatList = LatList[::-1]
        #        pdb.set_trace()
        #        TmpDataList[0] = TmpDataList[0][:,::-1,:]
        MarineList.append(np.copy(TmpDataList[0]))  # _n_grids
        #        TmpDataList[1] = TmpDataList[1][:,::-1,:]
        MarineList.append(np.copy(TmpDataList[1]))  # _n_obs
        #        TmpDataList[7] = TmpDataList[7][::-1,:]
        MarineList.append(np.copy(TmpDataList[7]))  # _mean_pseudo_stations
        #        TmpDataList[6] = TmpDataList[6][:,::-1,:]
        MarineList.append(np.copy(TmpDataList[6]))  # _actual_pseudo_stations
        #        TmpDataList[2] = TmpDataList[2][:,::-1,:]
        MarineList.append(np.copy(TmpDataList[2]))  # _clim_n_grids
        #        TmpDataList[3] = TmpDataList[3][:,::-1,:]
        MarineList.append(np.copy(TmpDataList[3]))  # _clim_n_obs
        #        TmpDataList[4] = TmpDataList[4][:,::-1,:]
        MarineList.append(np.copy(TmpDataList[4]))  # _climstd_n_grids
        #        TmpDataList[5] = TmpDataList[5][:,::-1,:]
        MarineList.append(np.copy(TmpDataList[5]))  # _climstd_n_obs
        #        TmpDataList[11] = TmpDataList[11][:,::-1,:]
        MarineList.append(np.copy(TmpDataList[11]))  # _climstd
        #        TmpDataList[8] = TmpDataList[8][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[8]))  # _anoms
        #        TmpDataList[9] = TmpDataList[9][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[9]))  # _abs
        #        TmpDataList[10] = TmpDataList[10][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[10]))  # _clims
        #        TmpDataList[12] = TmpDataList[12][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[12]))  # _obserr
        #        TmpDataList[13] = TmpDataList[13][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[13]))  # _abs_obserr
        #        TmpDataList[14] = TmpDataList[14][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[14]))  # _samplingerr
        #        TmpDataList[15] = TmpDataList[15][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[15]))  # _abs_samplingerr
        #        TmpDataList[16] = TmpDataList[16][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[16]))  # _combinederr
        #        TmpDataList[17] = TmpDataList[17][:,::-1,:]
        MarineDataList.append(np.copy(TmpDataList[17]))  # _abs_combinederr

        # Now loop through the Blended quantities and blend
        for b, bvar in enumerate(OutBVarList):

            print('Working on: ', bvar)

            # Is this an uncertainty quantity? Indices 3 onwards of OutBVarList
            # ('_anoms_obserr' and later) are uncertainties, blended in
            # quadrature (see the hedged BlendItSketch at the end of this example)
            if (b >= 3):
                IsUnc = True
            else:
                IsUnc = False

            BlendedField = BlendIt(LandDataList[b], MarineDataList[b],
                                   FullPctLand, MDI, IsUnc)
            BlendedList.append(BlendedField)

##############
# Write out combined file - uncertainties are all 2 sigma!

        Write_NetCDF_All(OutFilBit1 + var_loop[v] + OutFilBit2, [
            OutLVarList[0], OutLVarList[1], var + OutLVarList[2]
        ], [
            OutMVarList[0], OutMVarList[1], OutMVarList[2], OutMVarList[3],
            OutMVarList[4], OutMVarList[5], OutMVarList[6], OutMVarList[7],
            var + OutMVarList[8]
        ], [var + i for i in OutBVarList], LandList, MarineList, BlendedList, [
            OutLVarLong[0], OutLVarLong[1], var_loop_full[v] + OutLVarLong[2]
        ], [
            OutMVarLong[0], OutMVarLong[1], OutMVarLong[2], OutMVarLong[3],
            OutMVarLong[4], OutMVarLong[5], OutMVarLong[6], OutMVarLong[7],
            var_loop_full[v] + OutMVarLong[8]
        ], [var_loop_full[v] + ' ' + i for i in OutBVarLong], [
            OutLVarStandard[0], OutLVarStandard[1],
            var_loop_full[v] + OutLVarStandard[2]
        ], [
            OutMVarStandard[0], OutMVarStandard[1], OutMVarStandard[2],
            OutMVarStandard[3], OutMVarStandard[4], OutMVarStandard[5],
            OutMVarStandard[6], OutMVarStandard[7],
            var_loop_full[v] + OutMVarStandard[8]
        ], [var_loop_full[v] + ' ' + i for i in OutBVarStandard], LatList,
                         LonList, StYr, EdYr, RefPeriod, units_loop[v], MDI)


#########

    print('And we are done!')
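
# A minimal sketch (an assumption - the real BlendIt is defined elsewhere) of
# the land-fraction-weighted blend called above: where land and marine both
# have data, weight them by PctLand; otherwise take whichever is present.
# Uncertainty fields (IsUnc=True) are combined in quadrature instead.
import numpy as np  # already imported in these examples; repeated for self-containment

def BlendItSketch(LandData, MarineData, PctLand, TheMDI, IsUnc=False):
    ''' All arrays are (ntims, nlats, nlons); PctLand is the land fraction
        (pre-clipped to 0.25-0.75 above); returns the blended field '''
    Blended = np.full(np.shape(LandData), TheMDI)
    LandGood = (LandData != TheMDI)
    MarineGood = (MarineData != TheMDI)
    Both = LandGood & MarineGood
    if IsUnc:
        # quadrature combination of the land-fraction-weighted uncertainties
        Blended[Both] = np.sqrt(
            (PctLand[Both] * LandData[Both])**2 +
            ((1. - PctLand[Both]) * MarineData[Both])**2)
    else:
        Blended[Both] = (PctLand[Both] * LandData[Both] +
                         (1. - PctLand[Both]) * MarineData[Both])
    # fall back to the single available component where only one exists
    Blended[LandGood & ~MarineGood] = LandData[LandGood & ~MarineGood]
    Blended[~LandGood & MarineGood] = MarineData[~LandGood & MarineGood]
    return Blended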