Example #1
import datetime as dt
from datetime import datetime
import pytz

outputdir='/net/data5/jmanning/modvsobs/'
mode='_post2007'
outputfile1='ProcessedSites'+mode+'.csv'
outputfile2='Depthinformation'+mode+'.csv'
starttime_mod=dt.datetime(1880,1,1,0,0,0,0,pytz.UTC)
endtime_mod=dt.datetime(2010,12,31,0,0,0,0,pytz.UTC)
if mode=='_pre2008':
    endtime_mod=dt.datetime(2008,1,1,0,0,0,0,pytz.UTC)
if mode=='_post2007':
    starttime_mod=dt.datetime(2008,1,1,0,0,0,0,pytz.UTC)

for k in range(len(site)):
#################read-in obs data##################################
        print site[k]
        #[lati,loni,on,bd]=getemolt_latlon(site[k]) # extracts lat/lon based on site code
        [lati,loni,bd]=getemolt_latlon(site[k]) # extracts lat/lon based on site code
        #[lati,loni]=dm2dd(lati,loni)#converts decimal-minutes to decimal degrees
        if surf_or_bott=='bott':
            #dept=[bd[0]-0.25*bd[0],bd[0]+.25*bd[0]]
            dept=[bd-0.25*bd,bd+.25*bd]
        else:
            dept=[0,5]
        #(obs_dt,obs_temp,obs_salt,distinct_dep)=getobs_tempsalt(site[k], input_time=[dt.datetime(1880,1,1),dt.datetime(2010,12,31)], dep=dept)
        (obs_dt,obs_temp,distinct_dep)=getobs_tempsalt(site[k], input_time=[starttime_mod,endtime_mod], dep=dept)
        #depthinfor.append(site[k]+','+str(bd[0])+','+str(distinct_dep)+'\n') # note that this distinct depth is actually the overall mean
        depthinfor.append(site[k]+','+str(bd)+','+str(distinct_dep)+'\n') # note that this distinct depth is actually the overall mean
        obs_dtindex=[]
        if intend_to=='temp':            
            for kk in range(len(obs_temp)):
                #obs_temp[kk]=f2c(obs_temp[kk]) # converts to Celsius
                obs_dtindex.append(datetime.strptime(str(obs_dt[kk])[:10],'%Y-%m-%d'))
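            # Hedged continuation (the snippet is truncated here; pandas is an
            # assumption borrowed from Example #3): the day-level index built
            # above is presumably used to bin the raw observations into daily
            # means, e.g.:
            #   import pandas as pd
            #   obs_daily = pd.Series(obs_temp, index=obs_dtindex).groupby(level=0).mean()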
Example #2
"""
Compares eMOLT with FVCOM bottom temp
@author: jmanning, rsignell, yacheng
"""
import matplotlib.pyplot as plt
import netCDF4
from getdata import getemolt_latlon, getemolt_temp
from conversions import dm2dd, f2c
from utilities import nearxy

site = 'BN01'
[lati, loni, on] = getemolt_latlon(site)  # extracts lat/lon based on site code
[lati, loni] = dm2dd(lati, loni)  #converts decimal-minutes to decimal degrees
[obs_dt, obs_temp] = getemolt_temp(site)  # extracts time series
for kk in range(len(obs_temp)):
    obs_temp[kk] = f2c(obs_temp[kk])  # converts to Celsius

# now get the model output
urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/fvcom/hindcasts/30yr_gom3'
nc = netCDF4.Dataset(urlfvcom)
nc.variables
lat = nc.variables['lat'][:]
lon = nc.variables['lon'][:]
times = nc.variables['time']
jd = netCDF4.num2date(times[:], times.units)
vname = 'temp'
var = nc.variables[vname]

# find nearest point to desired location and time
inode = nearxy(lon, lat, loni, lati)
index = netCDF4.date2index([obs_dt[0], obs_dt[-1]], times, select='nearest')  # find the model time index at start & end of obs
Example #3
import datetime as dt
import pandas as pd
import matplotlib.pyplot as plt

layer = 44
intend_to = 'temp'  # note: intend_to can be 'temp' or 'salinity'
vname = intend_to
surf_or_bott = 'bott'
degree = [0.8, 0.9, 1.0]
month = range(1, 13)
f = open('distance_affect.csv', 'w')
for k in range(len(site)):
    print site[k]
    fig = plt.figure()
    for d in range(len(degree)):
        moddatadf = pd.DataFrame()
        befdatadf = pd.DataFrame()
        print "now degree is " + str(degree[d])
        [lati, loni, on, bd] = getemolt_latlon(site[k])  # extracts lat/lon based on site code
        [lati, loni] = dm2dd(lati, loni)  # converts decimal-minutes to decimal degrees
        lati = [lati + degree[d], lati - degree[d]]
        loni = [loni + degree[d], loni - degree[d]]
        for m in range(len(month)):
            month_time = month[m]
            #################read-in obs data##################################
            print month_time, site[k], str(degree[d])
            ######################################################################################
            if month_time < 12:
                input_time = [
                    dt.datetime(2008, int(month_time), 1),
                    dt.datetime(2008, int(month_time) + 1, 1)
                ]
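            # Hedged guess at the truncated branch (not the original code):
            # December's window presumably rolls over into January 2009.
            else:
                input_time = [
                    dt.datetime(2008, 12, 1),
                    dt.datetime(2009, 1, 1)
                ]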
Example #4
"""
Compares eMOLT with FVCOM bottom temp
@author: jmanning, rsignell, yacheng
"""
import matplotlib.pyplot as plt
import netCDF4
from getdata import getemolt_latlon,getemolt_temp
from conversions import dm2dd,f2c
from utilities import nearxy

site='BN01'
[lati,loni,on]=getemolt_latlon(site) # extracts lat/lon based on site code
[lati,loni]=dm2dd(lati,loni) #converts decimal-minutes to decimal degrees
[obs_dt,obs_temp]=getemolt_temp(site) # extracts time series
for kk in range(len(obs_temp)):
  obs_temp[kk]=f2c(obs_temp[kk]) # converts to Celsius

# now get the model output
urlfvcom = 'http://www.smast.umassd.edu:8080/thredds/dodsC/fvcom/hindcasts/30yr_gom3'
nc = netCDF4.Dataset(urlfvcom)
nc.variables
lat = nc.variables['lat'][:]
lon = nc.variables['lon'][:]
times = nc.variables['time']
jd = netCDF4.num2date(times[:],times.units)
vname = 'temp'
var = nc.variables[vname]

# find nearest point to desired location and time
inode = nearxy(lon,lat,loni,lati)
index=netCDF4.date2index([obs_dt[0],obs_dt[-1]],times,select='nearest')#find the model time index at start & end of obs
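# Hedged continuation (not part of the original script): pull the bottom-layer
# model temperature at the nearest node over the matched time window and plot
# it against the observations. The (time, siglay, node) dimension order and the
# bottom layer sitting at index -1 are assumptions about the GOM3 hindcast output.
modtemp = var[index[0]:index[1], -1, inode]
modtime = jd[index[0]:index[1]]
plt.figure()
plt.plot(obs_dt, obs_temp, label='eMOLT observed')
plt.plot(modtime, modtemp, label='FVCOM bottom temp')
plt.ylabel('temperature (degC)')
plt.legend()
plt.show()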
Example #5
#basemap_standard([40.,45.],[-72.,-65.],2)#True,False,[0.5],list(np.array(cont_range)*-1))
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
lat=[40.,45.]
lon=[-71.,-65.]
comp='p_emoltvsmod'  # mode of comparison; the p_ prefix refers to percent difference
print comp
m = Basemap(projection='cyl',llcrnrlat=min(lat)-0.01,urcrnrlat=max(lat)+0.01,\
            llcrnrlon=min(lon)-0.01,urcrnrlon=max(lon)+0.01,resolution='h')
m.drawcoastlines()
m.fillcontinents(color='grey')
m.drawmapboundary()   
ebd,mbd,ubd,hbd=[],[],[],[]       
diffd=[]
outf.write('site,lon,lat,ebd,ubd,hbd,mbd,perc_diff\n')
for k in range(len(site)):
  [elat, elon, original_name,eb]=getemolt_latlon(site[k])
  ebd.append(eb)
  [elat_dd,elon_dd]=dm2dd(elat,elon)
  mbd.append(getFVCOM_depth(elat_dd,elon_dd))
  ubd.append(-1*getdepth(elat_dd,elon_dd))  # nearest USGS depth
  hbd.append(-1*get_w_depth([elon_dd],[elat_dd]))  # Huanxin's interpolated depth
  if comp=='usgsvsmod':
    diffd.append(ubd[k]-mbd[k])
  elif comp=='p_emoltvsmod':
    diffd.append((ebd[k]-mbd[k])/ebd[k]*100)  
  if diffd[k]<1:
      dotcol='magenta'
  else:
      dotcol='cyan'
  plt.scatter(elon_dd,elat_dd,25*abs(diffd[k]),marker='o',color=dotcol)
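# Hedged finishing step (not in the original snippet): display the map once the
# site loop is done; marker size above scales with |percent difference| in depth.
plt.show()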
  
Example #6
   
import numpy as np

def nearxy(lon, lat, lonp, latp):  # reconstructed header: the snippet begins mid-function; name and argument order are inferred from the calls in Examples #2 and #4
    # index of (and distance to) the point in lon/lat nearest to (lonp, latp),
    # with a cos(latitude) factor so longitude offsets are in comparable units
    cp=np.cos(latp*np.pi/180.)
    dx=(lon-lonp)*cp
    dy=lat-latp
    dist2=dx*dx+dy*dy
    i=np.argmin(dist2)
    min_dist=np.sqrt(dist2[i])
    return i,min_dist 
depthunique=[]
lat=[]
lon=[]
sites=[]    
site=['BF01']     
#site=['AB01','AG01','BA01','BA02','BC01','BD01','BF01','BI02','BI01','BM01','BM02','BN01','BS02','CJ01','CP01','DC01','DJ01','DK01','DMF1','ET01','GS01','JA01','JC01','JS06','JT04','KO01','MF02','MM01','MW01','NL01','PF01','PM02','PM03','PW01','RA01','RM02','RM04','SJ01','TA14','TA15','TS01']
for k in range(len(site)):
    [lati,loni,on,bd]=getemolt_latlon(site[k])
    # convert ddmm.m (degrees + decimal minutes) to decimal degrees, as dm2dd does in the other examples
    (a,b)=divmod(float(lati),100)
    aa=int(a)
    bb=float(b)
    lati=aa+bb/60
    (c,d)=divmod(float(loni),100)
    cc=int(c)
    dd=float(d)
    loni=cc+(dd/60)
    
    url = 'http://gisweb.wh.whoi.edu:8080/dods/whoi/emolt_sensor?emolt_sensor.SITE,emolt_sensor.YRDAY0_LOCAL,emolt_sensor.TIME_LOCAL,emolt_sensor.TEMP,emolt_sensor.DEPTH_I,emolt_sensor.SALT&emolt_sensor.SITE='
    dataset = get_dataset(url + '"' + site[k] + '"')
    var = dataset['emolt_sensor']
    print 'extracting eMOLT data using PyDap... hold on'
    depth = list(var.DEPTH_I)
    distinct_dep=unique(depth)
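    # Hedged continuation (the original is truncated here): presumably each
    # site's results are appended to the lists initialized before the loop.
    depthunique.append(distinct_dep)
    lat.append(lati)
    lon.append(loni)
    sites.append(site[k])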