コード例 #1
0
def make_images(dpath, path, dt=datetime(2019, 5, 1, 0, 0, 0), interval=31):
    '''Generate one DOPPIO bottom-temperature image per day.

    dpath: path of a pickled dictionary holding telemetered data,
           keyed as dict[year][month][day] -> DataFrame with 'lon'/'lat'
    path: directory where the images are stored
    dt: start time
    interval: how many days of images to make
    '''
    with open(dpath, 'rb') as fp:
        telemetered_dict = pickle.load(fp)
    for j in range(interval):
        dtime = dt + timedelta(days=j)
        print(dtime)
        url = get_doppio_url(dtime)
        # Wait until the model URL is reachable before trying to read it.
        while True:
            if zl.isConnected(address=url):
                break
            print('check the website is well or internet is connected?')
            time.sleep(5)
        skip = 0
        while True:
            try:
                nc = NetCDFFile(url)
                lons = nc.variables['lon_rho'][:]
                lats = nc.variables['lat_rho'][:]
                temps = nc.variables['temp']
                depth = nc.variables['h'][:]
                break
            except RuntimeError:
                # transient read failure: retry
                print(str(url) + ': need reread')
            except OSError:
                if zl.isConnected(address=url):
                    # connection is fine, so the file itself is missing
                    print(str(url) + ': file not exit.')
                    skip = 1
                    break
            except KeyboardInterrupt:
                sys.exit()
        if skip == 1:
            continue

        m_temp = mean_temp(temps)
        time_str = dtime.strftime('%Y-%m-%d')
        temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
        Year = str(dtime.year)
        Month = str(dtime.month)
        Day = str(dtime.day)
        slons, slats = [], []
        try:
            for i in telemetered_dict[Year][Month][Day].index:
                slons.append(telemetered_dict[Year][Month][Day]['lon'].iloc[i])
                slats.append(telemetered_dict[Year][Month][Day]['lat'].iloc[i])
        except Exception:
            # FIX: was a bare except (also caught SystemExit/KeyboardInterrupt);
            # missing telemetered data for this day just means no points to plot
            slons, slats = [], []
        plot(lons, lats, slons, slats, temp, depth, time_str, path)
コード例 #2
0
def per_boat_map(df,path_save,dpi=300):
    '''Plot one map for one vessel for one time period and save it as a .ps file.

    df: a record (Series-like) with fields 'name', 'time', 'lat', 'lon',
        'climtemp' (historical mean temp), 'obstemp' (observed mean temp)
        and 'Stdtemp' (standard deviation of the observations)
    path_save: root folder; the picture goes to <path_save>/picture<YYYY-MM>/
    dpi: resolution passed to plt.savefig

    The map shows the vessel position with a text label:
        HT:  the mean temperature in historical data
        OBS: the mean temperature of observation, with the standard
             deviation in parentheses
    The title carries the vessel name and the week's time period.'''
    start_t,end_t=week_start_end(df['time'])
    fig=plt.figure(figsize=(8,10))
    # scale fonts/markers from the smaller figure dimension
    size=min(fig.get_size_inches())
    fig.suptitle('F/V '+df['name'],fontsize=3*size, fontweight='bold')
    ax=fig.add_axes([0.03,0.2,0.85,0.68])
    ax.set_title(start_t.strftime('%Y/%m/%d')+'-'+end_t.strftime('%Y/%m/%d'))
    ax.axes.title.set_size(2*size)
    
    while(not zl.isConnected()):#check the internet is good or not
        time.sleep(120)   #if no internet, sleep 2 minutes and try again
    try:

        service = 'Ocean_Basemap'
        xpixels = 5000 
        #Build a map background centered on the vessel, one degree each way
        map=Basemap(projection='mill',llcrnrlat=df['lat']-1,urcrnrlat=df['lat']+1,llcrnrlon=df['lon']-1,urcrnrlon=df['lon']+1,\
                resolution='f',lat_0=df['lat'],lon_0=df['lon'],epsg = 4269)
        map.arcgisimage(service=service, xpixels = xpixels, verbose=False)
        # draw parallels (labels on the right, no grid lines)
        parallels = np.arange(0.,90.0,0.5)
        map.drawparallels(parallels,labels=[0,1,0,0],fontsize=size,linewidth=0.0)
        # draw meridians (labels on the bottom, no grid lines)
        meridians = np.arange(180.,360.,0.5)
        map.drawmeridians(meridians,labels=[0,0,0,1],fontsize=size,linewidth=0.0)
        #Draw a scatter plot: the vessel position plus its HT/OBS text label
        # note: 'map' here is the Basemap projection object, not the builtin
        tele_lat,tele_lon=df['lat'],df['lon']
        x,y=map(tele_lon,tele_lat)
        s='HT:'+str(df['climtemp'])+'\nOBS:'+str(round(df['obstemp'],4))+'('+str(round(df['Stdtemp'],2))+')'
        ax.plot(x, y,'b*',markersize=2*size, alpha=0.5)
        ax.text(x+0.05,y-0.05,s,fontsize =2*size)
        xlabel='\nHT:the mean temperature in historical\nOBS:the mean temperature of observation,the number is the Standard deviation in Parentheses'
        ax.set_xlabel(xlabel,position=(0., 1e6),horizontalalignment='left',fontsize =size)
#        if the folder for this month's pictures is not there, create it
        if not os.path.exists(path_save+'/picture'+df['time'].strftime('%Y-%m')+'/'):
            os.makedirs(path_save+'/picture'+df['time'].strftime('%Y-%m')+'/')
        #save the map
        plt.savefig(path_save+'/picture'+df['time'].strftime('%Y-%m')+'/'+df['name']+'_map'+'_'+end_t.strftime('%Y-%m-%d')+'.ps',dpi=dpi) #save picture
        print(df['name']+' finished draw!')
    except KeyboardInterrupt:
        sys.exit()
    except:
        # NOTE(review): bare except hides the real error; the map is simply
        # reported as needing a redraw
        print(df['name']+' need redraw!')
コード例 #3
0
def draw_map(df,name,dtime,path_picture_save,timeinterval=30,mindepth=10,dpi=300):
    """Plot the telemetered positions of one vessel on a basemap and save it.

    df: DataFrame that already contains 'time', 'lat' and 'lon' columns
    name: vessel name, used in the figure title and the output file name
    dtime: datetime.datetime marking the end of the plotting window
    path_picture_save: root folder for the saved pictures
    timeinterval: length of the plotting window in days
    mindepth: minimum depth passed to check_depth to screen out records
    dpi: resolution of the saved figure
    Returns 0 when there is no usable data.
    """
    # BUG FIX: the original rebuilt df here from `pd.DataFrame.from_dict(dict[i])`,
    # where both `dict[i]` and `i` were undefined (NameError at runtime) and the
    # df argument was discarded; that dead code has been removed, so the caller
    # must pass a DataFrame that already has the columns named above.
    #creat map
    #Create a blank canvas
    df=check_depth(df.dropna(),mindepth=mindepth) #screen out the data (FIX: use the mindepth argument, not a hard-coded 10)
    #make sure the start time through the latest time of get data
    if len(df)==0:
        print(name+': valuless data!')
        return 0
    endtime=df['time'][len(df)-1]
    if type(endtime)==str:
        endtime=datetime.strptime(endtime,'%Y-%m-%d %H:%M:%S')
    if dtime>endtime:
        start_time=endtime-timedelta(days=timeinterval)
    else:
        start_time=dtime-timedelta(days=timeinterval)
    df=check_time(df,'time',start_time,dtime) #keep only the data inside the time window
    if len(df)==0:
        print(name+': valuless data!')
        return 0
    fig=plt.figure(figsize=(8,8.5))
    fig.suptitle('F/V '+name,fontsize=24, fontweight='bold')

    start_time=df['time'][0]
    end_time=df['time'][len(df)-1]
    if type(start_time)!=str:
        start_time=start_time.strftime('%Y/%m/%d')
        end_time=end_time.strftime('%Y/%m/%d')
    else:
        start_time=start_time.replace('-','/')[:10]
        end_time=end_time.replace('-','/')[:10]
    ax=fig.add_axes([0.03,0.03,0.85,0.85])
    ax.set_title(start_time+' to '+end_time)
    ax.axes.title.set_size(16)

    min_lat=min(df['lat'])
    max_lat=max(df['lat'])
    max_lon=max(df['lon'])
    min_lon=min(df['lon'])
    #keep max_lon-min_lon>=0.4 so a single point still gets a visible box
    if (max_lon-min_lon)<=0.4:
        max_lon=max_lon+(0.4-(max_lon-min_lon))/2.0
        min_lon=max_lon-0.4
    #adjust the max and min so the map has the same width and height
    if (max_lon-min_lon)>(max_lat-min_lat):
        max_lat=max_lat+((max_lon-min_lon)-(max_lat-min_lat))/2.0
        min_lat=min_lat-((max_lon-min_lon)-(max_lat-min_lat))/2.0
    else:
        max_lon=max_lon+((max_lat-min_lat)-(max_lon-min_lon))/2.0
        min_lon=min_lon-((max_lat-min_lat)-(max_lon-min_lon))/2.0

    while(not zl.isConnected()):#check the internet is good or not
        time.sleep(120)   #if no internet, sleep 2 minutes and try again
    try:
        service = 'Ocean_Basemap'
        xpixels = 5000
        #Build a map background with a 10% margin around the data box
        extend=0.1*(max_lon-min_lon)
        map=Basemap(projection='mill',llcrnrlat=min_lat-extend,urcrnrlat=max_lat+extend,llcrnrlon=min_lon-extend,urcrnrlon=max_lon+extend,\
                resolution='f',lat_0=(max_lat+min_lat)/2.0,lon_0=(max_lon+min_lon)/2.0,epsg = 4269)
        map.arcgisimage(service=service, xpixels = xpixels, verbose= False)
        #pick a parallel/meridian step that suits the map extent
        if max_lat-min_lat>=3:
            step=int((max_lat-min_lat)/5.0*10)/10.0
        elif max_lat-min_lat>=1.0:
            step=0.5
        elif max_lat-min_lat>=0.5:
            step=0.2
        else:
            step=0.1

        # draw parallels.
        parallels = np.arange(0.,90.0,step)
        map.drawparallels(parallels,labels=[0,1,0,0],fontsize=10,linewidth=0.0)
        # draw meridians
        meridians = np.arange(180.,360.,step)
        map.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10,linewidth=0.0)
        #Draw a scatter plot of the telemetered positions
        tele_lat,tele_lon=to_list(df['lat'],df['lon'])
        tele_x,tele_y=map(tele_lon,tele_lat)
        ax.plot(tele_x,tele_y,'b*',markersize=6,alpha=0.5,label='telemetry')
        ax.legend()
        #if the path of the picture save is not there, create the folder
        if not os.path.exists(path_picture_save+'/picture'+dtime.strftime('%Y-%m-%d')+'/'):
            os.makedirs(path_picture_save+'/picture'+dtime.strftime('%Y-%m-%d')+'/')
        #save the map
        plt.savefig(path_picture_save+'/picture'+dtime.strftime('%Y-%m-%d')+'/'+name+'_map'+'_'+dtime.strftime('%Y%m')+'.ps',dpi=dpi) #save picture
        print(name+' finished draw!')
    except KeyboardInterrupt:
        sys.exit()
    except Exception:
        # FIX: was a bare except; do not crash the caller's loop over
        # vessels, just report the failure
        print(name+' need redraw!')
コード例 #4
0
def draw_map(df,name,dtime,path_picture_save,timeinterval=30,mindepth=10,dpi=300):
    """Plot the telemetered positions of one vessel on a basemap and save it.

    df: DataFrame with at least 'time', 'lat' and 'lon' columns
    name: vessel name, used in the figure title and the output file name
    dtime: datetime.datetime marking the end of the plotting window
    path_picture_save: root folder; the picture goes to <path_picture_save>/picture/
    timeinterval: length of the plotting window in days
    mindepth: minimum depth passed to check_depth to screen out records
    dpi: resolution of the saved figure
    Returns 0 when there is no usable data.
    """
    #creat map
    #Create a blank canvas
    df=check_depth(df.dropna(),mindepth=mindepth) #screen out the data (FIX: use the mindepth argument, not a hard-coded 10)
    #ROBUSTNESS FIX: bail out before indexing the last row of an empty frame
    if len(df)==0:
        print(name+': no value data!')
        return 0
    #make sure the start time through the latest time of get data
    if dtime>df['time'][len(df)-1]:
        start_time=df['time'][len(df)-1]-timedelta(days=timeinterval)
    else:
        start_time=dtime-timedelta(days=timeinterval)

    df=check_time(df,'time',start_time,dtime) #keep only the data inside the time window
    if len(df)==0:
        print(name+': no value data!')
        return 0
    fig=plt.figure(figsize=(8,8.5))
    fig.suptitle('F/V '+name,fontsize=24, fontweight='bold')
    if len(df)>0:
        start_time=df['time'][0]
        end_time=df['time'][len(df)-1]
    if type(start_time)!=str:
        start_time=start_time.strftime('%Y-%m-%d')
        end_time=dtime.strftime('%Y-%m-%d')
    ax=fig.add_axes([0.02,0.02,0.9,0.9])
    ax.set_title(start_time+'-'+end_time)
    ax.axes.title.set_size(16)

    min_lat=min(df['lat'])
    max_lat=max(df['lat'])
    max_lon=max(df['lon'])
    min_lon=min(df['lon'])
    #keep max_lon-min_lon>=2 so a single point still gets a visible box
    if (max_lon-min_lon)<=2:
        max_lon=1-(max_lon-min_lon)/2.0+(max_lon+min_lon)/2.0
        min_lon=max_lon-2
    #adjust the max and min so the map has the same width and height
    if (max_lon-min_lon)>(max_lat-min_lat):
        max_lat=max_lat+((max_lon-min_lon)-(max_lat-min_lat))/2.0
        min_lat=min_lat-((max_lon-min_lon)-(max_lat-min_lat))/2.0
    else:
        max_lon=max_lon+((max_lat-min_lat)-(max_lon-min_lon))/2.0
        min_lon=min_lon-((max_lat-min_lat)-(max_lon-min_lon))/2.0
    #wait for the internet connection before downloading basemap tiles
    while(not zl.isConnected()):
        time.sleep(120)
    try:
        service = 'Ocean_Basemap'
        xpixels = 5000
        #Build a map background with a 0.1-degree margin around the data box
        map=Basemap(projection='mill',llcrnrlat=min_lat-0.1,urcrnrlat=max_lat+0.1,llcrnrlon=min_lon-0.1,urcrnrlon=max_lon+0.1,\
                resolution='f',lat_0=(max_lat+min_lat)/2.0,lon_0=(max_lon+min_lon)/2.0,epsg = 4269)
        map.arcgisimage(service=service, xpixels = xpixels, verbose= False)
        #pick a parallel/meridian step that suits the map extent
        if max_lat-min_lat>=3:
            step=int((max_lat-min_lat)/5.0*10)/10.0
        else:
            step=0.5

        # draw parallels.
        parallels = np.arange(0.,90.0,step)
        map.drawparallels(parallels,labels=[0,1,0,0],fontsize=10,linewidth=0.0)
        # draw meridians
        meridians = np.arange(180.,360.,step)
        map.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10,linewidth=0.0)

        #Draw a scatter plot of the telemetered positions
        if len(df)>0:
            tele_lat,tele_lon=to_list(df['lat'],df['lon'])
            tele_x,tele_y=map(tele_lon,tele_lat)
            ax.plot(tele_x,tele_y,'b*',markersize=6,alpha=0.5,label='telemetry')
            ax.legend()
        #if the path of the picture save is not there, create the folder
        if not os.path.exists(path_picture_save+'/picture/'):
            os.makedirs(path_picture_save+'/picture/')
        plt.savefig(path_picture_save+'/picture/'+name+'_map'+'_'+dtime.strftime('%Y%m%d')+'.png',dpi=dpi) #save picture
        print(name+' finished draw!')
    except KeyboardInterrupt:
        # CONSISTENCY FIX: let Ctrl-C stop the program, as the sibling draw
        # functions in this file do, instead of being swallowed
        sys.exit()
    except Exception:
        # FIX: was a bare except
        print(name+' need redraw!')
コード例 #5
0
def contours_depth_temp_gomfs(output_path,date_time,lat=41.784712,lon=-69.231081,depth='bottom',addlon=.3,addlat=.3,mod_points='yes',depth_contours_interval=[20, 50,100,150,200,500]):
    """Draw depth contours and bottom-temperature contours of the GoMOFs
    model around a point, plus a small inset locator map, and save as PNG.

    notice:
    output_path: folder where 'contour_depth_tem_GoMOFs.png' is written
    date_time: e.g. datetime.datetime(2019, 2, 27, 11, 56, 51, 666857)
    lat,lon: center of the main map, decimal degrees
    depth: if depth=='bottom' print the bottom data.
    addlon,addlat: edges around point to include in the zoomed in plot
    mod_points: if 'yes', also plot the model grid nodes; any other string skips them
    depth_contours_interval: depth levels (meters) for the black contour lines
    Returns 1 on success, 0 when the model data could not be fetched.
    NOTE(review): depth_contours_interval is a mutable default list (never
    mutated here, but callers should not rely on sharing it), and the
    basemap-retry loops below never give up, so this call can block
    indefinitely without internet.
    """
    #prepare the data
    try:
        temperature,rho_index,gomofs_temp,gomofs_h,gomofs_lats,gomofs_lons=get_gomofs(date_time,lat,lon,depth,forcontours=True)
    except:
        # best effort: any failure fetching the model data aborts quietly
        return 0
    #creat map   
    print('start draw map!')   
    #Create a blank canvas
    fig=plt.figure(figsize = (20, 20))
    fig.suptitle('GoMOFs model bottom temp(deg C) and depth(meter)',fontsize=35, fontweight='bold')
    #Draw contour lines and temperature maps in detail
    ax1=fig.add_axes([0.07,0.03,0.85,0.95])
    ax1.set_title(date_time.strftime('%Y-%m-%d %H:%M:%S'), loc='center')
    ax1.axes.title.set_size(24)
    # block until the internet connection is back (checks every 2 minutes)
    while(not zl.isConnected()):
        time.sleep(120)
    check,count=0,0  # check: basemap built OK; count: number of attempts
    service = 'Ocean_Basemap'
    # retry until the basemap tiles download successfully (no upper bound)
    while(check==0):
        try:
            map=Basemap(llcrnrlat=lat-addlat,urcrnrlat=lat+addlat,llcrnrlon=lon-addlon,urcrnrlon=lon+addlon,\
                        resolution='f',projection='tmerc',lat_0=lat,lon_0=lon,epsg = 4269)
            map.arcgisimage(service=service, xpixels = 5000, verbose= False)
            check=1
        except:
            check=0
            count=count+1
            print('start '+str(count)+' times add arcgisimage!')
    if mod_points=='yes':
        # overlay the model grid nodes as tiny yellow dots
        x1,y1=map(gomofs_lons,gomofs_lats)
        ax1.plot(x1,y1,'yo',markersize=0.1)
    #label the latitude and longitude
    parallels = np.arange(0.,90,1)
    map.drawparallels(parallels,labels=[1,0,0,0],fontsize=20,linewidth=0.0)
    # draw meridians
    meridians = np.arange(180.,360.,1)
    map.drawmeridians(meridians,labels=[0,0,0,1],fontsize=20,linewidth=0.0)
    # project the model grid into map coordinates
    # (note: 'map' is the Basemap object, not the builtin)
    m_lon,m_lat=map(gomofs_lons,gomofs_lats)
    print('start contour depth!')
    dept_clevs=depth_contours_interval
    dept_cs=map.contour(m_lon,m_lat,gomofs_h,dept_clevs,colors='black')
    plt.clabel(dept_cs, inline = True, fontsize =20,fmt="%1.0f")
    print('start contour temperature!')
    # filled contours of temperature at the chosen s_rho layer, 7 levels
    temp_cs=map.contourf(m_lon,m_lat,gomofs_temp[:][0][rho_index],7)
    temp_cbar=map.colorbar(temp_cs,location='right',size="5%",pad="1%")
    temp_cbar.set_label('deg C',size=25)
    temp_cbar.ax.set_yticklabels(temp_cbar.ax.get_yticklabels(), fontsize=20)
    # 9999 is the sentinel for "requested depth deeper than the sea floor"
    if temperature==9999:
        citys=['the depth is out of the bottom depth']
    else:
        citys=[str(round(temperature,2))]
    lat_point=[lat]
    lon_point=[lon]
    x,y=map(lon_point,lat_point)
    ax1.plot(x,y,'ro')
    ax1.text(x[0]+0.02,y[0]-0.01,citys[0],bbox=dict(facecolor='yellow',alpha=0.5),fontsize =30)
    #insert a small locator map covering a wider (10x10 degree) area
    ax2=fig.add_axes([0.09,0.68,0.2,0.2])
    #Build a map background
    print('start draw insert map!')
    while(not zl.isConnected()):
        time.sleep(120)
    check,count=0,0
    while(check==0):
        try:
            map2=Basemap(llcrnrlat=int(lat)-5,urcrnrlat=int(lat)+5,llcrnrlon=int(lon)-5,urcrnrlon=int(lon)+5,\
                         resolution='f',projection='tmerc',lat_0=int(lat),lon_0=int(lon),epsg = 4269)
            map2.arcgisimage(service=service, xpixels = 5000, verbose= False)
            check=1
        except:
            check=0
            count=count+1
            print('start '+str(count)+' times add arcgisimage!')

    parallels = np.arange(0.,90.,3)
    map2.drawparallels(parallels,labels=[0,1,0,0],fontsize=10,linewidth=0.0)
    # draw meridians
    meridians = np.arange(180.,360.,3)
    map2.drawmeridians(meridians,labels=[0,0,0,1],fontsize=10,linewidth=0.0)
    x2,y2=map2(lon_point,lat_point)
    ax2.plot(x2,y2,'ro')
    plt.savefig(output_path+'contour_depth_tem_GoMOFs.png',dpi=300)
#    plt.show()
    return 1
コード例 #6
0
def get_gomofs_zl(date_time,lat,lon,depth='bottom',mindistance=20,autocheck=True,fortype='temperature'):
    """
    Return the GoMOFs model temperature at a given time/place/depth.

    date_time: GMT time, e.g. datetime.datetime(2019, 2, 27, 11, 56, 51, 666857)
    lat, lon: decimal degrees
    depth: 'bottom', or meters below the surface written as a negative
           number (the sentinel 99999 is treated as 'bottom')
    mindistance: max distance (miles) allowed between the request point
                 and the nearest model node
    autocheck: if True, give up and return nan after repeated read errors
               instead of asking the user interactively
    fortype: 'tempdepth' returns (temperature, bottom_depth);
             anything else returns just the temperature
    Returns np.nan on any failure.
    """
    if depth==99999:
        depth='bottom'
    if not gomofs_coordinaterange(lat,lon):
        print('lat and lon out of range in gomofs')
        return np.nan
    if date_time<datetime.datetime.strptime('2018-07-01 00:00:00','%Y-%m-%d %H:%M:%S'):
        print('Time out of range, time start :2018-07-01 00:00:00z')
        return np.nan
    if date_time>datetime.datetime.now()+datetime.timedelta(days=3): #forecast time under 3 days
        print('forecast time under 3 days')
        return np.nan
    #start download data
    forecastdate=date_time  #forecast time equal input date_time
    changefile,filecheck=1,1  #changefile: need to switch files; filecheck: file still to be located
    while(changefile==1):
        count=1
        while(filecheck==1):  #download the data
            try:
                if forecastdate==date_time:   #date_time unchanged: use the nowcast file
                    url=get_gomofs_url(date_time)
                    nc=netCDF4.Dataset(str(url))
                else:                         #date_time stepped back: use the forecast file
                    url=get_gomofs_url_forecast(date_time,forecastdate)
                    nc=netCDF4.Dataset(str(url))
                filecheck,readcheck=0,1      #file found: stop locating, start reading
            except OSError:
                try:
                    url=get_gomofs_url_forecast(date_time,forecastdate)
                    nc=netCDF4.Dataset(str(url))
                    filecheck,readcheck=0,1
                except OSError:
                    date_time=date_time-datetime.timedelta(hours=6)  #step back one model cycle
                    if (forecastdate-date_time)>datetime.timedelta(days=3):  #every file only has 3 days of data
                        print('please check the website or file is exist!')
                        print(url)
                        return np.nan
                except Exception:
                    return np.nan
            except Exception:
                return np.nan
        while(readcheck==1):  #read the variables, retrying on transient errors
            try:
                while True:
                    if zl.isConnected(address=url):
                        break
                    print('check the website is well or internet is connected?')
                    time.sleep(5)
                gomofs_lons=nc.variables['lon_rho'][:]
                gomofs_lats=nc.variables['lat_rho'][:]
                gomofs_rho=nc.variables['s_rho']
                gomofs_h=nc.variables['h'][:]
                gomofs_temp=nc.variables['temp']
                readcheck,changefile=0,0   #read succeeded: leave both loops
            except RuntimeError:
                count=count+1
                if count>8:
                    if autocheck:
                        return np.nan
                    # interactive mode: let the user decide how to proceed
                    while True:
                        print('it will return nan, if you do not need read again.')
                        cmd = input("whether need read again(y/n)?:")
                        if cmd.lower() == "y":
                            count=1
                            break
                        elif cmd.lower() == "n":
                            cmd2 = input("whether need change file(y/n)?:")
                            if cmd2.lower()=="y":
                                date_time=date_time-datetime.timedelta(hours=6)
                                readcheck,filecheck=0,1
                                break
                            else:
                                print('interrupt read data.')
                                return np.nan
                        else:
                            break
                time.sleep(20)   #pause 20s between read attempts
                print('the '+str(int(count))+' times to read data.')
            except Exception:
                return np.nan

    #calculate the index of the nearest model node
    target_distance=2*zl.dist(lat1=gomofs_lats[0][0],lon1=gomofs_lons[0][0],lat2=gomofs_lats[0][1],lon2=gomofs_lons[0][1])
    eta_rho,xi_rho=zl.find_nd(target=target_distance,lat=lat,lon=lon,lats=gomofs_lats,lons=gomofs_lons)

    if zl.dist(lat1=lat,lon1=lon,lat2=gomofs_lats[eta_rho][xi_rho],lon2=gomofs_lons[eta_rho][xi_rho])>mindistance:
        print('THE location is out of range')
        return np.nan
    # clamp the indices so the 4-neighbor stencil below stays in bounds
    # BUG FIX: the original assigned eta_rho when xi_rho hit the last
    # column, and never clamped xi_rho==0 (which wrapped to the last column)
    if eta_rho==0:
        eta_rho=1
    if eta_rho==len(gomofs_lats)-1:
        eta_rho=len(gomofs_lats)-2
    if xi_rho==0:
        xi_rho=1
    if xi_rho==len(gomofs_lats[0])-1:
        xi_rho=len(gomofs_lats[0])-2
    # estimate the bottom depth at the point by fitting its four neighbors
    points_h=[[gomofs_lats[eta_rho][xi_rho],gomofs_lons[eta_rho][xi_rho],gomofs_h[eta_rho,xi_rho]],
         [gomofs_lats[eta_rho,(xi_rho-1)],gomofs_lons[eta_rho,(xi_rho-1)],gomofs_h[eta_rho,(xi_rho-1)]],
         [gomofs_lats[eta_rho,(xi_rho+1)],gomofs_lons[eta_rho,(xi_rho+1)],gomofs_h[eta_rho,(xi_rho+1)]],
         [gomofs_lats[(eta_rho-1),xi_rho],gomofs_lons[(eta_rho-1),xi_rho],gomofs_h[(eta_rho-1),xi_rho]],
         [gomofs_lats[(eta_rho+1),xi_rho],gomofs_lons[(eta_rho+1),xi_rho],gomofs_h[(eta_rho+1),xi_rho]]]
    point_h=zl.fitting(points_h,lat,lon)
    # choose the s_rho layer whose depth is closest to the requested depth
    if depth=='bottom':
        rho_index=0
    else:
        distance_h=gomofs_rho[0]*point_h-depth
        for k in range(len(gomofs_rho)):
            if abs(distance_h)>=abs(gomofs_rho[k]*point_h-depth):
                distance_h=gomofs_rho[k]*point_h-depth
                rho_index=k
    # estimate the temperature at the point by fitting its four neighbors
    # BUG FIX: the original listed the (eta_rho-1) neighbor twice; the last
    # point now uses (eta_rho+1), matching the points_h stencil above
    points_temp=[[gomofs_lats[eta_rho,xi_rho],gomofs_lons[eta_rho,xi_rho],gomofs_temp[0][rho_index][eta_rho][xi_rho]],
         [gomofs_lats[eta_rho,(xi_rho-1)],gomofs_lons[eta_rho,(xi_rho-1)],gomofs_temp[0][rho_index][eta_rho,(xi_rho-1)]],
         [gomofs_lats[eta_rho,(xi_rho+1)],gomofs_lons[eta_rho,(xi_rho+1)],gomofs_temp[0][rho_index][eta_rho][(xi_rho+1)]],
         [gomofs_lats[(eta_rho-1),xi_rho],gomofs_lons[(eta_rho-1),xi_rho],gomofs_temp[0][rho_index][(eta_rho-1)][xi_rho]],
         [gomofs_lats[(eta_rho+1),xi_rho],gomofs_lons[(eta_rho+1),xi_rho],gomofs_temp[0][rho_index][(eta_rho+1)][xi_rho]]]
    temperature=zl.fitting(points_temp,lat,lon)
    # if the requested depth is below the sea floor, fail with a message
    if depth!='bottom':
        if abs(point_h)<abs(depth):
            print ("the depth is out of the bottom:"+str(point_h))
            return np.nan
    if fortype=='tempdepth':
        return temperature,point_h
    else:
        return temperature
コード例 #7
0
def temp_min_max(model_name,
                 dt=datetime(2019, 5, 1, 0, 0, 0),
                 interval=31,
                 area='OOI'):
    '''
        Loop through each hour of the interval to find the overall min/max
        bottom temperature (deg F) in the given area.
        model_name: name of model ('DOPPIO' or 'GOMOFS')
        dt: start time (local); converted to UTC once before looping
        interval: how many days we need to scan
        area: limited area you want to look at
        Returns (Min_temp, Max_temp); raises ValueError if no data was read.
    '''
    temp_list = []  #store min and max temperature of every hour
    # BUG FIX: the original tested `model_name == 'DOPPIO' or 'GOMOFS'`,
    # which is always true; a membership test is what was intended.
    if model_name in ('DOPPIO', 'GOMOFS'):
        interval = interval * 24
        # BUG FIX: local2utc was applied inside the loop, shifting dt by the
        # UTC offset once more on every iteration; convert exactly once.
        dt = local2utc(dt)
        for j in range(interval):
            dtime = dt + timedelta(hours=j)
            url = get_doppio_url(dtime)
            while True:  # wait until the model URL is reachable
                if zl.isConnected(address=url):
                    break
                print('check the website is well or internet is connected?')
                time.sleep(5)
            skip = 0
            while True:  # read the grid, retrying on transient failures
                try:
                    nc = NetCDFFile(url)
                    lons = nc.variables['lon_rho'][:]
                    lats = nc.variables['lat_rho'][:]
                    temps = nc.variables['temp']
                    i0, i1, j0, j1 = get_limited_gbox(area, lon=lons, lat=lats)
                    break
                except RuntimeError:
                    print(str(url) + ': need reread')
                except OSError:
                    if zl.isConnected(address=url):
                        print(str(url) + ': file does not exist.')
                        skip = 1
                        break
                except KeyboardInterrupt:
                    sys.exit()
            if skip == 1:
                continue
            m_temp = temps[np.mod(j, 24),
                           0]  #0 is bottom of depth,-1 is surface of depth
            temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
            temp_F = temp[j0:j1, i0:i1]  # restrict to the requested area
            Min_temp = int(min(temp_F.data[~np.isnan(temp_F.data)]))
            Max_temp = int(max(temp_F.data[~np.isnan(temp_F.data)]))
            temp_list.append(Min_temp)
            temp_list.append(Max_temp)
    if not temp_list:
        # unknown model name or every file was skipped; fail with a clear
        # message instead of min()'s opaque empty-sequence error
        raise ValueError('no temperature data collected for ' + str(model_name))
    Min_temp = min(temp_list)
    # Gomofs is warmer than Doppio, so Doppio's max temperature plus 3
    # approximates Gomofs' max temperature
    Max_temp = max(temp_list) + 3.0
    return Min_temp, Max_temp
コード例 #8
0
def make_images(model_name,
                dpath,
                path,
                dt=datetime(2019, 5, 1, 0, 0, 0),
                interval=31,
                Min_temp=0,
                Max_temp=10,
                area='OOI'):
    ''' Make hourly (DOPPIO) or 3-hourly (GOMOFS) bottom-temperature images.

        model_name: 'DOPPIO' or 'GOMOFS'
        dpath: path of a pickled dictionary storing telemetered data,
               keyed dict[year][month][day] -> DataFrame with 'lon'/'lat'
        path: folder where the images are stored
        dt: start time (local)
        interval: how many days of images to make
        Min_temp, Max_temp: color-scale limits passed to plotit
        area: named limited area passed to plotit
    '''
    with open(dpath, 'rb') as fp:
        telemetered_dict = pickle.load(fp)

    if model_name == 'DOPPIO':
        interval = interval * 24  # one image per hour
        for j in range(interval):
            dtime_local = dt + timedelta(hours=j)  #for time of title
            print(dtime_local)
            dt_utc = local2utc(dt)
            dtime = dt_utc + timedelta(hours=j)
            url = get_doppio_url(dtime)
            while True:  # wait for the model URL to be reachable
                if zl.isConnected(address=url):
                    break
                print('check the website is well or internet is connected?')
                time.sleep(5)
            skip = 0
            while True:  # load data, retrying on transient failures
                try:
                    nc = NetCDFFile(url)
                    lons = nc.variables['lon_rho'][:]
                    lats = nc.variables['lat_rho'][:]
                    temps = nc.variables['temp']
                    depth = nc.variables['h'][:]
                    break
                except RuntimeError:
                    print(str(url) + ': need reread')
                except OSError:
                    if zl.isConnected(address=url):
                        print(str(url) + ': file not exist.')
                        print('Mingchao test there are something wrong')
                        skip = 1
                        break
                except KeyboardInterrupt:
                    sys.exit()
            if skip == 1:
                continue
            m_temp = temps[np.mod(j, 24),
                           0]  #0 is bottom of depth,-1 is surface of depth
            time_str = dtime_local.strftime('%Y-%m-%d-%H')
            temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
            Year = str(dtime.year)
            Month = str(dtime.month)
            Day = str(dtime.day)
            slons, slats = [], []
            try:
                for k in telemetered_dict[Year][Month][Day].index:
                    slons.append(
                        telemetered_dict[Year][Month][Day]['lon'].iloc[k])
                    slats.append(
                        telemetered_dict[Year][Month][Day]['lat'].iloc[k])
            except Exception:
                # FIX: was a bare except; no telemetered data for this day
                # just means no observation points on the plot
                slons, slats = [], []
            dpi = 80
            plotit(model_name, lons, lats, slons, slats, temp, depth, time_str,
                   path, dpi, Min_temp, Max_temp, area)

    if model_name == 'GOMOFS':
        for j in range(interval):  # loop over days
            dtime_day = dt + timedelta(days=j)
            dt_utc = local2utc(dt)
            dtime = dt_utc + timedelta(days=j)
            for i in range(0, 24, 3):  # 8 files per day, one every 3 hours
                # BUG FIX: skip is reset per file now; the original reset it
                # once per day, so one missing file skipped the whole rest
                # of that day
                skip = 0
                dtime_local = dtime_day + timedelta(hours=i)  #for time of title
                ntime = dtime + timedelta(hours=i)
                print(dtime_local)
                url = get_gomofs_url(ntime)
                print(url)
                while True:  #check the internet
                    if zl.isConnected(address=url):
                        break
                    print(
                        'check the website is well or internet is connected?')
                    time.sleep(5)
                while True:  #load data
                    try:
                        nc = NetCDFFile(url)
                        lons = nc.variables['lon_rho'][:]
                        lats = nc.variables['lat_rho'][:]
                        temps = nc.variables['temp']
                        depth = nc.variables['h'][:]
                        break
                    except KeyboardInterrupt:
                        sys.exit()
                    except OSError:
                        if zl.isConnected(address=url):
                            print('mingchao warning')
                            skip = 1
                            break
                    except Exception:
                        print('reread data:' + str(url))
                if skip == 1:  #the file does not exist
                    continue
                m_temp = temps[0][0]  #0 is bottom of depth,-1 is surface
                time_str = dtime_local.strftime('%Y-%m-%d-%H')
                temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
                Year = str(ntime.year)
                Month = str(ntime.month)
                Day = str(ntime.day)
                slons, slats = [], []
                try:
                    # FIX: loop variable renamed from i, which shadowed the
                    # hour-of-day loop variable above
                    for k in telemetered_dict[Year][Month][Day].index:
                        slons.append(
                            telemetered_dict[Year][Month][Day]['lon'].iloc[k])
                        slats.append(
                            telemetered_dict[Year][Month][Day]['lat'].iloc[k])
                except Exception:
                    slons, slats = [], []
                dpi = 80
                plotit(model_name, lons, lats, slons, slats, temp, depth,
                       time_str, path, dpi, Min_temp, Max_temp, area)
コード例 #9
0
def make_images(dpath,
                path,
                dt=datetime(2019, 5, 1, 0, 0, 0),
                interval=31,
                area='OOI',
                clevs=None,
                lat_w=42.5,
                lon_w=-70.3,
                wind=None):
    '''Make hourly DOPPIO temperature images with telemetered obs overlaid.

    dpath: the path of the pickled dictionary storing telemetered data
    path: directory used to store the images
    dt: start time
    interval: how many days to make (one image per hour)
    area: named region passed through to plotit
    clevs: [min, max, step] contour levels (defaults to [39., 44., 0.5])
    lat_w, lon_w: position where the wind is drawn
    wind: DataFrame of wind times and u, v in m/s; built at call time when
        omitted (the original default was a mutable DataFrame evaluated
        once at import time, freezing datetime.now())
    '''
    if clevs is None:
        clevs = [39., 44., 0.5]
    if wind is None:
        # placeholder wind record spanning the last day
        wind = pd.DataFrame([[datetime.now() - timedelta(1),
                              datetime.now()], [.1, .1], [.1, .1]])
    with open(dpath, 'rb') as fp:
        telemetered_dict = pickle.load(fp)
    for j in range(interval * 24):  # one frame per hour
        dtime = dt + timedelta(hours=j)
        print(dtime)
        url = 'http://tds.marine.rutgers.edu/thredds/dodsC/roms/doppio/2017_da/his/History_Best'
        while not zl.isConnected(address=url):  # wait for connectivity
            print('check the website is well or internet is connected?')
            time.sleep(5)
        skip = 0
        while True:  # keep retrying transient read failures
            try:
                nc = NetCDFFile(url)
                lons = nc.variables['lon_rho'][:]
                lats = nc.variables['lat_rho'][:]
                temps = nc.variables['temp']
                depth = nc.variables['h'][:]
                time_var = nc['time']
                itime = netCDF4.date2index(dtime, time_var, select='nearest')
                break
            except RuntimeError:
                print(str(url) + ': need reread')
            except OSError:
                if zl.isConnected(address=url):
                    print(str(url) + ': file not exit.')
                    skip = 1
                    break
            except KeyboardInterrupt:
                sys.exit()
        if skip == 1:
            continue
        # surf_or_bot is a module-level vertical index — presumably -1 for
        # surface, 0 for bottom; confirm against where it is set
        m_temp = temps[itime, surf_or_bot]
        ntime = dtime
        time_str = ntime.strftime('%Y-%m-%d %H00UTC')
        temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
        Year = str(ntime.year)
        Month = str(ntime.month)
        Day = str(ntime.day)
        slons, slats, stemp = [], [], []
        try:
            day_df = telemetered_dict[Year][Month][Day]
            for i in day_df.index:
                slons.append(day_df['lon'].iloc[i])
                slats.append(day_df['lat'].iloc[i])
                stemp.append(day_df['temp'].iloc[i])
        except (KeyError, IndexError):  # no telemetered obs for this day
            slons, slats, stemp = [], [], []
        plotit(lons,
               lats,
               slons,
               slats,
               stemp,
               temp,
               depth,
               time_str,
               path,
               dpi=80,
               area=area,
               clevs=clevs,
               lat_w=lat_w,
               lon_w=lon_w,
               wind=wind)
コード例 #10
0
def make_images(dpath, path, dt=datetime(2019, 5, 1, 0, 0, 0), interval=31):
    '''Make one daily-average GOMOFS bottom-temperature image per day.

    dpath: the path of the pickled dictionary storing telemetered data
    path: directory used to store the images
    dt: start time
    interval: how many days to make
    '''
    with open(dpath, 'rb') as fp:  # load the observation data
        telemetered_dict = pickle.load(fp)
    for j in range(interval):  # loop over every day
        dtime = dt + timedelta(days=j)
        print(dtime)
        count = 0  # how many 3-hourly files loaded successfully
        m_temp = None  # running sum of the temperature field
        for i in range(0, 24, 3):  # every day has 8 files
            ntime = dtime + timedelta(hours=i)
            url = get_gomofs_url(ntime)
            while not zl.isConnected(address=url):  # check the internet
                print('check the website is well or internet is connected?')
                time.sleep(5)
            # reset per file: the original left skip=1 set after one missing
            # file and silently dropped every remaining file of the day
            skip = 0
            while True:  # load data, retrying transient failures
                try:
                    nc = NetCDFFile(url)
                    lons = nc.variables['lon_rho'][:]
                    lats = nc.variables['lat_rho'][:]
                    temps = nc.variables['temp']
                    depth = nc.variables['h'][:]
                    break
                except KeyboardInterrupt:
                    sys.exit()
                except OSError:
                    if zl.isConnected(address=url):
                        print(str(url) + ': file not exit.')
                        skip = 1
                        break
                except Exception:
                    print('reread data:' + str(url))
            if skip == 1:  # this file does not exist; try the next hour
                continue
            # the original keyed the accumulator on i == 0, which raised
            # NameError when the 00Z file was missing but a later one loaded
            if m_temp is None:
                m_temp = temps[0, 0]
            else:
                m_temp = m_temp + temps[0, 0]
            count += 1
        if count == 0:  # no files loaded at all: avoid dividing by zero
            continue
        m_temp = m_temp / float(count)  # daily average
        ntime = dtime
        time_str = ntime.strftime('%Y-%m-%d')
        temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
        Year = str(ntime.year)
        Month = str(ntime.month)
        Day = str(ntime.day)
        slons, slats = [], []
        try:
            day_df = telemetered_dict[Year][Month][Day]
            for k in day_df.index:
                slons.append(day_df['lon'].iloc[k])
                slats.append(day_df['lat'].iloc[k])
        except (KeyError, IndexError):  # no telemetered obs for this day
            slons, slats = [], []
        plot(lons, lats, slons, slats, temp, depth, time_str, path)
def temp_min_max(model_name,
                 dt=datetime(2019, 5, 1, 0, 0, 0),
                 interval=31,
                 area='OOI'):
    '''Scan hourly DOPPIO fields and return (Min_temp, Max_temp) in Fahrenheit.

    model_name: name of model ('DOPPIO' or 'GOMOFS'; both scan the DOPPIO
        fields here — GOMOFS is approximated as the DOPPIO max plus 3 F)
    dt: start time (local); converted once to UTC
    interval: how many days to scan
    area: limited region you want to look at (passed to get_limited_gbox)
    '''
    temp_list = []  # per-hour min and max values collected over the scan
    # the original test "model_name == 'DOPPIO' or 'GOMOFS'" was always True
    if model_name in ('DOPPIO', 'GOMOFS'):
        # convert ONCE: the original converted inside the loop, re-shifting
        # the already-converted time by the UTC offset every hour
        dt = local2utc(dt)
        for j in range(interval * 24):
            dtime = dt + timedelta(hours=j)
            url = get_doppio_url(dtime)
            while not zl.isConnected(address=url):
                print('check the website is well or internet is connected?')
                time.sleep(5)
            skip = 0
            while True:
                try:
                    nc = NetCDFFile(url)
                    lons = nc.variables['lon_rho'][:]
                    lats = nc.variables['lat_rho'][:]
                    temps = nc.variables['temp']
                    i0, i1, j0, j1 = get_limited_gbox(area, lon=lons, lat=lats)
                    break
                except RuntimeError:
                    print(str(url) + ': need reread')
                except OSError:
                    if zl.isConnected(address=url):
                        print(str(url) + ': file not exit.')
                        skip = 1
                        break
                except KeyboardInterrupt:
                    sys.exit()
            if skip == 1:
                continue
            # hour-of-day record; level 0 is the bottom, -1 the surface
            m_temp = temps[np.mod(j, 24), 0]
            temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
            temp_F = temp[j0:j1, i0:i1]
            valid = temp_F.data[~np.isnan(temp_F.data)]
            temp_list.append(int(min(valid)))
            temp_list.append(int(max(valid)))
    if not temp_list:  # nothing could be read: fail with a clear message
        raise ValueError('temp_min_max: no model data could be read for '
                         + str(model_name))
    Min_temp = min(temp_list)
    # GOMOFS runs warmer than DOPPIO, so pad DOPPIO's max by 3 F to get an
    # upper bound that also covers GOMOFS
    Max_temp = max(temp_list) + 3.0
    return Min_temp, Max_temp
コード例 #12
0
def draw_map(raw_dict,
             tele_dict,
             name,
             start_time_local,
             end_time_local,
             path_picture_save,
             record_file,
             dpi=300):
    """Draw a map of the raw-file and telemetered positions for one vessel.

    raw_dict/tele_dict: frames with 'mean_lat'/'mean_lon' columns
    name: vessel name (used in the title and output path)
    start_time_local/end_time_local: period covered (title and filename)
    path_picture_save: root directory; image goes to <root>/picture/<name>/
    record_file: mapping with 'min_lat'/'max_lat'/'min_lon'/'max_lon'
    dpi: resolution of the saved figure
    """
    # blank canvas with the vessel name and time range as titles
    fig = plt.figure(figsize=(8, 8.5))
    fig.suptitle('F/V ' + name, fontsize=24, fontweight='bold')
    ax = fig.add_axes([0.02, 0.02, 0.9, 0.9])
    ax.set_title(
        start_time_local.strftime('%Y-%m-%d') + '-' +
        end_time_local.strftime('%Y-%m-%d'))
    ax.axes.title.set_size(16)

    min_lat = record_file['min_lat']
    max_lat = record_file['max_lat']
    max_lon = record_file['max_lon']
    min_lon = record_file['min_lon']
    # keep the longitude span at least 2 degrees
    if (max_lon - min_lon) <= 2:
        max_lon = 1 - (max_lon - min_lon) / 2.0 + (max_lon + min_lon) / 2.0
        min_lon = max_lon - 2
    # pad the narrower dimension so the map has equal width and height
    if (max_lon - min_lon) > (max_lat - min_lat):
        pad = ((max_lon - min_lon) - (max_lat - min_lat)) / 2.0
        max_lat = max_lat + pad
        min_lat = min_lat - pad
    else:
        pad = ((max_lat - min_lat) - (max_lon - min_lon)) / 2.0
        max_lon = max_lon + pad
        min_lon = min_lon - pad

    while (not zl.isConnected()):  # wait until the basemap service is reachable
        time.sleep(120)
    try:
        service = 'Ocean_Basemap'
        xpixels = 5000
        # background map ("bmap" rather than shadowing the builtin "map")
        bmap = Basemap(projection='mill', llcrnrlat=min_lat - 0.1,
                       urcrnrlat=max_lat + 0.1, llcrnrlon=min_lon - 0.1,
                       urcrnrlon=max_lon + 0.1, resolution='f',
                       lat_0=(record_file['min_lat'] + record_file['max_lat']) / 2.0,
                       lon_0=(record_file['max_lon'] + record_file['min_lon']) / 2.0,
                       epsg=4269)
        bmap.arcgisimage(service=service, xpixels=xpixels, verbose=False)
        if max_lat - min_lat >= 3:
            step = int((max_lat - min_lat) / 5.0 * 10) / 10.0
        else:
            step = 0.5
        # draw parallels and meridians
        bmap.drawparallels(np.arange(0., 90.0, step),
                           labels=[0, 1, 0, 0],
                           fontsize=10,
                           linewidth=0.0)
        bmap.drawmeridians(np.arange(180., 360., step),
                           labels=[0, 0, 0, 1],
                           fontsize=10,
                           linewidth=0.0)

        # scatter whichever datasets are non-empty.  The original tested
        # len(raw_dict) twice, so "raw only" runs crashed while plotting the
        # empty tele_dict, the broad except fired, and no image was saved.
        if len(raw_dict) > 0:
            raw_lat, raw_lon = to_list(raw_dict['mean_lat'],
                                       raw_dict['mean_lon'])
            raw_x, raw_y = bmap(raw_lon, raw_lat)
            ax.plot(raw_x,
                    raw_y,
                    'ro',
                    markersize=6,
                    alpha=0.5,
                    label='raw_data')
        if len(tele_dict) > 0:
            tele_lat, tele_lon = to_list(tele_dict['mean_lat'],
                                         tele_dict['mean_lon'])
            tele_x, tele_y = bmap(tele_lon, tele_lat)
            ax.plot(tele_x,
                    tele_y,
                    'b*',
                    markersize=6,
                    alpha=0.5,
                    label='telemetry')
        if len(raw_dict) > 0 or len(tele_dict) > 0:
            ax.legend()

        if not os.path.exists(path_picture_save + '/picture/' + name + '/'):
            os.makedirs(path_picture_save + '/picture/' + name + '/')
        plt.savefig(path_picture_save + '/picture/' + name + '/' + 'location' +
                    '_' + start_time_local.strftime('%Y-%m-%d') + '_' +
                    end_time_local.strftime('%Y-%m-%d') + '.png',
                    dpi=dpi)
        print(name + ' finished draw!')
    except Exception:
        # best-effort: any failure (service down, bad coordinates) just
        # flags this vessel for a redraw; KeyboardInterrupt still propagates
        print(name + ' need redraw!')
コード例 #13
0
def make_images(model_name,
                dpath,
                path,
                dt=datetime(2019, 5, 1, 0, 0, 0),
                interval=31,
                Min_temp=0,
                Max_temp=10,
                area='OOI'):
    '''Make bottom-temperature images for DOPPIO (hourly) or GOMOFS (3-hourly).

    model_name: 'DOPPIO' or 'GOMOFS'
    dpath: the path of the pickled dictionary storing telemetered data
    path: directory used to store the images
    dt: start time
    interval: how many days to make
    Min_temp, Max_temp: fixed color-scale limits (Fahrenheit) for plotit
    area: named limited region passed through to plotit
    '''
    with open(dpath, 'rb') as fp:
        telemetered_dict = pickle.load(fp)
    if model_name == 'DOPPIO':
        for j in range(interval * 24):  # one frame per hour
            dtime = dt + timedelta(hours=j)
            print(dtime)
            url = get_doppio_url(dtime)
            while not zl.isConnected(address=url):
                print('check the website is well or internet is connected?')
                time.sleep(5)
            skip = 0
            while True:
                try:
                    nc = NetCDFFile(url)
                    lons = nc.variables['lon_rho'][:]
                    lats = nc.variables['lat_rho'][:]
                    temps = nc.variables['temp']
                    depth = nc.variables['h'][:]
                    break
                except RuntimeError:
                    print(str(url) + ': need reread')
                except OSError:
                    if zl.isConnected(address=url):
                        print(str(url) + ': file not exit.')
                        skip = 1
                        break
                except KeyboardInterrupt:
                    sys.exit()
            if skip == 1:
                continue
            # hour-of-day record; level 0 is the bottom, -1 the surface
            m_temp = temps[np.mod(j, 24), 0]
            ntime = dtime
            time_str = ntime.strftime('%Y-%m-%d-%H')
            temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
            Year = str(ntime.year)
            Month = str(ntime.month)
            Day = str(ntime.day)
            slons, slats = [], []
            try:
                day_df = telemetered_dict[Year][Month][Day]
                for i in day_df.index:
                    slons.append(day_df['lon'].iloc[i])
                    slats.append(day_df['lat'].iloc[i])
            except (KeyError, IndexError):  # no telemetered obs that day
                slons, slats = [], []
            plotit(model_name, lons, lats, slons, slats, temp, depth,
                   time_str, path, 80, Min_temp, Max_temp, area)
    if model_name == 'GOMOFS':
        for j in range(interval):  # loop every day's files
            dtime = dt + timedelta(days=j)
            print(dtime)
            for i in range(0, 24, 3):  # every day has 8 files
                ntime = dtime + timedelta(hours=i)
                url = get_gomofs_url(ntime)
                print(url)
                while not zl.isConnected(address=url):  # check the internet
                    print(
                        'check the website is well or internet is connected?')
                    time.sleep(5)
                # reset per file: the original set skip once per day, so one
                # missing file silently dropped the rest of that day's images
                skip = 0
                while True:  # load data, retrying transient failures
                    try:
                        nc = NetCDFFile(url)
                        lons = nc.variables['lon_rho'][:]
                        lats = nc.variables['lat_rho'][:]
                        temps = nc.variables['temp']
                        depth = nc.variables['h'][:]
                        break
                    except KeyboardInterrupt:
                        sys.exit()
                    except OSError:
                        if zl.isConnected(address=url):
                            print(str(url) + ': file not exit.')
                            skip = 1
                            break
                    except Exception:
                        print('reread data:' + str(url))
                if skip == 1:  # file does not exist; move to the next hour
                    continue
                m_temp = temps[0, 0]  # first record, bottom level
                time_str = ntime.strftime('%Y-%m-%d-%H')
                temp = m_temp * 1.8 + 32  # Celsius -> Fahrenheit
                Year = str(ntime.year)
                Month = str(ntime.month)
                Day = str(ntime.day)
                slons, slats = [], []
                try:
                    day_df = telemetered_dict[Year][Month][Day]
                    # distinct loop variable: the original reused i, which
                    # clobbered the hour-of-day index mid-loop
                    for k in day_df.index:
                        slons.append(day_df['lon'].iloc[k])
                        slats.append(day_df['lat'].iloc[k])
                except (KeyError, IndexError):
                    slons, slats = [], []
                plotit(model_name, lons, lats, slons, slats, temp, depth,
                       time_str, path, 80, Min_temp, Max_temp, area)