Example #1
def get_neracoos_wind_data(url,id_s,id_e,id_max_url): #get wind data from neracoos.
         url1=url+'wind_speed[0:1:'+id_max_url+'][0:1:0][0:1:0][0:1:0],wind_direction[0:1:'+id_max_url+'][0:1:0][0:1:0][0:1:0]'
         database_s=open_url(url1)['wind_speed'][int(id_s):int(id_e)]
         database_d=open_url(url1)['wind_direction'][int(id_s):int(id_e)]
         #lat=database_s['lat']
         #lat=round(lat[0],2)
         #lon=database_s['lon']
         #lon=round(lon[0],2)
         depth=database_s['wind_depth']
         period=database_s['time']
         speed=database_s['wind_speed']
         speed=speed[0:].tolist()
         period=num2date(period[0:]+date2num(dt.datetime(1858, 11, 17, 0, 0)))
         direction=database_d['wind_direction']
         direction=direction[0:].tolist()
         period_str,wind_all=[],[]
         for i in range(len(period)): #convert format to list
             period_str.append(dt.datetime.strftime(period[i],'%Y-%m-%d-%H-%M'))
             wind_all.append([round(depth[0],1),round(speed[i][0][0][0],2),round(direction[i][0][0][0],2)])
         wind,direction=[],[] # figure out bad data and delete
         for i in range(len(wind_all)):
           wind.append(wind_all[i][1])
           direction.append(wind_all[i][2])
         id_bad=ml.find((np.array(wind)>300) | (np.array(wind)<-1) | (np.array(direction)<0)| (np.array(direction)>360))
         #print id_bad
         id_bad=list(id_bad)
         id_bad.reverse()
         for m in id_bad:
            del period_str[m]
            del wind_all[m]
         return period_str,wind_all
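A minimal usage sketch for get_neracoos_wind_data; the URL, index strings, and maximum index below are placeholders, not a verified NERACOOS endpoint.
# Hypothetical call: id_s, id_e and id_max_url are string indices into the OPeNDAP time axis
base_url = 'http://www.neracoos.org/thredds/dodsC/SOME_BUOY.nc?'  # placeholder URL
times, wind = get_neracoos_wind_data(base_url, '0', '100', '99999')
# times holds 'YYYY-mm-dd-HH-MM' strings; each wind row is [depth, speed, direction]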
def getcodar_ctl_id(model_option,url,datetime_wanted):
    if model_option=='1':
        dtime=open_url(url+'?time')
        dd=dtime['time']  
        #print "This option has data from "+str(num2date(dd[0]+date2num(datetime.datetime(2009, 1, 1, 0, 0))))+" to "+str(num2date(dd[-1]+date2num(datetime.datetime(2009, 1, 1, 0, 0))))           
        print 'This option has data from '+dd[0].strftime("%B %d, %Y")+' to '+dd[-1].strftime("%B %d, %Y")
        id=datetime_wanted-date2num(datetime.datetime(2009, 1, 1, 0, 0))
        id=str(int(id))
    elif model_option=='6':
        dtime=open_url(url+'?time')
        dd=dtime['time']
        ddd=[]
        #print 'This option has data from '+dd[0].strftime("%B %d, %Y")+' to '+dd[-1] .strftime("%B %d, %Y")
        id=datetime_wanted-date2num(datetime.datetime(2006, 1, 1, 0, 0))
        id=str(int(id))
    else:
        dtime=open_url(url+'?time')
        dd=dtime['time']
        ddd=[]
        for i in list(dtime['time']):
            i=round(i,7)
            ddd.append(i)
        
        #print "This option has data from "+str(num2date(dd[0]+date2num(datetime.datetime(2001, 1, 1, 0, 0))))+" to "+str(num2date(dd[-1]+date2num(datetime.datetime(2001, 1, 1, 0, 0)))) 
        #print 'This option has data from '+num2date(dd[0]).strftime("%B %d, %Y")+' to '+num2date(dd[-1]).strftime("%B %d, %Y")          
        id=ml.find(np.array(ddd)==round(datetime_wanted-date2num(datetime.datetime(2001, 1, 1, 0, 0)),7))
        for i in id:
          id=str(i) 
        #print 'codar id is  '+id
    return id    
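A hedged usage sketch for getcodar_ctl_id; the URL is a placeholder, and datetime_wanted is a matplotlib date number, matching the date2num arithmetic above.
from matplotlib.dates import date2num
import datetime
wanted = date2num(datetime.datetime(2010, 6, 1, 0, 0))  # day to look up; option '1' counts days since 2009-01-01
codar_id = getcodar_ctl_id('1', 'http://example.com/thredds/dodsC/codar.nc', wanted)  # placeholder URL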
Example #3
def getsst(second):
    #get the index of second from the url
    time_tuple = time.gmtime(second)#calculate the year from the seconds
    year = time_tuple.tm_year
    if year < 1999 or year > 2010:
        print 'Sorry there might not be available data for this year'
    # WARNING: As of Jan 2012, this data is only stored for 1999-2010
    url1 = 'http://tashtego.marine.rutgers.edu:8080/thredds/dodsC/cool/avhrr/bigbight/' + str(year) + '?time[0:1:3269]'
    dataset1 = open_url(url1)
    times = list(dataset1['time'])
    # find the nearest image index
    index_second = int(round(np.interp(second, times, range(len(times)))))

    #get sst, time, lat, lon from the url
    url = 'http://tashtego.marine.rutgers.edu:8080/thredds/dodsC/cool/avhrr/bigbight/' + \
          str(year) + '?lat[0:1:1221],lon[0:1:1182],' + \
          'mcsst[' + str(index_second) + ':1:' + str(index_second) + \
          '][0:1:1221][0:1:1182]' + \
          ',time[' + str(index_second) + \
          ':1:' + str(index_second) + ']'
    try:
        dataset = open_url(url)
    except:
        print "Please check your url! Cannot access dataset."
        sys.exit(0)

    sst = dataset['mcsst'].mcsst
    time1 = dataset['time']
    lat = dataset['lat']
    lon = dataset['lon']
    return sst, time1, lat, lon
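A short usage sketch for getsst, assuming the Rutgers THREDDS server above is reachable; the timestamp is only illustrative.
import calendar, time
second = calendar.timegm(time.strptime('2005-07-01', '%Y-%m-%d'))  # epoch seconds for a date in 1999-2010
sst, t, lat, lon = getsst(second)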
    def _load(self, filename, elements, debug=False):
        """Loads data from *.nc, *.p and OpenDap url"""
        # Loading pickle file
        if filename.endswith(".p"):
            f = open(filename, "rb")
            data = pkl.load(f)
            self._origin_file = data["Origin"]
            self.History = data["History"]
            if debug:
                print "Turn keys into attributs"
            self.Grid = ObjectFromDict(data["Grid"])
            self.Variables = ObjectFromDict(data["Variables"])
            try:
                if self._origin_file.startswith("http"):
                    # Look for file through OpenDAP server
                    print "Retrieving data through OpenDap server..."
                    self.Data = open_url(data["Origin"])
                    # Create fake attribute to be consistent with the rest of the code
                    self.Data.variables = self.Data
                else:
                    self.Data = self._load_nc(data["Origin"])
            except:  # TR: need to precise the type of error here
                print "the original *.nc file has not been found"
                pass
        # Loading netcdf file
        elif filename.endswith(".nc"):
            if filename.startswith("http"):
                # Look for file through OpenDAP server
                print "Retrieving data through OpenDap server..."
                self.Data = open_url(filename)
                # Create fake attribute to be consistent with the rest of the code
                self.Data.variables = self.Data
            else:
                # Look for file locally
                print "Retrieving data from " + filename + " ..."
                self.Data = self._load_nc(filename)
            # Metadata
            text = "Created from " + filename
            self._origin_file = filename
            self.History = [text]
            # Calling sub-class
            print "Initialisation..."
            try:
                self.Grid = _load_grid(self.Data, elements, self.History, debug=self._debug)
                self.Variables = _load_var(self.Data, elements, self.Grid, self.History, debug=self._debug)

            except MemoryError:
                print "---Data too large for machine memory---"
                print "Tip: use ax or tx during class initialisation"
                print "---  to use partial data"
                raise

        elif filename.endswith(".mat"):
            raise PyseidonError("---Functionality not yet implemented---")
        else:
            raise PyseidonError("---Wrong file format---")
Example #5
def get_neracoos_current_data(url,id_s,id_e,id_max_url): #get current data from neracoos.
         url1=url+'current_speed[0:1:'+id_max_url+'][0:1:0][0:1:0][0:1:0],current_direction[0:1:'+id_max_url+'][0:1:0][0:1:0][0:1:0],current_u[0:1:'+id_max_url+'][0:1:0][0:1:0][0:1:0],current_v[0:1:'+id_max_url+'][0:1:0][0:1:0][0:1:0]'
         database_s=open_url(url1)['current_speed'][int(id_s):int(id_e)] 
         database_d=open_url(url1)['current_direction'][int(id_s):int(id_e)]
         database_u=open_url(url1)['current_u'][int(id_s):int(id_e)]
         database_v=open_url(url1)['current_v'][int(id_s):int(id_e)]
         #lat=database_s['lat']
         #lat=round(lat[0],2)
         #lon=database_s['lon']
         #lon=round(lon[0],2)
         
         period=database_s['time']
         speed=database_s['current_speed']
         speed=speed[0:].tolist()
         period=num2date(period[0:]+date2num(dt.datetime(1858, 11, 17, 0, 0)))
         direction=database_d['current_direction']
         direction=direction[0:].tolist()
         u=database_u['current_u']
         u=u[0:].tolist()    
         v=database_v['current_v']
         v=v[0:].tolist() 
         period_str,current_all=[],[]
         for i in range(len(period)): #convert format to list
             period_str.append(dt.datetime.strftime(period[i],'%Y-%m-%d-%H-%M'))
             current_all.append([round(speed[i][0][0][0],2),round(direction[i][0][0][0],2),round(u[i][0][0][0],2),round(v[i][0][0][0],2)])
         current,u,v,direction=[],[],[],[]# figure out bad data and delete
         for i in range(len(current_all)):
             current.append(current_all[i][0])
             direction.append(current_all[i][1])
             u.append(current_all[i][2])
             v.append(current_all[i][3])
         id_bad=ml.find((np.array(current)>200) | (np.array(current)<-1)|(np.array(direction)<0)| (np.array(direction)>360)|(np.array(u)<-200)| (np.array(u)>200)|(np.array(v)<-200)| (np.array(v)>200))
         #print id_bad
         id_bad=list(id_bad)
         id_bad.reverse()
         for m in id_bad:
            del period_str[m]
            del current_all[m]         
         return period_str,current_all
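As with the wind reader above, a hypothetical call with placeholder arguments.
times, current = get_neracoos_current_data('http://www.neracoos.org/thredds/dodsC/SOME_BUOY.nc?', '0', '100', '99999')  # placeholder URL and indices
# each current row is [speed, direction, u, v], aligned with the time strings in times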
Example #6
def station_info(station_id):
  from pydap.client import open_url
  url1 = 'http://dods.ndbc.noaa.gov/thredds/dodsC/data/stdmet/'+station_id+'/'+station_id+'h9999.nc'
  #url1 = 'http://dods.ndbc.noaa.gov/thredds/dodsC/data/ocean/'+station_id+'/'+station_id+'o9999.nc' #Ocean Dta
  url2 = 'http://dods.ndbc.noaa.gov/thredds/dodsC/data/stdmet/'+station_id+'/'+station_id+'.ncml'
  #url2=''
  try:
    dataset = open_url(url1)
  except:
    try:
      dataset = open_url(url2)
    except:
      print 'OPENDAP url not found: ' + station_id  
      return False
  return station_info_details(station_id,dataset)      
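A usage sketch for station_info; 44013 is used here only as an illustrative NDBC buoy id, and the function returns False if neither OPeNDAP URL can be opened.
info = station_info('44013')
if info is False:
    print 'station not found'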
Example #7
def test_timeout(sequence_type_data):
    """Test that timeout works properly"""
    TestDataset = DatasetType('Test')
    TestDataset['sequence'] = sequence_type_data
    TestDataset['byte'] = BaseType('byte', 0)
    application = BaseHandler(TestDataset)

    # Explicitly add latency on the devel server
    # to guarantee that it times out
    def wrap_mocker(func):
        def mock_add_latency(*args, **kwargs):
            time.sleep(1e-1)
            return func(*args, **kwargs)
        return mock_add_latency

    application = wrap_mocker(application)
    with LocalTestServer(application) as server:
        url = ("http://0.0.0.0:%s/" % server.port)

        # test open_url
        assert open_url(url) == TestDataset
        with pytest.raises(HTTPError) as e:
            open_url(url, timeout=1e-5)
        assert 'Timeout' in str(e)

        # test open_dods
        with pytest.raises(HTTPError):
            open_dods(url + '.dods?sequence', timeout=1e-5)
        assert 'Timeout' in str(e)

        # test sequenceproxy
        dataset = open_url(url)
        seq = dataset['sequence']
        assert isinstance(seq.data, SequenceProxy)
        # Change the timeout of the sequence proxy:
        seq.data.timeout = 1e-5
        with pytest.raises(HTTPError) as e:
            next(seq.iterdata())
        assert 'Timeout' in str(e)

        # test baseproxy:
        dat = dataset['byte']
        assert isinstance(dat.data, BaseProxy)
        # Change the timeout of the baseprox proxy:
        dat.data.timeout = 1e-5
        with pytest.raises(HTTPError) as e:
            dat[:]
        assert 'Timeout' in str(e)
Example #8
def getemolt_sensor(mindtime1,maxdtime1,i_mindepth,i_maxdepth,site2,mindtime,maxdtime):
	  #According to the conditions to select data from "emolt_sensor"
	 
	  url2="http://gisweb.wh.whoi.edu:8080/dods/whoi/emolt_sensor?emolt_sensor.SITE,emolt_sensor.TIME_LOCAL,emolt_sensor.YRDAY0_LOCAL,emolt_sensor.TEMP,emolt_sensor.DEPTH_I&emolt_sensor.TIME_LOCAL>="+str(mindtime1)+"&emolt_sensor.TIME_LOCAL<="\
        +str(maxdtime1)+"&emolt_sensor.DEPTH_I>="+str(i_mindepth)+"&emolt_sensor.DEPTH_I<="+str(i_maxdepth)+site2
	  try:   
	           dataset1=open_url(url2)
	  except:
	           print 'Sorry, '+url2+' not available' 
	           sys.exit(0)
	  emolt_sensor=dataset1['emolt_sensor']
	  try:   
	          sites2=list(emolt_sensor['SITE'])
	  except:
	          print "'Sorry, According to your input, here are no value. please check it! ' "
	          sys.exit(0) 
	  #sites2=list(emolt_sensor['SITE'])
	  time=list(emolt_sensor['TIME_LOCAL'])
	  yrday0=list(emolt_sensor['YRDAY0_LOCAL'])
	  temp=list(emolt_sensor['TEMP'])
	  depth1=list(emolt_sensor['DEPTH_I'])
	
	
	  time1,temp1,yrday01,sites1,depth=[],[],[],[],[]
	  for m in range(len(time)):
	      #if mindtime<=dt.datetime.strptime(str(time[m]),'%Y-%m-%d')<=maxdtime:
	      if date2num(mindtime)<=yrday0[m]%1+date2num(dt.datetime.strptime(str(time[m]),'%Y-%m-%d'))<=date2num(maxdtime):
	      #if str(time[m])=='2012-01-01':
	        temp1.append(temp[m])
	        yrday01.append(yrday0[m]%1+date2num(dt.datetime.strptime(str(time[m]),'%Y-%m-%d')))
	        sites1.append(sites2[m])
	        time1.append(date2num(dt.datetime.strptime(str(time[m]),'%Y-%m-%d'))) 
	        depth.append(depth1[m])
	  #print len(temp1)     
	  return time1,yrday01,temp1,sites1,depth,
Example #9
    def test_variable_esgf_query(self):
        assert(os.environ.get('OPENID_ESGF'))
        assert(os.environ.get('PASSWORD_ESGF'))
        session = esgf.setup_session(os.environ.get('OPENID_ESGF'),
                                     os.environ.get('PASSWORD_ESGF'),
                                     check_url=self.url)
        # Ensure authentication:
        res = pydap.net.follow_redirect(self.test_url, session=session)
        assert(res.status_code == 200)

        dataset = open_url(self.url, session=session, output_grid=False)
        data = dataset['pr'][0, 200:205, 100:105]
        expected_data = [[[5.23546005e-05,  5.48864300e-05,
                           5.23546005e-05,  6.23914966e-05,
                           6.26627589e-05],
                          [5.45247385e-05,  5.67853021e-05,
                           5.90458621e-05,  6.51041701e-05,
                           6.23914966e-05],
                          [5.57906533e-05,  5.84129048e-05,
                           6.37478297e-05,  5.99500854e-05,
                           5.85033267e-05],
                          [5.44343166e-05,  5.45247385e-05,
                           5.60619228e-05,  5.58810752e-05,
                           4.91898136e-05],
                          [5.09982638e-05,  4.77430549e-05,
                           4.97323490e-05,  5.43438946e-05,
                           5.26258664e-05]]]
        assert(np.isclose(data, expected_data).all())
Example #10
 def wrapper(*args, **kwargs):
     url, varname, bbox, dt = fetch(*args, **kwargs)
     ds = open_url(url)
     for var in ds.keys():
         if var.lower().startswith("lon") or var.lower() == "x":
             lonvar = var
         if var.lower().startswith("lat") or var.lower() == "y":
             latvar = var
         if var.lower().startswith("time") or var.lower() == "t":
             timevar = var
     lat = ds[latvar][:].data
     lon = ds[lonvar][:].data
     lon[lon > 180] -= 360
     res = abs(lat[0]-lat[1])  # assume rectangular grid
     i1, i2, j1, j2 = datasets.spatialSubset(np.sort(lat)[::-1], np.sort(lon), res, bbox)
     t = ds[timevar]
     tt = netcdf4.num2date(t[:].data, units=t.units)
     ti = [tj for tj in range(len(tt)) if resetDatetime(tt[tj]) >= dt[0] and resetDatetime(tt[tj]) <= dt[1]]
     if len(ti) > 0:
         lati = np.argsort(lat)[::-1][i1:i2]
         loni = np.argsort(lon)[j1:j2]
         if len(ds[varname].data[0].shape) > 3:
             data = ds[varname].data[0][ti[0]:ti[-1]+1, 0, lati[0]:lati[-1]+1, loni[0]:loni[-1]+1]
         else:
             data = ds[varname].data[0][ti[0]:ti[-1]+1, 0, lati[0]:lati[-1]+1, loni[0]:loni[-1]+1]
         dt = tt[ti]
     else:
         data = None
         dt = None
     lat = np.sort(lat)[::-1][i1:i2]
     lon = np.sort(lon)[j1:j2]
     return data, lat, lon, dt
 def test_parse_constraints_boolean(self):
     test_data = np.asanyarray([True,False,True,True,False])
     self.cov.set_parameter_values('boolean',value=test_data)
     dataset = open_url(self.request_url)
     result = []
     result = np.asanyarray([d for d in dataset['data']['boolean']])
     self.assertTrue(np.array_equal(result, test_data))
Example #12
 def load(self,fldname, **kwargs):
     """ Load velocity fields for a given day"""
     self._timeparams(**kwargs)
     if fldname == "uv":
         self.load('u',**kwargs)
         self.load('v',**kwargs)
         self.uv = np.sqrt(self.u[:,1:,:]**2 + self.v[:,:,1:]**2)
         return
     
     if self.opendap:
         tpos = int(self.jd) - 714800
         self.k1   = kwargs.get("k1", getattr(self, "k1", self.klev)) 
         self.k2   = kwargs.get("k2", getattr(self, "k2", self.k1+1))
         dapH = open_url(self.dapurl)
         fld  = dapH[fldname][tpos,self.k1:self.k2,
                              self.j1:self.j2,self.i1:self.i2] 
     else:
         filename = self.jd2filename(self.jd)
         if not os.path.isfile(filename):
             print "File missing"
             url = urlparse.urljoin(self.dataurl,os.path.basename(filename))
             self.retrive_file(url, filename)
         with Dataset(filename) as nc:
             nc.set_auto_mask(False)
             fld =  nc.variables[fldname][:,self.k1:self.k2,
                                          self.j1:self.j2,
                                          self.i1:self.i2].copy()
             self.ssh =  np.squeeze(nc.variables['zeta'][:])
             self.zlev = ((self.depth + self.ssh)[np.newaxis,:,:] *
                           self.Cs_r[:,np.newaxis,np.newaxis])
     
     fld[fld>9999] = np.nan
     setattr(self, fldname, np.squeeze(fld))
Example #13
 def test_lazy_evaluation_getattr(self):
     """Test that the dataset is only loaded when accessed."""
     original = open_url('/', application=self.app)
     dataset = original.functions.mean(original.SimpleGrid, 0)
     self.assertIsNone(dataset.dataset)
     dataset.SimpleGrid
     self.assertIsNotNone(dataset.dataset)
    def test_Functions(self):
        dataset = open_url('http://localhost:8001/')
        rain = dataset.rain
        self.assertEqual(rain.rain.shape, (2, 3))

        functions = Functions('http://localhost:8001/')

        dataset = functions.mean(rain, 0)
        self.assertEqual(dataset.rain.rain.shape, (3,))
        np.testing.assert_array_equal(dataset.rain.rain.data,
            np.array([1.5, 2.5, 3.5]))
        dataset = functions.mean(rain, 0)
        self.assertEqual(dataset['rain']['rain'].shape, (3,))
        np.testing.assert_array_equal(dataset.rain.rain.data,
            np.array([1.5, 2.5, 3.5]))

        dataset = functions.mean(rain, 1)
        self.assertEqual(dataset.rain.rain.shape, (2,))
        np.testing.assert_array_equal(dataset.rain.rain.data,
            np.array([1.0, 4.0]))
        dataset = functions.mean(rain, 1)
        self.assertEqual(dataset['rain']['rain'].shape, (2,))
        np.testing.assert_array_equal(dataset.rain.rain.data,
            np.array([1.0, 4.0]))

        dataset = functions.mean(functions.mean(rain, 0), 0)
        self.assertEqual(dataset['rain']['rain'].shape, ())
        np.testing.assert_array_equal(dataset.rain.rain.data,
            np.array(2.5))
Example #15
 def fetch(cls, asset, tile, date):
     """ Get this asset for this tile and date (using OpenDap service) """
     url = cls._assets[asset].get('url', '') % (date.year, tile, date.year)
     source = cls._assets[asset]['source'] 
     loc = "%s/%s" % (url, source)
     print loc
     dataset = open_url(loc)
     x0 = dataset['x'].data[0] - 500.0
     y0 = dataset['y'].data[0] + 500.0
     day = date.timetuple().tm_yday
     iday = day - 1
     data = np.array(dataset[asset][iday, :, :]).squeeze().astype('float32')
     ysz, xsz = data.shape
     description = cls._assets[asset]['description']
     meta = {'ASSET': asset, 'TILE': tile, 'DATE': str(date.date()), 'DESCRIPTION': description}
     sday = str(day).zfill(3)
     fout = os.path.join(cls.Repository.path('stage'), "daymet_%s_%s_%4d%s.tif" % (asset, tile, date.year, sday))
     geo = [float(x0), cls._defaultresolution[0], 0.0, float(y0), 0.0, -cls._defaultresolution[1]]
     geo = np.array(geo).astype('double')
     dtype = create_datatype(data.dtype)
     imgout = gippy.GeoImage(fout, xsz, ysz, 1, dtype)
     imgout.SetBandName(asset, 1)
     imgout.SetNoData(-9999.)
     imgout.SetProjection(PROJ)
     imgout.SetAffine(geo)
     imgout[0].Write(data)    
Example #16
    def __init__(self, url):
        """ Constructor, reads in a file from the url provided
        @param url: OpenNDAP url for the NetCDF file
        """

        self.url = url

        # Let's open the file using OpenNDAP
        f = open_url(url)

        # Pull out the attributes, and column names
        self.attributes = f.attributes['NC_GLOBAL']
        self.columns = f.keys()

        if 'time' in f:
            # There's a time dimension in this dataset, so let's grab the possible
            # values out
            try:
                time_units = f['time'].attributes['units']
                self.time_values = [parse(t, time_units) for t in f['time']]
            except IndexError:

                # Not a parseable time format
                pass
        del f
Example #17
def load(url, variable):
    '''Load a Dataset from an OpenDAP URL

    :param url: The OpenDAP URL for the dataset of interest.
    :type url: String
    :param variable: The name of the variable to read from the dataset.
    :type variable: String

    :returns: A Dataset object containing the dataset pointed to by the 
        OpenDAP URL.

    :raises: ServerError
    '''
    # Grab the dataset information and pull the appropriate variable
    d = open_url(url)
    dataset = d[variable]

    # Grab the lat, lon, and time variable names.
    # We assume the variable order is (time, lat, lon)
    dataset_dimensions = dataset.dimensions
    time = dataset_dimensions[0]
    lat = dataset_dimensions[1]
    lon = dataset_dimensions[2]

    # Time is given to us in some units since an epoch. We need to convert
    # these values to datetime objects. Note that we use the main object's
    # time object and not the dataset specific reference to it. We need to 
    # grab the 'units' from it and it fails on the dataset specific object.
    times = np.array(_convert_times_to_datetime(d[time]))

    lats = np.array(dataset[lat][:])
    lons = np.array(dataset[lon][:])
    values = np.array(dataset[:])

    return Dataset(lats, lons, times, values, variable)
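A hedged usage sketch for load(); the URL and variable name are placeholders, and the attribute names assume the returned Dataset exposes the lats/lons/times/values passed to its constructor above.
ds = load('http://example.com/thredds/dodsC/some_dataset.nc', 'tas')  # placeholder URL and variable
print(ds.lats.shape, ds.lons.shape, ds.times.shape, ds.values.shape)  # assumed attribute names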
Example #18
    def get_data_from_opendap(self, x, y, start=None, end=None):
        """
        Return list of dicts for data at x, y.

        Start, end are datetimes, and default to the first and last
        datetime in the file.
        """
        try:
            dataset = client.open_url(self.url)
        except ServerError:
            return []

        index_start = 0
        if start is not None:
            index_start = self.index(start)

        if end is None:
            index_end = self.timesteps - 1
        else:
            index_end = self.index(end)

        precipitation = dataset['precipitation']['precipitation']

        tuples = zip(
            iter(self._get_datetime_generator(start=index_start,
                                              end=index_end)),
            precipitation[y, x, index_start: index_end + 1][0, 0, :],
        )

        return [dict(unit='mm/5min', datetime=d, value=float(p))
                for d, p in tuples
                if not p == config.NODATAVALUE]
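A sketch of how the method above might be called, assuming an instance (here called store) of the enclosing class that provides self.url, self.index() and self.timesteps.
# rows = store.get_data_from_opendap(x=10, y=20)          # hypothetical instance and grid indices
# for row in rows:
#     print row['datetime'], row['value'], row['unit']    # one dict per non-NODATA timestep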
def read_opendap_index(date=None,domain=None):

	url = parse_url(date)
	print url[0]
	dataset = open_url(url[0])

	lats = dataset['lat'][:]
	lons = dataset['lon'][:]
	pres = dataset['pressure'][:]/100. #[hPa]

	lats_idx = np.where((lats>domain['latn'])&
						(lats<domain['latx']))[0]
	lons_idx = np.where((lons>360+domain['lonn'])&
						(lons<360+domain['lonx']))[0]

	if domain['preslvl'] is not None:
		pres_idx = np.where(pres==domain['preslvl'])[0][0]
		pres = pres[pres_idx]
	else:
		pres_idx = None


	last = lats_idx[0]
	laen = lats_idx[-1]+1
	lost = lons_idx[0]
	loen = lons_idx[-1]+1

	latsnew = lats[lats_idx]
	lonsnew = lons[lons_idx]-360

	index={'last':last,'laen':laen,'lost':lost,'loen':loen,'plvl':pres_idx}
	coords={'lats':latsnew,'lons':lonsnew,'pres':pres}

	return index,coords
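Illustrative inputs only for read_opendap_index; the date format depends on parse_url, and preslvl is compared in hPa after the /100 conversion above.
domain = {'latn': 30.0, 'latx': 45.0, 'lonn': -80.0, 'lonx': -60.0, 'preslvl': 500.0}
index, coords = read_opendap_index(date='2014-06-06', domain=domain)  # placeholder date string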
def get_coors(modelname, lo, la, lonc, latc, lon, lat, siglay, h, depth,startrecord, endrecord):
    if lo>90:
        [la,lo]=dm2dd(la,lo)
    print 'la, lo',la, lo
    latd,lond=[la],[lo]
    # kf,distanceF=nearlonlat(lonc,latc,lo,la) # nearest triangle center F - face
    # kv,distanceV=nearlonlat(lon,lat,lo,la)
    kf,distanceF = nearest_point_index(lo,la,lonc,latc,num=1)
    kv,distanceV = nearest_point_index(lo,la,lon,lat,num=1)
    kf = kf[0][0]
    kv = kv[0][0]
    print 'kf:', kf
    if h[kv] < 0:
        print 'Sorry, your position is on land, please try another point'
        sys.exit()
    depthtotal=siglay[:,kv]*h[kv]
    layer=np.argmin(abs(depthtotal-depth))
    for i in range(startrecord,endrecord):# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
############read the particular time model from website#########
        # print 'la, lo, i', la, lo, i
        timeurl='['+str(i)+':1:'+str(i)+']'
        uvposition=str([layer])+str([kf])
        data_want = ('u'+timeurl+uvposition, 'v'+timeurl+uvposition)
#        if urlname=="30yr":
#            url='http://www.smast.umassd.edu:8080/thredds/dodsC/fvcom/hindcasts/30yr_gom3?'+\
#                'Times'+timeurl+',u'+timeurl+uvposition+','+'v'+timeurl+uvposition
#        elif urlname == "GOM3":
#            url="http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM3_FORECAST.nc?"+\
#                'Times'+timeurl+',u'+timeurl+uvposition+','+'v'+timeurl+uvposition
#        else:
#            url="http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc?"+\
#                'Times'+timeurl+',u'+timeurl+uvposition+','+'v'+timeurl+uvposition
        url = url_with_time_position(modelname, data_want)
        dataset = open_url(url)
        u=np.array(dataset['u'])
        v=np.array(dataset['v'])
        print 'u, v, i', u[0,0,0], v[0,0,0],i
################get the point according the position###################
        par_u=u[0,0,0]
        par_v=v[0,0,0]
        xdelta=par_u*60*60 #get_coors
        ydelta=par_v*60*60
        latdelta=ydelta/111111
        londelta=(xdelta/(111111*np.cos(la*np.pi/180)))
        la=la+latdelta
        lo=lo+londelta
        latd.append(la)
        lond.append(lo)
#        kf,distanceF=nearlonlat(lonc,latc,lo,la) # nearest triangle center F - face
#        kv,distanceV=nearlonlat(lon,lat,lo,la)# nearest triangle vertex V - vertex
        kf,distanceF = nearest_point_index(lo,la,lonc,latc,num=1)
        kv,distanceV = nearest_point_index(lo,la,lon,lat,num=1)
        kf, kv = kf[0][0], kv[0][0]
        depthtotal=siglay[:,kv]*h[kv]
#        layer=np.argmin(abs(depthtotal-depth))
        if distanceV>=0.3:
            if i==startrecord:
                print 'Sorry, your start position is NOT in the model domain'
                break
    return latd ,lond
Example #21
    def get_last_forecast(self,url):

        now = datetime.datetime.now(pytz.utc)
        
        # it takes 5.5 hours before data arrive on GrADS site
        last = now + datetime.timedelta(hours = -6)

        for _ in range(6):
            # forecasts are on h=[0,6,12]
            h = min(int(last.hour/6),2)*6
            last = last.replace(hour = h)
    
            hour = '%02d' % last.hour
            date = now.strftime('%Y%m%d')
            url = url.format(date=date,hour=hour)
            try:
                logger.debug('querying '+url)
                dataset = open_url(url)
                logger.debug('Forecast found for date={date}, hour={hour}'.format(date=date,hour=hour))
                return dataset
            except Exception as e:
                logger.warning('Forecast not found: %s' % e)
                # try previous forecast
                last = last + datetime.timedelta(hours = -6)
        logger.error('No GEFS forecast found')
        return None
Example #22
def fetch_data(URL):
    cnt = 0
    while True:
        if cnt > 10:
            break
        else:
            try:
                dataset = open_url(URL)
                var = dataset['precipitation']
                lon = np.array(dataset['nlon'])
                lat = np.array(dataset['nlat'])
                ind_LonMin = int(np.argmin(abs(lon-float(xmin))))
                ind_LonMax = int(np.argmin(abs(lon-float(xmax))))
                ind_LatMin = int(np.argmin(abs(lat-float(ymin))))
                ind_LatMax = int(np.argmin(abs(lat-float(ymax))))
                Data = var[ind_LonMin:ind_LonMax, ind_LatMin:ind_LatMax]
                LLX  = lon[ind_LonMin]
                LLY  = lat[ind_LatMin]
            except:
                cnt = cnt + 1
                print 'fetch attempt ', cnt
                continue
            break

    d = np.flipud(Data.T)
    return d, LLX, LLY
def pic_trend(lond, latd):
    dt=60*60.
    tau=dt/111111.
    lont=[]
    latt=[]
    ufinal=[]
    vfinal=[]
    for i in range(startrecord,endrecord):
        timeurl = '['+str(i)+':1:'+str(i)+']'
        uvposition = str([0])+'[0:1:90414]' # this is the number of grid points in the 30yr model
        url = 'http://www.smast.umassd.edu:8080/thredds/dodsC/fvcom/hindcasts/30yr_gom3?'+'Times'+timeurl+',u'+timeurl+uvposition+','+'v'+timeurl+uvposition
        dataset = open_url(url)
        utotal=np.array(dataset['u'])
        vtotal=np.array(dataset['v'])
        times=np.array(dataset['Times'])
        u=utotal[0,0,:]
        v=vtotal[0,0,:]
        lont.append(lond)
        latt.append(latd)
        lond,latd,uinterplation,vinterplation=RungeKutta4_lonlat(lond,latd,Grid,u,v,tau)
        ufinal.append(uinterplation)
        vfinal.append(vinterplation)
        kv,distance=nearlonlat(lon,lat,lond,latd)
        if distance>=0.3:
            break
    fig=plt.figure()     
    Q=plt.quiver(lont,latt,ufinal,vfinal,scale=5.)  
    plt.show() 
Example #24
    def opendap_fetch(cls, asset, date):
        """ Get array proxy from OpenDap for this asset and date """
        url = cls._assets[asset].get('url', '')
        if url == '':
            raise Exception("%s: URL not defined for asset %s" % (cls.__name__, asset))
        success = False

        for ver in ['100', '200', '300', '301', '400']:

            if asset != "FRLAND":
                f = cls._assets[asset]['source'] % (ver, date.year, date.month, date.day)
                loc = "%s/%04d/%02d/%s" % (url, date.year, date.month, f)
            else:
                f = cls._assets[asset]['source'] % (ver, 0, 0, 0)
                loc = "%s/1980/%s" % (url, f)
            try:
                with Timeout(30):
                    dataset = open_url(loc)
            except Timeout.Timeout:
                print "Timeout"
            except Exception,e:
                pass
            else:
                success = True
                break
def get_dataset(url, *labels):
    dataset = open_url(url)
    data = []
    for label in labels:
        add = np.array(dataset[label])
        data.append(add)
    return data
def get_data(var_id, row, col):

    base_url = ('http://dapds00.nci.org.au/thredds/dodsC/rr9/Climate/eMAST/'
           'ANUClimate/0_01deg/v1m0_aus/mon/land/%s/e_01/1970_2012/' % (var_id))
    emast_id = "eMAST_ANUClimate_mon_tmax_v1m0"
    start_date = "1970-01-01"
    stop_date = "2000-12-31"

    current = datetime.strptime(start_date, "%Y-%m-%d")
    stop = datetime.strptime(stop_date, "%Y-%m-%d")

    tmax = []
    dates = []
    while current < stop:

        if current.month < 10:
            month = "0%s" % (current.month)
        else:
            month = "%s" % (current.month)
        year = current.year

        url = "%s%s_%s%s.nc" % (base_url, emast_id, year, month)

        dataset = open_url(url)
        variable = dataset['air_temperature']
        #print variable[0,2000:2005,2000:2005].array[:]
        tmax.append(variable[0,2000:2005,2000:2005].array[:][0][0][0])
        dates.append(current)

        current += relativedelta(months=1)

    f = open("tmax_%d_%d.txt" % (row, col), "w")
    for i in xrange(len(tmax)):
        f.write("%s\n" % tmax[i])  # write one value per line
    f.close()
Example #27
def test_lazy_evaluation_getattr(ssf_app):
    """Test that the dataset is only loaded when accessed."""
    original = open_url('/', application=ssf_app)
    dataset = original.functions.mean(original.SimpleGrid, 0)
    assert dataset.dataset is None
    dataset.SimpleGrid
    assert dataset.dataset is not None
Example #28
def get_dataset(url):
    try:
        dataset = open_url(url)
    except:
        print "Sorry, " + url + "is not available"
        sys.exit(0)
    return dataset
Example #29
    def __init__(self, day, month, year, lat, lon, ndays):

        self.day = day
        self.month = month
        self.year  = year
        self.lat = lat
        self.lon = lon
        self.ndays = ndays



        self.datetime_date = datetime.date(self.year, self.month, self.day)

        path = self.construct_path('http://goldsmr2.sci.gsfc.nasa.gov:80/opendap/MERRA/MAT1NXFLX.5.2.0', 'MERRA300.prod.assim.tavg1_2d_flx_Nx.', 0)

        dataset = open_url(path)
        self.native_lat = dataset['YDim'][:]
        self.native_lon = dataset['XDim'][:]


        self.lat_idx_native = np.where(np.min(np.abs(self.native_lat - lat)) == np.abs(self.native_lat - lat))[0][0]
        self.lon_idx_native = np.where(np.min(np.abs(self.native_lon - lon)) == np.abs(self.native_lon - lon))[0][0]


        self.lat_actual = self.native_lat[self.lat_idx_native]
        self.lon_actual = self.native_lon[self.lon_idx_native]

        return
Example #30
def get_dataset(url):
    try:
        dataset = open_url(url)
    except:
        print 'Sorry, ' + url + ' is not available'
        sys.exit(0)
    return dataset
def test_verify_open_url(sequence_type_data):
    """Test that open_url raises the correct SSLError"""
    warnings.simplefilter("always")

    TestDataset = DatasetType('Test')
    TestDataset['sequence'] = sequence_type_data
    TestDataset['byte'] = BaseType('byte', 0)
    application = BaseHandler(TestDataset)

    with LocalTestServerSSL(application, ssl_context='adhoc') as server:
        try:
            open_url(server.url, verify=False, session=requests.Session())
        except (ssl.SSLError, requests.exceptions.SSLError):
            pytest.fail("SSLError should not be raised.")

        with pytest.raises(requests.exceptions.SSLError):
            open_url(server.url, session=requests.Session())

        if not (sys.version_info >= (3, 0) and sys.version_info < (3, 4, 4)):
            # verify is disabled by default for python 3 before 3.4.4:
            with pytest.raises(requests.exceptions.SSLError):
                open_url(server.url)
Example #32
 def test_client(self):
     dataset = open_url("http://localhost:8001/", application=self.app)
     self.assertEqual(dataset.keys(), ["foo%5B"])
     self.assertEqual(dataset["foo["].name, "foo%5B")
     self.assertEqual(dataset["foo%5B"][0], 1)
Example #33
def test_original(ssf_app):
    """Test an unmodified call, without function calls."""
    original = open_url('/', application=ssf_app)
    assert (original.SimpleGrid.SimpleGrid.shape == (2, 3))
Example #34
 def __init__(self, urlpath):
     self.data = open_url(urlpath)
     self._field = None
Example #35
def PRECIP_DI_CAL(date='2014-06-06',bbox=[-87.5, -31.1, -29.3, 0.1]):
    opendap_url_mon='http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/gpcp/precip.mon.mean.nc'
    opendap_url_ltm='http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/gpcp/precip.mon.ltm.nc'

    # what is the input of the module
    logging.info(date) 
    logging.info(bbox) 

    # convert iso-date to gregorian calendar and get the month
    dta=(dt.datetime.strptime(date,'%Y-%m-%d').date()-dt.datetime.strptime('1800-01-01','%Y-%m-%d').date()).days
    mon=(dt.datetime.strptime(date,'%Y-%m-%d').date()).month

    # open opendap connection and request the avaialable time + lon/lat
    dataset_mon = open_url(opendap_url_mon)
    time=dataset_mon.time[:]
    lat=dataset_mon.lat[:]
    lon=dataset_mon.lon[:]
    dt_ind=next((index for index,value in enumerate(time) if value > dta),0)-1


    # convert bbox into coordinates and convert OL lon to GPCP lon where needed
    minlon = bbox[0]
    if minlon < 0: minlon += 360 #+ 180 # GPCP is from 0-360, OL is from -180-180
    maxlon = bbox[2]
    if maxlon < 0: maxlon += 360 #+ 180 # GPCP is from 0-360, OL is from -180-180
    minlat = bbox[1]
    maxlat = bbox[3]

    lat_sel = (lat>minlat)&(lat<maxlat)
    lat_sel[np.nonzero(lat_sel)[0]-1] = True

    # ugly method to decide if there are two areas to select
    # prepare lon/lat subset arrays
    check_if = 0 # If this one is 1, than there are two areas to check
    if minlon >= maxlon:
        check_if = 1
        lon_sel = np.invert((lon<minlon)&(lon>maxlon))
    else:
        lon_sel = (lon>minlon)&(lon<maxlon)    

    # request the subset from opendap
    dataset_mon=dataset_mon['precip'][dt_ind,lat_sel,lon_sel]
    dataset_ltm = open_url(opendap_url_ltm)
    dataset_ltm=dataset_ltm['precip'][mon-1,lat_sel,lon_sel]
    
    mon = np.ma.masked_less((dataset_mon['precip'][:]).squeeze(),0)
    ltm = np.ma.masked_less((dataset_ltm['precip'][:]).squeeze(),0)

    # if two areas make sure the subset is applied appropriate 
    if check_if == 1:
        subset = np.ones((mon.shape[0]), dtype=bool)[None].T * lon_sel
        mon = np.roll(mon, 
                      len(lon)/2, 
                      axis=1)[np.roll(subset, 
                                      len(lon)/2, 
                                      axis=1)].reshape(mon.shape[0],
                                                       subset.sum()/mon.shape[0])    
        ltm = np.roll(ltm, 
                      len(lon)/2, 
                      axis=1)[np.roll(subset, 
                                      len(lon)/2, 
                                      axis=1)].reshape(ltm.shape[0],
                                                       subset.sum()/ltm.shape[0])    


    # calculate PAP
    PAP=(mon-ltm)/(ltm+1)*100

    # prepare output for GDAL
    papFileName = 'PRECIP_DI'+date +'.tif'        
    driver = gdal.GetDriverByName( "GTiff" )
    ds = driver.Create(papFileName, mon.shape[1], mon.shape[0], 1,gdal.GDT_Int16)
    
    # set projection information
    projWKT='GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'
    ds.SetProjection(projWKT) 

    # set geotransform information
    geotransform_input = dataset_mon.lon[:]
    if check_if == 1:
        dataset_mon.lon[:][np.where(dataset_mon.lon[:] > 180)] -= 360
        geotransform_input = np.roll(dataset_mon.lon[:], len(lon)/2, axis=0)[np.roll(subset, len(lon)/2, axis=1)[0]]
    geotransform = (min(geotransform_input),2.5, 0,max(dataset_mon.lat[:]),0,-2.5) 
    ds.SetGeoTransform(geotransform) 

    # write the data
    ds.GetRasterBand(1).WriteArray(PAP)
    #ds=None
    return ds
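A minimal call sketch using the defaults in the signature above; it writes 'PRECIP_DI<date>.tif' to the working directory and requires GDAL plus network access to the ESRL OPeNDAP server.
# ds = PRECIP_DI_CAL(date='2014-06-06', bbox=[-87.5, -31.1, -29.3, 0.1])
# band = ds.GetRasterBand(1)  # the function returns the GDAL GTiff dataset it created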
Example #36
for i in range(len(bbox['lon'])):
    val = bbox['lon'][i]
    if val < 0:
        bbox['lon'][i] = 180 + val

if bbox['lon'][0] > bbox['lon'][1]: bbox['lon'][0], bbox['lon'][1] = bbox['lon'][1], bbox['lon'][0]

print(bbox)

#%%

for v in var:
    print(v)
    print(str(pytime.time() - start_time) + ' seconds elapsed')
    url = url + v
    x = open_url(url)
    not_var = ['lat', 'lon', 'time', 'time_bnds']

    if not init:
        init = True

        for n in not_var:
            if n not in ['time_bnds', 'time']:
                dic[n] = x[n][:].data

            if n == 'time':
                conv_time = []
                origin = dt.datetime(1800, 1, 1)
                time = x['time'][:].data
                for t in time:
                    t_0 = origin + dt.timedelta(hours=t)
Example #37
    def download(self):
        """

            Migrate it to use np.lib.arrayterator.Arrayterator
        """
        url_h = "%s/%s-h-daily" % (self.metadata['urlbase'], self.metadata['source_filename'])
        dataset_h = open_url(url_h)
        url_uv = "%s/%s-uv-daily" % (self.metadata['urlbase'], self.metadata['source_filename'])
        dataset_uv = open_url(url_uv)
        # ----
        if 't_ini' not in self.metadata['limits']:
            self.metadata['limits']['t_ini'] = 0
        if 't_fin' not in self.metadata['limits']:
            self.metadata['limits']['t_fin'] = dataset_h['time'].shape[0]
        if 't_step' not in self.metadata['limits']:
            self.metadata['limits']['t_step'] = 0
        else:
            print "Atention!! t_step set to: %s" % self.metadata['limits']['t_step']
        t_ini = self.metadata['limits']['t_ini']
        t_fin = self.metadata['limits']['t_fin']
        t_step = self.metadata['limits']['t_step']
        # ----
        data={}
        #
        #from coards import from_udunits
        t0=datetime(1950,1,1)
        #if (re.match('^hours since \d{4}-\d{2}-\d{2}$',dataset_h['time'].attributes['units'])):
        if (re.match('^hours since 1950-01-01',dataset_h['time'].attributes['units'])):
            data['datetime']=numpy.array([t0+timedelta(hours=h) for h in dataset_h['time'][t_ini:t_fin:t_step].tolist()])
        else:
            print "Problems interpreting the time"
            return

        #time = self.nc.createVariable('time', 'i', ('time',))
        #time[:] = dataset_h['time'][t_ini:t_fin:t_step]
        #time.units = dataset_h['time'].attributes['units']
        #data['time'] = time
        #
        limits=self.metadata['limits']
        Lat=dataset_h['NbLatitudes']
        Lon=dataset_h['NbLongitudes']
        Latlimits=numpy.arange(Lat.shape[0])[(Lat[:]>=limits["LatIni"]) & (Lat[:]<=limits["LatFin"])]
        Latlimits=[Latlimits[0],Latlimits[-1]]
        Lonlimits=numpy.arange(Lon.shape[0])[(Lon[:]>=limits["LonIni"]) & (Lon[:]<=limits["LonFin"])]
        Lonlimits=[Lonlimits[0],Lonlimits[-1]]

        data['Lon'], data['Lat'] = numpy.meshgrid( (Lon[Lonlimits[0]:Lonlimits[-1]]), (Lat[Latlimits[0]:Latlimits[-1]]) )


        #------
        self.data = data
        #Arrayterator = numpy.lib.arrayterator.Arrayterator
        #dataset = dataset_h['Grid_0001']['Grid_0001']
        #ssh = Arrayterator(dataset)[t_ini:t_fin:t_step]

        #blocks = 1e4

        file = os.path.join(self.metadata['datadir'],self.metadata['source_filename']+".nc")
        nc = pupynere.netcdf_file(file,'w')
        nc.createDimension('time', len(range(t_ini,t_fin,t_step)))
        nc.createDimension('lon', (Lonlimits[-1]-Lonlimits[0]))
        nc.createDimension('lat', (Latlimits[-1]-Latlimits[0]))

        dblocks = max(1,int(1e5/((Lonlimits[-1]-Lonlimits[0])*(Latlimits[-1]-Latlimits[0]))))

        ti = numpy.arange(t_ini, t_fin, t_step)
        blocks = ti[::dblocks]
        if ti[-1] not in blocks:
            blocks = numpy.append(blocks,t_fin)

        ntries = 40
        #------
        for v, dataset, missing_value in zip(['h','u','v'], [dataset_h['Grid_0001']['Grid_0001'], dataset_uv['Grid_0001']['Grid_0001'], dataset_uv['Grid_0002']['Grid_0002']], [dataset_h['Grid_0001']._FillValue, dataset_uv['Grid_0001']._FillValue, dataset_uv['Grid_0002']._FillValue]):

            print "Getting %s" % v
            #data['h'] = ma.masked_all((len(ti),Lonlimits[-1]-Lonlimits[0], Latlimits[-1]-Latlimits[0]), dtype=numpy.float64)
            self.data[v] = nc.createVariable(v, 'f4', ('time', 'lat', 'lon'))
            self.data[v].missing_value = missing_value
            for b1, b2 in zip(blocks[:-1], blocks[1:]):
                print "From %s to %s of %s" % (b1, b2, blocks[-1])
                ind = numpy.nonzero((ti>=b1) & (ti<b2))
                for i in range(ntries):
                    print "Try n: %s" % i
                    try:
                        self.data[v][ind] = dataset[b1:b2:t_step, Lonlimits[0]:Lonlimits[-1],Latlimits[0]:Latlimits[-1]].swapaxes(1,2).astype('f')
                        break
                    except:
                        waitingtime = 30+i*20
                        print "Failed to download. I'll try again in %ss" % waitingtime
                        time.sleep(waitingtime)
Example #38
def test_open_url(sequence_app):
    """Open an URL and check dataset keys."""
    dataset = open_url('http://localhost:8001/', sequence_app)
    assert list(dataset.keys()) == ["cast"]
def getData(station_id, starttime, endtime, outvar):
    """ 
    Access data from the NOAA opendap database.
    Usage example:
        data,t = getData("8639348","20120501","20120512","conductivity")
    
    Input variables:
        station_id: string
        starttime: string "yyyymmdd"
        endtime: string "yyyymmdd"
        outvar: string with variable name
            one of: 'waterlevel', 'conductivity', 'watertemp', 'airtemp', 'airpressure',
                'windspeed' or 'winddirn'
    Output Variables:
        data: vector of floats with data
        t: time vector (matlab datenum format)
   
    Note that this is not a netcdf file dataserver.
    See this website for guidance:
   https://oceana.mbari.org/confluence/display/OneStopShopping/Examples+using+pydap+from+Python+to+access+BOG+data+via+DRDS
    
    """
    # Items unique to each data site
    baseurl = "http://opendap.co-ops.nos.noaa.gov/dods/IOOS/"
    if outvar == 'waterlevel':
        # Six minute verified waterlevel data
        url = baseurl + "SixMin_Verified_Water_Level"
        seqname = 'WATERLEVEL_6MIN_VFD_PX'
        varname = 'WL_VALUE'
        attribs = {'long_name': 'Water surface elevation', 'units': 'm'}
    elif outvar == 'conductivity':
        # Conductivity (millisiemens/cm)
        url = baseurl + "Conductivity"
        seqname = 'CONDUCTIVITY_PX'
        varname = 'CONDUCTIVITY'
        attribs = {
            'long_name': 'Water conductivity',
            'units': 'millisiemens/cm'
        }
    elif outvar == 'watertemp':
        # Water temperature (degC)
        url = baseurl + "Water_Temperature"
        seqname = 'WATER_TEMPERATURE_PX'
        varname = 'WaterTemp'
        attribs = {'long_name': 'Water temperature', 'units': 'degreesC'}
    elif outvar == 'airtemp':
        # Air Temperature (degC)
        url = baseurl + "Air_Temperature"
        seqname = 'AIR_TEMPERATURE_PX'
        varname = 'AirTemp'
        attribs = {'long_name': ' Air temperature', 'units': 'degreesC'}
    elif outvar == 'airpressure':
        # Air Presure (millibars)
        url = baseurl + "Barometric_Pressure"
        seqname = 'BAROMETRIC_PRESSURE_PX'
        varname = 'BP'
        attribs = {'long_name': 'Air pressure', 'units': 'mb'}
    elif outvar == 'windspeed':
        # Wind Speed (m/s)
        url = baseurl + "Wind"
        seqname = 'WIND_PX'
        varname = 'Wind_Speed'
        attribs = {'long_name': 'Wind speed', 'units': 'm/s'}
    elif outvar == 'winddirn':
        # Wind Direction (degrees)
        url = baseurl + "Wind"
        seqname = 'WIND_PX'
        varname = 'Wind_Direction'
        attribs = {'long_name': 'Wind direction', 'units': 'degrees'}

    # Open the database
    nc = open_url(url)

    #my_station = nc.WATERLEVEL_6MIN_VFD_PX[(nc.WATERLEVEL_6MIN_VFD_PX._STATION_ID == station_id) & \
    #    (nc.WATERLEVEL_6MIN_VFD_PX._DATUM == "MSL") & \
    #    (nc.WATERLEVEL_6MIN_VFD_PX._BEGIN_DATE ==starttime) & \
    #    (nc.WATERLEVEL_6MIN_VFD_PX._END_DATE==endtime)]

    print 'Retrieving data '+outvar+' @ site # '+station_id+' for date range: '+\
        starttime+' to '+endtime+'...'
    try:
        # Build a query with the server
        if outvar == 'waterlevel':
            # Water level requires a datum in the query
            my_station = nc[seqname][(nc[seqname]._STATION_ID == station_id) & \
                (nc[seqname]._DATUM == "MSL") & \
                (nc[seqname]._BEGIN_DATE ==starttime) & \
                (nc[seqname]._END_DATE==endtime)]
        else:
            my_station = nc[seqname][(nc[seqname]._STATION_ID == station_id) & \
                (nc[seqname]._BEGIN_DATE ==starttime) & \
                (nc[seqname]._END_DATE==endtime)]

        print "Query ok - downloading data..."
        # Get the data
        #data = np.zeros((len(my_station['DATE_TIME']),1))
        #t = np.zeros((len(my_station['DATE_TIME']),1))
        k = 0
        data = []
        t = []
        for dt, d in zip(my_station['DATE_TIME'], my_station[varname]):
            #data[k,0]=np.float(d)
            data.append(d)
            t.append(parseDate(dt))
            k = k + 1
    except:
        print "The date range and/or the variable: " + varname + " are not available from station #: " + station_id
        #data = np.zeros([0,1])
        #t=np.zeros([0,1])
        data = []
        t = []

    return data, t, attribs
Example #40
    def _load(self, filename, elements, debug=False):
        """Loads data from *.nc, *.p and OpenDap url"""
        #Loading pickle file
        if filename.endswith('.p'):
            f = open(filename, "rb")
            data = pkl.load(f)
            self._origin_file = data['Origin']
            self.History = data['History']
            if debug: print "Turn keys into attributes"
            self.Grid = ObjectFromDict(data['Grid'])
            self.Variables = ObjectFromDict(data['Variables'])
            try:
                if self._origin_file.startswith('http'):
                    #Look for file through OpenDAP server
                    print "Retrieving data through OpenDap server..."
                    self.Data = open_url(data['Origin'])
                    #Create fake attribute to be consistent with the rest of the code
                    self.Data.variables = self.Data
                else:
                    #WB_Alternative: self.Data = sio.netcdf.netcdf_file(filename, 'r')
                    #WB_comments: scipy causes some errors, and even though it can be
                    #             faster, it can be unreliable
                    #self.Data = nc.Dataset(data['Origin'], 'r')
                    self.Data = netcdf.netcdf_file(data['Origin'], 'r',mmap=True)
            except: #TR: need to precise the type of error here
                print "the original *.nc file has not been found"
                pass

        #Loading netcdf file         
        elif filename.endswith('.nc'):
            if filename.startswith('http'):
                #Look for file through OpenDAP server
                print "Retrieving data through OpenDap server..."
                self.Data = open_url(filename)
                #Create fake attribute to be consistent with the rest of the code
                self.Data.variables = self.Data
            else:
                #Look for file locally
                print "Retrieving data from " + filename + " ..."
                #WB_Alternative: self.Data = sio.netcdf.netcdf_file(filename, 'r')
                #WB_comments: scipy causes some errors, and even though it can be
                #             faster, it can be unreliable
                #self.Data = nc.Dataset(filename, 'r')
                self.Data = netcdf.netcdf_file(filename, 'r',mmap=True)
            #Metadata
            text = 'Created from ' + filename
            self._origin_file = filename
            self.History = [text]
            # Calling sub-class
            print "Initialisation..."
            try:
                self.Grid = _load_grid(self.Data,
                                       elements,
                                       self.History,
                                       debug=self._debug)
                self.Variables = _load_var(self.Data,
                                           elements,
                                           self.Grid,
                                           self.History,
                                           debug=self._debug)

            except MemoryError:
                print '---Data too large for machine memory---'
                print 'Tip: use ax or tx during class initialisation'
                print '---  to use partial data'
                raise

        elif filename.endswith('.mat'):
            print "---Functionality not yet implemented---"
            sys.exit()
        else:
            print "---Wrong file format---"
            sys.exit()
Example #41
 def test_uint16(self):
     """Load an uint16."""
     dataset = open_url("http://localhost:8001/", self.app)
     self.assertEqual(dataset.types.ui16.dtype, np.dtype(">u2"))
Example #42
 def test_original(self):
     """Test an unmodified call, without function calls."""
     original = open_url('/', application=self.app)
     self.assertEqual(original.SimpleGrid.SimpleGrid.shape, (2, 3))
Example #43
def near(x,x0):
    """
    Find the index where x has the closer value to x0
    """
    
    dx = x - x0
    dx = np.abs(dx)
    fn = np.where( dx == dx.min() )
    fn = fn[0][0]
    
    return fn
    
###########################################################################

print 'Accessing dods server'
dataset = open_url('http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_60.5')

print 'Downloading lon, lat, depth, time arrays'
tt  = dataset.MT[:]
depth  = dataset['Depth'][:]
lon = dataset.Longitude.array[:]; lon = lon-360
lat = dataset.Latitude.array[:]

# obs: hycom dates are referenced to year 1900
tstart = dt.datetime(2003-1900, 1, 1); tstart = date2num(tstart)
tend   = dt.datetime(2006-1900, 1, 1); tend   = date2num(tend)
fk  = near(depth, 60) 
fj  = np.where( (lon[1,:] >= -60) & (lon[1,:] <= 30) )
fi  = np.where( (lat[:,1] >= -50) & (lat[:,1] <= -5) )
fj1 = fj[0][0]; fj2 = fj[0][1]
fi1 = fi[0][0]; fi2 = fi[0][1]
Example #44
 def test_client(self):
     dataset = open_url("http://localhost:8001/", application=self.app)
     self.assertEqual(dataset["a.b"].name, "a%2Eb")
     self.assertEqual(dataset["a.b"][0], 1)
def DownloadPoints(variable,
                   latitudes_s,
                   longitudes_s,
                   url,
                   outfile,
                   session=None):
    """   Download time-series for a list of lats/longs from a single opendap dataset (ie CORDEX netcdf).
  
  The output nc file has primary dimensions location and time.
  
  Inputs:
  -------
  
  variable - string. variable to download, eg 'tas' or 'pr'
  
  latitudes_s, longitudes_s are python lists
  
  url - opendap url
  
  outfile  - string. where to write the file.
  
  session - None or pydap.cas.esgf session. None for a new session.
      Might save a little time if you reuse sessions rather than creating new
      ones for each download? It might be more trouble than it is worth, since it
      seems to get time-outs with re-used sessions. Best to just leave as None. But if
      you want to try:
      session = setup_session(config.openid, config.password, verify=True, check_url=url)
  """

    if session == None:
        session = setup_session(config.openid,
                                config.password,
                                verify=True,
                                check_url=url)

    d = open_url(url, session=session)
    # lat/long to grid mapping
    Message('download from %s' % url, 2)

    ####
    Message('   ... reading header and extracting pixel indices...',
            2,
            newline=False)

    lat = d['lat'].array.data[:, :]
    lon = d['lon'].array.data[:, :]

    for i, l in enumerate(longitudes_s):
        if l > 180:
            longitudes_s[i] = l - 360

    lon_shift = lon > 180
    lon[lon_shift] = lon[lon_shift] - 360

    ns = len(longitudes_s)
    rlat_idx = [None] * ns
    rlon_idx = [None] * ns

    lat_of_pixel = np.zeros(ns, dtype=lat.dtype)
    lon_of_pixel = np.zeros(ns, dtype=lat.dtype)

    Message('done.', 2)

    ####
    Message('   ... reading and processing header for %d locations...' % ns, 2,
            False)

    for i in range(ns):
        longitude = longitudes_s[i]
        latitude = latitudes_s[i]
        dist = ((abs(lon - longitude)**2) + (abs(lat - latitude)**2))
        mask = dist == np.amin(dist)

        lat_of_pixel[i] = lat[mask]
        lon_of_pixel[i] = lon[mask]

        rlat_idx[i] = np.asscalar(mask.nonzero()[0])
        rlon_idx[i] = np.asscalar(mask.nonzero()[1])

    Message('done.', 2)

    ####
    # open a new netCDF file for writing, and download data and write it in directly!
    Message('   ... downloading and writing to %s...' % outfile, 2)

    with Dataset(outfile, 'w', format='NETCDF4') as ncfile:
        for name in d.attributes['NC_GLOBAL']:
            attr_value = d.attributes['NC_GLOBAL'][name]
            if name[0] != '_' and isinstance(attr_value, str):
                ncfile.setncattr(name, attr_value)

        # create the x and y dimensions.
        ncfile.createDimension('location', ns)
        ncfile.createDimension('time', None)

        times = ncfile.createVariable('time', d['time'].dtype.name, ('time', ))
        latitudes = ncfile.createVariable('lat', lat_of_pixel.dtype.name,
                                          ('location', ))
        longitudes = ncfile.createVariable('lon', lon_of_pixel.dtype.name,
                                           ('location', ))
        M = ncfile.createVariable(variable, d[variable].array.dtype.name, (
            'time',
            'location',
        ))

        latitudes[:] = lat_of_pixel
        longitudes[:] = lon_of_pixel
        times[:] = d['time'].data[:]
        for i in range(ns):
            Message('   ... station %d of %d' % (i, ns), 2)
            M[:, i] = d[variable].array.data[:, rlat_idx[i], rlon_idx[i]]

        for cvar in (
                'time',
                variable,
        ):
            for name in d[cvar].attributes:
                attr_value = d[cvar].attributes[name]
                if name[0] != '_' and isinstance(attr_value, str):
                    ncfile.variables[cvar].setncattr(name, attr_value)
        Message('done.', 2)
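
# Hedged usage sketch of DownloadPoints (the URL and point list below are illustrative
# placeholders, not taken from the original code):
# cordex_url = 'https://esgf.example.org/thredds/dodsC/cordex/tas_EUR-11_day.nc'  # hypothetical
# DownloadPoints('tas',
#                latitudes_s=[48.2, 52.5],
#                longitudes_s=[16.4, 13.4],
#                url=cordex_url,
#                outfile='tas_points.nc',
#                session=None)  # None -> a fresh ESGF session is built from config.openid/password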
def mapdap(
    varname = 'hr24_prcp',
    bbox = '-180,-90,180,90',
    url = 'http://opendap.bom.gov.au:8080/thredds/dodsC/PASAP/atmos_latest.nc',
    timeindex = 'Default',
    imgwidth = 256,
    imgheight = 256,
    request = 'GetMap',
    time = 'Default',
    save_local_img = False,
    colorrange = (-4,4),
    palette = 'RdYlGn',
    colorbounds = 'Default',
    style = 'grid',
    ncolors = 10,
    mask = -999,
    plot_mask = True,
    mask_varname = 'mask',
    mask_value = 1.0
    ):
    """ Using Basemap, create a contour plot using some dap available data 
   
        Data is assumed to have dimensions [time,lat,lon] 
            TODO -- deal with other shapes
            TODO -- determine the dimension ordering using CF convention

        varname -- name of variable in opendap file
        bbox -- lonmin,latmin,lonmax,latmax for plot
        url -- OPEnDAP url
        timeindex -- time index to plot
        imgwidth,imgheight -- size of png image to return
        request -- 'GetMap','GetLegend','GetFullFigure'
        time -- time vale to plot. Assumes a particular format."%Y-%m-%dT%H:%M:%S"
        mask -- mask out these values
        if plot_mask is True, mask_varname and mask_value must be given
    
    """
    transparent = True
    lonmin,latmin,lonmax,latmax = tuple([float(a) for a in bbox.rsplit(',')])
   
    # It's not clear there is any point in this. Pydap doesn't actually
    # download data until you subscript 
    
    if url not in cache:
        dset = open_url(url)
    else:
        dset = cache[url]
    
    # Get the correct time.
    time_var = dset['time']
    time_units = time_var.attributes['units']
    available_times = np.array(time_var[:])
    
    

    # TODO there is a potential conflict here between time and timeindex.
    # On the one hand we want to allow using the actual time value.
    # On the other hand we want to make it easy to get a time index
    # without knowing the value.
    timestep=0
    if timeindex == 'Default':
        timestep=0
    else:
        timestep=int(timeindex)
    if time != 'Default':
        dtime = datetime.datetime.strptime(time, "%Y-%m-%dT%H:%M:%S" )
        reftime = date2num(dtime,time_units)
        timestep = np.where(available_times >= reftime)[0].min()

    # TODO Get only the section of the field we need to plot
    # TODO Determine lat/lon box indices and only download this slice

    # TODO Set default range (the below does not work)
    #colorrange = np.min(var),np.max(var)
    
    lat = dset['lat'][:]
    lon = dset['lon'][:]
    
    # CHANGED
    var = dset[varname][timestep,:,:]
 
    #xcoords = lonmin,lonmax
    #xcoords,lon,var = transform_lons(xcoords,lon,var)
 
    # TODO
    # Needs more thought - the idea here is to only grab a slice of the data
    # Need to grab a slightly larger slice of data so that tiling works.
    #lat_idx = (lat > latmin) & (lat < latmax)
    #lon_idx = (lon > lonmin) & (lon < lonmax)
    #lat = dset['lat'][lat_idx]
    #lon = dset['lon'][lon_idx]
    #latdx1 = np.where(lat_idx)[0].min()
    #latdx2 = np.where(lat_idx)[0].max()
    #londx1 = np.where(lon_idx)[0].min()
    #londx2 = np.where(lon_idx)[0].max()
    #var = var[latdx1:latdx2+1,londx1:londx2+1]
    #var = dset[varname][timestep,latdx1:latdx2+1,londx1:londx2+1]

    # todo clean up this logic
    if 'mask' in dset.keys():
        if plot_mask:
            maskvar = dset['mask'][timestep,:,:]
            #maskvar = dset['mask'][timestep,latdx1:latdx2+1,londx1:londx2+1]
            varm = np.ma.masked_array(var,mask=maskvar)
            mask = varm.mask 
    else:
        varm = np.ma.masked_array(var,mask=np.isinf(var))

    xcoords = lonmin,lonmax
    # Call the trans_coords function to ensure that basemap is asked to
    # plot something sensible.
    xcoords,lon,varm = transform_lons(xcoords,lon,varm)
    lonmin,lonmax = xcoords
    varnc = dset[varname]

    try:
        var_units = varnc.attributes['units']
    except KeyError:
       var_units = '' 


    
    # Plot the data
    # For the basemap drawing we can't go outside the range of coordinates
    # WMS requires us to give an empty (transparent) image for these spurious lats
    
    # uc = upper corner, lc = lower corner
    bmapuclon=lonmax
    bmaplclon=lonmin
    bmapuclat=min(90,latmax)
    bmaplclat=max(-90,latmin)
    if bmaplclat==90:
        bmaplclat = 89.0
    if bmapuclat==-90:
        bmapuclat = -89.0

    # TODO set figsize etc here  
    fig = mpl.figure.Figure()
    canvas = FigureCanvas(fig)
    
    ax = fig.add_axes((0,0,1,1),frameon=False,axisbg='k',alpha=0,visible=False)
    m = Basemap(projection='cyl',resolution='c',urcrnrlon=bmapuclon,
        urcrnrlat=bmapuclat,llcrnrlon=bmaplclon,llcrnrlat=bmaplclat,
        suppress_ticks=True,fix_aspect=False,ax=ax)

    DPI=100.0

    # Convert the latitude extents to Basemap coordinates
    bmaplatmin,bmaplonmin = m(latmin,lonmin)
    bmaplatmax,bmaplonmax = m(latmax,lonmax)
    lon_offset1 = abs(bmaplclon - bmaplonmin)
    lat_offset1 = abs(bmaplclat - bmaplatmin)
    lon_offset2 = abs(bmapuclon - bmaplonmax)
    lat_offset2 = abs(bmapuclat - bmaplatmax)
    lon_normstart = lon_offset1 / abs(bmaplonmax - bmaplonmin)
    lat_normstart = lat_offset1 / abs(bmaplatmax - bmaplatmin)
    ax_xfrac = abs(bmapuclon - bmaplclon)/abs(bmaplonmax - bmaplonmin)
    ax_yfrac = abs(bmapuclat - bmaplclat)/abs(bmaplatmax - bmaplatmin)

    # Set plot_coords, the plot boundaries. If this is a regular WMS request,
    # the plot must fill the figure, with whitespace for invalid regions.
    # If it's a full figure, we need to make sure there is space for the legend
    # and also for the text.
    if request == 'GetFullFigure':
        coords = lonmin,latmin,lonmax,latmax
        plot_coords = figurePlotDims(imgheight,imgwidth,coords)
    else:
        plot_coords = (lon_normstart,lat_normstart,ax_xfrac,ax_yfrac)

    m = Basemap(projection='cyl',resolution='c',urcrnrlon=bmapuclon,
        urcrnrlat=bmapuclat,llcrnrlon=bmaplclon,llcrnrlat=bmaplclat,
        suppress_ticks=True,fix_aspect=False,ax=ax)

    ax = fig.add_axes(plot_coords,frameon=False,axisbg='k')

    m.ax = ax
    varm,lonwrap = addcyclic(varm,lon)
    x,y = m(*np.meshgrid(lonwrap[:],lat[:]))

    """ To plot custom colors
    rgb_cmap = mpl.colors.ListedColormap([  
            (0.0,0.0,0.0),
            (0.25,0.25,0.25),
            (0.3,0.25,0.25),
            (0.5,0.5,0.5),
            (0.6,0.5,0.5),
            (0.75,0.75,0.75),
            (0.75,0.85,0.75),
            (1.0,1.0,1.0) ],name='rgbcm')
    default_color_bounds = [-1,-0.75,-0.5,-0.25,0.0,0.25,0.5,0.75,1.0]
    default_norm = mpl.colors.BoundaryNorm(default_color_bounds, rgb_cmap.N)
    m.contourf(x,y,var,cmap=rgb_cmap,norm=default_norm)
    contours = m.contour(x,y,var,cmap=rgb_cmap,norm=default_norm)
    contours.clabel(colors='k')
    """
    colormap = mpl.cm.get_cmap(palette)
    # colormap = cmap_discretize(colormap,ncolors)
    # if colorbounds = 'Default':
    # colorbounds = list(np.arange(colorrange[0],colorrange[1]+increment,increment))
    # else:
    #    colorbounds = list(np.arange(colorrange[0],colorrange[1]+increment,increment))
    #    Do some checks on the size of the list, and fix if we can
    #    pass

    if style == 'contour':
        # Interpolate to a finer resolution
        # TODO: make this sensitive to the chosen domain
        increment = float(colorrange[1]-colorrange[0]) / float(ncolors-2)
        colorbounds = list(np.arange(colorrange[0],colorrange[1]+increment,increment))
        
        
        # CHANGED
        colormap = cmap_discretize(colormap,ncolors)
        
        colvs =[-999]+colorbounds+[999]
        lat_idx = np.argsort(lat)
        lat = lat[lat_idx]
        varm = varm[lat_idx,:]

        data_lonmin = min(lonwrap)
        data_lonmax = max(lonwrap)
        data_latmin = min(lat)
        data_latmax = max(lat)

        new_lons = np.arange(data_lonmin-1.0,data_lonmax+1.0,1.0)
        new_lats = np.arange(data_latmin-1.0,data_latmax+1.0,1.0)
        newx,newy = m(*np.meshgrid(new_lons[:],new_lats[:]))
        x = newx
        y = newy
        
        # Two pass interpolation to deal with the mask.
        # The first pass does a bilinear, the next pass does a nearest neighbour to keep the mask
        # These steps slow down the plotting significantly
        # It's not clear this is working, and the problem is likely solved by
        # ensuring the right mask is used!
        varm_bl = interp(varm, lonwrap[:], lat[:], newx, newy,order=1)
        varm_nn = interp(varm, lonwrap[:], lat[:], newx, newy,order=0)
        varm = varm_bl
        varm[varm_nn.mask == 1] = varm_nn[varm_nn.mask == 1]

        # contourf has an extent keyword (x0,x1,y0,y1)
        # return "mapdap\n"
        # STUCK it gets stuck here (in apache)        
        main_render = m.contourf(x,y,varm[:,:],colorbounds,extend='both',cmap=colormap,ax=ax)
        
        contours = m.contour(x,y,varm,colorbounds,colors='k',ax=ax)
        contours.clabel(colors='k',rightside_up=True,fmt='%1.1f',inline=True)
        
        
        
    elif style == 'grid':
        main_render = m.pcolormesh(x,y,varm[:,:],vmin=colorrange[0],vmax=colorrange[1],
            cmap=colormap,ax=ax)
    elif style == 'grid_threshold':
        increment = float(colorrange[1]-colorrange[0]) / float(ncolors)
        colorbounds = list(np.arange(colorrange[0],colorrange[1]+increment,increment))
        colornorm = mpl.colors.BoundaryNorm(colorbounds,colormap.N)
        main_render = m.pcolor(x,y,varm[:,:],vmin=colorrange[0],vmax=colorrange[1],
            cmap=colormap,ax=ax,norm=colornorm)
    else:
        main_render = m.pcolormesh(x,y,varm[:,:],vmin=colorrange[0],vmax=colorrange[1],
            cmap=colormap,ax=ax)


    fig.set_dpi(DPI)
    fig.set_size_inches(imgwidth/DPI,imgheight/DPI)

    title_font_size = 9
    tick_font_size = 8
    if request == 'GetFullFigure':
        # Default - draw 5 meridians and 5 parallels
        n_merid = 5
        n_para = 5

        # base depends on zoom
        mint = (lonmax - lonmin)/float(n_merid)
        base = mint
        meridians = [lonmin + i*mint for i in range(n_merid)]
        meridians = [ int(base * round( merid / base)) for merid in meridians]
        
        # Some sensible defaults for debugging
        #meridians = [45,90,135,180,-135,-90,-45]

        pint = int((latmax - latmin)/float(n_para))
        base = pint
        parallels = [latmin + i*pint for i in range(1,n_para+1)] 
        parallels = [ int(base * round( para / base)) for para in parallels]
        #parallels = [-60,-40,-20,0,20,40,60]
        #parallels = [((parallel + 180.) % 360.) - 180. for parallel in parallels]
        m.drawcoastlines(ax=ax)
        
        m.drawmeridians(meridians,labels=[0,1,0,1],fmt='%3.1f',fontsize=tick_font_size)
        m.drawparallels(parallels,labels=[1,0,0,0],fmt='%3.1f',fontsize=tick_font_size)
        m.drawparallels([0],linewidth=1,dashes=[1,0],labels=[0,1,1,1],fontsize=tick_font_size)
        titlex,titley = (0.05,0.98)
        
        # CHANGED 
        # STUCK getting an error somewhere in this function
        # title = get_pasap_plot_title(dset,varname=varname,timestep=timestep)
        title = "We're getting errors in the get title function"
        fig.text(titlex,titley,title,va='top',fontsize=title_font_size)
   
    colorbar_font_size = 8
    if request == 'GetLegendGraphic':
        # Currently we make the plot, and then if the legend is asked for
        # we use the plot as the basis for the legend. This is not optimal.
        # Instead we should be making the legend manually. However we need
        # to set up more variables, and ensure there is a sensible min and max.
        # See the plot_custom_colors code above
        fig = mpl.figure.Figure(figsize=(64/DPI,256/DPI))
        canvas = FigureCanvas(fig)
        # make some axes
        cax = fig.add_axes([0,0.1,0.2,0.8],axisbg='k')
        # put a legend in the axes
        
        
        cbar = fig.colorbar(main_render,cax=cax,extend='both',format='%1.1f')
        cbar.set_label(var_units,fontsize=colorbar_font_size)
        for t in cbar.ax.get_yticklabels():
            t.set_fontsize(colorbar_font_size)
        # i.e. you don't need to plot the figure...
        #fig.colorbar(filled_contours,cax=cax,norm=colornorm,boundaries=colvs,values=colvs,
        #   ticks=colorbounds,spacing='proportional')
    elif request == 'GetFullFigure':
        # Add the legend to the figure itself.
        # Figure layout parameters
        # plot_coords = tuple with (xi,yi,dx,dy)
        # legend_coords = tuple with (xi,yi,dx,dy) as per mpl convention
        # First change the plot coordinates so that they do not cover the whole image
        legend_coords = (0.8,0.1,0.02,plot_coords[3])
        cax = fig.add_axes(legend_coords,axisbg='k')
        cbar = fig.colorbar(main_render,cax=cax,extend='both')
        for t in cbar.ax.get_yticklabels():
            t.set_fontsize(colorbar_font_size)
        cbar.set_label(var_units,fontsize=colorbar_font_size)
        transparent=False
        # Experimenting here with custom color map and ticks. Assigning everything manually
        # (e.g. ticks=[-2,-1,0,1,2]) is easy. Doing it in an automated way given a range is
        # hard...
        #fig.colorbar(filled_contours,cax=cax,boundaries=colvs,ticks=colorbounds)
        #,norm=colornorm,#boundaries=colvs,values=colvs,        #extend='both')
           
    imgdata = StringIO.StringIO()
    fig.savefig(imgdata,format='png',transparent=transparent)
    
    if save_local_img:
        fig.savefig('map_plot_wms_output.png',format='png')
        return

    if url not in cache:
        cache[url] = dset

    value = imgdata.getvalue()

    #imgdata.close()
    fig = None
    
    
    return value
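
# Hedged usage sketch of mapdap (values are illustrative and depend on the PASAP
# server above being reachable):
# png_bytes = mapdap(varname='hr24_prcp', bbox='100,-45,180,0',
#                    request='GetFullFigure', imgwidth=512, imgheight=512)
# open('pasap_hr24_prcp.png', 'wb').write(png_bytes)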
Exemple #47
0
def basemap_detail(lat,lon,bathy,draw_parallels,*parallels_interval):
    ## plot the coastline

    url='http://geoport.whoi.edu/thredds/dodsC/bathy/gom03_v03'
    def get_index_latlon(url):# use the function to calculate the minlat,minlon,maxlat,maxlon location
        try:
          dataset=open_url(url)
        except:
          print "please check your url!"
          sys.exit(0)
        basemap_lat=dataset['lat']
        basemap_lon=dataset['lon']
        basemap_topo=dataset['topo']
    
        # add the detail of basemap
        minlat=min(lat)-0.01
        maxlat=max(lat)+0.01
        minlon=min(lon)+0.01
        maxlon=max(lon)-0.01
        index_minlat=int(round(np.interp(minlat,basemap_lat,range(0,basemap_lat.shape[0]))))
        index_maxlat=int(round(np.interp(maxlat,basemap_lat,range(0,basemap_lat.shape[0]))))
        index_minlon=int(round(np.interp(minlon,basemap_lon,range(0,basemap_lon.shape[0]))))
        index_maxlon=int(round(np.interp(maxlon,basemap_lon,range(0,basemap_lon.shape[0]))))
        return index_minlat,index_maxlat,index_minlon,index_maxlon,basemap_lat,basemap_lon,basemap_topo
    
    
    index_minlat,index_maxlat,index_minlon,index_maxlon,basemap_lat,basemap_lon,basemap_topo = get_index_latlon(url)
    #print index_minlat,index_maxlat,index_minlon,index_maxlon
    if index_minlat==0 or index_maxlat==0 or index_minlon==0 or index_maxlon==0:
        
        url='http://geoport.whoi.edu/thredds/dodsC/bathy/crm_vol1.nc'
        try:
          dataset=open_url(url)
        except:
          print "please check your url!"
          sys.exit(0)
        basemap_lat=dataset['lat']
        basemap_lon=dataset['lon']
        basemap_topo=dataset['topo']
        # add the detail of basemap
        minlat=min(lat)-0.01
        maxlat=max(lat)+0.01
        minlon=min(lon)+0.01
        maxlon=max(lon)-0.01
        basemap_lat=[float(i) for i in basemap_lat]
        basemap_lat.reverse()
        range_basemap_lat=range(len(basemap_lat))
        range_basemap_lat.reverse()
        index_minlat=int(round(np.interp(minlat,basemap_lat,range_basemap_lat)))
        index_maxlat=int(round(np.interp(maxlat,basemap_lat,range_basemap_lat)))
        index_minlon=int(round(np.interp(minlon,basemap_lon,range(0,basemap_lon.shape[0]))))
        index_maxlon=int(round(np.interp(maxlon,basemap_lon,range(0,basemap_lon.shape[0]))))
    min_index_lat=min(index_minlat,index_maxlat)
    max_index_lat=max(index_minlat,index_maxlat)
    X,Y=np.meshgrid(basemap_lon[index_minlon-15:index_maxlon+15],basemap_lat[min_index_lat-15:max_index_lat+15])

    # You can set negative contours to be solid instead of dashed:
    matplotlib.rcParams['contour.negative_linestyle'] = 'solid'
    #plot the bathy
    if bathy==True:
        CS=plt.contour(X,Y,basemap_topo.topo[min_index_lat-15:max_index_lat+15,index_minlon-15:index_maxlon+15],3,colors='gray',linewidths=0.1)
        plt.clabel(CS, fontsize=7,fmt='%5.0f', inline=1)
    #plt.clabel(cs, fontsize=9, inline=1,fmt='%5.0f'+"m")
    plt.contourf(X,Y,basemap_topo.topo[min_index_lat-15:max_index_lat+15,index_minlon-15:index_maxlon+15],[0,1000],colors='grey')
    ax=plt.gca()
    ax.set_xticklabels([])
    ax.set_yticklabels([])

    
    #set up the map in a Equidistant Cylindrical projection
    #m.drawmapboundary()
    #draw major rivers
    #m.drawrivers()
    if draw_parallels==True:
        
      from mpl_toolkits.basemap import Basemap
      m = Basemap(projection='cyl',llcrnrlat=min(lat),urcrnrlat=max(lat),\
          llcrnrlon=min(lon),urcrnrlon=max(lon),resolution='h',suppress_ticks=False)#,fix_aspect=False)
      if len(parallels_interval)<1:
        parallels_interval=1
        #draw parallels     
        m.drawparallels(np.arange(int(min(lat)),int(max(lat)),float(parallels_interval)),labels=[1,0,0,0],fmt=lat2str,dashes=[2,2])
        #draw meridians
        m.drawmeridians(np.arange(int(min(lon)),int(max(lon)),float(parallels_interval)),labels=[0,0,0,1],fmt=lon2str,dashes=[2,2])     
      else:
        parallels_interval=parallels_interval[0]
        #draw parallels
        m.drawparallels(np.arange(round(min(lat),3),round(max(lat),3),parallels_interval),labels=[1,0,0,0],fmt=lat2str,dashes=[2,2])
        #draw meridians
        m.drawmeridians(np.arange(round(min(lon),3),round(max(lon),3),parallels_interval),labels=[0,0,0,1],fmt=lon2str,dashes=[2,2]) 
'''
Script to download the 10-metre wind results from GFS
via OPeNDAP. The data are saved to a netCDF file.

Created by Rafael Vieira - May/2016

'''

from pydap.client import open_url
import numpy as np
from netCDF4 import Dataset

# access the results via OPeNDAP on the server
u_component = open_url(
    'http://nomads.ncep.noaa.gov:80/dods/gfs_0p25_1hr/gfs20160616/gfs_0p25_1hr_00z'
)['ugrd10m']
v_component = open_url(
    'http://nomads.ncep.noaa.gov:80/dods/gfs_0p25_1hr/gfs20160616/gfs_0p25_1hr_00z'
)['vgrd10m']
tempo = open_url(
    'http://nomads.ncep.noaa.gov:80/dods/gfs_0p25_1hr/gfs20160616/gfs_0p25_1hr_00z'
)['time'][0:121:3]
lats = open_url(
    'http://nomads.ncep.noaa.gov:80/dods/gfs_0p25_1hr/gfs20160616/gfs_0p25_1hr_00z'
)['lat'][:]
lons = open_url(
    'http://nomads.ncep.noaa.gov:80/dods/gfs_0p25_1hr/gfs20160616/gfs_0p25_1hr_00z'
)['lon'][:]

ugrd_10m = u_component.array[0:121:3, :, :]
vgrd_10m = v_component.array[0:121:3, :, :]
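
# Hedged sketch of the netCDF output step described in the header above (the output
# filename and dimension names are assumptions; depending on the pydap version the
# sliced arrays may need a trailing .data to yield plain numpy arrays):
# with Dataset('gfs_wind10m_20160616_00z.nc', 'w', format='NETCDF4') as nc:
#     nc.createDimension('time', ugrd_10m.shape[0])
#     nc.createDimension('lat', len(lats))
#     nc.createDimension('lon', len(lons))
#     nc.createVariable('time', 'f8', ('time',))[:] = tempo[:]
#     nc.createVariable('lat', 'f8', ('lat',))[:] = lats[:]
#     nc.createVariable('lon', 'f8', ('lon',))[:] = lons[:]
#     nc.createVariable('ugrd10m', 'f4', ('time', 'lat', 'lon'))[:] = ugrd_10m
#     nc.createVariable('vgrd10m', 'f4', ('time', 'lat', 'lon'))[:] = vgrd_10m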
lonRange = '[' + str(lonRangeMin) + ':1:' + str(lonRangeMax) + ']'
latRange = '[' + str(latRangeMin) + ':1:' + str(latRangeMax) + ']'
timeRange = '[' + str(timeRangeMin) + ':1:' + str((timeRangeMax)) + ']'  #+3
uwndRange = timeRange + latRange + lonRange
vwndRange = timeRange + latRange + lonRange

generalUrl = 'https://thredds.jpl.nasa.gov/thredds/dodsC/ncml_aggregation/OceanWinds/ccmp/aggregate__CCMP_MEASURES_ATLAS_L4_OW_L3_0_WIND_VECTORS_FLK.ncml'

resultUrl = generalUrl + '?' + 'lon' + lonRange + ',' + 'lat' + latRange + ',' + 'time' + timeRange + ',' + 'uwnd' + uwndRange + ',' + 'vwnd' + vwndRange

print('')
print('URL:', resultUrl)
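
# Hedged illustration of the constraint expression built above (the *Range* index
# variables are defined earlier in the original script; the numbers here are
# hypothetical): with lonRangeMin=100, lonRangeMax=140, latRangeMin=200,
# latRangeMax=240, timeRangeMin=0 and timeRangeMax=0, resultUrl would end in
#   ?lon[100:1:140],lat[200:1:240],time[0:1:0],uwnd[0:1:0][200:1:240][100:1:140],vwnd[0:1:0][200:1:240][100:1:140]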

# OPEN URL

dataset = open_url(resultUrl)

# DOWNLOAD DATA BASE TYPE

lonData = dataset.lon.data[lonArrayMin:lonArrayMax:precisionLon]
latData = dataset.lat.data[latArrayMin:latArrayMax:precisionLat]

print('USED VALUES LON: ', lonData)
print('USED VALUES LAT: ', latData)

timeData = dataset.time.data[timeArray]
passedHours = timeData[0]
usedDate = startDate + datetime.timedelta(hours=passedHours)

print('USED DATE: ', usedDate)
Exemple #50
0
## 12: only "00" and "12" hour tick labels
## 24: no hour tick labels
hr_tick_fq = 12

start = datetime.fromtimestamp(time.mktime(time.strptime(st, "%Y%m%d%H")))
end = datetime.fromtimestamp(time.mktime(time.strptime(et, "%Y%m%d%H")))

times = []
levs = []
bs_data = []
for timestamp in datespan(start, end, delta=timedelta(days=1)):
    src = "http://asn.mesowest.net/data/opendap/PUBLIC/{0:%Y/%m/%d/}{1}_{0:%Y%m%d}.h5"
    source = src.format(timestamp, station)
    print source
    ## pydap created a "dataset" object, which we can see has all the structure of the file.  There is a HEIGHT attribute, which contains the height dimension, and a data attribute which contains all the different data columns.
    data = pd.open_url(source)
    height = data['HEIGHT'][:]
    tt = data['data']['DATTIM'][:]
    bs = data['data']['BS'][:]
    #print height.shape, tt.shape, bs.shape
    if len(times) == 0:
        times = tt
        levs = height
        bs_data = bs
    else:
        if (levs != height).sum() == 0:
            times = np.append(times, tt)
            #levs = np.append(levs,height)
            bs_data = np.append(bs_data, bs, 0)
        else:
            print "attention: different levels"
Exemple #51
0
 def canhandle(url):
     try:
         return isinstance(open_url(url), dap.model.DatasetType)
     except Exception:
         return False
#       University of Alabama in Huntsville
#
#
########################################################

# In[ ]:

import pydap
from pydap.client import open_url

# # Open data stream using OPeNDAP link to file and look for data fields

# In[ ]:

datafile = open_url(
    'https://ghrc.nsstc.nasa.gov/opendap/fieldCampaigns/hs3/HAMSR/data/2013/HAMSR_L2_20130915T061329_20130916T050512_v01.nc'
)
print datafile.keys()

# # Let's plot the ham_dBz field

# In[ ]:

#Import needed Python packages
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.basemap import Basemap
from scipy import ndimage
from datetime import datetime

# # Define the start and end date/time along the flight track
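
# In[ ]:

# Hedged sketch of the ham_dBz plot the cells above describe (the field name and its
# layout in this HAMSR file are assumptions and are not verified here):
# dbz = np.ma.masked_invalid(np.array(datafile['ham_dBz'][:]))
# plt.figure(figsize=(10, 4))
# plt.pcolormesh(dbz)
# plt.colorbar(label='dBZ')
# plt.title('HAMSR ham_dBz')
# plt.show()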
Exemple #53
0
f = open('timereport.csv', 'w')
#g=open('timereportsite.csv','w')
'''
urllatlon = 'http://gisweb.wh.whoi.edu:8080/dods/whoi/emolt_site?emolt_site.SITE'
dataset = open_url(urllatlon)
print dataset
var = dataset['emolt_site']
SITE = list(var.SITE)
print "SITE list has been generated"
#for i in range(len([0,1])):
'''   
SITE=['AB01','AG01','BA01','BA02','BC01','BD01','BF01','BI02','BI01','BM01','BM02','BN01','BS02','CJ01','CP01','DC01','DJ01','DK01','DMF1','ET01','GS01','JA01','JC01','JS06','JT04','KO01','MF02','MM01','MW01','NL01','PF01','PM02','PM03','PW01','RA01','RM02','RM04','SJ01','TA14','TA15','TS01']
for i in range(len(SITE)):   
    print SITE[i]
    url='http://gisweb.wh.whoi.edu:8080/dods/whoi/emolt_sensor?emolt_sensor.TIME_LOCAL&emolt_sensor.SITE='
    dataset=open_url(url+'"'+SITE[i]+'"')
    var=dataset['emolt_sensor']
    print 'hold on  ... extracting your eMOLT mooring data'
    year_month_day = list(var.TIME_LOCAL)
    timelocal=[]
    for j in range(len(year_month_day)):
         timelocal.append(datetime.strptime(year_month_day[j],"%Y-%m-%d"))  
    index = range(len(timelocal))
    index.sort(key=lambda x: timelocal[x])
    timelocal = [timelocal[ii] for ii in index]
    print 'now generating a datetime'
    timepd=pd.DataFrame(range(len(timelocal)),index=timelocal)
    timepd['Year']=timepd.index.year
    year=unique(timepd['Year'])
    monthall=[]
    if len(year)>=minyear:
 def _dap_open(self, url):
     pydap.lib.PROXY = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP,
                                          'localhost', PROXY_PORT)
     dataset = open_url(url)
     return dataset
Exemple #55
0
#elif ((year >= 1993) & (year <= 2000)):
#    file_tag = 'MERRA201'
#elif ((year >= 2001) & (year <= 2009)):
#    file_tag = 'MERRA301'
#elif (year >= 2010):
#    file_tag = 'MERRA300'
file_tag = 'MERRA2_400'

#read grid and variable attributes from the first file
year_tag = '%04d' % year
month_tag = '01'
day_tag = '01'
date_tag = year_tag + month_tag + day_tag
url = server + '/opendap/MERRA2/M2T1NXRAD.5.12.4/' + year_tag + '/' + month_tag + '/' + \
      file_tag + '.tavg1_2d_rad_Nx.' + date_tag + '.nc4'
dataset = open_url(url)
lon = dataset['lon'][:]
#shift data between 0 and 360 deg.
gidx = np.where(np.abs(lon) < 1.0e-10)[0][0]
lon = lon + 180
lat = dataset['lat'][:]
spval = dataset[invarname].missing_value
units = dataset[invarname].units
long_name = dataset[invarname].long_name

#get data from NASA opendap
for month in range(12):
    nday = 0

    #create ROMS forcing file
    month_tag = '%02d' % (month + 1)
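
    # Hedged sketch of how the rest of this loop would typically build each day's URL
    # (the day loop and the 'calendar' import are assumptions; the original snippet is
    # truncated here):
    # import calendar
    # for day in range(1, calendar.monthrange(year, month + 1)[1] + 1):
    #     date_tag = year_tag + month_tag + '%02d' % day
    #     day_url = server + '/opendap/MERRA2/M2T1NXRAD.5.12.4/' + year_tag + '/' + \
    #               month_tag + '/' + file_tag + '.tavg1_2d_rad_Nx.' + date_tag + '.nc4'
    #     day_data = open_url(day_url)[invarname][:]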
Exemple #56
0
def load_ASN_data(st, et, bottom, top, station):
    ## Loads the data from the ASN network through OPeNDAP
    print 'loading data...'
    #___________________Load Data__________________________

    start = datetime.strptime(st, "%Y%m%d%H")
    end = datetime.strptime(et, "%Y%m%d%H")

    times = []
    levs = []
    bs_data = []
    fileList = []
    src = "http://asn.mesowest.net/data/opendap/PUBLIC/{0:%Y/%m/%d/}{1}_{0:%Y%m%d}.h5"

    # Create file_list from start time and end time
    if st[0:8] != et[0:8]:
        for timestamp in datespan(start, end, delta=timedelta(hours=1)):
            source = src.format(timestamp, station)
            if source in fileList:
                pass
            else:
                fileList.append(source)
    else:
        timestamp = start
        source = src.format(timestamp, station)
        fileList.append(source)

    #print fileList
    # Get data from each file in file_list using pydap
    for ff in fileList:
        print ff
        ## pydap created a "dataset" object, which we can see has all the structure of the file.  There is a HEIGHT attribute, which contains the height dimension, and a data attribute which contains all the different data columns.
        data = pd.open_url(ff)
        height = data['HEIGHT'][:]
        tt = data['data']['DATTIM'][:]
        bs = data['data']['BS'][:]
        #print height.shape, tt.shape, bs.shape
        if len(times) == 0:
            times = tt
            levs = height
            bs_data = bs
        else:
            if (levs != height).sum() == 0:
                times = np.append(times, tt)
                #levs = np.append(levs,height)
                bs_data = np.append(bs_data, bs, 0)
            else:
                print "attention: different levels"
                pass

    # Convert times into datetime array
    times_utc = np.array([datetime.utcfromtimestamp(i) for i in times])
    times_local = np.array([datetime.fromtimestamp(i) for i in times])

    # Get index for desired times and create new times array
    #utc
    time_start_index_utc = find_nearest(times_utc, start)
    time_end_index_utc = find_nearest(times_utc, end)
    times_utc = times_utc[time_start_index_utc:time_end_index_utc]
    #local
    time_start_index_local = find_nearest(times_local, start)
    time_end_index_local = find_nearest(times_local, end)
    times_local = times_local[time_start_index_local:time_end_index_local]

    # Find index for height range requested and create new height array
    height_bottom_index = np.where(height == bottom)
    height_top_index = np.where(height == top)
    height = height[height_bottom_index[0]:height_top_index[0] +
                    1]  # need plus one to include last index

    # Slice the backscatter data for desired times and heights
    bs_final = bs_data[time_start_index_utc:time_end_index_utc,
                       height_bottom_index[0]:height_top_index[0] + 1]
    bs_final_local = bs_data[time_start_index_local:time_end_index_local,
                             height_bottom_index[0]:height_top_index[0] + 1]

    return bs_final, bs_final_local, times_utc, times_local, height
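
# Hedged usage sketch of load_ASN_data (station name, time window and height range
# are illustrative placeholders):
# bs, bs_local, t_utc, t_local, heights = load_ASN_data('2013061500', '2013061612',
#                                                       bottom=10, top=300,
#                                                       station='STATION_ID')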
Exemple #57
0
 def test_axis_mean(self):
     """Test the mean over an axis, returning a scalar."""
     original = open_url('/', application=self.app)
     dataset = original.functions.mean(original.SimpleGrid.x)
     self.assertEqual(dataset.x.shape, ())
     np.testing.assert_array_equal(dataset.x.data, np.array(1.0))
# Put your desired plot date in here
#                    YYYY,MM,DD
plot_date = datetime(2017, 4, 17)
print(plot_date)


# In[4]:


# Link OPeNDAP datasets
modis_sst_url = 'http://oceanus.meas.ncsu.edu:8080/thredds/dodsC/secoora/modis/sst.nc'
modis_chla_url = 'http://oceanus.meas.ncsu.edu:8080/thredds/dodsC/secoora/modis/chla.nc'
dineof_sst_url = 'http://oceanus.meas.ncsu.edu:8080/thredds/dodsC/secoora/dineof/sst.nc'
dineof_chla_url = 'http://oceanus.meas.ncsu.edu:8080/thredds/dodsC/secoora/dineof/chla.nc'

modis_sst_dataset = open_url(modis_sst_url,output_grid=False)
modis_chla_dataset = open_url(modis_chla_url,output_grid=False)
dineof_sst_dataset = open_url(dineof_sst_url,output_grid=False)
dineof_chla_dataset = open_url(dineof_chla_url,output_grid=False)
print('Available data:')
print('MODIS SST:', list(modis_sst_dataset.keys()))
print('MODIS Chla:', list(modis_chla_dataset.keys()))
print('DINEOF SST:', list(dineof_sst_dataset.keys()))
print('DINEOF Chla:', list(dineof_chla_dataset.keys()))


# In[5]:


# Find time index from available times
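
# Hedged sketch of the time-index lookup described above (the 'time' variable name,
# its units attribute and the use of netCDF4 date2num are assumptions about these
# datasets):
# from netCDF4 import date2num
# sst_times = np.asarray(modis_sst_dataset['time'][:])
# target = date2num(plot_date, modis_sst_dataset['time'].attributes['units'])
# time_idx = int(np.abs(sst_times - target).argmin())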
Exemple #59
0
 def test_open_url(self):
     """Open an URL and check dataset keys."""
     dataset = open_url('http://localhost:8001/', self.app)
     self.assertEqual(list(dataset.keys()), ["cast"])
    def test_pydap(self):
        if not CFG.get_safe('bootstrap.use_pydap', False):
            raise unittest.SkipTest('PyDAP is off (bootstrap.use_pydap)')
        ph = ParameterHelper(self.dataset_management, self.addCleanup)
        pdict_id = ph.create_extended_parsed()

        stream_def_id = self.pubsub_management.create_stream_definition(
            'example', parameter_dictionary_id=pdict_id)
        self.addCleanup(self.pubsub_management.delete_stream_definition,
                        stream_def_id)

        tdom, sdom = time_series_domain()

        dp = DataProduct(name='example')
        dp.spatial_domain = sdom.dump()
        dp.temporal_domain = tdom.dump()

        data_product_id = self.data_product_management.create_data_product(
            dp, stream_def_id)
        self.addCleanup(self.data_product_management.delete_data_product,
                        data_product_id)

        self.data_product_management.activate_data_product_persistence(
            data_product_id)
        self.addCleanup(
            self.data_product_management.suspend_data_product_persistence,
            data_product_id)

        dataset_id = self.resource_registry.find_objects(data_product_id,
                                                         PRED.hasDataset,
                                                         id_only=True)[0][0]
        monitor = DatasetMonitor(dataset_id)
        self.addCleanup(monitor.stop)

        rdt = ph.get_rdt(stream_def_id)
        ph.fill_rdt(rdt, 10)
        ph.publish_rdt_to_data_product(data_product_id, rdt)
        self.assertTrue(monitor.event.wait(10))

        gevent.sleep(
            1)  # Yield to other greenlets, had an issue with connectivity

        pydap_host = CFG.get_safe('server.pydap.host', 'localhost')
        pydap_port = CFG.get_safe('server.pydap.port', 8001)
        url = 'http://%s:%s/%s' % (pydap_host, pydap_port, dataset_id)

        # Do it three times to test that the cache doesn't corrupt the requests/responses
        for i in xrange(3):
            ds = open_url(url)
            np.testing.assert_array_equal(ds['time'][:], np.arange(10))
            untested = []
            for k, v in rdt.iteritems():
                if k == rdt.temporal_parameter:
                    continue
                context = rdt.context(k)
                if isinstance(context.param_type, QuantityType):
                    np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
                elif isinstance(context.param_type, ArrayType):
                    if context.param_type.inner_encoding is None:
                        values = np.empty(rdt[k].shape, dtype='O')
                        for i, obj in enumerate(rdt[k]):
                            values[i] = str(obj)
                        np.testing.assert_array_equal(ds[k][k][:][0], values)
                    elif len(rdt[k].shape) > 1:
                        values = np.empty(rdt[k].shape[0], dtype='O')
                        for i in xrange(rdt[k].shape[0]):
                            values[i] = ','.join(
                                map(lambda x: str(x), rdt[k][i].tolist()))
                elif isinstance(context.param_type, ConstantType):
                    np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
                elif isinstance(context.param_type, CategoryType):
                    np.testing.assert_array_equal(ds[k][k][:][0], rdt[k])
                else:
                    untested.append('%s (%s)' % (k, context.param_type))
            if untested:
                raise AssertionError('Untested parameters: %s' % untested)