Example #1
    def load(self, **kwargs):
        """Load OSCAR fields for a given day."""
        self._timeparams(**kwargs)
        md = self.jd - pl.datestr2num('1992-10-05')
        filename = os.path.join(self.datadir, "oscar_vel%i.nc" % self.yr)
        if not os.path.exists(filename):
            self.download(filename)
        filenam2 = os.path.join(self.datadir, "oscar_vel%i.nc" % (self.yr+1))
        if not os.path.exists(filenam2):
            self.download(filenam2)

        nc1 = netcdf_file(filename)
        tvec = nc1.variables['time'][:]
        t1 = int(np.nonzero(tvec <= md)[0].max())
        print(t1, max(tvec))
        if t1 < (len(tvec) - 1):
            nc2 = nc1
            tvec2 = tvec
            t2 = t1 + 1
        else:
            # Roll over into next year's file for the second time level.
            nc2 = netcdf_file(filenam2)
            tvec2 = nc2.variables['time'][:]
            t2 = 0

        def readfld(ncvar, tpos):
            return self.gmt.field(ncvar[tpos, 0, :, :self.imt])[self.j1:self.j2,
                                                                self.i1:self.i2]
        u1 = readfld(nc1.variables['u'], t1)
        v1 = readfld(nc1.variables['v'], t1)
        u2 = readfld(nc2.variables['u'], t2)
        v2 = readfld(nc2.variables['v'], t2)
        # Linear interpolation in time between the two bracketing fields.
        rat = float(md - tvec[t1]) / float(tvec2[t2] - tvec[t1])
        self.u = u2*rat + u1*(1-rat)
        self.v = v2*rat + v1*(1-rat)
        print(self.jd, md, t1, t2)
Example #2
    def load(self, fld="nwnd", **kwargs):
	"""Load field for a given julian date. Returns u,v, or nwnd(windnorm)"""
        self._timeparams(**kwargs)
        filename = os.path.join(self.datadir,
                            "uv%04i%02i%02i.nc" % (self.yr, self.mn, self.dy))
        if not os.path.isfile(filename):
            self.download(filename)
        try:
            nc = netcdf_file(filename)
        except Exception:
            os.remove(filename)
            self.download(filename)
            try:
                nc = netcdf_file(filename)
            except Exception:
                # rstrip would strip any trailing '.', 'n', 'c' characters,
                # not the literal ".nc" suffix; slice the extension off instead.
                filename = filename[:-3] + "rt.nc"
                if not os.path.isfile(filename):
                    self.download(filename)
                try:
                    nc = netcdf_file(filename)
                except TypeError:
                    os.remove(filename)
                    self.download(filename)
                    nc = netcdf_file(filename)
                    
        u = nc.variables['u'][:].copy()
        v = nc.variables['v'][:].copy()
        u[u<-999] = np.nan
        v[v<-999] = np.nan
        if (fld=="u") | (fld=="uvel"):
            self.uvel = self.gmt.field(np.squeeze(u))
        elif (fld=="v") | (fld=="vvel"):
            self.vvel = self.gmt.field(np.squeeze(v))
        else:
            self.nwnd = self.gmt.field(np.squeeze(np.sqrt(u**2 + v**2)))
Example #3
	def getVarData(fn,var):
		def getMembers(tfh):
			names = sorted(tfh.getnames())
			names.remove('.')
			names.remove('./input.cfg')
			names.remove('./output.log')
			
			return [tfh.getmember(n) for n in names]

		def getPassCount(fl):
			fh = netcdf_file(fl)
			S = fh.variables['u'].shape
			fh.close()
			return S[0]
		
		tfh = tarfile.open(fn)
		members = getMembers(tfh)
		Nf = len(members)
		Np = getPassCount(tfh.extractfile(members[0]))
		
		data = pl.empty( (Nf,Np) )
		for k in range(len(members)):
			fl = tfh.extractfile(members[k])
			fh = netcdf_file(fl)
			data[k,:] = fh.variables[var][:,0,0]
			fh.close()
		tfh.close()
		return data
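
A minimal usage sketch for getVarData (the archive name runs.tar and the variable name 'u' are hypothetical; the tarball must contain one netCDF member per simulation plus the ./input.cfg and ./output.log entries removed above):

data = getVarData('runs.tar', 'u')
print(data.shape)   # (number of member files, number of passes)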
Example #4
 def setup_grid(self):
     """Define lat and lon matrices for njord"""
     gc = netcdf_file(self.gridfile)
     self.lat = gc.variables['lat'][:].copy()
     self.gmt = gmtgrid.Shift(gc.variables['lon'][:].copy())
     self.lon = self.gmt.lonvec         
     self.llon,self.llat = np.meshgrid(self.lon,self.lat)
Example #5
 def __init__(self, projname, casename="", **kwargs):
     super(Partsat,self).__init__(projname, casename, **kwargs)
     postgresql.DB.__init__(self, projname, casename, database='partsat')
     self.flddict = {'par':('L3',),'chl':('box8',)}
     if projname == 'oscar':
         import pysea.MODIS
         self.sat = pysea.NASA.nasa(res='4km',
                                    ijarea=(700,1700,2000,4000))
         def calc_jd(ints,intstart):
             return self.base_iso + float(ints)/6-1
     elif projname=="casco":
         self.sat = casco.Sat(res='500m')
         def calc_jd(ints,intstart):
             return (self.base_iso +(ints-(intstart)*10800)/150 +
                     intstart/8)
     elif projname=="gompom":
         n = netcdf_file('/Users/bror/svn/modtraj/box8_gompom.cdf')
         self.gomi = n.variables['igompom'][:]
         self.gomj = n.variables['jgompom'][:]
         self.sati = n.variables['ibox8'][:]
         self.satj = n.variables['jbox8'][:]
     elif projname=="jplSCB":
         from njord import mati
         self.sat = mati.Cal()
     elif projname=="jplNow":
         from njord import mati
         self.sat = mati.Cal()
Example #6
 def setup_grid(self):
     """Setup necessary variables for grid """
     g = netcdf_file(self.gridfile)
     self.llat = g.variables['lat_rho'][:].copy()
     self.llon = g.variables['lon_rho'][:].copy()
     self.depth = g.variables['h'][:].copy()
     self.Cs_r = np.array([-0.882485522505154, -0.778777844867132,
                           -0.687254423585503, -0.606483342183883,
                           -0.535200908367393, -0.472291883107274,
                           -0.416772032329648, -0.367772728223386,
                           -0.324527359249072, -0.286359336228826,
                           -0.252671506867986, -0.222936813095075,
                           -0.196690045050832, -0.173520562714503,
                           -0.153065871294677, -0.135005949869352,
                           -0.11905824454479,  -0.104973247799366,
                           -0.0925305948496471, -0.0815356159649889,
                           -0.0718162907903607, -0.0632205570267136,
                           -0.0556139313622304, -0.0488774054330831,
                           -0.042905583895255, -0.0376050354769002,
                           -0.0328928312128658, -0.0286952469915389,
                           -0.0249466101148999, -0.021588271825806,
                           -0.0185676897273263, -0.0158376057382559,
                           -0.0133553067236276, -0.0110819562325242,
                           -0.00898198688799211, -0.00702254392277909,
                           -0.00517297115481568, -0.0034043313603439,
                           -0.00168895354075999, 0.])
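
These Cs_r values appear to be ROMS s-coordinate stretching coefficients at rho points; Example #11 below combines them with bathymetry and sea surface height to build a 3-D depth array. A minimal sketch of that construction, assuming depth (bathymetry) and ssh (sea surface height) are 2-D arrays on the same grid:

import numpy as np
# zlev: depth of every s-level, shape (len(Cs_r), ny, nx)
zlev = (depth + ssh)[np.newaxis, :, :] * Cs_r[:, np.newaxis, np.newaxis]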
Example #7
 def load(self, mn=1):
     """ Load mixed layer climatology for a given day"""
     self.mn = mn
     nc = netcdf_file(self.datadir + "/" + self.mldFile)
     self.mld = gmtgrid.convert(nc.variables["mld"][self.mn - 1, self.j1 : self.j2, self.i1 : self.i2], self.gr)
     self.mld[self.mld < 0] = np.nan
     self.mld[self.mld > 1e4] = np.nan
Example #8
    def __init__(self, fname, variables=None, use_tmpfile=True):
        # try to figure out what we got
        #assert isinstance(fobj, file)
        assert isinstance(fname, str)
        if fname[:4]=='http':
            # it's an OpenDAP url
            self.ncf = netCDF4.Dataset(fname)
        elif os.path.exists(fname):
            if fnmatch(fname, '*.bz2') and use_tmpfile:
                fobj = tempfile.TemporaryFile('w+b', suffix='nc')
                subprocess.call(["bzcat", fname], stdout=fobj)
                fobj.seek(0)
            elif fnmatch(fname, '*.bz2'):
                fobj = bz2.BZ2File(fname, 'rb')
            else:
                fobj = open(fname, 'rb')
            self.ncf = netcdf_file(fobj)
            #self.ncf = netCDF4.Dataset(fobj)
        else:
            raise IOError("Couldn't figure out how to open " + fname)

        self.fname = fname
        if variables is None:
            variables = self.ncf.variables.keys()
        for varname in variables:
            self._get_and_scale_variable(varname)
Example #9
 def setup_grid(self):
     gc = netcdf_file(self.gridfile)
     print(self.gridfile)
     self.llat  = gc.variables['y'][:]
     self.llon  = gc.variables['x'][:]
     self.depth = gc.variables['depth'][:].copy()
     self.depth[self.depth<0] = np.nan
Example #10
    def get_data(file_id):
        datafile = DataFile.objects.get(pk=file_id)
        file = netcdf_file(os.path.join(settings.MEDIA_ROOT, settings.WAVE_WATCH_DIR, datafile.file.name))
        variable_names_in_file = file.variables.keys()
        print(variable_names_in_file)


        # longs = [item for sublist in file.variables['longitude'][:1] for item in sublist]
        # print "longs:"
        # for each in longs:
        #     print each
        # lats = file.variables['latitude'][:, 0]
        # print "lats:"
        # for each in lats:
        #     print each

        all_day_height = file.variables['HTSGW_surface'][:, :, :]
        all_day_lat = file.variables['latitude'][:, :]
        all_day_long = file.variables['longitude'][:, :]

        just_this_forecast_height = all_day_height[0][:1, :]
        just_this_forecast_lat = all_day_lat[ :,0]
        just_this_forecast_long= all_day_long[0][ :]
        print "\n\n\nWAVE HEIGHTS "
        for each in just_this_forecast_height:
            print each

        print "\n\n LATS:"
        print just_this_forecast_lat

        print "\n\n LONGS:"
        print just_this_forecast_long
Example #11
 def load(self,fldname, **kwargs):
     """ Load velocity fields for a given day"""
     self._timeparams(**kwargs)
     if fldname == "uv":
         self.load('u',**kwargs)
         self.load('v',**kwargs)
         self.uv = np.sqrt(self.u[:,1:,:]**2 + self.v[:,:,1:]**2)
         return
     
     if self.opendap:
         tpos = int(self.jd) - 714800
         k1   = kwargs.get("k1", getattr(self, "k1", self.klev)) 
         k2   = kwargs.get("k2", getattr(self, "k2", k1+1))
         dapH = open_url(self.dapurl)
         fld  = dapH[fldname][tpos,k1:k2,self.j1:self.j2,self.i1:self.i2] 
     else:
         filename = self.jd2filename(self.jd)
         if not os.path.isfile(filename):
             print "File missing"
             url = urlparse.urljoin(self.dataurl, os.path.basename(filename))
             self.retrive_file(url, filename)
         with netcdf_file(filename) as nc:
             fld =  nc.variables[fldname][:].copy()
             self.ssh =  np.squeeze(nc.variables['zeta'][:])
             self.zlev = ((self.depth + self.ssh)[np.newaxis,:,:] *
                           self.Cs_r[:,np.newaxis,np.newaxis])
     
     fld[fld>9999] = np.nan
     setattr(self, fldname, np.squeeze(fld))
Example #12
    def get_data(forecast_index, file_id):
        datafile = DataFile.objects.get(pk=file_id)
        file = netcdf_file(os.path.join(settings.MEDIA_ROOT, settings.WAVE_WATCH_DIR, datafile.file.name))
        variable_names_in_file = file.variables.keys()
        print(variable_names_in_file)

        all_day_height = file.variables['HTSGW_surface'][:, :, :]
        all_day_direction = file.variables['DIRPW_surface'][:,:,:]
        all_day_lat = file.variables['latitude'][:, :]
        all_day_long = file.variables['longitude'][:, :]
        all_day_times = file.variables['time'][:]
        #print "times: "
        #for each in all_day_times:
            #print each

        basetime = datetime.datetime(1970,1,1,0,0,0)

        # Check the first value of the forecast
        forecast_zero = basetime + datetime.timedelta(all_day_times[0]/3600.0/24.0,0,0)
        print(forecast_zero)

        directions = all_day_direction[forecast_index, ::10, :]
        directions_mod = 90.0 - directions + 180.0
        index = directions_mod > 180
        directions_mod[index] = directions_mod[index] - 360

        index = directions_mod < -180
        directions_mod[index] = directions_mod[index] + 360

        U = 10.*np.cos(np.deg2rad(directions_mod))
        V = 10.*np.sin(np.deg2rad(directions_mod))

        print "height:", all_day_height[:10, :10]
Example #13
def read_woce_netcdf(fnm):
    """ Read a CTD cast from a WOCE NetCDF file. """

    def getvariable(nc, key):
        return nc.variables[key].data.copy()

    nc = netcdf_file(fnm)
    coords = (getvariable(nc, "longitude")[0], getvariable(nc, "latitude")[0])

    pres = getvariable(nc, "pressure")
    sal = getvariable(nc, "salinity")
    salqc = getvariable(nc, "salinity_QC")
    sal[salqc!=2] = np.nan
    temp = getvariable(nc, "temperature")
    # tempqc = getvariable(nc, "temperature_QC")
    # temp[tempqc!=2] = np.nan
    oxy = getvariable(nc, "oxygen")
    oxyqc = getvariable(nc, "oxygen_QC")
    oxy[oxyqc!=2] = np.nan

    date = getvariable(nc, "woce_date")
    time = getvariable(nc, "woce_time")
    return narwhal.CTDCast(pres, sal, temp, oxygen=oxy,
                           coords=coords,
                           properties={"woce_time":time, "woce_date":date})
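
A usage sketch (the file name is hypothetical; any single-cast WOCE netCDF containing the variables read above should work):

cast = read_woce_netcdf("woce_ctd_cast.nc")  # hypothetical path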
Example #14
    def load(self,fldname, **kwargs):
        """ Load velocity fields for a given day"""
        if fldname == "uv":
            self.load('u',jd=jd, yr=yr, mn=mn, dy=dy, hr=hr)
            self.load('v',jd=jd, yr=yr, mn=mn, dy=dy, hr=hr)
            self.uv = np.sqrt(self.u[:,1:,:]**2 + self.v[:,:,1:]**2)
            return
        self._timeparams(**kwargs)
        tpos = np.nonzero(self.jdvec <= self.jd)[0].max()
        vc = {'uvel': ['u',    'ecom.cdf',      9.155553e-05,  0],
              'vvel': ['v',    'ecom.cdf',      9.155553e-05,  0],
              'wvel': ['v',    'ecom.cdf',      6.103702e-08,  0],
              'temp': ['temp', 'ecom.cdf',      0.0005340739, 12.5],
              'salt': ['salt', 'ecom.cdf',      0.0006103702, 20],
              'chlo': ['chl',  'bem_water.cdf', 0.001525925,  50],
              'newp': ['np',   'bem_water.cdf', 0.001525925,  50],
              'netp': ['pp',   'bem_water.cdf', 0.001525925,  50],
              'tpoc': ['tpoc', 'bem_water.cdf', 0.01525925,  500],
              }
        try:
            nc = netcdf_file(self.datadir + vc[fldname][1])
        except KeyError:
            raise KeyError("%s is not included" % fldname)

        fld = nc.variables[vc[fldname][0]][tpos,:,
                                           self.j1:self.j2,
                                           self.i1:self.i2]
        fld = (fld * vc[fldname][2] + vc[fldname][3]).astype(np.float32)
        fld[:,self.landmask] = np.nan
        self.__dict__[fldname] = fld
        return tpos
Example #15
    def test_netcdf_wave_format(self):
        print("Running Wave NetCDF Format Test: ")
        #directory of where files will be saved at
        destination_directory = os.path.join(settings.MEDIA_ROOT, settings.WAVE_WATCH_DIR)

        print "Downloading File"
        #file names might need to be created dynamically in the future if ftp site changes
        file_name = "outer.nc"

        #Connect to FTP site to get the file modification data
        ftp = FTP('cil-www.oce.orst.edu')
        ftp.login()

        #retrieve the ftp modified datetime format
        ftp_dtm = ftp.sendcmd('MDTM' + " /pub/outgoing/ww3data/" + file_name)

        #convert ftp datetime format to a string datetime
        modified_datetime = datetime.strptime(ftp_dtm[4:], "%Y%m%d%H%M%S").strftime("%Y-%m-%d")

        #Create File Name and Download actual File into media folder
        url = urljoin(settings.WAVE_WATCH_URL, file_name)
        filename = "{0}_{1}_{2}.nc".format("OuterGrid", modified_datetime, uuid4())
        urllib.urlretrieve(url=url, filename=os.path.join(destination_directory, filename))

        datafile_read_object = netcdf_file(os.path.join(settings.MEDIA_ROOT, settings.WAVE_WATCH_DIR, filename))
        print "Checking Variables: latitude, longitude, HTSGW_surface"
        surface = datafile_read_object.variables['HTSGW_surface'][:, :, :]
        long = datafile_read_object.variables['longitude'][:]
        lat = datafile_read_object.variables['latitude'][:]
        ftp.quit()
        self.assertIsNotNone(surface)
        self.assertIsNotNone(long)
        self.assertIsNotNone(lat)
Example #16
    def _default_cache_entry_factory(self, key):
        """Called on a DataCache access __missing__() call.
        Gets all (step, row, col) entries for the file indexed by key and reads all data
        returning the entry for the requested key

        Arguments:
        key - (pixel_step, row, col) tuple

        """
        # A cache miss will generate a file lookup, read and cache of the associated data.
        path, _, _, _ = self._get_data_location(*key)   # path of file containing our data

        # Generate all the entries like the following for data in the file path
        # {(step, row, col): [module_data object reference, 0-3], ... }
        # First, enumerate data indices in current file.
        indices = self._enumerate_all_data_indices_in_file(os.path.basename(path))

        # OK, now read everything from the file
        f = netcdf_file(path, 'r')
        # buffer_ix, module_ix
        for pixel_step, row, col, channel, buffer_ix, module_ix in indices:
            self.module_data_cache[(pixel_step, row, col)] = \
                [self._get_mode1_pixel_data(f, buffer_ix, module_ix), channel]
        f.close()

        return self.module_data_cache[key]
Example #17
  def get_searise(thklim = 0.0):
    
    filename = inspect.getframeinfo(inspect.currentframe()).filename
    home     = os.path.dirname(os.path.abspath(filename))
 
    direc = home + "/greenland/searise/Greenland_5km_dev1.2.nc"
    data  = netcdf_file(direc, mode = 'r')
    vara  = dict()
    
    # retrieve data :
    x     = array(data.variables['x1'][:])
    y     = array(data.variables['y1'][:])
    h     = array(data.variables['usrf'][:][0])
    adot  = array(data.variables['smb'][:][0])
    b     = array(data.variables['topg'][:][0])
    T     = array(data.variables['surftemp'][:][0]) + 273.15
    q_geo = array(data.variables['bheatflx'][:][0]) * 60 * 60 * 24 * 365
    lat   = array(data.variables['lat'][:][0])
    lon   = array(data.variables['lon'][:][0])
    U_sar = array(data.variables['surfvelmag'][:][0])
    dhdt  = array(data.variables['dhdt'][:][0])
 
    direc = home + "/greenland/searise/smooth_target.mat" 
    U_ob  = loadmat(direc)['st']
    
    H             = h - b
    h[H < thklim] = b[H < thklim] + thklim
    H[H < thklim] = thklim

    Tn            = 41.83 - 6.309e-3*h - 0.7189*lat - 0.0672*lon + 273
    
    # extents of domain :
    east  = max(x)
    west  = min(x)
    north = max(y)
    south = min(y)

    #projection info :
    proj   = 'stere'
    lat_0  = '90'
    lat_ts = '71'
    lon_0  = '-39'
 
    names = ['H', 'S', 'adot', 'B', 'T', 'q_geo','U_sar', \
             'U_ob', 'lat', 'lon', 'Tn','dhdt']
    ftns  = [H, h, adot, b, T, q_geo,U_sar, U_ob, lat, lon, Tn, dhdt]

    vara['dataset'] = 'searise'
    for n, f in zip(names, ftns):
      vara[n] = {'map_data'          : f,
                 'map_western_edge'  : west, 
                 'map_eastern_edge'  : east, 
                 'map_southern_edge' : south, 
                 'map_northern_edge' : north,
                 'projection'        : proj,
                 'standard lat'      : lat_0,
                 'standard lon'      : lon_0,
                 'lat true scale'    : lat_ts}
    return vara
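
A usage sketch, assuming Greenland_5km_dev1.2.nc and smooth_target.mat exist under greenland/searise/ next to this module:

vara = get_searise(thklim=10.0)   # enforce a 10 m minimum ice thickness
H = vara['H']['map_data']         # thickness grid, clipped at thklim
print(vara['dataset'], H.shape)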
Example #18
 def setup_grid(self):
     """Define lat and lon matrices for njord"""
     gc = netcdf_file(self.gridfile)
     self.lat = gc.variables["lat"][:].copy()
     lon = gc.variables["lon"][:].copy()
     lon[lon > 360] = lon[lon > 360] - 360
     self.lon, self.gr = gmtgrid.config(lon, dim=0)
     self.llon, self.llat = np.meshgrid(self.lon, self.lat)
Example #19
    def load(self, name):
        if isinstance(name, str):
            p = self._load_netcdf(netcdf_file(name, 'r'))
        else:
            p = self._load_netcdf(name)

        self.profiles = SIUnits().apply(p)
        return self
Example #20
    def get_currents_data(forecast_index, file_id):
        datafile = DataFile.objects.get(pk=file_id)
        data_file = netcdf_file(os.path.join(settings.MEDIA_ROOT, settings.NETCDF_STORAGE_DIR, datafile.file.name))
        currents_u = data_file.variables['u'][forecast_index][39]
        currents_v = data_file.variables['v'][forecast_index][39]

        print "currents u:", 10.0*currents_u
        print "\n\n\ncurrents v:", 10.0*currents_v
Example #21
    def get_period_data(forecast_index, file_id):
        datafile = DataFile.objects.get(pk=file_id)
        file = netcdf_file(os.path.join(settings.MEDIA_ROOT, settings.WAVE_WATCH_DIR, datafile.file.name))
        variable_names_in_file = file.variables.keys()
        print(variable_names_in_file)

        all_day_period = file.variables['PERPW_surface'][forecast_index][:,:]
        print "Period of waves, in seconds:", all_day_period
Example #22
    def dev_to_gmt(self):
        """
        Should write grid to a NetCDF file
        """
        values = np.random.rand(10, 20, 30)
        grd = CartesianGrid3D(values)

        fname = 'temp.grd'
        if os.path.isfile(fname):
            os.remove(fname)

        # should create a NetCDF file 
        grd.to_gmt(fname)
        nc = netcdf_file(fname, 'r')
        for k in ['x_range', 'spacing', 'z', 'y_range',
                'dimension', 'z_range']:
            self.assertTrue(k in nc.variables)

        # default should write values[:, :, 0]
        # Note swapped ordering here
        self.assertEqual(nc.variables['dimension'][0], values.shape[1])
        self.assertEqual(nc.variables['dimension'][1], values.shape[0])

        zz = np.reshape(nc.variables['z'][::].copy(),
                (nc.variables['dimension'][1],
                    nc.variables['dimension'][0]))

        for ix in range(values.shape[0]):
            for iy in range(values.shape[1]):
                self.assertEqual(zz[ix, iy], values[ix, iy, 0])


        # should write values[:, iy, :]
        iy0 = 3
        grd.to_gmt(fname, iy=iy0)
        nc = netcdf_file(fname, 'r')
        self.assertEqual(nc.variables['dimension'][1], values.shape[0])
        self.assertEqual(nc.variables['dimension'][0], values.shape[2])
        
        zz = np.reshape(nc.variables['z'][::].copy(),
                (nc.variables['dimension'][1],
                    nc.variables['dimension'][0]))
        
        for ix in range(values.shape[0]):
            for iy in range(values.shape[2]):
                self.assertEqual(zz[ix, iy], values[ix, iy0, iy])
Example #23
 def setup_grid(self):
     gc = netcdf_file(self.gridfile)
     dlon, dlat = gc.variables["spacing"][:]
     lon1, lon2 = gc.variables["x_range"][:]
     lat1, lat2 = gc.variables["y_range"][:]
     self.lon = np.arange(lon1, lon2, dlon)
     self.lat = np.arange(lat1, lat2, dlat)
     self.llon, self.llat = np.meshgrid(self.lon, self.lat)
Example #24
 def setup_grid(self):
     """Setup necessary variables for grid """
     g = netcdf_file(self.gridfile, 'r')
     self.llat = g.variables['TLAT'][:]
     self.gmt  = gmtgrid.Shift(g.variables['TLONG'][0,:].copy())
     self.lon  = self.gmt.lonvec
     self.llon = g.variables['TLONG'][:].copy()
     self.llon[self.llon>180] = self.llon[self.llon>180]-360
     self.llon = self.gmt.field(self.llon)       
Example #25
def readResults(rootFile):

# Read inverted profiles
	ff = io.netcdf_file(rootFile+'.inversion', 'r')
	synthProf = ff.variables['map'][:]
	ff.close()
	
# Read inverted parameters
	ff = io.netcdf_file(rootFile+'.parameters', 'r')
	pars = ff.variables['map'][:]
	ff.close()
	
# Read errors
	ff = io.netcdf_file(rootFile+'.errors', 'r')
	errors = ff.variables['map'][:]
	ff.close()
	
	return synthProf, pars, errors
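
A usage sketch; 'myrun' is a hypothetical root name, so myrun.inversion, myrun.parameters and myrun.errors must all exist:

synthProf, pars, errors = readResults('myrun')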
Example #26
 def setup_grid(self):
     print(self.dataurl)
     if not os.path.exists(self.gridfile):
         self.download(self.gridfile)
     gc = netcdf_file(self.gridfile)
     self.lat = gc.variables['latitude'][:]
     self.gmt = gmtgrid.Shift(gc.variables['longitude'][:self.imt].copy())
     self.lon = self.gmt.lonvec 
     self.llon,self.llat = np.meshgrid(self.lon,self.lat)
Example #27
 def setup_grid(self):
     """Setup necessary variables for grid """
     gc = netcdf_file(self.datadir + '/land.nc')
     self.lat = gc.variables['lat'][:]
     self.gmt = gmtgrid.Shift(gc.variables['lon'][:].copy())
     self.lon = self.gmt.lonvec 
     self.llon,self.llat = np.meshgrid(self.lon,self.lat)
     self.landmask = self.gmt.field(
         gc.variables['land'][:].copy()).astype(bool)
Example #28
 def setup_grid(self):
     """Setup necessary variables for grid """
     if not os.path.isfile(self.gridfile):
         self.download(self.gridfile, 'vvel')
     g = netcdf_file(self.gridfile, 'r')
     self.latvec = g.variables['lat'][:]
     self.gmt = gmtgrid.Shift(g.variables['lon'][:].copy())
     self.lonvec = self.gmt.lonvec
     self.llon,self.llat = np.meshgrid(self.lonvec,self.latvec)
Example #29
 def load(self, fldname, **kwargs):
     """ Load NCEP reanalysis fields for a given day"""
     self._timeparams(**kwargs)
     filename = '%s/%s.%04i.nc' % (self.datadir,self.pardict[fldname],self.yr)
     if not os.path.exists(filename): self.download(filename)
     nc  = netcdf_file(filename)        
     fobj = nc.variables[self.pardict[fldname]]
     fld = self.gmt.field(fobj.data * fobj.scale_factor + fobj.add_offset)
     self.__dict__[fldname] = fld[self.yd-1,
                                  self.j1:self.j2, self.i1:self.i2]
Example #30
 def setup_grid(self):
     """Setup necessary variables for grid """
     if not os.path.isfile(self.gridfile):
         self.download(self.gridfile, 'vvel')
     g = netcdf_file(self.gridfile, 'r')
     self.llat = g.variables['Latitude'][:]
     self.gmt = gmtgrid.Shift(g.variables['Longitude'][1649,:].copy())
     self.llon = self.gmt.field(g.variables['Longitude'][:].copy())
     self.llon[self.llon>180]  = self.llon[self.llon>180]-360
     self.llon[self.llon<-180] = self.llon[self.llon<-180]+360
Example #31
def to_netcdf_yr(spat_lc, map_idx, lat, lon, resin, final_landclasses, yr, model, out_file):
    """
    Build a NetCDF file for each time step that contains the gridded fraction
    of land cover for each land class.

    :param spat_lc:                 An array of gridded data as fraction land cover (n_grids, n_landclasses)
    :param map_idx:                 An array of cell index positions for spatially mapping the gridded data (n_grids, n_landclasses)
    :param lat:                     An array of latitude values for mapping (n)
    :param lon:                     An array of longitude values for mapping (n)
    :param resin:                   The input spatial resolution in geographic degrees (float)
    :param final_landclasses:       An array of land classes (n_classes)
    :param yr:                      The target time step (int)
    :param model:                   The name of the model running (str)
    :param out_file:                A full path string of the output file with extension (str)
    :return:                        A NetCDF classic file.
    """

    # create NetCDF file
    with sio.netcdf_file(out_file, 'w') as f:

        # add scenario
        f.history = 'test file'

        # create dimensions
        f.createDimension('lat', len(lat))
        f.createDimension('lon', len(lon))
        f.createDimension('pft', len(final_landclasses))
        f.createDimension('nv', 2)

        # create variables
        lts = f.createVariable('lat', 'f4', ('lat',))
        lns = f.createVariable('lon', 'f4', ('lon',))
        lcs = f.createVariable('pft', 'i', ('pft',))

        lc_frac = f.createVariable('landcoverfraction', 'f8', ('pft', 'lat', 'lon',))

        # create metadata
        lts.units = 'degrees_north'
        lts.standard_name = 'latitude'
        lns.units = 'degrees_east'
        lns.standard_name = 'longitude'
        lcs.description = 'Land cover class'

        lc_frac.units = 'fraction'
        lc_frac.scale_factor = 1.
        lc_frac.add_offset = 0.
        lc_frac.projection = 'WGS84'
        lc_frac.description = 'Fraction land cover for {0} at {1} degree.'.format(yr, resin)
        lc_frac.comment = 'See scale_factor (divide by 100 to get percentage, offset is zero)'
        lc_frac.title = 'Downscaled land use projections at {0} degree, downscaled from {1}'.format(resin, model)

        # assign data
        lts[:] = lat
        lns[:] = lon
        lcs[:] = range(1, len(final_landclasses) + 1)

        # set missing value to -1
        lc_frac.missing_value = -1.

        for pft in range(0, len(final_landclasses), 1):

            # create land use matrix and populate with -1
            pft_mat = np.zeros(shape=(len(lat), len(lon))) - 1

            # extract base land use data for the target PFT
            slh = spat_lc[:, pft]

            # assign values to matrix
            pft_mat[np.int_(map_idx[0, :]), np.int_(map_idx[1, :])] = slh

            # set negative values to -1
            pft_mat[pft_mat < 0] = -1

            # assign to variable
            lc_frac[pft, :, :] = pft_mat
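
A runnable sketch with synthetic inputs (all names and values below are made up for illustration; map_idx carries the (row, col) grid position of each flattened cell, and the module defining to_netcdf_yr is assumed to import scipy.io as sio):

import numpy as np

n_lat, n_lon = 10, 20
lat = np.linspace(89.75, -89.75, n_lat)
lon = np.linspace(-179.75, 179.75, n_lon)
classes = ['crop', 'forest', 'urban']

# One flattened grid cell per (row, col) pair
rows, cols = np.meshgrid(np.arange(n_lat), np.arange(n_lon), indexing='ij')
map_idx = np.vstack([rows.ravel(), cols.ravel()])
spat_lc = np.random.rand(n_lat * n_lon, len(classes))

to_netcdf_yr(spat_lc, map_idx, lat, lon, 0.5, classes, 2050,
             'demo_model', 'lc_demo_2050.nc')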
Example #32
    return contours


# Define the bounds of the output grid
x_min = 171312
x_max = 633468
y_min = 5286950
y_max = 5904080
z_val = None
cl = [-100, -75, -50, -40, -30, -20, -15, -12, -9, -6, -3]
grid_spacing = 100
fid_out = "contours_depth_utm60s.vtk"

# Read the .grd file from Williams
with netcdf_file("grid_exclude_wgs84.grd") as data:
    x = np.copy(data.variables["x"][:])
    y = np.copy(data.variables["y"][:])
    z = np.copy(data.variables["z"][:])

# Convert grid-like data to flat arrays
x, y = np.meshgrid(x, y)
x = x.flatten()
y = y.flatten()
z = z.flatten()

# Convert to UTM60S
x, y = lonlat_utm(x, y, -60, False)

# Create arrays for interpolation
points = np.vstack((x, y)).T
Example #33
                     normed=(1016,1022),
                     norm_method='max')


if __name__ == "__main__":
    if argv[1:] and np.all([path.isdir(dname) for dname in argv[1:]]):
        files = {dir: glob(path.join(dir, '*.CDF')) for dir in argv[1:]}
    elif argv[1:]:
        files = {'Input': argv[1:]}
    else:
        files = glob('*/*.CDF')
        files = {dir: glob(path.join(dir, '*.CDF'))
                 for dir in {path.dirname(file) for file in files}}

    for day in files:
        day_files = [netcdf_file(file) for file in files[day]]
        sample_types = {file.experiment_title.decode().split('_r')[0]
                        for file in day_files}
        if not day_files: continue
        mpl.figure()
        mpl.title(day)
        color_list = {type: color for type, color in
                      zip(sample_types,
                          mpl.cm.Set3(np.linspace(0, 1, len(sample_types)))
                         )
                     }
        for file in day_files:
            type = file.experiment_title.decode().split('_r')[0]
            plot_tic(file,
                     color=color_list[type],
                     zeroed=20,
Example #34
            continue
        else:
            st = changeidx[idx - 1]
            fileptr.write(
                '%d %d %s^%s-%s+%s=%s\n' %
                (st * resolu, et * resolu, phonList[phoneBag[0][st]],
                 phonList[phoneBag[1][st]], phonList[phoneBag[2][st]],
                 phonList[phoneBag[3][st]], phonList[phoneBag[4][st]]))
    fileptr.close()


#####  data extraction #########

# read in the mean and variance
if dataMV is not None:
    mv = io.netcdf_file(dataMV, 'r')
    if inOutData == 1:
        meanVec = mv.variables['inputMeans'][:].copy()
        varVec = mv.variables['inputStdevs'][:].copy()
    else:
        meanVec = mv.variables['outputMeans'][:].copy()
        varVec = mv.variables['outputStdevs'][:].copy()
    mv.close()
else:
    print('dataMV is not specified. Extracted data will be normalized data.\n')

#
dataList = os.listdir(dataDir)
for dataFile in dataList:
    if re.search(dataPattern, dataFile):
        print(dataFile)
Example #35
  def get_searise(thklim = 0.0):
    
    s    = "::: getting Searise data from DataFactory :::"
    print_text(s, DataFactory.color)
    
    global home
    
    direc = home + "/greenland/searise/Greenland_5km_dev1.2.nc"
    data  = netcdf_file(direc, mode = 'r')
    vara  = dict()
    
    needed_vars = {'topg'       : 'B',
                   'usrf'       : 'S',
                   'surftemp'   : 'T',
                   'lat'        : 'lat',
                   'lon'        : 'lon',
                   'smb'        : 'adot',
                   'bheatflx'   : 'q_geo',
                   'dhdt'       : 'dhdt',
                   'surfvelmag' : 'U_sar'}
    
    s    = "    - data-fields collected : python dict key to access -"
    print_text(s, DataFactory.color)
    for v in data.variables:
      try:
        txt = '"' + needed_vars[v] + '"'
      except KeyError:
        txt = ''
      print_text('      Searise : %-*s key : %s '%(30,v, txt), '230')
    
    
    # retrieve data :
    x     = array(data.variables['x1'][:])
    y     = array(data.variables['y1'][:])
    S     = array(data.variables['usrf'][:][0])
    adot  = array(data.variables['smb'][:][0])
    B     = array(data.variables['topg'][:][0])
    T     = array(data.variables['surftemp'][:][0]) + 273.15
    q_geo = array(data.variables['bheatflx'][:][0]) * 60 * 60 * 24 * 365
    lat   = array(data.variables['lat'][:][0])
    lon   = array(data.variables['lon'][:][0])
    U_sar = array(data.variables['surfvelmag'][:][0])
    dhdt  = array(data.variables['dhdt'][:][0])
 
    direc = home + "/greenland/searise/smooth_target.mat" 
    U_ob  = loadmat(direc)['st']
    
    H             = S - B
    S[H < thklim] = B[H < thklim] + thklim

    Tn            = 41.83 - 6.309e-3*S - 0.7189*lat - 0.0672*lon + 273
    
    # extents of domain :
    east  = max(x)
    west  = min(x)
    north = max(y)
    south = min(y)

    #projection info :
    proj   = 'stere'
    lat_0  = '90'
    lat_ts = '71'
    lon_0  = '-39'
    
    # create projection :
    txt  =   " +proj="   + proj \
           + " +lat_0="  + lat_0 \
           + " +lat_ts=" + lat_ts \
           + " +lon_0="  + lon_0 \
           + " +k=1 +x_0=0 +y_0=0 +no_defs +a=6378137 +rf=298.257223563" \
           + " +towgs84=0.000,0.000,0.000 +to_meter=1"
    p    = Proj(txt)
    
    # save the data in matlab format :
    vara['pyproj_Proj']       = p
    vara['map_western_edge']  = west 
    vara['map_eastern_edge']  = east 
    vara['map_southern_edge'] = south 
    vara['map_northern_edge'] = north
    vara['nx']                = len(x)
    vara['ny']                = len(y)
 
    names = ['S', 'adot', 'B', 'T', 'q_geo','U_sar', \
             'U_ob', 'lat', 'lon', 'Tn','dhdt']
    ftns  = [S, adot, B, T, q_geo,U_sar, U_ob, lat, lon, Tn, dhdt]

    vara['dataset']   = 'Searise'
    vara['continent'] = 'greenland'
    for n, f in zip(names, ftns):
      vara[n] = f
    return vara
Example #36
  def get_ant_measures(res = 900):
    
    s    = "::: getting Antarctica measures data from DataFactory :::"
    print_text(s, DataFactory.color)

    global home
 
    if res == 900:
      direc    = home + '/antarctica/measures/antarctica_ice_velocity_900m.nc' 
    elif res == 450:
      direc    = home + '/antarctica/measures/antarctica_ice_velocity_450m.nc' 
    else:
      print "get_ant_measures() 'res' arg must be either 900 or 450"
      exit(0)

    data     = netcdf_file(direc, mode = 'r')
    vara     = dict()
  
    # retrieve data :
    vx   = array(data.variables['vx'][:])
    vy   = array(data.variables['vy'][:])
    err  = array(data.variables['err'][:])
    mask = (vx != 0.0).astype('i')
    
    names = ['vx', 'vy', 'v_err', 'mask']
    ftns  = [ vx,   vy,   err,     mask ]
    
    for n in names:
      print_text('      Measures : %-*s key : "%s" '%(30,n,n), '230')
    
    # extents of domain :
    nx,ny =  shape(vx)
    dx    =  res
    west  = -2800000.0
    east  =  west + nx*dx
    north =  2800000.0
    south =  north - ny*dx

    #projection info :
    proj   = 'stere'
    lat_0  = '-90'
    lat_ts = '-71'
    lon_0  = '0'
    
    # create projection :
    txt  =   " +proj="   + proj \
           + " +lat_0="  + lat_0 \
           + " +lat_ts=" + lat_ts \
           + " +lon_0="  + lon_0 \
           + " +k=1 +x_0=0 +y_0=0 +no_defs +a=6378137 +rf=298.257223563" \
           + " +towgs84=0.000,0.000,0.000 +to_meter=1"
    p    = Proj(txt)
    
    # save the data in matlab format :
    vara['pyproj_Proj']       = p
    vara['map_western_edge']  = west 
    vara['map_eastern_edge']  = east 
    vara['map_southern_edge'] = south 
    vara['map_northern_edge'] = north
    vara['nx']                = nx
    vara['ny']                = ny
    
    # save the data in matlab format :
    vara['dataset']   = 'measures'
    vara['continent'] = 'antarctica'
    for n, f in zip(names, ftns):
      vara[n] = f[::-1, :]
    return vara
Example #37
  def get_rignot():
    
    s    = "::: getting Greenland Rignot data from DataFactory :::"
    print_text(s, DataFactory.color)
    
    global home
    
    direc = home + '/greenland/rignot/velocity_greenland_v4Aug2014.nc'
    data  = netcdf_file(direc, mode = 'r')
    vara  = dict()
    
    needed_vars = {'vx'  : 'vx',
                   'vy'  : 'vy',
                   'err' : 'v_err'}
    
    s    = "    - data-fields collected : python dict key to access -"
    print_text(s, DataFactory.color)
    for v in data.variables:
      try:
        txt = '"' + needed_vars[v] + '"'
      except KeyError:
        txt = ''
      print_text('      Rignot : %-*s key : %s '%(30,v, txt), '230')
    
    # retrieve data :
    vx   = array(data.variables['vx'][:])
    vy   = array(data.variables['vy'][:])
    err  = array(data.variables['err'][:])
    mask = (vx != 0.0).astype('i')
     
    # extents of domain :
    ny,nx =  shape(vx)
    dx    =  150
    west  = -638000.0
    east  =  west + nx*dx
    north = -657600.0
    south =  north - ny*dx

    #projection info :
    proj   = 'stere'
    lat_0  = '90'
    lat_ts = '70'
    lon_0  = '-45'
    
    # create projection :
    txt  =   " +proj="   + proj \
           + " +lat_0="  + lat_0 \
           + " +lat_ts=" + lat_ts \
           + " +lon_0="  + lon_0 \
           + " +k=1 +x_0=0 +y_0=0 +no_defs +a=6378137 +rf=298.257223563" \
           + " +towgs84=0.000,0.000,0.000 +to_meter=1"
    p    = Proj(txt)
    
    # save the data in matlab format :
    vara['pyproj_Proj']       = p
    vara['map_western_edge']  = west 
    vara['map_eastern_edge']  = east 
    vara['map_southern_edge'] = south 
    vara['map_northern_edge'] = north
    vara['nx']                = nx
    vara['ny']                = ny
    
    names = ['vx', 'vy', 'v_err', 'mask']
    ftns  = [ vx,   vy,   err,     mask ]
    
    print_text('      Rignot : %-*s key : "%s"'%(30,names[-1],names[-1]), '230')
    
    # save the data in matlab format :
    vara['dataset']   = 'Rignot'
    vara['continent'] = 'greenland'
    for n, f in zip(names, ftns):
      vara[n] = f[::-1, :]
    return vara
Example #38
 def setup_grid(self):
     gc = netcdf_file(self.gridfile)
     self.llat = gc.variables['y'][:].copy()
     self.llon = gc.variables['x'][:].copy()
     self.depth = gc.variables['depth'][:].copy()
     self.depth[self.depth<0] = np.nan
Example #39
	def open(self):
		self.ncdf = sio.netcdf_file(self.filename, mode='r')
		self.opened = True
Example #40
def manage_outputs(Data, Opti, Config, it):

    # -- Report the full BasinSummary.txt files?
    #if Config.repBS == 1:
    #    os.system('mv '+Config.PATH_EXEC+'/BasinSummary.txt '+Config.PATH_OUT+'/BasinSummary_run'+str(it+1)+'.txt')

    # -- Group the output files in one across simulations,
    #    separating by observations points and veg type where it applies
    for oname in Data.names:
        # Only non-map outputs get a time-series history file.
        if (Data.obs[oname]['type'] not in ('map', 'mapTs')
                and (it == 0 or Opti.begfail == 1)):
            # Historic time series file names
            if Config.restart == 0:
                Data.obs[oname][
                    'sim_hist'] = Config.PATH_OUT + '/' + oname + '_all.tab'
            # Header of files
            with open(Data.obs[oname]['sim_hist'], 'w') as f_out:
                f_out.write('Sample,' +
                            ','.join([str(i + 1)
                                      for i in range(Config.trimL)]) + '\n')

    # Reinit begfail (otherwise will never write all!)
    Opti.begfail = 0

    # Save current run outputs (and delete the file to relieve Maxwell...)
    for oname in Data.names:
        #print oname,

        # Integrated variables (in BasinSummary.txt)
        if Data.obs[oname]['type'] == 'Total':
            idx = Data.obs[oname]['sim_pts'] - 1

            tmp = np.genfromtxt(Data.obs[oname]['sim_file'],
                                delimiter='\t',
                                skip_header=1,
                                unpack=True)[idx] * Data.obs[oname]['conv']

            # Shave off the transient part (if any)
            if Config.trimB > 1:
                tmp = tmp[Config.trimB - 1:Config.trimB - 1 + Config.trimL]
                if len(tmp) != Config.trimL:
                    sys.exit("ERROR -> Problem with output trim: we've got " +
                             str(len(tmp)) + ' instead of ' +
                             str(Config.trimL))

            with open(Data.obs[oname]['sim_hist'], 'a') as f_out:
                f_out.write(
                    str(it + 1) + ',' + ','.join([str(j)
                                                  for j in list(tmp)]) + '\n')

        # Time series
        if Data.obs[oname]['type'] == 'Ts':
            print(oname)

            hskip = Data.nts + 3
            idx = np.argsort(np.array(
                Data.sim_order))[Data.obs[oname]['sim_pts'] - 1] + 1

            tmp = np.genfromtxt(Data.obs[oname]['sim_file'],
                                delimiter='\t',
                                skip_header=hskip,
                                unpack=True)[idx] * Data.obs[oname]['conv']

            # Shave off the transient part (if any)
            if Config.trimB > 1:
                tmp = tmp[Config.trimB - 1:Config.trimB - 1 + Config.trimL]

            with open(Data.obs[oname]['sim_hist'], 'a') as f_out:
                f_out.write(
                    str(it + 1) + ',' + ','.join([str(j)
                                                  for j in list(tmp)]) + '\n')

        # Fixed-value (initial-value) maps ---------------------------------------------------------
        if Data.obs[oname]['type'] == 'map':

            # Missing value for PCraster to numpy conversion
            MV = -9999.

            f_m = Config.PATH_EXEC + '/' + Data.obs[oname]['sim_file'] + '.map'
            if len(glob.glob(f_m)) == 0:
                print("Warning: the variable " + oname +
                      " seems to be missing from the EcH2O outputs...")
                continue

            # Now that we have what we need, read the PCraster map...
            var_val = pcr2numpy(readmap(f_m), MV)

            # Write output NCDF file
            ncFile = Config.PATH_OUT + '/' + oname + '_all.nc'
            # -open nc dataset
            # If first run, create file
            if (it == 0):
                ncFile = Config.PATH_OUT + '/' + oname + '_all.nc'
                rootgrp = spio.netcdf_file(ncFile, 'w')
                rootgrp.createDimension('time', 0)
                var_y = pcr2numpy(ycoordinate(Config.cloneMap), MV)[:, 1]
                var_x = pcr2numpy(xcoordinate(Config.cloneMap), MV)[1, :]
                rootgrp.createDimension('latitude', len(var_y))
                rootgrp.createDimension('longitude', len(var_x))
                rootgrp.createDimension('ensemble', Config.nEns)
                lat = rootgrp.createVariable('latitude', 'f4', ('latitude', ))
                lat.standard_name = 'Latitude'
                lat.long_name = 'Latitude cell centres'
                lon = rootgrp.createVariable('longitude', 'f4',
                                             ('longitude', ))
                lon.standard_name = 'Longitude'
                lon.long_name = 'Longitude cell centres'
                ens = rootgrp.createVariable('ensemble', 'i', ('ensemble', ))
                ens.standard_name = 'Ensemble'
                ens.long_name = 'Ensembles of runs'
                # -assign lat and lon to variables
                lat[:] = var_y
                lon[:] = var_x
                ens[:] = np.arange(Config.nEns) + 1
                # -set netCDF attribute
                rootgrp.title = 'Maps of ' + oname
                rootgrp.institution = 'NRI, University of Aberdeen'
                rootgrp.author = 'A. Neill'
                rootgrp.history = 'Created on %s' % (datetime.now())
                varStructure = ('latitude', 'longitude', 'ensemble')
                ncVariable = rootgrp.createVariable(oname, 'f4', varStructure)
                ncVariable.standard_name = oname
                # -write to file
                rootgrp.sync()
                rootgrp.close()

            # Write the actual values for this run
            rootgrp = spio.netcdf_file(ncFile, 'a')
            # - write data
            ncVariable = rootgrp.variables[oname]
            ncVariable[:, :, it] = var_val
            # -update file and close
            rootgrp.sync()
            rootgrp.close()

        # Time-varying maps ------------------------------------------------------------------------
        if Data.obs[oname]['type'] == 'mapTs':

            #print oname

            # Missing value for PCraster to numpy conversion
            MV = -9999.

            lensuf = 8 - len(Data.obs[oname]['sim_file'])
            #print lensuf

            MapNames = []
            itOK = []

            for it2 in range(1, Data.lsim + 1):
                # Only save files beyond the spinup/transient period (if any)
                if it2 > Config.spinup and it2 >= Config.trimB and it2 < Config.trimB + Config.trimL:
                    suf = '0' * lensuf + '.' + format(it2, '03')
                    suf2 = format(it2, '04')

                    # Sometimes an output has '_' as final character, which is
                    # replaced with the thousands digit for sim > 1000: outputs
                    # ending in '_' keep the full zero-padding width, all others
                    # give up one pad character to the thousands digit.
                    if it2 >= 1000:
                        if lensuf == 0 and Data.obs[oname]['sim_file'][-1] == "_":
                            pad = '0' * lensuf
                        else:
                            pad = '0' * (lensuf - 1)
                        suf = pad + str(it2 // 1000) + '.' + format(it2 % 1000, '03')

                    # Store names and it2 index
                    # If the "_" is replaced when sim > 1000, need to remove and replace with suf
                    if lensuf == 0 and Data.obs[oname]['sim_file'][
                            -1] == "_" and it2 >= 1000:
                        file2Read = Data.obs[oname]['sim_file'][:-1]
                        f_m = Config.PATH_EXEC + '/' + file2Read + suf
                    else:
                        f_m = Config.PATH_EXEC + '/' + Data.obs[oname][
                            'sim_file'] + suf

                    if len(glob.glob(f_m)) == 0:
                        continue
                    else:
                        MapNames += [f_m]
                        itOK += [it2]
                        print(f_m)
            # Time values for netCDF output
            var_t = np.array([(Config.treal[x - Config.trimB] -
                               datetime(1901, 1, 1, 0, 0)).days for x in itOK])
            if (len(var_t) == 0):
                print("Warning: the variable " + oname +
                      " seems to be missing from the EcH2O outputs...")
                continue

            # Second now that we have what we need...
            for it2 in range(len(itOK)):
                # Read map at first time step of interest, convert to array using a missing value,
                # and add an extra 3rd dimension (empty) for later appending
                if (it2 == 0):
                    var_val = pcr2numpy(readmap(MapNames[it2]), MV)[None, ...]
                # Read subsequent map, same procedure and then append
                else:
                    var_val = np.append(var_val,
                                        pcr2numpy(readmap(MapNames[it2]),
                                                  MV)[None, ...],
                                        axis=0)

            # Write output NCDF file
            ncFile = Config.PATH_OUT + '/' + oname + '_all.nc'
            # -open nc dataset
            # If first run, create file
            if (it == 0):
                ncFile = Config.PATH_OUT + '/' + oname + '_all.nc'
                rootgrp = spio.netcdf_file(ncFile, 'w')
                rootgrp.createDimension('time', 0)
                var_y = pcr2numpy(ycoordinate(Config.cloneMap), MV)[:, 1]
                var_x = pcr2numpy(xcoordinate(Config.cloneMap), MV)[1, :]
                rootgrp.createDimension('latitude', len(var_y))
                rootgrp.createDimension('longitude', len(var_x))
                if Config.mode == 'forward_runs':
                    rootgrp.createDimension('ensemble', Config.nEns)
                elif Config.mode == 'calib_runs':
                    rootgrp.createDimension('ensemble', Opti.nit)
                date_time = rootgrp.createVariable('time', 'f8', ('time', ))
                date_time.standard_name = 'time'
                date_time.long_name = 'Days since 1901-01-01 00:00:00.0'
                date_time.units = 'Days since 1901-01-01 00:00:00.0'
                date_time.calendar = 'gregorian'
                lat = rootgrp.createVariable('latitude', 'f4', ('latitude', ))
                lat.standard_name = 'Latitude'
                lat.long_name = 'Latitude cell centres'
                lon = rootgrp.createVariable('longitude', 'f4',
                                             ('longitude', ))
                lon.standard_name = 'Longitude'
                lon.long_name = 'Longitude cell centres'
                ens = rootgrp.createVariable('ensemble', 'i', ('ensemble', ))
                ens.standard_name = 'Ensemble'
                ens.long_name = 'Ensembles of runs'
                # -assign lat, lon and t to variables
                lat[:] = var_y
                lon[:] = var_x
                date_time[:] = var_t

                if Config.mode == 'forward_runs':
                    ens[:] = np.arange(Config.nEns) + 1
                elif Config.mode == 'calib_runs':
                    ens[:] = np.arange(Opti.nit) + 1

                #print 'var_x'
                #print var_x
                #print 'var_y'
                #print var_y
                #print 'var_t'
                #print var_t

                # -set netCDF attribute
                rootgrp.title = 'Maps of ' + oname
                rootgrp.institution = 'NRI, University of Aberdeen'
                rootgrp.author = 'A. Neill'
                rootgrp.history = 'Created on %s' % (datetime.now())
                varStructure = ('time', 'latitude', 'longitude', 'ensemble')
                ncVariable = rootgrp.createVariable(oname, 'f4', varStructure)
                ncVariable.standard_name = oname
                # -write to file
                rootgrp.sync()
                rootgrp.close()

            # Write the actual values for this run
            rootgrp = spio.netcdf_file(ncFile, 'a')
            # - write data
            ncVariable = rootgrp.variables[oname]
            ncVariable[:, :, :, it] = var_val
            # -update file and close
            rootgrp.sync()
            rootgrp.close()
Example #41
yearinsec=3600*365*24.

#path where to store output
outputpath='/work/mh0033/m300411/DataEB/RESULTS/PAPER/Prelim2/'

################## READ IN ARCTIC OCEAN AREA (computed in AOmask.sh) ###################################

d["AOarea_ocean_mod"]=np.zeros((len(model_list)+1))

for i,mod in enumerate(model_list):
  print(mod, 'AO area')

  inputpath_time='/work/mh0033/m300411/DataEB/WORK_DATA/'
  
  file0=glob.glob(inputpath_time+'AOarea2/Arctic_ocean_totalarea_%s.nc' %mod)
  fid0=sio.netcdf_file(file0[0])
  d["AOarea_ocean_mod"][i]=fid0.variables['tos'][0]

d["AOarea_ocean_mod"][len(model_list)]=np.nanmean(d["AOarea_ocean_mod"][0:len(model_list)])

###############################################################################################################

#############################################################################################################

#Define axis labels and titles for each variable
tit={}
lab={}
tit['rsds']='ISW'
lab['rsds']='Incoming SW [W/m$^2$]'
tit['rsus']='OSW'
lab['rsus']='Outgoing SW [W/m$^2$]'
Example #42
def read_csv(fname):
    with sio.netcdf_file(fname, "r") as f:
        tt = np.array([
            f.variables['x_ret'][0], f.variables['x_ret_err'][0],
            f.variables['x_err_res'][0]
        ])
        fi = np.array([
            f.variables['x_ret'][1], f.variables['x_ret_err'][1],
            f.variables['x_err_res'][1]
        ])
        rl = np.array([
            f.variables['x_ret'][2], f.variables['x_ret_err'][2],
            f.variables['x_err_res'][2]
        ])
        ri = np.array([
            f.variables['x_ret'][3], f.variables['x_ret_err'][3],
            f.variables['x_err_res'][3]
        ])
        lwp = np.array([
            f.variables['wp_ret'][0], f.variables['wp_ret_err'][0],
            f.variables['wp_err_res'][0]
        ])
        iwp = np.array([
            f.variables['wp_ret'][1], f.variables['wp_ret_err'][1],
            f.variables['wp_err_res'][1]
        ])
        twp = np.array([
            f.variables['wp_ret'][2], f.variables['wp_ret_err'][2],
            f.variables['wp_err_res'][2]
        ])
        ctemp = np.array(f.variables["av_ctemp"][:])
        res = f.variables['residuum'][:].copy()
        wn = f.variables['wavenumber'][:].copy()
        rms = np.sqrt(np.mean(np.array(res)**2))
        conv = f.variables['conv'][:].copy()
        pwv = f.variables['pwv'][:].copy()
        cloud = np.array([
            np.float_(f.variables['cloud_base'][:]),
            np.float_(f.variables['cloud_top'][:])
        ])
        x_a = np.array(f.variables['x_a'][:].copy())
        x_a_err = np.array(f.variables['x_a_err'][:].copy())
        pos = [0.0, 0.0]
        pos[0] = f.variables['lat'][0].copy()
        pos[1] = f.variables['lon'][0].copy()
        print("Results:")
        print("Filename: {}".format(fname))
        print("Average Cloud temperature (K): {}\n".format(ctemp[0]))
        print("Cloud base height (m): {}".format(cloud[0]))
        print("Cloud top height (m): {}\n".format(cloud[1]))
        print("Converged: {}\n".format(conv))
        print("DIRECT PRODUCTS")
        print("Liquid Optical Depth (1): ({} +- {})".format(
            np.float_(tt[0]), np.float_(tt[1])))
        print("Ice Optical Depth (1): ({} +- {})".format(
            np.float_(fi[0]), np.float_(fi[1])))
        print("Liquid Radius (um): ({} +- {})".format(np.float_(rl[0]),
                                                      np.float_(rl[1])))
        print("Ice Radius (um): ({} +- {})".format(np.float_(ri[0]),
                                                   np.float_(ri[1])))
        print("Root-Mean-Squared Error (mW/sr (cm-1) m2): {}\n".format(rms))
        print("DERIVED PRODUCTS")
        print("Liquid Water Path (g/m2): ({} +- {})".format(
            np.float_(lwp[0]), np.float_(lwp[1])))
        print("Ice Water Path (g/m2): ({} +- {})".format(
            np.float_(iwp[0]), np.float_(iwp[1])))
        print("Total Water Path (g/m2): ({} +- {})".format(
            np.float_(twp[0]), np.float_(twp[1])))

    return [
        tt, fi, rl, ri, lwp, iwp, twp, rms, ctemp, pwv, conv, cloud, pos, x_a,
        x_a_err, wn, res
    ]
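A hypothetical call site for the reader above; the file name is invented, and the seventeen return values come back in the fixed order of the return statement:

results = read_csv('retrieval_output.nc')
tt, fi, rl, ri, lwp, iwp, twp, rms = results[:8]
ctemp, pwv, conv, cloud, pos, x_a, x_a_err, wn, res = results[8:]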
Example #43
0
## Created By: Douglas Finch
## Python 2.7
## Take daily CH4 fluxes and make monthly mean netcdf
## Also regrid to 4x5
##==============================================================================
import numpy as np
import scipy.io as io
from datetime import datetime
import calendar
import datetime as dt
##==============================================================================
dirc = '/home/dfinch/Documents/CH4/emissions/regridded_emissions/'
orig_file = '%sGlobal_CH4_flux_daily_05x05_TOTAL.nc' % dirc
new_file = '%sGlobal_CH4_flux_monthly_4x5_TOTAL.nc' % dirc

open_f = io.netcdf_file(orig_file)
lat = open_f.variables['latitude'].data
lon = open_f.variables['longitude'].data
flux = open_f.variables['CH4_Flux'].data

month_grid = np.zeros((12, flux.shape[1], flux.shape[2]))
total_days = 0
for m in range(12):
    month_len = calendar.monthrange(2013, m + 1)[1]
    month_grid[m, :, :] = np.mean(flux[total_days:total_days +
                                       month_len, :, :],
                                  axis=0)
    total_days += month_len

new_lat = np.arange(-90, 94, 4)
new_lat[0] = -88.
new_lat[-1] = 88
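The script breaks off before the regridding and file writing that the header comment promises. As a rough sketch only: if plain box averaging were acceptable, the 0.5x0.5 monthly means could be coarsened like this (the name flux_4x5 and the neglect of the polar half-boxes are my assumptions):

# hypothetical block average from 0.5x0.5 (360x720) to 4x5-like boxes (45x72);
# a real GEOS-Chem 4x5 grid has half-size polar boxes, ignored here
flux_4x5 = month_grid.reshape(12, 45, 8, 72, 10).mean(axis=(2, 4))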
Example #44
0
def to_netcdf_lc(spat_lc, lat, lon, resin, final_landclasses, years, step, model, out_dir):
    """
    Build a NetCDF file for each land class that contains the gridded fraction
    of land cover of that land class over all simulation years.

    :param spat_lc:            A 3D array representing fraction of land cover (lat, lon, fraction landclass)
    :param lat:                An array of latitude values for mapping (n)
    :param lon:                An array of longitude values for mapping (n)
    :param resin:              The input spatial resolution in geographic degrees (float)
    :param final_landclasses:  An array of land classes (n_classes)
    :param years:              A list of output years (int)
    :param step:               The current time step (int)
    :param model:              The name of the model running (str)
    :param out_dir:            A full path string of the output directory (str)
    :return:                   None; writes one NetCDF classic file per land class.
    """

    temp_file_prefix = 'tmp_lc_'
    out_file_prefix = 'lc_yearly_'

    # just save yearly data until the final year
    if step != years[-1]:
        np.save('{0}/{1}{2}'.format(out_dir, temp_file_prefix, step), spat_lc)
        return

    # at the final year, gather data from all temporary files into one 4D array
    # with dimensions (lat, lon, year, landclass)
    tmp_files = ['{0}/{1}'.format(out_dir, f) for f in os.listdir(out_dir) if 'tmp_lc_' in f]
    lc_yearly = [np.load(f) for f in tmp_files]
    lc_yearly = np.stack(lc_yearly + [spat_lc], 2)

    # set negative values to -1
    lc_yearly[lc_yearly < 0] = -1

    # remove temporary files
    for tf in tmp_files:
        os.remove(tf)

    # output NetCDF file for each land class over all years
    for lc_index, lc in enumerate(final_landclasses):

        out_fname = '{0}/{1}{2}.nc'.format(out_dir, out_file_prefix, lc)

        # create NetCDF file
        with sio.netcdf_file(out_fname, 'w') as f:

            # create dimensions
            f.createDimension('lat', len(lat))
            f.createDimension('lon', len(lon))
            f.createDimension('time', len(years))

            # create variables
            lts = f.createVariable('lat', 'f4', ('lat',))
            lns = f.createVariable('lon', 'f4', ('lon',))
            times = f.createVariable('time', 'i4', ('time',))

            lc_frac = f.createVariable('landcoverfraction', 'f8', ('lat', 'lon', 'time'))

            # create metadata
            lts.units = 'degrees_north'
            lts.standard_name = 'latitude'
            lns.units = 'degrees_east'
            lns.standard_name = 'longitude'
            times.description = 'years'

            lc_frac.units = 'fraction'
            lc_frac.scale_factor = 1.
            lc_frac.add_offset = 0.
            lc_frac.projection = 'WGS84'
            lc_frac.description = 'Fraction land cover for {0} at {1} degree.'.format(lc, resin)
            lc_frac.comment = 'Values are fractions; multiply by 100 for percent (scale_factor 1, offset 0)'
            lc_frac.title = 'Downscaled land use projections at {0} degree, downscaled from {1}'.format(resin, model)

            lc_frac.missing_value = -1.

            # Add data to netcdf object
            lts[:] = lat
            lns[:] = lon
            times[:] = years
            lc_frac[:] = lc_yearly[:, :, :, lc_index]
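A hypothetical driver for to_netcdf_lc: intermediate years only dump a temporary .npy file, and the final year gathers everything into one NetCDF per land class. The grid, class names, model name and directory below are all invented:

import os
import numpy as np

out_dir = '/tmp/lc_demo'                     # hypothetical output directory
os.makedirs(out_dir, exist_ok=True)
lat = np.linspace(89.75, -89.75, 10)
lon = np.linspace(-179.75, 179.75, 20)
years = [2005, 2010, 2015]
for yr in years:
    spat_lc = np.random.rand(10, 20, 3)      # fraction per (lat, lon, class)
    to_netcdf_lc(spat_lc, lat, lon, 0.5, ['crops', 'forest', 'urban'],
                 years, yr, 'GCAM', out_dir)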
Example #45
0
def get_variables(filename):
    open_file = io.netcdf_file(filename)
    return open_file
Example #46
0
 def setup_grid(self):
     gc = netcdf_file(self.gridfile)
Example #47
0
  def get_bedmap1(thklim = 0.0):
    
    s    = "::: getting Bedmap 1 data from DataFactory :::"
    print_text(s, DataFactory.color)
    
    global home
 
    direc = home + '/antarctica/bedmap1/ALBMAPv1.nc'
    data  = netcdf_file(direc, mode = 'r')
    vara  = dict()
    
    needed_vars = {'lsrf'       : 'B',
                   'usrf'       : 'S',
                   'temp'       : 'T',
                   'acca'       : 'acca',
                   'accr'       : 'accr',
                   'ghffm'      : 'ghffm',
                   'ghfsr'      : 'ghfsr'}
    
    s    = "    - data-fields collected : python dict key to access -"
    print_text(s, DataFactory.color)
    for v in data.variables:
      try:
        txt = '"' + needed_vars[v] + '"'
      except KeyError:
        txt = ''
      print_text('      Bedmap 1 : %-*s key : %s '%(30,v, txt), '230')
    
    
    # retrieve data :
    x       = array(data.variables['x1'][:])
    y       = array(data.variables['y1'][:])
    b       = array(data.variables['lsrf'][:])
    h       = array(data.variables['usrf'][:])
    adota   = array(data.variables['acca'][:])
    adotr   = array(data.variables['accr'][:])
    mask    = array(data.variables['mask'][:])
    srfTemp = array(data.variables['temp'][:]) + 273.15
    q_geo_f = array(data.variables['ghffm'][:]) * 60 * 60 * 24 * 365 / 1000
    q_geo_s = array(data.variables['ghfsr'][:]) * 60 * 60 * 24 * 365 / 1000

    H             = h - b
    h[H < thklim] = b[H < thklim] + thklim
    H[H < thklim] = thklim
    
    names = ['B','S','H','acca','accr','ghffm','ghfsr','temp']
    ftns  = [b, h, H, adota, adotr, q_geo_f, q_geo_s, srfTemp]
    
    # extents of domain :
    east    = max(x)
    west    = min(x)
    north   = max(y)
    south   = min(y)

    #projection info :
    proj   = 'stere'
    lat_0  = '-90'
    lat_ts = '-71'
    lon_0  = '0'
    
    # create projection :
    txt  =   " +proj="   + proj \
           + " +lat_0="  + lat_0 \
           + " +lat_ts=" + lat_ts \
           + " +lon_0="  + lon_0 \
           + " +k=1 +x_0=0 +y_0=0 +no_defs +a=6378137 +rf=298.257223563" \
           + " +towgs84=0.000,0.000,0.000 +to_meter=1"
    p    = Proj(txt)
    
    # save the data in matlab format :
    vara['dataset']           = 'bedmap 1'
    vara['continent']         = 'antarctica'
    vara['pyproj_Proj']       = p
    vara['map_western_edge']  = west 
    vara['map_eastern_edge']  = east 
    vara['map_southern_edge'] = south 
    vara['map_northern_edge'] = north
    vara['nx']                = len(x)
    vara['ny']                = len(y)
    for n, f in zip(names, ftns):
      vara[n] = f
    return vara 
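Assuming the method is exposed as DataFactory.get_bedmap1() and pyproj is installed, the stored projection can convert map coordinates back to geographic ones, e.g.:

vara = DataFactory.get_bedmap1()             # hypothetical call
# inverse-project the south-west corner of the map to lon/lat
lon_w, lat_s = vara['pyproj_Proj'](vara['map_western_edge'],
                                   vara['map_southern_edge'],
                                   inverse=True)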
Example #48
0
def test_read_write_files():
    # test round trip for example file
    cwd = os.getcwd()
    try:
        tmpdir = tempfile.mkdtemp()
        os.chdir(tmpdir)
        with make_simple('simple.nc', 'w') as f:
            pass
        # read the file we just created in 'a' mode
        with netcdf_file('simple.nc', 'a') as f:
            check_simple(f)
            # add something
            f._attributes['appendRan'] = 1

        # To read the NetCDF file we just created::
        with netcdf_file('simple.nc') as f:
            # Using mmap is the default (but not on pypy)
            assert_equal(f.use_mmap, not IS_PYPY)
            check_simple(f)
            assert_equal(f._attributes['appendRan'], 1)

        # Read it in append (and check mmap is off)
        with netcdf_file('simple.nc', 'a') as f:
            assert_(not f.use_mmap)
            check_simple(f)
            assert_equal(f._attributes['appendRan'], 1)

        # Now without mmap
        with netcdf_file('simple.nc', mmap=False) as f:
            # Using mmap is the default
            assert_(not f.use_mmap)
            check_simple(f)

        # To read the NetCDF file we just created, as file object, no
        # mmap.  When n * n_bytes(var_type) is not divisible by 4, this
        # raised an error in pupynere 1.0.12 and scipy rev 5893, because
        # calculated vsize was rounding up in units of 4 - see
        # https://www.unidata.ucar.edu/software/netcdf/guide_toc.html
        with open('simple.nc', 'rb') as fobj:
            with netcdf_file(fobj) as f:
                # by default, don't use mmap for file-like
                assert_(not f.use_mmap)
                check_simple(f)

        # Read file from fileobj, with mmap
        with suppress_warnings() as sup:
            if IS_PYPY:
                sup.filter(
                    RuntimeWarning,
                    "Cannot close a netcdf_file opened with mmap=True.*")
            with open('simple.nc', 'rb') as fobj:
                with netcdf_file(fobj, mmap=True) as f:
                    assert_(f.use_mmap)
                    check_simple(f)

        # Again read it in append mode (adding another att)
        with open('simple.nc', 'r+b') as fobj:
            with netcdf_file(fobj, 'a') as f:
                assert_(not f.use_mmap)
                check_simple(f)
                f.createDimension('app_dim', 1)
                var = f.createVariable('app_var', 'i', ('app_dim', ))
                var[:] = 42

        # And... check that app_var made it in...
        with netcdf_file('simple.nc') as f:
            check_simple(f)
            assert_equal(f.variables['app_var'][:], 42)

    finally:
        if IS_PYPY:
            # windows cannot remove a dead file held by a mmap
            # that has not been collected in PyPy
            break_cycles()
            break_cycles()
        os.chdir(cwd)
        shutil.rmtree(tmpdir)
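make_simple and check_simple are helpers from scipy's test module and are not shown above. Roughly, make_simple is a context manager that writes a small file with a single 'time' dimension and variable; an approximate sketch:

from contextlib import contextmanager

import numpy as np
from scipy.io import netcdf_file


@contextmanager
def make_simple(*args, **kwargs):
    # create a tiny file with one 'time' dimension and variable
    f = netcdf_file(*args, **kwargs)
    f.history = 'Created for a test'
    f.createDimension('time', 11)
    time = f.createVariable('time', 'b', ('time',))
    time[:] = np.arange(11)
    time.units = 'days since 2008-01-01'
    f.flush()
    try:
        yield f
    finally:
        f.close()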
Example #49
0
  def get_bamber(thklim = 0.0):

    s    = "::: getting Bamber data from DataFactory :::"
    print_text(s, DataFactory.color)
    
    global home
   
    direc = home + '/greenland/bamber13/Greenland_bedrock_topography_V2.nc' 
    data  = netcdf_file(direc, mode = 'r')
    vara  = dict()
    
    needed_vars = {'BedrockElevation' : 'B',
                   'SurfaceElevation' : 'S',
                   'IceThickness'     : 'H',
                   'BedrockError'     : 'Herr',
                   'LandMask'         : 'mask_orig'}
    
    s    = "    - data-fields collected : python dict key to access -"
    print_text(s, DataFactory.color)
    for v in data.variables:
      try:
        txt = '"' + needed_vars[v] + '"'
      except KeyError:
        txt = ''
      print_text('      Bamber : %-*s key : %s '%(30,v, txt), '230')
    
    # retrieve data :
    x         = array(data.variables['projection_x_coordinate'][:])
    y         = array(data.variables['projection_y_coordinate'][:])
    Bo        = array(data.variables['BedrockElevation'][:])
    S         = array(data.variables['SurfaceElevation'][:])
    H         = array(data.variables['IceThickness'][:])
    Herr      = array(data.variables['BedrockError'][:])
    mask_orig = array(data.variables['LandMask'][:])

    # format the mask for cslvr :
    mask = mask_orig.copy(True)
    mask[mask == 1] = 0
    mask[mask == 2] = 1
    mask[mask == 3] = 0
    mask[mask == 4] = 0
    
    # generate mask for lateral boundaries :
    Hc = mask.copy(True)
    
    # calculate mask gradient, to properly mark lateral boundaries :
    gradH = gradient(Hc)
    L     = gradH[0]**2 + gradH[1]**2
    L[L > 0.0] = 1.0
    L[L < 1.0] = 0.0

    # mark one more level in :
    Hc[L > 0.0] = 0
    
    gradH = gradient(Hc)
    L2    = gradH[0]**2 + gradH[1]**2
    L2[L2 > 0.0] = 1.0
    L2[L2 < 1.0] = 0.0
    
    # combine them :
    L[L2 > 0.0] = 1.0
   
    # remove the junk data and impose thickness limit :
    B   = Bo.copy(True)
    H[H == -9999.0] = 0.0
    S[H < thklim] = B[H < thklim] + thklim
    H[H < thklim] = thklim
    B             = S - H

    # extents of domain :
    east  = max(x)
    west  = min(x)
    north = max(y)
    south = min(y)

    #projection info :
    proj   = 'stere'
    lat_0  = '90'
    lat_ts = '71'
    lon_0  = '-39'
    
    # create projection :
    txt  =   " +proj="   + proj \
           + " +lat_0="  + lat_0 \
           + " +lat_ts=" + lat_ts \
           + " +lon_0="  + lon_0 \
           + " +k=1 +x_0=0 +y_0=0 +no_defs +a=6378137 +rf=298.257223563" \
           + " +towgs84=0.000,0.000,0.000 +to_meter=1"
    p    = Proj(txt)
    
    # save the data in matlab format :
    vara['pyproj_Proj']       = p
    vara['map_western_edge']  = west 
    vara['map_eastern_edge']  = east 
    vara['map_southern_edge'] = south 
    vara['map_northern_edge'] = north
    vara['nx']                = len(x)
    vara['ny']                = len(y)
     
    names = ['B', 'Bo', 'S', 'H', 'lat_mask', 'Herr', 'mask', 'mask_orig']
    ftns  = [ B,   Bo,   S,   H,   L,          Herr,   mask,   mask_orig]
    
    # save the data in matlab format :
    vara['dataset']   = 'Bamber'
    vara['continent'] = 'greenland'
    for n, f in zip(names, ftns):
      vara[n] = f
    return vara 
Example #50
0
CHAZ_Int_ENS = 40
PImodelname = 'ERA'
### CHAZ parameters
uBeta = -2.5
vBeta = 1.0
survivalrate = 0.78
seedN = 1000  #annual seeding rate for random seeding
ipath = '/Users/kaitlynnpugliese/Desktop/APAMHurricane/'
opath = '/Users/kaitlynnpugliese/Desktop/APAMHurricane/bt_global_predictors.pik'
obs_bt_path = '/Users/kaitlynnpugliese/Desktop/APAMHurricane/'
Year1 = 1985
Year2 = 1985
ibtracs = '/Users/kaitlynnpugliese/Desktop/APAMHurricane/Allstorms.ibtracs_all.v03r08.nc'

landmaskfile = '/Users/kaitlynnpugliese/Desktop/APAMHurricane/landmask.nc'
f = netcdf_file(landmaskfile)
llon = f.variables['lon'][:]
llat = f.variables['lat'][:]
lldmask = f.variables['landmask'][:, :]
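# build a coarse land mask: every 24th column, rows sampled backwards starting at index -12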
ldmask = lldmask[-12::-24, ::24]
lldmask = lldmask[::-1, :]

###################################################
# Preprocesses                                 ####
# ignore variables when runCHAZ is False       ####
###################################################
###################################################
# CHAZ                                         ####
# ignore variables when runCHAZ is False       ####
###################################################
runCHAZ = True
Example #51
0
def test_read_with2dVar():
    fname = pjoin(TEST_DATA_PATH, 'example_3_maskedvals.nc')
    with netcdf_file(fname, maskandscale=True) as f:
        vardata = f.variables['var7_2d'][:]
        assert_mask_matches(vardata,
                            [[True, False], [False, False], [False, True]])
Example #52
0
def test_read_withValuesNearFillValue():
    # Regression test for ticket #5626
    fname = pjoin(TEST_DATA_PATH, 'example_3_maskedvals.nc')
    with netcdf_file(fname, maskandscale=True) as f:
        vardata = f.variables['var1_fillval0'][:]
        assert_mask_matches(vardata, [False, True, False])
Example #53
0
# imports inferred from the code below (the original snippet began mid-file)
from collections import OrderedDict, defaultdict
from glob import glob

import matplotlib as mpl
import pandas as pd
from matplotlib import style
from matplotlib.pyplot import figure
from scipy.io import netcdf_file

style.use('lowink')


norm_kwargs = dict(zeroed=20, t_offset='auto', normed=(1010, 1032),
                       norm_method='max')
norm_kwargs2 = norm_kwargs.copy()
norm_kwargs2['normed'] = (1060, 1070)


if __name__ == "__main__":
    normers = {}
    allsamples = defaultdict(list)
    for fname in glob('*/*.CDF'):
        sample = netcdf_file(fname)
        title = sample.experiment_title.decode().replace('pac', '').split('r')[0].strip('_').lstrip('_-').replace('zaza-', 'zaza')
        allsamples[title].append(sample)

    figure()

    plot_cycler2 = (mpl.cycler(lw=[1,2,3,4,5,])
                    * mpl.cycler(linestyle=['-', ':', '-.', '--']))

    clist = OrderedDict([
        (1, 'darkblue'),  # MelWT
        (2, 'lightblue'), # eloF-
        (5, 'red'),        # Sec WT
        (3, 'darkorange'),       # CRISPR A
        (4, 'lightsalmon'),     # CRISPR B
        (6, 'black'),
    ])
Example #54
0
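# fragment of a matplotlib pick-event handler; the enclosing class is not shown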
        if event.artist not in self.polygons.polygons:
            return
        ind = event.ind[0]
        self._emit('deselect', self.selected)
        if (self.i, ind) != self.selected:
            self.selected = (self.i, ind)
            self._emit('select', self.selected)
        else:
            self.selected = None
        self.fig.canvas.draw_idle()


if __name__ == '__main__':
    from matplotlib.animation import FuncAnimation

    ncf = netcdf_file('KTLX_20100510_22Z.nc')
    data = ncf.variables['Reflectivity']
    lats = ncf.variables['lat']
    lons = ncf.variables['lon']
    stormcells = storm_loader('polygons.shp')

    fig, ax = plt.subplots(1, 1)
    raddisp = RadarDisplay(ax, lats, lons)
    raddisp.update_display(data[0])
    fig.colorbar(raddisp.im)
    polycolls = Stormcells(ax, stormcells)
    linecoll = Tracks(ax)

    # Turn on the first frame's polygons
    polycolls.toggle_polygons(0, True)
    ax.autoscale(True)
Example #55
0
 def doit():
     with netcdf_file(filename, mmap=True) as f:
         return f.variables['lat'][:]
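This fragment matches scipy's mmap tests: the slice returned by doit still references the memory map, which is what triggers the "cannot close" RuntimeWarning once the file object goes out of scope. Copying detaches the data; a sketch (filename is assumed to be defined elsewhere):

def doit_copy():
    with netcdf_file(filename, mmap=True) as f:
        # .copy() detaches the values from the mmap so the file can close cleanly
        return f.variables['lat'][:].copy()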
Example #56
0
def test_read_withFillValNaN():
    fname = pjoin(TEST_DATA_PATH, 'example_3_maskedvals.nc')
    with netcdf_file(fname, maskandscale=True) as f:
        vardata = f.variables['var5_fillvalNaN'][:]
        assert_mask_matches(vardata, [False, True, False])
Example #57
0
 def read_training(self):
     if self.files_list['training'] is not None:
         f = netcdf_file(self.basedir + os.sep +
                         self.files_list['training'])
Example #58
0
    def get_searise(thklim=0.0):

        filename = inspect.getframeinfo(inspect.currentframe()).filename
        home = os.path.dirname(os.path.abspath(filename))

        direc = home + "/greenland/searise/Greenland_5km_dev1.2.nc"
        data = netcdf_file(direc, mode='r')
        vara = dict()

        # retrieve data :
        x = array(data.variables['x1'][:])
        y = array(data.variables['y1'][:])
        h = array(data.variables['usrf'][:][0])
        adot = array(data.variables['smb'][:][0])
        b = array(data.variables['topg'][:][0])
        T = array(data.variables['surftemp'][:][0]) + 273.15
        q_geo = array(data.variables['bheatflx'][:][0]) * 60 * 60 * 24 * 365
        lat = array(data.variables['lat'][:][0])
        lon = array(data.variables['lon'][:][0])
        U_sar = array(data.variables['surfvelmag'][:][0])
        dhdt = array(data.variables['dhdt'][:][0])

        direc = home + "/greenland/searise/smooth_target.mat"
        U_ob = loadmat(direc)['st']

        H = h - b
        h[H < thklim] = b[H < thklim] + thklim
        H[H < thklim] = thklim

        Tn = 41.83 - 6.309e-3 * h - 0.7189 * lat - 0.0672 * lon + 273

        # extents of domain :
        east = max(x)
        west = min(x)
        north = max(y)
        south = min(y)

        #projection info :
        proj = 'stere'
        lat_0 = '90'
        lat_ts = '71'
        lon_0 = '-39'

        names = ['H', 'S', 'adot', 'B', 'T', 'q_geo','U_sar', \
                 'U_ob', 'lat', 'lon', 'Tn','dhdt']
        ftns = [H, h, adot, b, T, q_geo, U_sar, U_ob, lat, lon, Tn, dhdt]

        vara['dataset'] = 'searise'
        for n, f in zip(names, ftns):
            vara[n] = {
                'map_data': f,
                'map_western_edge': west,
                'map_eastern_edge': east,
                'map_southern_edge': south,
                'map_northern_edge': north,
                'projection': proj,
                'standard lat': lat_0,
                'standard lon': lon_0,
                'lat true scale': lat_ts
            }
        return vara
Example #59
0
        print "Saving in:", F
        np.save(data, c)
        data.close()
        return 0


###############################
# Main analysis loop

trop_temp = np.zeros( ( m, 500 ) )

for z in range( m ):

	fname = Mods[z] + "_PI_control_0_500_" + opt + ".nc"
	print "Doing:", fname
	f1 = si.netcdf_file(fname, 'r')

	#Get latitude bounds
	l1, l2, j = get_lats_lons( f1 )
	data = f1.variables[ opts[z] ][:, l1:l2, :]

	print "Detrend"
	data = ss.detrend( data - np.mean(data, axis = 0) )

	print "Take annual mean"
	data = yrmn( data )

	print "Take tropical mean"
	trop_temp[z] = tropical_mean( data, f1.variables['lat'][:], f1.variables['lon'][:] )

	f1.close()
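yrmn, tropical_mean and get_lats_lons are not shown in this fragment. Plausible minimal versions of the first two, assuming monthly input and cosine-of-latitude area weighting (the handling of the l1:l2 latitude slice would have to match whatever get_lats_lons returns):

def yrmn(data):
    # collapse groups of 12 consecutive months into annual means
    nyr = data.shape[0] // 12
    return data[:nyr * 12].reshape(nyr, 12, *data.shape[1:]).mean(axis=1)

def tropical_mean(data, lat, lon):
    # area-weighted mean over the tropical band (|lat| <= 30)
    sel = np.abs(lat) <= 30.0
    w = np.cos(np.deg2rad(lat[sel]))
    zonal = data[:, sel, :].mean(axis=2)     # average over longitude first
    return (zonal * w).sum(axis=1) / w.sum()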
Example #60
0
from __future__ import absolute_import
from __future__ import print_function
import scipy
from scipy import io
import os
from ioTools import readwrite as py_rw
import numpy as np

ncFile = "/home/smg/wang/PROJ/DL/RNNJP/DATA/test_align/F009A/data.nc1"
labdir = "/home/smg/takaki/FEAT/F009/data/ver01/full"
labout = "/home/smg/wang/DATA/speech/F009A/nndata/labels/full_align/test_set"
prefix = "ATR_Ximera_F009A_"
resolu = 50000

ncData = io.netcdf_file(ncFile, 'r')
sentNm = ncData.dimensions['numSeqs']
sentNa = ncData.variables['seqTags'][:].copy()
sentTi = ncData.variables['seqLengths'][:].copy()

start = 0
for id, sentId in enumerate(sentNa):
    sentId = ''.join(sentId)
    labinpfile = labdir + os.path.sep + sentId + '.lab'
    laboutfile = labout + os.path.sep + sentId + '.lab'
    labentrys = py_rw.read_txt_list(labinpfile)
    stime, etime = start, start + sentTi[id]
    data = ncData.variables['inputs'][stime:etime, 0:-3].copy()
    data = (data * data).sum(axis=1)
    difd = np.diff(data)
    indx = np.concatenate(