Example #1
    def post(self, request, *args, **kwargs):
        dataset = self.form_class(request.POST, request.FILES)
        if dataset.is_valid():
            ds = dataset.save()

            # add attribute variables to dataset
            attr_data = nc(ds.data_file.path, 'r')
            variables = list(attr_data.variables.keys())
            for var in variables:
                shape = attr_data.variables[var].shape
                if len(shape) < 2:
                    shape = [shape[0], shape[0]]
                variable = Variable(name=var,
                                    long_name=attr_data.variables[var].long_name,
                                    dataset=ds,
                                    x_dimension=shape[0],
                                    y_dimension=shape[1]
                                    )
                variable.save()
            attr_data.close()

            # add elevation to dataset
            elev_data = nc(ds.elev_file.path, 'r')
            elev_shape = elev_data.variables['elev'].shape
            elev_variable = Variable(name='elev', dataset=ds, x_dimension=elev_shape[0], y_dimension=elev_shape[1])
            elev_variable.save()
            elev_data.close()
            return redirect('/explore/' + str(ds.id) + '/')
        else:
            # re-render with the bound form so field validation errors are displayed
            return render(request, self.template_name, {'form': dataset})
Example #2
    def __init__(self, smapfile, areafile):
        with nc(smapfile) as f:
            self.smap = f.variables['state'][:]
            self.cmap = f.variables['county'][:]
        self.states = unique(self.smap)
        self.states = self.states[~self.states.mask]

        with nc(areafile) as f:
            self.acounties = f.variables['county'][:]
            self.area      = f.variables['sum_county'][:]
Example #3
    def __init__(self, filename, time, tunits, scens, aggs, aggname, aggunits, agglongname):
        super(AggregationFile, self).__init__(filename)

        with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
            f.createDimension(aggname, len(aggs)) # create aggregation level
            aggsvar = f.createVariable(aggname, 'i4', aggname)
            aggsvar[:] = aggs
            aggsvar.units = aggunits
            aggsvar.long_name = agglongname

            f.createDimension('time', len(time)) # create time
            timevar = f.createVariable('time', 'i4', 'time')
            timevar[:] = time
            timevar.units = tunits
            timevar.long_name = 'time'

            f.createDimension('scen', len(scens)) # create scen
            scenvar = f.createVariable('scen', 'i4', 'scen')
            scenvar[:] = range(1, len(scens) + 1)
            scenvar.units = 'mapping'
            scenvar.long_name = ', '.join(scens)

            f.createDimension('irr', 3) # create irr
            irrvar = f.createVariable('irr', 'i4', 'irr')
            irrvar[:] = range(1, 4)
            irrvar.units = 'mapping'
            irrvar.long_name = 'ir, rf, sum'
Example #4
    def get(self, request, *args, **kwargs):
        layer = str(kwargs['layer'])
        dataset = Dataset.objects.get(pk=kwargs['dataset'])
        response = dict()
        variables = Variable.objects.filter(dataset=dataset)
        found = False
        for variable in variables:
            if variable.name == layer:
                layer = variable.name
                found = True
                break

        if not found:
            return JsonResponse({layer: 'False'})

        if layer == 'elev' and dataset.has_elev_file:
            path = dataset.elev_file.path
        else:
            path = dataset.data_file.path

        with nc(path, 'r') as ds:

            var = ds.variables[layer][:]

            minimum = float(var.min())
            maximum = float(var.max())
            x = var.shape[1]
            y = var.shape[0]
            fill_value = str(ds.variables[layer]._FillValue)
            response[layer] = {'min': minimum, 'max': maximum, 'x':float(x), 'y':float(y), 'fill_value' : fill_value}

        return JsonResponse(response)
Example #5
def createAggFile(filename, time, tunits, adata, anames, aunits, alongnames, scens, irr, leaddim):
    if leaddim == "scen":
        nscens = None
        ntime = len(time)
    else:
        nscens = len(scens)
        ntime = None

    with nc(filename, "w", format="NETCDF4_CLASSIC") as f:
        f.createDimension("time", ntime)
        timevar = f.createVariable("time", "i4", "time")
        timevar[:] = time
        timevar.units = tunits
        timevar.long_name = "time"
        f.createDimension("scen", nscens)
        scenvar = f.createVariable("scen", "i4", "scen")
        scenvar[:] = scens
        scenvar.units = "no"
        scenvar.long_name = "scenarios"
        f.createDimension("irr", len(irr) + 1)
        irrvar = f.createVariable("irr", "i4", "irr")
        irrvar[:] = range(1, len(irr) + 2)
        irrvar.units = "mapping"
        irrvar.long_name = ", ".join(irr + ["sum"])
        for i in range(len(anames)):
            rname = anames[i] + "_index"
            f.createDimension(rname, len(adata[i]))
            rvar = f.createVariable(rname, "i4", rname)
            rvar[:] = adata[i]
            rvar.units = aunits[i]
            rvar.long_name = alongnames[i]
Example #6
def loadData(file):
    with nc(file) as f:
        var = array(f.variables.keys())
        idx = array([v != 'lon' and v != 'lat' and v != 'time' for v in var])
        varname = var[idx][0]
        data = f.variables[varname][:]
    return data, varname
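A minimal usage sketch for loadData above; the file name data.nc4 is a hypothetical placeholder, and nc and array are assumed to be netCDF4.Dataset and numpy.array as in the surrounding examples.

# hypothetical input file with lon/lat/time coordinates plus a single data variable
data, varname = loadData('data.nc4')
print('loaded %s with shape %s' % (varname, str(data.shape)))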
Example #7
    def __init__(self, filename, time, lat, lon, irr):
        super(RescaledFile, self).__init__(filename)

        with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
            f.createDimension('time', len(time))
            timevar = f.createVariable('time', 'i4', 'time')
            timevar[:] = time - time[0]
            timevar.units = 'years since {:d}-01-01'.format(int(time[0]))
            timevar.long_name = 'time'

            f.createDimension('lat', len(lat))
            latvar = f.createVariable('lat', 'f4', 'lat')
            latvar[:] = lat
            latvar.units = 'degrees_north'
            latvar.long_name = 'latitude'

            f.createDimension('lon', len(lon))
            lonvar = f.createVariable('lon', 'f4', 'lon')
            lonvar[:] = lon
            lonvar.units = 'degrees_east'
            lonvar.long_name = 'longitude'

            f.createDimension('irr', len(irr))
            irrvar = f.createVariable('irr', 'i4', 'irr')
            irrvar[:] = range(1, len(irr) + 1)
            irrvar.units = 'mapping'
            irrvar.long_name = ', '.join(irr)
Example #8
def createAggFile(filename, time, tunits, adata, anames, aunits, alongnames, scens, leaddim):
    if leaddim == 'scen':
        nscens = None
        ntime  = len(time)
    else:
        nscens = len(scens)
        ntime  = None

    with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
        f.createDimension('time', ntime)
        timevar = f.createVariable('time', 'i4', 'time')
        timevar[:] = time
        timevar.units = tunits
        timevar.long_name = 'time'
        f.createDimension('scen', nscens)
        scenvar = f.createVariable('scen', 'i4', 'scen')
        scenvar[:] = scens
        scenvar.units = 'no'
        scenvar.long_name = 'scenarios'
        for i in range(len(anames)):
            rname = anames[i] + '_index'
            f.createDimension(rname, len(adata[i]))
            rvar = f.createVariable(rname, 'i4', rname)
            rvar[:] = adata[i]
            rvar.units = aunits[i]
            rvar.long_name = alongnames[i]
Example #9
    def get(self, request, *args, **kwargs):
        y_start = int(kwargs['y'])
        y_end = y_start + EEMS_TILE_SIZE[0]
        x_start = int(kwargs['x'])
        x_end = x_start + EEMS_TILE_SIZE[1]
        layer = 'elev'
        response = dict()

        with nc(elev_path, 'r') as ds:
            var = ds.variables[layer][:]

            if x_start > var.shape[1] or y_start > var.shape[0]:
                return JsonResponse({layer: 'False'})

            x_end = var.shape[1] if x_end > var.shape[1] else x_end
            y_end = var.shape[0] if y_end > var.shape[0] else y_end
            response['fill_value'] = str(ds.variables[layer]._FillValue)
            # use the underlying data if the variable is masked, then slice out the requested tile
            raw = var.data if isinstance(var, np.ma.core.MaskedArray) else var
            sub_matrix = raw[y_start:y_end, x_start:x_end]
            response[layer] = sub_matrix.ravel().tolist()
            response['x'] = sub_matrix.shape[1]
            response['y'] = sub_matrix.shape[0]

        return JsonResponse(response)
Example #10
def createAggFile(filename, time, tunits, adata, anames, aunits, alongnames, scens, irr, leaddim, hasscen):
    if leaddim == 'scen':
        nscens = None
        ntime  = len(time)
    else:
        nscens = len(scens) if hasscen else 0
        ntime  = None

    with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
        f.createDimension('time', ntime)
        timevar = f.createVariable('time', 'i4', 'time')
        timevar[:] = time
        timevar.units = tunits
        timevar.long_name = 'time'

        if hasscen:
            f.createDimension('scen', nscens)
            scenvar = f.createVariable('scen', 'i4', 'scen')
            scenvar[:] = scens
            scenvar.units = 'no'
            scenvar.long_name = 'scenarios'

        f.createDimension('irr', len(irr) + 1)
        irrvar = f.createVariable('irr', 'i4', 'irr')
        irrvar[:] = range(1, len(irr) + 2)
        irrvar.units = 'mapping'
        irrvar.long_name = ', '.join(irr + ['sum'])

        for i in range(len(anames)):
            rname = str(anames[i])
            f.createDimension(rname, len(adata[i]))
            rvar = f.createVariable(rname, 'i4', rname)
            rvar[:] = adata[i]
            rvar.units = aunits[i]
            rvar.long_name = alongnames[i]
Example #11
    def get(self, request, *args, **kwargs):
        layer = str(kwargs['layer'])
        provider = str(kwargs['provider'])
        response = dict()
        path = climate_paths[layer][provider]

        #raw_data = RawData(path, layer)
        with nc(path, 'r') as ds:

            var = ds.variables[layer][:]

            minimum = float(var.min())
            maximum = float(var.max())
            x = var.shape[1]
            y = var.shape[0]
            try:
                fill_value = str(ds.variables[layer]._FillValue)
            except AttributeError:
                # the variable has no _FillValue attribute; fall back to the netCDF default fill value
                fill_value = 9.969209968386869e+36
            response[layer] = {'min': minimum, 'max': maximum, 'x':float(x), 'y':float(y), 'fill_value' : fill_value}

        return JsonResponse(response)
Example #12
    def get(self, request, *args, **kwargs):
        y_start = int(kwargs['y'])
        y_end = y_start + EEMS_TILE_SIZE[0]
        x_start = int(kwargs['x'])
        x_end = x_start + EEMS_TILE_SIZE[1]
        layer = kwargs['layer']
        provider = str(kwargs['provider'])

        response = dict()
        path = climate_paths[layer][provider]

        with nc(path, 'r') as ds:
            var = ds.variables[layer][:]

            if x_start > var.shape[1] or y_start > var.shape[0]:
                return JsonResponse({layer: 'False'})

            x_end = var.shape[1] if x_end > var.shape[1] else x_end
            y_end = var.shape[0] if y_end > var.shape[0] else y_end
            # use the underlying data if the variable is masked, then slice out the requested tile
            raw = var.data if isinstance(var, np.ma.core.MaskedArray) else var
            sub_matrix = raw[y_start:y_end, x_start:x_end]
            response[layer] = sub_matrix.ravel().tolist()
            response['x'] = sub_matrix.shape[1]
            response['y'] = sub_matrix.shape[0]

        return JsonResponse(response)
Example #13
    def get(self, request, *args, **kwargs):

        layer = 'elev'
        elev_path = os.path.join(settings.MEDIA_ROOT, 'uploads', 'elevation', 'Elev_30AS_ForTaylor.nc')

        with nc(elev_path, 'r') as ds:
            var = ds.variables[layer][:]

            minimum = float(var.min())
            maximum = float(var.max())
            x = var.shape[1]
            y = var.shape[0]

            fill_value = str(ds.variables[layer]._FillValue)
            response = {
                'min': minimum,
                'max': maximum,
                'x':float(x),
                'y':float(y),
                'fill_value' : fill_value
            }

            response['lat_min'] = float(ds.variables['lat'][:].min())
            response['lat_max'] = float(ds.variables['lat'][:].max())
            response['lon_min'] = float(ds.variables['lon'][:].min())
            response['lon_max'] = float(ds.variables['lon'][:].max())

        return JsonResponse(response)
Example #14
    def __loadwts(self, var, vals):
        if self.wfile is None:
            return ones(len(vals))
        else:
            vars  = var.split('/')
            nvars = len(vars)

            v = [0] * nvars
            w = [0] * nvars
            with nc(self.wfile) as f:
                for i in range(nvars):
                    if f.variables[vars[i]].units == 'mapping':
                        v[i] = array(f.variables[vars[i]].long_name.split(', '))
                    else:
                        v[i] = f.variables[vars[i]][:]
                    w[i] = f.variables['weights_' + vars[i]][:]

            nvals = len(vals)
            wts = masked_array(zeros(nvals), mask = ones(nvals))
            for i in range(nvals):
                svals = vals[i].split('/')
                for j in range(nvars):
                    if svals[j].isdigit():
                        svals[j] = double(svals[j]) # convert to number
                    idx = where(v[j] == svals[j])[0]
                    if idx.size:
                        if isMaskedArray(wts[i]):
                            wts[i] = w[j][idx[0]]
                        else:
                            wts[i] *= w[j][idx[0]]

            return wts
Example #15
    def __init__(self, cpfile, smapfile, areafile, crop, varname):
        with nc(cpfile) as f:
            self.year    = f.variables['year'][:]
            self.week    = f.variables['week'][:]
            self.county  = f.variables['county'][:]
            self.day     = f.variables['day'][:]
            self.rawdata = f.variables[varname][:]

            varatt = f.variables['var'].ncattrs()
            if 'units' in varatt and f.variables['var'].units == 'mapping':
                self.var = array(f.variables['var'].long_name.split(', '))
                self.varmap = self.varmap_str[crop]
            else:          
                self.var = f.variables['var'][:]
                self.varmap = self.varmap_num[crop]

        self.crop = crop

        nyears, nweeks, ncounties, nvars = len(self.year), len(self.week), len(self.county), len(self.vars)

        self.data = masked_array(zeros((nyears, nweeks, ncounties, nvars)), mask = ones((nyears, nweeks, ncounties, nvars)))
        for i in range(nvars):
            vmap = self.varmap[i]

            if isinstance(vmap, list):
                for j in range(ncounties):
                    for k in range(len(vmap)):
                        if vmap[k] in self.var: # variable in list
                            varidx = where(self.var == vmap[k])[0][0]
                            data   = self.rawdata[:, :, varidx, j]
                            if not isMaskedArray(data) or not data.mask.all():
                                self.data[:, :, j, i] = data
                                break
            elif vmap != '':
                if vmap in self.var:
                    varidx = where(self.var == vmap)[0][0]
                    self.data[:, :, :, i] = self.rawdata[:, :, varidx, :]
            else: # no data
                continue

            # discard counties with insufficient data
            for j in range(ncounties):
                for k in range(nyears):
                    data = self.data[k, :, j, i]

                    if isMaskedArray(data):
                        data = data[~data.mask]

                    if data.size and data[-1] - data[0] < 40:
                        self.data[k, :, j, i].mask = True # mask

        # aggregate to state level
        aggregator = StateAggregator(smapfile, areafile)
        self.sdata = aggregator.aggregate(self.data, self.county)
        self.state = aggregator.states
Example #16
    def __init__(self, filename, outdir, fmt):
        self.filename = filename
        self.outdir = outdir
        self.fmt = fmt
        with nc(filename) as f:
            self.varkeys = f.variables.keys()  # get available variables
            self.scennames = [s.strip(" ") for s in f.variables["scen"].long_name.split(",")]  # get scenario names
            self.irr = f.variables["irr"][:]  # get irrigation values and names
            self.irrnames = [i.strip(" ") for i in f.variables["irr"].long_name.split(",")]
            self.time = f.variables["time"][:]  # get time and units
            self.tunits = f.variables["time"].units
Example #17
def createAggFile(filename, time, tunits, adata, anames, aunits, alongnames):
    with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
        f.createDimension('time', len(time))
        timevar = f.createVariable('time', 'i4', ('time',))
        timevar[:] = time
        timevar.units = tunits
        timevar.long_name = 'time'
        for i in range(len(anames)):
            rname = anames[i] + '_index'
            f.createDimension(rname, len(adata[i]))
            rvar = f.createVariable(rname, 'i4', (rname,))
            rvar[:] = adata[i]
            rvar.units = aunits[i]
            rvar.long_name = alongnames[i]
Example #18
    def __init__(self, file, vars = None):
        with nc(file) as f:
            if vars is None:
                vars = setdiff1d(f.variables, ['latitude', 'longitude', 'time', 'ftime', 'scen'])
                newvars = list(vars.copy())
                for v in vars:
                    if compile('missing_*').match(v): # remove missing_* variables
                        newvars.remove(v)
                vars = array(newvars)

            self.lat, self.lon = f.variables['latitude'][0], f.variables['longitude'][0]

            self.scen  = f.variables['scen'][:]
            self.time  = f.variables['time'][:]
            self.ftime = f.variables['ftime'][:]            

            tunits = f.variables['time'].units
            ts = tunits.split('days since ')[1].split(' ')
            yr0, mth0, day0 = [int(t) for t in ts[0].split('-')[0 : 3]]
            if len(ts) > 1:
                hr0, min0, sec0 = [int(t) for t in ts[1].split(':')[0 : 3]]
            else:
                hr0 = min0 = sec0 = 0
            self.reftime = datetime(yr0, mth0, day0, hr0, min0, sec0)

            nv, ns, nt, nf = len(vars), len(self.scen), len(self.time), len(self.ftime)
            self.data      = masked_array(zeros((nv, ns, nt, nf)), mask = ones((nv, ns, nt, nf)))
            self.missing   = zeros((nv, ns, nt))
            self.units     = zeros(nv, dtype = '|S64')
            self.longnames = zeros(nv, dtype = '|S64')
            for i in range(nv):
                if vars[i] in f.variables:
                    var = f.variables[vars[i]]
                    msg = f.variables['missing_' + vars[i]]
                else:
                    vidx = foundVar(f.variables.keys(), vars[i])
                    var  = f.variables[f.variables.keys()[vidx]]
                    msg  = f.variables['missing_' + f.variables.keys()[vidx]]
                self.data[i] = var[:, :, :, 0, 0]
                self.missing[i] = msg[:]
                self.units[i] = var.units
                self.longnames[i] = var.long_name

            self.vars = vars # store variable names

            self.pridx = foundVar(vars, 'pr') # variable indices
            self.maidx = foundVar(vars, 'tmax')
            self.miidx = foundVar(vars, 'tmin')

        self.__fillMissing()
Example #19
    def __init__(self, file, lat, lon, vars = None):
        with nc(file) as f:
            if vars is None:
                vars = setdiff1d(f.variables, ['lat', 'lon', 'time'])

            lats, lons = f.variables['lat'][:], f.variables['lon'][:]

            if isMaskedArray(f.variables[vars[0]][0]):
                mask = f.variables[vars[0]][0].mask # pull mask from first variable, first time
            else:
                mask = zeros((len(lats), len(lons)))

            latd = resize(lats, (len(lons), len(lats))).T - lat
            lond = resize(lons, (len(lats), len(lons))) - lon
            latd = masked_where(mask, latd)
            lond = masked_where(mask, lond)
            totd = latd ** 2 + lond ** 2
            idx = where(totd == totd.min())
            latidx, lonidx = idx[0][0], idx[1][0]

            self.time = f.variables['time'][:]

            tunits = f.variables['time'].units
            ts = tunits.split('months since ')[1].split(' ')
            yr0, mth0, day0 = [int(t) for t in ts[0].split('-')[0 : 3]]
            if len(ts) > 1:
                hr0, min0, sec0 = [int(t) for t in ts[1].split(':')[0 : 3]]
            else:
                hr0 = min0 = sec0 = 0
            self.reftime = datetime(yr0, mth0, day0, hr0, min0, sec0)

            nv, nt = len(vars), len(self.time)
            self.data  = zeros((nv, nt))
            self.units = zeros(nv, dtype = '|S32')
            for i in range(nv):
                if vars[i] in f.variables:
                    var = f.variables[vars[i]]
                else:
                    vidx = foundVar(f.variables.keys(), vars[i])
                    var  = f.variables[f.variables.keys()[vidx]]
                self.data[i]  = var[:, latidx, lonidx]
                self.units[i] = var.units

            self.vars = vars # store variable names

            self.pridx = foundVar(vars, 'pr') # variable indices
            self.maidx = foundVar(vars, 'tmax')
            self.miidx = foundVar(vars, 'tmin')
Example #20
    def __init__(self, path, layer_name):

        # parse .xtr file
        with nc(path, 'r') as ds:

            self.lats = ds.variables['lat'][:]
            self.lngs = ds.variables['lon'][:]
            self.numlngs = int(self.lngs.size)
            self.numlats = int(self.lats.size)
            self.minlng = float(self.lngs[0])
            self.maxlng = float(self.lngs[-1])
            self.minlat = float(self.lats[0])
            self.maxlat = float(self.lats[-1])
            self.data = ds.variables[layer_name][:]
            self.dtype = self.data.data.dtype
            self.ncattrs = {k: ds.variables[layer_name].getncattr(k) for k in ds.variables[layer_name].ncattrs()}
Example #21
    def __init__(self, filename, metric, aggs, aggname, aggunits, agglongname, time, dt, mp, cr, nm):
        super(ModelEnsembleFile, self).__init__(filename)

        with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
            f.createDimension(aggname, len(aggs))
            aggsvar = f.createVariable(aggname, 'i4', aggname)
            aggsvar[:] = aggs
            aggsvar.units = aggunits
            aggsvar.long_name = agglongname

            f.createDimension('time', len(time))
            timevar = f.createVariable('time', 'i4', 'time')
            timevar[:] = time - time[0]
            timevar.units = 'years since {:d}-01-01'.format(int(time[0]))
            timevar.long_name = 'time'

            f.createDimension('dt', len(dt))
            dtvar = f.createVariable('dt', 'i4', 'dt')
            dtvar[:] = range(1, len(dt) + 1)
            dtvar.units = 'mapping'
            dtvar.long_name = ', '.join(dt)
            dtvar.note = 'detrend method'

            f.createDimension('mp', len(mp))
            mpvar = f.createVariable('mp', 'i4', 'mp')
            mpvar[:] = range(1, len(mp) + 1)
            mpvar.units = 'mapping'
            mpvar.long_name = ', '.join(mp)
            mpvar.note = 'mean-preserving method'

            f.createDimension('cr', len(cr))
            crvar = f.createVariable('cr', 'i4', 'cr')
            crvar[:] = range(1, len(cr) + 1)
            crvar.units = 'mapping'
            crvar.long_name = ', '.join(cr)
            crvar.note = 'correction method'

            f.createDimension('nm', nm)
            nmvar = f.createVariable('nm', 'i4', 'nm')
            nmvar[:] = range(1, nm + 1)
            nmvar.long_name = 'top models used'

            f.createDimension('wt', 2)
            weightedvar = f.createVariable('wt', 'i4', 'wt')
            weightedvar[:] = [1, 2]
            weightedvar.units = 'mapping'
            weightedvar.long_name = 'unweighted, %s-weighted' % metric
Example #22
    def __init__(self, filename, aggs, aggname, aggunits, agglongname, times, dt, mp, cr, nm, wt):
        super(MultimetricsEnsembleFile, self).__init__(filename)

        with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
            f.createDimension(aggname, len(aggs))
            aggsvar = f.createVariable(aggname, 'i4', aggname)
            aggsvar[:] = aggs
            aggsvar.units = aggunits
            aggsvar.long_name = agglongname

            f.createDimension('time_range', len(times))
            timesvar = f.createVariable('time_range', 'i4', 'time_range')
            timesvar[:] = range(1, len(times) + 1)
            timesvar.units = 'mapping'
            timesvar.long_name = ', '.join(times)

            f.createDimension('dt', len(dt))
            dtvar = f.createVariable('dt', 'i4', 'dt')
            dtvar[:] = range(1, len(dt) + 1)
            dtvar.units = 'mapping'
            dtvar.long_name = ', '.join(dt)
            dtvar.note = 'detrend method'

            f.createDimension('mp', len(mp))
            mpvar = f.createVariable('mp', 'i4', 'mp')
            mpvar[:] = range(1, len(mp) + 1)
            mpvar.units = 'mapping'
            mpvar.long_name = ', '.join(mp)
            mpvar.note = 'mean-preserving method'

            f.createDimension('cr', len(cr))
            crvar = f.createVariable('cr', 'i4', 'cr')
            crvar[:] = range(1, len(cr) + 1)
            crvar.units = 'mapping'
            crvar.long_name = ', '.join(cr)
            crvar.note = 'correction method'

            f.createDimension('nm', nm)
            nmvar = f.createVariable('nm', 'i4', 'nm')
            nmvar[:] = range(1, nm + 1)
            nmvar.long_name = 'top models used'

            f.createDimension('wt', len(wt))
            weightedvar = f.createVariable('wt', 'i4', 'wt')
            weightedvar[:] = range(1, len(wt) + 1)
            weightedvar.units = 'mapping'
            weightedvar.long_name = ', '.join(wt)
Example #23
    def get(self, request, *args, **kwargs):
        y_start = int(kwargs['y'])
        y_end = y_start + EEMS_TILE_SIZE[0]
        x_start = int(kwargs['x'])
        x_end = x_start + EEMS_TILE_SIZE[1]
        layer = kwargs['layer']
        dataset = Dataset.objects.get(pk=kwargs['dataset'])
        response = dict()

        variables = Variable.objects.filter(dataset=dataset)

        found = False
        for variable in variables:
            if variable.name == layer:
                layer = variable.name
                found = True
                break

        if not found:
            return JsonResponse({layer : 'False'})

        if layer == 'elev' and dataset.has_elev_file:
            path = dataset.elev_file.path
        else:
            path = dataset.data_file.path

        with nc(path, 'r') as ds:
            var = ds.variables[layer][:]

            if x_start > var.shape[1] or y_start > var.shape[0]:
                return JsonResponse({layer: 'False'})

            x_end = var.shape[1] if x_end > var.shape[1] else x_end
            y_end = var.shape[0] if y_end > var.shape[0] else y_end
            response['fill_value'] = str(ds.variables[layer]._FillValue)
            # use the underlying data if the variable is masked, then slice out the requested tile
            raw = var.data if isinstance(var, np.ma.core.MaskedArray) else var
            sub_matrix = raw[y_start:y_end, x_start:x_end]
            response[layer] = sub_matrix.ravel().tolist()
            response['x'] = sub_matrix.shape[1]
            response['y'] = sub_matrix.shape[0]

        return JsonResponse(response)
Example #24
    def __init__(self, filename, varnames = None, lats = None, lons = None, incl_global = False):
        f = nc(filename) # open file

        if varnames is None: # no variables specified
            varnames = f.variables.keys()
            varnames = [v for v in varnames if not v in ['lat', 'lon']] # remove lat, lon
            if incl_global: varnames += ['global']
        else:
            if not isinstance(varnames, list): # make list
                varnames = [varnames]

        self.lats, self.lons = f.variables['lat'][:], f.variables['lon'][:]

        self.dat = {'names': [], 'units': [], 'longnames': [], 'data': []}

        for v in varnames:
            if v != 'global' or (v == 'global' and 'global' in f.variables):
                var = f.variables[v]
                self.dat['names'].append(v)
                self.dat['units'].append(var.units if 'units' in var.ncattrs() else '')
                self.dat['longnames'].append(var.long_name if 'long_name' in var.ncattrs() else '')
                self.dat['data'].append(var[:])
            else:
                nlats = self.lats.size
                nlons = self.lons.size

                self.dat['names'].append('global') # global mask
                self.dat['units'].append('')
                self.dat['longnames'].append('')
                self.dat['data'].append(masked_array(ones((nlats, nlons)), mask = zeros((nlats, nlons))))

        f.close()

        tol = 1e-5
        if lats is not None: # restrict latitude range
            sellat = logical_and(self.lats >= lats.min() - tol, self.lats <= lats.max() + tol)
            self.lats = self.lats[sellat]
            for i in range(len(self.dat['names'])):
                self.dat['data'][i] = self.dat['data'][i][sellat]
        if lons is not None: # restrict longitude range
            sellon = logical_and(self.lons >= lons.min() - tol, self.lons <= lons.max() + tol)
            self.lons = self.lons[sellon]
            for i in range(len(self.dat['names'])):
                self.dat['data'][i] = self.dat['data'][i][:, sellon]
Example #25
    def __init__(self, filename, aggs, aggname, aggunits, agglongname, scen, times, dt, mp, cr):
        super(MultimetricsFile, self).__init__(filename)

        with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
            f.createDimension(aggname, len(aggs))
            aggsvar = f.createVariable(aggname, 'i4', aggname)
            aggsvar[:] = aggs
            aggsvar.units = aggunits
            aggsvar.long_name = agglongname

            f.createDimension('scen', len(scen))
            scenvar = f.createVariable('scen', 'i4', 'scen')
            scenvar[:] = range(1, len(scen) + 1)
            scenvar.units = 'mapping'
            scenvar.long_name = ', '.join(scen)

            f.createDimension('time_range', len(times))
            timesvar = f.createVariable('time_range', 'i4', 'time_range')
            timesvar[:] = range(1, len(times) + 1)
            timesvar.units = 'mapping'
            timesvar.long_name = ', '.join(times)

            f.createDimension('dt', len(dt))
            dtvar = f.createVariable('dt', 'i4', 'dt')
            dtvar[:] = range(1, len(dt) + 1)
            dtvar.units = 'mapping'
            dtvar.long_name = ', '.join(dt)
            dtvar.note = 'detrend method'

            f.createDimension('mp', len(mp))
            mpvar = f.createVariable('mp', 'i4', 'mp')
            mpvar[:] = range(1, len(mp) + 1)
            mpvar.units = 'mapping'
            mpvar.long_name = ', '.join(mp)
            mpvar.note = 'mean-preserving method'

            f.createDimension('cr', len(cr))
            crvar = f.createVariable('cr', 'i4', 'cr')
            crvar[:] = range(1, len(cr) + 1)
            crvar.units = 'mapping'
            crvar.long_name = ', '.join(cr)
            crvar.note = 'correction method'
Example #26
    def __init__(self, filename, aggs, aggname, aggunits, agglongname, time, scen, dt, mp, cr):
        super(BiasCorrectFile, self).__init__(filename)

        with nc(filename, 'w', format = 'NETCDF4_CLASSIC') as f:
            f.createDimension(aggname, len(aggs)) # create aggregation level
            aggsvar = f.createVariable(aggname, 'i4', aggname)
            aggsvar[:] = aggs
            aggsvar.units = aggunits
            aggsvar.long_name = agglongname

            f.createDimension('time', len(time)) # create time
            timevar = f.createVariable('time', 'i4', 'time')
            timevar[:] = time - time[0]
            timevar.units = 'years since {:d}-01-01'.format(int(time[0]))
            timevar.long_name = 'time'

            f.createDimension('scen', len(scen)) # create scen
            scenvar = f.createVariable('scen', 'i4', 'scen')
            scenvar[:] = range(1, len(scen) + 1)
            scenvar.units = 'mapping'
            scenvar.long_name = ', '.join(scen)

            f.createDimension('dt', len(dt)) # create dt
            dtvar = f.createVariable('dt', 'i4', 'dt')
            dtvar[:] = range(1, len(dt) + 1)
            dtvar.units = 'mapping'
            dtvar.long_name = ', '.join(dt)
            dtvar.note = 'detrend method'

            f.createDimension('mp', len(mp)) # create mp
            mpvar = f.createVariable('mp', 'i4', 'mp')
            mpvar[:] = range(1, len(mp) + 1)
            mpvar.units = 'mapping'
            mpvar.long_name = ', '.join(mp)
            mpvar.note = 'mean-preserving method'

            f.createDimension('cr', len(cr)) # create cr
            crvar = f.createVariable('cr', 'i4', 'cr')
            crvar[:] = range(1, len(cr) + 1)
            crvar.units = 'mapping'
            crvar.long_name = ', '.join(cr)
            crvar.note = 'correction method'
Example #27
    def __init__(self, filename, meta, mask, outdir, fmt):
        super(MaskPlotter, self).__init__(filename, outdir, fmt)
        with nc(self.filename) as f:
            self.area = f.variables['area_' + mask][:]
            self.indices = f.variables[mask + '_index'][:]
        metadata = [] # load meta data
        with open(meta, 'rU') as f:
            for row in csv.reader(f, delimiter = '\t'):
                metadata.append(row)
        self.metadic = {}
        for i in range(len(metadata)):
            md = metadata[i][0].split(',')
            self.metadic[md[0]] = md[1]
        filesplit = filename.split('_') # get model, climate, crop
        self.model = filesplit[0]
        self.climate = filesplit[1]
        self.crop = filesplit[3]
        totarea = self.area[:, 0, 0, :].sum(axis = 1)
        self.sidx = [a[0] for a in sorted(enumerate(totarea), key = lambda x: x[1], reverse = True)]
        self.mask = mask
Example #28
def createnc(filename, lat, lon, time, time_units):
    # create file
    f = nc(filename, 'w', format = 'NETCDF4_CLASSIC')
    # create longitude
    f.createDimension('lon', len(lon))
    lon_var = f.createVariable('lon', 'f8', ('lon',), zlib = True, shuffle = False, complevel = 9, chunksizes = [len(lon)])
    lon_var[:] = lon
    lon_var.units = 'degrees_east'
    lon_var.long_name = 'longitude'  
    # create latitude
    f.createDimension('lat', len(lat))
    lat_var = f.createVariable('lat', 'f8', ('lat',), zlib = True, shuffle = False, complevel = 9, chunksizes = [len(lat)])
    lat_var[:] = lat
    lat_var.units = 'degrees_north'
    lat_var.long_name = 'latitude'
    # create time
    f.createDimension('time', None)
    time_var = f.createVariable('time', 'f8', ('time',), zlib = True, shuffle = False, complevel = 9, chunksizes = [1], endian = 'little')
    time_var[:] = time
    time_var.units = time_units
    time_var.long_name = 'time'
    # close file
    f.close()
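A minimal sketch of calling createnc above; the half-degree grid, time values, and output file name are illustrative assumptions rather than values from the original code.

import numpy as np

# illustrative half-degree global grid and three monthly time steps
lat = np.arange(89.75, -90.0, -0.5)
lon = np.arange(-179.75, 180.0, 0.5)
time = np.array([0.0, 1.0, 2.0])
createnc('example.nc4', lat, lon, time, 'months since 1980-01-01')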
Example #29
    def __init__(self, file, vars = None):
        with nc(file) as f:
            if vars is None:
                vars = setdiff1d(f.variables, ['latitude', 'longitude', 'time'])

            self.lat, self.lon = f.variables['latitude'][0], f.variables['longitude'][0]

            self.time = f.variables['time'][:]

            tunits = f.variables['time'].units
            ts = tunits.split('days since ')[1].split(' ')
            yr0, mth0, day0 = [int(t) for t in ts[0].split('-')[0 : 3]]
            if len(ts) > 1:
                hr0, min0, sec0 = [int(t) for t in ts[1].split(':')[0 : 3]]
            else:
                hr0 = min0 = sec0 = 0
            self.reftime = datetime(yr0, mth0, day0, hr0, min0, sec0)

            nv, nt = len(vars), len(self.time)
            self.data      = zeros((nv, nt))
            self.units     = zeros(nv, dtype = '|S64')
            self.longnames = zeros(nv, dtype = '|S64')
            for i in range(nv):
                if vars[i] in f.variables:
                    var = f.variables[vars[i]]
                else:
                    vidx = foundVar(f.variables.keys(), vars[i])
                    var  = f.variables[f.variables.keys()[vidx]]
                self.data[i] = var[:, 0, 0]
                self.units[i] = var.units
                self.longnames[i] = var.long_name

            self.vars = vars # store variable names

            self.pridx = foundVar(vars, 'pr') # variable indices
            self.maidx = foundVar(vars, 'tmax')
            self.miidx = foundVar(vars, 'tmin')
Example #30
    def __init__(self, filename, meta, mask, outdir, fmt):
        super(MaskPlotter, self).__init__(filename, outdir, fmt)
        with nc(self.filename) as f:
            self.area = f.variables["area_" + mask][:]
            self.areaglob = f.variables["area_global"][:]
            self.indices = f.variables[mask + "_index"][:]
        metadata = []  # load meta data
        with open(meta, "rU") as f:
            for row in csv.reader(f, delimiter="\t"):
                metadata.append(row)
        self.metadic = {}
        for i in range(len(metadata)):
            md = metadata[i][0].split(",")
            self.metadic[md[0]] = md[1]
        filesplit = basename(filename).split("_")  # get model, climate, crop
        self.model = filesplit[0]
        self.climate = filesplit[1]
        self.crop = filesplit[3]
        self.yr0 = filesplit[5]
        self.yr1 = filesplit[6].split(".")[0]
        totarea = self.area[:, 0, 0, :].sum(axis=1)
        self.sidx = [a[0] for a in sorted(enumerate(totarea), key=lambda x: x[1], reverse=True)]
        self.mask = mask
        self.prefix = "_".join([self.model, self.climate, self.yr0, self.yr1]) + "_"
Example #31
                  dest="co2file",
                  default="",
                  type="string",
                  help="csv file of CO2 values",
                  metavar="FILE")
parser.add_option("-o",
                  "--output",
                  dest="output",
                  default="Generic.met",
                  type="string",
                  help="Output CL! file pattern")
options, args = parser.parse_args()

variables = options.variables.split(',')

infile = nc(options.inputfile)
vlist = infile.variables.keys()
time = infile.variables['time'][:]
tunits = infile.variables['time'].units

varlists = od([('TMAX', ['tmax', 'tasmax']), \
               ('TMIN', ['tmin', 'tasmin']), \
               ('SRAD', ['solar', 'rad', 'rsds', 'srad']), \
               ('RAIN', ['precip', 'pr', 'rain'])])
varnames = array(varlists.keys())
unitnames = array([['oc', 'degc'], ['oc', 'degc'],
                   ['mj/m^2', 'mj/m2', 'mjm-2'], ['mm']])
alldata = zeros((len(time), len(varnames)))
for i in range(len(varnames)):
    var_name = varnames[i]
Example #32
            idx = dates.index(pyear)
        array_data = asarray(data[4 + j])[:, variable_idx]
        array_data[array_data == '?'] = '-99'  # replace '?' with '-99'
        if has_pdate:
            # convert pdate from dd_mmm_yyyy to Julian day
            pdate = array_data[pdate_idx].split('_')
            pdate = datetime.date(int(pdate[2]), mth2num[pdate[1]],
                                  int(pdate[0]))
            array_data[pdate_idx] = pdate.strftime('%j')
        var_data[idx, i, :] = array_data.astype(double)

# create pSIMS NetCDF3 file
dirname = os.path.dirname(options.outputfile)
if dirname and not os.path.exists(dirname):
    raise Exception('Directory to output file does not exist')
root_grp = nc(options.outputfile, 'w', format='NETCDF3_CLASSIC')

# add latitude and longitude
root_grp.createDimension('longitude', 1)
root_grp.createDimension('latitude', 1)
lon_var = root_grp.createVariable('longitude', 'f8', ('longitude', ))
lon_var[:] = lon
lon_var.units = 'degrees_east'
lon_var.long_name = 'longitude'
lat_var = root_grp.createVariable('latitude', 'f8', ('latitude', ))
lat_var[:] = lat
lat_var.units = 'degrees_north'
lat_var.long_name = 'latitude'

# create time and scenario dimensions
root_grp.createDimension('time', None)
Example #33
    def run(self, latidx, lonidx):
        try:
            num_scenarios = self.config.get('scens')
            num_years = self.config.get('num_years')
            variables = self.config.get('variables')
            units = self.config.get('var_units')
            delta = self.config.get('delta')
            ref_year = self.config.get('ref_year')
            inputfile = self.config.get_dict(self.translator_type,
                                             'inputfile',
                                             default='Generic.out')
            outputfile = self.config.get_dict(
                self.translator_type,
                'outputfile',
                default='../../outputs/output_%04d_%04d.psims.nc' %
                (latidx, lonidx))

            # get out file(s)
            basename, fileext = os.path.splitext(inputfile)
            outfiles = [''] * num_scenarios
            for i in range(num_scenarios):
                outfiles[i] = inputfile if not i else basename + str(
                    i) + fileext

            # get variables
            variables = array(variables.split(','))  # split variable names
            latidx = int(latidx)
            lonidx = int(lonidx)
            delta = delta.split(',')
            if len(delta) < 1 or len(delta) > 2:
                raise Exception('Wrong number of delta values')
            latdelta = double(
                delta[0]) / 60.  # convert from arcminutes to degrees
            londelta = latdelta if len(delta) == 1 else double(delta[1]) / 60.

            # get number of variables
            num_vars = len(variables)

            # get units
            units = units.split(',')
            if len(units) != num_vars:
                raise Exception(
                    'Number of units must be same as number of variables')

            # compute latitude and longitude
            lat = 90. - latdelta * (latidx - 0.5)
            lon = -180. + londelta * (lonidx - 0.5)

            # get reference time, number of years, and dates
            ref_date = datetime.datetime(ref_year, 1, 1)
            dates = range(ref_year, ref_year + num_years)

            # whether or not planting_date is among reported variables
            has_pdate = 'planting_date' in variables
            if has_pdate:
                pdate_idx = where(variables == 'planting_date')[0][0]
                mth2num = {v: k for k, v in enumerate(calendar.month_abbr)}

            # iterate through scenarios
            var_data = -99 * ones((num_years, num_scenarios, num_vars))
            for i in range(num_scenarios):
                try:
                    data = [l.split() for l in tuple(open(outfiles[i]))]
                except IOError:
                    print 'Out file', i + 1, 'does not exist'
                    continue
                if len(data) < 5:
                    continue  # no data, move to next file
                num_data = len(data[4:])

                # search for variables within list of all variables
                all_variables = data[2]
                variable_idx = []
                for v in variables:
                    if not v in all_variables:
                        raise Exception(
                            'Variable {:s} not in out file {:d}'.format(
                                v, i + 1))
                    else:
                        variable_idx.append(all_variables.index(v))

                # remove header, select variables, and convert to numpy array of doubles
                prev_year = nan
                prev_idx = nan
                date_idx = all_variables.index('Date')
                if has_pdate:
                    pdate_idx2 = all_variables.index('planting_date')
                for j in range(num_data):
                    if data[4 + j] == []:
                        continue  # blank line
                    if num_data == num_years:  # number of dates in file exactly matches number of years
                        idx = j
                    else:
                        pyear = int(data[4 + j][pdate_idx2].split('_')[2])
                        idx = dates.index(pyear)
                    array_data = asarray(data[4 + j])[variable_idx]
                    array_data[array_data ==
                               '?'] = '-99'  # replace '?' with '-99'
                    if has_pdate:
                        # convert pdate from dd_mmm_yyyy to Julian day
                        pdate = array_data[pdate_idx].split('_')
                        pdate = datetime.date(int(pdate[2]), mth2num[pdate[1]],
                                              int(pdate[0]))
                        array_data[pdate_idx] = pdate.strftime('%j')
                    var_data[idx, i, :] = array_data.astype(double)

            # create pSIMS NetCDF3 file
            root_grp = nc(outputfile, 'w', format='NETCDF3_CLASSIC')

            # add latitude and longitude
            root_grp.createDimension('lon', 1)
            root_grp.createDimension('lat', 1)
            lon_var = root_grp.createVariable('lon', 'f8', 'lon')
            lon_var[:] = lon
            lon_var.units = 'degrees_east'
            lon_var.long_name = 'longitude'
            lat_var = root_grp.createVariable('lat', 'f8', 'lat')
            lat_var[:] = lat
            lat_var.units = 'degrees_north'
            lat_var.long_name = 'latitude'

            # create time and scenario dimensions
            root_grp.createDimension('time', None)
            root_grp.createDimension('scen', num_scenarios)

            # add time and scenario variables
            time_var = root_grp.createVariable('time', 'i4', 'time')
            time_var[:] = range(1, num_years + 1)
            time_var.units = 'growing seasons since {:s}'.format(str(ref_date))
            time_var.long_name = 'time'
            scenario_var = root_grp.createVariable('scen', 'i4', 'scen')
            scenario_var[:] = range(1, num_scenarios + 1)
            scenario_var.units = 'no'
            scenario_var.long_name = 'scenario'

            # add data
            for i in range(num_vars):
                var = root_grp.createVariable(variables[i],
                                              'f4',
                                              ('time', 'scen', 'lat', 'lon'),
                                              zlib=True,
                                              shuffle=False,
                                              complevel=9,
                                              fill_value=1e20)
                var[:] = var_data[:, :, i]
                var.units = units[i]
                var.long_name = variables[i]

            # close file
            root_grp.close()
            return True
        except:
            print "[%s]: %s" % (os.path.basename(__file__),
                                traceback.format_exc())
            return False
Example #34
import unittest as ut
import numpy.testing as nt
import numpy as np
import numpy.ma as ma
import os, sys
import subprocess

from netCDF4 import Dataset as nc

from wrf import *

NCL_EXE = "/Users/ladwig/nclbuild/6.3.0/bin/ncl"
TEST_FILE = "/Users/ladwig/Documents/wrf_files/wrfout_d01_2010-06-13_21:00:00"
OUT_NC_FILE = "/tmp/wrftest.nc"
NCFILE = nc(TEST_FILE)
NCGROUP = [NCFILE, NCFILE, NCFILE]

# Python 3
if sys.version_info > (3, ):
    xrange = range

ROUTINE_MAP = {
    "avo": avo,
    "eth": eth,
    "cape_2d": cape_2d,
    "cape_3d": cape_3d,
    "ctt": ctt,
    "dbz": dbz,
    "helicity": srhel,
    "omg": omega,