Example No. 1
def get_bathymetry(bathymetry_file, bathymetry_file2, mesh2d):
    nc = NetCDFFile(bathymetry_file)
    lat = nc.variables['lat'][:]
    lon = nc.variables['lon'][:]
    values = nc.variables['elevation'][:, :]
    #values = values.filled(9999.)
    interpolator = scipy.interpolate.RegularGridInterpolator((lat, lon),
                                                             values)

    nc2 = NetCDFFile(bathymetry_file2)
    lat2 = nc2.variables['lat'][:]
    lon2 = nc2.variables['lon'][:]
    values2 = nc2.variables['z'][:, :]
    #values = values.filled(9999.)
    interpolator2 = scipy.interpolate.RegularGridInterpolator((lat2, lon2),
                                                              values2)

    P1_2d = FunctionSpace(mesh2d, 'CG', 1)
    bathymetry2d = Function(P1_2d, name="bathymetry")
    xvector = mesh2d.coordinates.dat.data
    bvector = bathymetry2d.dat.data
    assert xvector.shape[0] == bvector.shape[0]
    for i, xy in enumerate(xvector):
        lat, lon = utm.to_latlon(xy[0], xy[1], utm_zone, utm_band)

        if lat2.min() <= lat <= lat2.max() and lon2.min() <= lon <= lon2.max():
            bvector[i] = max(-interpolator2((lat, lon)), minimum_depth)
        else:
            bvector[i] = max(-interpolator((lat, lon)), minimum_depth)
    return bathymetry2d
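
# A minimal, hypothetical usage sketch for get_bathymetry: it assumes a working
# Firedrake installation and that utm_zone, utm_band and minimum_depth are set
# at module level; the mesh and bathymetry file names below are placeholders.
from firedrake import Mesh

mesh2d = Mesh('channel.msh')  # hypothetical mesh file
bathymetry2d = get_bathymetry('gebco_global.nc', 'local_survey.nc', mesh2d)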
Example No. 2
    def __init__(self,agent,env,**kw):
        super(LoggingRLI,self).__init__(agent,env,**kw)

        self.step_count = self.ep_count = 0

        if os.access(self.episode_filename,os.F_OK):
            self.remove_or_rename(self.episode_filename)

        self.episode_data = ed = NetCDFFile(self.episode_filename,'w')
        ed.createDimension('index',None)
        ed.createDimension('value',1)
        ed.createVariable('start','d',('index','value'))
        ed.createVariable('length','d',('index','value'))
        ed.createVariable('reward','f',('index','value'))

        for name,(fn,type,size) in self.ep_vars.items():
            ed.createDimension(name+'_dim',size)
            ed.createVariable(name,type,('index',name+'_dim'))

        if self.step_vars:
            if os.access(self.step_filename,os.F_OK):
                self.remove_or_rename(self.step_filename)

            self.step_data = sd = NetCDFFile(self.step_filename,'a')
            sd.createDimension('index',None)
            for name,(fn,type,size) in self.step_vars.items():
                sd.createDimension(name+'_dim',size)
                sd.createVariable(name,type,('index',name+'_dim'))

        self.last_ckpt_step = 0
        self.last_ckpt_episode = 0
Example No. 3
def read_nc(fid):
    """
    Read a NetCDF file (.nc) and return the underlying data. It is assumed these
    nc files are from srtm30p and that the internal variables are: x, y, z
    given in lon, lat, meters (positive z up)

    The file is given as a grid so we also want to convert it into a single
    3-column array so it's easier to work with

    :type fid: str
    :param fid: .nc file to read
    :rtype: np.array
    :return: Nx3 array with columns representing x, y, z
    """
    with NetCDFFile(fid) as data:
        try:
            x = data.variables["x"][:]
            y = data.variables["y"][:]
            z = data.variables["z"][:]

            # convert x, y, and z to MxN arrays
            x, y = np.meshgrid(x, y)

            # convert x, y, z to Nx1 arrays
            x = x.flatten()
            y = y.flatten()
            z = z.flatten()

            # convert x, y, z to a single Nx3 array
            return np.vstack((x, y, z)).T
        except KeyError:
            print("Unexpected internal structure of .nc file")
            sys.exit(-1)
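
# A minimal usage sketch, assuming a hypothetical SRTM30P tile on disk: the
# returned Nx3 array can be unpacked into lon, lat and elevation columns.
xyz = read_nc('srtm30p_tile.nc')  # placeholder file name
lon, lat, elev = xyz[:, 0], xyz[:, 1], xyz[:, 2]
print(xyz.shape)  # (N, 3)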
Example No. 4
def getJulDatefromNc(ncfile):
    #from netCDF4 import Dataset as NetCDFFile

    from astropy.time import Time
    from datetime import datetime
    nc = NetCDFFile(ncfile)

    retval = -1
    try:
        utdate = getattr(nc, 'date')
        uttime = getattr(nc, 'start_time')
        utdate = utdate.replace("\"", "")
        utString = "%s %s" % (utdate, uttime)
        t = Time(datetime.strptime(utString, "%m/%d/%Y %H:%M:%S"),
                 scale="utc",
                 format="datetime")
        retval = t.jd
    except Exception:
        # fall back to the decimal-year UTDate header
        utdate = nc.variables['Header.TimePlace.UTDate'].getValue()
        t = Time(float(utdate), scale="utc", format="decimalyear")
        retval = t.jd

    nc.close()
    return retval
Example No. 5
    def load_netcdf(self, year, day):
        print("loading %s%.4i/%.4i%.3i.nc" % (self.rpath, year, year, day))
        filein = NetCDFFile(self.rpath + "%.4i/%.4i%.3i.nc" %
                            (year, year, day),
                            "r",
                            mmap=False)
        return filein
Example No. 6
def save_2D(filenameout,var,lat,lon,miss_val):
    import numpy as np
    from scipy.io.netcdf import netcdf_file as NetCDFFile

    n = NetCDFFile(filenameout, 'w',) # open it for writing 
    n.title = 'saved netcdf variable'

    n.createDimension('lat',len(lat))
    n.createDimension('lon',len(lon))

    latitude = n.createVariable('lat','f',('lat',))
    latitude.longname = 'latitude'
    latitude.units = 'degrees_north'
    latitude[:] = lat.astype(np.float32)

    longitude = n.createVariable('lon','f',('lon',))
    longitude.longname = 'longitude'
    longitude.units = 'degrees_east'
    longitude[:] = lon[:].astype(np.float32)

    varnc = n.createVariable('varnc','f',('lat','lon',))
    varnc.missing_value = np.array(miss_val,np.float32)
    varnc.FillValue = np.array(miss_val,np.float32)

    varnc[:,:]=var[:,:].astype(np.float32)

    n.close()
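
# A minimal usage sketch for save_2D, assuming a synthetic field on a regular
# 1-degree grid; the output file name is a placeholder.
import numpy as np

lat = np.arange(-90.0, 91.0, 1.0)
lon = np.arange(0.0, 360.0, 1.0)
field = np.random.rand(len(lat), len(lon))
save_2D('field.nc', field, lat, lon, -9999.0)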
Example No. 7
    def __init__(self, filename):
        self.filename = filename
        try:
            self.ncf = NetCDFFile(filename)
            self.readmaps(self.ncf)
        except IOError:
            # close the handle if it was opened, then signal the failure
            if hasattr(self, 'ncf'):
                self.ncf.close()
            raise NameError("Failed reading map {}".format(self.filename))
Example No. 8
    def ckpt_restore_state(self,filename):
        from plastk import pkl
        ckpt = pkl.load(filename)

        self.verbose("Restoring checkpoint state")
        for a in self.ckpt_attribs:
            self.verbose(a,' = ', ckpt[a])
            setattr(self,a,ckpt[a])
            
        rand.seed(*ckpt['rand_seed'])

        self.env.sim = self.agent.sim = self
        
        self.episode_data = NetCDFFile(self.episode_filename,'a')
        if self.step_vars:
            self.step_data = NetCDFFile(self.step_filename,'a')
        return ckpt
Example No. 9
    def __init__(self, filename):
        self.filename = filename
        if not os.path.isfile(self.filename):
            raise OSError('{0}: no such file'.format(self.filename))
        self.fh = NetCDFFile(self.filename, 'r')
        self.df = self.read_db()
        self.jobid = self.get_jobid()
        self.fh.close()
Example No. 10
def getObsNum(ncfile):

    nc = NetCDFFile(ncfile)
    try:
        onum = nc.variables['Header.Dcs.ObsNum'].getValue()
    except KeyError:
        nc.close()
        raise Exception(
            "This does not seem to be an LMT file. Use ASTE functions instead")

    nc.close()
    return onum
Example No. 11
def save_3D_stack(filenameout, var, varname, lat, lon, miss_val, time_it):
    """
    save_3D_stack(filenameout, var, varname, lat, lon, miss_val, time_it)
    save a 3D (time, lat, lon) numpy array into netcdf
    arguments:
    filenameout: string
    var: np.array
    varname: string
    lat: np.array
    lon: np.array
    miss_val: real
    time_it: np.array (dtype=int)
    """
    import numpy as np
    from scipy.io.netcdf import netcdf_file as NetCDFFile

    n = NetCDFFile(filenameout, 'w')  # open it for writing
    n.title = 'saved netcdf variable'

    n.createDimension('time', None)
    n.createDimension('lat', len(lat))
    n.createDimension('lon', len(lon))

    latitude = n.createVariable('lat', 'f', ('lat', ))
    latitude.longname = 'latitude'
    latitude.units = 'degrees_north'
    latitude[:] = lat[:].astype(np.float32)

    longitude = n.createVariable('lon', 'f', ('lon', ))
    longitude.longname = 'longitude'
    longitude.units = 'degrees_east'
    longitude[:] = lon[:].astype(np.float32)

    time = n.createVariable('time', 'i', ('time', ))
    time.units = 'days since 1979-1-1 00:00:0.0'
    time.delta_t = '0000-00-01 00:00:00'

    varnc = n.createVariable(varname, 'f', ('time', 'lat', 'lon'))
    varnc.missing_value = np.array(miss_val, np.float32)
    varnc._FillValue = np.array(miss_val, np.float32)

    for l in range(0, var.shape[0]):
        time[l] = time_it[l]
        varnc[l, :, :] = var[l, :, :].astype(np.float32)

    n.close()
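
# A minimal usage sketch for save_3D_stack, assuming three daily time steps of
# a synthetic (time, lat, lon) field; file and variable names are placeholders.
import numpy as np

lat = np.linspace(-89.5, 89.5, 180)
lon = np.linspace(0.5, 359.5, 360)
var = np.random.rand(3, len(lat), len(lon))
time_it = np.array([0, 1, 2])  # days since 1979-1-1, one entry per time step
save_3D_stack('stack.nc', var, 'tas', lat, lon, -9999.0, time_it)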
Example No. 12
def isvalidObs(ncfile):

    nc = NetCDFFile(ncfile)
    valid = 0
    try:
        valid = nc.variables['Header.ScanFile.Valid'].getValue()
    except KeyError:
        print("Valid flag not found in nc file %s" % ncfile)

    nc.close()
    return valid > 0
Example No. 13
    def __init__(self, filename):
        '''
        Notes
        -----
        The EXOFile class is an interface to the Exodus II api. Its methods
        are named after the analogous method from the Exodus II C bindings,
        minus the prefix 'ex_'.

        '''
        self.fh = NetCDFFile(filename, mode='w')
        self.jobid = os.path.splitext(os.path.basename(filename))[0]
        self.filename = filename
Example No. 14
def getOffsetFromMap(ncfile):

    nc = NetCDFFile(ncfile)
    try:
        signalv = nc.variables['signal']
        azOffset = signalv.offset_x
        elOffset = signalv.offset_y
    except KeyError:
        nc.close()
        raise Exception(
            "This does not seem to be an LMT file. Use ASTE functions instead")

    nc.close()
    return azOffset, elOffset
Example No. 15
    def load_ncep_data(self, air_temp_file=None, spec_hum_file=None, trop_pres_file=None, geo_height_file=None, surface_pres_file=None):
        """Loads NCEP Reanalysis data into NetCdf objects. Returns a named tuple with objects."""
        filenames = (air_temp_file, spec_hum_file, trop_pres_file, geo_height_file, surface_pres_file)
        dflt_search_globs = ("air.*.nc", "shum.*.nc", "pres.tropp.*.nc", "hgt.*.nc", "pres.sfc.*.nc")

        data_list = []
        for data_filename, srch_glob, file_type_name in zip(filenames, dflt_search_globs, NCEPData._fields):
            if not data_filename:
                srch_res = glob(srch_glob)
                if len(srch_res) != 1:
                    raise Exception("For file type: %s expected to find 1 NCEP file, instead found: %s, using glob: %s" % (file_type_name, srch_res, srch_glob))
                data_filename = srch_res[0]
            logger.debug("Loading NCEP file for %s: %s" % (file_type_name, data_filename))
    
            # Add filename for use elsewhere
            ncep_obj = NetCDFFile(data_filename, "r")
            ncep_obj.__dict__["filename"] = data_filename
    
            data_list.append(ncep_obj)

        return NCEPData(*data_list)
Example No. 16
def read(filename):
    """
    read a netcdf file and return a dictionary with
    key: name of the variable, value: array
    the array is masked if a missing value is defined
    in the netcdf
    """
    import numpy as np
    from numpy import ma
    # from Scientific.IO.NetCDF import NetCDFFile
    from scipy.io.netcdf import netcdf_file as NetCDFFile

    nc = NetCDFFile(filename, 'r')  # open it for reading

    var_dict = {}

    for name, cdf_var in nc.variables.items():
        values = np.squeeze(cdf_var[...])
        # mask the array if any of the usual fill/missing-value
        # attributes is defined on the variable
        for attr in ('_FillValue', 'FillValue', '_missing_value',
                     'missing_value'):
            if attr in dir(cdf_var):
                values = ma.masked_values(values, float(getattr(cdf_var, attr)))
        var_dict[name] = values.astype(np.float32)
    nc.close()
    return var_dict
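
# A minimal usage sketch, assuming a file such as the one written by the
# save_3D_stack example above; keys mirror the netcdf variable names.
variables = read('stack.nc')  # placeholder file name
print(sorted(variables.keys()))  # e.g. ['lat', 'lon', 'tas', 'time']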
Example No. 17
    def __init__(self, ncFileName, source=None):
        if os.path.exists(ncFileName):
            self.filename = ncFileName
            self.filtered = False
            if source is not None:
                self.source = source
            try:
                ncFile = NetCDFFile(ncFileName)
                self.loadFromNcFile(ncFile)
                ncFile.close()
            except RuntimeError:
                # fall back to the IDL .sav format
                savFile = readsav(ncFileName)
                self.loadFromIDLSav(savFile)
            self.fixWeightMap()
            self.calculateSNMap()
        else:
            raise Exception("AztecMap Error. No such file or directory")
Example No. 18
    def writeToNc(self, ncFileName):
        ncFile = NetCDFFile(ncFileName, 'w')
        ncFile.createDimension("nrows", self.RaCoords.shape[0])
        ncFile.createDimension("ncols", self.DecCoords.shape[0])
        dims = ("nrows", "ncols")
        drow = ("nrows", )
        dcol = ("ncols", )
        ncFile.createVariable("signal", "d", dims)
        ncFile.createVariable("weight", "d", dims)
        ncFile.createVariable("kernel", "d", dims)
        ncFile.createVariable("rowCoordsPhys", "d", drow)
        ncFile.createVariable("colCoordsPhys", "d", dcol)
        ncFile.createVariable("xCoordsAbs", "d", dims)
        ncFile.createVariable("yCoordsAbs", "d", dims)
        ncFile.createVariable("filteredSignal", "d", dims)
        ncFile.createVariable("filteredWeight", "d", dims)
        ncFile.createVariable("filteredKernel", "d", dims)
        setattr(ncFile, "source", "%s" % self.source)
        setattr(ncFile, "MasterGrid[0]", self.SourceRa / 180.0 * (npy.pi))
        setattr(ncFile, "MasterGrid[1]", self.SourceDec / 180.0 * (npy.pi))
        ncFile.variables['signal'][:] = self.signal.value
        ncFile.variables['weight'][:] = self.weight.value
        ncFile.variables['rowCoordsPhys'][:] = self.RaCoords / 180.0 * (npy.pi)
        ncFile.variables['colCoordsPhys'][:] = self.DecCoords / 180.0 * (npy.pi)
        ncFile.variables['kernel'][:] = self.kernel
        ncFile.variables['xCoordsAbs'][:] = self.AbsRaCoords.to("rad").value
        ncFile.variables['yCoordsAbs'][:] = self.AbsDecCoords.to("rad").value
        if self.filtered:
            ncFile.variables['filteredSignal'][:] = self.fSignal.value
            ncFile.variables['filteredWeight'][:] = self.fWeight.value
            ncFile.variables['filteredKernel'][:] = self.fKernel
        else:
            ncFile.variables['filteredSignal'][:] = self.signal.value
            ncFile.variables['filteredWeight'][:] = self.weight.value
            ncFile.variables['filteredKernel'][:] = self.kernel
        ncFile.sync()
        ncFile.close()
Example No. 19
    def data(self):
        f = NetCDFFile(open(self.filename, 'rb'))
        t = f.variables['scan_acquisition_time'].data / 60.

        # this is half the speed of the following code
        # ions = list(set(f.variables['mass_values'].data))
        # cols = np.array([ions.index(i) for i in \
        #                  f.variables['mass_values'].data])

        # TODO: slow; there has to be a way to vectorize this more?
        ions = np.array(list(set(f.variables['mass_values'].data)))
        rcols = f.variables['mass_values'].data
        cols = np.empty(rcols.shape, dtype=int)
        for i, ion in enumerate(ions):
            cols[rcols == ion] = i

        vals = f.variables['intensity_values'].data
        rowst = np.add.accumulate(f.variables['point_count'].data)
        rowst = np.insert(rowst, 0, 0)

        data = scipy.sparse.csr_matrix((vals, cols, rowst),
                                       shape=(len(t), len(ions)), dtype=float)
        return Chromatogram(data.todense(), t, ions)
Example No. 20
import numpy as np

from ase.test import NotAvailable

try:
    from scipy.io.netcdf import NetCDFFile
except ImportError:
    raise NotAvailable('Scipy too old')

# Write array
a1 = np.random.rand(5, 5)
a2 = a1 * 2 - 5
nc = NetCDFFile('test.nc', 'w')
nc.createDimension('dimx', a1.shape[0])
nc.createDimension('dimy', a1.shape[1])
nc.createVariable('matrix1', 'd', ('dimx', 'dimy'))[:] = a1
nc.createVariable('matrix2', 'd', ('dimx', 'dimy'))[:] = a2
nc.sync()
nc.close()

# Read array
nc = NetCDFFile('test.nc', 'r')
b1 = nc.variables['matrix1'][:]
b2 = nc.variables['matrix2'][:]

assert np.all(a1 == b1) and np.all(a2 == b2)
Example No. 21
def write_netcdf(filename, df, info=None):
    # FIXME: still a lot of issues here
    if info is None:
        info = {}

    f = NetCDFFile(filename, 'w', version=1)

    f.createDimension('_2_byte_string', 2)
    f.createDimension('_4_byte_string', 4)
    f.createDimension('_8_byte_string', 8)
    f.createDimension('_16_byte_string', 16)
    f.createDimension('_32_byte_string', 32)
    f.createDimension('_64_byte_string', 64)
    f.createDimension('_128_byte_string', 128)
    f.createDimension('_255_byte_string', 255)
    f.createDimension('error_number', 1)
    f.flush()

    # TODO: check that these do anything
    f.createDimension('instrument_number', 1)
    f.createDimension('range', 2)
    f.flush()

    f.dataset_completeness = 'C1'  # TODO: save peaks too? ('C1+C2')
    f.aia_template_revision = '1.0'
    f.ms_template_revision = '1.0.1'
    f.netcdf_revision = '2.3.2'
    f.languages = 'English'

    f.administrative_comments = 'none'
    f.dataset_origin = 'none'
    f.dataset_owner = ' '
    f.dataset_date_time_stamp = ' '
    f.flush()

    f.company_method_name = ' '
    f.pre_experiment_program_name = ' '
    f.post_experiment_program_name = ' '
    f.source_file_reference = ' '

    f.experiment_title = info.get('name', ' ')
    f.operator_name = info.get('operator', ' ')
    # TODO: wrong format for injection_date_time_stamp
    # example: 20141027123030-0500
    f.injection_date_time_stamp = info.get('date', ' ')
    f.company_method_id = info.get('method', ' ')
    f.sample_name = info.get('sample', ' ')
    f.flush()

    f.sample_id_comments = 'none'
    f.sample_id = 'none'
    f.sample_name = 'none'
    f.sample_type = 'none'
    f.sample_injection_volume = 'none'
    f.sample_amount = 'none'
    f.flush()

    f.retention_unit = 'Seconds'

    # TODO: need to merge this back into access method
    if hasattr(df.values, 'todense'):
        scan_locs = (df.values != 0).todense()
    else:
        scan_locs = df.values != 0

    f.createVariable('error_log', 'c', ('error_number', '_64_byte_string'))

    f.createDimension('scan_number', len(df.index))
    v = f.createVariable('scan_acquisition_time', '>d', ('scan_number',))
    v[:] = 60. * df.index.astype('d')
    v = f.createVariable('total_intensity', '>d', ('scan_number',))
    v[:] = df.values.sum(axis=1).astype('d').flatten()
    v = f.createVariable('point_count', '>i', ('scan_number',))
    v[:] = np.sum(scan_locs, axis=1).astype('i').flatten()
    f.flush()

    f.createDimension('point_number', np.sum(scan_locs))
    stretch_t = np.resize(df.index, df.values.T.shape).T
    v = f.createVariable('mass_values', '>f', ('point_number',))
    v[:] = stretch_t[scan_locs]
    v = f.createVariable('intensity_values', '>f', ('point_number',))
    if hasattr(df.values, 'todense'):
        v[:] = df.values.todense()[scan_locs]
    else:
        v[:] = df.values[scan_locs]

    # TODO: check that these do anything
    # f.createVariable('time_values', 'd', ('point_number',))
    v = f.createVariable('resolution', 'd', ('scan_number',))
    v[:] = -9999
    v = f.createVariable('actual_scan_number', 'i', ('scan_number',))
    v[:] = -9999
    v = f.createVariable('scan_index', 'i', ('scan_number',))
    v[:] = np.cumsum(np.sum(scan_locs, axis=1).astype('i'))
    v = f.createVariable('mass_range_min', 'd', ('scan_number',))
    v[:] = np.min(stretch_t[scan_locs]) * np.ones(stretch_t.shape[0])
    v = f.createVariable('mass_range_max', 'd', ('scan_number',))
    v[:] = np.max(stretch_t[scan_locs]) * np.ones(stretch_t.shape[0])
    v = f.createVariable('a_d_sampling_rate', 'd', ('scan_number',))
    v[:] = -9999
    v = f.createVariable('a_d_coaddition_factor', 'h', ('scan_number',))
    v[:] = -9999
    v = f.createVariable('flag_count', 'i', ('scan_number',))
    v[:] = 0
    v = f.createVariable('inter_scan_time', 'd', ('scan_number',))
    v[0] = 0
    v[1:] = np.diff(df.index.astype('d'))
    v = f.createVariable('scan_duration', 'd', ('scan_number',))
    v[:] = 1
    v = f.createVariable('time_range_min', 'd', ('scan_number',))
    v[:] = -9999
    v = f.createVariable('time_range_max', 'd', ('scan_number',))
    v[:] = -9999

    inst_tup = ('instrument_number', '_32_byte_string')
    f.createVariable('instrument_serial_no', 'c', inst_tup)
    f.createVariable('instrument_fw_version', 'c', inst_tup)
    f.createVariable('instrument_app_version', 'c', inst_tup)
    f.createVariable('instrument_os_version', 'c', inst_tup)
    f.createVariable('instrument_sw_version', 'c', inst_tup)
    f.createVariable('instrument_comments', 'c', inst_tup)
    f.createVariable('instrument_model', 'c', inst_tup)
    f.createVariable('instrument_name', 'c', inst_tup)
    f.createVariable('instrument_id', 'c', inst_tup)
    f.createVariable('instrument_mfr', 'c', inst_tup)

    f.close()
Example No. 22
    def total_trace(self, twin=None):
        f = NetCDFFile(open(self.filename, 'rb'))
        tme = f.variables['scan_acquisition_time'].data / 60.
        tic = f.variables['total_intensity'].data
        return Trace(tic, tme, name='TIC')
Example No. 23
def Tohoku_domain(res=3, split=False):
    """
    Set up a mesh, along with function spaces and functions, for the 2D ocean domain associated with the Tohoku tsunami 
    problem.
    
    :param res: mesh resolution value, ranging from 'extra coarse' (1) to extra fine (5).
    :param split: choose whether to consider the velocity space as vector P2 or as a pair of scalar P2 spaces.
    :return: associated mesh, mixed function space forward and adjoint variables and bathymetry field. 
    """

    # Define mesh and function spaces:
    if res == 1:
        # 226,967 vertices, ~45 seconds per timestep
        mesh = Mesh('resources/meshes/TohokuXFine.msh')
        print('WARNING: chosen mesh resolution can be extremely computationally intensive')
        if input('Are you happy to proceed? (y/n)') == 'n':
            exit(23)
    elif res == 2:
        # 97,343 vertices, ~1 second per timestep
        mesh = Mesh('resources/meshes/TohokuFine.msh')
    elif res == 3:
        # 25,976 vertices, ~0.25 seconds per timestep
        mesh = Mesh('resources/meshes/TohokuMedium.msh')
    elif res == 4:
        # 7,194 vertices, ~0.07 seconds per timestep
        mesh = Mesh('resources/meshes/TohokuCoarse.msh')
    elif res == 5:
        # 3,126 vertices, ~0.03 seconds per timestep
        mesh = Mesh('resources/meshes/TohokuXCoarse.msh')
    else:
        raise ValueError('Please try again, choosing an integer in the range 1-5.')
    mesh_coords = mesh.coordinates.dat.data

    if split:
        # Define Taylor-Hood mixed function space:
        W = FunctionSpace(mesh, 'CG', 2) * FunctionSpace(
            mesh, 'CG', 2) * FunctionSpace(mesh, 'CG', 1)

        # Construct functions to store forward and adjoint variables, along with bathymetry:
        q_ = Function(W)
        lam_ = Function(W)
        u_, v_, eta_ = q_.split()
        lu_, lv_, le_ = lam_.split()
        eta0 = Function(W.sub(2), name='Initial surface')
        b = Function(W.sub(2), name='Bathymetry')

        # Specify zero initial fluid velocity:
        u_.interpolate(Expression(0))
        v_.interpolate(Expression(0))
        lu_.interpolate(Expression(0))
        lv_.interpolate(Expression(0))
    else:
        # Define Taylor-Hood mixed function space:
        W = VectorFunctionSpace(mesh, 'CG', 2) * FunctionSpace(mesh, 'CG', 1)

        # Construct functions to store forward and adjoint variables, along with bathymetry:
        q_ = Function(W)
        lam_ = Function(W)
        u_, eta_ = q_.split()
        lu_, le_ = lam_.split()
        eta0 = Function(W.sub(1), name='Initial surface')
        b = Function(W.sub(1), name='Bathymetry')

        # Specify zero initial fluid velocity:
        u_.interpolate(Expression([0, 0]))
        lu_.interpolate(Expression([0, 0]))

    # Read and interpolate initial surface data (courtesy of Saito):
    nc1 = NetCDFFile('resources/Saito_files/init_profile.nc', mmap=False)
    lon1 = nc1.variables['x'][:]
    lat1 = nc1.variables['y'][:]
    x1, y1 = conversion.vectorlonlat2utm(
        lat1, lon1,
        force_zone_number=54)  # Our mesh mainly resides in UTM zone 54
    elev1 = nc1.variables['z'][:, :]
    interpolator_surf = si.RectBivariateSpline(y1, x1, elev1)
    eta0vec = eta0.dat.data
    assert mesh_coords.shape[0] == eta0vec.shape[0]

    # Read and interpolate bathymetry data (courtesy of GEBCO):
    nc2 = NetCDFFile('resources/bathy_data/GEBCO_bathy.nc', mmap=False)
    lon2 = nc2.variables['lon'][:]
    lat2 = nc2.variables['lat'][:-1]
    x2, y2 = conversion.vectorlonlat2utm(lat2, lon2, force_zone_number=54)
    elev2 = nc2.variables['elevation'][:-1, :]
    interpolator_bath = si.RectBivariateSpline(y2, x2, elev2)
    b_vec = b.dat.data
    assert mesh_coords.shape[0] == b_vec.shape[0]

    # Interpolate data onto initial surface and bathymetry profiles:
    for i, p in enumerate(mesh_coords):
        eta0vec[i] = interpolator_surf(p[1], p[0])
        b_vec[i] = -interpolator_surf(p[1], p[0]) - interpolator_bath(
            p[1], p[0])

    # Assign initial surface and post-process the bathymetry to have a minimum depth of 30m:
    eta_.assign(eta0)
    le_.assign(0)
    b.assign(conditional(lt(30, b), b, 30))

    # Plot initial surface and bathymetry profiles:
    File('plots/tsunami_outputs/init_surf.pvd').write(eta0)
    File('plots/tsunami_outputs/tsunami_bathy.pvd').write(b)

    if split:
        return mesh, W, q_, u_, v_, eta_, lam_, lu_, lv_, b
    else:
        return mesh, W, q_, u_, eta_, lam_, lu_, le_, b
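
# A minimal, hypothetical usage sketch: it assumes a Firedrake session and the
# resources/ directory of meshes and data files referenced above.
mesh, W, q_, u_, eta_, lam_, lu_, le_, b = Tohoku_domain(res=3, split=False)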
Example No. 24
def createBstatsFile(ncfile, outFile=None):

    from scipy.interpolate import interp1d
    from xml.etree.ElementTree import Element, SubElement

    if outFile is None:
        outFile = ncfile.replace(".nc", ".bstats")

    jdate = getJulDatefromNc(ncfile)
    season = getAztecSeason(jdate)

    if season.find("LMT") > -1:

        nc = NetCDFFile(ncfile)
        elSignal = npy.mean(nc.variables['Data.AztecBackend.TelElAct'][:])
        elSignal = elSignal.flatten()
        meanEl = npy.degrees(npy.median(elSignal))
        nc.close()
        namePrefix = "Data.AztecBackend."
    else:
        meanEl = None
        namePrefix = ""

    calpath = os.path.join(os.getenv("AZTEC_MACANA_PATH"), "calibration/")
    bololistfile = os.path.join(calpath, "parameters_%s" % season,
                                "bololist.csv")
    tausave = os.path.join(calpath, "parameters_%s" % season,
                           "fit_parameters_bolodc2tau_%s.sav" % season)
    ressave = os.path.join(
        calpath, "parameters_%s" % season,
        "fit_parameters_bolodc2responsivity_%s.sav" % season)

    bololist = npy.loadtxt(bololistfile,
                           delimiter=",",
                           comments="#",
                           dtype={
                               'names': (
                                   'boloname',
                                   'flags',
                               ),
                               'formats': ('|S10', 'i4')
                           })

    nbolo = len(bololist)
    boloInfo = []
    jbolo = 1
    for ibolo in bololist:
        boloInfo.append({
            'boloName': namePrefix + ibolo[0],
            'valid': ibolo[1],
            'id': jbolo
        })
        jbolo += 1

    tauInfo = readsav(tausave)
    resInfo = readsav(ressave)

    for ibolo, i in zip(boloInfo, range(nbolo)):

        if "offset" in tauInfo.keys():
            ibolo['tau_offset'] = tauInfo['offset'][i]
            ibolo['tau_offset_err'] = tauInfo['offset_err'][i]
            ibolo['tau_slope'] = tauInfo['slope'][i]
            ibolo['tau_slope_err'] = tauInfo['slope_err'][i]
            ibolo['tau_quad'] = 0.0
            ibolo['tau_quad_err'] = 0.0
        elif "p0" in tauInfo.keys():
            ibolo['tau_offset'] = tauInfo['p0'][i]
            ibolo['tau_offset_err'] = tauInfo['p0_err'][i]
            ibolo['tau_slope'] = tauInfo['p1'][i]
            ibolo['tau_slope_err'] = tauInfo['p1_err'][i]
            ibolo['tau_quad'] = tauInfo['p2'][i]
            ibolo['tau_quad_err'] = tauInfo['p2_err'][i]
        else:
            raise Exception("Cannot get Tau2dc information properly.")
        ibolo['res_offset'] = resInfo['offset'][i]
        ibolo['res_offset_err'] = resInfo['offset_err'][i]
        ibolo['res_slope'] = resInfo['slope'][i]
        ibolo['res_slope_err'] = resInfo['slope_err'][i]

    # Now interpolate the gain values
    interpolateParams(jdate, boloInfo, meanEl=meanEl)
    boloInfo2Xml(outFile, boloInfo)