Example #1
    def test_triangulation_points_georeference(self):
        # Check that a points_georeference passed to store_triangulation
        # is written to the .sww file and read back unchanged.
        
        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0.,0.],[1.,1.], [0.,1.]])
        volumes = [[0,1,2]]
        elevation = [0,1,2]
        new_origin = None
        points_georeference = Geo_reference(56, 1, 554354)
        points_utm = points_georeference.change_points_geo_ref(points_utm)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])        
        sww.store_header(outfile, times, number_of_volumes,
                         number_of_points, description='fully sick testing',
                         verbose=self.verbose, sww_precision=netcdf_float)
        sww.store_triangulation(outfile, points_utm, volumes,
                                elevation,  new_origin=new_origin,
                                points_georeference=points_georeference,
                                verbose=self.verbose)       
        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        results_georef = Geo_reference()
        results_georef.read_NetCDF(fid)
        assert results_georef == points_georeference
        fid.close()

        assert num.allclose(num.array(list(zip(x, y))), points_utm)
        os.remove(filename)
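
The coordinate shift the test relies on can be illustrated in isolation. Assuming change_points_geo_ref reduces to a plain translation here (same zone, no datum change), the stored points_utm are just the absolute points minus the georeference's lower-left corner; a pure-numpy sketch of that assumption, not ANUGA's actual implementation:

import numpy as np

points_abs = np.array([[0., 0.], [1., 1.], [0., 1.]])
xll, yll = 1.0, 554354.0                        # corners of Geo_reference(56, 1, 554354)
points_rel = points_abs - np.array([xll, yll])  # what the test then stores as points_utm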
Example #2
    def test_triangulation_2_geo_refs(self):
        # Check that when both new_origin and points_georeference are given,
        # the file is georeferenced relative to new_origin.

        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0., 0.], [1., 1.], [0., 1.]])
        volumes = [[0, 1, 2]]
        elevation = [0, 1, 2]
        new_origin = Geo_reference(56, 1, 1)
        points_georeference = Geo_reference(56, 0, 0)
        points_utm = points_georeference.change_points_geo_ref(points_utm)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
        sww.store_header(outfile,
                         times,
                         number_of_volumes,
                         number_of_points,
                         description='fully sick testing',
                         verbose=self.verbose,
                         sww_precision=netcdf_float)
        sww.store_triangulation(outfile,
                                points_utm,
                                volumes,
                                elevation,
                                new_origin=new_origin,
                                points_georeference=points_georeference,
                                verbose=self.verbose)
        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        results_georef = Geo_reference()
        results_georef.read_NetCDF(fid)
        assert results_georef == new_origin
        fid.close()

        absolute = Geo_reference(56, 0, 0)
        assert num.allclose(
            num.array(
                absolute.change_points_geo_ref(list(zip(x, y)), new_origin)),
            points_utm)
        os.remove(filename)
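
The round trip in this test can be checked numerically. Under the same translation-only assumption, storing relative to new_origin and converting back with the absolute georeference is just an offset and its inverse; a self-contained sketch:

import numpy as np

pts = np.array([[0., 0.], [1., 1.], [0., 1.]])  # relative to Geo_reference(56, 0, 0)
new_origin_ll = np.array([1., 1.])              # corners of Geo_reference(56, 1, 1)

stored = pts - new_origin_ll        # the x, y written to the .sww file
recovered = stored + new_origin_ll  # change_points_geo_ref back to absolute
assert np.allclose(recovered, pts)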
Example #3
    def test_triangulationII(self):
        # Check that, with no georeference information supplied,
        # a default origin of (0, 0) is stored.

        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0., 0.], [1., 1.], [0., 1.]])
        volumes = [[0, 1, 2]]
        elevation = [0, 1, 2]
        new_origin = None
        #new_origin = Geo_reference(56, 0, 0)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
        sww.store_header(outfile,
                         times,
                         number_of_volumes,
                         number_of_points,
                         description='fully sick testing',
                         verbose=self.verbose,
                         sww_precision=netcdf_float)
        sww.store_triangulation(outfile,
                                points_utm,
                                volumes,
                                new_origin=new_origin,
                                verbose=self.verbose)
        sww.store_static_quantities(outfile, elevation=elevation)

        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        results_georef = Geo_reference()
        results_georef.read_NetCDF(fid)

        assert results_georef == Geo_reference(zone=None,
                                               xllcorner=0,
                                               yllcorner=0)
        fid.close()

        assert num.allclose(num.array(list(zip(x, y))), points_utm)
        os.remove(filename)
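
The read-back check rebuilds an (N, 2) point array from the separate x and y variables; a minimal standalone illustration of the list(zip(x, y)) idiom used above:

import numpy as np

x = np.array([0., 1., 0.])
y = np.array([0., 1., 1.])
points = np.array(list(zip(x, y)))  # shape (3, 2): one (x, y) row per vertex
assert points.shape == (3, 2)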
Example #4
    def test_triangulationII(self):
        # As above, but check the stored georeference against a default zone.

        DEFAULT_ZONE = 0 # Not documented anywhere what this should be.
        
        filename = tempfile.mktemp("_data_manager.sww")
        outfile = NetCDFFile(filename, netcdf_mode_w)
        points_utm = num.array([[0.,0.],[1.,1.], [0.,1.]])
        volumes = [[0,1,2]]
        elevation = [0,1,2]
        new_origin = None
        #new_origin = Geo_reference(56, 0, 0)
        times = [0, 10]
        number_of_volumes = len(volumes)
        number_of_points = len(points_utm)
        sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])        
        sww.store_header(outfile, times, number_of_volumes,
                         number_of_points, description='fully sick testing',
                         verbose=self.verbose, sww_precision=netcdf_float)
        sww.store_triangulation(outfile, points_utm, volumes,
                                new_origin=new_origin,
                                verbose=self.verbose)
        sww.store_static_quantities(outfile, elevation=elevation)                                
                                
        outfile.close()
        fid = NetCDFFile(filename)

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        results_georef = Geo_reference()
        results_georef.read_NetCDF(fid)
        assert results_georef == Geo_reference(DEFAULT_ZONE, 0, 0)
        fid.close()

        assert num.allclose(num.array(list(zip(x, y))), points_utm)
        os.remove(filename)
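
These tests create their scratch files with tempfile.mktemp, which is deprecated as race-prone. A sketch of the modern equivalent that still yields a plain path for NetCDFFile (behaviour otherwise unchanged):

import os
import tempfile

fd, filename = tempfile.mkstemp(suffix='_data_manager.sww')
os.close(fd)  # NetCDFFile reopens the path itself
# ... write and read the file as in the tests above ...
os.remove(filename)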
Example #5
def create_SWW_input(lista_puntos='lista_puntos.txt', time_series='time_series.txt',
                     sww_file_out='boundary_fixed.sww', num_params=3, read_dataframe=False):

    if read_dataframe:
        # NOTE: this branch only loads the dataframe; the code after the
        # if/else still expects the arrays produced by the text-file branch.
        df = pd.read_csv(time_series, sep='\t', index_col=0)
    else:
        tipo_datos = np.float32
        #-------------------------------------------------
        # Read files with point positions and time series
        header = 3
        fid = open(lista_puntos,'r')
        line = fid.readline()
        fields = line.split()
        nx = int(fields[1])
        line = fid.readline()
        fields = line.split()
        ny = int(fields[1])
        line = fid.readline()
        fields = line.split()
        dt = float(fields[1])
        fid.close()

        # Read the point list once: columns are x, y, elevation.
        datos = np.loadtxt(lista_puntos, dtype=tipo_datos, skiprows=header)
        points_utm = datos[:, 0:2]
        number_of_points = len(points_utm)
        elevation = datos[:, 2]

        # Read the number of time steps and make times relative to 0
        dummy = np.loadtxt(time_series, dtype = tipo_datos, skiprows = header, delimiter = '\t',
                           usecols = (1,2))
        number_of_times = len(dummy)
        times = np.arange(0,number_of_times*dt,dt)

        # Determine the number of valid data series
        fid = open(time_series,'r')
        line = fid.readline()
        line = fid.readline()
        fields = line.split()
        num_datos = (len(fields) - 1) // (3 * num_params)  # integer division
        fid.close()

        index = []
        for i in range(1, num_datos*3, 3):
            index.append(int(fields[i][:-1]))

        index_tupla = tuple(np.array(index)-1)

        stage_aux = np.loadtxt(time_series, dtype = tipo_datos, skiprows = header, delimiter = '\t',
                           usecols = tuple(range(1,num_datos+1)))

        xmom_aux = np.loadtxt(time_series, dtype = tipo_datos, skiprows = header, delimiter = '\t',
                          usecols = tuple(range(num_datos+1,2*num_datos+1)))

        ymom_aux = np.loadtxt(time_series, dtype = tipo_datos, skiprows = header, delimiter = '\t',
                          usecols = tuple(range(2*num_datos+1,3*num_datos+1)))

    # Allocate the full output arrays
    stage = np.zeros((number_of_times,number_of_points),dtype = tipo_datos)
    xmom = np.zeros((number_of_times,number_of_points),dtype = tipo_datos)
    ymom = np.zeros((number_of_times,number_of_points),dtype = tipo_datos)

    stage[:,index_tupla] = stage_aux
    xmom[:,index_tupla] = xmom_aux
    ymom[:,index_tupla] = ymom_aux


    # Manual triangulation of the structured grid
    vertices = np.zeros((nx, ny))
    i = 0
    for k in range(ny):
        for l in range(nx):
            vertices[l, k] = i
            i += 1

    volumes = []
    for l in range(nx-1):
        for k in range(ny-1):
            v1 = vertices[l, k+1]
            v2 = vertices[l, k]
            v3 = vertices[l+1, k+1]
            v4 = vertices[l+1, k]

            volumes.append([v1, v2, v3])
            volumes.append([v4, v3, v2])

    volumes = np.array(volumes, np.int32)

    ## Write the SWW file
    # outfile = NetCDFFile(sww_file_out, netcdf_mode_w)
    outfile = netcdf.Dataset(sww_file_out, netcdf_mode_w)
    new_origin = Geo_reference(-1, 0, 0)
    number_of_volumes = len(volumes)

    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])

    sww.store_header(outfile, times, number_of_volumes, number_of_points,
                     description='SWW creado en PRDW',
                     sww_precision = netcdf_float,
                     verbose=True)

    sww.store_triangulation(outfile, points_utm, volumes,
                            new_origin = new_origin,
                            verbose = True)       

    sww.store_static_quantities(outfile, elevation = elevation, verbose = True)

    for i in range(len(times)):
        sww.store_quantities(outfile, sww_precision = netcdf_float, slice_index = i,
                             verbose = True,
                             stage = stage[i,:],
                             xmomentum = xmom[i,:],
                             ymomentum = ymom[i,:])
        
    outfile.close()
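
A hedged usage sketch for the function above; the paths are the defaults from the signature and purely illustrative, and the input files must follow the layout the parser expects (three header lines giving nx, ny and dt, then tab-separated columns):

create_SWW_input(lista_puntos='lista_puntos.txt',
                 time_series='time_series.txt',
                 sww_file_out='boundary_fixed.sww',
                 num_params=3)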
Example #6
def _sww_merge_parallel_non_smooth(swwfiles, output,  verbose=False, delete_old=False):
    """
        Merge a list of sww files into a single file.

        Used to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        It is assumed that the separate sww files have been stored in non_smooth
        format.

        Note that some advanced information and custom quantities may not be
        exported.

        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print("MERGING SWW Files")


    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print('Reading file ', filename, ':')

        fid = NetCDFFile(filename, netcdf_mode_r)

        if first_file:

            times    = fid.variables['time'][:]
            n_steps = len(times)
            number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)

            out_s_quantities = {}
            out_d_quantities = {}

            out_s_c_quantities = {}
            out_d_c_quantities = {}



            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes     = int(fid.number_of_global_nodes)
            number_of_global_triangle_vertices = 3*number_of_global_triangles


            order      = fid.order
            xllcorner  = fid.xllcorner
            yllcorner  = fid.yllcorner
            zone       = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum      = fid.datum
            projection = fid.projection

            g_volumes = num.arange(number_of_global_triangles*3).reshape(-1,3)



            g_x = num.zeros((number_of_global_triangle_vertices,),num.float32)
            g_y = num.zeros((number_of_global_triangle_vertices,),num.float32)

            g_points = num.zeros((number_of_global_triangle_vertices,2),num.float32)

            #=======================================
            # Deal with the vertex based variables
            #=======================================
            quantities = set(['elevation', 'friction', 'stage', 'xmomentum',
                              'ymomentum', 'xvelocity', 'yvelocity', 'height'])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)

            # Static Quantities are stored as a 1D array
            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros((3*number_of_global_triangles,),num.float32)

            #=======================================
            # Deal with the centroid based variables
            #=======================================
            quantities = set(['elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
                              'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)
            
            static_c_quantities = []
            dynamic_c_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_c_quantities.append(quantity)
                else:
                    static_c_quantities.append(quantity)
                
            for quantity in static_c_quantities:
                out_s_c_quantities[quantity] = num.zeros((number_of_global_triangles,),num.float32)

            description = 'merged:' + getattr(fid, 'description')
            first_file = False


        # Read in from files and add to global arrays

        tri_l2g  = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]

        f_ids = num.argwhere(tri_full_flag==1).reshape(-1,)
        f_gids = tri_l2g[f_ids]

        g_vids = (3*f_gids.reshape(-1,1) + num.array([0,1,2])).reshape(-1,)
        l_vids = (3*f_ids.reshape(-1,1) + num.array([0,1,2])).reshape(-1,)


        l_x = num.array(fid.variables['x'][:],dtype=num.float32)
        l_y = num.array(fid.variables['y'][:],dtype=num.float32)

        
        g_x[g_vids] = l_x[l_vids]
        g_y[g_vids] = l_y[l_vids]

        g_points[g_vids,0] = g_x[g_vids]
        g_points[g_vids,1] = g_y[g_vids]


        ## Read in static quantities
        for quantity in static_quantities:
            q = fid.variables[quantity]
            out_s_quantities[quantity][g_vids] = \
                         num.array(q).astype(num.float32)[l_vids]
                         #num.array(q,dtype=num.float32)[l_vids]


        # Read in static c quantities
        for quantity in static_c_quantities:
            q = fid.variables[quantity]
            out_s_c_quantities[quantity][f_gids] = \
                         num.array(q).astype(num.float32)[f_ids]
                         #num.array(q,dtype=num.float32)[f_ids]

        
        fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print('Writing file ', output, ':')

    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities, static_c_quantities, dynamic_c_quantities)
    sww.store_header(fido, starttime,
                             number_of_global_triangles,
                             number_of_global_triangles*3,
                             description=description,
                             sww_precision=netcdf_float32)


    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido, g_points, g_volumes, points_georeference=geo_reference)

    fido.order      = order
    fido.xllcorner  = xllcorner
    fido.yllcorner  = yllcorner
    fido.zone       = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum      = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
    sww.store_static_quantities_centroid(fido, verbose=verbose, **out_s_c_quantities)
    
    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

    for q in (dynamic_quantities + dynamic_c_quantities):

        if verbose:
            print('  Writing quantity: ', q)
                    
        # Initialise q_values with zeros
        if q in dynamic_quantities:
            q_values = num.zeros((n_steps, 3*number_of_global_triangles), num.float32)
        elif q in dynamic_c_quantities:
            q_values = num.zeros((n_steps, number_of_global_triangles), num.float32)


        # Read the quantities one at a time, to reduce memory usage
        for filename in swwfiles:
            fid = NetCDFFile(filename, netcdf_mode_r)

            # Index information
            tri_l2g  = fid.variables['tri_l2g'][:]
            node_l2g = fid.variables['node_l2g'][:]
            tri_full_flag = fid.variables['tri_full_flag'][:]
            f_ids = num.argwhere(tri_full_flag==1).reshape(-1,)
            f_gids = tri_l2g[f_ids]
            g_vids = (3*f_gids.reshape(-1,1) + num.array([0,1,2])).reshape(-1,)
            l_vids = (3*f_ids.reshape(-1,1) + num.array([0,1,2])).reshape(-1,)
            for i in range(n_steps):
                # Different indices for vertex and centroid quantities
                if q in dynamic_quantities:
                    q_values[i][g_vids] = \
                    num.array(fid.variables[q][i], dtype=num.float32)[l_vids]
                elif q in dynamic_c_quantities:
                    q_values[i][f_gids] = \
                    num.array(fid.variables[q][i], dtype=num.float32)[f_ids]

            fid.close()

        # Write to the file
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

        if q in dynamic_quantities:
            # This updates the _range values
            q_range = fido.variables[q + Write_sww.RANGE][:]
            q_values_min = num.min(q_values)
            if q_values_min < q_range[0]:
                fido.variables[q + Write_sww.RANGE][0] = q_values_min
            q_values_max = num.max(q_values)
            if q_values_max > q_range[1]:
                fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()

    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print('Deleting file ', filename, ':')
            os.remove(filename)
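
The merge hinges on expanding triangle ids into vertex ids: in non-smooth storage each triangle owns three consecutive vertex slots, so the global vertex ids of triangle gid are 3*gid + {0, 1, 2}. A standalone check of that indexing (values hypothetical):

import numpy as np

f_gids = np.array([4, 7])  # global ids of two "full" triangles
g_vids = (3 * f_gids.reshape(-1, 1) + np.array([0, 1, 2])).reshape(-1,)
assert list(g_vids) == [12, 13, 14, 21, 22, 23]  # three vertex slots per triangle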
Example #7
def _sww_merge(swwfiles, output, verbose=False):
    """
        Merge a list of sww files into a single file.
        
        May be useful for parallel runs. Note that colinear points and
        edges are not merged: there will essentially be multiple meshes within
        the one sww file.
        
        The sww files to be merged must have exactly the same timesteps. Note
        that some advanced information and custom quantities may not be
        exported.
        
        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print("MERGING SWW Files")
        
    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']
    
    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print('Reading file ', filename, ':')
    
        fid = NetCDFFile(filename, netcdf_mode_r)

        tris = fid.variables['volumes'][:]
         
        if first_file:
            times = fid.variables['time'][:]
            x = []
            y = []
            out_tris = list(tris)  
            out_s_quantities = {}
            out_d_quantities = {}



            order      = fid.order
            xllcorner  = fid.xllcorner
            yllcorner  = fid.yllcorner
            zone       = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum      = fid.datum
            projection = fid.projection

            
            for quantity in static_quantities:
                out_s_quantities[quantity] = []

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = [ [] for _ in range(len(times))]
                 
            description = 'merged:' + getattr(fid, 'description')          
            first_file = False
        else:
            for tri in tris:
                # Advance new tri indices to point at newly appended points.
                verts = [vertex+tri_offset for vertex in tri]
                out_tris.append(verts)



        try: # works with netcdf4
            num_pts = len(fid.dimensions['number_of_points'])
        except: # works with scientific.io.netcdf
            num_pts = int(fid.dimensions['number_of_points'])

        tri_offset += num_pts
        
        if verbose:
            print('  new triangle index offset is ', tri_offset)
            
        x.extend(list(fid.variables['x'][:]))
        y.extend(list(fid.variables['y'][:]))
        
        # Grow the list of static quantities associated with the x,y points
        for quantity in static_quantities:
            out_s_quantities[quantity].extend(fid.variables[quantity][:])
            
        #Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            time_chunks = fid.variables[quantity][:]
            for i, time_chunk in enumerate(time_chunks):
                out_d_quantities[quantity][i].extend(time_chunk)

        # Close each input file as soon as it has been read.
        fid.close()

    # Mash all points into a single big list
    points = [[xx, yy] for xx, yy in zip(x, y)]

    points = num.asarray(points).astype(netcdf_float32)

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print('Writing file ', output, ':')
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido, times,
                             len(out_tris),
                             len(points),
                             description=description,
                             sww_precision=netcdf_float32)



    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()
    
    sww.store_triangulation(fido, points, out_tris, points_georeference=geo_reference)

    fido.order      = order
    fido.xllcorner  = xllcorner
    fido.yllcorner  = yllcorner
    fido.zone       = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum      = datum
    fido.projection = projection
       
    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write out all the dynamic quantities for each timestep
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i, time_slice in enumerate(q_values):
            fido.variables[q][i] = num.array(time_slice, netcdf_float32)
        
        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max        

                                        
    fido.close()
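
A hedged usage sketch; the file names are hypothetical, and since _sww_merge is a leading-underscore internal, the public entry point in a given ANUGA release may wrap it:

# Merge three sub-domain files produced by a 3-process run (names illustrative).
_sww_merge(['run_P3_0.sww', 'run_P3_1.sww', 'run_P3_2.sww'],
           'run_merged.sww',
           verbose=True)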
Example #8
def _sww_merge_parallel_smooth(swwfiles, output,  verbose=False, delete_old=False):
    """
        Merge a list of sww files into a single file.
        
        Used to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        It is assumed that the separate sww files have been stored in smooth
        format.

        Note that some advanced information and custom quantities may not be
        exported.
        
        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print("MERGING SWW Files")
        
    
    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print('Reading file ', filename, ':')
    
        fid = NetCDFFile(filename, netcdf_mode_r)
         
        if first_file:

            times    = fid.variables['time'][:]
            n_steps = len(times)
            #number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)
            
            out_s_quantities = {}
            out_d_quantities = {}

            out_s_c_quantities = {}
            out_d_c_quantities = {}



            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes     = int(fid.number_of_global_nodes)

            order      = fid.order
            xllcorner  = fid.xllcorner
            yllcorner  = fid.yllcorner
            zone       = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum      = fid.datum
            projection = fid.projection

            g_volumes = num.zeros((number_of_global_triangles,3),num.int32)
            g_x = num.zeros((number_of_global_nodes,),num.float32)
            g_y = num.zeros((number_of_global_nodes,),num.float32)

            g_points = num.zeros((number_of_global_nodes,2),num.float32)

            #=====================================
            # Deal with the vertex based variables
            #=====================================
            quantities = set(['elevation', 'friction', 'stage', 'xmomentum',
                              'ymomentum', 'xvelocity', 'yvelocity', 'height'])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)
            
            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)
                
            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros((number_of_global_nodes,),num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_nodes),num.float32)

            #=======================================
            # Deal with the centroid based variables
            #=======================================
            quantities = set(['elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
                              'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)
            
            static_c_quantities = []
            dynamic_c_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_c_quantities.append(quantity)
                else:
                    static_c_quantities.append(quantity)
                
            for quantity in static_c_quantities:
                out_s_c_quantities[quantity] = num.zeros((number_of_global_triangles,),num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_c_quantities:
                out_d_c_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_triangles),num.float32)
                 
            description = 'merged:' + getattr(fid, 'description')          
            first_file = False


        # Read in from files and add to global arrays

        tri_l2g  = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]
        volumes = num.array(fid.variables['volumes'][:],dtype=num.int32)
        l_volumes = num.zeros_like(volumes)
        l_old_volumes = num.zeros_like(volumes)


        # Change the local node ids to global id in the
        # volume array

        # FIXME SR: Surely we can knock up a numpy way of doing this
        #for i in range(len(l_volumes)):
        #    g_n0 = node_l2g[volumes[i,0]]
        #    g_n1 = node_l2g[volumes[i,1]]
        #    g_n2 = node_l2g[volumes[i,2]]
        #
        #    l_old_volumes[i,:] = [g_n0,g_n1,g_n2]

        g_n0 = node_l2g[volumes[:,0]].reshape(-1,1)
        g_n1 = node_l2g[volumes[:,1]].reshape(-1,1)
        g_n2 = node_l2g[volumes[:,2]].reshape(-1,1)

        #print g_n0.shape
        l_volumes = num.hstack((g_n0,g_n1,g_n2))

        #assert num.allclose(l_volumes, l_old_volumes)

        # Just pick out the full triangles
        ftri_ids = num.where(tri_full_flag>0)
        ftri_l2g = num.compress(tri_full_flag, tri_l2g)
        
        #f_ids = num.argwhere(tri_full_flag==1).reshape(-1,)
        #f_gids = tri_l2g[f_ids]

        #print l_volumes
        #print tri_full_flag
        #print tri_l2g
        #print ftri_l2g
        
        f_volumes0 = num.compress(tri_full_flag,volumes[:,0])
        f_volumes1 = num.compress(tri_full_flag,volumes[:,1])
        f_volumes2 = num.compress(tri_full_flag,volumes[:,2])
        
        g_volumes[ftri_l2g,0] = node_l2g[f_volumes0]
        g_volumes[ftri_l2g,1] = node_l2g[f_volumes1]
        g_volumes[ftri_l2g,2] = node_l2g[f_volumes2]

        #fg_volumes = num.compress(tri_full_flag,l_volumes,axis=0)
        #g_volumes[ftri_l2g] = fg_volumes




        #g_x[node_l2g] = fid.variables['x']
        #g_y[node_l2g] = fid.variables['y']

        g_points[node_l2g,0] = fid.variables['x'][:]
        g_points[node_l2g,1] = fid.variables['y'][:]
        

        #print number_of_timesteps


        # FIXME SR: It seems that some of the "ghost" node quantity values
        # are being storded. We should only store those nodes which are associated with
        # full triangles. So we need an index array of "full" nodes, ie those in
        # full triangles

        #use numpy.compress and numpy.unique to get "full nodes

        f_volumes = num.compress(tri_full_flag,volumes,axis=0)
        fl_nodes = num.unique(f_volumes)
        f_node_l2g = node_l2g[fl_nodes]

        #print len(node_l2g)
        #print len(fl_nodes)

        # Read in static quantities
        for quantity in static_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #             num.array(fid.variables[quantity],dtype=num.float32)
            q = fid.variables[quantity]
            #print quantity, q.shape
            out_s_quantities[quantity][f_node_l2g] = \
                         num.array(q[:],dtype=num.float32)[fl_nodes]

        
        #Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                #out_d_quantities[quantity][i][node_l2g] = \
                #           num.array(q[i],dtype=num.float32)
                out_d_quantities[quantity][i][f_node_l2g] = \
                           num.array(q[i],dtype=num.float32)[fl_nodes]


        # Read in static c quantities
        for quantity in static_c_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #             num.array(fid.variables[quantity],dtype=num.float32)
            q = fid.variables[quantity]
            out_s_c_quantities[quantity][ftri_l2g] = \
                         num.array(q).astype(num.float32)[ftri_ids]

        
        #Collate all dynamic c quantities according to their timestep
        for quantity in dynamic_c_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                out_d_c_quantities[quantity][i][ftri_l2g] = \
                           num.array(q[i]).astype(num.float32)[ftri_ids]


        fid.close()


    #---------------------------
    # Write out the SWW file
    #---------------------------


    if verbose:
        print('Writing file ', output, ':')
    fido = NetCDFFile(output, netcdf_mode_w)

    sww = Write_sww(static_quantities, dynamic_quantities, static_c_quantities, dynamic_c_quantities)
    sww.store_header(fido, starttime,
                             number_of_global_triangles,
                             number_of_global_nodes,
                             description=description,
                             sww_precision=netcdf_float32)



    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()
    
    sww.store_triangulation(fido, g_points, g_volumes, points_georeference=geo_reference)

    fido.order      = order
    fido.xllcorner  = xllcorner
    fido.yllcorner  = yllcorner
    fido.zone       = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum      = datum
    fido.projection = projection
       
    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
    sww.store_static_quantities_centroid(fido, verbose=verbose, **out_s_c_quantities)

    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

        
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        if verbose:
            print('  Writing quantity: ', q)
            
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]
        
        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max        

    for q in dynamic_c_quantities:
        if verbose:
            print('  Writing quantity: ', q)
            
        q_values = out_d_c_quantities[q]
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

                                        

    fido.close()
    
    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print('Deleting file ', filename, ':')
            os.remove(filename)
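
The smooth merge scatters each processor's local arrays into global ones through local-to-global index maps such as node_l2g. The core numpy idiom in isolation (values hypothetical):

import numpy as np

node_l2g = np.array([3, 0, 2])  # local node i -> global node node_l2g[i]
local_x = np.array([30., 0., 20.], dtype=np.float32)

g_x = np.zeros(4, dtype=np.float32)
g_x[node_l2g] = local_x         # scatter local values into the global array
assert list(g_x) == [0., 0., 20., 30.]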
Example #9
def ferret2sww(basename_in, name_out=None,
               verbose=False,
               minlat=None, maxlat=None,
               minlon=None, maxlon=None,
               mint=None, maxt=None, mean_stage=0,
               origin=None, zscale=1,
               fail_on_NaN=True,
               NaN_filler=0,
               elevation=None,
               inverted_bathymetry=True
               ): #FIXME: Bathymetry should be obtained from MOST somehow,
                  #alternatively from elsewhere or, as a last resort,
                  #specified here. The value of -100 will work for the
                  #Wollongong tsunami scenario but is very hacky.
    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    min's and max's: If omitted, the full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)

    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
    which means that longitude is the fastest
    varying dimension (row major order, so to speak)

    ferret2sww uses grid points as vertices in a triangular grid
    counting vertices from lower left corner upwards, then right
    """

    from anuga.file.netcdf import NetCDFFile

    _assert_lat_long(minlat, maxlat, minlon, maxlon)

    if name_out is not None and name_out[-4:] != '.sww':
        raise IOError('Output file %s should be of type .sww.' % name_out)

    # Get NetCDF data
    if verbose: log.critical('Reading files %s_*.nc' % basename_in)

    # Wave amplitude (cm)
    file_h = NetCDFFile(basename_in + '_ha.nc', netcdf_mode_r) 
    
    # Velocity (x) (cm/s)
    file_u = NetCDFFile(basename_in + '_ua.nc', netcdf_mode_r)
     
    # Velocity (y) (cm/s)
    file_v = NetCDFFile(basename_in + '_va.nc', netcdf_mode_r)
    
    # Elevation (z) (m)
    file_e = NetCDFFile(basename_in + '_e.nc', netcdf_mode_r)  

    if name_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = name_out

    # Get dimensions of file_h
    for dimension in file_h.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_h_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_h_latitude = dimension
        if dimension[:4] == 'TIME':
            dim_h_time = dimension

    times = file_h.variables[dim_h_time]
    latitudes = file_h.variables[dim_h_latitude]
    longitudes = file_h.variables[dim_h_longitude]

    # get dimensions for file_e
    for dimension in file_e.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_e_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_e_latitude = dimension

    # get dimensions for file_u
    for dimension in file_u.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_u_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_u_latitude = dimension

    # get dimensions for file_v
    for dimension in file_v.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_v_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_v_latitude = dimension

    # Precision used by MOST for lat/lon is 4 or 5 decimals
    e_lat = num.around(file_e.variables[dim_e_latitude][:], 5)
    e_lon = num.around(file_e.variables[dim_e_longitude][:], 5)

    # Check that files are compatible
    assert num.allclose(latitudes, file_u.variables[dim_u_latitude])
    assert num.allclose(latitudes, file_v.variables[dim_v_latitude])
    assert num.allclose(latitudes, e_lat)

    assert num.allclose(longitudes, file_u.variables[dim_u_longitude])
    assert num.allclose(longitudes, file_v.variables[dim_v_longitude])
    assert num.allclose(longitudes, e_lon)

    if mint is None:
        jmin = 0
        mint = times[0]
    else:
        jmin = num.searchsorted(times, mint)
        
        # numpy.int32 didn't work in slicing of amplitude below
        jmin = int(jmin)

    if maxt is None:
        jmax = len(times)
        maxt = times[-1]
    else:
        jmax = num.searchsorted(times, maxt)
        
        # numpy.int32 didn't work in slicing of amplitude below
        jmax = int(jmax)        

    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],
                                                  longitudes[:],
                                                  minlat, maxlat,
                                                  minlon, maxlon)


    times = times[jmin:jmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]

    if verbose: log.critical('cropping')

    zname = 'ELEVATION'

    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]

    # Get missing values
    nan_ha = file_h.variables['HA'].missing_value
    nan_ua = file_u.variables['UA'].missing_value
    nan_va = file_v.variables['VA'].missing_value
    if hasattr(file_e.variables[zname],'missing_value'):
        nan_e  = file_e.variables[zname].missing_value
    else:
        nan_e = None

    # Cleanup
    missing = (amplitudes == nan_ha)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = ('NetCDFFile %s contains missing values'
                   % (basename_in + '_ha.nc'))
            raise DataMissingValuesError(msg)
        else:
            amplitudes = amplitudes*(missing==0) + missing*NaN_filler

    missing = (uspeed == nan_ua)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = ('NetCDFFile %s contains missing values'
                   % (basename_in + '_ua.nc'))
            raise DataMissingValuesError(msg)
        else:
            uspeed = uspeed*(missing==0) + missing*NaN_filler

    missing = (vspeed == nan_va)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = ('NetCDFFile %s contains missing values'
                   % (basename_in + '_va.nc'))
            raise DataMissingValuesError(msg)
        else:
            vspeed = vspeed*(missing==0) + missing*NaN_filler

    missing = (elevations == nan_e)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = ('NetCDFFile %s contains missing values'
                   % (basename_in + '_e.nc'))
            raise DataMissingValuesError(msg)
        else:
            elevations = elevations*(missing==0) + missing*NaN_filler

    number_of_times = times.shape[0]
    number_of_latitudes = latitudes.shape[0]
    number_of_longitudes = longitudes.shape[0]

    assert amplitudes.shape[0] == number_of_times
    assert amplitudes.shape[1] == number_of_latitudes
    assert amplitudes.shape[2] == number_of_longitudes

    if verbose:
        _show_stats((latitudes, longitudes), times, amplitudes, \
                    (uspeed, vspeed), elevations)

    # print number_of_latitudes, number_of_longitudes
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    file_h.close()
    file_u.close()
    file_v.close()
    file_e.close()

    # NetCDF file definition
    outfile = NetCDFFile(swwname, netcdf_mode_w)

    description = 'Converted from Ferret files: %s, %s, %s, %s' \
                  % (basename_in + '_ha.nc',
                     basename_in + '_ua.nc',
                     basename_in + '_va.nc',
                     basename_in + '_e.nc')

    # Create new file
    starttime = times[0]

    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile, times, number_of_volumes,
                     number_of_points, description=description,
                     verbose=verbose, sww_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn
    x = num.zeros(number_of_points, float)  #Easting
    y = num.zeros(number_of_points, float)  #Northing

    if verbose:
        log.critical('Making triangular grid')

    # Check zone boundaries
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):       # Y direction
        for l, lon in enumerate(longitudes):  # X direction
            vertices[l, k] = i

            _, easting, northing = redfearn(lat, lon)

            #msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    # X direction
        for k in range(number_of_latitudes-1): # Y direction
            v1 = vertices[l, k+1]
            v2 = vertices[l, k]
            v3 = vertices[l+1, k+1]
            v4 = vertices[l+1, k]

            volumes.append([v1, v2, v3]) #Upper element
            volumes.append([v4, v3, v2]) #Lower element

    volumes = num.array(volumes, num.int32)

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    if elevation is not None:
        z = elevation
    else:
        if inverted_bathymetry:
            z = -1 * elevations
        else:
            z = elevations
    #FIXME: z should be obtained from MOST and passed in here

    #FIXME use the Write_sww instance(sww) to write this info
    z = num.resize(z, outfile.variables['elevation'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    #outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(num.int32) #For Opteron 64

    #Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose:
        log.critical('Converting quantities')

    n = len(times)
    for j in range(n):
        if verbose and j % ((n+10)//10) == 0:
            log.critical('  Doing %d of %d' % (j, n))

        i = 0
        for k in range(number_of_latitudes):      # Y direction
            for l in range(number_of_longitudes): # X direction
                w = zscale * amplitudes[j, k, l] / 100 + mean_stage
                stage[j, i] = w
                h = w - z[i]
                xmomentum[j, i] = uspeed[j, k, l]/100*h
                ymomentum[j, i] = vspeed[j, k, l]/100*h
                i += 1

    #outfile.close()

    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        time_info = times, starttime, mint, maxt
        _show_sww_stats(outfile, swwname, geo_ref, time_info)

    outfile.close()
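
A hedged usage sketch; it assumes the four companion files <basename>_ha.nc, <basename>_ua.nc, <basename>_va.nc and <basename>_e.nc exist, and the basename and window below are purely illustrative:

# Convert a MOST/Ferret dataset to .sww, cropped to a lat/lon window.
ferret2sww('wollongong',
           name_out='wollongong.sww',
           minlat=-34.6, maxlat=-34.3,
           minlon=150.6, maxlon=151.0,
           mean_stage=0.0,
           zscale=1,
           verbose=True)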
Example #10
def ferret2sww(
        basename_in,
        name_out=None,
        verbose=False,
        minlat=None,
        maxlat=None,
        minlon=None,
        maxlon=None,
        mint=None,
        maxt=None,
        mean_stage=0,
        origin=None,
        zscale=1,
        fail_on_NaN=True,
        NaN_filler=0,
        elevation=None,
        inverted_bathymetry=True):
    #FIXME: Bathymetry should be obtained from MOST somehow,
    #alternatively from elsewhere or, as a last resort, specified here.
    #The value of -100 will work for the Wollongong tsunami scenario
    #but is very hacky.
    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    min's and max's: If omitted, the full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)

    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
    which means that longitude is the fastest
    varying dimension (row major order, so to speak)

    ferret2sww uses grid points as vertices in a triangular grid
    counting vertices from lower left corner upwards, then right
    """

    from anuga.file.netcdf import NetCDFFile

    _assert_lat_long(minlat, maxlat, minlon, maxlon)

    if name_out is not None and name_out[-4:] != '.sww':
        raise IOError('Output file %s should be of type .sww.' % name_out)

    # Get NetCDF data
    if verbose: log.critical('Reading files %s_*.nc' % basename_in)

    # Wave amplitude (cm)
    file_h = NetCDFFile(basename_in + '_ha.nc', netcdf_mode_r)

    # Velocity (x) (cm/s)
    file_u = NetCDFFile(basename_in + '_ua.nc', netcdf_mode_r)

    # Velocity (y) (cm/s)
    file_v = NetCDFFile(basename_in + '_va.nc', netcdf_mode_r)

    # Elevation (z) (m)
    file_e = NetCDFFile(basename_in + '_e.nc', netcdf_mode_r)

    if name_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = name_out

    # Get dimensions of file_h
    for dimension in list(file_h.dimensions.keys()):
        if dimension[:3] == 'LON':
            dim_h_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_h_latitude = dimension
        if dimension[:4] == 'TIME':
            dim_h_time = dimension

    times = file_h.variables[dim_h_time]
    latitudes = file_h.variables[dim_h_latitude]
    longitudes = file_h.variables[dim_h_longitude]

    # get dimensions for file_e
    for dimension in list(file_e.dimensions.keys()):
        if dimension[:3] == 'LON':
            dim_e_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_e_latitude = dimension

    # get dimensions for file_u
    for dimension in list(file_u.dimensions.keys()):
        if dimension[:3] == 'LON':
            dim_u_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_u_latitude = dimension

    # get dimensions for file_v
    for dimension in list(file_v.dimensions.keys()):
        if dimension[:3] == 'LON':
            dim_v_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_v_latitude = dimension

    # Precision used by MOST for lat/lon is 4 or 5 decimals
    e_lat = num.around(file_e.variables[dim_e_latitude][:], 5)
    e_lon = num.around(file_e.variables[dim_e_longitude][:], 5)

    # Check that files are compatible
    assert num.allclose(latitudes, file_u.variables[dim_u_latitude])
    assert num.allclose(latitudes, file_v.variables[dim_v_latitude])
    assert num.allclose(latitudes, e_lat)

    assert num.allclose(longitudes, file_u.variables[dim_u_longitude])
    assert num.allclose(longitudes, file_v.variables[dim_v_longitude])
    assert num.allclose(longitudes, e_lon)

    if mint is None:
        jmin = 0
        mint = times[0]
    else:
        jmin = num.searchsorted(times, mint)

        # numpy.int32 didn't work in slicing of amplitude below
        jmin = int(jmin)

    if maxt is None:
        jmax = len(times)
        maxt = times[-1]
    else:
        jmax = num.searchsorted(times, maxt)

        # numpy.int32 didn't work in slicing of amplitude below
        jmax = int(jmax)

    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:], longitudes[:],
                                                 minlat, maxlat, minlon,
                                                 maxlon)

    times = times[jmin:jmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]

    if verbose: log.critical('cropping')

    zname = 'ELEVATION'

    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax]  #Lon
    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax]  #Lat
    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]

    # Get missing values
    nan_ha = file_h.variables['HA'].missing_value
    nan_ua = file_u.variables['UA'].missing_value
    nan_va = file_v.variables['VA'].missing_value
    if hasattr(file_e.variables[zname], 'missing_value'):
        nan_e = file_e.variables[zname].missing_value
    else:
        nan_e = None

    # Cleanup
    missing = (amplitudes == nan_ha)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = ('NetCDFFile %s contains missing values'
                   % (basename_in + '_ha.nc'))
            raise_(DataMissingValuesError, msg)
        else:
            amplitudes = amplitudes * (missing == 0) + missing * NaN_filler

    missing = (uspeed == nan_ua)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = ('NetCDFFile %s contains missing values'
                   % (basename_in + '_ua.nc'))
            raise_(DataMissingValuesError, msg)
        else:
            uspeed = uspeed * (missing == 0) + missing * NaN_filler

    missing = (vspeed == nan_va)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = ('NetCDFFile %s contains missing values'
                   % (basename_in + '_va.nc'))
            raise_(DataMissingValuesError, msg)
        else:
            vspeed = vspeed * (missing == 0) + missing * NaN_filler

    missing = (elevations == nan_e)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = ('NetCDFFile %s contains missing values'
                   % (basename_in + '_e.nc'))
            raise_(DataMissingValuesError, msg)
        else:
            elevations = elevations * (missing == 0) + missing * NaN_filler

    number_of_times = times.shape[0]
    number_of_latitudes = latitudes.shape[0]
    number_of_longitudes = longitudes.shape[0]

    assert amplitudes.shape[0] == number_of_times
    assert amplitudes.shape[1] == number_of_latitudes
    assert amplitudes.shape[2] == number_of_longitudes

    if verbose:
        _show_stats((latitudes, longitudes), times, amplitudes, \
                    (uspeed, vspeed), elevations)

    # print number_of_latitudes, number_of_longitudes
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes - 1) * (number_of_longitudes - 1) * 2

    file_h.close()
    file_u.close()
    file_v.close()
    file_e.close()

    # NetCDF file definition
    outfile = NetCDFFile(swwname, netcdf_mode_w)

    description = 'Converted from Ferret files: %s, %s, %s, %s' \
                  % (basename_in + '_ha.nc',
                     basename_in + '_ua.nc',
                     basename_in + '_va.nc',
                     basename_in + '_e.nc')

    # Create new file
    starttime = times[0]

    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile,
                     times,
                     number_of_volumes,
                     number_of_points,
                     description=description,
                     verbose=verbose,
                     sww_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn
    x = num.zeros(number_of_points, num.float)  #Easting
    y = num.zeros(number_of_points, num.float)  #Northing

    if verbose:
        log.critical('Making triangular grid')

    # Check zone boundaries
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):  # Y direction
        for l, lon in enumerate(longitudes):  # X direction
            vertices[l, k] = i

            _, easting, northing = redfearn(lat, lon)

            #msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes - 1):  # X direction
        for k in range(number_of_latitudes - 1):  # Y direction
            v1 = vertices[l, k + 1]
            v2 = vertices[l, k]
            v3 = vertices[l + 1, k + 1]
            v4 = vertices[l + 1, k]

            volumes.append([v1, v2, v3])  #Upper element
            volumes.append([v4, v3, v2])  #Lower element

    volumes = num.array(volumes, num.int)  #array default#
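    # Each grid cell with corners v1 (upper left), v2 (lower left),
    # v3 (upper right) and v4 (lower right) is split into triangles
    # (v1, v2, v3) and (v4, v3, v2), both ordered counter-clockwise.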

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    if elevation is not None:
        z = elevation
    else:
        if inverted_bathymetry:
            z = -1 * elevations
        else:
            z = elevations
    #FIXME: z should be obtained from MOST and passed in here

    #FIXME use the Write_sww instance(sww) to write this info
    z = num.resize(z, outfile.variables['elevation'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    #outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(
        num.int32)  #For Opteron 64

    #Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose:
        log.critical('Converting quantities')

    n = len(times)
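    # For each timestep: stage w is the scaled amplitude (divided by 100,
    # presumably cm to m) plus mean_stage; depth h = w - z; and the momenta
    # are velocity (also divided by 100) times depth.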
    for j in range(n):
        if verbose and j % (old_div((n + 10), 10)) == 0:
            log.critical('  Doing %d of %d' % (j, n))

        i = 0
        for k in range(number_of_latitudes):  # Y direction
            for l in range(number_of_longitudes):  # X direction
                w = old_div(zscale * amplitudes[j, k, l], 100) + mean_stage
                stage[j, i] = w
                h = w - z[i]
                xmomentum[j, i] = old_div(uspeed[j, k, l], 100) * h
                ymomentum[j, i] = old_div(vspeed[j, k, l], 100) * h
                i += 1

    #outfile.close()

    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        time_info = times, starttime, mint, maxt
        _show_sww_stats(outfile, swwname, geo_ref, time_info)

    outfile.close()
Example #11
0
def sts2sww_mesh(basename_in, basename_out=None, 
                 spatial_thinning=1, verbose=False):
    
    from anuga.mesh_engine.mesh_engine import NoTrianglesError
    from anuga.pmesh.mesh import Mesh
    if verbose:
        print "Starting sts2sww_mesh"
    
    mean_stage=0.
    zscale=1.

    if basename_in[-4:] == '.sts':
        stsname = basename_in
    else: 
        stsname = basename_in + '.sts'

    if verbose: print "Reading sts NetCDF file: %s" %stsname
    infile = NetCDFFile(stsname, netcdf_mode_r)
    cellsize = infile.cellsize
    ncols = infile.ncols
    nrows = infile.nrows
    no_data = infile.no_data
    refzone = infile.zone
    x_origin = infile.xllcorner
    y_origin = infile.yllcorner
    origin = num.array([x_origin, y_origin])
    x = infile.variables['x'][:]
    y = infile.variables['y'][:]
    times = infile.variables['time'][:]
    wind_speed_full = infile.variables['wind_speed'][:]
    wind_angle_full = infile.variables['wind_angle'][:]
    pressure_full   =   infile.variables['barometric_pressure'][:]
    infile.close()

    number_of_points = nrows*ncols
    points_utm = num.zeros((number_of_points,2),num.float)
    points_utm[:,0]=x+x_origin
    points_utm[:,1]=y+y_origin

    thinned_indices=[]
    for i in range(number_of_points):
        if (i//ncols == 0 or i//ncols == ncols-1 or (i//ncols) % spatial_thinning == 0):
            if ( i%(spatial_thinning)==0 or i%nrows==0 or i%nrows==nrows-1 ):  
                thinned_indices.append(i)
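    # Keep every spatial_thinning-th row and column plus the boundary
    # rows/columns.  Note that the row index i//ncols is compared against
    # ncols-1 and the column test uses i % nrows; this only behaves as
    # intended when nrows == ncols, so a square grid appears to be assumed.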

    #Spatial thinning
    points_utm=points_utm[thinned_indices]
    number_of_points = points_utm.shape[0]
    number_of_timesteps = wind_speed_full.shape[0]
    wind_speed = num.empty((number_of_timesteps,number_of_points),dtype=float)
    wind_angle = num.empty((number_of_timesteps,number_of_points),dtype=float)
    barometric_pressure   = num.empty((number_of_timesteps,number_of_points),dtype=float)
    if verbose:
        print "Total number of points: ", nrows*ncols
        print "Number of thinned points: ", number_of_points
    for i in range(number_of_timesteps):
        wind_speed[i] = wind_speed_full[i,thinned_indices]
        wind_angle[i] = wind_angle_full[i,thinned_indices]
        barometric_pressure[i]   = pressure_full[i,thinned_indices]

    #P.plot(points_utm[:,0],points_utm[:,1],'ro')
    #P.show()

    if verbose:
        print "Generating sww triangulation of gems data"

    mesh = Mesh()
    mesh.add_vertices(points_utm)
    mesh.auto_segment(smooth_indents=True, expand_pinch=True)
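    # Re-segment with a slightly enlarged alpha so the alpha-shape boundary
    # does not hug the points too tightly (same idea as in urs_ungridded2sww).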
    mesh.auto_segment(mesh.shape.get_alpha() * 1.1)
    try:
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
    except NoTrianglesError:
        # This is a bit of a hack, going in and changing the data structure.
        mesh.holes = []
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)

    mesh_dic = mesh.Mesh2MeshList()

    points_utm=ensure_numeric(points_utm)
    assert num.alltrue(ensure_numeric(mesh_dic['generatedpointlist'])
                       == ensure_numeric(points_utm))

    volumes = mesh_dic['generatedtrianglelist']

    # Write sww intro and grid stuff.
    if basename_out is not None and basename_out[-4:] == '.sww':
        swwname = basename_out
    else: 
        swwname = basename_in + '.sww'

    if verbose: print('Output to %s' % swwname)

    if verbose:
        print "Writing sww wind and pressure field file"
    outfile = NetCDFFile(swwname, netcdf_mode_w)
    sww = Write_sww([], ['wind_speed','wind_angle','barometric_pressure'])
    sww.store_header(outfile, times, len(volumes), len(points_utm),
                     verbose=verbose, sww_precision='d')
    outfile.mean_stage = mean_stage
    outfile.zscale = zscale
    sww.store_triangulation(outfile, points_utm, volumes,
                            refzone,  
                            new_origin=origin, #check effect of this line
                            verbose=verbose)

    if verbose: 
        print('Converting quantities')
    
    # Read in a time slice from the sts file and write it to the SWW file

    #print wind_angle[0,:10]
    for i in range(len(times)):
        sww.store_quantities(outfile,
                             slice_index=i,
                             verbose=verbose,
                             wind_speed=wind_speed[i,:],
                             wind_angle=wind_angle[i,:],
                             barometric_pressure=barometric_pressure[i,:],
                             sww_precision=num.float)

    if verbose: 
        sww.verbose_quantities(outfile)
    outfile.close()
Example #12
0
def urs_ungridded2sww(basename_in='o', basename_out=None, verbose=False,
                      mint=None, maxt=None,
                      mean_stage=0,
                      origin=None,
                      hole_points_UTM=None,
                      zscale=1):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and
    basefilename-n-mux containing relative height,
    x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is assumed to be ungridded.

    min's and max's: If omitted, the full extent is used.
    To include a value, min and max may equal it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.
    The mux point info is NOT relative to this origin.

    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak)

    In URS C binary the latitudes and longitudes are in ascending order.

    Note, interpolations of the resulting sww file will be different
    from results of urs2sww.  This is due to the interpolation
    function used, and the different grid structure between urs2sww
    and this function.

    Interpolating data that has an underlying gridded source can
    easily end up with different values, depending on the underlying
    mesh.

    consider these 4 points
    50  -50

    0     0

    The grid can be
     -
    |\|   A
     -
     or;
      -
     |/|  B
      -

    If a point is just below the center of the square, it will have a
    +ve value in grid A and a -ve value in grid B.
    """

    from anuga.mesh_engine.mesh_engine import NoTrianglesError
    from anuga.pmesh.mesh import Mesh

    files_in = [basename_in + WAVEHEIGHT_MUX_LABEL,
                basename_in + EAST_VELOCITY_LABEL,
                basename_in + NORTH_VELOCITY_LABEL]
    quantities = ['HA','UA','VA']

    # instantiate urs_points of the three mux files.
    mux = {}
    for quantity, file in zip(quantities, files_in):
        mux[quantity] = Read_urs(file)

    # Could check that the depth is the same. (hashing)

    # handle to a mux file to do depth stuff
    a_mux = mux[quantities[0]]

    # Convert to utm
    lat = a_mux.lonlatdep[:,1]
    long = a_mux.lonlatdep[:,0]
    points_utm, zone = convert_from_latlon_to_utm(latitudes=lat,
                                                  longitudes=long)

    elevation = a_mux.lonlatdep[:,2] * -1
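    # lonlatdep stores depth (positive down), so negating gives elevation.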

    # grid (create a mesh from the selected points)
    # This mesh has a problem.  Triangles are stretched over ungridded areas.
    # If these areas could be described as holes in pmesh, that would be great.

    # I can't just get the user to select a point in the middle.
    # A boundary is needed around these points.
    # But if the zone of points is obvious enough auto-segment should do
    # a good boundary.
    mesh = Mesh()
    mesh.add_vertices(points_utm)
    mesh.auto_segment(smooth_indents=True, expand_pinch=True)

    # To try and avoid alpha shape 'hugging' too much
    mesh.auto_segment(mesh.shape.get_alpha() * 1.1)
    if hole_points_UTM is not None:
        point = ensure_absolute(hole_points_UTM)
        mesh.add_hole(point[0], point[1])

    try:
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
    except NoTrianglesError:
        # This is a bit of a hack, going in and changing the data structure.
        mesh.holes = []
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)

    mesh_dic = mesh.Mesh2MeshList()

    #mesh.export_mesh_file(basename_in + '_168.tsh')
    #import sys; sys.exit()
    # These are the times of the mux file
    mux_times = []
    for i in range(a_mux.time_step_count):
        mux_times.append(a_mux.time_step * i)
    (mux_times_start_i, mux_times_fin_i) = read_time_from_mux(mux_times, mint, maxt)
    times = mux_times[mux_times_start_i:mux_times_fin_i]

    if mux_times_start_i == mux_times_fin_i:
        # Close the mux files
        for quantity, file in zip(quantities, files_in):
            mux[quantity].close()
        msg = "Due to mint and maxt there's no time info in the boundary SWW."
        raise Exception(msg)

    # If this raise is removed there are currently no downstream errors

    points_utm=ensure_numeric(points_utm)
    assert num.alltrue(ensure_numeric(mesh_dic['generatedpointlist'])
                       == ensure_numeric(points_utm))

    volumes = mesh_dic['generatedtrianglelist']

    # Write sww intro and grid stuff.
    if basename_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = basename_out + '.sww'

    if verbose: log.critical('Output to %s' % swwname)

    outfile = NetCDFFile(swwname, netcdf_mode_w)

    # For a different way of doing this, check out tsh2sww
    # work out sww_times and the index range this covers
    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile, times, len(volumes), len(points_utm),
                     verbose=verbose, sww_precision=netcdf_float)
    outfile.mean_stage = mean_stage
    outfile.zscale = zscale

    sww.store_triangulation(outfile, points_utm, volumes,
                            zone,  
                            new_origin=origin,
                            verbose=verbose)
    sww.store_static_quantities(outfile, elevation=elevation)

    if verbose: log.critical('Converting quantities')

    # Read in a time slice from each mux file and write it to the SWW file
    j = 0
    for ha, ua, va in zip(mux['HA'], mux['UA'], mux['VA']):
        if j >= mux_times_start_i and j < mux_times_fin_i:
            stage = zscale*ha + mean_stage
            h = stage - elevation
            xmomentum = ua*h
            ymomentum = -1 * va * h # -1 since in mux files south is positive.
            sww.store_quantities(outfile,
                                 slice_index=j-mux_times_start_i,
                                 verbose=verbose,
                                 stage=stage,
                                 xmomentum=xmomentum,
                                 ymomentum=ymomentum,
                                 sww_precision=num.float)
        j += 1

    if verbose: sww.verbose_quantities(outfile)

    outfile.close()
Example #13
0
def _sww_merge_parallel_non_smooth(swwfiles,
                                   output,
                                   verbose=False,
                                   delete_old=False):
    """
        Merge a list of sww files into a single file.

        Used to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        It is assumed that the separate sww files have been stored in non_smooth
        format.

        Note that some advanced information and custom quantities may not be
        exported.

        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print('Reading file ', filename, ':')

        fid = NetCDFFile(filename, netcdf_mode_r)

        if first_file:

            times = fid.variables['time'][:]
            n_steps = len(times)
            number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)

            out_s_quantities = {}
            out_d_quantities = {}

            out_s_c_quantities = {}
            out_d_c_quantities = {}

            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes = int(fid.number_of_global_nodes)
            number_of_global_triangle_vertices = 3 * number_of_global_triangles

            order = fid.order
            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner
            zone = fid.zone
            false_easting = fid.false_easting
            false_northing = fid.false_northing
            datum = fid.datum
            projection = fid.projection

            g_volumes = num.arange(number_of_global_triangles * 3).reshape(
                -1, 3)

            g_x = num.zeros((number_of_global_triangle_vertices, ),
                            num.float32)
            g_y = num.zeros((number_of_global_triangle_vertices, ),
                            num.float32)

            g_points = num.zeros((number_of_global_triangle_vertices, 2),
                                 num.float32)

            #=======================================
            # Deal with the vertex based variables
            #=======================================
            quantities = set([
                'elevation', 'friction', 'stage', 'xmomentum', 'ymomentum',
                'xvelocity', 'yvelocity', 'height'
            ])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)
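            # A variable whose leading dimension equals the number of
            # timesteps is classified as dynamic; anything else is static.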

            # Static Quantities are stored as a 1D array
            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros(
                    (3 * number_of_global_triangles, ), num.float32)

            #=======================================
            # Deal with the centroid based variables
            #=======================================
            quantities = set([
                'elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
                'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'
            ])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_c_quantities = []
            dynamic_c_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_c_quantities.append(quantity)
                else:
                    static_c_quantities.append(quantity)

            for quantity in static_c_quantities:
                out_s_c_quantities[quantity] = num.zeros(
                    (number_of_global_triangles, ), num.float32)

            description = 'merged:' + getattr(fid, 'description')
            first_file = False

        # Read in from files and add to global arrays

        tri_l2g = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]

        f_ids = num.argwhere(tri_full_flag == 1).reshape(-1, )
        f_gids = tri_l2g[f_ids]

        g_vids = (3 * f_gids.reshape(-1, 1) + num.array([0, 1, 2])).reshape(
            -1, )
        l_vids = (3 * f_ids.reshape(-1, 1) + num.array([0, 1, 2])).reshape(
            -1, )
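        # In non-smooth format each triangle stores its own three vertices,
        # so triangle id t owns vertex slots 3*t, 3*t+1 and 3*t+2; g_vids
        # and l_vids expand the full-triangle ids into those vertex slots.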

        l_x = num.array(fid.variables['x'][:], dtype=num.float32)
        l_y = num.array(fid.variables['y'][:], dtype=num.float32)

        g_x[g_vids] = l_x[l_vids]
        g_y[g_vids] = l_y[l_vids]

        g_points[g_vids, 0] = g_x[g_vids]
        g_points[g_vids, 1] = g_y[g_vids]

        ## Read in static quantities
        for quantity in static_quantities:
            q = fid.variables[quantity]
            out_s_quantities[quantity][g_vids] = \
                         num.array(q).astype(num.float32)[l_vids]
            #num.array(q,dtype=num.float32)[l_vids]

        # Read in static c quantities
        for quantity in static_c_quantities:
            q = fid.variables[quantity]
            out_s_c_quantities[quantity][f_gids] = \
                         num.array(q).astype(num.float32)[f_ids]
            #num.array(q,dtype=num.float32)[f_ids]

        fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print('Writing file ', output, ':')

    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities, static_c_quantities,
                    dynamic_c_quantities)
    sww.store_header(fido,
                     starttime,
                     number_of_global_triangles,
                     number_of_global_triangles * 3,
                     description=description,
                     sww_precision=netcdf_float32)

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido,
                            g_points,
                            g_volumes,
                            points_georeference=geo_reference)

    fido.order = order
    fido.xllcorner = xllcorner
    fido.yllcorner = yllcorner
    fido.zone = zone
    fido.false_easting = false_easting
    fido.false_northing = false_northing
    fido.datum = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
    sww.store_static_quantities_centroid(fido,
                                         verbose=verbose,
                                         **out_s_c_quantities)

    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

    for q in (dynamic_quantities + dynamic_c_quantities):

        # Initialise q_values with zeros
        if q in dynamic_quantities:
            q_values = num.zeros((n_steps, 3 * number_of_global_triangles),
                                 num.float32)
        elif q in dynamic_c_quantities:
            q_values = num.zeros((n_steps, number_of_global_triangles),
                                 num.float32)

        # Read the quantities one at a time, to reduce memory usage
        for filename in swwfiles:
            fid = NetCDFFile(filename, netcdf_mode_r)

            # Index information
            tri_l2g = fid.variables['tri_l2g'][:]
            node_l2g = fid.variables['node_l2g'][:]
            tri_full_flag = fid.variables['tri_full_flag'][:]
            f_ids = num.argwhere(tri_full_flag == 1).reshape(-1, )
            f_gids = tri_l2g[f_ids]
            g_vids = (3 * f_gids.reshape(-1, 1) +
                      num.array([0, 1, 2])).reshape(-1, )
            l_vids = (3 * f_ids.reshape(-1, 1) + num.array([0, 1, 2])).reshape(
                -1, )
            for i in range(n_steps):
                # Different indices for vertex and centroid quantities
                if q in dynamic_quantities:
                    q_values[i][g_vids] = \
                    num.array(fid.variables[q][i], dtype=num.float32)[l_vids]
                elif q in dynamic_c_quantities:
                    q_values[i][f_gids] = \
                    num.array(fid.variables[q][i], dtype=num.float32)[f_ids]

            fid.close()

        # Write to the file
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

        if q in dynamic_quantities:
            # This updates the _range values
            q_range = fido.variables[q + Write_sww.RANGE][:]
            q_values_min = num.min(q_values)
            if q_values_min < q_range[0]:
                fido.variables[q + Write_sww.RANGE][0] = q_values_min
            q_values_max = num.max(q_values)
            if q_values_max > q_range[1]:
                fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()

    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print('Deleting file ', filename, ':')
            os.remove(filename)
Example #14
0
def _sww_merge(swwfiles, output, verbose=False):
    """
        Merge a list of sww files into a single file.
        
        May be useful for parallel runs. Note that colinear points and
        edges are not merged: there will essentially be multiple meshes within
        the one sww file.
        
        The sww files to be merged must have exactly the same timesteps. Note
        that some advanced information and custom quantities may not be
        exported.
        
        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"

    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print('Reading file ', filename, ':')

        fid = NetCDFFile(filename, netcdf_mode_r)

        tris = fid.variables['volumes'][:]

        if first_file:
            times = fid.variables['time'][:]
            x = []
            y = []
            out_tris = list(tris)
            out_s_quantities = {}
            out_d_quantities = {}

            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            order = fid.order
            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner
            zone = fid.zone
            false_easting = fid.false_easting
            false_northing = fid.false_northing
            datum = fid.datum
            projection = fid.projection

            for quantity in static_quantities:
                out_s_quantities[quantity] = []

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = [[] for _ in range(len(times))]

            description = 'merged:' + getattr(fid, 'description')
            first_file = False
        else:
            for tri in tris:
                # Advance new tri indices to point at newly appended points.
                verts = [vertex + tri_offset for vertex in tri]
                out_tris.append(verts)

        try:  # works with netcdf4
            num_pts = len(fid.dimensions['number_of_points'])
        except:  # works with scientific.io.netcdf
            num_pts = int(fid.dimensions['number_of_points'])

        tri_offset += num_pts
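        # Vertex arrays are concatenated, so triangles from the next file
        # must have their vertex indices shifted by the points read so far.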

        if verbose:
            print('  new triangle index offset is ', tri_offset)

        x.extend(list(fid.variables['x'][:]))
        y.extend(list(fid.variables['y'][:]))

        # Grow the list of static quantities associated with the x,y points
        for quantity in static_quantities:
            out_s_quantities[quantity].extend(fid.variables[quantity][:])

        #Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            time_chunks = fid.variables[quantity][:]
            for i, time_chunk in enumerate(time_chunks):
                out_d_quantities[quantity][i].extend(time_chunk)

        # Finished reading this input file
        fid.close()

    # Mash all points into a single big list
    points = [[xx, yy] for xx, yy in zip(x, y)]

    points = num.asarray(points).astype(netcdf_float32)

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print('Writing file ', output, ':')
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido,
                     times,
                     len(out_tris),
                     len(points),
                     description=description,
                     sww_precision=netcdf_float32)

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido,
                            points,
                            out_tris,
                            points_georeference=geo_reference)

    fido.order = order
    fido.xllcorner = xllcorner
    fido.yllcorner = yllcorner
    fido.zone = zone
    fido.false_easting = false_easting
    fido.false_northing = false_northing
    fido.datum = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write out all the dynamic quantities for each timestep
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i, time_slice in enumerate(q_values):
            fido.variables[q][i] = num.array(time_slice, netcdf_float32)

        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()
Example #15
0
def _sww_merge_parallel_smooth(swwfiles,
                               output,
                               verbose=False,
                               delete_old=False):
    """
        Merge a list of sww files into a single file.
        
        Used to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        It is assumed that the separate sww files have been stored in smooth
        format.

        Note that some advanced information and custom quantities may not be
        exported.
        
        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print('Reading file ', filename, ':')

        fid = NetCDFFile(filename, netcdf_mode_r)

        if first_file:

            times = fid.variables['time'][:]
            n_steps = len(times)
            #number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)

            out_s_quantities = {}
            out_d_quantities = {}

            out_s_c_quantities = {}
            out_d_c_quantities = {}

            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes = int(fid.number_of_global_nodes)

            order = fid.order
            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner
            zone = fid.zone
            false_easting = fid.false_easting
            false_northing = fid.false_northing
            datum = fid.datum
            projection = fid.projection

            g_volumes = num.zeros((number_of_global_triangles, 3), num.int)
            g_x = num.zeros((number_of_global_nodes, ), num.float32)
            g_y = num.zeros((number_of_global_nodes, ), num.float32)

            g_points = num.zeros((number_of_global_nodes, 2), num.float32)

            #=====================================
            # Deal with the vertex based variables
            #=====================================
            quantities = set([
                'elevation', 'friction', 'stage', 'xmomentum', 'ymomentum',
                'xvelocity', 'yvelocity', 'height'
            ])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)

            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros(
                    (number_of_global_nodes, ), num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_nodes),num.float32)

            #=======================================
            # Deal with the centroid based variables
            #=======================================
            quantities = set([
                'elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
                'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'
            ])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_c_quantities = []
            dynamic_c_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_c_quantities.append(quantity)
                else:
                    static_c_quantities.append(quantity)

            for quantity in static_c_quantities:
                out_s_c_quantities[quantity] = num.zeros(
                    (number_of_global_triangles, ), num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_c_quantities:
                out_d_c_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_triangles),num.float32)

            description = 'merged:' + getattr(fid, 'description')
            first_file = False

        # Read in from files and add to global arrays

        tri_l2g = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]
        volumes = num.array(fid.variables['volumes'][:], dtype=num.int)
        l_volumes = num.zeros_like(volumes)
        l_old_volumes = num.zeros_like(volumes)

        # Change the local node ids to global id in the
        # volume array

        # FIXME SR: Surely we can knock up a numpy way of doing this
        #for i in range(len(l_volumes)):
        #    g_n0 = node_l2g[volumes[i,0]]
        #    g_n1 = node_l2g[volumes[i,1]]
        #    g_n2 = node_l2g[volumes[i,2]]
        #
        #    l_old_volumes[i,:] = [g_n0,g_n1,g_n2]

        g_n0 = node_l2g[volumes[:, 0]].reshape(-1, 1)
        g_n1 = node_l2g[volumes[:, 1]].reshape(-1, 1)
        g_n2 = node_l2g[volumes[:, 2]].reshape(-1, 1)

        #print g_n0.shape
        l_volumes = num.hstack((g_n0, g_n1, g_n2))

        #assert num.allclose(l_volumes, l_old_volumes)

        # Just pick out the full triangles
        ftri_ids = num.where(tri_full_flag > 0)
        ftri_l2g = num.compress(tri_full_flag, tri_l2g)

        #f_ids = num.argwhere(tri_full_flag==1).reshape(-1,)
        #f_gids = tri_l2g[f_ids]

        #print l_volumes
        #print tri_full_flag
        #print tri_l2g
        #print ftri_l2g

        f_volumes0 = num.compress(tri_full_flag, volumes[:, 0])
        f_volumes1 = num.compress(tri_full_flag, volumes[:, 1])
        f_volumes2 = num.compress(tri_full_flag, volumes[:, 2])

        g_volumes[ftri_l2g, 0] = node_l2g[f_volumes0]
        g_volumes[ftri_l2g, 1] = node_l2g[f_volumes1]
        g_volumes[ftri_l2g, 2] = node_l2g[f_volumes2]
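        # Store each full local triangle at its global triangle id, with its
        # local node ids mapped to global node ids via node_l2g.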

        #fg_volumes = num.compress(tri_full_flag,l_volumes,axis=0)
        #g_volumes[ftri_l2g] = fg_volumes

        #g_x[node_l2g] = fid.variables['x']
        #g_y[node_l2g] = fid.variables['y']

        g_points[node_l2g, 0] = fid.variables['x'][:]
        g_points[node_l2g, 1] = fid.variables['y'][:]

        #print number_of_timesteps

        # FIXME SR: It seems that some of the "ghost" node quantity values
        # are being stored. We should only store those nodes which are
        # associated with full triangles. So we need an index array of
        # "full" nodes, i.e. those in full triangles.

        # Use numpy.compress and numpy.unique to get "full" nodes

        f_volumes = num.compress(tri_full_flag, volumes, axis=0)
        fl_nodes = num.unique(f_volumes)
        f_node_l2g = node_l2g[fl_nodes]

        #print len(node_l2g)
        #print len(fl_nodes)

        # Read in static quantities
        for quantity in static_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #             num.array(fid.variables[quantity],dtype=num.float32)
            q = fid.variables[quantity]
            #print quantity, q.shape
            out_s_quantities[quantity][f_node_l2g] = \
                         num.array(q[:],dtype=num.float32)[fl_nodes]

        #Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                #out_d_quantities[quantity][i][node_l2g] = \
                #           num.array(q[i],dtype=num.float32)
                out_d_quantities[quantity][i][f_node_l2g] = \
                           num.array(q[i],dtype=num.float32)[fl_nodes]

        # Read in static c quantities
        for quantity in static_c_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #             num.array(fid.variables[quantity],dtype=num.float32)
            q = fid.variables[quantity]
            out_s_c_quantities[quantity][ftri_l2g] = \
                         num.array(q).astype(num.float32)[ftri_ids]

        #Collate all dynamic c quantities according to their timestep
        for quantity in dynamic_c_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                out_d_c_quantities[quantity][i][ftri_l2g] = \
                           num.array(q[i]).astype(num.float32)[ftri_ids]

        fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------
    #print g_points.shape

    #print number_of_global_triangles
    #print number_of_global_nodes

    if verbose:
        print('Writing file ', output, ':')
    fido = NetCDFFile(output, netcdf_mode_w)

    sww = Write_sww(static_quantities, dynamic_quantities, static_c_quantities,
                    dynamic_c_quantities)
    sww.store_header(fido,
                     starttime,
                     number_of_global_triangles,
                     number_of_global_nodes,
                     description=description,
                     sww_precision=netcdf_float32)

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido,
                            g_points,
                            g_volumes,
                            points_georeference=geo_reference)

    fido.order = order
    fido.xllcorner = xllcorner
    fido.yllcorner = yllcorner
    fido.zone = zone
    fido.false_easting = false_easting
    fido.false_northing = false_northing
    fido.datum = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
    sww.store_static_quantities_centroid(fido,
                                         verbose=verbose,
                                         **out_s_c_quantities)

    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max

    for q in dynamic_c_quantities:
        q_values = out_d_c_quantities[q]
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

    #print out_s_quantities
    #print out_d_quantities

    #print g_x
    #print g_y

    #print g_volumes

    fido.close()

    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print('Deleting file ', filename, ':')
            os.remove(filename)
Example #16
0
def urs_ungridded2sww(basename_in='o',
                      basename_out=None,
                      verbose=False,
                      mint=None,
                      maxt=None,
                      mean_stage=0,
                      origin=None,
                      hole_points_UTM=None,
                      zscale=1):
    """
    Convert URS C binary format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename-z-mux, basefilename-e-mux and
    basefilename-n-mux containing relative height,
    x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum. The latitude and longitude
    information is assumed to be ungridded.

    min's and max's: If omitted, the full extent is used.
    To include a value, min and max may equal it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)
    It will be the origin of the sww file. This shouldn't be used,
    since all of anuga should be able to handle an arbitrary origin.
    The mux point info is NOT relative to this origin.

    URS C binary format has data organised as TIME, LONGITUDE, LATITUDE
    which means that latitude is the fastest
    varying dimension (row major order, so to speak)

    In URS C binary the latitudes and longitudes are in ascending order.

    Note, interpolations of the resulting sww file will be different
    from results of urs2sww.  This is due to the interpolation
    function used, and the different grid structure between urs2sww
    and this function.

    Interpolating data that has an underlying gridded source can
    easily end up with different values, depending on the underlying
    mesh.

    consider these 4 points
    50  -50

    0     0

    The grid can be
     -
    |\|   A
     -
     or;
      -
     |/|  B
      -

    If a point is just below the center of the square, it will have a
    +ve value in grid A and a -ve value in grid B.
    """

    from anuga.mesh_engine.mesh_engine import NoTrianglesError
    from anuga.pmesh.mesh import Mesh

    files_in = [
        basename_in + WAVEHEIGHT_MUX_LABEL, basename_in + EAST_VELOCITY_LABEL,
        basename_in + NORTH_VELOCITY_LABEL
    ]
    quantities = ['HA', 'UA', 'VA']

    # instantiate urs_points of the three mux files.
    mux = {}
    for quantity, file in zip(quantities, files_in):
        mux[quantity] = Read_urs(file)

    # Could check that the depth is the same. (hashing)

    # handle to a mux file to do depth stuff
    a_mux = mux[quantities[0]]

    # Convert to utm
    lat = a_mux.lonlatdep[:, 1]
    long = a_mux.lonlatdep[:, 0]
    points_utm, zone = convert_from_latlon_to_utm(latitudes=lat,
                                                  longitudes=long)

    elevation = a_mux.lonlatdep[:, 2] * -1

    # grid (create a mesh from the selected points)
    # This mesh has a problem.  Triangles are stretched over ungridded areas.
    # If these areas could be described as holes in pmesh, that would be great.

    # I can't just get the user to select a point in the middle.
    # A boundary is needed around these points.
    # But if the zone of points is obvious enough auto-segment should do
    # a good boundary.
    mesh = Mesh()
    mesh.add_vertices(points_utm)
    mesh.auto_segment(smooth_indents=True, expand_pinch=True)

    # To try and avoid alpha shape 'hugging' too much
    mesh.auto_segment(mesh.shape.get_alpha() * 1.1)
    if hole_points_UTM is not None:
        point = ensure_absolute(hole_points_UTM)
        mesh.add_hole(point[0], point[1])

    try:
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)
    except NoTrianglesError:
        # This is a bit of a hack, going in and changing the data structure.
        mesh.holes = []
        mesh.generate_mesh(minimum_triangle_angle=0.0, verbose=False)

    mesh_dic = mesh.Mesh2MeshList()

    #mesh.export_mesh_file(basename_in + '_168.tsh')
    #import sys; sys.exit()
    # These are the times of the mux file
    mux_times = []
    for i in range(a_mux.time_step_count):
        mux_times.append(a_mux.time_step * i)
    (mux_times_start_i,
     mux_times_fin_i) = read_time_from_mux(mux_times, mint, maxt)
    times = mux_times[mux_times_start_i:mux_times_fin_i]

    if mux_times_start_i == mux_times_fin_i:
        # Close the mux files
        for quantity, file in zip(quantities, files_in):
            mux[quantity].close()
        msg = "Due to mint and maxt there's no time info in the boundary SWW."
        raise Exception(msg)

    # If this raise is removed there are currently no downstream errors

    points_utm = ensure_numeric(points_utm)
    assert num.alltrue(
        ensure_numeric(mesh_dic['generatedpointlist']) == ensure_numeric(
            points_utm))

    volumes = mesh_dic['generatedtrianglelist']

    # Write sww intro and grid stuff.
    if basename_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = basename_out + '.sww'

    if verbose: log.critical('Output to %s' % swwname)

    outfile = NetCDFFile(swwname, netcdf_mode_w)

    # For a different way of doing this, check out tsh2sww
    # work out sww_times and the index range this covers
    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile,
                     times,
                     len(volumes),
                     len(points_utm),
                     verbose=verbose,
                     sww_precision=netcdf_float)
    outfile.mean_stage = mean_stage
    outfile.zscale = zscale

    sww.store_triangulation(outfile,
                            points_utm,
                            volumes,
                            zone,
                            new_origin=origin,
                            verbose=verbose)
    sww.store_static_quantities(outfile, elevation=elevation)

    if verbose: log.critical('Converting quantities')

    # Read in a time slice from each mux file and write it to the SWW file
    j = 0
    for ha, ua, va in zip(mux['HA'], mux['UA'], mux['VA']):
        if j >= mux_times_start_i and j < mux_times_fin_i:
            stage = zscale * ha + mean_stage
            h = stage - elevation
            xmomentum = ua * h
            ymomentum = -1 * va * h  # -1 since in mux files south is positive.
            sww.store_quantities(outfile,
                                 slice_index=j - mux_times_start_i,
                                 verbose=verbose,
                                 stage=stage,
                                 xmomentum=xmomentum,
                                 ymomentum=ymomentum,
                                 sww_precision=num.float)
        j += 1

    if verbose: sww.verbose_quantities(outfile)

    outfile.close()
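A minimal usage sketch for the converters above. The basenames, file names
and process count are hypothetical; urs_ungridded2sww expects the mux files
(basename-z-mux, basename-e-mux and basename-n-mux) to exist, and
_sww_merge_parallel_smooth expects the per-process sww files written by a
parallel run.

# Hypothetical file names, for illustration only.
urs_ungridded2sww(basename_in='event', basename_out='event_boundary',
                  mint=0.0, maxt=3600.0, mean_stage=0.0, zscale=1,
                  verbose=True)

# Merge the sww files written by a 4-process parallel run into one file.
swwfiles = ['run_P4_%d.sww' % p for p in range(4)]
_sww_merge_parallel_smooth(swwfiles, 'run.sww', verbose=True,
                           delete_old=False)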