Example no. 1
def _sww_merge_parallel_non_smooth(swwfiles, output,  verbose=False, delete_old=False):
    """
        Merge a list of sww files into a single file.

        Used to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        It is assumed that the separate sww files have been stored in non_smooth
        format.

        Note that some advanced information and custom quantities may not be
        exported.

        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"


    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'

        fid = NetCDFFile(filename, netcdf_mode_r)

        if first_file:

            times    = fid.variables['time'][:]
            n_steps = len(times)
            number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)

            out_s_quantities = {}
            out_d_quantities = {}

            out_s_c_quantities = {}
            out_d_c_quantities = {}


            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes     = int(fid.number_of_global_nodes)
            number_of_global_triangle_vertices = 3*number_of_global_triangles


            order      = fid.order
            xllcorner  = fid.xllcorner
            yllcorner  = fid.yllcorner
            zone       = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum      = fid.datum
            projection = fid.projection

            g_volumes = num.arange(number_of_global_triangles*3).reshape(-1,3)



            g_x = num.zeros((number_of_global_triangle_vertices,),num.float32)
            g_y = num.zeros((number_of_global_triangle_vertices,),num.float32)

            g_points = num.zeros((number_of_global_triangle_vertices,2),num.float32)

            #=======================================
            # Deal with the vertex based variables
            #=======================================
            quantities = set(['elevation', 'friction', 'stage', 'xmomentum',
                              'ymomentum', 'xvelocity', 'yvelocity', 'height'])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)

            # Static Quantities are stored as a 1D array
            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros((3*number_of_global_triangles,),num.float32)

            #=======================================
            # Deal with the centroid based variables
            #=======================================
            quantities = set(['elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
                              'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)
            
            static_c_quantities = []
            dynamic_c_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_c_quantities.append(quantity)
                else:
                    static_c_quantities.append(quantity)
                
            for quantity in static_c_quantities:
                out_s_c_quantities[quantity] = num.zeros((number_of_global_triangles,),num.float32)

            description = 'merged:' + getattr(fid, 'description')
            first_file = False


        # Read in from files and add to global arrays

        tri_l2g  = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]

        f_ids = num.argwhere(tri_full_flag==1).reshape(-1,)
        f_gids = tri_l2g[f_ids]

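        # Expand triangle ids to vertex ids: in the non-smooth layout each
        # triangle k stores its own copies of vertices 3*k, 3*k+1 and 3*k+2.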
        g_vids = (3*f_gids.reshape(-1,1) + num.array([0,1,2])).reshape(-1,)
        l_vids = (3*f_ids.reshape(-1,1) + num.array([0,1,2])).reshape(-1,)


        l_x = num.array(fid.variables['x'][:],dtype=num.float32)
        l_y = num.array(fid.variables['y'][:],dtype=num.float32)

        
        g_x[g_vids] = l_x[l_vids]
        g_y[g_vids] = l_y[l_vids]

        g_points[g_vids,0] = g_x[g_vids]
        g_points[g_vids,1] = g_y[g_vids]


        ## Read in static quantities
        for quantity in static_quantities:
            q = fid.variables[quantity]
            out_s_quantities[quantity][g_vids] = \
                         num.array(q).astype(num.float32)[l_vids]
                         #num.array(q,dtype=num.float32)[l_vids]


        # Read in static c quantities
        for quantity in static_c_quantities:
            q = fid.variables[quantity]
            out_s_c_quantities[quantity][f_gids] = \
                         num.array(q).astype(num.float32)[f_ids]
                         #num.array(q,dtype=num.float32)[f_ids]

        
        fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print 'Writing file ', output, ':'

    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities, static_c_quantities, dynamic_c_quantities)
    sww.store_header(fido, starttime,
                             number_of_global_triangles,
                             number_of_global_triangles*3,
                             description=description,
                             sww_precision=netcdf_float32)


    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido, g_points, g_volumes, points_georeference=geo_reference)

    fido.order      = order
    fido.xllcorner  = xllcorner
    fido.yllcorner  = yllcorner
    fido.zone       = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum      = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
    sww.store_static_quantities_centroid(fido, verbose=verbose, **out_s_c_quantities)
    
    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

    for q in (dynamic_quantities + dynamic_c_quantities):

        if verbose:
            print '  Writing quantity: ',q
                    
        # Initialise q_values with zeros
        if q in dynamic_quantities:
            q_values = num.zeros((n_steps, 3*number_of_global_triangles), num.float32)
        elif q in dynamic_c_quantities:
            q_values = num.zeros((n_steps, number_of_global_triangles), num.float32)


        # Read the quantities one at a time, to reduce memory usage
        for filename in swwfiles:
            fid = NetCDFFile(filename, netcdf_mode_r)

            # Index information
            tri_l2g  = fid.variables['tri_l2g'][:]
            node_l2g = fid.variables['node_l2g'][:]
            tri_full_flag = fid.variables['tri_full_flag'][:]
            f_ids = num.argwhere(tri_full_flag==1).reshape(-1,)
            f_gids = tri_l2g[f_ids]
            g_vids = (3*f_gids.reshape(-1,1) + num.array([0,1,2])).reshape(-1,)
            l_vids = (3*f_ids.reshape(-1,1) + num.array([0,1,2])).reshape(-1,)
            for i in range(n_steps):
                # Different indices for vertex and centroid quantities
                if q in dynamic_quantities:
                    q_values[i][g_vids] = \
                    num.array(fid.variables[q][i], dtype=num.float32)[l_vids]
                elif q in dynamic_c_quantities:
                    q_values[i][f_gids] = \
                    num.array(fid.variables[q][i], dtype=num.float32)[f_ids]

            fid.close()

        # Write to the file
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

        if q in dynamic_quantities:
            # This updates the _range values
            q_range = fido.variables[q + Write_sww.RANGE][:]
            q_values_min = num.min(q_values)
            if q_values_min < q_range[0]:
                fido.variables[q + Write_sww.RANGE][0] = q_values_min
            q_values_max = num.max(q_values)
            if q_values_max > q_range[1]:
                fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()

    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print 'Deleting file ', filename, ':'
            os.remove(filename)
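
A minimal usage sketch for the merge above, with hypothetical filenames (an actual parallel ANUGA run produces the per-process sww files itself; `num`, NetCDFFile and the netcdf constants are assumed imported at module level, as throughout these examples):

# Hypothetical per-process output files from an 8-way parallel run.
swwfiles = ['run_P8_%d.sww' % p for p in range(8)]
_sww_merge_parallel_non_smooth(swwfiles, 'run_merged.sww',
                               verbose=True, delete_old=False)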
Example no. 2
def dem2dem(name_in, stencil, cellsize_new, name_out=None,
                 verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
    """

    import os
    from anuga.file.netcdf import NetCDFFile

    if name_in[-4:] != '.dem':
        raise IOError('Input file %s should be of type .dem.' % name_in)

    if name_out != None and name_out[-4:] != '.dem':
        raise IOError('Output file %s should be of type .dem.' % name_out)

    #Open existing netcdf file to read
    infile = NetCDFFile(name_in, netcdf_mode_r)

    if verbose: log.critical('Reading DEM from %s' % name_in)

    # Read metadata (convert from numpy.int32 to int where appropriate)
    ncols = int(infile.ncols)
    nrows = int(infile.nrows)
    xllcorner = infile.xllcorner
    yllcorner = infile.yllcorner
    cellsize = int(infile.cellsize)
    NODATA_value = int(infile.NODATA_value)
    zone = int(infile.zone)
    false_easting = infile.false_easting
    false_northing = infile.false_northing
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if name_out == None:
        outname = name_in[:-4] + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = name_out

    if verbose: log.critical('Write decimated NetCDF file to %s' % outname)

    #Determine some dimensions for decimated grid
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #print type(ncols_new), ncols_new
    
    #Open netcdf file for output
    outfile = NetCDFFile(outname, netcdf_mode_w)

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable ' \
                          'storage of spatial point data'

    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    #print nrows_new, ncols_new, nrows_new*ncols_new
    #print type(nrows_new), type(ncols_new), type(nrows_new*ncols_new)
    outfile.createDimension('number_of_points', nrows_new*ncols_new)

    # variable definition
    outfile.createVariable('elevation', netcdf_float, ('number_of_points',))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = num.reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: log.critical('Processing row %d of %d' % (i, nrows_new))

        lower_index = global_index
        telev = num.zeros(ncols_new, num.float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow+nrows_stencil,
                                  tcol:tcol+ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if num.sum(num.sum(num.equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = num.sum(num.sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, \
           'index not equal to number of points'

    infile.close()
    outfile.close()
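
A minimal usage sketch, mirroring test_decimate_dem (Example no. 6 below): a normalised 3x3 stencil gives a plain average of each window, and cellsize_new should be an integer multiple of cellsize so that cellsize_ratio is exact. Filenames are illustrative:

# Average 3x3 windows of a 25 m DEM onto a 100 m grid.
stencil = num.ones((3, 3), num.float) / 9.0
dem2dem('topo_25m.dem', stencil=stencil, cellsize_new=100, verbose=True)
# Writes topo_25m_100.dem, since name_out was not given.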
Example no. 3
def _sww_merge_parallel_smooth(swwfiles, output,  verbose=False, delete_old=False):
    """
        Merge a list of sww files into a single file.
        
        Used to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        It is assumed that the separate sww files have been stored in smooth
        format.

        Note that some advanced information and custom quantities may not be
        exported.
        
        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"
        
    
    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'    
    
        fid = NetCDFFile(filename, netcdf_mode_r)
         
        if first_file:

            times    = fid.variables['time'][:]
            n_steps = len(times)
            #number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)
            
            out_s_quantities = {}
            out_d_quantities = {}

            out_s_c_quantities = {}
            out_d_c_quantities = {}


            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes     = int(fid.number_of_global_nodes)

            order      = fid.order
            xllcorner  = fid.xllcorner
            yllcorner  = fid.yllcorner
            zone       = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum      = fid.datum
            projection = fid.projection

            g_volumes = num.zeros((number_of_global_triangles,3),num.int)
            g_x = num.zeros((number_of_global_nodes,),num.float32)
            g_y = num.zeros((number_of_global_nodes,),num.float32)

            g_points = num.zeros((number_of_global_nodes,2),num.float32)

            #=====================================
            # Deal with the vertex based variables
            #=====================================
            quantities = set(['elevation', 'friction', 'stage', 'xmomentum',
                              'ymomentum', 'xvelocity', 'yvelocity', 'height'])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)
            
            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)
                
            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros((number_of_global_nodes,),num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_nodes),num.float32)

            #=======================================
            # Deal with the centroid based variables
            #=======================================
            quantities = set(['elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
                              'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)
            
            static_c_quantities = []
            dynamic_c_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_c_quantities.append(quantity)
                else:
                    static_c_quantities.append(quantity)
                
            for quantity in static_c_quantities:
                out_s_c_quantities[quantity] = num.zeros((number_of_global_triangles,),num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_c_quantities:
                out_d_c_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_triangles),num.float32)
                 
            description = 'merged:' + getattr(fid, 'description')          
            first_file = False


        # Read in from files and add to global arrays

        tri_l2g  = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]
        volumes = num.array(fid.variables['volumes'][:],dtype=num.int)
        l_volumes = num.zeros_like(volumes)
        l_old_volumes = num.zeros_like(volumes)


        # Change the local node ids to global id in the
        # volume array

        # FIXME SR: Surely we can knock up a numpy way of doing this
        #for i in range(len(l_volumes)):
        #    g_n0 = node_l2g[volumes[i,0]]
        #    g_n1 = node_l2g[volumes[i,1]]
        #    g_n2 = node_l2g[volumes[i,2]]
        #
        #    l_old_volumes[i,:] = [g_n0,g_n1,g_n2]

        g_n0 = node_l2g[volumes[:,0]].reshape(-1,1)
        g_n1 = node_l2g[volumes[:,1]].reshape(-1,1)
        g_n2 = node_l2g[volumes[:,2]].reshape(-1,1)

        #print g_n0.shape
        l_volumes = num.hstack((g_n0,g_n1,g_n2))

        #assert num.allclose(l_volumes, l_old_volumes)

        # Just pick out the full triangles
        ftri_ids = num.where(tri_full_flag>0)
        ftri_l2g = num.compress(tri_full_flag, tri_l2g)
        
        #f_ids = num.argwhere(tri_full_flag==1).reshape(-1,)
        #f_gids = tri_l2g[f_ids]

        #print l_volumes
        #print tri_full_flag
        #print tri_l2g
        #print ftri_l2g
        
        f_volumes0 = num.compress(tri_full_flag,volumes[:,0])
        f_volumes1 = num.compress(tri_full_flag,volumes[:,1])
        f_volumes2 = num.compress(tri_full_flag,volumes[:,2])
        
        g_volumes[ftri_l2g,0] = node_l2g[f_volumes0]
        g_volumes[ftri_l2g,1] = node_l2g[f_volumes1]
        g_volumes[ftri_l2g,2] = node_l2g[f_volumes2]

        #fg_volumes = num.compress(tri_full_flag,l_volumes,axis=0)
        #g_volumes[ftri_l2g] = fg_volumes




        #g_x[node_l2g] = fid.variables['x']
        #g_y[node_l2g] = fid.variables['y']

        g_points[node_l2g,0] = fid.variables['x'][:]
        g_points[node_l2g,1] = fid.variables['y'][:]
        

        #print number_of_timesteps


        # FIXME SR: It seems that some of the "ghost" node quantity values
        # are being stored. We should only store those nodes which are
        # associated with full triangles. So we need an index array of "full"
        # nodes, ie those in full triangles.

        # Use numpy.compress and numpy.unique to get the "full" nodes.

        f_volumes = num.compress(tri_full_flag,volumes,axis=0)
        fl_nodes = num.unique(f_volumes)
        f_node_l2g = node_l2g[fl_nodes]

        #print len(node_l2g)
        #print len(fl_nodes)

        # Read in static quantities
        for quantity in static_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #             num.array(fid.variables[quantity],dtype=num.float32)
            q = fid.variables[quantity]
            #print quantity, q.shape
            out_s_quantities[quantity][f_node_l2g] = \
                         num.array(q[:],dtype=num.float32)[fl_nodes]

        
        #Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                #out_d_quantities[quantity][i][node_l2g] = \
                #           num.array(q[i],dtype=num.float32)
                out_d_quantities[quantity][i][f_node_l2g] = \
                           num.array(q[i],dtype=num.float32)[fl_nodes]


        # Read in static c quantities
        for quantity in static_c_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #             num.array(fid.variables[quantity],dtype=num.float32)
            q = fid.variables[quantity]
            out_s_c_quantities[quantity][ftri_l2g] = \
                         num.array(q).astype(num.float32)[ftri_ids]

        
        #Collate all dynamic c quantities according to their timestep
        for quantity in dynamic_c_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                out_d_c_quantities[quantity][i][ftri_l2g] = \
                           num.array(q[i]).astype(num.float32)[ftri_ids]


        fid.close()


    #---------------------------
    # Write out the SWW file
    #---------------------------
    #print g_points.shape

    #print number_of_global_triangles
    #print number_of_global_nodes


    if verbose:
        print 'Writing file ', output, ':'
    fido = NetCDFFile(output, netcdf_mode_w)

    sww = Write_sww(static_quantities, dynamic_quantities, static_c_quantities, dynamic_c_quantities)
    sww.store_header(fido, starttime,
                             number_of_global_triangles,
                             number_of_global_nodes,
                             description=description,
                             sww_precision=netcdf_float32)



    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()
    
    sww.store_triangulation(fido, g_points, g_volumes, points_georeference=geo_reference)

    fido.order      = order
    fido.xllcorner  = xllcorner
    fido.yllcorner  = yllcorner
    fido.zone       = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum      = datum
    fido.projection = projection
       
    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
    sww.store_static_quantities_centroid(fido, verbose=verbose, **out_s_c_quantities)

    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

        
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        if verbose:
            print '  Writing quantity: ',q
            
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]
        
        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max        

    for q in dynamic_c_quantities:
        if verbose:
            print '  Writing quantity: ',q
            
        q_values = out_d_c_quantities[q]
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

                                        
    #print out_s_quantities
    #print out_d_quantities
    
    #print g_x
    #print g_y

    #print g_volumes

    fido.close()
    
    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print 'Deleting file ', filename, ':'
            os.remove(filename)
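
A tiny worked demo, with invented arrays, of the num.compress filtering used above to keep only the triangles this processor fully owns:

tri_full_flag = num.array([1, 0, 1])          # 1 marks a full (owned) triangle
tri_l2g       = num.array([4, 7, 5])          # local -> global triangle ids
volumes       = num.array([[0, 1, 2],
                           [1, 2, 3],
                           [2, 3, 0]])
ftri_l2g  = num.compress(tri_full_flag, tri_l2g)          # -> [4, 5]
f_volumes = num.compress(tri_full_flag, volumes, axis=0)  # keeps rows 0 and 2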
Example no. 4
def _sww_merge(swwfiles, output, verbose=False):
    """
        Merge a list of sww files into a single file.
        
        May be useful for parallel runs. Note that collinear points and
        edges are not merged: there will essentially be multiple meshes within
        the one sww file.
        
        The sww files to be merged must have exactly the same timesteps. Note
        that some advanced information and custom quantities may not be
        exported.
        
        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"
        
    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']
    
    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'    
    
        fid = NetCDFFile(filename, netcdf_mode_r)



        
        
        tris = fid.variables['volumes'][:]       
         
        if first_file:
            times = fid.variables['time'][:]
            x = []
            y = []
            out_tris = list(tris)  
            out_s_quantities = {}
            out_d_quantities = {}


            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            order      = fid.order
            xllcorner  = fid.xllcorner
            yllcorner  = fid.yllcorner
            zone       = fid.zone
            false_easting  = fid.false_easting
            false_northing = fid.false_northing
            datum      = fid.datum
            projection = fid.projection

            
            for quantity in static_quantities:
                out_s_quantities[quantity] = []

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = [ [] for _ in range(len(times))]
                 
            description = 'merged:' + getattr(fid, 'description')          
            first_file = False
        else:
            for tri in tris:
                # Advance new tri indices to point at newly appended points.
                verts = [vertex+tri_offset for vertex in tri]
                out_tris.append(verts)



        try: # works with netcdf4
            num_pts = len(fid.dimensions['number_of_points'])
        except: # works with scientific.io.netcdf
            num_pts = int(fid.dimensions['number_of_points'])

        tri_offset += num_pts
        
        if verbose:
            print '  new triangle index offset is ', tri_offset
            
        x.extend(list(fid.variables['x'][:]))
        y.extend(list(fid.variables['y'][:]))
        
        # Grow the list of static quantities associated with the x,y points
        for quantity in static_quantities:
            out_s_quantities[quantity].extend(fid.variables[quantity][:])
            
        #Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            time_chunks = fid.variables[quantity][:]
            for i, time_chunk in enumerate(time_chunks):
                out_d_quantities[quantity][i].extend(time_chunk)            
    
    # Mash all points into a single big list    
    points = [[xx, yy] for xx, yy in zip(x, y)]

    points = num.asarray(points).astype(netcdf_float32)

    fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print 'Writing file ', output, ':'
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido, times,
                             len(out_tris),
                             len(points),
                             description=description,
                             sww_precision=netcdf_float32)



    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()
    
    sww.store_triangulation(fido, points, out_tris, points_georeference=geo_reference)

    fido.order      = order
    fido.xllcorner  = xllcorner
    fido.yllcorner  = yllcorner
    fido.zone       = zone
    fido.false_easting  = false_easting
    fido.false_northing = false_northing
    fido.datum      = datum
    fido.projection = projection
       
    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write out all the dynamic quantities for each timestep
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i, time_slice in enumerate(q_values):
            fido.variables[q][i] = num.array(time_slice, netcdf_float32)
        
        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max        

                                        
    fido.close()
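
The triangle re-indexing in _sww_merge is the heart of the append: vertices of each subsequent mesh are shifted by the number of points already collected. A tiny demo with invented values:

tris = [[0, 1, 2], [1, 2, 3]]     # triangles of the second file
tri_offset = 10                   # points appended from earlier files
out_tris = [[vertex + tri_offset for vertex in tri] for tri in tris]
# -> [[10, 11, 12], [11, 12, 13]]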
Example no. 5
    def _generic_dem2pts(
        self,
        name_in,
        name_out=None,
        quantity_name=None,
        verbose=False,
        easting_min=None,
        easting_max=None,
        northing_min=None,
        northing_max=None,
    ):
        """Read raster from the following NetCDF format (.dem)

        Internal function. See public function generic_dem2pts for details.
        """

        # FIXME: Can this be written feasibly using write_pts?

        import os
        from anuga.file.netcdf import NetCDFFile

        root = name_in[:-4]

        if name_in[-4:] == ".asc":
            intermediate = root + ".dem"
            if verbose:
                log.critical("Preconvert %s from asc to %s" % (name_in, intermediate))
            asc2dem(name_in)
            name_in = intermediate
        elif name_in[-4:] != ".dem":
            raise IOError("Input file %s should be of type .asc or .dem." % name_in)

        if name_out != None and name_out[-4:] != ".pts":
            raise IOError("Output file %s should be of type .pts." % name_out)

        # Get NetCDF
        infile = NetCDFFile(name_in, netcdf_mode_r)

        if verbose:
            log.critical("Reading raster from %s" % (name_in))

        ncols = int(infile.ncols)
        nrows = int(infile.nrows)
        xllcorner = float(infile.xllcorner)  # Easting of lower left corner
        yllcorner = float(infile.yllcorner)  # Northing of lower left corner
        cellsize = float(infile.cellsize)
        NODATA_value = float(infile.NODATA_value)

        dem_elevation = infile.variables[quantity_name]

        zone = int(infile.zone)
        false_easting = float(infile.false_easting)
        false_northing = float(infile.false_northing)

        # print ncols, nrows, xllcorner,yllcorner, cellsize, NODATA_value, zone

        # Text strings
        projection = infile.projection
        datum = infile.datum
        units = infile.units

        # print projection, datum, units

        # Get output file
        if name_out == None:
            ptsname = root + ".pts"
        else:
            ptsname = name_out

        if verbose:
            log.critical("Store to NetCDF file %s" % ptsname)

        # NetCDF file definition
        outfile = NetCDFFile(ptsname, netcdf_mode_w)

        # Create new file
        outfile.institution = "Geoscience Australia"
        outfile.description = "NetCDF pts format for compact and portable " "storage of spatial point data"

        # Assign default values
        if easting_min is None:
            easting_min = xllcorner
        if easting_max is None:
            easting_max = xllcorner + ncols * cellsize
        if northing_min is None:
            northing_min = yllcorner
        if northing_max is None:
            northing_max = yllcorner + nrows * cellsize

        # print easting_min, easting_max, northing_min, northing_max

        # Compute offsets to update georeferencing
        easting_offset = xllcorner - easting_min
        northing_offset = yllcorner - northing_min

        # Georeferencing
        outfile.zone = zone
        outfile.xllcorner = easting_min  # Easting of lower left corner
        outfile.yllcorner = northing_min  # Northing of lower left corner
        outfile.false_easting = false_easting
        outfile.false_northing = false_northing

        outfile.projection = projection
        outfile.datum = datum
        outfile.units = units

        # Grid info (FIXME: probably not going to be used, but heck)
        outfile.ncols = ncols
        outfile.nrows = nrows

        dem_elevation_r = num.reshape(dem_elevation, (nrows, ncols))
        totalnopoints = nrows * ncols

        # ========================================
        # Do the preceding with numpy
        # ========================================
        y = num.arange(nrows, dtype=num.float)
        y = yllcorner + (nrows - 1) * cellsize - y * cellsize

        x = num.arange(ncols, dtype=num.float)
        x = xllcorner + x * cellsize

        xx, yy = num.meshgrid(x, y)

        xx = xx.flatten()
        yy = yy.flatten()

        flag = num.logical_and(
            num.logical_and((xx <= easting_max), (xx >= easting_min)),
            num.logical_and((yy <= northing_max), (yy >= northing_min)),
        )

        dem = dem_elevation[:].flatten()

        id = num.where(flag)[0]

        xx = xx[id]
        yy = yy[id]
        dem = dem[id]

        clippednopoints = len(dem)
        # print clippedpoints

        # print xx
        # print yy
        # print dem

        data_flag = dem != NODATA_value

        data_id = num.where(data_flag)

        xx = xx[data_id]
        yy = yy[data_id]
        dem = dem[data_id]

        nn = clippednopoints - len(dem)

        nopoints = len(dem)

        if verbose:
            log.critical("There are %d values in the raster" % totalnopoints)
            log.critical("There are %d values in the clipped raster" % clippednopoints)
            log.critical("There are %d NODATA_values in the clipped raster" % nn)

        outfile.createDimension("number_of_points", nopoints)
        outfile.createDimension("number_of_dimensions", 2)  # This is 2d data

        # Variable definitions
        outfile.createVariable("points", netcdf_float, ("number_of_points", "number_of_dimensions"))
        outfile.createVariable(quantity_name, netcdf_float, ("number_of_points",))

        # Get handles to the variables
        points = outfile.variables["points"]
        elevation = outfile.variables[quantity_name]

        points[:, 0] = xx - easting_min
        points[:, 1] = yy - northing_min
        elevation[:] = dem

        infile.close()
        outfile.close()
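
The clipping idiom above (build every cell coordinate with meshgrid, flatten, then mask against the bounding box) in a small sketch on an invented 2x3 grid:

x = 722000.0 + num.arange(3, dtype=num.float) * 25.0    # column eastings
y = 5893000.0 + (2 - 1) * 25.0 - num.arange(2, dtype=num.float) * 25.0
xx, yy = num.meshgrid(x, y)
xx, yy = xx.flatten(), yy.flatten()
flag = num.logical_and(xx <= 722025.0, yy >= 5893000.0)
id = num.where(flag)[0]           # -> [0, 1, 3, 4]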
Example no. 6
    def test_decimate_dem(self):
        """Test decimation of dem file
        """

        import os
        from anuga.file.netcdf import NetCDFFile

        #Write test dem file
        root = 'decdemtest'

        filename = root + '.dem'
        fid = NetCDFFile(filename, netcdf_mode_w)

        fid.institution = 'Geoscience Australia'
        fid.description = 'NetCDF DEM format for compact and portable ' +\
                          'storage of spatial point data'

        nrows = 15
        ncols = 18

        fid.ncols = ncols
        fid.nrows = nrows
        fid.xllcorner = 2000.5
        fid.yllcorner = 3000.5
        fid.cellsize = 25
        fid.NODATA_value = -9999

        fid.zone = 56
        fid.false_easting = 0.0
        fid.false_northing = 0.0
        fid.projection = 'UTM'
        fid.datum = 'WGS84'
        fid.units = 'METERS'

        fid.createDimension('number_of_points', nrows * ncols)

        fid.createVariable('elevation', netcdf_float, ('number_of_points', ))

        elevation = fid.variables['elevation']

        elevation[:] = (num.arange(nrows * ncols))

        fid.close()

        #generate the elevation values expected in the decimated file
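        # With cellsize 25 decimated to cellsize_new 100, cellsize_ratio is 4,
        # so each expected value below is the mean of a 3x3 window whose
        # top-left corner advances 4 cells at a time across the 18-column grid.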
        ref_elevation = [
            (0 + 1 + 2 + 18 + 19 + 20 + 36 + 37 + 38) / 9.0,
            (4 + 5 + 6 + 22 + 23 + 24 + 40 + 41 + 42) / 9.0,
            (8 + 9 + 10 + 26 + 27 + 28 + 44 + 45 + 46) / 9.0,
            (12 + 13 + 14 + 30 + 31 + 32 + 48 + 49 + 50) / 9.0,
            (72 + 73 + 74 + 90 + 91 + 92 + 108 + 109 + 110) / 9.0,
            (76 + 77 + 78 + 94 + 95 + 96 + 112 + 113 + 114) / 9.0,
            (80 + 81 + 82 + 98 + 99 + 100 + 116 + 117 + 118) / 9.0,
            (84 + 85 + 86 + 102 + 103 + 104 + 120 + 121 + 122) / 9.0,
            (144 + 145 + 146 + 162 + 163 + 164 + 180 + 181 + 182) / 9.0,
            (148 + 149 + 150 + 166 + 167 + 168 + 184 + 185 + 186) / 9.0,
            (152 + 153 + 154 + 170 + 171 + 172 + 188 + 189 + 190) / 9.0,
            (156 + 157 + 158 + 174 + 175 + 176 + 192 + 193 + 194) / 9.0,
            (216 + 217 + 218 + 234 + 235 + 236 + 252 + 253 + 254) / 9.0,
            (220 + 221 + 222 + 238 + 239 + 240 + 256 + 257 + 258) / 9.0,
            (224 + 225 + 226 + 242 + 243 + 244 + 260 + 261 + 262) / 9.0,
            (228 + 229 + 230 + 246 + 247 + 248 + 264 + 265 + 266) / 9.0
        ]

        # generate a stencil for computing the decimated values
        stencil = num.ones((3, 3), num.float) / 9.0

        dem2dem(filename, stencil=stencil, cellsize_new=100)

        # Open decimated NetCDF file
        fid = NetCDFFile(root + '_100.dem', netcdf_mode_r)

        # Get decimated elevation
        elevation = fid.variables['elevation']

        # Check values
        assert num.allclose(elevation, ref_elevation)

        # Cleanup
        fid.close()

        os.remove(root + '.dem')
        os.remove(root + '_100.dem')
Example no. 7
def _dem2pts(name_in, name_out=None, verbose=False,
            easting_min=None, easting_max=None,
            northing_min=None, northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    # FIXME: Can this be written feasibly using write_pts?

    import os
    from anuga.file.netcdf import NetCDFFile

    root = name_in[:-4]

    if name_in[-4:] == '.asc':
        intermediate = root + '.dem'
        if verbose:
            log.critical('Preconvert %s from asc to %s' % \
                                    (name_in, intermediate))
        asc2dem(name_in)
        name_in = intermediate
    elif name_in[-4:] != '.dem':
        raise IOError('Input file %s should be of type .asc or .dem.' % name_in)

    if name_out != None and name_out[-4:] != '.pts':
        raise IOError('Output file %s should be of type .pts.' % name_out)

    # Get NetCDF
    infile = NetCDFFile(name_in, netcdf_mode_r) 

    if verbose: log.critical('Reading DEM from %s' % (name_in))

    ncols = int(infile.ncols)
    nrows = int(infile.nrows)
    xllcorner = float(infile.xllcorner)  # Easting of lower left corner
    yllcorner = float(infile.yllcorner)  # Northing of lower left corner
    cellsize = float(infile.cellsize)
    NODATA_value = float(infile.NODATA_value)

    dem_elevation = infile.variables['elevation']

    zone = int(infile.zone)
    false_easting = float(infile.false_easting)
    false_northing = float(infile.false_northing)

    #print ncols, nrows, xllcorner,yllcorner, cellsize, NODATA_value, zone


    # Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    #print projection, datum, units

    # Get output file
    if name_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = name_out

    if verbose: log.critical('Store to NetCDF file %s' % ptsname)

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, netcdf_mode_w)

    # Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' \
                          'storage of spatial point data'

    # Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols*cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows*cellsize


    #print easting_min, easting_max, northing_min, northing_max

    # Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    # Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min # Easting of lower left corner
    outfile.yllcorner = northing_min # Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    # Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    #dem_elevation_r = num.reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows*ncols






#    #=======================================================================
#    # Calculating number of NODATA_values for each row in clipped region
#    # FIXME: use array operations to do faster
#    nn = 0
#    k = 0
#    i1_0 = 0
#    j1_0 = 0
#    thisj = 0
#    thisi = 0
#    for i in range(nrows):
#        y = (nrows-i-1)*cellsize + yllcorner
#        for j in range(ncols):
#            x = j*cellsize + xllcorner
#            if easting_min <= x <= easting_max \
#               and northing_min <= y <= northing_max:
#                thisj = j
#                thisi = i
#                if dem_elevation_r[i,j] == NODATA_value:
#                    nn += 1
#
#                if k == 0:
#                    i1_0 = i
#                    j1_0 = j
#
#                k += 1
#
#    index1 = j1_0
#    index2 = thisj
#
#    # Dimension definitions
#    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
#    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
#
#    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
#    nopoints = clippednopoints-nn
#
#    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]
#
#    if verbose:
#        log.critical('There are %d values in the elevation' % totalnopoints)
#        log.critical('There are %d values in the clipped elevation'
#                     % clippednopoints)
#        log.critical('There are %d NODATA_values in the clipped elevation' % nn)
#
#    outfile.createDimension('number_of_points', nopoints)
#    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
#
#    # Variable definitions
#    outfile.createVariable('points', netcdf_float, ('number_of_points',
#                                                    'number_of_dimensions'))
#    outfile.createVariable('elevation', netcdf_float, ('number_of_points',))
#
#    # Get handles to the variables
#    points = outfile.variables['points']
#    elevation = outfile.variables['elevation']
#
#    # Number of points
#    N = points.shape[0]
#
#    lenv = index2-index1+1
#
#    # Store data
#    global_index = 0
#    # for i in range(nrows):
#    for i in range(i1_0, thisi+1, 1):
#        if verbose and i % ((nrows+10)/10) == 0:
#            log.critical('Processing row %d of %d' % (i, nrows))
#
#        lower_index = global_index
#
#        v = dem_elevation_r[i,index1:index2+1]
#        no_NODATA = num.sum(v == NODATA_value)
#        if no_NODATA > 0:
#            newcols = lenv - no_NODATA  # ncols_in_bounding_box - no_NODATA
#        else:
#            newcols = lenv              # ncols_in_bounding_box
#
#        telev = num.zeros(newcols, num.float)
#        tpoints = num.zeros((newcols, 2), num.float)
#
#        local_index = 0
#
#        y = (nrows-i-1)*cellsize + yllcorner
#        #for j in range(ncols):
#        for j in range(j1_0,index2+1,1):
#            x = j*cellsize + xllcorner
#            if easting_min <= x <= easting_max \
#               and northing_min <= y <= northing_max \
#               and dem_elevation_r[i,j] != NODATA_value:
#
#                #print [x-easting_min, y-northing_min]
#                #print x , y
#                #print easting_min, northing_min
#                #print xllcorner, yllcorner
#                #print cellsize
#
#                tpoints[local_index, :] = [x-easting_min, y-northing_min]
#                telev[local_index] = dem_elevation_r[i, j]
#                global_index += 1
#                local_index += 1
#
#        upper_index = global_index
#
#        if upper_index == lower_index + newcols:
#
#            # Seems to be an error with the windows version of
#            # Netcdf. The following gave errors
#
#            try:
#                points[lower_index:upper_index, :] = tpoints
#                elevation[lower_index:upper_index] = telev
#            except:
#                # so used the following if an error occurs
#                for index in range(newcols):
#                    points[index+lower_index, :] = tpoints[index,:]
#                    elevation[index+lower_index] = telev[index]
#
#    assert global_index == nopoints, 'index not equal to number of points'


    #========================================
    # Do the preceding with numpy
    #========================================
    y = num.arange(nrows,dtype=num.float)
    y = yllcorner + (nrows-1)*cellsize - y*cellsize

    x = num.arange(ncols,dtype=num.float)
    x = xllcorner + x*cellsize

    xx,yy = num.meshgrid(x,y)

    xx = xx.flatten()
    yy = yy.flatten()

    
    flag = num.logical_and(num.logical_and((xx <= easting_max),(xx >= easting_min)),
                           num.logical_and((yy <= northing_max),(yy >= northing_min)))

    
    dem = dem_elevation[:].flatten()


    id = num.where(flag)[0]

    xx = xx[id]
    yy = yy[id]
    dem = dem[id]


    clippednopoints = len(dem)
    #print clippedpoints
    
    #print xx
    #print yy
    #print dem

    data_flag = dem != NODATA_value

    data_id = num.where(data_flag)

    xx = xx[data_id]
    yy = yy[data_id]
    dem = dem[data_id]

    nn = clippednopoints - len(dem)

    nopoints = len(dem)


    if verbose:
        log.critical('There are %d values in the elevation' % totalnopoints)
        log.critical('There are %d values in the clipped elevation'
                     % clippednopoints)
        log.critical('There are %d NODATA_values in the clipped elevation' % nn)

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # Variable definitions
    outfile.createVariable('points', netcdf_float, ('number_of_points',
                                                    'number_of_dimensions'))
    outfile.createVariable('elevation', netcdf_float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    points[:,0] = xx - easting_min
    points[:,1] = yy - northing_min
    elevation[:] = dem


    infile.close()
    outfile.close()
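
After clipping, points whose value equals NODATA_value are dropped before anything is written; a one-line demo with invented numbers:

dem = num.array([138.4, -9999.0, 136.5])
data_id = num.where(dem != -9999.0)
dem = dem[data_id]                # -> [138.4, 136.5]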
Example no. 8
def _generic_dem2pts(name_in,
                     name_out=None,
                     quantity_name=None,
                     verbose=False,
                     easting_min=None,
                     easting_max=None,
                     northing_min=None,
                     northing_max=None):
    """Read raster from the following NetCDF format (.dem)

    Internal function. See public function generic_dem2pts for details.
    """

    # FIXME: Can this be written feasibly using write_pts?

    import os
    from anuga.file.netcdf import NetCDFFile

    root = name_in[:-4]

    if name_in[-4:] == '.asc':
        intermediate = root + '.dem'
        if verbose:
            log.critical('Preconvert %s from asc to %s' % \
                                    (name_in, intermediate))
        asc2dem(name_in)
        name_in = intermediate
    elif name_in[-4:] != '.dem':
        raise IOError('Input file %s should be of type .asc or .dem.' %
                      name_in)

    if name_out != None and name_out[-4:] != '.pts':
        raise IOError('Output file %s should be of type .pts.' % name_out)

    # Get NetCDF
    infile = NetCDFFile(name_in, netcdf_mode_r)

    if verbose: log.critical('Reading raster from %s' % (name_in))

    ncols = int(infile.ncols)
    nrows = int(infile.nrows)
    xllcorner = float(infile.xllcorner)  # Easting of lower left corner
    yllcorner = float(infile.yllcorner)  # Northing of lower left corner
    cellsize = float(infile.cellsize)
    NODATA_value = float(infile.NODATA_value)

    dem_elevation = infile.variables[quantity_name]

    zone = int(infile.zone)
    false_easting = float(infile.false_easting)
    false_northing = float(infile.false_northing)

    #print ncols, nrows, xllcorner,yllcorner, cellsize, NODATA_value, zone

    # Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    #print projection, datum, units

    # Get output file
    if name_out == None:
        ptsname = root + '.pts'
    else:
        ptsname = name_out

    if verbose: log.critical('Store to NetCDF file %s' % ptsname)

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, netcdf_mode_w)

    # Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' \
                          'storage of spatial point data'

    # Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols * cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows * cellsize

    #print easting_min, easting_max, northing_min, northing_max

    # Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    # Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min  # Easting of lower left corner
    outfile.yllcorner = northing_min  # Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    # Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    dem_elevation_r = num.reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows * ncols

    #========================================
    # Do the preceding with numpy
    #========================================
    y = num.arange(nrows, dtype=num.float)
    y = yllcorner + (nrows - 1) * cellsize - y * cellsize

    x = num.arange(ncols, dtype=num.float)
    x = xllcorner + x * cellsize

    xx, yy = num.meshgrid(x, y)

    xx = xx.flatten()
    yy = yy.flatten()

    flag = num.logical_and(
        num.logical_and((xx <= easting_max), (xx >= easting_min)),
        num.logical_and((yy <= northing_max), (yy >= northing_min)))

    dem = dem_elevation[:].flatten()

    id = num.where(flag)[0]

    xx = xx[id]
    yy = yy[id]
    dem = dem[id]

    clippednopoints = len(dem)
    #print clippedpoints

    #print xx
    #print yy
    #print dem

    data_flag = dem != NODATA_value

    data_id = num.where(data_flag)

    xx = xx[data_id]
    yy = yy[data_id]
    dem = dem[data_id]

    nn = clippednopoints - len(dem)

    nopoints = len(dem)

    if verbose:
        log.critical('There are %d values in the raster' % totalnopoints)
        log.critical('There are %d values in the clipped raster' %
                     clippednopoints)
        log.critical('There are %d NODATA_values in the clipped raster' % nn)

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2)  #This is 2d data

    # Variable definitions
    outfile.createVariable('points', netcdf_float,
                           ('number_of_points', 'number_of_dimensions'))
    outfile.createVariable(quantity_name, netcdf_float, ('number_of_points', ))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables[quantity_name]

    points[:, 0] = xx - easting_min
    points[:, 1] = yy - northing_min
    elevation[:] = dem

    infile.close()
    outfile.close()
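
A hypothetical call (file and quantity names invented): unlike _dem2pts, which is hard-wired to 'elevation', the variable to extract is named via quantity_name and must exist in the input file:

_generic_dem2pts('rain_grid.dem', name_out='rain_grid.pts',
                 quantity_name='rainfall', verbose=True,
                 easting_min=722000.0, easting_max=730000.0,
                 northing_min=5893000.0, northing_max=5900000.0)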
Example no. 9
def dem2dem(name_in, stencil, cellsize_new, name_out=None, verbose=False):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Example:

    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    Decimate data to cellsize_new using stencil and write to NetCDF dem format.
    """

    import os
    from anuga.file.netcdf import NetCDFFile

    if name_in[-4:] != '.dem':
        raise IOError('Input file %s should be of type .dem.' % name_in)

    if name_out != None and name_out[-4:] != '.dem':
        raise IOError('Output file %s should be of type .dem.' % name_out)

    #Open existing netcdf file to read
    infile = NetCDFFile(name_in, netcdf_mode_r)

    if verbose: log.critical('Reading DEM from %s' % name_in)

    # Read metadata (convert from numpy.int32 to int where appropriate)
    ncols = int(infile.ncols)
    nrows = int(infile.nrows)
    xllcorner = infile.xllcorner
    yllcorner = infile.yllcorner
    cellsize = int(infile.cellsize)
    NODATA_value = int(infile.NODATA_value)
    zone = int(infile.zone)
    false_easting = infile.false_easting
    false_northing = infile.false_northing
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    dem_elevation = infile.variables['elevation']

    #Get output file name
    if name_out == None:
        outname = name_in[:-4] + '_' + repr(cellsize_new) + '.dem'
    else:
        outname = name_out

    if verbose: log.critical('Write decimated NetCDF file to %s' % outname)

    #Determine some dimensions for decimated grid
    (nrows_stencil, ncols_stencil) = stencil.shape
    x_offset = ncols_stencil / 2
    y_offset = nrows_stencil / 2
    cellsize_ratio = int(cellsize_new / cellsize)
    ncols_new = 1 + (ncols - ncols_stencil) / cellsize_ratio
    nrows_new = 1 + (nrows - nrows_stencil) / cellsize_ratio

    #print type(ncols_new), ncols_new

    #Open netcdf file for output
    outfile = NetCDFFile(outname, netcdf_mode_w)

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF DEM format for compact and portable ' \
                          'storage of spatial point data'

    #Georeferencing
    outfile.zone = zone
    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    outfile.cellsize = cellsize_new
    outfile.NODATA_value = NODATA_value
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.xllcorner = xllcorner + (x_offset * cellsize)
    outfile.yllcorner = yllcorner + (y_offset * cellsize)
    outfile.ncols = ncols_new
    outfile.nrows = nrows_new

    # dimension definition
    #print nrows_new, ncols_new, nrows_new*ncols_new
    #print type(nrows_new), type(ncols_new), type(nrows_new*ncols_new)
    outfile.createDimension('number_of_points', nrows_new * ncols_new)

    # variable definition
    outfile.createVariable('elevation', netcdf_float, ('number_of_points', ))

    # Get handle to the variable
    elevation = outfile.variables['elevation']

    dem_elevation_r = num.reshape(dem_elevation, (nrows, ncols))

    #Store data
    global_index = 0
    for i in range(nrows_new):
        if verbose: log.critical('Processing row %d of %d' % (i, nrows_new))

        lower_index = global_index
        telev = num.zeros(ncols_new, num.float)
        local_index = 0
        trow = i * cellsize_ratio

        for j in range(ncols_new):
            tcol = j * cellsize_ratio
            tmp = dem_elevation_r[trow:trow + nrows_stencil,
                                  tcol:tcol + ncols_stencil]

            #if dem contains 1 or more NODATA_values set value in
            #decimated dem to NODATA_value, else compute decimated
            #value using stencil
            if num.sum(num.sum(num.equal(tmp, NODATA_value))) > 0:
                telev[local_index] = NODATA_value
            else:
                telev[local_index] = num.sum(num.sum(tmp * stencil))

            global_index += 1
            local_index += 1

        upper_index = global_index

        elevation[lower_index:upper_index] = telev

    assert global_index == nrows_new*ncols_new, \
           'index not equal to number of points'

    infile.close()
    outfile.close()
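For illustration, a typical call mirrors the decimation test further below: a 3x3 averaging stencil takes a 25 m DEM onto a 100 m grid. The input file name is hypothetical; dem2dem derives the output name itself as name_in[:-4] + '_100.dem'.

import numpy as num

stencil = num.ones((3, 3)) / 9.0    # 3x3 moving average
dem2dem('topo.dem', stencil=stencil, cellsize_new=100, verbose=True)
# -> writes the decimated DEM to 'topo_100.dem'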
Example no. 10
def _convert_dem_from_llasc2pts(name_in, name_out=None,
                                show_progress=False,
                                verbose=False):
    """Read Digital Elevation model from the following LL ASCII format (.asc)

    Internal function. See public function convert_dem_from_ascii2netcdf
    for details.
    """

    import os
    from anuga.file.netcdf import NetCDFFile

    #Read DEM data
    datafile = open(name_in)

    if verbose: log.critical('Reading DEM from %s' % (name_in))

    lines = datafile.readlines()
    datafile.close()

    if verbose: log.critical('Got %d lines' % len(lines))

    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())

    # Do cellsize (line 4) before line 2 and 3
    cellsize = float(lines[4].split()[1].strip())

    xref = lines[2].split()
    if xref[0].strip() == 'xllcorner':
        xllcorner = float(xref[1].strip()) 
    elif xref[0].strip() == 'xllcenter':
        xllcorner = float(xref[1].strip()) # - 0.5*cellsize # Correct offset
    else:
        msg = 'Unknown keyword: %s' % xref[0].strip()
        raise_(Exception, msg)

    yref = lines[3].split()
    if yref[0].strip() == 'yllcorner':
        yllcorner = float(yref[1].strip()) 
    elif yref[0].strip() == 'yllcenter':
        yllcorner = float(yref[1].strip()) # - 0.5*cellsize # Correct offset
    else:
        msg = 'Unknown keyword: %s' % yref[0].strip()
        raise_(Exception, msg)

    NODATA_value = float(lines[5].split()[1].strip())

    assert len(lines) == nrows + 6

    dem_elevation = num.loadtxt(lines, skiprows=6, dtype=float)


    totalnopoints = nrows*ncols

    y = num.arange(nrows,dtype=num.float)
    y = yllcorner + (nrows-1)*cellsize - y*cellsize

    x = num.arange(ncols,dtype=num.float)
    x = xllcorner + x*cellsize

    #print(xllcorner)
    #print(x)

    #print(yllcorner)
    #print(y)

    xx,yy = num.meshgrid(x,y)

    xx = xx.flatten()
    yy = yy.flatten()
    dem = dem_elevation[:].flatten()
    
    # ====================
    # remove NODATA points
    # ====================
    data_flag = dem != NODATA_value

    data_id = num.where(data_flag)

    xx = xx[data_id]
    yy = yy[data_id]
    dem = dem[data_id]

    nn = totalnopoints - len(dem)

    nopoints = len(dem)

    # =====================================
    # Convert xx and yy to UTM
    # =====================================
    points_UTM, zone = convert_from_latlon_to_utm(latitudes=yy, longitudes=xx, show_progress=show_progress)

    points_UTM = num.asarray(points_UTM, dtype=float)

    corners, zone_c = convert_from_latlon_to_utm(latitudes=yllcorner, longitudes=xllcorner)

    xllcorner = corners[0][0]
    yllcorner = corners[0][1]

    assert zone == zone_c

    points_UTM = points_UTM - corners

    # ===============================
    # Set up for writing to pts file
    # ===============================

    if name_out is None:
        netcdfname = name_in[:-4]+'.pts'
    else:
        netcdfname = name_out + '.pts'

    if verbose: log.critical('Store to NetCDF file %s' % netcdfname)

    # NetCDF file definition
    outfile = NetCDFFile(netcdfname, netcdf_mode_w)

    # Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' \
                          'storage of spatial point data'


    # Georeferencing
    outfile.zone = zone
    outfile.xllcorner = xllcorner # Easting of lower left corner
    outfile.yllcorner = yllcorner # Northing of lower left corner
    
    # Default settings
    outfile.false_easting = 500000.0
    outfile.false_northing = 10000000.0
    outfile.projection = 'UTM'
    outfile.datum = 'WGS84'
    outfile.units = 'METERS'

    # Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows




    if verbose:
        log.critical('There are %d values in the elevation' % totalnopoints)
        log.critical('There are %d NODATA_values in the clipped elevation' % nn)

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2) #This is 2d data

    # Variable definitions
    outfile.createVariable('points', netcdf_float, ('number_of_points',
                                                    'number_of_dimensions'))
    outfile.createVariable('elevation', netcdf_float, ('number_of_points',))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    points[:,:]= points_UTM
    elevation[:] = dem

    outfile.close()
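The heart of the conversion above is the meshgrid construction (the top raster row has the largest y) followed by the NODATA mask. The same steps in isolation, with made-up numbers:

import numpy as num

nrows, ncols, cellsize = 2, 3, 10.0
xll, yll, NODATA = 100.0, 200.0, -9999.0
dem = num.array([[1.0, NODATA, 3.0],
                 [4.0, 5.0, 6.0]])

y = yll + (nrows - 1) * cellsize - num.arange(nrows) * cellsize    # [210. 200.]
x = xll + num.arange(ncols) * cellsize                             # [100. 110. 120.]
xx, yy = num.meshgrid(x, y)

keep = dem.flatten() != NODATA
print(xx.flatten()[keep])    # [100. 120. 100. 110. 120.]
print(dem.flatten()[keep])   # [1. 3. 4. 5. 6.]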
Example no. 11
def _sww_merge_parallel_non_smooth(swwfiles,
                                   output,
                                   verbose=False,
                                   delete_old=False):
    """
        Merge a list of sww files into a single file.

        Used to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        It is assumed that the separate sww files have been stored in non_smooth
        format.

        Note that some advanced information and custom quantities may not be
        exported.

        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'

        fid = NetCDFFile(filename, netcdf_mode_r)

        if first_file:

            times = fid.variables['time'][:]
            n_steps = len(times)
            number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)

            out_s_quantities = {}
            out_d_quantities = {}

            out_s_c_quantities = {}
            out_d_c_quantities = {}

            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes = int(fid.number_of_global_nodes)
            number_of_global_triangle_vertices = 3 * number_of_global_triangles

            order = fid.order
            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner
            zone = fid.zone
            false_easting = fid.false_easting
            false_northing = fid.false_northing
            datum = fid.datum
            projection = fid.projection

            g_volumes = num.arange(number_of_global_triangles * 3).reshape(
                -1, 3)

            g_x = num.zeros((number_of_global_triangle_vertices, ),
                            num.float32)
            g_y = num.zeros((number_of_global_triangle_vertices, ),
                            num.float32)

            g_points = num.zeros((number_of_global_triangle_vertices, 2),
                                 num.float32)

            #=======================================
            # Deal with the vertex based variables
            #=======================================
            quantities = set([
                'elevation', 'friction', 'stage', 'xmomentum', 'ymomentum',
                'xvelocity', 'yvelocity', 'height'
            ])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # Test if elevation is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)

            # Static Quantities are stored as a 1D array
            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros(
                    (3 * number_of_global_triangles, ), num.float32)

            #=======================================
            # Deal with the centroid based variables
            #=======================================
            quantities = set([
                'elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
                'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'
            ])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_c_quantities = []
            dynamic_c_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_c_quantities.append(quantity)
                else:
                    static_c_quantities.append(quantity)

            for quantity in static_c_quantities:
                out_s_c_quantities[quantity] = num.zeros(
                    (number_of_global_triangles, ), num.float32)

            description = 'merged:' + getattr(fid, 'description')
            first_file = False

        # Read in from files and add to global arrays

        tri_l2g = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]

        f_ids = num.argwhere(tri_full_flag == 1).reshape(-1, )
        f_gids = tri_l2g[f_ids]

        g_vids = (3 * f_gids.reshape(-1, 1) + num.array([0, 1, 2])).reshape(
            -1, )
        l_vids = (3 * f_ids.reshape(-1, 1) + num.array([0, 1, 2])).reshape(
            -1, )

        l_x = num.array(fid.variables['x'][:], dtype=num.float32)
        l_y = num.array(fid.variables['y'][:], dtype=num.float32)

        g_x[g_vids] = l_x[l_vids]
        g_y[g_vids] = l_y[l_vids]

        g_points[g_vids, 0] = g_x[g_vids]
        g_points[g_vids, 1] = g_y[g_vids]

        ## Read in static quantities
        for quantity in static_quantities:
            q = fid.variables[quantity]
            out_s_quantities[quantity][g_vids] = \
                         num.array(q).astype(num.float32)[l_vids]
            #num.array(q,dtype=num.float32)[l_vids]

        # Read in static c quantities
        for quantity in static_c_quantities:
            q = fid.variables[quantity]
            out_s_c_quantities[quantity][f_gids] = \
                         num.array(q).astype(num.float32)[f_ids]
            #num.array(q,dtype=num.float32)[f_ids]

        fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print 'Writing file ', output, ':'

    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities, static_c_quantities,
                    dynamic_c_quantities)
    sww.store_header(fido,
                     starttime,
                     number_of_global_triangles,
                     number_of_global_triangles * 3,
                     description=description,
                     sww_precision=netcdf_float32)

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido,
                            g_points,
                            g_volumes,
                            points_georeference=geo_reference)

    fido.order = order
    fido.xllcorner = xllcorner
    fido.yllcorner = yllcorner
    fido.zone = zone
    fido.false_easting = false_easting
    fido.false_northing = false_northing
    fido.datum = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
    sww.store_static_quantities_centroid(fido,
                                         verbose=verbose,
                                         **out_s_c_quantities)

    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

    for q in (dynamic_quantities + dynamic_c_quantities):

        # Initialise q_values with zeros
        if q in dynamic_quantities:
            q_values = num.zeros((n_steps, 3 * number_of_global_triangles),
                                 num.float32)
        elif q in dynamic_c_quantities:
            q_values = num.zeros((n_steps, number_of_global_triangles),
                                 num.float32)

        # Read the quantities one at a time, to reduce memory usage
        for filename in swwfiles:
            fid = NetCDFFile(filename, netcdf_mode_r)

            # Index information
            tri_l2g = fid.variables['tri_l2g'][:]
            node_l2g = fid.variables['node_l2g'][:]
            tri_full_flag = fid.variables['tri_full_flag'][:]
            f_ids = num.argwhere(tri_full_flag == 1).reshape(-1, )
            f_gids = tri_l2g[f_ids]
            g_vids = (3 * f_gids.reshape(-1, 1) +
                      num.array([0, 1, 2])).reshape(-1, )
            l_vids = (3 * f_ids.reshape(-1, 1) + num.array([0, 1, 2])).reshape(
                -1, )
            for i in range(n_steps):
                # Different indices for vertex and centroid quantities
                if q in dynamic_quantities:
                    q_values[i][g_vids] = \
                    num.array(fid.variables[q][i], dtype=num.float32)[l_vids]
                elif q in dynamic_c_quantities:
                    q_values[i][f_gids] = \
                    num.array(fid.variables[q][i], dtype=num.float32)[f_ids]

            fid.close()

        # Write to the file
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

        if q in dynamic_quantities:
            # This updates the _range values
            q_range = fido.variables[q + Write_sww.RANGE][:]
            q_values_min = num.min(q_values)
            if q_values_min < q_range[0]:
                fido.variables[q + Write_sww.RANGE][0] = q_values_min
            q_values_max = num.max(q_values)
            if q_values_max > q_range[1]:
                fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()

    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print 'Deleting file ', filename, ':'
            os.remove(filename)
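The index arithmetic above rests on one idiom: in non-smooth storage a triangle with global id g owns vertex slots 3g, 3g+1 and 3g+2, so the local-to-global vertex map is built from the triangle map. A toy demonstration with made-up flags and mappings:

import numpy as num

tri_full_flag = num.array([1, 0, 1, 1])    # local triangles 0, 2 and 3 are 'full'
tri_l2g = num.array([5, 9, 2, 7])          # local -> global triangle ids

f_ids = num.argwhere(tri_full_flag == 1).reshape(-1,)    # [0 2 3]
f_gids = tri_l2g[f_ids]                                  # [5 2 7]
g_vids = (3 * f_gids.reshape(-1, 1) + num.array([0, 1, 2])).reshape(-1,)
print(g_vids)    # [15 16 17  6  7  8 21 22 23]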
Example no. 12
def _sww_merge(swwfiles, output, verbose=False):
    """
        Merge a list of sww files into a single file.
        
        May be useful for parallel runs. Note that collinear points and
        edges are not merged: there will essentially be multiple meshes within
        the one sww file.
        
        The sww files to be merged must have exactly the same timesteps. Note
        that some advanced information and custom quantities may not be
        exported.
        
        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"

    static_quantities = ['elevation']
    dynamic_quantities = ['stage', 'xmomentum', 'ymomentum']

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'

        fid = NetCDFFile(filename, netcdf_mode_r)

        tris = fid.variables['volumes'][:]

        if first_file:
            times = fid.variables['time'][:]
            x = []
            y = []
            out_tris = list(tris)
            out_s_quantities = {}
            out_d_quantities = {}

            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            order = fid.order
            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner
            zone = fid.zone
            false_easting = fid.false_easting
            false_northing = fid.false_northing
            datum = fid.datum
            projection = fid.projection

            for quantity in static_quantities:
                out_s_quantities[quantity] = []

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = [[] for _ in range(len(times))]

            description = 'merged:' + getattr(fid, 'description')
            first_file = False
        else:
            for tri in tris:
                # Advance new tri indices to point at newly appended points.
                verts = [vertex + tri_offset for vertex in tri]
                out_tris.append(verts)

        try:  # works with netcdf4
            num_pts = len(fid.dimensions['number_of_points'])
        except:  # works with scientific.io.netcdf
            num_pts = int(fid.dimensions['number_of_points'])

        tri_offset += num_pts

        if verbose:
            print '  new triangle index offset is ', tri_offset

        x.extend(list(fid.variables['x'][:]))
        y.extend(list(fid.variables['y'][:]))

        # Grow the list of static quantities associated with the x,y points
        for quantity in static_quantities:
            out_s_quantities[quantity].extend(fid.variables[quantity][:])

        #Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            time_chunks = fid.variables[quantity][:]
            for i, time_chunk in enumerate(time_chunks):
                out_d_quantities[quantity][i].extend(time_chunk)

    # Mash all points into a single big list
    points = [[xx, yy] for xx, yy in zip(x, y)]

    points = num.asarray(points).astype(netcdf_float32)

    fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------

    if verbose:
        print 'Writing file ', output, ':'
    fido = NetCDFFile(output, netcdf_mode_w)
    sww = Write_sww(static_quantities, dynamic_quantities)
    sww.store_header(fido,
                     times,
                     len(out_tris),
                     len(points),
                     description=description,
                     sww_precision=netcdf_float32)

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido,
                            points,
                            out_tris,
                            points_georeference=geo_reference)

    fido.order = order
    fido.xllcorner = xllcorner
    fido.yllcorner = yllcorner
    fido.zone = zone
    fido.false_easting = false_easting
    fido.false_northing = false_northing
    fido.datum = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)

    # Write out all the dynamic quantities for each timestep
    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i, time_slice in enumerate(q_values):
            fido.variables[q][i] = num.array(time_slice, netcdf_float32)

        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max

    fido.close()
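Because the meshes are simply concatenated, each appended file's triangles must have their vertex indices shifted by the number of points already collected, exactly as the tri_offset loop above does. A minimal illustration:

tris_a = [[0, 1, 2]]    # first mesh: one triangle over points 0..2
tris_b = [[0, 1, 2]]    # second mesh reuses local indices 0..2
tri_offset = 3          # points of the first mesh already appended

merged = list(tris_a)
for tri in tris_b:
    merged.append([vertex + tri_offset for vertex in tri])
print(merged)           # [[0, 1, 2], [3, 4, 5]]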
Example no. 13
def _sww_merge_parallel_smooth(swwfiles,
                               output,
                               verbose=False,
                               delete_old=False):
    """
        Merge a list of sww files into a single file.
        
        Used to merge files created by parallel runs.

        The sww files to be merged must have exactly the same timesteps.

        It is assumed that the separate sww files have been stored in smooth
        format.

        Note that some advanced information and custom quantities may not be
        exported.
        
        swwfiles is a list of .sww files to merge.
        output is the output filename, including .sww extension.
        verbose True to log output information
    """

    if verbose:
        print "MERGING SWW Files"

    first_file = True
    tri_offset = 0
    for filename in swwfiles:
        if verbose:
            print 'Reading file ', filename, ':'

        fid = NetCDFFile(filename, netcdf_mode_r)

        if first_file:

            times = fid.variables['time'][:]
            n_steps = len(times)
            #number_of_timesteps = fid.dimensions['number_of_timesteps']
            #print n_steps, number_of_timesteps
            starttime = int(fid.starttime)

            out_s_quantities = {}
            out_d_quantities = {}

            out_s_c_quantities = {}
            out_d_c_quantities = {}

            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner

            number_of_global_triangles = int(fid.number_of_global_triangles)
            number_of_global_nodes = int(fid.number_of_global_nodes)

            order = fid.order
            xllcorner = fid.xllcorner
            yllcorner = fid.yllcorner
            zone = fid.zone
            false_easting = fid.false_easting
            false_northing = fid.false_northing
            datum = fid.datum
            projection = fid.projection

            g_volumes = num.zeros((number_of_global_triangles, 3), num.int)
            g_x = num.zeros((number_of_global_nodes, ), num.float32)
            g_y = num.zeros((number_of_global_nodes, ), num.float32)

            g_points = num.zeros((number_of_global_nodes, 2), num.float32)

            #=====================================
            # Deal with the vertex based variables
            #=====================================
            quantities = set([
                'elevation', 'friction', 'stage', 'xmomentum', 'ymomentum',
                'xvelocity', 'yvelocity', 'height'
            ])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_quantities = []
            dynamic_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_quantities.append(quantity)
                else:
                    static_quantities.append(quantity)

            for quantity in static_quantities:
                out_s_quantities[quantity] = num.zeros(
                    (number_of_global_nodes, ), num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_quantities:
                out_d_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_nodes),num.float32)

            #=======================================
            # Deal with the centroid based variables
            #=======================================
            quantities = set([
                'elevation_c', 'friction_c', 'stage_c', 'xmomentum_c',
                'ymomentum_c', 'xvelocity_c', 'yvelocity_c', 'height_c'
            ])
            variables = set(fid.variables.keys())

            quantities = list(quantities & variables)

            static_c_quantities = []
            dynamic_c_quantities = []

            for quantity in quantities:
                # Test if quantity is static
                if n_steps == fid.variables[quantity].shape[0]:
                    dynamic_c_quantities.append(quantity)
                else:
                    static_c_quantities.append(quantity)

            for quantity in static_c_quantities:
                out_s_c_quantities[quantity] = num.zeros(
                    (number_of_global_triangles, ), num.float32)

            # Quantities are stored as a 2D array of timesteps x data.
            for quantity in dynamic_c_quantities:
                out_d_c_quantities[quantity] = \
                      num.zeros((n_steps,number_of_global_triangles),num.float32)

            description = 'merged:' + getattr(fid, 'description')
            first_file = False

        # Read in from files and add to global arrays

        tri_l2g = fid.variables['tri_l2g'][:]
        node_l2g = fid.variables['node_l2g'][:]
        tri_full_flag = fid.variables['tri_full_flag'][:]
        volumes = num.array(fid.variables['volumes'][:], dtype=num.int)
        l_volumes = num.zeros_like(volumes)
        l_old_volumes = num.zeros_like(volumes)

        # Change the local node ids to global id in the
        # volume array

        # FIXME SR: Surely we can knock up a numpy way of doing this
        #for i in range(len(l_volumes)):
        #    g_n0 = node_l2g[volumes[i,0]]
        #    g_n1 = node_l2g[volumes[i,1]]
        #    g_n2 = node_l2g[volumes[i,2]]
        #
        #    l_old_volumes[i,:] = [g_n0,g_n1,g_n2]

        g_n0 = node_l2g[volumes[:, 0]].reshape(-1, 1)
        g_n1 = node_l2g[volumes[:, 1]].reshape(-1, 1)
        g_n2 = node_l2g[volumes[:, 2]].reshape(-1, 1)

        #print g_n0.shape
        l_volumes = num.hstack((g_n0, g_n1, g_n2))

        #assert num.allclose(l_volumes, l_old_volumes)

        # Just pick out the full triangles
        ftri_ids = num.where(tri_full_flag > 0)
        ftri_l2g = num.compress(tri_full_flag, tri_l2g)

        #f_ids = num.argwhere(tri_full_flag==1).reshape(-1,)
        #f_gids = tri_l2g[f_ids]

        #print l_volumes
        #print tri_full_flag
        #print tri_l2g
        #print ftri_l2g

        f_volumes0 = num.compress(tri_full_flag, volumes[:, 0])
        f_volumes1 = num.compress(tri_full_flag, volumes[:, 1])
        f_volumes2 = num.compress(tri_full_flag, volumes[:, 2])

        g_volumes[ftri_l2g, 0] = node_l2g[f_volumes0]
        g_volumes[ftri_l2g, 1] = node_l2g[f_volumes1]
        g_volumes[ftri_l2g, 2] = node_l2g[f_volumes2]

        #fg_volumes = num.compress(tri_full_flag,l_volumes,axis=0)
        #g_volumes[ftri_l2g] = fg_volumes

        #g_x[node_l2g] = fid.variables['x']
        #g_y[node_l2g] = fid.variables['y']

        g_points[node_l2g, 0] = fid.variables['x'][:]
        g_points[node_l2g, 1] = fid.variables['y'][:]

        #print number_of_timesteps

        # FIXME SR: It seems that some of the "ghost" node quantity values
        # are being storded. We should only store those nodes which are associated with
        # full triangles. So we need an index array of "full" nodes, ie those in
        # full triangles

        #use numpy.compress and numpy.unique to get "full nodes

        f_volumes = num.compress(tri_full_flag, volumes, axis=0)
        fl_nodes = num.unique(f_volumes)
        f_node_l2g = node_l2g[fl_nodes]

        #print len(node_l2g)
        #print len(fl_nodes)

        # Read in static quantities
        for quantity in static_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #             num.array(fid.variables[quantity],dtype=num.float32)
            q = fid.variables[quantity]
            #print quantity, q.shape
            out_s_quantities[quantity][f_node_l2g] = \
                         num.array(q[:],dtype=num.float32)[fl_nodes]

        #Collate all dynamic quantities according to their timestep
        for quantity in dynamic_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                #out_d_quantities[quantity][i][node_l2g] = \
                #           num.array(q[i],dtype=num.float32)
                out_d_quantities[quantity][i][f_node_l2g] = \
                           num.array(q[i],dtype=num.float32)[fl_nodes]

        # Read in static c quantities
        for quantity in static_c_quantities:
            #out_s_quantities[quantity][node_l2g] = \
            #             num.array(fid.variables[quantity],dtype=num.float32)
            q = fid.variables[quantity]
            out_s_c_quantities[quantity][ftri_l2g] = \
                         num.array(q).astype(num.float32)[ftri_ids]

        #Collate all dynamic c quantities according to their timestep
        for quantity in dynamic_c_quantities:
            q = fid.variables[quantity]
            #print q.shape
            for i in range(n_steps):
                out_d_c_quantities[quantity][i][ftri_l2g] = \
                           num.array(q[i]).astype(num.float32)[ftri_ids]

        fid.close()

    #---------------------------
    # Write out the SWW file
    #---------------------------
    #print g_points.shape

    #print number_of_global_triangles
    #print number_of_global_nodes

    if verbose:
        print 'Writing file ', output, ':'
    fido = NetCDFFile(output, netcdf_mode_w)

    sww = Write_sww(static_quantities, dynamic_quantities, static_c_quantities,
                    dynamic_c_quantities)
    sww.store_header(fido,
                     starttime,
                     number_of_global_triangles,
                     number_of_global_nodes,
                     description=description,
                     sww_precision=netcdf_float32)

    from anuga.coordinate_transforms.geo_reference import Geo_reference
    geo_reference = Geo_reference()

    sww.store_triangulation(fido,
                            g_points,
                            g_volumes,
                            points_georeference=geo_reference)

    fido.order = order
    fido.xllcorner = xllcorner
    fido.yllcorner = yllcorner
    fido.zone = zone
    fido.false_easting = false_easting
    fido.false_northing = false_northing
    fido.datum = datum
    fido.projection = projection

    sww.store_static_quantities(fido, verbose=verbose, **out_s_quantities)
    sww.store_static_quantities_centroid(fido,
                                         verbose=verbose,
                                         **out_s_c_quantities)

    # Write out all the dynamic quantities for each timestep

    for i in range(n_steps):
        fido.variables['time'][i] = times[i]

    for q in dynamic_quantities:
        q_values = out_d_quantities[q]
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

        # This updates the _range values
        q_range = fido.variables[q + Write_sww.RANGE][:]
        q_values_min = num.min(q_values)
        if q_values_min < q_range[0]:
            fido.variables[q + Write_sww.RANGE][0] = q_values_min
        q_values_max = num.max(q_values)
        if q_values_max > q_range[1]:
            fido.variables[q + Write_sww.RANGE][1] = q_values_max

    for q in dynamic_c_quantities:
        q_values = out_d_c_quantities[q]
        for i in range(n_steps):
            fido.variables[q][i] = q_values[i]

    #print out_s_quantities
    #print out_d_quantities

    #print g_x
    #print g_y

    #print g_volumes

    fido.close()

    if delete_old:
        import os
        for filename in swwfiles:

            if verbose:
                print 'Deleting file ', filename, ':'
            os.remove(filename)
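In smooth (shared-vertex) storage the code keeps only nodes that belong to at least one full triangle, via num.compress and num.unique. The same trick isolated with toy data:

import numpy as num

volumes = num.array([[0, 1, 2],
                     [2, 3, 4],
                     [4, 5, 0]])
tri_full_flag = num.array([1, 0, 1])    # the middle triangle is a ghost

f_volumes = num.compress(tri_full_flag, volumes, axis=0)    # rows 0 and 2
fl_nodes = num.unique(f_volumes)                            # local 'full' nodes
print(fl_nodes)    # [0 1 2 4 5] -- node 3 belonged only to the ghost triangle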
Example no. 14
def _convert_dem_from_ascii2netcdf(name_in, name_out=None, verbose=False):
    """Read Digital Elevation model from the following ASCII format (.asc)

    Internal function. See public function convert_dem_from_ascii2netcdf
    for details.
    """

    import os
    from anuga.file.netcdf import NetCDFFile

    root = name_in[:-4]

    # Read Meta data
    if verbose: log.critical('Reading METADATA from %s' % (root + '.prj'))

    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()                   #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()                        #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'     #IGNORE
    zunits = L[1].strip()                       #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()                        #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'   #IGNORE
    spheroid = L[1].strip()                     #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    if name_in[-4:] != '.asc':
        raise IOError('Input file %s should be of type .asc.' % name_in)

    #Read DEM data
    datafile = open(name_in)

    if verbose: log.critical('Reading DEM from %s' % (name_in))

    lines = datafile.readlines()
    datafile.close()

    if verbose: log.critical('Got %d lines' % len(lines))

    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())

    # Do cellsize (line 4) before line 2 and 3
    cellsize = float(lines[4].split()[1].strip())

    # Checks suggested by Joaquim Luis
    # Our internal representation of xllcorner
    # and yllcorner is non-standard.
    xref = lines[2].split()
    if xref[0].strip() == 'xllcorner':
        xllcorner = float(xref[1].strip()) # + 0.5*cellsize # Correct offset
    elif xref[0].strip() == 'xllcenter':
        xllcorner = float(xref[1].strip())
    else:
        msg = 'Unknown keyword: %s' % xref[0].strip()
        raise Exception, msg

    yref = lines[3].split()
    if yref[0].strip() == 'yllcorner':
        yllcorner = float(yref[1].strip()) # + 0.5*cellsize # Correct offset
    elif yref[0].strip() == 'yllcenter':
        yllcorner = float(yref[1].strip())
    else:
        msg = 'Unknown keyword: %s' % yref[0].strip()
        raise Exception, msg

    NODATA_value = int(float(lines[5].split()[1].strip()))

    assert len(lines) == nrows + 6

    if name_out is None:
        netcdfname = name_in[:-4]+'.dem'
    else:
        netcdfname = name_out + '.dem'

    if verbose: log.critical('Store to NetCDF file %s' % netcdfname)

    # NetCDF file definition
    fid = NetCDFFile(netcdfname, netcdf_mode_w)

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' \
                      'of spatial point data'

    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units

    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable('elevation', netcdf_float, ('number_of_rows',
                                                   'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables['elevation']

    #Store data
    import numpy

    datafile = open(name_in)
    elevation[:,:] = numpy.loadtxt(datafile, skiprows=6)
    datafile.close()

#    n = len(lines[6:])
#    for i, line in enumerate(lines[6:]):
#        fields = line.split()
#        if verbose and i % ((n+10)/10) == 0:
#            log.critical('Processing row %d of %d' % (i, nrows))
#
#        if len(fields) != ncols:
#            msg = 'Wrong number of columns in file "%s" line %d\n' % (name_in, i)
#            msg += 'I got %d elements, but there should have been %d\n' % (len(fields), ncols)
#            raise Exception, msg
#
#        elevation[i, :] = num.array([float(x) for x in fields])

    fid.close()
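For reference, the companion .prj metadata file parsed at the top of this function carries one keyword/value pair per line, in exactly this order. The keywords are asserted by the parser; the values below are illustrative only:

Projection    UTM
Zone          56
Datum         WGS84
Zunits        NO
Units         METERS
Spheroid      WGS84
Xshift        500000
Yshift        10000000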
Example no. 15
def _dem2pts(name_in,
             name_out=None,
             verbose=False,
             easting_min=None,
             easting_max=None,
             northing_min=None,
             northing_max=None):
    """Read Digitial Elevation model from the following NetCDF format (.dem)

    Internal function. See public function dem2pts for details.
    """

    # FIXME: Can this be written feasibly using write_pts?

    import os
    from anuga.file.netcdf import NetCDFFile

    root = name_in[:-4]

    if name_in[-4:] == '.asc':
        intermediate = root + '.dem'
        if verbose:
            log.critical('Preconvert %s from asc to %s' % \
                                    (name_in, intermediate))
        asc2dem(name_in)
        name_in = intermediate
    elif name_in[-4:] != '.dem':
        raise IOError('Input file %s should be of type .asc or .dem.' %
                      name_in)

    if name_out is not None and name_out[-4:] != '.pts':
        raise IOError('Output file %s should be of type .pts.' % name_out)

    # Get NetCDF
    infile = NetCDFFile(name_in, netcdf_mode_r)

    if verbose: log.critical('Reading DEM from %s' % (name_in))

    ncols = int(infile.ncols)
    nrows = int(infile.nrows)
    xllcorner = float(infile.xllcorner)  # Easting of lower left corner
    yllcorner = float(infile.yllcorner)  # Northing of lower left corner
    cellsize = float(infile.cellsize)
    NODATA_value = float(infile.NODATA_value)

    dem_elevation = infile.variables['elevation']

    zone = int(infile.zone)
    false_easting = float(infile.false_easting)
    false_northing = float(infile.false_northing)

    #print ncols, nrows, xllcorner,yllcorner, cellsize, NODATA_value, zone

    # Text strings
    projection = infile.projection
    datum = infile.datum
    units = infile.units

    #print projection, datum, units

    # Get output file
    if name_out is None:
        ptsname = root + '.pts'
    else:
        ptsname = name_out

    if verbose: log.critical('Store to NetCDF file %s' % ptsname)

    # NetCDF file definition
    outfile = NetCDFFile(ptsname, netcdf_mode_w)

    # Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'NetCDF pts format for compact and portable ' \
                          'storage of spatial point data'

    # Assign default values
    if easting_min is None: easting_min = xllcorner
    if easting_max is None: easting_max = xllcorner + ncols * cellsize
    if northing_min is None: northing_min = yllcorner
    if northing_max is None: northing_max = yllcorner + nrows * cellsize

    #print easting_min, easting_max, northing_min, northing_max

    # Compute offsets to update georeferencing
    easting_offset = xllcorner - easting_min
    northing_offset = yllcorner - northing_min

    # Georeferencing
    outfile.zone = zone
    outfile.xllcorner = easting_min  # Easting of lower left corner
    outfile.yllcorner = northing_min  # Northing of lower left corner
    outfile.false_easting = false_easting
    outfile.false_northing = false_northing

    outfile.projection = projection
    outfile.datum = datum
    outfile.units = units

    # Grid info (FIXME: probably not going to be used, but heck)
    outfile.ncols = ncols
    outfile.nrows = nrows

    #dem_elevation_r = num.reshape(dem_elevation, (nrows, ncols))
    totalnopoints = nrows * ncols

    #    #=======================================================================
    #    # Calculating number of NODATA_values for each row in clipped region
    #    # FIXME: use array operations to do faster
    #    nn = 0
    #    k = 0
    #    i1_0 = 0
    #    j1_0 = 0
    #    thisj = 0
    #    thisi = 0
    #    for i in range(nrows):
    #        y = (nrows-i-1)*cellsize + yllcorner
    #        for j in range(ncols):
    #            x = j*cellsize + xllcorner
    #            if easting_min <= x <= easting_max \
    #               and northing_min <= y <= northing_max:
    #                thisj = j
    #                thisi = i
    #                if dem_elevation_r[i,j] == NODATA_value:
    #                    nn += 1
    #
    #                if k == 0:
    #                    i1_0 = i
    #                    j1_0 = j
    #
    #                k += 1
    #
    #    index1 = j1_0
    #    index2 = thisj
    #
    #    # Dimension definitions
    #    nrows_in_bounding_box = int(round((northing_max-northing_min)/cellsize))
    #    ncols_in_bounding_box = int(round((easting_max-easting_min)/cellsize))
    #
    #    clippednopoints = (thisi+1-i1_0)*(thisj+1-j1_0)
    #    nopoints = clippednopoints-nn
    #
    #    clipped_dem_elev = dem_elevation_r[i1_0:thisi+1,j1_0:thisj+1]
    #
    #    if verbose:
    #        log.critical('There are %d values in the elevation' % totalnopoints)
    #        log.critical('There are %d values in the clipped elevation'
    #                     % clippednopoints)
    #        log.critical('There are %d NODATA_values in the clipped elevation' % nn)
    #
    #    outfile.createDimension('number_of_points', nopoints)
    #    outfile.createDimension('number_of_dimensions', 2) #This is 2d data
    #
    #    # Variable definitions
    #    outfile.createVariable('points', netcdf_float, ('number_of_points',
    #                                                    'number_of_dimensions'))
    #    outfile.createVariable('elevation', netcdf_float, ('number_of_points',))
    #
    #    # Get handles to the variables
    #    points = outfile.variables['points']
    #    elevation = outfile.variables['elevation']
    #
    #    # Number of points
    #    N = points.shape[0]
    #
    #    lenv = index2-index1+1
    #
    #    # Store data
    #    global_index = 0
    #    # for i in range(nrows):
    #    for i in range(i1_0, thisi+1, 1):
    #        if verbose and i % ((nrows+10)/10) == 0:
    #            log.critical('Processing row %d of %d' % (i, nrows))
    #
    #        lower_index = global_index
    #
    #        v = dem_elevation_r[i,index1:index2+1]
    #        no_NODATA = num.sum(v == NODATA_value)
    #        if no_NODATA > 0:
    #            newcols = lenv - no_NODATA  # ncols_in_bounding_box - no_NODATA
    #        else:
    #            newcols = lenv              # ncols_in_bounding_box
    #
    #        telev = num.zeros(newcols, num.float)
    #        tpoints = num.zeros((newcols, 2), num.float)
    #
    #        local_index = 0
    #
    #        y = (nrows-i-1)*cellsize + yllcorner
    #        #for j in range(ncols):
    #        for j in range(j1_0,index2+1,1):
    #            x = j*cellsize + xllcorner
    #            if easting_min <= x <= easting_max \
    #               and northing_min <= y <= northing_max \
    #               and dem_elevation_r[i,j] != NODATA_value:
    #
    #                #print [x-easting_min, y-northing_min]
    #                #print x , y
    #                #print easting_min, northing_min
    #                #print xllcorner, yllcorner
    #                #print cellsize
    #
    #                tpoints[local_index, :] = [x-easting_min, y-northing_min]
    #                telev[local_index] = dem_elevation_r[i, j]
    #                global_index += 1
    #                local_index += 1
    #
    #        upper_index = global_index
    #
    #        if upper_index == lower_index + newcols:
    #
    #            # Seems to be an error with the windows version of
    #            # Netcdf. The following gave errors
    #
    #            try:
    #                points[lower_index:upper_index, :] = tpoints
    #                elevation[lower_index:upper_index] = telev
    #            except:
    #                # so used the following if an error occurs
    #                for index in range(newcols):
    #                    points[index+lower_index, :] = tpoints[index,:]
    #                    elevation[index+lower_index] = telev[index]
    #
    #    assert global_index == nopoints, 'index not equal to number of points'

    #========================================
    # Do the preceding with numpy
    #========================================
    y = num.arange(nrows, dtype=num.float)
    y = yllcorner + (nrows - 1) * cellsize - y * cellsize

    x = num.arange(ncols, dtype=num.float)
    x = xllcorner + x * cellsize

    xx, yy = num.meshgrid(x, y)

    xx = xx.flatten()
    yy = yy.flatten()

    flag = num.logical_and(
        num.logical_and((xx <= easting_max), (xx >= easting_min)),
        num.logical_and((yy <= northing_max), (yy >= northing_min)))

    dem = dem_elevation[:].flatten()

    idx = num.where(flag)[0]

    xx = xx[idx]
    yy = yy[idx]
    dem = dem[idx]

    clippednopoints = len(dem)
    #print clippedpoints

    #print xx
    #print yy
    #print dem

    data_flag = dem != NODATA_value

    data_id = num.where(data_flag)

    xx = xx[data_id]
    yy = yy[data_id]
    dem = dem[data_id]

    nn = clippednopoints - len(dem)

    nopoints = len(dem)

    if verbose:
        log.critical('There are %d values in the elevation' % totalnopoints)
        log.critical('There are %d values in the clipped elevation' %
                     clippednopoints)
        log.critical('There are %d NODATA_values in the clipped elevation' %
                     nn)

    outfile.createDimension('number_of_points', nopoints)
    outfile.createDimension('number_of_dimensions', 2)  #This is 2d data

    # Variable definitions
    outfile.createVariable('points', netcdf_float,
                           ('number_of_points', 'number_of_dimensions'))
    outfile.createVariable('elevation', netcdf_float, ('number_of_points', ))

    # Get handles to the variables
    points = outfile.variables['points']
    elevation = outfile.variables['elevation']

    points[:, 0] = xx - easting_min
    points[:, 1] = yy - northing_min
    elevation[:] = dem

    infile.close()
    outfile.close()
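The bounding-box clip used above reduces to a combined logical_and over the meshgrid coordinates. A toy run with made-up extents:

import numpy as num

xx = num.array([0.0, 10.0, 20.0, 30.0])
yy = num.array([5.0, 5.0, 5.0, 5.0])
easting_min, easting_max = 5.0, 25.0
northing_min, northing_max = 0.0, 10.0

flag = num.logical_and(num.logical_and(xx <= easting_max, xx >= easting_min),
                       num.logical_and(yy <= northing_max, yy >= northing_min))
idx = num.where(flag)[0]
print(xx[idx])    # [10. 20.] -- only points inside the box survive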
Example no. 16
def _generic_convert_dem_from_ascii2netcdf(name_in,
                                           name_out=None,
                                           quantity_name=None,
                                           verbose=False):
    """Read raster from the following ASCII format (.asc)

    Internal function. See public function convert_dem_from_ascii2netcdf
    for details.
    """

    import os
    from anuga.file.netcdf import NetCDFFile

    root = name_in[:-4]

    # Read Meta data
    if verbose: log.critical('Reading METADATA from %s' % (root + '.prj'))

    metadatafile = open(root + '.prj')
    metalines = metadatafile.readlines()
    metadatafile.close()

    L = metalines[0].strip().split()
    assert L[0].strip().lower() == 'projection'
    projection = L[1].strip()  #TEXT

    L = metalines[1].strip().split()
    assert L[0].strip().lower() == 'zone'
    zone = int(L[1].strip())

    L = metalines[2].strip().split()
    assert L[0].strip().lower() == 'datum'
    datum = L[1].strip()  #TEXT

    L = metalines[3].strip().split()
    assert L[0].strip().lower() == 'zunits'  #IGNORE
    zunits = L[1].strip()  #TEXT

    L = metalines[4].strip().split()
    assert L[0].strip().lower() == 'units'
    units = L[1].strip()  #TEXT

    L = metalines[5].strip().split()
    assert L[0].strip().lower() == 'spheroid'  #IGNORE
    spheroid = L[1].strip()  #TEXT

    L = metalines[6].strip().split()
    assert L[0].strip().lower() == 'xshift'
    false_easting = float(L[1].strip())

    L = metalines[7].strip().split()
    assert L[0].strip().lower() == 'yshift'
    false_northing = float(L[1].strip())

    if name_in[-4:] != '.asc':
        raise IOError('Input file %s should be of type .asc.' % name_in)

    #Read DEM data
    datafile = open(name_in)

    if verbose: log.critical('Reading raster from %s' % (name_in))

    lines = datafile.readlines()
    datafile.close()

    if verbose: log.critical('Got %d lines' % len(lines))

    ncols = int(lines[0].split()[1].strip())
    nrows = int(lines[1].split()[1].strip())

    # Do cellsize (line 4) before line 2 and 3
    cellsize = float(lines[4].split()[1].strip())

    # Checks suggested by Joaquim Luis
    # Our internal representation of xllcorner
    # and yllcorner is non-standard.
    xref = lines[2].split()
    if xref[0].strip() == 'xllcorner':
        xllcorner = float(xref[1].strip())  # + 0.5*cellsize # Correct offset
    elif xref[0].strip() == 'xllcenter':
        xllcorner = float(xref[1].strip())
    else:
        msg = 'Unknown keyword: %s' % xref[0].strip()
        raise Exception, msg

    yref = lines[3].split()
    if yref[0].strip() == 'yllcorner':
        yllcorner = float(yref[1].strip())  # + 0.5*cellsize # Correct offset
    elif yref[0].strip() == 'yllcenter':
        yllcorner = float(yref[1].strip())
    else:
        msg = 'Unknown keyword: %s' % yref[0].strip()
        raise Exception, msg

    NODATA_value = int(float(lines[5].split()[1].strip()))

    assert len(lines) == nrows + 6

    if name_out is None:
        netcdfname = name_in[:-4] + '.dem'
    else:
        netcdfname = name_out + '.dem'

    if verbose: log.critical('Store to NetCDF file %s' % netcdfname)

    # NetCDF file definition
    fid = NetCDFFile(netcdfname, netcdf_mode_w)

    #Create new file
    fid.institution = 'Geoscience Australia'
    fid.description = 'NetCDF DEM format for compact and portable storage ' \
                      'of spatial point data'

    fid.ncols = ncols
    fid.nrows = nrows
    fid.xllcorner = xllcorner
    fid.yllcorner = yllcorner
    fid.cellsize = cellsize
    fid.NODATA_value = NODATA_value

    fid.zone = zone
    fid.false_easting = false_easting
    fid.false_northing = false_northing
    fid.projection = projection
    fid.datum = datum
    fid.units = units

    # dimension definitions
    fid.createDimension('number_of_rows', nrows)
    fid.createDimension('number_of_columns', ncols)

    # variable definitions
    fid.createVariable(quantity_name, netcdf_float,
                       ('number_of_rows', 'number_of_columns'))

    # Get handles to the variables
    elevation = fid.variables[quantity_name]

    #Store data
    import numpy

    datafile = open(name_in)
    elevation[:, :] = numpy.loadtxt(datafile, skiprows=6)
    datafile.close()

    #    n = len(lines[6:])
    #    for i, line in enumerate(lines[6:]):
    #        fields = line.split()
    #        if verbose and i % ((n+10)/10) == 0:
    #            log.critical('Processing row %d of %d' % (i, nrows))
    #
    #        if len(fields) != ncols:
    #            msg = 'Wrong number of columns in file "%s" line %d\n' % (name_in, i)
    #            msg += 'I got %d elements, but there should have been %d\n' % (len(fields), ncols)
    #            raise Exception, msg
    #
    #        elevation[i, :] = num.array([float(x) for x in fields])

    fid.close()
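Since the quantity name is a parameter, the same converter can store rasters other than elevation. A hypothetical call (the file names are assumptions):

_generic_convert_dem_from_ascii2netcdf('friction.asc',
                                       name_out='friction',
                                       quantity_name='friction',
                                       verbose=True)
# -> writes 'friction.dem' with the data in a NetCDF variable named 'friction'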
Example no. 17
    def test_decimate_dem(self):
        """Test decimation of dem file
        """

        import os
        from anuga.file.netcdf import NetCDFFile

        # Write test dem file
        root = "decdemtest"

        filename = root + ".dem"
        fid = NetCDFFile(filename, netcdf_mode_w)

        fid.institution = "Geoscience Australia"
        fid.description = "NetCDF DEM format for compact and portable " + "storage of spatial point data"

        nrows = 15
        ncols = 18

        fid.ncols = ncols
        fid.nrows = nrows
        fid.xllcorner = 2000.5
        fid.yllcorner = 3000.5
        fid.cellsize = 25
        fid.NODATA_value = -9999

        fid.zone = 56
        fid.false_easting = 0.0
        fid.false_northing = 0.0
        fid.projection = "UTM"
        fid.datum = "WGS84"
        fid.units = "METERS"

        fid.createDimension("number_of_points", nrows * ncols)

        fid.createVariable("elevation", netcdf_float, ("number_of_points",))

        elevation = fid.variables["elevation"]

        elevation[:] = num.arange(nrows * ncols)

        fid.close()

        # generate the elevation values expected in the decimated file
        ref_elevation = [
            (0 + 1 + 2 + 18 + 19 + 20 + 36 + 37 + 38) / 9.0,
            (4 + 5 + 6 + 22 + 23 + 24 + 40 + 41 + 42) / 9.0,
            (8 + 9 + 10 + 26 + 27 + 28 + 44 + 45 + 46) / 9.0,
            (12 + 13 + 14 + 30 + 31 + 32 + 48 + 49 + 50) / 9.0,
            (72 + 73 + 74 + 90 + 91 + 92 + 108 + 109 + 110) / 9.0,
            (76 + 77 + 78 + 94 + 95 + 96 + 112 + 113 + 114) / 9.0,
            (80 + 81 + 82 + 98 + 99 + 100 + 116 + 117 + 118) / 9.0,
            (84 + 85 + 86 + 102 + 103 + 104 + 120 + 121 + 122) / 9.0,
            (144 + 145 + 146 + 162 + 163 + 164 + 180 + 181 + 182) / 9.0,
            (148 + 149 + 150 + 166 + 167 + 168 + 184 + 185 + 186) / 9.0,
            (152 + 153 + 154 + 170 + 171 + 172 + 188 + 189 + 190) / 9.0,
            (156 + 157 + 158 + 174 + 175 + 176 + 192 + 193 + 194) / 9.0,
            (216 + 217 + 218 + 234 + 235 + 236 + 252 + 253 + 254) / 9.0,
            (220 + 221 + 222 + 238 + 239 + 240 + 256 + 257 + 258) / 9.0,
            (224 + 225 + 226 + 242 + 243 + 244 + 260 + 261 + 262) / 9.0,
            (228 + 229 + 230 + 246 + 247 + 248 + 264 + 265 + 266) / 9.0,
        ]

        # generate a stencil for computing the decimated values
        stencil = num.ones((3, 3), num.float) / 9.0

        dem2dem(filename, stencil=stencil, cellsize_new=100)

        # Open decimated NetCDF file
        fid = NetCDFFile(root + "_100.dem", netcdf_mode_r)

        # Get decimated elevation
        elevation = fid.variables["elevation"]

        # Check values
        assert num.allclose(elevation, ref_elevation)

        # Cleanup
        fid.close()

        os.remove(root + ".dem")
        os.remove(root + "_100.dem")