def test_ferret2sww_nz_origin(self):
        from anuga.coordinate_transforms.redfearn import redfearn

        #Call conversion (with nonzero origin)
        ferret2sww(self.test_MOST_file, verbose=self.verbose,
                   origin = (56, 100000, 200000))


        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(-34.5, 150.66667)

        #Read output file 'small.sww'
        #fid = NetCDFFile('small.sww', netcdf_mode_r)
        fid = NetCDFFile(self.test_MOST_file + '.sww')

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]

        #Check that first coordinate is correctly represented
        assert num.allclose(x[0], e-100000)
        assert num.allclose(y[0], n-200000)

        fid.close()

        #Cleanup
        os.remove(self.test_MOST_file + '.sww')
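# A minimal standalone sketch (not part of the test suite) of the redfearn
# call the tests above rely on: it converts latitude/longitude in decimal
# degrees (GDA94) to a (UTM zone, easting, northing) triple. With
# origin=(56, 100000, 200000) as above, the stored x/y values are the UTM
# easting/northing minus the origin offsets.
from anuga.coordinate_transforms.redfearn import redfearn

zone, easting, northing = redfearn(-34.5, 150.66667)
print(zone, easting - 100000, northing - 200000)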
Example n. 2
    def test_ferret2sww_2(self):
        """Test that georeferencing etc works when converting from
        ferret format (lat/lon) to sww format (UTM)
        """

        #The test file has
        # LON = 150.66667, 150.83334, 151, 151.16667
        # LAT = -34.5, -34.33333, -34.16667, -34 ;
        # TIME = 0, 0.1, 0.6, 1.1, 1.6, 2.1 ;
        #
        # First value (index=0) in small_ha.nc is 0.3400644 cm,
        # Fourth value (index==3) is -6.50198 cm

        from anuga.coordinate_transforms.redfearn import redfearn
        #fid = NetCDFFile('small_ha.nc')
        fid = NetCDFFile(self.test_MOST_file + '_ha.nc')

        #Pick a coordinate and a value

        time_index = 1
        lat_index = 0
        lon_index = 2

        test_value = fid.variables['HA'][:][time_index, lat_index, lon_index]
        test_time = fid.variables['TIME'][:][time_index]
        test_lat = fid.variables['LAT'][:][lat_index]
        test_lon = fid.variables['LON'][:][lon_index]

        linear_point_index = lat_index * 4 + lon_index
        fid.close()

        #Call conversion (with zero origin)
        ferret2sww(self.test_MOST_file,
                   verbose=self.verbose,
                   origin=(56, 0, 0))

        #Work out the UTM coordinates for test point
        zone, e, n = redfearn(test_lat, test_lon)

        #Read output file 'small.sww'
        fid = NetCDFFile(self.test_MOST_file + '.sww')

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]

        #Check that test coordinate is correctly represented
        assert num.allclose(x[linear_point_index], e)
        assert num.allclose(y[linear_point_index], n)

        #Check test value
        stage = fid.variables['stage'][:]

        assert num.allclose(stage[time_index, linear_point_index],
                            old_div(test_value, 100))

        fid.close()

        #Cleanup
        import os
        os.remove(self.test_MOST_file + '.sww')
Example n. 5
    def test_ferret2sww_lat_longII(self):
        # Test that min lat long works

        #The test file has
        # LON = 150.66667, 150.83334, 151, 151.16667
        # LAT = -34.5, -34.33333, -34.16667, -34 ;

        #Read
        from anuga.coordinate_transforms.redfearn import redfearn
        fid = NetCDFFile(self.test_MOST_file + '_ha.nc')
        first_value = fid.variables['HA'][:][0, 0, 0]
        fourth_value = fid.variables['HA'][:][0, 0, 3]
        fid.close()

        #Call conversion (with zero origin)
        #ferret2sww('small', verbose=False,
        #           origin = (56, 0, 0))
        ferret2sww(self.test_MOST_file,
                   verbose=False,
                   origin=(56, 0, 0),
                   minlat=-34.4,
                   maxlat=-34.2)

        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(-34.5, 150.66667)
        #print zone, e, n

        #Read output file 'small.sww'
        #fid = NetCDFFile('small.sww')
        fid = NetCDFFile(self.test_MOST_file + '.sww')

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        #Check that the lat clipping (minlat=-34.4, maxlat=-34.2) retains
        #3 of the 4 latitude rows, giving 3 x 4 longitudes == 12 points
        assert 12 == len(x)

        fid.close()

        #Cleanup
        import os
        os.remove(self.test_MOST_file + '.sww')
Example n. 6
def keep_point(lat, long, seg, max_distance):
    """
    seg is two points, UTM
    """

    from math import sqrt

    _, x0, y0 = redfearn(lat, long)
    x1 = seg[0][0]
    y1 = seg[0][1]
    x2 = seg[1][0]
    y2 = seg[1][1]
    x2_1 = x2 - x1
    y2_1 = y2 - y1

    # Squared length of the segment (renamed from num to avoid shadowing
    # the numpy alias used elsewhere in this module)
    length_sq = (x2_1) * (x2_1) + (y2_1) * (y2_1)
    if length_sq == 0:
        # Degenerate segment: both endpoints coincide, so keep the point
        return True
    else:
        # Perpendicular distance from (x0, y0) to the line through seg;
        # note the division by the segment length, not its square
        d = old_div(abs((x2_1) * (y1 - y0) - (x1 - x0) * (y2_1)),
                    sqrt(length_sq))
        return d <= max_distance
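# A self-contained sketch of the perpendicular point-to-line distance test
# that keep_point applies, assuming the UTM coordinates are already known
# (the coordinates below are hypothetical; no redfearn call is needed here).
from math import sqrt

def point_to_line_distance(x0, y0, seg):
    """Distance from (x0, y0) to the infinite line through segment seg."""
    (x1, y1), (x2, y2) = seg
    dx = x2 - x1
    dy = y2 - y1
    length_sq = dx * dx + dy * dy
    if length_sq == 0:
        # Degenerate segment: fall back to point-to-point distance
        return sqrt((x1 - x0) ** 2 + (y1 - y0) ** 2)
    return abs(dx * (y1 - y0) - (x1 - x0) * dy) / sqrt(length_sq)

# A point 3 m off a 100 m east-west segment lies exactly 3 m from the line
assert abs(point_to_line_distance(50.0, 3.0, [(0.0, 0.0), (100.0, 0.0)]) - 3.0) < 1e-9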
def ferret2sww(
        basename_in,
        name_out=None,
        verbose=False,
        minlat=None,
        maxlat=None,
        minlon=None,
        maxlon=None,
        mint=None,
        maxt=None,
        mean_stage=0,
        origin=None,
        zscale=1,
        fail_on_NaN=True,
        NaN_filler=0,
        elevation=None,
        inverted_bathymetry=True):
    # FIXME: Bathymetry should be obtained from MOST somehow.
    # Alternatively from elsewhere or, as a last resort, specified here.
    # The value of -100 will work for the Wollongong tsunami scenario
    # but is very hacky.
    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    min's and max's: If omitted - full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)

    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
    which means that longitude is the fastest
    varying dimension (row major order, so to speak)

    ferret2sww uses grid points as vertices in a triangular grid
    counting vertices from lower left corner upwards, then right
    """

    from anuga.file.netcdf import NetCDFFile

    _assert_lat_long(minlat, maxlat, minlon, maxlon)

    if name_out is not None and name_out[-4:] != '.sww':
        raise IOError('Output file %s should be of type .sww.' % name_out)

    # Get NetCDF data
    if verbose: log.critical('Reading files %s_*.nc' % basename_in)

    # Wave amplitude (cm)
    file_h = NetCDFFile(basename_in + '_ha.nc', netcdf_mode_r)

    # Velocity (x) (cm/s)
    file_u = NetCDFFile(basename_in + '_ua.nc', netcdf_mode_r)

    # Velocity (y) (cm/s)
    file_v = NetCDFFile(basename_in + '_va.nc', netcdf_mode_r)

    # Elevation (z) (m)
    file_e = NetCDFFile(basename_in + '_e.nc', netcdf_mode_r)

    if name_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = name_out

    # Get dimensions of file_h
    for dimension in list(file_h.dimensions.keys()):
        if dimension[:3] == 'LON':
            dim_h_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_h_latitude = dimension
        if dimension[:4] == 'TIME':
            dim_h_time = dimension

    times = file_h.variables[dim_h_time]
    latitudes = file_h.variables[dim_h_latitude]
    longitudes = file_h.variables[dim_h_longitude]

    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:], longitudes[:],
                                                 minlat, maxlat, minlon,
                                                 maxlon)
    # get dimensions for file_e
    for dimension in list(file_e.dimensions.keys()):
        if dimension[:3] == 'LON':
            dim_e_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_e_latitude = dimension

    # get dimensions for file_u
    for dimension in list(file_u.dimensions.keys()):
        if dimension[:3] == 'LON':
            dim_u_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_u_latitude = dimension

    # get dimensions for file_v
    for dimension in list(file_v.dimensions.keys()):
        if dimension[:3] == 'LON':
            dim_v_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_v_latitude = dimension

    # Precision used by MOST for lat/lon is 4 or 5 decimals
    e_lat = num.around(file_e.variables[dim_e_latitude][:], 5)
    e_lon = num.around(file_e.variables[dim_e_longitude][:], 5)

    # Check that files are compatible
    assert num.allclose(latitudes, file_u.variables[dim_u_latitude])
    assert num.allclose(latitudes, file_v.variables[dim_v_latitude])
    assert num.allclose(latitudes, e_lat)

    assert num.allclose(longitudes, file_u.variables[dim_u_longitude])
    assert num.allclose(longitudes, file_v.variables[dim_v_longitude])
    assert num.allclose(longitudes, e_lon)

    if mint is None:
        jmin = 0
        mint = times[0]
    else:
        jmin = num.searchsorted(times, mint)

        # numpy.int32 didn't work in slicing of amplitude below
        jmin = int(jmin)

    if maxt is None:
        jmax = len(times)
        maxt = times[-1]
    else:
        jmax = num.searchsorted(times, maxt)

        # numpy.int32 didn't work in slicing of amplitude below
        jmax = int(jmax)

    times = times[jmin:jmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]

    if verbose: log.critical('cropping')

    zname = 'ELEVATION'

    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax]  #Lon
    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax]  #Lat
    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]

    # Get missing values
    nan_ha = file_h.variables['HA'].missing_value
    nan_ua = file_u.variables['UA'].missing_value
    nan_va = file_v.variables['VA'].missing_value
    if hasattr(file_e.variables[zname], 'missing_value'):
        nan_e = file_e.variables[zname].missing_value
    else:
        nan_e = None

    # Cleanup
    missing = (amplitudes == nan_ha)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_ha.nc')
            raise_(DataMissingValuesError, msg)
        else:
            amplitudes = amplitudes * (missing == 0) + missing * NaN_filler

    missing = (uspeed == nan_ua)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_ua.nc')
            raise_(DataMissingValuesError, msg)
        else:
            uspeed = uspeed * (missing == 0) + missing * NaN_filler

    missing = (vspeed == nan_va)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_va.nc')
            raise_(DataMissingValuesError, msg)
        else:
            vspeed = vspeed * (missing == 0) + missing * NaN_filler

    missing = (elevations == nan_e)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_e.nc')
            raise_(DataMissingValuesError, msg)
        else:
            elevations = elevations * (missing == 0) + missing * NaN_filler

    number_of_times = times.shape[0]
    number_of_latitudes = latitudes.shape[0]
    number_of_longitudes = longitudes.shape[0]

    assert amplitudes.shape[0] == number_of_times
    assert amplitudes.shape[1] == number_of_latitudes
    assert amplitudes.shape[2] == number_of_longitudes

    if verbose:
        _show_stats((latitudes, longitudes), times, amplitudes, \
                    (uspeed, vspeed), elevations)

    # print number_of_latitudes, number_of_longitudes
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    file_h.close()
    file_u.close()
    file_v.close()
    file_e.close()

    # NetCDF file definition
    outfile = NetCDFFile(swwname, netcdf_mode_w)

    description = 'Converted from Ferret files: %s, %s, %s, %s' \
                  % (basename_in + '_ha.nc',
                     basename_in + '_ua.nc',
                     basename_in + '_va.nc',
                     basename_in + '_e.nc')

    # Create new file
    starttime = times[0]

    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile,
                     times,
                     number_of_volumes,
                     number_of_points,
                     description=description,
                     verbose=verbose,
                     sww_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn
    x = num.zeros(number_of_points, num.float)  #Easting
    y = num.zeros(number_of_points, num.float)  #Northing

    if verbose:
        log.critical('Making triangular grid')

    # Check zone boundaries
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):  # Y direction
        for l, lon in enumerate(longitudes):  # X direction
            vertices[l, k] = i

            _, easting, northing = redfearn(lat, lon)

            #msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    #Construct 2 triangles per 'rectangular' element
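    # Each rectangular cell of the lat/lon grid is split along the
    # diagonal v2-v3:
    #
    #   v1 ---- v3        v1 = (l,   k+1)    v3 = (l+1, k+1)
    #    |    / |
    #    |   /  |
    #    |  /   |
    #   v2 ---- v4        v2 = (l,   k)      v4 = (l+1, k)
    #
    # giving an upper triangle (v1, v2, v3) and a lower triangle
    # (v4, v3, v2), both listed counterclockwise.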
    volumes = []
    for l in range(number_of_longitudes - 1):  # X direction
        for k in range(number_of_latitudes - 1):  # Y direction
            v1 = vertices[l, k + 1]
            v2 = vertices[l, k]
            v3 = vertices[l + 1, k + 1]
            v4 = vertices[l + 1, k]

            volumes.append([v1, v2, v3])  #Upper element
            volumes.append([v4, v3, v2])  #Lower element

    volumes = num.array(volumes, num.int)  #array default#

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    if elevation is not None:
        z = elevation
    else:
        if inverted_bathymetry:
            z = -1 * elevations
        else:
            z = elevations
    #FIXME: z should be obtained from MOST and passed in here

    #FIXME use the Write_sww instance(sww) to write this info
    z = num.resize(z, outfile.variables['elevation'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    #outfile.variables['z'][:] = z             #FIXME HACK for bacwards compat.
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(num.int32)  # For Opteron 64

    #Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose:
        log.critical('Converting quantities')

    n = len(times)
    for j in range(n):
        if verbose and j % (old_div((n + 10), 10)) == 0:
            log.critical('  Doing %d of %d' % (j, n))

        i = 0
        for k in range(number_of_latitudes):  # Y direction
            for l in range(number_of_longitudes):  # X direction
                w = old_div(zscale * amplitudes[j, k, l], 100) + mean_stage
                stage[j, i] = w
                h = w - z[i]
                xmomentum[j, i] = old_div(uspeed[j, k, l], 100) * h
                ymomentum[j, i] = old_div(vspeed[j, k, l], 100) * h
                i += 1

    #outfile.close()

    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        time_info = times, starttime, mint, maxt
        _show_sww_stats(outfile, swwname, geo_ref, time_info)

    outfile.close()
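# A hedged usage sketch of ferret2sww (file names are hypothetical). Given
# MOST output split into most_output_ha.nc, most_output_ua.nc,
# most_output_va.nc and most_output_e.nc, this writes most_output.sww,
# clipped to a lat/lon window and georeferenced to UTM zone 56 with zero
# easting/northing offset.
ferret2sww('most_output',
           verbose=True,
           minlat=-34.5, maxlat=-34.0,
           minlon=150.66, maxlon=151.17,
           origin=(56, 0, 0),
           mean_stage=0.0,
           inverted_bathymetry=True)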
Example n. 10
    def test_file_boundary_stsIV_sinewave_ordering(self):
        """test_file_boundary_stsIV_sinewave_ordering(self):
        Read correct points from ordering file and apply sts to boundary
        This one uses a sine wave and compares to time boundary
        """

        lat_long_points=[[6.01, 97.0], [6.02, 97.0], [6.05, 96.9], [6.0, 97.0]]
        bounding_polygon=[[6.0, 97.0], [6.01, 97.0], [6.02,97.0], \
                            [6.02,97.02], [6.00,97.02]]
        tide = 0.35
        time_step_count = 50
        time_step = 0.1
        times_ref = num.arange(0, time_step_count*time_step, time_step)
        
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)
        
        gauge_depth=20*num.ones(n,num.float)
        
        ha1=num.ones((n,time_step_count),num.float)
        ua1=3.*num.ones((n,time_step_count),num.float)
        va1=2.*num.ones((n,time_step_count),num.float)
        for i in range(n):
            ha1[i]=num.sin(times_ref)
        
        
        base_name, files = self.write_mux2(lat_long_points,
                                           time_step_count, time_step,
                                           first_tstep, last_tstep,
                                           depth=gauge_depth,
                                           ha=ha1,
                                           ua=ua1,
                                           va=va1)

        # Write order file
        file_handle, order_base_name = tempfile.mkstemp("")
        os.close(file_handle)
        os.remove(order_base_name)
        d=","
        order_file=order_base_name+'order.txt'
        fid=open(order_file,'w')
        
        # Write Header
        header='index, longitude, latitude\n'
        fid.write(header)
        indices=[3,0,1]
        for i in indices:
            line=str(i)+d+str(lat_long_points[i][1])+d+\
                str(lat_long_points[i][0])+"\n"
            fid.write(line)
        fid.close()
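        # The ordering file written above selects gauges 3, 0 and 1, in that
        # order, so (given lat_long_points) its contents read:
        #   index, longitude, latitude
        #   3,97.0,6.0
        #   0,97.0,6.01
        #   1,97.0,6.02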

        sts_file=base_name
        urs2sts(base_name, basename_out=sts_file,
                ordering_filename=order_file,
                mean_stage=tide,
                verbose=False)
        self.delete_mux(files)
        
        
        
        # Now read the sts file and check that values have been stored correctly.
        fid = NetCDFFile(sts_file + '.sts')

        # Check the time vector
        times = fid.variables['time'][:]
        
        #print times

        # Check sts quantities
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]       

        # Create beginnings of boundary polygon based on sts_boundary
        boundary_polygon = create_sts_boundary(base_name)
        
        os.remove(order_file)

        # Append the remaining part of the boundary polygon to be defined by
        # the user
        bounding_polygon_utm=[]
        for point in bounding_polygon:
            zone,easting,northing=redfearn(point[0],point[1])
            bounding_polygon_utm.append([easting,northing])

        boundary_polygon.append(bounding_polygon_utm[3])
        boundary_polygon.append(bounding_polygon_utm[4])

        #print 'boundary_polygon', boundary_polygon
        
        plot=False
        if plot:
            from pylab import plot,show,axis
            boundary_polygon=ensure_numeric(boundary_polygon)
            bounding_polygon_utm=ensure_numeric(bounding_polygon_utm)
            #plot(lat_long_points[:,0],lat_long_points[:,1],'o')
            plot(boundary_polygon[:,0], boundary_polygon[:,1])
            plot(bounding_polygon_utm[:,0],bounding_polygon_utm[:,1])
            show()

        assert num.allclose(bounding_polygon_utm,boundary_polygon)


        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        
        # have to change boundary tags from last example because now bounding
        # polygon starts in different place.
        create_mesh_from_regions(boundary_polygon,
                                 boundary_tags=boundary_tags,
                                 maximum_triangle_area=extent_res,
                                 filename=meshname,
                                 interior_regions=interior_regions,
                                 verbose=False)
        
        domain_fbound = Domain(meshname)
        domain_fbound.set_quantity('stage', tide)
        Bf = File_boundary(sts_file+'.sts', 
                           domain_fbound, 
                           boundary_polygon=boundary_polygon)
        Br = Reflective_boundary(domain_fbound)

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})
        finaltime=time_step*(time_step_count-1)
        yieldstep=time_step
        temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)
    
        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime, 
                                                   skip_initial_step=False)):
            temp_fbound[i]=domain_fbound.quantities['stage'].centroid_values[2]
    
        
        domain_time = Domain(meshname)
        domain_time.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_time)
        Bw = Time_boundary(domain=domain_time,
                           function=lambda t: [num.sin(t)+tide,
                                               3.*(20.+num.sin(t)+tide),
                                               2.*(20.+num.sin(t)+tide)])
        domain_time.set_boundary({'ocean': Bw,'otherocean': Br})
        
        temp_time=num.zeros(int(finaltime/yieldstep)+1,num.float)
        
        domain_time.set_starttime(domain_fbound.get_starttime())
        
        for i, t in enumerate(domain_time.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime, 
                                                   skip_initial_step=False)):
            temp_time[i]=domain_time.quantities['stage'].centroid_values[2]
        
        assert num.allclose(temp_fbound, temp_time)                
        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_time.quantities['stage'].vertex_values)
                        
        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_time.quantities['xmomentum'].vertex_values)                        
                        
        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_time.quantities['ymomentum'].vertex_values)                                                
        

        try:
            os.remove(sts_file+'.sts')
        except:
            # Windoze can't remove this file for some reason 
            pass
        
        os.remove(meshname)
    def sequential_time_varying_file_boundary_sts(self):
        """sequential_time_varying_file_boundary_sts(self):
        Read correct points from ordering file and apply sts to boundary.
        The boundary is time varying. FIXME add to test_urs2sts.
        """
        lat_long_points=[[6.01,97.0],[6.02,97.0],[6.05,96.9],[6.0,97.0]]
        bounding_polygon=[[6.0,97.0],[6.01,97.0],[6.02,97.0],
                          [6.02,97.02],[6.00,97.02]]
        tide = 3.0
        time_step_count = 65
        time_step = 2.
        n=len(lat_long_points)
        first_tstep=num.ones(n,num.int)
        last_tstep=(time_step_count)*num.ones(n,num.int)
        finaltime=num.float(time_step*(time_step_count-1))
        yieldstep=num.float(time_step)
        gauge_depth=20*num.ones(n,num.float)
        ha=2*num.ones((n,time_step_count),num.float)
        ua=10*num.ones((n,time_step_count),num.float)
        va=-10*num.ones((n,time_step_count),num.float)

        times=num.arange(0., num.float(time_step_count*time_step), time_step)
        for i in range(n):
            #ha[i]+=num.sin(times)
            ha[i]+=times/finaltime



        sts_file="test"
        if myid==0:
            base_name, files = self.write_mux2(lat_long_points,
                                               time_step_count,
                                               time_step,
                                               first_tstep,
                                               last_tstep,
                                               depth=gauge_depth,
                                               ha=ha,
                                               ua=ua,
                                               va=va)
            # base name will not exist, but 3 other files are created

            # Write order file
            file_handle, order_base_name = tempfile.mkstemp("")
            os.close(file_handle)
            os.remove(order_base_name)
            d=","
            order_file=order_base_name+'order.txt'
            fid=open(order_file,'w')
        
            # Write Header
            header='index, longitude, latitude\n'
            fid.write(header)
            indices=[3,0,1]
            for i in indices:
                line=str(i)+d+str(lat_long_points[i][1])+d+\
                    str(lat_long_points[i][0])+"\n"
                fid.write(line)
            fid.close()

            urs2sts(base_name,
                    basename_out=sts_file,
                    ordering_filename=order_file,
                    mean_stage=tide,
                    verbose=verbose)
            self.delete_mux(files)

            assert(os.access(sts_file+'.sts', os.F_OK))

            os.remove(order_file)

        barrier()
        boundary_polygon = create_sts_boundary(sts_file)

        # Append the remaining part of the boundary polygon to be defined by
        # the user
        bounding_polygon_utm=[]
        for point in bounding_polygon:
            zone,easting,northing=redfearn(point[0],point[1])
            bounding_polygon_utm.append([easting,northing])

        boundary_polygon.append(bounding_polygon_utm[3])
        boundary_polygon.append(bounding_polygon_utm[4])

        assert num.allclose(bounding_polygon_utm,boundary_polygon)


        extent_res=1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions=None
        boundary_tags={'ocean': [0,1], 'otherocean': [2,3,4]}
        
        # have to change boundary tags from last example because now bounding
        # polygon starts in different place.
        if myid==0:
            create_mesh_from_regions(boundary_polygon,
                                     boundary_tags=boundary_tags,
                                     maximum_triangle_area=extent_res,
                                     filename=meshname,
                                     interior_regions=interior_regions,
                                     verbose=verbose)

        barrier()
        
        domain_fbound = Domain(meshname)
        domain_fbound.set_quantities_to_be_stored(None)
        domain_fbound.set_quantity('stage', tide)
        if verbose: print "Creating file boundary condition"
        Bf = File_boundary(sts_file+'.sts',
                           domain_fbound,
                           boundary_polygon=boundary_polygon)
        Br = Reflective_boundary(domain_fbound)

        domain_fbound.set_boundary({'ocean': Bf,'otherocean': Br})

        temp_fbound=num.zeros(int(finaltime/yieldstep)+1,num.float)
        if verbose: print "Evolving domain with file boundary condition"
        for i, t in enumerate(domain_fbound.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime, 
                                                   skip_initial_step = False)):
            temp_fbound[i]=domain_fbound.quantities['stage'].centroid_values[2]
            if verbose: domain_fbound.write_time()
            
        
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantities_to_be_stored(None)
        domain_drchlt.set_starttime(time_step)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)
        #Bd = Dirichlet_boundary([2.0+tide,220+10*tide,-220-10*tide])
        Bd = Time_boundary(domain=domain_drchlt,
                           f=lambda t: [2.0+t/finaltime+tide,
                                        220.+10.*tide+10.*t/finaltime,
                                        -220.-10.*tide-10.*t/finaltime])
        #Bd = Time_boundary(domain=domain_drchlt,f=lambda t: [2.0+num.sin(t)+tide,10.*(2+20.+num.sin(t)+tide),-10.*(2+20.+num.sin(t)+tide)])
        domain_drchlt.set_boundary({'ocean': Bd,'otherocean': Br})
        temp_drchlt=num.zeros(int(finaltime/yieldstep)+1,num.float)
        
        for i, t in enumerate(domain_drchlt.evolve(yieldstep=yieldstep,
                                                   finaltime=finaltime, 
                                                   skip_initial_step = False)):
            temp_drchlt[i]=domain_drchlt.quantities['stage'].centroid_values[2]
            #domain_drchlt.write_time()
        
        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values
                    
        assert num.allclose(temp_fbound,temp_drchlt),temp_fbound-temp_drchlt

        
        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)
                        
        assert num.allclose(domain_fbound.quantities['xmomentum'].vertex_values,
                            domain_drchlt.quantities['xmomentum'].vertex_values)                        
                        
        assert num.allclose(domain_fbound.quantities['ymomentum'].vertex_values,
                            domain_drchlt.quantities['ymomentum'].vertex_values)
        
        if not sys.platform == 'win32':
            if myid==0: os.remove(sts_file+'.sts')
        
        if myid==0: os.remove(meshname)
Example n. 12
def esri2sww(bath_dir,
             elevation_dir,
             ucur_dir,
             vcur_dir,
             sww_file,
             minlat=None,
             maxlat=None,
             minlon=None,
             maxlon=None,
             zscale=1,
             mean_stage=0,
             fail_on_NaN=True,
             elevation_NaN_filler=0,
             bath_prefix='ba',
             elevation_prefix='el',
             verbose=False):
    """
    Produce an sww boundary file from ESRI ASCII data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    Assume:
    All files are in ESRI ASCII format

    4 types of information:
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions:
    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
    The time period is less than 24hrs and uniform.
    """

    from anuga.file.netcdf import NetCDFFile

    from anuga.coordinate_transforms.redfearn import redfearn

    if sww_file[-4:] != '.sww':
        raise IOError('Output file %s should be of type .sww.' % sww_file)

    # So if we want to change the precision it's done here
    precision = netcdf_float

    # Go into the bath dir and load the only file
    bath_files = os.listdir(bath_dir)
    bath_file = bath_files[0]
    bath_dir_file = bath_dir + os.sep + bath_file
    bath_metadata, bath_grid = _read_asc(bath_dir_file)

    #Use the date.time of the bath file as a basis for
    #the start time for other files
    base_start = bath_file[-12:]

    #go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir  + os.sep + elevation_prefix \
                         + base_start

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)

    # Sort so the j'th elevation, ucur and vcur files refer to the same time
    elevation_files.sort()
    ucur_files.sort()
    vcur_files.sort()

    # the first elevation file should be the
    # file with the same base name as the bath data
    assert elevation_files[0] == elevation_prefix + base_start

    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    longitudes = [bath_metadata['xllcorner'] + x*bath_metadata['cellsize'] \
                  for x in range(number_of_longitudes)]
    latitudes = [bath_metadata['yllcorner'] + y*bath_metadata['cellsize'] \
                 for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],
                                                 longitudes[:],
                                                 minlat=minlat,
                                                 maxlat=maxlat,
                                                 minlon=minlon,
                                                 maxlon=maxlon)

    bath_grid = bath_grid[kmin:kmax, lmin:lmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    #Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
        time_period = (int(elevation_files[1][-3:]) \
                       - int(elevation_files[0][-3:])) * 60*60
        times = [x * time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]

    if verbose:
        log.critical('------------------------------------------------')
        log.critical('Statistics:')
        log.critical('  Extent (lat/lon):')
        log.critical('    lat in [%f, %f], len(lat) == %d' %
                     (min(latitudes), max(latitudes), len(latitudes)))
        log.critical('    lon in [%f, %f], len(lon) == %d' %
                     (min(longitudes), max(longitudes), len(longitudes)))
        log.critical('    t in [%f, %f], len(t) == %d' %
                     (min(times), max(times), len(times)))

    ######### WRITE THE SWW FILE #############

    # NetCDF file definition
    outfile = NetCDFFile(sww_file, netcdf_mode_w)

    #Create new file
    outfile.institution = 'Geoscience Australia'
    outfile.description = 'Converted from XXX'

    #For sww compatibility
    outfile.smoothing = 'Yes'
    outfile.order = 1

    #Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]

    # dimension definitions
    outfile.createDimension('number_of_volumes', number_of_volumes)
    outfile.createDimension('number_of_vertices', 3)
    outfile.createDimension('number_of_points', number_of_points)
    outfile.createDimension('number_of_timesteps', number_of_times)

    # variable definitions
    outfile.createVariable('x', precision, ('number_of_points', ))
    outfile.createVariable('y', precision, ('number_of_points', ))
    outfile.createVariable('elevation', precision, ('number_of_points', ))

    #FIXME: Backwards compatibility
    #outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable('volumes', netcdf_int,
                           ('number_of_volumes', 'number_of_vertices'))

    outfile.createVariable('time', precision, ('number_of_timesteps', ))

    outfile.createVariable('stage', precision,
                           ('number_of_timesteps', 'number_of_points'))

    outfile.createVariable('xmomentum', precision,
                           ('number_of_timesteps', 'number_of_points'))

    outfile.createVariable('ymomentum', precision,
                           ('number_of_timesteps', 'number_of_points'))

    #Store
    from anuga.coordinate_transforms.redfearn import redfearn

    x = num.zeros(number_of_points, num.float)  #Easting
    y = num.zeros(number_of_points, num.float)  #Northing

    if verbose: log.critical('Making triangular grid')

    #Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):
            vertices[l, k] = i

            zone, easting, northing = redfearn(lat, lon)

            #msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes - 1):  #X direction
        for k in range(number_of_latitudes - 1):  #Y direction
            v1 = vertices[l, k + 1]
            v2 = vertices[l, k]
            v3 = vertices[l + 1, k + 1]
            v4 = vertices[l + 1, k]

            #Note, this is different to the ferret2sww code
            #since the order of the lats is reversed.
            volumes.append([v1, v3, v2])  #Upper element
            volumes.append([v4, v2, v3])  #Lower element

    volumes = num.array(volumes, num.int)  #array default#

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    #geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(x)+min(y))/2.)

    if verbose:
        log.critical('------------------------------------------------')
        log.critical('More Statistics:')
        log.critical('  Extent (x/y):')
        log.critical('    x in [%f, %f], len(x) == %d' %
                     (min(x), max(x), len(x)))
        log.critical('    y in [%f, %f], len(y) == %d' %
                     (min(y), max(y), len(y)))
        log.critical('geo_ref: %s' % str(geo_ref))

    z = num.resize(bath_grid, outfile.variables['elevation'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    # FIXME (Ole): Remove once viewer has been recompiled and changed
    #              to use elevation instead of z
    #outfile.variables['z'][:] = z
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(num.int32)  # On Opteron 64

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    outfile.variables['time'][:] = times  #Store time relative

    if verbose: log.critical('Converting quantities')

    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = \
            _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        #cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax, lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax, lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax, lmin:lmax]

        # handle missing values
        missing = (elevation_grid == elevation_meta['NODATA_value'])
        if num.sometrue(missing):
            if fail_on_NaN:
                msg = 'File %s contains missing values' \
                      % (elevation_files[j])
                raise_(DataMissingValuesError, msg)
            else:
                elevation_grid = elevation_grid*(missing==0) \
                                 + missing*elevation_NaN_filler

        if verbose and j % (old_div((n + 10), 10)) == 0:
            log.critical('  Doing %d of %d' % (j, n))

        i = 0
        for k in range(number_of_latitudes):  #Y direction
            for l in range(number_of_longitudes):  #X direction
                w = zscale * elevation_grid[k, l] + mean_stage
                stage[j, i] = w
                h = w - z[i]
                xmomentum[j, i] = u_momentum_grid[k, l] * h
                ymomentum[j, i] = v_momentum_grid[k, l] * h
                i += 1

    outfile.close()
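# A hedged usage sketch of esri2sww (directory and file names hypothetical).
# Each directory holds ESRI ASCII grids sharing the same header; the
# bathymetry directory contains a single file whose name ends in .000,
# which anchors the start time of the elevation/ucur/vcur series.
esri2sww(bath_dir='bathymetry',
         elevation_dir='elevation',
         ucur_dir='ucur',
         vcur_dir='vcur',
         sww_file='csiro_boundary.sww',
         minlat=-34.5, maxlat=-34.0,
         zscale=1,
         mean_stage=0,
         verbose=True)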
Example n. 13
    def test_ferret2sww1(self):
        """Test that georeferencing etc works when converting from
        ferret format (lat/lon) to sww format (UTM)
        """
        import os, sys

        #The test file has
        # LON = 150.66667, 150.83334, 151, 151.16667
        # LAT = -34.5, -34.33333, -34.16667, -34 ;
        # TIME = 0, 0.1, 0.6, 1.1, 1.6, 2.1 ;
        #
        # First value (index=0) in small_ha.nc is 0.3400644 cm,
        # Fourth value (index==3) is -6.50198 cm

        #Read
        from anuga.coordinate_transforms.redfearn import redfearn
        #fid = NetCDFFile(self.test_MOST_file)
        fid = NetCDFFile(self.test_MOST_file + '_ha.nc')
        first_value = fid.variables['HA'][:][0, 0, 0]
        fourth_value = fid.variables['HA'][:][0, 0, 3]
        fid.close()

        #Call conversion (with zero origin)
        #ferret2sww('small', verbose=False,
        #           origin = (56, 0, 0))
        ferret2sww(self.test_MOST_file,
                   verbose=self.verbose,
                   origin=(56, 0, 0))

        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(-34.5, 150.66667)
        #print zone, e, n

        #Read output file 'small.sww'
        #fid = NetCDFFile('small.sww')
        fid = NetCDFFile(self.test_MOST_file + '.sww')

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]

        #Check that first coordinate is correctly represented
        assert num.allclose(x[0], e)
        assert num.allclose(y[0], n)

        #Check first value
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]

        #print ymomentum

        assert num.allclose(stage[0, 0], old_div(first_value, 100))  #Meters

        #Check fourth value
        assert num.allclose(stage[0, 3], old_div(fourth_value, 100))  #Meters

        fid.close()

        #Cleanup
        import os
        os.remove(self.test_MOST_file + '.sww')
Example n. 14
def urs2sts(basename_in, basename_out=None,
            weights=None,
            verbose=False,
            origin=None,
            zone=None,
            central_meridian=None,            
            mean_stage=0.0,
            zscale=1.0,
            ordering_filename=None):
    """Convert URS mux2 format for wave propagation to sts format

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)

    inputs:

    basename_in: list of source file prefixes

        These are combined with the extensions:
        WAVEHEIGHT_MUX2_LABEL = '-z-mux2' for stage
        EAST_VELOCITY_MUX2_LABEL = '-e-mux2' xmomentum
        NORTH_VELOCITY_MUX2_LABEL = '-n-mux2' and ymomentum

        to create a 2D list of mux2 files. The rows are associated with each
        quantity and must have the above extensions;
        the columns are the list of file prefixes.

    ordering_filename: a .txt file name specifying which mux2 gauge points
              are to be stored. This is indicated by the index of the gauge
              in the ordering file.

              ordering file format:
              1st line:    'index,longitude,latitude\n'
              other lines: index,longitude,latitude

              If ordering_filename is None or the ordering file is empty
              then all points are taken in the order they
              appear in the mux2 file.


    output:
      basename_out: name of sts file in which mux2 data is stored.
      
      
      
    NOTE: South is positive in mux files so the sign of the y-component
          of velocity is reversed.
    """

    import os
    from anuga.file.netcdf import NetCDFFile

    if not isinstance(basename_in, list):
        if verbose: log.critical('Reading single source')
        basename_in = [basename_in]

    # This is the value used in the mux file format to indicate NAN data
    # FIXME (Ole): This should be changed everywhere to IEEE NAN when
    #              we upgrade to Numpy
    NODATA = 99

    # Check that basename_in is a list of strings
    if not all(isinstance(z, str) for z in basename_in):
        msg = 'basename_in must be a string or list of strings'
        raise Exception(msg)

    # Find the number of sources to be used
    numSrc = len(basename_in)

    # A weight must be specified for each source
    if weights is None:
        # Default is equal weighting
        weights = num.ones(numSrc, num.float) / numSrc
    else:
        weights = ensure_numeric(weights)
        msg = 'When combining multiple sources must specify a weight for ' \
              'mux2 source file'
        assert len(weights) == numSrc, msg

    if verbose: log.critical('Weights used in urs2sts: %s' % str(weights))

    # Check output filename
    if basename_out is None:
        msg = 'STS filename must be specified as basename_out ' \
              'in function urs2sts'
        raise Exception(msg)

    if basename_out.endswith('.sts'):
        stsname = basename_out
    else:
        stsname = basename_out + '.sts'

    # Create input filenames from basenames and check their existence
    files_in = [[], [], []]
    for files in basename_in:
        files_in[0].append(files + WAVEHEIGHT_MUX2_LABEL)
        files_in[1].append(files + EAST_VELOCITY_MUX2_LABEL)
        files_in[2].append(files + NORTH_VELOCITY_MUX2_LABEL)

    quantities = ['HA','UA','VA'] # Quantity names used in the MUX2 format
    for i in range(len(quantities)):
        for file_in in files_in[i]:
            if not os.access(file_in, os.R_OK):
                msg = 'File %s does not exist or is not accessible' % file_in
                raise IOError(msg)

    # Establish permutation array
    if ordering_filename is not None:
        if verbose is True: log.critical('Reading ordering file %s'
                                         % ordering_filename)

        # Read ordering file
        try:
            fid = open(ordering_filename, 'r')
            file_header = fid.readline().split(',')
            ordering_lines = fid.readlines()
            fid.close()
        except:
            msg = 'Cannot open %s' % ordering_filename
            raise Exception(msg)

        reference_header = 'index, longitude, latitude\n'
        reference_header_split = reference_header.split(',')
        for i in range(3):
            if not file_header[i].strip() == reference_header_split[i].strip():
                msg = 'File must contain header: ' + reference_header
                raise Exception(msg)

        if len(ordering_lines) < 2:
            msg = 'File must contain at least two points'
            raise Exception(msg)

        permutation = [int(line.split(',')[0]) for line in ordering_lines]
        permutation = ensure_numeric(permutation)
    else:
        permutation = None

    # Read MUX2 files
    if (verbose): log.critical('reading mux2 file')

    mux={}
    times_old = 0.0
    latitudes_old = 0.0
    longitudes_old = 0.0
    elevation_old = 0.0
    starttime_old = 0.0
    
    for i, quantity in enumerate(quantities):
        # For each quantity read the associated list of source mux2 files
        # with the extension associated with that quantity

        times, latitudes, longitudes, elevation, mux[quantity], starttime \
            = read_mux2_py(files_in[i], weights, permutation, verbose=verbose)

        # Check that all quantities have consistent time and space information
        if quantity != quantities[0]:
            msg = '%s, %s and %s have inconsistent gauge data' \
                  % (files_in[0], files_in[1], files_in[2])
            assert num.allclose(times, times_old), msg
            assert num.allclose(latitudes, latitudes_old), msg
            assert num.allclose(longitudes, longitudes_old), msg
            assert num.allclose(elevation, elevation_old), msg
            assert num.allclose(starttime, starttime_old), msg
        times_old = times
        latitudes_old = latitudes
        longitudes_old = longitudes
        elevation_old = elevation
        starttime_old = starttime

        # Self check - can be removed to improve speed
        #ref_longitudes = [float(line.split(',')[1]) for line in ordering_lines]
        #ref_latitudes = [float(line.split(',')[2]) for line in ordering_lines]
        #
        #msg = 'Longitudes specified in ordering file do not match those ' \
        #      'found in mux files. ' \
        #      'I got %s instead of %s (only beginning shown)' \
        #      % (str(longitudes[:10]) + '...',
        #         str(ref_longitudes[:10]) + '...')
        #assert allclose(longitudes, ref_longitudes), msg
        #
        #msg = 'Latitudes specified in ordering file do not match those ' \
        #      'found in mux files. '
        #      'I got %s instead of %s (only beginning shown)' \
        #      % (str(latitudes[:10]) + '...',
        #         str(ref_latitudes[:10]) + '...')
        #assert allclose(latitudes, ref_latitudes), msg

    # Store timeseries in STS file
    msg = 'File is empty and/or clipped region not in file region'
    assert len(latitudes) > 0, msg

    number_of_points = latitudes.shape[0]      # Number of stations retrieved
    number_of_times = times.shape[0]           # Number of timesteps
    number_of_latitudes = latitudes.shape[0]   # Number latitudes
    number_of_longitudes = longitudes.shape[0] # Number longitudes

    # The permutation vector contains the original indices as given in
    # the ordering file, or None, in which case the points are assigned
    # the trivial indices enumerating them from 0 to number_of_points-1
    if permutation is None:
        permutation = num.arange(number_of_points, dtype=num.int)

    # NetCDF file definition
    outfile = NetCDFFile(stsname, netcdf_mode_w)

    description = 'Converted from URS mux2 files: %s' % basename_in

    # Create new file
    sts = Write_sts()
    sts.store_header(outfile,
                     times+starttime,
                     number_of_points,
                     description=description,
                     verbose=verbose,
                     sts_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn

    x = num.zeros(number_of_points, num.float)  # Easting
    y = num.zeros(number_of_points, num.float)  # Northing

    # Check zone boundaries
    if zone is None:
        refzone, _, _ = redfearn(latitudes[0], longitudes[0],
                                 central_meridian=central_meridian)
    else:
        refzone = zone

    old_zone = refzone
    old_easting = 0.0
    old_northing = 0.0

    for i in range(number_of_points):
        computed_zone, easting, northing = redfearn(latitudes[i], longitudes[i],
                                                    zone=zone,
                                                    central_meridian=central_meridian)
        x[i] = easting
        y[i] = northing
        if computed_zone != refzone:
            msg = 'All sts gauges need to be in the same zone. \n'
            msg += 'offending gauge: Zone %d, %.4f, %.4f\n' \
                   % (computed_zone, easting, northing)
            msg += 'previous gauge: Zone %d, %.4f, %.4f' \
                   % (old_zone, old_easting, old_northing)
            raise Exception(msg)
        old_zone = computed_zone
        old_easting = easting
        old_northing = northing

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    elevation = num.resize(elevation, outfile.variables['elevation'][:].shape)
    outfile.variables['permutation'][:] = permutation.astype(num.int32) # Opteron 64
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['elevation'][:] = elevation

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose: log.critical('Converting quantities')

    for j in range(len(times)):
        for i in range(number_of_points):
            ha = mux['HA'][i,j]
            ua = mux['UA'][i,j]
            va = mux['VA'][i,j]
            if ha == NODATA:
                if verbose:
                    msg = 'Setting nodata value %d to 0 at time = %f, ' \
                          'point = %d' % (ha, times[j], i)
                    log.critical(msg)
                ha = 0.0
                ua = 0.0
                va = 0.0
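
            # Stage is the scaled amplitude plus mean stage, and depth is
            # stage minus bed elevation. No cm->m conversion is applied
            # here, so mux2 amplitudes are taken to be in metres (unlike
            # the ferret files below, where values are in cm).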

            w = zscale*ha + mean_stage
            h = w - elevation[i]
            stage[j,i] = w

            xmomentum[j,i] = ua * h
            ymomentum[j,i] = -va * h # South is positive in mux files


    outfile.close()
    
    if verbose:
        log.critical('Wrote sts file ' + stsname)    
def ferret2sww(basename_in, name_out=None,
               verbose=False,
               minlat=None, maxlat=None,
               minlon=None, maxlon=None,
               mint=None, maxt=None, mean_stage=0,
               origin=None, zscale=1,
               fail_on_NaN=True,
               NaN_filler=0,
               elevation=None,
               inverted_bathymetry=True):
    # FIXME: Bathymetry should be obtained from MOST somehow,
    # alternatively from elsewhere or, as a last resort, specified here.
    # The value of -100 will work for the Wollongong tsunami scenario
    # but is very hacky.
    """Convert MOST and 'Ferret' NetCDF format for wave propagation to
    sww format native to abstract_2d_finite_volumes.

    Specify only basename_in and read files of the form
    basefilename_ha.nc, basefilename_ua.nc, basefilename_va.nc containing
    relative height, x-velocity and y-velocity, respectively.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    mins and maxs: If omitted, the full extent is used.
    To include a value min may equal it, while max must exceed it.
    Lat and lon are assumed to be in decimal degrees.

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)

    nc format has values organised as HA[TIME, LATITUDE, LONGITUDE]
    which means that longitude is the fastest
    varying dimension (row major order, so to speak)

    ferret2sww uses grid points as vertices in a triangular grid
    counting vertices from lower left corner upwards, then right
    """

    from anuga.file.netcdf import NetCDFFile

    _assert_lat_long(minlat, maxlat, minlon, maxlon)

    if name_out is not None and name_out[-4:] != '.sww':
        raise IOError('Output file %s should be of type .sww.' % name_out)

    # Get NetCDF data
    if verbose: log.critical('Reading files %s_*.nc' % basename_in)

    # Wave amplitude (cm)
    file_h = NetCDFFile(basename_in + '_ha.nc', netcdf_mode_r) 
    
    # Velocity (x) (cm/s)
    file_u = NetCDFFile(basename_in + '_ua.nc', netcdf_mode_r)
     
    # Velocity (y) (cm/s)
    file_v = NetCDFFile(basename_in + '_va.nc', netcdf_mode_r)
    
    # Elevation (z) (m)
    file_e = NetCDFFile(basename_in + '_e.nc', netcdf_mode_r)  

    if name_out is None:
        swwname = basename_in + '.sww'
    else:
        swwname = name_out

    # Get dimensions of file_h
    for dimension in file_h.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_h_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_h_latitude = dimension
        if dimension[:4] == 'TIME':
            dim_h_time = dimension

    times = file_h.variables[dim_h_time]
    latitudes = file_h.variables[dim_h_latitude]
    longitudes = file_h.variables[dim_h_longitude]

    # get dimensions for file_e
    for dimension in file_e.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_e_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_e_latitude = dimension

    # get dimensions for file_u
    for dimension in file_u.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_u_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_u_latitude = dimension

    # get dimensions for file_v
    for dimension in file_v.dimensions.keys():
        if dimension[:3] == 'LON':
            dim_v_longitude = dimension
        if dimension[:3] == 'LAT':
            dim_v_latitude = dimension

    # Precision used by MOST for lat/lon is 4 or 5 decimals
    e_lat = num.around(file_e.variables[dim_e_latitude][:], 5)
    e_lon = num.around(file_e.variables[dim_e_longitude][:], 5)

    # Check that files are compatible
    assert num.allclose(latitudes, file_u.variables[dim_u_latitude])
    assert num.allclose(latitudes, file_v.variables[dim_v_latitude])
    assert num.allclose(latitudes, e_lat)

    assert num.allclose(longitudes, file_u.variables[dim_u_longitude])
    assert num.allclose(longitudes, file_v.variables[dim_v_longitude])
    assert num.allclose(longitudes, e_lon)

    if mint is None:
        jmin = 0
        mint = times[0]
    else:
        jmin = num.searchsorted(times, mint)
        
        # numpy.int32 didn't work in slicing of amplitude below
        jmin = int(jmin)

    if maxt is None:
        jmax = len(times)
        maxt = times[-1]
    else:
        jmax = num.searchsorted(times, maxt)
        
        # numpy.int32 didn't work in slicing of amplitude below
        jmax = int(jmax)        

    kmin, kmax, lmin, lmax = get_min_max_indices(latitudes[:],
                                                  longitudes[:],
                                                  minlat, maxlat,
                                                  minlon, maxlon)


    times = times[jmin:jmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]

    if verbose: log.critical('cropping')

    zname = 'ELEVATION'

    amplitudes = file_h.variables['HA'][jmin:jmax, kmin:kmax, lmin:lmax]
    uspeed = file_u.variables['UA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lon
    vspeed = file_v.variables['VA'][jmin:jmax, kmin:kmax, lmin:lmax] #Lat
    elevations = file_e.variables[zname][kmin:kmax, lmin:lmax]

    # Get missing values
    nan_ha = file_h.variables['HA'].missing_value
    nan_ua = file_u.variables['UA'].missing_value
    nan_va = file_v.variables['VA'].missing_value
    if hasattr(file_e.variables[zname],'missing_value'):
        nan_e  = file_e.variables[zname].missing_value
    else:
        nan_e = None

    # Cleanup
    missing = (amplitudes == nan_ha)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_ha.nc')
            raise DataMissingValuesError(msg)
        else:
            amplitudes = amplitudes*(missing==0) + missing*NaN_filler

    missing = (uspeed == nan_ua)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_ua.nc')
            raise DataMissingValuesError(msg)
        else:
            uspeed = uspeed*(missing==0) + missing*NaN_filler

    missing = (vspeed == nan_va)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_va.nc')
            raise DataMissingValuesError(msg)
        else:
            vspeed = vspeed*(missing==0) + missing*NaN_filler

    missing = (elevations == nan_e)
    if num.sometrue(missing):
        if fail_on_NaN:
            msg = 'NetCDFFile %s contains missing values' \
                  % (basename_in + '_e.nc')
            raise DataMissingValuesError(msg)
        else:
            elevations = elevations*(missing==0) + missing*NaN_filler

    number_of_times = times.shape[0]
    number_of_latitudes = latitudes.shape[0]
    number_of_longitudes = longitudes.shape[0]

    assert amplitudes.shape[0] == number_of_times
    assert amplitudes.shape[1] == number_of_latitudes
    assert amplitudes.shape[2] == number_of_longitudes

    if verbose:
        _show_stats((latitudes, longitudes), times, amplitudes, \
                    (uspeed, vspeed), elevations)

    # print number_of_latitudes, number_of_longitudes
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes-1) * (number_of_longitudes-1) * 2

    file_h.close()
    file_u.close()
    file_v.close()
    file_e.close()

    # NetCDF file definition
    outfile = NetCDFFile(swwname, netcdf_mode_w)

    description = 'Converted from Ferret files: %s, %s, %s, %s' \
                  % (basename_in + '_ha.nc',
                     basename_in + '_ua.nc',
                     basename_in + '_va.nc',
                     basename_in + '_e.nc')

    # Create new file
    starttime = times[0]

    sww = Write_sww(['elevation'], ['stage', 'xmomentum', 'ymomentum'])
    sww.store_header(outfile, times, number_of_volumes,
                     number_of_points, description=description,
                     verbose=verbose, sww_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn
    x = num.zeros(number_of_points, num.float)  #Easting
    y = num.zeros(number_of_points, num.float)  #Northing

    if verbose:
        log.critical('Making triangular grid')

    # Check zone boundaries
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):       # Y direction
        for l, lon in enumerate(longitudes):  # X direction
            vertices[l, k] = i

            _, easting, northing = redfearn(lat, lon)

            #msg = 'Zone boundary crossed at longitude =', lon
            #assert zone == refzone, msg
            #print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    #Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes-1):    # X direction
        for k in range(number_of_latitudes-1): # Y direction
            v1 = vertices[l, k+1]
            v2 = vertices[l, k]
            v3 = vertices[l+1, k+1]
            v4 = vertices[l+1, k]
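
            # In (lon, lat) index space the cell looks like:
            #     v1 --- v3      (row k+1)
            #      |      |
            #     v2 --- v4      (row k)
            # (v1, v2, v3) and (v4, v3, v2) share the diagonal v2-v3 and
            # both wind counterclockwise in (easting, northing).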

            volumes.append([v1, v2, v3]) #Upper element
            volumes.append([v4, v3, v2]) #Lower element

    volumes = num.array(volumes, num.int)      #array default#

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    if elevation is not None:
        z = elevation
    else:
        if inverted_bathymetry:
            z = -1 * elevations
        else:
            z = elevations
    #FIXME: z should be obtained from MOST and passed in here

    #FIXME use the Write_sww instance(sww) to write this info
    z = num.resize(z, outfile.variables['elevation'][:].shape)
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    #outfile.variables['z'][:] = z             #FIXME HACK for backwards compat.
    outfile.variables['elevation'][:] = z
    outfile.variables['volumes'][:] = volumes.astype(num.int32) #For Opteron 64

    #Time stepping
    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose:
        log.critical('Converting quantities')

    n = len(times)
    for j in range(n):
        if verbose and j % ((n+10)//10) == 0:
            log.critical('  Doing %d of %d' % (j, n))

        i = 0
        for k in range(number_of_latitudes):      # Y direction
            for l in range(number_of_longitudes): # X direction
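                # MOST amplitudes and speeds are in cm and cm/s, hence the
                # division by 100; momentum is speed times depth.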
                w = zscale * amplitudes[j, k, l] / 100 + mean_stage
                stage[j, i] = w
                h = w - z[i]
                xmomentum[j, i] = uspeed[j, k, l]/100*h
                ymomentum[j, i] = vspeed[j, k, l]/100*h
                i += 1

    #outfile.close()

    #FIXME: Refactor using code from file_function.statistics
    #Something like print swwstats(swwname)
    if verbose:
        time_info = times, starttime, mint, maxt
        _show_sww_stats(outfile, swwname, geo_ref, time_info)

    outfile.close()
    def parallel_time_varying_file_boundary_sts(self):
        """ parallel_test_time_varying_file_boundary_sts_sequential(self):
            Read correct points from ordering file and apply sts to boundary. 
            The boundary is time varying. Compares sequential result with 
            distributed result found using anuga_parallel
        """

        #------------------------------------------------------------
        # Define test variables
        #------------------------------------------------------------
        lat_long_points = [[6.01, 97.0], [6.02, 97.0], [6.05, 96.9],
                           [6.0, 97.0]]
        bounding_polygon = [[6.0, 97.0], [6.01, 97.0], [6.02, 97.0],
                            [6.02, 97.02], [6.00, 97.02]]
        tide = 3.0
        time_step_count = 65
        time_step = 2
        n = len(lat_long_points)
        first_tstep = num.ones(n, num.int)
        last_tstep = (time_step_count) * num.ones(n, num.int)
        finaltime = num.float(time_step * (time_step_count - 1))
        yieldstep = num.float(time_step)
        gauge_depth = 20 * num.ones(n, num.float)
        ha = 2 * num.ones((n, time_step_count), num.float)
        ua = 10 * num.ones((n, time_step_count), num.float)
        va = -10 * num.ones((n, time_step_count), num.float)

        times = num.arange(0, time_step_count * time_step, time_step)
        for i in range(n):
            #ha[i]+=num.sin(times)
            ha[i] += times / finaltime

        #------------------------------------------------------------
        # Write mux data to file then convert to sts format
        #------------------------------------------------------------
        sts_file = "test"
        if myid == 0:
            base_name, files = self.write_mux2(lat_long_points,
                                               time_step_count,
                                               time_step,
                                               first_tstep,
                                               last_tstep,
                                               depth=gauge_depth,
                                               ha=ha,
                                               ua=ua,
                                               va=va)
            # base name will not exist, but 3 other files are created

            # Write order file
            file_handle, order_base_name = tempfile.mkstemp("")
            os.close(file_handle)
            os.remove(order_base_name)
            d = ","
            order_file = order_base_name + 'order.txt'
            fid = open(order_file, 'w')

            # Write Header
            header = 'index, longitude, latitude\n'
            fid.write(header)
            indices = [3, 0, 1]
            for i in indices:
                line=str(i)+d+str(lat_long_points[i][1])+d+\
                    str(lat_long_points[i][0])+"\n"
                fid.write(line)
            fid.close()

            urs2sts(base_name,
                    basename_out=sts_file,
                    ordering_filename=order_file,
                    mean_stage=tide,
                    verbose=verbose)
            self.delete_mux(files)

            assert (os.access(sts_file + '.sts', os.F_OK))

            os.remove(order_file)

        barrier()
        #------------------------------------------------------------
        # Define boundary_polygon on each processor. This polygon defines the
        # urs boundary and lies on a portion of the bounding_polygon
        #------------------------------------------------------------
        boundary_polygon = create_sts_boundary(sts_file)

        # Append the remaining part of the boundary polygon to be defined by
        # the user
        bounding_polygon_utm = []
        for point in bounding_polygon:
            zone, easting, northing = redfearn(point[0], point[1])
            bounding_polygon_utm.append([easting, northing])

        boundary_polygon.append(bounding_polygon_utm[3])
        boundary_polygon.append(bounding_polygon_utm[4])

        assert num.allclose(bounding_polygon_utm, boundary_polygon)

        extent_res = 10000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions = None
        boundary_tags = {'ocean': [0, 1], 'otherocean': [2, 3, 4]}

        #------------------------------------------------------------
        # Create mesh on the master processor and store in file. This file
        # is read in by each slave processor when needed
        #------------------------------------------------------------
        if myid == 0:
            create_mesh_from_regions(boundary_polygon,
                                     boundary_tags=boundary_tags,
                                     maximum_triangle_area=extent_res,
                                     filename=meshname,
                                     interior_regions=interior_regions,
                                     verbose=verbose)

            # barrier()
            domain_fbound = Domain(meshname)
            domain_fbound.set_quantities_to_be_stored(None)
            domain_fbound.set_quantity('stage', tide)
            # print domain_fbound.mesh.get_boundary_polygon()
        else:
            domain_fbound = None

        barrier()
        if verbose and myid == 0:
            print('DISTRIBUTING PARALLEL DOMAIN')
        domain_fbound = distribute(domain_fbound)

        #--------------------------------------------------------------------
        # Find the sub_domain in which each interpolation point is located
        #
        # Sometimes the interpolation points sit exactly
        # between two centroids, so in the parallel run we
        # reset the interpolation points to the centroids
        # found in the sequential run
        #--------------------------------------------------------------------
        interpolation_points = [[279000, 664000], [280250, 664130],
                                [279280, 665400], [280500, 665000]]

        interpolation_points = num.array(interpolation_points)

        #if myid==0:
        #    import pylab as P
        #    boundary_polygon=num.array(boundary_polygon)
        #    P.plot(boundary_polygon[:,0],boundary_polygon[:,1])
        #    P.plot(interpolation_points[:,0],interpolation_points[:,1],'ko')
        #    P.show()

        fbound_gauge_values = []
        fbound_proc_tri_ids = []
        for i, point in enumerate(interpolation_points):
            fbound_gauge_values.append([])  # Empty list for timeseries

            try:
                k = domain_fbound.get_triangle_containing_point(point)
                if domain_fbound.tri_full_flag[k] == 1:
                    fbound_proc_tri_ids.append(k)
                else:
                    fbound_proc_tri_ids.append(-1)
            except Exception:
                fbound_proc_tri_ids.append(-2)

        if verbose: print('P%d has points = %s' % (myid, fbound_proc_tri_ids))

        #------------------------------------------------------------
        # Set boundary conditions
        #------------------------------------------------------------
        Bf = File_boundary(sts_file + '.sts',
                           domain_fbound,
                           boundary_polygon=boundary_polygon)
        Br = Reflective_boundary(domain_fbound)

        domain_fbound.set_boundary({'ocean': Bf, 'otherocean': Br})

        #------------------------------------------------------------
        # Evolve the domain on each processor
        #------------------------------------------------------------
        for i, t in enumerate(
                domain_fbound.evolve(yieldstep=yieldstep,
                                     finaltime=finaltime,
                                     skip_initial_step=False)):

            stage = domain_fbound.get_quantity('stage')
            for i in range(4):
                if fbound_proc_tri_ids[i] > -1:
                    fbound_gauge_values[i].append(
                        stage.centroid_values[fbound_proc_tri_ids[i]])

        #------------------------------------------------------------
        # Create domain to be run sequentially on each processor
        #------------------------------------------------------------
        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantities_to_be_stored(None)
        domain_drchlt.set_starttime(time_step)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)
        #Bd = Dirichlet_boundary([2.0+tide,220+10*tide,-220-10*tide])
        Bd = Time_boundary(
            domain=domain_drchlt,
            function=lambda t: [
                2.0 + t / finaltime + tide, 220. + 10. * tide + 10. * t /
                finaltime, -220. - 10. * tide - 10. * t / finaltime
            ])
        #Bd = Time_boundary(domain=domain_drchlt,function=lambda t: [2.0+num.sin(t)+tide,10.*(2+20.+num.sin(t)+tide),-10.*(2+20.+num.sin(t)+tide)])
        domain_drchlt.set_boundary({'ocean': Bd, 'otherocean': Br})

        drchlt_gauge_values = []
        drchlt_proc_tri_ids = []
        for i, point in enumerate(interpolation_points):
            drchlt_gauge_values.append([])  # Empty list for timeseries

            try:
                k = domain_drchlt.get_triangle_containing_point(point)
                if domain_drchlt.tri_full_flag[k] == 1:
                    drchlt_proc_tri_ids.append(k)
                else:
                    drchlt_proc_tri_ids.append(-1)
            except Exception:
                drchlt_proc_tri_ids.append(-2)

        if verbose: print('P%d has points = %s' % (myid, drchlt_proc_tri_ids))

        #------------------------------------------------------------
        # Evolve entire domain on each processor
        #------------------------------------------------------------
        for i, t in enumerate(
                domain_drchlt.evolve(yieldstep=yieldstep,
                                     finaltime=finaltime,
                                     skip_initial_step=False)):

            stage = domain_drchlt.get_quantity('stage')
            for i in range(4):
                drchlt_gauge_values[i].append(
                    stage.centroid_values[drchlt_proc_tri_ids[i]])

        #------------------------------------------------------------
        # Compare sequential values with parallel values
        #------------------------------------------------------------
        barrier()
        success = True
        for i in range(4):
            if fbound_proc_tri_ids[i] > -1:
                fbound_gauge_values[i] = num.array(fbound_gauge_values[i])
                drchlt_gauge_values[i] = num.array(drchlt_gauge_values[i])
                #print i,fbound_gauge_values[i][4]
                #print i,drchlt_gauge_values[i][4]
                success = success and num.allclose(fbound_gauge_values[i],
                                                   drchlt_gauge_values[i])
                assert success  #, (fbound_gauge_values[i]-drchlt_gauge_values[i])

        #assert_(success)

        if sys.platform != 'win32':
            if myid == 0: os.remove(sts_file + '.sts')

        if myid == 0: os.remove(meshname)
def esri2sww(
    bath_dir,
    elevation_dir,
    ucur_dir,
    vcur_dir,
    sww_file,
    minlat=None,
    maxlat=None,
    minlon=None,
    maxlon=None,
    zscale=1,
    mean_stage=0,
    fail_on_NaN=True,
    elevation_NaN_filler=0,
    bath_prefix="ba",
    elevation_prefix="el",
    verbose=False,
):
    """
    Produce an sww boundary file from esri ascii data from CSIRO.

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum.

    assume:
    All files are in esri ascii format

    4 types of information
    bathymetry
    elevation
    u velocity
    v velocity

    Assumptions
    The metadata of all the files is the same
    Each type is in a separate directory
    One bath file with extension .000
    The time period is less than 24hrs and uniform.
    """

    from anuga.file.netcdf import NetCDFFile

    from anuga.coordinate_transforms.redfearn import redfearn

    if sww_file[-4:] != ".sww":
        raise IOError("Output file %s should be of type .sww." % sww_file)

    # So if we want to change the precision it's done here
    precision = netcdf_float

    # go into the bath dir and load the only file
    bath_files = os.listdir(bath_dir)
    bath_file = bath_files[0]
    bath_dir_file = bath_dir + os.sep + bath_file
    bath_metadata, bath_grid = _read_asc(bath_dir_file)

    # Use the date.time of the bath file as a basis for
    # the start time for other files
    base_start = bath_file[-12:]

    # go into the elevation dir and load the 000 file
    elevation_dir_file = elevation_dir + os.sep + elevation_prefix + base_start

    elevation_files = os.listdir(elevation_dir)
    ucur_files = os.listdir(ucur_dir)
    vcur_files = os.listdir(vcur_dir)
    elevation_files.sort()

    # the first elevation file should be the
    # file with the same base name as the bath data
    assert elevation_files[0] == elevation_prefix + base_start

    number_of_latitudes = bath_grid.shape[0]
    number_of_longitudes = bath_grid.shape[1]
    number_of_volumes = (number_of_latitudes - 1) * (number_of_longitudes - 1) * 2

    longitudes = [bath_metadata["xllcorner"] + x * bath_metadata["cellsize"] for x in range(number_of_longitudes)]
    latitudes = [bath_metadata["yllcorner"] + y * bath_metadata["cellsize"] for y in range(number_of_latitudes)]

    # reverse order of lat, so the first lat represents the first grid row
    latitudes.reverse()

    kmin, kmax, lmin, lmax = get_min_max_indices(
        latitudes[:], longitudes[:], minlat=minlat, maxlat=maxlat, minlon=minlon, maxlon=maxlon
    )

    bath_grid = bath_grid[kmin:kmax, lmin:lmax]
    latitudes = latitudes[kmin:kmax]
    longitudes = longitudes[lmin:lmax]
    number_of_latitudes = len(latitudes)
    number_of_longitudes = len(longitudes)
    number_of_times = len(os.listdir(elevation_dir))
    number_of_points = number_of_latitudes * number_of_longitudes
    number_of_volumes = (number_of_latitudes - 1) * (number_of_longitudes - 1) * 2

    # Work out the times
    if len(elevation_files) > 1:
        # Assume: The time period is less than 24hrs.
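        # e.g. (hypothetical filenames) grids ending in '000' and '003'
        # are 3 hours apart, giving time_period == 3*60*60 == 10800 s.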
        time_period = (int(elevation_files[1][-3:]) - int(elevation_files[0][-3:])) * 60 * 60
        times = [x * time_period for x in range(len(elevation_files))]
    else:
        times = [0.0]

    if verbose:
        log.critical("------------------------------------------------")
        log.critical("Statistics:")
        log.critical("  Extent (lat/lon):")
        log.critical("    lat in [%f, %f], len(lat) == %d" % (min(latitudes), max(latitudes), len(latitudes)))
        log.critical("    lon in [%f, %f], len(lon) == %d" % (min(longitudes), max(longitudes), len(longitudes)))
        log.critical("    t in [%f, %f], len(t) == %d" % (min(times), max(times), len(times)))

    ######### WRITE THE SWW FILE #############

    # NetCDF file definition
    outfile = NetCDFFile(sww_file, netcdf_mode_w)

    # Create new file
    outfile.institution = "Geoscience Australia"
    outfile.description = "Converted from XXX"

    # For sww compatibility
    outfile.smoothing = "Yes"
    outfile.order = 1

    # Start time in seconds since the epoch (midnight 1/1/1970)
    outfile.starttime = starttime = times[0]

    # dimension definitions
    outfile.createDimension("number_of_volumes", number_of_volumes)
    outfile.createDimension("number_of_vertices", 3)
    outfile.createDimension("number_of_points", number_of_points)
    outfile.createDimension("number_of_timesteps", number_of_times)

    # variable definitions
    outfile.createVariable("x", precision, ("number_of_points",))
    outfile.createVariable("y", precision, ("number_of_points",))
    outfile.createVariable("elevation", precision, ("number_of_points",))

    # FIXME: Backwards compatibility
    # outfile.createVariable('z', precision, ('number_of_points',))
    #################################

    outfile.createVariable("volumes", netcdf_int, ("number_of_volumes", "number_of_vertices"))

    outfile.createVariable("time", precision, ("number_of_timesteps",))

    outfile.createVariable("stage", precision, ("number_of_timesteps", "number_of_points"))

    outfile.createVariable("xmomentum", precision, ("number_of_timesteps", "number_of_points"))

    outfile.createVariable("ymomentum", precision, ("number_of_timesteps", "number_of_points"))

    # Store

    x = num.zeros(number_of_points, num.float)  # Easting
    y = num.zeros(number_of_points, num.float)  # Northing

    if verbose:
        log.critical("Making triangular grid")

    # Get zone of 1st point.
    refzone, _, _ = redfearn(latitudes[0], longitudes[0])

    vertices = {}
    i = 0
    for k, lat in enumerate(latitudes):
        for l, lon in enumerate(longitudes):
            vertices[l, k] = i

            zone, easting, northing = redfearn(lat, lon)

            # msg = 'Zone boundary crossed at longitude =', lon
            # assert zone == refzone, msg
            # print '%7.2f %7.2f %8.2f %8.2f' %(lon, lat, easting, northing)
            x[i] = easting
            y[i] = northing
            i += 1

    # Construct 2 triangles per 'rectangular' element
    volumes = []
    for l in range(number_of_longitudes - 1):  # X direction
        for k in range(number_of_latitudes - 1):  # Y direction
            v1 = vertices[l, k + 1]
            v2 = vertices[l, k]
            v3 = vertices[l + 1, k + 1]
            v4 = vertices[l + 1, k]

            # Note, this is different to the ferret2sww code
            # since the order of the lats is reversed.
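            # With row 0 the northern-most row, k increases southwards,
            # so swapping the winding relative to ferret2sww keeps both
            # triangles counterclockwise in (easting, northing).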
            volumes.append([v1, v3, v2])  # Upper element
            volumes.append([v4, v2, v3])  # Lower element

    volumes = num.array(volumes, num.int)  # array default#

    geo_ref = Geo_reference(refzone, min(x), min(y))
    geo_ref.write_NetCDF(outfile)

    # This will put the geo ref in the middle
    # geo_ref = Geo_reference(refzone, (max(x)+min(x))/2., (max(y)+min(y))/2.)

    if verbose:
        log.critical("------------------------------------------------")
        log.critical("More Statistics:")
        log.critical("  Extent (/lon):")
        log.critical("    x in [%f, %f], len(lat) == %d" % (min(x), max(x), len(x)))
        log.critical("    y in [%f, %f], len(lon) == %d" % (min(y), max(y), len(y)))
        log.critical("geo_ref: ", geo_ref)

    z = num.resize(bath_grid, outfile.variables["elevation"][:].shape)
    outfile.variables["x"][:] = x - geo_ref.get_xllcorner()
    outfile.variables["y"][:] = y - geo_ref.get_yllcorner()
    # FIXME (Ole): Remove once viewer has been recompiled and changed
    #              to use elevation instead of z
    # outfile.variables['z'][:] = z
    outfile.variables["elevation"][:] = z
    outfile.variables["volumes"][:] = volumes.astype(num.int32)  # On Opteron 64

    stage = outfile.variables["stage"]
    xmomentum = outfile.variables["xmomentum"]
    ymomentum = outfile.variables["ymomentum"]

    outfile.variables["time"][:] = times  # Store time relative

    if verbose:
        log.critical("Converting quantities")

    n = number_of_times
    for j in range(number_of_times):
        # load in files
        elevation_meta, elevation_grid = _read_asc(elevation_dir + os.sep + elevation_files[j])

        _, u_momentum_grid = _read_asc(ucur_dir + os.sep + ucur_files[j])
        _, v_momentum_grid = _read_asc(vcur_dir + os.sep + vcur_files[j])

        # cut matrix to desired size
        elevation_grid = elevation_grid[kmin:kmax, lmin:lmax]
        u_momentum_grid = u_momentum_grid[kmin:kmax, lmin:lmax]
        v_momentum_grid = v_momentum_grid[kmin:kmax, lmin:lmax]

        # handle missing values
        missing = elevation_grid == elevation_meta["NODATA_value"]
        if num.sometrue(missing):
            if fail_on_NaN:
                msg = "File %s contains missing values" % (elevation_files[j])
                raise DataMissingValuesError(msg)
            else:
                elevation_grid = elevation_grid * (missing == 0) + missing * elevation_NaN_filler

        if verbose and j % ((n + 10) // 10) == 0:
            log.critical("  Doing %d of %d" % (j, n))

        i = 0
        for k in range(number_of_latitudes):  # Y direction
            for l in range(number_of_longitudes):  # X direction
                w = zscale * elevation_grid[k, l] + mean_stage
                stage[j, i] = w
                h = w - z[i]
                xmomentum[j, i] = u_momentum_grid[k, l] * h
                ymomentum[j, i] = v_momentum_grid[k, l] * h
                i += 1

    outfile.close()
    def sequential_time_varying_file_boundary_sts(self):
        """sequential_ltest_time_varying_file_boundary_sts_sequential(self):
        Read correct points from ordering file and apply sts to boundary. The boundary is time varying. FIXME add to test_urs2sts.
        """
        lat_long_points = [[6.01, 97.0], [6.02, 97.0], [6.05, 96.9],
                           [6.0, 97.0]]
        bounding_polygon = [[6.0, 97.0], [6.01, 97.0], [6.02, 97.0],
                            [6.02, 97.02], [6.00, 97.02]]
        tide = 3.0
        time_step_count = 65
        time_step = 2.
        n = len(lat_long_points)
        first_tstep = num.ones(n, num.int)
        last_tstep = (time_step_count) * num.ones(n, num.int)
        finaltime = num.float(time_step * (time_step_count - 1))
        yieldstep = num.float(time_step)
        gauge_depth = 20 * num.ones(n, num.float)
        ha = 2 * num.ones((n, time_step_count), num.float)
        ua = 10 * num.ones((n, time_step_count), num.float)
        va = -10 * num.ones((n, time_step_count), num.float)

        times = num.arange(0., num.float(time_step_count * time_step),
                           time_step)
        for i in range(n):
            #ha[i]+=num.sin(times)
            ha[i] += times / finaltime

        sts_file = "test"
        if myid == 0:
            base_name, files = self.write_mux2(lat_long_points,
                                               time_step_count,
                                               time_step,
                                               first_tstep,
                                               last_tstep,
                                               depth=gauge_depth,
                                               ha=ha,
                                               ua=ua,
                                               va=va)
            # base name will not exist, but 3 other files are created

            # Write order file
            file_handle, order_base_name = tempfile.mkstemp("")
            os.close(file_handle)
            os.remove(order_base_name)
            d = ","
            order_file = order_base_name + 'order.txt'
            fid = open(order_file, 'w')

            # Write Header
            header = 'index, longitude, latitude\n'
            fid.write(header)
            indices = [3, 0, 1]
            for i in indices:
                line=str(i)+d+str(lat_long_points[i][1])+d+\
                    str(lat_long_points[i][0])+"\n"
                fid.write(line)
            fid.close()

            urs2sts(base_name,
                    basename_out=sts_file,
                    ordering_filename=order_file,
                    mean_stage=tide,
                    verbose=verbose)
            self.delete_mux(files)

            assert (os.access(sts_file + '.sts', os.F_OK))

            os.remove(order_file)

        barrier()
        boundary_polygon = create_sts_boundary(sts_file)

        # Append the remaining part of the boundary polygon to be defined by
        # the user
        bounding_polygon_utm = []
        for point in bounding_polygon:
            zone, easting, northing = redfearn(point[0], point[1])
            bounding_polygon_utm.append([easting, northing])

        boundary_polygon.append(bounding_polygon_utm[3])
        boundary_polygon.append(bounding_polygon_utm[4])

        assert num.allclose(bounding_polygon_utm, boundary_polygon)

        extent_res = 1000000
        meshname = 'urs_test_mesh' + '.tsh'
        interior_regions = None
        boundary_tags = {'ocean': [0, 1], 'otherocean': [2, 3, 4]}

        # have to change boundary tags from last example because now bounding
        # polygon starts in different place.
        if myid == 0:
            create_mesh_from_regions(boundary_polygon,
                                     boundary_tags=boundary_tags,
                                     maximum_triangle_area=extent_res,
                                     filename=meshname,
                                     interior_regions=interior_regions,
                                     verbose=verbose)

        barrier()

        domain_fbound = Domain(meshname)
        domain_fbound.set_quantities_to_be_stored(None)
        domain_fbound.set_quantity('stage', tide)
        if verbose: print "Creating file boundary condition"
        Bf = File_boundary(sts_file + '.sts',
                           domain_fbound,
                           boundary_polygon=boundary_polygon)
        Br = Reflective_boundary(domain_fbound)

        domain_fbound.set_boundary({'ocean': Bf, 'otherocean': Br})

        temp_fbound = num.zeros(int(finaltime / yieldstep) + 1, num.float)
        if verbose: print "Evolving domain with file boundary condition"
        for i, t in enumerate(
                domain_fbound.evolve(yieldstep=yieldstep,
                                     finaltime=finaltime,
                                     skip_initial_step=False)):
            temp_fbound[i] = domain_fbound.quantities['stage'].centroid_values[
                2]
            if verbose: domain_fbound.write_time()

        domain_drchlt = Domain(meshname)
        domain_drchlt.set_quantities_to_be_stored(None)
        domain_drchlt.set_starttime(time_step)
        domain_drchlt.set_quantity('stage', tide)
        Br = Reflective_boundary(domain_drchlt)
        #Bd = Dirichlet_boundary([2.0+tide,220+10*tide,-220-10*tide])
        Bd = Time_boundary(
            domain=domain_drchlt,
            f=lambda t: [
                2.0 + t / finaltime + tide, 220. + 10. * tide + 10. * t /
                finaltime, -220. - 10. * tide - 10. * t / finaltime
            ])
        #Bd = Time_boundary(domain=domain_drchlt,f=lambda t: [2.0+num.sin(t)+tide,10.*(2+20.+num.sin(t)+tide),-10.*(2+20.+num.sin(t)+tide)])
        domain_drchlt.set_boundary({'ocean': Bd, 'otherocean': Br})
        temp_drchlt = num.zeros(int(finaltime / yieldstep) + 1, num.float)

        for i, t in enumerate(
                domain_drchlt.evolve(yieldstep=yieldstep,
                                     finaltime=finaltime,
                                     skip_initial_step=False)):
            temp_drchlt[i] = domain_drchlt.quantities['stage'].centroid_values[
                2]
            #domain_drchlt.write_time()

        #print domain_fbound.quantities['stage'].vertex_values
        #print domain_drchlt.quantities['stage'].vertex_values

        assert num.allclose(temp_fbound,
                            temp_drchlt), temp_fbound - temp_drchlt

        assert num.allclose(domain_fbound.quantities['stage'].vertex_values,
                            domain_drchlt.quantities['stage'].vertex_values)

        assert num.allclose(
            domain_fbound.quantities['xmomentum'].vertex_values,
            domain_drchlt.quantities['xmomentum'].vertex_values)

        assert num.allclose(
            domain_fbound.quantities['ymomentum'].vertex_values,
            domain_drchlt.quantities['ymomentum'].vertex_values)

        if sys.platform != 'win32':
            if myid == 0: os.remove(sts_file + '.sts')

        if myid == 0: os.remove(meshname)
def urs2sts(basename_in,
            basename_out=None,
            weights=None,
            verbose=False,
            origin=None,
            zone=None,
            central_meridian=None,
            mean_stage=0.0,
            zscale=1.0,
            ordering_filename=None):
    """Convert URS mux2 format for wave propagation to sts format

    Also convert latitude and longitude to UTM. All coordinates are
    assumed to be given in the GDA94 datum

    origin is a 3-tuple with geo referenced
    UTM coordinates (zone, easting, northing)

    inputs:

    basename_in: list of source file prefixes

        These are combined with the extensions:
        WAVEHEIGHT_MUX2_LABEL = '-z-mux2' for stage
        EAST_VELOCITY_MUX2_LABEL = '-e-mux2' xmomentum
        NORTH_VELOCITY_MUX2_LABEL = '-n-mux2' and ymomentum

        to create a 2D list of mux2 files. The rows are associated with
        each quantity and must have the above extensions;
        the columns are the list of file prefixes.

    ordering_filename: a .txt file specifying which mux2 gauge points are
              to be stored. This is indicated by the index of the gauge
              in the ordering file.

              ordering file format:
              1st line:    'index,longitude,latitude\n'
              other lines: index,longitude,latitude

              If ordering_filename is None or the ordering file is empty
              then all points are taken in the order they
              appear in the mux2 file.


    output:
      basename_out: name of sts file in which mux2 data is stored.

    NOTE: South is positive in mux files so the sign of the y-component
    of velocity is reversed.
    """

    import os
    from anuga.file.netcdf import NetCDFFile

    if not isinstance(basename_in, list):
        if verbose: log.critical('Reading single source')
        basename_in = [basename_in]

    # This is the value used in the mux file format to indicate NAN data
    # FIXME (Ole): This should be changed everywhere to IEEE NAN when
    #              we upgrade to Numpy
    NODATA = 99

    # Check that basename is a list of strings
    if not all(isinstance(z, basestring) for z in basename_in):
        msg = 'basename_in must be a string or list of strings'
        raise Exception(msg)

    # Find the number of sources to be used
    numSrc = len(basename_in)

    # A weight must be specified for each source
    if weights is None:
        # Default is equal weighting
        weights = num.ones(numSrc, num.float) / numSrc
    else:
        weights = ensure_numeric(weights)
        msg = 'When combining multiple sources a weight must be ' \
              'specified for each mux2 source file'
        assert len(weights) == numSrc, msg

    if verbose: log.critical('Weights used in urs2sts: %s' % str(weights))

    # Check output filename
    if basename_out is None:
        msg = 'STS filename must be specified as basename_out ' \
              'in function urs2sts'
        raise Exception(msg)

    if basename_out.endswith('.sts'):
        stsname = basename_out
    else:
        stsname = basename_out + '.sts'

    # Create input filenames from basenames and check their existence
    files_in = [[], [], []]
    for files in basename_in:
        files_in[0].append(files + WAVEHEIGHT_MUX2_LABEL)
        files_in[1].append(files + EAST_VELOCITY_MUX2_LABEL)
        files_in[2].append(files + NORTH_VELOCITY_MUX2_LABEL)

    quantities = ['HA', 'UA', 'VA']  # Quantity names used in the MUX2 format
    for i in range(len(quantities)):
        for file_in in files_in[i]:
            if not os.access(file_in, os.R_OK):
                msg = 'File %s does not exist or is not accessible' % file_in
                raise IOError(msg)

    # Establish permutation array
    if ordering_filename is not None:
        if verbose:
            log.critical('Reading ordering file %s' % ordering_filename)

        # Read ordering file
        try:
            fid = open(ordering_filename, 'r')
            file_header = fid.readline().split(',')
            ordering_lines = fid.readlines()
            fid.close()
        except Exception:
            msg = 'Cannot open %s' % ordering_filename
            raise Exception(msg)

        reference_header = 'index, longitude, latitude\n'
        reference_header_split = reference_header.split(',')
        for i in range(3):
            if not file_header[i].strip() == reference_header_split[i].strip():
                msg = 'File must contain header: ' + reference_header
                raise Exception(msg)

        if len(ordering_lines) < 2:
            msg = 'File must contain at least two points'
            raise Exception(msg)

        permutation = [int(line.split(',')[0]) for line in ordering_lines]
        permutation = ensure_numeric(permutation)
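        # e.g. data lines whose leading indices are 3, 0 and 1 (in that
        # order) yield permutation == [3, 0, 1]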
    else:
        permutation = None

    # Read MUX2 files
    if verbose: log.critical('Reading mux2 files')

    mux = {}
    times_old = 0.0
    latitudes_old = 0.0
    longitudes_old = 0.0
    elevation_old = 0.0
    starttime_old = 0.0

    for i, quantity in enumerate(quantities):
        # For each quantity read the associated list of source mux2 files
        # with the extension associated with that quantity

        times, latitudes, longitudes, elevation, mux[quantity], starttime \
            = read_mux2_py(files_in[i], weights, permutation, verbose=verbose)

        # Check that all quantities have consistent time and space information
        if quantity != quantities[0]:
            msg = '%s, %s and %s have inconsistent gauge data' \
                  % (files_in[0], files_in[1], files_in[2])
            assert num.allclose(times, times_old), msg
            assert num.allclose(latitudes, latitudes_old), msg
            assert num.allclose(longitudes, longitudes_old), msg
            assert num.allclose(elevation, elevation_old), msg
            assert num.allclose(starttime, starttime_old), msg
        times_old = times
        latitudes_old = latitudes
        longitudes_old = longitudes
        elevation_old = elevation
        starttime_old = starttime

        # Self check - can be removed to improve speed
        #ref_longitudes = [float(line.split(',')[1]) for line in ordering_lines]
        #ref_latitudes = [float(line.split(',')[2]) for line in ordering_lines]
        #
        #msg = 'Longitudes specified in ordering file do not match those ' \
        #      'found in mux files. ' \
        #      'I got %s instead of %s (only beginning shown)' \
        #      % (str(longitudes[:10]) + '...',
        #         str(ref_longitudes[:10]) + '...')
        #assert allclose(longitudes, ref_longitudes), msg
        #
        #msg = 'Latitudes specified in ordering file do not match those ' \
        #      'found in mux files. '
        #      'I got %s instead of %s (only beginning shown)' \
        #      % (str(latitudes[:10]) + '...',
        #         str(ref_latitudes[:10]) + '...')
        #assert allclose(latitudes, ref_latitudes), msg

    # Store timeseries in STS file
    msg = 'File is empty and/or clipped region is not in file region'
    assert len(latitudes) > 0, msg

    number_of_points = latitudes.shape[0]  # Number of stations retrieved
    number_of_times = times.shape[0]  # Number of timesteps
    number_of_latitudes = latitudes.shape[0]  # Number of latitudes
    number_of_longitudes = longitudes.shape[0]  # Number of longitudes

    # The permutation vector contains the original indices as given in the
    # ordering file, or None, in which case the points are assigned the
    # trivial indices enumerating them from 0 to number_of_points-1
    if permutation is None:
        permutation = num.arange(number_of_points, dtype=num.int)

    # NetCDF file definition
    outfile = NetCDFFile(stsname, netcdf_mode_w)

    description = 'Converted from URS mux2 files: %s' % basename_in

    # Create new file
    sts = Write_sts()
    sts.store_header(outfile,
                     times + starttime,
                     number_of_points,
                     description=description,
                     verbose=verbose,
                     sts_precision=netcdf_float)

    # Store
    from anuga.coordinate_transforms.redfearn import redfearn

    x = num.zeros(number_of_points, num.float)  # Easting
    y = num.zeros(number_of_points, num.float)  # Northing

    # Check zone boundaries
    if zone is None:
        refzone, _, _ = redfearn(latitudes[0],
                                 longitudes[0],
                                 central_meridian=central_meridian)
    else:
        refzone = zone

    old_zone = refzone
    old_easting = 0.0
    old_northing = 0.0

    for i in range(number_of_points):
        computed_zone, easting, northing = redfearn(
            latitudes[i],
            longitudes[i],
            zone=zone,
            central_meridian=central_meridian)
        x[i] = easting
        y[i] = northing
        if computed_zone != refzone:
            msg = 'All sts gauges need to be in the same zone. \n'
            msg += 'offending gauge: Zone %d, %.4f, %.4f\n' \
                   % (computed_zone, easting, northing)
            msg += 'previous gauge: Zone %d, %.4f, %.4f' \
                   % (old_zone, old_easting, old_northing)
            raise Exception(msg)
        old_zone = computed_zone
        old_easting = easting
        old_northing = northing

    if origin is None:
        origin = Geo_reference(refzone, min(x), min(y))
    geo_ref = write_NetCDF_georeference(origin, outfile)

    elevation = num.resize(elevation, outfile.variables['elevation'][:].shape)
    outfile.variables['permutation'][:] = permutation.astype(
        num.int32)  # Opteron 64
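    # Store x and y relative to the georeference origin (lower-left corner)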
    outfile.variables['x'][:] = x - geo_ref.get_xllcorner()
    outfile.variables['y'][:] = y - geo_ref.get_yllcorner()
    outfile.variables['elevation'][:] = elevation

    stage = outfile.variables['stage']
    xmomentum = outfile.variables['xmomentum']
    ymomentum = outfile.variables['ymomentum']

    if verbose: log.critical('Converting quantities')

    for j in range(len(times)):
        for i in range(number_of_points):
            ha = mux['HA'][i, j]
            ua = mux['UA'][i, j]
            va = mux['VA'][i, j]
            if ha == NODATA:
                if verbose:
                    msg = 'Setting nodata value %d to 0 at time = %f, ' \
                          'point = %d' % (ha, times[j], i)
                    log.critical(msg)
                ha = 0.0
                ua = 0.0
                va = 0.0

            w = zscale * ha + mean_stage    # Absolute water level (stage)
            h = w - elevation[i]            # Water depth
            stage[j, i] = w

            xmomentum[j, i] = ua * h
            ymomentum[j, i] = -va * h  # South is positive in mux files

    outfile.close()

    if verbose:
        log.critical('Wrote sts file ' + stsname)
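
# A minimal usage sketch for urs2sts (the source prefixes, weights and
# ordering file below are hypothetical; see the docstring for the expected
# file naming and ordering-file formats):
#
#     urs2sts(['sourceA', 'sourceB'],            # one prefix per mux2 source
#             basename_out='combined',           # writes combined.sts
#             weights=[0.7, 0.3],                # one weight per source
#             ordering_filename='urs_order.txt', # selects and orders gauges
#             zone=56,                           # all gauges must be in this zone
#             verbose=True)
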
    def test_ferret2sww1(self):
        """Test that georeferencing etc works when converting from
        ferret format (lat/lon) to sww format (UTM)
        """
        import os

        #The test file has
        # LON = 150.66667, 150.83334, 151, 151.16667
        # LAT = -34.5, -34.33333, -34.16667, -34 ;
        # TIME = 0, 0.1, 0.6, 1.1, 1.6, 2.1 ;
        #
        # First value (index=0) in small_ha.nc is 0.3400644 cm,
        # Fourth value (index==3) is -6.50198 cm



        #Read
        from anuga.coordinate_transforms.redfearn import redfearn
        #fid = NetCDFFile(self.test_MOST_file)
        fid = NetCDFFile(self.test_MOST_file + '_ha.nc')
        first_value = fid.variables['HA'][:][0,0,0]
        fourth_value = fid.variables['HA'][:][0,0,3]
        fid.close()


        #Call conversion (with zero origin)
        #ferret2sww('small', verbose=False,
        #           origin = (56, 0, 0))
        ferret2sww(self.test_MOST_file, verbose=self.verbose,
                   origin = (56, 0, 0))

        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(-34.5, 150.66667)
        #print zone, e, n

        #Read output file 'small.sww'
        #fid = NetCDFFile('small.sww')
        fid = NetCDFFile(self.test_MOST_file + '.sww')

        x = fid.variables['x'][:]
        y = fid.variables['y'][:]

        #Check that first coordinate is correctly represented
        assert num.allclose(x[0], e)
        assert num.allclose(y[0], n)

        #Check first value
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]

        #print ymomentum

        assert num.allclose(stage[0,0], first_value/100)  #Meters

        #Check fourth value
        assert num.allclose(stage[0,3], fourth_value/100)  #Meters

        fid.close()

        #Cleanup
        os.remove(self.test_MOST_file + '.sww')

    def test_urs_ungridded2sww(self):
        
        #Zone:   50    
        #Easting:  240992.578  Northing: 7620442.472 
        #Latitude:   -21  30 ' 0.00000 ''  Longitude: 114  30 ' 0.00000 '' 
        lat_long = [[-21.5,114.5],[-21,114.5],[-21,115]]
        time_step_count = 2
        time_step = 400
        tide = 9000000
        base_name, files = self.write_mux(lat_long,
                                          time_step_count, time_step)
        urs_ungridded2sww(base_name, mean_stage=tide,
                          verbose=self.verbose)
        
        # now I want to check the sww file ...
        sww_file = base_name + '.sww'
        
        #Let's interrogate the sww file
        # Note, the sww info is not gridded.  It is point data.
        fid = NetCDFFile(sww_file)
        
        # Make x and y absolute
        x = fid.variables['x'][:]
        y = fid.variables['y'][:]
        geo_reference = Geo_reference(NetCDFObject=fid)
        points = geo_reference.get_absolute(list(zip(x, y)))
        points = ensure_numeric(points)
        x = points[:,0]
        y = points[:,1]
        
        #Check that first coordinate is correctly represented       
        #Work out the UTM coordinates for first point
        zone, e, n = redfearn(lat_long[0][0], lat_long[0][1]) 
        assert num.allclose([x[0],y[0]], [e,n])

        #Check the time vector
        times = fid.variables['time'][:]
        
        times_actual = []
        for i in range(time_step_count):
            times_actual.append(time_step * i)
        
        assert num.allclose(ensure_numeric(times),
                            ensure_numeric(times_actual))
        
        #Check first value
        stage = fid.variables['stage'][:]
        xmomentum = fid.variables['xmomentum'][:]
        ymomentum = fid.variables['ymomentum'][:]
        elevation = fid.variables['elevation'][:]
        assert num.allclose(stage[0,0], e + tide)  #Meters


        #Check the momenta - ua
        #momentum = velocity * (stage - elevation)
        #         = velocity_ua * (stage + depth)   since elevation = -depth
        #         = n * (e + tide + n)              based on how these test
        #                                           files are written
        answer_x = n*(e+tide+n)
        actual_x = xmomentum[0,0]
        #print "answer_x",answer_x
        #print "actual_x",actual_x 
        assert num.allclose(answer_x, actual_x)  #Meters
        
        #Check the momenta - va
        #momentum = velocity * (stage - elevation)
        #         = velocity_va * (stage + depth)   since elevation = -depth
        #         = e * (e + tide + n)              based on how these test
        #                                           files are written
        #The sign is reversed because south is positive in mux files
        answer_y = -1*e*(e+tide+n)
        actual_y = ymomentum[0,0]
        #print "answer_y",answer_y
        #print "actual_y",actual_y 
        assert num.allclose(answer_y, actual_y)  #Meters

        # check the stage values, first time step.
        # These arrays are equal since the Easting values were used as
        # the stage
        assert num.allclose(stage[0], x + tide)  #Meters
        # check the elevation values.
        # -ve since urs measures depth, sww measures height;
        # these arrays are equal since the northing values were used as
        # the elevation
        assert num.allclose(-elevation, y)  #Meters
        
        fid.close()
        self.delete_mux(files)
        os.remove(sww_file)