def setup_pumping_stations(domain, project):
    """
    Extract pumping station data from project class
    and apply the internal boundary operator

    """
    pumping_station_data = project.pumping_station_data

    for i in range(len(pumping_station_data)):

        # Extract pumping station parameters
        ps = pumping_station_data[i]

        label = ps[0]
        pump_capacity = ps[1]
        pump_rate_of_increase = ps[2]
        pump_rate_of_decrease = ps[3]
        hw_to_start_pumping = ps[4]
        hw_to_stop_pumping = ps[5]

        # Pumping station basin polygon + elevation are used elsewhere

        exchange_line_0 = su.read_polygon(ps[8])
        exchange_line_1 = su.read_polygon(ps[9])
        exchange_lines = [exchange_line_0, exchange_line_1]

        smoothing_timescale = ps[10]

        print 'Need to implement elevation data adjustments'

        # Function which computes Q
        pump_behaviour = pumping_station_function(
            domain=domain,
            pump_capacity=pump_capacity,
            hw_to_start_pumping=hw_to_start_pumping,
            hw_to_stop_pumping=hw_to_stop_pumping,
            initial_pump_rate=0.,
            pump_rate_of_increase=pump_rate_of_increase,
            pump_rate_of_decrease=pump_rate_of_decrease)

        # Add operator as side-effect of this operation
        pumping_station = Internal_boundary_operator(
            domain,
            pump_behaviour,
            exchange_lines=exchange_lines,
            enquiry_gap=0.,
            apron=0.0,
            smoothing_timescale=smoothing_timescale,
            compute_discharge_implicitly=False,
            logging=True,
            label=label,
            verbose=True)

    return
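# Hypothetical example (not taken from the source) of one row of
# project.pumping_station_data, inferred from the positional indexing in
# setup_pumping_stations above. File names, values and units are assumptions;
# ps[6] and ps[7] are assumed to hold the basin polygon and basin elevation
# that the comment above says are used elsewhere.
example_pumping_station_row = [
    'PS1',                      # ps[0]  label
    5.0,                        # ps[1]  pump_capacity
    0.1,                        # ps[2]  pump_rate_of_increase
    0.1,                        # ps[3]  pump_rate_of_decrease
    1.2,                        # ps[4]  hw_to_start_pumping
    0.8,                        # ps[5]  hw_to_stop_pumping
    'ps1_basin.csv',            # ps[6]  basin polygon file (used elsewhere)
    -1.0,                       # ps[7]  basin elevation (used elsewhere)
    'ps1_exchange_line_0.csv',  # ps[8]  first exchange line file
    'ps1_exchange_line_1.csv',  # ps[9]  second exchange line file
    30.,                        # ps[10] smoothing_timescale
]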
def setup_bridges(domain, project):
    """
    Extract bridge data from project class
    and apply the internal boundary operator

    Note that the bridge deck information
    is applied when setting the elevation
    """
    bridge_data = project.bridge_data

    for i in range(len(bridge_data)):

        # Extract bridge parameters
        bd = bridge_data[i]

        label = bd[0]
        # bd[1] and bd[2] are used elsewhere to set deck elevation
        exchange_line_0 = su.read_polygon(bd[3])
        exchange_line_1 = su.read_polygon(bd[4])
        exchange_lines = [exchange_line_0, exchange_line_1]
        enquiry_gap = bd[5]
        #apron = bd[6]
        #assert apron==0.0, 'Apron must be zero until parallel apron issues fixed'
        internal_boundary_curve_file = bd[6]
        vertical_datum_offset = bd[7]
        smoothing_timescale = bd[8]

        # Function which computes Q
        rating_curve = hecras_internal_boundary_function(
            internal_boundary_curves_file=internal_boundary_curve_file,
            allow_sign_reversal=True,
            vertical_datum_offset=vertical_datum_offset)

        # Add operator as side-effect of this operation
        bridge = Internal_boundary_operator(
            domain,
            rating_curve,
            exchange_lines=exchange_lines,
            enquiry_gap=enquiry_gap,
            apron=0.0,
            zero_outflow_momentum=False,
            smoothing_timescale=smoothing_timescale,
            logging=True,
            label=label,
            verbose=True)

    return
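# Hypothetical example (not taken from the source) of one row of
# project.bridge_data, inferred from the positional indexing in setup_bridges
# above. File names and values are assumptions; bd[1] and bd[2] are assumed to
# be the deck polygon and deck elevation that are used elsewhere when setting
# the elevation.
example_bridge_row = [
    'Bridge_A',                    # bd[0] label
    'bridge_a_deck.csv',           # bd[1] used elsewhere (deck)
    12.5,                          # bd[2] used elsewhere (deck)
    'bridge_a_exchange_0.csv',     # bd[3] first exchange line file
    'bridge_a_exchange_1.csv',     # bd[4] second exchange line file
    1.0,                           # bd[5] enquiry_gap
    'bridge_a_hecras_curves.csv',  # bd[6] HEC-RAS internal boundary curves file
    0.0,                           # bd[7] vertical_datum_offset
    30.,                           # bd[8] smoothing_timescale
]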
def setup_inlets(domain, project):
    """
    Add inlets to domain
    """

    inlet_data = project.inlet_data

    for i in range(len(inlet_data)):
        name = inlet_data[i][0]
        line_file = inlet_data[i][1]
        timeseries_file = inlet_data[i][2]
        start_time = inlet_data[i][3]

        # Add inlet
        timeseries = numpy.genfromtxt(timeseries_file,
                                      delimiter=',',
                                      skip_header=1)

        # Adjust start time
        timeseries[:, 0] = timeseries[:, 0] - start_time

        # Make discharge function
        qfun = scipy.interpolate.interp1d(timeseries[:, 0],
                                          timeseries[:, 1],
                                          kind='linear')

        # Make cross-section line
        line = su.read_polygon(line_file)

        anuga.Inlet_operator(domain, line, qfun, label='Inlet: ' + str(name))
    return
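# Hypothetical example (not taken from the source) of one row of
# project.inlet_data and the time series file it refers to, inferred from
# setup_inlets above: the CSV has one header row and two comma-separated
# columns (time, discharge), and start_time is subtracted from the time
# column before the interpolation function is built.
example_inlet_row = [
    'Creek_inflow',           # name, used in the operator label
    'creek_inflow_line.csv',  # cross-section line read by su.read_polygon
    'creek_inflow_Q.csv',     # time series CSV: header, then time,discharge
    0.,                       # start_time offset subtracted from time column
]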
def setup_rainfall(domain, project):
    """
    Function to add rainfall operators to the domain
    """
    rain_data = project.rain_data

    for i in range(len(rain_data)):
        timeseries_file = rain_data[i][0]
        start_time = rain_data[i][1]
        interpolation_type = rain_data[i][2]

        # Get polygon defining rainfall extent
        if ((len(rain_data[i]) >= 4) and (rain_data[i][3] != 'All')):
            polygon = su.read_polygon(rain_data[i][3])
        else:
            polygon = None

        if len(rain_data[i]) >= 5:
            multiplier = rain_data[i][4] * 1.0
        else:
            multiplier = 1.0

        rain_timeseries = scipy.genfromtxt(timeseries_file,
                                           delimiter=',',
                                           skip_header=1)

        # Adjust starttime
        rain_timeseries[:, 0] = rain_timeseries[:, 0] - start_time

        # Convert units to m/s (from mm/hr)
        rain_timeseries[:, 1] = rain_timeseries[:, 1] / (3600. * 1000.) * multiplier

        # Sanity check
        assert rain_timeseries[:, 1].min() >= 0., 'Negative rainfall input'

        # Make interpolation function and add to ANUGA as operator
        if rain_timeseries[:, 1].max() >= 0.:
            myrain = scipy.interpolate.interp1d(rain_timeseries[:, 0],
                                                rain_timeseries[:, 1],
                                                kind=interpolation_type)
            anuga.operators.rate_operators.Rate_operator(domain,
                                                         rate=myrain,
                                                         polygon=polygon,
                                                         label=timeseries_file)

    return
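# Hypothetical example (not taken from the source) of one row of
# project.rain_data, inferred from setup_rainfall above. The time series CSV
# has one header row and columns (time, rainfall in mm/hr); rates are
# converted to m/s by dividing by (3600 * 1000), e.g. 36 mm/hr becomes
# 1.0e-05 m/s, before the optional multiplier is applied.
example_rain_row = [
    'design_storm_rainfall.csv',  # rain_data[i][0] time series file
    0.,                           # rain_data[i][1] start_time offset
    'linear',                     # rain_data[i][2] interpolation_type
    'catchment_polygon.csv',      # rain_data[i][3] extent polygon, or 'All'
    1.0,                          # rain_data[i][4] optional multiplier
]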
def get_initial_condition_data(data_source, worksheet, flag, print_info):
    """Convenience function to extract the initial condition data
       (and initial_condition_additions) from the xls worksheet

       The requirements have become more elaborate over time, e.g.
       to support combining 2 line files into a polygon

       Given a character string referring to a quantity which has initial
       conditions in the xls worksheet (e.g. 'Elevation'),
       extract the associated data from the
       'data_source' (an AnugaXls object) on worksheet 'worksheet'

       This assumes a particular format in the excel sheet
    """

    # Read the polygon / value pairs
    quantity_data = data_source.get_paired_list(worksheet,
                                                flag, [1, 1],
                                                post_process=string_or_float)

    # If the polygon is a wildcard, assume it matches 2 lines, read them in,
    # and join them to make a polygon. This is a convenient shorthand
    # for when we have pairs of breaklines.
    for i in range(len(quantity_data)):
        polygon_files = glob.glob(quantity_data[i][0])

        # Check it only matches 0 or 1 or 2 files
        msg = 'Polygon:' + str(i) + ' : ' + quantity_data[i][0] + \
              ' for ' + flag + ' on worksheet ' + \
              worksheet + ' matches > 2 files. We can join at most 2 lines ' + \
              'to make a polygon'
        assert len(polygon_files) <= 2, msg

        if len(polygon_files) == 0:
            # Check it is valid
            msg = 'Polygon:' + str(i) + ' : ' + quantity_data[i][0] + \
                  ' for ' + flag + ' on worksheet ' + \
                  worksheet + ' matches no files, and is not All or None ' + \
                  'or Extent (for a raster)'
            assert ((quantity_data[i][0] == 'All') |
                    (quantity_data[i][0] is None) |
                    (quantity_data[i][0] == 'Extent')), msg
        elif len(polygon_files) == 2:
            # If it matches 2, try to combine to 1.
            # This is often required to use pairs of breaklines as polygons
            # Do this by:
            # 1) Setting up the 2 lines as though they were in a
            #    breakline object
            # 2) Using su.polygon_from_matching_breakLines
            print_info.append('Initial ' + flag)
            print_info.append('Combining these files to a polygon: ')
            print_info.append(str(polygon_files))
            print_info.append('')

            l0 = su.read_polygon(polygon_files[0])
            l1 = su.read_polygon(polygon_files[1])
            fake_breakline = {polygon_files[0]: l0, polygon_files[1]: l1}
            fake_match = quantity_data[i][0].split('*')[0]
            out_poly = su.polygon_from_matching_breaklines(
                fake_match, fake_breakline)
            quantity_data[i][0] = out_poly

    # Get the clip_range for each polygon / function pair
    quantity_clip_range = data_source.get_fixed_size_subtable_by_columns(
        worksheet,
        flag,
        dimensions=[2, len(quantity_data)],
        offset=[3, 1],
        post_process=string_or_float)

    new_quantity_clip_range = reformat_clip_range(quantity_clip_range)

    # Get sub-grid size for spatial averaging, if applicable
    spatial_average = data_source.get_var(worksheet,
                                          flag, [5, 1],
                                          post_process=string_or_float)

    if type(spatial_average) == str:
        spatial_average = None

    return quantity_data, new_quantity_clip_range, spatial_average
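# Rough usage sketch (an assumption, not from the source). For a flag such as
# 'Elevation', quantity_data is a list of [polygon, value] pairs where polygon
# may be a file name / wildcard / 'All' / None / 'Extent' or an x-y vertex
# list built by joining two matched breakline files, and value is a number or
# a raster/csv file name; new_quantity_clip_range holds one [lower, upper]
# pair per polygon; spatial_average is a sub-grid averaging length, or None
# when the worksheet cell held text. The worksheet name below is hypothetical.
#
# quantity_data, clip_range, spatial_average = get_initial_condition_data(
#     data_source, 'initial_conditions', 'Elevation', print_info=[])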
    def F(x, y):
        """This is the function returned by composite_quantity_setting_function
           It can be passed to set_quantity
        """
        isSet = numpy.zeros(len(x))  # 0/1 - record if each point has been set
        quantityVal = x * 0 + numpy.nan  # Function return value

        # Record points which evaluated to nan on their first preference
        # dataset.
        was_ever_nan = (x * 0).astype(int)

        lpf = len(poly_fun_pairs)
        if (lpf <= 0):
            raise Exception('Must have at least 1 fun-poly-pair')

        # Make an array of 'transformed' spatial coordinates, for checking
        # polygon inclusion
        xll = domain.geo_reference.xllcorner
        yll = domain.geo_reference.yllcorner
        xy_array_trans = numpy.vstack([x + xll, y + yll]).transpose()

        # Check that none of the pi polygons [except perhaps the last] is 'All'
        for i in range(lpf - 1):
            if (poly_fun_pairs[i][0] == 'All'):
                # This is only ok if all the other poly_fun_pairs are None
                remaining_poly_fun_pairs_are_None = \
                    [poly_fun_pairs[j][0] is None for j in range(i + 1, lpf)]
                if (not all(remaining_poly_fun_pairs_are_None)):
                    raise Exception('Can only have the last polygon = All')

        # Main Loop
        # Apply the fi inside the pi
        for i in range(lpf):
            fi = poly_fun_pairs[i][1]  # The function
            pi = poly_fun_pairs[i][0]  # The polygon

            # Quick exit
            if (pi is None):
                continue

            ###################################################################
            # Get indices fInds of points in polygon pi which are not already
            # set
            ###################################################################
            if (pi == 'All'):
                # Get all unset points
                fInside = (1 - isSet)
                fInds = (fInside == 1).nonzero()[0]

            else:

                if (pi == 'Extent'):
                    # Here fi MUST be a gdal-compatible raster
                    if (not (type(fi) == str)):
                        msg = ' pi = "Extent" can only be used when fi is a' +\
                              ' raster file name'
                        raise Exception(msg)

                    if (not os.path.exists(fi)):
                        msg = 'fi ' + str(fi) + ' is supposed to be a ' +\
                              ' raster filename, but it could not be found'
                        raise Exception(msg)

                    # Then we get the extent from the raster itself
                    pi_path = su.getRasterExtent(fi, asPolygon=True)

                    if verbose:
                        print 'Extracting extent from raster: ', fi
                        print 'Extent: ', pi_path

                elif ((type(pi) == str) and os.path.isfile(pi)):
                    # pi is a file
                    pi_path = su.read_polygon(pi)

                else:
                    # pi is the actual polygon data
                    pi_path = pi

                # Get the insides of unset points inside pi_path
                notSet = (isSet == 0.).nonzero()[0]
                fInds = inside_polygon(xy_array_trans[notSet, :], pi_path)
                fInds = notSet[fInds]

            if len(fInds) == 0:
                # No points found, move on
                continue

            ###################################################################
            # Evaluate fi at the points inside pi
            ###################################################################

            # We use various tricks to infer whether fi is a function,
            # a constant, a file (raster or csv), or an array
            if (hasattr(fi, '__call__')):
                # fi is a function
                quantityVal[fInds] = fi(x[fInds], y[fInds])

            elif isinstance(fi, (int, long, float)):
                # fi is a numerical constant
                quantityVal[fInds] = fi * 1.0

            elif (type(fi) is str and os.path.exists(fi)):
                # fi is a file which is assumed to be
                # a gdal-compatible raster OR an x,y,z elevation file
                if os.path.splitext(fi)[1] in ['.txt', '.csv']:
                    fi_array = su.read_csv_optional_header(fi)
                    # Check the results
                    if fi_array.shape[1] != 3:
                        print 'Treated input file ' + fi +\
                              ' as xyz array with an optional header'
                        msg = 'Array should have 3 columns -- x,y,value'
                        raise Exception(msg)

                    newfi = make_nearestNeighbour_quantity_function(
                        fi_array,
                        domain,
                        k_nearest_neighbours=default_k_nearest_neighbours)
                    quantityVal[fInds] = newfi(x[fInds], y[fInds])

                else:
                    # Treating input file as a raster
                    newfi = quantityRasterFun(
                        domain, fi, interpolation=default_raster_interpolation)
                    quantityVal[fInds] = newfi(x[fInds], y[fInds])

            elif (type(fi) is numpy.ndarray):
                if fi.shape[1] != 3:
                    msg = 'Array should have 3 columns -- x,y,value'
                    raise Exception(msg)
                newfi = make_nearestNeighbour_quantity_function(
                    fi,
                    domain,
                    k_nearest_neighbours=default_k_nearest_neighbours)
                quantityVal[fInds] = newfi(x[fInds], y[fInds])

            else:
                print 'Error with function from'
                print fi
                msg = 'Cannot make function from type ' + str(type(fi))
                raise Exception(msg)

            ###################################################################
            # Check for nan values
            ###################################################################
            #nan_flag = (quantityVal[fInds] != quantityVal[fInds])
            nan_flag = 1 * numpy.isnan(quantityVal[fInds])
            nan_inds = nan_flag.nonzero()[0]
            was_ever_nan[fInds[nan_inds]] = 1

            if len(nan_inds) > 0:
                if nan_treatment == 'exception':
                    msg = 'nan values generated by the poly_fun_pair at '\
                          'index ' + str(i) + ' '\
                          'in composite_quantity_setting_function. ' + \
                          'To allow these values to be set by later ' + \
                          'poly_fun pairs, pass the argument ' + \
                          'nan_treatment="fall_through" ' + \
                          'to composite_quantity_setting_function'
                    raise Exception(msg)

                elif nan_treatment == 'fall_through':
                    msg = 'WARNING: nan values generated by the ' + \
                          'poly_fun_pair at index ' + str(i) + ' '\
                          'in composite_quantity_setting_function. ' + \
                          'They will be passed to later poly_fun_pairs'
                    if verbose: print msg
                    not_nan_inds = (1 - nan_flag).nonzero()[0]

                    if len(not_nan_inds) > 0:
                        fInds = fInds[not_nan_inds]
                    else:
                        # All values are nan
                        msg = '( Actually all the values were nan - ' + \
                              'Are you sure they should be? Possible error?)'
                        if verbose: print msg
                        continue

                else:
                    msg = 'Found nan values in ' + \
                          'composite_quantity_setting_function but ' + \
                          'nan_treatment is not a recognized value'
                    raise Exception(msg)

            # Record that the points have been set
            isSet[fInds] = 1

            # Enforce clip_range
            if clip_range is not None:
                lower_bound = clip_range[i][0]
                upper_bound = clip_range[i][1]
                quantityVal[fInds] = numpy.maximum(quantityVal[fInds],
                                                   lower_bound)
                quantityVal[fInds] = numpy.minimum(quantityVal[fInds],
                                                   upper_bound)

        # End of loop

        # Find points which were nan on their first preference dataset + are
        # inside nan_interpolation_region_polygon. Then reinterpolate their
        # values from the other x,y, quantityVal points.
        if (nan_interpolation_region_polygon is not None) &\
           (was_ever_nan.sum() > 0):
            if nan_interpolation_region_polygon == 'All':
                points_to_reinterpolate = was_ever_nan.nonzero()[0]
            else:
                # nan_interpolation_region_polygon contains information on 1 or
                # more polygons
                # Inside those polygons, we need to re-interpolate points which
                # first evaluated to nan
                possible_points_to_reint = was_ever_nan.nonzero()[0]
                points_to_reinterpolate = numpy.array([]).astype(int)

                for i in range(len(nan_interpolation_region_polygon)):
                    nan_pi = nan_interpolation_region_polygon[i]

                    # Ensure nan_pi = list of x,y points making a polygon
                    if (type(nan_pi) == str):
                        nan_pi = su.read_polygon(nan_pi)

                    points_in_nan_pi = inside_polygon(
                        xy_array_trans[possible_points_to_reint, :], nan_pi)

                    if len(points_in_nan_pi) > 0:
                        points_to_reinterpolate = numpy.hstack([
                            points_to_reinterpolate,
                            possible_points_to_reint[points_in_nan_pi]
                        ])

            if verbose:
                print 'Re-interpolating ', len(points_to_reinterpolate),\
                      ' points which were nan under their',\
                      ' first-preference and are inside the',\
                      ' nan_interpolation_region_polygon'

            if len(points_to_reinterpolate) > 0:
                msg = 'WARNING: nan interpolation is being applied. This ' +\
                      'should be done in serial prior to distributing the ' +\
                      'domain, as there is no parallel communication ' +\
                      'implemented yet [so parallel results might depend on ' +\
                      'the number of processes]'
                if verbose:
                    print msg

            # Find the interpolation points = points not needing reinterpolation
            ip = x * 0 + 1
            ip[points_to_reinterpolate] = 0
            number_of_ip = ip.sum()
            ip = ip.nonzero()[0]

            # Check that none of the ip points has an nan value
            nan_ip = (quantityVal[ip] != quantityVal[ip]).nonzero()[0]

            if len(nan_ip) > 0:
                print 'There are ', len(nan_ip), ' points outside the ',\
                      'nan_interpolation_region_polygon which have nan values.'
                print 'The user should ensure this does not happen.'
                print 'The points have the following coordinates:'
                print xy_array_trans[ip[nan_ip], :]
                msg = "There are nan points outside of " +\
                      "nan_interpolation_region_polygon, even after all " +\
                      "fall-through's"
                raise Exception(msg)

            if (number_of_ip < default_k_nearest_neighbours):
                raise Exception('Too few non-nan points to interpolate from')

            # Make function for re-interpolation. Note this requires
            # x,y,z in georeferenced coordinates, whereas x,y are ANUGA
            # coordinates
            reinterp_F = make_nearestNeighbour_quantity_function(
                numpy.vstack([
                    xy_array_trans[ip, 0], xy_array_trans[ip, 1],
                    quantityVal[ip]
                ]).transpose(),
                domain,
                k_nearest_neighbours=default_k_nearest_neighbours)

            # re-interpolate
            quantityVal[points_to_reinterpolate] = reinterp_F(
                x[points_to_reinterpolate], y[points_to_reinterpolate])

            isSet[points_to_reinterpolate] = 1

        # Check there are no remaining nan values
        if (min(isSet) != 1):
            print 'Some points remain as nan, which is not allowed'
            unset_inds = (isSet != 1).nonzero()[0]
            lui = min(5, len(unset_inds))
            print 'There are ', len(unset_inds), ' such points'
            print 'Here are a few:'
            for i in range(lui):
                print x[unset_inds[i]] + xll, y[unset_inds[i]] + yll
            raise Exception('It seems the input data needs to be fixed')

        return quantityVal
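    # Minimal usage sketch of the returned F (an illustration, not from the
    # source; file names are hypothetical). composite_quantity_setting_function
    # builds F from [polygon, function] pairs, and F can then be passed to
    # set_quantity, mirroring the call in start_sim further below.
    #
    # poly_fun_pairs = [
    #     ['channel.csv', 'channel_bathymetry.tif'],  # raster inside a polygon
    #     ['Extent', 'lidar_dem.tif'],                # raster over its own extent
    #     ['All', 10.0],                              # constant fallback, last entry
    # ]
    # F = composite_quantity_setting_function(poly_fun_pairs, domain,
    #                                         nan_treatment='exception')
    # domain.set_quantity('elevation', F)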
def start_sim(run_id, Runs, scenario_name, Scenario, session, **kwargs):
    yieldstep = kwargs['yieldstep']
    finaltime = kwargs['finaltime']
    logger = logging.getLogger(run_id)
    max_triangle_area = kwargs['max_triangle_area']
    logger.info('Starting hydrata_project')

    if run_id == 'local_run':
        base_dir = os.getcwd()
    else:
        base_dir = os.getcwd() + '/base_dir/%s/' % run_id

    outname = run_id
    meshname = base_dir + 'outputs/' + run_id + '.msh'

    def get_filename(data_type, file_type):
        files = os.listdir('%sinputs/%s' % (base_dir, data_type))
        filename = '%sinputs/%s/%s' % (
            base_dir, data_type, [f for f in files if f[-4:] == file_type][0])
        return filename

    boundary_data_filename = get_filename('boundary_data', '.shp')
    elevation_data_filename = get_filename('elevation_data', '.tif')
    try:
        structures_filename = get_filename('structures', '.shp')
    except OSError as e:
        structures_filename = None
    try:
        rain_data_filename = get_filename('rain_data', '.shp')
    except OSError as e:
        rain_data_filename = None
    try:
        inflow_data_filename = get_filename('inflow_data', '.shp')
    except OSError as e:
        inflow_data_filename = None
    try:
        friction_data_filename = get_filename('friction_data', '.shp')
    except OSError as e:
        friction_data_filename = None

    logger.info('boundary_data_filename: %s' % boundary_data_filename)
    logger.info('structures_filename: %s' % structures_filename)
    logger.info('rain_data_filename: %s' % rain_data_filename)
    logger.info('inflow_data_filename: %s' % inflow_data_filename)
    logger.info('friction_data_filename: %s' % friction_data_filename)
    logger.info('elevation_data_filename: %s' % elevation_data_filename)

    # create a list of project files
    vector_filenames = [
        boundary_data_filename, structures_filename, rain_data_filename,
        inflow_data_filename, friction_data_filename
    ]

    # set the projection system for ANUGA calculations from the geotiff elevation data
    elevation_data_gdal = gdal.Open(elevation_data_filename)
    project_spatial_ref = osr.SpatialReference()
    project_spatial_ref.ImportFromWkt(elevation_data_gdal.GetProjectionRef())
    project_spatial_ref_epsg_code = int(
        project_spatial_ref.GetAttrValue("AUTHORITY", 1))

    # check the spatial reference system of the project files matches that of the calculation
    for filename in vector_filenames:
        if filename:
            prj_text = open(filename[:-4] + '.prj').read()
            srs = osr.SpatialReference()
            srs.ImportFromESRI([prj_text])
            srs.AutoIdentifyEPSG()
            logger.info('filename is: %s' % filename)
            logger.info('EPSG is: %s' % srs.GetAuthorityCode(None))
            if str(srs.GetAuthorityCode(None)) != str(
                    project_spatial_ref_epsg_code):
                logger.warning('warning: spatial refs do not match: %s, %s' %
                               (srs.GetAuthorityCode(None),
                                project_spatial_ref_epsg_code))

    logger.info('Setting up structures...')
    if structures_filename:
        structures = []
        logger.info('processing structures from :%s' % structures_filename)
        ogr_shapefile = ogr.Open(structures_filename)
        ogr_layer = ogr_shapefile.GetLayer(0)
        ogr_layer_feature = ogr_layer.GetNextFeature()
        while ogr_layer_feature:
            structure = json.loads(ogr_layer_feature.GetGeometryRef().
                                   ExportToJson())['coordinates'][0]
            structures.append(structure)
            ogr_layer_feature = None
            ogr_layer_feature = ogr_layer.GetNextFeature()

        logger.info('structures: %s' % structures)
    else:
        logger.warning('warning: no structures found.')
        structures = None

    logger.info('Setting up friction...')
    frictions = []
    if friction_data_filename:
        logger.info('processing frictions from :%s' % friction_data_filename)
        ogr_shapefile = ogr.Open(friction_data_filename)
        ogr_layer = ogr_shapefile.GetLayer(0)
        ogr_layer_feature = ogr_layer.GetNextFeature()
        while ogr_layer_feature:
            friction_poly = json.loads(ogr_layer_feature.GetGeometryRef().
                                       ExportToJson())['coordinates'][0]
            friction_value = float(ogr_layer_feature.GetField('mannings'))
            friction_couple = [friction_poly, friction_value]
            frictions.append(friction_couple)
            ogr_layer_feature = None
            ogr_layer_feature = ogr_layer.GetNextFeature()

        frictions.append(['All', 0.04])
        logger.info('frictions: %s' % frictions)
    else:
        frictions.append(['All', 0.04])
        logger.info('warning: no frictions found.')

    logger.info('Setting up boundary conditions...')
    ogr_shapefile = ogr.Open(boundary_data_filename)
    ogr_layer = ogr_shapefile.GetLayer(0)
    ogr_layer_definition = ogr_layer.GetLayerDefn()
    logger.info('ogr_layer_definition.GetGeomType: %s' %
                ogr_layer_definition.GetGeomType())
    boundary_tag_index = 0
    bdy_tags = {}
    bdy = {}

    ogr_layer_feature = ogr_layer.GetNextFeature()
    while ogr_layer_feature:
        boundary_tag_key = ogr_layer_feature.GetField('bdy_tag_k')
        boundary_tag_value = ogr_layer_feature.GetField('bdy_tag_v')
        bdy_tags[boundary_tag_key] = [
            boundary_tag_index * 2, boundary_tag_index * 2 + 1
        ]
        bdy[boundary_tag_key] = boundary_tag_value
        geom = ogr_layer_feature.GetGeometryRef().GetPoints()
        ogr_layer_feature = None
        ogr_layer_feature = ogr_layer.GetNextFeature()
        boundary_tag_index = boundary_tag_index + 1
        logger.info('bdy_tags: %s' % bdy_tags)
    logger.info('bdy: %s' % bdy)

    boundary_data = su.read_polygon(boundary_data_filename)

    create_mesh_from_regions(boundary_data,
                             boundary_tags=bdy_tags,
                             maximum_triangle_area=max_triangle_area,
                             interior_regions=None,
                             interior_holes=structures,
                             filename=meshname,
                             use_cache=False,
                             verbose=True)

    domain = Domain(meshname, use_cache=False, verbose=True)
    domain.set_name(outname)
    domain.set_datadir(base_dir + '/outputs')
    logger.info(domain.statistics())
    poly_fun_pairs = [['Extent', elevation_data_filename.encode("utf-8")]]
    topography_function = qs.composite_quantity_setting_function(
        poly_fun_pairs,
        domain,
        nan_treatment='exception',
    )
    friction_function = qs.composite_quantity_setting_function(
        frictions, domain)
    domain.set_quantity('friction', friction_function, verbose=True)
    domain.set_quantity('stage', 0.0)
    domain.set_quantity('elevation',
                        topography_function,
                        verbose=True,
                        alpha=0.99)
    domain.set_minimum_storable_height(0.005)

    logger.info('Applying rainfall...')
    if rain_data_filename:
        ogr_shapefile = ogr.Open(rain_data_filename)
        ogr_layer = ogr_shapefile.GetLayer(0)
        rainfall = 0
        ogr_layer_feature = ogr_layer.GetNextFeature()
        while ogr_layer_feature:
            rainfall = float(ogr_layer_feature.GetField('rate_mm_hr'))
            polygon = su.read_polygon(rain_data_filename)
            logger.info("applying Polygonal_rate_operator with rate, polygon:")
            logger.info(rainfall)
            logger.info(polygon)
            Polygonal_rate_operator(domain,
                                    rate=rainfall,
                                    factor=1.0e-6,
                                    polygon=polygon,
                                    default_rate=0.0)
            ogr_layer_feature = None
            ogr_layer_feature = ogr_layer.GetNextFeature()

    logger.info('Applying surface inflows...')
    if inflow_data_filename:
        ogr_shapefile = ogr.Open(inflow_data_filename)
        ogr_layer = ogr_shapefile.GetLayer(0)
        ogr_layer_definition = ogr_layer.GetLayerDefn()
        ogr_layer_feature = ogr_layer.GetNextFeature()
        while ogr_layer_feature:
            in_fixed = float(ogr_layer_feature.GetField('in_fixed'))
            line = ogr_layer_feature.GetGeometryRef().GetPoints()
            logger.info("applying Inlet_operator with line, in_fixed:")
            logger.info(line)
            logger.info(in_fixed)
            Inlet_operator(domain, line, in_fixed, verbose=False)
            ogr_layer_feature = None
            ogr_layer_feature = ogr_layer.GetNextFeature()

    logger.info('Applying Boundary Conditions...')
    logger.info('Available boundary tags: %s' % domain.get_boundary_tags())

    Br = anuga.Reflective_boundary(domain)
    Bd = anuga.Dirichlet_boundary([0.0, 0.0, 0.0])
    Bt = anuga.Transmissive_boundary(domain)

    for key, value in bdy.iteritems():
        if value == 'Br':
            bdy[key] = Br
        elif value == 'Bd':
            bdy[key] = Bd
        elif value == 'Bt':
            bdy[key] = Bt
        else:
            logger.info(
                'No matching boundary condition exists - please check your shapefile attributes in: %s'
                % boundary_data_filename)

    # set a default value for exterior & interior boundary if it is not already set
    try:
        bdy['exterior']
    except KeyError:
        bdy['exterior'] = Br
    try:
        bdy['interior']
    except KeyError:
        bdy['interior'] = Br

    logger.info('bdy: %s' % bdy)

    domain.set_boundary(bdy)

    domain = distribute(domain)
    logger.info('Beginning evolve phase...')
    for t in domain.evolve(yieldstep, finaltime):
        domain.write_time()
        print domain.timestepping_statistics()
        logger.info(domain.timestepping_statistics(track_speeds=True))
        percentage_complete = round(domain.time / domain.finaltime, 3) * 100
        logger.info('%s percent complete' % percentage_complete)
        if run_id != 'local_run':
            write_percentage_complete(run_id, Runs, scenario_name, Scenario,
                                      session, percentage_complete)
    domain.sww_merge(delete_old=True)
    barrier()
    finalize()
    sww_file = base_dir + '/outputs/' + run_id + '.sww'
    sww_file = sww_file.encode(
        'utf-8',
        'ignore')  # sometimes run_id gets turned to a unicode object by celery
    util.Make_Geotif(swwFile=sww_file,
                     output_quantities=['depth', 'velocity'],
                     myTimeStep='max',
                     CellSize=max_triangle_area,
                     lower_left=None,
                     upper_right=None,
                     EPSG_CODE=project_spatial_ref_epsg_code,
                     proj4string=None,
                     velocity_extrapolation=True,
                     min_allowed_height=1.0e-05,
                     output_dir=(base_dir + '/outputs/'),
                     bounding_polygon=boundary_data,
                     internal_holes=structures,
                     verbose=False,
                     k_nearest_neighbours=3,
                     creation_options=[])
    logger.info("Done. Nice work.")
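# Hypothetical invocation sketch (not from the source), showing the keyword
# arguments start_sim reads from kwargs (yieldstep, finaltime,
# max_triangle_area). Runs/Scenario/session are only used when writing the
# percentage complete for non-local runs.
#
# start_sim('local_run', Runs=None, scenario_name='local', Scenario=None,
#           session=None, yieldstep=60., finaltime=3600.,
#           max_triangle_area=100.)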
    def elevation_setter(xc, yc):

        # Return scipy array of values
        out = xc * 0.

        # Get multiple elevation values in each triangle.
        # Process triangles in chunks to reduce function call overhead
        lx = len(xc)
        lx_div_cs = scipy.ceil(lx * 1. / (1. * chunk_size)).astype(int)

        # Crude check that xc/yc are the centroid values
        #
        erMess = ' Result of make_meanFun can ONLY be applied to a vector' +\
            ' of ALL centroid coordinates\n' +\
            ' (since mesh triangles are used to spatially average)'
        assert scipy.all(xc == domain.centroid_coordinates[:, 0]), erMess
        assert scipy.all(yc == domain.centroid_coordinates[:, 1]), erMess

        # Find triangles in which we want to average
        if polygons_for_averaging is not None:

            averaging_flag = 0*xc

            # Need georeferenced centroid coordinates to find which
            # are in the polygon
            xll = domain.geo_reference.xllcorner
            yll = domain.geo_reference.yllcorner
            centroid_coordinates_georef = scipy.vstack([xc + xll, yc + yll]).transpose()

            for j in range(len(polygons_for_averaging)):
                poly_j = polygons_for_averaging[j]
                # poly_j can either be a polygon, or a filename
                if type(poly_j) is str:
                    poly_j = su.read_polygon(poly_j)
                
                points_in_poly_j = inside_polygon(centroid_coordinates_georef, 
                    poly_j)
                
                averaging_flag[points_in_poly_j] = 1
                
        else:
            averaging_flag = 1 + 0*xc
        

        for i in range(lx_div_cs):
            # Evaluate in triangles lb:ub
            lb = i * chunk_size
            ub = min((i + 1) * chunk_size, lx)

            if verbose:
                print 'Averaging in triangles ', lb, '-', ub - 1

            # Store x,y,triangleIndex
            px = scipy.array([])
            py = scipy.array([])
            p_indices = scipy.array([])

            for j in range(lb, ub):
                # If we average this cell, then get a grid
                # of points in it. Otherwise just get the centroid
                # coordinates. 
                if averaging_flag[j] == 1:
                    mesh_tri = \
                        domain.mesh.vertex_coordinates[
                            range(3 * j, 3 * j + 3), :].tolist()

                    pts = su.gridPointsInPolygon(
                        mesh_tri,
                        approx_grid_spacing=approx_grid_spacing)
                else:
                    # Careful to keep this a 2D array
                    pts = domain.centroid_coordinates[j,:, None].transpose()

                px = scipy.hstack([px, pts[:, 0]])

                py = scipy.hstack([py, pts[:, 1]])

                p_indices = scipy.hstack([p_indices,
                                          scipy.repeat(j, len(pts[:, 0]))])

            # Get function values at all px,py
            if verbose:
                print '  Evaluating function at ', len(px), ' points'

            allTopo = q_function(px, py)

            # Set output values in lb:ub
            for j in range(lb, ub):
                out_indices = (p_indices == j).nonzero()[0]
                assert len(out_indices) > 0
                if averaging == 'mean':
                    out[j] = allTopo[out_indices].mean()
                elif averaging == 'min':
                    out[j] = allTopo[out_indices].min()
                elif averaging == 'max':
                    out[j] = allTopo[out_indices].max()
                else:
                    raise Exception('Unknown value of averaging')
        return out
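The polygon-selection step above (building averaging_flag from inside_polygon tests on georeferenced centroids) can be illustrated in isolation. Below is a minimal standalone sketch, assuming matplotlib is available for the point-in-polygon test; it is not ANUGA's inside_polygon helper, just the same idea, and the example coordinates are purely illustrative.

import numpy
from matplotlib.path import Path


def flag_points_in_polygons(points, polygons):
    """Return a 0/1 array marking points that fall inside any polygon."""
    points = numpy.asarray(points, dtype=float)
    flag = numpy.zeros(len(points))
    for poly in polygons:
        # Boolean mask of points inside this polygon
        inside = Path(numpy.asarray(poly, dtype=float)).contains_points(points)
        flag[inside] = 1
    return flag


# Hypothetical usage with two small averaging polygons
centroids = [[0.5, 0.5], [2.5, 2.5], [5.0, 5.0]]
polys = [[[0., 0.], [1., 0.], [1., 1.], [0., 1.]],
         [[2., 2.], [3., 2.], [3., 3.], [2., 3.]]]
print(flag_points_in_polygons(centroids, polys))  # -> [1. 1. 0.]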
Example #14
    def process_project_data(self):
        """Process the input data ready for ANUGA

        """

        # Print messages from the ProjectData.__init__ call
        # This allows us to log those messages without refactoring
        # (Consider refactoring though)
        if myid == 0:
            for p in self.print_info:
                print p
            print ''
            print '---------------------'
            print 'PROCESS_PROJECT_DATA'
            print '---------------------'
            print ''
            # Record the time and broadcast to other processers
            time_number = time.time()
            if numprocs > 1:
                for i in range(1, numprocs):
                    send(time_number, i)
        else:
            time_number = receive(0)

        # We can either use interior regions, or breaklines

        if self.interior_regions_data != []:
            assert self.pt_areas is None, \
                'Cannot define both ptAreas and non-empty interior regions'

        bounding_polygon_and_tags = \
            read_boundary_tags_line_shapefile(
                self.bounding_polygon_and_tags_file,
                self.boundary_tags_attribute_name)
        self.bounding_polygon = bounding_polygon_and_tags[0]
        self.boundary_tags = bounding_polygon_and_tags[1]

        self.breaklines = su.readListOfBreakLines(self.breakline_files)
        (self.riverwalls, self.riverwall_par) = \
            su.readListOfRiverWalls(self.riverwall_csv_files)

        if self.pt_areas is not None:
            self.region_point_areas = su.readRegionPtAreas(
                self.pt_areas,
                convert_length_to_area=self.region_resolutions_from_length)
        else:
            self.region_point_areas = None

        # Hack to override resolution
        # region_point_areas=\
        # [ region_point_areas[i][0:2]+[150*150*0.5] for i in \
        #                      range(len(region_point_areas))]

        # Redefine interior_regions to contain the polygon data + resolutions

        self.interior_regions = [[su.read_polygon(ir[0]), ir[1]] for ir in
                                 self.interior_regions_data]

        # Deal with intersections in the bounding polygon / breaklines /
        # riverwalls. At the moment we cannot add points to the bounding
        # polygon because the boundary tags are not adjusted -- so check that
        # the length of the bounding polygon doesn't change
        lbp = len(self.bounding_polygon)
        if type(self.break_line_intersect_point_movement_threshold) is not str:
            (self.bounding_polygon, self.breaklines, self.riverwalls) = \
                su.add_intersections_to_domain_features(
                    self.bounding_polygon,
                    self.breaklines,
                    self.riverwalls,
                    point_movement_threshold=self.break_line_intersect_point_movement_threshold,
                    verbose=True)

        msg = 'Bounding polygon had points added or dropped because of ' + \
              'intersections -- this is not yet properly supported. Please ' + \
              'add the intersection points to the bounding polygon'
        assert lbp == len(self.bounding_polygon), msg

        # Here we make a unique ID based on all the mesh geometry inputs.
        # This tells us if we need to regenerate partitions, or use old ones
        mesh_dependency_information = [
                self.bounding_polygon,
                self.interior_regions,
                self.riverwalls,
                self.breaklines,
                self.region_point_areas,
                self.default_res,
                self.boundary_tags
            ]

        if not self.use_existing_mesh_pickle:
            # Append the time to the mesh dependency so we don't reuse old
            # meshes
            mesh_dependency_information.append([time_number])

        self.mesh_id_hash = hashlib.md5(
            json.dumps(mesh_dependency_information)).hexdigest()

        # Fix the output tif bounding polygon
        if self.output_tif_bounding_polygon is None:
            self.output_tif_bounding_polygon = self.bounding_polygon
        else:
            self.output_tif_bounding_polygon = \
                su.read_polygon(self.output_tif_bounding_polygon)

        # Make proj4string from projection information
        #

        if isinstance(self.projection_information, int):

            # projection_information is an integer UTM zone; build a
            # proj4string like
            # '+proj=utm +zone=47 +south +datum=WGS84 +units=m +no_defs'

            if self.projection_information < 0:
                self.proj4string = '+proj=utm +zone=' \
                    + str(abs(self.projection_information)) \
                    + ' +south +datum=WGS84 +units=m +no_defs'
            else:
                self.proj4string = '+proj=utm +zone=' \
                    + str(self.projection_information) \
                    + ' +datum=WGS84 +units=m +no_defs'
        elif isinstance(self.projection_information, str):
            self.proj4string = self.projection_information
        else:
            msg = 'Invalid projection information -- must be a proj4string, ' + \
                'or an integer defining a UTM zone [positive for the ' + \
                'northern hemisphere, negative for the southern hemisphere]'
            raise Exception(msg)

        # Set up directories etc

        self.partition_basedir = 'PARTITIONS/'
        self.partition_dir = self.partition_basedir + 'Mesh_' +\
            str(self.mesh_id_hash)
        self.meshname = self.output_dir + '/mesh.tsh'
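The proj4string convention used in process_project_data (a positive integer means a northern-hemisphere UTM zone, a negative integer a southern-hemisphere zone, and a string is passed through unchanged) can be captured in a small helper. A minimal sketch; the helper name is illustrative and not part of the project code.

def projection_to_proj4string(projection_information):
    """Mirror the proj4string construction convention used above."""
    if isinstance(projection_information, int):
        zone = abs(projection_information)
        # Negative zone numbers denote the southern hemisphere
        south = ' +south' if projection_information < 0 else ''
        return ('+proj=utm +zone=%d%s +datum=WGS84 +units=m +no_defs'
                % (zone, south))
    elif isinstance(projection_information, str):
        # Already a proj4string
        return projection_information
    else:
        raise Exception('Invalid projection information')


# Example: zone 56 south (eastern Australia)
print(projection_to_proj4string(-56))
# '+proj=utm +zone=56 +south +datum=WGS84 +units=m +no_defs'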
Example #16
    def elevation_setter(xc, yc):

        # Return scipy array of values
        out = xc * 0.

        # Get multiple elevation values in each triangle.
        # Process triangles in chunks to reduce function call overhead
        lx = len(xc)
        lx_div_cs = scipy.ceil(lx * 1. / (1. * chunk_size)).astype(int)

        # Crude check that xc/yc are the centroid values
        #
        erMess = ' Result of make_meanFun can ONLY be applied to a vector' +\
            ' of ALL centroid coordinates\n' +\
            ' (since mesh triangles are used to spatially average)'
        assert scipy.all(xc == domain.centroid_coordinates[:, 0]), erMess
        assert scipy.all(yc == domain.centroid_coordinates[:, 1]), erMess

        # Find triangles in which we want to average
        if polygons_for_averaging is not None:

            averaging_flag = 0 * xc

            # Need georeferenced centroid coordinates to find which
            # are in the polygon
            xll = domain.geo_reference.xllcorner
            yll = domain.geo_reference.yllcorner
            centroid_coordinates_georef = scipy.vstack([xc + xll,
                                                        yc + yll]).transpose()

            for j in range(len(polygons_for_averaging)):
                poly_j = polygons_for_averaging[j]
                # poly_j can either be a polygon, or a filename
                if type(poly_j) is str:
                    poly_j = su.read_polygon(poly_j)

                points_in_poly_j = inside_polygon(centroid_coordinates_georef,
                                                  poly_j)

                averaging_flag[points_in_poly_j] = 1

        else:
            averaging_flag = 1 + 0 * xc

        for i in range(lx_div_cs):
            # Evaluate in triangles lb:ub
            lb = i * chunk_size
            ub = min((i + 1) * chunk_size, lx)

            if verbose:
                print 'Averaging in triangles ', lb, '-', ub - 1

            # Store x,y,triangleIndex
            px = scipy.array([])
            py = scipy.array([])
            p_indices = scipy.array([])

            for j in range(lb, ub):
                # If we average this cell, then get a grid
                # of points in it. Otherwise just get the centroid
                # coordinates.
                if averaging_flag[j] == 1:
                    mesh_tri = \
                        domain.mesh.vertex_coordinates[
                            range(3 * j, 3 * j + 3), :].tolist()

                    pts = su.gridPointsInPolygon(
                        mesh_tri, approx_grid_spacing=approx_grid_spacing)
                else:
                    # Careful to keep this a 2D array
                    pts = domain.centroid_coordinates[j, :, None].transpose()

                px = scipy.hstack([px, pts[:, 0]])

                py = scipy.hstack([py, pts[:, 1]])

                p_indices = scipy.hstack(
                    [p_indices, scipy.repeat(j, len(pts[:, 0]))])

            # Get function values at all px,py
            if verbose:
                print '  Evaluating function at ', len(px), ' points'

            allTopo = q_function(px, py)

            # Set output values in lb:ub
            for j in range(lb, ub):
                out_indices = (p_indices == j).nonzero()[0]
                assert len(out_indices) > 0
                if averaging == 'mean':
                    out[j] = allTopo[out_indices].mean()
                elif averaging == 'min':
                    out[j] = allTopo[out_indices].min()
                elif averaging == 'max':
                    out[j] = allTopo[out_indices].max()
                else:
                    raise Exception('Unknown value of averaging')
        return out
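The grid-sample-and-average idea behind elevation_setter can be shown in isolation. The sketch below assumes only numpy and a toy q_function: it lays a regular grid over one triangle's bounding box, keeps the points inside the triangle via a barycentric test, and reduces the sampled values with mean/min/max. It is not the ANUGA/su implementation (which uses gridPointsInPolygon and processes triangles in chunks).

import numpy


def average_in_triangle(tri, q_function, spacing=1.0, averaging='mean'):
    """Sample q_function on a grid inside one triangle and reduce the values."""
    (x0, y0), (x1, y1), (x2, y2) = tri
    xs = numpy.arange(min(x0, x1, x2), max(x0, x1, x2) + spacing, spacing)
    ys = numpy.arange(min(y0, y1, y2), max(y0, y1, y2) + spacing, spacing)
    px, py = [a.ravel() for a in numpy.meshgrid(xs, ys)]
    # Barycentric coordinates of each grid point relative to the triangle
    det = (y1 - y2) * (x0 - x2) + (x2 - x1) * (y0 - y2)
    l1 = ((y1 - y2) * (px - x2) + (x2 - x1) * (py - y2)) / det
    l2 = ((y2 - y0) * (px - x2) + (x0 - x2) * (py - y2)) / det
    l3 = 1.0 - l1 - l2
    inside = (l1 >= 0.) & (l2 >= 0.) & (l3 >= 0.)
    values = q_function(px[inside], py[inside])
    # 'mean', 'min' or 'max', as in elevation_setter above
    return getattr(values, averaging)()


# Toy elevation surface: a plane dipping to the east
print(average_in_triangle([(0., 0.), (10., 0.), (0., 10.)],
                          lambda x, y: 5.0 - 0.1 * x,
                          spacing=0.5, averaging='mean'))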
Example #17
def get_initial_condition_data(data_source, worksheet, flag, print_info):
    """Convenience function to extract the initial condition data
       (and initial_condition_additions) from the xls worksheet

       The requirements have become more elaborate over time, e.g.
       to support combining 2 line files into a polygon

       Given a character string referring to a quantity which has initial
       conditions in the xls worksheet (e.g. 'Elevation'),
       extract the associated data from the
       'data_source' (an AnugaXls object) on worksheet 'worksheet'

       This assumes a particular format in the Excel sheet
    """

    # Read the polygon / value pairs
    quantity_data = data_source.get_paired_list(
        worksheet, flag, [1, 1], post_process=string_or_float)

    # If the polygon is a wildcard, assume it matches 2 lines, read them in,
    # and join them to make a polygon. This is a convenient shorthand
    # for when we have pairs of breaklines (e.g. channel banks) that
    # together enclose the region of interest
    for i in range(len(quantity_data)):
        polygon_files = glob.glob(quantity_data[i][0])

        # Check it only matches 0 or 1 or 2 files
        msg = 'Polygon:' + str(i) + ' : ' + quantity_data[i][0] + \
              ' for ' + flag + ' on worksheet ' + \
              worksheet + ' matches > 2 files. We can join at most 2 lines ' + \
              'to make a polygon'
        assert len(polygon_files) <= 2, msg

        if len(polygon_files) == 0:
            # Check it is valid
            msg = 'Polygon:' + str(i) + ' : ' + quantity_data[i][0] + \
                  ' for ' + flag + ' on worksheet ' + \
                  worksheet + ' matches no files, and is not All or None ' + \
                  'or Extent (for a raster)'
            assert ((quantity_data[i][0] == 'All') |
                    (quantity_data[i][0] is None) |
                    (quantity_data[i][0] == 'Extent')), msg
        elif len(polygon_files) == 2:
            # If it matches 2, try to combine to 1.
            # This is often required to use pairs of breaklines as polygons
            # Do this by:
            # 1) Setting up the 2 lines as though they were in a
            #    breakline object
            # 2) Using su.polygon_from_matching_breaklines
            print_info.append('Initial ' + flag)
            print_info.append('Combining these files to a polygon: ')
            print_info.append(str(polygon_files))
            print_info.append('')

            l0 = su.read_polygon(polygon_files[0])
            l1 = su.read_polygon(polygon_files[1])
            fake_breakline = {polygon_files[0]: l0, polygon_files[1]: l1}
            fake_match = quantity_data[i][0].split('*')[0]
            out_poly = su.polygon_from_matching_breaklines(
                fake_match, fake_breakline)
            quantity_data[i][0] = out_poly

    # Get the clip_range for each polygon / function pair
    quantity_clip_range = data_source.get_fixed_size_subtable_by_columns(
        worksheet, flag,
        dimensions=[2, len(quantity_data)],
        offset=[3, 1], post_process=string_or_float)

    new_quantity_clip_range = reformat_clip_range(quantity_clip_range)

    # Get sub-grid size for spatial averaging, if applicable
    spatial_average = data_source.get_var(
        worksheet, flag, [5, 1], post_process=string_or_float)

    if type(spatial_average) == str:
        spatial_average = None

    return quantity_data, new_quantity_clip_range, spatial_average
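The "combine 2 matching line files into a polygon" step above relies on su.read_polygon and su.polygon_from_matching_breaklines. Below is a minimal standalone sketch of the same idea, assuming simple x,y csv line files; the reader, file names and layout are illustrative, and the real helper also handles line orientation and wildcard matching.

import csv


def read_xy_line(filename):
    """Read a line as a list of [x, y] vertex pairs from a simple csv file."""
    with open(filename) as f:
        return [[float(row[0]), float(row[1])] for row in csv.reader(f) if row]


def join_lines_to_polygon(line_file_0, line_file_1):
    """Join two open lines (e.g. left/right channel banks) into one polygon.

    The second line is reversed so the boundary runs around the area once;
    this assumes the two lines were digitised in the same direction.
    """
    l0 = read_xy_line(line_file_0)
    l1 = read_xy_line(line_file_1)
    return l0 + l1[::-1]


# Hypothetical usage with a pair of bank lines matched by a wildcard:
# polygon = join_lines_to_polygon('Channel_left_bank.csv',
#                                 'Channel_right_bank.csv')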