Example #1
    def analyze(self, **params):
        """ Propagates water quality scores from monitoring locations
        to upstream subcatchments. Calls directly to :func:`propagate`.
        """

        # analysis options
        ws = params.pop('workspace', '.')
        overwrite = params.pop('overwrite', True)
        add_output_to_map = params.pop('add_output_to_map', False)
        output_layer = params.pop('output_layer', None)

        # subcatchment info
        sc = params.pop('subcatchments', None)
        ID_col = params.pop('ID_column', None)
        downstream_ID_col = params.pop('downstream_ID_column', None)

        # monitoring location info
        ml = params.pop('monitoring_locations', None)
        ml_type_col = params.pop('ml_type_col', None)
        included_ml_types = validate.non_empty_list(
            params.pop('included_ml_types', None),
            on_fail='create',
        )

        # monitoring location type filter function
        if ml_type_col is not None and len(included_ml_types) > 0:
            ml_filter = lambda row: row[ml_type_col] in included_ml_types
        else:
            ml_filter = None

        # value columns and aggregations
        value_cols_string = params.pop('value_columns', None)
        value_columns = [
            vc.split(' ')
            for vc in value_cols_string.replace(' #', ' average').split(';')
        ]

        # streams data
        streams = params.pop('streams', None)

        # perform the analysis
        with utils.WorkSpace(ws), utils.OverwriteState(overwrite):
            output_layers = propagate(
                subcatchments=sc,
                id_col=ID_col,
                ds_col=downstream_ID_col,
                monitoring_locations=ml,
                ml_filter=ml_filter,
                ml_filter_cols=ml_type_col,
                value_columns=value_columns,
                output_path=output_layer,
                streams=streams,
                verbose=True,
                asMessage=True,
            )

            if add_output_to_map:
                for lyr in output_layers:
                    self._add_to_map(lyr)

        return output_layers
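The `value_columns` string handled by this wrapper is a semicolon-separated list of "<column> <aggregation>" pairs, where a trailing ' #' placeholder is swapped for ' average'. A minimal sketch of that parsing, using hypothetical column names:

# Hypothetical input string; 'Cu' and 'Pb' are placeholder column names.
value_cols_string = 'Cu median;Pb #'
value_columns = [
    vc.split(' ')
    for vc in value_cols_string.replace(' #', ' average').split(';')
]
# value_columns == [['Cu', 'median'], ['Pb', 'average']]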
Example #2
    def test_multi_agg(self):
        expected_cols = [
            'medDry_B', 'aveDry_B', 'maxDry_B',
            'minWet_B', 'aveWet_M', 'maxWet_M',
        ]
        wq_cols = [
            ('Dry_B', 'medIAN'),
            ('Dry_B',),
            ('Dry_B', 'MAXIMUM'),
            ('Wet_B', 'minIMUM'),
            ('Wet_M',),
            ('Wet_M', 'MAXIMUM'),
        ]

        with utils.OverwriteState(True), utils.WorkSpace(self.ws):
            wq, cols = analysis.preprocess_wq(
                monitoring_locations=self.ml,
                subcatchments=self.sc,
                id_col='CID',
                ds_col='DS_CID',
                output_path=self.results,
                value_columns=wq_cols
            )
        expected = 'expected_multi_agg.shp'
        pptest.assert_shapefiles_are_close(
            os.path.join(self.ws, expected),
            os.path.join(self.ws, self.results),
        )
        nt.assert_true(isinstance(wq, numpy.ndarray))
        nt.assert_list_equal(cols, expected_cols)
Example #3
    def test_false_true(self):
        arcpy.env.overwriteOutput = False

        nt.assert_false(arcpy.env.overwriteOutput)
        with utils.OverwriteState(True):
            nt.assert_true(arcpy.env.overwriteOutput)

        nt.assert_false(arcpy.env.overwriteOutput)
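Judging from this test, `utils.OverwriteState` acts as a context manager that temporarily sets `arcpy.env.overwriteOutput` and restores the previous value on exit. A minimal sketch of such a context manager (an assumption based on the test, not the library's actual implementation):

import arcpy

class OverwriteState(object):
    # Sketch only: assumes the real utils.OverwriteState behaves like this.
    def __init__(self, overwrite):
        self.overwrite = bool(overwrite)

    def __enter__(self):
        self._previous = arcpy.env.overwriteOutput
        arcpy.env.overwriteOutput = self.overwrite
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Restore whatever was set before the block, even if an error occurred.
        arcpy.env.overwriteOutput = self._previous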
Example #4
    def test_no_wq_col_error(self):
        # `value_columns` is deliberately omitted here; as the test name
        # suggests, preprocess_wq is expected to raise in that case.
        with utils.OverwriteState(True), utils.WorkSpace(self.ws):
            wq, cols = analysis.preprocess_wq(
                monitoring_locations=self.ml,
                subcatchments=self.sc,
                id_col='CID',
                ds_col='DS_CID',
                output_path=self.results,
            )
Example #5
def test_reduce():
    ws = resource_filename("propagator.testing", "_reduce")
    with utils.OverwriteState(True), utils.WorkSpace(ws):
        mon_locations = resource_filename("propagator.testing._reduce", "point.shp")
        expected_reduced_mon_locations = resource_filename("propagator.testing._reduce", "reduced_point.shp")
        # Create a placeholder for output first, since the function takes the output file as an input.

        reduced_mon_locations = utils.create_temp_filename("reduced_point", filetype='shape')
        reduced_mon_locations = analysis._reduce(
            mon_locations,
            reduced_mon_locations,
            ["WQ1", "WQ2", "WQ3"],
            'ID',
            'FID',
        )
        pptest.assert_shapefiles_are_close(reduced_mon_locations, expected_reduced_mon_locations)
        utils.cleanup_temp_results(reduced_mon_locations)
Example #6
def test_spatial_join():
    known = resource_filename('propagator.testing.spatial_join', 'merge_result.shp')
    left = resource_filename('propagator.testing.spatial_join', 'merge_baseline.shp')
    right = resource_filename('propagator.testing.spatial_join', 'merge_join.shp')
    outputfile = resource_filename('propagator.testing.spatial_join', 'merge_result.shp')
    with utils.OverwriteState(True):
        test = utils.spatial_join(left=left, right=right, outputfile=outputfile)

    nt.assert_equal(test, outputfile)
    pptest.assert_shapefiles_are_close(test, known)

    utils.cleanup_temp_results(test)
Example #7
def test_concat_results():
    known = resource_filename('propagator.testing.concat_results', 'known.shp')
    with utils.OverwriteState(True):
        test = utils.concat_results(
            resource_filename('propagator.testing.concat_results', 'result.shp'),
            [resource_filename('propagator.testing.concat_results', 'input1.shp'),
             resource_filename('propagator.testing.concat_results', 'input2.shp')]
        )

    nt.assert_true(isinstance(test, arcpy.mapping.Layer))
    pptest.assert_shapefiles_are_close(test.dataSource, known)

    utils.cleanup_temp_results(test)
Example #8
    def test_baseline(self):
        with utils.WorkSpace(self.ws), utils.OverwriteState(True):
            subc_layer, stream_layer = propagator.toolbox.propagate(
                subcatchments='subcatchments.shp',
                monitoring_locations='monitoring_locations.shp',
                id_col='CID',
                ds_col='DS_CID',
                value_columns=self.columns,
                streams='streams.shp',
                output_path='test.shp')

        self.check(subc_layer, stream_layer, self.subc_expected_base,
                   self.stream_expected_base)
Example #9
def test_intersect_layers():
    ws = resource_filename('propagator.testing', 'intersect_layers')
    with utils.OverwriteState(True), utils.WorkSpace(ws):
        utils.intersect_layers(
            ['subcatchments.shp', 'monitoring_locations.shp'],
            'test.shp',
        )

    pptest.assert_shapefiles_are_close(
        os.path.join(ws, 'expected.shp'),
        os.path.join(ws, 'test.shp'),
    )

    utils.cleanup_temp_results(os.path.join(ws, 'test.shp'))
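`utils.WorkSpace` is used the same way throughout these examples; by analogy with `OverwriteState`, it presumably swaps `arcpy.env.workspace` for the duration of the block. A comparable sketch, again an assumption rather than the library's code:

import arcpy

class WorkSpace(object):
    # Sketch only: assumes utils.WorkSpace temporarily points arcpy at `path`.
    def __init__(self, path):
        self.path = path

    def __enter__(self):
        self._previous = arcpy.env.workspace
        arcpy.env.workspace = self.path
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        arcpy.env.workspace = self._previous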
Example #10
def test_intersect_polygon_layers():
    input1_file = resource_filename("propagator.testing.intersect_polygons", "intersect_input1.shp")
    input2_file = resource_filename("propagator.testing.intersect_polygons", "intersect_input2.shp")
    known_file = resource_filename("propagator.testing.intersect_polygons", "intersect_known.shp")
    output_file = resource_filename("propagator.testing.intersect_polygons", "intersect_output.shp")

    with utils.OverwriteState(True):
        output = utils.intersect_polygon_layers(
            output_file,
            [input1_file, input2_file,]
        )

    nt.assert_true(isinstance(output, arcpy.mapping.Layer))
    pptest.assert_shapefiles_are_close(output_file, known_file)

    utils.cleanup_temp_results(output)
Example #11
    def test_baseline(self):
        with utils.OverwriteState(True), utils.WorkSpace(self.ws):
            wq, cols = analysis.preprocess_wq(
                monitoring_locations=self.ml,
                subcatchments=self.sc,
                id_col='CID',
                ds_col='DS_CID',
                output_path=self.results,
                value_columns=self.wq_cols
            )
        expected = 'expected.shp'
        pptest.assert_shapefiles_are_close(
            os.path.join(self.ws, expected),
            os.path.join(self.ws, self.results),
        )
        nt.assert_true(isinstance(wq, numpy.ndarray))
        nt.assert_list_equal(cols, self.expected_cols)
Example #12
    def test_multi_agg(self):
        stacol = 'StationTyp'
        with utils.WorkSpace(self.ws), utils.OverwriteState(True):
            subc_layer, stream_layer = propagator.toolbox.propagate(
                subcatchments='subcatchments.shp',
                id_col='CID',
                ds_col='DS_CID',
                monitoring_locations='monitoring_locations.shp',
                ml_filter=lambda row: row[stacol] in ['Channel', 'Outfall', 'Outfall, Coastal'],
                ml_filter_cols=stacol,
                value_columns=self.muli_agg_columns,
                streams='streams.shp',
                output_path='test.shp')

        self.check(subc_layer, stream_layer, self.subc_expected_multi_agg,
                   self.stream_expected_multi_agg)
Example #13
def test_update_attribute_table():
    ws = resource_filename('propagator.testing', 'update_attribute_table')
    with utils.WorkSpace(ws), utils.OverwriteState(True):
        inputpath = resource_filename("propagator.testing.update_attribute_table", "input.shp")
        testpath = inputpath.replace('input', 'test')
        expected = resource_filename("propagator.testing.update_attribute_table", "expected_output.shp")

        new_attributes = numpy.array(
            [
                (1, 0, u'Cu_1', 'Pb_1'), (2, 0, u'Cu_2', 'Pb_2'),
                (3, 0, u'Cu_3', 'Pb_3'), (4, 0, u'Cu_4', 'Pb_4'),
            ], dtype=[('id', int), ('ds_id', int), ('Cu', '<U5'), ('Pb', '<U5'),]
        )

        arcpy.management.Copy(inputpath, testpath)
        utils.update_attribute_table(testpath, new_attributes, 'id', ['Cu', 'Pb'])

        pptest.assert_shapefiles_are_close(testpath, expected)
        utils.cleanup_temp_results(testpath)
Example #14
def test_aggregate_streams_by_subcatchment():
    ws = resource_filename('propagator.testing', 'agg_stream_in_subc')
    with utils.WorkSpace(ws), utils.OverwriteState(True):
        results = analysis.aggregate_streams_by_subcatchment(
            stream_layer='streams.shp',
            subcatchment_layer='subc.shp',
            id_col='CID',
            ds_col='DS_CID',
            other_cols=['WQ_1', 'WQ_2'],
            output_layer='test.shp'
        )

    nt.assert_equal(results, 'test.shp')
    pptest.assert_shapefiles_are_close(
        os.path.join(ws, results),
        os.path.join(ws, 'expected.shp'),
        ngeom=4
    )

    utils.cleanup_temp_results(os.path.join(ws, results),)
Example #15
    def analyze(self, **params):
        """ Accumulates subcatchments properties from upstream
        subcatchments into stream. Calls directly to :func:`accumulate`.

        """

        # analysis options
        ws = params.pop('workspace', '.')
        overwrite = params.pop('overwrite', True)
        add_output_to_map = params.pop('add_output_to_map', False)

        # input parameters
        sc = params.pop('subcatchments', None)
        ID_col = params.pop('ID_column', None)
        downstream_ID_col = params.pop('downstream_ID_column', None)
        # value columns and aggregations
        value_cols_string = params.pop('value_columns', None)
        value_columns = [
            vc.split(' ')
            for vc in value_cols_string.replace(' #', ' average').split(';')
        ]

        streams = params.pop('streams', None)
        output_layer = params.pop('output_layer', None)

        with utils.WorkSpace(ws), utils.OverwriteState(overwrite):
            output_layers = accumulate(
                subcatchments_layer=sc,
                id_col=ID_col,
                ds_col=downstream_ID_col,
                value_columns=value_columns,
                streams_layer=streams,
                output_layer=output_layer,
                verbose=True,
                asMessage=True,
            )

            if add_output_to_map:
                self._add_to_map(output_layers)

        return output_layers
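Given the keys popped above, a call to this wrapper would look roughly like the following; the class name `Accumulator` and all paths are hypothetical placeholders (only the keyword names and the 'CID'/'DS_CID' columns come from these examples):

# Hypothetical invocation; 'Accumulator' and the paths are placeholders.
tbx = Accumulator()
layers = tbx.analyze(
    workspace=r'C:\data\project.gdb',
    overwrite=True,
    subcatchments='subcatchments.shp',
    ID_column='CID',
    downstream_ID_column='DS_CID',
    value_columns='Area sum;Imp #',   # ' #' becomes ' average'
    streams='streams.shp',
    output_layer='accumulated_streams.shp',
)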
Example #16
def test_accumulate():
    ws = resource_filename('propagator.testing', 'score_accumulator')

    with utils.WorkSpace(ws), utils.OverwriteState(True):
        results = toolbox.accumulate(
            subcatchments_layer='subcatchment_wq.shp',
            id_col='Catch_ID_a',
            ds_col='Dwn_Catch_',
            value_columns=[('DryM', 'maximum', 'n/a'),
                           ('DryN', 'First', 'area'),
                           ('WetB', 'WeIghtED_AveragE', 'imp_ar'),
                           ('WetM', 'minimum', 'imp_ar'),
                           ('WetN', 'average', 'n/a'), ('Area', 'sum', 'n/a'),
                           ('Imp', 'weighted_Average', 'Area'),
                           ('imp_ar', 'sum', 'n/a')],
            streams_layer='streams.shp',
            output_layer='output.shp',
        )

        pptest.assert_shapefiles_are_close(
            os.path.join(ws, 'expected_results.shp'),
            os.path.join(ws, results))

        utils.cleanup_temp_results(os.path.join(ws, results))
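In this test, `value_columns` takes three-element tuples: the column name, the aggregation (apparently case-insensitive, e.g. 'WeIghtED_AveragE'), and a weighting column, with 'n/a' seemingly marking unweighted aggregations. A small illustrative sketch of how a weighted versus plain average could be computed with numpy (not the library's internals):

import numpy

def aggregate(values, method, weights=None):
    # Illustrative reducer: 'average' is a plain mean; 'weighted_average'
    # weights each value by the companion column (e.g. an area field).
    values = numpy.asarray(values, dtype=float)
    if method.lower() == 'weighted_average':
        return numpy.average(values, weights=numpy.asarray(weights, dtype=float))
    return values.mean()

aggregate([1.0, 3.0], 'average')                       # 2.0
aggregate([1.0, 3.0], 'WeIghtED_AveragE', [1.0, 3.0])  # 2.5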
Example #17
    def setup(self):
        source = resource_filename("propagator.testing.add_field_with_value", 'field_adder.shp')
        with utils.OverwriteState(True):
            self.testfile = utils.copy_layer(source, source.replace('field_adder', 'test'))
        self.fields_added = ["_text", "_unicode", "_int", "_float", '_no_valstr', '_no_valnum']