Example 1
    def analyze(self, **params):
        """ Propagates water quality scores from monitoring locations
        to upstream subcatchments. Calls directly to :func:`propagate`.
        """

        # analysis options
        ws = params.pop('workspace', '.')
        overwrite = params.pop('overwrite', True)
        add_output_to_map = params.pop('add_output_to_map', False)
        output_layer = params.pop('output_layer', None)

        # subcatchment info
        sc = params.pop('subcatchments', None)
        ID_col = params.pop('ID_column', None)
        downstream_ID_col = params.pop('downstream_ID_column', None)

        # monitoring location info
        ml = params.pop('monitoring_locations', None)
        ml_type_col = params.pop('ml_type_col', None)
        included_ml_types = validate.non_empty_list(
            params.pop('included_ml_types', None),
            on_fail='create',
        )

        # monitoring location type filter function
        if ml_type_col is not None and len(included_ml_types) > 0:
            ml_filter = lambda row: row[ml_type_col] in included_ml_types
        else:
            ml_filter = None

        # value columns and aggregations
        value_cols_string = params.pop('value_columns', None)
        value_columns = [
            vc.split(' ')
            for vc in value_cols_string.replace(' #', ' average').split(';')
        ]

        # streams data
        streams = params.pop('streams', None)

        # perform the analysis
        with utils.WorkSpace(ws), utils.OverwriteState(overwrite):
            output_layers = propagate(
                subcatchments=sc,
                id_col=ID_col,
                ds_col=downstream_ID_col,
                monitoring_locations=ml,
                ml_filter=ml_filter,
                ml_filter_cols=ml_type_col,
                value_columns=value_columns,
                output_path=output_layer,
                streams=streams,
                verbose=True,
                asMessage=True,
            )

            if add_output_to_map:
                for lyr in output_layers:
                    self._add_to_map(lyr)

        return output_layers
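
The list comprehension near the top of analyze() converts the semicolon-delimited string from the toolbox UI into (column, aggregation) pairs, mapping the blank-aggregation token ' #' to 'average'. A minimal, self-contained sketch of that parsing on a hypothetical input string:

# Hypothetical input string; the real value comes from the ArcGIS value table.
value_cols_string = 'Dry_B #;Wet_M maximum'
value_columns = [
    vc.split(' ')
    for vc in value_cols_string.replace(' #', ' average').split(';')
]
assert value_columns == [['Dry_B', 'average'], ['Wet_M', 'maximum']]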
Example 2
    def test_multi_agg(self):
        expected_cols = [
            'medDry_B', 'aveDry_B', 'maxDry_B',
            'minWet_B', 'aveWet_M', 'maxWet_M',
        ]
        wq_cols = [
            ('Dry_B', 'medIAN'),
            ('Dry_B',),
            ('Dry_B', 'MAXIMUM'),
            ('Wet_B', 'minIMUM'),
            ('Wet_M',),
            ('Wet_M', 'MAXIMUM'),
        ]

        with utils.OverwriteState(True), utils.WorkSpace(self.ws):
            wq, cols = analysis.preprocess_wq(
                monitoring_locations=self.ml,
                subcatchments=self.sc,
                id_col='CID',
                ds_col='DS_CID',
                output_path=self.results,
                value_columns=wq_cols
            )
        expected = 'expected_multi_agg.shp'
        pptest.assert_shapefiles_are_close(
            os.path.join(self.ws, expected),
            os.path.join(self.ws, self.results),
        )
        nt.assert_true(isinstance(wq, numpy.ndarray))
        nt.assert_list_equal(cols, expected_cols)
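
The expected column names in this test imply a simple naming rule for preprocess_wq output: the first three letters of the aggregation (lower-cased, defaulting to 'average') are prefixed onto the source column. A hypothetical helper that reproduces the names above, for illustration only:

def _sketch_result_col(col, agg='average'):
    # Hypothetical reconstruction of the naming convention implied by
    # ``expected_cols``; not the library's actual implementation.
    return agg.lower()[:3] + col

assert _sketch_result_col('Dry_B', 'medIAN') == 'medDry_B'
assert _sketch_result_col('Wet_M') == 'aveWet_M'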
Example 3
def test__get_wq_fields():
    ws = resource_filename('propagator.testing', 'get_wq_fields')
    with utils.WorkSpace(ws):
        results = analysis._get_wq_fields('monitoring_locations.shp', ['dry', 'wet'])

    expected = [u'Dry_B', u'Dry_M', u'Dry_N', u'Wet_B', u'Wet_M', u'Wet_N']

    nt.assert_list_equal(results, expected)
Example 4
    def test_geodb_barefile_with_num(self):
        with utils.WorkSpace(self.geodbworkspace):
            known_raster = os.path.join(self.geodbworkspace, '_temp_test_7')
            temp_raster = utils.create_temp_filename('test', filetype='raster', num=7)
            nt.assert_equal(temp_raster, known_raster)

            known_shape = os.path.join(self.geodbworkspace, '_temp_test_22')
            temp_shape = utils.create_temp_filename('test', filetype='shape', num=22)
            nt.assert_equal(temp_shape, known_shape)
Example 5
    def test_folderworkspace_barefile_with_num(self):
        with utils.WorkSpace(self.folderworkspace):
            known_raster = os.path.join(self.folderworkspace, '_temp_test_14.tif')
            temp_raster = utils.create_temp_filename('test', filetype='raster', num=14)
            nt.assert_equal(temp_raster, known_raster)

            known_shape = os.path.join(self.folderworkspace, '_temp_test_3.shp')
            temp_shape = utils.create_temp_filename('test', filetype='shape', num=3)
            nt.assert_equal(temp_shape, known_shape)
Example 6
    def test_folderworkspace_withsubfolder_with_num(self):
        with utils.WorkSpace(self.folderworkspace):
            known_raster = os.path.join(self.folderworkspace, 'subfolder', '_temp_test_1.tif')
            temp_raster = utils.create_temp_filename(os.path.join('subfolder', 'test'), filetype='raster', num=1)
            nt.assert_equal(temp_raster, known_raster)

            known_shape = os.path.join(self.folderworkspace, 'subfolder', '_temp_test_12.shp')
            temp_shape = utils.create_temp_filename(os.path.join('subfolder','test'), filetype='shape', num=12)
            nt.assert_equal(temp_shape, known_shape)
Example 7
    def test_no_wq_col_error(self):
        # ``value_columns`` is deliberately omitted to trigger the error
        # this test is named for.
        with utils.OverwriteState(True), utils.WorkSpace(self.ws):
            wq, cols = analysis.preprocess_wq(
                monitoring_locations=self.ml,
                subcatchments=self.sc,
                id_col='CID',
                ds_col='DS_CID',
                output_path=self.results,
            )
Example 8
    def test_single_group_col(self):
        with utils.WorkSpace(self.workspace):
            results = utils.aggregate_geom(
                layerpath=self.input_file,
                by_fields='CID',
                field_stat_tuples=self.stats,
                outputpath=self.output,
            )
        self.check(results, self.expected_single)
Example 9
    def test_with_extension_geodb_with_num(self):
        with utils.WorkSpace(self.folderworkspace):
            filename = os.path.join(self.geodbworkspace, 'test')
            known_raster = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test_2000')
            temp_raster = utils.create_temp_filename(filename + '.tif', filetype='raster', num=2000)
            nt.assert_equal(temp_raster, known_raster)

            known_shape = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test_999')
            temp_shape = utils.create_temp_filename(filename + '.shp', filetype='shape', num=999)
            nt.assert_equal(temp_shape, known_shape)
Example 10
    def test_geodb_as_subfolder_with_num(self):
        with utils.WorkSpace(self.folderworkspace):
            filename = os.path.join(self.geodbworkspace, 'test')
            known_raster = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test_5')
            temp_raster = utils.create_temp_filename(filename, filetype='raster', num=5)
            nt.assert_equal(temp_raster, known_raster)

            known_shape = os.path.join(self.folderworkspace, self.geodbworkspace, '_temp_test_99')
            temp_shape = utils.create_temp_filename(filename, filetype='shape', num=99)
            nt.assert_equal(temp_shape, known_shape)
Example 11
    def test_with_extension_folder_with_num(self):
        with utils.WorkSpace(self.folderworkspace):
            filename = 'test'
            known_raster = os.path.join(self.folderworkspace, '_temp_test_4.tif')
            temp_raster = utils.create_temp_filename(filename + '.tif', filetype='raster', num=4)
            nt.assert_equal(temp_raster, known_raster)

            known_shape = os.path.join(self.folderworkspace, '_temp_test_4.shp')
            temp_shape = utils.create_temp_filename(filename + '.shp', filetype='shape', num=4)
            nt.assert_equal(temp_shape, known_shape)
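
Read together, the create_temp_filename tests imply that the helper prefixes the base name with '_temp_', appends the optional num, and adds a .tif/.shp extension only when the destination is a plain folder rather than a geodatabase. A rough, hypothetical re-creation of that behavior (the real utils.create_temp_filename works against arcpy.env.workspace, which this sketch takes as an explicit argument instead):

import os

def _sketch_create_temp_filename(filename, filetype=None, num=None, workspace='.'):
    # Hypothetical sketch only; mirrors the expectations in the tests above.
    folder, name = os.path.split(filename)
    name, _ = os.path.splitext(name)
    suffix = '' if num is None else '_{}'.format(num)
    destination = folder or workspace
    in_geodb = destination.lower().endswith('.gdb')  # assumes file geodatabases
    ext = '' if in_geodb else {'raster': '.tif', 'shape': '.shp'}.get(filetype, '')
    return os.path.join(workspace, folder, '_temp_{}{}{}'.format(name, suffix, ext))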
Example 12
def test_cleanup_temp_results():

    workspace = os.path.abspath(resource_filename('propagator.testing', 'cleanup_temp_results'))
    template_file = 'test_dem.tif'

    name1 = 'temp_1.tif'
    name2 = 'temp_2.tif'

    with utils.WorkSpace(workspace):
        raster1 = utils.copy_layer(template_file, name1)
        raster2 = utils.copy_layer(template_file, name2)

    nt.assert_true(os.path.exists(os.path.join(workspace, 'temp_1.tif')))
    nt.assert_true(os.path.exists(os.path.join(workspace, 'temp_2.tif')))

    with utils.WorkSpace(workspace):
        utils.cleanup_temp_results(name1, name2)

    nt.assert_false(os.path.exists(os.path.join(workspace, 'temp_1.tif')))
    nt.assert_false(os.path.exists(os.path.join(workspace, 'temp_2.tif')))
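
As used here, utils.cleanup_temp_results removes previously written results relative to the active workspace. A minimal sketch of that idea, assuming arcpy-based deletion (not the package's actual implementation):

import os
import arcpy

def _sketch_cleanup_temp_results(*filenames):
    # Hypothetical sketch: delete each result from the active arcpy workspace.
    for name in filenames:
        arcpy.management.Delete(os.path.join(arcpy.env.workspace, name))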
Example 13
def test_reduce():
    ws = resource_filename("propagator.testing", "_reduce")
    with utils.OverwriteState(True), utils.WorkSpace(ws):
        mon_locations = resource_filename("propagator.testing._reduce", "point.shp")
        expected_reduced_mon_locations = resource_filename("propagator.testing._reduce", "reduced_point.shp")
        # Create a placeholder for output first, since the function takes
        # the output file as an input.
        reduced_mon_locations = utils.create_temp_filename("reduced_point", filetype='shape')
        reduced_mon_locations = analysis._reduce(
            mon_locations, reduced_mon_locations,
            ["WQ1", "WQ2", "WQ3"], 'ID', 'FID',
        )
        pptest.assert_shapefiles_are_close(reduced_mon_locations, expected_reduced_mon_locations)
        utils.cleanup_temp_results(reduced_mon_locations)
Example 14
    def test_baseline(self):
        with utils.WorkSpace(self.ws), utils.OverwriteState(True):
            subc_layer, stream_layer = propagator.toolbox.propagate(
                subcatchments='subcatchments.shp',
                monitoring_locations='monitoring_locations.shp',
                id_col='CID',
                ds_col='DS_CID',
                value_columns=self.columns,
                streams='streams.shp',
                output_path='test.shp')

        self.check(subc_layer, stream_layer, self.subc_expected_base,
                   self.stream_expected_base)
Example 15
def test_intersect_layers():
    ws = resource_filename('propagator.testing', 'intersect_layers')
    with utils.OverwriteState(True), utils.WorkSpace(ws):
        utils.intersect_layers(
            ['subcatchments.shp', 'monitoring_locations.shp'],
            'test.shp',
        )

    pptest.assert_shapefiles_are_close(
        os.path.join(ws, 'expected.shp'),
        os.path.join(ws, 'test.shp'),
    )

    utils.cleanup_temp_results(os.path.join(ws, 'test.shp'))
Example 16
    def test_baseline(self):
        with utils.OverwriteState(True), utils.WorkSpace(self.ws):
            wq, cols = analysis.preprocess_wq(
                monitoring_locations=self.ml,
                subcatchments=self.sc,
                id_col='CID',
                ds_col='DS_CID',
                output_path=self.results,
                value_columns=self.wq_cols
            )
        expected = 'expected.shp'
        pptest.assert_shapefiles_are_close(
            os.path.join(self.ws, expected),
            os.path.join(self.ws, self.results),
        )
        nt.assert_true(isinstance(wq, numpy.ndarray))
        nt.assert_list_equal(cols, self.expected_cols)
Example 17
    def test_multi_agg(self):
        stacol = 'StationTyp'
        with utils.WorkSpace(self.ws), utils.OverwriteState(True):
            subc_layer, stream_layer = propagator.toolbox.propagate(
                subcatchments='subcatchments.shp',
                id_col='CID',
                ds_col='DS_CID',
                monitoring_locations='monitoring_locations.shp',
                ml_filter=lambda row: row[stacol] in [
                    'Channel', 'Outfall', 'Outfall, Coastal'
                ],
                ml_filter_cols=stacol,
                value_columns=self.multi_agg_columns,
                streams='streams.shp',
                output_path='test.shp')

        self.check(subc_layer, stream_layer, self.subc_expected_multi_agg,
                   self.stream_expected_multi_agg)
Example 18
def test_update_attribute_table():
    ws = resource_filename('propagator.testing', 'update_attribute_table')
    with utils.WorkSpace(ws), utils.OverwriteState(True):
        inputpath = resource_filename("propagator.testing.update_attribute_table", "input.shp")
        testpath = inputpath.replace('input', 'test')
        expected = resource_filename("propagator.testing.update_attribute_table", "expected_output.shp")

        new_attributes = numpy.array(
            [
                (1, 0, u'Cu_1', 'Pb_1'), (2, 0, u'Cu_2', 'Pb_2'),
                (3, 0, u'Cu_3', 'Pb_3'), (4, 0, u'Cu_4', 'Pb_4'),
            ], dtype=[('id', int), ('ds_id', int), ('Cu', '<U5'), ('Pb', '<U5'),]
        )

        arcpy.management.Copy(inputpath, testpath)
        utils.update_attribute_table(testpath, new_attributes, 'id', ['Cu', 'Pb'])

        pptest.assert_shapefiles_are_close(testpath, expected)
        utils.cleanup_temp_results(testpath)
Example 19
def test_aggregate_streams_by_subcatchment():
    ws = resource_filename('propagator.testing', 'agg_stream_in_subc')
    with utils.WorkSpace(ws), utils.OverwriteState(True):
        results = analysis.aggregate_streams_by_subcatchment(
            stream_layer='streams.shp',
            subcatchment_layer='subc.shp',
            id_col='CID',
            ds_col='DS_CID',
            other_cols=['WQ_1', 'WQ_2'],
            output_layer='test.shp'
        )

    nt.assert_equal(results, 'test.shp')
    pptest.assert_shapefiles_are_close(
        os.path.join(ws, results),
        os.path.join(ws, 'expected.shp'),
        ngeom=4
    )

    utils.cleanup_temp_results(os.path.join(ws, results))
Example 20
    def analyze(self, **params):
        """ Accumulates subcatchments properties from upstream
        subcatchments into stream. Calls directly to :func:`accumulate`.

        """

        # analysis options
        ws = params.pop('workspace', '.')
        overwrite = params.pop('overwrite', True)
        add_output_to_map = params.pop('add_output_to_map', False)

        # input parameters
        sc = params.pop('subcatchments', None)
        ID_col = params.pop('ID_column', None)
        downstream_ID_col = params.pop('downstream_ID_column', None)
        # value columns and aggregations
        value_cols_string = params.pop('value_columns', None)
        value_columns = [
            vc.split(' ')
            for vc in value_cols_string.replace(' #', ' average').split(';')
        ]

        streams = params.pop('streams', None)
        output_layer = params.pop('output_layer', None)

        with utils.WorkSpace(ws), utils.OverwriteState(overwrite):
            output_layers = accumulate(
                subcatchments_layer=sc,
                id_col=ID_col,
                ds_col=downstream_ID_col,
                value_columns=value_columns,
                streams_layer=streams,
                output_layer=output_layer,
                verbose=True,
                asMessage=True,
            )

            if add_output_to_map:
                self._add_to_map(output_layers)

        return output_layers
Example 21
    def updateParameters(self, parameters):
        params = self._get_parameter_dict(parameters)
        param_vals = self._get_parameter_values(parameters)
        ws = param_vals.get('workspace', '.')
        vc = params['value_columns']

        with utils.WorkSpace(ws):
            ml = param_vals['monitoring_locations']
            if params['ml_type_col'].altered:
                col = param_vals['ml_type_col']
                values = utils.unique_field_values(ml, col).tolist()
                params['included_ml_types'].filter.list = values

            if params['monitoring_locations'].value:
                agg_methods = analysis.AGG_METHOD_DICT.copy()
                agg_methods.pop('weighted_average', None)

                fields = analysis._get_wq_fields(ml, ['dry', 'wet'])
                self._set_filter_list(vc.filters[0], fields)
                self._set_filter_list(vc.filters[1], list(agg_methods.keys()))

            self._update_value_table_with_default(vc, 'average')
Example 22
    def updateParameters(self, parameters):
        params = self._get_parameter_dict(parameters)
        param_vals = self._get_parameter_values(parameters)
        ws = param_vals.get('workspace', '.')
        vc = params['value_columns']

        with utils.WorkSpace(ws):
            sc = param_vals['subcatchments']

            # handles field name from Propagator output
            prefix = [i[0:3] for i in analysis.AGG_METHOD_DICT.keys()]
            # handles unmodified field name
            prefix.extend(['area', 'imp', 'dry', 'wet'])

            if params['subcatchments'].value:
                fields = analysis._get_wq_fields(sc, prefix)
                fields.append('n/a')
                self._set_filter_list(vc.filters[0], fields)
                self._set_filter_list(vc.filters[1],
                                      list(analysis.AGG_METHOD_DICT.keys()))
                self._set_filter_list(vc.filters[2], fields)

            self._update_value_table_with_default(vc, ['sum', 'n/a'])
Example 23
def test_accumulate():
    ws = resource_filename('propagator.testing', 'score_accumulator')

    with utils.WorkSpace(ws), utils.OverwriteState(True):
        results = toolbox.accumulate(
            subcatchments_layer='subcatchment_wq.shp',
            id_col='Catch_ID_a',
            ds_col='Dwn_Catch_',
            value_columns=[
                ('DryM', 'maximum', 'n/a'),
                ('DryN', 'First', 'area'),
                ('WetB', 'WeIghtED_AveragE', 'imp_ar'),
                ('WetM', 'minimum', 'imp_ar'),
                ('WetN', 'average', 'n/a'),
                ('Area', 'sum', 'n/a'),
                ('Imp', 'weighted_Average', 'Area'),
                ('imp_ar', 'sum', 'n/a'),
            ],
            streams_layer='streams.shp',
            output_layer='output.shp',
        )

        pptest.assert_shapefiles_are_close(
            os.path.join(ws, 'expected_results.shp'),
            os.path.join(ws, results))

        utils.cleanup_temp_results(os.path.join(ws, results))
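
The ('Imp', 'weighted_Average', 'Area') entry above requests an area-weighted impervious value, with 'n/a' marking aggregations that take no weight column. A quick numpy illustration of what that weighted aggregation means (hypothetical numbers, not data from the test fixtures):

import numpy

imp = numpy.array([0.20, 0.60])    # impervious fraction of two subcatchments
area = numpy.array([10.0, 30.0])   # their areas, used as the weight column
weighted_imp = numpy.average(imp, weights=area)  # (0.2*10 + 0.6*30) / 40 == 0.5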
Example 24
    def test_workspace(self):
        nt.assert_equal(arcpy.env.workspace, self.baseline)
        with utils.WorkSpace(self.new_ws):
            nt.assert_equal(arcpy.env.workspace, self.new_ws)

        nt.assert_equal(arcpy.env.workspace, self.baseline)
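
This test pins down the contract of utils.WorkSpace: it swaps arcpy.env.workspace on entry and restores the previous value on exit. A minimal sketch of such a context manager, assuming nothing beyond what the test exercises (not the package's actual source):

import arcpy

class WorkSpaceSketch(object):
    # Hypothetical stand-in for utils.WorkSpace.
    def __init__(self, path):
        self.new_workspace = path
        self.old_workspace = None

    def __enter__(self):
        self.old_workspace = arcpy.env.workspace
        arcpy.env.workspace = self.new_workspace
        return self

    def __exit__(self, *exc_info):
        arcpy.env.workspace = self.old_workspace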
Example 25
    def teardown(self):
        with utils.WorkSpace(self.workspace):
            utils.cleanup_temp_results(self.output)