def test_analyze(self):
    """Exercise ``Accumulator.analyze`` end-to-end against the fixture
    shapefiles, compare the result to the expected shapefile, and confirm
    the output layer is added to the map exactly once.
    """
    tbx = toolbox.Accumulator()
    ws = resource_filename('propagator.testing', 'score_accumulator')
    # Semicolon-delimited "<column> <aggregation> <weighting-or-n/a>" triples
    # (string form of the tuples used by ``toolbox.accumulate`` elsewhere).
    vc = (
        'DryM maximum n/a;'
        'DryN First area;'
        'WetB WeIghtED_AveragE imp_ar;'
        'WetM minimum imp_ar;'
        'WetN average n/a;'
        'Area sum n/a;'
        'Imp weighted_Average Area;'
        'imp_ar sum n/a'
    )
    with mock.patch.object(tbx, '_add_to_map') as atm:
        stream_layer = tbx.analyze(
            workspace=ws,
            overwrite=True,
            subcatchments='subcatchment_wq.shp',
            ID_column='Catch_ID_a',
            downstream_ID_column='Dwn_Catch_',
            value_columns=vc,
            streams='streams.shp',
            output_layer='output.shp',
            add_output_to_map=True,
        )

        nt.assert_equal(stream_layer, 'output.shp')
        pptest.assert_shapefiles_are_close(
            os.path.join(ws, 'expected_results.shp'),
            os.path.join(ws, stream_layer),
        )
        atm.assert_called_once_with(stream_layer)

        # BUG FIX: the original called cleanup twice -- once with the full
        # path and again with just the bare layer name, which would resolve
        # against whatever the current arcpy workspace happened to be.
        # Clean up exactly once, with the unambiguous full path.
        utils.cleanup_temp_results(os.path.join(ws, stream_layer))
def test_analyze(self):
    """Run ``Propagator.analyze`` end-to-end on the fixture workspace and
    verify both the subcatchment and stream outputs, plus the map calls.
    """
    tbx = toolbox.Propagator()
    ws = resource_filename('propagator.testing', 'tbx_propagate')
    columns = 'Dry_B averAgE;Dry_M Median;Dry_N minimum;Wet_B maximum;Wet_M #;Wet_N Median'
    with mock.patch.object(toolbox.Propagator, '_add_to_map') as atm:
        subc_layer, stream_layer = tbx.analyze(
            workspace=ws,
            overwrite=True,
            subcatchments='subcatchments.shp',
            ID_column='CID',
            downstream_ID_column='DS_CID',
            monitoring_locations='monitoring_locations.shp',
            value_columns=columns,
            output_layer='test.shp',
            streams='streams.shp',
            add_output_to_map=True,
        )

        nt.assert_equal(subc_layer, 'test_subcatchments.shp')
        nt.assert_equal(stream_layer, 'test_streams.shp')

        # Compare each produced layer against its expected counterpart.
        comparisons = [
            ('expected_subc.shp', subc_layer),
            ('expected_streams.shp', stream_layer),
        ]
        for baseline, produced in comparisons:
            pptest.assert_shapefiles_are_close(
                os.path.join(ws, baseline),
                os.path.join(ws, produced),
            )

        utils.cleanup_temp_results(
            os.path.join(ws, subc_layer),
            os.path.join(ws, stream_layer),
        )

        atm.assert_has_calls([mock.call(subc_layer), mock.call(stream_layer)])
def test_reduce():
    """Check that ``analysis._reduce`` collapses the monitoring points
    down to the expected reduced shapefile.
    """
    pkg = "propagator.testing._reduce"
    ws = resource_filename("propagator.testing", "_reduce")
    with utils.OverwriteState(True), utils.WorkSpace(ws):
        mon_locations = resource_filename(pkg, "point.shp")
        expected_reduced_mon_locations = resource_filename(pkg, "reduced_point.shp")

        # The function takes the output file as an input, so a placeholder
        # output path has to be created up front.
        out_path = utils.create_temp_filename("reduced_point", filetype='shape')
        reduced_mon_locations = analysis._reduce(
            mon_locations,
            out_path,
            ["WQ1", "WQ2", "WQ3"],
            'ID',
            'FID',
        )

        pptest.assert_shapefiles_are_close(
            reduced_mon_locations,
            expected_reduced_mon_locations,
        )
        utils.cleanup_temp_results(reduced_mon_locations)
def test_spatial_join():
    """Check that ``utils.spatial_join`` reproduces the known merge result.

    BUG FIX: the original test wrote its output to ``merge_result.shp`` --
    the very same file it used as the known/expected result -- so the
    shapefile comparison compared a file with itself and could never fail.
    The output now goes to a separate temporary file.
    """
    pkg = 'propagator.testing.spatial_join'
    known = resource_filename(pkg, 'merge_result.shp')
    left = resource_filename(pkg, 'merge_baseline.shp')
    right = resource_filename(pkg, 'merge_join.shp')
    # Distinct output path so ``known`` is left untouched.
    outputfile = resource_filename(pkg, 'merge_output.shp')
    with utils.OverwriteState(True):
        test = utils.spatial_join(left=left, right=right, outputfile=outputfile)

        nt.assert_equal(test, outputfile)
        pptest.assert_shapefiles_are_close(test, known)

        utils.cleanup_temp_results(test)
def test_concat_results():
    """Check that ``utils.concat_results`` stacks the two input shapefiles
    into a layer matching the known result.
    """
    pkg = 'propagator.testing.concat_results'
    known = resource_filename(pkg, 'known.shp')
    inputs = [
        resource_filename(pkg, 'input1.shp'),
        resource_filename(pkg, 'input2.shp'),
    ]
    with utils.OverwriteState(True):
        result = utils.concat_results(resource_filename(pkg, 'result.shp'), inputs)

        nt.assert_true(isinstance(result, arcpy.mapping.Layer))
        pptest.assert_shapefiles_are_close(result.dataSource, known)
        utils.cleanup_temp_results(result)
def test_intersect_layers():
    """Intersect the subcatchment and monitoring-location layers and
    compare the output to the expected shapefile.
    """
    ws = resource_filename('propagator.testing', 'intersect_layers')
    input_layers = ['subcatchments.shp', 'monitoring_locations.shp']
    with utils.OverwriteState(True), utils.WorkSpace(ws):
        utils.intersect_layers(input_layers, 'test.shp')

        pptest.assert_shapefiles_are_close(
            os.path.join(ws, 'expected.shp'),
            os.path.join(ws, 'test.shp'),
        )

        utils.cleanup_temp_results(os.path.join(ws, 'test.shp'))
def test_intersect_polygon_layers():
    """Intersect two polygon layers and compare against the known output."""
    pkg = "propagator.testing.intersect_polygons"
    layer_files = [
        resource_filename(pkg, "intersect_input1.shp"),
        resource_filename(pkg, "intersect_input2.shp"),
    ]
    known_file = resource_filename(pkg, "intersect_known.shp")
    output_file = resource_filename(pkg, "intersect_output.shp")
    with utils.OverwriteState(True):
        output = utils.intersect_polygon_layers(output_file, layer_files)

        nt.assert_true(isinstance(output, arcpy.mapping.Layer))
        pptest.assert_shapefiles_are_close(output_file, known_file)
        utils.cleanup_temp_results(output)
def test_update_attribute_table():
    """Copy the input shapefile, push new attribute values into its table
    via ``utils.update_attribute_table``, and compare to the expected file.
    """
    ws = resource_filename('propagator.testing', 'update_attribute_table')
    with utils.WorkSpace(ws), utils.OverwriteState(True):
        inputpath = resource_filename("propagator.testing.update_attribute_table", "input.shp")
        # Work on a copy so the fixture itself is never modified.
        testpath = inputpath.replace('input', 'test')
        expected = resource_filename("propagator.testing.update_attribute_table", "expected_output.shp")

        # Structured array: one record per subcatchment ID with the new
        # Cu/Pb values to be written into the attribute table.
        record_dtype = [('id', int), ('ds_id', int), ('Cu', '<U5'), ('Pb', '<U5')]
        new_attributes = numpy.array(
            [
                (1, 0, u'Cu_1', 'Pb_1'),
                (2, 0, u'Cu_2', 'Pb_2'),
                (3, 0, u'Cu_3', 'Pb_3'),
                (4, 0, u'Cu_4', 'Pb_4'),
            ],
            dtype=record_dtype,
        )

        arcpy.management.Copy(inputpath, testpath)
        utils.update_attribute_table(testpath, new_attributes, 'id', ['Cu', 'Pb'])

        pptest.assert_shapefiles_are_close(testpath, expected)
        utils.cleanup_temp_results(testpath)
def test_cleanup_temp_results():
    """Copy a template raster twice, then verify that
    ``utils.cleanup_temp_results`` removes both copies from the workspace.
    """
    workspace = os.path.abspath(
        resource_filename('propagator.testing', 'cleanup_temp_results')
    )
    template_file = 'test_dem.tif'
    name1 = 'temp_1.tif'
    name2 = 'temp_2.tif'

    # IDIOM FIX: the copies' return values were bound to unused locals
    # (raster1/raster2) in the original; call for the side effect only.
    with utils.WorkSpace(workspace):
        utils.copy_layer(template_file, name1)
        utils.copy_layer(template_file, name2)

    nt.assert_true(os.path.exists(os.path.join(workspace, 'temp_1.tif')))
    nt.assert_true(os.path.exists(os.path.join(workspace, 'temp_2.tif')))

    with utils.WorkSpace(workspace):
        utils.cleanup_temp_results(name1, name2)

    nt.assert_false(os.path.exists(os.path.join(workspace, 'temp_1.tif')))
    nt.assert_false(os.path.exists(os.path.join(workspace, 'temp_2.tif')))
def test_aggregate_streams_by_subcatchment():
    """Aggregate the stream layer within subcatchments and compare the
    output to the expected shapefile.
    """
    ws = resource_filename('propagator.testing', 'agg_stream_in_subc')
    with utils.WorkSpace(ws), utils.OverwriteState(True):
        out_layer = analysis.aggregate_streams_by_subcatchment(
            stream_layer='streams.shp',
            subcatchment_layer='subc.shp',
            id_col='CID',
            ds_col='DS_CID',
            other_cols=['WQ_1', 'WQ_2'],
            output_layer='test.shp',
        )

        nt.assert_equal(out_layer, 'test.shp')
        pptest.assert_shapefiles_are_close(
            os.path.join(ws, out_layer),
            os.path.join(ws, 'expected.shp'),
            ngeom=4,
        )

        utils.cleanup_temp_results(os.path.join(ws, out_layer))
def test_accumulate():
    """Run ``toolbox.accumulate`` on the fixture workspace and compare the
    accumulated output against the expected results shapefile.
    """
    ws = resource_filename('propagator.testing', 'score_accumulator')
    # (column, aggregation method, weighting column or 'n/a') triples
    value_cols = [
        ('DryM', 'maximum', 'n/a'),
        ('DryN', 'First', 'area'),
        ('WetB', 'WeIghtED_AveragE', 'imp_ar'),
        ('WetM', 'minimum', 'imp_ar'),
        ('WetN', 'average', 'n/a'),
        ('Area', 'sum', 'n/a'),
        ('Imp', 'weighted_Average', 'Area'),
        ('imp_ar', 'sum', 'n/a'),
    ]
    with utils.WorkSpace(ws), utils.OverwriteState(True):
        results = toolbox.accumulate(
            subcatchments_layer='subcatchment_wq.shp',
            id_col='Catch_ID_a',
            ds_col='Dwn_Catch_',
            value_columns=value_cols,
            streams_layer='streams.shp',
            output_layer='output.shp',
        )

        pptest.assert_shapefiles_are_close(
            os.path.join(ws, 'expected_results.shp'),
            os.path.join(ws, results),
        )
        utils.cleanup_temp_results(os.path.join(ws, results))
def teardown(self):
    """Remove the subcatchment and stream result files from the workspace."""
    result_paths = [
        os.path.join(self.ws, name)
        for name in (self.subc_res, self.stream_res)
    ]
    utils.cleanup_temp_results(*result_paths)
def teardown(self):
    """Remove the test's output from within its workspace."""
    # The workspace context ensures the (possibly relative) output path
    # is resolved against the right directory.
    with utils.WorkSpace(self.workspace):
        utils.cleanup_temp_results(self.output)
def teardown(self):
    """Remove the test's output file."""
    target = self.testfile
    utils.cleanup_temp_results(target)
def test_cleanup_with_bad_input():
    """Feed non-string arguments to ``utils.cleanup_temp_results``.

    NOTE(review): presumably this is expected to raise on non-string
    inputs -- an ``@nt.raises``-style decorator may exist outside this
    view; confirm against the original file.
    """
    bad_args = (1, 2, ['a', 'b', 'c'])
    utils.cleanup_temp_results(*bad_args)
def teardown(self):
    """Remove the test's result file from the workspace."""
    result_path = os.path.join(self.ws, self.results)
    utils.cleanup_temp_results(result_path)