def test_missing_in_row_dict_raise_valueerror(self):
    """With ``drop_missing=False``, a key absent from ``row_dict`` (here 4
    from the 'b' column is fine, but 1→0 only covers one of the lookups)
    must raise ``ValueError`` instead of being silently dropped."""
    db = Database("whoah")
    db.register()
    dtype = [
        (numpy_string('a'), np.uint32),
        (numpy_string('b'), np.uint32),
        (numpy_string('row'), np.uint32),
        (numpy_string('col'), np.uint32),
        (numpy_string('values'), np.float32),
    ]
    rows = [
        (1, 2, MAX_INT_32, MAX_INT_32, 99),
        (1, 4, MAX_INT_32, MAX_INT_32, 99),
    ]
    arr = np.array(rows, dtype=dtype)
    np.save(db.filepath_processed(), arr, allow_pickle=False)
    with self.assertRaises(ValueError):
        MatrixBuilder.build(
            [db.filepath_processed()], "values",
            "a", "row", "b", "col",
            {1: 0}, {2: 0},
            drop_missing=False,
        )
def test_base_class(self):
    """A DatabaseChooser shares the ``databases`` metadata store and
    exposes the standard five dtype field names."""
    db = DatabaseChooser("a database")
    self.assertEqual(db._metadata, databases)
    expected_names = ('input', 'output', 'row', 'col', 'type')
    self.assertEqual(
        [field[0] for field in db.dtype_fields],
        [numpy_string(name) for name in expected_names],
    )
def test_multiple_values_same_exchange(self):
    """Values for same (row, col) should add together"""
    dtype = [
        (numpy_string('values'), np.float64),
        (numpy_string('rows'), np.uint32),
        (numpy_string('cols'), np.uint32),
    ]
    arr = np.zeros((2,), dtype=dtype)
    arr[0] = (9, 1, 1)
    arr[1] = (33, 1, 1)
    index_dict = [0, 0, 0]  # Only the length matters here
    matrix = MatrixBuilder.build_matrix(
        array=arr,
        row_dict=index_dict,
        col_dict=index_dict,
        row_index_label='rows',
        col_index_label='cols',
        data_label='values',
    )
    # 9 + 33 accumulate into a single cell at (1, 1)
    expected = np.array(((0, 0, 0), (0, 42, 0), (0, 0, 0)))
    self.assertTrue(np.allclose(expected, matrix.toarray()))
class PDS(ProcessedDataStore):
    """Minimal ProcessedDataStore subclass used as a test fixture."""
    _metadata = metadata
    dtype_fields = [
        (numpy_string('a'.replace('a', 'input')), np.uint32),
    ]

    def process_data(self, row):
        # Index tuple is just the 'input' value; the row itself is the data.
        return (row['input'],), row
def test_base_class(self):
    """A Method uses the IA validator and ``methods`` metadata store,
    exposes a dict of metadata after registering, and has the standard
    ('flow', 'geo', 'row', 'col') dtype field names."""
    method = Method(("a", "method"))
    self.assertEqual(method.validator, ia_validator)
    self.assertEqual(method._metadata, methods)
    method.register()
    self.assertTrue(isinstance(method.metadata, dict))
    expected_names = ('flow', 'geo', 'row', 'col')
    self.assertEqual(
        [field[0] for field in method.dtype_fields],
        [numpy_string(name) for name in expected_names],
    )
def test_build_matrix(self):
    """build_matrix places each value at its (row, col) coordinates."""
    dtype = [
        (numpy_string('values'), np.float64),
        (numpy_string('rows'), np.uint32),
        (numpy_string('cols'), np.uint32),
    ]
    arr = np.zeros((4,), dtype=dtype)
    entries = [(4.2, 0, 2), (6.6, 1, 1), (1.3, 2, 1), (10, 2, 2)]
    for i, entry in enumerate(entries):
        arr[i] = entry
    index_dict = [0, 0, 0]  # Only the length matters here
    matrix = MatrixBuilder.build_matrix(
        array=arr,
        row_dict=index_dict,
        col_dict=index_dict,
        row_index_label='rows',
        col_index_label='cols',
        data_label='values',
    )
    expected = np.array(((0, 0, 4.2), (0, 6.6, 0), (0, 1.3, 10)))
    self.assertTrue(np.allclose(expected, matrix.toarray()))
class Loading(ProcessedDataStore):
    """Regionalized loading data keyed by spatial unit."""
    _metadata = loadings
    validator = loading_validator
    dtype_fields = [
        (numpy_string("geo"), np.uint32),
        (numpy_string("row"), np.uint32),
    ]

    def add_mappings(self, data):
        """In theory, this shouldn't do anything, as all spatial units
        should be in defined by the method."""
        geomapping.add({row[1] for row in data})

    def process_data(self, row):
        # Index on the geomapping id; the row index is filled in later
        # (MAX_INT_32 is the placeholder). The data value is row[0].
        return (geomapping[row[1]], MAX_INT_32), row[0]

    @property
    def filename(self):
        return super(Loading, self).filename + ".loading"

    def write_to_map(self, method, flow, geocollection=None):
        map_obj = get_pandarus_map(method, geocollection)
        # Loaded rows are (value, (something, label)); map label -> value.
        data = {entry[1][1]: entry[0] for entry in self.load()}
        if map_obj.raster:
            self._write_raster_loadings_to_map(map_obj, data)
        else:
            self._write_vector_loadings_to_map(map_obj, data)

    def _write_raster_loadings_to_map(self, map_obj, data):
        NODATA = -9999.0
        filepath = os.path.join(projects.output_dir, self.filename + ".tiff")
        # Start from an all-NODATA array, then fill the labelled cells.
        array = np.full(map_obj.file.array().shape, NODATA)
        for cell in map_obj:
            label = cell["label"]
            if label in data:
                array[cell["row"], cell["col"]] = data[label]
        map_obj.file.write_modified_array(filepath, array, nodata=NODATA)
        return filepath

    def _write_vector_loadings_to_map(self, map_obj, data):
        raise NotImplementedError
def test_build_one_d_drop_missing(self):
    """One-dimensional build drops keys missing from ``row_dict``
    (key 3 here), so only two values survive."""
    db = Database("ghost")
    db.register()
    dtype = [
        (numpy_string('a'), np.uint32),
        (numpy_string('row'), np.uint32),
        (numpy_string('values'), np.float32),
    ]
    arr = np.array(
        [(key, MAX_INT_32, 99) for key in (1, 2, 3)],
        dtype=dtype,
    )
    np.save(db.filepath_processed(), arr, allow_pickle=False)
    values = MatrixBuilder.build(
        [db.filepath_processed()], "values", "a", "row",
        row_dict={1: 0, 2: 1}, one_d=True,
    )[0]
    self.assertEqual(values.shape, (2,))
def test_build_one_d(self):
    """One-dimensional build returns (at index 3) a diagonal matrix
    holding each value at its mapped row index."""
    db = Database("sour")
    db.register()
    dtype = [
        (numpy_string('a'), np.uint32),
        (numpy_string('row'), np.uint32),
        (numpy_string('values'), np.float32),
    ]
    arr = np.array(
        [(1, MAX_INT_32, 99), (2, MAX_INT_32, 100)],
        dtype=dtype,
    )
    np.save(db.filepath_processed(), arr, allow_pickle=False)
    matrix = MatrixBuilder.build(
        [db.filepath_processed()], "values", "a", "row",
        row_dict={1: 0, 2: 1}, one_d=True,
    )[3]
    expected = np.array(((99, 0), (0, 100)))
    self.assertTrue(np.allclose(matrix.toarray(), expected))
def test_build_drop_missing(self):
    """By default, rows whose keys are absent from the row or column
    dictionaries are dropped; only (1,2) and (3,2) survive here."""
    db = Database("boo")
    db.register()
    dtype = [
        (numpy_string('a'), np.uint32),
        (numpy_string('b'), np.uint32),
        (numpy_string('row'), np.uint32),
        (numpy_string('col'), np.uint32),
        (numpy_string('values'), np.float32),
    ]
    key_pairs = [(1, 2), (3, 4), (3, 2), (5, 6)]
    arr = np.array(
        [(a, b, MAX_INT_32, MAX_INT_32, 99) for a, b in key_pairs],
        dtype=dtype,
    )
    np.save(db.filepath_processed(), arr, allow_pickle=False)
    values = MatrixBuilder.build(
        [db.filepath_processed()], "values",
        "a", "row", "b", "col",
        {1: 0, 3: 1}, {2: 0, 6: 1},
    )[0]
    self.assertEqual(values.shape, (2,))
def test_base_class(self):
    """A Normalization uses the normalization validator and metadata
    store, and exposes ('flow', 'index') dtype field names."""
    norm = Normalization(("foo",))
    self.assertEqual(norm.validator, normalization_validator)
    self.assertEqual(norm._metadata, normalizations)
    self.assertEqual(
        [field[0] for field in norm.dtype_fields],
        [numpy_string(name) for name in ('flow', 'index')],
    )