def test_swap_funcs(self):
    """Each scipy *_with_swap variant must match the plain numpy dot product."""
    left = mf.create_matrix(100, 100)
    right = mf.create_matrix(100, 100)
    reference = np.dot(left, right)
    swap_variants = (
        mf.scipy_csc_dot_numpy_with_swap,
        mf.scipy_csr_dot_numpy_with_swap,
        mf.scipy_bsr_dot_numpy_with_swap,
    )
    for variant in swap_variants:
        np.testing.assert_array_equal(reference, variant(left, right))
def run_performance_test(items_in_matrix, number_of_timings, functions):
    """Time every function on one dense/sparse matrix pair per matrix size.

    items_in_matrix - iterable of matrix dimensions n (matrices are n x n)
    number_of_timings - repeats passed through to bf.test_performance
    functions - callables under test

    Returns a dict mapping each function name to a list of timing results,
    one entry per tested size.
    """
    timings = {func.__name__: [] for func in functions}
    for size in items_in_matrix:
        # 1% zeros -> effectively dense; 99% zeros -> effectively sparse.
        dense = mf.create_matrix(size, size, 0.01)
        sparse = mf.create_matrix(size, size, 0.99)
        for func in functions:
            measurement = bf.test_performance(func, number_of_timings, dense, sparse)
            timings[func.__name__].append(measurement)
            print(func.__name__, size)
    return timings
def run_performance_test(items_in_matrix, number_of_timings, functions):
    """Benchmark the given functions over a range of matrix sizes.

    For each size n in items_in_matrix a near-dense (1% zeros) and a
    near-sparse (99% zeros) n x n matrix are generated, and every function
    is timed on that pair via bf.test_performance.

    Returns a dict: function name -> list of timing results (one per size).
    """
    collected = {}
    for func in functions:
        collected[func.__name__] = []
    for dimension in items_in_matrix:
        matrix_dense = mf.create_matrix(dimension, dimension, 0.01)
        matrix_sparse = mf.create_matrix(dimension, dimension, 0.99)
        for func in functions:
            result = bf.test_performance(
                func, number_of_timings, matrix_dense, matrix_sparse)
            collected[func.__name__].append(result)
            print(func.__name__, dimension)
    return collected
def run_performance_test(functions, items_pro_dimension, sparsities,
                         number_of_timings=10):
    """
    Runs the benchmark.

    Parameters
    ----------
    functions - the functions under test
    items_pro_dimension - mapping of result key -> number of items pro matrix
        dimension (iterated via .items())
    sparsities - a list of values between 0 and 1 which define the percent
        zeros in each matrix.
    number_of_timings - number of repeats pro measurement (default 10).
        NOTE(review): the original body referenced number_of_timings without
        it being a parameter or global, which raised NameError at runtime;
        it is now an explicit keyword parameter so existing 3-argument
        callers keep working.

    Returns a dictionary with the avg. results and std. for each function
    -------
    """
    results = {
        size: {f.__name__: [] for f in functions}
        for size in items_pro_dimension
    }
    for key, dimension in items_pro_dimension.items():
        for sparsity in sparsities:
            matrix_1 = mf.create_matrix(dimension, dimension, sparsity)
            # Transpose keeps the same sparsity while giving a distinct operand.
            matrix_2 = matrix_1.T
            for func in functions:
                results[key][func.__name__].append(
                    bf.test_performance(func, number_of_timings,
                                        matrix_1, matrix_2))
                # Print the readable name (was the raw function repr).
                print(key, sparsity, func.__name__)
    return results
def run_performance_test(sparsities, sparse_matrices, items_pro_dimension,
                         number_of_timings):
    """
    Runs the benchmark

    Parameters
    ----------
    sparsities - a list of values between 0 and 1 that represent the tested
        sparsities
    sparse_matrices - the sparse matrix constructors under test (callables
        that convert a dense matrix to a sparse representation)
    items_pro_dimension - amount of items pro matrix dimension
    number_of_timings - amount of repeats pro test

    Returns a dictionary containing the avg timing and std. for each tested
    matrix.
    -------
    """
    results = {
        sparsity: {sm.__name__: [] for sm in sparse_matrices}
        for sparsity in sparsities
    }
    for sparsity in sparsities:
        for n in items_pro_dimension:
            M_1 = create_matrix(n, n, sparsity)
            M_2 = M_1.T
            for sm in sparse_matrices:
                # BUGFIX: convert into fresh locals instead of rebinding
                # M_1/M_2 — previously the 2nd and 3rd sparse classes were
                # fed the *previous* class's converted output rather than
                # the original matrices.
                sparse_1 = sm(M_1)
                sparse_2 = sm(M_2)
                results[sparsity][sm.__name__].append(
                    bf.test_performance_dot_sparse(number_of_timings,
                                                   sparse_1, sparse_2))
                print(sparsity, sm.__name__, n)
    return results
def test_create_matrix_half_zeros(self):
    """A 10x10 matrix with percent_zeros=0.5 must contain exactly 50 zeros."""
    matrix = mf.create_matrix(10, 10, 0.5)
    zeros_count = 0
    for row in matrix:
        for value in row:
            if value == 0:
                zeros_count += 1
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use the canonical assertEqual.
    self.assertEqual(50, zeros_count)
def run_performance_test(items_pro_dimension, number_of_timings, functions):
    """
    Runs the Benchmark.

    Parameters
    ----------
    items_pro_dimension - number of items in each matrix dimension
    number_of_timings - number of repeats for each timing
    functions - the functions under test

    Returns a dictionary with the avg. results and std. for each function
    -------
    """
    benchmark = {func.__name__: [] for func in functions}
    for size in items_pro_dimension:
        # 1% zeros -> near-dense operand; 99% zeros -> near-sparse operand.
        dense = mf.create_matrix(size, size, 0.01)
        sparse = mf.create_matrix(size, size, 0.99)
        for func in functions:
            timing = bf.test_performance(func, number_of_timings, dense, sparse)
            benchmark[func.__name__].append(timing)
            print(func.__name__, size)
    return benchmark
def run_performance_test(items_pro_dimension, number_of_timings, functions):
    """
    Runs the Benchmark.

    Parameters
    ----------
    items_pro_dimension - number of items in each matrix dimension
    number_of_timings - number of repeats for each timing
    functions - the functions under test

    Returns a dictionary with the avg. results and std. for each function
    -------
    """
    outcome = {}
    for func in functions:
        outcome[func.__name__] = []
    for n in items_pro_dimension:
        matrix_a = mf.create_matrix(n, n, 0.01)
        matrix_b = mf.create_matrix(n, n, 0.99)
        for func in functions:
            outcome[func.__name__].append(
                bf.test_performance(func, number_of_timings,
                                    matrix_a, matrix_b))
            print(func.__name__, n)
    return outcome
def test_create_matrix_all_zeros(self):
    """percent_zeros=1 must produce a matrix with no nonzero entries."""
    zeros_matrix = mf.create_matrix(10, 10, 1)
    # .any() is truthy iff some entry is nonzero; replaces the manual
    # `if ...: self.fail()` followed by a vacuous assertTrue(True).
    self.assertFalse(zeros_matrix.any())
def test_dot_scipy_funcs_with_conversion(self):
    """Each scipy conversion-based dot must match the plain numpy result."""
    matrix = mf.create_matrix(100, 100)
    reference = np.dot(matrix, matrix)
    conversion_funcs = (
        mf.dot_scipy_csc_with_conversion,
        mf.dot_scipy_csr_with_conversion,
        mf.dot_scipy_bsr_with_conversion,
    )
    for conv in conversion_funcs:
        np.testing.assert_array_equal(reference, conv(matrix, matrix))
def test_dot_numpy(self):
    """mf.dot_numpy must agree with np.dot on two random 100x100 matrices."""
    matrix_a = mf.create_matrix(100, 100)
    matrix_b = mf.create_matrix(100, 100)
    reference = np.dot(matrix_a, matrix_b)
    actual = mf.dot_numpy(matrix_a, matrix_b)
    np.testing.assert_array_equal(reference, actual)
def test_create_matrix_invalid_percent_zeros_too_high(self):
    """Call create_matrix with percent_zeros above the valid [0, 1] range."""
    # NOTE(review): there is no assertion here — the test passes vacuously
    # unless create_matrix raises. If invalid input is supposed to raise,
    # this should use self.assertRaises(...); TODO confirm the intended
    # contract of mf.create_matrix.
    matrix = mf.create_matrix(10, 10, 1.1)
def test_create_matrix_invalid_percent_zeros_too_low(self):
    """Call create_matrix with percent_zeros below the valid [0, 1] range."""
    # NOTE(review): no assertion — the test only fails if create_matrix
    # raises. If rejection of negative sparsity is the intent, wrap the
    # call in self.assertRaises(...); TODO confirm expected behavior.
    matrix = mf.create_matrix(10, 10, -0.1)
def test_create_matrix_invalid_2nd_dimension(self):
    """Call create_matrix with a zero second dimension."""
    # NOTE(review): no assertion — passes vacuously unless create_matrix
    # raises. If a zero dimension should be rejected, use
    # self.assertRaises(...); TODO confirm intended contract.
    matrix = mf.create_matrix(10, 0, 0.5)
def test_create_matrix_invalid_1st_dimension(self):
    """Call create_matrix with a zero first dimension."""
    # NOTE(review): no assertion — passes vacuously unless create_matrix
    # raises. If a zero dimension should be rejected, use
    # self.assertRaises(...); TODO confirm intended contract.
    matrix = mf.create_matrix(0, 10, 0.5)
def test_create_matrix_all_ones(self):
    """percent_zeros=0 must produce a matrix where every entry equals 1."""
    ones_matrix = mf.create_matrix(10, 10, 0)
    # BUGFIX: the original check `ones_matrix.any() != 1` only verified that
    # *some* entry is nonzero — a matrix with a single nonzero element would
    # have passed. Verify every entry is exactly 1 instead.
    self.assertTrue((ones_matrix == 1).all())