def create_actual_table_dense_dot_sparse(self):
    labels = {'dot_numpy': 'numpy (Reference)'}
    items_pro_dim = [100]
    funcs_ranked_by_time = [(0.1, 'dot_numpy')]
    results = {'dot_numpy': [(0.1, 0.001)]}
    timings = {'dot_numpy': [0.1]}
    test_data = tf.TableData(labels, items_pro_dim, funcs_ranked_by_time,
                             results, timings)
    return tf.create_summery_tables_dense_dot_sparse_benchmark(test_data)
def create_actual_table_for_sparsity_benchmark(self):
    labels = {'bsr_matrix': 'Block Sparse Row'}
    items_pro_dim = [100]
    funcs_ranked_by_time = [(0.1, 'bsr_matrix')]
    results = {0.1: {'bsr_matrix': [(0.1, 0.001), (0.4, 0.004)]}}
    timings = {'bsr_matrix': [0.1, 0.4]}
    test_data = tf.TableData(labels, items_pro_dim, funcs_ranked_by_time,
                             results, timings)
    return tf.create_summery_tables_for_sparse_matrices_benchmark(
        test_data, 0.1)
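# Both fixtures above feed a ``tf.TableData`` container whose definition
# lives in the table-formatting module and is not shown in this excerpt.
# A minimal sketch consistent with the call sites (the field names are
# assumptions inferred from the positional arguments) could look like this:
from collections import namedtuple

# labels:               {function_name: display_alias}
# items_pro_dim:        matrix dimensions that were benchmarked
# funcs_ranked_by_time: [(representative_time, function_name), ...]
# results:              per-function lists of (mean, std) timing tuples
# timings:              per-function raw timing lists
TableData = namedtuple(
    'TableData',
    ['labels', 'items_pro_dim', 'funcs_ranked_by_time', 'results', 'timings'])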
def create_functions_aliases():
    """Returns a dictionary with the function names as keys and aliases
    as values.

    Returns
    -------
    dict
        Function names mapped to human-readable labels.
    """
    return {'dot_numpy': 'Numpy x Numpy (Reference)',
            'scipy_csc_dot_numpy_with_swap': 'Compressed Sparse Column x Numpy',
            'scipy_csr_dot_numpy_with_swap': 'Compressed Sparse Row x Numpy',
            'scipy_bsr_dot_numpy_with_swap': 'Block Sparse Row x Numpy'}


if __name__ == '__main__':
    benchmark_timestamp = df.get_date()
    results_path = (RESULTS_DIRECTORY + BENCHMARK_DIRECTORY
                    + benchmark_timestamp + "/")
    functions_under_test = get_functions_under_test()
    items_pro_dimension = [500, 1000, 2000, 3000, 4000, 5000]
    number_of_timings_pro_function_and_matrix_dimension = 5
    results = dds.run_performance_test(
        items_pro_dimension,
        number_of_timings_pro_function_and_matrix_dimension,
        functions_under_test)
    dds.backup_results(results_path, results, FILENAME)
    timings = dds.get_timings_from_results(results)
    functions_ranked_by_time = dds.rank_functions_by_performance(timings)
    functions_labels = create_functions_aliases()
    table_data = tf.TableData(functions_labels, items_pro_dimension,
                              functions_ranked_by_time, results, timings)
    # Keep only the labels; the ranking pairs each label with its time.
    ranked_labels = [ranked_label
                     for time, ranked_label in functions_ranked_by_time]
    plot_data = PlotData(TEST_NAME, functions_labels, results, ranked_labels,
                         PLOT_X_LABEL, PLOT_Y_LABEL)
    dds.persist_plots(items_pro_dimension, results_path, plot_data)
    results_table = dds.create_summery_table(table_data)
    dds.persist_summery_table(
        number_of_timings_pro_function_and_matrix_dimension,
        results_path, results_table, TABLE_HEADLINE)
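# ``dds.rank_functions_by_performance`` is defined in the shared benchmark
# module and its implementation is not part of this excerpt. From how its
# result is consumed above (a list of ``(time, label)`` pairs unpacked in
# the list comprehension), one plausible sketch is: pick a representative
# time per function and sort ascending, fastest first. Using the timing at
# the largest matrix dimension as the ranking key is an assumption.
def rank_functions_by_performance_sketch(timings):
    """Sketch: rank functions by their timing at the largest dimension."""
    ranked = [(function_timings[-1], function_name)
              for function_name, function_timings in timings.items()]
    return sorted(ranked)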
results = run_performance_test(sparsities, sparse_matrices,
                               items_pro_matrix_dimension, number_of_timings)
backup_results(results_path, results)
timings = get_timings_from_performance_test_results(results)
functions_labels = create_functions_aliases()
functions_ranked_by_performance = rank_functions_by_performance(timings)
persist_benchmark_data_to_summery_table_file(number_of_timings, results_path)
for key in sparsities:
    plot_title = create_plot_title(key)
    plot_data = PlotData(plot_title, functions_labels, results,
                         functions_ranked_by_performance,
                         PLOT_X_LABEL, PLOT_Y_LABEL)
    pf.plot_timing_sparse_matrices_benchmark(
        plot_data, items_pro_matrix_dimension, sparse_matrices, key,
        results_path)
    pf.make_reduced_plot_for_sparse_matrices_benchmark(
        plot_data, items_pro_matrix_dimension, sparse_matrices, key,
        results_path)
    table_data = tf.TableData(functions_labels, items_pro_matrix_dimension,
                              functions_ranked_by_performance[key],
                              results, timings[key])
    persist_summery_table(table_data, key, results_path)
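# The sparsity benchmark nests everything one level deeper than the dense
# benchmark: as the fixture in create_actual_table_for_sparsity_benchmark
# suggests, results, timings and rankings are keyed by sparsity first and
# by function name second. A minimal illustration of the assumed shape:
results_shape_example = {
    0.1: {                   # sparsity level
        'bsr_matrix': [      # function under test
            (0.1, 0.001),    # (mean, std) for the first dimension
            (0.4, 0.004),    # (mean, std) for the second dimension
        ],
    },
}
# timings[0.1] would then hold {'bsr_matrix': [0.1, 0.4]}, which is why the
# loop above indexes functions_ranked_by_performance[key] and timings[key]
# before building the per-sparsity TableData.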