def test_009_grid(self):
    task = benchmarking_task.task()
    ns = [5, 7, 13]
    for n in ns:
        task.set_parametrization(
            "var_range_" + str(n),
            benchmarking_task.parametrization(benchmarking_task.LIN_RANGE, (0, 1, n)))
    ns.append(2)
    task.set_parametrization(
        "var_list",
        benchmarking_task.parametrization(benchmarking_task.LIST, [0, 1]))
    ns.append(1)  # the STATIC variable contributes a single point
    task.set_parametrization(
        "var_static",
        benchmarking_task.parametrization(benchmarking_task.STATIC, numpy.pi))
    total_should = reduce(lambda x, y: x * y, ns, 1)
    grid, constants, names = task.get_parameter_set()
    self.assertEqual(len(grid), total_should)
    self.assertSequenceEqual(constants, [task.variables["var_static"].get_values()])
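# A minimal sketch of how get_parameter_set() presumably builds the grid:
# the Cartesian product of every swept (non-STATIC) variable's values, with
# STATIC values reported separately as constants. This is an assumption for
# illustration, not benchmarking_task's actual implementation; the names
# expand_grid, sweeps and statics are hypothetical.
import itertools

def expand_grid(sweeps, statics):
    """sweeps: dict name -> value sequence; statics: dict name -> value."""
    names = sorted(sweeps)
    grid = list(itertools.product(*[sweeps[name] for name in names]))
    constants = [statics[name] for name in sorted(statics)]
    return grid, constants, names

# e.g. a 5-point, 7-point and 2-point sweep yields a 70-point grid:
# len(expand_grid({"a": range(5), "b": range(7), "c": [0, 1]}, {"d": 3.14})[0]) == 70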
def test_003_param_split_list_non_integer(self):
    # 11 does not divide 100, so chunk sizes differ, but the concatenation
    # of all splits must still reproduce the original list in order
    N = 100
    SPLITS = 11
    p = benchmarking_task.parametrization(benchmarking_task.LIST, range(N))
    splitted = p.split(SPLITS)
    total = numpy.concatenate([sp.get_values() for sp in splitted])
    self.assertListEqual(total.tolist(), list(range(N)))
def test_002_param_split_list_integer(self):
    # 10 divides 100 evenly: each split must be one contiguous chunk of 10
    N = 100
    SPLITS = 10
    p = benchmarking_task.parametrization(benchmarking_task.LIST, range(N))
    splitted = p.split(SPLITS)
    for i in range(SPLITS):
        self.assertSequenceEqual(
            range(i * (N // SPLITS), (i + 1) * (N // SPLITS)),
            splitted[i].get_values())
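# Both split tests above are satisfied by near-equal contiguous chunking.
# A sketch of that behaviour (an assumption about split(), not its actual
# code; split_list is a hypothetical name):
import numpy

def split_list(values, n):
    # numpy.array_split also accepts an n that does not divide len(values)
    return numpy.array_split(numpy.asarray(values), n)

# split_list(range(100), 11) gives chunks of length 10 or 9 whose
# concatenation restores range(100) in order.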
def test_005_task_json_save(self):
    task = benchmarking_task.task("class", "module")
    task.set_parametrization(
        "static_test_variable",
        benchmarking_task.parametrization(
            benchmarking_task.STATIC, numpy.pi, numpy.dtype("float32").type))
    task.set_parametrization(
        "range_test_variable",
        benchmarking_task.parametrization(benchmarking_task.LIN_RANGE, self.range_spec))
    task.set_parametrization(
        "list_test_variable",
        benchmarking_task.parametrization(benchmarking_task.LIST, [10, 20, 30], int))
    task.sinks = self.ref_task_dic["sinks"]
    outfile = tempfile.NamedTemporaryFile(delete=False, suffix=".json")
    task.save(outfile)
    outfile.close()
    # read the file back and compare against the reference dictionary
    with open(outfile.name) as infile:
        resdic = json.load(infile)
    self.assertTrue(helpers.comp_dict(resdic, self.ref_task_dic))
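# helpers.comp_dict is defined elsewhere; a minimal recursive comparison
# along these lines would satisfy the assertion above (an assumption about
# the helper, not its actual definition):
def comp_dict(a, b):
    if isinstance(a, dict) and isinstance(b, dict):
        return set(a) == set(b) and all(comp_dict(a[k], b[k]) for k in a)
    if isinstance(a, list) and isinstance(b, list):
        return len(a) == len(b) and all(comp_dict(x, y) for x, y in zip(a, b))
    return a == b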
def test_008_total_points(self):
    task = benchmarking_task.task()
    l_noise = 100
    l_signal = 13
    task.set_parametrization(
        "noise_power",
        benchmarking_task.parametrization(benchmarking_task.LIST, range(l_noise), int))
    task.set_parametrization(
        "signal_power",
        benchmarking_task.parametrization(benchmarking_task.LIN_RANGE, (0, 10, l_signal)))
    task.set_parametrization("sample_rate", benchmarking_task.parametrization())
    task.set_parametrization(
        "threshold",
        benchmarking_task.parametrization(benchmarking_task.STATIC, 1.0 / numpy.pi))
    points = task.get_total_points()
    # the default and STATIC parametrizations each count as a single point
    self.assertEqual(points, l_noise * l_signal * 1 * 1)
    # splitting the task must neither drop nor duplicate grid points
    n = 20
    tasks = task.split(n)
    self.assertEqual(len(tasks), n)
    self.assertEqual(sum([t.get_total_points() for t in tasks]), points)
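# get_total_points() is presumably the product of the per-variable point
# counts, with default and STATIC parametrizations counting as one point
# each (an assumption matching the assertions above; total_points is a
# hypothetical name):
from functools import reduce  # a builtin in Python 2, imported for clarity

def total_points(value_counts):
    return reduce(lambda x, y: x * y, value_counts, 1)

# total_points([100, 13, 1, 1]) == 1300, the value asserted above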
def _add_var(self):
    # find the first unused "new_variableN" name
    newname = "new_variable{:d}"
    counter = 0
    while newname.format(counter) in self.task.variables:
        counter += 1
    newname = newname.format(counter)
    newvar = bt.parametrization()
    self.task.variables[newname] = newvar
    # append a table row for the fresh variable
    n_rows = self.gui.variable_table.rowCount()
    self.gui.variable_table.setRowCount(n_rows + 1)
    self._add_row_from_parametrization(n_rows, newname, newvar)
def test_004_param_split_range(self):
    r = numpy.linspace(*self.range_spec)
    p = benchmarking_task.parametrization(benchmarking_task.LIN_RANGE, self.range_spec)
    splitted = p.split(13)
    for spl in splitted:
        l = len(spl.get_values())
        self.assertTrue(len(r) // 13 <= l <= len(r) // 13 + 1)
    total = numpy.concatenate([spl.get_values() for spl in splitted])
    ref_spec = list(self.range_spec)
    # each of the 13 splits carries ceil(100 / 13) = 8 points, so the
    # recombined range has (100 // 13) * 13 + 13 = 104 points
    ref_spec[2] = (self.range_spec[2] // 13) * 13 + 13
    self.assertFloatTuplesAlmostEqual(total, numpy.linspace(*ref_spec), places=5)
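# The expected 104-point result implies that splitting a LIN_RANGE rounds
# every chunk up to a common length: 13 sub-ranges of ceil(100 / 13) = 8
# points covering the stretched 104-point grid. A sketch of such a split
# (assumed behaviour; split_linspace is a hypothetical name):
import numpy

def split_linspace(start, stop, num, n):
    per_chunk = -(-num // n)  # ceil(num / n)
    full = numpy.linspace(start, stop, per_chunk * n)
    return numpy.split(full, n)

# numpy.concatenate(split_linspace(0, 1, 100, 13)) equals
# numpy.linspace(0, 1, 104), matching ref_spec above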
def setUp(self):
    self.range_spec = (0, 1, 100)
    # the .grc fixture only needs to exist here; from_grc() reopens it by name
    self.xml_file = open(
        os.path.join(os.path.dirname(__file__), "extraction_test_topblock.grc"), "r")
    self.xml_file.close()
    task = bt.task.from_grc(self.xml_file.name)
    task.set_parametrization("length", bt.parametrization(bt.LIST, [2, 5, 10], int))
    task.set_parametrization("value", bt.parametrization(bt.LIN_RANGE, (0, 10, 20)))
    self.dis = distributed_benchmarking.distributor()
    self.dis.start()
    time.sleep(0.05)  # give the distributor a moment to start up
    self.worker_dic = {
        "id": "worker",
        "control_address": "tcp://127.0.0.1:",
        "pool": "default"
    }
def test_001_param_range(self):
    p = benchmarking_task.parametrization(benchmarking_task.LIN_RANGE, self.range_spec)
    vals = p.get_values()
    self.assertListEqual(vals.tolist(), numpy.linspace(*self.range_spec).tolist())