def _test_content_hypersphere(self): # disabled because it is slow
    print("Test content hypersphere")
    for i in range(5):
        numpy.seterr(all='ignore')
        test_bunch = Bunch()
        content_predicted = numpy.linalg.det(self.diag)*math.pi**2/2.
        test_bunch = Bunch.new_hit_shell(7, 4.*self.diag,
                                         ['x', 'y', 'px', 'py'], '')
        test_bunch.append(Hit.new_from_dict({'x':0., 'y':0., 'px':0., 'py':0.}))
        for hit in test_bunch:
            hit['x'] += 1.*i
            hit['px'] += 2.*i
            hit['y'] += 3.*i
            hit['py'] += 4.*i
        my_weights = VoronoiWeighting(['x', 'y', 'px', 'py'],
                                      numpy.array([[2., 0., 0., 0.],
                                                   [0., 1., 0., 0.],
                                                   [0., 0., 1., 0.],
                                                   [0., 0., 0., 1.],]))
        my_weights.apply_weights(test_bunch, False)
        my_weights.plot_two_d_projection(['x', 'y'])
        my_weights.plot_two_d_projection(['px', 'py'])
        #self.assertEqual(len(my_weights.tile_content_list), len(test_bunch)+1)
        content_actual = sum(my_weights.tile_content_list)
        print("Content", content_actual, "Predicted", content_predicted)
        self.assertAlmostEqual(content_actual, content_predicted, 0)
    input()
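# A minimal sketch (added for illustration, not part of the original test) of the
# content_predicted formula above: pi**2/2 is the volume of the unit 4-ball,
# V_n(r) = pi**(n/2) * r**n / gamma(n/2 + 1) with n = 4 and r = 1, and the
# det(self.diag) factor is assumed to be the Jacobian of the linear map that
# carries that unit ball onto the hit shell built from self.diag.
def _sketch_unit_4_ball_volume():
    import math
    n = 4
    # gamma(3) = 2, so this evaluates to pi**2/2, the constant used above
    return math.pi**(n/2.)/math.gamma(n/2. + 1.)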
def test_content_square(self):
    test_bunch = Bunch()
    for x in range(-2, 3):
        for y in range(-2, 3):
            test_hit = Hit.new_from_dict({'x':x, 'y':y})
            test_bunch.append(test_hit)
    limit_bunch = Bunch.new_hit_shell(3, numpy.array([[9.5, 0.], [0., 9.5]]),
                                      ['x', 'y'], '')
    my_weights = VoronoiWeighting(['x', 'y'],
                                  numpy.array([[2., 0.], [0., 1.]]))
    my_weights.apply_weights(test_bunch, False)
    my_weights.plot_two_d_projection(['x', 'y'])
    self.assertEqual(len(my_weights.tile_content_list), len(test_bunch))
    self.assertAlmostEqual(sum(my_weights.tile_content_list), 9., 3)
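# A minimal, self-contained sketch (plain scipy, not the xboa API) of the geometry
# behind the assertion above: in the 5x5 unit-spaced grid only the 3x3 interior
# points have closed Voronoi cells, each a unit square, while the edge and corner
# cells are unbounded and assumed to contribute zero content, so the summed
# content is 9. The helper name is illustrative only.
def _sketch_square_grid_content():
    import numpy
    from scipy.spatial import Voronoi
    points = numpy.array([[x, y] for x in range(-2, 3) for y in range(-2, 3)])
    vor = Voronoi(points)
    total_area = 0.
    for region_index in vor.point_region:
        region = vor.regions[region_index]
        if len(region) == 0 or -1 in region:
            continue # unbounded cell on the grid edge - treated as zero content
        cell = vor.vertices[region]
        # order the (convex) cell vertices by angle about the centroid, then
        # apply the shoelace formula for the polygon area
        centroid = cell.mean(axis=0)
        angles = numpy.arctan2(cell[:, 1]-centroid[1], cell[:, 0]-centroid[0])
        cell = cell[numpy.argsort(angles)]
        x, y = cell[:, 0], cell[:, 1]
        total_area += 0.5*abs(numpy.dot(x, numpy.roll(y, 1)) -
                              numpy.dot(y, numpy.roll(x, 1)))
    return total_area # expected ~9., matching sum(my_weights.tile_content_list)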
def test_apply_weights_bound(self):
    test_bunch = Bunch()
    for i in range(1000):
        x = random.gauss(0., 10.**0.5)
        y = random.gauss(0., 20.**0.5)
        test_hit = Hit.new_from_dict({'x':x, 'y':y})
        test_bunch.append(test_hit)
    limit_ellipse = numpy.zeros([2, 2])
    for i in range(2):
        limit_ellipse[i, i] = 200.
    bound = BoundingEllipse(limit_ellipse, numpy.zeros([2]), 50)
    my_weights = VoronoiWeighting(['x', 'y'],
                                  numpy.array([[20., 0.], [0., 20.]]),
                                  voronoi_bound = bound)
    print("Plotting weights", datetime.datetime.now())
    canvas, hist = test_bunch.root_histogram('x', 'mm', 'y', 'mm')
    hist.Draw("COLZ")
    canvas.Update()
    print('Covariances ["x", "y"] before\n',
          test_bunch.covariance_matrix(['x', 'y']))
    print("Applying weights", datetime.datetime.now())
    my_weights.apply_weights(test_bunch, False)
    print("Plotting tessellation", datetime.datetime.now())
    #my_weights.plot_two_d_projection(['x', 'y'], 'weight')
    my_weights.plot_two_d_projection(['x', 'y'], 'weight')
    my_weights.plot_two_d_projection(['x', 'y'], 'content')
    print("Plotting weights", datetime.datetime.now())
    canvas, hist = test_bunch.root_histogram('x', 'mm', 'y', 'mm')
    hist.Draw("COLZ")
    canvas.Update()
    test_bunch.root_histogram('local_weight', nx_bins=100)
    canvas = common.make_root_canvas('local_weight')
    hist = common.make_root_histogram('local_weight',
                        test_bunch.list_get_hit_variable(['local_weight'])[0],
                        'local_weight', 100)
    hist.Draw()
    canvas.Update()
    canvas = common.make_root_canvas('content')
    hist = common.make_root_histogram('content', my_weights.tile_content_list,
                                      'content', 100)
    hist.Draw()
    canvas.Update()
    print('Covariances ["x", "y"] after\n',
          test_bunch.covariance_matrix(['x', 'y']))
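# A minimal sketch (plain numpy, not the xboa API) of the variance bookkeeping in
# the test above: random.gauss(0., 10.**0.5) and random.gauss(0., 20.**0.5) draw
# with variances 10 and 20, so the "before" covariance matrix should be close to
# diag(10, 20). That the weighting then pulls the covariance towards the 20x20
# diagonal ellipse passed to VoronoiWeighting is an assumption about its intent.
def _sketch_input_covariance(n_events=1000, seed=0):
    import numpy
    rng = numpy.random.default_rng(seed)
    sample = numpy.column_stack([rng.normal(0., 10.**0.5, n_events),
                                 rng.normal(0., 20.**0.5, n_events)])
    return numpy.cov(sample, rowvar=False) # approximately [[10, 0], [0, 20]]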
def system_mem_test(verbose=True):
    # Test memory usage isn't too huge
    # Test bunch cleans up after itself
    # We allocate new memory in this test and then deallocate it. The Python
    # process will grow its buffer, but the buffer should be empty by the end.
    # Then run again - the process should not grow the buffer by much, as on the
    # second iteration we reuse the memory already allocated on the first.
    import gc
    gc.collect() # force a memory cleanup
    mem_usage_b4 = print_mem_usage()
    bunch_list = []
    print("Allocating memory")
    for i in range(3):
        bunch_list.append(Bunch())
        for i in range(10000):
            bunch_list[-1].append(Hit())
            bunch_list[-1][0].get('x')
            bunch_list[-1][0].set('x', 1.)
            bunch_list[-1][0].set('global_weight', 0.5)
            bunch_list[-1][0].get('global_weight')
        # this function was leaking
        bunch_list[-1].moment(['x'])
    Hit.clear_global_weights()
    mem_usage = print_mem_usage()
    print("Cleaning memory")
    bunch = bunch_list[0]
    while len(bunch) > 0:
        del bunch[0]
    bunch_list.remove(bunch_list[0])
    del bunch_list
    Hit.clear_global_weights()
    gc.collect() # force a memory cleanup
    mem_usage = print_mem_usage()
    if verbose:
        print("Memory usage after cleanup in MB (target 0 MB):", mem_usage,
              '(absolute)', mem_usage - mem_usage_b4, '(difference)')
    if mem_usage - mem_usage_b4 > 1000.:
        return "warn" # looks like a memory leak...
    return "pass"
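# A minimal sketch of one way print_mem_usage could be implemented - this is an
# assumption for illustration only; the real helper lives elsewhere in the suite.
# It reports the process's peak resident set size in MB via the POSIX-only
# resource module.
def _sketch_print_mem_usage():
    import resource
    # ru_maxrss is reported in kB on Linux (and in bytes on macOS)
    usage_mb = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024.
    print("Memory usage (MB):", usage_mb)
    return usage_mb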
def test_content_circle(self):
    test_bunch = Bunch()
    n_events = 361
    for theta in range(0, n_events-1):
        x = 2.*math.sin(math.radians(theta))
        y = 2.*math.cos(math.radians(theta))
        test_hit = Hit.new_from_dict({'x':x, 'y':y})
        test_bunch.append(test_hit)
    test_hit = Hit.new_from_dict({'x':0., 'y':0.})
    test_bunch.append(test_hit)
    limit_bunch = Bunch.new_hit_shell(3, numpy.array([[9.5, 0.], [0., 9.5]]),
                                      ['x', 'y'], '')
    my_weights = VoronoiWeighting(['x', 'y'],
                                  numpy.array([[2., 0.], [0., 1.]]))
    my_weights.apply_weights(test_bunch, False)
    my_weights.plot_two_d_projection(['x', 'y'])
    self.assertEqual(len(my_weights.tile_content_list), n_events)
    non_zero_content = [cont for cont in my_weights.tile_content_list \
                        if cont > 1e-6]
    self.assertEqual(len(non_zero_content), 1)
    self.assertAlmostEqual(non_zero_content[0], math.pi, 3)
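# A minimal sketch (added for illustration, not part of the original test) of why
# the single non-zero content above is ~pi: the central point's Voronoi cell is
# bounded by the perpendicular bisectors between the centre and the 360 rim
# points, i.e. a regular 360-gon with apothem 1 (half the radius-2 spacing),
# while every rim cell is unbounded and assumed to carry zero content.
def _sketch_central_tile_area(n_sides=360, apothem=1.):
    import math
    # area of a regular n-gon with apothem a is n*a**2*tan(pi/n), ~pi for n = 360
    return n_sides*apothem**2*math.tan(math.pi/n_sides)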