def run(self, n=40, plot=True):
    """Classify the first *n* events with one classifier per bin count and
    accumulate the per-bin result triples.

    Parameters
    ----------
    n : int
        Number of event files ("velojson_5k/<i>.json") to process.
    plot : bool
        When True, show a bar chart of v[2] / v[0] for each bin count.

    Returns
    -------
    dict
        Maps bin count -> accumulated [a, b, c] triple (component
        semantics defined by classifier.classify — TODO confirm).
    """
    classifiers = []
    results = {}
    # NOTE: the original loop variable shadowed the builtin `bin`; renamed.
    for n_bins in range(self.min_bins, self.max_bins + 1):
        classifiers.append(classifier(n_bins))
        results[n_bins] = [0, 0, 0]
    for i in range(n):
        path = "velojson_5k/" + str(i) + ".json"
        print(path)
        # Context manager guarantees the file is closed even on error.
        with open(path) as f:
            json_data = json.loads(f.read())
        event = em.event(json_data)
        for m in range(0, self.max_bins - self.min_bins + 1):
            res = classifiers[m].classify(event)
            results[self.min_bins + m] = [
                x + y for x, y in zip(results[self.min_bins + m], res)
            ]
    if plot:
        # Ratio of third to first accumulated component per bin count.
        p_results = {k: (v[2] / v[0]) for k, v in results.items()}
        print(p_results)
        plt.bar(list(p_results.keys()), p_results.values())
        plt.show()
    return results
def get_events(self, j=0, n=20):
    """Load and return events j..n-1 from ../../velojson_5k/<i>.json.

    Parameters
    ----------
    j : int
        Index of the first event file to read (inclusive).
    n : int
        Index one past the last event file to read (exclusive).

    Returns
    -------
    list
        em.event objects in file-index order.
    """
    events = []
    for i in range(j, n):
        # Context manager replaces the manual open/close pair.
        with open("../../velojson_5k/" + str(i) + ".json") as f:
            json_data = json.loads(f.read())
        events.append(em.event(json_data))
    return events
def simple_event_detection(data, model, alpha=0.001):
    """Iteratively grow an event model from *data*.

    Repeatedly fits residuals, picks the strongest candidate event, and
    adds it to the model; stops once the candidate's absolute value drops
    below the critical value for significance level *alpha*.

    Returns the fitted event model with all accepted events attached.
    """
    fitted = event_model.event_model(data, model)
    threshold = critical_value(alpha)
    dates = data['date']
    significant = True
    while significant:
        idx, val = candidate_event(fitted.residuals(data))
        # Accept the candidate only while it clears the critical value.
        significant = abs(val) >= threshold
        if significant:
            accepted = event_model.event(dates[idx])
            accepted.value = val
            accepted.index = idx
            fitted.add_event(accepted)
    return fitted
def next_event(data, model, alpha):
    """Find the single most significant CUSUM candidate event in *data*.

    Fits *model*, takes the CUSUM-lambda statistic of its residuals, and
    builds an event at the index of the statistic's largest magnitude.

    Returns a (is_significant, event) pair, where is_significant tells
    whether the peak exceeds the critical value for *alpha*.
    """
    fitted = model(data)
    stat = cusum_lambda(fitted.residuals(data))
    peak = np.abs(stat).argmax()
    significance = stat[peak]
    candidate = event_model.event(data[peak]['date'])
    candidate.significance = significance
    candidate.index = peak
    if np.abs(significance) > critical_value(alpha):
        return True, candidate
    return False, candidate
def prueba_1(n=5000, eta=6, bins=100):
    """Scan the first *n* velojson files and accumulate the pseudorapidity
    distribution of all hits.

    Parameters
    ----------
    n : int
        Number of event files ("velojson_5k/<i>.json") to read.
    eta : float
        The histogram covers the range [-eta, eta].
    bins : int
        Number of histogram bins.

    Returns
    -------
    tuple
        (hist, bin_edges): the sum of per-event density histograms and
        the shared bin edges.
    """
    bin_edges = np.linspace(start=-eta, stop=eta, num=bins + 1)
    hist = np.zeros(bins)
    for i in range(n):
        # Context manager guarantees the file is closed even on error.
        with open("velojson_5k/" + str(i) + ".json") as f:
            json_data = json.loads(f.read())
        event = em.event(json_data)
        hit_ps = []
        for h in event.hits:
            # theta is the phase angle of the hit's transformed complex
            # coordinate; pseudorapidity = -ln(tan(theta / 2)).
            theta = cm.phase(hit_ht(h).complex_ht)
            hit_ps.append(-m.log(m.tan(theta / 2)))
        # density=True (was density=1) normalizes each event's histogram.
        hist += np.histogram(hit_ps, bins=bin_edges, density=True)[0]
    print((hist, bin_edges))
    return (hist, bin_edges)
#!/usr/bin/python3
import event_model as em
import validator_lite as vl
import json

# Solvers
from graph_dfs import graph_dfs
from classical_solver import classical_solver

solutions = {}

# Get an event; the context manager closes the file automatically.
with open("velojson/23.json") as f:
    json_data = json.loads(f.read())
event = em.event(json_data)

# Solve with the classic method
classical = classical_solver()
solutions["classic"] = classical.solve(event)

# Solve with the DFS method
dfs = graph_dfs()
solutions["dfs"] = dfs.solve(event)
print(solutions["dfs"])

# Validate the solutions (redundant iter() around sorted() removed)
for k, v in sorted(solutions.items()):
    print("%s method validation" % (k))
    vl.validate_print([json_data], [v])
    print()
from classical_solver import classical_solver
import event_model as em
import validator_lite as vl
import json

# Get an event; the context manager closes the file automatically.
with open("velojson/0.json") as f:
    json_data = json.loads(f.read())
event = em.event(json_data)

# Get all tracks by using the classical method and print them
print("Invoking classical solver...")
classical = classical_solver()
classical_tracks = classical.solve(event)
print("Found", len(classical_tracks), "tracks")

# Validate the event
vl.validate_print([json_data], [classical_tracks])
print('RE long>5GeV, [0-1]:', vl.validate_efficiency([json_data], [classical_tracks], 'long>5GeV'))
print('CF long>5GeV, [0-1]:', vl.validate_clone_fraction([json_data], [classical_tracks], 'long>5GeV'))
print('GF of all tracks, [0-1]:', vl.validate_ghost_fraction([json_data], [classical_tracks]))