import unittest

# Graph, Node, and Simulation come from the project's simulation module.


class TestSimulation(unittest.TestCase):

    def setUp(self):
        self.test_graph = Graph()
        self.test_node_0 = Node(0, 0.1, 1, 1, 1, 1, 1, 1)
        self.test_node_1 = Node(1, 0.2, 2, 2, 2, 2, 2, 2)
        self.test_node_2 = Node(2, 0.3, 1, 2, 3, 4, 5, 6)
        self.test_graph.add_vertex(self.test_node_0)
        self.test_graph.add_vertex(self.test_node_1)
        self.test_graph.add_vertex(self.test_node_2)
        self.test_graph.add_edge(self.test_node_0, self.test_node_1, 30)
        self.test_graph.add_edge(self.test_node_1, self.test_node_2, 50)
        self.test_sim = Simulation()
        self.test_sim.load_graph(self.test_graph)

    def test_spread(self):
        res1 = min(1, Simulation.sigmoid(30) * 0.15)
        res2 = min(1, Simulation.sigmoid(60) * 0.25 * 1.2)
        self.assertAlmostEqual(
            Simulation.spread(self.test_node_0, self.test_node_1), res1)
        self.assertAlmostEqual(
            Simulation.spread(self.test_node_1, self.test_node_2), res2)

    def test_calc_new_score(self):
        res = (Simulation.spread(self.test_node_0, self.test_node_1) +
               Simulation.spread(self.test_node_2, self.test_node_1)) / 2
        self.assertAlmostEqual(self.test_sim.calc_new_score(self.test_node_1), res)

    def test_run_one_timestep(self):
        # Smoke test: prints the scores before and after one timestep for
        # manual inspection; no assertion is made here.
        print(self.test_sim.graph.get_scores())
        self.test_sim.run_one_timestep()
        print(self.test_sim.graph.get_scores())
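If this test class sits in a standalone test module, a standard unittest entry point is enough to run it directly; this assumes no other test runner is in use.

if __name__ == '__main__':
    unittest.main()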
def decisionTimes(N, NN, MCS, plotting=False):
    # TODO: consider passing a Simulation object instead of its constructor arguments.
    sim = Simulation(N, NN, MCS, plotting)
    sim.run()
    # Flatten the recorded decision times into 1-D arrays.
    dt = np.asarray(list(sim.dT)).ravel()
    adt = np.asarray(list(sim.adT)).ravel()
    return dt, adt
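A possible way to exercise decisionTimes and look at the resulting distribution. The parameter values mirror those used in decisionProbas below and are purely illustrative; the Simulation constructor signature is taken from the function above.

import numpy as np
import matplotlib.pyplot as plt

# Illustrative parameters only (same order of magnitude as decisionProbas below).
dt, adt = decisionTimes(N=500, NN=10000, MCS=2000000)

plt.hist(dt, bins=50, log=True)
plt.xlabel('decision time')
plt.ylabel('count (log scale)')
plt.title('Distribution of decision times')
plt.show()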
def probas(N, NN, MCS, switch_proba):
    cB = 0
    sim = Simulation(N, NN, MCS, cB=cB, clusterd=True, random=False,
                     plotting=False, switch_proba=switch_proba)
    A, B, C, M = sim.run()

    plt.figure()
    plt.title('Magnetization with noise p=%g' % (switch_proba))
    plt.xlabel('Monte Carlo Cycles')
    plt.ylabel('Magnetization m')
    plt.plot(range(len(M)), M)
    plt.ylim([-1.05, 1.05])
    plt.ticklabel_format(axis='x', style='sci', scilimits=[0, 0])
    plt.show()
def decisionProbas():
    cB = 0
    switch_probas = [2e-4, 2e-3, 2e-2, 2e-1]
    for i in switch_probas:
        sim = Simulation(500, 10000, 2000000, cB=cB, clusterd=True,
                         random=False, plotting=False, switch_proba=i)
        sim.run()
        adt = np.asarray(sim.adT)
        plt.loglog(range(len(adt)), 3 / adt, '+', label='p: %g' % (i))

    plt.xlabel(r'$\tau$')
    plt.ylabel(r'$P(\tau)$')
    plt.title('Decision times for different noise levels p')
    plt.legend()
    plt.show()
def partABC(N, NN, MCS, plotting=True):
    sim = Simulation(N, NN, MCS, plotting=plotting)
    A, B, C, M = sim.run()
    M = np.asarray(M)

    plt.figure()
    plt.plot(range(len(M)), M)
    plt.xlabel('Monte Carlo Cycles')
    plt.ylabel('Magnetization m')
    plt.title('Magnetization over Monte Carlo cycles')
    plt.ylim([-1.05, 1.05])
    plt.ticklabel_format(axis='x', style='sci', scilimits=[0, 0])
    plt.show()

    dtlen = int(len(M) / 2)
    dts = np.linspace(1, dtlen, dtlen)
    g = G(dts, M)

    plt.figure()
    plt.plot(dts, g)
    plt.title('Autocorrelation')
    plt.xlabel(r'$\Delta t$')
    plt.ylabel(r'$G(\Delta t)$')
    plt.ticklabel_format(axis='x', style='sci', scilimits=[0, 0])
    plt.show()
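G is defined elsewhere in the project and is not shown here. For orientation only, this is a minimal sketch of what a time-displaced autocorrelation estimator over a set of integer lags typically looks like; the name autocorrelation and the exact normalization are assumptions, not the project's G.

def autocorrelation(dts, M):
    # Hedged sketch, not the project's G: estimates
    # G(dt) = <m(t) m(t+dt)> - <m(t)> <m(t+dt)>, averaged over t, for dt >= 1.
    M = np.asarray(M, dtype=float)
    g = np.empty(len(dts))
    for k, dt in enumerate(np.asarray(dts, dtype=int)):
        a, b = M[:-dt], M[dt:]
        g[k] = np.mean(a * b) - np.mean(a) * np.mean(b)
    return g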
def clustered(cB, N, NN, MCS, random=False, plotting=False):
    sim = Simulation(N, NN, MCS, cB, clusterd=True, random=random,
                     plotting=plotting)
    A, B, C, M = sim.run()

    plt.figure()
    plt.plot(range(len(M)), M)
    plt.xlabel('Monte Carlo Cycles')
    plt.ylabel('Magnetization m')
    plt.ylim([-1.05, 1.05])
    plt.ticklabel_format(axis='x', style='sci', scilimits=[0, 0])
    plt.show()

    dt = np.asarray(sim.dT)
    adt = np.asarray(sim.adT)
    return dt, adt
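A possible invocation of clustered; cB=0 and the other values echo parameters used elsewhere in these scripts and are illustrative only.

dt, adt = clustered(cB=0, N=500, NN=10000, MCS=2000000)
print('recorded decision times:', len(dt))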
print("f eval:", f1) f2 = sum([f(d2, _alpha, _C) for _C, _alpha in zip(C, alpha)]) print("f eval:", f2) N, upper_bound = int(sys.argv[1]), str_to_bool(sys.argv[2]) print(N, upper_bound) y = 2 T = y * N Sigma, tau = SLR_cov(N, seed=3823) np.random.seed(4328) sim = Simulation(Sigma, T) X, Sigma, S, lam, U = sim.X, sim.Sigma, sim.S, sim.lam, sim.U K = 10 if K == 1: alpha = [U.T.dot(np.ones(N))] C = [U.T.dot(Sigma).dot(U)] else: m = int(T / K) C = [] alpha = [] for k in range(K): k_set = list(range(k * m, (k + 1) * m)) X_k = X[k_set, :]
def demo(N, y, cov_fun_kwargs, loo, K, ylim, figsize, seed, trace, upper_bound):
    """Simple demo showing the results of the various shrinkage methods"""
    T = y * N
    cov_fun, cov_kwargs = cov_fun_kwargs
    Sigma, tau = cov_functions[cov_fun](N, seed=seed, **cov_kwargs)
    np.random.seed(seed)
    sim = Simulation(Sigma, T)

    fig, (ax0, ax1) = plt.subplots(figsize=figsize, ncols=2)
    # ax0.plot(annualize_vol(tau / N), label='true')
    # ax1.plot(annualize_vol(tau / N), label='true')
    # ax0.plot(annualize_vol(lam / N), label='sample')
    # ax1.plot(annualize_vol(lam / N), label='sample')

    # Oracle LW NLS shrinkage
    # d_lw_oracle = nls_oracle(sim)
    # d_isolw_oracle = nls_oracle(sim, isotonic=True)
    # ax0.plot(annualize_vol(d_lw_oracle / N), label='lw oracle')
    # ax1.plot(annualize_vol(d_isolw_oracle / N), label='lw iso oracle')

    # # LW NLS shrinkage
    # S_lw = nlshrink_covariance(X, centered=True)
    # d_lw = eig(S_lw, return_eigenvectors=False)
    # ax1.plot(annualize_vol(d_lw / N), label='lw')

    # if loo:
    #     # LOO LW NLS shrinkage
    #     _, d_loo = nls_loo_cv(X, S, U)
    #     d_isoloo = isotonic_regression(d_loo)
    #     ax0.plot(annualize_vol(d_loo / N), label='noisy-loo')
    #     ax1.plot(annualize_vol(d_isoloo / N), label='isoloo')

    # K-fold LW NLS shrinkage
    # d_lw_loo = nls_loo(sim)
    # d_lw_isoloo = nls_loo(sim, isotonic=True)
    # ax0.plot(annualize_vol(d_lw_loo / N), label='lw_kfold')
    # ax1.plot(annualize_vol(d_lw_isoloo / N), label='lw_isoloo')
    d_lw_kfold = nls_kfold(sim, K)
    d_lw_isokfold = nls_kfold(sim, K, isotonic=True)
    ax0.plot(annualize_vol(d_lw_kfold / N), label='lw_kfold')
    ax1.plot(annualize_vol(d_lw_isokfold / N), label='lw_isokfold')

    # MinVar NLS shrinkage
    d_mv_oracle = minvar_oracle(sim, monotonicity=None, trace=trace,
                                upper_bound=upper_bound)
    d_mv_mono_oracle = minvar_oracle(sim, monotonicity='constraint', trace=trace,
                                     upper_bound=upper_bound)
    d_mv_iso_oracle = minvar_oracle(sim, monotonicity='isotonic', trace=trace,
                                    upper_bound=upper_bound)
    ax0.plot(annualize_vol(d_mv_oracle / N), label='mv_oracle')
    ax1.plot(annualize_vol(d_mv_mono_oracle / N), label='mv_mono_oracle')
    ax1.plot(annualize_vol(d_mv_iso_oracle / N), label='mv_iso_oracle')

    d_mv_loo = minvar_loo(sim, monotonicity=None, trace=trace,
                          upper_bound=upper_bound)
    d_mv_mono_loo = minvar_loo(sim, monotonicity='constraint', trace=trace,
                               upper_bound=upper_bound)
    d_mv_iso_loo = minvar_loo(sim, monotonicity='isotonic', trace=trace,
                              upper_bound=upper_bound)
    ax0.plot(annualize_vol(d_mv_loo / N), label='mv_loo')
    ax1.plot(annualize_vol(d_mv_mono_loo / N), label='mv_mono_loo')
    ax1.plot(annualize_vol(d_mv_iso_loo / N), label='mv_iso_loo')

    ax0.legend()
    ax1.legend()
    # ax0.set_ylim(*ylim)
    # ax1.set_ylim(*ylim)
    plt.show()
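A possible call to demo. The y, K, and seed values match those used in the script fragment above; the cov_functions key ('slr') and the remaining argument values are assumptions made up for illustration.

# Illustrative only: 'slr' as a cov_functions key is an assumption.
demo(N=100, y=2, cov_fun_kwargs=('slr', {}), loo=False, K=10,
     ylim=(0.0, 0.5), figsize=(12, 5), seed=3823, trace=False, upper_bound=True)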
from random import randint

from classes import Simulation

sim = Simulation()
for i in range(10):
    sim.spawnForager()
for i in range(50):
    sim.spawnFood()

sim.running = True
while sim.running:
    # Every 25 frames, let each forager sense its surroundings and pick a
    # random rotation.
    if sim.frame_num % 25 == 0:
        for forager in sim.foragers:
            sensed = forager.sense(sim.food, sim.spawn)
            if sensed['carrying_food']:
                print(sensed)
            forager.rotatingCW = randint(0, 1) == 0
            forager.rotatingCCW = randint(0, 1) == 0
            # forager.moving = randint(0, 1) == 0
    # time.sleep(1.0 / 33)
    sim.nextFrame()
["Database 1A.csv", "triadic_closure/Edge_Table_Triadic_Closure.csv", "gephi_output_triadic_closure.csv"] ] NODE_TABLE_INPUT_FILE, EDGE_TABLE_INPUT_FILE, GEPHI_OUTPUT_FILE = SETTINGS[SCENARIO] bad_guys = [6, 160, 51, 178] number_of_timesteps = 101 good_guys = [8, 50, 32, 63, 45, 109, 167, 86] good_guys_enter_timestep = 5 nodes_to_remove = [65, 110] # 176 reserved for what-if to tip the balance node_remove_timestep = 15 test_sim = Simulation( bad_guys=bad_guys, good_guys=good_guys, good_guys_enter_timestep=good_guys_enter_timestep, ) test_sim.load_vertices_from_file(NODE_TABLE_INPUT_FILE, bad_guys) test_sim.load_edges_from_file(EDGE_TABLE_INPUT_FILE) for i in range(number_of_timesteps): # for the first timestep, do nothing except log intial scores if i > 0: score_dict = test_sim.graph.get_scores() if i == good_guys_enter_timestep: test_sim.graph.set_good_guys(good_guys) # good guys remove selected nodes at timestep indicated at node_remove_timestep if i == node_remove_timestep: for v in nodes_to_remove: