def gen_req_schedule(receivers, rate, duration_warmup, duration_real, n_contents, alpha):
    """
    Generate a Poisson request schedule: a warmup phase (events carry
    ``log=False``) followed by a measured phase (``log=True``), merged
    into a single schedule.

    Parameters
    ----------
    receivers : list
        List of receivers
    rate : float
        Rate of requests (per receiver)
    duration_warmup : float
        Length of warmup period (run without logging, for cache prepopulation)
    duration_real : float
        Length of measured period (run with logging)
    n_contents : int
        Size of content population
    alpha : float
        Alpha of Zipf content distribution
    """
    content_dist = ZipfDistribution(alpha, n_contents)
    mean_interval = 1.0 / rate  # mean inter-arrival time of the Poisson process

    def event_generator(log):
        # Invoked by fnss once per scheduled event: random receiver,
        # Zipf-distributed content identifier, plus the logging flag.
        return {'receiver': choice(receivers),
                'content': int(content_dist.rand_val()),
                'log': log}

    warmup = fnss.poisson_process_event_schedule(
        mean_interval, 0, duration_warmup, 'ms', event_generator, False)
    measured = fnss.poisson_process_event_schedule(
        mean_interval, duration_warmup, duration_real, 'ms', event_generator, True)
    warmup.add_schedule(measured)
    return warmup
def gen_req_schedule(receivers, rate, duration_warmup, duration_real, n_contents, alpha):
    """
    Generate schedule of requests

    Builds two Poisson event schedules back to back -- a warmup phase whose
    events carry ``log=False`` and a measured phase whose events carry
    ``log=True`` -- and merges them into one schedule.

    Parameters
    ----------
    receivers : list
        List of receivers
    rate : float
        Rate of requests (per receiver)
    duration_warmup : float
        Length of warmup period (run without logging, for cache prepopulation)
    duration_real : float
        Length of measured period (run with logging)
    n_contents : int
        Size of content population
    alpha : float
        Alpha of Zipf content distribution
    """
    zipf = ZipfDistribution(alpha, n_contents)

    def event_generator(log):
        # Called by fnss for every scheduled event: uniform-random receiver,
        # Zipf-distributed content id, plus the phase's logging flag.
        recv = choice(receivers)
        content = int(zipf.rand_val())
        return {'receiver': recv, 'content': content, 'log': log}

    # Warmup phase: starts at t=0, events are not logged (log=False).
    es_warm = fnss.poisson_process_event_schedule(1.0/rate, 0, duration_warmup, 'ms', event_generator, False)
    # Measured phase: starts when warmup ends, events are logged (log=True).
    es_real = fnss.poisson_process_event_schedule(1.0/rate, duration_warmup, duration_real, 'ms', event_generator, True)
    es_warm.add_schedule(es_real)
    return es_warm
def test_poisson_process_event_schedule(self):
    """Verify that a Poisson-process schedule yields events within the
    requested time window and with the expected attributes.

    Uses the specialized unittest assertions (``assertIn``,
    ``assertGreaterEqual``, ``assertLessEqual``) instead of bare
    ``assertTrue`` so that failures report the offending values.
    """
    action = ['read_email', 'watch_video']
    schedule = fnss.poisson_process_event_schedule(15, 0, 8000, 'ms',
                                                   self.event_gen, 0.5,
                                                   action=action)
    self.assertIsNotNone(schedule)
    for time, event in schedule:
        # Every generated event must pick one of the allowed actions and
        # fall inside the [0, 8000] scheduling window.
        self.assertIn(event['action'], action)
        self.assertGreaterEqual(time, 0)
        self.assertLessEqual(time, 8000)
def scenario_simple_test():
    """ Makes simple scenario for test puropses

    Builds a 4-node ring of caches, hangs one leaf off each cache node
    (alternating receivers and sources), generates a Poisson request
    schedule and writes topology and schedule to XML files.
    """
    def gen_event(receivers, contents):
        # One random receiver requesting one random content item per event.
        return {'receiver': choice(receivers), 'content': choice(contents)}

    contents = {5: [1, 2, 3, 4], 7: [5, 6, 7, 8]}
    n_caches = 4
    size = 5
    topology = fnss.ring_topology(n_caches)
    for cache_node in range(n_caches):
        leaf = cache_node + n_caches
        topology.add_edge(cache_node, leaf)
        fnss.add_stack(topology, cache_node, 'cache', {'size': size})
        # Even-index caches get a receiver leaf, odd-index caches a source leaf.
        if cache_node % 2:
            fnss.add_stack(topology, leaf, 'source', {'contents': contents[leaf]})
        else:
            fnss.add_stack(topology, leaf, 'receiver', {})
    event_schedule = fnss.poisson_process_event_schedule(
        20, 0, 300, 'ms', gen_event, [4, 6], range(1, 9))
    fnss.write_topology(topology, path.join(scenarios_dir, 'TOPO_TEST.xml'))
    fnss.write_event_schedule(event_schedule, path.join(scenarios_dir, 'ES_TEST.xml'))
# Nodes tagged 'right_bell' act as traffic sources; 'left_bell' nodes
# (computed earlier as left_nodes) act as receivers.
right_nodes = [n for n in node_types if node_types[n] == 'right_bell']

for node in left_nodes:
    fnss.add_application(topology, node, 'receiver', {})
for node in right_nodes:
    fnss.add_application(topology, node, 'source', {})

# Event generator: one random (source, receiver) pair per event.
def rand_request(source_nodes, receiver_nodes):
    return {'source': random.choice(source_nodes),
            'receiver': random.choice(receiver_nodes)}

event_schedule = fnss.poisson_process_event_schedule(
    avg_interval=50,               # one event every 50 ms on average
    t_start=0,                     # schedule starts at t=0
    duration=10 * 1000,            # 10 seconds, expressed in ms
    t_unit='ms',                   # time unit: milliseconds
    event_generator=rand_request,  # event gen function
    source_nodes=right_nodes,      # forwarded to rand_request
    receiver_nodes=left_nodes      # forwarded to rand_request
)

# Persist the scenario to XML
fnss.write_topology(topology, 'topology.xml')
fnss.write_event_schedule(event_schedule, 'event_schedule.xml')
# Static traffic matrix: every node is both an origin and a destination
# of traffic.
traffic_matrix = fnss.static_traffic_matrix(topology, mean=2, stddev=0.2,
                                            max_u=0.5)

# Event generator: fail one randomly chosen link.
def rand_failure(links):
    return {'link': random.choice(links), 'action': 'down'}

# Schedule of link-failure events.
event_schedule = fnss.poisson_process_event_schedule(
    avg_interval=0.5,              # mean 0.5 min = 30 sec between failures
    t_start=0,                     # schedule starts at t=0
    duration=60,                   # 60 min -- NOTE(review): original comment
                                   # said "2 hours"; with t_unit='min' this is
                                   # 1 hour -- confirm intended duration
    t_unit='min',                  # time unit: minutes
    event_generator=rand_failure,  # event gen function
    links=topology.edges(),        # forwarded to rand_failure
)

# Matching link-restoration schedule: each failure is repaired after an
# exponentially distributed delay with mean 1 minute.
restore_schedule = fnss.EventSchedule(t_start=0, t_unit='min')
for t_fail, failure in event_schedule:
    restore_schedule.add(time=t_fail + random.expovariate(1),
                         event={'link': failure['link'], 'action': 'up'},
                         absolute_time=True
                         )
# NOTE(review): this chunk begins mid-statement -- the opening of the list
# comprehension (presumably "right_nodes = [nodes for nodes in node_types")
# lies outside the visible region; confirm against the full file.
                                 if node_types[nodes] == 'right_bell']

# Left-bell nodes act as receivers, right-bell nodes as traffic sources.
for node in left_nodes:
    fnss.add_application(topology, node, 'receiver', {})
for node in right_nodes:
    fnss.add_application(topology, node, 'source', {})

# now create a function that generate events
def rand_request(source_nodes, receiver_nodes):
    # One random (source, receiver) pair per generated event.
    source = random.choice(source_nodes)
    receiver = random.choice(receiver_nodes)
    return {'source': source, 'receiver': receiver}

event_schedule = fnss.poisson_process_event_schedule(
    avg_interval=50,                 # 50 ms
    t_start=0,                       # starts at 0
    duration= 10*1000,               # 10 sec
    t_unit='ms',                     # milliseconds
    event_generator=rand_request,    # event gen function
    source_nodes=right_nodes,        # rand_request argument
    receiver_nodes=left_nodes        # rand_request argument
)

# Write topology and event schedule to files
fnss.write_topology(topology, 'topology.xml')
fnss.write_event_schedule(event_schedule, 'event_schedule.xml')
# NOTE(review): this chunk starts mid-call -- these look like the trailing
# keyword arguments of fnss.static_traffic_matrix; confirm upstream.
                                   mean=2, stddev=0.2, max_u=0.5)

# This is the event generator function, which generates link failure events
def rand_failure(links):
    # Fail one randomly chosen link.
    link = random.choice(links)
    return {'link': link, 'action': 'down'}

# Create schedule of link failures
event_schedule = fnss.poisson_process_event_schedule(
    avg_interval=0.5,              # 0.5 min = 30 sec
    t_start=0,                     # starts at 0
    duration=60,                   # 2 hours -- NOTE(review): 60 with
                                   # t_unit='min' is 1 hour, not 2; confirm
                                   # whether the value or the comment is wrong
    t_unit='min',                  # minutes
    event_generator=rand_failure,  # event gen function
    links=topology.edges(),        # 'links' argument
)

# Now let's create a schedule with link restoration events
# We assume that the duration of a failure is exponentially distributed with
# average 1 minute.
restore_schedule = fnss.EventSchedule(t_start=0, t_unit='min')
for failure_time, event in event_schedule:
    link = event['link']
    restore_time = failure_time + random.expovariate(1)
    restore_schedule.add(time=restore_time,
                         event={
                             'link': link,
                             'action': 'up'
# NOTE(review): chunk ends mid-call -- the closing brace/parenthesis and any
# remaining arguments are outside the visible region.
def rand_mobility(nodes):
    # Event generator: pick one random node per event.
    node = random.choice(nodes)
    return {'node': node}

def rand_request(source_nodes, receiver_nodes):
    # Event generator: pick one random (source, receiver) pair per event.
    source = random.choice(source_nodes)
    receiver = random.choice(receiver_nodes)
    return {'source': source, 'receiver': receiver}

event_schedule = fnss.poisson_process_event_schedule(
    avg_interval=5,
    t_start=0,
    duration=5000,
    t_unit='ms',
    # NOTE(review): rand_failure is not defined anywhere in this chunk --
    # confirm it is defined earlier in the file, or whether rand_request /
    # rand_mobility (defined above but unused here) was intended instead.
    event_generator=rand_failure,      # event gen function
    links=list(topology.edges()),      # 'links' argument
)

# Partition nodes by the 'type' attribute stored on the topology.
node_types = nx.get_node_attributes(topology, 'type')
drones = [nodes for nodes in node_types if node_types[nodes] == 'host']
cloud = [0]  # NOTE(review): node 0 is hard-coded as the cloud -- confirm
edges = [
    nodes for nodes in node_types
    if node_types[nodes] == 'switch' and nodes != 0
]

event_schedule_mob = fnss.poisson_process_event_schedule(
    avg_interval=5,
# NOTE(review): chunk ends mid-call; the remaining arguments of this
# poisson_process_event_schedule call are outside the visible region.