def start_simulation(ndict, cdict, sink_node):
    """Build and launch the node/sink threads and run one transmission.

    Parameters:
        ndict: mapping of node id -> neighbor information, used both to
            enumerate the nodes and passed to every Node/Sink thread.
        cdict: mapping of node id -> per-node configuration.
        sink_node: id of the sink node.

    Blocks until ``transmission_done_event`` is set by the sink. Returns None.
    """
    # Truncate the shared packet log before the run starts.
    lrec = LogRecord('packetlogger.txt')
    lrec.truncate_log()
    beginner_node = 1
    queues = Queues()

    # Events coordinating the simulation phases across all threads.
    dissipation_event = threading.Event()
    transmission_event = threading.Event()
    transmission_done_event = threading.Event()

    # Create one Node thread per node id, kept in a plain dict instead of
    # exec-generated variable names (Thread_<n>).
    # BUG FIX: the original iterated `neighbor_dict`, which is not the
    # parameter name — the parameter is `ndict`.
    threads = {}
    bthread = None
    try:
        for nod in ndict:
            if nod == beginner_node:
                # The beginner node gets the extra True flag.
                bthread = node.Node(beginner_node, cdict[beginner_node],
                                    ndict, queues, dissipation_event,
                                    transmission_event, True)
            else:
                threads[nod] = node.Node(nod, cdict[nod], ndict, queues,
                                         dissipation_event,
                                         transmission_event)
    except Exception:
        # Best-effort, matching the original behavior: report and continue.
        print("Thread already started")

    # Level-order grouping of nodes reachable from the beginner; deduplicate
    # each level's node list.
    node_level_dict = thread_start_order(ndict, beginner_node)
    for keyy in node_level_dict:
        node_level_dict[keyy] = list(set(node_level_dict[keyy]))

    # Start the beginner first, then the remaining levels in order.
    bthread.start()
    time.sleep(1)

    wait_constant = 0.2
    node_level_dict.pop(0)  # level 0 is the beginner, already running
    for key in node_level_dict:
        for every_node in node_level_dict[key]:
            try:
                th = threads[every_node]
                # is_alive() replaces the deprecated isAlive() (removed
                # in Python 3.9); avoids double-starting a thread.
                if not th.is_alive():
                    th.start()
            except Exception:
                print("Thread already started")
        # Stagger level start-up proportionally to the level depth.
        time.sleep(wait_constant * key)

    # Finally start the sink and kick off the transmission.
    sthread = sink.Sink(sink_node, cdict[sink_node], ndict, queues,
                        dissipation_event, transmission_event,
                        transmission_done_event)
    print("Starting the sink node")
    sthread.start()
    start_transmission(queues, transmission_event)
    # Wait until the sink signals that the transmission has completed.
    transmission_done_event.wait()
    return
def begin(publish, name, context, issue):
    """Start publishing a task run, optionally marking a GitHub issue WIP.

    Parameters:
        publish: destination handed to sink.Sink; falsy disables publishing.
        name: task name, used in the identifier and issue comments.
        context: optional extra context echoed to stdout.
        issue: optional GitHub issue dict (needs "number" and "title");
            when given, the issue title is prefixed "WIP:" and a progress
            comment is posted, with an abort handler that restores it.

    Returns the started sink.Sink, or None when publishing is disabled.
    """
    if not publish:
        return None

    hostname = socket.gethostname().split(".")[0]
    # BUG FIX: original format was '%Y%m%d-%H%M%M' (minutes twice), so two
    # runs in the same minute produced identical identifiers; use seconds.
    current = time.strftime('%Y%m%d-%H%M%S')

    # Update the body for an existing issue
    if issue:
        number = issue["number"]
        identifier = "{0}-{1}-{2}".format(name, number, current)
        title = issue["title"]
        wip = "WIP: {0}: {1}".format(hostname, title)

        # Requests performed when the task starts: retitle the issue as
        # WIP and leave a progress comment pointing at the log.
        requests = [{
            "method": "POST",
            "resource": api.qualify("issues/{0}".format(number)),
            "data": {"title": wip}
        }, {
            "method": "POST",
            "resource": api.qualify("issues/{0}/comments".format(number)),
            "data": {
                "body": "{0} in progress on {1}.\nLog: :link".format(name, hostname)
            }
        }]

        # Invariant watched while the task runs: the WIP title must remain.
        watches = [{
            "resource": api.qualify("issues/{0}".format(number)),
            "result": {"title": wip}
        }]

        # Rollback requests if the task aborts: restore the original title
        # and note the abort on the issue.
        aborted = [{
            "method": "POST",
            "resource": api.qualify("issues/{0}".format(number)),
            "data": {"title": title}
        }, {
            "method": "POST",
            "resource": api.qualify("issues/{0}/comments".format(number)),
            "data": {"body": "Task aborted."}
        }]
    else:
        identifier = "{0}-{1}".format(name, current)
        requests = []
        watches = []
        aborted = []

    status = {
        "github": {
            "token": api.token,
            "requests": requests,
            "watches": watches
        },
        "onaborted": {
            "github": {
                "token": api.token,
                "requests": aborted
            }
        }
    }

    publishing = sink.Sink(publish, identifier, status)
    sys.stdout.write("# Task: {0} {1}\n# Host: {2}\n\n".format(
        name, context or "", hostname))

    # For statistics
    publishing.start = time.time()
    return publishing