    def test_parse_slice(self):
        self.assertEqual(r2lab_parse_slice('inria_foo'),
                         ('inria_foo', 'faraday.inria.fr'))
        self.assertEqual(r2lab_parse_slice('*****@*****.**'),
                         ('inria_foo', 'faraday.inria.fr'))
        self.assertEqual(r2lab_parse_slice('*****@*****.**'),
                         ('inria_foo', 'etourdi.pl.sophia.inria.fr'))
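    # For reference, the assertions above pin down the contract of
    # r2lab_parse_slice: split 'user@host' into (user, host), and default
    # the host to faraday.inria.fr when no '@' is present. A minimal
    # illustrative re-implementation (not the real one, which ships with
    # the r2lab package) could read:
    #
    #     def parse_slice_sketch(slice_spec):
    #         if '@' in slice_spec:
    #             user, hostname = slice_spec.split('@', 1)
    #             return user, hostname
    #         return slice_spec, 'faraday.inria.fr'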
def run(*,
        # the pieces to use
        slice, hss, epc, enb, phones,
        e3372_ues, oai_ues, gnuradios,
        e3372_ue_xterms, oai_ue_xterms, gnuradio_xterms,
        # boolean flags
        load_nodes, skip_reset_usb, oscillo,
        # the images to load
        image_gw, image_enb, image_oai_ue,
        image_e3372_ue, image_gnuradio,
        # miscellaneous
        n_rb, verbose, dry_run):
    """
    ##########
    # 3 methods to get nodes ready
    # (*) load images
    # (*) reset nodes that are known to have the right image
    # (*) do nothing, proceed to experiment

    expects e.g.
    * slice : something like [email protected]
    * hss : 04
    * epc : 03
    * enb : 23
    * phones: list of indices of phones to use
    * e3372_ues : list of nodes to use as a UE using e3372
    * oai_ues : list of nodes to use as a UE using OAI
    * gnuradios : list of nodes to load with a gnuradio image
    * image_* : the name of the images to load on the various nodes

    Plus
    * load_nodes: whether to load images or not - in which case
      image_gw, image_enb and image_* are used to tell the image names
    * skip_reset_usb : the USRP board will be reset as well unless this is set
    """

    # what argparse knows as a slice actually is a gateway (user + host)
    gwuser, gwhost = r2lab_parse_slice(slice)
    gwnode = SshNode(hostname=gwhost, username=gwuser,
                     formatter=TimeColonFormatter(verbose=verbose),
                     debug=verbose)

    hostnames = hssname, epcname, enbname = [
        r2lab_hostname(x) for x in (hss, epc, enb)
    ]

    optional_ids = e3372_ues + oai_ues + gnuradios + \
        e3372_ue_xterms + oai_ue_xterms + gnuradio_xterms

    hssnode, epcnode, enbnode = [
        SshNode(gateway=gwnode, hostname=hostname, username='******',
                formatter=TimeColonFormatter(verbose=verbose),
                debug=verbose)
        for hostname in hostnames
    ]

    sched = Scheduler(verbose=verbose)

    ########## preparation
    job_check_for_lease = SshJob(
        node=gwnode,
        command=["rhubarbe", "leases", "--check"],
        label="check we have a current lease",
        scheduler=sched,
    )

    # turn off all nodes
    turn_off_command = ["rhubarbe", "off", "-a"]
    # except our 3 nodes and the optional ones
    turn_off_command += ["~{}".format(x)
                         for x in [hss, epc, enb] + optional_ids]

    # only do the turn-off thing if load_nodes
    if load_nodes:
        job_off_nodes = SshJob(
            node=gwnode,
            # switch off all nodes but the ones we use
            command=turn_off_command,
            label="turn off unused nodes",
            required=job_check_for_lease,
            scheduler=sched,
        )

    # actually run this in the gateway, not on the macphone
    # the ssh keys are stored in the gateway and we do not yet have
    # the tools to leverage such remote keys
    job_stop_phones = [
        SshJob(
            node=gwnode,
            command=RunScript(
                # script
                find_local_embedded_script("faraday.sh"),
                # arguments
                "macphone{}".format(id),
                "r2lab-embedded/shell/macphone.sh",
                "phone-off",
                # options
                includes=includes),
            label="put phone{} in airplane mode".format(id),
            required=job_check_for_lease,
            scheduler=sched,
        )
        for id in phones
    ]

    ########## prepare the image-loading phase
    # this will be a dict of items imagename -> ids
    to_load = defaultdict(list)
    to_load[image_gw] += [hss, epc]
    to_load[image_enb] += [enb]
    if e3372_ues:
        to_load[image_e3372_ue] += e3372_ues
    if e3372_ue_xterms:
        to_load[image_e3372_ue] += e3372_ue_xterms
    if oai_ues:
        to_load[image_oai_ue] += oai_ues
    if oai_ue_xterms:
        to_load[image_oai_ue] += oai_ue_xterms
    if gnuradios:
        to_load[image_gnuradio] += gnuradios
    if gnuradio_xterms:
        to_load[image_gnuradio] += gnuradio_xterms

    prep_job_by_node = {}
    for image, nodes in to_load.items():
        commands = []
        if load_nodes:
            commands.append(Run("rhubarbe", "usrpoff", *nodes))
            commands.append(Run("rhubarbe", "load", "-i", image, *nodes))
            commands.append(Run("rhubarbe", "usrpon", *nodes))
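            # the usrpoff/usrpon pair above brackets the image load,
            # presumably to keep the USRP quiet while its node reboots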
commands.append(Run("rhubarbe", "wait", "-t", 120, *nodes)) job = SshJob( node=gwnode, commands=commands, label="Prepare node(s) {}".format(nodes), required=job_check_for_lease, scheduler=sched, ) for node in nodes: prep_job_by_node[node] = job # start services job_service_hss = SshJob( node=hssnode, command=RunScript(find_local_embedded_script("oai-hss.sh"), "run-hss", epc, includes=includes), label="start HSS service", required=prep_job_by_node[hss], scheduler=sched, ) delay = 15 job_service_epc = SshJob( node=epcnode, commands=[ Run("echo giving HSS a headstart {delay}s to warm up; sleep {delay}" .format(delay=delay)), RunScript(find_local_embedded_script("oai-epc.sh"), "run-epc", hss, includes=includes), ], label="start EPC services", required=prep_job_by_node[epc], scheduler=sched, ) ########## enodeb job_warm_enb = SshJob( node=enbnode, commands=[ RunScript(find_local_embedded_script("oai-enb.sh"), "warm-enb", epc, n_rb, not skip_reset_usb, includes=includes), ], label="Warm eNB", required=prep_job_by_node[enb], scheduler=sched, ) enb_requirements = (job_warm_enb, job_service_hss, job_service_epc) # wait for everything to be ready, and add an extra grace delay grace = 30 if load_nodes else 10 grace_delay = SshJob( node = LocalNode(formatter=TimeColonFormatter()), command = "echo Allowing grace of {grace} seconds; sleep {grace}"\ .format(grace=grace), required = enb_requirements, scheduler = sched, ) # start services job_service_enb = SshJob( node=enbnode, # run-enb expects the id of the epc as a parameter # n_rb means number of resource blocks for DL, set to either 25 or 50. commands=[ RunScript(find_local_embedded_script("oai-enb.sh"), "run-enb", oscillo, includes=includes, x11=oscillo), ], label="start softmodem on eNB", required=grace_delay, scheduler=sched, ) ########## run experiment per se # Manage phone(s) # this starts at the same time as the eNB, but some # headstart is needed so that eNB actually is ready to serve delay = 12 msg = "wait for {delay}s for enodeb to start up"\ .format(delay=delay) wait_command = "echo {msg}; sleep {delay}".format(msg=msg, delay=delay) job_start_phones = [ SshJob( node=gwnode, commands=[ Run(wait_command), RunScript(find_local_embedded_script("faraday.sh"), "macphone{}".format(id), "r2lab-embedded/shell/macphone.sh", "phone-on", includes=includes), RunScript(find_local_embedded_script("faraday.sh"), "macphone{}".format(id), "r2lab-embedded/shell/macphone.sh", "phone-start-app", includes=includes), ], label="start Nexus phone and speedtest app", required=grace_delay, scheduler=sched, ) for id in phones ] job_ping_phones_from_epc = [ SshJob( node=epcnode, commands=[ Run("sleep 10"), Run("ping -c 100 -s 100 -i .05 172.16.0.{ip} &> /root/ping-phone" .format(ip=id + 1)), ], label="ping Nexus phone from EPC", critical=False, required=job_start_phones, ) for id in phones ] ########## xterm nodes colors = ["wheat", "gray", "white", "darkolivegreen"] xterms = e3372_ue_xterms + oai_ue_xterms + gnuradio_xterms for xterm, color in zip(xterms, itertools.cycle(colors)): xterm_node = SshNode(gateway=gwnode, hostname=r2lab_hostname(xterm), username='******', formatter=TimeColonFormatter(verbose=verbose), debug=verbose) SshJob( node=xterm_node, command=Run("xterm -fn -*-fixed-medium-*-*-*-20-*-*-*-*-*-*-*" " -bg {} -geometry 90x10".format(color), x11=True), label="xterm on node {}".format(xterm_node.hostname), required=prep_job_by_node[xterm], scheduler=sched, # don't set forever; if we do, then these xterms get killed # when all other tasks have completed # 
            # forever = True,
        )

    # remove dangling requirements - if any -
    # should not be needed but won't hurt either
    sched.sanitize()

    print(20 * "*", "nodes usage summary")
    if load_nodes:
        for image, nodes in to_load.items():
            for node in nodes:
                print("node {node} : {image}".format(node=node, image=image))
    else:
        print("NODES ARE USED AS IS (no image loaded, no reset)")
    print(10 * "*", "phones usage summary")
    if phones:
        for phone in phones:
            print("Using phone{phone}".format(phone=phone))
    else:
        print("No phone involved")

    sched.rain_check()

    # Update the .dot and .png file for illustration purposes
    if verbose or dry_run:
        sched.list()
        name = "scenario-load" if load_nodes else "scenario"
        sched.export_as_dotfile("{name}.dot".format(name=name))
        os.system("dot -Tpng {name}.dot -o {name}.png".format(name=name))
        print("(Over)wrote {name}.png".format(name=name))

    if dry_run:
        return False

    if verbose:
        input('OK ? - press control C to abort ? ')

    if not sched.orchestrate():
        print("RUN KO : {}".format(sched.why()))
        sched.debrief()
        return False
    else:
        print("RUN OK")
        return True
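
# All the jobs above rely on asynciojobs' requirement mechanism: a job only
# starts once its 'required' jobs are done, and orchestrate() runs the whole
# graph. A self-contained sketch of that same pattern, with made-up labels:
#
#     from asynciojobs import Scheduler, PrintJob
#
#     s = Scheduler()
#     step1 = PrintJob("step 1", scheduler=s)
#     step2 = PrintJob("step 2", required=step1, scheduler=s)
#     s.orchestrate()    # prints "step 1", then "step 2"
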
def collect(run_name, slice, hss, epc, enb, verbose):
    """
    retrieves all relevant logs under a common name
    otherwise, same signature as run() for convenience

    retrieved stuff will be 3 compressed tars named
    <run_name>-(hss|epc|enb).tgz

    xxx - todo - it would make sense to also unwrap them all
    in a single place locally, like what "logs.sh unwrap" does
    """

    gwuser, gwhost = r2lab_parse_slice(slice)
    gwnode = SshNode(hostname=gwhost, username=gwuser,
                     formatter=TimeColonFormatter(verbose=verbose),
                     debug=verbose)

    functions = "hss", "epc", "enb"

    hostnames = hssname, epcname, enbname = [
        r2lab_hostname(x) for x in (hss, epc, enb)
    ]

    nodes = hssnode, epcnode, enbnode = [
        SshNode(gateway=gwnode, hostname=hostname, username='******',
                formatter=TimeColonFormatter(verbose=verbose),
                debug=verbose)
        for hostname in hostnames
    ]

    # first run a 'capture' function remotely to gather all the relevant
    # info into a single tar named <run_name>.tgz
    capturers = [
        SshJob(
            node=node,
            command=RunScript(find_local_embedded_script("oai-common.sh"),
                              "capture-{}".format(function), run_name,
                              includes=[find_local_embedded_script(
                                  "oai-{}.sh".format(function))]),
            label="capturer on {}".format(function),
            # capture-enb will run oai-as-enb and thus requires oai-enb.sh
        )
        for (node, function) in zip(nodes, functions)
    ]

    collectors = [
        SshJob(
            node=node,
            command=Pull(remotepaths=["{}-{}.tgz".format(run_name, function)],
                         localpath="."),
            label="collector on {}".format(function),
            required=capturer,
        )
        for (node, function, capturer) in zip(nodes, functions, capturers)
    ]

    sched = Scheduler(verbose=verbose)
    sched.update(capturers)
    sched.update(collectors)

    if verbose:
        sched.list()

    if not sched.orchestrate():
        print("KO")
        sched.debrief()
        return

    print("OK")
    if os.path.exists(run_name):
        print("local directory {} already exists = NOT UNWRAPPED !"
              .format(run_name))
        return

    os.mkdir(run_name)
    local_tars = ["{run_name}-{ext}.tgz".format(run_name=run_name, ext=ext)
                  for ext in ['hss', 'epc', 'enb']]
    for tar in local_tars:
        print("Untarring {} in {}".format(tar, run_name))
        os.system("tar -C {} -xzf {}".format(run_name, tar))
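
# Illustrative call, with made-up node ids consistent with the docstrings
# above:
#
#     collect('run-01', 'inria_foo@faraday.inria.fr', 4, 3, 23, verbose=False)
#
# would fetch run-01-hss.tgz, run-01-epc.tgz and run-01-enb.tgz, and unwrap
# them under ./run-01/
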
def run(*,                # pylint: disable=r0912, r0914, r0915
        # the pieces to use
        slicename, cn, ran, phones,
        e3372_ues, oai_ues, gnuradios,
        e3372_ue_xterms, gnuradio_xterms,
        ns3,
        # boolean flags
        load_nodes, reset_usb, oscillo, tcp_streaming,
        # the images to load
        image_cn, image_ran, image_oai_ue,
        image_e3372_ue, image_gnuradio, image_T_tracer,
        image_ns3,
        # miscellaneous
        n_rb, nodes_left_alone, T_tracer,
        publisher_ip, verbose, dry_run):
    """
    ##########
    # 3 methods to get nodes ready
    # (*) load images
    # (*) reset nodes that are known to have the right image
    # (*) do nothing, proceed to experiment

    expects e.g.
    * slicename : something like [email protected]
    * cn : 7
    * ran : 23
    * ns3 : 32
    * phones: list of indices of phones to use
    * e3372_ues : list of nodes to use as a UE using e3372
    * oai_ues : list of nodes to use as a UE using OAI
    * gnuradios : list of nodes to load with a gnuradio image
    * T_tracer : list of nodes to load with a tracer image
    * image_* : the name of the images to load on the various nodes

    Plus
    * load_nodes: whether to load images or not - in which case
      image_cn, image_ran and image_* are used to tell the image names
    * reset_usb : the USRP board will be reset when this is set
    * tcp_streaming : set up TCP streaming scenario
    * publisher_ip : IP address of the publisher
    """

    # what argparse knows as a slice actually is about the gateway (user + host)
    gwuser, gwhost = r2lab_parse_slice(slicename)
    gwnode = SshNode(hostname=gwhost, username=gwuser,
                     formatter=TimeColonFormatter(verbose=verbose),
                     debug=verbose)

    hostnames = [r2lab_hostname(x) for x in (cn, ran)]

    cnnode, rannode = [
        SshNode(gateway=gwnode, hostname=hostname, username='******',
                formatter=TimeColonFormatter(verbose=verbose),
                debug=verbose)
        for hostname in hostnames
    ]

    scheduler = Scheduler(verbose=verbose, label="CORE EXP")

    ########## prepare the image-loading phase
    # focus on the experiment, and use
    # prepare_testbed_scheduler later on to prepare testbed
    # all we need to do at this point is compute a mapping dict
    # image -> list-of-nodes
    images_to_load = defaultdict(list)
    images_to_load[image_cn] += [cn]
    images_to_load[image_ran] += [ran]
    if e3372_ues:
        images_to_load[image_e3372_ue] += e3372_ues
    if e3372_ue_xterms:
        images_to_load[image_e3372_ue] += e3372_ue_xterms
    if oai_ues:
        images_to_load[image_oai_ue] += oai_ues
    if gnuradios:
        images_to_load[image_gnuradio] += gnuradios
    if gnuradio_xterms:
        images_to_load[image_gnuradio] += gnuradio_xterms
    if T_tracer:
        images_to_load[image_T_tracer] += T_tracer
    if ns3:
        images_to_load[image_ns3] += [ns3]

    # start core network
    job_start_cn = SshJob(
        node=cnnode,
        commands=[
            RunScript(find_local_embedded_script("nodes.sh"),
                      "git-pull-r2lab",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-cn.sh"),
                      "journal --vacuum-time=1s",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-cn.sh"),
                      "configure",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-cn.sh"),
                      "start",
                      includes=INCLUDES),
            tcpdump_cn_service.start_command(),
        ],
        label="start CN service",
        scheduler=scheduler,
    )

    # prepare enodeb
    reset_option = "-u" if reset_usb else ""
    job_warm_ran = SshJob(
        node=rannode,
        commands=[
            RunScript(find_local_embedded_script("nodes.sh"),
                      "git-pull-r2lab",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-ran.sh"),
                      "journal --vacuum-time=1s",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-ran.sh"),
                      "warm-up", reset_option,
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-ran.sh"),
                      "configure -b", n_rb, cn,
                      includes=INCLUDES),
        ],
label="Configure eNB", scheduler=scheduler, ) ran_requirements = [job_start_cn, job_warm_ran] ### if oai_ues: # prepare OAI UEs for ue in oai_ues: ue_node = SshNode(gateway=gwnode, hostname=r2lab_hostname(ue), username='******', formatter=TimeColonFormatter(verbose=verbose), debug=verbose) job_warm_ues = [ SshJob( node=ue_node, commands=[ RunScript(find_local_embedded_script("nodes.sh"), "git-pull-r2lab", includes=INCLUDES), RunScript(find_local_embedded_script("mosaic-oai-ue.sh"), "journal --vacuum-time=1s", includes=INCLUDES), RunScript(find_local_embedded_script("mosaic-oai-ue.sh"), "warm-up", reset_option, includes=INCLUDES), RunScript(find_local_embedded_script("mosaic-oai-ue.sh"), "configure -b", n_rb, includes=INCLUDES), ], label=f"Configure OAI UE on fit{ue:02d}", scheduler=scheduler) ] ran_requirements.append(job_warm_ues) ### if not load_nodes and phones: job_turn_off_phones = SshJob( node=gwnode, commands=[ RunScript(find_local_embedded_script("faraday.sh"), f"macphone{phone} phone-off") for phone in phones], scheduler=scheduler, ) ran_requirements.append(job_turn_off_phones) # wait for everything to be ready, and add an extra grace delay grace = 5 grace_delay = PrintJob( f"Allowing grace of {grace} seconds", sleep=grace, required=ran_requirements, scheduler=scheduler, label=f"settle for {grace}s", ) # optionally start T_tracer if T_tracer: job_start_T_tracer = SshJob( # pylint: disable=w0612 node=SshNode( gateway=gwnode, hostname=r2lab_hostname(T_tracer[0]), username='******', formatter=TimeColonFormatter(verbose=verbose), debug=verbose), commands=[ Run(f"/root/trace {ran}", x11=True), ], label="start T_tracer service", required=ran_requirements, scheduler=scheduler, ) # ran_requirements.append(job_start_T_tracer) # start services graphical_option = "-x" if oscillo else "" graphical_message = "graphical" if oscillo else "regular" tracer_option = " -T" if T_tracer else "" # we use a Python variable for consistency # although it not used down the road _job_service_ran = SshJob( node=rannode, commands=[ RunScript(find_local_embedded_script("mosaic-ran.sh"), "start", graphical_option, tracer_option, includes=INCLUDES, x11=oscillo, ), ], label=f"start {graphical_message} softmodem on eNB", required=grace_delay, scheduler=scheduler, ) ########## run experiment per se # Manage phone(s) and OAI UE(s) # this starts at the same time as the eNB, but some # headstart is needed so that eNB actually is ready to serve sleeps = [20, 30] phone_msgs = [f"wait for {sleep}s for eNB to start up before waking up phone{id}" for sleep, id in zip(sleeps, phones)] wait_commands = [f"echo {msg}; sleep {sleep}" for msg, sleep in zip(phone_msgs, sleeps)] job_start_phones = [ SshJob( node=gwnode, commands=[ Run(wait_command), RunScript(find_local_embedded_script("faraday.sh"), f"macphone{id}", "r2lab-embedded/shell/macphone.sh", "phone-on", includes=INCLUDES), RunScript(find_local_embedded_script("faraday.sh"), f"macphone{id}", "r2lab-embedded/shell/macphone.sh", "phone-start-app", includes=INCLUDES), ], label=f"turn off airplace mode on phone {id}", required=grace_delay, scheduler=scheduler) for id, wait_command in zip(phones, wait_commands)] if oai_ues: delay = 25 for ue in oai_ues: msg = f"wait for {delay}s for eNB to start up before running UE on node fit{ue:02d}" wait_command = f"echo {msg}; sleep {delay}" ue_node = SshNode(gateway=gwnode, hostname=r2lab_hostname(ue), username='******', formatter=TimeColonFormatter(verbose=verbose), debug=verbose) job_start_ues = [ SshJob( node=ue_node, commands=[ 
                        Run(wait_command),
                        RunScript(find_local_embedded_script("mosaic-oai-ue.sh"),
                                  "start",
                                  includes=INCLUDES),
                    ],
                    label=f"Start OAI UE on fit{ue:02d}",
                    required=grace_delay,
                    scheduler=scheduler)
            ]
            delay += 20

        for ue in oai_ues:
            environ = {'USER': '******'}
            cefnet_ue_service = Service("cefnetd", service_id="cefnet",
                                        environ=environ)
            ue_node = SshNode(gateway=gwnode, hostname=r2lab_hostname(ue),
                              username='******',
                              formatter=TimeColonFormatter(verbose=verbose),
                              debug=verbose)
            msg = f"Wait 60s and then ping faraday gateway from UE on fit{ue:02d}"
            # end with a separator so the routing commands appended below
            # chain correctly
            ue_commands = f"echo {msg}; sleep 60; ping -c 5 -I oip1 faraday.inria.fr; "
            if tcp_streaming:
                # TCP streaming scenario
                if load_nodes:
                    ue_commands += "sysctl -w net.ipv4.ip_forward=1;"
                    ue_commands += "ip route add 10.1.1.0/24 via 192.168.2.32 dev data"
                job_setup_ue = [
                    SshJob(
                        node=ue_node,
                        commands=[
                            Run(ue_commands,
                                label="test UE link and set up routing for TCP streaming"),
                        ],
                        label=(f"ping faraday gateway from UE on fit{ue:02d}"
                               " and set up routing for the TCP streaming scenario"),
                        critical=True,
                        required=job_start_ues,
                        scheduler=scheduler)
                ]
            else:
                # Cefore streaming scenario
                if load_nodes:
                    ue_commands += "sysctl -w net.ipv4.ip_forward=1;"
                    ue_commands += f"ip route add {publisher_ip}/32 dev oip1;"
                    ue_commands += "ip route add 10.1.1.0/24 via 192.168.2.32 dev data;"
                    ue_commands += "iptables -t nat -A POSTROUTING -s 10.1.1.2/32 -j SNAT --to-source 172.16.0.2;"
                    ue_commands += "iptables -t nat -A PREROUTING -d 172.16.0.2 -j DNAT --to-destination 10.1.1.2;"
                    ue_commands += "iptables -A FORWARD -d 10.1.1.2/32 -i oip1 -j ACCEPT;"
                    ue_commands += f"iptables -A FORWARD -d {publisher_ip}/32 -i data -j ACCEPT;"
                    ue_commands += "ip rule del from all to 172.16.0.2 lookup 201;"
                    ue_commands += "ip rule del from 172.16.0.2 lookup 201;"
                    ue_commands += "ip rule add from 10.1.1.2 lookup lte prio 32760;"
                    ue_commands += "ip rule add from all to 172.16.0.2 lookup lte prio 32761;"
                    ue_commands += "ip rule add from all fwmark 0x1 lookup lte prio 32762;"
                    ue_commands += "ip route add table lte 10.1.1.0/24 via 192.168.2.32 dev data;"
                    # ue_commands += "killall cefnetd || true"
                job_setup_ue = [
                    SshJob(
                        node=ue_node,
                        commands=[
                            Run(ue_commands,
                                label="test UE link and set up routing for Cefore streaming"),
                            cefnet_ue_service.start_command(),
                        ],
                        label=(f"ping faraday gateway from fit{ue:02d} UE and set up"
                               " routing for the Cefore streaming scenario"),
                        # old cefnetd not killed when running new one...
                        critical=True,
                        required=job_start_ues,
                        scheduler=scheduler)
                ]

    if ns3:
        ns3_node = SshNode(gateway=gwnode, hostname=r2lab_hostname(ns3),
                           username='******',
                           formatter=TimeColonFormatter(verbose=verbose),
                           debug=verbose)
        msg = f"Wait for the UE node to be ready before running the streaming scenario with ns-3 on fit{ns3}"
        if load_nodes:
            job_prepare_ns3_node = [
                SshJob(
                    node=ns3_node,
                    commands=[
                        Run("turn-on-data"),
                        Run("ifconfig data promisc up"),
                        Run("ip route add default via 192.168.2.6 dev data || true"),
                        Run("sysctl -w net.ipv4.ip_forward=1"),
                    ],
                    label=f"setup routing on ns-3 fit{ns3:02d} node",
                    # ip route may already be there so the ip route command may fail
                    critical=True,
                    required=job_setup_ue,
                    scheduler=scheduler)
            ]
            ns3_requirements = job_prepare_ns3_node
        else:
            ns3_requirements = job_setup_ue

        if not tcp_streaming:
            environ = {'USER': '******'}
            cefnet_ns3_service = Service("cefnetd", service_id="cefnet",
                                         environ=environ)
            job_start_cefnet_on_cn = [
                SshJob(
                    node=cnnode,
                    commands=[
                        Run(f"echo 'ccn:/streaming tcp {publisher_ip}:80' > /usr/local/cefore/cefnetd.conf"),
                        # Run("killall cefnetd || true"),
                        # not done by default with service.start_command()
                        cefnet_ns3_service.start_command(),
                    ],
                    label=f"Start Cefnet on EPC running at fit{cn:02d}",
                    # old cefnetd not killed when running new one...
                    critical=True,
                    required=ns3_requirements,
                    scheduler=scheduler,
                )
            ]

    # ditto
    _job_ping_phones_from_cn = [
        SshJob(
            node=cnnode,
            commands=[
                Run("sleep 20"),
                Run(f"ping -c 100 -s 100 -i .05 172.16.0.{id+1} &> /root/ping-phone{id}"),
            ],
            label=f"ping phone {id} from core network",
            critical=False,
            required=job_start_phones,
            scheduler=scheduler)
        for id in phones]

    ########## xterm nodes
    colors = ("wheat", "gray", "white", "darkolivegreen")
    xterms = e3372_ue_xterms + gnuradio_xterms

    for xterm, color in zip(xterms, cycle(colors)):
        xterm_node = SshNode(
            gateway=gwnode, hostname=r2lab_hostname(xterm),
            username='******',
            formatter=TimeColonFormatter(verbose=verbose),
            debug=verbose)
        SshJob(
            node=xterm_node,
            command=Run("xterm -fn -*-fixed-medium-*-*-*-20-*-*-*-*-*-*-*",
                        f" -bg {color} -geometry 90x10",
                        x11=True),
            label=f"xterm on node {xterm_node.hostname}",
            scheduler=scheduler,
            # don't set forever; if we do, then these xterms get killed
            # when all other tasks have completed
            # forever = True,
        )

    # remove dangling requirements - if any
    # should not be needed but won't hurt either
    scheduler.sanitize()

    ##########
    print(10*"*", "nodes usage summary")
    if load_nodes:
        for image, nodes in images_to_load.items():
            for node in nodes:
                print(f"node fit{node:02d} : {image}")
    else:
        print("NODES ARE USED AS IS (no image loaded, no reset)")
    print(10*"*", "phones usage summary")
    if phones:
        for phone in phones:
            print(f"Using phone{phone}")
    else:
        print("No phone involved")
    if nodes_left_alone:
        print(f"Ignore following fit nodes: {nodes_left_alone}")
    print(f"Publisher IP is {publisher_ip}")
    if tcp_streaming:
        print("Run streaming scenario with TCP")
    else:
        print("Run streaming scenario with Cefore")

    # wrap scheduler into global scheduler that prepares the testbed
    scheduler = prepare_testbed_scheduler(
        gwnode, load_nodes, scheduler, images_to_load, nodes_left_alone)

    scheduler.check_cycles()
    # Update the .dot and .png file for illustration purposes
    name = "cefore-load" if load_nodes else "cefore"
    print(10*'*', 'See main scheduler in',
          scheduler.export_as_pngfile(name))

    if verbose:
        scheduler.list()

    if dry_run:
        return True

    if verbose:
        input('OK ? - press control C to abort ? ')
    if not scheduler.orchestrate():
        print(f"RUN KO : {scheduler.why()}")
        scheduler.debrief()
        return False
    print("RUN OK")
    print(80*'*')
    if tcp_streaming:
        # TCP streaming scenario
        print(f"Now it's time to run the ns-3 script on node fit{ns3:02d}")
        print(f"root@fit{ns3:02d}:~# /root/NS3/source/ns-3-dce/waf --run dce-tcp-test")
        print("Then, run iperf on the publisher host:")
        print("yourlogin@publisher:~# iperf -s -P 1 -p 80")
        print(f"Log file will be available on fit{ns3:02d} at:")
        print("    /root/NS3/source/ns-3-dce/files-4/var/log/56884/stdout")
    else:
        # Cefore streaming scenario
        print("Now, if not already done, copy the cefnetd and cefputfile binaries to your publisher host")
        print("login@your_host:r2lab-demos/cefore# scp bin/cefnetd yourlogin@publisher_node:/usr/local/sbin")
        print("login@your_host:r2lab-demos/cefore# scp bin/cefputfile yourlogin@publisher_node:/usr/local/bin")
        print(f"After that, run on the ns-3 fit{ns3:02d} node the following command:")
        print(f"root@fit{ns3:02d}:~# /root/NS3/source/ns-3-dce/waf --run dce-cefore-test")
        print("Then, run on the publisher host:")
        print("yourlogin@publisher:~# cefnetd")
        print("yourlogin@publisher:~# cefputfile ccn:/streaming/test -f ./[file-name] -r [1 <= streaming_rate <= 32 (Mbps)]")
        print(f"Log file will be available on fit{ns3:02d} at:")
        print("    /root/NS3/source/ns-3-dce/files-3/tmp/cefgetstream-thuputLog-126230400110000")
    print(80*'*')
    return True
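
# Illustrative call - every value below is made up; the real script feeds
# these from its command-line options. Note the streaming-specific knobs
# tcp_streaming, publisher_ip, ns3 and image_ns3:
#
#     run(slicename='inria_foo@faraday.inria.fr',
#         cn=7, ran=23, ns3=32, phones=[1],
#         e3372_ues=[], oai_ues=[9], gnuradios=[],
#         e3372_ue_xterms=[], gnuradio_xterms=[],
#         load_nodes=True, reset_usb=False, oscillo=False,
#         tcp_streaming=False,
#         image_cn='mosaic-cn', image_ran='mosaic-ran',
#         image_oai_ue='mosaic-oai-ue', image_e3372_ue='e3372-ue',
#         image_gnuradio='gnuradio', image_T_tracer='t-tracer',
#         image_ns3='ns3',
#         n_rb=25, nodes_left_alone=[], T_tracer=[],
#         publisher_ip='192.168.1.100', verbose=True, dry_run=True)
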
def collect(run_name, slicename, cn, ran, oai_ues, verbose, dry_run):
    """
    retrieves all relevant logs under a common name
    otherwise, same signature as run() for convenience

    retrieved stuff will be made of
    * one pcap file for the CN
    * compressed tgz files, one per node, gathering logs and configs and data
    * for convenience the tgz files are unwrapped in run_name/id0
    """

    # the local dir to store incoming raw files - mostly tar files
    local_path = Path(f"{run_name}")
    if not local_path.exists():
        print(f"Creating directory {local_path}")
        local_path.mkdir()

    gwuser, gwhost = r2lab_parse_slice(slicename)
    gwnode = SshNode(hostname=gwhost, username=gwuser,
                     formatter=TimeColonFormatter(verbose=verbose),
                     debug=verbose)

    functions = ["cn", "ran"]
    hostnames = [r2lab_hostname(x) for x in (cn, ran)]

    node_cn, node_ran = nodes = [
        SshNode(gateway=gwnode, hostname=hostname, username='******',
                formatter=TimeColonFormatter(verbose=verbose),
                debug=verbose)
        for hostname in hostnames
    ]

    nodes_ue = []
    if oai_ues:
        hostnames_ue = [r2lab_hostname(x) for x in oai_ues]
        nodes_ue = [
            SshNode(gateway=gwnode, hostname=hostname, username='******',
                    formatter=TimeColonFormatter(verbose=verbose),
                    debug=verbose)
            for hostname in hostnames_ue]

    # all nodes involved are managed in the same way
    # node: a SshNode instance
    # id: the fit number
    # function: a string like 'cn' or 'ran' or 'oai-ue'
    local_nodedirs_tars = []

    scheduler = Scheduler(verbose=verbose)
    for (node, id, function) in zip(
            chain(nodes, nodes_ue),
            chain([cn, ran], oai_ues),
            chain(functions, cycle(["oai-ue"]))):
        # nodes on 2 digits
        id0 = f"{id:02d}"
        # node-dep collect dir
        node_dir = local_path / id0
        node_dir.exists() or node_dir.mkdir()
        local_tar = f"{local_path}/{function}-{id0}.tgz"
        SshJob(
            node=node,
            commands=[
                # first run a 'capture-all' function remotely
                # to gather all the relevant files and commands remotely
                RunScript(
                    find_local_embedded_script(f"mosaic-{function}.sh"),
                    "capture-all", f"{run_name}-{function}",
                    includes=INCLUDES),
                # and retrieve it locally
                Pull(
                    remotepaths=f"{run_name}-{function}.tgz",
                    localpath=local_tar),
            ],
            scheduler=scheduler)
        local_nodedirs_tars.append((node_dir, local_tar))

    # retrieve tcpdump on CN
    SshJob(
        node=node_cn,
        commands=[
            tcpdump_cn_service.stop_command(),
            Pull(remotepaths=[tcpdump_cn_pcap], localpath=local_path),
        ],
        scheduler=scheduler
    )

    print(10*'*', 'See collect scheduler in',
          scheduler.export_as_pngfile("cefore-collect"))
    if verbose:
        scheduler.list()

    if dry_run:
        return

    if not scheduler.run():
        print("KO")
        scheduler.debrief()
        return

    # unwrap
    for node_dir, tar in local_nodedirs_tars:
        print(f"Untarring {tar} in {node_dir}/")
        os.system(f"tar -C {node_dir} -xzf {tar}")
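
# The chain/cycle zip in collect() above tags each node with its function;
# a minimal standalone illustration of the same pattern (node ids made up):
#
#     from itertools import chain, cycle
#     list(zip(chain([7, 23], [9, 16]),
#              chain(["cn", "ran"], cycle(["oai-ue"]))))
#     # -> [(7, 'cn'), (23, 'ran'), (9, 'oai-ue'), (16, 'oai-ue')]
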
def run(*,                # pylint: disable=r0912, r0914, r0915
        # the pieces to use
        slicename, cn, ran, phones,
        e3372_ues, oai_ues, gnuradios,
        e3372_ue_xterms, gnuradio_xterms,
        # boolean flags
        load_nodes, reset_usb, oscillo,
        # the images to load
        image_cn, image_ran, image_oai_ue,
        image_e3372_ue, image_gnuradio, image_T_tracer,
        # miscellaneous
        n_rb, nodes_left_alone, T_tracer,
        verbose, dry_run):
    """
    ##########
    # 3 methods to get nodes ready
    # (*) load images
    # (*) reset nodes that are known to have the right image
    # (*) do nothing, proceed to experiment

    expects e.g.
    * slicename : something like [email protected]
    * cn : 7
    * ran : 23
    * phones: list of indices of phones to use
    * e3372_ues : list of nodes to use as a UE using e3372
    * oai_ues : list of nodes to use as a UE using OAI
    * gnuradios : list of nodes to load with a gnuradio image
    * T_tracer : list of nodes to load with a tracer image
    * image_* : the name of the images to load on the various nodes

    Plus
    * load_nodes: whether to load images or not - in which case
      image_cn, image_ran and image_* are used to tell the image names
    * reset_usb : the USRP board will be reset when this is set
    """

    # what argparse knows as a slice actually is about the gateway (user + host)
    gwuser, gwhost = r2lab_parse_slice(slicename)
    gwnode = SshNode(hostname=gwhost, username=gwuser,
                     formatter=TimeColonFormatter(verbose=verbose),
                     debug=verbose)

    hostnames = [r2lab_hostname(x) for x in (cn, ran)]

    cnnode, rannode = [
        SshNode(gateway=gwnode, hostname=hostname, username='******',
                formatter=TimeColonFormatter(verbose=verbose),
                debug=verbose)
        for hostname in hostnames
    ]

    scheduler = Scheduler(verbose=verbose, label="CORE EXP")

    ########## prepare the image-loading phase
    # focus on the experiment, and use
    # prepare_testbed_scheduler later on to prepare testbed
    # all we need to do at this point is compute a mapping dict
    # image -> list-of-nodes
    images_to_load = defaultdict(list)
    images_to_load[image_cn] += [cn]
    images_to_load[image_ran] += [ran]
    if e3372_ues:
        images_to_load[image_e3372_ue] += e3372_ues
    if e3372_ue_xterms:
        images_to_load[image_e3372_ue] += e3372_ue_xterms
    if oai_ues:
        images_to_load[image_oai_ue] += oai_ues
    if gnuradios:
        images_to_load[image_gnuradio] += gnuradios
    if gnuradio_xterms:
        images_to_load[image_gnuradio] += gnuradio_xterms
    if T_tracer:
        images_to_load[image_T_tracer] += T_tracer

    # start core network
    job_start_cn = SshJob(
        node=cnnode,
        commands=[
            RunScript(find_local_embedded_script("nodes.sh"),
                      "git-pull-r2lab",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-cn.sh"),
                      "journal --vacuum-time=1s",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-cn.sh"),
                      "configure",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-cn.sh"),
                      "start",
                      includes=INCLUDES),
            tcpdump_cn_service.start_command(),
        ],
        label="start CN service",
        scheduler=scheduler,
    )

    # prepare enodeb
    reset_option = "-u" if reset_usb else ""
    job_warm_ran = SshJob(
        node=rannode,
        commands=[
            RunScript(find_local_embedded_script("nodes.sh"),
                      "git-pull-r2lab",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-ran.sh"),
                      "journal --vacuum-time=1s",
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-ran.sh"),
                      "warm-up", reset_option,
                      includes=INCLUDES),
            RunScript(find_local_embedded_script("mosaic-ran.sh"),
                      "configure -b", n_rb, cn,
                      includes=INCLUDES),
        ],
        label="Configure eNB",
        scheduler=scheduler,
    )

    ran_requirements = [job_start_cn, job_warm_ran]

    ###
    if oai_ues:
        # prepare OAI UEs
        for ue in oai_ues:
            ue_node = SshNode(gateway=gwnode,
                              hostname=r2lab_hostname(ue),
                              username='******',
                              formatter=TimeColonFormatter(verbose=verbose),
                              debug=verbose)
            job_warm_ues = [
                SshJob(node=ue_node,
                       commands=[
                           RunScript(find_local_embedded_script("nodes.sh"),
                                     "git-pull-r2lab",
                                     includes=INCLUDES),
                           RunScript(
                               find_local_embedded_script("mosaic-oai-ue.sh"),
                               "journal --vacuum-time=1s",
                               includes=INCLUDES),
                           RunScript(
                               find_local_embedded_script("mosaic-oai-ue.sh"),
                               "warm-up", reset_option,
                               includes=INCLUDES),
                           RunScript(
                               find_local_embedded_script("mosaic-oai-ue.sh"),
                               "configure -b", n_rb,
                               includes=INCLUDES),
                       ],
                       label=f"Configure OAI UE on fit{ue}",
                       scheduler=scheduler)
            ]
            ran_requirements.append(job_warm_ues)

    ###
    if not load_nodes and phones:
        job_turn_off_phones = SshJob(
            node=gwnode,
            commands=[
                RunScript(find_local_embedded_script("faraday.sh"),
                          f"macphone{phone} phone-off")
                for phone in phones
            ],
            scheduler=scheduler,
        )
        ran_requirements.append(job_turn_off_phones)

    # wait for everything to be ready, and add an extra grace delay
    grace = 5
    grace_delay = PrintJob(
        f"Allowing grace of {grace} seconds",
        sleep=grace,
        required=ran_requirements,
        scheduler=scheduler,
        label=f"settle for {grace}s",
    )

    # optionally start T_tracer
    if T_tracer:
        job_start_T_tracer = SshJob(          # pylint: disable=w0612
            node=SshNode(gateway=gwnode,
                         hostname=r2lab_hostname(T_tracer[0]),
                         username='******',
                         formatter=TimeColonFormatter(verbose=verbose),
                         debug=verbose),
            commands=[
                Run(f"/root/trace {ran}", x11=True),
            ],
            label="start T_tracer service",
            required=ran_requirements,
            scheduler=scheduler,
        )
        # ran_requirements.append(job_start_T_tracer)

    # start services
    graphical_option = "-x" if oscillo else ""
    graphical_message = "graphical" if oscillo else "regular"
    tracer_option = " -T" if T_tracer else ""

    # we use a Python variable for consistency
    # although it is not used down the road
    _job_service_ran = SshJob(
        node=rannode,
        commands=[
            RunScript(
                find_local_embedded_script("mosaic-ran.sh"),
                "start", graphical_option, tracer_option,
                includes=INCLUDES,
                x11=oscillo,
            ),
        ],
        label=f"start {graphical_message} softmodem on eNB",
        required=grace_delay,
        scheduler=scheduler,
    )

    ########## run experiment per se
    # Manage phone(s) and OAI UE(s)
    # this starts at the same time as the eNB, but some
    # headstart is needed so that eNB actually is ready to serve
    sleeps = [20, 30]
    phone_msgs = [
        f"wait for {sleep}s for eNB to start up before waking up phone{id}"
        for sleep, id in zip(sleeps, phones)
    ]
    wait_commands = [
        f"echo {msg}; sleep {sleep}"
        for msg, sleep in zip(phone_msgs, sleeps)
    ]

    job_start_phones = [
        SshJob(node=gwnode,
               commands=[
                   Run(wait_command),
                   RunScript(find_local_embedded_script("faraday.sh"),
                             f"macphone{id}",
                             "r2lab-embedded/shell/macphone.sh",
                             "phone-on",
                             includes=INCLUDES),
                   RunScript(find_local_embedded_script("faraday.sh"),
                             f"macphone{id}",
                             "r2lab-embedded/shell/macphone.sh",
                             "phone-start-app",
                             includes=INCLUDES),
               ],
               label=f"turn off airplane mode on phone {id}",
               required=grace_delay,
               scheduler=scheduler)
        for id, wait_command in zip(phones, wait_commands)
    ]

    if oai_ues:
        delay = 25
        for ue in oai_ues:
            msg = f"wait for {delay}s for eNB to start up before running UE on node fit{ue}"
            wait_command = f"echo {msg}; sleep {delay}"
            ue_node = SshNode(gateway=gwnode,
                              hostname=r2lab_hostname(ue),
                              username='******',
                              formatter=TimeColonFormatter(verbose=verbose),
                              debug=verbose)
            job_start_ues = [
                SshJob(node=ue_node,
                       commands=[
                           Run(wait_command),
                           RunScript(
                               find_local_embedded_script("mosaic-oai-ue.sh"),
                               "start",
                               includes=INCLUDES),
                       ],
                       label=f"Start OAI UE on fit{ue}",
                       required=grace_delay,
                       scheduler=scheduler)
            ]
            delay += 20

        for ue in oai_ues:
            ue_node = SshNode(gateway=gwnode,
                              hostname=r2lab_hostname(ue),
                              username='******',
                              formatter=TimeColonFormatter(verbose=verbose),
                              debug=verbose)
            msg = f"Wait 60s and then ping faraday gateway from UE on fit{ue}"
            _job_ping_gw_from_ue = [
                SshJob(node=ue_node,
                       commands=[
                           Run(f"echo {msg}; sleep 60"),
                           Run("ping -c 5 -I oip1 faraday.inria.fr"),
                       ],
                       label=f"ping faraday gateway from UE on fit{ue}",
                       critical=False,
                       required=job_start_ues,
                       scheduler=scheduler)
            ]

    # ditto
    _job_ping_phones_from_cn = [
        SshJob(
            node=cnnode,
            commands=[
                Run("sleep 20"),
                Run(f"ping -c 100 -s 100 -i .05 172.16.0.{id+1} &> /root/ping-phone{id}"),
            ],
            label=f"ping phone {id} from core network",
            critical=False,
            required=job_start_phones,
            scheduler=scheduler)
        for id in phones
    ]

    ########## xterm nodes
    colors = ("wheat", "gray", "white", "darkolivegreen")
    xterms = e3372_ue_xterms + gnuradio_xterms

    for xterm, color in zip(xterms, cycle(colors)):
        xterm_node = SshNode(gateway=gwnode,
                             hostname=r2lab_hostname(xterm),
                             username='******',
                             formatter=TimeColonFormatter(verbose=verbose),
                             debug=verbose)
        SshJob(
            node=xterm_node,
            command=Run("xterm -fn -*-fixed-medium-*-*-*-20-*-*-*-*-*-*-*",
                        f" -bg {color} -geometry 90x10",
                        x11=True),
            label=f"xterm on node {xterm_node.hostname}",
            scheduler=scheduler,
            # don't set forever; if we do, then these xterms get killed
            # when all other tasks have completed
            # forever = True,
        )

    # remove dangling requirements - if any
    # should not be needed but won't hurt either
    scheduler.sanitize()

    ##########
    print(10 * "*", "nodes usage summary")
    if load_nodes:
        for image, nodes in images_to_load.items():
            for node in nodes:
                print(f"node {node} : {image}")
    else:
        print("NODES ARE USED AS IS (no image loaded, no reset)")
    print(10 * "*", "phones usage summary")
    if phones:
        for phone in phones:
            print(f"Using phone{phone}")
    else:
        print("No phone involved")
    if nodes_left_alone:
        print(f"Ignore following fit nodes: {nodes_left_alone}")

    # wrap scheduler into global scheduler that prepares the testbed
    scheduler = prepare_testbed_scheduler(gwnode, load_nodes, scheduler,
                                          images_to_load, nodes_left_alone)

    scheduler.check_cycles()
    # Update the .dot and .png file for illustration purposes
    name = "mosaic-load" if load_nodes else "mosaic"
    print(10 * '*', 'See main scheduler in',
          scheduler.export_as_pngfile(name))

    if verbose:
        scheduler.list()

    if dry_run:
        return True

    if verbose:
        input('OK ? - press control C to abort ? ')

    if not scheduler.orchestrate():
        print(f"RUN KO : {scheduler.why()}")
        scheduler.debrief()
        return False
    print("RUN OK")
    return True
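
# The SshNode-behind-the-gateway construction is repeated many times across
# the scripts above; a small helper along these lines (hypothetical, not part
# of the original code) would factor the pattern out:
#
#     def fit_node(gwnode, id_or_name, *, verbose=False):
#         """Build an SshNode for a fit node, reached through the gateway."""
#         return SshNode(gateway=gwnode,
#                        hostname=r2lab_hostname(id_or_name),
#                        username='******',
#                        formatter=TimeColonFormatter(verbose=verbose),
#                        debug=verbose)
#
# so that e.g. ue_node = fit_node(gwnode, ue, verbose=verbose)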