def fetch_data(dir_exp, dir_exp_two):
    co.check_directory_exists(dir_exp)
    co.check_directory_exists(dir_exp_two)
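    # Result layout: protocol -> stat file name -> direction (co.S2C / co.C2S)
    #                -> connection id -> stats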
    dico = {MPTCP: {}, TCP: {}}
    for dirpath, dirnames, filenames in os.walk(dir_exp):
        for fname in filenames:
            try:
                ack_file = open(os.path.join(dirpath, fname), 'rb')  # binary mode for pickle
                dico[MPTCP][fname] = pickle.load(ack_file)
                ack_file.close()
                # Copy the keys so entries can be deleted while iterating
                conn_ids = list(dico[MPTCP][fname][co.S2C].keys())
                for conn_id in conn_ids:
                    if conn_id not in valid[fname]:
                        del dico[MPTCP][fname][co.S2C][conn_id]
                        del dico[MPTCP][fname][co.C2S][conn_id]
            except IOError as e:
                print(str(e) + ': skip stat file ' + fname, file=sys.stderr)

    for dirpath, dirnames, filenames in os.walk(dir_exp_two):
        for fname in filenames:
            try:
                ack_file = open(os.path.join(dirpath, fname), 'rb')
                dico[TCP][fname] = pickle.load(ack_file)
                ack_file.close()
                conn_ids = list(dico[TCP][fname][co.S2C].keys())
                for conn_id in conn_ids:
                    if conn_id not in valid[fname]:
                        del dico[TCP][fname][co.S2C][conn_id]
                        del dico[TCP][fname][co.C2S][conn_id]
            except IOError as e:
                print(str(e) + ': skip stat file ' + fname, file=sys.stderr)

    return dico
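
A minimal sketch of how this loader is invoked, assuming the directory setup of Example #15 below, which defines the MPTCP/TCP constants and the global valid dict this function relies on:

acksizes = fetch_data(mptcp_dir_exp, tcp_dir_exp)
# acksizes[MPTCP] / acksizes[TCP] map each stat file to its per-direction
# (co.S2C / co.C2S) data, pruned to the connection ids in valid[fname]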
def fetch_data(dir_exp, args):
    co.check_directory_exists(dir_exp)
    dico = {}
    for dirpath, dirnames, filenames in os.walk(dir_exp):
        if check_in_list(dirpath, args.dirs):
            for fname in filenames:
                try:
                    stat_file = open(os.path.join(dirpath, fname), "rb")
                    dico[fname] = pickle.load(stat_file)
                    stat_file.close()
                except IOError as e:
                    print(str(e) + ": skip stat file " + fname, file=sys.stderr)
    return dico
def fetch_data(dir_exp):
    co.check_directory_exists(dir_exp)
    dico = {}
    for dirpath, dirnames, filenames in os.walk(dir_exp):
        if check_in_list(dirpath, args.dirs):
            for fname in filenames:
                try:
                    stat_file = open(os.path.join(dirpath, fname), 'rb')
                    dico[fname] = pickle.load(stat_file)
                    stat_file.close()
                except IOError as e:
                    print(str(e) + ': skip stat file ' + fname, file=sys.stderr)
    return dico
def fetch_data(dir_exp, dir_exp_two, dir_exp_three):
    co.check_directory_exists(dir_exp)
    co.check_directory_exists(dir_exp_two)
    co.check_directory_exists(dir_exp_three)
    dico = {DS_1: {}, DS_2: {}, DS_3: {}}
    for dirpath, dirnames, filenames in os.walk(dir_exp):
        if check_in_list(dirpath, args.dirs):
            for fname in filenames:
                fname_date = co.get_date_as_int(fname)
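                # Keep only stat files for the requested app, dated inside
                # [start_time, stop_time], passing check_conditions and, when
                # given, the apps_to_load whitelist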
                if is_app_name(fname, args.app) and (fname_date and (int(start_time) <= fname_date <= int(stop_time))) and check_conditions(fname) and (not apps_to_load or get_app_name(fname) in apps_to_load):
                    try:
                        stat_file = open(os.path.join(dirpath, fname), 'rb')
                        dico[DS_1][fname] = pickle.load(stat_file)
                        stat_file.close()
                    except IOError as e:
                        print(str(e) + ': skip stat file ' + fname, file=sys.stderr)

    for dirpath, dirnames, filenames in os.walk(dir_exp_two):
        if check_in_list(dirpath, args.dirs_two):
            for fname in filenames:
                fname_date = co.get_date_as_int(fname)
                if is_app_name(fname, args.app) and (fname_date and (int(start_time) <= fname_date <= int(stop_time))) and check_conditions(fname) and (not apps_to_load or get_app_name(fname) in apps_to_load):
                    try:
                        stat_file = open(os.path.join(dirpath, fname), 'rb')
                        dico[DS_2][fname] = pickle.load(stat_file)
                        stat_file.close()
                    except IOError as e:
                        print(str(e) + ': skip stat file ' + fname, file=sys.stderr)

    for dirpath, dirnames, filenames in os.walk(dir_exp_three):
        if check_in_list(dirpath, args.dirs_three):
            for fname in filenames:
                fname_date = co.get_date_as_int(fname)
                if is_app_name(fname, args.app) and (fname_date and (int(start_time) <= fname_date <= int(stop_time))) and check_conditions(fname) and (not apps_to_load or get_app_name(fname) in apps_to_load):
                    try:
                        stat_file = open(os.path.join(dirpath, fname), 'rb')
                        dico[DS_3][fname] = pickle.load(stat_file)
                        stat_file.close()
                    except IOError as e:
                        print(str(e) + ': skip stat file ' + fname, file=sys.stderr)
    return dico
Example #6
def launch_experiments(exp_factors):
    # Create a timestamped directory for this run of the experiments
    dir_name = datetime.now().strftime('%Y%m%d_%H%M%S')
    dir_path = os.path.join(dir_exp, dir_name)
    co.check_directory_exists(dir_path)

    # Save the sysctl configuration and MPTCP-related dmesg output for later reference
    sysctlerr = os.system('sysctl -a > ' + os.path.join(dir_path, 'sysctl'))
    sysctlerr = os.system('dmesg | grep MPTCP >> ' +
                          os.path.join(dir_path, 'sysctl'))

    xp_file = os.path.join(dir_path, 'xp')

    copyfile(xp_base, xp_file)
    with open(xp_file, 'a') as xp:
        xp.write('iperfTime:' + str(args.time) + '\n')

    for factor in exp_factors:
        if args.only_one_path:
            co.experiment_one_topo_loss_one_path(factor, dir_path, xp_file)
        else:
            co.experiment_one_topo_loss(factor, dir_path, xp_file)
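
A hedged sketch of a caller; the factor values are purely illustrative and their meaning (loss factors applied to one topology) is inferred from the experiment_one_topo_loss calls above:

launch_experiments([0.0, 0.5, 1.0])  # hypothetical loss factors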
Example #7
                    "--stat",
                    help="directory where the stat files are stored",
                    default=co.DEF_STAT_DIR + '_' + co.DEF_IFACE)
parser.add_argument('-S',
                    "--sums",
                    help="directory where the summary graphs will be stored",
                    default=co.DEF_SUMS_DIR + '_' + co.DEF_IFACE)
parser.add_argument("-d",
                    "--dirs",
                    help="list of directories to aggregate",
                    nargs="+")

args = parser.parse_args()
stat_dir_exp = os.path.abspath(os.path.join(ROOT_DIR, args.stat))
sums_dir_exp = os.path.abspath(os.path.join(ROOT_DIR, args.sums))
co.check_directory_exists(sums_dir_exp)

##################################################
##                 GET THE DATA                 ##
##################################################

connections = cog.fetch_valid_data(stat_dir_exp, args)
multiflow_connections, singleflow_connections = cog.get_multiflow_connections(
    connections)
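# connections maps each stat file name to its dict of connections (see the
# valid-set construction in Example #15); get_multiflow_connections presumably
# splits them into connections with several subflows and those with one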

##################################################
##               PLOTTING RESULTS               ##
##################################################

INITIAL_SF = 'Initial SF'
INITIAL_SFS = '2 Initial SFs'
##################################################
##                  ARGUMENTS                   ##
##################################################

parser = argparse.ArgumentParser(
    description="Summarize stat files generated by analyze")
parser.add_argument("-s",
                    "--stat", help="directory where the stat files are stored", default=co.DEF_STAT_DIR + '_' + co.DEF_IFACE)
parser.add_argument('-S',
                    "--sums", help="directory where the summary graphs will be stored", default=co.DEF_SUMS_DIR + '_' + co.DEF_IFACE)
parser.add_argument("-d",
                    "--dirs", help="list of directories to aggregate", nargs="+")

args = parser.parse_args()
stat_dir_exp = os.path.abspath(os.path.join(ROOT_DIR, args.stat))
sums_dir_exp = os.path.abspath(os.path.join(ROOT_DIR, args.sums))
co.check_directory_exists(sums_dir_exp)

##################################################
##                 GET THE DATA                 ##
##################################################

connections = cog.fetch_valid_data(stat_dir_exp, args)
multiflow_connections, singleflow_connections = cog.get_multiflow_connections(connections)

##################################################
##               PLOTTING RESULTS               ##
##################################################

color = "orange"
base_graph_name = "example_graph"
graph_full_path = base_graph_name + ".pdf"
args = parser.parse_args()

dir_exp = os.path.abspath(os.path.expanduser(args.res_dir))
topo_file = os.path.abspath(os.path.expanduser(args.topo_base))
xp_base = os.path.abspath(os.path.expanduser(args.xp_base))

bin_mpPerf = os.path.abspath(os.path.expanduser(args.mpperf_file))
experiment_file = os.path.abspath(os.path.expanduser(args.experiments))

def launch_experiments(exp_factors):
    """ Create the xp file and launch the different experiments """
    # Create a timestamped directory for this run of the experiments
    dir_name = datetime.now().strftime('%Y%m%d_%H%M%S')
    dir_path = os.path.join(dir_exp, dir_name)
    co.check_directory_exists(dir_path)

    # Save the sysctl configuration and MPTCP-related dmesg output for later reference
    sysctlerr = os.system('sysctl -a > ' + os.path.join(dir_path, 'sysctl'))
    sysctlerr = os.system('dmesg | grep MPTCP >> ' + os.path.join(dir_path, 'sysctl'))

    xp_file = os.path.join(dir_path, 'xp')

    copyfile(xp_base, xp_file)
    with open(xp_file, 'a') as xp:
        xp.write(co.IPERFTIME + ':' + str(args.time) + '\n')

    for factor in exp_factors:
        if args.only_one_path:
            co.experiment_one_topo_one_path(factor, dir_path, xp_file)
        else:
            # Assumed counterpart call (the else branch is cut off in the
            # source); named by analogy with experiment_one_topo_loss above
            co.experiment_one_topo(factor, dir_path, xp_file)
    in_dir_exp = os.path.abspath(os.path.expanduser(args.input))
    # ~/graphs -> /home/mptcp/graphs_lo ; ../graphs/ -> /home/mptcp/graphs_lo
    trace_dir_exp = co.get_dir_from_arg(args.trace, args.pcap[0])
    ports_dir_exp = co.get_dir_from_arg(args.ports, args.pcap[0])

    if os.path.isdir(in_dir_exp):
        # add the basename of the input dir
        base_dir = os.path.basename(in_dir_exp)  # 20150215-013001_d8cac271ad6d544930b0e804383c19378ed4908c
        parent_dir = os.path.basename(os.path.dirname(in_dir_exp))  # TCPDump or TCPDump_bad_simulation
        trace_dir_exp = os.path.join(trace_dir_exp, parent_dir, base_dir)
        ports_dir_exp = os.path.join(ports_dir_exp, parent_dir, base_dir)

    print_out = sys.stdout

    co.check_directory_exists(ports_dir_exp)

    class TSharkError(Exception):
        pass

    def tshark_filter(condition, src_path, dst_path, print_out=sys.stderr):
        """ Filter src_path using the condition and write the result to dst_path
            Raise a TSharkError in case of failure
        """
        cmd = ['tshark', '-r', src_path, '-Y', condition, '-w', dst_path]
        if subprocess.call(cmd, stdout=print_out) != 0:
            raise TSharkError("Error with condition " + condition + " for source " + src_path +
                              " and destination " + dst_path)
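
    # A hedged usage sketch for tshark_filter; the display filter and file
    # names are illustrative ('mptcp' as a tshark display filter is an
    # assumption here):
    #     try:
    #         tshark_filter('mptcp', 'trace.pcap', 'trace_mptcp.pcap')
    #     except TSharkError as err:
    #         print(err, file=sys.stderr)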


    def uncompress_file(filename, dirpath):
Example #11
                    print("Error when moving " + filename, file=sys.stderr)
                else:
                    return output_filepath
        else:
            print(filename + ": not in a valid format, skipped",
                  file=sys.stderr)
    return False


def add_if_valid(list, item):
    if item:
        list.append(item)


pcap_list = []
co.check_directory_exists(trace_dir_exp)
if not args.dir_input:
    if os.path.isdir(in_dir_exp):
        for dirpath, dirnames, filenames in os.walk(in_dir_exp):
            for filename in filenames:
                add_if_valid(pcap_list, uncompress_file(filename, dirpath))
    else:
        add_if_valid(
            pcap_list,
            uncompress_file(os.path.basename(in_dir_exp),
                            os.path.dirname(in_dir_exp)))

pcap_list_len = len(pcap_list)

##################################################
##                  FETCH DB                    ##
                cmd = ['cp', os.path.join(dirpath, filename), output_filepath]
                if subprocess.call(cmd, stdout=print_out) != 0:
                    print("Error when moving " + filename, file=sys.stderr)
                else:
                    return output_filepath
        else:
            print(filename + ": not in a valid format, skipped", file=sys.stderr)
    return False


def add_if_valid(lst, item):
    if item:
        lst.append(item)
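
# uncompress_file returns the path of a usable capture or False, so
# add_if_valid silently drops files that could not be handled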

pcap_list = []
co.check_directory_exists(trace_dir_exp)
if not args.dir_input:
    if os.path.isdir(in_dir_exp):
        for dirpath, dirnames, filenames in os.walk(in_dir_exp):
            for filename in filenames:
                add_if_valid(pcap_list, uncompress_file(filename, dirpath))
    else:
        add_if_valid(pcap_list, uncompress_file(os.path.basename(in_dir_exp),
                                                os.path.dirname(in_dir_exp)))

pcap_list_len = len(pcap_list)


##################################################
##                  FETCH DB                    ##
##################################################
##################################################
#                   ARGUMENTS                    #
##################################################

parser = argparse.ArgumentParser(
    description="Summarize stat files generated by analyze")
parser.add_argument("-s",
                    "--stat", help="directory where the stat files are stored", default=co.DEF_STAT_DIR + '_' + co.DEF_IFACE)
parser.add_argument('-c',
                    "--csv", help="directory where the csv will be stored", default=DEF_CSV)

args = parser.parse_args()
stat_dir_exp = os.path.abspath(os.path.expanduser(args.stat))
csv_dir_exp = os.path.abspath(os.path.expanduser(args.csv))
co.check_directory_exists(csv_dir_exp)

##################################################
#                  GET THE DATA                  #
##################################################


def ensures_smartphone_to_proxy(connections):
    for conn_id in connections.keys():
        if isinstance(connections[conn_id], mptcp.MPTCPConnection):
            inside = True
            for flow_id, flow in connections[conn_id].flows.iteritems():
                if not [x for x in co.PREFIX_IP_PROXY if flow.attr[co.DADDR].startswith(x)] and flow.attr[co.DADDR] not in co.IP_PROXY:
                    connections.pop(conn_id, None)
                    inside = False
                    break
Example #15
parser = argparse.ArgumentParser(
    description="Summarize stat files generated by analyze")
parser.add_argument("stats", help="directory where the stat files are stored")
parser.add_argument("mptcp_ack", help="directory where the MPTCP acksizes are stored")
parser.add_argument("tcp_ack", help="directory where the TCP acksizes are stored")
parser.add_argument('-g',
                    "--graph", help="directory where the graphs will be stored", default=co.DEF_SUMS_DIR + '_acksize')
parser.add_argument("-d",
                    "--dirs", help="list of directories to aggregate", nargs="+")

args = parser.parse_args()

MPTCP = 'mptcp'
TCP = 'tcp'

co.check_directory_exists(args.graph)

stats_dir_exp = os.path.abspath(os.path.expanduser(args.stats))
mptcp_dir_exp = os.path.abspath(os.path.expanduser(args.mptcp_ack))
tcp_dir_exp = os.path.abspath(os.path.expanduser(args.tcp_ack))
sums_dir_exp = os.path.abspath(os.path.expanduser(args.graph))

connections = cog.fetch_valid_data(stats_dir_exp, args)

valid = {}

# Build, for each stat file, the set of connection ids considered valid
for fname, conns in connections.iteritems():
    valid[fname] = set()
    for conn_id in conns.keys():
        valid[fname].add(conn_id)
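
# Equivalent one-liner (Python 2.7+), shown only to clarify the structure:
#     valid = {fname: set(conns.keys()) for fname, conns in connections.iteritems()}
# This valid dict is what the two-directory fetch_data at the top of this
# section consults when pruning connection ids from the acksize data.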