Example 1
    def on_timer(self):
        if self.test_number >= self.repeat_time:
            # self.test_number = self.test_number % self.repeat_time
            self.log.info("deadlock count: %d" % self.deadlock_count)
            self.log.info("unsplittable deadlock count: %d" %
                          self.deadlock_unsplittable_count)
            sys.exit(0)
            return  # unreachable: sys.exit() has already raised SystemExit
            # When running on real switch use this:
            #if self.current_controller_sw < len(global_vars.switch_ids) - 1:
            #    self.current_controller_sw += 1
            #    self.test_number = 0
            #else:
            #    return
        flow_gen = FlowChangeGenerator()
        # self.log.info("test number: %d" % self.test_number)
        self.log.debug("repeat time: %d" % self.repeat_time)
        filename = global_vars.flow_folder + "/flows_%s.intra" \
                   % str(self.test_number % self.repeat_time)
        update = flow_gen.read_flows(filename)
        # self.convert_flows(filename, old_flows, new_flows)

        if not update.old_flows and not update.new_flows:
            self.log.info("deadlock during transition; skipping this update")
            return
        # self.log.info("old_flows: %s" % old_flows)
        # self.log.info("new_flows: %s" % new_flows)
        self.log.info("skip deadlock: %s" % self.skip_deadlock)
        self.having_splittable_deadlock = False
        self.having_unsplittable_deadlock = False
        self.msgs_sent_by_sws.clear()
        self.install_update(update.old_flows, update.new_flows)
        self.test_number += 1
Example 2
    def read_flows(self, test_number):
        flow_gen = FlowChangeGenerator()
        directory = "../data/%s/random/1000/" % self.topo_input
        filename = directory + "flows_%d.intra" % test_number
        self.logger.debug(filename)
        return flow_gen.read_flows(filename)
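A slightly more defensive variant of the same lookup, offered only as a sketch: it assumes the same surrounding class as above (self.topo_input, self.logger, and an importable FlowChangeGenerator) and builds the path with os.path.join instead of hand-concatenated "/" separators.

    def read_flows(self, test_number):
        import os  # local import, only to keep this sketch self-contained
        flow_gen = FlowChangeGenerator()
        # os.path.join avoids missing or doubled separators in the path.
        filename = os.path.join("..", "data", self.topo_input, "random", "1000",
                                "flows_%d.intra" % test_number)
        self.logger.debug(filename)
        return flow_gen.read_flows(filename)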
Example 3
    def load_flows_for_test(self, flows_file):
        flow_gen = FlowChangeGenerator()
        update = flow_gen.read_flows(flows_file, False)
        self.old_flows = update.old_flows
        self.new_flows = update.new_flows
Example 4
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='ez-segway sim.')
    parser.add_argument('--logFolder', nargs='?', type=str, default="logs")
    parser.add_argument('--logFile', nargs='?', type=str, default="stdout")
    parser.add_argument('--data_file', nargs='?', type=str, default="data")
    parser.add_argument('--end_flow', nargs='?', type=str, default="data")
    args = parser.parse_args()

    directory = "../%s" % (args.logFolder)
    if not os.path.exists(directory):
        os.makedirs(directory)

    logger.init("../" + args.logFolder + "/" + args.logFile,
                constants.LOG_LEVEL)
    log = logger.getLogger("data-generator", constants.LOG_LEVEL)
    log.info("---> Log start <---")

    flow_gen = FlowChangeGenerator()
    filename = "../%s" % args.data_file
    update = flow_gen.read_flows(filename)
    for flow in update.old_flows:
        flow.path = []
    flow_gen.write_flows_pair(filename, update.old_flows, update.new_flows)

    filename = "../%s" % args.end_flow
    update = flow_gen.read_flows(filename)
    for flow in update.new_flows:
        flow.path = []
    flow_gen.write_flows_pair(filename, update.old_flows, update.new_flows)
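All of the snippets above program against the same FlowChangeGenerator interface. The stub below is only a sketch of that implied interface, not the real ez-segway class: it keeps updates in a dictionary instead of parsing .intra files, the Flow and NetworkUpdate names are placeholders, and the name of read_flows' optional second argument (passed positionally as False in Example 3) is an assumption.

class Flow(object):
    """Placeholder flow record; 'path' is the only attribute the snippets touch."""
    def __init__(self, path=None):
        self.path = path if path is not None else []


class NetworkUpdate(object):
    """Pair of flow sets: the configuration before and after an update."""
    def __init__(self, old_flows, new_flows):
        self.old_flows = old_flows
        self.new_flows = new_flows


class InMemoryFlowChangeGenerator(object):
    """Interface stand-in: stores updates in a shared dict instead of .intra files."""
    _store = {}

    def read_flows(self, filename, read_reversed_flow=True):
        # Unknown filenames yield an empty update, which Example 1 reports
        # as a deadlock during transition.
        return self._store.get(filename, NetworkUpdate([], []))

    def write_flows_pair(self, filename, old_flows, new_flows):
        self._store[filename] = NetworkUpdate(old_flows, new_flows)

Swapping a stand-in like this for FlowChangeGenerator lets the __main__ block of Example 4 be exercised without any .intra files on disk, provided the project's logger and constants modules are still importable.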
Example 5
    parser = argparse.ArgumentParser(description='ez-segway sim.')
    parser.add_argument('--logFolder', nargs='?', type=str, default="logs")
    parser.add_argument('--logFile', nargs='?', type=str, default="stdout")
    parser.add_argument('--data_folder', nargs='?', type=str, default="data")
    args = parser.parse_args()

    directory = "../%s" % (args.logFolder)
    if not os.path.exists(directory):
        os.makedirs(directory)

    logger.init("../" + args.logFolder + "/" + args.logFile,
                constants.LOG_LEVEL)
    log = logger.getLogger("data-generator", constants.LOG_LEVEL)
    log.info("---> Log start <---")

    flow_change_generator = FlowChangeGenerator()
    datafiles = [
        f for f in listdir(args.data_folder)
        if isfile(join(args.data_folder, f))
    ]
    # for file in datafiles:
    #     update = flow_change_generator.read_flows(join(args.data_folder, file))
    #     update.no_of_segments_by_count = \
    #         flow_change_generator.analyze_pivot_switches(update.old_flows, update.new_flows)
    #     flow_change_generator.write_flows(join(args.data_folder, file), update, write_reversed_flow=False)

    avg_of_avg_old = 0
    avg_of_avg_new = 0
    count = 0
    for file in datafiles:
        network_update_info = flow_change_generator.read_statistic_info(