def create(unknown_arguments=False):
    """Create the network connectivity matrix and persist it to config.network_csv.

    Reads the nodes CSV, parses CLI arguments from ``sys.argv[2:]``, seeds the
    RNG for reproducible matrix generation, builds the connectivity matrix and
    writes it as CSV rows.

    Args:
        unknown_arguments: when True, tolerate extra CLI flags intended for
            other sub-commands (uses ``parse_known_args``).

    Raises:
        Exception: if the generated matrix is not fully connected.
    """
    logging.info('Called network config')

    utils.check_for_file(config.nodes_csv)
    nodes = utils.read_csv(config.nodes_csv)

    parser = _create_parser()
    if unknown_arguments:
        args = parser.parse_known_args(sys.argv[2:])[0]
    else:
        args = parser.parse_args(sys.argv[2:])
    logging.info("Parsed arguments in {}: {}".format(__name__, args))
    utils.update_args(args)

    # Deterministic matrix generation for reproducible runs.
    random.seed(args.seed)

    header = _create_header(nodes)
    matrix = _create_matrix(header, args.connectivity)

    # Idiomatic truthiness test instead of `is not True`
    # (assumes the helper returns a bool -- it did under the old
    # identity check as well, or every run would have raised).
    if not _check_if_fully_connected(matrix):
        # Message fixed: "nodes a reachable"/"Consider to raise" were typos.
        raise Exception(
            "Not all nodes are reachable. Consider raising the connectivity.")

    logging.info('Created {}:'.format(config.network_csv))
    print(pandas.DataFrame(matrix))

    # newline="" is the documented way to open files for the csv module
    # (prevents blank interleaved rows on Windows).
    with open(config.network_csv, "w", newline="") as file:
        writer = csv.writer(file)
        writer.writerows(matrix)

    logging.info('End network config')
def create(unknown_arguments=False):
    """Generate the ticks CSV (block events plus transactions per tick).

    Parses CLI arguments from ``sys.argv[2:]``, seeds the RNG so tick
    generation is reproducible, then writes the resulting tick rows to
    config.ticks_csv.
    """
    logging.info('Called ticks config')

    utils.check_for_file(config.nodes_csv)
    nodes = utils.read_csv(config.nodes_csv)

    parser = _create_parser()
    # parse_known_args tolerates flags belonging to other sub-commands.
    args = (parser.parse_known_args(sys.argv[2:])[0]
            if unknown_arguments
            else parser.parse_args(sys.argv[2:]))
    logging.info("Parsed arguments in {}: {}".format(__name__, args))
    utils.update_args(args)

    random.seed(args.seed)

    block_events = _create_block_events(
        nodes, args.amount_of_ticks, args.blocks_per_tick)
    ticks = _create_ticks(
        nodes, block_events, args.txs_per_tick, args.amount_of_ticks)

    logging.info('Created {}:'.format(config.ticks_csv))
    print(pandas.DataFrame(ticks))

    with open(config.ticks_csv, "w") as csv_file:
        csv.writer(csv_file).writerows(ticks)

    logging.info('End ticks config')
def execute(self):
    """Execute all simulation ticks from config.ticks_csv on a worker pool.

    Each CSV line is one tick: a comma-separated list of commands, expanded
    via ``self._execute_cmd_string`` and run in parallel on a two-worker
    pool. After every tick the loop sleeps until the next planned tick
    start; a tick that overruns its slot aborts the whole run. Any failure
    is caught at the top level and logged with its traceback.
    """
    try:
        utils.check_for_file(config.ticks_csv)
        with open(config.ticks_csv, 'r') as file:
            # Log message typo fixed ("openend" -> "opened").
            logging.info("Runner.Event.execute ticks_csv opened, starting pool")
            with Pool(2) as pool:
                logging.info("Runner.Event.execute Pool started")
                start_time = time.time()
                for i, line in enumerate(file):
                    logging.info(f'Line {i} executing')
                    actual_start = time.time()
                    planned_start = start_time + i * self._context.args.tick_duration
                    self._txs_count = self._blocks_count = 0
                    if line:
                        cmds = line.rstrip().split(',')
                        if cmds == ['']:
                            # Blank line: nothing to execute this tick.
                            continue
                        # Expand each command and strip trailing whitespace
                        # (previously two chained map() passes).
                        cmd_strings = [
                            self._execute_cmd_string(cmd).rstrip()
                            for cmd in cmds
                        ]
                        # imap_unordered: log results as workers finish,
                        # regardless of submission order.
                        for result in pool.imap_unordered(
                                check_output_without_log, cmd_strings):
                            logging.info(result)
                    planned_start_next_tick = (
                        start_time + (i + 1) * self._context.args.tick_duration)
                    current_time = time.time()
                    duration = current_time - actual_start
                    logging.info(
                        'Tick={} with planned_start={}, actual_start={} and duration={:F},'
                        ' created txs={} and blocks={}'.format(
                            i, planned_start, actual_start, duration,
                            self._txs_count, self._blocks_count))
                    if current_time < planned_start_next_tick:
                        difference = planned_start_next_tick - current_time
                        logging.info(
                            'Sleep {} seconds for next tick={}'.format(difference, i))
                        utils.sleep(difference)
                    else:
                        # A tick overran its time slot: abort instead of
                        # letting the schedule drift.
                        raise Exception("Tick Timeout. A Tick took longer than planned.")
    except Exception:
        logging.exception('Simulation could not execute all events because of an exception')
def read_connections():
    """Parse config.network_csv into an adjacency mapping.

    Returns:
        dict: maps each node name (row label) to the list of node names it
        is connected to, i.e. columns whose matrix value equals 1. The
        diagonal (a node's own column) is skipped.
    """
    utils.check_for_file(config.network_csv)

    connections = {}
    # Pass the path instead of an open() handle so pandas manages and
    # closes the file itself (the previous handle was never closed).
    network_config = pandas.read_csv(config.network_csv, index_col=0)

    for node_row, row in network_config.iterrows():
        connections[node_row] = []
        # Series.items() replaces iteritems(), which was removed in pandas 2.0.
        for node_column, value in row.items():
            if node_column == node_row:
                # Skip self-connections.
                continue
            if value == 1:
                connections[node_row].append(node_column)

    return connections
def run(unknown_arguments=False):
    """Wire up the Runner (prepare/postprocessing/event) and execute a run.

    Verifies that all required CSV inputs exist, parses CLI arguments from
    ``sys.argv[2:]``, then runs the simulation and logs its duration.
    """
    for required_csv in [config.ticks_csv, config.network_csv, config.nodes_csv]:
        utils.check_for_file(required_csv)

    parser = _create_parser()
    args = (parser.parse_known_args(sys.argv[2:])[0]
            if unknown_arguments
            else parser.parse_args(sys.argv[2:]))
    logging.info("Parsed arguments in {}: {}".format(__name__, args))
    utils.update_args(args)

    _check_skip_ticks(args.skip_ticks)

    context = Context()
    logging.info(config.log_line_run_start + context.run_name)

    # Optional suffix distinguishes otherwise identically tagged runs.
    tag = context.args.tag + getattr(context.args, 'tag_appendix', '')
    writer = Writer(tag)

    runner = Runner(context, writer)
    runner._prepare = Prepare(context)
    runner._postprocessing = PostProcessing(context, writer)
    runner._event = Event(context)

    start = time.time()
    runner.run()
    logging.info("The duration of the run was {} seconds".format(
        str(time.time() - start)))
def run(unknown_arguments=False):
    """Execute a simulation run and record its elapsed time in Info().

    Verifies required CSV inputs, parses CLI arguments from ``sys.argv[2:]``,
    attaches the (optionally suffixed) tag to the context and runs the Runner.
    """
    for required_csv in [config.ticks_csv, config.network_csv, config.nodes_csv]:
        utils.check_for_file(required_csv)

    parser = _create_parser()
    args = (parser.parse_known_args(sys.argv[2:])[0]
            if unknown_arguments
            else parser.parse_args(sys.argv[2:]))
    logging.info("Parsed arguments in {}: {}".format(__name__, args))
    utils.update_args(args)

    _check_skip_ticks(args.skip_ticks)

    context = Context()
    logging.info(config.log_line_run_start + context.run_name)

    # Optional suffix distinguishes otherwise identically tagged runs.
    context.tag = context.args.tag + getattr(context.args, 'tag_appendix', '')

    runner = Runner(context)

    start = time.time()
    runner.run()
    duration = time.time() - start

    # Persist the elapsed time for later reporting.
    Info().time_elapsed = str(duration)
    logging.info("The duration of the run was {} seconds".format(
        str(duration)))
def create(unknown_arguments=False):
    """Parse arguments and (re)create config.network_csv.

    NOTE(review): unlike the fuller variant of this routine, no matrix is
    built or written here -- ``header`` and ``writer`` are created but never
    used, so the output file ends up empty. This looks incomplete or
    intentionally stripped down; confirm intent before relying on it.
    """
    logging.info('Called network config')
    utils.check_for_file(config.nodes_csv)
    nodes = utils.read_csv(config.nodes_csv)
    parser = _create_parser()
    # With unknown_arguments=True, tolerate extra CLI flags meant for other
    # sub-commands.
    if unknown_arguments:
        args = parser.parse_known_args(sys.argv[2:])[0]
    else:
        args = parser.parse_args(sys.argv[2:])
    logging.info("Parsed arguments in {}: {}".format(__name__, args))
    utils.update_args(args)
    # Seed for reproducibility (presumably for matrix generation that this
    # variant no longer performs -- see docstring).
    random.seed(args.seed)
    # Unused below -- see docstring.
    header = _create_header(nodes)
    logging.info('Created {}:'.format(config.network_csv))
    # Opening with "w" truncates: the file is created/emptied but no rows
    # are written to it.
    with open(config.network_csv, "w") as file:
        # Unused -- see docstring.
        writer = csv.writer(file)
    logging.info('End network config')
def execute(self):
    """Sequentially execute every tick from config.ticks_csv.

    Each CSV line is one tick: its comma-separated commands are executed
    one by one via ``self._execute_cmd``; afterwards the loop sleeps away
    any time remaining in the tick's planned slot. Failures are caught at
    the top level and logged with their traceback.
    """
    try:
        utils.check_for_file(config.ticks_csv)
        with open(config.ticks_csv, 'r') as ticks_file:
            start_time = time.time()
            for tick, line in enumerate(ticks_file):
                actual_start = time.time()
                planned_start = start_time + tick * self._context.args.tick_duration
                self._txs_count = self._blocks_count = 0

                for cmd in line.rstrip().split(','):
                    self._execute_cmd(cmd)

                planned_start_next_tick = (
                    start_time + (tick + 1) * self._context.args.tick_duration)
                current_time = time.time()
                duration = current_time - actual_start
                logging.info(
                    'Tick={} with planned_start={}, actual_start={} and duration={:F},'
                    ' created txs={} and blocks={}'.format(
                        tick, planned_start, actual_start, duration,
                        self._txs_count, self._blocks_count))

                # Sleep away whatever remains of this tick's slot.
                if current_time < planned_start_next_tick:
                    difference = planned_start_next_tick - current_time
                    logging.info(
                        'Sleep {} seconds for next tick={}'.format(
                            difference, tick))
                    utils.sleep(difference)
    except Exception:
        logging.exception(
            'Simulation could not execute all events because of an exception'
        )
def test_check_for_files_file_exists(self, m_isfile):
    """check_for_file completes without error when os.path.isfile is True."""
    m_isfile.return_value = True
    utils.check_for_file('file.txt')
def test_check_for_files_file_exists(self, mock_isfile, mock_exit):
    """An existing file must not trigger the exit handler."""
    mock_isfile.return_value = True

    utils.check_for_file('file.txt')

    self.assertFalse(mock_exit.called)
def test_check_for_files_file_not_existing(self, mock_isfile, mock_exit):
    """A missing file must trigger the exit handler."""
    mock_isfile.return_value = False

    utils.check_for_file('file.txt')

    self.assertTrue(mock_exit.called)
def test_check_for_files_file_exists(m_isfile, m_exit):
    """check_for_file must not exit when the file exists."""
    m_isfile.return_value = True

    utils.check_for_file('file.txt')

    assert not m_exit.called