def __init__(self, shared_counter, test_config, time_counter=None, **kwargs):
    """Initialize a process-based grader.

    Args:
        shared_counter: global SharedCounter of successful sessions.
        test_config: test configuration forwarded to the grader skeleton.
        time_counter: SharedCounter('d') accumulating success time. A fresh
            counter is created when omitted — the previous default of
            ``SharedCounter(val_type='d')`` was a mutable default argument
            shared across every instance created without one.
        **kwargs: extra options forwarded to GraderSkeleton as a dict
            (matches the existing GraderSkeleton call convention).
    """
    if time_counter is None:
        time_counter = SharedCounter(val_type='d')
    super(GraderProcess, self).__init__(shared_counter, test_config, time_counter, kwargs)
    multiprocessing.Process.__init__(self)
    # use an internal counter to reduce global success counter locks
    self.internal_counter = SharedCounter()
    self.internal_timer = SharedCounter(val_type='d')
class GraderProcess(GraderSkeleton, multiprocessing.Process):
    """Process-based grader: runs ``self.loop`` GraderThreads in one process.

    Uses process-local counters so the global shared counters are locked
    only once per process instead of once per session.
    """

    def __init__(self, shared_counter, test_config, time_counter=None, **kwargs):
        """Set up per-process counters and forward config to the skeleton.

        ``time_counter`` defaults to a fresh ``SharedCounter('d')`` per call;
        the previous ``SharedCounter(val_type='d')`` default was a mutable
        default argument shared across instances.
        """
        if time_counter is None:
            time_counter = SharedCounter(val_type='d')
        super(GraderProcess, self).__init__(shared_counter, test_config, time_counter, kwargs)
        multiprocessing.Process.__init__(self)
        # use an internal counter to reduce global success counter locks
        self.internal_counter = SharedCounter()
        self.internal_timer = SharedCounter(val_type='d')
        self.question_count = 0

    def grade(self):
        """Warm up ``self.loop`` threads, run them staggered, then publish results."""
        # warm up threads
        threads = []
        session_count = 0
        while session_count < self.loop:
            grader_thread = GraderThread(self.internal_counter, self.test_config, self.internal_timer)
            grader_thread.init()
            threads.append(grader_thread)
            session_count += 1
            self.question_count = grader_thread.get_question_number()
        # do real grade
        for grader_thread in threads:
            grader_thread.start()
            # wait for spawn interval between thread launches
            sleep(self.spawn_interval)
        # wait for threads
        for grader_thread in threads:
            grader_thread.join()
        # calculate success count in the end (single lock on the global counters)
        self.shared_counter.increment(self.internal_counter.value())
        self.time_counter.increment(self.internal_timer.value())

    def run(self):
        # multiprocessing.Process entry point
        self.grade()

    def get_question_number(self):
        # number of questions per session, captured from the last warmed-up thread
        return self.question_count
def __init__(self, shared_counter, test_config, time_counter=None, **kwargs):
    """Initialize a thread-based grader.

    Args:
        shared_counter: SharedCounter of successful sessions to increment.
        test_config: test configuration forwarded to the grader skeleton.
        time_counter: SharedCounter('d') accumulating success time. A fresh
            counter is created when omitted — the previous default of
            ``SharedCounter(val_type='d')`` was a mutable default argument
            shared across every instance created without one.
        **kwargs: extra options forwarded to GraderSkeleton as a dict
            (matches the existing GraderSkeleton call convention).
    """
    if time_counter is None:
        time_counter = SharedCounter(val_type='d')
    super(GraderThread, self).__init__(shared_counter, test_config, time_counter, kwargs)
    threading.Thread.__init__(self)
    # initialize grader
    self.grader = Grader()
def run(test_config_file_name, test_session, test_length):
    """Run the grading stress test described by a YAML config file.

    Args:
        test_config_file_name: path to the YAML test config; must contain the
            config's id/name as a substring, otherwise the run is refused.
        test_session: number of concurrent sessions. -1 loops forever,
            1 runs a single session, >1 runs a multi-session load test.
        test_length: seconds over which to spread session spawning; -1 (with
            test_session > 1) runs the sessions sequentially instead.
    """
    test_config = YamlConfigFileHandler(test_config_file_name)
    test_config_id = test_config.get_config('name', test_config.get_config('id', ''))
    # refuse to run when the file name does not embed the config id/name
    if test_config_file_name.find(test_config_id) == -1:
        report_logger.error(
            "file name: {} is different from config id/name: {}".format(
                test_config_file_name, test_config_id))
        return
    if test_session == -1:
        # forever stable test
        while True:
            grader = Grader()
            grader.init(test_config)
            grader.test()
    elif test_session == 1:
        # single session grade
        grader = Grader()
        grader.init(test_config)
        grader.test()
    elif test_length == -1:
        # single session, several sequential rounds
        # FIX: range(1, test_session) only ran test_session - 1 rounds
        for _ in range(test_session):
            grader = Grader()
            grader.init(test_config)
            grader.test()
    elif test_session > 1:
        # multi session grade: spread thread spawning evenly over test_length
        spawn_interval = test_length / (test_session * 1.0)
        handler_count = test_session
        session_per_handler = 1
        Handler_Class = GraderThread
        # use processes when thread count or spawn rate would overwhelm one process
        if test_session > 512 or spawn_interval < 0.5:
            handler_count = multiprocessing.cpu_count()
            session_per_handler = test_session // handler_count
            Handler_Class = GraderProcess
        # thread-safe success / success-time counters shared by all handlers
        success_count = SharedCounter()
        success_time_count = SharedCounter(val_type='d')
        # overall wall-clock timer
        process_time = time.time()
        threads = []
        report_logger.info(
            "Testing {0} sessions in {1} seconds, interval: {2}, using class {3}"
            .format(test_session, test_length, spawn_interval, Handler_Class.__name__))
        report_logger.info("Warming up ...")
        warm_up_time = time.time()
        # distribute sessions across handlers; the last handler takes the remainder
        sessions_to_distribute = test_session
        while sessions_to_distribute > 0:
            # FIX: replaced the legacy `cond and a or b` idiom with min()
            session_per_process = min(sessions_to_distribute, session_per_handler)
            grader_handler = Handler_Class(success_count, test_config,
                                           success_time_count,
                                           loop=session_per_process,
                                           spawn_interval=spawn_interval * handler_count)
            grader_handler.init()
            threads.append(grader_handler)
            sessions_to_distribute -= session_per_process
        report_logger.info(
            "Warm up process finished in {0} seconds".format(time.time() - warm_up_time))
        launch_time = time.time()
        # start handlers, staggered by spawn_interval
        for grader_handler in threads:
            grader_handler.start()
            sleep(spawn_interval)
        report_logger.info("{0} sessions started in {1}".format(
            int(test_session), time.time() - launch_time))
        # wait for all handlers to finish
        for grader_handler in threads:
            grader_handler.join()
        # questions answered = successful sessions * questions per session
        # (question count taken from the last handler; assumes it is uniform)
        questions_count = success_count.value() * grader_handler.get_question_number()
        # FIX: "Success avg" previously printed the total success time again;
        # report the per-success average, guarding against zero successes
        success_avg = (success_time_count.value() / success_count.value()
                       if success_count.value() else 0.0)
        report_logger.info(
            "Result: {0} / {1} passed. Total time: {2}\nSuccess time: {3} Passed: {4} Success avg: {5}"
            .format(success_count.value(), int(test_session),
                    time.time() - process_time, success_time_count.value(),
                    questions_count, success_avg))