def test_submit_test_event(self):
    """ Testing POST /EventService/SubmitTestEvent

    Strategy: first try serving the event callback from a local httpd on
    self.__httpd_port (works when rackhd and the test stack share a host).
    If that attempt times out, retry on HTTPD_PORT with an SSH port forward
    so rackhd running on a guest host can reach the listener.
    """
    # `task` is module-global so the event handler (elsewhere in the file)
    # can see the worker thread; it is rebound for the retry below.
    global task
    # Suppose rackhd and test stack have the same localhost
    server = Httpd(port=int(self.__httpd_port), handler_class=self.EventServiceHandler)
    task = WorkerThread(server, 'httpd')
    worker = WorkerTasks(tasks=[task], func=self.__httpd_start)
    worker.run()
    # Trigger the test event; the httpd handler should receive the POST.
    redfish().test_event(body={})
    worker.wait_for_completion(timeout_sec=60)
    if task.timeout:
        # Else port forward rackhd -> localhost
        server = Httpd(port=int(HTTPD_PORT), handler_class=self.EventServiceHandler)
        task = WorkerThread(server, 'httpd')
        worker = WorkerTasks(tasks=[task], func=self.__httpd_start)
        worker.run()
        # forward port for services running on a guest host
        session = open_ssh_forward(self.__httpd_port)
        redfish().test_event(body={})
        worker.wait_for_completion(timeout_sec=60)
        session.logout()
    # Fails only if BOTH attempts timed out (task refers to the last attempt).
    assert_false(task.timeout, message='timeout waiting for task {0}'.format(task.id))
def test_submit_test_event(self):
    """ Testing POST /EventService/SubmitTestEvent

    Same two-phase strategy as the sibling test: local httpd first, then
    retry via SSH port forward if the first listener times out. Uses
    unittest-style assertions and additionally checks a class-level flag
    set by the event handler on payload mismatch.
    """
    # Module-global so the handler code can reference the worker thread.
    global task
    # Suppose rackhd and test stack have the same localhost
    server = Httpd(port=int(self.__httpd_port), handler_class=self.EventServiceHandler)
    task = WorkerThread(server, 'httpd')
    worker = WorkerTasks(tasks=[task], func=self.__httpd_start)
    worker.run()
    redfish().test_event(body={})
    worker.wait_for_completion(timeout_sec=60)
    if task.timeout:
        # Else port forward rackhd -> localhost
        server = Httpd(port=int(HTTPD_PORT), handler_class=self.EventServiceHandler)
        task = WorkerThread(server, 'httpd')
        worker = WorkerTasks(tasks=[task], func=self.__httpd_start)
        worker.run()
        # forward port for services running on a guest host
        session = open_ssh_forward(self.__httpd_port)
        redfish().test_event(body={})
        worker.wait_for_completion(timeout_sec=60)
        session.logout()
    self.assertFalse(task.timeout, msg='timeout waiting for task {0}'.format(task.id))
    # eventHandlerFailed is set by EventServiceHandler when the received
    # event's subscriptionId/memberId do not match the subscription.
    self.assertFalse(self.__class__.eventHandlerFailed,
                     msg='Event handler reported subscriptionId / memberId mismatch')
def run_workflow_tasks(self, tasks, timeout_sec):
    """ Run the queued workflow tasks and assert none of them timed out.

    NOTE(review): the `tasks` parameter is never used — both the runner and
    the timeout check operate on self.__tasks. Confirm whether callers rely
    on passing a different list here (compare the sibling variants of this
    method, which do consume the argument).
    """
    def thread_func(worker, id):
        # Runner callback: start each worker thread.
        worker.start()
    worker_tasks = WorkerTasks(tasks=self.__tasks, func=thread_func)
    worker_tasks.run()
    worker_tasks.wait_for_completion(timeout_sec=timeout_sec)
    # Fail fast on the first task that timed out.
    for task in self.__tasks:
        assert_false(task.timeout, \
            message='Timeout for {0}, node {1}'.format(self.__graph_name, task.id))
    # __graph_status is appended to by AMQP callbacks elsewhere in the class.
    if 'failed' in self.__graph_status:
        fail('Failure running {0}'.format(self.__graph_name))
def _wait_until_graph_finish(self, graph_name, timevalue):
    """Block until the named graph posts a finish event on AMQP, or fail on timeout."""
    self.__graph_name = graph_name
    amqp_consumer = AMQPWorker(queue=QUEUE_GRAPH_FINISH, \
                               callbacks=[self.__handle_graph_finish])
    self.__task = WorkerThread(amqp_consumer, graph_name)

    def launch(worker, id):
        # Runner callback: each worker thread starts its AMQP consumer.
        worker.start()

    runner = WorkerTasks(tasks=[self.__task], func=launch)
    runner.run()
    runner.wait_for_completion(timeout_sec=timevalue)
    # A timeout here means the graph-finish message never arrived.
    assert_false(self.__task.timeout, \
        message='timeout waiting for task {0}'.format(self.__task.id))
def run_workflow_tasks(self, tasks, timeout_sec):
    """ Run workflow tasks; log timeouts and fail if any graph failed.

    NOTE(review): the runner executes self.__tasks, but the timeout check
    iterates the `tasks` argument — verify callers always pass the same
    list as self.__tasks, otherwise timed-out tasks could go unchecked.
    """
    def thread_func(worker, id):
        # Runner callback: start each worker thread.
        worker.start()
    worker_tasks = WorkerTasks(tasks=self.__tasks, func=thread_func)
    worker_tasks.run()
    worker_tasks.wait_for_completion(timeout_sec=timeout_sec)
    # Collect all timeouts (unlike assert-per-task variants, this reports
    # every timed-out node before failing).
    for task in tasks:
        if task.timeout:
            LOG.error('Timeout for {0}, node {1}'.format(self.__graph_name, task.id))
            self.__graph_status.append('failed')
    if 'failed' in self.__graph_status:
        fail('Failure running {0}'.format(self.__graph_name))
def run_workflow_tasks(self, tasks, timeout_sec):
    """ Run workflow tasks and fail if any timed out or the graph failed.

    :param tasks: list of WorkerThread tasks to run; None means use the
        tasks previously queued on self.__tasks.
    :param timeout_sec: overall completion timeout in seconds.
    """
    def thread_func(worker, id):
        # Runner callback: start each worker thread.
        worker.start()
    tasks = self.__tasks if tasks is None else tasks
    # BUG FIX: previously passed self.__tasks here, so an explicitly
    # supplied task list was never actually run (yet was the one checked
    # for timeouts below). Run the normalized list.
    worker_tasks = WorkerTasks(tasks=tasks, func=thread_func)
    worker_tasks.run()
    worker_tasks.wait_for_completion(timeout_sec=timeout_sec)
    # Report every timed-out node before failing the test.
    for task in tasks:
        if task.timeout:
            LOG.error('Timeout for {0}, node {1}'.format(self.__graph_name, task.id))
            self.__graph_status.append('failed')
    if 'failed' in self.__graph_status:
        fail('Failure running {0}'.format(self.__graph_name))
def test_discovery(self):
    """ Wait for discovery finished """
    self.case_recorder.write_event('start all discovery')
    consumer = AMQPWorker(queue=QUEUE_GRAPH_FINISH, \
                          callbacks=[self.handle_discovery_finish])
    self.__task = WorkerThread(consumer, 'benchmark discovery')

    def launch(worker, id):
        # Runner callback: start the AMQP consumer thread.
        worker.start()

    runner = WorkerTasks(tasks=[self.__task], func=launch)
    runner.run()
    # Discovery can be slow; allow up to 20 minutes.
    runner.wait_for_completion(timeout_sec=1200)
    assert_false(self.__task.timeout, \
        message='timeout waiting for task {0}'.format(self.__task.id))
def run_listener(q):
    """Consume AMQP queue `q` indefinitely until the user presses ctrl-c."""
    log = Log(__name__, level='INFO')
    log.info('Run AMQP listener until ctrl-c input\n {0}'.format(q))

    def thread_func(worker, id):
        # Runner callback: start the AMQP worker thread.
        worker.start()

    def signal_handler(signum, stack):
        # ctrl-c: exit cleanly rather than dumping a traceback.
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)
    listener = WorkerThread(AMQPWorker(queue=q), 'amqp')
    runner = WorkerTasks(tasks=[listener], func=thread_func)
    runner.run()
    # Negative timeout: wait forever (until the SIGINT handler exits).
    runner.wait_for_completion(timeout_sec=-1)
def test_nodes_discovery(self):
    """ API 2.0 Testing Graph.Discovery completion """
    # Record when we started waiting, for later duration reporting.
    self.__discovery_duration = datetime.now()
    LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
    consumer = AMQPWorker(queue=QUEUE_GRAPH_FINISH, \
                          callbacks=[self.handle_graph_finish])
    self.__task = WorkerThread(consumer, 'discovery')

    def start(worker, id):
        # Runner callback: start the AMQP consumer thread.
        worker.start()

    runner = WorkerTasks(tasks=[self.__task], func=start)
    runner.run()
    # Allow up to 20 minutes for discovery to complete.
    runner.wait_for_completion(timeout_sec=1200)
    assert_false(self.__task.timeout, \
        message='timeout waiting for task {0}'.format(self.__task.id))
def test_submit_test_event(self):
    """ Testing POST /EventService/SubmitTestEvent """
    # Module-global so the event handler code can reference the thread.
    global task
    httpd = Httpd(port=int(HTTPD_PORT), handler_class=self.EventServiceHandler)
    task = WorkerThread(httpd, 'httpd')
    runner = WorkerTasks(tasks=[task], func=self.__httpd_start)
    runner.run()
    # forward port for services running on a guest host
    tunnel = open_ssh_forward(self.__httpd_port)
    redfish().test_event(body={})
    runner.wait_for_completion(timeout_sec=60)
    tunnel.logout()
    assert_false(task.timeout, message='timeout waiting for task {0}'.format(task.id))
def run_listener(q, timeout_sec=3):
    """Consume AMQP queue `q` until ctrl-c or the given timeout elapses."""
    log = Log(__name__, level='INFO')
    log.info('Run AMQP listener until ctrl-c input\n {0}'.format(q))

    def thread_func(worker, id):
        # Runner callback: start the AMQP worker thread.
        worker.start()

    def signal_handler(signum, stack):
        # ctrl-c: exit cleanly rather than dumping a traceback.
        sys.exit(0)

    signal.signal(signal.SIGINT, signal_handler)
    listener = WorkerThread(AMQPWorker(queue=q), 'amqp')
    runner = WorkerTasks(tasks=[listener], func=thread_func)
    runner.run()
    runner.wait_for_completion(timeout_sec)
def test_nodes_discovery(self):
    """ API 2.0 Testing Graph.Discovery completion """
    # Skip the wait entirely if compute nodes already exist.
    if self.check_compute_count():
        LOG.warning('Nodes already discovered!')
        return
    self.__discovery_duration = datetime.now()
    LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
    consumer = AMQPWorker(queue=QUEUE_GRAPH_FINISH, \
                          callbacks=[self.handle_graph_finish])
    self.__task = WorkerThread(consumer, 'discovery')

    def start(worker, id):
        # Runner callback: start the AMQP consumer thread.
        worker.start()

    runner = WorkerTasks(tasks=[self.__task], func=start)
    runner.run()
    # Allow up to 20 minutes for discovery to complete.
    runner.wait_for_completion(timeout_sec=1200)
    assert_false(self.__task.timeout, \
        message='timeout waiting for task {0}'.format(self.__task.id))
def test_nodes_discovery(self):
    """ API 2.0 Testing Graph.Discovery completion

    Skips the wait when discovery already happened: either the discovered
    compute count matches the configured RACKHD_NODE_COUNT, or (when no
    count is configured) any compute nodes exist at all.
    """
    count = defaults.get('RACKHD_NODE_COUNT', '')
    # IMPROVED: call check_compute_count() once instead of twice — it hits
    # the API, and two calls could also race against ongoing discovery.
    discovered = self.check_compute_count()
    if (count.isdigit() and discovered == int(count)) or discovered:
        LOG.warning('Nodes already discovered!')
        return
    self.__discovery_duration = datetime.now()
    LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
    self.__task = WorkerThread(AMQPWorker(queue=QUEUE_GRAPH_FINISH, \
                                          callbacks=[self.handle_graph_finish]), 'discovery')

    def start(worker, id):
        # Runner callback: start the AMQP consumer thread.
        worker.start()

    tasks = WorkerTasks(tasks=[self.__task], func=start)
    tasks.run()
    # Allow up to 20 minutes for discovery to complete.
    tasks.wait_for_completion(timeout_sec=1200)
    assert_false(self.__task.timeout, \
        message='timeout waiting for task {0}'.format(self.__task.id))
def check_chassis_task(self):
    """ Testing AMQP on.task.ipmi.chassis.result

    Spawns one AMQP listener per compute node and waits for each to see a
    chassis result message.
    """
    Nodes().nodes_get()
    nodes = loads(self.__client.last_response.data)
    self.__tasks = []
    for node in nodes:
        # Renamed from `id`/`type` to avoid shadowing builtins.
        node_id = node.get('id')
        assert_is_not_none(node_id)
        node_type = node.get('type')
        assert_is_not_none(node_type)
        if node_type == 'compute':
            worker = AMQPWorker(queue=QUEUE_CHASSIS_RESULT, \
                                callbacks=[self.__handle_result])
            self.__tasks.append(WorkerThread(worker, node_id))
    tasks = WorkerTasks(tasks=self.__tasks, func=self.__task_thread)
    tasks.run()
    tasks.wait_for_completion()
    # BUG FIX: previously this test never checked the tasks for timeouts,
    # so it passed even when no chassis result arrived. Mirror the check
    # done by check_sdr_task.
    for task in self.__tasks:
        assert_false(task.timeout, message='timeout waiting for task {0}'.format(task.id))
def test_nodes_discovery(self):
    """ Testing Graph.Discovery completion """
    # Nothing to wait for if compute nodes are already present.
    if self.check_compute_count():
        LOG.warning("Nodes already discovered!")
        return
    self.__discovery_duration = datetime.now()
    LOG.info("Wait start time: {0}".format(self.__discovery_duration))
    consumer = AMQPWorker(queue=QUEUE_GRAPH_FINISH, callbacks=[self.handle_graph_finish])
    self.__task = WorkerThread(consumer, "discovery")

    def start(worker, id):
        # Runner callback: start the AMQP consumer thread.
        worker.start()

    runner = WorkerTasks(tasks=[self.__task], func=start)
    runner.run()
    # Allow up to 20 minutes for discovery to complete.
    runner.wait_for_completion(timeout_sec=1200)
    assert_false(self.__task.timeout, message="timeout waiting for task {0}".format(self.__task.id))
def check_sdr_task(self):
    """ Testing AMQP on.task.ipmi.sdr.result

    Spawns one AMQP listener per compute node, waits for each to receive
    an SDR result message, and fails on any listener that times out.
    """
    Nodes().nodes_get()
    nodes = loads(self.__client.last_response.data)
    self.__tasks = []
    for node in nodes:
        # IMPROVED: renamed locals from `id`/`type`, which shadowed builtins.
        node_id = node.get('id')
        assert_is_not_none(node_id)
        node_type = node.get('type')
        assert_is_not_none(node_type)
        if node_type == 'compute':
            worker = AMQPWorker(queue=QUEUE_SDR_RESULT, \
                                callbacks=[self.__handle_result])
            self.__tasks.append(WorkerThread(worker, node_id))
    tasks = WorkerTasks(tasks=self.__tasks, func=self.__task_thread)
    tasks.run()
    tasks.wait_for_completion()
    # Every listener must have seen its SDR result within the deadline.
    for task in self.__tasks:
        assert_false(task.timeout, message='timeout waiting for task {0}'.format(task.id))
def test_nodes_discovery(self):
    # API 2.0 Testing Graph.Discovery completion
    count = defaults.get('RACKHD_NODE_COUNT', '')
    # IMPROVED: call check_compute_count() once instead of twice — it hits
    # the API, and two calls could disagree mid-discovery.
    discovered = self.check_compute_count()
    if (count.isdigit() and discovered == int(count)) or discovered:
        logs.warning('Nodes already discovered!')
        return
    self.__discovery_duration = datetime.now()
    logs.info(' Wait start time: %s', str(self.__discovery_duration))
    self.__task = WorkerThread(AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                          callbacks=[self.handle_graph_finish]), 'discovery')

    def start(worker, id):
        # Runner callback: start the AMQP consumer thread.
        worker.start()

    tasks = WorkerTasks(tasks=[self.__task], func=start)
    tasks.run()
    # Allow up to 20 minutes for discovery to complete.
    tasks.wait_for_completion(timeout_sec=1200)
    # BUG FIX: msg was the tuple ('timeout waiting for task %s', id) — the
    # id was never interpolated into the failure message.
    self.assertFalse(self.__task.timeout,
                     msg='timeout waiting for task %s' % self.__task.id)
def test_submit_test_event(self):  # """ Testing POST /EventService/SubmitTestEvent """
    # Module-global so the event handler code can reference the thread.
    global task
    server = Httpd(port=int(HTTPD_PORT), handler_class=self.EventServiceHandler)
    task = WorkerThread(server, 'httpd')
    worker = WorkerTasks(tasks=[task], func=self.__httpd_start)
    worker.run()
    # forward port for services running on a guest host
    session = open_ssh_forward(self.__httpd_port)
    redfish().test_event(body={})
    worker.wait_for_completion(timeout_sec=60)
    session.logout()
    # Timeout means the httpd listener never received the posted event.
    self.assertFalse(task.timeout, msg='timeout waiting for task {0}'.format(task.id))
    # eventHandlerFailed is set by EventServiceHandler when the received
    # event's subscriptionId/memberId do not match the subscription.
    self.assertFalse(
        self.__class__.eventHandlerFailed,
        msg='Event handler reported subscriptionId / memberId mismatch')
def check_sdr_task(self):
    """ Testing AMQP on.task.ipmi.sdr.result """
    # Fetch the current node list via the API client.
    Nodes().nodes_get()
    node_list = loads(self.__client.last_response.data)
    self.__tasks = []
    for entry in node_list:
        node_id = entry.get('id')
        assert_is_not_none(node_id)
        node_type = entry.get('type')
        assert_is_not_none(node_type)
        if node_type != 'compute':
            continue
        # One SDR-result listener per compute node.
        listener = AMQPWorker(queue=QUEUE_SDR_RESULT, \
                              callbacks=[self.__handle_result])
        self.__tasks.append(WorkerThread(listener, node_id))
    runner = WorkerTasks(tasks=self.__tasks, func=self.__task_thread)
    runner.run()
    runner.wait_for_completion()
    # Every listener must have seen its SDR result within the deadline.
    for finished in self.__tasks:
        assert_false(finished.timeout,
                     message='timeout waiting for task {0}'.format(
                         finished.id))
def clear_queue():
    """Drain the graph.finished AMQP queue using the module-level listener worker."""
    task = WorkerThread(amqp_listner_worker, 'amqp')
    tasks = WorkerTasks(tasks=[task], func=thread_func)
    # BUG FIX: `print'...'` is a Python-2-only print statement; the
    # parenthesized call is valid and prints identically on 2 and 3.
    print('Clearing the graph.finished queue...')
    tasks.run()
    tasks.wait_for_completion(time_to_clear_queue)