Example #1
 def start(data, id):
     log.info('starting amqp listener @' + id)
     amqp = AMQPWorker(amqp_url=amqp_url,
                       queue=QUEUE_GRAPH_FINISH,
                       callbacks=[handler_cb])
     add_amqp_listener(amqp, id)
     amqp.start()
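
Most of the examples on this page follow one pattern: build an AMQPWorker bound to a queue constant, register a callback, wrap the worker in a WorkerThread, and drive it through WorkerTasks with a timeout. The condensed sketch below is not taken from any single example; the import paths, the queue value, and the callback body are assumptions, and only the AMQPWorker/WorkerThread/WorkerTasks calls themselves mirror the usage shown here.

# Condensed sketch of the pattern shared by the examples on this page.
# The module paths and queue value are assumptions, not part of the examples.
from modules.amqp import AMQPWorker                    # assumed location
from modules.worker import WorkerThread, WorkerTasks   # assumed location

QUEUE_GRAPH_FINISH = 'graph.finished'                  # placeholder queue value

def handle_graph_finish(body, message):
    # Real callbacks filter on routing_key and workflow status before acking.
    message.ack()

def start(worker, id):
    worker.start()

worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH, callbacks=[handle_graph_finish])
task = WorkerThread(worker, 'example-listener')
tasks = WorkerTasks(tasks=[task], func=start)
tasks.run()
tasks.wait_for_completion(timeout_sec=300)
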
Example #2
 def test_nodes_discovery(self):
     """ Testing Graph.Discovery completion """
     if self.check_compute_count():
         LOG.warning('Nodes already discovered!')
         return
     self.__discovery_duration = datetime.now()
     LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
     self.__worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                callbacks=[self.handle_graph_finish])
     self.__worker.start()
Example #3
    def post_workflows(self, graph_name,
                       timeout_sec=300, nodes=[], data={},
                       tasks=[], callback=None, run_now=True):
        self.__graph_name = graph_name
        self.__graph_status = []
        if len(nodes) == 0:
            Api().nodes_get_all()
            nodes = loads(self.__client.last_response.data)

        if callback is None:
            callback = self.handle_graph_finish

        for n in nodes:
            if n.get('type') == 'compute':
                id = n.get('id')
                assert_not_equal(id, None)
                LOG.info('starting amqp listener for node {0}'.format(id))
                worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                    callbacks=[callback])
                thread = WorkerThread(worker, id)
                self.__tasks.append(thread)
                tasks.append(thread)
                try:
                    Api().nodes_workflow_action_by_id(id,
                                                      {'command': 'cancel'})
                except ApiException as e:
                    assert_equal(404, e.status, message='status should be 404')
                except (TypeError, ValueError) as e:
                    assert (e.message)
                Api().nodes_post_workflow_by_id(id,
                                                name=self.__graph_name,
                                                body=data)

        if run_now:
            self.run_workflow_tasks(self.__tasks, timeout_sec)
Example #4
 def test_nodes_discovery(self):
     """ API 2.0 Testing Graph.Discovery completion """
     if self.check_compute_count():
         LOG.warning('Nodes already discovered!')
         return
     self.__discovery_duration = datetime.now()
     LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
     self.__worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                callbacks=[self.handle_graph_finish])
     self.__worker.start()
Example #5
    def post_workflows(self,
                       graph_name,
                       timeout_sec=10,
                       nodes=[],
                       data=None,
                       tasks=None,
                       callback=None,
                       run_now=True):
        self.__class__.__graph_name = graph_name
        self.__class__.__graph_status = []

        # clean up the defaults
        tasks = tasks if tasks else []
        data = data if data else {}

        if len(nodes) == 0:
            Api().nodes_get_all()
            nodes = loads(self.__client.last_response.data)

        if callback is None:
            logs.info("handle graph finish")
            callback = self.handle_graph_finish

        for n in nodes:
            if n.get('type') == 'compute':
                logs.debug("node is compute")
                id = n.get('id')
                self.assertIsNotNone(id)
                logs.debug(' Starting amqp listener for node %s', id)
                worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                    callbacks=[callback])
                thread = WorkerThread(worker, id)
                self.__class__.__tasks.append(thread)
                tasks.append(thread)
                try:
                    Api().nodes_workflow_action_by_id(id,
                                                      {'command': 'cancel'})
                except ApiException as e:
                    self.assertEqual(
                        404,
                        e.status,
                        msg='Expected 404 status, received {}'.format(
                            e.status))
                except (TypeError, ValueError) as e:
                    assert (e.message)
                Api().nodes_post_workflow_by_id(
                    id, name=self.__class__.__graph_name, body=data)
                logs.info("Posted workflow %s on node %s",
                          self.__class__.__graph_name, id)

        if run_now:
            logs.info("running workflow tasks....")
            self.run_workflow_tasks(self.__class__.__tasks, timeout_sec)
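
The "# clean up the defaults" block above is what separates this version from Examples #3, #9 and #10: those declare nodes=[], data={} and tasks=[] directly in the signature, so a single list or dict object is shared by every call and the appended WorkerThread objects pile up across tests. A minimal, self-contained illustration of that pitfall and of the None-default idiom used here:

def collect(item, bucket=[]):          # the default list is created once, at def time
    bucket.append(item)
    return bucket

collect('a')                           # ['a']
collect('b')                           # ['a', 'b'] -- the previous call leaks in

def collect_fixed(item, bucket=None):
    bucket = bucket if bucket else []  # same idiom as the example above
    bucket.append(item)
    return bucket
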
Example #6
 def _wait_until_graph_finish(self, graph_name, timevalue):
     self.__graph_name = graph_name
     self.__task = WorkerThread(AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                           callbacks=[self.__handle_graph_finish]),
                                graph_name)
     def start(worker, id):
         worker.start()
     tasks = WorkerTasks(tasks=[self.__task], func=start)
     tasks.run()
     tasks.wait_for_completion(timeout_sec=timevalue)
     assert_false(self.__task.timeout,
                  message='timeout waiting for task {0}'.format(self.__task.id))
Example #7
 def test_nodes_discovery(self):
     """ API 2.0 Testing Graph.Discovery completion """
     if self.check_compute_count():
         LOG.warning('Nodes already discovered!')
         return
     self.__discovery_duration = datetime.now()
     LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
     self.__task = WorkerThread(AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                           callbacks=[self.handle_graph_finish]),
                                'discovery')
     def start(worker, id):
         worker.start()
     tasks = WorkerTasks(tasks=[self.__task], func=start)
     tasks.run()
     tasks.wait_for_completion(timeout_sec=1200)
     assert_false(self.__task.timeout,
                  message='timeout waiting for task {0}'.format(self.__task.id))
Example #8
 def check_chassis_task(self):
     """ Testing AMQP on.task.ipmi.chassis.result """
     Nodes().nodes_get()
     nodes = loads(self.__client.last_response.data)
     self.__tasks = []
     for node in nodes:
         id = node.get('id')
         assert_is_not_none(id)
         type = node.get('type')
         assert_is_not_none(type)
         if type == 'compute':
             worker = AMQPWorker(queue=QUEUE_CHASSIS_RESULT, \
                                 callbacks=[self.__handle_result])
             self.__tasks.append(WorkerThread(worker,id))
     tasks = WorkerTasks(tasks=self.__tasks, func=self.__task_thread)
     tasks.run()
     tasks.wait_for_completion()
Example #9
    def post_unbound_workflow(self, graph_name,
                              timeout_sec=300, data={},
                              tasks=[], callback=None, run_now=True):
        self.__graph_name = graph_name
        self.__graph_status = []

        if callback is None:
            callback = self.handle_graph_finish

        LOG.info('Starting AMQP listener for {0}'.format(self.__graph_name))
        worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH, callbacks=[callback])
        thread = WorkerThread(worker, self.__graph_name)
        self.__tasks.append(thread)
        tasks.append(thread)
        Workflows().workflows_post(graph_name, body=data)
        if run_now:
            self.run_workflow_tasks(self.__tasks, timeout_sec)
Example #10
    def post_workflows(self, graph_name,
                       timeout_sec=300, nodes=[], data={},
                       tasks=[], callback=None, run_now=True):
        self.__graph_name = graph_name
        self.__graph_status = []
        
        if len(nodes) == 0:
            Nodes().nodes_get()
            for n in loads(self.__client.last_response.data):
                if n.get('type') == 'compute':
                    nodes.append(n.get('id'))
        
        if callback is None:
            callback = self.handle_graph_finish
        
        for node in nodes:
            LOG.info('Starting AMQP listener for node {0}'.format(node))
            worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH, callbacks=[callback])
            thread = WorkerThread(worker, node)
            self.__tasks.append(thread)
            tasks.append(thread)
            
            try:
                Nodes().nodes_identifier_workflows_active_delete(node)
            except ApiException as e:
                assert_equal(HTTP_NOT_FOUND, e.status, \
                    message = 'status should be {0}'.format(HTTP_NOT_FOUND))
            except (TypeError, ValueError) as e:
                assert(e.message)

            retries = 5
            Nodes().nodes_identifier_workflows_active_get(node)
            status = self.__client.last_response.status
            while status != HTTP_NO_CONTENT and retries != 0:
                status = self.__client.last_response.status
                LOG.warning('Workflow status for Node {0} (status={1},retries={2})' \
                    .format(node, status, retries))
                time.sleep(1)
                retries -= 1
                Nodes().nodes_identifier_workflows_active_get(node)
            assert_equal(HTTP_NO_CONTENT, status, \
                message = 'status should be {0}'.format(HTTP_NO_CONTENT))
            Nodes().nodes_identifier_workflows_post(node, name=graph_name, body=data)
        if run_now:
            self.run_workflow_tasks(self.__tasks, timeout_sec)
Example #11
    def test_nodes_discovery(self):
        # API 2.0 Testing Graph.Discovery completion
        count = defaults.get('RACKHD_NODE_COUNT', '')
        if (count.isdigit() and self.check_compute_count() == int(count)) or self.check_compute_count():
            logs.warning('Nodes already discovered!')
            return
        self.__discovery_duration = datetime.now()
        logs.info(' Wait start time: %s', str(self.__discovery_duration))
        self.__task = WorkerThread(AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                              callbacks=[self.handle_graph_finish]), 'discovery')

        def start(worker, id):
            worker.start()

        tasks = WorkerTasks(tasks=[self.__task], func=start)
        tasks.run()
        tasks.wait_for_completion(timeout_sec=1200)
        self.assertFalse(self.__task.timeout,
                         msg='timeout waiting for task %s' % self.__task.id)
Example #12
 def check_sdr_task(self):
     """ Testing AMQP on.task.ipmi.sdr.result """
     Nodes().nodes_get()
     nodes = loads(self.__client.last_response.data)
     self.__tasks = []
     for node in nodes:
         id = node.get('id')
         assert_is_not_none(id)
         type = node.get('type')
         assert_is_not_none(type)
         if type == 'compute':
             worker = AMQPWorker(queue=QUEUE_SDR_RESULT, \
                                 callbacks=[self.__handle_result])
             self.__tasks.append(WorkerThread(worker, id))
     tasks = WorkerTasks(tasks=self.__tasks, func=self.__task_thread)
     tasks.run()
     tasks.wait_for_completion()
     for task in self.__tasks:
         assert_false(task.timeout,
                      message='timeout waiting for task {0}'.format(
                          task.id))
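
Examples #8 and #12 reference a __task_thread starter and a __handle_result callback that are not shown on this page. A hypothetical minimal pair matching the call signatures used above might look like the following; the logging and the unconditional ack are assumptions, not code from the original test suite.

 def __task_thread(self, worker, id):
     # WorkerTasks invokes func(worker, thread_id) for each registered task.
     worker.start()

 def __handle_result(self, body, message):
     # AMQPWorker callbacks receive the decoded body plus the kombu message.
     routing_key = message.delivery_info.get('routing_key')
     LOG.info('Received result on {0}'.format(routing_key))
     message.ack()
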
Example #13
class NodesTests(object):

    def __init__(self):
        self.__client_old = config_old.api_client
        self.__client = config.api_client
        self.__worker = None
        self.__discovery_duration = None
        self.__discovered = 0
        self.__test_nodes = [
            {
                'autoDiscover': 'false',
                'name': 'test_switch_node',
                'type': 'switch',
                'snmpSettings': {
                    'host': '1.1.1.1',
                    'community': 'rackhd'
                }
            },
            {
                'autoDiscover': 'false',
                'name': 'test_mgmt_node',
                'type': 'mgmt',
                'snmpSettings': {
                    'host': '1.1.1.1',
                    'community': 'rackhd'
                }
            },
            {
                'autoDiscover': 'false',
                'name': 'test_pdu_node',
                'type': 'pdu',
                'snmpSettings': {
                    'host': '1.1.1.2',
                    'community': 'rackhd'
                }
            },
            {
                'autoDiscover': 'false',
                'name': 'test_enclosure_node',
                'type': 'enclosure'
            },
            {
                'autoDiscover': 'false',
                'name': 'test_compute_node',
                'type': 'compute',
                'obmSettings': [{
                    'config': {'host': '00:01:02:03:04:05', 'password': '******', 'user': '******'},
                    'service': 'ipmi-obm-service'
                }]
            }
        ]
        self.__test_tags = {
            'tags': ['tag1', 'tag2']
        }
        
    def __get_data(self):
        return loads(self.__client.last_response.data)

    def __get_workflow_status(self, id):
        Api().nodes_get_active_workflow_by_id(identifier=id)
        status = self.__client.last_response.status
        if status == 200:
            data = self.__get_data()
            if data:
                status = data.get('_status')
                assert_is_not_none(status)
        return status
        
    def __post_workflow(self, id, graph_name):
        status = self.__get_workflow_status(id)
        if status != 'pending' and status != 'running':
            Api().nodes_post_workflow_by_id(identifier=id, name=graph_name, body={'name': graph_name})
        timeout = 20
        while status != 'pending' and status != 'running' and timeout != 0:
            LOG.warning('Workflow status for Node {0} (status={1},timeout={2})'.format(id,status,timeout))
            status = self.__get_workflow_status(id)
            sleep(1)
            timeout -= 1
        return timeout

    def check_compute_count(self):
        Api().nodes_get_all()
        nodes = self.__get_data()
        count = 0
        for n in nodes:
            type = n.get('type')
            if type == 'compute':
                count += 1
        return count

    @test(groups=['nodes.api2.discovery.test'])
    def test_nodes_discovery(self):
        """ API 2.0 Testing Graph.Discovery completion """
        if self.check_compute_count():
            LOG.warning('Nodes already discovered!')
            return
        self.__discovery_duration = datetime.now()
        LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
        self.__worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                   callbacks=[self.handle_graph_finish])
        self.__worker.start()

    def handle_graph_finish(self, body, message):
        routeId = message.delivery_info.get('routing_key').split('graph.finished.')[1]
        Workflows().workflows_get()  #TODO replace with 2.0 workflow API
        workflows = self.__get_data()
        for w in workflows:
            definition = w['definition']
            injectableName = definition.get('injectableName') 
            if injectableName == 'Graph.SKU.Discovery':
                graphId = w['context'].get('graphId')
                if graphId == routeId:
                    status = body.get('status')
                    if status == 'succeeded':
                        options = definition.get('options')
                        nodeid = options['defaults'].get('nodeId')
                        duration = datetime.now() - self.__discovery_duration
                        LOG.info('{0} - target: {1}, status: {2}, route: {3}, duration: {4}'
                                .format(injectableName,nodeid,status,routeId,duration))
                        self.__discovered += 1
                        message.ack()
                        break
        check = self.check_compute_count()
        if check and check == self.__discovered:
            self.__worker.stop()
            self.__worker = None
            self.__discovered = 0

    @test(groups=['test-nodes-api2'], depends_on_groups=['nodes.api2.discovery.test'])
    def test_nodes(self):
        """ Testing GET:/api/2.0/nodes """
        Api().nodes_get_all()
        nodes = self.__get_data()
        LOG.debug(nodes,json=True)
        assert_not_equal(0, len(nodes), message='Node list was empty!')

    @test(groups=['test-node-id-api2'], depends_on_groups=['test-nodes-api2'])
    def test_node_id(self):
        """ Testing GET:/api/2.0/nodes/:id """
        Api().nodes_get_all()
        nodes = self.__get_data()
        LOG.debug(nodes,json=True)
        codes = []
        for n in nodes:
            LOG.info(n,json=True)
            if n.get('type') == 'compute':
                uuid = n.get('id')
                Api().nodes_get_by_id(identifier=uuid)
                rsp = self.__client.last_response
                codes.append(rsp)
        assert_not_equal(0, len(codes), message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_get_by_id, 'fooey')

    @test(groups=['create-node-api2'], depends_on_groups=['test-node-id-api2'])
    def test_node_create(self):
        """ Testing POST:/api/2.0/nodes/ """
        for n in self.__test_nodes:
            LOG.info('Creating node (name={0})'.format(n.get('name')))
            Api().nodes_post(identifiers=n)
            rsp = self.__client.last_response
            assert_equal(201, rsp.status, message=rsp.reason)

    @test(groups=['test-node-id-obm-api2'], depends_on_groups=['create-node-api2'])
    def test_node_id_obm(self):
        """ Testing GET:/api/2.0/nodes/:id/obm """
        Api().nodes_get_all()
        nodes = self.__get_data()
        LOG.debug(nodes,json=True)
        codes = []
        for n in nodes:
            if n.get('name') == 'test_compute_node':
                uuid = n.get('id')
                Api().nodes_get_obm_by_id(identifier=uuid)
                rsp = self.__client.last_response
                LOG.info('OBM setting for node ID {0} is {1}'.format(uuid, rsp.data))
                codes.append(rsp)

        assert_not_equal(0, len(codes), message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_get_obm_by_id, 'fooey')

    @test(groups=['patch-node-api2'], depends_on_groups=['test-node-id-api2'])
    def test_node_patch(self):
        """ Testing PATCH:/api/2.0/nodes/:id """
        data = {"name": 'fake_name_test'}
        Api().nodes_get_all()
        nodes = self.__get_data()
        codes = []
        for n in nodes:
            if n.get('name') == 'test_compute_node':
                uuid = n.get('id')
                Api().nodes_patch_by_id(identifier=uuid,body=data)
                rsp = self.__client.last_response
                test_nodes = self.__get_data()
                assert_equal(test_nodes.get('name'), 'fake_name_test', 'Oops patch failed')
                codes.append(rsp)
                LOG.info('Restoring name to "test_compute_node"')
                correct_data = {"name": 'test_compute_node'}
                Api().nodes_patch_by_id(identifier=uuid,body=correct_data)
                rsp = self.__client.last_response
                restored_nodes = self.__get_data()
                assert_equal(restored_nodes.get('name'), 'test_compute_node', 'Oops restoring failed')
                codes.append(rsp)
        assert_not_equal(0, len(codes), message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_patch_by_id, 'fooey', data)

    @test(groups=['delete-node-api2'], depends_on_groups=['patch-node-api2'])
    def test_node_delete(self):
        """ Testing DELETE:/api/2.0/nodes/:id """
        codes = []
        test_names = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        test_names = [t.get('name') for t in self.__test_nodes]
        for n in nodes:
            name = n.get('name')
            if name in test_names:
                uuid = n.get('id')
                LOG.info('Deleting node {0} (name={1})'.format(uuid, name))
                Api().nodes_del_by_id(identifier=uuid)
                codes.append(self.__client.last_response)

        assert_not_equal(0, len(codes), message='Delete node list empty!')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_del_by_id, 'fooey')

    @test(groups=['catalog_nodes-api2'], depends_on_groups=['delete-whitelist-node-api2'])
    def test_node_catalogs(self):
        """ Testing GET:/api/2.0/nodes/:id/catalogs """
        resps = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Api().nodes_get_catalog_by_id(identifier=n.get('id'))
                resps.append(self.__get_data())
        for resp in resps:
            assert_not_equal(0, len(resp), message='Node catalog is empty!')
        assert_raises(rest.ApiException, Api().nodes_get_catalog_by_id, 'fooey')

    @test(groups=['catalog_source-api2'], depends_on_groups=['catalog_nodes-api2'])
    def test_node_catalogs_bysource(self):
        """ Testing GET:/api/2.0/nodes/:id/catalogs/source """
        resps = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Api().nodes_get_catalog_source_by_id(identifier=n.get('id'), source='bmc')
                resps.append(self.__client.last_response)
        for resp in resps:
            assert_equal(200,resp.status, message=resp.reason)
        assert_raises(rest.ApiException, Api().nodes_get_catalog_source_by_id, 'fooey','bmc')

    @test(groups=['node_workflows-api2'], depends_on_groups=['catalog_source-api2'])
    def test_node_workflows_get(self):
        """ Testing GET:/api/2.0/nodes/:id/workflows """
        resps = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Api().nodes_get_workflow_by_id(identifier=n.get('id'))
                resps.append(self.__get_data())
        for resp in resps:
            assert_not_equal(0, len(resp), message='No Workflows found for Node')
        assert_raises(rest.ApiException, Api().nodes_get_workflow_by_id, 'fooey')

    @test(groups=['node_post_workflows-api2'], depends_on_groups=['node_workflows-api2'])
    def test_node_workflows_post(self):
        """ Testing POST:/api/2.0/nodes/:id/workflows """
        resps = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                id = n.get('id')
                timeout = self.__post_workflow(id,'Graph.Discovery')
                if timeout > 0:
                    data = self.__get_data()
                resps.append({'data': data, 'id':id})
        for resp in resps:
            assert_not_equal(0, len(resp['data']), 
                message='No Workflows found for Node {0}'.format(resp['id']))
        assert_raises(rest.ApiException, Api().nodes_post_workflow_by_id, 'fooey',name='Graph.Discovery',body={})

    @test(groups=['node_workflows_active-api2'], depends_on_groups=['node_post_workflows-api2'])
    def test_node_workflows_active(self):
        """ Testing GET:/api/2.0/nodes/:id/workflows/active """
        # test_node_workflows_post verifies the same functionality
        self.test_node_workflows_post()
        assert_raises(rest.ApiException, Api().nodes_get_active_workflow_by_id, 'fooey')

    @test(groups=['node_workflows_del_active-api2'], depends_on_groups=['node_workflows_active-api2'])
    def test_node_workflows_del_active(self):
        """ Testing DELETE:/api/2.0/nodes/:id/workflows/active """
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                id = n.get('id')
                timeout = 5
                done = False
                while timeout > 0 and done == False:
                    if 0 == self.__post_workflow(id,'Graph.Discovery'):
                        fail('Timed out waiting for graph to start!')
                    try:
                        Api().nodes_del_active_workflow_by_id(identifier=id)
                        done = True
                    except rest.ApiException as e:
                        if e.status != 404:
                            raise e
                        timeout -= 1
                assert_not_equal(timeout, 0, message='Failed to delete an active workflow')
        assert_raises(rest.ApiException, Api().nodes_del_active_workflow_by_id, 'fooey')

    @test(groups=['node_tags_patch'], depends_on_groups=['node_workflows_del_active-api2'])
    def test_node_tags_patch(self):
        """ Testing PATCH:/api/2.0/nodes/:id/tags """
        codes = []
        Api().nodes_get_all()
        rsp = self.__client.last_response
        nodes = loads(rsp.data)
        codes.append(rsp)
        for n in nodes:
            LOG.info(n, json=True)
            Api().nodes_patch_tag_by_id(identifier=n.get('id'), body=self.__test_tags)
            LOG.info('Creating tag (name={0})'.format(self.__test_tags))
            rsp = self.__client.last_response
            codes.append(rsp)
            LOG.info(n.get('id'))
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_patch_tag_by_id, 'fooey',body=self.__test_tags)

    @test(groups=['node_tags_get'], depends_on_groups=['node_tags_patch'])
    def test_node_tags_get(self):
        """ Testing GET:api/2.0/nodes/:id/tags """
        codes = []
        Api().nodes_get_all()
        rsp = self.__client.last_response
        nodes = loads(rsp.data)
        codes.append(rsp)
        for n in nodes:
            Api().nodes_get_tags_by_id(n.get('id'))
            rsp = self.__client.last_response
            tags = loads(rsp.data)
            codes.append(rsp)
            for t in self.__test_tags.get('tags'):
                assert_true(t in tags, message= "cannot find new tag" )
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_patch_tag_by_id, 'fooey',body=self.__test_tags)


    @test(groups=['node_tags_delete'], depends_on_groups=['node_tags_get'])
    def test_node_tags_del(self):
        """ Testing DELETE:api/2.0/nodes/:id/tags/:tagName """
        codes = []
        Api().nodes_get_all()
        rsp = self.__client.last_response
        nodes = loads(rsp.data)
        codes.append(rsp)
        for n in nodes:
            for t in self.__test_tags.get('tags'):
                Api().nodes_del_tag_by_id(identifier=n.get('id'), tag_name=t)
                rsp = self.__client.last_response
                codes.append(rsp)
            Api().nodes_get_by_id(identifier=n.get('id'))
            rsp = self.__client.last_response
            codes.append(rsp)
            updated_node = loads(rsp.data)
            for t in self.__test_tags.get('tags'):
                assert_true(t not in updated_node.get('tags'), message= "Tag " + t + " was not deleted" )
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_del_tag_by_id, 'fooey',tag_name=['tag'])


    @test(groups=['nodes_tag_masterDelete'], depends_on_groups=['node_tags_delete'])
    def test_node_tags_masterDel(self):
        """ Testing DELETE:api/2.0/nodes/tags/:tagName """
        codes = []
        self.test_node_tags_patch()
        t = 'tag3'
        LOG.info("Check to make sure invalid tag is not deleted")
        Api().nodes_master_del_tag_by_id(tag_name=t)
        rsp = self.__client.last_response
        codes.append(rsp)
        updated_node = loads(rsp.data)
        assert_equal([], updated_node, message= "masterDel API deleted an invalid node ")
        LOG.info("Test to check valid tags are deleted")
        for t in self.__test_tags.get('tags'):
            Api().nodes_master_del_tag_by_id(tag_name=t)
            rsp = self.__client.last_response
            codes.append(rsp)
            updated_node = loads(rsp.data)
            LOG.info("Printing nodes list")
            LOG.info(updated_node)
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
Example #14
class NodesTests(object):
    def __init__(self):
        self.__client = config.api_client
        self.__worker = None
        self.__discovery_duration = None
        self.__discovered = 0
        self.__test_nodes = [{
            'autoDiscover': 'false',
            'name': 'test_switch_node',
            'type': 'switch',
            'snmpSettings': {
                'host': '1.1.1.1',
                'community': 'rackhd'
            }
        }, {
            'autoDiscover': 'false',
            'name': 'test_mgmt_node',
            'type': 'mgmt',
            'snmpSettings': {
                'host': '1.1.1.1',
                'community': 'rackhd'
            }
        }, {
            'autoDiscover': 'false',
            'name': 'test_pdu_node',
            'type': 'pdu',
            'snmpSettings': {
                'host': '1.1.1.2',
                'community': 'rackhd'
            }
        }, {
            'autoDiscover': 'false',
            'name': 'test_enclosure_node',
            'type': 'enclosure'
        }, {
            'autoDiscover': 'false',
            'name': 'test_compute_node',
            'type': 'compute',
            'obmSettings': [{
                'config': {
                    'host': '00:01:02:03:04:05',
                    'password': '******',
                    'user': '******'
                },
                'service': 'ipmi-obm-service'
            }]
        }]

    def __get_data(self):
        return loads(self.__client.last_response.data)

    def __get_workflow_status(self, id):
        Nodes().nodes_identifier_workflows_active_get(id)
        status = self.__client.last_response.status
        if status == 200:
            data = self.__get_data()
            status = data.get('_status')
            assert_is_not_none(status)
        return status

    def __post_workflow(self, id, graph_name, data):
        status = self.__get_workflow_status(id)
        if status != 'pending' and status != 'running':
            Nodes().nodes_identifier_workflows_post(id, graph_name, body=data)
        timeout = 20
        while status != 'pending' and status != 'running' and timeout != 0:
            LOG.warning(
                'Workflow status for Node {0} (status={1},timeout={2})'.format(
                    id, status, timeout))
            status = self.__get_workflow_status(id)
            sleep(1)
            timeout -= 1
        return timeout

    def check_compute_count(self):
        Nodes().nodes_get()
        nodes = self.__get_data()
        count = 0
        for n in nodes:
            type = n.get('type')
            if type == 'compute':
                count += 1
        return count

    @test(groups=['nodes.discovery.test'])
    def test_nodes_discovery(self):
        """ Testing Graph.Discovery completion """
        if self.check_compute_count():
            LOG.warning('Nodes already discovered!')
            return
        self.__discovery_duration = datetime.now()
        LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
        self.__worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                   callbacks=[self.handle_graph_finish])
        self.__worker.start()

    def handle_graph_finish(self, body, message):
        routeId = message.delivery_info.get('routing_key').split(
            'graph.finished.')[1]
        Workflows().workflows_get()
        workflows = self.__get_data()
        for w in workflows:
            definition = w['definition']
            injectableName = definition.get('injectableName')
            if injectableName == 'Graph.SKU.Discovery':
                graphId = w['context'].get('graphId')
                if graphId == routeId:
                    status = body.get('status')
                    if status == 'succeeded':
                        options = definition.get('options')
                        nodeid = options['defaults'].get('nodeId')
                        duration = datetime.now() - self.__discovery_duration
                        LOG.info(
                            '{0} - target: {1}, status: {2}, route: {3}, duration: {4}'
                            .format(injectableName, nodeid, status, routeId,
                                    duration))
                        self.__discovered += 1
                        message.ack()
                        break
        check = self.check_compute_count()
        if check and check == self.__discovered:
            self.__worker.stop()
            self.__worker = None
            self.__discovered = 0

    @test(groups=['test-nodes'], depends_on_groups=['nodes.discovery.test'])
    def test_nodes(self):
        """ Testing GET:/nodes """
        Nodes().nodes_get()
        nodes = self.__get_data()
        LOG.debug(nodes, json=True)
        assert_not_equal(0, len(nodes), message='Node list was empty!')

    @test(groups=['test-node-id'], depends_on_groups=['test-nodes'])
    def test_node_id(self):
        """ Testing GET:/nodes/:id """
        Nodes().nodes_get()
        nodes = self.__get_data()
        LOG.debug(nodes, json=True)
        codes = []
        for n in nodes:
            LOG.info(n)
            if n.get('type') == 'compute':
                uuid = n.get('id')
                Nodes().nodes_identifier_get(uuid)
                rsp = self.__client.last_response
                codes.append(rsp)
        assert_not_equal(0,
                         len(codes),
                         message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Nodes().nodes_identifier_get, 'fooey')

    @test(groups=['create-node'], depends_on_groups=['test-node-id'])
    def test_node_create(self):
        """ Verify POST:/nodes/ """
        for n in self.__test_nodes:
            LOG.info('Creating node (name={0})'.format(n.get('name')))
            Nodes().nodes_post(n)
            rsp = self.__client.last_response
            assert_equal(201, rsp.status, message=rsp.reason)

    @test(groups=['test-node-id-obm'], depends_on_groups=['create-node'])
    def test_node_id_obm(self):
        """ Testing GET:/nodes/:id/obm """
        Nodes().nodes_get()
        nodes = self.__get_data()
        LOG.debug(nodes, json=True)
        codes = []
        for n in nodes:
            if n.get('name') == 'test_compute_node':
                uuid = n.get('id')
                Nodes().nodes_identifier_obm_get(uuid)
                rsp = self.__client.last_response
                LOG.info('OBM setting for node ID {0} is {1}'.format(
                    uuid, rsp.data))
                codes.append(rsp)

        assert_not_equal(0,
                         len(codes),
                         message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_obm_get, 'fooey')

    @test(groups=['patch-node'], depends_on_groups=['test-node-id-obm'])
    def test_node_patch(self):
        """ Verify PATCH:/nodes/:id """
        data = {"name": 'fake_name_test'}
        Nodes().nodes_get()
        nodes = self.__get_data()
        codes = []
        for n in nodes:
            if n.get('name') == 'test_compute_node':
                uuid = n.get('id')
                Nodes().nodes_identifier_patch(uuid, data)
                rsp = self.__client.last_response
                test_nodes = self.__get_data()
                assert_equal(test_nodes.get('name'), 'fake_name_test',
                             'Oops patch failed')
                codes.append(rsp)
                LOG.info('Restoring name to "test_compute_node"')
                correct_data = {"name": 'test_compute_node'}
                Nodes().nodes_identifier_patch(uuid, correct_data)
                rsp = self.__client.last_response
                restored_nodes = self.__get_data()
                assert_equal(restored_nodes.get('name'), 'test_compute_node',
                             'Oops restoring failed')
                codes.append(rsp)
        assert_not_equal(0,
                         len(codes),
                         message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_patch, 'fooey', data)

    @test(groups=['delete-node'], depends_on_groups=['patch-node'])
    def test_node_delete(self):
        """ Testing DELETE:/nodes/:id """
        codes = []
        test_names = []
        Nodes().nodes_get()
        nodes = self.__get_data()
        test_names = [t.get('name') for t in self.__test_nodes]
        for n in nodes:
            name = n.get('name')
            if name in test_names:
                uuid = n.get('id')
                LOG.info('Deleting node {0} (name={1})'.format(uuid, name))
                Nodes().nodes_identifier_delete(uuid)
                codes.append(self.__client.last_response)

        assert_not_equal(0, len(codes), message='Delete node list empty!')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_delete, 'fooey')

    @test(groups=['create-whitelist-node'], depends_on_groups=['delete-node'])
    def test_whitelist_node_create(self):
        """ Verify POST:/nodes/:mac/dhcp/whitelist """
        Nodes().nodes_get()
        nodes = self.__get_data()
        macList = []
        for n in nodes:
            type = n.get('type')
            assert_is_not_none(type)
            if type == 'compute':
                idList = n.get('identifiers')
                assert_is_not_none(idList)
                if len(idList) > 0:
                    macList.append(idList[0])  # grab the first mac

        for addr in macList:
            LOG.info('whitelisting MAC address {0}'.format(addr))
            Nodes().nodes_macaddress_dhcp_whitelist_post(addr, body={})
            data = self.__get_data()
            assert_not_equal(0, len(data))
            addrParsed = data[0].replace("-", ":")
            LOG.info(addrParsed)
            LOG.info(addr)

    @test(groups=['delete-whitelist-node'],
          depends_on_groups=['create-whitelist-node'])
    def test_whitelist_node_delete(self):
        """ Verify Delete:/nodes/:mac/dhcp/whitelist """
        Nodes().nodes_get()
        nodes = self.__get_data()
        for n in nodes:
            for i in n:
                if i == 'identifiers':
                    if len(n[i]) > 0:
                        macaddress = n[i]

        macaddress_to_delete = macaddress[-1]
        LOG.info('Deleting macaddress {0}'.format(macaddress_to_delete))
        Nodes().nodes_macaddress_dhcp_whitelist_delete(macaddress_to_delete)
        rsp = self.__client.last_response
        assert_equal(204, rsp.status, message=rsp.reason)

    @test(groups=['catalog_nodes'],
          depends_on_groups=['delete-whitelist-node'])
    def test_node_catalogs(self):
        """ Testing GET id:/catalogs """
        resps = []
        Nodes().nodes_get()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Nodes().nodes_identifier_catalogs_get(n.get('id'))
                resps.append(self.__get_data())
        for resp in resps:
            assert_not_equal(0, len(resp), message='Node catalog is empty!')
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_catalogs_get, 'fooey')

    @test(groups=['catalog_source'], depends_on_groups=['catalog_nodes'])
    def test_node_catalogs_bysource(self):
        """ Testing GET id:/catalogs/source """
        resps = []
        Nodes().nodes_get()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Nodes().nodes_identifier_catalogs_source_get(
                    n.get('id'), 'bmc')
                resps.append(self.__client.last_response)
        for resp in resps:
            assert_equal(200, resp.status, message=resp.reason)
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_catalogs_source_get, 'fooey',
                      'bmc')

    @test(groups=['node_workflows'], depends_on_groups=['catalog_source'])
    def test_node_workflows_get(self):
        """Testing node GET:id/workflows"""
        resps = []
        Nodes().nodes_get()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Nodes().nodes_identifier_workflows_get(n.get('id'))
                resps.append(self.__get_data())
        for resp in resps:
            assert_not_equal(0,
                             len(resp),
                             message='No Workflows found for Node')
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_workflows_get, 'fooey')

    @test(groups=['node_post_workflows'], depends_on_groups=['node_workflows'])
    def test_node_workflows_post(self):
        """Testing node POST:id/workflows"""
        resps = []
        Nodes().nodes_get()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                id = n.get('id')
                timeout = self.__post_workflow(id, 'Graph.Discovery', {})
                if timeout > 0:
                    data = self.__get_data()
                resps.append({'data': data, 'id': id})
        for resp in resps:
            assert_not_equal(0,
                             len(resp['data']),
                             message='No Workflows found for Node {0}'.format(
                                 resp['id']))
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_workflows_post,
                      'fooey',
                      'Graph.Discovery',
                      body={})

    @test(groups=['node_workflows_active'],
          depends_on_groups=['node_post_workflows'])
    def test_node_workflows_active(self):
        """Testing node GET:id/workflows/active"""
        # test_node_workflows_post verifies the same functionality
        self.test_node_workflows_post()
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_workflows_active_get, 'fooey')

    @test(groups=['node_workflows_del_active'],
          depends_on_groups=['node_workflows_active'])
    def test_node_workflows_del_active(self):
        """Testing node DELETE:id/workflows/active"""
        Nodes().nodes_get()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                id = n.get('id')
                timeout = 5
                done = False
                while timeout > 0 and done == False:
                    if 0 == self.__post_workflow(id, 'Graph.Discovery', {}):
                        fail('Timed out waiting for graph to start!')
                    try:
                        Nodes().nodes_identifier_workflows_active_delete(id)
                        done = True
                    except rest.ApiException as e:
                        if e.status != 404:
                            raise e
                        timeout -= 1
        assert_raises(rest.ApiException,
                      Nodes().nodes_identifier_workflows_active_delete,
                      'fooey')
Example #15
class NodesTests(object):
    def __init__(self):
        self.__client_old = config_old.api_client
        self.__client = config.api_client
        self.__worker = None
        self.__discovery_duration = None
        self.__discovered = 0
        self.__test_nodes = [{
            'autoDiscover': 'false',
            'name': 'test_switch_node',
            'type': 'switch',
            'snmpSettings': {
                'host': '1.1.1.1',
                'community': 'rackhd'
            }
        }, {
            'autoDiscover': 'false',
            'name': 'test_mgmt_node',
            'type': 'mgmt',
            'snmpSettings': {
                'host': '1.1.1.1',
                'community': 'rackhd'
            }
        }, {
            'autoDiscover': 'false',
            'name': 'test_pdu_node',
            'type': 'pdu',
            'snmpSettings': {
                'host': '1.1.1.2',
                'community': 'rackhd'
            }
        }, {
            'autoDiscover': 'false',
            'name': 'test_enclosure_node',
            'type': 'enclosure'
        }, {
            'autoDiscover': 'false',
            'name': 'test_compute_node',
            'type': 'compute',
            'obmSettings': [{
                'config': {
                    'host': '00:01:02:03:04:05',
                    'password': '******',
                    'user': '******'
                },
                'service': 'ipmi-obm-service'
            }]
        }]
        self.__test_tags = {'tags': ['tag1', 'tag2']}

    def __get_data(self):
        return loads(self.__client.last_response.data)

    def __get_workflow_status(self, id):
        Api().nodes_get_active_workflow_by_id(identifier=id)
        status = self.__client.last_response.status
        if status == 200:
            data = self.__get_data()
            if data:
                status = data.get('_status')
                assert_is_not_none(status)
        return status

    def __post_workflow(self, id, graph_name):
        status = self.__get_workflow_status(id)
        if status != 'pending' and status != 'running':
            Api().nodes_post_workflow_by_id(identifier=id,
                                            name=graph_name,
                                            body={'name': graph_name})
        timeout = 20
        while status != 'pending' and status != 'running' and timeout != 0:
            LOG.warning(
                'Workflow status for Node {0} (status={1},timeout={2})'.format(
                    id, status, timeout))
            status = self.__get_workflow_status(id)
            sleep(1)
            timeout -= 1
        return timeout

    def check_compute_count(self):
        Api().nodes_get_all()
        nodes = self.__get_data()
        count = 0
        for n in nodes:
            type = n.get('type')
            if type == 'compute':
                count += 1
        return count

    @test(groups=['nodes.api2.discovery.test'])
    def test_nodes_discovery(self):
        """ API 2.0 Testing Graph.Discovery completion """
        if self.check_compute_count():
            LOG.warning('Nodes already discovered!')
            return
        self.__discovery_duration = datetime.now()
        LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
        self.__worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH,
                                   callbacks=[self.handle_graph_finish])
        self.__worker.start()

    def handle_graph_finish(self, body, message):
        routeId = message.delivery_info.get('routing_key').split(
            'graph.finished.')[1]
        Workflows().workflows_get()  #TODO replace with 2.0 workflow API
        workflows = self.__get_data()
        for w in workflows:
            definition = w['definition']
            injectableName = definition.get('injectableName')
            if injectableName == 'Graph.SKU.Discovery':
                graphId = w['context'].get('graphId')
                if graphId == routeId:
                    status = body.get('status')
                    if status == 'succeeded':
                        options = definition.get('options')
                        nodeid = options['defaults'].get('nodeId')
                        duration = datetime.now() - self.__discovery_duration
                        LOG.info(
                            '{0} - target: {1}, status: {2}, route: {3}, duration: {4}'
                            .format(injectableName, nodeid, status, routeId,
                                    duration))
                        self.__discovered += 1
                        message.ack()
                        break
        check = self.check_compute_count()
        if check and check == self.__discovered:
            self.__worker.stop()
            self.__worker = None
            self.__discovered = 0

    @test(groups=['test-nodes-api2'],
          depends_on_groups=['nodes.api2.discovery.test'])
    def test_nodes(self):
        """ Testing GET:/api/2.0/nodes """
        Api().nodes_get_all()
        nodes = self.__get_data()
        LOG.debug(nodes, json=True)
        assert_not_equal(0, len(nodes), message='Node list was empty!')

    @test(groups=['test-node-id-api2'], depends_on_groups=['test-nodes-api2'])
    def test_node_id(self):
        """ Testing GET:/api/2.0/nodes/:id """
        Api().nodes_get_all()
        nodes = self.__get_data()
        LOG.debug(nodes, json=True)
        codes = []
        for n in nodes:
            LOG.info(n, json=True)
            if n.get('type') == 'compute':
                uuid = n.get('id')
                Api().nodes_get_by_id(identifier=uuid)
                rsp = self.__client.last_response
                codes.append(rsp)
        assert_not_equal(0,
                         len(codes),
                         message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_get_by_id, 'fooey')

    @test(groups=['create-node-api2'], depends_on_groups=['test-node-id-api2'])
    def test_node_create(self):
        """ Testing POST:/api/2.0/nodes/ """
        for n in self.__test_nodes:
            LOG.info('Creating node (name={0})'.format(n.get('name')))
            Api().nodes_post(identifiers=n)
            rsp = self.__client.last_response
            assert_equal(201, rsp.status, message=rsp.reason)

    @test(groups=['test-node-id-obm-api2'],
          depends_on_groups=['create-node-api2'])
    def test_node_id_obm(self):
        """ Testing GET:/api/2.0/nodes/:id/obm """
        Api().nodes_get_all()
        nodes = self.__get_data()
        LOG.debug(nodes, json=True)
        codes = []
        for n in nodes:
            if n.get('name') == 'test_compute_node':
                uuid = n.get('id')
                Api().nodes_get_obm_by_id(identifier=uuid)
                rsp = self.__client.last_response
                LOG.info('OBM setting for node ID {0} is {1}'.format(
                    uuid, rsp.data))
                codes.append(rsp)

        assert_not_equal(0,
                         len(codes),
                         message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_get_obm_by_id, 'fooey')

    @test(groups=['patch-node-api2'], depends_on_groups=['test-node-id-api2'])
    def test_node_patch(self):
        """ Testing PATCH:/api/2.0/nodes/:id """
        data = {"name": 'fake_name_test'}
        Api().nodes_get_all()
        nodes = self.__get_data()
        codes = []
        for n in nodes:
            if n.get('name') == 'test_compute_node':
                uuid = n.get('id')
                Api().nodes_patch_by_id(identifier=uuid, body=data)
                rsp = self.__client.last_response
                test_nodes = self.__get_data()
                assert_equal(test_nodes.get('name'), 'fake_name_test',
                             'Oops patch failed')
                codes.append(rsp)
                LOG.info('Restoring name to "test_compute_node"')
                correct_data = {"name": 'test_compute_node'}
                Api().nodes_patch_by_id(identifier=uuid, body=correct_data)
                rsp = self.__client.last_response
                restored_nodes = self.__get_data()
                assert_equal(restored_nodes.get('name'), 'test_compute_node',
                             'Oops restoring failed')
                codes.append(rsp)
        assert_not_equal(0,
                         len(codes),
                         message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException,
                      Api().nodes_patch_by_id, 'fooey', data)

    @test(groups=['delete-node-api2'], depends_on_groups=['patch-node-api2'])
    def test_node_delete(self):
        """ Testing DELETE:/api/2.0/nodes/:id """
        codes = []
        test_names = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        test_names = [t.get('name') for t in self.__test_nodes]
        for n in nodes:
            name = n.get('name')
            if name in test_names:
                uuid = n.get('id')
                LOG.info('Deleting node {0} (name={1})'.format(uuid, name))
                Api().nodes_del_by_id(identifier=uuid)
                codes.append(self.__client.last_response)

        assert_not_equal(0, len(codes), message='Delete node list empty!')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Api().nodes_del_by_id, 'fooey')

    @test(groups=['catalog_nodes-api2'],
          depends_on_groups=['delete-whitelist-node-api2'])
    def test_node_catalogs(self):
        """ Testing GET:/api/2.0/nodes/:id/catalogs """
        resps = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Api().nodes_get_catalog_by_id(identifier=n.get('id'))
                resps.append(self.__get_data())
        for resp in resps:
            assert_not_equal(0, len(resp), message='Node catalog is empty!')
        assert_raises(rest.ApiException,
                      Api().nodes_get_catalog_by_id, 'fooey')

    @test(groups=['catalog_source-api2'],
          depends_on_groups=['catalog_nodes-api2'])
    def test_node_catalogs_bysource(self):
        """ Testing GET:/api/2.0/nodes/:id/catalogs/source """
        resps = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Api().nodes_get_catalog_source_by_id(identifier=n.get('id'),
                                                     source='bmc')
                resps.append(self.__client.last_response)
        for resp in resps:
            assert_equal(200, resp.status, message=resp.reason)
        assert_raises(rest.ApiException,
                      Api().nodes_get_catalog_source_by_id, 'fooey', 'bmc')

    @test(groups=['node_workflows-api2'],
          depends_on_groups=['catalog_source-api2'])
    def test_node_workflows_get(self):
        """ Testing GET:/api/2.0/nodes/:id/workflows """
        resps = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                Api().nodes_get_workflow_by_id(identifier=n.get('id'))
                resps.append(self.__get_data())
        for resp in resps:
            assert_not_equal(0,
                             len(resp),
                             message='No Workflows found for Node')
        assert_raises(rest.ApiException,
                      Api().nodes_get_workflow_by_id, 'fooey')

    @test(groups=['node_post_workflows-api2'],
          depends_on_groups=['node_workflows-api2'])
    def test_node_workflows_post(self):
        """ Testing POST:/api/2.0/nodes/:id/workflows """
        resps = []
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                id = n.get('id')
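                # __post_workflow (defined earlier in this suite) is assumed to
                # return the remaining timeout, so 0 means the workflow never
                # went active before the timeout expired.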
                data = []
                timeout = self.__post_workflow(id, 'Graph.Discovery')
                if timeout > 0:
                    data = self.__get_data()
                resps.append({'data': data, 'id': id})
        for resp in resps:
            assert_not_equal(0,
                             len(resp['data']),
                             message='No Workflows found for Node {0}'.format(
                                 resp['id']))
        assert_raises(rest.ApiException,
                      Api().nodes_post_workflow_by_id,
                      'fooey',
                      name='Graph.Discovery',
                      body={})

    @test(groups=['node_workflows_active-api2'],
          depends_on_groups=['node_post_workflows-api2'])
    def test_node_workflows_active(self):
        """ Testing GET:/api/2.0/nodes/:id/workflows/active """
        # test_node_workflows_post verifies the same functionality
        self.test_node_workflows_post()
        assert_raises(rest.ApiException,
                      Api().nodes_get_active_workflow_by_id, 'fooey')

    @test(groups=['node_workflows_del_active-api2'],
          depends_on_groups=['node_workflows_active-api2'])
    def test_node_workflows_del_active(self):
        """ Testing DELETE:/api/2.0/nodes/:id/workflows/active """
        Api().nodes_get_all()
        nodes = self.__get_data()
        for n in nodes:
            if n.get('type') == 'compute':
                id = n.get('id')
                timeout = 5
                done = False
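                # The workflow just posted may not be active yet, so retry the
                # delete on 404 until it succeeds or the retry budget runs out.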
                while timeout > 0 and not done:
                    if 0 == self.__post_workflow(id, 'Graph.Discovery'):
                        fail('Timed out waiting for graph to start!')
                    try:
                        Api().nodes_del_active_workflow_by_id(identifier=id)
                        done = True
                    except rest.ApiException as e:
                        if e.status != 404:
                            raise e
                        timeout -= 1
                assert_not_equal(timeout,
                                 0,
                                 message='Failed to delete an active workflow')
        assert_raises(rest.ApiException,
                      Api().nodes_del_active_workflow_by_id, 'fooey')

    @test(groups=['node_tags_patch'],
          depends_on_groups=['node_workflows_del_active-api2'])
    def test_node_tags_patch(self):
        """ Testing PATCH:/api/2.0/nodes/:id/tags """
        codes = []
        Api().nodes_get_all()
        rsp = self.__client.last_response
        nodes = loads(rsp.data)
        codes.append(rsp)
        for n in nodes:
            LOG.info(n, json=True)
            Api().nodes_patch_tag_by_id(identifier=n.get('id'),
                                        body=self.__test_tags)
            LOG.info('Creating tag (name={0})'.format(self.__test_tags))
            rsp = self.__client.last_response
            codes.append(rsp)
            LOG.info(n.get('id'))
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException,
                      Api().nodes_patch_tag_by_id,
                      'fooey',
                      body=self.__test_tags)

    @test(groups=['node_tags_get'], depends_on_groups=['node_tags_patch'])
    def test_node_tags_get(self):
        """ Testing GET:api/2.0/nodes/:id/tags """
        codes = []
        Api().nodes_get_all()
        rsp = self.__client.last_response
        nodes = loads(rsp.data)
        codes.append(rsp)
        for n in nodes:
            Api().nodes_get_tags_by_id(n.get('id'))
            rsp = self.__client.last_response
            tags = loads(rsp.data)
            codes.append(rsp)
            for t in self.__test_tags.get('tags'):
                assert_true(t in tags, message="cannot find new tag")
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException,
                      Api().nodes_get_tags_by_id, 'fooey')

    @test(groups=['node_tags_delete'], depends_on_groups=['node_tags_get'])
    def test_node_tags_del(self):
        """ Testing DELETE:api/2.0/nodes/:id/tags/:tagName """
        codes = []
        Api().nodes_get_all()
        rsp = self.__client.last_response
        nodes = loads(rsp.data)
        codes.append(rsp)
        for n in nodes:
            for t in self.__test_tags.get('tags'):
                Api().nodes_del_tag_by_id(identifier=n.get('id'), tag_name=t)
                rsp = self.__client.last_response
                codes.append(rsp)
            Api().nodes_get_by_id(identifier=n.get('id'))
            rsp = self.__client.last_response
            codes.append(rsp)
            updated_node = loads(rsp.data)
            for t in self.__test_tags.get('tags'):
                assert_true(t not in updated_node.get('tags'),
                            message="Tag " + t + " was not deleted")
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException,
                      Api().nodes_del_tag_by_id,
                      'fooey',
                      tag_name=['tag'])

    @test(groups=['nodes_tag_masterDelete'],
          depends_on_groups=['node_tags_delete'])
    def test_node_tags_masterDel(self):
        """ Testing DELETE:api/2.0/nodes/tags/:tagName """
        codes = []
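        # Re-apply the test tags (removed by the preceding delete test), then
        # verify that deleting a tag that was never applied affects no nodes.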
        self.test_node_tags_patch()
        t = 'tag3'
        LOG.info("Check to make sure invalid tag is not deleted")
        Api().nodes_master_del_tag_by_id(tag_name=t)
        rsp = self.__client.last_response
        codes.append(rsp)
        updated_node = loads(rsp.data)
        assert_equal([],
                     updated_node,
                     message="masterDel API deleted an invalid node ")
        LOG.info("Test to check valid tags are deleted")
        for t in self.__test_tags.get('tags'):
            Api().nodes_master_del_tag_by_id(tag_name=t)
            rsp = self.__client.last_response
            codes.append(rsp)
            updated_node = loads(rsp.data)
            LOG.info("Printing nodes list")
            LOG.info(updated_node)
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
Exemple #16
0
class NodesTests(object):

    def __init__(self):
        self.__client = config.api_client
        self.__worker = None
        self.__discovery_duration = None
        self.__discovered = 0
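        # Static node definitions used by the create/patch/delete tests below;
        # hosts, credentials and SNMP communities are placeholder values.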
        self.__test_nodes = [
            {
                'autoDiscover': 'false',
                'name': 'test_switch_node',
                'type': 'switch',
                'snmpSettings': {
                    'host': '1.1.1.1',
                    'community': 'rackhd'
                }
            },
            {
                'autoDiscover': 'false',
                'name': 'test_mgmt_node',
                'type': 'mgmt',
                'snmpSettings': {
                    'host': '1.1.1.1',
                    'community': 'rackhd'
                }
            },
            {
                'autoDiscover': 'false',
                'name': 'test_pdu_node',
                'type': 'pdu',
                'snmpSettings': {
                    'host': '1.1.1.2',
                    'community': 'rackhd'
                }
            },
            {
                'autoDiscover': 'false',
                'name': 'test_enclosure_node',
                'type': 'enclosure'
            },
            {
                'autoDiscover': 'false',
                'name': 'test_compute_node',
                'type': 'compute',
                'obmSettings': [{
                    'config': {'host': '00:01:02:03:04:05', 'password': '******', 'user': '******'},
                    'service': 'ipmi-obm-service'
                }]
            }
        ]

    def check_compute_count(self):
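        """ Return the number of discovered compute nodes """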
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        count = 0
        for n in nodes:
            if n.get('type') == 'compute':
                count += 1
        return count

    @test(groups=['nodes.discovery.test'])
    def test_nodes_discovery(self):
        """ Testing Graph.Discovery completion """
        if self.check_compute_count():
            LOG.warning('Nodes already discovered!')
            return
        self.__discovery_duration = datetime.now()
        LOG.info('Wait start time: {0}'.format(self.__discovery_duration))
        self.__worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH,callbacks=[self.handle_graph_finish])
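        # start() is expected to block until handle_graph_finish stops the
        # worker, i.e. once every compute node has finished discovery.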
        self.__worker.start()

    def handle_graph_finish(self,body,message):
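        # AMQP callback for graph.finished.* events: the graph id rides in the
        # routing key, so match it against known Graph.SKU.Discovery workflows
        # to decide whether a discovery run for some node just completed.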
        routeId = message.delivery_info.get('routing_key').split('graph.finished.')[1]
        Workflows().workflows_get()
        workflows = loads(self.__client.last_response.data)
        for w in workflows:
            definition = w['definition']
            injectableName = definition.get('injectableName') 
            if injectableName == 'Graph.SKU.Discovery':
                graphId = w['context'].get('graphId')
                if graphId == routeId:
                    status = body.get('status')
                    if status == 'succeeded':
                        options = definition.get('options')
                        nodeid = options['defaults'].get('nodeId')
                        duration = datetime.now() - self.__discovery_duration
                        LOG.info('{0} - target: {1}, status: {2}, route: {3}, duration: {4}'
                                .format(injectableName,nodeid,status,routeId,duration))
                        self.__discovered += 1
                        message.ack()
                        break
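        # Stop listening once every known compute node has reported a finished
        # discovery graph.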
        check = self.check_compute_count()
        if check and check == self.__discovered:
            self.__worker.stop()
            self.__worker = None
            self.__discovered = 0

    @test(groups=['test-nodes'], depends_on_groups=['nodes.discovery.test'])
    def test_nodes(self):
        """ Testing GET:/nodes """
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        LOG.debug(nodes,json=True)
        assert_not_equal(0, len(nodes), message='Node list was empty!')

    @test(groups=['test-node-id'], depends_on_groups=['test-nodes'])
    def test_node_id(self):
        """ Testing GET:/nodes/:id """
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        LOG.debug(nodes,json=True)
        codes = []
        for n in nodes:
            LOG.info(n)
            if n.get('type') == 'compute':
                uuid = n.get('id')
                Nodes().nodes_identifier_get(uuid)
                rsp = self.__client.last_response
                codes.append(rsp)
        assert_not_equal(0, len(codes), message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Nodes().nodes_identifier_get, 'fooey')

    @test(groups=['create-node'], depends_on_groups=['test-node-id'])
    def test_node_create(self):
        """ Verify POST:/nodes/ """
        for n in self.__test_nodes:
            LOG.info('Creating node (name={0})'.format(n.get('name')))
            Nodes().nodes_post(n)
            rsp = self.__client.last_response
            assert_equal(201, rsp.status, message=rsp.reason)

    @test(groups=['test-node-id-obm'], depends_on_groups=['create-node'])
    def test_node_id_obm(self):
        """ Testing GET:/nodes/:id/obm """
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        LOG.debug(nodes,json=True)
        codes = []
        for n in nodes:
            if n.get('name') == 'test_compute_node':
                uuid = n.get('id')
                Nodes().nodes_identifier_obm_get(uuid)
                rsp = self.__client.last_response
                LOG.info('OBM setting for node ID {0} is {1}'.format(uuid, rsp.data))
                codes.append(rsp)

        assert_not_equal(0, len(codes), message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Nodes().nodes_identifier_obm_get, 'fooey')

    @test(groups=['patch-node'], depends_on_groups=['test-node-id-obm'])
    def test_node_patch(self):
        """ Verify PATCH:/nodes/:id """
        data = {"name": 'fake_name_test'}
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        codes = []
        for n in nodes:
            if n.get('name') == 'test_compute_node':
                uuid = n.get('id')
                Nodes().nodes_identifier_patch(uuid, data)
                rsp = self.__client.last_response
                test_nodes = loads(self.__client.last_response.data)
                assert_equal(test_nodes.get('name'), 'fake_name_test', 'Oops patch failed')
                codes.append(rsp)
                LOG.info('Restoring name to "test_compute_node"')
                correct_data = {"name": 'test_compute_node'}
                Nodes().nodes_identifier_patch(uuid, correct_data)
                rsp = self.__client.last_response
                restored_nodes = loads(self.__client.last_response.data)
                assert_equal(restored_nodes.get('name'), 'test_compute_node', 'Oops restoring failed')
                codes.append(rsp)
        assert_not_equal(0, len(codes), message='Failed to find compute node Ids')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Nodes().nodes_identifier_patch, 'fooey', data)

    @test(groups=['delete-node'], depends_on_groups=['patch-node'])
    def test_node_delete(self):
        """ Testing DELETE:/nodes/:id """
        codes = []
        test_names = []
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        test_names = [t.get('name') for t in self.__test_nodes]
        for n in nodes:
            name = n.get('name')
            if name in test_names:
                uuid = n.get('id')
                LOG.info('Deleting node {0} (name={1})'.format(uuid, name))
                Nodes().nodes_identifier_delete(uuid)
                codes.append(self.__client.last_response)

        assert_not_equal(0, len(codes), message='Delete node list empty!')
        for c in codes:
            assert_equal(200, c.status, message=c.reason)
        assert_raises(rest.ApiException, Nodes().nodes_identifier_delete, 'fooey')

    @test(groups=['create-whitelist-node'], depends_on_groups=['delete-node'])
    def test_whitelist_node_create(self):
        """ Verify POST:/nodes/:mac/dhcp/whitelist """
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        macList = []
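        # Collect the first MAC identifier reported by each compute node;
        # these are the addresses to be whitelisted below.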
        for n in nodes:
            node_type = n.get('type')
            assert_is_not_none(node_type)
            if node_type == 'compute':
                idList = n.get('identifiers')
                assert_is_not_none(idList)
                if len(idList) > 0:
                    macList.append(idList[0]) # grab the first mac

        for addr in macList:
            LOG.info('whitelisting MAC address {0}'.format(addr))
            Nodes().nodes_macaddress_dhcp_whitelist_post(addr,body={})
            data = loads(self.__client.last_response.data)
            assert_not_equal(0, len(data))
            addrParsed = data[0].replace("-", ":")
            LOG.info(addrParsed)
            LOG.info(addr)

    @test(groups=['delete-whitelist-node'], depends_on_groups=['create-whitelist-node'])
    def test_whitelist_node_delete(self):
        """ Verify Delete:/nodes/:mac/dhcp/whitelist """
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        macaddress = []
        for n in nodes:
            identifiers = n.get('identifiers', [])
            if len(identifiers) > 0:
                macaddress = identifiers

        macaddress_to_delete = macaddress[-1]
        LOG.info('Deleting macaddress {0}'.format(macaddress_to_delete))
        Nodes().nodes_macaddress_dhcp_whitelist_delete(macaddress_to_delete)
        rsp = self.__client.last_response
        assert_equal(204, rsp.status, message=rsp.reason)

    @test(groups=['catalog_nodes'], depends_on_groups=['delete-whitelist-node'])
    def test_node_catalogs(self):
        """ Testing GET id:/catalogs """
        resps = []
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        for n in nodes:
            if n.get('type') == 'compute':
                Nodes().nodes_identifier_catalogs_get( n.get('id'))
                resps.append(self.__client.last_response.data)
        for resp in resps:
            assert_not_equal(0, len(loads(resp)), message='Node catalog is empty!')
        assert_raises(rest.ApiException, Nodes().nodes_identifier_catalogs_get, 'fooey')

    @test(groups=['catalog_source'], depends_on_groups=['catalog_nodes'])
    def test_node_catalogs_bysource(self):
        """ Testing GET id:/catalogs/source """
        resps = []
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        for n in nodes:
            if n.get('type') == 'compute':
                Nodes().nodes_identifier_catalogs_source_get( n.get('id'),'bmc')
                resps.append(self.__client.last_response)
        for resp in resps:
            assert_equal(200,resp.status, message=resp.reason)
        assert_raises(rest.ApiException, Nodes().nodes_identifier_catalogs_source_get, 'fooey','bmc')

    @test(groups=['node_workflows'], depends_on_groups=['catalog_source'])
    def test_node_workflows_get(self):
        """Testing node GET:id/workflows"""
        resps = []
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        for n in nodes:
            if n.get('type') == 'compute':
                Nodes().nodes_identifier_workflows_get(n.get('id'))
                resps.append(self.__client.last_response.data)
        for resp in resps:
            assert_not_equal(0, len(loads(resp)), message='No Workflows found for Node')
        assert_raises(rest.ApiException, Nodes().nodes_identifier_workflows_get, 'fooey')

    @test(groups=['node_post_workflows'], depends_on_groups=['node_workflows'])
    def test_node_workflows_post(self):
        """Testing node POST:id/workflows"""
        resps = []
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        for n in nodes:
            if n.get('type') == 'compute':
                Nodes().nodes_identifier_workflows_post(n.get('id'),'Graph.Discovery',body={})
                resps.append(self.__client.last_response.data)
        for resp in resps:
            assert_not_equal(0, len(loads(resp)), message='No Workflows found for Node')
        assert_raises(rest.ApiException, Nodes().nodes_identifier_workflows_post, 'fooey','Graph.Discovery',body={})

    @test(groups=['node_workflows_active'], depends_on_groups=['node_post_workflows'])
    def test_node_workflows_active(self):
        """Testing node GET:id/workflows/active"""
        resps = []
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        for n in nodes:
            if n.get('type') == 'compute':
                Nodes().nodes_identifier_workflows_active_get(n.get('id'))
                resps.append(self.__client.last_response.data)
        for resp in resps:
            assert_not_equal(0, len(loads(resp)), message='No active Workflows found for Node')
        assert_raises(rest.ApiException, Nodes().nodes_identifier_workflows_active_get, 'fooey')

    @test(groups=['node_workflows_del_active'], depends_on_groups=['node_workflows_active'])
    def test_node_workflows_del_active(self):
        """Testing node DELETE:id/workflows/active"""
        Nodes().nodes_get()
        nodes = loads(self.__client.last_response.data)
        for n in nodes:
            if n.get('type') == 'compute':
                id = n.get('id')
                assert_is_not_none(id)
                Nodes().nodes_identifier_workflows_active_delete(id)
                assert_equal(0, len(self.__client.last_response.data),
                             message='Expected no active workflow data for Node {0} after delete'.format(id))
        assert_raises(rest.ApiException, Nodes().nodes_identifier_workflows_active_delete, 'fooey')