def test_api_redfish_simple_storage(self):
    """
    Tests the redfish /Systems/{identifier}/simpleStorage APIs with UCS nodes

    :return:
    """
    ucsComputeList = ucs_common.get_ucs_compute_id_list()
    errUrls = ''
    for node in ucsComputeList:
        url = "/redfish/v1/Systems/{}/SimpleStorage".format(node)
        api_data = fit_common.rackhdapi(url)
        if api_data['status'] != 200:
            errUrls += url + ' returned status ' + str(api_data['status']) + \
                       ', Expected 200,\n'
            continue
        for member in api_data['json']['Members']:
            _url = member['@odata.id']
            _api_data = fit_common.rackhdapi(_url)
            if _api_data['status'] != 200:
                # BUG FIX: report the member request's status (_api_data, not the
                # parent collection's api_data) and skip validating the payload
                # of a failed request.
                errUrls += _url + ' returned status ' + str(_api_data['status']) + \
                           ', Expected 200,\n'
                continue
            _body = _api_data['json']
            errData = self.validate_simple_storage_data(_body, _url)
            if errData:
                errUrls += errData
    self.assertEqual(len(errUrls), 0, errUrls)
def test_api_redfish_system(self):
    """
    Tests the redfish /Systems APIs with UCS nodes

    :return:
    """
    ucsComputeList = ucs_common.get_ucs_compute_id_list()
    errUrls = ''
    api_data = fit_common.rackhdapi('/redfish/v1/Systems')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    for chassis in api_data['json']['Members']:
        url = chassis['@odata.id']
        id = url[len('/redfish/v1/Systems/'):]
        # Tick off every UCS compute we find in the collection.
        if id in ucsComputeList:
            ucsComputeList.remove(id)
        node_data = fit_common.rackhdapi(url)
        if node_data['status'] != 200:
            errUrls += url + ' returned status ' + str(node_data['status']) + ',\n'
            # BUG FIX: do not validate the payload of a failed request.
            continue
        _body = node_data['json']
        schema = {
            "str": ["Name", "SystemType", "Manufacturer", "Model", "SerialNumber",
                    "IndicatorLED", "PowerState", "BiosVersion"],
            "dict": ["ProcessorSummary", "MemorySummary", "Actions", "Processors",
                     "EthernetInterfaces", "SimpleStorage", "LogServices", "Links", "Storage"]
        }
        errData = ucs_common.validate_redfish_data_payload(_body, schema, url)
        if errData:
            errUrls += errData
    self.assertEqual(len(ucsComputeList), 0, 'not all UCS computes were listed under /System')
    self.assertEqual(len(errUrls), 0, errUrls)
def test_api_redfish_processor(self):
    """
    Tests the /Systems/{identifier}/processors APIs with UCS nodes

    :return:
    """
    ucsComputeList = ucs_common.get_ucs_compute_id_list()
    errUrls = ''
    for node in ucsComputeList:
        url = "/redfish/v1/Systems/{}/Processors".format(node)
        api_data = fit_common.rackhdapi(url)
        if api_data['status'] != 200:
            errUrls += url + ' returned status ' + str(api_data['status']) + \
                       ', Expected 200,\n'
            continue
        if len(api_data['json']['Members']) == 0:
            errUrls += url + ' CPU count is 0,\n'
            continue
        for member in api_data['json']['Members']:
            _url = member['@odata.id']
            _api_data = fit_common.rackhdapi(_url)
            if _api_data['status'] != 200:
                # BUG FIX: report the member request's status (_api_data, not
                # api_data) and skip validating the payload of a failed request.
                errUrls += _url + ' returned status ' + str(_api_data['status']) + \
                           ', Expected 200,\n'
                continue
            _body = _api_data['json']
            schema = {
                "str": ["Socket", "ProcessorType", "ProcessorArchitecture",
                        "InstructionSet", "Manufacturer", "Model"],
                "int": ["MaxSpeedMHz", "TotalCores", "TotalThreads"]
            }
            # BUG FIX: attribute validation errors to the member URL, not the collection URL.
            errData = ucs_common.validate_redfish_data_payload(_body, schema, _url)
            if errData:
                errUrls += errData
    self.assertEqual(len(errUrls), 0, errUrls)
def test_api_redfish_chassis(self):
    """
    Tests the redfish /Chassis APIs with UCS nodes

    :return:
    """
    ucsEnclList = ucs_common.get_ucs_encl_id_list()
    errUrls = ''
    api_data = fit_common.rackhdapi('/redfish/v1/Chassis')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    for chassis in api_data['json']['Members']:
        url = chassis['@odata.id']
        id = url[len('/redfish/v1/Chassis/'):]
        # Tick off every UCS enclosure we find in the collection.
        if id in ucsEnclList:
            ucsEnclList.remove(id)
        chassis_data = fit_common.rackhdapi(url)
        if chassis_data['status'] != 200:
            errUrls += url + ' returned status ' + str(chassis_data['status']) + ',\n'
            # BUG FIX: do not validate the payload of a failed request.
            continue
        _body = chassis_data['json']
        schema = {
            "str": ["Name", "ChassisType", "Manufacturer", "Model", "SerialNumber", "IndicatorLED"],
            "dict": ["Thermal", "Power", "Links"]
        }
        errData = ucs_common.validate_redfish_data_payload(_body, schema, url)
        if errData:
            errUrls += errData
    self.assertEqual(len(ucsEnclList), 0, 'not all UCS chassis were listed under /chassis')
    self.assertEqual(len(errUrls), 0, errUrls)
def test_api_redfish_processor(self):
    """
    Tests the /Systems/{identifier}/processors APIs with UCS nodes

    :return:
    """
    ucsComputeList = ucs_common.get_ucs_compute_id_list()
    errUrls = ''
    for node in ucsComputeList:
        url = "/redfish/v1/Systems/{}/Processors".format(node)
        api_data = fit_common.rackhdapi(url)
        if api_data['status'] != 200:
            errUrls += url + ' returned status ' + str(api_data['status']) + \
                       ', Expected 200,\n'
            continue
        if len(api_data['json']['Members']) == 0:
            errUrls += url + ' CPU count is 0,\n'
            continue
        for member in api_data['json']['Members']:
            _url = member['@odata.id']
            _api_data = fit_common.rackhdapi(_url)
            if _api_data['status'] != 200:
                # BUG FIX: report the member request's status (_api_data, not
                # api_data) and skip validating the payload of a failed request.
                errUrls += _url + ' returned status ' + str(_api_data['status']) + \
                           ', Expected 200,\n'
                continue
            _body = _api_data['json']
            schema = {
                "str": ["Socket", "ProcessorType", "ProcessorArchitecture",
                        "InstructionSet", "Manufacturer", "Model"],
                "int": ["MaxSpeedMHz", "TotalCores", "TotalThreads"]
            }
            # BUG FIX: attribute validation errors to the member URL, not the collection URL.
            errData = ucs_common.validate_redfish_data_payload(_body, schema, _url)
            if errData:
                errUrls += errData
    self.assertEqual(len(errUrls), 0, errUrls)
def test_api_redfish_simple_storage(self):
    """
    Tests the redfish /Systems/{identifier}/simpleStorage APIs with UCS nodes

    :return:
    """
    ucsComputeList = ucs_common.get_ucs_compute_id_list()
    errUrls = ''
    for node in ucsComputeList:
        url = "/redfish/v1/Systems/{}/SimpleStorage".format(node)
        api_data = fit_common.rackhdapi(url)
        if api_data['status'] != 200:
            errUrls += url + ' returned status ' + str(api_data['status']) + \
                       ', Expected 200,\n'
            continue
        for member in api_data['json']['Members']:
            _url = member['@odata.id']
            _api_data = fit_common.rackhdapi(_url)
            if _api_data['status'] != 200:
                # BUG FIX: report the member request's status (_api_data, not the
                # parent collection's api_data) and skip validating the payload
                # of a failed request.
                errUrls += _url + ' returned status ' + str(_api_data['status']) + \
                           ', Expected 200,\n'
                continue
            _body = _api_data['json']
            errData = self.validate_simple_storage_data(_body, _url)
            if errData:
                errUrls += errData
    self.assertEqual(len(errUrls), 0, errUrls)
def wait_utility(self, id, counter, name, max_wait=MAX_WAIT):
    """
    Wait for the specified graph to finish

    :param id: Graph ID
    :param counter: Safeguard for the number of times we can check the status of the graph
    :param name: Description of graph we are waiting for
    :param max_wait: maximum number of 1-second polls before giving up
    :return: returns status of the taskgraph, or "timeout" if count is exceeded
    """
    api_data = fit_common.rackhdapi('/api/2.0/workflows/' + str(id))
    status = api_data["json"]["status"]
    logs.info_1(
        "Waiting up to {0} seconds for {1} Workflow, ID: {2}".format(
            max_wait, name, id))
    while (status == 'running' and counter < max_wait):
        time.sleep(1)
        counter += 1
        api_data = fit_common.rackhdapi('/api/2.0/workflows/' + str(id))
        status = api_data["json"]["status"]
    if counter >= max_wait:
        # BUG FIX: the format string has four placeholders but only three
        # arguments were passed (status was missing), which raised IndexError
        # on the timeout path; also fixed the "attemps" typo.
        logs.info_1(
            "wait_utility() timed out after {0} attempts. status: {1}, ID: {2}, name: {3}"
            .format(counter, status, id, name))
        return 'timeout'
    else:
        # typo fix: "copleted" -> "completed"
        logs.info_1(
            "wait_utility() completed with status: {0} for run: {1}. ID: {2}, name: {3}"
            .format(status, counter, id, name))
        return status
def test_api_redfish_chassis(self):
    """
    Tests the redfish/chassis routes with UCS nodes

    :return:
    """
    enclosures = self.get_ucs_encl_id_list()
    failures = []
    api_data = fit_common.rackhdapi('/redfish/v1/Chassis')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    for member in api_data['json']['Members']:
        url = member['@odata.id']
        encl_id = url[len('/redfish/v1/Chassis/'):]
        # Cross each discovered UCS enclosure off the expected list.
        if encl_id in enclosures:
            enclosures.remove(encl_id)
        member_data = fit_common.rackhdapi(url)
        if member_data['status'] != 200:
            failures.append(url + ' returned status ' + str(member_data['status']) + ',\n')
    errUrls = ''.join(failures)
    self.assertEqual(len(enclosures), 0, 'not all UCS chassis were listed under /chassis')
    self.assertEqual(len(errUrls), 0, errUrls)
def restore_node_utility():
    """
    Deletes all the added ucs nodes by the test.
    :return: return False on failure, or True otherwise
    """
    logs.info_1("Restoring Nodes")
    response = fit_common.rackhdapi('/api/2.0/nodes')
    if response['status'] != 200:
        logs.error("get /api/2.0/nodes returned status {}, expected 200".format(response['status']))
        return False
    # Delete every node that was not present before the test started.
    for entry in response['json']:
        if entry['id'] in INITIAL_NODES:
            continue
        deletion = fit_common.rackhdapi('/api/2.0/nodes/' + entry['id'], action="delete")
        logs.info_1("Deleting Node: {0}. Status was: {1}".format(entry['id'], deletion['status']))
    # Re-fetch and verify only the initial nodes remain.
    response = fit_common.rackhdapi('/api/2.0/nodes')
    if response['status'] != 200:
        logs.error("get /api/2.0/nodes returned status {}, expected 200".format(response['status']))
        return False
    remaining = {entry['id']: entry['name'] for entry in response['json']}
    if len(remaining) != len(INITIAL_NODES):
        logs.error("Found {0} nodes remaining after restoring the nodes, should be {1}, Remaining nodes: {2}"
                   .format(len(remaining), len(INITIAL_NODES), remaining))
        return False
    return True
def test_api_redfish_system(self):
    """
    Tests the redfish /Systems APIs with UCS nodes

    :return:
    """
    ucsComputeList = ucs_common.get_ucs_compute_id_list()
    errUrls = ''
    api_data = fit_common.rackhdapi('/redfish/v1/Systems')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    for chassis in api_data['json']['Members']:
        url = chassis['@odata.id']
        id = url[len('/redfish/v1/Systems/'):]
        # Tick off every UCS compute we find in the collection.
        if id in ucsComputeList:
            ucsComputeList.remove(id)
        node_data = fit_common.rackhdapi(url)
        if node_data['status'] != 200:
            errUrls += url + ' returned status ' + str(node_data['status']) + ',\n'
            # BUG FIX: do not validate the payload of a failed request.
            continue
        _body = node_data['json']
        schema = {
            "str": ["Name", "SystemType", "Manufacturer", "Model", "SerialNumber",
                    "IndicatorLED", "PowerState", "BiosVersion"],
            "dict": ["ProcessorSummary", "MemorySummary", "Actions", "Processors",
                     "EthernetInterfaces", "SimpleStorage", "LogServices", "Links", "Storage"]
        }
        errData = ucs_common.validate_redfish_data_payload(_body, schema, url)
        if errData:
            errUrls += errData
    self.assertEqual(len(ucsComputeList), 0, 'not all UCS computes were listed under /System')
    self.assertEqual(len(errUrls), 0, errUrls)
def restore_node_utility(self, catalog_workflows):
    """
    Deletes all the added ucs nodes by the test.

    :param catalog_workflows: A list of the catalog workflow IDs that will wait for their completion .
    :return:
    """
    logs.info_1("Restoring Nodes")
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    # Let every outstanding catalog workflow finish before deleting nodes.
    for workflow_id in catalog_workflows:
        self.wait_utility(str(workflow_id), 0, "Catalog")
    for entry in api_data['json']:
        if entry['id'] in self.INITIAL_NODES:
            continue
        delete_result = fit_common.rackhdapi('/api/2.0/nodes/' + entry['id'], action="delete")
        logs.info_1("Deleting Node: {0}. Status was: {1}".format(entry['id'], delete_result['status']))
        time.sleep(self.MAX_WAIT_ON_DELETE)
    # Re-fetch and verify only the initial nodes remain.
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    remaining = {entry['id']: entry['name'] for entry in api_data['json']}
    self.assertEqual(len(remaining), len(self.INITIAL_NODES),
                     "Found {0} nodes remaining after restoring the nodes, should be {1}, Remaining nodes: {2}"
                     .format(len(remaining), len(self.INITIAL_NODES), remaining))
def wait_utility(self, id, counter, name, max_wait=MAX_WAIT):
    """
    Wait for the specified graph to finish

    :param id: Graph ID
    :param counter: Safeguard for the number of times we can check the status of the graph
    :param name: Description of graph we are waiting for
    :param max_wait: maximum number of 1-second polls before giving up
    :return: returns status of the taskgraph, or "timeout" if count is exceeded
    """
    api_data = fit_common.rackhdapi('/api/2.0/workflows/' + str(id))
    status = api_data["json"]["status"]
    logs.info_1("Waiting up to {0} seconds for {1} Workflow, ID: {2}"
                .format(max_wait, name, id))
    while (status == 'running' and counter < max_wait):
        time.sleep(1)
        counter += 1
        api_data = fit_common.rackhdapi('/api/2.0/workflows/' + str(id))
        status = api_data["json"]["status"]
    if counter >= max_wait:
        # BUG FIX: the format string has four placeholders but only three
        # arguments were passed (status was missing), which raised IndexError
        # on the timeout path; also fixed the "attemps" typo.
        logs.info_1("wait_utility() timed out after {0} attempts. status: {1}, ID: {2}, name: {3}"
                    .format(counter, status, id, name))
        return 'timeout'
    else:
        # typo fix: "copleted" -> "completed"
        logs.info_1("wait_utility() completed with status: {0} for run: {1}. ID: {2}, name: {3}"
                    .format(status, counter, id, name))
        return status
def restore_node_utility():
    """
    Deletes all the added ucs nodes by the test.
    :return: return False on failure, or True otherwise
    """
    logs.info_1("Restoring Nodes")
    response = fit_common.rackhdapi('/api/2.0/nodes')
    if response['status'] != 200:
        logs.error("get /api/2.0/nodes returned status {}, expected 200".format(response['status']))
        return False
    # Delete every node that was not present before the test started.
    for entry in response['json']:
        if entry['id'] in INITIAL_NODES:
            continue
        deletion = fit_common.rackhdapi('/api/2.0/nodes/' + entry['id'], action="delete")
        logs.info_1("Deleting Node: {0}. Status was: {1}".format(entry['id'], deletion['status']))
    # Re-fetch and verify only the initial nodes remain.
    response = fit_common.rackhdapi('/api/2.0/nodes')
    if response['status'] != 200:
        logs.error("get /api/2.0/nodes returned status {}, expected 200".format(response['status']))
        return False
    remaining = {entry['id']: entry['name'] for entry in response['json']}
    if len(remaining) != len(INITIAL_NODES):
        logs.error("Found {0} nodes remaining after restoring the nodes, should be {1}, Remaining nodes: {2}"
                   .format(len(remaining), len(INITIAL_NODES), remaining))
        return False
    return True
def restore_obms_utility():
    """
    Deletes all the added ucs obms by this test.
    :return:
    """
    logs.info_1("Restoring OBMs")
    response = fit_common.rackhdapi('/api/2.0/obms')
    if response['status'] != 200:
        logs.error("get /api/2.0/obms returned status {}, expected 200".format(response['status']))
        return False
    # Delete every OBM that was not present before the test started.
    for entry in response['json']:
        if entry['id'] in INITIAL_OBMS:
            continue
        deletion = fit_common.rackhdapi('/api/2.0/obms/' + entry['id'], action="delete")
        logs.info_1("Deleting OBM: {0}. Status was: {1}".format(entry['id'], str(deletion['status'])))
    # Re-fetch and verify only the initial OBMs remain.
    response = fit_common.rackhdapi('/api/2.0/obms')
    if response['status'] != 200:
        logs.error("get /api/2.0/obms returned status {}, expected 200".format(response['status']))
        return False
    remaining = {entry['id']: entry['service'] for entry in response['json']}
    if len(remaining) != len(INITIAL_OBMS):
        logs.error("Found {0} ucs obms remaining after restoring the obms, should be {1}. Remaining OBMs: {2}"
                   .format(len(remaining), len(INITIAL_OBMS), remaining))
        return False
    return True
def restore_obms_utility():
    """
    Deletes all the added ucs obms by this test.
    :return:
    """
    logs.info_1("Restoring OBMs")
    response = fit_common.rackhdapi('/api/2.0/obms')
    if response['status'] != 200:
        logs.error("get /api/2.0/obms returned status {}, expected 200".format(response['status']))
        return False
    # Delete every OBM that was not present before the test started.
    for entry in response['json']:
        if entry['id'] in INITIAL_OBMS:
            continue
        deletion = fit_common.rackhdapi('/api/2.0/obms/' + entry['id'], action="delete")
        logs.info_1("Deleting OBM: {0}. Status was: {1}".format(entry['id'], str(deletion['status'])))
    # Re-fetch and verify only the initial OBMs remain.
    response = fit_common.rackhdapi('/api/2.0/obms')
    if response['status'] != 200:
        logs.error("get /api/2.0/obms returned status {}, expected 200".format(response['status']))
        return False
    remaining = {entry['id']: entry['service'] for entry in response['json']}
    if len(remaining) != len(INITIAL_OBMS):
        logs.error("Found {0} ucs obms remaining after restoring the obms, should be {1}. Remaining OBMs: {2}"
                   .format(len(remaining), len(INITIAL_OBMS), remaining))
        return False
    return True
def restore_node_utility(self, catalog_workflows):
    """
    Deletes all the added ucs nodes by the test.

    :param catalog_workflows: A list of the catalog workflow IDs that will wait for their completion .
    :return:
    """
    logs.info_1("Restoring Nodes")
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    # Let every outstanding catalog workflow finish before deleting nodes.
    for workflow_id in catalog_workflows:
        self.wait_utility(str(workflow_id), 0, "Catalog")
    for entry in api_data['json']:
        if entry['id'] in self.INITIAL_NODES:
            continue
        delete_result = fit_common.rackhdapi('/api/2.0/nodes/' + entry['id'], action="delete")
        logs.info_1("Deleting Node: {0}. Status was: {1}".format(entry['id'], delete_result['status']))
        time.sleep(self.MAX_WAIT_ON_DELETE)
    # Re-fetch and verify only the initial nodes remain.
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    remaining = {entry['id']: entry['name'] for entry in api_data['json']}
    self.assertEqual(len(remaining), len(self.INITIAL_NODES),
                     "Found {0} nodes remaining after restoring the nodes, should be {1}, Remaining nodes: {2}"
                     .format(len(remaining), len(self.INITIAL_NODES), remaining))
def test_api_redfish_chassis(self):
    """
    Tests the redfish /Chassis APIs with UCS nodes

    :return:
    """
    ucsEnclList = ucs_common.get_ucs_encl_id_list()
    errUrls = ''
    api_data = fit_common.rackhdapi('/redfish/v1/Chassis')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    for chassis in api_data['json']['Members']:
        url = chassis['@odata.id']
        id = url[len('/redfish/v1/Chassis/'):]
        # Tick off every UCS enclosure we find in the collection.
        if id in ucsEnclList:
            ucsEnclList.remove(id)
        chassis_data = fit_common.rackhdapi(url)
        if chassis_data['status'] != 200:
            errUrls += url + ' returned status ' + str(chassis_data['status']) + ',\n'
            # BUG FIX: do not validate the payload of a failed request.
            continue
        _body = chassis_data['json']
        schema = {
            "str": ["Name", "ChassisType", "Manufacturer", "Model", "SerialNumber", "IndicatorLED"],
            "dict": ["Thermal", "Power", "Links"]
        }
        errData = ucs_common.validate_redfish_data_payload(_body, schema, url)
        if errData:
            errUrls += errData
    self.assertEqual(len(ucsEnclList), 0, 'not all UCS chassis were listed under /chassis')
    self.assertEqual(len(errUrls), 0, errUrls)
def test_api_20_get_pollers_by_id(self):
    """Display and sanity-check the poller data for each node in NODELIST."""
    logs.info_2("\t{0}".format("Description: Display the poller data per node."))
    required_fields = ('node', 'config', 'createdAt', 'id', 'name',
                       'failureCount', 'leaseExpires', 'leaseToken', 'updatedAt')
    for node in self.NODELIST:
        api_data = fit_common.rackhdapi("/api/2.0/nodes/" + node + "/pollers")
        self.assertEqual(api_data['status'], 200,
                         "Incorrect HTTP return code, expected 200, got:{0}".format(str(api_data['status'])))
        for item in api_data['json']:
            # check required fields
            self.assertGreater(item['pollInterval'], 0, 'pollInterval field error')
            for field in required_fields:
                self.assertIn(field, item, field + ' field error')
        logs.info_2("\nNode: ")
        poller_dict = test_api_utils.get_supported_pollers(node)
        self.POLLERS[node] = poller_dict
        for poller_name, poller_info in poller_dict.items():
            poller_id = poller_info["poller_id"]
            logs.info_2("\nPoller: " + poller_name + " ID: " + str(poller_id))
            poll_data = fit_common.rackhdapi("/api/2.0/pollers/" + poller_id)
            logs.info_5(fit_common.json.dumps(poll_data['json'], indent=4))
def restore_obms_utility(self):
    """
    Deletes all the added ucs obms by this test.
    :return:
    """
    logs.info_1("Restoring OBMs")
    api_data = fit_common.rackhdapi('/api/2.0/obms')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    # Delete every OBM that was not present before the test started.
    for entry in api_data['json']:
        if entry['id'] in self.INITIAL_OBMS:
            continue
        delete_result = fit_common.rackhdapi('/api/2.0/obms/' + entry['id'], action="delete")
        logs.info_1("Deleting OBM: {0}. Status was: {1}".format(entry['id'], str(delete_result['status'])))
        time.sleep(self.MAX_WAIT_ON_DELETE)
    # Re-fetch and verify only the initial OBMs remain.
    api_data = fit_common.rackhdapi('/api/2.0/obms')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    remaining = {entry['id']: entry['service'] for entry in api_data['json']}
    self.assertEqual(len(remaining), len(self.INITIAL_OBMS),
                     "Found {0} ucs obms remaining after restoring the obms, should be {1}. Remaining OBMs: {2}"
                     .format(len(remaining), len(self.INITIAL_OBMS), remaining))
def test_api_20_ucs_discovery(self):
    """
    Tests the UCS Discovery workflow in rackHD

    :return:
    """
    initialNodeCount = len(self.get_ucs_node_list())
    data_payload = {
        "name": "Graph.Ucs.Discovery",
        "options": {
            "defaults": {
                "username": UCSM_USER,
                "password": UCSM_PASS,
                "ucs": UCSM_IP,
                "uri": UCS_SERVICE_URI
            }
        }
    }
    header = {"Content-Type": "application/json"}
    api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post", headers=header, payload=data_payload)
    id = api_data["json"]["context"]["graphId"]
    self.assertEqual(api_data['status'], 201,
                     'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
    status = self.wait_utility(str(id), 0, "Discovery")
    self.assertEqual(status, 'succeeded', 'Discovery graph returned status {}'.format(status))
    newNodeCount = len(self.get_ucs_node_list())
    # BUG FIX: log the discovered UCS node count; the previous code logged
    # len(api_data['json']) of the workflow POST response, which is unrelated.
    logs.info_1("Found {0} Nodes after cataloging the UCS".format(newNodeCount))
    self.assertGreaterEqual(newNodeCount - initialNodeCount, self.EXPECTED_UCS_PHYSICAL_NODES,
                            'Expected to discover {0} UCS nodes, got: {1}'
                            .format(self.EXPECTED_UCS_PHYSICAL_NODES, newNodeCount - initialNodeCount))

    # rerun discovery and verify duplicate nodes are not created
    api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post", headers=header, payload=data_payload)
    id = api_data["json"]["context"]["graphId"]
    self.assertEqual(api_data['status'], 201,
                     'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
    status = self.wait_utility(str(id), 0, "Discovery")
    self.assertEqual(status, 'succeeded', 'Discovery graph returned status {}'.format(status))
    newNodeCount = len(self.get_ucs_node_list())
    logs.info_1("Found {0} Nodes after cataloging the UCS".format(newNodeCount))
    self.assertGreaterEqual(newNodeCount - initialNodeCount, 0,
                            'Expected to discover {0} UCS nodes, got: {1}'
                            .format(0, newNodeCount - initialNodeCount))
def test_api_20_workflow_ucs_pollers(self):
    """
    Tests the UCS Poller workflow in rackHD
    :return:
    """
    # Discovery options: discover physical servers only; logical-server
    # discovery and auto-cataloging are turned off for this test.
    data_payload = {
        "name": "Graph.Ucs.Discovery",
        "options": {
            "defaults": {
                "username": ucs_common.UCSM_USER,
                "password": ucs_common.UCSM_PASS,
                "ucs": ucs_common.UCSM_IP,
                "uri": ucs_common.UCS_SERVICE_URI
            },
            "when-discover-physical-ucs": {
                "discoverPhysicalServers": "true",
            },
            "when-discover-logical-ucs": {
                "discoverLogicalServer": "false"
            },
            "when-catalog-ucs": {
                "autoCatalogUcs": "false"
            }
        }
    }
    header = {"Content-Type": "application/json"}
    # Kick off the discovery graph and wait for it to finish.
    api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post", headers=header, payload=data_payload)
    self.assertEqual(api_data['status'], 201,
                     'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
    id = api_data["json"]["context"]["graphId"]
    status = ucs_common.wait_utility(str(id), 0, "Ucs Discovery")
    self.assertEqual(status, 'succeeded', 'Ucs Discovery graph returned status {}'.format(status))
    # Populates self.NODELIST with the discovered UCS node IDs.
    self.get_ucs_node_list()
    # Run the poller graph against every discovered node, accumulating
    # failures so all nodes are attempted before asserting.
    errNodes = ''
    errGraphs = ''
    for node in self.NODELIST:
        postUrl = '/api/2.0/nodes/' + node + "/workflows?name=Graph.Ucs.Poller"
        header = {"Content-Type": "application/json"}
        api_data = fit_common.rackhdapi(postUrl, headers=header, action="post", payload={})
        if api_data['status'] != 201:
            errNodes += 'POST for node {} returned {}, '.format(node, api_data['status'])
        status = ucs_common.wait_utility(api_data["json"]["instanceId"], 0, "Poller")
        if status != 'succeeded':
            errGraphs += 'graph id {} finished with status: {}, '.format(api_data["json"]["instanceId"], status)
        logs.info_1("Posted URL: {0} with status: {1}".format(postUrl, api_data['status']))
    self.assertEqual(len(errNodes), 0, errNodes)
    self.assertEqual(len(errGraphs), 0, errGraphs)
def test_api_20_ucs_discovery(self):
    """
    Tests the UCS Discovery workflow in rackHD
    :return:
    """
    # Snapshot the pre-existing nodes and OBMs so they can be restored later.
    self.get_nodes_utility()
    self.get_obms_utility()
    data_payload = {
        "name": "Graph.Ucs.Discovery",
        "options": {
            "defaults": {
                "username": "******",
                "password": "******",
                "ucs": self.UCS_IP,
                "uri": "http://" + self.RACKHD_IP + ":7080/sys"
            }
        }
    }
    header = {"Content-Type": "application/json"}
    # Kick off the discovery graph and wait for it to finish.
    api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post", headers=header, payload=data_payload)
    id = api_data["json"]["context"]["graphId"]
    self.assertEqual(api_data['status'], 201,
                     'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
    self.wait_utility(str(id), 0, "Discovery")
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    logs.info_1("Found {0} Nodes after cataloging the UCS".format(len(api_data['json'])))
    # Collect the nodes whose first OBM is the ucs-obm-service; track UCS
    # compute nodes separately.
    for node in api_data['json']:
        if node["obms"] != [] and node["obms"][0]["service"] == "ucs-obm-service":
            self.UCS_NODES.append(node)
            if node["type"] == "compute":
                self.UCS_COMPUTE_NODES.append(node)
    self.assertGreaterEqual(len(self.UCS_NODES), self.EXPECTED_UCS_NODES,
                            'Expected to discover {0} UCS nodes, got: {1}'.format(
                                self.EXPECTED_UCS_NODES, len(self.UCS_NODES)))
    logs.info_1("Found {0} UCS nodes {1}".format(len(self.UCS_COMPUTE_NODES), self.UCS_COMPUTE_NODES))
def test_get_nodes_rackhdapi(self):
    """
    This test is an example of using fit_common.rackhdapi() to perform
    an API call and using data from the response.

    For demo purposes, it needs communication to a running rackhd instance.
    """
    nodelist = []

    # Perform an API call
    api_data = fit_common.rackhdapi('/api/2.0/nodes')

    # Check return status is what you expect
    status = api_data.get('status')
    self.assertEqual(
        status, 200,
        'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))

    # Use the response data
    # BUG FIX: dict.get() never raises, so the previous try/bare-except could
    # never trigger; detect a missing 'json' key explicitly instead.
    nodes = api_data.get('json')
    if nodes is None:
        self.fail("No Json data in repsonse")

    for node in nodes:
        nodelist.append(node.get('id'))
    logs.info(" %s", dumps(nodelist, indent=4))

    # example to set the class level nodelist
    self.__class__.nodes = nodelist
def wait_utility(self, id, counter, name):
    """
    Wait for the ucs discovery workflow to finish

    :param id: Graph ID
    :param counter: Safeguard for the number of times we can check the status of the graph
    :param name: Description of graph we are waiting for
    :return: returns status of the taskgraph, or "timeout" if count is exceeded
    """
    api_data = fit_common.rackhdapi('/api/2.0/workflows/' + str(id))
    status = api_data["json"]["status"]
    # BUG FIX: poll iteratively instead of recursively; the recursion depth
    # grew with MAX_WAIT and risked RecursionError for long waits. The
    # polling order and log messages are unchanged.
    while status == "running" and counter < self.MAX_WAIT:
        time.sleep(1)
        logs.info_1(
            "In the wait_utility: Workflow status is {0} for the {1}'s run. ID: {2}, name: {3}"
            .format(status, counter, id, name))
        counter += 1
        api_data = fit_common.rackhdapi('/api/2.0/workflows/' + str(id))
        status = api_data["json"]["status"]
    if status == "running" and counter >= self.MAX_WAIT:
        logs.info_1(
            "In the wait_utility: Timed out after trying {0} times. ID: {1}, name: {2}"
            .format(self.MAX_WAIT, id, name))
        return 'timeout'
    else:
        logs.info_1(
            "In the wait_utility: Waiting for workflow {0}. The status is: {1} for run: {2}. ID: {3}"
            .format(name, status, counter, id))
        return status
def get_ucs_node_list(self):
    """Append the IDs of all nodes managed by the ucs-obm-service to NODELIST."""
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    ucs_ids = [entry["id"] for entry in api_data['json']
               if entry["obms"] != [] and entry["obms"][0]["service"] == "ucs-obm-service"]
    self.NODELIST.extend(ucs_ids)
def test_api_20_ucs_catalog(self):
    """
    Tests the UCS Catalog workflow in rackHD

    :return:
    """
    catalog_workflows = []
    # Start a catalog workflow for every discovered UCS node.
    for ucs_node in self.UCS_NODES:
        postUrl = '/api/2.0/nodes/' + str(ucs_node["id"]) + "/workflows?name=Graph.Ucs.Catalog"
        header = {"Content-Type": "application/json"}
        api_data = fit_common.rackhdapi(postUrl, headers=header, action="post")
        self.assertEqual(api_data['status'], 201,
                         'Expected to catalog {0} UCS nodes with status {1}, got: {2}'.
                         format(ucs_node["id"], 201, api_data['status']))
        catalog_workflows.append(api_data["json"]["instanceId"])
        logs.info_1("Posted URL: {0} with status: {1}".format(postUrl, api_data['status']))

    # Restore the nodes, obms, and catalogs to their state before the UCS discovery
    # in order to avoid any failure in other tests
    logs.info_1("Restoring the database to the state it was in before the UCS discovery and catalog")
    self.restore_node_utility(catalog_workflows)
    self.restore_obms_utility()
def __wait_for_workflow_complete(self, instanceid, start_time, waittime=900, cycle=30):
    """
    Poll a workflow until it succeeds, fails, or the wait time elapses.

    :param instanceid: workflow instance ID to poll
    :param start_time: epoch time the workflow was started
    :param waittime: maximum seconds to wait for completion
    :param cycle: seconds to sleep between polls
    :return: True if the workflow succeeded; False on HTTP error,
             workflow failure, or timeout
    """
    log.info_5(" Workflow started at time: %f", start_time)
    # BUG FIX: initialize result so the timeout log below cannot hit an
    # unbound name when the loop body never executes (e.g. waittime <= 0).
    result = None
    while time.time() - start_time < waittime:
        result = fit_common.rackhdapi("/api/2.0/workflows/" + instanceid)
        if result['status'] != 200:
            log.error(" HTTP error: " + result['text'])
            return False
        if result['json']['status'] in ['running', 'pending']:
            log.info_5("{} workflow status: {}".format(
                result['json']['injectableName'], result['json']['status']))
            time.sleep(cycle)
        elif result['json']['status'] == 'succeeded':
            log.info_5("{} workflow status: {}".format(
                result['json']['injectableName'], result['json']['status']))
            log.info_5(" Workflow completed at time: " + str(time.time()))
            return True
        else:
            log.error(" Workflow failed: status: %s text: %s",
                      result['json']['status'], result['text'])
            return False
    log.error(" Workflow Timeout: " + (result['text'] if result else ''))
    return False
def test_api_20_ucs_discover_and_catalog_all(self): """ Tests the UCS Discovery and Catalon All workflow in rackHD :return: """ # delete all previously discovered nodes and catalogs self.assertTrue(restore_node_utility(), "failed to restore nodes") self.assertTrue(restore_obms_utility(), "failed to restore obms") initialNodeCount = len(self.get_ucs_node_list()) expected_ucs_logical_nodes = self.get_service_profile_count() expected_ucs_physical_nodes = self.get_physical_server_count() data_payload = { "name": "Graph.Ucs.Discovery", "options": { "defaults": { "username": UCSM_USER, "password": UCSM_PASS, "ucs": UCSM_IP, "uri": UCS_SERVICE_URI }, "when-discover-physical-ucs": { "discoverPhysicalServers": "true", }, "when-discover-logical-ucs": { "discoverLogicalServer": "true" }, "when-catalog-ucs": { "autoCatalogUcs": "true" } } } header = {"Content-Type": "application/json"} api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post", headers=header, payload=data_payload) id = api_data["json"]["context"]["graphId"] self.assertEqual( api_data['status'], 201, 'Incorrect HTTP return code, expected 201, got:' + str(api_data['status'])) status = self.wait_utility(str(id), 0, "Discovery", 240) self.assertEqual(status, 'succeeded', 'Discovery graph returned status {}'.format(status)) newNodeCount = len(self.get_ucs_node_list()) logs.info_1("Found {0} Nodes after cataloging the UCS".format( len(api_data['json']))) self.assertEqual( newNodeCount - initialNodeCount, expected_ucs_physical_nodes + expected_ucs_logical_nodes, 'Expected to discover {0} UCS nodes, got: {1}'.format( expected_ucs_physical_nodes + expected_ucs_logical_nodes, newNodeCount - initialNodeCount))
def test_get_nodes_rackhdapi(self):
    """
    This test is an example of using fit_common.rackhdapi() to perform
    an API call and using data from the response.

    For demo purposes, it needs communication to a running rackhd instance.
    """
    nodelist = []

    # Perform an API call
    api_data = fit_common.rackhdapi('/api/2.0/nodes')

    # Check return status is what you expect
    status = api_data.get('status')
    self.assertEqual(status, 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))

    # Use the response data
    # BUG FIX: dict.get() never raises, so the previous try/bare-except could
    # never trigger; detect a missing 'json' key explicitly instead.
    nodes = api_data.get('json')
    if nodes is None:
        self.fail("No Json data in repsonse")

    for node in nodes:
        nodelist.append(node.get('id'))
    logs.info(" %s", dumps(nodelist, indent=4))

    # example to set the class level nodelist
    self.__class__.nodes = nodelist
def test_api_20_ucs_catalog(self):
    """
    Tests the UCS Catalog workflow in rackHD

    :return:
    """
    errNodes = ''
    errGraphs = ''
    # Run the catalog graph against every UCS node, accumulating failures
    # so all nodes are attempted before asserting.
    for ucs_node in self.get_ucs_node_list():
        postUrl = '/api/2.0/nodes/' + str(ucs_node["id"]) + "/workflows?name=Graph.Ucs.Catalog"
        header = {"Content-Type": "application/json"}
        api_data = fit_common.rackhdapi(postUrl, headers=header, action="post", payload={})
        if api_data['status'] != 201:
            errNodes += 'POST for node {} returned {}, '.format(ucs_node['id'], api_data['status'])
        status = self.wait_utility(api_data["json"]["instanceId"], 0, "Catalog")
        if status != 'succeeded':
            errGraphs += 'graph id {} finished with status: {}, '.format(api_data["json"]["instanceId"], status)
        logs.info_1("Posted URL: {0} with status: {1}".format(postUrl, api_data['status']))
    self.assertEqual(len(errNodes), 0, errNodes)
    self.assertEqual(len(errGraphs), 0, errGraphs)
def get_ucs_node_list(self):
    """Return every node whose first OBM entry is the ucs-obm-service."""
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    return [entry for entry in api_data['json']
            if entry["obms"] != [] and entry["obms"][0]["service"] == "ucs-obm-service"]
def get_ucs_node_list():
    """
    Get UCS nodes: every node whose first OBM is the ucs-obm-service.
    """
    response = fit_common.rackhdapi('/api/2.0/nodes')
    return [entry for entry in response['json']
            if entry["obms"] != [] and entry["obms"][0]["service"] == "ucs-obm-service"]
def test_api_20_ucs_discovery(self):
    """
    Tests the UCS Discovery workflow in rackHD.

    Snapshots nodes/obms, runs the discovery graph, then verifies the
    discovered UCS node counts against the expected minimum.
    :return:
    """
    self.get_nodes_utility()
    self.get_obms_utility()
    data_payload = {
        "name": "Graph.Ucs.Discovery",
        "options": {
            "defaults": {
                "username": "******",
                "password": "******",
                "ucs": self.UCS_IP,
                "uri": "http://" + self.RACKHD_IP + ":7080/sys"
            }
        }
    }
    header = {"Content-Type": "application/json"}
    api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post",
                                    headers=header, payload=data_payload)
    # Verify the POST succeeded before reading the graph id out of the body;
    # a failed POST would otherwise raise KeyError instead of a clear assertion.
    self.assertEqual(api_data['status'], 201,
                     'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
    id = api_data["json"]["context"]["graphId"]
    self.wait_utility(str(id), 0, "Discovery")
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    logs.info_1("Found {0} Nodes after cataloging the UCS".format(len(api_data['json'])))
    for node in api_data['json']:
        if node["obms"] != [] and node["obms"][0]["service"] == "ucs-obm-service":
            self.UCS_NODES.append(node)
            if node["type"] == "compute":
                self.UCS_COMPUTE_NODES.append(node)
    self.assertGreaterEqual(len(self.UCS_NODES), self.EXPECTED_UCS_NODES,
                            'Expected to discover {0} UCS nodes, got: {1}'
                            .format(self.EXPECTED_UCS_NODES, len(self.UCS_NODES)))
    logs.info_1("Found {0} UCS nodes {1}".format(len(self.UCS_COMPUTE_NODES), self.UCS_COMPUTE_NODES))
def get_ucs_node_list():
    """
    Get UCS nodes
    """
    response = fit_common.rackhdapi('/api/2.0/nodes')

    def _is_ucs_node(entry):
        # A UCS node is one whose first OBM entry is the ucs-obm-service.
        return entry["obms"] != [] and entry["obms"][0]["service"] == "ucs-obm-service"

    return [entry for entry in response['json'] if _is_ucs_node(entry)]
def test_api_20_ucs_discover_and_catalog_all(self):
    """
    Tests the UCS Discovery and Catalog All workflow in rackHD.

    Restores nodes/obms, runs the combined discovery/catalog graph, and
    checks that the node-count delta matches the expected physical plus
    logical UCS server counts.
    :return:
    """
    # delete all previously discovered nodes and catalogs
    self.assertTrue(restore_node_utility(), "failed to restore nodes")
    self.assertTrue(restore_obms_utility(), "failed to restore obms")
    initialNodeCount = len(self.get_ucs_node_list())
    expected_ucs_logical_nodes = self.get_service_profile_count()
    expected_ucs_physical_nodes = self.get_physical_server_count()
    data_payload = {
        "name": "Graph.Ucs.Discovery",
        "options": {
            "defaults": {
                "username": UCSM_USER,
                "password": UCSM_PASS,
                "ucs": UCSM_IP,
                "uri": UCS_SERVICE_URI
            },
            "when-discover-physical-ucs": {
                "discoverPhysicalServers": "true",
            },
            "when-discover-logical-ucs": {
                "discoverLogicalServer": "true"
            },
            "when-catalog-ucs": {
                "autoCatalogUcs": "true"
            }
        }
    }
    header = {"Content-Type": "application/json"}
    api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post",
                                    headers=header, payload=data_payload)
    # Check the POST status before touching the JSON body so a failed POST
    # surfaces as an assertion rather than a KeyError.
    self.assertEqual(api_data['status'], 201,
                     'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
    id = api_data["json"]["context"]["graphId"]
    status = self.wait_utility(str(id), 0, "Discovery", 240)
    self.assertEqual(status, 'succeeded', 'Discovery graph returned status {}'.format(status))
    newNodeCount = len(self.get_ucs_node_list())
    # Log the discovered-node delta, not the size of the POST response body.
    logs.info_1("Found {0} Nodes after cataloging the UCS".format(newNodeCount - initialNodeCount))
    self.assertEqual(newNodeCount - initialNodeCount,
                     expected_ucs_physical_nodes + expected_ucs_logical_nodes,
                     'Expected to discover {0} UCS nodes, got: {1}'
                     .format(expected_ucs_physical_nodes + expected_ucs_logical_nodes,
                             newNodeCount - initialNodeCount))
def test_api_redfish_chassis(self):
    """
    Tests the redfish/chassis routes with UCS nodes.

    Confirms every UCS enclosure appears under /redfish/v1/Chassis and
    that each chassis detail route answers 200.
    :return:
    """
    ucsEnclList = self.get_ucs_encl_id_list()
    errUrls = ''
    api_data = fit_common.rackhdapi('/redfish/v1/Chassis')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    for chassis in api_data['json']['Members']:
        url = chassis['@odata.id']
        chassis_id = url[len('/redfish/v1/Chassis/'):]
        if chassis_id in ucsEnclList:
            ucsEnclList.remove(chassis_id)
            # Use a distinct variable for the per-chassis GET so the Members
            # list being iterated (api_data) is not clobbered mid-loop.
            chassis_data = fit_common.rackhdapi(url)
            if chassis_data['status'] != 200:
                errUrls += url + ' returned status ' + str(chassis_data['status']) + ',\n'
    self.assertEqual(len(ucsEnclList), 0, 'not all UCS chassis were listed under /chassis')
    self.assertEqual(len(errUrls), 0, errUrls)
def test_api_redfish_chassis_power(self):
    """
    Tests the redfish /Chassis/{identifier}/Power APIs with UCS nodes
    :return:
    """
    failures = []
    for encl_id in ucs_common.get_ucs_encl_id_list():
        power_url = '/redfish/v1/Chassis/{}/Power'.format(encl_id)
        response = fit_common.rackhdapi(power_url)
        if response['status'] != 200:
            failures.append(power_url + ' returned status ' + str(response['status']) + ',\n')
    errUrls = ''.join(failures)
    self.assertEqual(len(errUrls), 0, errUrls)
def test_api_redfish_chassis_power(self):
    """
    Tests the redfish /Chassis/{identifier}/Power APIs with UCS nodes
    :return:
    """
    errUrls = ''
    enclosures = ucs_common.get_ucs_encl_id_list()
    for enclosure in enclosures:
        power_url = '/redfish/v1/Chassis/{}/Power'.format(enclosure)
        result = fit_common.rackhdapi(power_url)
        status_code = result['status']
        if status_code == 200:
            continue
        errUrls += power_url + ' returned status ' + str(status_code) + ',\n'
    self.assertEqual(len(errUrls), 0, errUrls)
def get_obms_utility(self):
    """
    Takes inventory of the obms available before discovering the UCS obms.
    We will restore the obms collection to this snapshot.
    :return:
    """
    response = fit_common.rackhdapi('/api/2.0/obms')
    self.assertEqual(response['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(response['status']))
    # Record every pre-existing obm id so the restore step can distinguish
    # old obms from the ones added by this test run.
    self.INITIAL_OBMS.update((entry['id'], entry['service']) for entry in response['json'])
    logs.info_1("Found {0} obms before cataloging the UCS: {1}".format(len(self.INITIAL_OBMS), self.INITIAL_OBMS))
def get_nodes_utility(self):
    """
    Takes inventory of the nodes available before discovering the UCS nodes.
    We will restore the nodes collection to this snapshot
    :return:
    """
    response = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(response['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(response['status']))
    # Snapshot the pre-existing node ids and types for the later restore.
    self.INITIAL_NODES.update({entry['id']: entry['type'] for entry in response['json']})
    logs.info_1("Found {0} Nodes before cataloging the UCS. {1}"
                .format(len(self.INITIAL_NODES), self.INITIAL_NODES))
def test_api_20_vefify_catalogs_source_data(self):
    """Check that each catalog source document for each node has non-empty
    id/node/source/updatedAt/createdAt fields."""
    msg = "Description: Check source data of catalogs created for node"
    logs.info_2("\t{0}".format(msg))
    for node in self.NODELIST:
        for item in self.CATALOGS[node]:
            logs.info_2("Checking source:{0}".format(item['source']))
            self.assertNotEqual(item, '', 'Empty JSON Field')
            sourcedata = fit_common.rackhdapi("/api/2.0/nodes/" + node + "/catalogs/" + item['source'])
            # Verify the request itself succeeded before inspecting payload
            # fields (consistent with the other catalog tests in this suite).
            self.assertEqual(sourcedata['status'], 200,
                             "Incorrect HTTP return code, expected 200, got:{0}"
                             .format(str(sourcedata['status'])))
            for field in ['id', 'node', 'source', 'updatedAt', 'createdAt']:
                self.assertGreater(len(sourcedata['json'][field]), 0, field + ' field error')
def test_api_20_get_catalogs(self):
    """Fetch each node's catalogs, cache them, and verify required fields."""
    msg = "Description: Check catalogs data per node."
    logs.info_2("\t{0}".format(msg))
    required_fields = ('node', 'id', 'source', 'data')
    for node in self.NODELIST:
        api_data = fit_common.rackhdapi("/api/2.0/nodes/" + node + "/catalogs")
        self.assertEqual(api_data['status'], 200,
                         "Incorrect HTTP return code, expected 200, got:{0}"
                         .format(str(api_data['status'])))
        self.CATALOGS[node] = api_data['json']
        for catalog_entry in api_data['json']:
            for field in required_fields:
                self.assertIn(field, catalog_entry, field + ' field error')
def get_ucs_node_list(self):
    """Populate NODELIST with UCS node ids and sort each id into the
    rack/blade/chassis bucket list based on the node-name suffix."""
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    # Ordered (substring, bucket) pairs preserve the original elif priority.
    buckets = (("rack", self.RACK_NODELIST),
               ("blade", self.BLADE_NODELIST),
               ("chassis", self.CHASSIS_NODELIST))
    for entry in api_data['json']:
        obms = entry["obms"]
        if obms == [] or obms[0]["service"] != "ucs-obm-service":
            continue
        self.NODELIST.append(entry["id"])
        short_name = entry["name"].split("/")[-1]
        for marker, bucket in buckets:
            if marker in short_name:
                bucket.append(entry["id"])
                break
def test_api_20_get_pollers_by_id(self):
    """List each node's pollers, verify required fields, then dump the
    data for every supported poller."""
    msg = "Description: Display the poller data per node."
    logs.info_2("\t{0}".format(msg))
    required = ('node', 'config', 'createdAt', 'id', 'name',
                'failureCount', 'leaseExpires', 'leaseToken', 'updatedAt')
    for node in self.NODELIST:
        api_data = fit_common.rackhdapi("/api/2.0/nodes/" + node + "/pollers")
        self.assertEqual(api_data['status'], 200,
                         "Incorrect HTTP return code, expected 200, got:{0}"
                         .format(str(api_data['status'])))
        for poller_entry in api_data['json']:
            # check required fields
            self.assertGreater(poller_entry['pollInterval'], 0, 'pollInterval field error')
            for field in required:
                self.assertIn(field, poller_entry, field + ' field error')
        logs.info_2("\nNode: ")
        poller_dict = test_api_utils.get_supported_pollers(node)
        self.POLLERS[node] = poller_dict
        for poller_name, poller_info in poller_dict.items():
            poller_id = poller_info["poller_id"]
            logs.info_2("\nPoller: " + poller_name + " ID: " + str(poller_id))
            poll_data = fit_common.rackhdapi("/api/2.0/pollers/" + poller_id)
            logs.info_5(fit_common.json.dumps(poll_data['json'], indent=4))
def test_api_20_get_catalogs(self):
    """Cache each node's catalog list and assert the mandatory keys exist."""
    msg = "Description: Check catalogs data per node."
    logs.info_2("\t{0}".format(msg))
    for node in self.NODELIST:
        api_data = fit_common.rackhdapi("/api/2.0/nodes/" + node + "/catalogs")
        status_code = api_data['status']
        self.assertEqual(status_code, 200,
                         "Incorrect HTTP return code, expected 200, got:{0}".format(str(status_code)))
        catalog_list = api_data['json']
        self.CATALOGS[node] = catalog_list
        for catalog in catalog_list:
            for key in ('node', 'id', 'source', 'data'):
                self.assertIn(key, catalog, key + ' field error')
def get_nodes_utility(self):
    """
    Takes inventory of the nodes available before discovering the UCS nodes.
    We will restore the nodes collection to this snapshot
    :return:
    """
    listing = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(listing['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(listing['status']))
    # Remember every pre-existing node id -> type for the later restore.
    self.INITIAL_NODES.update((record['id'], record['type']) for record in listing['json'])
    logs.info_1("Found {0} Nodes before cataloging the UCS. {1}".format(
        len(self.INITIAL_NODES), self.INITIAL_NODES))
def restore_obms_utility(self):
    """
    Deletes all the added ucs obms by this test.
    :return:
    """
    logs.info_1("Restoring OBMs")
    listing = fit_common.rackhdapi('/api/2.0/obms')
    self.assertEqual(listing['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(listing['status']))
    for entry in listing['json']:
        # Only delete obms that were NOT in the pre-test snapshot.
        if entry['id'] in self.INITIAL_OBMS:
            continue
        deletion = fit_common.rackhdapi('/api/2.0/obms/' + entry['id'], action="delete")
        logs.info_1("Deleting OBM: {0}. Status was: {1}".format(entry['id'], str(deletion['status'])))
    # Give the deletions time to settle before re-reading the collection.
    time.sleep(self.MAX_WAIT_ON_DELETE)
    listing = fit_common.rackhdapi('/api/2.0/obms')
    self.assertEqual(listing['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(listing['status']))
    remaining = {}
    for entry in listing['json']:
        remaining[entry['id']] = entry['service']
    self.assertEqual(len(remaining), len(self.INITIAL_OBMS),
                     "Found {0} ucs obms remaining after restoring the obms, should be {1}. Remaining OBMs: {2}"
                     .format(len(remaining), len(self.INITIAL_OBMS), remaining))
def get_obms_utility():
    """
    Takes inventory of the obms available before discovering the UCS obms.
    We will restore the obms collection to this snapshot.
    :return: return False on failure, or True otherwise
    """
    response = fit_common.rackhdapi('/api/2.0/obms')
    status = response['status']
    if status != 200:
        logs.error("get /api/2.0/obms returned status {}, expected 200".format(status))
        return False
    INITIAL_OBMS.update((entry['id'], entry['service']) for entry in response['json'])
    logs.info_1("Found {0} obms before cataloging the UCS: {1}".format(len(INITIAL_OBMS), INITIAL_OBMS))
    return True
def get_obms_utility():
    """
    Takes inventory of the obms available before discovering the UCS obms.
    We will restore the obms collection to this snapshot.
    :return: return False on failure, or True otherwise
    """
    result = fit_common.rackhdapi('/api/2.0/obms')
    if result['status'] != 200:
        logs.error("get /api/2.0/obms returned status {}, expected 200".format(result['status']))
        return False
    for record in result['json']:
        INITIAL_OBMS[record['id']] = record['service']
    logs.info_1("Found {0} obms before cataloging the UCS: {1}".format(len(INITIAL_OBMS), INITIAL_OBMS))
    return True
def get_obms_utility(self):
    """
    Takes inventory of the obms available before discovering the UCS obms.
    We will restore the obms collection to this snapshot.
    :return:
    """
    result = fit_common.rackhdapi('/api/2.0/obms')
    self.assertEqual(result['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(result['status']))
    self.INITIAL_OBMS.update({record['id']: record['service'] for record in result['json']})
    logs.info_1("Found {0} obms before cataloging the UCS: {1}".format(
        len(self.INITIAL_OBMS), self.INITIAL_OBMS))
def get_nodes_utility():
    """
    Takes inventory of the nodes available before discovering the UCS nodes.
    We will restore the nodes collection to this snapshot
    :return: return False on failure, or True otherwise
    """
    response = fit_common.rackhdapi('/api/2.0/nodes')
    if response['status'] != 200:
        logs.error("get /api/2.0/nodes returned status {}, expected 200".format(response['status']))
        return False
    INITIAL_NODES.update((entry['id'], entry['type']) for entry in response['json'])
    logs.info_1("Found {0} Nodes before cataloging the UCS. {1}"
                .format(len(INITIAL_NODES), INITIAL_NODES))
    return True
def get_nodes_utility():
    """
    Takes inventory of the nodes available before discovering the UCS nodes.
    We will restore the nodes collection to this snapshot
    :return: return False on failure, or True otherwise
    """
    result = fit_common.rackhdapi('/api/2.0/nodes')
    status = result['status']
    if status != 200:
        logs.error("get /api/2.0/nodes returned status {}, expected 200".format(status))
        return False
    for record in result['json']:
        INITIAL_NODES[record['id']] = record['type']
    logs.info_1("Found {0} Nodes before cataloging the UCS. {1}"
                .format(len(INITIAL_NODES), INITIAL_NODES))
    return True
def get_ucs_node_list(self):
    """Collect UCS node ids into NODELIST and classify them into the
    rack/blade/chassis bucket lists by the node-name leaf segment."""
    api_data = fit_common.rackhdapi('/api/2.0/nodes')
    self.assertEqual(api_data['status'], 200,
                     'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
    for entry in api_data['json']:
        obms = entry["obms"]
        # Skip nodes that are not managed by the ucs-obm-service.
        if obms == [] or obms[0]["service"] != "ucs-obm-service":
            continue
        node_id = entry["id"]
        self.NODELIST.append(node_id)
        leaf_name = entry["name"].split("/")[-1]
        if "rack" in leaf_name:
            self.RACK_NODELIST.append(node_id)
        elif "blade" in leaf_name:
            self.BLADE_NODELIST.append(node_id)
        elif "chassis" in leaf_name:
            self.CHASSIS_NODELIST.append(node_id)
def test_api_20_ucs_discovery(self):
    """
    Tests the UCS Discovery workflow in rackHD.

    Runs physical-only discovery (logical discovery disabled) and checks
    the node-count delta against the physical server count.
    :return:
    """
    initialNodeCount = len(ucs_common.get_ucs_node_list())
    data_payload = {
        "name": "Graph.Ucs.Discovery",
        "options": {
            "defaults": {
                "username": ucs_common.UCSM_USER,
                "password": ucs_common.UCSM_PASS,
                "ucs": ucs_common.UCSM_IP,
                "uri": ucs_common.UCS_SERVICE_URI
            },
            "when-discover-logical-ucs": {
                "discoverLogicalServer": "false"
            }
        }
    }
    expected_ucs_physical_nodes = ucs_common.get_physical_server_count()
    header = {"Content-Type": "application/json"}
    api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post",
                                    headers=header, payload=data_payload)
    # Assert on the POST status before indexing into the JSON body so a
    # failed POST surfaces as an assertion, not a KeyError.
    self.assertEqual(
        api_data['status'], 201,
        'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
    id = api_data["json"]["context"]["graphId"]
    status = ucs_common.wait_utility(str(id), 0, "Discovery")
    self.assertEqual(status, 'succeeded', 'Discovery graph returned status {}'.format(status))
    newNodeCount = len(ucs_common.get_ucs_node_list())
    # Log the node delta, not the size of the workflow POST response body.
    logs.info_1("Found {0} Nodes after cataloging the UCS".format(
        newNodeCount - initialNodeCount))
    self.assertEqual(
        newNodeCount - initialNodeCount, expected_ucs_physical_nodes,
        'Expected to discover {0} UCS nodes, got: {1}'.format(
            expected_ucs_physical_nodes, newNodeCount - initialNodeCount))
def test_api_20_vefify_catalogs_source_data(self):
    """Verify each cached catalog's source document has non-empty
    id/node/source/updatedAt/createdAt fields."""
    msg = "Description: Check source data of catalogs created for node"
    logs.info_2("\t{0}".format(msg))
    for node in self.NODELIST:
        for item in self.CATALOGS[node]:
            logs.info_2("Checking source:{0}".format(item['source']))
            self.assertNotEqual(item, '', 'Empty JSON Field')
            sourcedata = fit_common.rackhdapi("/api/2.0/nodes/" + node + "/catalogs/" + item['source'])
            # Check the HTTP status before touching the payload so a failed
            # GET is reported clearly rather than as a KeyError.
            self.assertEqual(sourcedata['status'], 200,
                             "Incorrect HTTP return code, expected 200, got:{0}"
                             .format(str(sourcedata['status'])))
            for field in ['id', 'node', 'source', 'updatedAt', 'createdAt']:
                self.assertGreater(len(sourcedata['json'][field]), 0, field + ' field error')
def test_api_20_verify_poller_current_data(self):
    """Fetch the current data sample for every cached poller and collect
    any failures into a single report."""
    msg = "Description: Display most current data from poller"
    logs.info_2("\t{0}".format(msg))
    errorlist = []
    ok_codes = (200, 201, 202, 204)
    for node in self.NODELIST:
        for poller_name, poller_info in self.POLLERS[node].items():
            poller_id = poller_info["poller_id"]
            logs.info_2("Poller: {} ID: {} ".format(poller_name, str(poller_id)))
            monurl = "/api/2.0/pollers/" + str(poller_id) + "/data/current"
            api_data = fit_common.rackhdapi(url_cmd=monurl)
            if api_data['status'] in ok_codes:
                logs.info_5(fit_common.json.dumps(api_data['json'], indent=4))
            else:
                errorlist.append("Error: Node {} Poller_ID {} Failed to get current poller data, status {}"
                                 .format(node, poller_id, api_data['status']))
    if errorlist != []:
        logs.info_2("{}".format(fit_common.json.dumps(errorlist, indent=4)))
    self.assertEqual(errorlist, [], "Error reported.")
def test_api_20_ucs_catalog(self):
    """
    Tests the UCS Catalog workflow in rackHD.

    Posts a catalog workflow for each UCS node, then restores the
    database to its pre-discovery state.
    :return:
    """
    catalog_workflows = []
    for ucs_node in self.UCS_NODES:
        # NOTE(review): this route uses the legacy /api/1.1 path while the
        # rest of the suite uses /api/2.0 — confirm this is intentional.
        postUrl = '/api/1.1/nodes/' + str(ucs_node["id"]) + "/workflows?name=Graph.Ucs.Catalog"
        header = {"Content-Type": "application/json"}
        api_data = fit_common.rackhdapi(postUrl, headers=header, action="post")
        self.assertEqual(api_data['status'], 201,
                         'Expected to catalog {0} UCS nodes with status {1}, got: {2}'
                         .format(ucs_node["id"], 201, api_data['status']))
        catalog_workflows.append(api_data["json"]["instanceId"])
        logs.info_1("Posted URL: {0} with status: {1}".format(postUrl, api_data['status']))
    # Restore the nodes, obms, and catalogs to their state before the UCS discovery
    # in order to avoid any failure in other tests
    logs.info_1("Restoring the database to the state it was in before the UCS discovery and catalog")
    self.restore_node_utility(catalog_workflows)
    self.restore_obms_utility()
def test01_decommission_node(self):
    """Launch the decommission workflow on the class node and wait for it
    to complete, failing with a clear message on a bad POST."""
    # launch workflow
    workflowid = None
    result = fit_common.rackhdapi('/api/2.0/nodes/' + self.__class__.__NODE + '/workflows',
                                  action='post', payload=DECOMMISSION_PAYLOAD)
    if result['status'] == 201:
        # workflow running
        log.info_5(" InstanceID: " + result['json']['instanceId'])
        log.info_5(" Payload: " + dumps(DECOMMISSION_PAYLOAD))
        workflowid = result['json']['instanceId']
    else:
        # workflow failed with response code
        log.error(" InstanceID: " + result['text'])
        log.error(" Payload: " + dumps(DECOMMISSION_PAYLOAD))
        # result['status'] is an int; str() it so the concatenation does not
        # raise TypeError and mask the real failure.
        self.fail("Workflow failed with response code: " + str(result['status']))
    self.assertTrue(
        self.__wait_for_workflow_complete(workflowid, time.time()),
        "Decommission Node workflow failed, see logs.")
def test_api_20_ucs_discovery(self):
    """
    Tests the UCS Discovery workflow in rackHD.

    Runs physical-only discovery and verifies the node-count delta
    matches the physical server count reported by UCS.
    :return:
    """
    initialNodeCount = len(ucs_common.get_ucs_node_list())
    data_payload = {
        "name": "Graph.Ucs.Discovery",
        "options": {
            "defaults": {
                "username": ucs_common.UCSM_USER,
                "password": ucs_common.UCSM_PASS,
                "ucs": ucs_common.UCSM_IP,
                "uri": ucs_common.UCS_SERVICE_URI
            },
            "when-discover-logical-ucs": {
                "discoverLogicalServer": "false"
            }
        }
    }
    expected_ucs_physical_nodes = ucs_common.get_physical_server_count()
    header = {"Content-Type": "application/json"}
    api_data = fit_common.rackhdapi("/api/2.0/workflows", action="post",
                                    headers=header, payload=data_payload)
    # Verify the POST before reading the graph id from the body; a failed
    # POST would otherwise raise KeyError instead of a clear assertion.
    self.assertEqual(api_data['status'], 201,
                     'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
    id = api_data["json"]["context"]["graphId"]
    status = ucs_common.wait_utility(str(id), 0, "Discovery")
    self.assertEqual(status, 'succeeded', 'Discovery graph returned status {}'.format(status))
    newNodeCount = len(ucs_common.get_ucs_node_list())
    # Log the discovered-node delta, not the size of the POST response body.
    logs.info_1("Found {0} Nodes after cataloging the UCS".format(newNodeCount - initialNodeCount))
    self.assertEqual(newNodeCount - initialNodeCount, expected_ucs_physical_nodes,
                     'Expected to discover {0} UCS nodes, got: {1}'
                     .format(expected_ucs_physical_nodes, newNodeCount - initialNodeCount))
def wait_utility(self, id, counter, name):
    """
    Recursively wait for the ucs discovery workflow to finish
    :param id: Graph ID
    :param counter: Safeguard for the number of times we can check the status of the graph
    :param name: Description of graph we are waiting for
    :return: return False on failure, or True otherwise
    """
    api_data = fit_common.rackhdapi('/api/2.0/workflows/' + str(id))
    status = api_data["json"]["status"]
    if status == "running" and counter < self.MAX_WAIT:
        time.sleep(1)
        logs.info_1("In the wait_utility: Workflow status is {0} for the {1}'s run. ID: {2}, name: {3}"
                    .format(status, counter, id, name))
        counter += 1
        # Propagate the recursive result; without this return the caller
        # received None whenever at least one poll saw "running".
        return self.wait_utility(id, counter, name)
    elif status == "running" and counter >= self.MAX_WAIT:
        logs.info_1("In the wait_utility: Timed out after trying {0} times. ID: {1}, name: {2}"
                    .format(self.MAX_WAIT, id, name))
        return False
    else:
        logs.info_1("In the wait_utility: Waiting for workflow {0}. The status is: {1} for run: {2}. ID: {3}"
                    .format(name, status, counter, id))
        return True