def post_workflows(self, graph_name, timeout_sec=300, nodes=None, data=None,
                   tasks=None, callback=None, run_now=True):
    """Post a workflow graph to every compute node and optionally run it.

    :param graph_name: injectable name of the graph to post.
    :param timeout_sec: timeout handed to run_workflow_tasks.
    :param nodes: node list; fetched from the API when None/empty.
    :param data: request body for the workflow post (defaults to {}).
    :param tasks: caller-supplied list that also collects worker threads.
    :param callback: AMQP finish callback; defaults to handle_graph_finish.
    :param run_now: when True, run the queued worker threads immediately.
    """
    # BUG FIX: the original used mutable default arguments ([], {}, []),
    # which are shared across calls and leak accumulated state between
    # tests. Use None sentinels instead (backward-compatible).
    nodes = [] if nodes is None else nodes
    data = {} if data is None else data
    tasks = [] if tasks is None else tasks
    self.__graph_name = graph_name
    self.__graph_status = []
    if not nodes:
        Api().nodes_get_all()
        nodes = loads(self.__client.last_response.data)
    if callback is None:  # was `== None`; identity test is the idiom
        callback = self.handle_graph_finish
    for n in nodes:
        if n.get('type') == 'compute':
            node_id = n.get('id')  # renamed from `id` (shadowed builtin)
            assert_not_equal(node_id, None)
            LOG.info('starting amqp listener for node {0}'.format(node_id))
            worker = AMQPWorker(queue=QUEUE_GRAPH_FINISH, callbacks=[callback])
            thread = WorkerThread(worker, node_id)
            self.__tasks.append(thread)
            tasks.append(thread)
            # Cancel any active workflow first; a 404 just means none was
            # running on the node.
            try:
                Api().nodes_workflow_action_by_id(node_id, {'command': 'cancel'})
            except ApiException as e:
                assert_equal(404, e.status, message='status should be 404')
            except (TypeError, ValueError) as e:
                assert (e.message)
            Api().nodes_post_workflow_by_id(node_id, name=self.__graph_name, body=data)
    if run_now:
        self.run_workflow_tasks(self.__tasks, timeout_sec)
def test_17_node_get_obm_by_node_id(self):
    # Testing GET:/api/2.0/:id/obm
    # WARNING (review note, 12/21/16): this test deletes every OBM setting
    # on the compute nodes. If it runs before other tests that rely on OBMs
    # being configured, the test bed is left broken — restore the OBMs when
    # this set is done, run only against __test_nodes, or verify the run is
    # against virtual nodes only.
    Api().nodes_get_all()
    response = self.__client.last_response
    self.assertEqual(200, response.status, msg=response.status)
    for node in loads(response.data):
        if node.get('type') != 'compute':
            continue
        LOG.info(node, json=True)
        if fit_common.VERBOSITY >= 2:
            print("{}".format(json.dumps(node, indent=4)))
        node_id = node.get('id')
        Api().nodes_get_obms_by_node_id(identifier=node_id)
        LOG.info('getting OBMs for node {0}'.format(node_id))
        if fit_common.VERBOSITY >= 2:
            print('getting OBMs for node {0}'.format(node_id))
        response = self.__client.last_response
        self.assertEqual(200, response.status, msg=response.status)
        obms = loads(response.data)
        self.assertNotEqual(0, len(obms), msg='OBMs list was empty!')
        # Delete each OBM that was attached to this node.
        for obm in obms:
            Api().obms_delete_by_id(identifier=obm.get('id'))
            delete_rsp = self.__client.last_response
            self.assertEqual(204, delete_rsp.status, msg=delete_rsp.status)
def test_node_put_obm_by_node_id(self):
    # Testing PUT:/api/2.0/nodes/:id/obm
    self.create_temp_nodes()
    Api().nodes_get_all()
    rsp = self.__client.last_response
    self.assertEqual(200, rsp.status, msg=rsp.status)
    test_obm = {
        'config': {
            'host': '1.2.3.4',
            'user': '******',
            'password': '******'
        },
        'service': 'noop-obm-service'
    }
    # Attach the noop OBM to every temporary test compute node.
    for node in loads(rsp.data):
        if node.get('name') != 'test_compute_node':
            continue
        node_id = node.get('id')
        logs.info(" Node to put obm: %s %s ", node_id, node.get('name'))
        test_obm["nodeId"] = str(node_id)
        logs.debug(json.dumps(node, indent=4))
        Api().nodes_put_obms_by_node_id(identifier=node_id, body=test_obm)
        logs.info(' Creating obm: %s ', str(test_obm))
        put_rsp = self.__client.last_response
        self.assertEqual(201, put_rsp.status, msg=put_rsp.status)
def test_workflows_action(self):
    """ Testing PUT:/api/2.0/nodes/:id/workflows/action """
    Api().nodes_get_all()
    nodes = self.__get_data()
    for n in nodes:
        if n.get('type') == 'compute':
            node_id = n.get('id')  # renamed from `id` (shadowed builtin)
            timeout = 5
            done = False
            # Keep posting Graph.Discovery and trying to cancel it until a
            # cancel succeeds or retries run out; a 404 on cancel means the
            # graph was not active yet.
            while timeout > 0 and not done:  # was `done == False`
                if 0 == self.__post_workflow(node_id, 'Graph.Discovery'):
                    fail('Timed out waiting for graph to start!')
                try:
                    Api().nodes_workflow_action_by_id(
                        node_id, {'command': 'cancel'})
                    done = True
                except rest.ApiException as e:
                    if e.status != 404:
                        raise e
                timeout -= 1
            assert_not_equal(timeout, 0,
                             message='Failed to delete an active workflow')
    # A bogus node id must be rejected with an ApiException.
    assert_raises(rest.ApiException, Api().nodes_workflow_action_by_id,
                  'fooey', {'command': 'test'})
def test_tags(self):
    """ Testing GET:/api/2.0/tags """
    Api().nodes_get_all()
    nodes = self.__get_data()
    tagsArray = []
    # Build the expected per-node tag rules from each compute node catalog.
    for n in nodes:
        if n.get('type') == 'compute':
            Api().nodes_get_catalog_by_id(identifier=n.get('id'))
            updated_catalog = self.__get_data()
            tagsWithRules = {
                "name": n.get('id'),
                "rules": [{
                    "equals": updated_catalog[0]["data"]
                    ["System Information"]["Manufacturer"],
                    "path": "dmi.System Information.Manufacturer"
                }]
            }
            tagsArray.append(tagsWithRules)
    Api().get_all_tags()
    rsp = self.__client.last_response
    updated_tags = self.__get_data()
    assert_equal(200, rsp.status, message=rsp.reason)
    # Iterate the tags directly instead of the old `xrange(len(...))`
    # index loop (same checks, idiomatic iteration).
    for tag in updated_tags:
        assert_equal(tag['rules'][0]['path'],
                     'dmi.System Information.Manufacturer',
                     message='Could not find the tag')
def test_tag_create(self):
    """ Testing POST:/api/2.0/tags/ """
    Api().nodes_get_all()
    for node in self.__get_data():
        if node.get('type') != 'compute':
            continue
        node_id = node.get('id')
        Api().nodes_get_catalog_by_id(identifier=node_id)
        catalog = self.__get_data()
        manufacturer = catalog[0]["data"]["System Information"]["Manufacturer"]
        # One tag per compute node, named after the node id, matching on
        # the catalog's manufacturer string.
        tag_body = {
            "name": node_id,
            "rules": [{
                "equals": manufacturer,
                "path": "dmi.System Information.Manufacturer"
            }]
        }
        LOG.info(tag_body)
        Api().create_tag(body=tag_body)
        created = self.__get_data()
        assert_equal(node_id, created["name"], "Failed creating tag")
    # Malformed body must be rejected.
    assert_raises(rest.ApiException, Api().create_tag, body='fooey')
def test_node_put_obm_invalid_node_id(self):
    # Testing that PUT:/api/2.0/:id/obm returns 404 with invalid node ID
    Api().nodes_get_all()
    rsp = self.__client.last_response
    nodes = loads(rsp.data)
    self.assertEqual(200, rsp.status, msg=rsp.status)
    test_obm = {
        'config': {
            'host': '1.2.3.4',
            'user': '******',
            'password': '******'
        },
        'service': 'noop-obm-service'
    }
    # A 404 requires an otherwise-valid payload, so borrow a real node id
    # for the body; a bad payload would yield a 400 instead.
    found_node = False
    for n in nodes:
        if n.get('name') == 'test_compute_node':
            test_obm["nodeId"] = str(n.get('id'))
            found_node = True
            break
    if not found_node:
        self.fail(msg='No test compute node available for try invalid_ID')
    try:
        Api().nodes_put_obms_by_node_id(identifier='invalid_ID', body=test_obm)
        self.fail(msg='did not raise exception')
    except ApiException as e:
        self.assertEqual(
            404, e.status,
            msg='unexpected response {0}, expected 404'.format(e.status))
def test_catalogs(self):
    """Testing GET:api/2.0/catalogs to get list of catalogs"""
    # get a list of all nodes
    Api().nodes_get_all()
    nodes = loads(self.__client.last_response.data)
    assert_not_equal(0, len(nodes), message='Node list was empty!')
    # get all catalog data
    Api().catalogs_get()
    rsp = self.__client.last_response
    assert_not_equal(404, rsp.status, message=rsp.reason)
    assert_equal(200, rsp.status, message=rsp.reason)
    catalogs = loads(rsp.data)
    # verify that each compute node contains a minimum set of catalogs
    for node in nodes:
        if node.get('type') == 'compute':
            for source in self.__expected_sources:
                for catalog in catalogs:
                    # BUG FIX: str.find() returns 0 (falsy) for a match at
                    # position 0 and -1 (truthy) for no match, so the old
                    # `catalog.get('node').find(node_id)` truth test was
                    # inverted. Substring containment is the correct check.
                    if (catalog.get('source') == source and
                            node.get('id') in catalog.get('node')):
                        break
                else:
                    fail('Catalog {0} not found in node {1}!'.format(
                        source, node.get('id')))
def test_workflows_graphs_put(self):
    # """ Testing PUT:/workflows/graphs """
    # If a graph with our injectableName already exists, bump the numeric
    # suffix of its friendlyName so the new PUT is distinguishable.
    Api().workflows_get_graphs_by_name('*')
    for graph in self.__get_data():
        if self.workflowDict['injectableName'] == str(graph.get('injectableName')):
            name_parts = str(graph.get('friendlyName')).split('_')
            bumped = int(name_parts[1]) + 1
            self.workflowDict['friendlyName'] = name_parts[0] + '_' + str(bumped)
            break
    # Add a workflow task
    logs.info("Adding workflow task: %s ", str(self.workflowDict))
    Api().workflows_put_graphs(body=self.workflowDict)
    self.assertEqual(201, self.__get_result().status)
    # Validate the content
    Api().workflows_get_graphs()
    foundInsertedWorkflow = False
    for graph in self.__get_data():
        if self.workflowDict['injectableName'] == str(graph.get('injectableName')):
            foundInsertedWorkflow = True
            self.assertEqual(graph.get('friendlyName'),
                             self.workflowDict.get('friendlyName'))
            self.assertEqual(graph.get('injectableName'),
                             self.workflowDict.get('injectableName'))
    self.assertEqual(foundInsertedWorkflow, True)
def test_node_tags_del(self):
    """ Testing DELETE:api/2.0/nodes/:id/tags/:tagName """
    get_codes = []
    del_codes = []
    Api().nodes_get_all()
    rsp = self.__client.last_response
    nodes = loads(rsp.data)
    get_codes.append(rsp)
    tags = self.__test_tags.get('tags')
    for node in nodes:
        node_id = node.get('id')
        # Delete each test tag from the node.
        for tag in tags:
            Api().nodes_del_tag_by_id(identifier=node_id, tag_name=tag)
            del_codes.append(self.__client.last_response)
        # Re-read the node and confirm the tags are really gone.
        Api().nodes_get_by_id(identifier=node_id)
        rsp = self.__client.last_response
        get_codes.append(rsp)
        updated_node = loads(rsp.data)
        for tag in tags:
            assert_true(tag not in updated_node.get('tags'),
                        message="Tag " + tag + " was not deleted")
    for code in get_codes:
        assert_equal(200, code.status, message=code.reason)
    for code in del_codes:
        assert_equal(204, code.status, message=code.reason)
    assert_raises(rest.ApiException, Api().nodes_del_tag_by_id,
                  'fooey', tag_name=['tag'])
def test_node_patch(self):
    """ Testing PATCH:/api/2.0/nodes/:id """
    data = {"name": 'fake_name_test'}
    Api().nodes_get_all()
    nodes = self.__get_data()
    codes = []
    for n in nodes:
        if n.get('name') != 'test_compute_node':
            continue
        uuid = n.get('id')
        # Rename the node, then verify the patch took effect.
        Api().nodes_patch_by_id(identifier=uuid, body=data)
        rsp = self.__client.last_response
        patched = self.__get_data()
        assert_equal(patched.get('name'), 'fake_name_test', 'Oops patch failed')
        codes.append(rsp)
        # Rename it back so later tests can still locate the node.
        LOG.info('Restoring name to "test_compute_node"')
        correct_data = {"name": 'test_compute_node'}
        Api().nodes_patch_by_id(identifier=uuid, body=correct_data)
        rsp = self.__client.last_response
        restored = self.__get_data()
        assert_equal(restored.get('name'), 'test_compute_node', 'Oops restoring failed')
        codes.append(rsp)
    assert_not_equal(0, len(codes), message='Failed to find compute node Ids')
    for c in codes:
        assert_equal(200, c.status, message=c.reason)
    assert_raises(rest.ApiException, Api().nodes_patch_by_id, 'fooey', data)
def test_11_workflows_action(self):
    # Testing PUT:/api/2.0/nodes/:id/workflows/action
    # Posts a workflow against each compute node, then verifies the
    # running workflow can be cancelled.
    Api().nodes_get_all()
    for node in self.__get_data():
        if node.get('type') != 'compute':
            continue
        node_id = node.get('id')
        retries = 5
        cancelled = False
        while retries > 0 and not cancelled:
            if 0 == self.__post_workflow(node_id, 'Graph.Discovery'):
                fail('Timed out waiting for graph to start!')
            # A 404 on cancel means the graph was not active yet; retry.
            try:
                Api().nodes_workflow_action_by_id(node_id, {'command': 'cancel'})
                cancelled = True
            except rest.ApiException as e:
                if e.status != 404:
                    raise e
            retries -= 1
        self.assertNotEqual(retries, 0,
                            msg='Failed to delete an active workflow')
    self.assertRaises(rest.ApiException, Api().nodes_workflow_action_by_id,
                      'fooey', {'command': 'test'})
def clear(self):
    """Delete every SKU currently known to the service."""
    Api().skus_get()
    # (removed an unused intermediate `rsp` local from the original)
    data = loads(self.__client.last_response.data)
    for item in data:
        LOG.info(item.get("id"))
        Api().skus_id_delete(item.get("id"))
def test_12_node_tags_patch(self):
    # Testing PATCH:/api/2.0/nodes/:id/tags
    codes = []
    Api().nodes_get_all()
    rsp = self.__client.last_response
    nodes = loads(rsp.data)
    codes.append(rsp)
    # Apply the test tags to every node.
    for node in nodes:
        LOG.info(node, json=True)
        if fit_common.VERBOSITY >= 2:
            print("info: node to tag {}".format(node.get('id')))
        Api().nodes_patch_tag_by_id(identifier=node.get('id'),
                                    body=self.__test_tags)
        LOG.info('Creating tag (name={0})'.format(self.__test_tags))
        if fit_common.VERBOSITY >= 2:
            print('Creating tag (name={0})'.format(self.__test_tags))
        codes.append(self.__client.last_response)
        LOG.info(node.get('id'))
        if fit_common.VERBOSITY >= 2:
            print("{} ".format(node.get('id')))
    for code in codes:
        self.assertEqual(200, code.status, msg=code.reason)
    self.assertRaises(rest.ApiException, Api().nodes_patch_tag_by_id,
                      'fooey', body=self.__test_tags)
def test_validate_user_readOnly(self):
    # """ Testing validate read Only privilege """
    user = {'username': '******', 'password': '******'}
    Api().get_user('funtest-name')
    logs.info(dumps(user, indent=4))
    # Switch the client to the read-only user's token, remembering the
    # admin token so it can be restored afterwards.
    save_admin_token = config.api_client.default_headers['authorization']
    config.api_client.default_headers[
        'authorization'] = 'JWT ' + self.get_auth_token(user)
    try:
        newuser = {
            'username': '******',
            'password': '******',
            'role': 'Administrator'
        }
        logs.info('should fail to create user')
        try:
            Api().add_user(body=newuser)
            # BUG FIX: the original passed silently when add_user
            # unexpectedly succeeded; a read-only user must not be able to
            # create users. (self.fail raises AssertionError, which is not
            # caught by the ApiException handler below.)
            self.fail(msg='add_user did not raise an exception')
        except ApiException as e:
            self.assertEqual(403, e.status,
                             msg='Expected 403 status, received {}'.format(
                                 e.status))
        logs.info('should be able to display users list')
        Api().list_users()
        users = self.__get_data()
        logs.debug(dumps(users, indent=4))
        self.assertNotEqual(0, len(users))
    finally:
        # Restore the admin token even when an assertion above fails, so
        # later tests do not run with the read-only token.
        config.api_client.default_headers['authorization'] = save_admin_token
def test_post_lookup(self):
    # """ Testing POST /"""
    # Remove any lookup left over from a previous run so the POST below
    # creates a fresh entry.
    Api().lookups_get(q=self.lookup.get("macAddress"))
    existing = loads(self.__client.last_response.data)
    for entry in existing:
        if self.lookup.get("macAddress") == str(entry.get('macAddress')):
            deleteID = str(entry.get('id'))
            logs.info(
                " Deleting the lookup with the same info before we post again, ID: %s",
                deleteID)
            Api().lookups_del_by_id(deleteID)
    # Add a lookup
    Api().lookups_post(self.lookup)
    rsp = self.__client.last_response
    self.assertEqual(201, rsp.status, msg=rsp.reason)
    self.lookup_id = str(loads(rsp.data).get('id'))
    logs.debug_5("Lookup ID is %s", self.lookup_id)
    # other tests depend on the value
    self.__class__.lookup_id = self.lookup_id
    logs.info(" The lookup ID from post: %s", self.lookup_id)
    # Validate the content
    Api().lookups_get(q=self.lookup.get("macAddress"))
    result = loads(self.__client.last_response.data)
    for field in ("ipAddress", "macAddress", "node"):
        self.assertEqual(str(result[0].get(field)),
                         str(self.lookup.get(field)))
def post_lookup(self):
    """ Testing POST /"""
    # Delete any lookup with the same MAC left over from a previous run.
    # (removed the unused `listLen` local from the original)
    Api().lookups_get(q=self.lookup.get("macAddress"))
    existing = loads(self.__client.last_response.data)
    for entry in existing:
        if self.lookup.get("macAddress") == str(entry.get('macAddress')):
            LOG.info("Deleting the lookup with the same info before we post again")
            deleteID = str(entry.get('id'))
            Api().lookups_del_by_id(deleteID)
    # Add a lookup
    Api().lookups_post(self.lookup)
    rsp = self.__client.last_response
    assert_equal(201, rsp.status, message=rsp.reason)
    self.id = str(loads(rsp.data).get('id'))
    LOG.info("ID is " + self.id)
    # Validate the content
    Api().lookups_get(q=self.lookup.get("macAddress"))
    result = loads(self.__client.last_response.data)
    assert_equal(str(result[0].get("ipAddress")), str(self.lookup.get("ipAddress")))
    assert_equal(str(result[0].get("macAddress")), str(self.lookup.get("macAddress")))
    assert_equal(str(result[0].get("node")), str(self.lookup.get("node")))
def __post_workflow(self, graph_name, nodes, body):
    """Post `graph_name` either to all nodes (via the workflows() helper)
    or, when NODE_INDEX selects one, to that single compute node.

    :param graph_name: injectable name of the graph to run.
    :param nodes: node list forwarded to the fan-out path.
    :param body: workflow request body.
    :returns: the targeted node's completion result, or None when the
        workflow was fanned out to all nodes.
    """
    # check if NODE_INDEX is set
    index = None
    try:
        index = int(NODE_INDEX)
    except Exception:
        # BUG FIX: was a bare `except:` (also swallows KeyboardInterrupt /
        # SystemExit). NODE_INDEX may be unset or non-numeric; in that case
        # fan the workflow out to every node.
        LOG.info('NODE_INDEX env is not set')
        workflows().post_workflows(graph_name, timeout_sec=DEFAULT_TIMEOUT_SEC,
                                   nodes=nodes, data=body)
        return
    # check if index is in the array range
    nodes = self.__get_compute_nodes()
    if index >= len(nodes):
        # (removed the unreachable `return` that followed this raise)
        raise Exception('index is outside the array range index: {0} vs nodes len {1}'
                        .format(index, len(nodes)))
    LOG.info('node index is set to {0}'.format(index))
    node = nodes[index]
    node_id = node.get('id')  # renamed from `id` (shadowed builtin)
    # delete active workflow on the selected node; 404 means none running
    try:
        Api().nodes_workflow_action_by_id(node_id, {'command': 'cancel'})
    except ApiException as e:
        assert_equal(404, e.status, message='status should be 404')
    Api().nodes_post_workflow_by_id(node_id, name=graph_name, body=body)
    log_context = self.__get_data().get('logContext')
    if log_context is None:
        # (removed the unreachable `return` that followed this raise)
        raise Exception('Could not find logContext in {0}'.format(self.__get_data()))
    # load graph instance id
    graph_instance = log_context.get('graphInstance')
    return self.__wait_for_completion(node, graph_name, graph_instance)
def check_nodes(self, service_type, uuid=None):
    """Check that nodes have an OBM of `service_type` configured.

    :param service_type: OBM service name expected (e.g. 'ipmi-obm-service').
    :param uuid: when given, restrict the check to that single node id.
    :returns: a list with one False entry per missing/mismatched OBM;
        an empty list means every checked node passed.
    """
    retval = []
    Api().nodes_get_all()
    nodes = loads(self.__client.last_response.data)
    for n in nodes:
        node_type = n.get('type')
        uid = n.get('id')
        if (uuid is None or uuid == uid) and node_type != 'enclosure':
            # Collect the OBMs that reference this node, either by raw id
            # or by the trailing segment of a node URI.
            obm_obj = []
            Api().obms_get()
            all_obms = loads(self.__client.last_response.data)
            for obm in all_obms:
                node_ref = obm.get('node')
                if node_ref == uid or node_ref.split('/')[-1] == uid:
                    obm_obj.append(obm)
            # obm_obj is always a list here, so a plain truthiness test
            # replaces the old redundant `is None or len(...) == 0` check.
            if not obm_obj:
                LOG.warning('No OBM settings for node type {0} (id={1})'.format(node_type, uid))
                retval.append(False)
            else:
                for obm in obm_obj:
                    service = obm.get('service')
                    if service_type not in service:
                        LOG.warning('No OBM service type {0} (id={1})'.format(service_type, uid))
                        retval.append(False)
    return retval
def test_14_node_tags_del(self):
    # Testing DELETE:api/2.0/nodes/:id/tags/:tagName
    # Removes the tags applied by test_node_tags_patch above.
    get_codes = []
    del_codes = []
    Api().nodes_get_all()
    rsp = self.__client.last_response
    nodes = loads(rsp.data)
    get_codes.append(rsp)
    tags = self.__test_tags.get('tags')
    for node in nodes:
        node_id = node.get('id')
        for tag in tags:
            Api().nodes_del_tag_by_id(identifier=node_id, tag_name=tag)
            del_codes.append(self.__client.last_response)
        # Re-read the node and confirm the tags were removed.
        Api().nodes_get_by_id(identifier=node_id)
        rsp = self.__client.last_response
        get_codes.append(rsp)
        updated_node = loads(rsp.data)
        for tag in tags:
            self.assertTrue(tag not in updated_node.get('tags'),
                            msg="Tag " + tag + " was not deleted")
    for code in get_codes:
        self.assertEqual(200, code.status, msg=code.reason)
    for code in del_codes:
        self.assertEqual(204, code.status, msg=code.reason)
    self.assertRaises(rest.ApiException, Api().nodes_del_tag_by_id,
                      'fooey', tag_name=['tag'])
def _set_ipmi(self, uid):
    """Create an ipmi-obm-service setting for node `uid` using the BMC MAC
    address from its 'bmc' catalog (falling back to the 'rmm' catalog).

    Returns False on any failure. NOTE(review): on success it falls
    through and returns None — callers cannot distinguish success from
    failure by truthiness; left unchanged to preserve the contract.
    """
    user, passwd = get_bmc_cred()
    mac = None
    Api().nodes_get_catalog_source_by_id(uid, 'bmc')
    rsp = self.__client.last_response
    bmc = loads(rsp.data)
    if 'data' in bmc:
        mac = bmc['data'].get('MAC Address')
    else:
        Api().nodes_get_catalog_source_by_id(uid, 'rmm')
        rsp = self.__client.last_response
        rmm = loads(rsp.data)
        if 'data' in rmm:
            # BUG FIX: this previously read bmc['data'], which raises
            # KeyError in this branch (bmc has no 'data' key here) and
            # ignored the freshly fetched rmm catalog entirely.
            mac = rmm['data'].get('MAC Address')
    if mac is not None:
        LOG.debug('BMC MAC {0} for {1}'.format(mac, uid))
        setting = {
            'nodeId': uid,
            'service': 'ipmi-obm-service',
            'config': {
                'user': user,
                'password': passwd,
                'host': mac
            }
        }
        LOG.info('Creating ipmi obm-settings for node {0} \n {1}'.format(uid, setting))
        try:
            Api().obms_put(setting)
        except rest.ApiException as e:
            LOG.error(e)
            return False
    else:
        LOG.error('Error finding configurable IPMI MAC address for {0}'.format(uid))
        return False
def test_validate_user_readOnly(self):
    """ Testing validate read Only privilege """
    user = {'username': '******', 'password': '******'}
    Api().get_user('funtest-name')
    self.__get_data()  # drain/parse the get_user response (result unused)
    LOG.info(user, json=True)
    # Switch the client to the read-only user's token, remembering the
    # admin token so it can be restored afterwards.
    save_admin_token = config.api_client.default_headers['authorization']
    config.api_client.default_headers[
        'authorization'] = 'JWT ' + self.get_auth_token(user)
    try:
        newuser = {
            'username': '******',
            'password': '******',
            'role': 'Administrator'
        }
        LOG.info('should fail to create user')
        try:
            Api().add_user(body=newuser)
            # BUG FIX: the original passed silently when add_user
            # unexpectedly succeeded; a read-only user must not be able to
            # create users. AssertionError is not an ApiException, so this
            # propagates past the handler below.
            raise AssertionError('add_user should have raised 403 ApiException')
        except ApiException as e:
            assert_equal(403, e.status)
        LOG.info('should be able to display users list')
        Api().list_users()
        users = self.__get_data()
        LOG.debug(users, json=True)
        assert_not_equal(0, len(users))
    finally:
        # Restore config token even if an assertion above failed, so later
        # tests do not run with the read-only token.
        config.api_client.default_headers['authorization'] = save_admin_token
def test_obmMac(self):
    """validates test utility method api_node_select() using obm mac address
    will return the correct node id"""
    obm_count = 0
    Api().nodes_get_all()
    for node in self.__get_data():
        if 'sku' not in node:
            continue
        Api().nodes_get_obms_by_node_id(identifier=node.get('id'))
        for obm_entry in self.__get_data():
            if not obm_entry['config']:
                continue
            # Selecting by the OBM host MAC must resolve to exactly this node.
            obm_mac = obm_entry['config'].get('host')
            id_list = api_node_select(self.__client, obm_mac=obm_mac)
            self.assertEqual(
                1, len(id_list),
                msg='Expected a list size of 1 got {} - node id {}'
                .format(len(id_list), node['id']))
            self.assertIn(
                node['id'], id_list,
                msg=
                'Node id {} with obm mac ({}) not found in Id list'
                .format(node.get('id'), obm_mac))
            obm_count += 1
    self.assertGreater(obm_count, 0, msg='No nodes with OBM found')
def test_05_node_patch(self):
    # Testing PATCH:/api/2.0/nodes/:id
    data = {"name": 'fake_name_test'}
    Api().nodes_get_all()
    codes = []
    for node in self.__get_data():
        if node.get('name') != 'test_compute_node':
            continue
        uuid = node.get('id')
        # Rename the node, then verify the patch took effect.
        Api().nodes_patch_by_id(identifier=uuid, body=data)
        rsp = self.__client.last_response
        patched = self.__get_data()
        self.assertEqual(patched.get('name'), 'fake_name_test', 'Oops patch failed')
        codes.append(rsp)
        LOG.info('Restoring name to "test_compute_node"')
        if fit_common.VERBOSITY >= 2:
            print('Restoring name to \"test_compute_node\"')
        # Rename it back so later tests can still locate the node.
        correct_data = {"name": 'test_compute_node'}
        Api().nodes_patch_by_id(identifier=uuid, body=correct_data)
        rsp = self.__client.last_response
        restored = self.__get_data()
        self.assertEqual(restored.get('name'), 'test_compute_node', 'Oops restoring failed')
        codes.append(rsp)
    self.assertNotEqual(0, len(codes), msg='Failed to find compute node Ids')
    for code in codes:
        self.assertEqual(200, code.status, msg=code.reason)
    self.assertRaises(rest.ApiException, Api().nodes_patch_by_id, 'fooey', data)
def post_sku(self):
    """Test POST:api/2.0/skus"""
    sku = {
        "name": "Quanta-D44",
        "rules": [{
            "path": "dmi.Base Board Information.Manufacturer",
            "contains": "Intel"
        }, {
            "path": "ohai.dmi.memory.total",
            "equals": "32946864kB"
        }],
        "discoveryGraphName": "Graph.InstallCoreOS",
        "discoveryGraphOptions": {
            "username": "******",
            "password": "******",
            "hostname": "mycoreos"
        }
    }
    Api().skus_post(sku)
    data = self.__get_data()
    for key in sku:
        assert_equal(sku[key], data[key])
    # POST the same SKU again and make sure that we get a 409
    try:
        Api().skus_post(sku)
        # BUG FIX: the original passed silently when the duplicate POST did
        # not raise. AssertionError is not an ApiException, so this
        # propagates past the handler below.
        raise AssertionError('duplicate skus_post should have returned 409')
    except ApiException as e:
        # BUG FIX: was assert_true(409, e.status), which always passed
        # because 409 is truthy (e.status was treated as the message).
        assert_equal(409, e.status)
def patch_lookup(self):
    """ Testing PATCH /:id"""
    Api().lookups_patch_by_id(self.id, self.patchedNode)
    # Re-query by MAC and confirm the node element was updated.
    Api().lookups_get(q=self.lookup.get("macAddress"))
    result = loads(self.__client.last_response.data)
    assert_equal(str(result[0].get("node")), self.patchedNode.get("node"))
def test_patch_lookup(self):
    # """ Testing PATCH /:id"""
    logs.info(" The lookup ID to be patched is %s", self.lookup_id)
    Api().lookups_patch_by_id(self.lookup_id, self.patchedNode)
    # Re-query by MAC and confirm the node element was updated.
    Api().lookups_get(q=self.lookup.get("macAddress"))
    result = loads(self.__client.last_response.data)
    self.assertEqual(str(result[0].get("node")), self.patchedNode.get("node"))
def test_workflows_graphs_delete(self):
    # """Testing DELETE:/workflows/graphs/injectableName"""
    injectable = self.workflowDict.get('injectableName')
    # NOTE(review): the friendlyName is compared against workflowDict2 —
    # presumably set by an earlier update test; confirm the test ordering.
    Api().workflows_get_graphs_by_name(injectable)
    graphs = self.__get_data()
    self.assertEqual(self.workflowDict2.get('friendlyName'),
                     str(graphs[0].get('friendlyName')))
    Api().workflows_delete_graphs_by_name(injectable)
    self.assertEqual(204, self.__get_result().status)
    # The graph must be gone after the delete.
    Api().workflows_get_graphs_by_name(injectable)
    self.assertEqual(0, len(self.__get_data()))
def test_node_workflows_get(self):
    """ Testing GET:/api/2.0/nodes/:id/workflows """
    # NOTE(review): this is a smoke test only — the responses are
    # collected but never asserted against.
    responses = []
    Api().nodes_get_all()
    for node in self.__get_data():
        if node.get('type') == 'compute':
            Api().nodes_get_workflow_by_id(identifier=node.get('id'))
            responses.append(self.__get_data())
def test_skus_id(self):
    """Testing GET:api/2.0/skus/id to get specific catalog details"""
    Api().skus_get()
    skus = loads(self.__client.last_response.data)
    # Fetch every SKU individually by its id and expect a 200.
    for sku in skus:
        self.__sku_id = sku.get('id')
        assert_is_not_none(self.__sku_id)
        Api().skus_id_get(identifier=self.__sku_id)
        rsp = self.__client.last_response
        assert_equal(200, rsp.status, message=rsp.reason)