def _check_retries(self, get_pairs_data):
    """
    Assert that after a 409 bulk_delete response whose errors are all
    ignorable, only the two unaffected pairs are retried and the retry's
    successful response is returned.

    :param get_pairs_data: callable taking the two retryable (lb, node)
        pairs and returning ``(pairs, data)`` for the first request.
    """
    # First response: every error is of a kind bulk_delete ignores.
    errors = {"errors": [
        server_not_member(self.lbs[0].upper(), self.nodes[0]),
        "Cloud Server {} does not exist".format(self.nodes[1]),
        "Load Balancer Pool {} does not exist".format(
            self.lbs[2].upper())
    ]}
    # Two extra pairs that are NOT named in the errors, so they get retried.
    retry_lb_1 = "d6d3aa7c-dfa5-4e61-96ee-1d54ac1075d2"
    retry_node_1 = "a95ae0c4-6ab8-4873-b82f-f8433840cff2"
    retry_lb_2 = "e6d3aa7c-dfa5-4e61-96ee-1d54ac1075d2"
    retry_node_2 = "e95ae0c4-6ab8-4873-b82f-f8433840cff2"
    pairs, data = get_pairs_data(
        retry_lb_1, retry_node_1, retry_lb_2, retry_node_2)
    retried_data = r._sorted_data(
        [(retry_lb_1, retry_node_1), (retry_lb_2, retry_node_2)])
    success_resp = {"good": "response"}
    seq = [
        (self.svc_req_intent(data),
         const(stub_json_response(errors, 409))),
        (log_intent("request-rcv3-bulk", errors,
                    req_body=("jsonified", data)),
         noop),
        # Second request only carries the two retryable pairs.
        (self.svc_req_intent(retried_data),
         const(stub_json_response(success_resp, 204))),
        (log_intent("request-rcv3-bulk", success_resp,
                    req_body=("jsonified", retried_data)),
         noop)
    ]
    self.assertEqual(
        perform_sequence(seq, r.bulk_delete(pairs)), success_resp)
def test_all_retries(self):
    """
    If bulk_delete returns "server not a member", lb or server deleted
    for all attempted pairs then there is no retry and returns None
    """
    # Every attempted pair is covered by an ignorable error, so nothing
    # is left to retry.
    errors = {"errors": [
        server_not_member(self.lbs[0].upper(), self.nodes[0]),
        "Cloud Server {} does not exist".format(self.nodes[1]),
        "Load Balancer Pool {} does not exist".format(
            self.lbs[2].upper())
    ]}
    extra_pairs = pset([
        (self.lbs[0], self.nodes[1]),  # test same server pairs
        (self.lbs[2], self.nodes[0])   # test same lb pairs
    ])
    all_pairs = self.pairs | extra_pairs
    data = r._sorted_data(all_pairs)
    seq = [
        (self.svc_req_intent(data),
         const(stub_json_response(errors, 409))),
        (log_intent("request-rcv3-bulk", errors,
                    req_body=("jsonified", data)),
         noop)
    ]
    self.assertIsNone(perform_sequence(seq, r.bulk_delete(all_pairs)))
def get_pairs_data(lbr1, noder1, lbr2, noder2):
    # Union the shared fixture pairs with duplicates-by-server,
    # duplicates-by-lb and the two pairs destined for retry.
    combined = self.pairs | pset([
        (self.lbs[0], self.nodes[1]),  # test same server pairs
        (self.lbs[2], self.nodes[0]),  # test same lb pairs
        (lbr1, noder1),
        (lbr2, noder2)
    ])
    return combined, r._sorted_data(combined)
def test_all_retries(self):
    """
    If bulk_delete returns "server not a member", lb or server deleted
    for all attempted pairs then there is no retry and returns None
    """
    terminal_errors = {
        "errors": [
            server_not_member(self.lbs[0].upper(), self.nodes[0]),
            "Cloud Server {} does not exist".format(self.nodes[1]),
            "Load Balancer Pool {} does not exist".format(
                self.lbs[2].upper())
        ]
    }
    # Attempt the fixture pairs plus a same-server and a same-lb pair;
    # every one of them is accounted for by an ignorable error.
    attempted = self.pairs | pset(
        [(self.lbs[0], self.nodes[1]),   # test same server pairs
         (self.lbs[2], self.nodes[0])])  # test same lb pairs
    body = r._sorted_data(attempted)
    seq = [(self.svc_req_intent(body),
            const(stub_json_response(terminal_errors, 409))),
           (log_intent("request-rcv3-bulk", terminal_errors,
                       req_body=("jsonified", body)),
            noop)]
    self.assertIsNone(perform_sequence(seq, r.bulk_delete(attempted)))
def _check_retries(self, get_pairs_data):
    """
    Common assertion helper: a 409 bulk_delete response whose errors are
    all ignorable triggers exactly one retry with the remaining pairs,
    and the retry's success body is the final result.

    :param get_pairs_data: callable ``(lbr1, noder1, lbr2, noder2) ->
        (pairs, data)`` building the initial request.
    """
    errors = {
        "errors": [
            server_not_member(self.lbs[0].upper(), self.nodes[0]),
            "Cloud Server {} does not exist".format(self.nodes[1]),
            "Load Balancer Pool {} does not exist".format(
                self.lbs[2].upper())
        ]
    }
    lbr1 = "d6d3aa7c-dfa5-4e61-96ee-1d54ac1075d2"
    noder1 = "a95ae0c4-6ab8-4873-b82f-f8433840cff2"
    lbr2 = "e6d3aa7c-dfa5-4e61-96ee-1d54ac1075d2"
    noder2 = "e95ae0c4-6ab8-4873-b82f-f8433840cff2"
    pairs, data = get_pairs_data(lbr1, noder1, lbr2, noder2)
    # Only the two pairs absent from the errors get resubmitted.
    retried_data = r._sorted_data([(lbr1, noder1), (lbr2, noder2)])
    success_resp = {"good": "response"}
    first_call = (self.svc_req_intent(data),
                  const(stub_json_response(errors, 409)))
    first_log = (log_intent("request-rcv3-bulk", errors,
                            req_body=("jsonified", data)),
                 noop)
    retry_call = (self.svc_req_intent(retried_data),
                  const(stub_json_response(success_resp, 204)))
    retry_log = (log_intent("request-rcv3-bulk", success_resp,
                            req_body=("jsonified", retried_data)),
                 noop)
    seq = [first_call, first_log, retry_call, retry_log]
    self.assertEqual(perform_sequence(seq, r.bulk_delete(pairs)),
                     success_resp)
def get_pairs_data(lbr1, noder1, lbr2, noder2):
    # Start from the overlapping pairs plus the retryable ones, then
    # merge in the shared fixture pairs before sorting.
    extra = [
        (self.lbs[0], self.nodes[1]),  # test same server pairs
        (self.lbs[2], self.nodes[0]),  # test same lb pairs
        (lbr1, noder1),
        (lbr2, noder2)
    ]
    all_pairs = pset(extra) | self.pairs
    return all_pairs, r._sorted_data(all_pairs)
def get_pairs_data(lbr1, noder1, lbr2, noder2):
    new_pairs = pset([
        (self.lbs[0], self.nodes[1]),  # test same server pairs
        (self.lbs[2], self.nodes[0]),  # test same lb pairs
        (lbr1, noder1),
        (lbr2, noder2)
    ])
    # existing pairs with upper case LB
    upper_lbs = self.lbs[:]
    upper_lbs[0] = upper_lbs[0].upper()
    pairs = pset(zip(upper_lbs, self.nodes)) | new_pairs
    # The data will still be self.pairs since lbs[0] will be normalized
    return pairs, r._sorted_data(self.pairs | new_pairs)
def test_retries_uppercase(self):
    """
    If bulk adding is called with upper case LB ID and it only returns
    "lb node pair is already member" error with 409 then other pairs
    are retried
    """
    # Upper-case the first LB ID to exercise case normalization.
    upper_lbs = self.lbs[:]
    upper_lbs[0] = upper_lbs[0].upper()
    pairs = pset(zip(upper_lbs, self.nodes))
    # Everything except the already-member pair is retried.
    retried_data = r._sorted_data(
        pairs - pset([(upper_lbs[0], self.nodes[0])]))
    errors = {
        "errors": [node_already_member(upper_lbs[0], self.nodes[0])]
    }
    self._check_retries(pairs, self.data, retried_data, errors)
def test_retries(self):
    """
    If bulk adding only returns "lb node pair is already member" error
    with 409 then other pairs are retried
    """
    errors = {"errors": [
        node_already_member(self.lbs[0].upper(), self.nodes[0])]}
    # Drop the already-member pair; the rest are resubmitted.
    remaining = self.pairs - pset([(self.lbs[0], self.nodes[0])])
    self._check_retries(
        self.pairs, self.data, r._sorted_data(remaining), errors)
def test_retries(self):
    """
    If bulk adding only returns "lb node pair is already member" error
    with 409 then other pairs are retried
    """
    already_member = (self.lbs[0], self.nodes[0])
    errors = {
        "errors": [
            node_already_member(self.lbs[0].upper(), self.nodes[0])
        ]
    }
    retried_data = r._sorted_data(self.pairs - pset([already_member]))
    self._check_retries(self.pairs, self.data, retried_data, errors)
def get_pairs_data(lbr1, noder1, lbr2, noder2):
    new_pairs = pset(
        [(self.lbs[0], self.nodes[1]),  # test same server pairs
         (self.lbs[2], self.nodes[0]),  # test same lb pairs
         (lbr1, noder1),
         (lbr2, noder2)])
    # existing pairs with upper case LB
    cased_lbs = self.lbs[:]
    cased_lbs[0] = cased_lbs[0].upper()
    existing_pairs = pset(zip(cased_lbs, self.nodes))
    # The data will still be self.pairs since lbs[0] will be normalized
    return existing_pairs | new_pairs, r._sorted_data(self.pairs | new_pairs)
def test_retries_uppercase(self):
    """
    If bulk adding is called with upper case LB ID and it only returns
    "lb node pair is already member" error with 409 then other pairs
    are retried
    """
    cased_lbs = self.lbs[:]
    cased_lbs[0] = cased_lbs[0].upper()
    pairs = pset(zip(cased_lbs, self.nodes))
    already_member = pset([(cased_lbs[0], self.nodes[0])])
    retried_data = r._sorted_data(pairs - already_member)
    errors = {"errors": [node_already_member(cased_lbs[0],
                                             self.nodes[0])]}
    self._check_retries(pairs, self.data, retried_data, errors)
class RCv3Tests(SynchronousTestCase):
    """
    Common data for bulk_add|delete functions
    """
    # Sample load balancer pool IDs.
    lbs = ["a6d3aa7c-dfa5-4e61-96ee-1d54ac1075d2",
           "b6d3aa7c-dfa5-4e61-96ee-1d54ac1075d2",
           "c6d3aa7c-dfa5-4e61-96ee-1d54ac1075d2"]
    # Sample cloud server IDs, paired one-to-one with ``lbs``.
    nodes = ["a95ae0c4-6ab8-4873-b82f-f8433840cff2",
             "b95ae0c4-6ab8-4873-b82f-f8433840cff2",
             "c95ae0c4-6ab8-4873-b82f-f8433840cff2"]
    # (lb, node) pairs and their canonical request body.
    pairs = pset(zip(lbs, nodes))
    data = r._sorted_data(pairs)

    def setUp(self):
        # Stub json.dumps so request bodies are deterministic,
        # easy-to-match tuples instead of real JSON strings.
        patch(self, "otter.cloud_client.json.dumps",
              side_effect=lambda body, **kwargs: ("jsonified", body))