def test_lb_disappeared_during_node_fetch(self):
    """
    If a load balancer gets deleted while fetching nodes, no nodes will be
    returned for it.
    """
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([
            [nodes_req(1, [node('11', 'a11')])],
            [lb_req('loadbalancers/2/nodes', True,
                    CLBNotFoundError(lb_id=u'2'))],
            [lb_hm_req(1, {"type": "CONNECT"})],
            [lb_req('loadbalancers/2/healthmonitor', True,
                    CLBNotFoundError(lb_id=u'2'))]
        ]),
        parallel_sequence([])  # No node feeds to fetch
    ]
    make_desc = partial(CLBDescription, port=20, weight=2,
                        condition=CLBNodeCondition.ENABLED,
                        type=CLBNodeType.PRIMARY)
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        ([CLBNode(node_id='11', address='a11',
                  description=make_desc(lb_id='1'))],
         {'1': CLB(True)}))

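# The `node` helper used throughout these tests builds the CLB node JSON that
# `nodes_req` stubs out. A minimal sketch of what it presumably returns; the
# defaults (port=20, weight=2) are inferred from the `make_desc` partial above,
# and both the defaults and this stand-in's name are assumptions:
def _example_node_json(node_id, address, condition='ENABLED', weight=2):
    n = {'id': node_id, 'address': address, 'port': 20,
         'type': 'PRIMARY', 'condition': condition}
    if weight is not None:  # weight=None is assumed to omit the key entirely
        n['weight'] = weight
    return n
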
def test_success(self):
    """
    Gets LB contents with drained_at correctly.
    """
    node11 = node('11', 'a11', condition='DRAINING')
    node12 = node('12', 'a12')
    node21 = node('21', 'a21', weight=3)
    node22 = node('22', 'a22', weight=None, condition='DRAINING')
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([
            [nodes_req(1, [node11, node12])],
            [nodes_req(2, [node21, node22])],
            [lb_hm_req(1, {"type": "CONNECT"})],
            [lb_hm_req(2, {})]
        ]),
        parallel_sequence([
            [node_feed_req('1', '11', '11feed')],
            [node_feed_req('2', '22', '22feed')]
        ]),
    ]
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        ([attr.assoc(CLBNode.from_node_json(1, node11), _drained_at=1.0),
          CLBNode.from_node_json(1, node12),
          CLBNode.from_node_json(2, node21),
          attr.assoc(CLBNode.from_node_json(2, node22), _drained_at=2.0)],
         {'1': CLB(True), '2': CLB(False)}))

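# The expected results above use `attr.assoc` to produce a copy of a CLBNode
# with `_drained_at` overridden. Unlike `attr.evolve`, `attr.assoc` keys its
# changes by the attribute name itself, which is why the private `_drained_at`
# field can be set directly. A self-contained illustration; `_Example` and
# `_example_attr_assoc` are hypothetical and not used by the tests:
import attr  # already imported by the real module


@attr.s
class _Example(object):
    name = attr.ib()
    _drained_at = attr.ib(default=None)


def _example_attr_assoc():
    original = _Example(name='node-11')
    updated = attr.assoc(original, _drained_at=1.0)   # copy with new value
    assert updated == _Example(name='node-11', drained_at=1.0)
    assert original == _Example(name='node-11')       # original unchanged
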
def test_no_draining(self):
    """
    Doesn't fetch feeds if all nodes are ENABLED.
    """
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([
            [nodes_req(1, [node('11', 'a11')])],
            [nodes_req(2, [node('21', 'a21')])],
            [lb_hm_req(1, {})],
            [lb_hm_req(2, {})]
        ]),
        parallel_sequence([])  # No node feeds to fetch
    ]
    make_desc = partial(CLBDescription, port=20, weight=2,
                        condition=CLBNodeCondition.ENABLED,
                        type=CLBNodeType.PRIMARY)
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        ([CLBNode(node_id='11', address='a11',
                  description=make_desc(lb_id='1')),
          CLBNode(node_id='21', address='a21',
                  description=make_desc(lb_id='2'))],
         {'1': CLB(False), '2': CLB(False)}))

def test_lb_disappeared_during_feed_fetch(self):
    """
    If a load balancer gets deleted while fetching feeds, no nodes will be
    returned for it.
    """
    node21 = node('21', 'a21', condition='DRAINING', weight=None)
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([
            [nodes_req(1, [node('11', 'a11', condition='DRAINING'),
                           node('12', 'a12')])],
            [nodes_req(2, [node21])],
            [lb_hm_req(1, {"type": "CONNECT"})],
            [lb_hm_req(2, {"type": "CONNECT"})]
        ]),
        parallel_sequence([
            [node_feed_req('1', '11', CLBNotFoundError(lb_id=u'1'))],
            [node_feed_req('2', '21', '22feed')]
        ]),
    ]
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        ([attr.assoc(CLBNode.from_node_json(2, node21), _drained_at=2.0)],
         {'2': CLB(True)}))

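# The feed bodies in these sequences ('11feed', '22feed') are placeholder
# strings; the `_drained_at` values asserted above (1.0, 2.0) presumably come
# from a feed parser stubbed out in this test case's setUp. A purely
# illustrative stand-in for such a stub (the name and the digit-based mapping
# are assumptions, not the real fixture):
def _example_fake_drained_at(feed_body):
    # '11feed' -> 1.0, '22feed' -> 2.0
    return float(feed_body[0])
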
def test_no_nodes(self):
    """
    Return empty if there are LBs but no nodes in them.
    """
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([[nodes_req(1, [])], [nodes_req(2, [])]]),
        parallel_sequence([]),  # No node feeds to fetch
    ]
    self.assertEqual(perform_sequence(seq, get_clb_contents()), [])

def test_no_lb(self):
    """
    Return an empty list if there are no LBs.
    """
    seq = [
        lb_req('loadbalancers', True, {'loadBalancers': []}),
        parallel_sequence([]),  # No LBs, so no node requests
        parallel_sequence([]),  # No node feeds to fetch
    ]
    eff = get_clb_contents()
    self.assertEqual(perform_sequence(seq, eff), [])

def test_no_nodes(self):
    """
    Return empty nodes if there are LBs but no nodes in them.
    """
    seq = [
        lb_req('loadbalancers', True,
               {'loadBalancers': [{'id': 1}, {'id': 2}]}),
        parallel_sequence([
            [nodes_req(1, [])],
            [nodes_req(2, [])],
            [lb_hm_req(1, {})],
            [lb_hm_req(2, {"type": "a"})]
        ]),
        parallel_sequence([]),  # No node feeds to fetch
    ]
    self.assertEqual(
        perform_sequence(seq, get_clb_contents()),
        ([], {'1': CLB(False), '2': CLB(True)}))

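# The dict in the expected results maps each LB id to CLB(bool). Judging from
# the stubs, the flag appears to reflect whether the healthmonitor endpoint
# returned a non-empty config ({} -> CLB(False), {"type": ...} -> CLB(True)).
# A hedged sketch of that derivation; the helper name is hypothetical:
def _example_health_monitor_flag(healthmonitor_json):
    return CLB(bool(healthmonitor_json))
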
def test_lb_disappeared_during_feed_fetch(self):
    """
    If a load balancer gets deleted while fetching feeds, no nodes will be
    returned for it.
    """
    node21 = node("21", "a21", condition="DRAINING", weight=None)
    seq = [
        lb_req("loadbalancers", True,
               {"loadBalancers": [{"id": 1}, {"id": 2}]}),
        parallel_sequence([
            [nodes_req(1, [node("11", "a11", condition="DRAINING"),
                           node("12", "a12")])],
            [nodes_req(2, [node21])]
        ]),
        parallel_sequence([
            [node_feed_req(1, "11", CLBNotFoundError(lb_id=u"1"))],
            [node_feed_req(2, "21", "22feed")]
        ]),
    ]
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        [assoc_obj(CLBNode.from_node_json(2, node21), drained_at=2.0)])

def test_no_draining(self):
    """
    Doesn't fetch feeds if all nodes are ENABLED.
    """
    seq = [
        lb_req("loadbalancers", True,
               {"loadBalancers": [{"id": 1}, {"id": 2}]}),
        parallel_sequence([[nodes_req(1, [node("11", "a11")])],
                           [nodes_req(2, [node("21", "a21")])]]),
        parallel_sequence([]),  # No node feeds to fetch
    ]
    make_desc = partial(CLBDescription, port=20, weight=2,
                        condition=CLBNodeCondition.ENABLED,
                        type=CLBNodeType.PRIMARY)
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        [CLBNode(node_id="11", address="a11",
                 description=make_desc(lb_id="1")),
         CLBNode(node_id="21", address="a21",
                 description=make_desc(lb_id="2"))])

def test_lb_disappeared_during_node_fetch(self):
    """
    If a load balancer gets deleted while fetching nodes, no nodes will be
    returned for it.
    """
    seq = [
        lb_req("loadbalancers", True,
               {"loadBalancers": [{"id": 1}, {"id": 2}]}),
        parallel_sequence([
            [nodes_req(1, [node("11", "a11")])],
            [lb_req("loadbalancers/2/nodes", True,
                    CLBNotFoundError(lb_id=u"2"))],
        ]),
        parallel_sequence([]),  # No node feeds to fetch
    ]
    make_desc = partial(CLBDescription, port=20, weight=2,
                        condition=CLBNodeCondition.ENABLED,
                        type=CLBNodeType.PRIMARY)
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        [CLBNode(node_id="11", address="a11",
                 description=make_desc(lb_id="1"))])

def test_success(self):
    """
    Gets LB contents with drained_at correctly.
    """
    node11 = node("11", "a11", condition="DRAINING")
    node12 = node("12", "a12")
    node21 = node("21", "a21", weight=3)
    node22 = node("22", "a22", weight=None, condition="DRAINING")
    seq = [
        lb_req("loadbalancers", True,
               {"loadBalancers": [{"id": 1}, {"id": 2}]}),
        parallel_sequence([[nodes_req(1, [node11, node12])],
                           [nodes_req(2, [node21, node22])]]),
        parallel_sequence([[node_feed_req(1, "11", "11feed")],
                           [node_feed_req(2, "22", "22feed")]]),
    ]
    eff = get_clb_contents()
    self.assertEqual(
        perform_sequence(seq, eff),
        [assoc_obj(CLBNode.from_node_json(1, node11), drained_at=1.0),
         CLBNode.from_node_json(1, node12),
         CLBNode.from_node_json(2, node21),
         assoc_obj(CLBNode.from_node_json(2, node22), drained_at=2.0)])