def test_revert_removed_child():
    """Reverting a committed removal of a connected child restores the
    value it received from its parent."""
    res1 = orm.DBResource(id="test1", name="test1", base_path="x")  # NOQA
    res1.save()
    res1.add_input("a", "str", "9")
    res2 = orm.DBResource(id="test2", name="test2", base_path="x")  # NOQA
    res2.save()
    res2.add_input("a", "str", 0)
    res1 = resource.load("test1")
    res2 = resource.load("test2")
    signals.connect(res1, res2)
    staged = change.stage_changes()
    assert len(staged) == 2
    for entry in staged:
        operations.move_to_commited(entry.log_action)
    res2.remove()
    staged = change.stage_changes()
    assert len(staged) == 1
    removal = next(staged.collection())
    operations.move_to_commited(removal.log_action)
    with mock.patch.object(resource, "read_meta") as mread:
        mread.return_value = {"input": {"a": {"schema": "str!"}}}
        change.revert(removal.uid)
    res2 = resource.load("test2")
    assert res2.args == {"a": "9"}
def test_revert_removal():
    """Reverting a committed removal recreates the resource with the
    inputs recorded in the removal diff."""
    res = orm.DBResource(id='test1', name='test1', base_path='x')
    res.save()
    res.add_input('a', 'str', '9')
    res.add_input('location_id', 'str', '1')
    res.add_input('transports_id', 'str', '1')
    commited = orm.DBCommitedState.get_or_create('test1')
    commited.inputs = {'a': '9', 'location_id': '1', 'transports_id': '1'}
    commited.save()
    logitem = change.create_logitem(
        res.name, 'remove', change.create_diff({}, {'a': '9'}), [],
        base_path=res.base_path)
    log = data.SL()
    log.append(logitem)
    resource_obj = resource.load(res.name)
    resource_obj.remove()
    operations.move_to_commited(logitem.log_action)
    assert orm.DBResource.load_all() == []
    assert logitem.diff == [('remove', '', [('a', '9')])]
    with mock.patch.object(resource, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}},
                              'id': 'mocked'}
        change.revert(logitem.uid)
    resource_obj = resource.load('test1')
    assert resource_obj.args == {'a': '9', 'location_id': '1',
                                 'transports_id': '1'}
def test_discard_connection():
    """discard_all undoes a staged connection and restores the old value."""
    res1 = DBResource.from_dict('test1', {
        'name': 'test1', 'base_path': 'x',
        'state': RESOURCE_STATE.created.name,
        'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    res1.inputs['a'] = '9'
    res1.save_lazy()
    res2 = DBResource.from_dict('test2', {
        'name': 'test2', 'base_path': 'x',
        'state': RESOURCE_STATE.created.name,
        'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    res2.inputs['a'] = '0'
    res2.save_lazy()
    ModelMeta.save_all_lazy()
    for entry in change.stage_changes():
        operations.move_to_commited(entry.log_action)
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    res1.connect(res2, {'a': 'a'})
    staged = change.stage_changes()
    assert len(staged) == 1
    assert res2.args == {'a': '9'}
    change.discard_all()
    assert res2.args == {'a': '0'}
    assert len(change.stage_changes()) == 0
def test_revert_removed_child():
    """Revert of a removed child brings back the connected parent value."""
    res1 = orm.DBResource(id='test1', name='test1', base_path='x')  # NOQA
    res1.save()
    res1.add_input('a', 'str', '9')
    res2 = orm.DBResource(id='test2', name='test2', base_path='x')  # NOQA
    res2.save()
    res2.add_input('a', 'str', 0)
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    signals.connect(res1, res2)
    staged = change.stage_changes()
    assert len(staged) == 2
    for entry in staged:
        operations.move_to_commited(entry.log_action)
    res2.remove()
    staged = change.stage_changes()
    assert len(staged) == 1
    removal = next(staged.collection())
    operations.move_to_commited(removal.log_action)
    with mock.patch.object(resource, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}}}
        change.revert(removal.uid)
    res2 = resource.load('test2')
    assert res2.args == {'a': '9'}
def test_revert_removed_child():
    """After commit + remove + commit, revert restores the child's args."""
    for name, val in (('test1', '9'), ('test2', 0)):
        db_res = orm.DBResource(id=name, name=name, base_path='x')
        db_res.save()
        db_res.add_input('a', 'str', val)
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    signals.connect(res1, res2)
    staged = change.stage_changes()
    assert len(staged) == 2
    for entry in staged:
        operations.move_to_commited(entry.log_action)
    res2.remove()
    staged = change.stage_changes()
    assert len(staged) == 1
    logitem = next(staged.collection())
    operations.move_to_commited(logitem.log_action)
    with mock.patch.object(resource, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}}}
        change.revert(logitem.uid)
    res2 = resource.load('test2')
    assert res2.args == {'a': '9'}
def add_node(args, user_config):
    """Create `args.nodes` new slave nodes, numbered after the current
    newest node, and wire every node into the hosts files."""
    config = rs.load('kube-config')
    kubernetes_master = rs.load('kubelet-master')
    calico_master = rs.load('calico-master')
    internal_network = IPAddress(config.args['network'])

    def get_node_id(n):
        # node names end with "-<index>"
        return n.name.split('-')[-1]

    kube_nodes = get_slave_nodes()
    newest_id = int(get_node_id(max(kube_nodes, key=get_node_id)))
    new_nodes = []
    for i in xrange(newest_id, newest_id + args.nodes):
        new_nodes.append(
            setup_slave_node(config, user_config['kube_slaves'],
                             kubernetes_master, calico_master,
                             internal_network, i))
    kube_master = rs.load('kube-node-master')
    all_nodes = new_nodes + [kube_master]
    hosts_files = rs.load_all(startswith='hosts_file_node_kube-')
    for node in all_nodes:
        for host_file in hosts_files:
            node.connect(host_file, {'name': 'hosts:name',
                                     'ip': 'hosts:ip'})
def test_discard_removed():
    """discard_all clears a staged removal and un-marks the resource."""
    res1 = DBResource.from_dict("test1", {
        "name": "test1",
        "base_path": "x",
        "state": RESOURCE_STATE.created.name,
        "meta_inputs": {"a": {"value": None, "schema": "str"}},
    })
    res1.inputs["a"] = "9"
    res1.save_lazy()
    ModelMeta.save_all_lazy()
    for entry in change.stage_changes():
        operations.move_to_commited(entry.log_action)
    res1 = resource.load("test1")
    res1.remove()
    assert len(change.stage_changes()) == 1
    assert res1.to_be_removed()
    change.discard_all()
    assert len(change.stage_changes()) == 0
    assert not resource.load("test1").to_be_removed()
def setup_nodes(config, user_config, num=1, existing_nodes=None):
    """Create `num` slave nodes (or adopt `existing_nodes`) and connect
    every node to all known hosts files."""
    kubernetes_master = rs.load('kubelet-master')
    calico_master = rs.load('calico-master')
    internal_network = IPAddress(config.args['network'])
    if existing_nodes:
        # re-use pre-provisioned nodes; per-node user config is skipped
        kube_nodes = [
            setup_slave_node(config, kubernetes_master, calico_master,
                             internal_network, i, None, node)
            for (i, node) in enumerate(existing_nodes)]
    else:
        kube_nodes = [
            setup_slave_node(config, kubernetes_master, calico_master,
                             internal_network, i, user_config[i])
            for i in xrange(num)]
    kube_master = rs.load(MASTER_NODE_RESOURCE_NAME)
    all_nodes = kube_nodes + [kube_master]
    hosts_files = rs.load_all(startswith='hosts_file_node_')
    for node in all_nodes:
        for host_file in hosts_files:
            node.connect(host_file, {'name': 'hosts:name',
                                     'ip': 'hosts:ip'})
def add_dns(args, *_):
    """Create the kube-dns resource and wire it to master and config."""
    config = rs.load('kube-config')
    kube_master = rs.load(MASTER_NODE_RESOURCE_NAME)
    master = rs.load('kubelet-master')
    dns = cr.create('kube-dns', 'k8s/kubedns', {})[0]
    master.connect(dns, {'master_port': 'api_port'})
    kube_master.connect(dns, {'ip': 'api_host'})
    config.connect(dns, {'cluster_domain': 'cluster_domain',
                         'cluster_dns': 'cluster_dns'})
def add_dns(args, *_):
    """Deploy kube-dns and connect it to the master node and cluster
    configuration resources."""
    config = rs.load('kube-config')
    kube_master = rs.load('kube-node-master')
    master = rs.load('kubelet-master')
    kube_dns = cr.create('kube-dns', 'k8s/kubedns', {})[0]
    master.connect(kube_dns, {'master_port': 'api_port'})
    kube_master.connect(kube_dns, {'ip': 'api_host'})
    config.connect(
        kube_dns,
        {'cluster_domain': 'cluster_domain', 'cluster_dns': 'cluster_dns'})
def test_discard_removed():
    """A discarded removal leaves the resource present and unstaged."""
    res = create_resource('test1')
    res.inputs['a'] = '9'
    res.save_lazy()
    res = resource.load('test1')
    res.remove()
    ModelMeta.save_all_lazy()
    assert len(change.staged_log()) == 1
    assert res.to_be_removed()
    change.discard_all()
    assert len(change.staged_log()) == 0
    assert not resource.load('test1').to_be_removed()
def test_discard_removed():
    """discard_all cancels a staged removal (db_obj access variant)."""
    created = create_resource('test1')
    created.db_obj.inputs['a'] = '9'
    created.db_obj.save_lazy()
    loaded = resource.load('test1')
    loaded.remove()
    ModelMeta.save_all_lazy()
    assert len(change.staged_log()) == 1
    assert loaded.to_be_removed()
    change.discard_all()
    assert len(change.staged_log()) == 0
    assert not resource.load('test1').to_be_removed()
def test_revert_update():
    """revert() on a committed update restores the previous input value."""
    commit = {'a': '10'}
    previous = {'a': '9'}
    res = DBResource.from_dict('test1', {
        'name': 'test1', 'base_path': 'x',
        'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    res.save()
    res.inputs['a'] = '9'
    resource_obj = resource.load(res.name)
    assert resource_obj.args == previous
    log = data.SL()
    logitem = change.create_logitem(
        res.name, 'update', change.create_diff(commit, previous), [],
        base_path=res.base_path)
    log.append(logitem)
    resource_obj.update(commit)
    operations.move_to_commited(logitem.log_action)
    assert logitem.diff == [['change', 'a', ['9', '10']]]
    assert resource_obj.args == commit
    change.revert(logitem.uid)
    assert resource_obj.args == previous
def test_discard_removed():
    """Discarding a staged removal keeps the committed resource alive."""
    res = orm.DBResource(id='test1', name='test1', base_path='x')
    res.save()
    res.add_input('a', 'str', '9')
    for entry in change.stage_changes():
        operations.move_to_commited(entry.log_action)
    res = resource.load('test1')
    res.remove()
    assert len(change.stage_changes()) == 1
    assert res.to_be_removed()
    change.discard_all()
    assert len(change.stage_changes()) == 0
    assert not resource.load('test1').to_be_removed()
def test_update_action_after_commit():
    """Updating an operational resource stages an 'update' log entry."""
    res = resource.load(create_resource('1').name)
    res.set_operational()
    res.update({'a': 10})
    ModelMeta.save_all_lazy()
    entries = change.staged_log()
    assert entries[0].action == 'update'
def test_revert_update():
    """Committed update can be reverted back to the earlier value
    (tuple-shaped diff variant)."""
    commit = {'a': '10'}
    previous = {'a': '9'}
    res = DBResource.from_dict(
        'test1',
        {'name': 'test1', 'base_path': 'x',
         'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    res.save()
    action = 'update'
    res.inputs['a'] = '9'
    resource_obj = resource.load(res.name)
    assert resource_obj.args == previous
    log = data.SL()
    logitem = change.create_logitem(res.name, action,
                                    change.create_diff(commit, previous),
                                    [], base_path=res.base_path)
    log.append(logitem)
    resource_obj.update(commit)
    operations.move_to_commited(logitem.log_action)
    assert logitem.diff == [('change', 'a', ('9', '10'))]
    assert resource_obj.args == commit
    change.revert(logitem.uid)
    assert resource_obj.args == previous
def test_revert_update():
    """Reverting a committed update restores the pre-update args."""
    commit = {'a': '10'}
    previous = {'a': '9'}
    res = orm.DBResource(id='test1', name='test1', base_path='x')
    res.save()
    res.add_input('a', 'str', '9')
    resource_obj = resource.load(res.name)
    assert resource_obj.args == previous
    log = data.SL()
    logitem = change.create_logitem(
        res.name, 'update', change.create_diff(commit, previous), [],
        base_path=res.base_path)
    log.append(logitem)
    resource_obj.update(commit)
    operations.move_to_commited(logitem.log_action)
    assert logitem.diff == [('change', 'a', ('9', '10'))]
    assert resource_obj.args == commit
    change.revert(logitem.uid)
    assert resource_obj.args == previous
def move_to_commited(log_action, *args, **kwargs):
    # Commit the staged log item identified by `log_action`: apply its
    # diffs to the resource's commited state and move the item to history.
    sl = data.SL()
    item = next((i for i in sl if i.log_action == log_action), None)
    if item:
        resource_obj = resource.load(item.resource)
        commited = CommitedResource.get_or_create(item.resource)
        updated = resource_obj.db_obj.updated
        if item.action == CHANGES.remove.name:
            # committing a removal deletes the resource itself
            resource_obj.delete()
            commited.state = resource.RESOURCE_STATE.removed.name
        else:
            resource_obj.set_operational()
            commited.state = resource.RESOURCE_STATE.operational.name
            commited.base_path = item.base_path
            updated = resource_obj.db_obj.updated
            # required to update `updated` field
            resource_obj.db_obj.save()
        commited.inputs = patch(item.diff, commited.inputs)
        # TODO fix TagsWrp to return list
        # commited.tags = resource_obj.tags
        sorted_connections = sorted(commited.connections)
        commited.connections = patch(item.connections_diff, sorted_connections)
        commited.save()
        item.log = 'history'
        item.state = 'success'
        item.updated = updated
        item.save()
def test_revert_removal():
    """Reverting a committed remove-log-item recreates the resource."""
    res = create_resource('test1')
    res.inputs['a'] = '9'
    res.save_lazy()
    commited = CommitedResource.from_dict(
        'test1', {'inputs': {'a': '9'}, 'state': 'operational'})
    commited.save_lazy()
    resource_obj = resource.load(res.name)
    resource_obj.remove()
    ModelMeta.save_all_lazy()
    log_item = change.create_remove(resource_obj.name)
    log_item.save()
    uid = log_item.uid
    assert log_item.diff == [['remove', '', [['a', '9']]]]
    operations.commit_log_item(log_item)
    ModelMeta.save_all_lazy()
    with mock.patch.object(repository.Repository, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}},
                              'id': 'mocked'}
        with mock.patch.object(repository.Repository, 'get_path') as mpath:
            mpath.return_value = 'x'
            change.revert(uid)
    ModelMeta.save_all_lazy()
    resource_obj = resource.load('test1')
    assert resource_obj.args == {'a': '9',
                                 'location_id': '',
                                 'transports_id': ''}
def set_error(log_action, *args, **kwargs):
    """Mark the staged log item matching `log_action`, and the resource it
    refers to, as errored.

    :param log_action: identifier matched against each staged item's
        ``log_action``; no-op when no item matches.
    """
    sl = data.SL()
    item = next((i for i in sl if i.log_action == log_action), None)
    if item:
        resource_obj = resource.load(item.res)
        # Fix: set_error() belongs to the loaded resource object; the
        # original called it on the `resource` module, which raises
        # AttributeError (cf. the corrected sibling implementation).
        resource_obj.set_error()
        item.state = data.STATES.error
        sl.update(item)
def set_error(log_action, *args, **kwargs):
    """Flag the staged item matching `log_action` and its resource as errored."""
    sl = data.SL()
    item = next((i for i in sl if i.log_action == log_action), None)
    if item is None:
        return
    resource.load(item.res).set_error()
    item.state = data.STATES.error
    sl.update(item)
def setup_nodes(config, user_config, num=1):
    """Create `num` slave nodes and connect all nodes to the hosts files."""
    kubernetes_master = rs.load('kubelet-master')
    calico_master = rs.load('calico-master')
    internal_network = IPAddress(config.args['network'])
    kube_nodes = []
    for i in xrange(num):
        kube_nodes.append(
            setup_slave_node(config, user_config, kubernetes_master,
                             calico_master, internal_network, i))
    kube_master = rs.load('kube-node-master')
    all_nodes = kube_nodes + [kube_master]
    hosts_files = rs.load_all(startswith='hosts_file_node_kube-')
    for node in all_nodes:
        for host_file in hosts_files:
            node.connect(host_file, {'name': 'hosts:name',
                                     'ip': 'hosts:ip'})
def test_revert_update_connected():
    """Reverting committed runs after a disconnect restores the value that
    used to flow through the connection chain."""
    for name, val in (('test1', '9'), ('test2', ''), ('test3', '')):
        created = create_resource(name)
        created.inputs['a'] = val
        created.save_lazy()
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    res3 = resource.load('test3')
    res1.connect(res2)
    res2.connect(res3)
    ModelMeta.save_all_lazy()
    staged = [change.create_run(r.name) for r in (res1, res2, res3)]
    assert len(staged) == 3
    for entry in staged:
        assert entry.action == 'run'
        operations.commit_log_item(entry)
    res1.disconnect(res2)
    to_revert = []
    for entry in [change.create_run(r.name) for r in (res2, res3)]:
        assert entry.action == 'run'
        to_revert.append(entry.uid)
        operations.commit_log_item(entry)
    change.revert_uids(sorted(to_revert, reverse=True))
    ModelMeta.save_all_lazy()
    for entry in [change.create_run(r.name) for r in (res2, res3)]:
        assert entry.diff == [['change', 'a', ['', '9']]]
def set_error(log_action, *args, **kwargs):
    """Mark the matching staged item's resource as errored, then drop the item."""
    sl = data.SL()
    item = next((i for i in sl if i.log_action == log_action), None)
    if item is None:
        return
    resource.load(item.resource).set_error()
    item.state = 'error'
    item.delete()
def test_revert_removal():
    """Committed removal can be reverted after the cache is cleared."""
    res = DBResource.from_dict('test1', {
        'name': 'test1', 'base_path': 'x',
        'state': RESOURCE_STATE.created.name,
        'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    res.inputs['a'] = '9'
    res.save_lazy()
    CommitedResource.from_dict(
        'test1', {'inputs': {'a': '9'}, 'state': 'operational'}).save_lazy()
    resource_obj = resource.load(res.name)
    resource_obj.remove()
    ModelMeta.save_all_lazy()
    changes = change.stage_changes()
    assert len(changes) == 1
    assert changes[0].diff == [['remove', '', [['a', '9']]]]
    operations.move_to_commited(changes[0].log_action)
    clear_cache()
    assert DBResource._c.obj_cache == {}
    # assert DBResource.bucket.get('test1').siblings == []
    with mock.patch.object(repository.Repository, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}},
                              'id': 'mocked'}
        with mock.patch.object(repository.Repository, 'get_path') as mpath:
            mpath.return_value = 'x'
            change.revert(changes[0].uid)
    ModelMeta.save_all_lazy()
    # assert len(DBResource.bucket.get('test1').siblings) == 1
    resource_obj = resource.load('test1')
    assert resource_obj.args == {'a': '9',
                                 'location_id': '',
                                 'transports_id': ''}
def locate_named_transport_resoruce(resource, name):
    """Return the transport resource connected under the given name.

    Follows the resource's `transports_id` input to the transports
    holder, then inspects its 'name' connections.
    """
    transports_key = resource.db_obj.inputs._get_field_val(
        'transports_id', other='_key')
    connections = load(transports_key).connections
    name_links = [c for c in connections if c[1] == 'name']
    candidates = load_by_names([c[0] for c in name_links])
    return next(t for t in candidates
                if t.db_obj.inputs._get_raw_field_val('name') == name)
def test_revert_removal():
    """Committed removal leaves no siblings; revert recreates the resource."""
    res = DBResource.from_dict('test1', {
        'name': 'test1', 'base_path': 'x',
        'state': RESOURCE_STATE.created.name,
        'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    res.inputs['a'] = '9'
    res.save_lazy()
    CommitedResource.from_dict(
        'test1', {'inputs': {'a': '9'}, 'state': 'operational'}).save_lazy()
    resource_obj = resource.load(res.name)
    resource_obj.remove()
    ModelMeta.save_all_lazy()
    changes = change.stage_changes()
    assert len(changes) == 1
    assert changes[0].diff == [['remove', '', [['a', '9']]]]
    operations.move_to_commited(changes[0].log_action)
    ModelMeta.session_start()
    assert DBResource._c.obj_cache == {}
    assert DBResource.bucket.get('test1').siblings == []
    with mock.patch.object(resource, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}},
                              'id': 'mocked'}
        change.revert(changes[0].uid)
    ModelMeta.save_all_lazy()
    assert len(DBResource.bucket.get('test1').siblings) == 1
    resource_obj = resource.load('test1')
    assert resource_obj.args == {'a': '9',
                                 'location_id': '',
                                 'transports_id': ''}
def setup_nodes(config, user_config, num=1):
    """Provision `num` kube slave nodes and register them in hosts files."""
    kubernetes_master = rs.load('kubelet-master')
    calico_master = rs.load('calico-master')
    internal_network = IPAddress(config.args['network'])
    kube_nodes = [
        setup_slave_node(config, user_config, kubernetes_master,
                         calico_master, internal_network, idx)
        for idx in xrange(num)]
    kube_master = rs.load('kube-node-master')
    all_nodes = kube_nodes + [kube_master]
    hosts_files = rs.load_all(startswith='hosts_file_node_kube-')
    for node in all_nodes:
        for host_file in hosts_files:
            node.connect(host_file,
                         {'name': 'hosts:name', 'ip': 'hosts:ip'})
def test_revert_update_connected():
    """After disconnect + commit, reverting restores the connected value."""
    for name, val in (('test1', '9'), ('test2', ''), ('test3', '')):
        create_resource(name).update({'a': val})
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    res3 = resource.load('test3')
    res1.connect(res2)
    res2.connect(res3)
    ModelMeta.save_all_lazy()
    staged = change.staged_log()
    assert len(staged) == 3
    for entry in staged:
        assert entry.action == 'run'
        operations.commit_log_item(entry)
    res1.disconnect(res2)
    to_revert = []
    for entry in change.staged_log():
        assert entry.action == 'run'
        to_revert.append(entry.uid)
        operations.commit_log_item(entry)
    change.revert_uids(sorted(to_revert, reverse=True))
    ModelMeta.save_all_lazy()
    for entry in change.staged_log():
        assert entry.diff == [['change', 'a', ['', '9']]]
def test_discard_removed():
    """A staged removal can be discarded, keeping the resource."""
    res1 = DBResource.from_dict('test1', {
        'name': 'test1',
        'base_path': 'x',
        'state': RESOURCE_STATE.created.name,
        'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    res1.inputs['a'] = '9'
    res1.save_lazy()
    ModelMeta.save_all_lazy()
    for entry in change.stage_changes():
        operations.move_to_commited(entry.log_action)
    res1 = resource.load('test1')
    res1.remove()
    assert len(change.stage_changes()) == 1
    assert res1.to_be_removed()
    change.discard_all()
    assert len(change.stage_changes()) == 0
    assert not resource.load('test1').to_be_removed()
def test_revert_update_connected():
    """Disconnect + commit can be reverted; the staged diffs show the
    connected value coming back."""
    for name, val in (('test1', '9'), ('test2', 0), ('test3', 0)):
        db_res = orm.DBResource(id=name, name=name, base_path='x')
        db_res.save()
        db_res.add_input('a', 'str', val)
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    res3 = resource.load('test3')
    signals.connect(res1, res2)
    signals.connect(res2, res3)
    staged = change.stage_changes()
    assert len(staged) == 3
    for entry in staged:
        operations.move_to_commited(entry.log_action)
    assert len(staged) == 0
    signals.disconnect(res1, res2)
    staged = change.stage_changes()
    assert len(staged) == 2
    to_revert = []
    for entry in staged:
        operations.move_to_commited(entry.log_action)
        to_revert.append(entry.uid)
    change.revert_uids(sorted(to_revert, reverse=True))
    staged = change.stage_changes()
    assert len(staged) == 2
    for entry in staged:
        assert entry.diff == [['change', 'a', [0, '9']]]
def test_discard_connection():
    """discard_all drops a staged connection and its propagated value."""
    for name, val in (('test1', '9'), ('test2', '0')):
        db_res = orm.DBResource(id=name, name=name, base_path='x')
        db_res.save()
        db_res.add_input('a', 'str', val)
    for entry in change.stage_changes():
        operations.move_to_commited(entry.log_action)
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    signals.connect(res1, res2)
    staged = change.stage_changes()
    assert len(staged) == 1
    assert res2.args == {'a': '9'}
    change.discard_all()
    assert res2.args == {'a': '0'}
    assert len(change.stage_changes()) == 0
def test_discard_connection():
    """Discarding an uncommitted connection restores the old input."""
    for name, val in (("test1", "9"), ("test2", "0")):
        db_res = DBResource.from_dict(name, {
            "name": name,
            "base_path": "x",
            "state": RESOURCE_STATE.created.name,
            "meta_inputs": {"a": {"value": None, "schema": "str"}},
        })
        db_res.inputs["a"] = val
        db_res.save_lazy()
    ModelMeta.save_all_lazy()
    for entry in change.stage_changes():
        operations.move_to_commited(entry.log_action)
    res1 = resource.load("test1")
    res2 = resource.load("test2")
    res1.connect(res2, {"a": "a"})
    staged = change.stage_changes()
    assert len(staged) == 1
    assert res2.args == {"a": "9"}
    change.discard_all()
    assert res2.args == {"a": "0"}
    assert len(change.stage_changes()) == 0
def test_discard_update():
    """discard_all rolls a staged update back to the committed value."""
    created = create_resource('test1')
    created.inputs['a'] = '9'
    created.save_lazy()
    operations.commit_log_item(change.create_run(created.name))
    loaded = resource.load('test1')
    loaded.update({'a': '11'})
    ModelMeta.save_all_lazy()
    assert len(change.staged_log()) == 1
    assert loaded.args == {'a': '11'}
    change.discard_all()
    assert loaded.args == {'a': '9'}
def test_revert_removal():
    """Reverting a committed remove entry brings the resource back."""
    res = create_resource('test1')
    res.inputs['a'] = '9'
    res.save_lazy()
    CommitedResource.from_dict(
        'test1', {'inputs': {'a': '9'}, 'state': 'operational'}).save_lazy()
    resource_obj = resource.load(res.name)
    resource_obj.remove()
    ModelMeta.save_all_lazy()
    entry = change.create_remove(resource_obj.name)
    entry.save()
    uid = entry.uid
    assert entry.diff == [['remove', '', [['a', '9']]]]
    operations.commit_log_item(entry)
    ModelMeta.save_all_lazy()
    with mock.patch.object(repository.Repository, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}},
                              'id': 'mocked'}
        with mock.patch.object(repository.Repository, 'get_path') as mpath:
            mpath.return_value = 'x'
            change.revert(uid)
    ModelMeta.save_all_lazy()
    resource_obj = resource.load('test1')
    assert resource_obj.args == {'a': '9',
                                 'location_id': '',
                                 'transports_id': ''}
def test_discard_update():
    """Staged update is discarded, restoring the committed input."""
    res = orm.DBResource(id='test1', name='test1', base_path='x')
    res.save()
    res.add_input('a', 'str', '9')
    for entry in change.stage_changes():
        operations.move_to_commited(entry.log_action)
    res = resource.load('test1')
    res.update({'a': '11'})
    assert len(change.stage_changes()) == 1
    assert res.args == {'a': '11'}
    change.discard_all()
    assert res.args == {'a': '9'}
def test_discard_connection():
    """Discarding a staged connection restores the downstream value."""
    res1 = create_resource('test1')
    res1.inputs['a'] = '9'
    res1.save_lazy()
    res2 = create_resource('test2')
    res2.inputs['a'] = '0'
    res2.save_lazy()
    for entry in [change.create_run(n) for n in (res1.name, res2.name)]:
        operations.commit_log_item(entry)
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    res1.connect(res2, {'a': 'a'})
    ModelMeta.save_all_lazy()
    staged = change.staged_log()
    assert len(staged) == 1
    assert res2.args == {'a': '9'}
    change.discard_all()
    assert res2.args == {'a': '0'}
    assert len(change.staged_log()) == 0
def test_discard_connection():
    """Connection discard restores old value (db_obj access variant)."""
    res1 = create_resource('test1')
    res1.db_obj.inputs['a'] = '9'
    res1.db_obj.save_lazy()
    res2 = create_resource('test2')
    res2.db_obj.inputs['a'] = '0'
    res2.db_obj.save_lazy()
    for entry in [change.create_run(r) for r in (res1, res2)]:
        operations.commit_log_item(entry)
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    res1.connect(res2, {'a': 'a'})
    ModelMeta.save_all_lazy()
    staged = change.staged_log()
    assert len(staged) == 1
    assert res2.args == {'a': '9'}
    change.discard_all()
    assert res2.args == {'a': '0'}
    assert len(change.staged_log()) == 0
def test_revert_removal():
    """Removal commit empties the bucket; revert restores one sibling."""
    res = DBResource.from_dict("test1", {
        "name": "test1",
        "base_path": "x",
        "state": RESOURCE_STATE.created.name,
        "meta_inputs": {"a": {"value": None, "schema": "str"}},
    })
    res.inputs["a"] = "9"
    res.save_lazy()
    CommitedResource.from_dict(
        "test1", {"inputs": {"a": "9"}, "state": "operational"}).save_lazy()
    resource_obj = resource.load(res.name)
    resource_obj.remove()
    ModelMeta.save_all_lazy()
    changes = change.stage_changes()
    assert len(changes) == 1
    assert changes[0].diff == [["remove", "", [["a", "9"]]]]
    operations.move_to_commited(changes[0].log_action)
    ModelMeta.session_start()
    assert DBResource._c.obj_cache == {}
    assert DBResource.bucket.get("test1").siblings == []
    with mock.patch.object(resource, "read_meta") as mread:
        mread.return_value = {"input": {"a": {"schema": "str!"}},
                              "id": "mocked"}
        change.revert(changes[0].uid)
    ModelMeta.save_all_lazy()
    assert len(DBResource.bucket.get("test1").siblings) == 1
    resource_obj = resource.load("test1")
    assert resource_obj.args == {"a": "9",
                                 "location_id": "",
                                 "transports_id": ""}
def add_node(args, user_config):
    """Extend the cluster with `args.nodes` additional slave nodes,
    numbered after the currently newest node."""
    config = rs.load('kube-config')
    kubernetes_master = rs.load('kubelet-master')
    calico_master = rs.load('calico-master')
    internal_network = IPAddress(config.args['network'])

    def node_index(node):
        # node names look like "...-<n>"; the trailing part is the index
        return node.name.split('-')[-1]

    existing = get_slave_nodes()
    newest_id = int(node_index(max(existing, key=node_index)))
    created = [
        setup_slave_node(config, user_config['kube_slaves'],
                         kubernetes_master, calico_master,
                         internal_network, idx)
        for idx in xrange(newest_id, newest_id + args.nodes)]
    kube_master = rs.load('kube-node-master')
    all_nodes = created + [kube_master]
    hosts_files = rs.load_all(startswith='hosts_file_node_kube-')
    for node in all_nodes:
        for host_file in hosts_files:
            node.connect(host_file,
                         {'name': 'hosts:name', 'ip': 'hosts:ip'})
def commit_log_item(item):
    # Commit a single staged log item: apply its diffs to the resource's
    # commited state, move the item into history and delete it from staging.
    resource_obj = resource.load(item.resource)
    commited = CommitedResource.get_or_create(item.resource)
    if item.action == CHANGES.remove.name:
        # committing a removal deletes the resource itself
        resource_obj.delete()
        commited.state = resource.RESOURCE_STATE.removed.name
    else:
        resource_obj.set_operational()
        commited.state = resource.RESOURCE_STATE.operational.name
        commited.base_path = item.base_path
        resource_obj.db_obj.save_lazy()
    commited.inputs = patch(item.diff, commited.inputs)
    # TODO fix TagsWrp to return list
    # commited.tags = resource_obj.tags
    sorted_connections = sorted(commited.connections)
    commited.connections = patch(item.connections_diff, sorted_connections)
    commited.save_lazy()
    item.to_history().save_lazy()
    item.delete()
def test_revert_update():
    """A committed update can be reverted to the previous value."""
    prev = {'a': '9'}
    new = {'a': '10'}
    res = create_resource('test1')
    res.save()
    resource_obj = resource.load(res.name)
    resource_obj.update(prev)
    operations.commit_log_item(change.create_logitem(res.name, 'run'))
    resource_obj.update(new)
    logitem = change.create_logitem(res.name, 'run')
    uid = logitem.uid
    assert logitem.diff == [['change', 'a', ['9', '10']]]
    operations.commit_log_item(logitem)
    assert resource_obj.args == new
    change.revert(uid)
    assert resource_obj.args == {'a': '9'}
def move_to_commited(log_action, *args, **kwargs):
    # Commit the staged log item identified by `log_action`: pop it from
    # the staged log, apply its diffs to the commited state and append the
    # item to the commit log.
    sl = data.SL()
    item = next((i for i in sl if i.log_action == log_action), None)
    if item:
        sl.pop(item.uid)
        resource_obj = resource.load(item.res)
        commited = orm.DBCommitedState.get_or_create(item.res)
        if item.action == CHANGES.remove.name:
            # committing a removal deletes the resource itself
            resource_obj.delete()
            commited.state = resource.RESOURCE_STATE.removed.name
        else:
            resource_obj.set_operational()
            commited.state = resource.RESOURCE_STATE.operational.name
        commited.inputs = patch(item.diff, commited.inputs)
        commited.tags = resource_obj.tags
        sorted_connections = sorted(commited.connections)
        commited.connections = patch(item.signals_diff, sorted_connections)
        commited.base_path = item.base_path
        commited.save()
        cl = data.CL()
        item.state = data.STATES.success
        cl.append(item)
def test_revert_update_connected():
    """After a disconnect is committed, reverting those commits stages
    diffs that restore the previously connected value."""
    for name, val in (('test1', '9'), ('test2', ''), ('test3', '')):
        db_res = DBResource.from_dict(name, {
            'name': name,
            'base_path': 'x',
            'state': RESOURCE_STATE.created.name,
            'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
        db_res.inputs['a'] = val
        db_res.save_lazy()
    res1 = resource.load('test1')
    res2 = resource.load('test2')
    res3 = resource.load('test3')
    res1.connect(res2)
    res2.connect(res3)
    ModelMeta.save_all_lazy()
    staged = change.stage_changes()
    assert len(staged) == 3
    for entry in staged:
        assert entry.action == 'run'
        operations.move_to_commited(entry.log_action)
    assert len(change.stage_changes()) == 0
    res1.disconnect(res2)
    staged = change.stage_changes()
    assert len(staged) == 2
    to_revert = []
    for entry in staged:
        assert entry.action == 'update'
        operations.move_to_commited(entry.log_action)
        to_revert.append(entry.uid)
    change.revert_uids(sorted(to_revert, reverse=True))
    ModelMeta.save_all_lazy()
    staged = change.stage_changes()
    assert len(staged) == 2
    for entry in staged:
        assert entry.diff == [['change', 'a', ['', '9']]]
def add_dashboard(args, *_):
    """Deploy the kubernetes-dashboard resource and point it at the master."""
    kube_master = rs.load('kube-node-master')
    master = rs.load('kubelet-master')
    dash = cr.create('kubernetes-dashboard', 'k8s/dashboard', {})[0]
    master.connect(dash, {'master_port': 'api_port'})
    kube_master.connect(dash, {'ip': 'api_host'})
def setup_nodes(num=1):
    """Provision `num` kube slave nodes, each with its bridge interface,
    calico, CNI plugin, docker and kubelet, then wire every node into the
    hosts files."""
    kubernetes_master = rs.load('kubelet-master')
    calico_master = rs.load('calico-master')
    config = rs.load('kube-config')
    kube_nodes = []
    for idx in xrange(num):
        j = idx + 1
        kube_node = cr.create(
            'kube-node-%d' % j, 'k8s/node',
            {'name': 'kube-node-%d' % j,
             'ip': '10.0.0.%d' % (3 + j),
             'ssh_user': '******',
             'ssh_password': '******',
             'ssh_key': None})['kube-node-%d' % j]
        iface_node = cr.create(
            'kube-node-%d-iface' % j, 'k8s/virt_iface',
            {'name': 'cbr0',
             # TODO(jnowak) support config for it
             'ipaddr': '172.20.%d.1' % (idx + 1),
             'onboot': 'yes',
             'bootproto': 'static',
             'type': 'Bridge'})['kube-node-%d-iface' % j]
        kube_node.connect(iface_node, {})
        config.connect(iface_node, {'netmask': 'netmask'})
        calico_node = cr.create('calico-node-%d' % j, 'k8s/calico', {})[0]
        kube_node.connect(calico_node, {'ip': 'ip'})
        calico_master.connect(calico_node,
                              {'etcd_authority': 'etcd_authority'})
        calico_node.connect(calico_node,
                            {'etcd_authority': 'etcd_authority_internal'})
        calico_cni = cr.create('calico-cni-node-%d' % j, 'k8s/cni', {})[0]
        calico_node.connect(calico_cni,
                            {'etcd_authority_internal': 'etcd_authority'})
        docker = cr.create('kube-docker-%d' % j,
                           'k8s/docker')['kube-docker-%d' % j]
        kube_node.connect(docker, {})
        iface_node.connect(docker, {'name': 'iface'})
        kubelet = cr.create('kubelet-node-%d' % j, 'k8s/kubelet', {
            'kubelet_args': '--v=5',
        })['kubelet-node-%d' % j]
        kube_node.connect(kubelet, {'name': 'kubelet_hostname'})
        kubernetes_master.connect(kubelet, {'master_address': 'master_api'})
        config.connect(kubelet, {'cluster_domain': 'cluster_domain',
                                 'cluster_dns': 'cluster_dns'})
        # ordering: docker before calico and kubelet, calico before kubelet
        add_event(Dep(docker.name, 'run', 'success', calico_node.name, 'run'))
        add_event(Dep(docker.name, 'run', 'success', kubelet.name, 'run'))
        add_event(Dep(calico_node.name, 'run', 'success', kubelet.name, 'run'))
        kube_nodes.append(kube_node)
    kube_master = rs.load('kube-node-master')
    all_nodes = kube_nodes + [kube_master]
    hosts_files = rs.load_all(startswith='hosts_file_node_kube-')
    for node in all_nodes:
        for host_file in hosts_files:
            node.connect(host_file,
                         {'name': 'hosts:name', 'ip': 'hosts:ip'})