def test_nested_two_listdict(rk):
    k1 = next(rk)
    k2 = next(rk)
    k3 = next(rk)
    r1 = create_resource(k1, {'name': 'first',
                              'inputs': {'config': [{"backends": [{}],
                                                     'something': 0}]}})
    r2 = create_resource(k2, {'name': 'second',
                              'inputs': {"backends": [{"host": "second_host",
                                                       "port": 2}],
                                         'something': 1}})
    r3 = create_resource(k3, {'name': 'third',
                              'inputs': {"backends": [{"host": "third_host",
                                                       "port": 3}],
                                         'something': 2}})
    r2.connect(r1, {'backends': 'config:backends',
                    'something': 'config:something'})
    r3.connect(r1, {'backends': 'config:backends',
                    'something': 'config:something'})
    Resource.save_all_lazy()

    for sc in r1.inputs['config']:
        assert 'something' in sc
        assert 'backends' in sc
        assert isinstance(sc['backends'], list)
        assert isinstance(sc['something'], int)
def test_nested_simple_listdict(rk):
    k1 = next(rk)
    k2 = next(rk)
    k3 = next(rk)
    k4 = next(rk)
    k5 = next(rk)
    r1 = create_resource(k1, {'name': 'first',
                              'inputs': {'config': [{"backends": [{}],
                                                     'listen_port': 1}]}})
    r2 = create_resource(k2, {'name': 'second',
                              'inputs': {'backend': {}}})
    r3 = create_resource(k3, {'name': 'third',
                              'inputs': {'backend': {}}})
    r5 = create_resource(k5, {'name': 'fifth',
                              'inputs': {"port": 5, "host": "fifth_host"}})
    r4 = create_resource(k4, {'name': 'fourth',
                              'inputs': {"port": 4, "host": "fourth_host"}})
    r4.connect(r2, {'port': "backend:port", 'host': 'backend:host'})
    r5.connect(r3, {'port': "backend:port", 'host': 'backend:host'})
    assert r2.inputs['backend'] == {'host': 'fourth_host', 'port': 4}
    assert r3.inputs['backend'] == {'host': 'fifth_host', 'port': 5}

    r2.connect(r1, {'backend': 'config:backends'})
    r3.connect(r1, {'backend': 'config:backends'})
    Resource.save_all_lazy()

    backends = next(x['backends'] for x in r1.inputs['config']
                    if 'backends' in x)
    assert len(backends) == 2
def test_discard_all_pending_changes_resources_created():
    res1 = DBResource.from_dict('test1',
                                {'name': 'test1', 'base_path': 'x',
                                 'state': RESOURCE_STATE.created.name,
                                 'meta_inputs': {'a': {'value': None,
                                                       'schema': 'str'}}})
    res1.inputs['a'] = '9'
    res1.save_lazy()

    res2 = DBResource.from_dict('test2',
                                {'name': 'test2', 'base_path': 'x',
                                 'state': RESOURCE_STATE.created.name,
                                 'meta_inputs': {'a': {'value': None,
                                                       'schema': 'str'}}})
    res2.inputs['a'] = '0'
    res2.save_lazy()
    ModelMeta.save_all_lazy()

    staged_log = change.stage_changes()
    assert len(staged_log) == 2

    change.discard_all()
    staged_log = change.stage_changes()
    assert len(staged_log) == 0
    assert resource.load_all() == []
def test_discard_connection():
    res1 = DBResource.from_dict('test1',
                                {'name': 'test1', 'base_path': 'x',
                                 'state': RESOURCE_STATE.created.name,
                                 'meta_inputs': {'a': {'value': None,
                                                       'schema': 'str'}}})
    res1.inputs['a'] = '9'
    res1.save_lazy()

    res2 = DBResource.from_dict('test2',
                                {'name': 'test2', 'base_path': 'x',
                                 'state': RESOURCE_STATE.created.name,
                                 'meta_inputs': {'a': {'value': None,
                                                       'schema': 'str'}}})
    res2.inputs['a'] = '0'
    res2.save_lazy()
    ModelMeta.save_all_lazy()

    staged_log = change.stage_changes()
    for item in staged_log:
        operations.move_to_commited(item.log_action)

    res1 = resource.load('test1')
    res2 = resource.load('test2')
    res1.connect(res2, {'a': 'a'})

    staged_log = change.stage_changes()
    assert len(staged_log) == 1
    assert res2.args == {'a': '9'}

    change.discard_all()
    assert res2.args == {'a': '0'}
    assert len(change.stage_changes()) == 0
def tagged_resources():
    base_tags = ['n1=x', 'n2']

    tags = base_tags + ['node=t1']
    t1 = Resource.from_dict('t1',
                            {'name': 't1', 'tags': tags, 'base_path': 'x'})
    t1.save_lazy()

    tags = base_tags + ['node=t2']
    t2 = Resource.from_dict('t2',
                            {'name': 't2', 'tags': tags, 'base_path': 'x'})
    t2.save_lazy()

    tags = base_tags + ['node=t3']
    t3 = Resource.from_dict('t3',
                            {'name': 't3', 'tags': tags, 'base_path': 'x'})
    t3.save_lazy()

    tags = ['node=t3']
    t4 = Resource.from_dict('t4',
                            {'name': 't4', 'tags': tags, 'base_path': 'x'})
    t4.save_lazy()

    ModelMeta.save_all_lazy()
    return [t1, t2, t3]
def load_updated(since=None, with_childs=True):
    if since is None:
        startkey = StrInt.p_min()
    else:
        startkey = since
    candids = DBResource.updated.filter(startkey, StrInt.p_max())
    if with_childs:
        candids = DBResource.childs(candids)
    return [Resource(r) for r in DBResource.multi_get(candids)]
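# Usage sketch for load_updated above (hedged: `checkpoint` is a hypothetical
# StrInt value saved from an earlier run, not part of the original code).
# Without an argument the scan starts at StrInt.p_min(), i.e. every resource
# in the 'updated' index is returned; with a checkpoint only resources touched
# after it are scanned, and with_childs=True also pulls in resources reachable
# through connections from those candidates.
#
#     everything = load_updated()
#     recent = load_updated(since=checkpoint, with_childs=False)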
def test_connect_other_list(rk):
    k1 = next(rk)
    k2 = next(rk)
    r1 = create_resource(k1, {'name': 'first',
                              'inputs': {'config': {"trackers": []}}})
    r2 = create_resource(k2, {'name': 'second',
                              'inputs': {"trackers": ["t1", "t2"]}})
    r2.connect(r1, {'trackers': 'config:trackers'})
    Resource.save_all_lazy()

    assert r1.inputs['config']['trackers'] == ["t1", "t2"]
def tagged_resources():
    tags = ['n1', 'n2', 'n3']
    t1 = Resource.from_dict('t1',
                            {'name': 't1', 'tags': tags, 'base_path': 'x'})
    t1.save_lazy()
    t2 = Resource.from_dict('t2',
                            {'name': 't2', 'tags': tags, 'base_path': 'x'})
    t2.save_lazy()
    t3 = Resource.from_dict('t3',
                            {'name': 't3', 'tags': tags, 'base_path': 'x'})
    t3.save_lazy()
    ModelMeta.save_all_lazy()
    return [t1, t2, t3]
def load(name):
    r = DBResource.get(name)

    if not r:
        raise Exception('Resource {} does not exist in DB'.format(name))

    return Resource(r)
def test_revert_create():
    res = DBResource.from_dict('test1',
                               {'name': 'test1', 'base_path': 'x',
                                'state': RESOURCE_STATE.created.name,
                                'meta_inputs': {'a': {'value': None,
                                                      'schema': 'str'}}})
    res.inputs['a'] = '9'
    res.save_lazy()
    ModelMeta.save_all_lazy()

    staged_log = change.stage_changes()
    assert len(staged_log) == 1
    logitem = staged_log[0]
    operations.move_to_commited(logitem.log_action)
    assert logitem.diff == [['add', '', [['a', '9']]]]

    commited = CommitedResource.get('test1')
    assert commited.inputs == {'a': '9'}

    change.revert(logitem.uid)
    staged_log = change.stage_changes()
    assert len(staged_log) == 1

    for item in staged_log:
        operations.move_to_commited(item.log_action)

    assert resource.load_all() == []
def load_by_tags(tags):
    tags = set(tags)
    candids_all = set()
    for tag in tags:
        candids = DBResource.tags.filter(tag)
        candids_all.update(set(candids))
    return [Resource(r) for r in DBResource.multi_get(candids_all)]
def test_revert_update():
    commit = {'a': '10'}
    previous = {'a': '9'}
    res = DBResource.from_dict('test1',
                               {'name': 'test1', 'base_path': 'x',
                                'meta_inputs': {'a': {'value': None,
                                                      'schema': 'str'}}})
    res.save()
    action = 'update'
    res.inputs['a'] = '9'

    resource_obj = resource.load(res.name)
    assert resource_obj.args == previous

    log = data.SL()
    logitem = change.create_logitem(res.name,
                                    action,
                                    change.create_diff(commit, previous),
                                    [],
                                    base_path=res.base_path)
    log.append(logitem)
    resource_obj.update(commit)
    operations.move_to_commited(logitem.log_action)

    assert logitem.diff == [['change', 'a', ['9', '10']]]
    assert resource_obj.args == commit

    change.revert(logitem.uid)
    assert resource_obj.args == previous
def test_remove_events(events_example):
    r = Resource.from_dict(dict(key='e1', name='e1', base_path='x'))
    r.save()
    to_be_removed = events_example[2]
    evapi.add_events('e1', events_example)
    evapi.remove_event(to_be_removed)
    assert to_be_removed not in evapi.all_events('e1')
def test_riak():
    events = {
        'riak_service1': [
            evapi.React('riak_service1', 'run', 'success',
                        'riak_service2', 'run'),
            evapi.React('riak_service1', 'run', 'success',
                        'riak_service3', 'run')
        ],
        'riak_service3': [
            evapi.React('riak_service3', 'join', 'success',
                        'riak_service1', 'commit'),
            evapi.React('riak_service3', 'run', 'success',
                        'riak_service3', 'join')
        ],
        'riak_service2': [
            evapi.React('riak_service2', 'run', 'success',
                        'riak_service2', 'join'),
            evapi.React('riak_service2', 'join', 'success',
                        'riak_service1', 'commit')
        ],
    }

    for name in events:
        res = Resource.from_dict({'key': name, 'name': name})
        res.save()
        res.inputs.add_new('location_id', '1')
        evapi.add_events(name, events[name])

    changes_graph = nx.MultiDiGraph()
    changes_graph.add_node('riak_service1.run')
    evapi.build_edges(changes_graph, events)

    assert set(changes_graph.predecessors('riak_service1.commit')) == {
        'riak_service2.join', 'riak_service3.join'}
def test_revert_update():
    commit = {'a': '10'}
    previous = {'a': '9'}
    res = DBResource.from_dict('test1',
                               {'name': 'test1', 'base_path': 'x',
                                'meta_inputs': {'a': {'value': None,
                                                      'schema': 'str'}}})
    res.save()
    action = 'update'
    res.inputs['a'] = '9'

    resource_obj = resource.load(res.name)
    assert resource_obj.args == previous

    log = data.SL()
    logitem = change.create_logitem(res.name,
                                    action,
                                    change.create_diff(commit, previous),
                                    [],
                                    base_path=res.base_path)
    log.append(logitem)
    resource_obj.update(commit)
    operations.move_to_commited(logitem.log_action)

    assert logitem.diff == [('change', 'a', ('9', '10'))]
    assert resource_obj.args == commit

    change.revert(logitem.uid)
    assert resource_obj.args == previous
def test_resource_childs(rk):
    k1 = next(rk)
    k2 = next(rk)
    k3 = next(rk)

    r1 = create_resource(k1, {'name': 'first',
                              'inputs': {'input1': 10,
                                         'input2': 15}})
    r2 = create_resource(k2, {'name': 'first',
                              'inputs': {'input1': None,
                                         'input2': None}})
    r3 = create_resource(k3, {'name': 'first',
                              'inputs': {'input1': None,
                                         'input2': None}})
    r2.connect(r3, {'input1': 'input1'})
    r1.connect(r2, {'input1': 'input1'})

    r1.save()
    r2.save()
    r3.save()
    assert set(Resource.childs([r1.key])) == {r1.key, r2.key, r3.key}
def load_all(startswith=None):
    if startswith:
        start = startswith
        end = startswith + '~'
        candids = DBResource.bucket.get_index("$key", start, end).results
    else:
        candids = DBResource.updated.filter(StrInt.p_min(), StrInt.p_max())
    return [Resource(r) for r in DBResource.multi_get(candids)]
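# Note on the prefix query in load_all: '~' (0x7E) sorts after all digits and
# letters in ASCII, so (startswith, startswith + '~') forms a key range on the
# "$key" index that covers every key beginning with that prefix. A minimal
# usage sketch (the resource names are hypothetical):
#
#     all_resources = load_all()
#     node_resources = load_all(startswith='node')   # node1, node2, ...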
def test_events(rk):
    k = next(rk)
    r1 = Resource.from_dict(k, {'events': ['event1', 'event2']})
    r1.save()
    assert r1.events == ['event1', 'event2']
    r1.events.pop()
    assert r1.events == ['event1']
def add_events(resource, lst):
    resource = Resource.get(resource)
    events = resource.events
    # TODO: currently we don't track mutable objects
    events.extend([ev.to_dict() for ev in lst])
    resource.events = events
    resource.save_lazy()
def remove_event(ev):
    to_remove = ev.to_dict()
    resource = ev.parent
    resource = Resource.get(resource)
    # TODO: currently we don't track mutable objects
    events = resource.events
    events.remove(to_remove)
    resource.events = events
    resource.save_lazy()
def load_by_tags(query):
    if isinstance(query, (list, set, tuple)):
        query = '|'.join(query)
    parsed_tags = get_string_tokens(query)
    r_with_tags = [DBResource.tags.filter(tag) for tag in parsed_tags]
    r_with_tags = set(itertools.chain(*r_with_tags))
    resources = [Resource(r) for r in DBResource.multi_get(r_with_tags)]
    return filter(lambda n: Expression(query, n.tags).evaluate(), resources)
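# Usage sketch for the expression-based load_by_tags above (the tag values are
# hypothetical). A list/set/tuple is joined with '|' and therefore behaves as
# an OR query; a string is tokenized only to widen the tag-index lookup, and
# the full query is then re-evaluated by Expression against each candidate's
# tags, so only resources satisfying the whole expression are returned.
#
#     load_by_tags(['node=t1', 'node=t2'])   # tagged with either value
#     load_by_tags('n1=x & node=t1')         # assuming Expression supports '&'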
def insert(self, changed_resources, changes_graph):
    changed_resources.append(self.parent_node)
    try:
        location_id = Resource.get(self.parent).inputs['location_id']
    except (DBLayerNotFound, DBLayerSolarException):
        location_id = None
    changes_graph.add_node(
        self.parent_node, status='PENDING',
        target=location_id,
        errmsg='', type='solar_resource',
        args=[self.parent, self.parent_action])
def insert(self, changed_resources, changes_graph):
    changed_resources.append(self.parent_node)
    try:
        location_id = Resource.get(self.parent).inputs['location_id']
    except DBLayerNotFound:
        location_id = None
    changes_graph.add_node(
        self.parent_node, status='PENDING',
        target=location_id,
        errmsg='', type='solar_resource',
        args=[self.parent, self.parent_action])
def create_resource(name, tags=None):
    resource = DBResource.from_dict(
        name,
        {'name': name,
         'base_path': 'x',
         'state': '',
         'tags': tags or [],
         'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    resource.save_lazy()
    return resource
def nova_deps():
    for name in ['nova', 'nova_api', 'nova_sch']:
        r = Resource.from_dict(dict(key=name, name=name))
        r.inputs.add_new('location_id', '1')
        r.save()
    nova = [
        evapi.Dep('nova', 'run', 'success', 'nova_sch', 'run'),
        evapi.React('nova', 'run', 'success', 'nova_api', 'update')]
    nova_api = [
        evapi.React('nova_api', 'update', 'success', 'nova', 'reboot')]
    evapi.add_events('nova', nova)
    evapi.add_events('nova_api', nova_api)
    return {'nova': nova}
def test_mandatory_revisit():
    events = {
        'e1': [evapi.Dep('e1', 'run', 'success', 'e2', 'run'),
               evapi.React('e1', 'run', 'success', 'e2', 'start')],
        'e2': [evapi.React('e2', 'start', 'success', 'e2', 'run')]}

    for name in events:
        r = Resource.from_dict(dict(key=name, name=name))
        r.inputs.add_new('location_id', '1')
        r.save()
        evapi.add_events(name, events[name])

    changes_graph = nx.DiGraph()
    changes_graph.add_node('e1.run')
    evapi.build_edges(changes_graph, events)

    assert set(changes_graph.predecessors('e2.run')) == {'e1.run', 'e2.start'}
def create_from_spec(self, name, spec, args=None, tags=None):
    args = args or {}
    self.name = name
    if spec:
        if spec.startswith('/'):
            # it's full path, don't use repo
            self.base_path = spec
            metadata = read_meta(spec)
        else:
            repo, spec = Repository.parse(spec)
            metadata = repo.get_metadata(spec)
            self.base_path = repo.get_path(spec)
    else:
        metadata = deepcopy(self._metadata)
        self.base_path = spec  # TODO: remove this old method?

    if tags is None:
        tags = []
    m_tags = metadata.get('tags', [])
    tags.extend(m_tags)
    tags.append('resource={}'.format(name))

    inputs = metadata.get('input', {})
    self.auto_extend_inputs(inputs)
    self.db_obj = DBResource.from_dict(
        name,
        {'id': name,
         'name': name,
         'actions_path': metadata.get('actions_path', ''),
         'actions': metadata.get('actions', {}),
         'base_name': metadata.get('base_name', ''),
         'base_path': metadata.get('base_path', ''),
         'handler': metadata.get('handler', ''),
         'version': metadata.get('version', ''),
         'meta_inputs': inputs,
         'tags': tags,
         'state': RESOURCE_STATE.created.name,
         'managers': metadata.get('managers', [])})
    self.create_inputs(args)

    self.db_obj.save()
    LogItem.new(
        {'resource': self.name,
         'action': 'run',
         'log': 'staged',
         'tags': self.tags}).save_lazy()
def create_resource(key, data):
    mi = data.get('meta_inputs', {})
    for inp_name, inp_value in data.get('inputs', {}).items():
        if isinstance(inp_value, list):
            if len(inp_value) == 1 and isinstance(inp_value[0], dict):
                schema = [{}]
            else:
                schema = ['str!']
        elif isinstance(inp_value, dict):
            schema = {}
        else:
            schema = '%s!' % type(inp_value).__name__
        mi.setdefault(inp_name, {"schema": schema})
    data['meta_inputs'] = mi
    return Resource.from_dict(key, data)
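# A quick sketch of the schema inference done by create_resource above (the
# key and input values are hypothetical): a scalar maps to a required schema
# derived from its Python type name, a dict to a free-form {}, a one-element
# list of dicts to [{}], and any other list to ['str!'].
#
#     r = create_resource('k1', {
#         'name': 'example',
#         'inputs': {'port': 80,              # -> 'int!'
#                    'config': {},            # -> {}
#                    'backends': [{}],        # -> [{}]
#                    'hosts': ['a', 'b']}})   # -> ['str!']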
def create_resource(name, tags=None):
    res = DBResource.from_dict(
        name,
        {'name': name,
         'base_path': 'x',
         'state': resource.RESOURCE_STATE.created.name,
         'tags': tags or [],
         'meta_inputs': {'a': {'value': None, 'schema': 'str'}}})
    res.save_lazy()
    return res
def test_revert_removal():
    res = DBResource.from_dict('test1',
                               {'name': 'test1', 'base_path': 'x',
                                'state': RESOURCE_STATE.created.name,
                                'meta_inputs': {'a': {'value': None,
                                                      'schema': 'str'}}})
    res.inputs['a'] = '9'
    res.save_lazy()

    commited = CommitedResource.from_dict('test1',
                                          {'inputs': {'a': '9'},
                                           'state': 'operational'})
    commited.save_lazy()

    resource_obj = resource.load(res.name)
    resource_obj.remove()
    ModelMeta.save_all_lazy()

    changes = change.stage_changes()
    assert len(changes) == 1
    assert changes[0].diff == [['remove', '', [['a', '9']]]]
    operations.move_to_commited(changes[0].log_action)
    clear_cache()
    assert DBResource._c.obj_cache == {}
    # assert DBResource.bucket.get('test1').siblings == []

    with mock.patch.object(repository.Repository, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}},
                              'id': 'mocked'}
        with mock.patch.object(repository.Repository, 'get_path') as mpath:
            mpath.return_value = 'x'
            change.revert(changes[0].uid)
    ModelMeta.save_all_lazy()
    # assert len(DBResource.bucket.get('test1').siblings) == 1

    resource_obj = resource.load('test1')
    assert resource_obj.args == {'a': '9',
                                 'location_id': '',
                                 'transports_id': ''}
def __init__(self, name, base_path, args=None, tags=None,
             virtual_resource=None):
    args = args or {}
    self.name = name
    if base_path:
        metadata = read_meta(base_path)
    else:
        metadata = deepcopy(self._metadata)

    self.base_path = base_path

    if tags is None:
        tags = []
    m_tags = metadata.get('tags', [])
    tags.extend(m_tags)
    tags.append('resource={}'.format(metadata['id']))

    self.virtual_resource = virtual_resource

    inputs = metadata.get('input', {})
    self.auto_extend_inputs(inputs)
    self.db_obj = DBResource.from_dict(
        name,
        {'id': name,
         'name': name,
         'actions_path': metadata.get('actions_path', ''),
         'actions': metadata.get('actions', {}),
         'base_name': metadata.get('base_name', ''),
         'base_path': metadata.get('base_path', ''),
         'handler': metadata.get('handler', ''),
         'puppet_module': metadata.get('puppet_module', ''),
         'version': metadata.get('version', ''),
         'meta_inputs': inputs,
         'tags': tags,
         'state': RESOURCE_STATE.created.name,
         'managers': metadata.get('managers', [])})
    self.create_inputs(args)

    self.db_obj.save()
def insert(self, changed_resources, changes_graph):
    if self.parent_node in changes_graph:
        if self.child_node not in changes_graph:
            try:
                location_id = Resource.get(self.child).inputs['location_id']
            except DBLayerNotFound:
                location_id = None
            changes_graph.add_node(
                self.child_node, status='PENDING',
                target=location_id,
                errmsg='', type='solar_resource',
                args=[self.child, self.child_action])
        changes_graph.add_edge(
            self.parent_node, self.child_node, state=self.state)
        changed_resources.append(self.child_node)
def test_revert_removal():
    res = DBResource.from_dict('test1',
                               {'name': 'test1', 'base_path': 'x',
                                'state': RESOURCE_STATE.created.name,
                                'meta_inputs': {'a': {'value': None,
                                                      'schema': 'str'}}})
    res.inputs['a'] = '9'
    res.save_lazy()

    commited = CommitedResource.from_dict('test1',
                                          {'inputs': {'a': '9'},
                                           'state': 'operational'})
    commited.save_lazy()

    resource_obj = resource.load(res.name)
    resource_obj.remove()
    ModelMeta.save_all_lazy()

    changes = change.stage_changes()
    assert len(changes) == 1
    assert changes[0].diff == [['remove', '', [['a', '9']]]]
    operations.move_to_commited(changes[0].log_action)
    ModelMeta.session_start()
    assert DBResource._c.obj_cache == {}
    assert DBResource.bucket.get('test1').siblings == []

    with mock.patch.object(resource, 'read_meta') as mread:
        mread.return_value = {'input': {'a': {'schema': 'str!'}},
                              'id': 'mocked'}
        change.revert(changes[0].uid)
    ModelMeta.save_all_lazy()
    assert len(DBResource.bucket.get('test1').siblings) == 1

    resource_obj = resource.load('test1')
    assert resource_obj.args == {'a': '9',
                                 'location_id': '',
                                 'transports_id': ''}
def test_discard_removed():
    res1 = DBResource.from_dict('test1',
                                {'name': 'test1', 'base_path': 'x',
                                 'state': RESOURCE_STATE.created.name,
                                 'meta_inputs': {'a': {'value': None,
                                                       'schema': 'str'}}})
    res1.inputs['a'] = '9'
    res1.save_lazy()
    ModelMeta.save_all_lazy()

    staged_log = change.stage_changes()
    for item in staged_log:
        operations.move_to_commited(item.log_action)

    res1 = resource.load('test1')
    res1.remove()
    assert len(change.stage_changes()) == 1
    assert res1.to_be_removed()

    change.discard_all()
    assert len(change.stage_changes()) == 0
    assert not resource.load('test1').to_be_removed()