def process_result_set(self):
    """
    @return: a Topo_Diff object consisting of the commit node and parent link
    """
    parent_hash = None
    child_hash = None
    created_ts = None
    for _, _, r_set in self.iter__r_set():
        for row in r_set:
            for ret_dict in row:
                # hash values / timestamp must be set exactly once across the result set
                assert None == parent_hash
                assert None == child_hash
                assert None == created_ts
                parent_hash = ret_dict['head_parent_commit']['hash']
                child_hash = ret_dict['head_commit']['hash']
                created_ts = ret_dict['ts_created']

    # commit node added by this op
    commit_node = {'id': self.n_id, '__label_set': ['__Commit']}

    # parent link: parent commit -> new head commit
    parent_link = Link.Link_Ptr(src_id=parent_hash, dst_id=child_hash)
    parent_link['id'] = self.l_id
    parent_link['__type'] = '__Parent'

    ret = Topo_Diff()
    ret.node_set_add = [commit_node]
    ret.link_set_add = [parent_link]
    ret.meta['ts_created'] = created_ts
    return ret
def gen_full_db_op_set(self, test_label):
    """Assemble one instance of each supported DB op type for exercising the op pipeline."""
    node_a, node_a_id = generate_random_node_dict(test_label)
    node_b, node_b_id = generate_random_node_dict(test_label)
    link_a, link_a_id = generate_random_link_dict(test_label, node_a_id, node_b_id)

    node_list = [node_a, node_b]
    link_list = [link_a]

    topo_diff = Topo_Diff(node_set_add=node_list, link_set_add=link_list)

    attr_diff = Attr_Diff()
    attr_diff.add_node_attr_write(node_a_id, 'attr_0', 0)

    test_rzdoc = generate_random_RZDoc(test_label)

    return [
        DBO_rzdoc__clone(),
        DBO_add_node_set(meta_attr_list_to_meta_attr_map(node_list)),
        DBO_add_link_set(meta_attr_list_to_meta_attr_map(link_list, meta_attr='__type')),
        DBO_diff_commit__attr(attr_diff),
        DBO_diff_commit__topo(topo_diff),
        DBO_rm_node_set(id_set=[node_a_id]),
        # block chain
        DBO_block_chain__init(test_rzdoc),
        DBO_block_chain__commit(commit_obj=topo_diff.to_json_dict()),
        # rzdoc
        DBO_rzdoc__create(test_rzdoc),
        DBO_rzdoc__delete(test_rzdoc),
        DBO_rzdoc__list(),
        DBO_rzdoc__lookup_by_name(test_rzdoc.name),
    ]
def gen_full_db_op_set(self, test_label):
    """Build and return a list containing one of every DB op type, for pipeline tests."""
    src_node, src_node_id = generate_random_node_dict(test_label)
    dst_node, dst_node_id = generate_random_node_dict(test_label)
    test_link, test_link_id = generate_random_link_dict(test_label, src_node_id, dst_node_id)

    node_set = [src_node, dst_node]
    link_set = [test_link]

    topo_diff = Topo_Diff(node_set_add=node_set, link_set_add=link_set)

    attr_diff = Attr_Diff()
    attr_diff.add_node_attr_write(src_node_id, 'attr_0', 0)

    test_rzdoc = generate_random_RZDoc(test_label)

    # core graph ops
    op_set = [DBO_rzdoc__clone(),
              DBO_add_node_set(meta_attr_list_to_meta_attr_map(node_set)),
              DBO_add_link_set(meta_attr_list_to_meta_attr_map(link_set, meta_attr='__type')),
              DBO_diff_commit__attr(attr_diff),
              DBO_diff_commit__topo(topo_diff),
              DBO_rm_node_set(id_set=[src_node_id])]
    # block chain
    op_set += [DBO_block_chain__init(test_rzdoc),
               DBO_block_chain__commit(commit_obj=topo_diff.to_json_dict())]
    # rzdoc
    op_set += [DBO_rzdoc__create(test_rzdoc),
               DBO_rzdoc__delete(test_rzdoc),
               DBO_rzdoc__list(),
               DBO_rzdoc__lookup_by_name(test_rzdoc.name)]
    return op_set
def process_result_set(self):
    """
    @return: a Topo_Diff object consisting of the commit node and parent link
    """
    hash_of_parent = None
    hash_of_child = None
    commit_ts = None
    for _, _, r_set in self.iter__r_set():
        for row in r_set:
            for ret_dict in row:
                # assert values are only ever assigned once over the whole result set
                assert None == hash_of_parent
                assert None == hash_of_child
                assert None == commit_ts
                hash_of_parent = ret_dict['head_parent_commit']['hash']
                hash_of_child = ret_dict['head_commit']['hash']
                commit_ts = ret_dict['ts_created']

    ret = Topo_Diff()
    # the single commit node created by this op
    ret.node_set_add = [{'id': self.n_id, '__label_set': ['__Commit']}]

    # link from parent commit to the new head commit
    commit_link = Link.Link_Ptr(src_id=hash_of_parent, dst_id=hash_of_child)
    commit_link['id'] = self.l_id
    commit_link['__type'] = '__Parent'
    ret.link_set_add = [commit_link]

    ret.meta['ts_created'] = commit_ts
    return ret
def sanitize_input(req):
    """
    Extract and sanitize a topo-diff commit payload from the request.

    @param req: request object exposing get_json()
    @return: (rzdoc_name, topo_diff) tuple; rzdoc_name may be None when absent
    """
    # Bugfix: use the 'req' parameter instead of the module-global 'request',
    # matching the sibling sanitize_input handlers; also parse the JSON body
    # only once instead of twice.
    json_dict = req.get_json()
    rzdoc_name = json_dict.get('rzdoc_name')
    topo_diff_dict = json_dict['topo_diff']
    topo_diff = Topo_Diff.from_json_dict(topo_diff_dict)
    sanitize_input__topo_diff(topo_diff)
    return rzdoc_name, topo_diff
def test_REST_post_triggers_ws_multicast__topo_diff(self):
    """
    A REST POST of a topo diff should trigger a ws multicast which the
    listening websocket client (greenlet c_0) receives.
    """

    class NS_test(BaseNamespace):

        def on_diff_commit__topo(self, *data):
            # record the multicast payload on the greenlet driving the socket
            greenlet.getcurrent().data = data
            raise KeyboardInterrupt()  # TODO: cleanup: properly close socket

    test_label = neo4j_test_util.rand_label()
    n, n_id = test_util.generate_random_node_dict(test_label)
    topo_diff = Topo_Diff(node_set_add=[n])

    def c_0():
        # ws listener: let the peer POST, then wait for the multicast
        with RZ_websocket(namespace=NS_test) as (sock, _):
            c1_t.switch()  # allow peer to POST
            sock.wait(8)  # allow self to receive

    def c_1():
        # REST POST of the topo diff
        data = json.dumps({'topo_diff': topo_diff.to_json_dict()})
        req = urllib2.Request(url='http://rhizi.local:8080/graph/diff-commit-topo',
                              data=data,
                              headers={'Content-Type': 'application/json'})
        f = urllib2.urlopen(req)
        f.close()
        c0_t.switch()

    c0_t = greenlet(c_0)
    c0_t.data = None
    c1_t = greenlet(c_1)
    c0_t.switch()

    self.assertTrue(None != c0_t.data)
    # Bugfix: the multicast payload is stored on c0_t (the ws listener
    # greenlet); c1_t.data was never initialized or assigned, so the original
    # assertion raised AttributeError rather than testing the payload.
    self.assertEqual(2, len(c0_t.data))
def sanitize_input(req):
    """Extract and sanitize the single Topo_Diff carried in the request's diff-set payload."""
    payload = req.get_json()['diff_set']
    topo_diff = Topo_Diff.from_json_dict(payload['__diff_set_topo'][0])
    sanitize_input__topo_diff(topo_diff)
    return topo_diff
def sanitize_input(req):
    """
    Extract and sanitize the single Topo_Diff carried in the request's
    diff-set payload.

    @param req: request object exposing get_json()
    @return: sanitized Topo_Diff
    """
    diff_set_dict = req.get_json()['diff_set']
    topo_diff_dict = diff_set_dict['__diff_set_topo'][0]
    topo_diff = Topo_Diff.from_json_dict(topo_diff_dict)
    sanitize_input__topo_diff(topo_diff)
    return topo_diff  # removed stray trailing semicolon (un-Pythonic)
def test_ws_event__topo_diff(self):
    # Round-trip a topology diff over websockets: greenlet c_0 emits a
    # 'diff_commit__topo' event; greenlet c_1 waits for the multicast and
    # records the payload. Greenlet switch order is load-bearing here.

    class NS_test(BaseNamespace):

        def on_diff_commit__topo(self, *data):
            # stash the received payload on the greenlet running the socket loop
            greenlet.getcurrent().data = data
            raise KeyboardInterrupt()  # TODO: cleanup: properly close socket

    test_label = neo4j_test_util.rand_label()
    n_0, n_0_id = test_util.generate_random_node_dict(test_label)
    n_1, n_1_id = test_util.generate_random_node_dict(test_label)
    l, l_id = test_util.generate_random_link_dict(test_label, n_0_id, n_1_id)
    topo_diff = Topo_Diff(node_set_add=[n_0, n_1], link_set_add=[l])

    def c_0():
        # emitter: yields to the receiver so it can connect, then sends the diff
        with RZ_websocket(namespace=NS_test) as (_, ns_sock):
            c1_t.switch()  # allow peer to connect
            data = json.dumps(topo_diff, cls=Topo_Diff.JSON_Encoder)
            ns_sock.emit('diff_commit__topo', data)
            c1_t.switch()

    def c_1():
        # receiver: yields to the emitter, then blocks waiting for the event
        with RZ_websocket(namespace=NS_test) as (sock, _):
            c0_t.switch()  # allow peer to emit
            sock.wait(8)  # allow self to receive

    c0_t = greenlet(c_0)
    c1_t = greenlet(c_1)
    c1_t.data = None
    c0_t.switch()

    # payload is the pair (topo_diff dict, commit result dict)
    self.assertTrue(None != c1_t.data)
    self.assertEqual(2, len(c1_t.data))
    diff_in = Topo_Diff.from_json_dict(c1_t.data[0])
    commit_ret = Topo_Diff.Commit_Result_Type.from_json_dict(c1_t.data[1])
    self.assertEqual(Topo_Diff, type(diff_in))
    self.assertEqual(Topo_Diff.Commit_Result_Type, type(commit_ret))
def generate_random_diff__topo__minimal(test_label):
    """
    @return: a ~minimal Topo_Diff containing three nodes and two links
    """
    # generate three random nodes; the first acts as the hub of both links
    node_pairs = [generate_random_node_dict(test_label) for _ in range(3)]
    node_set = [node for node, _ in node_pairs]
    hub_id = node_pairs[0][1]
    # link the hub node to each of the other two nodes
    link_set = [generate_random_link_dict(test_label, hub_id, node_id)[0]
                for _, node_id in node_pairs[1:]]
    return Topo_Diff(node_set_add=node_set, link_set_add=link_set)
def on_diff_commit__topo(self, json_data):
    """Websocket handler: commit an incoming topology diff and multicast the result."""
    # FIXME: sanitize input
    payload = json.loads(json_data)
    topo_diff = Topo_Diff.from_json_dict(payload['topo_diff'])
    log.info('ws: rx: topo diff: ' + str(topo_diff))

    ctx = self.__context__common(payload)
    kernel = self.request.kernel
    topo_diff, commit_ret = kernel.diff_commit__topo(topo_diff, ctx)

    # serialize the (possibly augmented) diff before multicasting
    topo_diff_dict = topo_diff.to_json_dict()
    assert Topo_Diff.Commit_Result_Type == type(commit_ret)
    return self.multicast_msg('diff_commit__topo', topo_diff_dict, commit_ret)
def test_ws_event__topo_diff(self):
    # Verify ws emit/receive of a topo diff between two clients driven by
    # cooperatively-scheduled greenlets; switch ordering is deliberate and
    # order-dependent — do not reorder.

    class NS_test(BaseNamespace):

        def on_diff_commit__topo(self, *data):
            # record the payload on the greenlet currently running the socket
            greenlet.getcurrent().data = data
            raise KeyboardInterrupt()  # TODO: cleanup: properly close socket

    test_label = neo4j_test_util.rand_label()
    n_0, n_0_id = test_util.generate_random_node_dict(test_label)
    n_1, n_1_id = test_util.generate_random_node_dict(test_label)
    l, l_id = test_util.generate_random_link_dict(test_label, n_0_id, n_1_id)
    topo_diff = Topo_Diff(node_set_add=[n_0, n_1], link_set_add=[l])

    def c_0():
        # emitter greenlet
        with RZ_websocket(namespace=NS_test) as (_, ns_sock):
            c1_t.switch()  # allow peer to connect
            data = json.dumps(topo_diff, cls=Topo_Diff.JSON_Encoder)
            ns_sock.emit('diff_commit__topo', data)
            c1_t.switch()

    def c_1():
        # receiver greenlet
        with RZ_websocket(namespace=NS_test) as (sock, _):
            c0_t.switch()  # allow peer to emit
            sock.wait(8)  # allow self to receive

    c0_t = greenlet(c_0)
    c1_t = greenlet(c_1)
    c1_t.data = None
    c0_t.switch()

    # payload is (topo_diff dict, commit result dict)
    self.assertTrue(None != c1_t.data)
    self.assertEqual(2, len(c1_t.data))
    diff_in = Topo_Diff.from_json_dict(c1_t.data[0])
    commit_ret = Topo_Diff.Commit_Result_Type.from_json_dict(c1_t.data[1])
    self.assertEqual(Topo_Diff, type(diff_in))
    self.assertEqual(Topo_Diff.Commit_Result_Type, type(commit_ret))
def test_diff_commit__attr(self):
    # Commit a node via a topo diff, then exercise attr-diff commits in three
    # flavors: combined write+remove, write-only, and remove-only.

    # create test node
    test_label = neo4j_test_util.rand_label()
    n_0, n_0_id = generate_random_node_dict(test_label)
    n_0['attr_0'] = 0
    topo_diff = Topo_Diff(node_set_add=[n_0])
    op = DBO_diff_commit__topo(topo_diff)
    self.db_ctl.exec_op(op)

    # apply attr_diff: write two attrs and remove a (nonexistent) third
    attr_diff = Attr_Diff()
    attr_diff.add_node_attr_write(n_0_id, 'attr_0', 0)
    attr_diff.add_node_attr_write(n_0_id, 'attr_1', 'a')
    attr_diff.add_node_attr_rm(n_0_id, 'attr_2')
    op = DBO_diff_commit__attr(attr_diff)
    ret_diff = self.db_ctl.exec_op(op)
    # returned diff covers exactly the single node touched
    self.assertEqual(len(ret_diff.type__node), 1)
    self.assertTrue(None != ret_diff.type__node[n_0_id])

    # attr-set only
    attr_diff = Attr_Diff()
    attr_diff.add_node_attr_write(n_0_id, 'attr_2', 0)
    op = DBO_diff_commit__attr(attr_diff)
    ret_diff = self.db_ctl.exec_op(op)
    # the written attr must appear under the node's '__attr_write' map
    self.assertTrue(None != ret_diff.type__node[n_0_id]['__attr_write'].get('attr_2'))

    # attr-remove only
    attr_diff = Attr_Diff()
    attr_diff.add_node_attr_rm(n_0_id, 'attr_2')
    op = DBO_diff_commit__attr(attr_diff)
    ret_diff = self.db_ctl.exec_op(op)
    # the removed attr must appear under the node's '__attr_remove' list
    self.assertTrue('attr_2' in ret_diff.type__node[n_0_id]['__attr_remove'])
def process_result_set(self):
    """
    Collapse the per-sub-op result sets into a Topo_Diff.Commit_Result_Type
    holding the ids of the nodes/links added and removed.

    Sub-op result sets arrive in the order the sub-ops were queued
    (node-add, link-add, link-rm, node-rm), so the iterator is consumed
    conditionally in that same order.

    @return: Topo_Diff.Commit_Result_Type
    """
    ret_nid_set_add = []
    ret_lid_set_add = []
    ret_nid_set_rm = []
    ret_lid_set_rm = []

    it = self.iter__sub_op()
    if self.n_add_map:
        # iterate over result sets of the node-add sub-op
        # (next(it) replaces py2-only it.next(); works on py2.6+ and py3)
        for _, _, r_set in next(it).iter__r_set():
            for row in r_set:
                for ret_dict in row:
                    n_id = ret_dict['id']  # see query return statement
                    ret_nid_set_add.append(n_id)

    if self.l_add_map:
        # iterate over result sets of the link-add sub-op
        for _, _, r_set in next(it).iter__r_set():
            for row in r_set:
                for ret_dict in row:
                    l_id = ret_dict['id']  # see query return statement
                    ret_lid_set_add.append(l_id)

    if self.l_rm_set:
        # each row is itself a list of removed link ids, hence extend()
        for _, _, row_set in next(it).iter__r_set():
            for l_id_list in row_set:
                ret_lid_set_rm.extend(l_id_list)

    if self.n_rm_set:
        # each row is itself a list of removed node ids, hence extend()
        for _, _, row_set in next(it).iter__r_set():
            for n_id_list in row_set:
                ret_nid_set_rm.extend(n_id_list)

    ret = Topo_Diff.Commit_Result_Type(node_id_set_add=ret_nid_set_add,
                                       link_id_set_add=ret_lid_set_add,
                                       node_id_set_rm=ret_nid_set_rm,
                                       link_id_set_rm=ret_lid_set_rm)
    return ret
def process_result_set(self):
    # Rebuild a Topo_Diff (node dicts + Link_Ptr objects) from the query's
    # result set; see the query's RETURN clause for the row layout.
    ret_n_set = []
    ret_l_set = []
    for _, _, r_set in self.iter__r_set():
        for row in r_set:
            # NOTE(review): row.items() is unpacked into exactly three values
            # (node, node label set, link tuple set) — presumably 'row' is a
            # driver-specific 3-entry ordered mapping; verify against the
            # DB driver before changing.
            n, n_lbl_set, l_set = row.items()  # see query return statement

            # reconstruct nodes
            assert None != n.get('id'), "db contains nodes with no id"
            n['__label_set'] = self.process_q_ret__n_label_set(n_lbl_set)
            ret_n_set.append(n)

            # reconstruct links from link tuples
            for l_tuple in l_set:
                assert 3 == len(l_tuple)  # see query return statement

                if None == l_tuple[0]:  # check if link dst is None
                    # as link matching is optional, collect may yield empty sets
                    continue

                ret_l, ret_l_type, ret_l_dst_id = l_tuple
                l = Link.Link_Ptr(src_id=n['id'], dst_id=ret_l_dst_id)
                l['id'] = ret_l['id']
                l['__type'] = self.process_q_ret__l_type(ret_l_type)
                ret_l_set.append(l)

    # warn when the result set hit the query limit and may be truncated
    if len(ret_n_set) >= self.limit:  # TODO: generalize logic, mv to DB_Driver
        log.warning('DB op result set larger than query limit: size: %d, limit: %d' % (len(ret_n_set), self.limit))

    topo_diff = Topo_Diff(node_set_add=ret_n_set,
                          link_set_add=ret_l_set)
    return topo_diff
def test_diff_commit__topo(self):
    # End-to-end topo diff commit: add 3 nodes + 2 links, verify the commit
    # result and persistence, then remove the links and one node and verify
    # removal. DB op ordering is significant throughout.
    test_label = neo4j_test_util.rand_label()
    n_0, n_0_id = generate_random_node_dict(test_label)
    n_1, n_1_id = generate_random_node_dict(test_label)
    n_2, n_2_id = generate_random_node_dict(test_label)
    l_0, l_0_id = generate_random_link_dict(test_label, n_0_id, n_1_id)
    l_1, l_1_id = generate_random_link_dict(test_label, n_0_id, n_2_id)
    n_set = [n_0, n_1, n_2]
    l_set = [l_0, l_1]
    topo_diff = Topo_Diff(node_set_add=n_set, link_set_add=l_set)

    # commit diff
    op = DBO_diff_commit__topo(topo_diff)
    ret_topo_diff = self.db_ctl.exec_op(op)

    # test return type
    self.assertTrue(hasattr(ret_topo_diff, 'node_id_set_add'))
    self.assertTrue(hasattr(ret_topo_diff, 'link_id_set_add'))
    self.assertTrue(hasattr(ret_topo_diff, 'node_id_set_rm'))
    self.assertTrue(hasattr(ret_topo_diff, 'link_id_set_rm'))

    # test return set lengths
    self.assertEqual(len(ret_topo_diff.node_id_set_add), len(n_set))
    self.assertEqual(len(ret_topo_diff.link_id_set_add), len(l_set))
    self.assertEqual(len(ret_topo_diff.node_id_set_rm), 0)
    self.assertEqual(len(ret_topo_diff.link_id_set_rm), 0)

    # assert nodes persisted
    id_set = self.db_ctl.exec_op(DBO_match_node_set_by_id_attribute([n_0_id, n_1_id]))
    self.assertEqual(len(id_set), 2)

    # assert links persisted
    # NOTE(review): 'Link.link_ptr' (lowercase) here vs 'Link.Link_Ptr'
    # elsewhere in the codebase — confirm both names exist on Link.
    l_ptr_0 = Link.link_ptr(src_id=n_0_id, dst_id=n_1_id)
    l_ptr_1 = Link.link_ptr(src_id=n_0_id, dst_id=n_2_id)
    op = DBO_load_link_set.init_from_link_ptr_set([l_ptr_0, l_ptr_1])
    id_set = self.db_ctl.exec_op(op)
    self.assertEqual(len(id_set), 2)

    # remove links
    topo_diff = Topo_Diff(link_id_set_rm=[l_0_id, l_1_id])
    op = DBO_diff_commit__topo(topo_diff)
    ret_topo_diff = self.db_ctl.exec_op(op)
    self.assertEqual(len(ret_topo_diff.link_id_set_rm), 2)

    # assert links removed
    op = DBO_load_link_set.init_from_link_ptr_set([l_ptr_0, l_ptr_1])
    id_set = self.db_ctl.exec_op(op)
    self.assertEqual(len(id_set), 0)

    # remove nodes
    topo_diff = Topo_Diff(node_id_set_rm=[n_2_id])
    op = DBO_diff_commit__topo(topo_diff)
    ret_topo_diff = self.db_ctl.exec_op(op)
    self.assertEqual(len(ret_topo_diff.node_id_set_rm), 1)

    # assert nodes removed
    op = DBO_match_node_set_by_id_attribute([n_2_id])
    id_set = self.db_ctl.exec_op(op)
    self.assertEqual(len(id_set), 0)
def merge_topos(topos, names):
    """
    Merge a number of Topo_Diff's.

    result node set is union of nodes
    conflicting nodes: same name nodes take the first (id wise)
    attributes: take the first
    result link set is merged (well defined: replace dropped id by chosen id in all links)

    @param topos: iterable of Topo_Diff objects to merge
    @param names: parallel iterable of human-readable names (progress output only)
    @return: merged Topo_Diff with freshly allocated node ids
    """
    result = Topo_Diff()
    node_names = dict()    # lowercased node name -> first node seen with that name
    dropped_nodes = set()  # original names of duplicate nodes that were discarded
    links_src_dst = set()  # (src_id, dst_id) pairs already present, for link dedup

    def rename(links, from_id, to_id):
        # repoint every link endpoint referencing from_id at to_id
        for link in links:
            if link['__src_id'] == from_id:
                link['__src_id'] = to_id
            if link['__dst_id'] == from_id:
                link['__dst_id'] = to_id

    def merge(topo):
        # fold one topo into result, deduping nodes by name, links by endpoints
        links = topo.link_set_add
        nodes = topo.node_set_add
        new_nodes = []
        for node in nodes:
            name = node['name'].lower()
            if name in node_names:
                # duplicate name: drop this node, repoint its links at the keeper
                dropped_nodes.add(node['name'])
                rename(links, node['id'], node_names[name]['id'])
            else:
                node_names[name] = node
                new_nodes.append(node)
        new_links = []
        for link in links:
            k = (link['__src_id'], link['__dst_id'])
            if k not in links_src_dst:
                links_src_dst.add(k)
                new_links.append(link)
        if len(dropped_nodes) > 0:
            print("dropped duplicate nodes count: %s" % len(dropped_nodes))
        if len(new_links) != len(links):
            print("dropped duplicate links count: %s" % (len(links) - len(new_links)))
        print("adding %s nodes, %s links" % (len(new_nodes), len(new_links)))
        result.node_set_add.extend(new_nodes)
        result.link_set_add.extend(new_links)

    for topo, name in zip(topos, names):
        print("merging %s" % name)
        merge(topo)

    # Cleanup: removed the never-called debug helper 'show' and the
    # write-only 'node_ids' set from the original implementation.

    # realloc all ids since otherwise they are duplicates of the originals
    renames = [(node['id'], create_id()) for node in result.node_set_add]
    for src, dst in renames:
        rename(result.link_set_add, src, dst)
    for node, (_, new_id) in zip(result.node_set_add, renames):
        node['id'] = new_id
    return result
print('\n'.join('%30s %30s' % (d['name'].encode('utf-8').ljust(30), d['id'].encode('utf-8').ljust(30)) for d in kernel.rzdoc__search(''))) raise SystemExit if args.list_names: print('\n'.join(d['name'].encode('utf-8') for d in kernel.rzdoc__search(''))) raise SystemExit if args.delete: remove(args.delete) if args.rename_from and args.rename_to: rename(args.rename_from, args.rename_to) if args.clone: print(json.dumps(clone(args.clone).to_json_dict())) if args.create and args.create_name: with open(args.create) as fd: data = json.load(fd) create(args.create_name, Topo_Diff.from_json_dict(json.load(fd))) if args.merge_target: merge_sources = None if args.merge: merge_sources = args.merge.split(',') if args.merge_file: with open(args.merge_file) as fd: merge_sources = [noeol(line) for line in fd.readlines()] if merge_sources: merge(args.merge_target, merge_sources) if args.dump_single: dump([args.dump_single]) if args.dump_all: dump(None) if args.load: load(args.load)