def test_create_node(self, app):
    """Node.create attaches a child under its parent and both rows match the
    expected nested-set bounds afterwards."""
    root_expected = Node(id=1, name='root', parent_id=None, lft=0, rgt=3)
    child_expected = Node(id=2, name='new node', parent_id=1, lft=1, rgt=2, tree_id=2)
    with app.app_context():
        parent = Node.get_by_id(child_expected.parent_id)
        created = Node.create(child_expected.name, parent)
        assert created == Node.get_by_id(child_expected.id)
        fetched_root = Node.get_by_id(root_expected.id)
        assert fetched_root == root_expected and created == child_expected
def test_update_note(self):
    """Updating the last node via the serializer persists the new field values.

    NOTE: the method name looks like a typo for ``test_update_node``; kept
    as-is because the runner discovers tests by name.
    """
    Node(criteria='the first').save()
    Node(criteria='the second').save()
    target = Node.objects.last()
    payload = {'predicate': 'snd', 'criteria': 'second', 'parent': 1}
    serializer = NodeSerializer(target, data=payload)
    if not serializer.is_valid():
        self.fail(serializer.errors)
    serializer.save()
    updated = Node.objects.last()
    self.assertEqual(updated.predicate, 'snd')
    self.assertEqual(updated.criteria, 'second')
    self.assertEqual(updated.parent_id, 1)
def test_post_tree_with_root_node(self):
    """POSTing a tree that references an existing node wires up root_node."""
    Node(criteria='criteria').save()
    payload = {'title': 'slug 1', 'description': 'yolo', 'root_node': 1}
    response = self.client.post('/trees/', payload)
    self.assertEqual(response.status_code, 201)
    created = Tree.objects.last()
    self.assertEqual(created.root_node.criteria, 'criteria')
def test_delete_project(self):
    """Deleting a project removes its nodes and their edges; nodes outside
    the project survive."""
    project = Project(name="Test Project")
    first = Node(name="n0", project=project)
    middle = Node(name="n1", project=project)
    last = Node(name="n2", project=project)
    outsider = Node(name="n3")
    db.session.add_all([project, first, middle, last, outsider])
    middle.add_sink(first)
    middle.add_source(last)
    db.session.commit()
    self.assertEqual(project.nodes.all(), [first, middle, last])
    project.delete()
    db.session.commit()
    self.assertEqual(Node.query.all(), [outsider])
    self.assertEqual(Edge.query.all(), [])
    self.assertEqual(Project.query.all(), [])
def node(nodeid):
    """Node detail view: show a node and let the user connect another node
    to it as a source.

    POSTing the form with a node id toggles/creates the connection:
      * id already among this node's sources -> remove that source;
      * id matches exactly one existing node -> add it as a source;
      * otherwise -> treat the submitted value as a NAME and create a brand
        new node (same project, current user) as a source.
    After any POST the client is redirected back to this view (post/redirect/get).
    """
    current_node = Node.query.filter_by(id=nodeid).first_or_404()
    form = NodeForm()
    # Re-label the generic NodeForm name field for this "connect" use case.
    form.name.label = Label(field_id="name", text="Connect two nodes!")
    if form.validate_on_submit():
        node2 = Node.query.filter_by(id=form.name.data).first()
        if node2 in current_node.sources():
            # Toggle off: the submitted node is already a source.
            current_node.remove_source(node2)
            db.session.commit()
            flash('Removed existing node as source')
        elif Node.query.filter_by(id=form.name.data).count() == 1:
            current_node.add_source(
                Node.query.filter_by(id=form.name.data).first())
            db.session.commit()
            flash('Added existing node as source')
        else:
            # No such node id: create a new node named after the input.
            project = current_node.project
            node = Node(name=form.name.data,
                        project=project,
                        created_by=current_user)
            current_node.add_source(node)
            db.session.add(node)
            db.session.commit()
            flash('Added new node as source')
        return redirect(url_for('main.node', nodeid=current_node.id))
    return render_template('node.html', node=current_node, form=form,
                           title=current_node.name)
def add_node(node_name):
    """Create a node plus its zero-distance self-edge; return the new node's id."""
    created = Node(name=node_name)
    created.save()
    self_edge = Edge(top=created, bot=created, dist=0)
    self_edge.save()
    return created.id
def test_delete_nodes(self):
    """Deleting a node removes its edges but leaves its neighbours in place."""
    middle = Node(name="Test Node")
    sink = Node(name="Test Sink")
    source = Node(name="Test Source")
    db.session.add_all([middle, sink, source])
    middle.add_sink(sink)
    middle.add_source(source)
    db.session.commit()
    self.assertNotEqual(Edge.query.all(), [])
    self.assertNotEqual(Node.query.all(), [])
    middle.delete()
    db.session.commit()
    self.assertEqual(Node.query.all(), [sink, source])
    self.assertEqual(Edge.query.all(), [])
def create(self, validated_data):
    """Create and save a Node, optionally attached to a parent.

    ``validated_data`` may contain a ``parent`` entry shaped like
    ``{'pk': <id>}``; it is popped so it is not passed to the Node
    constructor, and applied as ``parent_id`` afterwards.

    Fixes: the original tested ``if parent_id:`` (truthiness), which would
    silently drop a legal primary key of 0; we now test ``is not None``.
    """
    parent = validated_data.pop('parent', None)
    parent_id = parent['pk'] if parent is not None else None
    instance = Node(**validated_data)
    if parent_id is not None:
        instance.parent_id = parent_id
    instance.save()
    return instance
def test_create_node(self):
    """NodeSerializer creates a child node with the given predicate/criteria."""
    Node(criteria='the first').save()
    parent_pk = Node.objects.first().id
    payload = {'predicate': 'pred', 'criteria': 'crit', 'parent': parent_pk}
    serializer = NodeSerializer(data=payload)
    if not serializer.is_valid():
        self.fail(serializer.errors)
    serializer.save()
    created = Node.objects.last()
    self.assertEqual(created.predicate, 'pred')
    self.assertEqual(created.criteria, 'crit')
def post(self):
    """Create a child node under ``id`` with the given ``text``.

    Returns 400 for empty id/text, duplicate text under the same parent, or
    text not matching the word/space pattern; 201 on success.

    Fixes over the original:
      * bitwise ``|`` / ``&`` on booleans replaced with ``or`` / ``and``;
      * the duplicate/malformed path previously fell off the end returning
        ``None`` (a Flask 500) — it now returns an explicit 400;
      * duplicate scan expressed with ``any()``.
    """
    parent_id = request.json['id']
    text = request.json['text']
    if parent_id == '' or text == '':
        return Response(status=400)
    # Reject if a sibling with identical text already exists.
    duplicate = any(
        sibling.text == text
        for sibling in Node.query.filter_by(parent=parent_id).all()
    )
    # Text must be words separated by single spaces, no trailing whitespace.
    if not duplicate and re.fullmatch(r'(?:\w+\s?)+\S', text):
        u = Node(parent=parent_id, text=text)
        db.session.add(u)
        db.session.commit()
        return Response(status=201)
    return Response(status=400)
def create_complex_decision_tree():
    """Build a small mood-themed decision tree fixture: a root asking about
    mood with happy/sad/just-ok branches and two leaves under 'happy'."""
    tree = Tree(slug='slug', title='title', description='description')
    tree.save()

    root_node = Node(criteria='mood')
    root_node.save()
    tree.root_node = root_node
    tree.save()

    def _child(parent, predicate, criteria):
        # Save and return one child node of `parent`.
        child = Node(predicate=predicate, criteria=criteria, parent=parent)
        child.save()
        return child

    happy = _child(root_node, 'happy', 'how happy')
    _child(happy, 'very happy', 'i am glad to hear that')
    _child(happy, 'kind of happy', 'wish you were happier')
    _child(root_node, 'sad', 'i am sorry to hear that')
    _child(root_node, 'just ok', 'ok then!')
def test_checks_for_sinks_and_sources(self):
    """With no edges every sink/source accessor is empty; after one Edge
    (source n1 -> sink n0) the predicates on both endpoints agree."""
    n0 = Node(name="n0 (root)")
    n1 = Node(name="n1 (level 1)")
    db.session.add(n0)
    db.session.add(n1)
    db.session.commit()
    # Unconnected nodes: every edge collection and listing is empty,
    # and no directional predicate holds.
    self.assertEqual(n0.edges_sinks.all(), [])
    self.assertEqual(n1.edges_sinks.all(), [])
    self.assertEqual(n0.edges_sources.all(), [])
    self.assertEqual(n1.edges_sources.all(), [])
    self.assertEqual(n0.sources(), [])
    self.assertEqual(n1.sources(), [])
    self.assertEqual(n0.sinks(), [])
    self.assertEqual(n1.sinks(), [])
    self.assertFalse(n0.is_sink_for(n1))
    self.assertFalse(n0.is_source_for(n1))
    self.assertFalse(n0.is_connected_to(n1))
    # Wire one edge by assigning its endpoints directly.
    e = Edge()
    e.sink = n0
    e.source = n1
    db.session.add(e)
    db.session.commit()
    # n0 is the sink end, n1 the source end; is_connected_to holds both ways.
    self.assertTrue(n0.is_sink_for(n1))
    self.assertFalse(n0.is_source_for(n1))
    self.assertTrue(n0.is_connected_to(n1))
    self.assertFalse(n1.is_sink_for(n0))
    self.assertTrue(n1.is_source_for(n0))
    self.assertTrue(n1.is_connected_to(n0))
    # Both endpoints see the same Edge row from their respective side.
    self.assertEqual(n1.edges_sinks.all(), n0.edges_sources.all())
    self.assertEqual(n0.sources(), [n1])
    self.assertEqual(n0.sinks(), [])
    self.assertEqual(n1.sources(), [])
    self.assertEqual(n1.sinks(), [n0])
def test_create_tree(self):
    """TreeSerializer creates a tree whose slug is derived from the title."""
    Node(criteria='my node').save()
    payload = {'title': 'a title', 'description': 'description', 'root_node': 1}
    serializer = TreeSerializer(data=payload)
    if not serializer.is_valid():
        self.fail(serializer.errors)
    serializer.save()
    created = Tree.objects.first()
    self.assertEqual(created.slug, 'a-title')
    self.assertEqual(created.title, 'a title')
    self.assertEqual(created.description, 'description')
def test_update_tree_with_node(self):
    """A partial serializer update can attach a root node to an existing tree."""
    Tree(slug='a-slug', title='a slug').save()
    Node(criteria='yolo').save()
    root_pk = Node.objects.first().id
    serializer = TreeSerializer(Tree.objects.first(),
                                data={'root_node': root_pk},
                                partial=True)
    if not serializer.is_valid():
        self.fail(serializer.errors)
    serializer.save()
    updated = Tree.objects.first()
    self.assertEqual(updated.root_node.criteria, 'yolo')
def upsert_result(result): node = Node.query.filter_by(name=result.result_item.name).first() # create node structure if node is None: node = Node( name=result.result_item.name, timestamp=result.result_item.timestamp ) db.session.add(node) for (key, sub_result_items) in result.sub_result_items.items(): parent_node = Node( name="Season {}".format(key), timestamp=min(map(lambda n: n.timestamp, sub_result_items.values())), parent=node ) db.session.add(parent_node) for (_, sub_result_item) in sub_result_items.items(): child_node = Node( name=sub_result_item.name, timestamp=sub_result_item.timestamp, parent=parent_node ) db.session.add(child_node) # register term term = NodeTerm( name=result.search_terms, node=node ) db.session.add(term) # commit upserts db.session.commit() return term
def node_list(): """ Lists or creates nodes. Notes: Before creating we make sure there isn't already a node with the same name. We also check to see if a name key exists on the request to prevent breaking the app. Returns: (list | object): List of all nodes or a newly created node. """ # ------------------------------------------ # GET # ------------------------------------------ if request.method == 'GET': root_node = Node.query.filter_by(name='Root') return jsonify([node.serialize for node in root_node]), 200 # ------------------------------------------ # POST # ------------------------------------------ if request.method == 'POST': name = request.json.get('name') if name and isinstance(name, str) and len(name) >= 5: node = Node.query.filter_by(name=name).first() if node: return jsonify(f'Node with name {name} already exists'), 400 else: # Grab root node. root_node = Node.query.filter_by(name='Root').first() # Create new node & make relationship node = Node(name) node.parent = root_node # Add new node to session. db.session.add(node) db.session.commit() return jsonify(node.serialize), 201 else: return jsonify('Name must be minimum of 5 Alpha characters'), 400
def handle(self, *args, **options):
    """Management-command entry point: load node and edge fixtures from the
    data directory into the database.

    Nodes are created first so that edges can resolve their endpoints by pk.
    """
    edges = load(os.path.join(data_dir, 'data/edges'))
    nodes = load(os.path.join(data_dir, 'data/nodes'))
    for node in nodes:
        Node(**node).save()
    for e in edges:
        # Copy every field except the endpoint references; those are applied
        # via .set() after the edge row exists (presumably many-to-many
        # relations — confirm against the Edge model).
        edge = Edge(
            **{k: v for k, v in e.items() if k != 'start' and k != 'end'})
        start, end = Node.objects.filter(
            pk=e['start']), Node.objects.filter(pk=e['end'])
        edge.save()
        edge.start.set(start)
        edge.end.set(end)
def add():
    """Create a Node from the submitted NodeForm, then return to the index.

    The node name is sanitised to alphanumerics and underscores before saving.

    Fixes: the original only returned a response when validation succeeded;
    a GET request or an invalid POST fell off the end and returned ``None``,
    which makes Flask raise "view function did not return a response".  The
    redirect is now unconditional.
    """
    form = NodeForm()
    form.cluster.choices = [(b.id, b.name)
                            for b in Cluster.query.order_by('name').all()]
    if form.validate_on_submit():
        node_active = form.active.data
        # Strip everything except alphanumerics and underscores.
        node_name = re.sub('[^A-Za-z0-9_]+', '', form.nodename.data)
        node_cluster_id = form.cluster.data
        node = Node(name=node_name,
                    active=node_active,
                    cluster_id=node_cluster_id)
        db.session.add(node)
        db.session.commit()
    return redirect(url_for('node.index'))
def create_sub_nodes(pk):
    """
    Creates sub nodes for a specific node.

    Args:
        pk (int): Value of node id.

    Returns:
        (str): Text stating the new nodes were created.

    Fixes: the original deleted the node's existing children BEFORE checking
    that ``count`` is in the 1-15 range, so an out-of-range request destroyed
    data and then returned 400.  All validation now happens before any
    deletion.
    """
    # Make sure the node exists.
    try:
        parent = get_object(Node, pk)
    except ObjectDoesntExist as error:
        return jsonify(error.message), 404

    count = request.json.get('count')
    # Make sure they sent amount to generate.
    if not (count and isinstance(count, int)):
        return jsonify('Must send amount of children to generate'), 400
    # Validate the range BEFORE touching existing children.
    if count < 1 or count > 15:
        msg = 'Number of children to generate should be between 1-15'
        return jsonify(msg), 400

    # Delete previous sub nodes.
    Node.query.filter_by(parent=parent).delete()
    db.session.commit()

    # Create new nodes with random names within the parent's range.
    for _ in range(count):
        node = Node(name=str(randint(parent.min_num, parent.max_num)))
        node.can_have_children = False
        node.parent = parent
        db.session.add(node)
    db.session.commit()

    # Return new node tree.
    return jsonify('New nodes Created.'), 200
def project(projectid):
    """Project detail view: list the project's nodes and handle the
    add-node form.

    On a valid submit, a new Node owned by the current user is created and
    the client is redirected back here (post/redirect/get).
    """
    project = Project.query.filter_by(id=projectid).first_or_404()
    nodeform = NodeForm()
    # submit.data distinguishes this form from any others on the page.
    if nodeform.submit.data and nodeform.validate_on_submit():
        node = Node(name=nodeform.name.data,
                    project=project,
                    created_by=current_user)
        db.session.add(node)
        db.session.commit()
        flash('Added new task.')
        return redirect(url_for('main.project', projectid=project.id))
    nodes = Node.query.filter_by(project=project).all()
    return render_template('project.html', project=project,
                           nodeform=nodeform, nodes=nodes,
                           title=project.name)
def dbInsertSeedNodes(seed_nodes):
    '''
    if Node table is empty -> insert nodes from seed_nodes
    these nodes are used as backup if default_cliet is not answering

    Fixes: `== []` truthiness anti-idiom replaced with `not ...`; importance
    is now only fetched after the node has proven reachable (the original
    called getImportanceOfPubKey even when pub_key was None).
    '''
    db_node_list = db.session.query(Node).filter_by(active=True).all()
    # if no nodes in db -> add nodes from default_client_list
    if not db_node_list:
        for endpoint in seed_nodes:
            # request pub_key from that node
            nis = nemConnect(endpoint, REQUEST_TIMEOUT)
            pub_key = nis.getNodePubKey()
            # if node responds with pub_key it is added to db as active
            if pub_key is not None:
                importance = nis.getImportanceOfPubKey(pub_key)
                db.session.add(Node(pub_key=pub_key,
                                    endpoint=endpoint,
                                    active=1,
                                    importance=importance))
                db.session.flush()
        db.session.commit()
def admin_add_nodes():
    """Admin endpoint: create a Node from the posted ``newData`` payload.

    Rejects duplicates (same name + source_id) with 403; on any failure
    while inserting, rolls the session back and returns 403.

    Fixes: the bare ``except:`` also swallowed SystemExit/KeyboardInterrupt;
    narrowed to ``except Exception``.
    """
    new_data = request.get_json()['newData']
    result = Node.query.filter(
        Node.name == new_data['name'],
        Node.source_id == int(new_data['source_id'])).first()
    if result:
        return forbidden("已存在,不能重复添加")
    try:
        node = Node(type=int(new_data['type']),
                    name=new_data['name'],
                    source_id=int(new_data['source_id']),
                    topic_id=int(new_data['topic_id']),
                    num=int(new_data['num']))
        db.session.add(node)
        db.session.commit()
        return jsonify({'success': True, 'data': True})
    except Exception:
        # Best-effort: undo the partial insert and report failure.
        db.session.rollback()
        return forbidden('添加失败')
def add_node(request):
    """Create a node of the requested provider type (text / url / file) and
    relate it to a parent entity, then bounce back to the referring page.
    """
    ctx = {}
    provider = request.POST.get('new_provider')
    parent_id = request.POST.get('parent_id')
    parent_provider = request.POST.get('parent_provider')
    relation = request.POST.get('relation')
    relation_back = request.POST.get('relation_back')
    if request.method == 'POST' and relation != '':
        if provider == 'text':
            # Plain text node owned by the requesting user.
            node = Node(user=request.user, text=request.POST.get('text'))
            node.save()
            make_relation(request.user, parent_id, parent_provider, node.id,
                          provider, relation, relation_back)
        elif provider == 'url':
            url_text = request.POST.get('url')
            import hashlib
            # MD5 of the URL serves as a stable dedup key (not security-relevant).
            m = hashlib.md5()
            m.update(url_text.encode('utf-8'))
            url_hash = m.hexdigest()
            try:
                title, image = get_url_info(url_text, url_hash)
                url = Url(user=request.user, url=url_text, name=title,
                          image=image)
                url.url_hash = url_hash
                url.save()
            except:
                # Best-effort fallback: assume the Url already exists for this
                # hash.  NOTE(review): bare except also hides real fetch/save
                # errors — consider narrowing.
                url = Url.objects.get(url_hash=url_hash)
            make_relation(request.user, parent_id, parent_provider, url_hash,
                          provider, relation, relation_back)
        elif provider == 'file':
            # todo - add files
            new_path = os.path.join(parent_id, request.POST.get('name'))
            # NOTE(review): this mkdirs new_path only when it ALREADY exists
            # as a directory — looks inverted (`not os.path.isdir(new_path)`?);
            # confirm intended behavior.
            if os.path.isdir(parent_id) and os.path.isdir(new_path):
                os.mkdir(new_path)
                make_relation(request.user, parent_id, parent_provider,
                              new_path, provider, relation, relation_back)
    return HttpResponseRedirect(request.META['HTTP_REFERER'])
def index():
    """Render the node overview page: all nodes (newest first) as dict rows,
    plus the add-node form with its cluster choices populated."""
    form = NodeForm()
    form.cluster.choices = [(b.id, b.name)
                            for b in Cluster.query.order_by('name').all()]
    rows = [record.as_dict()
            for record in Node.query.order_by(Node.id.desc()).all()]
    columns = Node().serialize_columns()
    return render_template('node.html',
                           title='Node',
                           rows=rows,
                           columns=columns,
                           base_url=url_for('node.index'),
                           action_url=url_for('node.add'),
                           per_page=current_app.config['ROWS_PER_PAGE'],
                           form=form)
def test_addition_and_removal_of_sinks_and_sources(self):
    """Edges are created/removed via add/remove_sink/source, and re-adding
    an existing connection from the opposite direction is a no-op."""
    # create 4 nodes
    n0 = Node(name="n0")
    n1 = Node(name="n1")
    n2 = Node(name="n2")
    n3 = Node(name='n3')
    db.session.add_all([n0, n1, n2, n3])
    db.session.commit()
    # create the following tree:
    # n2 --> n1 --> n0
    # n3 --> n0
    n1.add_sink(n0)
    n1.add_source(n2)
    n0.add_source(n3)
    db.session.commit()
    self.assertTrue(n1.is_sink_for(n2))
    self.assertTrue(n1.is_source_for(n0))
    self.assertTrue(n2.is_source_for(n1))
    self.assertTrue(n0.is_sink_for(n1))
    self.assertEqual(n0.sinks(), [])
    self.assertEqual(n0.sources(), [n1, n3])
    self.assertEqual(n1.sinks(), [n0])
    self.assertEqual(n1.sources(), [n2])
    self.assertEqual(n2.sinks(), [n1])
    self.assertEqual(n2.sources(), [])
    self.assertEqual(n3.sinks(), [n0])
    self.assertEqual(n3.sources(), [])
    # test if clauses
    # Re-adding connections that already exist in the opposite direction
    # must not create new Edge rows: the edge set stays identical.
    e = Edge.query.all()
    n1.add_source(n0)
    n1.add_sink(n2)
    db.session.commit()
    e_test = Edge.query.all()
    self.assertFalse(n2.is_sink_for(n1))
    self.assertEqual(n1.edges_sinks.count(), 1)
    self.assertEqual(n2.edges_sources.count(), 0)
    self.assertEqual(e, e_test)
    # part removal of tree
    n1.remove_sink(n0)
    n1.remove_source(n2)
    db.session.commit()
    self.assertFalse(n1.is_sink_for(n2))
    self.assertFalse(n1.is_source_for(n0))
    self.assertFalse(n2.is_source_for(n1))
    self.assertFalse(n0.is_sink_for(n1))
    self.assertEqual(n0.sinks(), [])
    self.assertEqual(n0.sources(), [n3])
    self.assertEqual(n1.sinks(), [])
    self.assertEqual(n1.sources(), [])
    self.assertEqual(n2.sinks(), [])
    self.assertEqual(n2.sources(), [])
    self.assertEqual(n3.sinks(), [n0])
    self.assertEqual(n3.sources(), [])
def test_get_root_node(app):
    """Node.get_root_node returns the canonical root row."""
    expected = Node(id=1, name='root', parent_id=None, lft=0, rgt=1)
    with app.app_context():
        fetched = Node.get_root_node()
        assert fetched == expected
class Meta:
    # Serializer configuration for Tree: slug is generated, never written by
    # clients; depth=10 expands nested relations ten levels deep.
    # Fix: removed the stray `m = Node()` — it instantiated a model at
    # class-definition (import) time and is not an attribute any serializer
    # Meta consumes.
    model = Tree
    fields = ('slug', 'title', 'description', 'root_node')
    read_only_fields = ('slug', )
    depth = 10
def db_update_node_list(nis_reachable_nodes, nis):
    '''Synchronise the Node table with the nodes currently reachable via NIS.

    INPUT:
        nis_reachable_nodes: set of tuples (pub_key, endpoint, name, active),
            as produced by nemConnect.getReachableNodes().
        nis: nemConnect instance, used for getImportanceOfPubKeyNodePool.

    Purpose: only add new nodes, reactivate known ones, and deactivate a node
    when a different node claims its endpoint.  Deactivation for plain
    inactivity is handled elsewhere.  Invariants maintained:
      * pub_key is unique in the db;
      * endpoint is unique among ACTIVE records (inactive ones may share it).

    Scenario handling (condensed from the original design notes):
      1-2. known pub_key moved to a fresh endpoint      -> update endpoint
      3.   0-importance node rebooted with new pub_key  -> update pub_key in place
      4-5. name change / reactivation                   -> update name, active
      6-8. endpoint collisions                          -> pub_key wins; the
                                                           colliding active
                                                           record is deactivated
      9.   brand-new node                               -> insert new record
    '''
    # Drop records that are already in the db unchanged; everything left is
    # either an update of an existing row or a brand-new insert.
    db_node_dump = db.session.query(Node).all()
    nis_reachable_nodes_update = nis_reachable_nodes - node_obj_to_tupl(db_node_dump)
    # set up nemConnect
    for pub_key, endpoint, name, active in nis_reachable_nodes_update:
        # if pub_key in db: update endpoint in db record. endpoint in all
        # active nodes in db must be unique! in other functions we will be
        # sending http requests to all active nodes.
        to_update_db_record = db.session.query(Node).filter(Node.pub_key == pub_key).one_or_none()
        if to_update_db_record:
            # if pub_key and endpoint in same db record -> update name or activate
            if endpoint == to_update_db_record.endpoint:
                to_update_db_record.name, to_update_db_record.active = name, True
            # if endpoint different in db record -> update endpoint
            # BUT endpoint in all active nodes in db must be unique!
            else:
                # deactivate all other active records with same endpoint
                db.session.query(Node).filter(Node.endpoint == endpoint, Node.active == True).update({"active": False})
                to_update_db_record.endpoint = endpoint
                # name and activity could also be different
                to_update_db_record.name, to_update_db_record.active = name, True
        # if pub_key not in db
        else:
            importance = nis.getImportanceOfPubKeyNodePool(pub_key)
            # if pub key high importance = will be treated as new node
            if importance > 0.0:
                # deactivate all other active records with same endpoint
                db.session.query(Node).filter(Node.endpoint == endpoint, Node.active == True).update({"active": False})
                db.session.add(Node(pub_key=pub_key, endpoint=endpoint, name=name, active=True, importance=importance))
            # if pub key 0 importance = will be treated as reboot of existing node:
            else:
                # if endpoint in different record with 0 importance:
                # update only 0 importance record preferably active
                to_update_db_record = db.session.query(Node)\
                    .filter(Node.endpoint == endpoint, Node.importance == 0.0)\
                    .order_by(Node.active)\
                    .first()
                if to_update_db_record:
                    to_update_db_record.pub_key = pub_key
                    # name and activity could also be different
                    to_update_db_record.name, to_update_db_record.active = name, True
                else:
                    # deactivate all other active records with same endpoint
                    db.session.query(Node).filter(Node.endpoint == endpoint, Node.active == True).update({"active": False})
                    db.session.add(Node(pub_key=pub_key, endpoint=endpoint, name=name, active=True, importance=importance))
    db.session.flush()
    db.session.commit()
# parser.add_argument('--email', metavar='N', type=str, default='*****@*****.**', # help='User that make the request') # parser.add_argument('--mac', metavar='N', type=str, default='123eqw', help='MAC OF THE NODE') # parser = argparse.ArgumentParser(description='Add user to the database.') # parser.add_argument('--name', type=str, default='default_name', help='Name of the node') # parser.add_argument('--email', metavar='N', type=str, default='*****@*****.**', # help='User that make the request') # parser.add_argument('--mac', metavar='N', type=str, default='MAC1234', help='MAC OF THE NODE') parser = argparse.ArgumentParser(description='Add user to the database.') parser.add_argument('--name', type=str, default='Name-juanse-1', help='Name of the node') parser.add_argument('--email', metavar='N', type=str, default='*****@*****.**', help='User that make the request') parser.add_argument('--mac', metavar='N', type=str, default='qewwqe', help='MAC OF THE NODE') args = parser.parse_args() u = User.query.filter(User.email == args.email).first() nodes = Node.query.filter(Node.mac == args.mac).all() if not nodes: n = Node(name=args.name, user_id=u.id, mac=args.mac) db.session.add(n) db.session.commit() nodes = Node.query.filter(Node.user_id == u.id) print u.id, 'has: \n' for node in nodes: print node.name, node.mac, node.timestamp