def persist_graph(request, graph_id):
    """Re-read a graph's DOT file and rebuild its Node/Edge rows.

    Deletes every existing Node and Edge record belonging to the graph,
    then re-creates them from the parsed DOT file.

    Returns the rendered-graph HttpResponse on success, or ``False``
    when the DOT file cannot be read/processed (original contract kept).
    """
    import pygraphviz as P
    import networkx as N  # kept from original; unused in this view
    graph = Graph.objects.get(pk=graph_id)
    dot_path = graph.dot_file.storage.location + '/' + graph.dot_file.name
    G = P.AGraph()  # init empty graph
    try:
        G.read(dot_path)  # read file
        nodes = G.nodes()
        edges = G.edges()
        # wipe the previously persisted structure before re-importing
        Node.objects.filter(graph__id=graph_id).delete()
        Edge.objects.filter(graph__id=graph_id).delete()
        for node in nodes:
            new_node = Node()
            new_node.graph = graph
            new_node.name = node
            new_node.label = node.attr['label']
            new_node.save()
        for edge in edges:
            new_edge = Edge()
            new_edge.graph = graph
            new_edge.name = edge
            new_edge.save()
    except Exception:
        # fix: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; keep the best-effort False return for callers
        return False
    return HttpResponse(pygraphviz_graph(request, G))  # response
def post(self, request, *args, **kwargs):
    """Create a child Node under ``parent``; 400 when the name is taken.

    Returns a 201 JSON payload describing the new node on success.
    """
    parent = request.POST.get('parent')
    name = request.POST.get('name')
    # check name is free -- .exists() avoids a full COUNT(*) query
    if Node.objects.filter(parent=parent, name=name).exists():
        return HttpResponse("This name already exists", status=400)
    node = Node(parent_id=parent, name=name)
    node.save()
    if use_cacheops:
        print('invalidate')  # fix: parenthesized, valid in Python 2 and 3
        invalidate_model(Node)
        # re-fetch after cache invalidation -- assumed part of the
        # cacheops branch in the original; confirm against callers
        node = Node.objects.get(pk=node.pk)
    return HttpResponse(simplejson.dumps({
        'name': node.name,
        'icon': 'default',
        'loaded': True,
        'pk': node.pk,
        'position': list(node.parent.get_children()).index(node),
        'children': [],
        'parent': node.parent.pk if node.parent is not None else None
    }), content_type="application/json", status=201)
def create_structureNode(long_title, text="", authors=()):
    """Create and persist a structure Node with an attached Text record."""
    structure = Node(node_type=Node.STRUCTURE_NODE, title=long_title)
    structure.save()
    body = Text(node=structure, text=text)
    body.save()
    for who in authors:
        body.authors.add(who)
    body.save()
    return structure
def test_build_child_tree_path(self):
    """A saved child's tree_path is parent path + separator + zero-filled pk."""
    node = Node.objects.get(pk=6)
    child_node = Node(parent=node)
    # an unsaved node must not have a tree path yet
    self.assert_empty_tree_path(child_node)
    child_node.save()
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(
        node.tree_path + PATH_SEPARATOR + child_node.zerro_filled_pk,
        child_node.tree_path
    )
def post(self):
    """Create a Node from the posted form and attach it to the given Space."""
    if not (session and session.get('uid')):
        return jsonify({'error': 'Not logged in'})
    new_node = Node(title=request.form['title'])
    new_node.url = request.form['url']
    new_node.score = 1
    new_node.save()
    target_space = Space.objects(id=ObjectId(request.form['sid']))[0]
    target_space.nodes.append(new_node)
    target_space.save(cascade=True)
    return jsonify({'success': 1})
def create_structureNode(long_title, text="", authors=()):
    """Create a 'structureNode' Node plus its Text record and authors."""
    structure = Node()
    structure.node_type = 'structureNode'
    structure.title = long_title
    structure.save()
    content = Text()
    content.node = structure
    content.text = text
    content.save()
    for person in authors:
        content.authors.add(person)
    content.save()
    return structure
def create_textNode(long_title, text="", authors=()):
    """Create a text node (Node + Text) credited to ``authors``."""
    # NOTE(review): node_type is Node.STRUCTURE_NODE even though this is
    # create_textNode -- confirm whether a TEXT_NODE constant was intended.
    fresh = Node()
    fresh.node_type = Node.STRUCTURE_NODE
    fresh.title = long_title
    fresh.save()
    content = Text()
    content.node = fresh
    content.text = text
    content.save()
    for person in authors:
        content.authors.add(person)
    content.save()
    return fresh
def node_add():
    """Flask endpoint: create a Node from the JSON body unless the name exists."""
    payload = request.get_json()
    existing = Node.query.filter_by(name=payload.get('name')).first()
    result = {}
    if existing is not None:
        result['success'] = False
        result['message'] = "节点已存在"
    else:
        node = Node(payload)
        log(node.__dict__)
        node.save()
        result['success'] = True
        result['data'] = node.json()
    return jsonify(result)
def create_structureNode(long_title, text="", authors=(), validate=False):
    """Create a structure Node with a Text body; optionally validate first.

    Raises ValueError when validation is requested and the title is
    invalid or the text contains a heading.
    """
    if validate:
        if not valid_title.match(long_title):
            raise ValueError('Invalid title "{}"'.format(long_title))
        head = general_heading.match(text)
        if head is not None:
            raise ValueError('Headings are not allowed in text: {}'.format(
                head.group()))
    structure = Node(node_type=Node.STRUCTURE_NODE, title=long_title)
    structure.save()
    body = Text(node=structure, text=text)
    body.save()
    for who in authors:
        body.authors.add(who)
    body.save()
    return structure
def create_structureNode(long_title, text="", authors=(), validate=False):
    """Create a structure Node plus Text; optionally validate title and text."""
    if validate and not valid_title.match(long_title):
        raise ValueError('Invalid title "{}"'.format(long_title))
    if validate:
        heading_hit = general_heading.match(text)
        if heading_hit is not None:
            raise ValueError(
                'Headings are not allowed in text: {}'.format(heading_hit.group()))
    new_structure = Node(node_type=Node.STRUCTURE_NODE, title=long_title)
    new_structure.save()
    attached_text = Text(node=new_structure, text=text)
    attached_text.save()
    for writer in authors:
        attached_text.authors.add(writer)
    attached_text.save()
    return new_structure
def _create_or_add_node(self, parent, name, users):
    """Fetch-or-create a Node under ``parent``, add admins, expose it on self."""
    candidate = Node(parentnode=parent, short_name=name,
                     long_name=name.capitalize())
    try:
        candidate.full_clean()
        candidate.save()
        node = candidate
    except ValidationError:
        # already present -- use the persisted row instead
        node = Node.objects.get(parentnode=parent, short_name=name)
    # allowed roles in node are:
    for admin_name in users['admin']:
        node.admins.add(self._create_or_add_user(admin_name))
    long_names = users['ln']
    if long_names:
        node.long_name = long_names[0]
        node.full_clean()
        node.save()
    # expose the node as an attribute named after its dotted path
    attr_name = node.get_path().replace('.', '_')
    vars(self)[attr_name] = node
    return node
def get(self, request, *args, **kwargs):
    """Serve a node's children (or the root list) as jstree-style JSON."""
    if kwargs.get('pk', None) is not None:
        parent = self.get_object()
        children = parent.get_children()
    else:
        parent = None
        children = Node.objects.filter(parent=None)
    if children.count() == 0:
        # bootstrap an initial root node on an empty tree
        root = Node(name="root")
        root.save()
        children = [root]
    payload = []
    for child in children:
        entry = {
            'name': child.name,
            'icon': 'default',
            'loaded': child.is_leaf_node(),
            'pk': child.pk,
            'children': [],
            'parent': child.parent.pk if child.parent is not None else None,
        }
        if not child.is_leaf_node():
            # placeholder shown until the client expands the branch
            entry['children'] = [{'name': "Loading", 'icon': "loading"}]
        payload.append(entry)
    return HttpResponse(simplejson.dumps(payload),
                        content_type="application/json")
def setUp(self):
    """Create four city nodes and the directed edges between them."""
    ny = Node(name='new york', coordinates=Point(40, 73))
    atx = Node(name='austin', coordinates=Point(30, 97))
    sf = Node(name='san fran', coordinates=Point(37, 122))
    chi = Node(name='chicago', coordinates=Point(41, 87))
    # save in the original order so primary keys stay stable for the tests
    for city in (ny, atx, sf, chi):
        city.save()
    for src, sink in ((ny, chi), (chi, sf), (ny, atx), (atx, sf), (sf, ny)):
        Edge(node_src=src, node_sink=sink).save()
def request_entity(obj_id):
    """Fetch a Wikidata entity's label and connections, persist, return children.

    Runs three SPARQL queries concurrently (label, primary statements,
    secondary/qualifier statements), stores the result as a Node, and
    returns the ``children`` mapping.

    Raises:
        ValueError: when any of the queries fails (entity missing/invalid).
    """
    print('REQUESTING', obj_id)
    label_query = """PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX wd: <http://www.wikidata.org/entity/> select * where { """ + obj_id + """ rdfs:label ?label . FILTER (langMatches( lang(?label), "EN" ) ) } LIMIT 1"""
    primary_query = """SELECT ?propertyLabel ?entity_ { VALUES (?company) {(""" + obj_id + """)} ?company ?p ?statement . ?statement ?entity ?entity_ . ?property wikibase:claim ?p. ?property wikibase:statementProperty ?entity. SERVICE wikibase:label { bd:serviceParam wikibase:language "en" } FILTER(REGEX(STR(?entity_), "Q[0-9]*$")) }"""
    secondary_query = """SELECT ?propertyLabel ?entity_ { VALUES (?company) {(""" + obj_id + """)} ?company ?p ?statement . ?statement ?ps ?ps_ . ?wd wikibase:claim ?p. ?wd wikibase:statementProperty ?ps. ?statement ?entity ?entity_ . ?property wikibase:qualifier ?entity . SERVICE wikibase:label { bd:serviceParam wikibase:language "en" } FILTER(REGEX(STR(?entity_), "Q[0-9]*$")) }"""
    children = {}
    node_name = []  # did this to get a mutable data type
    # makes the 3 requests asyncronously to get the label and the
    # primary and secondary connections
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    coroutines = [
        get(label_query, node_name, True),
        get(primary_query, children),
        get(secondary_query, children)
    ]
    try:
        results = loop.run_until_complete(asyncio.gather(*coroutines))
    finally:
        loop.close()  # fix: the loop was never closed (resource leak)
    # makes sure everything went well
    for x in results:
        if not x:
            # fix: `raise "..."` raises a string, which is a TypeError in
            # Python 3 -- exceptions must derive from BaseException
            raise ValueError("Node doesn't effectively exist")
    # saves the node to the database
    n = Node(wid=obj_id, label=node_name[0], children=children)
    n.save()
    return children
def create_slot(short_title):
    """Create, persist and return a slot-type Node titled ``short_title``."""
    new_slot = Node(node_type=Node.SLOT, title=short_title)
    new_slot.save()
    return new_slot
def create_from_path(path):
    """
    Create a Node, Subject, Period, Assignment or AssignmentGroup from
    ``path``.

    Examples::

        assignmentgroup = create_from_path(
            'ifi:inf1100.spring05.oblig1.student1,student2')
        oblig1 = create_from_path(
            'ifi:inf1100.spring05.oblig1')
    """
    # "nodes:rest" -- everything before ':' is a dotted chain of Nodes
    split = path.split(':', 1)
    nodes = split[0].split('.')
    for nodename in nodes:
        node = Node(short_name=nodename, long_name=nodename.capitalize())
        try:
            node.clean()
            node.save()
        except:
            # NOTE(review): bare except used as get-or-create throughout this
            # function; it also hides unrelated failures -- confirm intended
            node = Node.objects.get(short_name=nodename)
        last = node
    if len(split) != 2:
        return last
    pathsplit = split[1].split('.')
    # Subject
    subjectname = pathsplit[0]
    subject = Subject(parentnode=node, short_name=subjectname,
                      long_name=subjectname.capitalize())
    try:
        subject.clean()
        subject.save()
    except:
        subject = Subject.objects.get(short_name=subjectname)
    last = subject
    # Period
    if len(pathsplit) > 1:
        periodname = pathsplit[1]
        # period spans from "now" to ten days from now
        period = Period(parentnode=subject, short_name=periodname,
                        long_name=periodname.capitalize(),
                        start_time=datetime.now(),
                        end_time=datetime.now() + timedelta(10))
        try:
            period.clean()
            period.save()
        except:
            period = Period.objects.get(parentnode=subject,
                                        short_name=periodname)
        last = period
    # Assignment
    if len(pathsplit) > 2:
        assignmentname = pathsplit[2]
        assignment = Assignment(parentnode=period, short_name=assignmentname,
                                long_name=assignmentname.capitalize(),
                                publishing_time=datetime.now())
        # note: clean() outside the try here, unlike the blocks above
        assignment.clean()
        try:
            assignment.save()
        except:
            assignment = Assignment.objects.get(parentnode=period,
                                                short_name=assignmentname)
        last = assignment
    # Candidates
    if len(pathsplit) > 3:
        usernames = pathsplit[3].split(',')
        users = []
        for u in usernames:
            user = User(username=u)
            try:
                user.save()
            except:
                user = User.objects.get(username=u)
            users.append(user)
        assignment_group = AssignmentGroup(parentnode=assignment)
        assignment_group.clean()
        assignment_group.save()
        for user in users:
            assignment_group.candidates.add(Candidate(student=user))
        last = assignment_group
    # returns the deepest object that was created/fetched
    return last
def test_skip_tree_path(self):
    """Saving with skip_tree_path=True leaves tree_path empty."""
    node = Node()
    node.save(skip_tree_path=True)
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual('', node.tree_path)
def main(username):
    """Sync a redditor's comments into Node storage, rebuild the Markov
    corpus for that user, and print ten generated sentences."""
    r = praw.Reddit(user_agent='trollolol v0.1')
    r.config.decode_html_entities = True
    m = MarkovChain('markov-data/%s.chain' % username)
    last_comment = None
    try:
        last_comment = Node.objects(
            username=username).order_by('-created').first()
        if last_comment:
            print("Checking for new messages.")
            # fetch only comments newer than the last one we stored
            comments = r.get_redditor(username).get_comments(
                limit=500, params={'after': last_comment.node_id})
        else:
            # bare raise used as a goto into the except branch below
            raise
    except:
        print("No messages fetched yet, doing inital import")
        comments = r.get_redditor(username).get_comments(limit=500)
    for comment in comments:
        try:
            node = Node.objects.get(node_id=comment.name)
        except:
            # not stored yet -- persist it
            node = Node(node_id=comment.name, parent_id=comment.parent_id,
                        body=comment.body, created=comment.created,
                        username=username)
            node.save()
    # backfill: also look for comments older than the earliest stored one
    first_comment = Node.objects(
        username=username).order_by('+created').first()
    if first_comment:
        print("Checking for messages before %s." % first_comment.node_id)
        comments = r.get_redditor(username).get_comments(
            limit=500, params={'before': first_comment.node_id})
        for comment in comments:
            try:
                node = Node.objects.get(node_id=comment.name)
            except:
                node = Node(node_id=comment.name, parent_id=comment.parent_id,
                            body=comment.body, created=comment.created,
                            username=username)
                node.save()
    comments = Node.objects(username=username).all()
    corpus = []
    for comment in comments:
        corpus.append(comment.body)
    shuffle(corpus)
    if len(corpus) > 0:
        print(
            "We have %i messages to work with. Building new markov corpus now."
            % len(corpus))
        m.generateDatabase(" ".join(corpus))
        print("Looking for acceptable output for first round of transforms.")
        output = []
        tries = 0
        # NOTE(review): if 100 tries pass with fewer than 10 accepted
        # results, this loop never terminates -- confirm intended
        while len(output) < 10:
            tries = tries + 1
            result = m.generateString()
            if tries < 100:
                if len(result.split(" ")) >= 10:
                    sys.stdout.write("x")
                    output.append(result)
                else:
                    sys.stdout.write(".")
        print("")
        response = ""
        for result in output:
            response = response + " " + result
        print response
    else:
        print("No comments found.")
def create_slot(short_title):
    """Build, save and return a Node of type 'slot'."""
    slot_node = Node()
    slot_node.node_type = 'slot'
    slot_node.title = short_title
    slot_node.save()
    return slot_node
def test_build_root_tree_path(self):
    """A root node's tree_path is its own pk once saved."""
    node = Node()
    # before save the path must be empty
    self.assert_empty_tree_path(node)
    node.save()
    # fix: assertEquals is a deprecated alias of assertEqual
    self.assertEqual(node.pk, int(node.tree_path))
def main(username):
    """Import a redditor's comments into Node storage, rebuild their Markov
    chain corpus, and print ten generated sentences (Python 2 module --
    note the bare `print response` near the end)."""
    r = praw.Reddit(user_agent='trollolol v0.1')
    r.config.decode_html_entities = True
    m = MarkovChain('markov-data/%s.chain' % username)
    last_comment = None
    try:
        last_comment = Node.objects(username=username).order_by('-created').first()
        if last_comment:
            print("Checking for new messages.")
            # only fetch comments newer than the newest stored one
            comments = r.get_redditor(username).get_comments(limit=500, params={'after': last_comment.node_id})
        else:
            # bare raise acts as a jump into the except branch below
            raise
    except:
        print("No messages fetched yet, doing inital import")
        comments = r.get_redditor(username).get_comments(limit=500)
    for comment in comments:
        try:
            node = Node.objects.get(node_id=comment.name)
        except:
            # unseen comment -- persist it
            node = Node(node_id = comment.name, parent_id=comment.parent_id, body=comment.body, created=comment.created, username=username)
            node.save()
    # backfill pass: fetch comments older than the earliest stored one
    first_comment = Node.objects(username=username).order_by('+created').first()
    if first_comment:
        print("Checking for messages before %s." % first_comment.node_id)
        comments = r.get_redditor(username).get_comments(limit=500, params={'before': first_comment.node_id})
        for comment in comments:
            try:
                node = Node.objects.get(node_id=comment.name)
            except:
                node = Node(node_id = comment.name, parent_id=comment.parent_id, body=comment.body, created=comment.created, username=username)
                node.save()
    comments = Node.objects(username=username).all()
    corpus = []
    for comment in comments:
        corpus.append(comment.body)
    shuffle(corpus)
    if len(corpus) > 0:
        print("We have %i messages to work with. Building new markov corpus now." % len(corpus))
        m.generateDatabase(" ".join(corpus))
        print("Looking for acceptable output for first round of transforms.")
        output = []
        tries = 0
        # NOTE(review): if 100 tries elapse with fewer than 10 accepted
        # sentences, this loop spins forever -- confirm intended
        while len(output) < 10:
            tries = tries + 1
            result = m.generateString()
            if tries < 100:
                if len(result.split(" ")) >= 10:
                    sys.stdout.write("x")
                    output.append(result)
                else:
                    sys.stdout.write(".")
        print("")
        response = ""
        for result in output:
            response = response + " " + result
        print response
    else:
        print("No comments found.")
def create_from_path(path):
    """
    Create a Node, Subject, Period, Assignment or AssignmentGroup from
    ``path``.

    Examples::

        assignmentgroup = create_from_path(
            'ifi:inf1100.spring05.oblig1.student1,student2')
        oblig1 = create_from_path(
            'ifi:inf1100.spring05.oblig1')
    """
    # "nodes:rest" -- everything before ':' is a dotted chain of Nodes
    split = path.split(':', 1)
    nodes = split[0].split('.')
    for nodename in nodes:
        node = Node(short_name=nodename, long_name=nodename.capitalize())
        try:
            node.clean()
            node.save()
        except:
            # NOTE(review): bare except used as get-or-create throughout this
            # function; it also hides unrelated failures -- confirm intended
            node = Node.objects.get(short_name=nodename)
        last = node
    if len(split) != 2:
        return last
    pathsplit = split[1].split('.')
    # Subject
    subjectname = pathsplit[0]
    subject = Subject(parentnode=node, short_name=subjectname,
                      long_name=subjectname.capitalize())
    try:
        subject.clean()
        subject.save()
    except:
        subject = Subject.objects.get(short_name=subjectname)
    last = subject
    # Period
    if len(pathsplit) > 1:
        periodname = pathsplit[1]
        # period spans from "now" to ten days from now
        period = Period(parentnode=subject, short_name=periodname,
                        long_name=periodname.capitalize(),
                        start_time=datetime.now(),
                        end_time=datetime.now() + timedelta(10))
        try:
            period.clean()
            period.save()
        except:
            period = Period.objects.get(parentnode=subject,
                                        short_name=periodname)
        last = period
    # Assignment
    if len(pathsplit) > 2:
        assignmentname = pathsplit[2]
        assignment = Assignment(parentnode=period, short_name=assignmentname,
                                long_name=assignmentname.capitalize(),
                                publishing_time=datetime.now())
        # note: clean() outside the try here, unlike the blocks above
        assignment.clean()
        try:
            assignment.save()
        except:
            assignment = Assignment.objects.get(parentnode=period,
                                                short_name=assignmentname)
        last = assignment
    # Candidates
    if len(pathsplit) > 3:
        usernames = pathsplit[3].split(',')
        users = []
        for u in usernames:
            # fetch-or-create each user via the project's user model
            try:
                user = get_user_model().objects.get(shortname=u)
            except get_user_model().DoesNotExist:
                user = get_user_model().objects.create_user(username=u)
            users.append(user)
        assignment_group = AssignmentGroup(parentnode=assignment)
        assignment_group.clean()
        assignment_group.save()
        for user in users:
            assignment_group.candidates.add(Candidate(student=user))
        last = assignment_group
    # returns the deepest object that was created/fetched
    return last