def retrieve(self, request, *args, **kargs):
    aid = int(kargs['article'])
    plugin = kargs['pk']
    result = amcatxtas.get_result(aid, plugin)
    return Response({"results": result})
def retrieve(self, request, *args, **kargs):
    aid = int(kargs['article'])
    plugin = kargs['pk']
    result = get_result(aid, plugin)
    return Response({"results": result})
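# Hypothetical usage sketch (the viewset class name and URL kwargs are assumptions
# inferred from the retrieve() methods above, not taken from the source): call such
# an endpoint directly with DRF's APIRequestFactory and read the "results" key it
# returns.
from rest_framework.test import APIRequestFactory


def fetch_result(viewset_class, article_id, plugin):
    factory = APIRequestFactory()
    request = factory.get("/")
    view = viewset_class.as_view({"get": "retrieve"})
    response = view(request, article=str(article_id), pk=plugin)
    return response.data["results"]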
def get_context_data(self, **kwargs):
    from syntaxrules.syntaxtree import SyntaxTree
    from syntaxrules.soh import SOHServer
    ctx = super(ArticleRuleDetailsView, self).get_context_data(**kwargs)
    saf = amcatxtas.get_result(int(self.kwargs['article_id']),
                               self.object.preprocessing)
    sid = int(self.request.GET.get("sid", 1))
    sentences = list(self.get_sentences(saf))

    # Load the requested sentence into a syntax tree backed by a local SOH server
    soh = SOHServer(url="http://localhost:3030/x")
    t = SyntaxTree(soh)
    t.load_saf(saf, sid)

    # Render the original dependency tree as a base64-encoded inline PNG
    g = t.get_graphviz()
    original_tree = base64.b64encode(g.draw(format='png', prog='dot'))

    # Apply the ruleset and render the transformed tree
    ruleset = self.object.get_ruleset()
    t.apply_ruleset(ruleset)
    g = t.get_graphviz(grey_rel=True)
    processed_tree = base64.b64encode(g.draw(format='png', prog='dot'))

    ruleset_dump = json.dumps(ruleset, indent=2)
    saf_dump = json.dumps(saf, indent=2)
    ctx.update(locals())
    return ctx
def to_native(self, ruleset):
    from syntaxrules import SyntaxTree, get_struct_tokens
    if self.many is False:  # explicit compare because we don't want None
        # Get parse
        module = ruleset.preprocessing
        saf = get_result(self.context['article'], module)
        t = SyntaxTree(saf)
        if self.context['preprocess']:
            r = RuleSet.objects.get(pk=int(self.context['preprocess']))
            t.apply_ruleset(r.get_ruleset())
        # Apply rules
        t.apply_ruleset(ruleset.get_ruleset())
        return list(get_struct_tokens(t.get_structs()))
    res = super(RulesetSerializer, self).to_native(ruleset)
    return res
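# Hypothetical usage sketch (the serializer class and context keys come from the
# code above; the concrete values are assumptions): to_native() expects the article
# id and an optional preprocessing ruleset pk to be supplied via the serializer
# context.
serializer = RulesetSerializer(
    ruleset_instance,              # a RuleSet model instance (assumed to exist)
    context={'article': 12345,     # article id passed on to get_result()
             'preprocess': None},  # or a RuleSet pk to apply before the main rules
)
tokens = serializer.data           # invokes to_native() on the single instance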
def get_context_data(self, **kwargs):
    from syntaxrules.syntaxtree import SyntaxTree
    ctx = super(ArticleRuleDetailsView, self).get_context_data(**kwargs)
    saf = amcatxtas.get_result(int(self.kwargs['article_id']),
                               self.object.preprocessing)
    sid = int(self.request.GET.get("sid", 1))
    sentences = list(self.get_sentences(saf))

    t = SyntaxTree(saf, sid)
    g = t.get_graphviz()
    original_tree = base64.b64encode(g.draw(format='png', prog='dot'))

    # Optionally apply a preprocessing ruleset before the main one
    if 'preprocess' in self.request.GET:
        prep = RuleSet.objects.get(pk=int(self.request.GET['preprocess']))
        t.apply_ruleset(prep.get_ruleset())
        g = t.get_graphviz(grey_rel=True)
        preprocessed_tree = base64.b64encode(g.draw(format='png', prog='dot'))

    trees = []  # [(name, tree), ] for intermediate trees
    ruleset = self.object.get_ruleset()
    updates = [t._get_lexicon_update(ruleset['lexicon'])]
    for rule in ruleset['rules']:
        updates.append(rule)
        if rule.get('display'):
            # Flush the pending updates and render an intermediate tree
            t.apply_updates(updates)
            updates = []
            g = t.get_graphviz(grey_rel=True)
            png = base64.b64encode(g.draw(format='png', prog='dot'))
            trees.append(('After ' + rule['label'], png))

    # Apply any remaining updates and render the final tree
    t.apply_updates(updates)
    g = t.get_graphviz(grey_rel=True)
    processed_tree = base64.b64encode(g.draw(format='png', prog='dot'))

    ruleset_dump = json.dumps(ruleset, indent=2)
    saf_dump = json.dumps(saf, indent=2)
    ctx.update(locals())
    return ctx
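# Hypothetical debugging helper (not part of the source): the views above store the
# graphviz renderings as base64-encoded PNG strings (original_tree,
# preprocessed_tree, processed_tree); this writes such a string back to a file so a
# tree can be inspected outside the template.
import base64


def save_tree_png(encoded_tree, path="tree.png"):
    """Decode a base64-encoded PNG produced by get_context_data() and save it."""
    with open(path, "wb") as f:
        f.write(base64.b64decode(encoded_tree))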
def to_native(self, article):
    if article is None:
        return {}
    saf = get_result(article.pk, self.module)
    return list(self.get_xtas_results(article.pk, saf))
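# Hypothetical debugging helper (an assumption, not part of the source): fetch the
# SAF document for an article/module pair, as the serializers above do, and
# pretty-print it for inspection.
import json


def dump_saf(article_id, module):
    # get_result is the same xtas lookup used by to_native() above
    saf = get_result(article_id, module)
    print(json.dumps(saf, indent=2))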