def GetConcept(concept, lang): strings = [] if not Concept.exists(concept, lang): print '{' print '\tword = "%s",' % concept print '\terror = "Word not found!",' print '}' return None result = Concept.get(concept, lang) lang = result.language.name word = result.text assertions = str(result.num_assertions) relations = {} for item in result.get_assertions(): if not (item.relation.name in relations): relations[item.relation.name] = [] relations[item.relation.name].append({ '-- comment': item.__str__(), 'first': item.concept1.text, 'second': item.concept2.text, 'score': item.score, 'frequency': item.frequency.value, 'mods': '', }) print '{' print '\tword = "%s",' % word print '\tlang = "%s",' % lang print '\tassertions = %s,' % assertions for item.relation.name in relations: print '\t', item.relation.name, ' =' print '\t{' for v in relations[item.relation.name]: print '\t\t{' if v['first'] != word: print '\t\t\tfirst = "%s",' % v['first'] else: print '\t\t\tsecond = "%s",' % v['second'] if v['mods'] != '': print '\t\t\tmods = "%s",' % v['mods'] print '\t\t\tscore = %d,' % v['score'] print '\t\t\tfrequency = %d,' % v['frequency'] print '\t\t},' print '\t},' print '}' return result
def eval_assertion(request, lang, concept1, reltype, concept2):
    """Evaluate an assertion in AnalogySpace.

    Returns a dict with the left- and right-feature dot-product values for
    the (concept1, reltype, concept2) assertion in the given language.
    """
    # Alias the import so it does not shadow this view's own name locally.
    from csc.conceptnet4.analogyspace import eval_assertion as space_eval
    left = Concept.get(concept1, lang)
    right = Concept.get(concept2, lang)
    svd = get_svd_results(lang)
    lval, rval = space_eval(svd, relationtype=reltype,
                            ltext=left.text, rtext=right.text)
    return {'lfeat_val': lval, 'rfeat_val': rval}
def start_lattice(request, concept_name, lr, relation_name, fconcept_name):
    """Begin a lattice-learning session for a concept and render its graph.

    Builds the concept tree, stores the session state (overwriting any prior
    lattice-learning session), and responds with an HTML page embedding the
    rendered graphviz image and its image map.
    """
    from csc.conceptnet4.models import Concept, Feature
    root = Concept.get(concept_name, 'en')
    # Build the concept tree.
    tree_edges = root.get_tree()
    # Store the new session data, overwriting anything that was there before.
    request.session['lattice_learning'] = {
        'concept': root,
        'feature': Feature.from_tuple((lr, relation_name, fconcept_name)),
        'edges': tree_edges,
    }
    # Render the graphviz and go to the initial view.
    graph = GVGraph(as_graphviz(tree_edges), engine='dot')
    return HttpResponse('''
    <html><head><title>Test</title></head><body>
    <img src="%(src)s" usemap="#%(map_name)s">%(map)s
    </body></html>''' % dict(src='/lattice/graph_images/%s/' % graph.key,
                             map_name=graph.map_name, map=graph.map))
def normalize(part):
    """Return the normalized concept text for `part`.

    Raises InputError for feature input (contains '/') or when the concept
    is unknown. NOTE(review): `lang` comes from enclosing scope — confirm.
    """
    # Guard hoisted out of the try: InputError was never caught by the
    # DoesNotExist handler below, so behavior is unchanged.
    if '/' in part:
        raise InputError('features currently not supported')
    try:
        return Concept.get(part, lang).text
    except Concept.DoesNotExist:
        raise InputError(_('Nothing known about the concept "%s".') % part)
def update_all(self, lang="en", n=10): if isinstance(lang, Language): lang = lang.id for conceptname in all_svd_concepts(lang): concept = Concept.get_raw(conceptname, lang) print concept self.update_concept(concept, n)
def from_feature(cls, lang, feature, normalize=False):
    """Alternate constructor: build an instance from a feature string.

    When `normalize` is true the concept text is normalized via Concept.get;
    otherwise an exact-text lookup is performed.
    """
    reltype, other, slot = _split(feature)
    if normalize:
        target = Concept.get(other, lang)
    else:
        target = Concept.objects.get(language=lang, text=other)
    relation = Relation.objects.get(name=reltype)
    return cls(lang, relation, target, slot)
def similar_concepts(request, lang, concept_name):
    """Render the concepts most similar to `concept_name`.

    Honors an optional `count` GET parameter (default 10) limiting how many
    similarity pairs are rendered; falls back to a no-info page when the
    concept does not exist.
    """
    # Default to retrieving 10 items.
    language = get_language(lang)
    count = int(request.GET.get('count', 10))
    try:
        concept = Concept.get(concept_name, language)
    except Concept.DoesNotExist:
        return respond_with('commonsense/concept_noinfo.html', request,
                            {'lang': lang, 'concept_name': concept_name})
    # BUG FIX: `count` was parsed but never applied; slice the queryset so
    # the requested limit takes effect (matching the predict_features view).
    similarities = concept.similarities.values_list('concept2__text',
                                                    'score')[:count]
    return respond_with('commonsense/_weighted_concepts.html', request,
                        dict(similarities=similarities, lang=lang))
def predict_features(request, lang, concept_name):
    """Render up to `count` non-obsolete predicted features for a concept.

    Pulls candidates from both the left and right yes/no-question sets,
    drops obsolete predictions, and renders the best `count` of them.
    """
    count = int(request.GET.get('count', 5))
    language = get_language(lang)
    try:
        concept = Concept.get(concept_name, language)
    except Concept.DoesNotExist:
        return respond_with('commonsense/concept_noinfo.html', request,
                            {'lang': lang, 'concept_name': concept_name})
    candidates = []
    for side in (concept.left_ynq, concept.right_ynq):
        candidates.extend(side.select_related('surface1', 'surface2')[:count])
    predictions = sorted(p for p in candidates if not p.obsolete())[:count]
    return respond_with('commonsense/_predictions.html', request,
                        dict(predictions=predictions, lang=lang))
def denormalize(concept_text):
    '''
    Returns the canonical denormalized (user-visible) form of a concept,
    given its normalized text. Accepts either a plain text string (language
    defaults to 'en') or a (text, lang) tuple; non-English results are
    suffixed with "[lang]".
    '''
    from csc.conceptnet4.models import Concept
    if isinstance(concept_text, tuple):
        text, lang = concept_text
    else:
        text, lang = concept_text, 'en'
    try:
        result = Concept.get_raw(text, lang).canonical_name.lower()
    except Concept.DoesNotExist:
        # Unknown concept: fall back to the normalized text itself.
        result = text
    if lang == 'en':
        return result
    return '%s [%s]' % (result, lang)
def concept(request, lang, concept_name):
    '''View for displaying a concept, specified by text.'''
    language = get_language(lang)
    try:
        concept = Concept.get(concept_name, language)
    except Concept.DoesNotExist:
        return respond_with('commonsense/concept_noinfo.html', request,
                            {'lang': lang, 'concept_name': concept_name})
    raw = (concept.raw_assertions_no_dupes()
                  .select_related('surface1', 'surface2', 'frame'))
    context = dict(concept_name=concept_name, concept=concept, lang=lang,
                   user=request.user, language=language, raw_assertions=raw)
    return respond_with('commonsense/concept.html', request, context)
def concept(request, lang, concept_name):
    '''View for displaying a concept, specified by text.'''
    # Delegate to the JSON view when an explicit format is requested.
    if 'format' in request.GET:
        from commons.app.json import concept_info
        return concept_info(request, lang, concept_name)
    lang = get_language(lang)
    try:
        target = Concept.get(concept_name, lang)
    except Concept.DoesNotExist:
        return respond_with('commons/concept_noinfo.html', request,
                            {'lang': lang.id, 'concept_name': concept_name})
    per_page = int(request.GET.get('perpage', 8))
    return object_list(request, target.get_assertions(),
                       paginate_by=per_page,
                       template_name='commons/concept.html',
                       template_object_name='assertion',
                       extra_context={'concept_name': concept_name})
def _concept_relations(request, lang, concept, filter='all'):
    """Return {'assertions': [...]} dicts for a concept's assertions.

    `filter` selects the direction ('all', 'fwd', 'rev'); the optional
    `types` GET parameter (comma-separated relation names) and `limit`
    GET parameter further restrict the results.
    """
    types = request.GET.get('types', 'All')
    limit = int(request.GET.get('limit', 10))
    concept_obj = Concept.get(concept, lang)
    if filter == 'all':
        assertions = concept_obj.get_assertions(useful_only=True)
    elif filter == 'fwd':
        assertions = concept_obj.get_assertions_forward()
    elif filter == 'rev':
        assertions = concept_obj.get_assertions_reverse()
    else:
        raise TypeError('unknown concept_relations filter: %s' % (filter,))
    if types != 'All':
        import operator
        wanted = [Relation.objects.get(name=t) for t in types.split(',')]
        # OR together one Q per requested relation.
        query = reduce(operator.or_, [Q(relation=rel) for rel in wanted])
        assertions = assertions.filter(query)
    return {'assertions': [assertion_to_dict(a) for a in assertions[:limit]]}
def GetConcept(concept, lang): strings = [] if not Concept.exists(concept, lang): print '{' print '\tword = "%s",' % concept print '\terror = "Word not found!",' print '}' return None result = Concept.get(concept, lang) lang = result.language.name word = result.text assertions = str(result.num_assertions) relations = {} for item in result.get_assertions(): if not (item.relation.name in relations): relations[item.relation.name] = [] relations[item.relation.name].append( { '-- comment': item.__str__(), 'first': item.concept1.text, 'second': item.concept2.text, 'score': item.score, 'frequency': item.frequency.value, 'mods': '', }) print '{' print '\tword = "%s",' % word print '\tlang = "%s",' % lang print '\tassertions = %s,' % assertions for item.relation.name in relations: print '\t', item.relation.name, ' =' print '\t{' for v in relations[item.relation.name]: print '\t\t{' if v['first'] != word: print '\t\t\tfirst = "%s",' % v['first'] else: print '\t\t\tsecond = "%s",' % v['second'] if v['mods'] != '': print '\t\t\tmods = "%s",' % v['mods'] print '\t\t\tscore = %d,' % v['score'] print '\t\t\tfrequency = %d,' % v['frequency'] print '\t\t},' print '\t},' print '}' return result
def concept_info(request, lang, concept):
    """Return a plain-dict (JSON-ready) representation of `concept`."""
    found = Concept.get(concept, lang)
    return concept_to_dict(found)
# Relation names grouped by the grammatical role the concept plays in them.
features_exists = ['IsA', 'DefinedAs', 'SymbolOf', 'MadeOf', 'AtLocation',
                   'CreatedBy', 'LocatedNear', 'PartOf', 'HasPrerequisite',
                   'HasProperty', 'ConceptuallyRelatedTo']
features_actions=['Causes', 'Desires', 'HasSubevent', 'HasFirstSubevent',
                  'HasLastSubevent', 'CausesDesire', 'CapableOf',
                  'InheritsFrom']
# NOTE(review): 'ObstructedBy' appears twice here — possibly unintentional.
features_asobject = ['UsedFor', 'MotivatedByGoal', 'ObstructedBy',
                     'CreatedBy', 'ReceivesAction', 'ObstructedBy']
features_asemotional= [ 'Desires', 'MotivatedByGoal', 'CausesDesire']
# Accumulates report lines; presumably consumed later in the script —
# `mywords` is also defined outside this chunk.
out=[]
#do some first-order analysis on these words
out.append('*********most common assertions ************')
for w in mywords:
    try:
        concepts = Concept.get(w, 'en')
        out.append(w)
        # Show the first 15 assertions for each word.
        for a in concepts.get_assertions()[:15]:
            out.append(" " + str(a))
        out.append('')
    except:
        # NOTE(review): bare except — looks like deliberate best-effort
        # (skip words with no concept entry), but it also hides real errors.
        pass
#analyze relationships using a 2d analogy space
#absolute values
cnet = conceptnet_2d_from_db('en')
analogyspace = cnet.svd(k=100)
#normalized
cnet_norm = conceptnet_2d_from_db('en').normalized()
analogyspace2 = cnet_norm.svd(k=100)
# NOTE(review): this chunk begins mid-method — the enclosing `def` (a tensor
# loader on a class with `file_name`) is outside this view; indentation below
# is reconstructed.
        self.reload()
        print "Loading Tensor"
        # `file()` is the Python 2 builtin open; the tensor was pickled to
        # self.file_name.
        fl = file(self.file_name, 'rb')
        tensor = pickle.load(fl)
        fl.close()
        return tensor

if __name__ == '__main__':
    #--------Some example concepts to use
    #print "Getting test concepts"
    dog = Concept.get('dog', 'en').text
    leash = Concept.get('leash', 'en').text
    umb = Concept.get('umbrella', 'en').text
    cat = Concept.get('cat', 'en').text
    #pets = [dog, cat]
    #temple = Concept.get('temple', 'en').text
    # hindu = Concept.get('hindu', 'en').text
    # asia = Concept.get('asia', 'en').text
    # nepal = Concept.get('Nepal', 'en').text
    # spain = Concept.get('Spain', 'en').text
    #test = [temple, asia, nepal]
    #----------Load Tensor
    tensor = LoadTensor().load()
def graph(request):
    """Render an xdot graph of the neighborhood around the 'dog' concept."""
    from conceptnet_graph import as_graphviz, graph_around
    from csc.conceptnet4.models import Concept
    center = Concept.get('dog', 'en')
    dot_source = as_graphviz(graph_around(center))
    xdot = render_xdot(dot_source, engine='twopi')
    return HttpResponse(xdot, mimetype='text/plain')
from csc.conceptnet4.models import RawAssertion, Concept, Assertion,\
    SurfaceForm
from django.db import transaction

# One-off data repair: merge the 'people' concept's surface forms into
# 'person', then rebuild the affected raw assertions.
people = Concept.get('people', 'en')
person = Concept.get('person', 'en')

@transaction.commit_on_success
def fix_all():
    """Repoint every 'people' surface form at 'person' in one transaction."""
    for peopleform in people.surfaceform_set.all():
        print peopleform
        peopleform.concept = person
        # Save before rebuilding so update_assertion sees the new concept.
        peopleform.save()
        for raw in RawAssertion.objects.filter(surface1=peopleform):
            print raw.update_assertion()
        for raw in RawAssertion.objects.filter(surface2=peopleform):
            print raw.update_assertion()

if __name__ == '__main__':
    fix_all()
from csc.conceptnet4.models import Concept
from csc.nl import get_nl

# Demo script: explore a concept in the database, then run an SVD over a
# pickled tensor to find associated concepts.

#...tap the database to explore some concept
dog = Concept.get('dog', 'en')
print ''
print "here are the key associations with %s" %'dog'
# Show the first 20 forward assertions for 'dog'.
for fwd in dog.get_assertions_forward()[:20]:
    print fwd
print ''

#this does a pca on a pickled tensor and finds things related to a word
from csc.util.persist import get_picklecached_thing
tensor = get_picklecached_thing('tensor.gz')
#runs the svd
svd = tensor.svd(k=100)

#find similar concepts to a word
myword = 'teach'
print myword
# Rank concepts by dot product with the weighted U-vector of `myword`.
most_associated = svd.u_dotproducts_with(svd.weighted_u_vec(myword)).top_items(10)
print ''
print 'these words are most associated with %s' %myword
for m in most_associated:
    print m
#predict properties of a word
def update_concept(self, concept, n=10):
    """Store the top-n similarity scores for `concept`, then prune extras."""
    lang_id = concept.language.id
    for target, score in similarities_for_concept(lang_id, concept.text, n):
        other = Concept.get_raw(target, lang_id)
        sim, created = self.get_or_create(concept1=concept, concept2=other)
        sim.score = makeDecimal(score)
        sim.save()
    self.prune_concept(concept)