def relation_tree_update(request):
    """Create or update Relation rows (and their Terms) from a JSON body.

    The request body is a JSON list of relation dicts; a dict carrying an
    'id' updates the existing row, otherwise a new row is created.
    Returns the saved relations, serialized, as a JSON response.
    """
    payload = simplejson.loads(request.body)

    def _save_term(term_data):
        # Existing term when an id is supplied, otherwise a fresh row.
        term = Term.objects.get(pk=term_data['id']) if 'id' in term_data else Term()
        term.name_en = term_data.get('name_en', '')
        term.name_la = term_data.get('name_la', '')
        term.name_cs = term_data.get('name_cs', '')
        term.system = term_data.get('system', '')
        term.bodypart = term_data.get('body_part', '')
        term.fma_id = term_data.get('fma_id', -1)
        term.save()
        return term

    saved = []
    for entry in payload:
        relation = Relation.objects.get(pk=entry['id']) if 'id' in entry else Relation()
        # An unknown relation-type identifier aborts with a 404.
        relation.type = get_object_or_404(RelationType, identifier=entry['name'])
        relation.term1 = _save_term(entry['term1'])
        relation.term2 = _save_term(entry['term2'])
        relation.labels = '|'.join(set(entry.get('labels', [])))
        relation.state = entry.get('state', 'unknown')[0]
        relation.save()
        saved.append(relation.to_serializable())
    return render_json(request, saved)
def update_relations(request):
    """Replace the relation set for one term from a JSON payload.

    Relations the client no longer lists are deleted; the remaining
    entries are created or updated in place.  Always responds with a
    JSON success message.
    """
    if request.body:
        data = simplejson.loads(request.body)
        if len(data) > 0:
            # Drop every existing relation of this term1 whose id the
            # client did not echo back in the payload.
            Relation.objects.filter(
                term1=Term.objects.get_term_from_dict(data[0], 'term1')
            ).exclude(
                id__in=[r['id'] for r in data if 'id' in r],
            ).delete()
        for r_data in data:
            if 'id' in r_data:
                # Updating by id is restricted to relations in state 'v'.
                relation = Relation.objects.get(id=r_data['id'], state='v')
            else:
                relation = Relation()
            relation.text1 = r_data['text1']
            relation.text2 = r_data['text2']
            relation.type = RelationType.objects.from_identifier(
                identifier=r_data['name'],
                source=get_source(request),)
            relation.term1 = Term.objects.get_term_from_dict(r_data, 'term1')
            relation.term2 = Term.objects.get_term_from_dict(r_data, 'term2')
            # Best-effort FMA-id enrichment of both terms from the raw texts.
            add_fma_id(relation.term1, r_data.get('text1'))
            add_fma_id(relation.term2, r_data.get('text2'))
            relation.save()
    # Message is user-facing Czech: "Changes were saved".
    response = {
        'type': 'success',
        'msg': u'Změny byly uloženy',
    }
    return render_json(request, response)
def setup_database(drop=False):
    """Connect to the database and (re)create the model tables.

    When *drop* is true, tables for every Relation, FlatEvent and
    ActivityEvent subclass are dropped first, then created again.
    """
    try:
        db.connect()
    except Exception as e:
        # Connection problems are reported but not fatal; table
        # creation below is still attempted.
        print(e)
    table_groups = [
        Relation.__subclasses__(),
        FlatEvent.__subclasses__(),
        ActivityEvent.__subclasses__(),
    ]
    if drop:
        for tables in table_groups:
            db.drop_tables(tables)
    for tables in table_groups:
        db.create_tables(tables)
def upload_file():
    """Admin file-upload view: persist an uploaded file plus its DB records."""
    if request.method == 'POST':
        upload = request.files['file']
        if upload and allowed_file(upload.filename):
            filename = secure_filename(upload.filename)
            save_path = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            # Store the document record first so its generated id is available.
            doc = Document(
                document_name=filename,
                path_to_file=save_path
            )
            database.session.add(doc)
            database.session.commit()
            # Link the current user to the new document as its loader.
            rel = Relation(
                user_id=current_user.id_user,
                document_id=doc.get_id(),
                type_relation='loader'
            )
            database.session.add(rel)
            database.session.commit()
            # The file hits the disk only after both records are committed.
            upload.save(save_path)
            return redirect(url_for('administrator_pages.upload_file', filename=filename))
    # GET (or rejected file): render the upload form.
    return render_template(
        'admin_pages/load_files.html',
        title='Страница загрузки файлов'
    )
def list_relations(request, *args, **kwargs):
    """Serialize every non-deleted Relation, optionally narrowed by the
    request's 'query' parameter."""
    queryset = Relation.objects.filter(is_deleted=False)
    if 'query' in request.REQUEST:
        queryset = Relation.filter_queryset(queryset, request.REQUEST['query'])
    payload = []
    for relation in queryset:
        payload.append({relation.sattr_prefix: relation.to_structure()})
    return construct_retval(request, payload)
def add_synonyms(self, kSynonyms, sSpace = 'global'):
    # Merge a mapping of the shape {word: {synonym: {'weight': n}}} into
    # the database: missing Space/Word rows are created, and each
    # word/synonym pair accumulates 'weight' into its relation's karma.
    # (Python 2 code — note iteritems() below.)
    try:
        oSpace = Space.objects.get(name=sSpace)
    except Space.DoesNotExist:
        # NOTE(review): a missing space falls back to creating 'global',
        # not a space named sSpace — confirm this is intended.
        oSpace = Space(name='global')
        oSpace.save()
    for sWord, kSyns in kSynonyms.iteritems():
        try:
            oWord = Word.objects.get(name=sWord, space=oSpace)
        except Word.DoesNotExist:
            oWord = Word(name=sWord, space=oSpace)
            oWord.save()
        for sSyn, kProps in kSyns.iteritems():
            try:
                oSyn = Word.objects.get(name=sSyn, space=oSpace)
            except Word.DoesNotExist:
                oSyn = Word(name=sSyn, space=oSpace)
                oSyn.save()
            # The relation is treated as symmetric: look it up in both
            # orientations before creating a new row.
            try:
                oRelation = Relation.objects.get(rel_one=oWord, rel_two=oSyn)
                oRelation.karma += kProps['weight']
                oRelation.save()
            except Relation.DoesNotExist:
                try:
                    oRelation = Relation.objects.get(rel_two=oWord, rel_one=oSyn)
                    oRelation.karma += kProps['weight']
                    oRelation.save()
                except Relation.DoesNotExist:
                    oRelation = Relation(rel_one=oWord, rel_two=oSyn, karma=kProps['weight'], space=oSpace)
                    oRelation.save()
def remove_actor_from_movie(payload):
    """Delete the actor<->movie link named by the 'actorid' and 'movieid'
    query parameters.

    Responds 404 when either id is missing or no matching relation
    exists; on success returns a JSON payload echoing both ids.
    """
    aid = request.args.get('actorid', None, int)
    mid = request.args.get('movieid', None, int)
    if aid is None or mid is None:
        abort(404)
    # BUG FIX: the original joined the two comparisons with the Python
    # `and` operator, which does not produce a SQL AND on SQLAlchemy
    # expressions.  Passing both clauses to filter() ANDs them correctly.
    relation = Relation.query.filter(
        Relation.actor_id == aid,
        Relation.movie_id == mid).one_or_none()
    if relation:
        relation.delete()
    else:
        # No such link: undo any pending session state and report 404.
        Relation.rollback()
        abort(404)
    return jsonify({
        "success": True,
        "actor_id": aid,
        "movie_id": mid
    })
def get(self): key = int((self.request.get('k'))) #event key event_key = ndb.Key(Event, key) main_event = Event.query(event_key == Event.key).get() logged_in_user = users.get_current_user() prof = Profile.query(logged_in_user.user_id() == Profile.id).get() new_relation = Relation(user_key=prof.key, event_key=main_event.key) current_relation = Relation.query( ndb.AND(Relation.user_key == prof.key, Relation.event_key == main_event.key)).get() if current_relation == None: new_relation.put() time.sleep(.5) user_events = Relation.query(Relation.event_key == event_key).fetch() profiles = [] for u in user_events: profile = u.user_key.get() profiles.append(profile) profiles.sort() template = JINJA_ENVIRONMENT.get_template('templates/event.html') self.response.write(template.render(Profile=profiles, event=main_event)) #new_relation.put() print prof print "" print main_event print current_relation print user_events
def main(request):
    """URL-shortener front page.

    On a 'shorten' POST with a valid http:// url, mints a short slug and
    stores the mapping; otherwise just renders the form.  index.html is
    rendered with 'new_url' (the short url, if one was made) and 'error'
    (a validation message, if any).
    """
    url = request.POST.get('url', '')
    action = request.POST.get('submit', '').lower()  # renamed: was shadowing builtin `type`
    new_url = ''
    error = ''
    if action:
        if url.startswith('http://'):
            if action == 'shorten':
                # The 'count' option tracks how many urls exist; its
                # incremented value seeds make_url() for a unique slug.
                try:
                    count_ref = Option.objects.get(title="count")
                except Option.DoesNotExist:
                    # FIX: was a bare `except:` that hid every error
                    # (including typos and DB failures) behind "create
                    # the counter"; only a missing row means that.
                    count_ref = Option(title="count", value=0)
                    count_ref.save()
                count_ref.value += 1
                count_ref.save()
                internal_url = make_url(count_ref.value)
                new_url = 'http://%s/%s' % (request.get_host(), internal_url)
                r = Relation(internal_url=internal_url, external_url=url)
                r.save()
        else:
            error = "Invalid url. Urls must start with \"http://\""
    t = loader.get_template('index.html')
    c = Context({
        'new_url': new_url,
        'error': error,
    })
    return http.HttpResponse(t.render(c))
def update_connections(connections, ctype):
    """Synchronize one of `entity`'s connection collections with the
    submitted list of connection dicts.

    *ctype* selects the collection: 'collaborations', 'employments' or
    'relations'.  Connections whose id is absent from the payload are
    deleted; existing ones get their details refreshed; payload entries
    carrying an 'entity_id' (and no 'id') create a new link.
    """
    # Ids the client echoed back — anything not listed here is removed.
    new_connections = [
        connection['id'] for connection in connections if connection['id']
    ]
    # BUG FIX: the original tested `ctype is 'collaborations'` etc.
    # `is` compares object identity, so the branch taken depended on
    # interpreter string interning; `==` is the correct comparison.
    # We delete via the session rather than mutating the collection
    # we are iterating.
    if ctype == 'collaborations':
        for connection in entity.collaborations:
            if connection.id not in new_connections:
                db.delete(connection)
    elif ctype == 'employments':
        for connection in entity.employments:
            if connection.id not in new_connections:
                db.delete(connection)
    elif ctype == 'relations':
        for connection in entity.relations:
            if connection.id not in new_connections:
                db.delete(connection)
    db.commit()
    for connection in connections:
        if connection['id']:
            # Existing connection: refresh its details if they changed.
            oldconnection = Connection.query.get(connection['id'])
            if oldconnection.details != connection['details']:
                oldconnection.details = connection['details']
                app.logger.debug('UPDATING CONNECTION DETAILS',
                                 oldconnection.details)
        elif 'entity_id' in connection:
            # New link from `entity` to another entity.
            otherentity = Entity.query.get(connection['entity_id'])
            if ctype == 'collaborations':
                collaboration = Collaboration(entity, otherentity,
                                              connection['details'])
                app.logger.debug('CREATED NEW COLLABORATION ',
                                 collaboration.details)
            elif ctype == 'employments':
                employment = Employment(entity, otherentity,
                                        connection['details'])
                app.logger.debug('CREATED NEW EMPLOYMENT ',
                                 employment.details)
            elif ctype == 'relations':
                relation = Relation(entity, otherentity,
                                    connection['details'])
                app.logger.debug('CREATED NEW RELATION ', relation.details)
    db.commit()
def add_actor_to_movie(payload):
    """Link an actor to a movie using 'movie_id' and 'actor_id' from the
    request's JSON body.

    Responds 409 when the link already exists (integrity error), 422 on
    any other failure; on success echoes both ids with "success": True.
    """
    try:
        # FIX: parse the body once instead of calling get_json() twice.
        body = request.get_json()
        movie_id = body["movie_id"]
        actor_id = body["actor_id"]
        relation = Relation(movie_id=movie_id, actor_id=actor_id)
        relation.insert()
    except exc.IntegrityError:
        # Duplicate actor/movie pair: roll back and signal a conflict.
        Relation.rollback()
        abort(409)
    except Exception:
        # FIX: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; Exception keeps the same 422 behavior for
        # every genuine error.
        Relation.rollback()
        abort(422)
    return jsonify({
        "movie_id": movie_id,
        "actor_id": actor_id,
        "success": True
    })
def format_db_files(part_i_files, part_ii_files):
    """Join paired part-i / part-ii CSV inputs into graph-import CSVs.

    Part-i files provide per-key predicate feature rows; part-ii files
    provide the entity/sentence rows they attach to.  Shared outputs
    (concepts, sentences, relations) are opened once; statement and
    predicate outputs are opened per file pair with an index suffix.
    """
    # print(part_ii_files)
    concept_out, sentence_out = [open_out_csv(out_path(filename))
                                 for filename in config.entity_files]
    concept_out.writerow(config.output_headers['concept'])
    sentence_out.writerow(config.output_headers['sentence'])
    in_sentence_out, has_predicate_out = [open_out_csv(out_path(filename))
                                          for filename in config.relation_files]
    in_sentence_out.writerow(config.output_headers['in_sentence'])
    has_predicate_out.writerow(config.output_headers['has_predicate'])
    # i indexes the per-pair statement/predicate output files.
    i = 0
    # Reverse-sorting both lists is what pairs each part-i file with its
    # part-ii counterpart — the two listings must correspond by name.
    for part_i_file, part_ii_file in zip(
            sorted(part_i_files, reverse=True),
            sorted(part_ii_files, reverse=True)
            ):
        print(part_i_file, part_ii_file)
        start_time = time.time()
        predicate_out = open_out_csv(
            out_path(config.predicates_file.replace('.csv','%i.csv'%i))
            )
        statement_out = open_out_csv(
            out_path(config.statements_file.replace('.csv','%i.csv'%i))
            )
        i = i + 1
        in_csv = open_in_csv( in_path(part_i_file) )
        # First column of the part-i header is the join key; the rest are
        # feature columns appended to the statement/predicate headers.
        part_i_header = next(in_csv)[1:]
        statements_header = config.output_headers['statement'] + part_i_header
        predicates_header = config.output_headers['predicate'] + part_i_header
        statement_out.writerow( statements_header )
        predicate_out.writerow( predicates_header )
        # Load the whole part-i file into memory: key -> float feature vector.
        predicates = dict()
        for line in in_csv:
            predicates[line[0]] = np.array(list(map(float,line[1:])))
        print("finished processing ", part_i_file, time.time() - start_time)
        # Generate the final output files as we iterate over the part-ii file.
        in_csv = open_in_csv(in_path(part_ii_file))
        header = next(in_csv) + part_i_header
        # Instantiate entity parser objects for each entity type
        subj = Entity( ids=config.subj_id, labels=config.subj_label,
                       props=config.subj_props, header=header )
        obj = Entity( ids=config.obj_id, labels=config.obj_label,
                      props=config.obj_props, header=header)
        sentence = Entity( ids=config.sentence_id, labels=config.sentence_label,
                           props=config.sentence_props, header=header )
        predicate = Entity( ids=config.predicate_id, labels=config.predicate_label,
                            props=part_i_header,
                            header=header )
        # Instantiate relationship(s) parser objects for each relationship type
        subj_in_sentence = Relation( enty1=subj, enty2=sentence,
                                     props=config.subj_in_sentence_props, header=header )
        obj_in_sentence = Relation( enty1=obj, enty2=sentence,
                                    props=config.obj_in_sentence_props, header=header )
        sentence_has_predicate = Relation( enty1=sentence, enty2=predicate,
                                           props=config.has_predicate_props, header=header )
        statement = Statement( enty1=subj, enty2=obj,
                               props=part_i_header, header=header )
        print("started processing ", part_ii_file, time.time() - start_time)
        # q counts processed rows for progress reporting.
        q = 0
        for line in in_csv:
            # NOTE(review): column 13 appears to hold the part-i join key —
            # confirm against the part-ii schema.
            key = line[13]
            info = line + list(predicates[key])
            # statement.update(info)
            write_entity(subj, info, concept_out)
            write_entity(obj, info, concept_out)
            # write_entity(sentence, info, sentence_out)
            # write_entity(predicate, info, predicate_out)
            # write_relation(subj_in_sentence, info, in_sentence_out)
            # write_relation(obj_in_sentence, info, in_sentence_out)
            # write_relation(sentence_has_predicate, info, has_predicate_out)
            q = q + 1
            if q%100000 == 0:
                print("%i loops "%q, time.time() - start_time)
        print("finished processing ", part_ii_file, time.time() - start_time)
        # for line in statement.fetch_statements():
        #     statement_out.writerow(line)
        # print("finished loop ", time.time() - start_time)
        # NOTE(review): exit() here stops after the first file pair —
        # presumably a temporary debugging measure; confirm before relying
        # on multi-pair processing.
        exit()