def init_cuntries(path):
    """Create the countries vocabulary and load its terms from a JSON file.

    The JSON file is a list of objects with ``key`` and ``text`` fields.
    NOTE(review): the function name has a typo ("cuntries") but is kept
    unchanged for backward compatibility with existing callers.
    """
    country_vocab = Vocabulary()
    country_vocab.identifier = IrokoVocabularyIdentifiers.COUNTRIES.value
    country_vocab.human_name = 'Paises'
    db.session.add(country_vocab)
    db.session.commit()

    with open(path) as source:
        # One commit per term, matching the original incremental behavior.
        for entry in json.load(source):
            country_term = Term()
            country_term.identifier = entry['key']
            country_term.description = entry['text']
            country_term.vocabulary_id = country_vocab.identifier
            db.session.add(country_term)
            db.session.commit()
def new_term(cls, data):
    """Create a new Term from *data* unless one already exists.

    Args:
        data: raw input, validated through ``term_schema``.

    Returns:
        A ``(msg, term)`` tuple. ``term`` is the created ``Term`` on
        success, or ``None`` when the term already exists or the commit
        fails (the session is rolled back in that case).
    """
    valid_data = term_schema.load(data)
    # Look up by the normalized identifier — the same value that would be
    # stored below. The previous code queried by the raw name, so two
    # names normalizing to the same identifier could create duplicates.
    identifier = string_as_identifier(valid_data['name'])
    term = Term.query.filter_by(identifier=identifier).first()
    if term:
        msg = 'Term already exist name={0}'.format(valid_data['name'])
        return msg, None

    term = Term()
    term.vocabulary_id = valid_data['vocabulary_id']
    term.identifier = identifier
    term.description = valid_data['description']
    term.parent_id = valid_data['parent_id']
    term.data = valid_data['data']
    db.session.add(term)
    try:
        db.session.commit()
        cls._update_term_clasification(term, valid_data)
        msg = 'New Term CREATED name={0}'.format(term.identifier)
        return msg, term
    except sqlalchemyExc.SQLAlchemyError as e:
        msg = 'sqlalthemy: {0}'.format(e)
        db.session.rollback()
        return msg, None
def init_unesco(path):
    """Create the subjects vocabulary and populate it from the UNESCO
    thesaurus (SKOS RDF file at *path*)."""
    subjects = Vocabulary()
    subjects.identifier = IrokoVocabularyIdentifiers.SUBJECTS.value
    subjects.human_name = 'Cobertura tematica'
    db.session.add(subjects)
    db.session.commit()

    # Top-level UNESCO thesaurus domains (descriptions stay in Spanish,
    # matching the rest of the vocabulary data).
    domains = [
        ('http://vocabularies.unesco.org/thesaurus/domain1', 'Educación'),
        ('http://vocabularies.unesco.org/thesaurus/domain2', 'Ciencia'),
        ('http://vocabularies.unesco.org/thesaurus/domain3', 'Cultura'),
        ('http://vocabularies.unesco.org/thesaurus/domain4',
         'Ciencias sociales y humanas'),
        ('http://vocabularies.unesco.org/thesaurus/domain5',
         'Información y comunicación'),
        ('http://vocabularies.unesco.org/thesaurus/domain6',
         'Política, derecho y economía'),
    ]

    graph = Graph()
    graph.load(path)
    for uri, label in domains:
        domain_term = Term()
        domain_term.identifier = uri
        domain_term.description = label
        domain_term.vocabulary_id = subjects.identifier
        db.session.add(domain_term)
        db.session.commit()
        _add_group_terms(graph, uri, domain_term, subjects)
    db.session.commit()
def _update_term_data(cls, term: Term, data):
    """Copy the validated fields from *data* onto *term*.

    Mutates *term* in place; does not add it to the session or commit.
    *data* must contain 'vocabulary_id', 'name', 'description',
    'parent_id' and 'data' keys (the same shape ``term_schema`` produces).
    """
    term.vocabulary_id = data['vocabulary_id']
    term.identifier = data['name']
    term.description = data['description']
    term.parent_id = data['parent_id']
    term.data = data['data']
def add_term():
    """The create view: add a Term from the submitted form.

    On a valid POST, creates the Term, optionally links it to a group
    (only for the 'data_bases' vocabulary), commits, and redirects back
    to this view. Otherwise renders the form.
    """
    form = TermForm()
    # if the form is submitted and valid
    if form.validate_on_submit():
        new_term = Term()
        if form.name.data:
            new_term.identifier = form.name.data
        if form.description.data:
            new_term.description = form.description.data
        if form.vocabulary.data:
            new_term.vocabulary_id = form.vocabulary.data
        if form.parent.data and form.parent.data != 0:
            new_term.parent_id = form.parent.data
        db.session.add(new_term)
        # flush so new_term.id is assigned before it is referenced below
        db.session.flush()
        if new_term.vocabulary.name == 'data_bases' and form.group.data != 0:
            new_group = TermClasification()
            new_group.term_base_id = new_term.id  # id of the database term
            new_group.term_group_id = form.group.data  # id of the group term
            # chosen in the combo box
            db.session.add(new_group)
        db.session.commit()
        flash(_('Term added'), 'info')
        return redirect(url_for('iroko_curator.add_term'))
    return render_template('add_term.html', form=form)
def _add_group_terms(graph, top_group, parent, vocab):
    """Add every SKOS concept two 'member' levels below *top_group* as a
    child Term of *parent* in vocabulary *vocab*.

    Terms are added to the session but not committed here.
    """
    member = SKOS.term('member')
    for subgroup in graph.objects(subject=URIRef(top_group), predicate=member):
        for concept in graph.objects(subject=subgroup, predicate=member):
            # Take the first Spanish preferred label for the concept.
            _, label = graph.preferredLabel(subject=concept, lang='es')[0]
            concept_term = Term()
            concept_term.identifier = str(concept)
            concept_term.description = str(label)
            concept_term.parent_id = parent.id
            concept_term.vocabulary_id = vocab.identifier
            db.session.add(concept_term)
def init_vocabulary(tax, tax_name, vocab):
    """Init a vocabulary: create a Term for every entry of ``tax[tax_name]``.

    Each entry is a dict with 'name' and 'parents'; a first parent of '0'
    means "no parent". Entries are committed one by one, so a parent must
    appear (and be committed) before its children for the link to resolve.
    """
    for entry in tax[tax_name].values():
        nterm = Term()
        nterm.identifier = string_as_identifier(entry['name'])
        nterm.description = entry['name']
        nterm.vocabulary_id = vocab.identifier
        parent_key = entry['parents'][0]
        if parent_key != '0':
            # .get avoids a KeyError on a dangling parent reference
            parent_entry = tax[tax_name].get(parent_key)
            if parent_entry:
                parent_name = string_as_identifier(parent_entry['name'])
                parent = Term.query.filter_by(identifier=parent_name).first()
                # Guard: the parent term may not have been persisted yet;
                # previously this raised AttributeError on None.
                if parent is not None:
                    nterm.parent_id = parent.id
        db.session.add(nterm)
        db.session.commit()
def syncronize_miar_databases(self): """ sincroniza lo que hay en self.miar_dbs_file con la base de datos de iroko con los Term y Vocabulary """ # TODO: crear un rdf skos a partir de lo que hay en el fichero.... with open(self.miar_dbs_file, 'r', encoding='UTF-8') as file_dbs: archive = json.load(file_dbs) if archive: for archive_dbs in archive: miar_db_type_term = Term.query.filter_by( identifier=archive_dbs['url']).first() if not miar_db_type_term: miar_types = Term() miar_types.identifier = archive_dbs['url'] miar_types.vocabulary_id = IrokoVocabularyIdentifiers.INDEXES.value miar_types.description = archive_dbs['name'] db.session.add(miar_types) miar_db_type_term = miar_types else: miar_db_type_term.identifier = archive_dbs['url'] miar_db_type_term.vocabulary_id = IrokoVocabularyIdentifiers.INDEXES.value miar_db_type_term.description = archive_dbs['name'] db.session.flush() for archive_dbs_info in archive_dbs['dbs']: try: identifier = archive_dbs_info['url'] description = archive_dbs_info['name'] miar_db_term = Term.query.filter_by( identifier=identifier).first() if not miar_db_term: miar_dbs = Term() miar_dbs.identifier = identifier miar_dbs.vocabulary_id = IrokoVocabularyIdentifiers.INDEXES.value miar_dbs.description = description miar_dbs.data = archive_dbs_info miar_dbs.parent_id = miar_db_type_term.id db.session.add(miar_dbs) else: miar_db_term.identifier = identifier miar_db_term.vocabulary_id = IrokoVocabularyIdentifiers.INDEXES.value miar_db_term.description = description miar_db_term.data = archive_dbs_info miar_db_term.parent_id = miar_db_type_term.id db.session.flush() except Exception: pass db.session.commit() # db.session.flush() # miar_classification = TermClasification() # miar_classification.term_class_id = miar_db_type_term.id # miar_classification.term_clasified_id = miar_dbs.id # db.session.add(miar_classification) return 'success' else: return 'error'