Ejemplo n.º 1
0
    def new_term(cls, data) -> tuple:
        """Validate *data* and create a new Term if none exists with that name.

        Returns a ``(msg, term)`` tuple: on success ``term`` is the newly
        created ``Term``; on failure (duplicate name or database error)
        ``term`` is ``None`` and ``msg`` explains why.

        Note: the original annotation ``Dict[str, Term]`` was wrong — every
        code path returns a 2-tuple, so the annotation is corrected here.
        """
        # Raises a marshmallow ValidationError if *data* is malformed.
        valid_data = term_schema.load(data)

        # NOTE(review): the duplicate check filters by the raw name, but the
        # stored identifier is string_as_identifier(name) — if those differ,
        # a duplicate could slip through. Confirm intent before changing.
        term = Term.query.filter_by(identifier=valid_data['name']).first()
        if term:
            msg = 'Term already exist name={0}'.format(valid_data['name'])
            return msg, None

        term = Term()
        term.vocabulary_id = valid_data['vocabulary_id']
        term.identifier = string_as_identifier(valid_data['name'])
        term.description = valid_data['description']
        term.parent_id = valid_data['parent_id']
        term.data = valid_data['data']
        db.session.add(term)
        try:
            db.session.commit()
            # Link classification records only after the term has an id.
            cls._update_term_clasification(term, valid_data)
            msg = 'New Term CREATED name={0}'.format(term.identifier)
            return msg, term
        except sqlalchemyExc.SQLAlchemyError as e:
            msg = 'sqlalthemy: {0}'.format(e)
            db.session.rollback()
            return msg, None
Ejemplo n.º 2
0
    def _update_term_data(cls, term: Term, data):
        """Copy the validated fields from *data* onto *term* in place.

        *data* is expected to be a mapping with the keys ``vocabulary_id``,
        ``name``, ``description``, ``parent_id`` and ``data`` (the shape
        produced by ``term_schema.load``). Returns ``None``.

        NOTE(review): unlike ``new_term``, the identifier here is set to the
        raw ``data['name']`` without ``string_as_identifier`` — confirm
        whether that difference is intentional.
        """
        term.vocabulary_id = data['vocabulary_id']
        term.identifier = data['name']
        term.description = data['description']
        term.parent_id = data['parent_id']
        term.data = data['data']
Ejemplo n.º 3
0
    def syncronize_miar_databases(self):
        """Synchronize the contents of ``self.miar_dbs_file`` with the iroko
        database, upserting ``Term`` rows under the INDEXES vocabulary.

        The JSON file is a list of database-type entries, each with ``url``,
        ``name`` and a ``dbs`` list of child databases. Parent terms get the
        type's url/name; children additionally store the raw entry in
        ``data`` and point at the parent via ``parent_id``.

        Returns ``'success'`` when the file held data, ``'error'`` otherwise.
        """
        # TODO: crear un rdf skos a partir de lo que hay en el fichero....
        with open(self.miar_dbs_file, 'r', encoding='UTF-8') as file_dbs:
            archive = json.load(file_dbs)

        if not archive:
            return 'error'

        for archive_dbs in archive:
            # Upsert the parent (database-type) term; create-vs-update only
            # differs in whether we add it to the session.
            miar_db_type_term = Term.query.filter_by(
                identifier=archive_dbs['url']).first()
            if not miar_db_type_term:
                miar_db_type_term = Term()
                db.session.add(miar_db_type_term)
            miar_db_type_term.identifier = archive_dbs['url']
            miar_db_type_term.vocabulary_id = IrokoVocabularyIdentifiers.INDEXES.value
            miar_db_type_term.description = archive_dbs['name']
            # Flush so the parent gets an id before children reference it.
            db.session.flush()

            for archive_dbs_info in archive_dbs['dbs']:
                try:
                    identifier = archive_dbs_info['url']
                    description = archive_dbs_info['name']
                    miar_db_term = Term.query.filter_by(
                        identifier=identifier).first()
                    if not miar_db_term:
                        miar_db_term = Term()
                        db.session.add(miar_db_term)
                    miar_db_term.identifier = identifier
                    miar_db_term.vocabulary_id = IrokoVocabularyIdentifiers.INDEXES.value
                    miar_db_term.description = description
                    miar_db_term.data = archive_dbs_info
                    miar_db_term.parent_id = miar_db_type_term.id
                    db.session.flush()
                except Exception:
                    # Best-effort import: skip malformed entries (missing
                    # keys, flush failures) rather than aborting the whole
                    # synchronization. NOTE(review): was `pass` followed by
                    # an unconditional commit; skipping the commit for a
                    # failed entry is the only intended change.
                    continue
                db.session.commit()

        return 'success'