def complete(self):
    """Finalize the DB-initialization step: (re)create or reuse the database,
    hand the Dao to the workflow, then advance to the next step.

    Reads widget state from ``self.toggle`` (overwrite DB?) and
    ``self.toggle2`` (import data into existing DB?).
    Raises TypeError if ``self.next_step`` is set but is not a ``Step``.
    """
    clear_output(True)
    if self.toggle.value == 'Yes':
        # User chose to overwrite: delete the db file and rebuild schema.
        os.remove(self.dbpath)
        self.dao = Dao(self.db_config, sqlalchemy_dao.POOL_DISABLED)
        self.createSQLTables()
        self.overwrite = True
        self.need_import = True
    else:
        # Reuse the existing database file.
        self.dao = Dao(self.db_config, sqlalchemy_dao.POOL_DISABLED)
        if self.toggle2.value == 'Yes':
            self.need_import = True
        self.overwrite = False
    # Share the Dao with the owning workflow.
    # NOTE(review): reconstructed from a collapsed line — this statement is
    # assumed to run on both branches (method level), not only in the else.
    self.workflow.dao = self.dao
    if self.next_step is not None:
        if isinstance(self.next_step, Step):
            if self.workflow is not None:
                self.workflow.updateStatus(self.next_step.pos_id)
            self.next_step.start()
        else:
            raise TypeError(
                'Type error for ' + self.name + '\'s next_step. Only Step can be the next_step, where its next_step is ' + str(
                    type(self.next_step)))
    else:
        print("next step hasn't been set.")
    pass
def index():
    """Build a Whoosh full-text index over the 'n2c2_sents' documents.

    Reads every Document row with DATASET_ID == 'n2c2_sents' from the sqlite
    database and writes (DOC_ID, TEXT) into the index directory
    ``data/whoosh_idx/n2c2_sents`` (directory must already exist;
    ``create_in`` replaces any previous index there).
    """
    schema = Schema(DOC_ID=NUMERIC(stored=True), TEXT=TEXT(stored=True))
    ix = create_in("data/whoosh_idx/n2c2_sents", schema)
    writer = ix.writer()
    dao = Dao('sqlite+pysqlite:///data/test.sqlite',
              sqlalchemy_dao.POOL_DISABLED)
    with dao.create_session() as session:
        doc_iter = session.query(Document).filter(
            Document.DATASET_ID == 'n2c2_sents')
        for doc in doc_iter:
            writer.add_document(DOC_ID=doc.DOC_ID, TEXT=doc.TEXT)
        # Commit once after all rows are added; whoosh writes the segment here.
        writer.commit()
def testRBLoop(self):
    """Smoke-test the rule-based review loop: build a minimal workflow,
    run it against the demo sqlite db, then navigate/sample a second pass.

    Fix: the intro HTML was a single-quoted string containing an unescaped
    apostrophe ("let's"), which is a SyntaxError; switched to double quotes.
    """
    logging.getLogger().setLevel(logging.WARN)
    ConfigReader()
    wf = Workflow()
    rb = ReviewRBInit(name="rb_review_init")
    wf.append(rb)
    rv = ReviewRBLoop(name='rb_review')
    wf.append(rv)
    wf.append(
        PreviousNextHTML(
            "<h2>Welcome to SmartAnno!</h2><h4>First, let's import txt data from a directory. </h4>",
            name='intro'))
    # Minimal task configuration the review steps expect.
    wf.filters = {'TypeA': ['heart'], 'TypeB': ['exam']}
    wf.types = ['TypeA', 'TypeB']
    wf.task_id = 1
    wf.umls_extended = {}
    wf.we_extended = {}
    wf.dao = Dao('sqlite+pysqlite:///data/demo.sqlite',
                 sqlalchemy_dao.POOL_DISABLED)
    wf.start()
    if len(rb.branch_buttons) == 0:
        # if no records in the db, the optional buttons won't show
        rb.sample_size_input.value = 3
        rb.complete()
        wf.start()
    print([doc.DOC_ID for doc in rb.data['docs']])
    print([
        anno.REVIEWED_TYPE for anno in wf.steps[0].data['annos'].values()
    ])
    rb.sample_size_input.value = 1
    rb.navigate(rb.branch_buttons[1])
def __init__(self):
    """Read MSSQL credentials from config and open a pyodbc-backed Dao."""
    server = get_config().get("mssql", "server")
    username = get_config().get("mssql", "username")
    password = get_config().get("mssql", "password")
    database = get_config().get("mssql", "database_name")
    # Assemble the raw ODBC connection string, then URL-encode it so it can
    # be passed through SQLAlchemy's odbc_connect query parameter.
    odbc_str = (
        f'DRIVER={{ODBC Driver 17 for SQL Server}};SERVER={server}'
        f';DATABASE={database};UID={username};PWD={password}'
    )
    params = urllib.parse.quote_plus(odbc_str)
    self.dao = Dao("mssql+pyodbc:///?odbc_connect=%s" % params)
    self.model = None
from SmartAnno.utils.KeywordsFiltering import KeywordsFiltering
from SmartAnno.gui.PreviousNextWidgets import PreviousNextHTML
from SmartAnno.utils.ReviewRBInit import ReviewRBInit
from SmartAnno.utils.ReviewRBLoop import ReviewRBLoop
from SmartAnno.utils.ReviewMLInit import ReviewMLInit
from SmartAnno.utils.ReviewMLLoop import ReviewMLLoop
from SmartAnno.models.logistic.LogisticBOWClassifiers import LogisticBOWClassifier
from SmartAnno.utils.DataSetChooser import DataSetChooser

# Script setup: configure a Workflow for the 'language' annotation task.
logging.getLogger().setLevel(logging.DEBUG)
ConfigReader('../conf/smartanno_conf.json')
wf = Workflow(config_file=ConfigReader.config_file)
wf.api_key = ConfigReader.getValue("api_key")
wf.dao = Dao('sqlite+pysqlite:///../data/test.sqlite',
             sqlalchemy_dao.POOL_DISABLED)
wf.task_name = 'language'
# Fixes: the instruction text was single-quoted but contained an unescaped
# apostrophe ("won't") -> SyntaxError; also the implicit concatenation
# '...If you' 'have...' dropped the space ("youhave"). Both corrected.
# (NOTE(review): "dateaset" typo left as-is below — cosmetic user text.)
wf.append(
    AnnotationTypeDef(
        '<h3>Annotation types:</h3><p>List all the types you want to identify below. Each type per line.<br/>If you '
        "have too many types, try set up them separately, so that you won't need to choose from a long list "
        'for each sample. </p>',
        name='types'))
wf.append(KeywordsFiltering(name='keywords'))
wf.append(
    DataSetChooser(
        name='dataset_chooser',
        description='<h4>Choose which dateaset you want to use: </h4>'))
rb = ReviewRBInit(name="rb_review_init")
wf.append(rb)
rv = ReviewRBLoop(name='rb_review', rush_rule='../conf/rush_rules.tsv')
def create_dao(self):
    """Instantiate the Dao for ``self.db_url``.

    Test databases disable connection pooling; real databases use the
    Dao defaults.
    """
    if self.is_test_db:
        self.dao = Dao(self.db_url, pool_size=POOL_DISABLED)
        return
    self.dao = Dao(self.db_url)
class DatabaseController:
    """Singleton wrapper around a sqlalchemy_dao ``Dao`` plus one shared session.

    Supports connecting to a real server (optionally selecting a database via
    ``use <db>``) or an in-memory sqlite db for tests, and creating/dropping
    databases and tables.
    """

    # Singleton instance, managed by get_instance().
    instance = None

    def __init__(self):
        self.is_test_db = None
        self.db_url = None
        self.db_name = None
        self.dao = None
        self.session = None  # for use with pure sqlalchemy
        # self.engine = None

    @classmethod
    def get_instance(cls):
        """Return the process-wide controller, creating it on first use."""
        if cls.instance is None:
            cls.instance = DatabaseController()
        return cls.instance

    def disconnect(self):
        """Close the shared session and drop the Dao reference.

        NOTE(review): self.session is not reset to None here, so get_session()
        will keep returning the closed session until reconnect() — confirm
        whether that is intended.
        """
        self.session.close()
        self.dao = None

    def connect(self, db_url, is_test_db=False, use_db=None):
        """Build the final URL, create the Dao, and select ``use_db`` if given.

        Test connections always use an in-memory sqlite URL regardless of
        ``db_url``.
        """
        self.is_test_db = is_test_db
        if self.is_test_db:
            self.db_url = 'sqlite://'
        else:
            if use_db:
                self.db_url = db_url + '/' + use_db
            else:
                self.db_url = db_url
        self.create_dao()
        self.use_database(use_db)

    def use_database(self, use_db):
        """Issue ``use <db>`` on the session and remember the db name.

        No-op when use_db is None. (``use`` is MySQL/MSSQL syntax — not valid
        for sqlite test connections.)
        """
        if use_db is not None:
            statement = text("""use {};""".format(use_db))
            self.get_session().execute(statement)
            self.db_name = use_db

    def create_dao(self):
        # Test (in-memory) databases must not pool connections.
        # if self.dao is None:
        if self.is_test_db:
            self.dao = Dao(self.db_url, pool_size=POOL_DISABLED)
        else:
            self.dao = Dao(self.db_url)

    def reconnect(self):
        """Tear down the singleton's session/Dao and rebuild against db_name."""
        dbc = DatabaseController.get_instance()
        dbc.disconnect()
        self.create_dao()
        self.use_database(self.db_name)

    def create_database(self, db_name):
        """Create ``db_name`` and set its charset to utf8 (skipped for test DBs)."""
        if not self.is_test_db:
            session = self.get_session()
            statement = text("""create database {};""".format(db_name))
            session.execute(statement)
            statement = text(
                """ALTER DATABASE {} CHARACTER SET utf8;""".format(db_name))
            session.execute(statement)

    def create_tables(self):
        """Create all ORM tables via the shared Model metadata."""
        # guarantees all tables are defined before create them.
        # the order matters
        from src.business.Person import Person
        from src.business.FaceRecord import FaceRecord
        from src.business.PersonFaceRecords import PersonFaceRecords
        from src.business.Camera import Camera
        from src.business.TensorFlowEnv import TensorFlowEnv
        from src.business.Experiment import Experiment
        from src.business.TrainConfiguration import TrainConfiguration
        from src.business.TestConfiguration import TestConfiguration
        from src.business.TrainExecution import TrainExecution
        # Model.metadata.create_all(self.engine)
        # TODO Is there better method to create all?
        Model.metadata.create_all(self.dao._engine)

    def drop_database(self, db_name):
        """Drop ``db_name``; OperationalError is logged and ignored (best-effort)."""
        from sqlalchemy.sql import text
        # memory db doesnt need to drop
        if not self.is_test_db:
            statement = text("""drop database {};""".format(db_name))
            try:
                self.get_session().execute(statement)
            except OperationalError as e:
                debug.msg('{} ... continuing'.format(str(e)))
            pass

    def get_session(self):
        """Return the lazily-created shared session."""
        if self.session is None:
            # session actually created with dao creation
            self.session = self.dao.create_session()
            # self.session.connection()
        return self.session
def initDao(self, dbfile):
    """Create a pool-less Dao from ``self.db_config`` and share it (plus the
    underlying file path) with the workflow.

    NOTE: ``dbfile`` is accepted for interface compatibility but unused;
    the connection string comes from ``self.db_config``.
    """
    dao = Dao(self.db_config, sqlalchemy_dao.POOL_DISABLED)
    self.dao = dao
    self.workflow.dao = dao
    # The sqlite file path is everything after the ':///' scheme separator.
    marker = ':///'
    path_start = self.db_config.find(marker) + len(marker)
    self.workflow.dbpath = self.db_config[path_start:]
from decimal import Decimal
import sqlalchemy_dao
from sqlalchemy import and_
from sqlalchemy_dao import Dao
from SmartAnno.utils.ConfigReader import ConfigReader
from SmartAnno.db.ORMs import Document

# Ad-hoc exploration script: load config, demonstrate rounding behavior,
# then query one named document from the n2c2_sents dataset.
cr = ConfigReader('../conf/smartanno_conf.json')
# intro = IntroStep('<h2>Welcome to SmartAnno!</h2><h4>First, let's import txt data from a directory. </h4>',
#                   name='intro')
# wf = Workflow([intro,
#                DBInitiater(name='db_initiator')])
# wf.start()
# intro.navigate(intro.branch_buttons[0])
print(18 / 4)
# Decimal(30 / 4) rounds the float 7.5 — banker's rounding gives 8.
print(round(Decimal(30 / 4)))
# round() uses round-half-to-even, so round(0.5) == 0.
print(round(0.5))
dao = Dao('sqlite+pysqlite:///../data/test.sqlite',
          sqlalchemy_dao.POOL_DISABLED)
with dao.create_session() as session:
    doc_iter = session.query(Document).filter(
        and_(Document.DATASET_ID == 'n2c2_sents',
             Document.DOC_NAME == '303_2_24')).group_by(Document.DOC_NAME)
    for doc in doc_iter:
        print(doc)