def populate_lookup_tables(self):
    """
    Seed the lookup tables with all known event types and distractor
    infos, inserting only rows that are not already present.

    :return:
    """
    logger.info(f'Populate lookup tables in {self.url}')
    with session_scope(self) as s:
        # Both lookup tables are seeded the same way; iterate them together.
        for lookup_rows in (EventType.event_types(),
                            DistractorInfo.distractor_infos()):
            for row in lookup_rows:
                enter_if_not_exists(s, row)
def init(self):
    """
    Create declarative tables and populate lookup tables.

    Creates every table bound to ``Base.metadata`` on this database's
    engine, seeds the lookup tables, and marks the database initialized.

    :return:
    """
    logger.info('Create declarative tables in {}'.format(self.url))
    Base.metadata.create_all(self.engine)
    self.populate_lookup_tables()
    self.initialized = True
def create_engine(self) -> Engine:
    """
    Initialize a database connection and return the database engine.

    :return: The newly created engine (also stored on ``self.engine``)
    """
    logger.info(f'Initialize database {self.url}')
    engine = create_engine(self.url)
    # SQLite does not enforce foreign keys by default; turn the pragma
    # on for every new connection.
    event.listen(engine, 'connect', _fk_pragma_on_connect)
    self.engine = engine
    return engine
def clear(self) -> None:
    """
    Truncate all database tables.

    .. todo:: Needs to be tested!

    :return: None
    """
    logger.info(f'Clear database {self.url}')
    with closing(self.engine.connect()) as connection:
        transaction = connection.begin()
        # Delete child tables before their parents so foreign-key
        # constraints are not violated.
        for table in reversed(Base.metadata.sorted_tables):
            connection.execute(table.delete())
        transaction.commit()
def run(self) -> None:
    """
    Open producer ports and start recording data in self.store (DataStore).

    Data is recorded until self.stop_event is triggered.

    :return: None
    """
    logger.info('Running producer process "{}"'.format(str(self)))
    with open_port(self.producer):
        # Keep polling until asked to stop
        while not self.stop_event.is_set():
            # Skip reading until recording has been started
            if not self.start_event.is_set():
                continue
            self.producer.read(queue=self.queue)
    logger.info('Stopping producer process "{}"'.format(str(self)))
def join(self, timeout=1) -> int:
    """
    Join the process.

    If the process is not alive to begin with, nothing happens.
    If the process won't shut down gracefully, it is forcefully terminated.

    :param timeout: Join timeout in seconds
    :return: Process exit code
    """
    self.stop_event.set()
    name = str(self)
    if self.is_alive():
        # First attempt a graceful join; escalate to terminate only if
        # the process is still alive afterwards.
        self._process.join(timeout)
        if self.is_alive():
            logger.error(
                'Producer process "{}" is not shutting down gracefully. '
                'Resorting to force terminate and join...'.format(name))
            self._process.terminate()
            self._process.join(timeout)
    logger.info('Producer process "{}" joined successfully'.format(name))
    return self._process.exitcode
def run(args):
    """
    Run the craniodistractor application.

    :return:
    """
    if args.enable_dummy_sensor:
        Config.ENABLE_DUMMY_SENSOR = True
    database = DefaultDatabase.SQLITE
    database.create_engine()
    machine = StateMachine(database)
    # Create and persist a new session row for this run
    with database.session_scope() as s:
        session = Session()
        s.add(session)
    machine.session = session
    logger.register_machine(machine)
    logger.info('Start state machine')
    machine.start()
    exit_code = app.exec_()
    logger.info('Stop state machine')
    machine.stop()
    return exit_code
#!/usr/bin/env python
"""
Export every table of a cranio SQLite database to CSV.

For each table in the declarative metadata, a semicolon-separated
``<table_name>.csv`` file is written next to the database file.
"""
import argparse
from pathlib import Path

import pandas as pd

from cranio.model import Database, session_scope, Base, Measurement
from cranio.utils import logger, configure_logging

parser = argparse.ArgumentParser()
parser.add_argument('path', help='Path to SQLite file (.db)', type=str)

if __name__ == '__main__':
    configure_logging()
    args = parser.parse_args()
    path = Path(args.path)
    database = Database(drivername='sqlite', database=str(path))
    database.create_engine()
    # Reading goes directly through the engine; no ORM session is needed
    # (the original opened a session_scope whose session was never used).
    for table_name, table in Base.metadata.tables.items():
        path_out = path.parent / (table_name + '.csv')
        logger.info(f'Read table {table_name} from {database.url} to {path_out}')
        df = pd.read_sql_table(table_name, con=database.engine)
        df.to_csv(path_out, sep=';', index=False)
def load_document():
    """
    Dummy function.

    Only logs that it was called; performs no real work.
    """
    logger.info('load_document() called!')
def create_document():
    """
    Dummy function.

    Only logs that it was called; performs no real work.
    """
    logger.info('create_document() called!')