def init_data(*args, **kwargs):
    if not tables_need_copy and not exec_sqls:
        return
    dst_session = orm.get_maker(dst_engine)()
    src_session = orm.get_maker(src_engine)()
    if tables_need_copy:
        count = 0
        # validate every requested table before copying anything
        for table in tables_need_copy:
            if table not in metadata.tables:
                raise AcceptableError('Table %s not in source database' % table)
        for table_name in tables_need_copy:
            table = metadata.tables[table_name]
            # get row count from table
            count += src_session.query(sa.func.count("*")).select_from(table).scalar()
            if count >= MAX_COPY_ROW:
                raise exceptions.CopyRowOverSize('Copy from table %s failed, '
                                                 'too many rows copied' % table_name)
            # build a query in src database
            query = src_session.query(table)
            with dst_session.begin():
                for row in query:
                    # execute insert sql on dst database
                    dst_session.execute(table.insert(row))
    if exec_sqls:
        with dst_session.begin():
            for sql in exec_sqls:
                dst_session.execute(sql)
    src_session.close()
    dst_session.close()
def build_session(connection):
    if connection.startswith('mysql'):
        engine = create_engine(sql_connection=connection,
                               converter_class=SimpleFlowConverter,
                               logging_name='taskflow')
        session_maker = get_maker(engine=engine)
        session = session_maker()
    elif connection.startswith('sqlite'):
        SimpleFlowSqliteConverter()
        engine = create_engine(sql_connection=connection,
                               logging_name='taskflow')
        models.SimpleFlowTables.metadata.create_all(engine)
        session_maker = get_maker(engine)
        session = session_maker()
    else:
        raise TypeError('Connection type error for simpleflow')
    return session
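# A minimal usage sketch for build_session above (the connection URLs are
# illustrative assumptions, not values from the original code). Only
# connection strings starting with 'mysql' or 'sqlite' are accepted;
# anything else raises TypeError.
#
#   session = build_session('sqlite:////tmp/simpleflow.db')
#   session = build_session('mysql://root:secret@127.0.0.1:3306/simpleflow')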
def start(self):
    if not self.started:
        with self.lock:
            # double check under the lock so concurrent callers
            # do not build the engines twice
            if self.started:
                return
            # use mysqlconnector as connect driver
            self._writer_engine = engines.create_engine(
                'mysql+mysqlconnector://' + self.conf.connection,
                logging_name=self.name,
                debug=self.conf.debug,
                thread_checkin=False,
                idle_timeout=self.conf.idle_timeout,
                max_pool_size=self.conf.max_pool_size,
                max_overflow=self.conf.max_overflow,
                pool_timeout=self.conf.pool_timeout,
                mysql_sql_mode=self.conf.mysql_sql_mode,
                max_retries=self.conf.max_retries,
                retry_interval=self.conf.retry_interval,
                **self.connection_kwargs)
            self._writer_maker = orm.get_maker(engine=self._writer_engine)
            if self.conf.slave_connection:
                # a slave connection is configured, build a separate
                # engine for reads
                self._reader_engine = engines.create_engine(
                    'mysql+mysqlconnector://' + self.conf.slave_connection,
                    logging_name=self.name,
                    thread_checkin=False,
                    idle_timeout=self.conf.idle_timeout,
                    max_pool_size=self.conf.max_pool_size,
                    max_overflow=self.conf.max_overflow,
                    pool_timeout=self.conf.pool_timeout,
                    mysql_sql_mode=self.conf.mysql_sql_mode,
                    max_retries=self.conf.max_retries,
                    retry_interval=self.conf.retry_interval,
                    **self.connection_kwargs)
                self._reader_maker = orm.get_maker(engine=self._reader_engine)
            else:
                # no slave configured, reads go through the writer engine
                self._reader_engine = self._writer_engine
                self._reader_maker = self._writer_maker
            self._started = True
def init_plugin_data(engine):
    session_maker = orm.get_maker(engine=engine)
    session = session_maker()
    with session.begin():
        # Start from 1
        # So 1-2047 can be used as gkey id
        for i in xrange(1, 2048):
            row = GkeyMap(sid=i, host=None)
            session.add(row)
            session.flush()
    for _models in models:
        if hasattr(_models, 'init_data'):
            getattr(_models, 'init_data')(session)
    session.close()
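# A minimal usage sketch for init_plugin_data above (assumption: the engine
# is built with create_engine(sql_connection=...) as in the other snippets;
# the URL is illustrative). It pre-populates GkeyMap with sid 1-2047, then
# calls init_data(session) on every model that defines it.
#
#   engine = create_engine(sql_connection='mysql://root:secret@127.0.0.1:3306/goperation')
#   init_plugin_data(engine)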
def __init__(self, conf, threadpool, infoget):
    if not os.path.exists(conf.filecache):
        os.makedirs(conf.filecache, 0o755)
    self.path = os.path.join(conf.filecache, 'files')
    self.threadpool = threadpool
    self.infoget = infoget
    self.localfiles = {}
    self.downloading = {}
    self.lock = Semaphore()
    # init sqlite session
    engine = create_engine(sql_connection='sqlite:///%s' %
                           os.path.join(conf.filecache, 'filemanager.db'),
                           logging_name='filemanager')
    if not engine.has_table(models.FileDetail.__tablename__):
        # create table if needed
        models.FileManagerTables.metadata.create_all(engine)
    session_maker = get_maker(engine)
    self.session = session_maker()
import os

from taskflow import states
from taskflow import task

from simpleservice.ormdb.orm import get_maker
from simpleservice.ormdb.argformater import connformater

FINISHED_STATES = (states.SUCCESS, states.FAILURE, states.REVERTED)

dst = {
    'host': '172.20.0.3',
    'port': 3304,
    'schema': 'simpleflow',
    'user': '******',
    'passwd': '111111'
}

sql_connection = connformater % dst
engine = create_engine(sql_connection, converter_class=SimpleFlowConverter)
session_maker = get_maker(engine=engine)
session = session_maker()


class UnfortunateTask(task.Task):
    def execute(self):
        print('executing %s' % self)
        boom = os.environ.get('BOOM')
        if boom:
            print('> Critical error: boom = %s' % boom)
            raise SystemExit()
        else:
            print('> this time not exiting')


class TestTask(task.Task):
    'passwd': '111111'
}

agent_id = random.randint(1, 100)
sql_connection = connformater % dst
engine = create_engine(sql_connection)

metadata = MetaData()
metadata.reflect(bind=engine)
for tab in metadata.tables.keys():
    print(tab)

session_maker = orm.get_maker(engine=engine)
session = session_maker()
print('init session finish')
print('~~~~~~~~~~~~~~~~~~~~~~~~~~')

print('test add request_row')
request_row = AsyncRequest()
# with session.begin():
session.add(request_row)
session.flush()
print(request_row)
print(request_row.request_id)
print('test add request_row finish')
print('~~~~~~~~~~~~~~~~~~~~~~~~~~')