def copydb(src, dst, auths=None, tables_need_copy=None, exec_sqls=None, **kwargs):
    """Copy a database schema (and optionally table data) from src to dst.

    :param src: source connection info dict, formatted through connformater
    :param dst: destination connection info dict
    :param auths: auth/privilege list forwarded to init_database
    :param tables_need_copy: names of tables whose rows must also be copied
    :param exec_sqls: list of raw sql statements to run on dst after the copy
    :returns: schema_info tuple of the source database
    :raises AcceptableError: source database missing or reflection failed
    :raises exceptions.CopyRowOverSize: too many rows requested for copy
    """
    metadata = sa.MetaData()
    src_connection = connformater % src
    dst_connection = connformater % dst
    src_engine = create_engine(src_connection, thread_checkin=False,
                               poolclass=NullPool, **kwargs)
    dst_engine = create_engine(dst_connection, thread_checkin=False,
                               poolclass=NullPool, **kwargs)
    try:
        schema_info = get_schema_info(src_engine)
        if not schema_info:
            raise AcceptableError('Get source database error: %s not exist'
                                  % src_engine.url.database)
        metadata.reflect(bind=src_engine)
    except OperationalError as e:
        raise AcceptableError(
            'Get source database info or Reflect source database error:%d, %s'
            % (e.orig[0], e.orig[1].replace("'", '')))

    def init_data(*args, **kwargs):
        # Callback invoked by init_database once tables exist on dst.
        if not tables_need_copy and not exec_sqls:
            return
        dst_session = orm.get_maker(dst_engine)()
        src_session = orm.get_maker(src_engine)()
        # FIX: the original leaked both sessions whenever the copy raised;
        # close them unconditionally.
        try:
            if tables_need_copy:
                count = 0
                # validate every requested table before copying anything
                for table in tables_need_copy:
                    if table not in metadata.tables:
                        raise AcceptableError('Table %s not in source database'
                                              % table)
                for table_name in tables_need_copy:
                    table = metadata.tables[table_name]
                    # get row count from table
                    count += src_session.query(
                        sa.func.count("*")).select_from(table).scalar()
                    if count >= MAX_COPY_ROW:
                        raise exceptions.CopyRowOverSize(
                            'Copy from table %s fail, too many rows copyed'
                            % table_name)
                    # build a query in src database
                    query = src_session.query(table)
                    with dst_session.begin():
                        for row in query:
                            # execute insert sql on dst databases
                            dst_session.execute(table.insert(row))
            if exec_sqls:
                with dst_session.begin():
                    for sql in exec_sqls:
                        dst_session.execute(sql)
        finally:
            src_session.close()
            dst_session.close()

    init_database(dst_engine, metadata, auths,
                  character_set=schema_info[1],
                  collation_type=schema_info[2],
                  init_data_func=init_data)
    return schema_info
def _create_schema(self, session, database, schema, auths, options, **kwargs):
    """Create a new schema instance on the server recorded for this database.

    Looks up host/port from the RecordDatabase row matching
    database.reflection_id, then creates the schema there.
    Yields (host, port) for the caller's context-manager protocol.
    """
    try:
        _record = model_query(session, RecordDatabase,
                              filter=RecordDatabase.record_id == int(
                                  database.reflection_id)).one()
        host = _record.host
        port = _record.port
        # NOTE(review): schema creation only happens when database.passwd
        # is set; a password-less database silently skips creation and
        # still yields -- confirm this is intended
        if database.passwd:
            connection = connformater % dict(user=database.user,
                                             passwd=database.passwd,
                                             host=host, port=port,
                                             schema=schema)
            engine = create_engine(connection, thread_checkin=False,
                                   poolclass=NullPool)
            utils.create_schema(
                engine, auths=auths,
                character_set=options.get('character_set'),
                collation_type=options.get('collation_type'),
                connection_timeout=3)
        yield host, port
    except Exception:
        # re-raised unchanged; kept for parity with the req-based variant
        raise
def _create_schema(self, session, database, schema, auths, options, **kwargs):
    """Create a new schema on the database entity resolved through req.

    Yields (local_ip, port) of the entity for the caller's
    context-manager protocol.
    """
    req = kwargs.pop('req')
    try:
        local_ip, port = self._get_entity(req, int(database.reflection_id))
        if local_ip == 'unkonwn' or port == 0:
            raise exceptions.AcceptableDbError('Database not online')
        conn_info = dict(user=database.user, passwd=database.passwd,
                         host=local_ip, port=port, schema=schema)
        engine = create_engine(connformater % conn_info,
                               thread_checkin=False, poolclass=NullPool)
        utils.create_schema(engine, auths=auths,
                            character_set=options.get('character_set'),
                            collation_type=options.get('collation_type'),
                            connection_timeout=3)
        yield local_ip, port
    except Exception:
        # full traceback only when debug logging is on
        if LOG.isEnabledFor(logging.DEBUG):
            LOG.exception('Create schema fail')
        raise
def _copy_schema(self, session, src_database, src_schema,
                 dst_database, dst_schema, auths, **kwargs):
    """Copy a schema between two online database entities.

    Resolves both entities through req, copies via utils.copydb, and
    yields (character_set, collation_type) of the copied schema.  If the
    caller raises after the copy, the freshly created destination schema
    is dropped again.
    """
    req = kwargs.pop('req')
    # FIX: _get_entity returns (local_ip, port) -- see _create_schema and
    # _delete_schema -- but the original unpacked the pair in reverse
    # order, swapping host and port in both connection-info dicts.
    src_local_ip, src_port = self._get_entity(
        req, int(src_database.reflection_id))
    dst_local_ip, dst_port = self._get_entity(
        req, int(dst_database.reflection_id))
    src_info = dict(user=src_database.user, passwd=src_database.passwd,
                    host=src_local_ip, port=src_port)
    dst_info = dict(user=dst_database.user, passwd=dst_database.passwd,
                    host=dst_local_ip, port=dst_port)
    schema_info = utils.copydb(
        src=src_info, dst=dst_info, auths=auths,
        tables_need_copy=kwargs.get('tables_need_copy'),
        exec_sqls=kwargs.get('exec_sqls'))
    try:
        yield schema_info[1], schema_info[2]
    except Exception:
        # roll back: drop the schema we just created on the destination
        engine = create_engine(connformater % dst_info,
                               thread_checkin=False, poolclass=NullPool)
        utils.drop_schema(engine, auths)
        raise
def _copy_schema(self, session, src_database, src_schema,
                 dst_database, dst_schema, auths, **kwargs):
    """Copy one schema between two recorded database servers.

    Yields (character_set, collation_type) of the copied schema; drops
    the new destination schema again if the caller raises afterwards.
    """
    def _record_of(db):
        # fetch the RecordDatabase row for a database reflection id
        return model_query(session, RecordDatabase,
                           filter=RecordDatabase.record_id ==
                           int(db.reflection_id)).one()

    src_record = _record_of(src_database)
    dst_record = _record_of(dst_database)
    src_info = dict(user=src_database.user, passwd=src_database.passwd,
                    host=src_record.host, port=src_record.port)
    dst_info = dict(user=dst_database.user, passwd=dst_database.passwd,
                    host=dst_record.host, port=dst_record.port)
    schema_info = utils.copydb(
        src=src_info, dst=dst_info, auths=auths,
        tables_need_copy=kwargs.get('tables_need_copy'),
        exec_sqls=kwargs.get('exec_sqls'))
    try:
        yield schema_info[1], schema_info[2]
    except Exception:
        # copy succeeded but the caller failed: drop the created schema
        engine = create_engine(connformater % dst_info,
                               thread_checkin=False, poolclass=NullPool)
        utils.drop_schema(engine, auths)
        raise
def init_database(db_info, metadata, auths=None, character_set=None,
                  collation_type=None, init_data_func=None, **kwargs):
    """Create a schema, build all tables from metadata and seed data.

    :param db_info: an Engine, a connection string, or a dict formatted
        through connformater
    :param metadata: sqlalchemy MetaData describing the tables to create
    :param init_data_func: optional callback(engine) run after table
        creation; any failure drops the freshly created schema
    :raises AcceptableError: on create/insert failures
    :raises exceptions.DropCreatedDBFail: cleanup drop itself failed
    """
    character_set = character_set or 'utf8'
    if isinstance(db_info, Engine):
        engine = db_info
    else:
        if isinstance(db_info, dict):
            database_connection = connformater % db_info
        else:
            database_connection = db_info
        engine = create_engine(database_connection, thread_checkin=False,
                               poolclass=NullPool, **kwargs)
    try:
        create_schema(engine, auths, character_set, collation_type)
    except OperationalError as e:
        raise AcceptableError('Create distribution database error:%d, %s'
                              % (e.orig[0], e.orig[1].replace("'", '')))
    except exceptions.DBExist as e:
        raise AcceptableError('Create distribution database error: %s'
                              % e.message)
    try:
        metadata.create_all(bind=engine)
        if init_data_func:
            init_data_func(engine)
    except Exception as e:
        # table creation or seeding failed: undo the schema we created
        try:
            drop_schema(engine)
        except Exception:
            raise exceptions.DropCreatedDBFail(
                'Create table fail, Drop database fail', str(engine.url))
        if isinstance(e, OperationalError):
            raise AcceptableError('Create table error:%d, %s'
                                  % (e.orig[0], e.orig[1].replace("'", '')))
        raise AcceptableError('Create tables or insert row error: %s'
                              % e.message)
def build_session(connection):
    """Return an ORM session for the simpleflow storage backend.

    Mysql connections get the SimpleFlowConverter; sqlite connections
    also get their tables created up front.
    """
    if connection.startswith('mysql'):
        engine = create_engine(sql_connection=connection,
                               converter_class=SimpleFlowConverter,
                               logging_name='taskflow')
    elif connection.startswith('sqlite'):
        # register the sqlite converter before building the engine
        SimpleFlowSqliteConverter()
        engine = create_engine(sql_connection=connection,
                               logging_name='taskflow')
        models.SimpleFlowTables.metadata.create_all(engine)
    else:
        raise TypeError('Connection type error for simpleflow')
    session_maker = get_maker(engine=engine)
    return session_maker()
def mysql_drop_replprivileges(master, slave, host, port):
    """Revoke the replication privileges a slave holds on the master.

    :raises InvalidArgument: when the slave host is not resolvable
    """
    if host == 'unkonwn':
        raise InvalidArgument('Slave not on line')
    conn = noschemaconn % dict(user=master.user, passwd=master.passwd,
                               host=host, port=port)
    engine = create_engine(conn, thread_checkin=False, poolclass=NullPool)
    # replication account naming/password follow the slave's identity
    auth = dict(user='******' % slave.database_id,
                passwd='repl-%s' % slave.passwd,
                source=host,
                privileges=common.REPLICATIONRIVILEGES)
    utils.drop_privileges(engine, auths=[auth, ])
def start(self):
    """Lazily build writer/reader engines and session makers.

    Uses double-checked locking on self.lock so concurrent callers only
    initialise once.  When no slave connection is configured the reader
    engine/maker alias the writer ones.
    """
    if not self.started:
        with self.lock:
            # re-check under the lock: another thread may have started us
            if self.started:
                return
            # use mysqlconnector as connect driver
            self._writer_engine = engines.create_engine(
                'mysql+mysqlconnector://' + self.conf.connection,
                logging_name=self.name,
                debug=self.conf.debug,
                thread_checkin=False,
                idle_timeout=self.conf.idle_timeout,
                max_pool_size=self.conf.max_pool_size,
                max_overflow=self.conf.max_overflow,
                pool_timeout=self.conf.pool_timeout,
                mysql_sql_mode=self.conf.mysql_sql_mode,
                max_retries=self.conf.max_retries,
                retry_interval=self.conf.retry_interval,
                **self.connection_kwargs)
            self._writer_maker = orm.get_maker(engine=self._writer_engine)
            if self.conf.slave_connection:
                # dedicated read-only engine against the slave
                # (note: no debug= here, unlike the writer engine)
                self._reader_engine = engines.create_engine(
                    'mysql+mysqlconnector://' + self.conf.slave_connection,
                    logging_name=self.name,
                    thread_checkin=False,
                    idle_timeout=self.conf.idle_timeout,
                    max_pool_size=self.conf.max_pool_size,
                    max_overflow=self.conf.max_overflow,
                    pool_timeout=self.conf.pool_timeout,
                    mysql_sql_mode=self.conf.mysql_sql_mode,
                    max_retries=self.conf.max_retries,
                    retry_interval=self.conf.retry_interval,
                    **self.connection_kwargs)
                self._reader_maker = orm.get_maker(
                    engine=self._reader_engine)
            else:
                # no slave configured: reads go through the writer
                self._reader_engine = self._writer_engine
                self._reader_maker = self._writer_maker
            # NOTE(review): assumes the self.started property reads
            # self._started -- confirm against the class definition
            self._started = True
def __init__(self, conf, threadpool, infoget):
    """File-cache manager backed by a local sqlite database.

    Creates the cache directory if missing and opens (creating tables
    on first use) the filemanager sqlite database.
    """
    if not os.path.exists(conf.filecache):
        os.makedirs(conf.filecache, 0o755)
    self.path = os.path.join(conf.filecache, 'files')
    self.threadpool = threadpool
    self.infoget = infoget
    self.localfiles = {}
    self.downloading = {}
    self.lock = Semaphore()
    # init sqlite session
    db_path = os.path.join(conf.filecache, 'filemanager.db')
    engine = create_engine(sql_connection='sqlite:///%s' % db_path,
                           logging_name='filemanager')
    if not engine.has_table(models.FileDetail.__tablename__):
        # create table if needed
        models.FileManagerTables.metadata.create_all(engine)
    session_maker = get_maker(engine)
    self.session = session_maker()
def revert(self, result, *args, **kwargs):
    """Revert a mysql update by dropping the schema and replaying a backup.

    Called by taskflow when the task failed (result is a Failure) or
    when an explicit rollback was requested via database.update.rollback.
    Only reverts when a backup exists and the update is revertable.
    """
    super(MysqlUpdate, self).revert(result, *args, **kwargs)
    database = self.database
    if isinstance(result, failure.Failure) or database.update.rollback:
        # revert need backup
        if database.backup and database.update.revertable:
            LOG.info('Try revert %s %d database %s:%d/%s ' %
                     (self.middleware.endpoint, self.middleware.entity,
                      database.host, database.port, database.schema))
            if not self.executed:
                # nothing was executed, so there is nothing to undo
                LOG.info('Database %s:%d/%s no sql executed, '
                         'nothing will be reverted' %
                         (database.host, database.port, database.schema))
            else:
                self.middleware.set_return(self.taskname, common.REVERTED)
                if not os.path.exists(database.backup):
                    msg = 'No backup database file found! can not revert'
                    LOG.error(msg)
                    raise exceptions.DatabaseRevertError(msg)
                engine = create_engine(connformater % {
                    'user': database.user,
                    'passwd': database.passwd,
                    'host': database.host,
                    'port': database.port,
                    'schema': database.schema
                }, thread_checkin=False, poolclass=NullPool,
                    charset=database.character_set)
                LOG.warning('Database %s will drop and re create in %s:%d' %
                            (database.schema, database.host, database.port))
                # drop + recreate the schema, then replay the backup dump
                re_create_schema(engine)
                # NOTE(review): existence is checked on database.backup but
                # the dump is read from database.backup.file -- confirm
                # both refer to the same path
                self.execute_sql_from_file(database.backup.file)
                self.executed = 0
                LOG.info('Revert database success')
                self.middleware.set_return(self.taskname, common.REVERTED)
        else:
            if isinstance(result, failure.Failure):
                LOG.error(
                    'Database update fail, not revert because not backup file or unable to revert'
                )
def _delete_schema(self, session, database, schema, **kwargs):
    """Drop a schema on the database entity resolved through req.

    Yields (local_ip, port) of the entity for the caller's
    context-manager protocol.
    """
    req = kwargs.pop('req')
    local_ip, port = self._get_entity(req, int(database.reflection_id))
    if port <= 0:
        raise exceptions.AcceptableDbError(
            'Can not find Database port, not init finished')
    if not local_ip:
        raise exceptions.AcceptableDbError('Database agent is offline now')
    # revoke the schema owner's privileges only when it differs from the
    # database account
    if schema.user != database.user:
        dropauths = privilegeutils.mysql_privileges(schema)
    else:
        dropauths = None
    conn = connformater % dict(user=database.user, passwd=database.passwd,
                               schema=schema.schema, host=local_ip,
                               port=port)
    engine = create_engine(conn, thread_checkin=False, poolclass=NullPool)
    utils.drop_schema(engine, dropauths)
    yield local_ip, port
def _delete_schema(self, session, database, schema, **kwargs):
    """Drop a schema on the server recorded for database.reflection_id.

    Yields (host, port) of the recorded server for the caller's
    context-manager protocol.
    """
    _record = model_query(session, RecordDatabase,
                          filter=RecordDatabase.record_id == int(
                              database.reflection_id)).one()
    host = _record.host
    port = _record.port
    # NOTE(review): the drop only runs when database.passwd is set; a
    # password-less database silently skips the drop and still yields --
    # confirm this is intended
    if database.passwd:
        connection = connformater % dict(user=database.user,
                                         passwd=database.passwd,
                                         schema=schema.schema,
                                         host=host, port=port)
        engine = create_engine(connection, thread_checkin=False,
                               poolclass=NullPool)
        dropauths = None
        if schema.user != database.user:
            # schema has its own account: revoke its privileges on drop
            dropauths = privilegeutils.mysql_privileges(schema)
        utils.drop_schema(engine, dropauths)
    yield host, port
def execute_sql_from_row(self, logfile=None, timeout=None):
    """Execute database.update.sql statements one by one on a connection.

    :param logfile: optional path; on failure the offending sql and error
        are written there
    :param timeout: optional deadline in seconds; None means no deadline
    :raises exceptions.DatabaseExecuteError: on overtime or sql failure
    """
    database = self.database
    db_info = {
        'user': database.user,
        'passwd': database.passwd,
        'host': database.host,
        'port': database.port,
        'schema': database.schema
    }
    database_connection = connformater % db_info
    engine = create_engine(database_connection, thread_checkin=False,
                           poolclass=NullPool,
                           charset=database.character_set)
    # FIX: timeout defaults to None but the original computed
    # time.time() + None (TypeError); treat None as "no deadline"
    overtime = time.time() + timeout if timeout is not None else None
    with engine.connect() as conn:
        LOG.info('MysqlUpdate connect %s:%d/%s mysql success'
                 % (database.host, database.port, database.schema))
        for sql in database.update.sql:
            # FIX: the overtime error was raised inside the try and got
            # swallowed into the generic per-statement failure handler;
            # check the deadline before attempting the statement
            if overtime is not None and time.time() > overtime:
                raise exceptions.DatabaseExecuteError('Execute overtime')
            try:
                r = conn.execute(sql)
                # count = r.rowcount
                r.close()
                self.executed += 1
            except Exception as e:
                msg = 'execute sql fail, index %d, sql file %s' % (
                    self.executed + 1, database.update.file)
                # FIX: logfile defaults to None; only write when provided
                if logfile:
                    with open(logfile, 'w') as f:
                        f.write(msg + '\n')
                        f.write(sql + '\n')
                        f.write(str(e))
                self.middleware.dberrors.append(sql)
                LOG.error(msg)
                raise exceptions.DatabaseExecuteError(msg)
import sqlalchemy from simpleservice.ormdb.engines import create_engine dst = { 'host': '172.20.0.3', 'port': 3304, 'schema': 'manager', 'user': '******', 'passwd': '111111' } engine = create_engine(dst) cloum_types = [ sqlalchemy.UnicodeText, ] def get(engine, cloum): return engine.dialect.type_descriptor(cloum) for cloum in cloum_types: ret = get(engine, cloum) print ret, type(ret) for attr in dir(ret): print attr, getattr(ret, attr)
from simpleservice.ormdb.engines import create_engine
from simpleservice.ormdb.orm import get_maker
from simpleservice.ormdb.argformater import connformater

# taskflow terminal states
FINISHED_STATES = (states.SUCCESS, states.FAILURE, states.REVERTED)

# connection info for the simpleflow schema
dst = {
    'host': '172.20.0.3',
    'port': 3304,
    'schema': 'simpleflow',
    'user': '******',
    'passwd': '111111'
}

sql_connection = connformater % dst
engine = create_engine(sql_connection,
                       converter_class=SimpleFlowConverter)
session_maker = get_maker(engine=engine)
session = session_maker()


class UnfortunateTask(task.Task):
    """Task that aborts the whole process when the BOOM env var is set."""

    def execute(self):
        print('executing %s' % self)
        boom = os.environ.get('BOOM')
        if not boom:
            print('> this time not exiting')
        else:
            print('> Critical error: boom = %s' % boom)
            raise SystemExit()
from simpleservice.ormdb.engines import create_engine
from goperation.manager import common

# connection info for the target manager schema
dst = {
    'host': '172.20.0.3',
    'port': 3304,
    'schema': 'manager',
    'user': '******',
    'passwd': '111111'
}

# pick an arbitrary agent id for the test request
agent_id = random.randint(1, 100)
sql_connection = connformater % dst
engine = create_engine(sql_connection)
# reflect the live schema and list its tables
metadata = MetaData()
metadata.reflect(bind=engine)
for tab in metadata.tables.keys():
    print tab
session_maker = orm.get_maker(engine=engine)
session = session_maker()
print 'init session finish'
print '~~~~~~~~~~~~~~~~~~~~~~~~~~'
print 'test add request_row'
# NOTE(review): script appears to continue beyond this chunk
request_row = AsyncRequest()