def start(self, database_name, user, readonly=False, context=None,
        close=False, autocommit=False):
    '''
    Start transaction
    '''
    assert self.user is None
    assert self.database is None
    assert self.close is None
    assert self.context is None
    # Compute started_at before connecting to ensure it is strictly before
    # all transactions started afterwards (it may also be before
    # transactions started earlier).
    self.started_at = self.monotonic_time()
    if not database_name:
        database = backend.Database().connect()
    else:
        database = backend.Database(database_name).connect()
    Flavor.set(backend.Database.flavor)
    self.connection = database.get_connection(
        readonly=readonly, autocommit=autocommit)
    self.user = user
    self.database = database
    self.readonly = readonly
    self.close = close
    self.context = context or {}
    self.create_records = {}
    self.delete_records = {}
    self.delete = {}
    self.trigger_records = defaultdict(set)
    self.timestamp = {}
    self.counter = 0
    self._datamanagers = []
    self._sub_transactions = []
    self._sub_transactions_to_close = []
    if database_name:
        from trytond.cache import Cache
        try:
            Cache.sync(self)
        except BaseException:
            self.stop(False)
            raise
        from trytond import iwc
        try:
            iwc.start(database_name)
        except BaseException:
            self.stop(False)
            raise
    return self

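# Illustrative usage sketch (not part of the original listing): start() is
# normally entered through the context manager so that commit/rollback and
# stop() happen automatically on exit. The database name 'mydb' and the
# helper name _example_count_users below are assumptions for illustration.
def _example_count_users(database_name='mydb'):
    from trytond.pool import Pool
    from trytond.transaction import Transaction

    with Transaction().start(database_name, 0, readonly=True):
        # Everything read inside this block shares the transaction connection.
        User = Pool().get('res.user')
        return len(User.search([]))
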
def _listen(cls, dbname):
    database = backend.Database(dbname)
    if not database.has_channel():
        raise NotImplementedError
    logger.info("listening on channel '%s' of '%s'", cls._channel, dbname)

    conn = database.get_connection()
    try:
        cursor = conn.cursor()
        cursor.execute('LISTEN "%s"' % cls._channel)
        conn.commit()
        while cls._listener.get(dbname) == threading.current_thread():
            readable, _, _ = select.select([conn], [], [])
            if not readable:
                continue
            conn.poll()
            while conn.notifies:
                notification = conn.notifies.pop()
                if notification.payload:
                    reset = json.loads(notification.payload)
                    for name in reset:
                        inst = cls._instances[name]
                        inst._clear(dbname)
    except Exception:
        logger.error(
            "cache listener on '%s' crashed", dbname, exc_info=True)
        raise
    finally:
        database.put_connection(conn)
        with cls._listener_lock:
            if cls._listener.get(dbname) == threading.current_thread():
                del cls._listener[dbname]

def _listen(cls, database):
    db = backend.Database(database)
    if not db.has_channel():
        raise NotImplementedError
    logger.info("listening on channel '%s'", cls._channel)

    conn = db.get_connection()
    pid = os.getpid()
    try:
        cursor = conn.cursor()
        cursor.execute('LISTEN "%s"' % cls._channel)
        conn.commit()

        cls._messages[database] = messages = _MessageQueue(_cache_timeout)

        now = time.time()
        while cls._queues[pid, database]['timeout'] > now:
            readable, _, _ = select.select([conn], [], [], _select_timeout)
            if not readable:
                continue
            conn.poll()
            while conn.notifies:
                notification = conn.notifies.pop()
                payload = json.loads(
                    notification.payload, object_hook=JSONDecoder())
                channel = payload['channel']
                message = payload['message']
                messages.append(channel, message)
                with cls._queues_lock[pid]:
                    events = cls._queues[
                        pid, database]['events'][channel].copy()
                    cls._queues[pid, database]['events'][channel].clear()
                for event in events:
                    event.set()
            now = time.time()
    except Exception:
        logger.error('bus listener on "%s" crashed', database, exc_info=True)
        with cls._queues_lock[pid]:
            del cls._queues[pid, database]
        raise
    finally:
        db.put_connection(conn)

    with cls._queues_lock[pid]:
        if cls._queues[pid, database]['timeout'] <= now:
            del cls._queues[pid, database]
        else:
            # A query arrived between the end of the while and here
            listener = threading.Thread(
                target=cls._listen, args=(database,), daemon=True)
            cls._queues[pid, database]['listener'] = listener
            listener.start()

def run(cls, dbname):
    database = backend.Database(dbname)
    if database.has_channel():
        pid = os.getpid()
        with cls._listener_lock[pid]:
            if (pid, dbname) not in cls._listener:
                cls._listener[pid, dbname] = listener = threading.Thread(
                    target=cls._listen, args=(dbname,), daemon=True)
                listener.start()

def drop_db(name=DB_NAME):
    if db_exist(name):
        database = backend.Database(name)
        database.close()

        with Transaction().start(
                None, 0, close=True, autocommit=True) as transaction:
            database.drop(transaction.connection, name)
            Pool.stop(name)
            Cache.drop(name)

def broadcast_init_pool(dbname):
    database = backend.Database(dbname)
    conn = database.get_connection()
    try:
        cursor = conn.cursor()
        cursor.execute(
            'NOTIFY "%s", %%s' % 'ir_update',
            (json.dumps(
                    ['init_pool|%s' % get_worker_id()],
                    separators=(',', ':')),))
        conn.commit()
    finally:
        database.put_connection(conn)
    logger.info('init_pool(%s)', dbname)

def refresh_pool(cls, transaction):
    database = transaction.database
    dbname = database.name
    if not _clear_timeout and database.has_channel():
        database = backend.Database(dbname)
        conn = database.get_connection()
        try:
            cursor = conn.cursor()
            cursor.execute(
                'NOTIFY "%s", %%s' % cls._channel, ('refresh pool',))
            conn.commit()
        finally:
            database.put_connection(conn)

def test_create_jsonb(self):
    "Test create dict as jsonb"
    connection = Transaction().connection
    if backend.Database().get_version(connection) < (9, 2):
        return
    Dict = Pool().get('test.dict_jsonb')
    self.set_jsonb(Dict._table)

    dict_, = Dict.create([{
                'dico': {'a': 1, 'b': 2},
                }])

    self.assertDictEqual(dict_.dico, {'a': 1, 'b': 2})

def setup_model(cls):
    connection = Transaction().connection
    if backend.Database().get_version(connection) < (9, 2):
        return
    pool = Pool()
    for model in ['test.multi_selection', 'test.multi_selection_required']:
        Model = pool.get(model)
        cursor = connection.cursor()
        for name, field in Model._fields.items():
            if field._type == 'multiselection':
                cursor.execute('ALTER TABLE "%s" '
                    'ALTER COLUMN %s TYPE json USING %s::json'
                    % (Model._table, name, name))
    Transaction().commit()

def create_db(name=DB_NAME, lang='en'):
    if not db_exist(name):
        database = backend.Database()
        database.connect()
        connection = database.get_connection(autocommit=True)
        try:
            database.create(connection, name)
        finally:
            database.put_connection(connection, True)

        database = backend.Database(name)
        connection = database.get_connection()
        try:
            with connection.cursor() as cursor:
                database.init()
                ir_configuration = Table('ir_configuration')
                cursor.execute(*ir_configuration.insert(
                        [ir_configuration.language], [[lang]]))
            connection.commit()
        finally:
            database.put_connection(connection)

        pool = Pool(name)
        pool.init(update=['res', 'ir'], lang=[lang])
        with Transaction().start(name, 0):
            User = pool.get('res.user')
            Lang = pool.get('ir.lang')
            language, = Lang.search([('code', '=', lang)])
            language.translatable = True
            language.save()
            users = User.search([('login', '!=', 'root')])
            User.write(users, {
                    'language': language.id,
                    })
            Module = pool.get('ir.module')
            Module.update_list()

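# Illustrative usage sketch (not from the original source): how db_exist,
# create_db and drop_db above are typically combined in a test fixture.
# The class name DatabaseFixture is an assumption.
import unittest


class DatabaseFixture(unittest.TestCase):

    def setUp(self):
        # create_db() is a no-op when the database already exists
        create_db(DB_NAME, lang='en')

    def tearDown(self):
        drop_db(DB_NAME)
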
def drop(cls, dbname):
    with cls._listener_lock:
        listener = cls._listener.pop(dbname, None)
    if listener:
        database = backend.Database(dbname)
        conn = database.get_connection()
        try:
            cursor = conn.cursor()
            cursor.execute('NOTIFY "%s"' % cls._channel)
            conn.commit()
        finally:
            database.put_connection(conn)
        listener.join()
    for inst in cls._instances.values():
        inst._timestamp.pop(dbname, None)
        inst._database_cache.pop(dbname, None)
        inst._transaction_lower.pop(dbname, None)

def stop(cls):
    to_join = []
    current_pid = os.getpid()
    with cls._listener_lock[current_pid]:
        for pid, dbname in list(cls._listener):
            if pid != current_pid:
                continue
            to_join.append(cls._listener.pop((pid, dbname), None))
            try:
                database = backend.Database(dbname)
                conn = database.get_connection()
                cursor = conn.cursor()
                cursor.execute('NOTIFY ir_update')
                conn.commit()
            finally:
                database.put_connection(conn)
    for listener in to_join:
        listener.join()

def test_search_element_jsonb(self):
    "Test search dict element on jsonb"
    connection = Transaction().connection
    if backend.Database().get_version(connection) < (9, 2):
        return
    pool = Pool()
    Dict = pool.get('test.dict_noschema')
    self.set_jsonb(Dict._table)
    dict_, = Dict.create([{
                'dico': {'a': 'Foo'},
                }])

    dicts = Dict.search([
            ('dico.a', '=', "Foo"),
            ])

    self.assertListEqual(dicts, [dict_])

def _pg_dump(cache_file):
    cmd = ['pg_dump', '-f', cache_file, '-F', 'c']
    options, env = _pg_options()
    cmd.extend(options)
    cmd.append(DB_NAME)
    try:
        return not subprocess.call(cmd, env=env)
    except OSError:
        cache_name, _ = os.path.splitext(os.path.basename(cache_file))
        cache_name = backend.TableHandler.convert_name(cache_name)
        # Ensure no connection is left open so the database can be used
        # as a template
        backend.Database(DB_NAME).close()
        with Transaction().start(
                None, 0, close=True, autocommit=True) as transaction:
            transaction.database.create(
                transaction.connection, cache_name, DB_NAME)
        open(cache_file, 'a').close()
        return True

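# Hypothetical counterpart sketch (not in the original listing): restoring the
# cached state mirrors the fallback branch of _pg_dump by recreating DB_NAME
# from the template database named after the cache file. The helper name
# _pg_restore and the (connection, name, template) argument order are
# assumptions inferred from the create() call above.
def _pg_restore(cache_file):
    cache_name, _ = os.path.splitext(os.path.basename(cache_file))
    cache_name = backend.TableHandler.convert_name(cache_name)
    with Transaction().start(
            None, 0, close=True, autocommit=True) as transaction:
        transaction.database.create(
            transaction.connection, DB_NAME, cache_name)
    return True
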
def _listen(cls, dbname):
    database = backend.Database(dbname)
    if not database.has_channel():
        raise NotImplementedError
    logger.info("listening on channel '%s' of '%s'", cls._channel, dbname)

    conn = database.get_connection()
    pid = os.getpid()
    current_thread = threading.current_thread()
    try:
        cursor = conn.cursor()
        cursor.execute('LISTEN "%s"' % cls._channel)
        conn.commit()
        while cls._listener.get((pid, dbname)) == current_thread:
            readable, _, _ = select.select([conn], [], [])
            if not readable:
                continue
            conn.poll()
            while conn.notifies:
                notification = conn.notifies.pop()
                if notification.payload:
                    reset = json.loads(notification.payload)
                    for name in reset:
                        # XUNG: the name may be missing from _instances when
                        # the control_version_upgrade table is locked because
                        # another process is currently upgrading; ignore the
                        # cache reset notification (not yet loaded anyway)
                        if name in cls._instances:
                            inst = cls._instances[name]
                            inst._clear(dbname)
            cls._clean_last = datetime.now()
    except Exception:
        logger.error(
            "cache listener on '%s' crashed", dbname, exc_info=True)
        raise
    finally:
        database.put_connection(conn)
        with cls._listener_lock[pid]:
            if cls._listener.get((pid, dbname)) == current_thread:
                del cls._listener[pid, dbname]

def login(request, database_name, user, parameters, language=None):
    try:
        backend.Database(database_name).connect()
    except backend.DatabaseOperationalError:
        logger.error('fail to connect to %s', database_name, exc_info=True)
        abort(HTTPStatus.NOT_FOUND)
    context = {
        'language': language,
        '_request': request.context,
        }
    try:
        session = security.login(
            database_name, user, parameters, context=context)
        code = HTTPStatus.UNAUTHORIZED
    except RateLimitException:
        session = None
        code = HTTPStatus.TOO_MANY_REQUESTS
    if not session:
        abort(code)
    return session

def _listen(cls, dbname):
    database = backend.Database(dbname)
    if not database.has_channel():
        raise NotImplementedError
    logger.info("listening on channel '%s' of '%s'", cls._channel, dbname)

    conn = database.get_connection(autocommit=True)
    pid = os.getpid()
    current_thread = threading.current_thread()
    try:
        cursor = conn.cursor()
        cursor.execute('LISTEN "%s"' % cls._channel)
        while cls._listener.get((pid, dbname)) == current_thread:
            readable, _, _ = select.select([conn], [], [])
            if not readable:
                continue
            conn.poll()
            while conn.notifies:
                notification = conn.notifies.pop()
                if notification.payload == 'refresh pool':
                    Pool(dbname).refresh(_get_modules(cursor))
                elif notification.payload:
                    reset = json.loads(notification.payload)
                    for name in reset:
                        inst = cls._instances[name]
                        inst._clear(dbname)
            cls._clean_last = datetime.now()
    except Exception:
        logger.error(
            "cache listener on '%s' crashed", dbname, exc_info=True)
        raise
    finally:
        database.put_connection(conn)
        with cls._listener_lock[pid]:
            if cls._listener.get((pid, dbname)) == current_thread:
                del cls._listener[pid, dbname]

def _listen(cls, dbname):
    database = backend.Database(dbname)
    if not database.has_channel():
        raise NotImplementedError
    logger.info("listening on channel ir_update of '%s'", dbname)

    conn = database.get_connection()
    pid = os.getpid()
    current_thread = threading.current_thread()
    try:
        cursor = conn.cursor()
        cursor.execute('LISTEN "ir_update"')
        conn.commit()
        while cls._listener.get((pid, dbname)) == current_thread:
            readable, _, _ = select.select([conn], [], [])
            if not readable:
                continue
            conn.poll()
            while conn.notifies:
                notification = conn.notifies.pop()
                if notification.payload:
                    reset = json.loads(notification.payload)
                    for name in reset:
                        me = get_worker_id()
                        if ('init_pool' in name
                                and me != name.split('|')[-1]):
                            cls.on_init_pool(dbname)
    except Exception:
        logger.error(
            "IWC listener on '%s' crashed", dbname, exc_info=True)
        raise
    finally:
        database.put_connection(conn)
        with cls._listener_lock[pid]:
            if cls._listener.get((pid, dbname)) == current_thread:
                del cls._listener[pid, dbname]

def db_exist(name=DB_NAME):
    database = backend.Database().connect()
    return name in database.list()

def run(options):
    main_lang = config.get('database', 'language')
    init = {}
    for db_name in options.database_names:
        init[db_name] = False
        database = backend.Database(db_name)
        database.connect()
        if options.update:
            if not database.test():
                logger.info("init db")
                database.init()
                init[db_name] = True
        elif not database.test():
            raise Exception('"%s" is not a Tryton database.' % db_name)

    for db_name in options.database_names:
        if options.update:
            with Transaction().start(db_name, 0) as transaction,\
                    transaction.connection.cursor() as cursor:
                database = backend.Database(db_name)
                database.connect()
                if not database.test():
                    raise Exception(
                        '"%s" is not a Tryton database.' % db_name)
                lang = Table('ir_lang')
                cursor.execute(*lang.select(
                        lang.code,
                        where=lang.translatable == Literal(True)))
                lang = set([x[0] for x in cursor])
            lang.add(main_lang)
        else:
            lang = set()
        lang |= set(options.languages)
        pool = Pool(db_name)
        pool.init(update=options.update, lang=list(lang),
            activatedeps=options.activatedeps)

        if options.update_modules_list:
            with Transaction().start(db_name, 0) as transaction:
                Module = pool.get('ir.module')
                Module.update_list()

        if lang:
            with Transaction().start(db_name, 0) as transaction:
                pool = Pool()
                Lang = pool.get('ir.lang')
                languages = Lang.search([
                        ('code', 'in', lang),
                        ])
                Lang.write(languages, {
                        'translatable': True,
                        })

    for db_name in options.database_names:
        with Transaction().start(db_name, 0) as transaction:
            pool = Pool()
            User = pool.get('res.user')
            Configuration = pool.get('ir.configuration')
            configuration = Configuration(1)
            with transaction.set_context(active_test=False):
                admin, = User.search([('login', '=', 'admin')])
            if options.email is not None:
                admin.email = options.email
            elif init[db_name]:
                admin.email = input('"admin" email for "%s": ' % db_name)
            if init[db_name] or options.password:
                configuration.language = main_lang
                # try to read password from environment variable
                # TRYTONPASSFILE, empty TRYTONPASSFILE ignored
                passpath = os.getenv('TRYTONPASSFILE')
                password = ''
                if passpath:
                    try:
                        with open(passpath) as passfile:
                            password, = passfile.read().splitlines()
                    except Exception as err:
                        sys.stderr.write('Can not read password '
                            'from "%s": "%s"\n' % (passpath, err))

                if not password and not options.reset_password:
                    while True:
                        password = getpass(
                            '"admin" password for "%s": ' % db_name)
                        password2 = getpass(
                            '"admin" password confirmation: ')
                        if password != password2:
                            sys.stderr.write(
                                '"admin" password confirmation '
                                'doesn\'t match "admin" password.\n')
                            continue
                        if not password:
                            sys.stderr.write(
                                '"admin" password is required.\n')
                            continue
                        break
                if not options.reset_password:
                    admin.password = password
            admin.save()
            if options.reset_password:
                User.reset_password([admin])
            if options.test_email:
                send_test_email(options.test_email)
            if options.hostname is not None:
                configuration.hostname = options.hostname or None
            configuration.save()

        with Transaction().start(db_name, 0, readonly=True):
            if options.validate is not None:
                validate(options.validate, options.validate_percentage)

def db_exist(request, database_name):
    try:
        backend.Database(database_name).connect()
        return True
    except Exception:
        return False

def __init__(self, database_name, mpool):
    self.database = backend.Database(database_name).connect()
    self.connection = self.database.get_connection(autocommit=True)
    self.mpool = mpool