def get_login(cls, login, password):
    """Authenticate *login* against LDAP before the standard mechanism.

    Searches the directory for a unique entry matching *login*; on a
    successful simple bind with *password*, resolves (or optionally
    creates) the matching res.user and returns its id.  Any LDAP error,
    or no LDAP match, falls through to the parent implementation.
    """
    pool = Pool()
    LoginAttempt = pool.get('res.user.login.attempt')
    try:
        con = ldap_connection()
        if con:
            uid = config.get(section, 'uid', default='uid')
            users = cls.ldap_search_user(login, con, attrs=[uid])
            # Only act on an unambiguous directory match.
            if users and len(users) == 1:
                [(dn, attrs)] = users
                # Empty passwords are rejected up front: an anonymous
                # bind would otherwise "succeed" on many servers.
                if (password
                        and con.simple_bind_s(dn, unicode2str(password))):
                    # Use ldap uid so we always get the right case
                    login = attrs.get(uid, [login])[0]
                    user_id, _ = cls._get_login(login)
                    if user_id:
                        # Successful LDAP login clears failed attempts.
                        LoginAttempt.remove(login)
                        return user_id
                    elif config.getboolean(section, 'create_user'):
                        # Auto-provision a Tryton user for the LDAP entry.
                        user, = cls.create([{
                                    'name': login,
                                    'login': login,
                                    }])
                        return user.id
    except ldap.LDAPError:
        logger.error('LDAPError when login', exc_info=True)
    # Fall back to the standard (database password) login.
    return super(User, cls).get_login(login, password)
def _login_ldap(cls, login, parameters):
    """LDAP login method using the ldap3 library.

    Requires a 'password' entry in *parameters* (raising LoginException
    to prompt for it otherwise).  Binds as the matched DN; on success
    returns the user id, creating the user first when the 'create_user'
    option is enabled.  Returns None on failure so other methods can run.
    """
    if 'password' not in parameters:
        # Ask the client for the password using the field's label.
        msg = cls.fields_get(['password'])['password']['string']
        raise LoginException('password', msg, type='password')
    password = parameters['password']
    try:
        server = ldap_server()
        if server:
            uid = config.get(section, 'uid', default='uid')
            users = cls.ldap_search_user(login, server, attrs=[uid])
            # Only act on an unambiguous directory match.
            if users and len(users) == 1:
                [(dn, attrs)] = users
                con = ldap3.Connection(server, dn, password)
                # Reject empty passwords: an anonymous bind could
                # otherwise "succeed".
                if (password and con.bind()):
                    # Use ldap uid so we always get the right case
                    login = attrs.get(uid, [login])[0]
                    user_id = cls._get_login(login)[0]
                    if user_id:
                        return user_id
                    elif config.getboolean(section, 'create_user'):
                        # Auto-provision a Tryton user for the LDAP entry.
                        user, = cls.create([{
                                    'name': login,
                                    'login': login,
                                    }])
                        return user.id
    except LDAPException:
        logger.error('LDAPError when login', exc_info=True)
def get_login(cls, login, password):
    """Authenticate *login* against LDAP before the standard mechanism.

    Same flow as the python-ldap variant: unique directory match, simple
    bind with *password*, then resolve or optionally create the Tryton
    user.  LDAP errors or misses fall through to the parent class.
    """
    pool = Pool()
    LoginAttempt = pool.get('res.user.login.attempt')
    try:
        con = ldap_connection()
        if con:
            uid = config.get(section, 'uid', default='uid')
            users = cls.ldap_search_user(login, con, attrs=[uid])
            # Only act on an unambiguous directory match.
            if users and len(users) == 1:
                [(dn, attrs)] = users
                # Empty passwords rejected: anonymous binds may succeed.
                if password and con.simple_bind_s(dn, password):
                    # Use ldap uid so we always get the right case
                    login = attrs.get(uid, [login])[0]
                    user_id, _ = cls._get_login(login)
                    if user_id:
                        # Successful LDAP login clears failed attempts.
                        LoginAttempt.remove(login)
                        return user_id
                    elif config.getboolean(section, 'create_user'):
                        # Auto-provision a Tryton user for the LDAP entry.
                        user, = cls.create([{
                                    'name': login,
                                    'login': login,
                                    }])
                        return user.id
    except ldap.LDAPError:
        logger.error('LDAPError when login', exc_info=True)
    # Fall back to the standard (database password) login.
    return super(User, cls).get_login(login, password)
def work(options):
    """Main loop of the queue worker: pull tasks and feed a process pool.

    Spawns ``options.processes`` (or CPU count) workers, then loops:
    when workers are free, pull a task id from each database queue; when
    nothing is pending, block on select() until a database notification
    arrives or the computed timeout elapses.
    """
    Flavor.set(backend.Database.flavor)
    # Workers only make sense when the queue/worker option is enabled.
    if not config.getboolean('queue', 'worker', default=False):
        return
    try:
        processes = options.processes or cpu_count()
    except NotImplementedError:
        # cpu_count may be unavailable on some platforms.
        processes = 1
    logger.info("start %d workers", processes)
    mpool = MPool(
        processes, initializer, (options, ), options.maxtasksperchild)
    queues = [Queue(pool, mpool) for pool in initializer(options, False)]
    tasks = TaskList()
    timeout = options.timeout
    try:
        while True:
            # Throttle while all worker slots are busy.
            while len(tasks.filter()) >= processes:
                time.sleep(0.1)
            for queue in queues:
                task_id, next_ = queue.pull(options.name)
                # NOTE(review): min() with the previous value means the
                # timeout only ever shrinks across iterations — confirm
                # this monotonic decrease is intended.
                timeout = min(
                    next_ or options.timeout, timeout, options.timeout)
                if task_id:
                    tasks.append(queue.run(task_id))
                    break
            else:
                # No task pulled from any queue: wait for NOTIFY events.
                connections = [q.connection for q in queues]
                connections, _, _ = select.select(
                    connections, [], [], timeout)
                for connection in connections:
                    connection.poll()
                    # Drain pending notifications; their content is not
                    # used, they only wake up the loop.
                    while connection.notifies:
                        connection.notifies.pop(0)
    except KeyboardInterrupt:
        mpool.close()
def run_task(pool, task_id):
    """Execute queue task *task_id* with retries on operational errors.

    *pool* may be a Pool instance or a database name.  Retries the task
    up to the configured 'database retry' count on operational errors;
    if all retries fail, reschedules the task for later.  User errors
    are logged on the task via ir.error.
    """
    if not isinstance(pool, Pool):
        database_list = Pool.database_list()
        pool = Pool(pool)
        # Initialize the pool the first time this database is seen.
        if pool.database_name not in database_list:
            with Transaction().start(pool.database_name, 0, readonly=True):
                pool.init()
    Queue = pool.get('ir.queue')
    Error = pool.get('ir.error')
    name = '<Task %s@%s>' % (task_id, pool.database_name)
    logger.info('%s started', name)
    retry = config.getint('database', 'retry')
    try:
        for count in range(retry, -1, -1):
            # Back off a little more on each retry.
            if count != retry:
                time.sleep(0.02 * (retry - count))
            with Transaction().start(pool.database_name, 0) as transaction:
                try:
                    try:
                        task, = Queue.search([('id', '=', task_id)])
                    except ValueError:
                        # the task was rollbacked, nothing to do
                        break
                    with processing(name):
                        task.run()
                    break
                except backend.DatabaseOperationalError:
                    if count:
                        transaction.rollback()
                        continue
                    raise
                except (UserError, UserWarning) as e:
                    # Record the functional error on the task itself.
                    Error.log(task, e)
                    raise
        logger.info('%s done', name)
    except backend.DatabaseOperationalError:
        logger.info('%s failed, retrying', name, exc_info=True)
        if not config.getboolean('queue', 'worker', default=False):
            time.sleep(0.02 * retry)
        try:
            with Transaction().start(
                    pool.database_name, 0) as transaction:
                if not transaction.database.has_channel():
                    # Without LISTEN/NOTIFY support the task cannot be
                    # rescheduled usefully.
                    logger.critical('%s failed', name, exc_info=True)
                    return
                task = Queue(task_id)
                if task.scheduled_at and task.enqueued_at < task.scheduled_at:
                    duration = (task.scheduled_at - task.enqueued_at) * 2
                else:
                    duration = dt.timedelta(seconds=2 * retry)
                # NOTE(review): max() makes one hour the *minimum* delay
                # before retry — confirm min() (a cap) was not intended.
                duration = max(duration, dt.timedelta(hours=1))
                scheduled_at = dt.datetime.now() + duration * random.random()
                Queue.push(task.name, task.data, scheduled_at=scheduled_at)
        except Exception:
            logger.critical(
                'rescheduling %s failed', name, exc_info=True)
    except (UserError, UserWarning):
        logger.info('%s failed', name)
    except Exception:
        logger.critical('%s failed', name, exc_info=True)
def db_list(request, *args):
    "Return the database names served for the request's hostname."
    # Listing can be switched off entirely through the configuration.
    if not config.getboolean('database', 'list'):
        abort(HTTPStatus.FORBIDDEN)
    ctx = {'_request': request.context}
    host = get_hostname(request.host)
    # Root (user 0), read-only transaction without a database, closed as
    # soon as the listing is returned.
    with Transaction().start(
            None, 0, context=ctx, readonly=True, close=True,
            ) as trans:
        return trans.database.list(hostname=host)
def db_list(request, *args):
    "Return the list of database names (RPC endpoint)."
    # Listing can be switched off entirely through the configuration.
    if not config.getboolean('database', 'list'):
        raise Exception('AccessDenied')
    ctx = {'_request': request.context}
    # Root transaction without a database, closed right away; _nocache
    # presumably bypasses the transaction cache — TODO confirm.
    with Transaction().start(
            None, 0, context=ctx, close=True, _nocache=True) as trans:
        return trans.database.list()
def tryton_syntax_analysis(pool, update):
    "Run the trytond API-change detector on *pool* unless disabled."
    # Skip entirely during a database update.
    if update:
        return
    from trytond.config import config
    # The analysis can be disabled through the [debug] section.
    if config.getboolean('debug', 'disable_syntax_analysis'):
        return
    logging.getLogger('modules').info('Running trytond syntax analysis')
    detect_api_changes(pool)
def purge_listeners(cls, dbname):
    '''
    Purges all listeners for a given database

    Removes the listener thread registered for (pid, dbname) under the
    per-process lock, then (outside testing) waits until that thread has
    actually terminated.
    '''
    pid = os.getpid()
    thread_id = None
    with cls._listener_lock[pid]:
        if (pid, dbname) in cls._listener:
            # Remember the thread id so we can wait for it below.
            thread_id = cls._listener[pid, dbname].ident
            del cls._listener[pid, dbname]
    # JMO : doctest teardown remains stuck with code below
    # TODO: fix this
    if not config.getboolean('env', 'testing'):
        # We removed the thread from the list, but it can still be alive if it
        # is busy clearing some cache
        if thread_id is not None:
            # Busy-wait (10 ms polls) until the thread id disappears
            # from the live-thread set.  NOTE(review): no timeout — this
            # loops forever if the listener thread never exits.
            while {thread_id} & {x.ident for x in threading.enumerate()}:
                time.sleep(0.01)
def run_task(pool, task_id):
    """Execute queue task *task_id* with retries on operational errors.

    *pool* may be a Pool instance or a database name.  On repeated
    operational failure the task is pushed back onto the queue with a
    short random delay.
    """
    if not isinstance(pool, Pool):
        pool = Pool(pool)
    Queue = pool.get('ir.queue')
    name = '<Task %s@%s>' % (task_id, pool.database_name)
    logger.info('%s started', name)
    retry = config.getint('database', 'retry')
    try:
        for count in range(retry, -1, -1):
            # Back off a little more on each retry.
            if count != retry:
                time.sleep(0.02 * (retry - count))
            with Transaction().start(pool.database_name, 0) as transaction:
                try:
                    try:
                        task, = Queue.search([('id', '=', task_id)])
                    except ValueError:
                        # the task was rollbacked, nothing to do
                        break
                    with processing(name):
                        task.run()
                    break
                except backend.DatabaseOperationalError:
                    if count:
                        transaction.rollback()
                        continue
                    raise
        logger.info('%s done', name)
    except backend.DatabaseOperationalError:
        logger.info('%s failed, retrying', name, exc_info=True)
        # Without a dedicated worker, wait a bit before rescheduling.
        if not config.getboolean('queue', 'worker', default=False):
            time.sleep(0.02 * retry)
        try:
            with Transaction().start(
                    pool.database_name, 0) as transaction:
                task = Queue(task_id)
                # Random jitter avoids thundering-herd retries.
                scheduled_at = dt.datetime.now()
                scheduled_at += dt.timedelta(
                    seconds=random.randint(0, 2 * retry))
                Queue.push(task.name, task.data, scheduled_at=scheduled_at)
        except Exception:
            logger.critical(
                'rescheduling %s failed', name, exc_info=True)
    except Exception:
        logger.critical('%s failed', name, exc_info=True)
def work(options):
    """Main loop of the queue worker (selectors-based variant).

    Spawns ``options.processes`` (or CPU count) workers over the
    configured database names, then loops: pull task ids while workers
    are free, otherwise block on the selector until a database
    notification arrives or the timeout elapses.
    """
    Flavor.set(backend.Database.flavor)
    # Workers only make sense when the queue/worker option is enabled.
    if not config.getboolean('queue', 'worker', default=False):
        return
    try:
        processes = options.processes or cpu_count()
    except NotImplementedError:
        # cpu_count may be unavailable on some platforms.
        processes = 1
    logger.info("start %d workers", processes)
    mpool = MPool(
        processes, initializer, (options.database_names,),
        options.maxtasksperchild)
    queues = [Queue(name, mpool) for name in options.database_names]
    tasks = TaskList()
    timeout = options.timeout
    selector = selectors.DefaultSelector()
    # Register each queue's database connection for read-readiness.
    for queue in queues:
        selector.register(queue.connection, selectors.EVENT_READ)
    try:
        while True:
            # Throttle while all worker slots are busy.
            while len(tasks.filter()) >= processes:
                time.sleep(0.1)
            for queue in queues:
                task_id, next_ = queue.pull(options.name)
                # NOTE(review): min() with the previous value means the
                # timeout only ever shrinks — confirm this is intended.
                timeout = min(
                    next_ or options.timeout, timeout, options.timeout)
                if task_id:
                    tasks.append(queue.run(task_id))
                    break
            else:
                # No task pulled: wait for NOTIFY events and drain them
                # (their content is unused, they only wake the loop).
                for key, _ in selector.select(timeout=timeout):
                    connection = key.fileobj
                    connection.poll()
                    while connection.notifies:
                        connection.notifies.pop(0)
    except KeyboardInterrupt:
        mpool.close()
    finally:
        selector.close()
def ldap_connection():
    """Return a configured python-ldap connection, or None when unset.

    Parses the configured LDAP URI, applies Active Directory and
    STARTTLS options, and binds with the URI's bindname extension when
    present.
    """
    uri = config.get(section, 'uri')
    if not uri:
        # LDAP authentication not configured.
        return
    uri, _, _, _, _, extensions = parse_ldap_url(uri)
    # Default port depends on the scheme (ldaps: 636, ldap: 389).
    if uri.scheme.startswith('ldaps'):
        scheme, port = 'ldaps', 636
    else:
        scheme, port = 'ldap', 389
    conn = ldap.initialize('%s://%s:%s/' % (
            scheme, uri.hostname, uri.port or port))
    if config.getboolean(section, 'active_directory', default=False):
        # Disable referral chasing: required for Active Directory.
        conn.set_option(ldap.OPT_REFERRALS, 0)
    if 'tls' in uri.scheme:
        conn.start_tls_s()
    # RFC 2255 bindname extension (critical form prefixed with '!').
    bindname, = extensions.get('bindname', [None])
    if not bindname:
        bindname, = extensions.get('!bindname', [None])
    if bindname:
        # XXX find better way to get the password
        conn.simple_bind_s(bindname, config.get(section, 'bind_pass'))
    return conn
def ldap_connection():
    """Return a configured python-ldap connection, or None when unset.

    Parses the configured LDAP URI, applies Active Directory and
    STARTTLS options, and binds with the URI's bindname extension when
    present.
    """
    uri = config.get(section, 'uri')
    if not uri:
        # LDAP authentication not configured.
        return
    uri, _, _, _, _, extensions = parse_ldap_url(uri)
    # Default port depends on the scheme (ldaps: 636, ldap: 389).
    if uri.scheme.startswith('ldaps'):
        scheme, port = 'ldaps', 636
    else:
        scheme, port = 'ldap', 389
    conn = ldap.initialize('%s://%s:%s/' % (
            scheme, uri.hostname, uri.port or port))
    if config.getboolean(section, 'active_directory', default=False):
        # Disable referral chasing: required for Active Directory.
        conn.set_option(ldap.OPT_REFERRALS, 0)
    if 'tls' in uri.scheme:
        conn.start_tls_s()
    # RFC 2255 bindname extension (critical form prefixed with '!').
    bindname, = extensions.get('bindname', [None])
    if not bindname:
        bindname, = extensions.get('!bindname', [None])
    if bindname:
        # XXX find better way to get the password
        conn.simple_bind_s(bindname, config.get(section, 'bind_pass'))
    return conn
def process(cls, messages=None, emails=None, smtpd_datamanager=None):
    """Render and send marketing e-mail *messages* to list recipients.

    Rewrites every <a href> through the URL shortener (for click
    tracking) and, when enabled, appends a 1x1 tracking image before
    </body>.  Messages default to those in the 'sending' state; after a
    full send (no explicit *emails*) they are marked sent.
    """
    pool = Pool()
    WebShortener = pool.get('web.shortened_url')
    spy_pixel = config.getboolean(
        'marketing', 'email_spy_pixel', default=False)

    # Memoized per (url, record): each link gets one shortened URL.
    @lru_cache(None)
    def short(url, record):
        url = WebShortener(record=record, redirect_url=url)
        url.save()
        return url.shortened_url

    def convert_href(message):
        # Genshi stream filter: rewrite anchors, optionally inject the
        # tracking pixel just before the closing body tag.
        def filter_(stream):
            for kind, data, pos in stream:
                if kind is START:
                    tag, attrs = data
                    if tag == 'a':
                        href = attrs.get('href')
                        attrs -= 'href'
                        href = short(href, str(message))
                        attrs |= [(QName('href'), href)]
                        data = tag, attrs
                elif kind is END and data == 'body' and spy_pixel:
                    yield START, (QName('img'), Attrs([
                                (QName('src'),
                                    short(URL_OPEN, str(message))),
                                (QName('height'), '1'),
                                (QName('width'), '1'),
                                ])), pos
                    yield END, QName('img'), pos
                yield kind, data, pos
        return filter_

    if not smtpd_datamanager:
        smtpd_datamanager = SMTPDataManager()
    if messages is None:
        messages = cls.search([
                ('state', '=', 'sending'),
                ])
    for message in messages:
        template = MarkupTemplate(message.content)
        # An explicit *emails* argument overrides the message's list.
        for email in (emails or message.list_.emails):
            content = (template.generate(email=email).filter(
                    convert_href(message)).render())
            name = email.party.rec_name if email.party else ''
            from_cfg = (config.get('marketing', 'email_from')
                or config.get('email', 'from'))
            to = _formataddr(name, email.email)
            msg = MIMEMultipart('alternative')
            set_from_header(msg, from_cfg, message.from_ or from_cfg)
            msg['To'] = to
            msg['Subject'] = Header(message.title, 'utf-8')
            if html2text:
                # Optional plain-text alternative part.
                converter = html2text.HTML2Text()
                part = MIMEText(
                    converter.handle(content), 'plain', _charset='utf-8')
                msg.attach(part)
            part = MIMEText(content, 'html', _charset='utf-8')
            msg.attach(part)
            sendmail_transactional(
                from_cfg, getaddresses([to]), msg,
                datamanager=smtpd_datamanager)
    if not emails:
        cls.sent(messages)
def dispatch(host, port, protocol, database_name, user, session, object_type,
        object_name, method, *args, **kwargs):
    """Dispatch an RPC call.

    Handles the 'common' service methods (login/logout/db admin), the
    'system' introspection methods, then the generic model/wizard/report
    call path with session check, retry on operational errors and session
    timestamp reset.
    """
    Database = backend.get('Database')
    DatabaseOperationalError = backend.get('DatabaseOperationalError')
    if object_type == 'common':
        if method == 'login':
            # Probe the database connection before attempting the login.
            try:
                database = Database(database_name).connect()
                cursor = database.cursor()
                cursor.close()
            except Exception:
                return False
            res = security.login(database_name, user, session)
            with Transaction().start(database_name, 0):
                Cache.clean(database_name)
                Cache.resets(database_name)
            msg = res and 'successful login' or 'bad login or password'
            logger.info('%s \'%s\' from %s:%d using %s on database \'%s\'',
                msg, user, host, port, protocol, database_name)
            return res or False
        elif method == 'logout':
            name = security.logout(database_name, user, session)
            logger.info('logout \'%s\' from %s:%d '
                'using %s on database \'%s\'',
                name, host, port, protocol, database_name)
            return True
        elif method == 'version':
            return __version__
        elif method == 'list_lang':
            # Static list of translatable client languages.
            return [
                ('bg_BG', 'Български'),
                ('ca_ES', 'Català'),
                ('cs_CZ', 'Čeština'),
                ('de_DE', 'Deutsch'),
                ('en_US', 'English'),
                ('es_AR', 'Español (Argentina)'),
                ('es_EC', 'Español (Ecuador)'),
                ('es_ES', 'Español (España)'),
                ('es_CO', 'Español (Colombia)'),
                ('fr_FR', 'Français'),
                ('lt_LT', 'Lietuvių'),
                ('nl_NL', 'Nederlands'),
                ('ru_RU', 'Russian'),
                ('sl_SI', 'Slovenščina'),
                ]
        elif method == 'db_exist':
            try:
                database = Database(*args, **kwargs).connect()
                cursor = database.cursor()
                cursor.close(close=True)
                return True
            except Exception:
                return False
        elif method == 'list':
            if not config.getboolean('database', 'list'):
                raise Exception('AccessDenied')
            with Transaction().start(None, 0, close=True) as transaction:
                return transaction.database.list(transaction.cursor)
        elif method == 'create':
            return create(*args, **kwargs)
        elif method == 'restore':
            return restore(*args, **kwargs)
        elif method == 'drop':
            return drop(*args, **kwargs)
        elif method == 'dump':
            return dump(*args, **kwargs)
        return
    elif object_type == 'system':
        database = Database(database_name).connect()
        database_list = Pool.database_list()
        pool = Pool(database_name)
        if database_name not in database_list:
            pool.init()
        if method == 'listMethods':
            # Enumerate every exposed RPC method of every registered object.
            res = []
            for type in ('model', 'wizard', 'report'):
                for object_name, obj in pool.iterobject(type=type):
                    for method in obj.__rpc__:
                        res.append(type + '.' + object_name + '.' + method)
            return res
        elif method == 'methodSignature':
            return 'signatures not supported'
        elif method == 'methodHelp':
            res = []
            args_list = args[0].split('.')
            object_type = args_list[0]
            object_name = '.'.join(args_list[1:-1])
            method = args_list[-1]
            obj = pool.get(object_name, type=object_type)
            return pydoc.getdoc(getattr(obj, method))
    # Generic RPC path: validate the session, retrying on transient
    # database errors.
    for count in range(config.getint('database', 'retry'), -1, -1):
        try:
            user = security.check(database_name, user, session)
        except DatabaseOperationalError:
            if count:
                continue
            raise
        break
    database_list = Pool.database_list()
    pool = Pool(database_name)
    if database_name not in database_list:
        with Transaction().start(database_name, user,
                readonly=True) as transaction:
            pool.init()
    obj = pool.get(object_name, type=object_type)
    # Only methods explicitly declared in __rpc__ may be called remotely.
    if method in obj.__rpc__:
        rpc = obj.__rpc__[method]
    else:
        raise UserError('Calling method %s on %s %s is not allowed!'
            % (method, object_type, object_name))
    log_message = '%s.%s.%s(*%s, **%s) from %s@%s:%d/%s'
    log_args = (object_type, object_name, method, args, kwargs,
        user, host, port, database_name)
    logger.info(log_message, *log_args)
    for count in range(config.getint('database', 'retry'), -1, -1):
        with Transaction().start(database_name, user,
                readonly=rpc.readonly) as transaction:
            Cache.clean(database_name)
            try:
                c_args, c_kwargs, transaction.context, transaction.timestamp \
                    = rpc.convert(obj, *args, **kwargs)
                meth = getattr(obj, method)
                if (rpc.instantiate is None
                        or not is_instance_method(obj, method)):
                    result = rpc.result(meth(*c_args, **c_kwargs))
                else:
                    # Instantiated RPC: first converted arg is the
                    # record (or records) to call the method on.
                    assert rpc.instantiate == 0
                    inst = c_args.pop(0)
                    if hasattr(inst, method):
                        result = rpc.result(meth(inst, *c_args, **c_kwargs))
                    else:
                        result = [rpc.result(meth(i, *c_args, **c_kwargs))
                            for i in inst]
                if not rpc.readonly:
                    transaction.cursor.commit()
            except DatabaseOperationalError:
                transaction.cursor.rollback()
                # Only retry writable calls (readonly ones never commit).
                if count and not rpc.readonly:
                    continue
                raise
            except (NotLogged, ConcurrencyException, UserError, UserWarning):
                logger.debug(log_message, *log_args, exc_info=True)
                transaction.cursor.rollback()
                raise
            except Exception:
                logger.error(log_message, *log_args, exc_info=True)
                transaction.cursor.rollback()
                raise
            Cache.resets(database_name)
        with Transaction().start(database_name, 0) as transaction:
            pool = Pool(database_name)
            Session = pool.get('ir.session')
            try:
                Session.reset(session)
            except DatabaseOperationalError:
                logger.debug('Reset session failed', exc_info=True)
                # Silently fail when reseting session
                transaction.cursor.rollback()
            else:
                transaction.cursor.commit()
        logger.debug('Result: %s', result)
        return result
def enable_debug_views(pool, update):
    """Monkey-patch ModelView.fields_view_get with a developer view.

    When the [debug] debug_views option is set, replaces
    ``fields_view_get`` so that a context with 'developper_view' yields
    an auto-generated form/tree exposing every field read-only.  Does
    nothing during a database update.
    """
    if update:
        return
    from trytond.config import config
    enabled = config.getboolean('debug', 'debug_views')
    if not enabled:
        return
    logging.getLogger().warning('Enabling debugging views')
    from trytond.model import ModelView, ModelSQL, fields
    from trytond.transaction import Transaction
    # Keep the original implementation to delegate to by default.
    previous_fields_view_get = ModelView.fields_view_get.__func__

    @classmethod
    def patched_fields_view_get(cls, view_id=None, view_type='form'):
        # Delegate unless the developer view is requested on a SQL model.
        if not Transaction().context.get('developper_view'):
            return previous_fields_view_get(cls, view_id, view_type)
        if not issubclass(cls, ModelSQL):
            return previous_fields_view_get(cls, view_id, view_type)
        # Specific feature in tryton fork
        expand_toolbar = bool(
            Pool().get('ir.module').search([('name', '=', 'coog_core')]))
        result = {
            'model': cls.__name__,
            'type': view_type,
            'field_childs': None,
            'view_id': 0,
            }
        xml = '<?xml version="1.0"?>'
        fnames = []
        if view_type == 'tree':
            xml += '<tree>'
            xml += '<field name="id"/>'
            xml += '<field name="rec_name" expand="1"/>'
            xml += '</tree>'
            fnames += ['rec_name', 'id']
        else:
            res = cls.fields_get()
            xml += '<form col="2">'
            for fname in sorted(res):
                # BUGFIX: was `in ('timestamp')` — a *string* membership
                # test that also skipped any type that is a substring of
                # 'timestamp' (e.g. 'time').  A one-element tuple only
                # skips timestamp fields, as intended.
                if res[fname]['type'] in ('timestamp',):
                    continue
                relation = res[fname].get('relation', None)
                if relation:
                    # Skip relations whose target is not a view model.
                    Target = Pool().get(relation)
                    if not issubclass(Target, ModelView):
                        continue
                if res[fname]['type'] in (
                        'one2many', 'many2many', 'text', 'dict'):
                    xml += '<separator name="%s" colspan="2"/>' % fname
                    xml += '<field name="%s" colspan="2"' % fname
                    if expand_toolbar:
                        # expand_toolbar is available
                        xml += ' height="200" expand_toolbar="0"/>'
                    else:
                        xml += ' height="200"/>'
                else:
                    xml += '<label name="%s"/><field name="%s"/>' % (
                        fname, fname)
                fnames.append(fname)
            xml += '</form>'
        result['arch'] = xml
        result['fields'] = cls.fields_get(fnames)
        # Every field is shown read-only with its name appended to the
        # label (and a [Function] marker where relevant).
        for fname in fnames:
            name = result['fields'][fname]['string'] + ' (%s)' % fname
            if issubclass(type(cls._fields[fname]), fields.Function):
                name += ' [Function]'
            result['fields'][fname].update({
                    'string': name,
                    'states': {'readonly': True},
                    'on_change': [],
                    'on_change_with': [],
                    })
        return result
    setattr(ModelView, 'fields_view_get', patched_fields_view_get)
# Number-of-lines validation: the field is shown and required only for
# that validation mode.
_NUMBER_STATES.update({
        'invisible': ~Eval('validation', '').in_(['number_of_lines']),
        'required': Eval('validation', '').in_(['number_of_lines']),
        })
_NUMBER_DEPENDS = _DEPENDS + ['validation']

# Workflow states of a statement.
STATES = [
    ('draft', 'Draft'),
    ('validated', 'Validated'),
    ('cancel', 'Canceled'),
    ('posted', 'Posted'),
    ]

# Origin files go to the filestore (with an optional prefix) only when
# the [account_statement] filestore option is enabled; otherwise they
# are stored in the database.
if config.getboolean('account_statement', 'filestore', default=False):
    file_id = 'origin_file_id'
    store_prefix = config.get(
        'account_statement', 'store_prefix', default=None)
else:
    file_id = None
    store_prefix = None


class Unequal(object):
    "Always different"

    # Compares unequal to everything, itself included.
    def __eq__(self, other):
        return False
def fields_view_get(cls, view_id=None, view_type='form'):
    '''
    Return a view definition.
    If view_id is None the first one will be used of view_type.
    The definition is a dictionary with keys:
        - model: the model name
        - type: the type of the view
        - view_id: the id of the view
        - arch: the xml description of the view
        - fields: a dictionary with the definition of each field in the view
        - field_childs: the name of the childs field for tree

    NOTE(review): this variant uses list-returning filter() and
    dict.keys().remove(), i.e. Python 2 semantics — confirm before
    running under Python 3.
    '''
    # Serve from the per-model cache when possible.
    key = (cls.__name__, view_id, view_type)
    result = cls._fields_view_get_cache.get(key)
    if result:
        return result
    result = {'model': cls.__name__}
    pool = Pool()
    View = pool.get('ir.ui.view')
    view = None
    inherit_view_id = None
    if view_id:
        view = View(view_id)
    else:
        # Pick the first root view (not an inheriting one) of the
        # requested type for this model.
        domain = [
            ('model', '=', cls.__name__),
            ['OR',
                ('inherit', '=', None),
                ('inherit.model', '!=', cls.__name__),
                ],
            ]
        views = View.search(domain)
        views = filter(lambda v: v.rng_type == view_type, views)
        if views:
            view = views[0]
    if view:
        if view.inherit:
            inherit_view_id = view.id
            view = view.inherit
        view_id = view.id
    # if a view was found
    if view:
        result['type'] = view.rng_type
        result['view_id'] = view_id
        result['arch'] = view.arch
        result['field_childs'] = view.field_childs
        # Check if view is not from an inherited model
        if view.model != cls.__name__:
            Inherit = pool.get(view.model)
            result['arch'] = Inherit.fields_view_get(
                result['view_id'])['arch']
            view_id = inherit_view_id
        # get all views which inherit from (ie modify) this view
        views = View.search([
                'OR', [
                    ('inherit', '=', view_id),
                    ('model', '=', cls.__name__),
                    ], [
                    ('id', '=', view_id),
                    ('inherit', '!=', None),
                    ],
                ])
        raise_p = False
        # Apply inheriting views in module order; refresh the module
        # list once if an unknown module shows up.
        while True:
            try:
                views.sort(key=lambda x:
                    cls._modules_list.index(x.module or None))
                break
            except ValueError:
                if raise_p:
                    raise
                # There is perhaps a new module in the directory
                ModelView._reset_modules_list()
                raise_p = True
        parser = etree.XMLParser(remove_comments=True)
        tree = etree.fromstring(result['arch'], parser=parser)
        for view in views:
            # Skip views whose PYSON domain evaluates falsy in the
            # current context, and empty architectures.
            if view.domain:
                if not PYSONDecoder({'context':
                            Transaction().context}).decode(view.domain):
                    continue
            if not view.arch or not view.arch.strip():
                continue
            tree_inherit = etree.fromstring(view.arch, parser=parser)
            tree = _inherit_apply(tree, tree_inherit)
        result['arch'] = etree.tostring(
            tree, encoding='utf-8').decode('utf-8')
    # otherwise, build some kind of default view
    else:
        if view_type == 'form':
            # Default form: label/field pairs for every plain field.
            res = cls.fields_get()
            xml = '''<?xml version="1.0"?>''' \
                '''<form col="4">'''
            for i in res:
                if i in ('create_uid', 'create_date',
                        'write_uid', 'write_date', 'id', 'rec_name'):
                    continue
                if res[i]['type'] not in ('one2many', 'many2many'):
                    xml += '<label name="%s"/>' % (i, )
                    xml += '<field name="%s"/>' % (i, )
                    if res[i]['type'] == 'text':
                        xml += "<newline/>"
                else:
                    xml += '<field name="%s" colspan="4"/>' % (i, )
            xml += "</form>"
        elif view_type == 'tree':
            # Default tree: a single column on the record name (or id).
            field = 'id'
            if cls._rec_name in cls._fields:
                field = cls._rec_name
            xml = '''<?xml version="1.0"?>''' \
                '''<tree><field name="%s"/></tree>''' \
                % (field,)
        else:
            xml = ''
        result['type'] = view_type
        result['arch'] = xml
        result['field_childs'] = None
        result['view_id'] = 0
    # Update arch and compute fields from arch
    parser = etree.XMLParser(remove_blank_text=True)
    tree = etree.fromstring(result['arch'], parser)
    xarch, xfields = cls._view_look_dom_arch(tree, result['type'],
        result['field_childs'])
    result['arch'] = xarch
    result['fields'] = xfields
    if result['field_childs']:
        # For tree views with children, collect field definitions of
        # each model reachable through the childs field chain.
        child_field = result['field_childs']
        result['children_definitions'] = defs = {}
        model = cls
        requisite_fields = result['fields'].keys()
        requisite_fields.remove(child_field)
        while model and model.__name__ not in defs:
            fields_to_get = [rfield for rfield in requisite_fields
                if hasattr(model, rfield)]
            defs[model.__name__] = model.fields_get(
                fields_to_get + [child_field])
            field = getattr(model, child_field, None)
            if field:
                model = pool.get(field.model_name)
            else:
                model = None
    else:
        result['children_definitions'] = {}
    if not config.getboolean(
            'cache', 'disable_fields_view_get_cache', default=False):
        cls._fields_view_get_cache.set(key, result)
    return result
from trytond.model import ModelView, ModelSQL, fields
from trytond.pyson import Eval
from trytond.transaction import Transaction

from .resource import ResourceMixin, resource_copy

__all__ = ['AttachmentCopyMixin']


def firstline(description):
    """Return the first non-blank line of *description*, or ''."""
    try:
        return next((x for x in description.splitlines() if x.strip()))
    except StopIteration:
        # Only blank lines (or empty string): no first line.
        return ''


# Attachment data goes to the filestore (with an optional prefix) unless
# the [attachment] filestore option is disabled; then it is stored in
# the database.
if config.getboolean('attachment', 'filestore', default=True):
    file_id = 'file_id'
    store_prefix = config.get('attachment', 'store_prefix', default=None)
else:
    file_id = None
    store_prefix = None


class Attachment(ResourceMixin, ModelSQL, ModelView):
    "Attachment"
    __name__ = 'ir.attachment'
    # Display name of the attachment.
    name = fields.Char('Name', required=True)
    # 'data' stores content; 'link' only references an external URL.
    type = fields.Selection([
            ('data', 'Data'),
            ('link', 'Link'),
            ], 'Type', required=True)
# Number-of-lines validation: shown and required only in that mode.
_NUMBER_STATES = _STATES.copy()
_NUMBER_STATES.update({
        'invisible': ~Eval('validation', '').in_(['number_of_lines']),
        'required': Eval('validation', '').in_(['number_of_lines']),
        })
_NUMBER_DEPENDS = _DEPENDS + ['validation']

# Workflow states of a statement.
STATES = [
    ('draft', 'Draft'),
    ('validated', 'Validated'),
    ('cancel', 'Canceled'),
    ('posted', 'Posted'),
    ]

# Origin files go to the filestore (with an optional prefix) only when
# the [account_statement] filestore option is enabled.
if config.getboolean('account_statement', 'filestore', default=False):
    file_id = 'origin_file_id'
    store_prefix = config.get(
        'account_statement', 'store_prefix', default=None)
else:
    file_id = None
    store_prefix = None


class Unequal(object):
    "Always different"

    # Compares unequal to everything, itself included.
    def __eq__(self, other):
        return False

    # NOTE(review): definition continues beyond this chunk.
    def __ne__(self, other):
def dispatch(host, port, protocol, database_name, user, session, object_type,
        object_name, method, *args, **kwargs):
    """Dispatch an RPC call (extended language-list variant).

    Handles the 'common' service methods (login/logout/db admin), the
    'system' introspection methods, then the generic model/wizard/report
    call path with session check, retry on operational errors and session
    timestamp reset.
    """
    Database = backend.get('Database')
    DatabaseOperationalError = backend.get('DatabaseOperationalError')
    if object_type == 'common':
        if method == 'login':
            # Probe the database connection before attempting the login.
            try:
                database = Database(database_name).connect()
                cursor = database.cursor()
                cursor.close()
            except Exception:
                return False
            res = security.login(database_name, user, session)
            with Transaction().start(database_name, 0):
                Cache.clean(database_name)
                Cache.resets(database_name)
            msg = res and 'successful login' or 'bad login or password'
            logger.info('%s \'%s\' from %s:%d using %s on database \'%s\'',
                msg, user, host, port, protocol, database_name)
            return res or False
        elif method == 'logout':
            name = security.logout(database_name, user, session)
            logger.info('logout \'%s\' from %s:%d '
                'using %s on database \'%s\'',
                name, host, port, protocol, database_name)
            return True
        elif method == 'version':
            return __version__
        elif method == 'list_lang':
            # Static list of translatable client languages.
            return [
                ('bg_BG', 'Български'),
                ('ca_ES', 'Català'),
                ('cs_CZ', 'Čeština'),
                ('de_DE', 'Deutsch'),
                ('en_US', 'English'),
                ('es_AR', 'Español (Argentina)'),
                ('es_EC', 'Español (Ecuador)'),
                ('es_ES', 'Español (España)'),
                ('es_CO', 'Español (Colombia)'),
                ('es_MX', 'Español (México)'),
                ('fr_FR', 'Français'),
                ('hu_HU', 'Magyar'),
                ('it_IT', 'Italiano'),
                ('lt_LT', 'Lietuvių'),
                ('nl_NL', 'Nederlands'),
                ('pt_BR', 'Português (Brasil)'),
                ('ru_RU', 'Russian'),
                ('sl_SI', 'Slovenščina'),
                ]
        elif method == 'db_exist':
            try:
                database = Database(*args, **kwargs).connect()
                cursor = database.cursor()
                cursor.close(close=True)
                return True
            except Exception:
                return False
        elif method == 'list':
            if not config.getboolean('database', 'list'):
                raise Exception('AccessDenied')
            with Transaction().start(None, 0, close=True) as transaction:
                return transaction.database.list(transaction.cursor)
        elif method == 'create':
            return create(*args, **kwargs)
        elif method == 'restore':
            return restore(*args, **kwargs)
        elif method == 'drop':
            return drop(*args, **kwargs)
        elif method == 'dump':
            return dump(*args, **kwargs)
        return
    elif object_type == 'system':
        database = Database(database_name).connect()
        database_list = Pool.database_list()
        pool = Pool(database_name)
        if database_name not in database_list:
            pool.init()
        if method == 'listMethods':
            # Enumerate every exposed RPC method of every registered object.
            res = []
            for type in ('model', 'wizard', 'report'):
                for object_name, obj in pool.iterobject(type=type):
                    for method in obj.__rpc__:
                        res.append(type + '.' + object_name + '.' + method)
            return res
        elif method == 'methodSignature':
            return 'signatures not supported'
        elif method == 'methodHelp':
            res = []
            args_list = args[0].split('.')
            object_type = args_list[0]
            object_name = '.'.join(args_list[1:-1])
            method = args_list[-1]
            obj = pool.get(object_name, type=object_type)
            return pydoc.getdoc(getattr(obj, method))
    # Generic RPC path: validate the session, retrying on transient
    # database errors.
    for count in range(config.getint('database', 'retry'), -1, -1):
        try:
            user = security.check(database_name, user, session)
        except DatabaseOperationalError:
            if count:
                continue
            raise
        break
    database_list = Pool.database_list()
    pool = Pool(database_name)
    if database_name not in database_list:
        with Transaction().start(database_name, user,
                readonly=True) as transaction:
            pool.init()
    obj = pool.get(object_name, type=object_type)
    # Only methods explicitly declared in __rpc__ may be called remotely.
    if method in obj.__rpc__:
        rpc = obj.__rpc__[method]
    else:
        raise UserError('Calling method %s on %s %s is not allowed!'
            % (method, object_type, object_name))
    log_message = '%s.%s.%s(*%s, **%s) from %s@%s:%d/%s'
    log_args = (object_type, object_name, method, args, kwargs,
        user, host, port, database_name)
    logger.info(log_message, *log_args)
    for count in range(config.getint('database', 'retry'), -1, -1):
        with Transaction().start(database_name, user,
                readonly=rpc.readonly) as transaction:
            Cache.clean(database_name)
            try:
                c_args, c_kwargs, transaction.context, transaction.timestamp \
                    = rpc.convert(obj, *args, **kwargs)
                meth = getattr(obj, method)
                if (rpc.instantiate is None
                        or not is_instance_method(obj, method)):
                    result = rpc.result(meth(*c_args, **c_kwargs))
                else:
                    # Instantiated RPC: first converted arg is the
                    # record (or records) to call the method on.
                    assert rpc.instantiate == 0
                    inst = c_args.pop(0)
                    if hasattr(inst, method):
                        result = rpc.result(meth(inst, *c_args, **c_kwargs))
                    else:
                        result = [rpc.result(meth(i, *c_args, **c_kwargs))
                            for i in inst]
                if not rpc.readonly:
                    transaction.cursor.commit()
            except DatabaseOperationalError:
                transaction.cursor.rollback()
                # Only retry writable calls (readonly ones never commit).
                if count and not rpc.readonly:
                    continue
                raise
            except (NotLogged, ConcurrencyException, UserError, UserWarning):
                logger.debug(log_message, *log_args, exc_info=True)
                transaction.cursor.rollback()
                raise
            except Exception:
                logger.error(log_message, *log_args, exc_info=True)
                transaction.cursor.rollback()
                raise
            Cache.resets(database_name)
        with Transaction().start(database_name, 0) as transaction:
            pool = Pool(database_name)
            Session = pool.get('ir.session')
            try:
                Session.reset(session)
            except DatabaseOperationalError:
                logger.debug('Reset session failed', exc_info=True)
                # Silently fail when reseting session
                transaction.cursor.rollback()
            else:
                transaction.cursor.commit()
        logger.debug('Result: %s', result)
        return result
from trytond.perf_analyzer import PerfLog, profile
from trytond.perf_analyzer import logger as perf_logger
from trytond.error_handling import error_wrap
from trytond.worker import run_task

from .wrappers import with_pool

logger = logging.getLogger(__name__)

# JCA: log slow RPC (> log_time_threshold)
# A negative threshold (the default) disables slow-call logging.
slow_threshold = config.getfloat('web', 'log_time_threshold', default=-1)
if slow_threshold >= 0:
    slow_logger = logging.getLogger('trytond.rpc.performance')

# JCA: Format json logs
format_json_parameters = config.getboolean(
    'web', 'format_parameters_logs', default=False)
format_json_result = config.getboolean(
    'web', 'format_result_logs', default=False)
if format_json_parameters or format_json_result:
    # These imports are only needed by the JSON log formatter below.
    import datetime
    import base64
    import json
    from decimal import Decimal


class DEBUGEncoder(json.JSONEncoder):
    # Type -> serializer mapping, populated elsewhere.
    serializers = {}

    # NOTE(review): definition continues beyond this chunk.
    @classmethod
# Pillow is optional: avatar generation degrades gracefully without it.
try:
    import PIL
    from PIL import Image, ImageDraw, ImageFont
except ImportError:
    PIL = None

from trytond.config import config
from trytond.model import ModelSQL, fields, Unique
from trytond.pool import Pool
from trytond.transaction import Transaction
from trytond.wsgi import Base64Converter

from .resource import ResourceMixin

# Avatar images go to the filestore (with an optional prefix) only when
# the [database] avatar_filestore option is enabled.
if config.getboolean('database', 'avatar_filestore', default=False):
    file_id = 'image_id'
    store_prefix = config.get('database', 'avatar_prefix', default=None)
else:
    file_id = None
    store_prefix = None

# Base URL prepended to generated avatar URLs.
URL_BASE = config.get('web', 'avatar_base', default='')
# Font used to draw initials on generated avatars.
FONT = os.path.join(os.path.dirname(__file__), 'fonts', 'karla.ttf')


class ImageMixin:
    __slots__ = ()
    # Binary storage follows the filestore configuration above.
    image = fields.Binary(
        "Image", file_id=file_id, store_prefix=store_prefix)
    image_id = fields.Char("Image ID", readonly=True)
from werkzeug.exceptions import NotImplemented, BadRequest

from trytond import backend
from trytond.wsgi import app
from trytond.transaction import Transaction
from trytond.protocols.jsonrpc import JSONEncoder, JSONDecoder
from trytond.config import config
from trytond.tools import resolve

logger = logging.getLogger(__name__)

# Bus timeouts and options, all read once at import time.
_db_timeout = config.getint('database', 'timeout')
_cache_timeout = config.getint('bus', 'cache_timeout')
_select_timeout = config.getint('bus', 'select_timeout')
_long_polling_timeout = config.getint('bus', 'long_polling_timeout')
_allow_subscribe = config.getboolean('bus', 'allow_subscribe')
_url_host = config.get('bus', 'url_host')
_web_cache_timeout = config.getint('web', 'cache_timeout')


class _MessageQueue:
    # In-memory, time-bounded queue of bus messages.
    Message = collections.namedtuple('Message', 'channel content timestamp')

    def __init__(self, timeout):
        super().__init__()
        # Guards _messages against concurrent append/read.
        self._lock = threading.Lock()
        self._timeout = timeout
        self._messages = []

    # NOTE(review): definition continues beyond this chunk.
    def append(self, channel, element):
# This file is part of Tryton. The COPYRIGHT file at the top level of # this repository contains the full copyright notices and license terms. import datetime from sql import With, Literal, Null from sql.aggregate import Min from sql.functions import CurrentTimestamp, Extract from trytond.config import config from trytond.model import ModelSQL, fields from trytond.pool import Pool from trytond.tools import grouped_slice from trytond.transaction import Transaction has_worker = config.getboolean('queue', 'worker', default=False) class Queue(ModelSQL): "Queue" __name__ = 'ir.queue' name = fields.Char("Name", required=True) data = fields.Dict(None, "Data") enqueued_at = fields.Timestamp("Enqueued at", required=True) dequeued_at = fields.Timestamp("Dequeued at") finished_at = fields.Timestamp("Finished at") scheduled_at = fields.Timestamp("Scheduled at", help="When the task can start.") expected_at = fields.Timestamp("Expected at",
def config_session_exclusive():
    """Return the [session] exclusive configuration flag (default True)."""
    exclusive = config.getboolean('session', 'exclusive', default=True)
    return exclusive
def config_session_audit():
    """Return the [session] audit configuration flag (default True)."""
    audit = config.getboolean('session', 'audit', default=True)
    return audit
def visit_NameConstant(self, node):
    """Emit Python source for a NameConstant AST node (None/True/False).

    Backport for Genshi's ASTCodeGenerator, which predates the
    ast.NameConstant node type.
    """
    if node.value is None:
        self._write('None')
    elif node.value is True:
        self._write('True')
    elif node.value is False:
        self._write('False')
    else:
        raise Exception("Unknown NameConstant %r" % (node.value, ))


# Patch Genshi's code generator/transformer to handle NameConstant.
ASTCodeGenerator.visit_NameConstant = visit_NameConstant

if not hasattr(ASTTransformer, 'visit_NameConstant'):
    # Re-use visit_Name because _clone is deleted
    ASTTransformer.visit_NameConstant = ASTTransformer.visit_Name

# SEPA message files go to the filestore (with an optional prefix) only
# when the [account_payment_sepa] filestore option is enabled.
if config.getboolean('account_payment_sepa', 'filestore', default=False):
    file_id = 'message_file_id'
    store_prefix = config.get(
        'account_payment_sepa', 'store_prefix', default=None)
else:
    file_id = None
    store_prefix = None

# Selectable kinds of SEPA initiator identifiers.
INITIATOR_IDS = [
    (None, ''),
    ('sepa', "SEPA Creditor Identifier"),
    ('be_vat', "Belgian Enterprise Number"),
    ('es_nif', "Spanish VAT Number"),
    ]
def db_list(*args):
    "Return the list of database names, or deny if listing is disabled."
    # Listing can be switched off entirely through the configuration.
    if not config.getboolean('database', 'list'):
        raise Exception('AccessDenied')
    # Root transaction without a database, closed right away; _nocache
    # presumably bypasses the transaction cache — TODO confirm.
    with Transaction().start(
            None, 0, close=True, _nocache=True) as trans:
        return trans.database.list()
def db_list(*args):
    "Return the list of database names, or deny if listing is disabled."
    # Listing can be switched off entirely through the configuration.
    if not config.getboolean('database', 'list'):
        raise Exception('AccessDenied')
    # Root transaction without a database, closed right away.
    with Transaction().start(None, 0, close=True) as trans:
        return trans.database.list()