def __new__(cls, cr, uid, context, su=False):
    """Return the environment for ``(cr, uid, context, su)``.

    Environments are memoized per transaction: if one already exists in
    the cursor's transaction with exactly the same arguments, it is
    returned instead of creating a new object.

    :param cr: database cursor carrying the transaction
    :param uid: id of the user the environment runs as
    :param context: context dictionary (must not be ``None``)
    :param su: superuser mode flag; forced to True for SUPERUSER_ID
    """
    if uid == SUPERUSER_ID:
        su = True
    assert context is not None
    args = (cr, uid, context, su)

    # determine transaction object
    transaction = cr.transaction
    if transaction is None:
        # first environment on this cursor: create its transaction,
        # bound to the registry of the cursor's database
        transaction = cr.transaction = Transaction(Registry(cr.dbname))

    # if env already exists, return it
    for env in transaction.envs:
        if env.args == args:
            return env

    # otherwise create environment, and add it in the set
    self = object.__new__(cls)
    # freeze the context so the memo key (self.args) stays immutable
    args = (cr, uid, frozendict(context), su)
    self.cr, self.uid, self.context, self.su = self.args = args
    # ``all`` is kept as an alias of the transaction
    self.transaction = self.all = transaction
    self.registry = transaction.registry
    self.cache = transaction.cache
    self._cache_key = {}                    # memo {field: cache_key}
    self._protected = transaction.protected  # shared with the transaction
    transaction.envs.add(self)
    return self
def export(ctx, language, db_name, module, fix):
    """CLI command: export the ``language`` translations of the given
    modules from database ``db_name`` as a .po file written to stdout.

    :param ctx: CLI context; ``ctx.obj['config']['init']`` may be set when
        ``fix`` is True and requested modules are not installed
    :param language: language code to export
    :param db_name: database to export from
    :param module: iterable of module names; falsy means export 'all'
    :param fix: when True, attempt repairs (load translations for an
        inactive language, schedule module install) instead of raising
    :raises ValueError: unknown language, inactive language without
        ``fix``, or missing modules without ``fix``
    """
    modules = module or ['all']

    from odoo.modules.registry import Registry
    from odooku.api import environment
    from odoo.tools import trans_export

    with tempfile.TemporaryFile() as t:
        # Perform checks (and possible fixes)
        registry = Registry(db_name)
        with registry.cursor() as cr:
            with environment(cr) as env:
                # active_test=False so inactive languages can be found too
                lang = env['res.lang'].with_context(
                    dict(active_test=False)).search([('code', '=', language)])
                if not lang:
                    raise ValueError("Language %s does not exist" % language)
                if not lang[0].active:
                    if not fix:
                        raise ValueError("Language %s is not activated" % language)
                    else:
                        # NOTE(review): updating translations for all
                        # installed modules presumably activates/loads the
                        # language — TODO confirm against odoo version used
                        installed = env['ir.module.module'].search([
                            ('state', '=', 'installed')
                        ])
                        installed._update_translations(language)
                if module:
                    installed = env['ir.module.module'].search([
                        ('name', 'in', module),
                        ('state', '=', 'installed')
                    ])
                    missing = set(module) - set(
                        [mod.name for mod in installed])
                    if missing:
                        if not fix:
                            raise ValueError("Modules '%s' are not installed" % ", ".join(missing))
                        else:
                            # Request installation of the modules; picked up
                            # by the registry reload below (update_module=fix)
                            ctx.obj['config']['init'] = {
                                module_name: 1
                                for module_name in module
                            }

        # Export (reload the registry so fixes above take effect)
        registry = Registry.new(db_name, update_module=fix)
        with registry.cursor() as cr:
            with environment(cr) as env:
                trans_export(language, modules, t, 'po', cr)

        t.seek(0)
        # Pipe to stdout
        while True:
            chunk = t.read(CHUNK_SIZE)
            if not chunk:
                break
            sys.stdout.buffer.write(chunk)
def newdbuuid(ctx, db_name):
    """Re-initialise ``ir.config_parameter`` on ``db_name`` with force=True."""
    from odoo.modules.registry import Registry
    from odooku.api import environment

    config = ctx.obj['config']
    with Registry(db_name).cursor() as cr:
        with environment(cr) as env:
            env['ir.config_parameter'].init(force=True)
def reset(self):
    """Reset the transaction.

    Builds a fresh registry for the same database, reassigns it (and
    resets lazy properties) on every environment of this transaction,
    then clears the transaction. Strongly recommended after reloading
    the registry.
    """
    fresh_registry = Registry(self.registry.db_name)
    self.registry = fresh_registry
    for env in self.envs:
        env.registry = fresh_registry
        lazy_property.reset_all(env)
    self.clear()
def create_user(self, login_name, help_id, db_name):
    """Create a wechat-mini user on database ``db_name``.

    Fix: the cursor is now released in a ``finally`` block, so it is no
    longer leaked when ``create_wechat_mini_user`` (or commit) raises.

    :returns: JSON string ``{"is_success": true, "user_id": <id>}``
    """
    result = {'is_success': True}
    registry = Registry(db_name)
    cr = registry.cursor()
    try:
        env = api.Environment(cr, SUPERUSER_ID, {})
        user_id = env['res.users'].create_wechat_mini_user(login_name, help_id)
        result['user_id'] = user_id
        cr.commit()
    finally:
        # Always release the cursor, even when user creation fails.
        cr.close()
    return json.dumps(result)
def get_openid(self, code, db_name):
    """Exchange a wechat login ``code`` for a session token.

    Fix: the cursor is now released in a ``finally`` block, so it is no
    longer leaked when ``get_token`` (or commit) raises.

    :returns: JSON string with ``token`` and ``is_get_token`` flags
    """
    result = {'is_success': True}
    cr = Registry(db_name).cursor()
    try:
        env = api.Environment(cr, SUPERUSER_ID, {})
        is_success, token = env['wechat.mini.program.session'].get_token(code)
        result['token'] = token
        result['is_get_token'] = is_success
        cr.commit()
    finally:
        # Always release the cursor, even when the token exchange fails.
        cr.close()
    return json.dumps(result)
def setUp(self):
    """Open a second registry/cursor/environment on the same database."""
    super(TestAdvisoryLock, self).setUp()
    self.registry2 = Registry(common.get_db_name())
    self.cr2 = self.registry2.cursor()
    self.env2 = api.Environment(self.cr2, self.env.uid, {})

    def _teardown_second_env():
        # reset the environments, then roll back and close the cursor
        self.env2.reset()
        self.cr2.rollback()
        self.cr2.close()

    self.addCleanup(_teardown_second_env)
def cron_folder_auto_classification(self, path=False, processing_path=False,
                                    limit=False):
    """Cron entry point: process every file found directly in ``path``.

    :param path: source folder; defaults to the ir.config_parameter
        ``document_quick_access_auto_classification.path``
    :param processing_path: staging folder; when set, each file is copied
        there and processed via a queued job on a dedicated cursor
    :param limit: optional cap on the number of files handled per run
    :returns: False when no source path is configured, True otherwise
    """
    if not path:
        path = (self.env["ir.config_parameter"].sudo().get_param(
            "document_quick_access_auto_classification.path", default=False))
    if not path:
        # nothing configured: nothing to do
        return False
    if not processing_path and not self.env.context.get(
            "ignore_process_path"):
        processing_path = (
            self.env["ir.config_parameter"].sudo().get_param(
                "document_quick_access_auto_classification.process_path",
                default=False,
            ))
    # only plain files directly under ``path`` (no recursion)
    elements = [
        os.path.join(path, f) for f in os.listdir(path)
        if os.path.isfile(os.path.join(path, f))
    ]
    if limit:
        elements = elements[:limit]
    for element in elements:
        obj = self
        new_element = element
        if processing_path:
            # dedicated cursor so each file is committed independently
            new_cr = Registry(self.env.cr.dbname).cursor()
        try:
            if processing_path:
                # copy to the staging folder and enqueue a delayed job
                # bound to the new cursor
                new_element = os.path.join(processing_path,
                                           os.path.basename(element))
                shutil.copy(element, new_element)
                obj = (api.Environment(
                    new_cr, self.env.uid,
                    self.env.context)[self._name].browse().with_delay(
                        **self._delay_vals()))
            obj._process_document(new_element)
            if processing_path:
                new_cr.commit()
        except Exception:
            if processing_path:
                # undo the copy and the DB work for this file, then re-raise
                os.unlink(new_element)
                new_cr.rollback()
            raise
        finally:
            if processing_path:
                new_cr.close()
        if processing_path:
            # the staged copy was processed; drop the original
            os.unlink(element)
    return True
def setUp(self):
    """Open a second environment and bind a second infor.backend to it."""
    super().setUp()
    self.registry2 = Registry(common.get_db_name())
    self.cr2 = self.registry2.cursor()
    self.env2 = api.Environment(self.cr2, self.env.uid, {})

    def _teardown_second_env():
        # reset the environments, then roll back and close the cursor
        self.env2.reset()
        self.cr2.rollback()
        self.cr2.close()

    self.addCleanup(_teardown_second_env)
    self.backend2 = self.env2['infor.backend'].browse(self.backend.id)
def authenticate(token):
    """Decode a login token and return an Environment for its user.

    Token layout (urlsafe base64, padding possibly stripped):
    ``SERVER,db,login,uid,timestamp``.

    Fixes:
    * padding restoration: the old ``a = 4 - len(token) % 4`` was never 0,
      so an already-valid token got a spurious ``'='`` appended, and a
      token needing 3 pad chars got only one; ``-len % 4`` is correct
    * ``bytes.split(',')`` with a str separator raises TypeError on
      Python 3; decode to str before splitting

    :returns: an ``api.Environment`` on success, ``False`` when the token
        is older than 10 weeks, or the error message string on failure
        (historical contract preserved)
    """
    try:
        # restore stripped base64 padding (0-3 '=' characters)
        token += '=' * (-len(token) % 4)
        SERVER, db, login, uid, ts = base64.urlsafe_b64decode(
            token).decode().split(',')
        # tokens expire 10 weeks after their embedded timestamp
        if int(ts) + 60 * 60 * 24 * 7 * 10 < time.time():
            return False
        registry = Registry(db)
        cr = registry.cursor()
        env = api.Environment(cr, int(uid), {})
    except Exception as e:
        # historical contract: report failures as the message string
        return str(e)
    return env
def cron_move_documents(self, limit=False, path=False):
    """Cron entry point: move scanner files from the preprocess folder to
    the destination folder and process each one.

    :param limit: optional cap on the number of files handled per run
    :param path: source folder; defaults to the ir.config_parameter
        ``hash_search_document_scanner_queue.preprocess_path``
    :returns: False when either path is unconfigured, True otherwise
    """
    if not path:
        path = (self.env["ir.config_parameter"].sudo().get_param(
            "hash_search_document_scanner_queue.preprocess_path",
            default=False,
        ))
    dest_path = (self.env["ir.config_parameter"].sudo().get_param(
        "hash_search_document_scanner.path", default=False))
    if not path or not dest_path:
        return False
    # only plain files directly under ``path`` (no recursion)
    elements = [
        os.path.join(path, f) for f in os.listdir(path)
        if os.path.isfile(os.path.join(path, f))
    ]
    if limit:
        elements = elements[:limit]
    # skip files modified in the last 60s (may still be written to)
    min_time = int(time.time()) - 60
    single_commit = self.env.context.get("scanner_single_commit", False)
    for element in elements:
        if os.path.getmtime(
                element) > min_time and not self.env.context.get(
                "scanner_ignore_time", False):
            continue
        filename = os.path.basename(element)
        new_element = os.path.join(dest_path, filename)
        shutil.copy(element, new_element)
        if not single_commit:
            # dedicated cursor so each file commits independently
            new_cr = Registry(self.env.cr.dbname).cursor()
        try:
            if not single_commit:
                # enqueue a delayed job bound to the new cursor
                obj = (api.Environment(
                    new_cr, self.env.uid,
                    self.env.context)[self._name].browse().with_delay())
            else:
                # process synchronously in the caller's transaction
                obj = self.env[self._name].browse()
            obj.process_document(new_element)
            if not single_commit:
                new_cr.commit()
        except Exception:
            # undo the copy; with a dedicated cursor also roll back the DB
            os.unlink(new_element)
            if not single_commit:
                new_cr.rollback()  # error, rollback everything atomically
            raise
        finally:
            if not single_commit:
                new_cr.close()
        # the copy was processed; drop the original from the source folder
        os.unlink(element)
    return True
def _next(self):
    """Advance the sequence.

    Unless running in tests, install mode, or with the safe behaviour
    disabled, the increment is performed on a dedicated cursor and
    committed immediately, so the consumed number survives a rollback of
    the caller's transaction.
    """
    if (getattr(threading.currentThread(), "testing", False)
            or self.env.context.get("install_mode")
            or self.env.context.get("ignore_safe", not self.safe)):
        # plain behaviour inside the current transaction
        return super()._next()
    new_cr = Registry(self.env.cr.dbname).cursor()
    try:
        # re-browse this record on the new cursor and recurse with
        # ignore_safe=True so the call above takes the plain branch
        env = api.Environment(new_cr, self.env.uid, self.env.context)
        res = env[self._name].browse(self.id)
        result = res.with_context(ignore_safe=True)._next()
        new_cr.commit()
    except Exception:
        new_cr.rollback()  # error, rollback everything atomically
        raise
    finally:
        new_cr.close()
    return result
def setUp(self):
    """Create two mock backends, each bound to its own environment."""
    super().setUp()
    self.backend = mock.MagicMock(name="backend")
    self.backend.env = self.env
    self.registry2 = Registry(common.get_db_name())
    self.cr2 = self.registry2.cursor()
    self.env2 = api.Environment(self.cr2, self.env.uid, {})
    self.backend2 = mock.MagicMock(name="backend2")
    self.backend2.env = self.env2

    def _teardown_second_env():
        # reset the environments, then roll back and close the cursor
        self.env2.reset()
        self.cr2.rollback()
        self.cr2.close()

    self.addCleanup(_teardown_second_env)
def update(ctx, db_name, module, language, overwrite):
    """Update ``language`` translations on every database in ``db_name``.

    When ``module`` is given only those modules are updated, otherwise
    all installed modules are.
    """
    from odoo.modules.registry import Registry
    from odooku.api import environment

    context = {'overwrite': overwrite}
    if module:
        domain = [('name', 'in', module)]
    else:
        domain = [('state', '=', 'installed')]
    for db in db_name:
        with Registry(db).cursor() as cr:
            with environment(cr) as env:
                matched = env['ir.module.module'].search(domain)
                matched.with_context(
                    overwrite=overwrite)._update_translations(language)
def shell(ctx, input_file, db_name):
    """Open a bpython shell bound to ``db_name`` with ``env``/``self`` set."""
    from odoo.modules.registry import Registry
    from odooku.api import environment

    registry = Registry(db_name)
    with registry.cursor() as cr:
        with environment(cr) as env:
            shell_locals = {
                'env': env,
                'self': env.user,
            }
            shell_args = [] if input_file is None else [input_file]
            bpython.embed(shell_locals, args=shell_args, banner='Odooku shell')
def cenit_post(self, action, root=None):
    """HTTP endpoint: receive JSON data pushed by Cenit and feed it into
    the matching ``cenit.flow``.

    Authentication uses the ``X-User-Access-Key``/``X-User-Access-Token``
    headers matched against a ``cenit.connection`` record; the tenant
    database comes from the ``Tenant-Db`` header or is derived from the
    Host header (dots replaced by underscores, port stripped).

    :returns: dict with an HTTP-like ``status``: 200 on at least one
        accepted payload, 404 when credentials match no connection,
        400 otherwise
    """
    status_code = 400
    environ = request.httprequest.headers.environ.copy()

    key = environ.get('HTTP_X_USER_ACCESS_KEY', False)
    token = environ.get('HTTP_X_USER_ACCESS_TOKEN', False)
    db_name = environ.get('HTTP_TENANT_DB', False)
    if not db_name:
        # fall back to the host name, e.g. "my.host:8069" -> "my_host"
        host = environ.get('HTTP_HOST', "")
        db_name = host.replace(".", "_").split(":")[0]

    registry = Registry(db_name)
    with registry.cursor() as cr:
        # old-style (pre-new-API) model access: registry[model] + (cr, uid)
        connection_model = registry['cenit.connection']
        domain = [('key', '=', key), ('token', '=', token)]
        _logger.info(
            "Searching for a 'cenit.connection' with key '%s' and "
            "matching token", key)
        rc = connection_model.search(cr, SUPERUSER_ID, domain)
        _logger.info("Candidate connections: %s", rc)
        if rc:
            p = inflect.engine()
            flow_model = registry['cenit.flow']
            context = {'sender': 'client', 'action': action}
            if root is None:
                # no explicit root: treat each top-level key of the JSON
                # body as a root (singularized when possible)
                for root, data in request.jsonrequest.items():
                    root = p.singular_noun(root) or root
                    rc = flow_model.receive(cr, SUPERUSER_ID, root, data,
                                            context)
                    if rc:
                        status_code = 200
            else:
                root = p.singular_noun(root) or root
                rc = flow_model.receive(cr, SUPERUSER_ID, root,
                                        request.jsonrequest, context)
                if rc:
                    status_code = 200
        else:
            status_code = 404

    return {'status': status_code}
def __new__(cls, cr, uid, context):
    """Return the environment for ``(cr, uid, context)``.

    Environments are memoized in the class-level set ``cls.envs``: if one
    already exists with exactly the same arguments it is returned instead
    of creating a new object.

    :param cr: database cursor
    :param uid: id of the user the environment runs as
    :param context: context dictionary (must not be ``None``)
    """
    assert context is not None
    args = (cr, uid, context)

    # if env already exists, return it
    env, envs = None, cls.envs
    for env in envs:
        if env.args == args:
            return env

    # otherwise create environment, and add it in the set
    self = object.__new__(cls)
    # freeze the context so the memo key (self.args) stays immutable
    self.cr, self.uid, self.context = self.args = (cr, uid,
                                                   frozendict(context))
    self.registry = Registry(cr.dbname)
    self.cache = envs.cache
    self._protected = StackMap()                # {field: ids, ...}
    self.dirty = defaultdict(set)               # {record: set(field_name), ...}
    self.all = envs
    envs.add(self)
    return self
def execute(conf_attrs, dbname, uid, obj, method, *args, **kwargs):
    """Worker-task entry point: call ``method`` on model ``obj`` as user
    ``uid`` against database ``dbname`` in a fresh environment.

    :param conf_attrs: odoo config overrides applied before connecting
    :param dbname: database name
    :param uid: user id to run as
    :param obj: model name, e.g. ``'res.partner'``
    :param method: name of the model method to invoke
    :param args: positional args; the FIRST one must be a list of record
        ids (empty/falsy means call on the empty recordset)
    :param kwargs: keyword args; an optional ``context`` key is popped and
        used as the environment context
    :returns: True on success; re-raises the original exception on failure
    """
    _logger.info(str([dbname, uid, obj, method, args, kwargs]))
    if conf_attrs and len(conf_attrs.keys()) > 1:
        for attr, value in conf_attrs.items():
            odoo.tools.config[attr] = value
    with Environment.manage():
        registry = Registry(dbname)
        cr = registry.cursor()
        context = 'context' in kwargs and kwargs.pop('context') or {}
        env = Environment(cr, uid, context)
        # autocommit: every statement is committed as it happens
        cr.autocommit(True)
        # odoo.api.Environment._local.environments = env
        try:
            Model = env[obj]
            args = list(args)
            _logger.info('>>> %s' % str(args))
            # first positional arg is the list of target record ids
            ids = args.pop(0)
            if ids:
                target = Model.search([('id', 'in', ids)])
            else:
                target = Model
            # unbound call through the registry class, passing the
            # recordset explicitly as the first argument
            getattr(env.registry[obj], method)(target, *args, **kwargs)
            # Commit only when function finish
            # env.cr.commit()
        except Exception as exc:
            env.cr.rollback()
            import traceback
            traceback.print_exc()
            raise exc
            #try:
            #    raise execute.retry(
            #        queue=execute.request.delivery_info['routing_key'],
            #        exc=exc, countdown=(execute.request.retries + 1) * 60,
            #        max_retries=5)
            #except Exception as retry_exc:
            #    raise retry_exc
        finally:
            env.cr.close()
    return True
def __new__(cls, cr, uid, context, su=False):
    """Return the environment for ``(cr, uid, context, su)``.

    Environments are memoized in the class-level set ``cls.envs``: if one
    already exists with exactly the same arguments it is returned instead
    of creating a new object.

    :param cr: database cursor
    :param uid: id of the user the environment runs as
    :param context: context dictionary (must not be ``None``)
    :param su: superuser mode flag; forced to True for SUPERUSER_ID
    """
    if uid == SUPERUSER_ID:
        su = True
    assert context is not None
    args = (cr, uid, context, su)

    # if env already exists, return it
    env, envs = None, cls.envs
    for env in envs:
        if env.args == args:
            return env

    # otherwise create environment, and add it in the set
    self = object.__new__(cls)
    # freeze the context so the memo key (self.args) stays immutable
    args = (cr, uid, frozendict(context), su)
    self.cr, self.uid, self.context, self.su = self.args = args
    self.registry = Registry(cr.dbname)
    self.cache = envs.cache
    self._protected = envs.protected  # proxy to shared data structure
    self.all = envs
    envs.add(self)
    return self
def import_(ctx, language, db_name, overwrite):
    """CLI command: import a .po file from stdin into ``db_name`` for
    ``language``.

    stdin is spooled to a named temporary file (``delete=False`` because
    it must be re-opened by name by ``trans_load`` after being closed),
    then removed manually once loaded.

    :param overwrite: passed via context to overwrite existing terms
    """
    context = {'overwrite': overwrite}

    from odoo.modules.registry import Registry
    from odooku.api import environment
    from odoo.tools import trans_load

    with tempfile.NamedTemporaryFile(suffix='.po', delete=False) as t:
        registry = Registry(db_name)
        # Read from stdin
        while True:
            chunk = sys.stdin.buffer.read(CHUNK_SIZE)
            if not chunk:
                break
            t.write(chunk)
        # close so trans_load can re-open the file by name
        t.close()
        with registry.cursor() as cr:
            with environment(cr) as env:
                trans_load(cr, t.name, language, context=context)
        # delete=False above, so remove the spool file ourselves
        os.unlink(t.name)
def process(self): self.ensure_one() # Decide when/how to send lroe_operation queue_obj = self.env["queue.job"].sudo() company = self.company_id if not company.use_connector: try: lroe_response = self.send() if lroe_response.state == LROEOperationResponseState.CORRECT.value: self.mark_as_recorded() elif (lroe_response.state == LROEOperationResponseState.PARTIALLY_CORRECT.value): self.mark_as_warning() elif lroe_response.state in ( LROEOperationResponseState.BUILD_ERROR.value, LROEOperationResponseState.INCORRECT.value, ): self.mark_as_error() except Exception: new_cr = Registry(self.env.cr.dbname).cursor() env = api.Environment(new_cr, self.env.uid, self.env.context) lroe_operation = env["lroe.operation"].browse(self.id) lroe_operation.write( {"state": LROEOperationStateEnum.ERROR.value}) # If an operation has been sent successfully to the Tax Agency we need # to make sure that the current state is saved in case an exception # occurs in the following invoices. new_cr.commit() new_cr.close() raise else: eta = company._get_lroe_eta() new_delay = (self.sudo().with_context( company_id=company.id).with_delay( eta=eta).send_one_operation_job()) job = queue_obj.search([("uuid", "=", new_delay.uuid)], limit=1) self.sudo().jobs_ids |= job
hash_gen.update(random_data) return hash_gen.hexdigest()[:length] # Read OAuth2 constants and setup the token store: db_name = odoo.tools.config.get('db_name') if not db_name: _logger.error( "ERROR: To proper setup OAuth2 and Token Store - it's necessary to set the parameter 'db_name' in Odoo config file!" ) print( "ERROR: To proper setup OAuth2 and Token Store - it's necessary to set the parameter 'db_name' in Odoo config file!" ) else: # Read system parameters... registry = Registry(db_name) with registry.cursor() as cr: cr.execute("SELECT value FROM ir_config_parameter \ WHERE key = 'rest_api.use_redis_token_store'") res = cr.fetchone() use_redis_token_store = res and res[0].strip() if use_redis_token_store in ('0', 'False', 'None', 'false'): use_redis_token_store = False if not use_redis_token_store: # Setup Simple token store _logger.info("Setup Simple token store...") from . import simple_token_store token_store = simple_token_store.SimpleTokenStore() else: # Setup Redis token store _logger.info("Setup Redis token store...")
def cron_ssh_move_documents(
    self,
    host=False,
    port=False,
    user=False,
    password=False,
    ssh_path=False,
):
    """Cron entry point: fetch scanner files from a remote SFTP folder
    into the local destination folder and process each one.

    Connection parameters default to the
    ``hash_search_document_scanner_queue_ssh.*`` ir.config_parameters;
    the local destination comes from ``hash_search_document_scanner.path``.

    :returns: False when no destination path is configured, True otherwise
    """
    dest_path = (self.env["ir.config_parameter"].sudo().get_param(
        "hash_search_document_scanner.path", default=False))
    connection = SSHClient()
    connection.load_system_host_keys()
    if not dest_path:
        return False
    if not host:
        host = self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.host", default=False)
    if not port:
        port = int(self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.port", default="0"))
    if not user:
        user = self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.user", default=False)
    if not password:
        password = self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.password",
            default=False,
        )
    if not ssh_path:
        ssh_path = self.env["ir.config_parameter"].get_param(
            "hash_search_document_scanner_queue_ssh.ssh_path",
            default=False,
        )
    connection.connect(hostname=host, port=port, username=user,
                       password=password)
    sftp = connection.open_sftp()
    if ssh_path:
        sftp.chdir(ssh_path)
    elements = sftp.listdir_attr(".")
    # skip files accessed in the last 60s (may still be written to)
    min_time = int(time.time()) - 60
    single_commit = self.env.context.get("scanner_single_commit", False)
    for element in elements:
        if element.st_atime > min_time and not self.env.context.get(
                "scanner_ignore_time", False):
            continue
        filename = element.filename
        new_element = os.path.join(dest_path, filename)
        if not single_commit:
            # dedicated cursor so each file commits independently
            new_cr = Registry(self.env.cr.dbname).cursor()
        try:
            sftp.get(filename, new_element)
            if single_commit:
                # process synchronously in the caller's transaction
                obj = self.env[self._name].browse()
            else:
                # enqueue a delayed job bound to the new cursor
                obj = (api.Environment(
                    new_cr, self.env.uid,
                    self.env.context)[self._name].browse().with_delay())
            obj.process_document(new_element)
            if not single_commit:
                new_cr.commit()
        except Exception:
            # undo the download; with a dedicated cursor also roll back
            if os.path.exists(new_element):
                os.unlink(new_element)
            if not single_commit:
                new_cr.rollback()  # error, rollback everything atomically
            raise
        finally:
            if not single_commit:
                new_cr.close()
        # the local copy was processed; drop the remote original
        sftp.remove(element.filename)
    sftp.close()
    connection.close()
    return True
def register(db, uid, lang=None):
    """Open a cursor on database ``db`` and return an Environment for
    user ``uid``.

    Fix: the context was built as ``{lang: lang}``, using the language
    code itself as the key; the ORM expects the literal key ``'lang'``.

    :param lang: optional language code propagated through the context
    :returns: an ``api.Environment`` (caller owns the cursor lifecycle)
    """
    registry = Registry(db)
    cr = registry.cursor()
    context = {'lang': lang} if lang else {}
    env = api.Environment(cr, int(uid), context)
    return env
def write(self, vals):
    """Override write(): after saving, re-run the account.move.line
    register hook and, when it reports a change, flag the registry as
    invalidated so other workers reload it.
    """
    res = super(AccountDaysOverdue, self).write(vals)
    if self.env['account.move.line']._register_hook():
        Registry(self.env.cr.dbname).registry_invalidated = True
    return res
def write(self, vals):
    """Override write(): after saving, re-run the product register hooks
    and, when either reports a change, flag the registry as invalidated
    so other workers reload it.
    """
    res = super(Pricelist, self).write(vals)
    if (self.env["product.product"]._register_hook()
            or self.env["product.template"]._register_hook()):
        Registry(self.env.cr.dbname).registry_invalidated = True
    return res
def cursor(dbname, serialized=True):
    """Return a new cursor on ``dbname``'s database.

    :param serialized: forwarded to the database's ``cursor()``
        (serialized isolation by default)
    """
    # note: reaches into the registry's private ``_db`` handle
    return Registry(dbname)._db.cursor(serialized=serialized)
def web_login(self, redirect=None, **kw):
    """Login controller with an account-lockout policy: after
    ``auth_time_limit.login_error_times`` consecutive failures the
    account is locked and the password is no longer even checked.

    :param redirect: URL to redirect to after a successful login
        (defaults to ``/web``)
    """
    ensure_db()
    request.params['login_success'] = False
    if request.httprequest.method == 'GET' and redirect and request.session.uid:
        return http.redirect_with_hash(redirect)

    if not request.uid:
        request.uid = SUPERUSER_ID

    values = request.params.copy()
    try:
        values['databases'] = http.db_list()
    except exceptions.AccessDenied:
        values['databases'] = None

    # only POST requests get the special lockout handling below
    if request.httprequest.method == 'POST':
        registry = Registry(request.session.db)
        with registry.cursor() as cr:
            env = api.Environment(cr, 1, {})
            login_user = env['res.users'].search([
                ('login', '=', request.params['login'])
            ])
            time_limit = int(env['ir.config_parameter'].get_param(
                'auth_time_limit.login_error_times'))
            # once the failure limit is reached, skip authentication
            # entirely and report the account as locked
            if login_user and login_user.login_error_times >= time_limit:
                values['error'] = u'账户已被锁定!'
                return request.render('web.login', values)
            old_uid = request.uid
            uid = request.session.authenticate(request.session.db,
                                               request.params['login'],
                                               request.params['password'])
            # login succeeded: reset the failure counter and redirect
            if uid is not False:
                login_user.login_error_times = 0
                request.params['login_success'] = True
                if not redirect:
                    redirect = '/web'
                return http.redirect_with_hash(redirect)
            # login failed
            request.uid = old_uid
            if login_user.exists():
                try:
                    login_user.sudo(login_user.id).check_credentials(
                        request.params['password'])
                except exceptions.AccessDenied:
                    # wrong password: count the failure and warn/lock
                    login_user.login_error_times += 1
                    if login_user.login_error_times == time_limit:
                        values['error'] = u'账户已被锁定!'
                    elif login_user.login_error_times >= time_limit - 3:
                        values['error'] = u"您还有%s次机会!" % (
                            time_limit - login_user.login_error_times)
                    else:
                        values['error'] = _("Wrong login/password")
                else:
                    # password correct, some other check failed
                    # (e.g. allowed_ips restriction)
                    values['error'] = u'访问受限,请与管理员联系。'
            else:
                # no such user, or the user has active = False
                values['error'] = u"无此用户"
    return request.render('web.login', values)
def register(db, uid):
    """Open a cursor on database ``db`` and return an Environment for
    user ``uid`` with an empty context. The caller owns the cursor.
    """
    cr = Registry(db).cursor()
    return api.Environment(cr, int(uid), {})