def test_denied_error_json(self, **kwargs):
    """Test endpoint that always fails: raises AccessDenied (RPC/JSON flavour)."""
    message = "This is an access denied rpc test"
    raise AccessDenied(message)
def test_denied_error_http(self, **kwargs):
    """Test endpoint that always fails: raises AccessDenied (HTTP flavour)."""
    message = "This is an access denied http test"
    raise AccessDenied(message)
def update_states(self):
    """Re-trigger the write hooks on every record; restricted to admins."""
    if not self.env.is_admin():
        raise AccessDenied()
    # An empty-vals write still fires compute/write side effects per record.
    all_records = self.search([])
    for rec in all_records:
        rec.write({})
def set_values(self):
    """Persist the configuration settings; website designers only."""
    is_designer = self.user_has_groups('website.group_website_designer')
    if not is_designer:
        raise AccessDenied()
    super(ResConfigSettings, self).set_values()
def check_credentials(self, password):
    """ Override this method to plug additional authentication methods"""
    # NOTE(review): the password is matched via a plain equality in the
    # search domain — presumably a legacy scheme; verify hashing is
    # handled at the storage layer.
    domain = [('id', '=', self._uid), ('password', '=', password)]
    matched = self.sudo().search(domain)
    if not matched:
        raise AccessDenied()
def _auth_method_thing(cls):
    """Authentication hook for the 'thing' auth method: always refuses access."""
    raise AccessDenied()
def set_google_maps_api_key(self):
    """Store the (whitespace-stripped) Google Maps API key; designers only."""
    if not self.user_has_groups('website.group_website_designer'):
        raise AccessDenied()
    key = (self.google_maps_api_key or '').strip()
    params = self.env['ir.config_parameter'].sudo()
    params.set_param('google_maps_api_key', key)
def button_delete(self):
    """Remove the first author from this record (original: 删除一个作者)."""
    if not self.authors:
        # User-facing message kept verbatim ("no more authors").
        raise AccessDenied("没有更多作者了")
    first_author = self.authors[0]
    self.authors = [Command.delete(first_author.id)]
def _check_credentials(self, password):
    """Standard password check, then optional per-user OTP (2FA) validation."""
    super(ResUsers, self)._check_credentials(password)
    # 2FA only applies when enabled on the user AND opened on the company.
    tfa_required = self.otp_enable and self.company_id.is_open_2fa
    if tfa_required and not self.check_otp(request.params.get('tfa_code')):
        raise AccessDenied(_('Validation Code Error!'))
def get_default_google_maps_api_key(self, fields):
    """Return the stored Google Maps API key as a defaults dict; designers only."""
    if not self.user_has_groups('website.group_website_designer'):
        raise AccessDenied()
    params = self.env['ir.config_parameter'].sudo()
    key = params.get_param('google_maps_api_key', default='')
    return {'google_maps_api_key': key}
def _auth_saml_validate(self, provider_id, token):
    """ return the validation data corresponding to the access token

    Processes the SAML AuthnResponse *token* against the lasso profile of
    the given provider, collects the assertion's attributes, and returns
    ``{'user_id': <matching value>}`` where the value comes either from
    the configured matching attribute or from ``subject.nameId``.

    :raises AccessDenied: when the signature or the profile status of the
        response cannot be verified.
    :raises Exception: when the SSO assertion is invalid or the matching
        attribute is absent from the response.
    """
    p = self.env['auth.saml.provider'].browse(provider_id)
    # we are not yet logged in, so the userid cannot have access to the
    # fields we need yet
    login = p.sudo()._get_lasso_for_provider()
    matching_attribute = p.matching_attribute
    try:
        login.processAuthnResponseMsg(token)
    except (lasso.DsError, lasso.ProfileCannotVerifySignatureError):
        raise AccessDenied(_('Lasso Profile cannot verify signature'))
    except lasso.ProfileStatusNotSuccessError:
        raise AccessDenied(_('Profile Status failure'))
    try:
        login.acceptSso()
    except lasso.Error as error:
        # NOTE(review): indexing ``error[0]`` assumes lasso.Error supports
        # item access for its error code (py2-era lasso API) — verify.
        raise Exception(
            'Invalid assertion : %s' % lasso.strError(error[0]))
    # Collect every attribute of the assertion, keyed by
    # (name, format[, friendly-name]) tuples when available.
    attrs = {}
    for att_statement in login.assertion.attributeStatement:
        for attribute in att_statement.attribute:
            name = None
            lformat = lasso.SAML2_ATTRIBUTE_NAME_FORMAT_BASIC
            nickname = None
            try:
                name = attribute.name.decode('ascii')
            except Exception:
                _logger.warning(
                    'sso_after_response: error decoding attribute name %s',
                    attribute.dump(),
                )
            else:
                try:
                    if attribute.nameFormat:
                        lformat = attribute.nameFormat.decode('ascii')
                    if attribute.friendlyName:
                        nickname = attribute.friendlyName
                except Exception:
                    message = 'sso_after_response: name or format of an \
attribute failed to decode as ascii: %s'
                    _logger.warning(
                        message, attribute.dump(), exc_info=True
                    )
            try:
                if name:
                    if lformat:
                        if nickname:
                            key = (name, lformat, nickname)
                        else:
                            key = (name, lformat)
                    else:
                        key = name
                # NOTE(review): when ``name`` is falsy, ``key`` may be
                # unbound (or stale from a previous iteration); the broad
                # except below silently swallows the resulting error.
                attrs[key] = list()
                for value in attribute.attributeValue:
                    content = [a.exportToXml() for a in value.any]
                    content = ''.join(content)
                    # NOTE(review): ``.decode('utf8')`` presumes
                    # exportToXml() returned bytes (py2-era lasso); on a
                    # str this raises and the value is dropped — verify.
                    attrs[key].append(content.decode('utf8'))
            except Exception:
                message = 'sso_after_response: value of an \
attribute failed to decode as ascii: %s due to %s'
                _logger.warning(
                    message, attribute.dump(), exc_info=True
                )
    # Look up the configured matching attribute among the tuple keys.
    matching_value = None
    for k in attrs:
        if isinstance(k, tuple) and k[0] == matching_attribute:
            matching_value = attrs[k][0]
            break
    if not matching_value and matching_attribute == "subject.nameId":
        # Fall back to the assertion subject's nameId.
        matching_value = login.assertion.subject.nameId.content
    elif not matching_value and matching_attribute != "subject.nameId":
        raise Exception(
            "Matching attribute %s not found in user attrs: %s" % (
                matching_attribute, attrs))
    validation = {'user_id': matching_value}
    return validation
def _check_rod_number(self): for record in self: if record.rod_number > 4: raise AccessDenied(("No puede superar 4"))
def _check_time_authentication(self, user): if user.time_auth: if self._check_out_of_time(user): raise AccessDenied("You cannot access the system at this time")
def _check_credentials(self, password):
    """Standard credentials check, then company-wide 2FA code validation."""
    super(ResUsers, self)._check_credentials(password)
    # Determine whether to turn on two-factor authentication and verify the verification code
    if self.sudo().company_id.is_open_2fa:
        if not self.sudo().check_otp(request.params.get('tfa_code')):
            raise AccessDenied(_('Validation Code Error!'))
def set_has_google_maps(self):
    """Persist the 'has_google_maps' default on website settings; designers only."""
    if not self.user_has_groups('website.group_website_designer'):
        raise AccessDenied()
    ir_values = self.env['ir.values'].sudo()
    return ir_values.set_default(
        'website.config.settings', 'has_google_maps', self.has_google_maps)
def generate_fec(self):
    """Generate the French FEC (Fichier des Écritures Comptables) export.

    Builds a flat (pipe-separated CSV-like) file containing the opening
    balances followed by every journal item of the selected period,
    stores it base64-encoded on the wizard, optionally sets the fiscal
    year lock date, and returns an ``ir.actions.act_url`` so the browser
    downloads the file.

    :raises AccessDenied: unless the user is an admin or in the
        accountant group.
    :raises UserError: when the selected dates are in the future or
        inconsistent.
    """
    self.ensure_one()
    # Only admins / accountants may export accounting data.
    if not (self.env.is_admin() or self.env.user.has_group('account.group_account_user')):
        raise AccessDenied()
    # We choose to implement the flat file instead of the XML
    # file for 2 reasons :
    # 1) the XSD file impose to have the label on the account.move
    # but Odoo has the label on the account.move.line, so that's a
    # problem !
    # 2) CSV files are easier to read/use for a regular accountant.
    # So it will be easier for the accountant to check the file before
    # sending it to the fiscal administration
    today = fields.Date.today()
    if self.date_from > today or self.date_to > today:
        raise UserError(
            _('You could not set the start date or the end date in the future.'))
    if self.date_from >= self.date_to:
        raise UserError(
            _('The start date must be inferior to the end date.'))

    company = self.env.company
    company_legal_data = self._get_company_legal_data(company)

    # Fixed FEC column layout (the order is mandated by the administration).
    header = [
        u'JournalCode',    # 0
        u'JournalLib',     # 1
        u'EcritureNum',    # 2
        u'EcritureDate',   # 3
        u'CompteNum',      # 4
        u'CompteLib',      # 5
        u'CompAuxNum',     # 6  We use partner.id
        u'CompAuxLib',     # 7
        u'PieceRef',       # 8
        u'PieceDate',      # 9
        u'EcritureLib',    # 10
        u'Debit',          # 11
        u'Credit',         # 12
        u'EcritureLet',    # 13
        u'DateLet',        # 14
        u'ValidDate',      # 15
        u'Montantdevise',  # 16
        u'Idevise',        # 17
    ]

    rows_to_write = [header]

    # INITIAL BALANCE
    unaffected_earnings_xml_ref = self.env.ref(
        'account.data_unaffected_earnings')
    # used to make sure that we add the unaffected earning initial balance only once
    unaffected_earnings_line = True
    if unaffected_earnings_xml_ref:
        #compute the benefit/loss of last year to add in the initial balance of the current year earnings account
        unaffected_earnings_results = self._do_query_unaffected_earnings()
        unaffected_earnings_line = False

    # Opening balances for non receivable/payable accounts, aggregated
    # per account (debit/credit rendered as French-style '0,00' strings).
    sql_query = '''
    SELECT
        'OUV' AS JournalCode,
        'Balance initiale' AS JournalLib,
        'OUVERTURE/' || %s AS EcritureNum,
        %s AS EcritureDate,
        MIN(aa.code) AS CompteNum,
        replace(replace(MIN(aa.name), '|', '/'), '\t', '') AS CompteLib,
        '' AS CompAuxNum,
        '' AS CompAuxLib,
        '-' AS PieceRef,
        %s AS PieceDate,
        '/' AS EcritureLib,
        replace(CASE WHEN sum(aml.balance) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Debit,
        replace(CASE WHEN sum(aml.balance) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Credit,
        '' AS EcritureLet,
        '' AS DateLet,
        %s AS ValidDate,
        '' AS Montantdevise,
        '' AS Idevise,
        MIN(aa.id) AS CompteID
    FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
    WHERE
        am.date < %s
        AND am.company_id = %s
        AND aat.include_initial_balance = 't'
    '''
    # For official report: only use posted entries
    if self.export_type == "official":
        sql_query += '''
        AND am.state = 'posted'
        '''
    sql_query += '''
    GROUP BY aml.account_id, aat.type
    HAVING aat.type not in ('receivable', 'payable')
    '''
    formatted_date_from = fields.Date.to_string(self.date_from).replace(
        '-', '')
    date_from = self.date_from
    formatted_date_year = date_from.year
    currency_digits = 2
    self._cr.execute(
        sql_query, (formatted_date_year, formatted_date_from,
                    formatted_date_from, formatted_date_from,
                    self.date_from, company.id))

    for row in self._cr.fetchall():
        listrow = list(row)
        # Last column (CompteID) is only used for the lookup below.
        account_id = listrow.pop()
        if not unaffected_earnings_line:
            account = self.env['account.account'].browse(account_id)
            if account.user_type_id.id == self.env.ref(
                    'account.data_unaffected_earnings').id:
                #add the benefit/loss of previous fiscal year to the first unaffected earnings account found.
                unaffected_earnings_line = True
                # Debit/credit come back as '1234,56' strings; convert to
                # floats to merge, then re-render in the same format.
                current_amount = float(listrow[11].replace(
                    ',', '.')) - float(listrow[12].replace(',', '.'))
                unaffected_earnings_amount = float(
                    unaffected_earnings_results[11].replace(
                        ',', '.')) - float(
                            unaffected_earnings_results[12].replace(
                                ',', '.'))
                listrow_amount = current_amount + unaffected_earnings_amount
                if float_is_zero(listrow_amount,
                                 precision_digits=currency_digits):
                    continue
                if listrow_amount > 0:
                    listrow[11] = str(listrow_amount).replace('.', ',')
                    listrow[12] = '0,00'
                else:
                    listrow[11] = '0,00'
                    listrow[12] = str(-listrow_amount).replace('.', ',')
        rows_to_write.append(listrow)

    #if the unaffected earnings account wasn't in the selection yet: add it manually
    if (not unaffected_earnings_line and unaffected_earnings_results and
            (unaffected_earnings_results[11] != '0,00'
             or unaffected_earnings_results[12] != '0,00')):
        #search an unaffected earnings account
        unaffected_earnings_account = self.env['account.account'].search(
            [('user_type_id', '=',
              self.env.ref('account.data_unaffected_earnings').id)],
            limit=1)
        if unaffected_earnings_account:
            unaffected_earnings_results[
                4] = unaffected_earnings_account.code
            unaffected_earnings_results[
                5] = unaffected_earnings_account.name
        rows_to_write.append(unaffected_earnings_results)

    # INITIAL BALANCE - receivable/payable
    # Same as above but grouped per account AND partner, with the partner
    # exposed in the auxiliary account columns.
    sql_query = '''
    SELECT
        'OUV' AS JournalCode,
        'Balance initiale' AS JournalLib,
        'OUVERTURE/' || %s AS EcritureNum,
        %s AS EcritureDate,
        MIN(aa.code) AS CompteNum,
        replace(MIN(aa.name), '|', '/') AS CompteLib,
        CASE WHEN MIN(aat.type) IN ('receivable', 'payable')
        THEN
            CASE WHEN rp.ref IS null OR rp.ref = ''
            THEN rp.id::text
            ELSE replace(rp.ref, '|', '/')
            END
        ELSE ''
        END AS CompAuxNum,
        CASE WHEN aat.type IN ('receivable', 'payable')
        THEN COALESCE(replace(rp.name, '|', '/'), '')
        ELSE ''
        END AS CompAuxLib,
        '-' AS PieceRef,
        %s AS PieceDate,
        '/' AS EcritureLib,
        replace(CASE WHEN sum(aml.balance) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Debit,
        replace(CASE WHEN sum(aml.balance) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Credit,
        '' AS EcritureLet,
        '' AS DateLet,
        %s AS ValidDate,
        '' AS Montantdevise,
        '' AS Idevise,
        MIN(aa.id) AS CompteID
    FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        LEFT JOIN res_partner rp ON rp.id=aml.partner_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
    WHERE
        am.date < %s
        AND am.company_id = %s
        AND aat.include_initial_balance = 't'
    '''
    # For official report: only use posted entries
    if self.export_type == "official":
        sql_query += '''
        AND am.state = 'posted'
        '''
    sql_query += '''
    GROUP BY aml.account_id, aat.type, rp.ref, rp.id
    HAVING aat.type in ('receivable', 'payable')
    '''
    self._cr.execute(
        sql_query, (formatted_date_year, formatted_date_from,
                    formatted_date_from, formatted_date_from,
                    self.date_from, company.id))

    for row in self._cr.fetchall():
        listrow = list(row)
        account_id = listrow.pop()
        rows_to_write.append(listrow)

    # LINES
    # Every journal item of the period, with pipe/tab/newline characters
    # sanitized out of the text columns, ordered chronologically.
    sql_query = '''
    SELECT
        REGEXP_REPLACE(replace(aj.code, '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS JournalCode,
        REGEXP_REPLACE(replace(COALESCE(aj__name.value, aj.name), '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS JournalLib,
        REGEXP_REPLACE(replace(am.name, '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS EcritureNum,
        TO_CHAR(am.date, 'YYYYMMDD') AS EcritureDate,
        aa.code AS CompteNum,
        REGEXP_REPLACE(replace(aa.name, '|', '/'), '[\\t\\r\\n]', ' ', 'g') AS CompteLib,
        CASE WHEN aat.type IN ('receivable', 'payable')
        THEN
            CASE WHEN rp.ref IS null OR rp.ref = ''
            THEN rp.id::text
            ELSE replace(rp.ref, '|', '/')
            END
        ELSE ''
        END AS CompAuxNum,
        CASE WHEN aat.type IN ('receivable', 'payable')
        THEN COALESCE(REGEXP_REPLACE(replace(rp.name, '|', '/'), '[\\t\\r\\n]', ' ', 'g'), '')
        ELSE ''
        END AS CompAuxLib,
        CASE WHEN am.ref IS null OR am.ref = ''
        THEN '-'
        ELSE REGEXP_REPLACE(replace(am.ref, '|', '/'), '[\\t\\r\\n]', ' ', 'g')
        END AS PieceRef,
        TO_CHAR(COALESCE(am.invoice_date, am.date), 'YYYYMMDD') AS PieceDate,
        CASE WHEN aml.name IS NULL OR aml.name = '' THEN '/'
            WHEN aml.name SIMILAR TO '[\\t|\\s|\\n]*' THEN '/'
            ELSE REGEXP_REPLACE(replace(aml.name, '|', '/'), '[\\t\\n\\r]', ' ', 'g') END AS EcritureLib,
        replace(CASE WHEN aml.debit = 0 THEN '0,00' ELSE to_char(aml.debit, '000000000000000D99') END, '.', ',') AS Debit,
        replace(CASE WHEN aml.credit = 0 THEN '0,00' ELSE to_char(aml.credit, '000000000000000D99') END, '.', ',') AS Credit,
        CASE WHEN rec.name IS NULL THEN '' ELSE rec.name END AS EcritureLet,
        CASE WHEN aml.full_reconcile_id IS NULL THEN '' ELSE TO_CHAR(rec.create_date, 'YYYYMMDD') END AS DateLet,
        TO_CHAR(am.date, 'YYYYMMDD') AS ValidDate,
        CASE
            WHEN aml.amount_currency IS NULL OR aml.amount_currency = 0 THEN ''
            ELSE replace(to_char(aml.amount_currency, '000000000000000D99'), '.', ',')
        END AS Montantdevise,
        CASE WHEN aml.currency_id IS NULL THEN '' ELSE rc.name END AS Idevise
    FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        LEFT JOIN res_partner rp ON rp.id=aml.partner_id
        JOIN account_journal aj ON aj.id = am.journal_id
        LEFT JOIN ir_translation aj__name ON aj__name.res_id = aj.id
            AND aj__name.type = 'model'
            AND aj__name.name = 'account.journal,name'
            AND aj__name.lang = %s
            AND aj__name.value != ''
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
        LEFT JOIN res_currency rc ON rc.id = aml.currency_id
        LEFT JOIN account_full_reconcile rec ON rec.id = aml.full_reconcile_id
    WHERE
        am.date >= %s
        AND am.date <= %s
        AND am.company_id = %s
    '''
    # For official report: only use posted entries
    if self.export_type == "official":
        sql_query += '''
        AND am.state = 'posted'
        '''
    sql_query += '''
    ORDER BY
        am.date,
        am.name,
        aml.id
    '''
    lang = self.env.user.lang or get_lang(self.env).code
    self._cr.execute(
        sql_query, (lang, self.date_from, self.date_to, company.id))

    for row in self._cr.fetchall():
        rows_to_write.append(list(row))

    fecvalue = self._csv_write_rows(rows_to_write)
    end_date = fields.Date.to_string(self.date_to).replace('-', '')
    suffix = ''
    if self.export_type == "nonofficial":
        suffix = '-NONOFFICIAL'

    self.write({
        'fec_data': base64.encodebytes(fecvalue),
        # Filename = <siren>FECYYYYMMDD where YYYMMDD is the closing date
        'filename': '%sFEC%s%s.csv' % (company_legal_data, end_date, suffix),
    })

    # Set fiscal year lock date to the end date (not in test)
    fiscalyear_lock_date = self.env.company.fiscalyear_lock_date
    if not self.test_file and (not fiscalyear_lock_date or fiscalyear_lock_date < self.date_to):
        self.env.company.write({'fiscalyear_lock_date': self.date_to})
    return {
        'name': 'FEC',
        'type': 'ir.actions.act_url',
        'url': "web/content/?model=account.fr.fec&id=" + str(self.id) +
               "&filename_field=filename&field=fec_data&download=true&filename=" + self.filename,
        'target': 'self',
    }
def set_values(self):
    """Persist settings and the crawling opt-out flag; designers only."""
    if not self.user_has_groups('website.group_website_designer'):
        raise AccessDenied()
    super(ResConfigSettings, self).set_values()
    icp = self.env['ir.config_parameter'].sudo()
    flag = '1' if self.disable_crawling else '0'
    icp.set_param('disable_crawling', flag)
def install_from_urls(self, urls):
    """Download, extract and install modules from the Apps store.

    :param urls: mapping of module name -> download URL (falsy URL means
        the local version is already up to date).
    :raises AccessDenied: when the user is not a system admin or a URL
        does not belong to the configured apps server.
    :raises UserError: when the addons data dir is not writable or a
        download fails.
    """
    if not self.env.user.has_group('base.group_system'):
        raise AccessDenied()

    # One-click install is opt-in - cfr Issue #15225
    ad_dir = tools.config.addons_data_dir
    if not os.access(ad_dir, os.W_OK):
        msg = (_("Automatic install of downloaded Apps is currently disabled.") + "\n\n" +
               _("To enable it, make sure this directory exists and is writable on the server:") +
               "\n%s" % ad_dir)
        _logger.warning(msg)
        raise UserError(msg)

    # NOTE(review): ``urls`` here is the dict parameter, which has no
    # ``url_parse`` attribute — this looks like it should be
    # ``werkzeug.urls.url_parse`` (the parameter shadows the module);
    # verify against the original upstream source.
    apps_server = urls.url_parse(self.get_apps_server())

    OPENERP = odoo.release.product_name.lower()
    tmp = tempfile.mkdtemp()
    _logger.debug('Install from url: %r', urls)
    try:
        # 1. Download & unzip missing modules
        for module_name, url in urls.items():
            if not url:
                continue    # nothing to download, local version is already the last one
            # NOTE(review): same ``urls.url_parse`` shadowing as above.
            up = urls.url_parse(url)
            # Refuse any URL that does not point at the apps server.
            if up.scheme != apps_server.scheme or up.netloc != apps_server.netloc:
                raise AccessDenied()
            try:
                _logger.info('Downloading module `%s` from OpenERP Apps', module_name)
                response = requests.get(url)
                response.raise_for_status()
                content = response.content
            except Exception:
                _logger.exception('Failed to fetch module %s', module_name)
                raise UserError(_('The `%s` module appears to be unavailable at the moment, please try again later.') % module_name)
            else:
                zipfile.ZipFile(io.BytesIO(content)).extractall(tmp)
                assert os.path.isdir(os.path.join(tmp, module_name))

        # 2a. Copy/Replace module source in addons path
        for module_name, url in urls.items():
            if module_name == OPENERP or not url:
                continue    # OPENERP is special case, handled below, and no URL means local module
            module_path = modules.get_module_path(module_name, downloaded=True, display_warning=False)
            bck = backup(module_path, False)
            _logger.info('Copy downloaded module `%s` to `%s`', module_name, module_path)
            shutil.move(os.path.join(tmp, module_name), module_path)
            if bck:
                shutil.rmtree(bck)

        # 2b. Copy/Replace server+base module source if downloaded
        if urls.get(OPENERP):
            # special case. it contains the server and the base module.
            # extract path is not the same
            base_path = os.path.dirname(modules.get_module_path('base'))

            # copy all modules in the SERVER/odoo/addons directory to the new "odoo" module (except base itself)
            for d in os.listdir(base_path):
                if d != 'base' and os.path.isdir(os.path.join(base_path, d)):
                    destdir = os.path.join(tmp, OPENERP, 'addons', d)   # XXX 'odoo' subdirectory ?
                    shutil.copytree(os.path.join(base_path, d), destdir)

            # then replace the server by the new "base" module
            server_dir = tools.config['root_path']      # XXX or dirname()
            bck = backup(server_dir)
            _logger.info('Copy downloaded module `odoo` to `%s`', server_dir)
            shutil.move(os.path.join(tmp, OPENERP), server_dir)
            #if bck:
            #    shutil.rmtree(bck)

        self.update_list()

        with_urls = [module_name for module_name, url in urls.items() if url]
        downloaded = self.search([('name', 'in', with_urls)])
        installed = self.search([('id', 'in', downloaded.ids), ('state', '=', 'installed')])

        to_install = self.search([('name', 'in', list(urls)), ('state', '=', 'uninstalled')])
        post_install_action = to_install.button_immediate_install()

        if installed or to_install:
            # in this case, force server restart to reload python code...
            self._cr.commit()
            odoo.service.server.restart()
            return {
                'type': 'ir.actions.client',
                'tag': 'home',
                'params': {'wait': True},
            }
        return post_install_action
    finally:
        shutil.rmtree(tmp)
def _assert_can_auth(self):
    """ Checks that the current environment even allows the current auth
    request to happen.

    The baseline implementation is a simple linear login cooldown: after
    a number of failures trying to log-in, the user (by login) is put on
    cooldown. During the cooldown period, login *attempts* are ignored
    and logged.

    .. warning::

        The login counter is not shared between workers and not
        specifically thread-safe, the feature exists mostly for
        rate-limiting on large number of login attempts (brute-forcing
        passwords) so that should not be much of an issue.

        For a more complex strategy (e.g. database or distribute storage)
        override this method. To simply change the cooldown criteria
        (configuration, ...) override _on_login_cooldown instead.

    .. note:: This is a *context manager* so it can be called around
              the login procedure without having to call it itself.
    """
    # needs request for remote address
    if not request:
        yield
        return

    reg = self.env.registry
    # Lazily create the per-registry failure map: source address ->
    # (failure count, timestamp of last failure).
    failures_map = getattr(reg, '_login_failures', None)
    if failures_map is None:
        failures_map = reg._login_failures = collections.defaultdict(lambda: (0, datetime.datetime.min))

    source = request.httprequest.remote_addr
    (failures, previous) = failures_map[source]
    if self._on_login_cooldown(failures, previous):
        # Logger.warn is a deprecated alias of Logger.warning; use the
        # documented method.
        _logger.warning(
            "Login attempt ignored for %s on %s: "
            "%d failures since last success, last failure at %s. "
            "You can configure the number of login failures before a "
            "user is put on cooldown as well as the duration in the "
            "System Parameters. Disable this feature by setting "
            "\"base.login_cooldown_after\" to 0.",
            source, self.env.cr.dbname, failures, previous)
        if ipaddress.ip_address(source).is_private:
            _logger.warning(
                "The rate-limited IP address %s is classified as private "
                "and *might* be a proxy. If your Odoo is behind a proxy, "
                "it may be mis-configured. Check that you are running "
                "Odoo in Proxy Mode and that the proxy is properly configured, see "
                "https://www.odoo.com/documentation/11.0/setup/deploy.html#https for details.",
                source
            )
        raise AccessDenied(_("Too many login failures, please wait a bit before trying again."))

    try:
        yield
    except AccessDenied:
        # Record the failure (count + timestamp) before propagating.
        (failures, __) = reg._login_failures[source]
        reg._login_failures[source] = (failures + 1, datetime.datetime.now())
        raise
    else:
        # Successful auth resets the counter for this source address.
        reg._login_failures.pop(source, None)
def if_db_mgt_enabled(method, self, *args, **kwargs):
    """Run *method* only when database management (list_db) is enabled; deny otherwise."""
    enabled = odoo.tools.config['list_db']
    if not enabled:
        _logger.error('Database management functions blocked, admin disabled database listing')
        raise AccessDenied()
    return method(self, *args, **kwargs)
def microsoft_auth_oauth(self, provider, params):
    """Resolve the Microsoft OAuth sign-in; return (dbname, login, access_token)."""
    token = params.get('access_token')
    login = self._microsoft_auth_oauth_signin(provider, params)
    if not login:
        raise AccessDenied()
    return self._cr.dbname, login, token