def action_validate(self):
    """Approve the leave requests in ``self`` (second approval step when
    double validation applies).

    For each request: moves it to ``validate``, records the approver,
    then either creates the calendar leave (employee + 'remove') or, for
    category requests, creates one child request per employee of the
    category and approves/validates them.

    :raises UserError: if a request is not in ``confirm``/``validate1``,
        or if the second approval is attempted by a non HR-manager.
    :return: True
    """
    self._check_security_action_validate()
    # Employee record linked to the current user; used as the approver.
    current_employee = self.env['hr.employee'].search(
        [('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state not in ['confirm', 'validate1']:
            raise UserError(
                _('Leave request must be confirmed in order to approve it.'
                  ))
        if holiday.state == 'validate1' and not holiday.env.user.has_group(
                'hr_holidays.group_hr_holidays_manager'):
            raise UserError(
                _('Only an HR Manager can apply the second approval on leave requests.'
                  ))
        holiday.write({'state': 'validate'})
        # On double validation the manager doing this step is the *second*
        # approver; otherwise this single approval is the first one.
        if holiday.double_validation:
            holiday.write({'second_approver_id': current_employee.id})
        else:
            holiday.write({'first_approver_id': current_employee.id})
        if holiday.holiday_type == 'employee' and holiday.type == 'remove':
            holiday._validate_leave_request()
        elif holiday.holiday_type == 'category':
            # Fan out: one child leave per employee in the category.
            leaves = self.env['hr.holidays']
            for employee in holiday.category_id.employee_ids:
                values = holiday._prepare_create_by_category(employee)
                leaves += self.with_context(
                    mail_notify_force_send=False).create(values)
            # TODO is it necessary to interleave the calls?
            leaves.action_approve()
            if leaves and leaves[0].double_validation:
                leaves.action_validate()
    return True
def _notification_recipients(self, message, groups):
    """ Handle HR users and officers recipients that can validate or refuse
    holidays directly from email.

    Prepends a 'group_hr_holidays_user' notification group whose members
    (partners with at least one user in the HR holidays user group) get
    Approve/Refuse action buttons in the notification email, depending on
    the current state of the request.
    """
    groups = super(Holidays, self)._notification_recipients(message, groups)
    self.ensure_one()
    hr_actions = []
    # Approve is only offered while the request awaits (first) approval.
    if self.state == 'confirm':
        app_action = self._notification_link_helper(
            'controller', controller='/hr_holidays/validate')
        hr_actions += [{'url': app_action, 'title': _('Approve')}]
    # Refuse is available as long as the request is not already refused.
    if self.state in ['confirm', 'validate', 'validate1']:
        ref_action = self._notification_link_helper(
            'controller', controller='/hr_holidays/refuse')
        hr_actions += [{'url': ref_action, 'title': _('Refuse')}]
    # (group_name, membership predicate, group options) — evaluated per
    # partner by the mail notification machinery.
    new_group = ('group_hr_holidays_user',
                 lambda partner: bool(partner.user_ids) and any(
                     user.has_group('hr_holidays.group_hr_holidays_user')
                     for user in partner.user_ids), {
                         'actions': hr_actions,
                     })
    # Prepend so the HR group takes precedence over generic groups.
    return [new_group] + groups
def _interval_dates(self, frequency, company):
    """ Method used to compute the theoretical date from which account move
    lines should be fetched.

    Frequencies are literal (daily means 24 hours and so on).

    @param {string} frequency: a valid value of the selection field on the
        object (daily, monthly, annually)
    @param {recordset} company: the company for which the closing is done
    @return {dict} 'interval_from': theoretical date from which account move
        lines are fetched, 'date_stop': date to which the move lines are
        fetched (always now()), 'name_interval': label of the closing;
        dates are in their izi Database string representation
    """
    import calendar  # local import: only needed for the monthly clamp

    date_stop = datetime.utcnow()
    interval_from = None
    name_interval = ''
    if frequency == 'daily':
        interval_from = date_stop - timedelta(days=1)
        name_interval = _('Daily Closing')
    elif frequency == 'monthly':
        # Previous month, rolling the year back when we are in January.
        month_target = date_stop.month > 1 and date_stop.month - 1 or 12
        year_target = month_target < 12 and date_stop.year or date_stop.year - 1
        # BUGFIX: datetime.replace() raises ValueError when the current day
        # does not exist in the target month (e.g. March 31 -> February);
        # clamp the day to the last day of the target month.
        last_day = calendar.monthrange(year_target, month_target)[1]
        interval_from = date_stop.replace(year=year_target,
                                          month=month_target,
                                          day=min(date_stop.day, last_day))
        name_interval = _('Monthly Closing')
    elif frequency == 'annually':
        year_target = date_stop.year - 1
        interval_from = date_stop.replace(year=year_target)
        name_interval = _('Annual Closing')

    return {
        'interval_from': FieldDateTime.to_string(interval_from),
        'date_stop': FieldDateTime.to_string(date_stop),
        'name_interval': name_interval
    }
def write(self, vals):
    """Override enforcing French anti-fraud inalterability on posted moves.

    Blocks modification of protected fields on posted moves and of the
    hash/sequence fields themselves, then — after the regular write — hashes
    any move that has just been posted.

    :raises UserError: when a forbidden modification is attempted.
    """
    has_been_posted = False
    for move in self:
        if move.company_id._is_accounting_unalterable():
            # write the hash and the secure_sequence_number when posting an account.move
            if vals.get('state') == 'posted':
                has_been_posted = True

            # restrict the operation in case we are trying to write a forbidden field
            if (move.state == "posted" and set(vals).intersection(MOVE_FIELDS)):
                raise UserError(
                    _("According to the French law, you cannot modify a journal entry in order for its posted data to be updated or deleted. Unauthorized field: %s."
                      ) % ', '.join(MOVE_FIELDS))
            # restrict the operation in case we are trying to overwrite existing hash
            if (move.l10n_fr_hash and 'l10n_fr_hash' in vals) or (
                    move.l10n_fr_secure_sequence_number and
                    'l10n_fr_secure_sequence_number' in vals):
                raise UserError(
                    _('You cannot overwrite the values ensuring the inalterability of the accounting.'
                      ))
    res = super(AccountMove, self).write(vals)
    # write the hash and the secure_sequence_number when posting an account.move
    if has_been_posted:
        # Only hash moves that were never hashed/numbered before.
        for move in self.filtered(
                lambda m: m.company_id._is_accounting_unalterable() and not
            (m.l10n_fr_secure_sequence_number or m.l10n_fr_hash)):
            new_number = move.company_id.l10n_fr_secure_sequence_id.next_by_id(
            )
            vals_hashing = {
                'l10n_fr_secure_sequence_number': new_number,
                'l10n_fr_hash': move._get_new_hash(new_number)
            }
            # Bypass the field restriction above by calling super() directly.
            res |= super(AccountMove, move).write(vals_hashing)
    return res
class BarcodeRule(models.Model):
    # Extend barcode rules with the Point-of-Sale-specific rule types.
    _inherit = 'barcode.rule'

    # NOTE(review): _() is evaluated at module import time here, before any
    # request language is known — verify labels are re-translated at display
    # time by the selection field machinery.
    type = fields.Selection(selection_add=[
        ('weight', _('Weighted Product')),
        ('price', _('Priced Product')),
        ('discount', _('Discounted Product')),
        ('client', _('Client')),
        ('cashier', _('Cashier'))
    ])
def _message_notification_recipients(self, message, recipients):
    """Relabel the document-access button of notification emails.

    Depending on whether the record behind ``message`` is a leave request
    (type 'remove') or an allocation, the access button title becomes
    "See Leave" or "See Allocation".
    """
    groups = super(Holidays, self)._message_notification_recipients(
        message, recipients)
    record = self.env[message.model].browse(message.res_id)
    if record.type == 'remove':
        button_title = _("See Leave")
    else:
        button_title = _("See Allocation")
    # Patch every recipient group that exposes an access button.
    for group_data in groups.values():
        access = group_data.get('button_access')
        if access:
            access['title'] = button_title
    return groups
def _parse_import_data_recursive(self, model, prefix, data, import_fields,
                                 options):
    """Normalize date/datetime and float columns of ``data`` in place,
    recursing into relational sub-fields (``field/subfield`` notation).

    :param model: model name whose fields describe the current level
    :param prefix: accumulated 'parent/' path for nested levels ('' at root)
    :param data: list of rows (lists of cell strings), mutated in place
    :param import_fields: flat list of slash-separated column paths
    :param options: import options; may hold 'date_format'/'datetime_format'
    :raises ValueError: on unparseable date cells (with column/line info)
    :return: the (mutated) ``data``
    """
    # Get fields of type date/datetime
    all_fields = self.env[model].fields_get()
    for name, field in all_fields.items():
        name = prefix + name
        if field['type'] in ('date', 'datetime') and name in import_fields:
            # Parse date
            index = import_fields.index(name)
            dt = datetime.datetime
            server_format = DEFAULT_SERVER_DATE_FORMAT if field[
                'type'] == 'date' else DEFAULT_SERVER_DATETIME_FORMAT

            # Only re-parse when the user chose a non-server format.
            if options.get('%s_format' % field['type'],
                           server_format) != server_format:
                # datetime.str[fp]time takes *native strings* in both
                # versions, for both data and pattern
                user_format = pycompat.to_native(
                    options.get('%s_format' % field['type']))
                for num, line in enumerate(data):
                    if line[index]:
                        line[index] = line[index].strip()
                    if line[index]:
                        try:
                            # Convert user format -> server format.
                            line[index] = dt.strftime(
                                dt.strptime(
                                    pycompat.to_native(line[index]),
                                    user_format), server_format)
                        except ValueError as e:
                            raise ValueError(
                                _("Column %s contains incorrect values. Error in line %d: %s"
                                  ) % (name, num + 1, e))
                        except Exception as e:
                            raise ValueError(
                                _("Error Parsing Date [%s:L%d]: %s") %
                                (name, num + 1, e))
        # Check if the field is in import_field and is a relational (followed by /)
        # Also verify that the field name exactly match the import_field at the correct level.
        elif any(name + '/' in import_field and name ==
                 import_field.split('/')[prefix.count('/')]
                 for import_field in import_fields):
            # Recursive call with the relational as new model and add the field name to the prefix
            self._parse_import_data_recursive(field['relation'], name + '/',
                                              data, import_fields, options)
        elif field['type'] in ('float',
                               'monetary') and name in import_fields:
            # Parse float, sometimes float values from file have currency symbol or () to denote a negative value
            # We should be able to manage both case
            index = import_fields.index(name)
            self._parse_float_from_data(data, index, name, options)
    return data
def _read_file(self, options):
    """ Dispatch to specific method to read file content, according to its
    mimetype or file type.

    Resolution order: mimetype guessed from the content, then the
    user-provided mimetype, then the file extension.

    :param options: dict of reading options (quoting, separator, ...)
    :raises ImportError: if the format needs a missing Python module
    :raises ValueError: if no reader matches the file at all
    """
    self.ensure_one()
    # guess mimetype from file content
    mimetype = guess_mimetype(self.file)
    (file_extension, handler, req) = FILE_TYPE_DICT.get(mimetype,
                                                        (None, None, None))
    if handler:
        try:
            return getattr(self, '_read_' + file_extension)(options)
        except Exception:
            # Logger.warn() is a deprecated alias; use warning().
            _logger.warning(
                "Failed to read file '%s' (transient id %d) using guessed mimetype %s",
                self.file_name or '<unknown>', self.id, mimetype)

    # try reading with user-provided mimetype
    (file_extension, handler, req) = FILE_TYPE_DICT.get(self.file_type,
                                                        (None, None, None))
    if handler:
        try:
            return getattr(self, '_read_' + file_extension)(options)
        except Exception:
            _logger.warning(
                "Failed to read file '%s' (transient id %d) using user-provided mimetype %s",
                self.file_name or '<unknown>', self.id, self.file_type)

    # fallback on file extensions as mime types can be unreliable (e.g.
    # software setting incorrect mime types, or non-installed software
    # leading to browser not sending mime types)
    if self.file_name:
        p, ext = os.path.splitext(self.file_name)
        if ext in EXTENSIONS:
            try:
                return getattr(self, '_read_' + ext[1:])(options)
            except Exception:
                _logger.warning(
                    "Failed to read file '%s' (transient id %s) using file extension",
                    self.file_name, self.id)

    if req:
        raise ImportError(
            _("Unable to load \"{extension}\" file: requires Python module \"{modname}\""
              ).format(extension=file_extension, modname=req))
    raise ValueError(
        _("Unsupported file format \"{}\", import only supports CSV, ODS, XLS and XLSX"
          ).format(self.file_type))
def _check_hash_integrity(self, company_id):
    """Checks that all posted moves have still the same data as when they
    were posted and raises an error with the result.

    Walks the hash chain of posted, sequence-numbered moves of
    ``company_id`` in sequence order and recomputes each hash from the
    previous one.

    :raises UserError: always — either a corruption/empty-chain error, or a
        success report (the "Successful test !" message); this method never
        returns normally.
    """

    def build_move_info(move):
        # Returns [name, "(ref.: X)" or ""] for the report.
        entry_reference = _('(ref.: %s)')
        move_reference_string = move.ref and entry_reference % move.ref or ''
        return [move.name, move_reference_string]

    moves = self.search([('state', '=', 'posted'),
                         ('company_id', '=', company_id),
                         ('l10n_fr_secure_sequence_number', '!=', 0)],
                        order="l10n_fr_secure_sequence_number ASC")
    if not moves:
        raise UserError(
            _('There isn\'t any journal entry flagged for data inalterability yet for the company %s. This mechanism only runs for journal entries generated after the installation of the module France - Certification CGI 286 I-3 bis.'
              ) % self.env.user.company_id.name)
    previous_hash = u''
    start_move_info = []
    for move in moves:
        # Each stored hash must match the hash recomputed from its
        # predecessor, otherwise the chain has been tampered with.
        if move.l10n_fr_hash != move._compute_hash(
                previous_hash=previous_hash):
            raise UserError(
                _('Corrupted data on journal entry with id %s.') % move.id)
        if not previous_hash:
            #save the date and sequence number of the first move hashed
            start_move_info = build_move_info(move)
        previous_hash = move.l10n_fr_hash
    end_move_info = build_move_info(move)

    report_dict = {
        'start_move_name': start_move_info[0],
        'start_move_ref': start_move_info[1],
        'end_move_name': end_move_info[0],
        'end_move_ref': end_move_info[1]
    }

    # Raise on success
    raise UserError(
        _('''Successful test !

                         The journal entries are guaranteed to be in their original and inalterable state
                         From: %(start_move_name)s %(start_move_ref)s
                         To: %(end_move_name)s %(end_move_ref)s

                         For this report to be legally meaningful, please download your certification from your customer account on izi.asia (Only for izi Enterprise users).'''
          ) % report_dict)
def get_google_drive_url(self, res_id, template_id):
    """Return the Google Drive document URL for ``res_id``, creating the
    document from ``template_id`` when none is attached yet.

    The document name comes from ``name_template`` %-interpolated with the
    record's fields (plus 'model' and 'filter').

    :raises UserError: if the name pattern references an unknown key.
    """
    self.ensure_one()
    self = self.sudo()
    model = self.model_id
    filter_name = self.filter_id.name if self.filter_id else False
    record = self.env[model.model].browse(res_id).read()[0]
    record.update({'model': model.name, 'filter': filter_name})
    name_gdocs = self.name_template
    try:
        name_gdocs = name_gdocs % record
    except (KeyError, ValueError, TypeError):
        # BUGFIX: was a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit; only %-formatting errors are
        # expected here.
        raise UserError(
            _("At least one key cannot be found in your Google Drive name pattern"
              ))

    # Reuse an existing attachment of the same name, if any.
    attachments = self.env["ir.attachment"].search([
        ('res_model', '=', model.model), ('name', '=', name_gdocs),
        ('res_id', '=', res_id)
    ])
    if attachments:
        url = attachments[0].url
    else:
        url = self.copy_doc(res_id, template_id, name_gdocs,
                            model.model).get('url')
    return url
def action_refuse(self):
    """Refuse the leave requests in ``self``.

    Records the refusing employee as first or second approver depending on
    the approval stage, unlinks any calendar meeting, cascades the refusal
    to requests created from a category, and removes the resource leaves.

    :raises UserError: if a request is not in a refusable state.
    :return: True
    """
    self._check_security_action_refuse()
    current_employee = self.env['hr.employee'].search(
        [('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state not in ['confirm', 'validate', 'validate1']:
            raise UserError(
                _('Leave request must be confirmed or validated in order to refuse it.'
                  ))

        # Refused at first approval stage -> refuser is the first approver;
        # otherwise the second one.
        refuse_vals = {'state': 'refuse'}
        if holiday.state == 'validate1':
            refuse_vals['first_approver_id'] = current_employee.id
        else:
            refuse_vals['second_approver_id'] = current_employee.id
        holiday.write(refuse_vals)

        # Delete the meeting
        if holiday.meeting_id:
            holiday.meeting_id.unlink()
        # If a category that created several holidays, cancel all related
        holiday.linked_request_ids.action_refuse()
    self._remove_resource_leave()
    return True
def _get_pos_mercury_config_id(self, config, journal_id):
    """Return the Mercury payment configuration tied to ``journal_id``
    within the POS ``config``.

    :raises UserError: when the journal has no Mercury configuration.
    """
    matching_journal = config.journal_ids.filtered(
        lambda j: j.id == journal_id)
    if not matching_journal or not matching_journal.pos_mercury_config_id:
        raise UserError(
            _("No Mercury configuration associated with the journal."))
    return matching_journal.pos_mercury_config_id
def unlink(self):
    """Forbid deletion of leave requests that left the draft workflow.

    :raises UserError: for the first request not in draft/cancel/confirm.
    """
    deletable_states = ['draft', 'cancel', 'confirm']
    for holiday in self:
        if holiday.state not in deletable_states:
            raise UserError(
                _('You cannot delete a leave which is in %s state.') %
                (holiday.state, ))
    return super(Holidays, self).unlink()
def _onchange_update_posted(self):
    """Block enabling 'update_posted' on journals of unalterable
    (French-law) companies.

    :raises UserError: when the flag is being turned on for such a company.
    """
    # Guard clauses: nothing to do unless the flag is set on an
    # unalterable company.
    if not self.update_posted:
        return
    if not self.company_id._is_accounting_unalterable():
        return
    field_string = self._fields['update_posted'].get_description(
        self.env)['string']
    raise UserError(
        _("According to the French law, you cannot modify a journal in order for its posted data to be updated or deleted. Unauthorized field: %s."
          ) % field_string)
def _read_xls_book(self, book): sheet = book.sheet_by_index(0) # emulate Sheet.get_rows for pre-0.9.4 for row in pycompat.imap(sheet.row, range(sheet.nrows)): values = [] for cell in row: if cell.ctype is xlrd.XL_CELL_NUMBER: is_float = cell.value % 1 != 0.0 values.append( pycompat.text_type(cell.value) if is_float else pycompat.text_type(int(cell.value))) elif cell.ctype is xlrd.XL_CELL_DATE: is_datetime = cell.value % 1 != 0.0 # emulate xldate_as_datetime for pre-0.9.3 dt = datetime.datetime(*xlrd.xldate.xldate_as_tuple( cell.value, book.datemode)) values.append( dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT ) if is_datetime else dt. strftime(DEFAULT_SERVER_DATE_FORMAT)) elif cell.ctype is xlrd.XL_CELL_BOOLEAN: values.append(u'True' if cell.value else u'False') elif cell.ctype is xlrd.XL_CELL_ERROR: raise ValueError( _("Error cell found while reading XLS/XLSX file: %s") % xlrd.error_text_from_code.get( cell.value, "unknown error code %s" % cell.value)) else: values.append(cell.value) if any(x for x in values if x.strip()): yield values
def _send_email(self):
    """ send notification email to a new portal user

    Prepares the signup token for each wizard line's partner and sends the
    portal welcome template in the user's language.

    :raises UserError: when the current user has no email address set.
    :return: True
    """
    if not self.env.user.email:
        raise UserError(
            _('You must have an email address in your User Preferences to send emails.'
              ))

    # determine subject and body in the portal user's language
    template = self.env.ref('portal.mail_template_data_portal_welcome')
    for wizard_line in self:
        target_user = wizard_line.user_id
        lang = target_user.lang
        partner = target_user.partner_id

        portal_url = partner.with_context(
            signup_force_type_in_url='',
            lang=lang)._get_signup_url_for_action()[partner.id]
        partner.signup_prepare()

        if not template:
            _logger.warning(
                "No email template found for sending email to the portal user"
            )
            continue
        template.with_context(dbname=self._cr.dbname,
                              portal_url=portal_url,
                              lang=lang).send_mail(wizard_line.id,
                                                   force_send=True)
    return True
def copy(self, default=None):
    """Duplicate the page together with a fresh copy of its view.

    :param default: optional dict of field overrides, honoured as in the
        standard ``copy``; computed name/url/view_id are only used when the
        caller did not supply them.
    """
    view = self.env['ir.ui.view'].browse(self.view_id.id)
    # website.page's ir.ui.view should have a different key than the one it
    # is copied from.
    # (eg: website_version: an ir.ui.view record with the same key is
    # expected to be the same ir.ui.view but from another version)
    new_view = view.copy({
        'key': view.key + '.copy',
        'name': '%s %s' % (view.name, _('(copy)'))
    })
    # BUGFIX: the caller-supplied ``default`` used to be silently discarded,
    # breaking every copy(default={...}) override. Merge it instead, with
    # caller values taking precedence.
    page_default = {
        'name': '%s %s' % (self.name, _('(copy)')),
        'url': self.env['website'].get_unique_path(self.url),
        'view_id': new_view.id,
    }
    page_default.update(default or {})
    return super(Page, self).copy(default=page_default)
def _get_pos_session(self):
    """Return the currently opened POS session of the logged-in user,
    marking it as logged in.

    :raises UserError: when the user has no opened session.
    """
    session_domain = [('state', '=', 'opened'),
                      ('user_id', '=', self.env.uid)]
    pos_session = self.env['pos.session'].search(session_domain, limit=1)
    if not pos_session:
        raise UserError(
            _("No opened point of sale session for user %s found") %
            self.env.user.name)
    pos_session.login()
    return pos_session
def unlink(self):
    """Forbid deletion of POS-generated statement lines for unalterable
    (French-law) companies.

    :raises UserError: for the first offending line.
    """
    for line in self:
        if (line.company_id._is_accounting_unalterable()
                and line.journal_id.journal_user):
            raise UserError(
                _('You cannot modify anything on a bank statement line (name: %s) that was created by point of sale operations.'
                  ) % (line.name, ))
    return super(AccountBankStatementLine, self).unlink()
def button_cancel(self):
    """Forbid cancelling posted entries for unalterable (French-law)
    companies.

    :raises UserError: when the company is unalterable.
    """
    #by-pass the normal behavior/message that tells people can cancel a posted journal entry
    #if the journal allows it.
    if self.company_id._is_accounting_unalterable():
        raise UserError(
            _('You cannot modify a posted journal entry. This ensures its inalterability.'
              ))
    # BUGFIX: propagate the parent's return value — the original dropped it,
    # always returning None to callers (e.g. the web client button handler).
    return super(AccountMove, self).button_cancel()
def view_init(self, fields):
    """ Check some preconditions before the wizard executes.

    :raises UserError: if any selected lead is already won (probability
        100), i.e. closed/dead.
    :return: False
    """
    active_ids = self._context.get('active_ids', [])
    for lead in self.env['crm.lead'].browse(active_ids):
        if lead.probability == 100:
            raise UserError(
                _("Closed/Dead leads cannot be converted into opportunities."
                  ))
    return False
def write(self, vals):
    """Override enforcing French anti-fraud inalterability on posted
    journal items.

    :raises UserError: when a protected field of a posted item of an
        unalterable company is being modified.
    """
    # restrict the operation in case we are trying to write a forbidden field
    touches_protected_field = bool(set(vals).intersection(LINE_FIELDS))
    if touches_protected_field:
        offending = any(
            line.company_id._is_accounting_unalterable()
            and line.move_id.state == 'posted' for line in self)
        if offending:
            raise UserError(
                _("According to the French law, you cannot modify a journal item in order for its posted data to be updated or deleted. Unauthorized field: %s."
                  ) % ', '.join(LINE_FIELDS))
    return super(AccountMoveLine, self).write(vals)
def _compute_ressource_id(self):
    """Compute ``google_drive_resource_id`` from the template URL.

    :raises UserError: when a record's URL yields no resource key.
    """
    # Legacy old-API remnant: the ORM ignores compute return values, but the
    # empty dict is kept for any direct caller relying on it.
    result = {}
    for record in self:
        resource_key = self._get_key_from_url(
            record.google_drive_template_url)
        if not resource_key:
            raise UserError(_("Please enter a valid Google Document URL."))
        record.google_drive_resource_id = resource_key
    return result
def open_track_speakers_list(self):
    """Return an action opening the partners speaking in these tracks."""
    speaker_ids = self.mapped('partner_id').ids
    action = {
        'name': _('Speakers'),
        'domain': [('id', 'in', speaker_ids)],
        'view_type': 'form',
        'view_mode': 'kanban,form',
        'res_model': 'res.partner',
        'view_id': False,
        'type': 'ir.actions.act_window',
    }
    return action
def _fields_view_get_address(self, arch): arch = super(Partner, self)._fields_view_get_address(arch) # render the partner address accordingly to address_view_id doc = etree.fromstring(arch) if doc.xpath("//field[@name='city_id']"): return arch for city_node in doc.xpath("//field[@name='city']"): replacement_xml = """ <div> <field name="country_enforce_cities" invisible="1"/> <field name='city' placeholder="%s" attrs="{'invisible': [('country_enforce_cities', '=', True), ('city_id', '!=', False)], 'readonly': [('type', '=', 'contact'), ('parent_id', '!=', False)]}"/> <field name='city_id' placeholder="%s" attrs="{'invisible': [('country_enforce_cities', '=', False)], 'readonly': [('type', '=', 'contact'), ('parent_id', '!=', False)]}" context="{'default_country_id': country_id}" domain="[('country_id', '=', country_id)]"/> </div> """ % (_('City'), _('City')) city_id_node = etree.fromstring(replacement_xml) city_node.getparent().replace(city_node, city_id_node) arch = etree.tostring(doc, encoding='unicode') return arch
def create(self, vals):
    """Override blocking creation of journals with 'update_posted' enabled
    for unalterable (French-law) companies.

    :raises UserError: when the flag is set for such a company.
    """
    # restrict the operation in case we are trying to set a forbidden field
    # BUGFIX: ``self`` is an empty recordset inside create(), so the former
    # ``self.company_id._is_accounting_unalterable()`` check could never
    # fire; resolve the company from the values (falling back to the
    # current user's company).
    if vals.get('company_id'):
        company = self.env['res.company'].browse(vals['company_id'])
    else:
        company = self.env.user.company_id
    if company._is_accounting_unalterable():
        if vals.get('update_posted'):
            field_string = self._fields['update_posted'].get_description(
                self.env)['string']
            raise UserError(
                _("According to the French law, you cannot modify a journal in order for its posted data to be updated or deleted. Unauthorized field: %s."
                  ) % field_string)
    return super(AccountJournal, self).create(vals)
def portal_my_leads(self, page=1, date_begin=None, date_end=None,
                    sortby=None, **kw):
    """Portal route listing the current user's assigned leads with
    sorting, archive (date) filtering and pagination.

    :param page: pager page number (1-based)
    :param date_begin: lower bound (exclusive) on create_date, optional
    :param date_end: upper bound (inclusive) on create_date, optional
    :param sortby: key into the search bar sortings ('date' by default)
    :return: rendered portal_my_leads template
    """
    values = self._prepare_portal_layout_values()
    CrmLead = request.env['crm.lead']
    domain = self.get_domain_my_lead(request.env.user)

    searchbar_sortings = {
        'date': {'label': _('Newest'), 'order': 'create_date desc'},
        'name': {'label': _('Name'), 'order': 'name'},
        'contact_name': {'label': _('Contact Name'), 'order': 'contact_name'},
    }

    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']

    # archive groups - Default Group By 'create_date'
    archive_groups = self._get_archive_groups('crm.lead', domain)
    if date_begin and date_end:
        domain += [('create_date', '>', date_begin),
                   ('create_date', '<=', date_end)]

    # pager
    lead_count = CrmLead.search_count(domain)
    pager = request.website.pager(
        url="/my/leads",
        url_args={'date_begin': date_begin, 'date_end': date_end,
                  'sortby': sortby},
        total=lead_count,
        page=page,
        step=self._items_per_page
    )

    # content according to pager and archive selected
    leads = CrmLead.search(domain, order=order,
                           limit=self._items_per_page,
                           offset=pager['offset'])

    values.update({
        'date': date_begin,
        'leads': leads,
        'page_name': 'lead',
        'archive_groups': archive_groups,
        'default_url': '/my/leads',
        'pager': pager,
        'searchbar_sortings': searchbar_sortings,
        'sortby': sortby,
    })
    return request.render("website_crm_partner_assign.portal_my_leads",
                          values)
def get_error_messages(self):
    """Collect blocking email problems for the portal-access wizard lines.

    Checks the lines that grant portal access to partners without users:
    empty/invalid emails, duplicated emails within the selection, and
    emails already used by an existing user login.

    :return: list of translated error paragraphs (empty when all is fine)
    """
    emails = []
    partners_error_empty = self.env['res.partner']
    partners_error_emails = self.env['res.partner']
    partners_error_user = self.env['res.partner']

    for wizard_user in self.with_context(active_test=False).filtered(
            lambda w: w.in_portal and not w.partner_id.user_ids):
        email = extract_email(wizard_user.email)
        if not email:
            partners_error_empty |= wizard_user.partner_id
        elif email in emails:
            partners_error_emails |= wizard_user.partner_id
        # An existing login with this email blocks portal-user creation.
        user = self.env['res.users'].sudo().with_context(
            active_test=False).search([('login', '=', email)])
        if user:
            partners_error_user |= wizard_user.partner_id
        emails.append(email)

    error_msg = []
    if partners_error_empty:
        error_msg.append(
            "%s\n- %s" %
            (_("Some contacts don't have a valid email: "), '\n- '.join(
                partners_error_empty.mapped('display_name'))))
    if partners_error_emails:
        error_msg.append(
            "%s\n- %s" %
            (_("Several contacts have the same email: "), '\n- '.join(
                partners_error_emails.mapped('email'))))
    if partners_error_user:
        # NOTE(review): this branch was garbled in the received source
        # ("…portal user:"******"To resolve…") and has been reconstructed
        # from the upstream portal wizard — verify against the original.
        error_msg.append(
            "%s\n- %s" %
            (_("Some contacts have the same email as an existing portal user:"
               ), '\n- '.join(
                   '%s <%s>' % (p.display_name, p.email)
                   for p in partners_error_user)))
    if error_msg:
        error_msg.append(
            _("To resolve this error, you can: \n"
              "- Correct the emails of the relevant contacts\n"
              "- Grant access only to contacts with unique emails"))
    return error_msg
def action_apply(self):
    """ From selected partners, add corresponding users to chosen portal
    group. It either granted existing user, or create new one (and add it
    to the group).

    :raises UserError: when email checks fail or the chosen group is not a
        portal group.
    """
    # BUGFIX: the text above was a bare string expression placed *after*
    # the first statement, so it was not the method docstring; moved here.
    self.env['res.partner'].check_access_rights('write')
    error_msg = self.get_error_messages()
    if error_msg:
        raise UserError("\n\n".join(error_msg))

    for wizard_user in self.sudo().with_context(active_test=False):
        group_portal = wizard_user.wizard_id.portal_id
        if not group_portal.is_portal:
            raise UserError(
                _('Group %s is not a portal') % group_portal.name)
        user = wizard_user.partner_id.user_ids[
            0] if wizard_user.partner_id.user_ids else None

        # update partner email, if a new one was introduced
        if wizard_user.partner_id.email != wizard_user.email:
            wizard_user.partner_id.write({'email': wizard_user.email})

        # add portal group to relative user of selected partners
        if wizard_user.in_portal:
            user_portal = None
            # create a user if necessary, and make sure it is in the portal group
            if not user:
                if wizard_user.partner_id.company_id:
                    company_id = wizard_user.partner_id.company_id.id
                else:
                    company_id = self.env[
                        'res.company']._company_default_get('res.users')
                user_portal = wizard_user.sudo().with_context(
                    company_id=company_id)._create_user()
            else:
                user_portal = user
            wizard_user.write({'user_id': user_portal.id})
            if not wizard_user.user_id.active or group_portal not in wizard_user.user_id.groups_id:
                wizard_user.user_id.write({
                    'active': True,
                    'groups_id': [(4, group_portal.id)]
                })
                # prepare for the signup process
                wizard_user.user_id.partner_id.signup_prepare()
            wizard_user.with_context(active_test=True)._send_email()
            wizard_user.refresh()
        else:
            # remove the user (if it exists) from the portal group
            if user and group_portal in user.groups_id:
                # if user belongs to portal only, deactivate it
                if len(user.groups_id) <= 1:
                    user.write({
                        'groups_id': [(3, group_portal.id)],
                        'active': False
                    })
                else:
                    user.write({'groups_id': [(3, group_portal.id)]})
def action_draft(self):
    """Reset the leave requests in ``self`` back to draft.

    Clears both approvers, cascades the reset to category-generated child
    requests and then deletes those children.

    :raises UserError: when the user may not reset the request, or when its
        state does not allow resetting.
    :return: True
    """
    for holiday in self:
        if not holiday.can_reset:
            raise UserError(
                _('Only an HR Manager or the concerned employee can reset to draft.'
                  ))
        if holiday.state not in ['confirm', 'refuse']:
            raise UserError(
                _('Leave request state must be "Refused" or "To Approve" in order to reset to Draft.'
                  ))
        holiday.write({
            'state': 'draft',
            'first_approver_id': False,
            'second_approver_id': False,
        })
        # Reset then remove the requests generated from a category.
        children = holiday.mapped('linked_request_ids')
        for child in children:
            child.action_draft()
        children.unlink()
    return True