def _interval_dates(self, frequency, company):
    """Compute the theoretical date from which account move lines should be fetched.

    :param str frequency: a valid value of the selection field on the object
        (daily, monthly, annually); frequencies are literal (daily means
        24 hours and so on)
    :param recordset company: the company for which the closing is done
    :return: dict with ``interval_from`` (theoretical date from which account
        move lines are fetched), ``date_stop`` (date to which the move lines
        are fetched, always now()) and ``name_interval``; the dates are in
        their Swerp Database string representation
    :rtype: dict
    """
    import calendar  # local: only needed for day clamping below

    date_stop = datetime.utcnow()
    interval_from = None
    name_interval = ''
    if frequency == 'daily':
        interval_from = date_stop - timedelta(days=1)
        name_interval = _('Daily Closing')
    elif frequency == 'monthly':
        # previous month, rolling the year back when we are in January
        month_target = date_stop.month > 1 and date_stop.month - 1 or 12
        year_target = month_target < 12 and date_stop.year or date_stop.year - 1
        # clamp the day so e.g. March 31st does not map to the invalid
        # February 31st (replace() would raise ValueError)
        last_day = calendar.monthrange(year_target, month_target)[1]
        interval_from = date_stop.replace(
            year=year_target, month=month_target,
            day=min(date_stop.day, last_day))
        name_interval = _('Monthly Closing')
    elif frequency == 'annually':
        year_target = date_stop.year - 1
        # clamp for Feb 29th when the previous year is not a leap year
        last_day = calendar.monthrange(year_target, date_stop.month)[1]
        interval_from = date_stop.replace(
            year=year_target, day=min(date_stop.day, last_day))
        name_interval = _('Annual Closing')
    return {
        'interval_from': FieldDateTime.to_string(interval_from),
        'date_stop': FieldDateTime.to_string(date_stop),
        'name_interval': name_interval,
    }
class RecruitmentStage(models.Model):
    # Stage of the recruitment kanban pipeline; records are ordered by
    # their 'sequence' column.
    _name = "hr.recruitment.stage"
    _description = "Recruitment Stages"
    _order = 'sequence'

    name = fields.Char("Stage name", required=True, translate=True)
    sequence = fields.Integer(
        "Sequence", default=10,
        help="Gives the sequence order when displaying a list of stages.")
    # When set, the stage only applies to that job's pipeline.
    job_id = fields.Many2one(
        'hr.job', string='Job Specific', ondelete='cascade',
        help='Specific job that uses this stage. Other jobs will not use this stage.')
    requirements = fields.Text("Requirements")
    # Mail template automatically posted on the applicant entering the stage.
    template_id = fields.Many2one(
        'mail.template', "Automated Email",
        help="If set, a message is posted on the applicant using the template when the applicant is set to the stage.")
    fold = fields.Boolean(
        "Folded in Recruitment Pipe",
        help="This stage is folded in the kanban view when there are no records in that stage to display.")
    # Kanban state legends (red / green / grey labels shown on records).
    legend_blocked = fields.Char(
        'Red Kanban Label', default=lambda self: _('Blocked'),
        translate=True, required=True)
    legend_done = fields.Char(
        'Green Kanban Label', default=lambda self: _('Ready for Next Stage'),
        translate=True, required=True)
    legend_normal = fields.Char(
        'Grey Kanban Label', default=lambda self: _('In Progress'),
        translate=True, required=True)

    @api.model
    def default_get(self, fields):
        # Drop 'default_job_id' from the context (unless explicitly kept via
        # 'hr_recruitment_stage_mono') so a stage created from a job's kanban
        # defaults to being shared across jobs rather than job-specific.
        if self._context and self._context.get('default_job_id') and not self._context.get('hr_recruitment_stage_mono', False):
            context = dict(self._context)
            context.pop('default_job_id')
            self = self.with_context(context)
        return super(RecruitmentStage, self).default_get(fields)
def _notify_get_groups(self, message, groups):
    """Handle HR users and officers recipients that can validate or refuse
    holidays directly from email."""
    groups = super(HolidaysAllocation, self)._notify_get_groups(message, groups)
    self.ensure_one()

    # Build the action buttons available for the allocation's current state.
    actions = []
    if self.state == 'confirm':
        actions.append({
            'url': self._notify_get_action_link(
                'controller', controller='/allocation/validate'),
            'title': _('Approve'),
        })
    if self.state in ('confirm', 'validate', 'validate1'):
        actions.append({
            'url': self._notify_get_action_link(
                'controller', controller='/allocation/refuse'),
            'title': _('Refuse'),
        })

    officer_group_id = self.env.ref('hr_holidays.group_hr_holidays_user').id

    def _is_officer(pdata):
        # Only internal users belonging to the holidays officer group.
        return pdata['type'] == 'user' and officer_group_id in pdata['groups']

    officer_entry = ('group_hr_holidays_user', _is_officer, {'actions': actions})
    return [officer_entry] + groups
def _parse_date_from_data(self, data, index, name, field_type, options):
    """Normalize, in place, the date/datetime strings of column ``index``
    of ``data`` into the server's serialized representation.

    :raises ValueError: when a cell cannot be parsed with the configured
        date/datetime formats (the message names the column and line)
    """
    dt = datetime.datetime
    serialize = fields.Date.to_string if field_type == 'date' else fields.Datetime.to_string
    date_pattern = options.get('date_format')
    datetime_pattern = options.get('datetime_format')

    for row_no, row in enumerate(data):
        if not row[index]:
            continue  # leave empty cells untouched
        raw = row[index].strip()
        try:
            # A datetime column may carry a dedicated datetime format;
            # try it first and fall back to the plain date format.
            if datetime_pattern and field_type == 'datetime':
                try:
                    row[index] = serialize(dt.strptime(raw, datetime_pattern))
                    continue
                except ValueError:
                    pass
            row[index] = serialize(dt.strptime(raw, date_pattern))
        except ValueError as e:
            raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, row_no + 1, e))
        except Exception as e:
            raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, row_no + 1, e))
def website_set_ga_data(self, website_id, ga_client_id, ga_analytics_key):
    """Store the Google Analytics client id and key on a website.

    :param website_id: id of the target website, falsy for the current one
    :param ga_client_id: Google OAuth client id (must end with
        '.apps.googleusercontent.com')
    :param ga_analytics_key: Google Analytics tracking key
    :return: True on success, or a dict with an 'error' payload when the
        user lacks rights or the credentials look malformed
    """
    if not request.env.user.has_group('base.group_system'):
        return {
            'error': {
                'title': _('Access Error'),
                'message': _('You do not have sufficient rights to perform that action.'),
            }
        }
    # Guard ga_client_id before calling .endswith(): a missing client id
    # used to raise AttributeError instead of returning the error payload.
    if not ga_analytics_key or not ga_client_id \
            or not ga_client_id.endswith('.apps.googleusercontent.com'):
        return {
            'error': {
                'title': _('Incorrect Client ID / Key'),
                'message': _('The Google Analytics Client ID or Key you entered seems incorrect.'),
            }
        }
    Website = request.env['website']
    current_website = website_id and Website.browse(website_id) or Website.get_current_website()
    # Persist through res.config.settings so the regular configuration
    # plumbing (execute) applies the values.
    request.env['res.config.settings'].create({
        'google_management_client_id': ga_client_id,
        'google_analytics_key': ga_analytics_key,
        'website_id': current_website.id,
    }).execute()
    return True
def write(self, vals):
    # French anti-fraud certification: posted/invoiced POS orders are
    # inalterable. This override (1) blocks writes to protected fields on
    # such orders, (2) forbids overwriting the hash-chain fields, and
    # (3) assigns the secure sequence number and hash right after an order
    # reaches a posted-like state.
    has_been_posted = False
    for order in self:
        if order.company_id._is_accounting_unalterable():
            # write the hash and the secure_sequence_number when posting or invoicing an pos.order
            if vals.get('state') in ['paid', 'done', 'invoiced']:
                has_been_posted = True
            # restrict the operation in case we are trying to write a forbidden field
            if (order.state in ['paid', 'done', 'invoiced']
                    and set(vals).intersection(ORDER_FIELDS)):
                raise UserError(
                    _('According to the French law, you cannot modify a point of sale order. Forbidden fields: %s.') % ', '.join(ORDER_FIELDS))
            # restrict the operation in case we are trying to overwrite existing hash
            if (order.l10n_fr_hash and 'l10n_fr_hash' in vals) or \
                    (order.l10n_fr_secure_sequence_number and 'l10n_fr_secure_sequence_number' in vals):
                raise UserError(
                    _('You cannot overwrite the values ensuring the inalterability of the point of sale.'))
    res = super(pos_order, self).write(vals)
    # write the hash and the secure_sequence_number when posting or invoicing a pos order
    if has_been_posted:
        # only orders not yet chained (no sequence number and no hash)
        for order in self.filtered(
                lambda o: o.company_id._is_accounting_unalterable()
                and not (o.l10n_fr_secure_sequence_number or o.l10n_fr_hash)):
            new_number = order.company_id.l10n_fr_pos_cert_sequence_id.next_by_id()
            vals_hashing = {'l10n_fr_secure_sequence_number': new_number,
                            'l10n_fr_hash': order._get_new_hash(new_number)}
            res |= super(pos_order, order).write(vals_hashing)
    return res
def format_failure_reason(self):
    """Return a human-readable label for this notification's failure."""
    self.ensure_one()
    if self.failure_type == 'UNKNOWN':
        # Unknown failures carry a free-form reason instead of a selection label.
        return _("Unknown error") + ": %s" % (self.failure_reason or '')
    selection_labels = dict(type(self).failure_type.selection)
    return selection_labels.get(self.failure_type, _('No Error'))
def write(self, vals):
    # French anti-fraud certification: posted journal entries are
    # inalterable. This override (1) blocks writes to protected fields on
    # posted moves, (2) forbids overwriting the hash-chain fields, and
    # (3) assigns the secure sequence number and hash right after posting.
    has_been_posted = False
    for move in self:
        if move.company_id._is_accounting_unalterable():
            # write the hash and the secure_sequence_number when posting an account.move
            if vals.get('state') == 'posted':
                has_been_posted = True
            # restrict the operation in case we are trying to write a forbidden field
            if (move.state == "posted" and set(vals).intersection(MOVE_FIELDS)):
                raise UserError(
                    _("According to the French law, you cannot modify a journal entry in order for its posted data to be updated or deleted. Unauthorized field: %s.") % ', '.join(MOVE_FIELDS))
            # restrict the operation in case we are trying to overwrite existing hash
            if (move.l10n_fr_hash and 'l10n_fr_hash' in vals) or \
                    (move.l10n_fr_secure_sequence_number and 'l10n_fr_secure_sequence_number' in vals):
                raise UserError(
                    _('You cannot overwrite the values ensuring the inalterability of the accounting.'))
    res = super(AccountMove, self).write(vals)
    # write the hash and the secure_sequence_number when posting an account.move
    if has_been_posted:
        # only moves not yet chained (no sequence number and no hash)
        for move in self.filtered(
                lambda m: m.company_id._is_accounting_unalterable()
                and not (m.l10n_fr_secure_sequence_number or m.l10n_fr_hash)):
            new_number = move.company_id.l10n_fr_secure_sequence_id.next_by_id()
            vals_hashing = {'l10n_fr_secure_sequence_number': new_number,
                            'l10n_fr_hash': move._get_new_hash(new_number)}
            res |= super(AccountMove, move).write(vals_hashing)
    return res
def _compute_duration_display(self):
    """Render the allocation duration as '<amount> hours' or '<amount> days',
    depending on the request unit of the leave type."""
    for allocation in self:
        if allocation.type_request_unit == 'hour':
            amount = float_round(allocation.number_of_hours_display, precision_digits=2)
            unit = _('hours')
        else:
            amount = float_round(allocation.number_of_days_display, precision_digits=2)
            unit = _('days')
        # %g drops trailing zeros (e.g. '2 days', '2.5 hours')
        allocation.duration_display = '%g %s' % (amount, unit)
def _check_hash_integrity(self, company_id):
    """Checks that all posted or invoiced pos orders have still the same data as
    when they were posted and raises an error with the result.
    """
    def build_order_info(order):
        # -> [localized date_order, secure sequence number, name,
        #     receipt reference string, localized write_date]
        entry_reference = _('(Receipt ref.: %s)')
        order_reference_string = order.pos_reference and entry_reference % order.pos_reference or ''
        return [ctx_tz(order, 'date_order'), order.l10n_fr_secure_sequence_number,
                order.name, order_reference_string, ctx_tz(order, 'write_date')]

    orders = self.search([('state', 'in', ['paid', 'done', 'invoiced']),
                          ('company_id', '=', company_id),
                          ('l10n_fr_secure_sequence_number', '!=', 0)],
                         order="l10n_fr_secure_sequence_number ASC")
    if not orders:
        raise UserError(
            _('There isn\'t any order flagged for data inalterability yet for the company %s. This mechanism only runs for point of sale orders generated after the installation of the module France - Certification CGI 286 I-3 bis. - POS') % self.env.user.company_id.name)
    previous_hash = u''
    start_order_info = []
    # Walk the chain in secure-sequence order, recomputing each hash from
    # its predecessor; any mismatch means the data was altered.
    for order in orders:
        if order.l10n_fr_hash != order._compute_hash(previous_hash=previous_hash):
            raise UserError(
                _('Corrupted data on point of sale order with id %s.') % order.id)
        previous_hash = order.l10n_fr_hash

    orders_sorted_date = orders.sorted(lambda o: o.date_order)
    start_order_info = build_order_info(orders_sorted_date[0])
    end_order_info = build_order_info(orders_sorted_date[-1])
    report_dict = {'start_order_name': start_order_info[2],
                   'start_order_ref': start_order_info[3],
                   'start_order_date': start_order_info[0],
                   'end_order_name': end_order_info[2],
                   'end_order_ref': end_order_info[3],
                   'end_order_date': end_order_info[0]}

    # Raise on success
    raise UserError(_('''Successful test !

The point of sale orders are guaranteed to be in their original and inalterable state
From: %(start_order_name)s %(start_order_ref)s recorded on %(start_order_date)s
To: %(end_order_name)s %(end_order_ref)s recorded on %(end_order_date)s

For this report to be legally meaningful, please download your certification from your customer account on Swerp.com (Only for Swerp Enterprise users).''') % report_dict)
def _check_approval_update(self, state):
    """Check if target state is achievable by the current user.

    :param str state: target state of the leave request
    :raises UserError: when the current user is not allowed to move the
        request to ``state`` (not an officer/manager, approving their own
        request, not the employee's manager, ...)
    """
    current_employee = self.env['hr.employee'].search(
        [('user_id', '=', self.env.uid)], limit=1)
    is_officer = self.env.user.has_group('hr_holidays.group_hr_holidays_user')
    is_manager = self.env.user.has_group('hr_holidays.group_hr_holidays_manager')
    for holiday in self:
        val_type = holiday.holiday_status_id.sudo().validation_type
        if state == 'confirm':
            # anyone may submit their own request
            continue
        if state == 'draft':
            if holiday.employee_id != current_employee and not is_manager:
                raise UserError(
                    _('Only a Leave Manager can reset other people leaves.'))
            continue
        if not is_officer:
            raise UserError(
                _('Only a Leave Officer or Manager can approve or refuse leave requests.'))
        # From here on the user is at least an officer (the redundant
        # `if is_officer:` guard the original had after the raise is dropped).
        # use ir.rule based first access check: department, members, ... (see security.xml)
        holiday.check_access_rule('write')
        if holiday.employee_id == current_employee and not is_manager:
            raise UserError(
                _('Only a Leave Manager can approve its own requests.'))
        if (state == 'validate1' and val_type == 'both') or \
                (state == 'validate' and val_type == 'manager'):
            # first approval: must be the employee's manager or a holidays manager
            manager = holiday.employee_id.parent_id or holiday.employee_id.department_id.manager_id
            if (manager and manager != current_employee) and \
                    not self.env.user.has_group('hr_holidays.group_hr_holidays_manager'):
                raise UserError(
                    _('You must be either %s\'s manager or Leave manager to approve this leave') % (holiday.employee_id.name))
        if state == 'validate' and val_type == 'both':
            # second approval is reserved to holidays managers
            # (message typo fixed: "an Leave" -> "a Leave")
            if not self.env.user.has_group('hr_holidays.group_hr_holidays_manager'):
                raise UserError(
                    _('Only a Leave Manager can apply the second approval on leave requests.'))
def _check_hash_integrity(self, company_id):
    """Checks that all posted moves have still the same data as when they
    were posted and raises an error with the result.
    """
    def build_move_info(move):
        # -> [name, reference string]
        entry_reference = _('(ref.: %s)')
        move_reference_string = move.ref and entry_reference % move.ref or ''
        return [move.name, move_reference_string]

    moves = self.search([('state', '=', 'posted'),
                         ('company_id', '=', company_id),
                         ('l10n_fr_secure_sequence_number', '!=', 0)],
                        order="l10n_fr_secure_sequence_number ASC")
    if not moves:
        raise UserError(
            _('There isn\'t any journal entry flagged for data inalterability yet for the company %s. This mechanism only runs for journal entries generated after the installation of the module France - Certification CGI 286 I-3 bis.') % self.env.user.company_id.name)
    previous_hash = u''
    start_move_info = []
    # Walk the chain in secure-sequence order, recomputing each hash from
    # its predecessor; any mismatch means the data was altered.
    for move in moves:
        if move.l10n_fr_hash != move._compute_hash(previous_hash=previous_hash):
            raise UserError(
                _('Corrupted data on journal entry with id %s.') % move.id)
        if not previous_hash:
            #save the date and sequence number of the first move hashed
            start_move_info = build_move_info(move)
        previous_hash = move.l10n_fr_hash
    end_move_info = build_move_info(move)

    report_dict = {'start_move_name': start_move_info[0],
                   'start_move_ref': start_move_info[1],
                   'end_move_name': end_move_info[0],
                   'end_move_ref': end_move_info[1]}

    # Raise on success
    raise UserError(_('''Successful test !

The journal entries are guaranteed to be in their original and inalterable state
From: %(start_move_name)s %(start_move_ref)s
To: %(end_move_name)s %(end_move_ref)s

For this report to be legally meaningful, please download your certification from your customer account on Swerp.com (Only for Swerp Enterprise users).''') % report_dict)
def action_refuse(self):
    """Refuse the selected leave requests, recording the current employee
    as the approver, and propagate to linked category requests."""
    current_employee = self.env['hr.employee'].search(
        [('user_id', '=', self.env.uid)], limit=1)
    if any(holiday.state not in ('confirm', 'validate', 'validate1')
           for holiday in self):
        raise UserError(
            _('Leave request must be confirmed or validated in order to refuse it.'))

    first_approval = self.filtered(lambda hol: hol.state == 'validate1')
    first_approval.write({
        'state': 'refuse',
        'first_approver_id': current_employee.id,
    })
    (self - first_approval).write({
        'state': 'refuse',
        'second_approver_id': current_employee.id,
    })
    # If a category that created several holidays, cancel all related
    linked_requests = self.mapped('linked_request_ids')
    if linked_requests:
        linked_requests.action_refuse()
    self.activity_update()
    return True
def _onchange_update_posted(self):
    """Forbid enabling 'Allow Cancelling Entries' on journals of companies
    subject to the French inalterability rules."""
    if not (self.update_posted and self.company_id._is_accounting_unalterable()):
        return
    field_string = self._fields['update_posted'].get_description(self.env)['string']
    raise UserError(
        _("According to the French law, you cannot modify a journal in order for its posted data to be updated or deleted. Unauthorized field: %s.") % field_string)
def get_google_drive_url(self, res_id, template_id):
    """Return the Google Drive URL for the record ``res_id``.

    Reuses an existing attachment whose name matches the rendered name
    pattern; otherwise copies the template document and returns the new url.

    :raises UserError: when the name pattern references a key absent from
        the record's values
    """
    self.ensure_one()
    self = self.sudo()
    model = self.model_id
    filter_name = self.filter_id.name if self.filter_id else False
    record = self.env[model.model].browse(res_id).read()[0]
    record.update({'model': model.name, 'filter': filter_name})
    name_gdocs = self.name_template
    try:
        name_gdocs = name_gdocs % record
    except (KeyError, ValueError, TypeError):
        # %-formatting raises KeyError for an unknown key and
        # ValueError/TypeError for a malformed pattern; the previous bare
        # `except:` also swallowed KeyboardInterrupt/SystemExit.
        raise UserError(
            _("At least one key cannot be found in your Google Drive name pattern."))

    attachments = self.env["ir.attachment"].search([
        ('res_model', '=', model.model),
        ('name', '=', name_gdocs),
        ('res_id', '=', res_id)
    ])
    if attachments:
        url = attachments[0].url
    else:
        url = self.copy_doc(res_id, template_id, name_gdocs, model.model).get('url')
    return url
def _check_validity_dates(self):
    """Ensure the validity period of each leave type is well ordered
    (start not after stop)."""
    for leave_type in self:
        start = leave_type.validity_start
        stop = leave_type.validity_stop
        if start and stop and start > stop:
            raise ValidationError(
                _("End of validity period should be greater than start of validity period"))
def write(self, vals):
    """Prevent non-admin users from rewiring a notification's message or
    recipient."""
    touches_protected = 'mail_message_id' in vals or 'res_partner_id' in vals
    if touches_protected and not self.env.user._is_admin():
        raise AccessError(
            _("Can not update the message or recipient of a notification."))
    return super(Notification, self).write(vals)
def _send_email(self):
    """ send notification email to a new portal user """
    if not self.env.user.email:
        raise UserError(
            _('You must have an email address in your User Preferences to send emails.'))

    # determine subject and body in the portal user's language
    template = self.env.ref('portal.mail_template_data_portal_welcome')
    for wizard_line in self:
        lang = wizard_line.user_id.lang
        partner = wizard_line.user_id.partner_id
        portal_url = partner.with_context(
            signup_force_type_in_url='',
            lang=lang)._get_signup_url_for_action()[partner.id]
        partner.signup_prepare()
        if not template:
            _logger.warning(
                "No email template found for sending email to the portal user")
            continue
        template.with_context(
            dbname=self._cr.dbname,
            portal_url=portal_url,
            lang=lang).send_mail(wizard_line.id, force_send=True)
    return True
def unlink(self):
    """Only draft, cancelled or to-approve allocations may be deleted."""
    deletable_states = ('draft', 'cancel', 'confirm')
    for holiday in self:
        if holiday.state not in deletable_states:
            raise UserError(
                _('You cannot delete a leave which is in %s state.') % (holiday.state,))
    return super(HolidaysAllocation, self).unlink()
def _convert_import_data(self, fields, options):
    """ Extracts the input BaseModel and fields list (with ``False``-y
    placeholders for fields to *not* import) into a format
    Model.import_data can use: a fields list without holes and the
    precisely matching data matrix

    :param list(str|bool) fields: field names, with falsy placeholders
        for columns to skip
    :returns: (data, fields)
    :rtype: (list(list(str)), list(str))
    :raises ValueError: in case the import data could not be converted
    """
    # Positions of the columns actually mapped to a field.
    indices = [pos for pos, field in enumerate(fields) if field]
    if not indices:
        raise ValueError(_("You must configure at least one field to import"))

    # operator.itemgetter with a single index returns an atom rather than
    # a 1-tuple, so special-case it.
    if len(indices) == 1:
        only = indices[0]
        mapper = lambda row: [row[only]]
    else:
        mapper = operator.itemgetter(*indices)

    import_fields = [f for f in fields if f]

    rows_to_import = self._read_file(options)
    if options.get('headers'):
        # skip the header row
        rows_to_import = itertools.islice(rows_to_import, 1, None)
    # drop completely empty rows (e.g. from filtering out o2m fields)
    data = [list(row)
            for row in pycompat.imap(mapper, rows_to_import)
            if any(row)]
    return data, import_fields
def _read_xls_book(self, book):
    """Yield the rows of the first sheet of an XLS/XLSX workbook as lists
    of strings, skipping rows that contain only blank cells."""
    sheet = book.sheet_by_index(0)
    # emulate Sheet.get_rows for pre-0.9.4
    for row in pycompat.imap(sheet.row, range(sheet.nrows)):
        values = []
        for cell in row:
            ctype = cell.ctype
            if ctype is xlrd.XL_CELL_NUMBER:
                # keep integral numbers free of a trailing '.0'
                if cell.value % 1 != 0.0:
                    values.append(pycompat.text_type(cell.value))
                else:
                    values.append(pycompat.text_type(int(cell.value)))
            elif ctype is xlrd.XL_CELL_DATE:
                is_datetime = cell.value % 1 != 0.0
                # emulate xldate_as_datetime for pre-0.9.3
                dt = datetime.datetime(
                    *xlrd.xldate.xldate_as_tuple(cell.value, book.datemode))
                fmt = (DEFAULT_SERVER_DATETIME_FORMAT if is_datetime
                       else DEFAULT_SERVER_DATE_FORMAT)
                values.append(dt.strftime(fmt))
            elif ctype is xlrd.XL_CELL_BOOLEAN:
                values.append(u'True' if cell.value else u'False')
            elif ctype is xlrd.XL_CELL_ERROR:
                raise ValueError(
                    _("Error cell found while reading XLS/XLSX file: %s")
                    % xlrd.error_text_from_code.get(
                        cell.value, "unknown error code %s" % cell.value))
            else:
                values.append(cell.value)
        if any(x for x in values if x.strip()):
            yield values
def get_google_drive_config(self, res_model, res_id):
    ''' Function called by the js, when no google doc are yet associated with a record, with the aim to create one. It
    will first seek for a google.docs.config associated with the model `res_model` to find out what's the template
    of google doc to copy (this is usefull if you want to start with a non-empty document, a type or a name
    different than the default values). If no config is associated with the `res_model`, then a blank text document
    with a default name is created.
    :param res_model: the object for which the google doc is created
    :param ids: the list of ids of the objects for which the google doc is created. This list is supposed to have
        a length of 1 element only (batch processing is not supported in the code, though nothing really prevent it)
    :return: the config id and config name
    '''
    # TO DO in master: fix my signature and my model
    if isinstance(res_model, pycompat.string_types):
        res_model = self.env['ir.model'].search([('model', '=', res_model)]).id
    if not res_id:
        raise UserError(_("Creating google drive may only be done by one at a time."))
    # check if a model is configured with a template
    configs = self.search([('model_id', '=', res_model)])
    config_values = []
    for config in configs.sudo():
        if config.filter_id:
            if config.filter_id.user_id and config.filter_id.user_id.id != self.env.user.id:
                # skip filters private to another user
                continue
            try:
                # the filter's domain must be a static literal; anything
                # dynamic (time/user dependent) cannot be evaluated here.
                # literal_eval raises ValueError/SyntaxError on such input
                # (the previous bare `except:` swallowed everything).
                domain = [('id', 'in', [res_id])] + ast.literal_eval(config.filter_id.domain)
            except (ValueError, SyntaxError):
                raise UserError(
                    _("The document filter must not include any 'dynamic' part, so it should not be based on the current time or current user, for example."))
            additionnal_context = ast.literal_eval(config.filter_id.context)
            google_doc_configs = self.env[config.filter_id.model_id].with_context(
                **additionnal_context).search(domain)
            if google_doc_configs:
                config_values.append({'id': config.id, 'name': config.name})
        else:
            config_values.append({'id': config.id, 'name': config.name})
    return config_values
def action_confirm(self):
    """Move draft leave requests to the 'confirm' (To Approve) state."""
    if any(holiday.state != 'draft' for holiday in self):
        raise UserError(
            _('Leave request must be in Draft state ("To Submit") in order to confirm it.'))
    res = self.write({'state': 'confirm'})
    self.activity_update()
    return res
def _get_pos_mercury_config_id(self, config, journal_id):
    """Return the Mercury payment configuration linked to ``journal_id``
    within the POS ``config``, or raise if there is none."""
    journal = config.journal_ids.filtered(lambda r: r.id == journal_id)
    if not (journal and journal.pos_mercury_config_id):
        raise UserError(
            _("No Mercury configuration associated with the journal."))
    return journal.pos_mercury_config_id
def unlink(self):
    """Forbid deleting bank statement lines generated by POS sessions of
    companies subject to the French inalterability rules."""
    protected = self.filtered(
        lambda s: s.company_id._is_accounting_unalterable()
        and s.journal_id.journal_user)
    if protected:
        raise UserError(
            _('You cannot modify anything on a bank statement line (name: %s) that was created by point of sale operations.') % (protected[0].name,))
    return super(AccountBankStatementLine, self).unlink()
def button_cancel(self):
    # by-pass the normal behavior/message that tells people can cancel a posted
    # journal entry if the journal allows it.
    if self.company_id._is_accounting_unalterable():
        raise UserError(
            _('You cannot modify a posted journal entry. This ensures its inalterability.'))
    # propagate the parent's return value (the original dropped it, so the
    # client got None instead of the standard action result)
    return super(AccountMove, self).button_cancel()
def _check_session_timing(self):
    """Raise if this POS session was opened more than 24 hours ago
    (French law requires daily session closing)."""
    self.ensure_one()
    now = datetime.utcnow()
    opened_at = Datetime.from_string(self.start_at)
    if opened_at < now - timedelta(hours=24):
        raise UserError(
            _("This session has been opened another day. To comply with the French law, you should close sessions on a daily basis. Please close session %s and open a new one.") % self.name)
    return True
def write(self, vals):
    """Block writes to protected fields of POS order lines whose order is
    done/invoiced in an unalterable (French-law) company."""
    # restrict the operation in case we are trying to write a forbidden field
    if set(vals).intersection(LINE_FIELDS):
        def locked(line):
            return (line.company_id._is_accounting_unalterable()
                    and line.order_id.state in ('done', 'invoiced'))
        if any(locked(line) for line in self):
            raise UserError(
                _('According to the French law, you cannot modify a point of sale order line. Forbidden fields: %s.') % ', '.join(LINE_FIELDS))
    return super(PosOrderLine, self).write(vals)
def build_order_info(order):
    """Collect display data for an order: [localized date_order, secure
    sequence number, name, receipt reference string, localized write_date]."""
    if order.pos_reference:
        receipt_ref = _('(Receipt ref.: %s)') % order.pos_reference
    else:
        receipt_ref = ''
    return [
        ctx_tz(order, 'date_order'),
        order.l10n_fr_secure_sequence_number,
        order.name,
        receipt_ref,
        ctx_tz(order, 'write_date'),
    ]
def view_init(self, fields):
    """ Check some preconditions before the wizard executes. """
    lead_ids = self._context.get('active_ids', [])
    for lead in self.env['crm.lead'].browse(lead_ids):
        if lead.probability == 100:
            # a 100% probability lead is already closed (won/dead)
            raise UserError(
                _("Closed/Dead leads cannot be converted into opportunities."))
    return False