class L10nLatamDocumentType(models.Model):
    """Legal document type used by Latam localizations.

    Each record describes a kind of fiscal document (invoice, debit note,
    credit note, ...) valid in a given country.  Localization modules extend
    this model to add their own validation and formatting rules.
    """
    _name = 'l10n_latam.document.type'
    _description = 'Latam Document Type'
    _order = 'sequence, id'

    active = fields.Boolean(default=True)
    sequence = fields.Integer(
        default=10, required=True,
        help='To set in which order show the documents type taking into account the most'
             ' commonly used first')
    country_id = fields.Many2one(
        'res.country', required=True, index=True,
        help='Country in which this type of document is valid')
    name = fields.Char(required=True, index=True, help='The document name')
    doc_code_prefix = fields.Char(
        'Document Code Prefix',
        help="Prefix for Documents Codes on Invoices and Account Moves. For eg. 'FA ' will"
             " build 'FA 0001-0000001' Document Number")
    code = fields.Char(help='Code used by different localizations')
    report_name = fields.Char(
        'Name on Reports',
        help='Name that will be printed in reports, for example "CREDIT NOTE"')
    internal_type = fields.Selection(
        [('invoice', 'Invoices'), ('debit_note', 'Debit Notes'),
         ('credit_note', 'Credit Notes')],
        index=True,
        help='Analog to flectra account.move.move_type but with more options allowing to identify the kind of document we are'
             ' working with. (not only related to account.move, could be for documents of other models like stock.picking)')

    def _format_document_number(self, document_number):
        """Hook for localizations to validate/format a document number.

        Overrides are expected to raise when ``document_number`` is invalid
        and to return it re-formatted against the localization's pattern.
        The base implementation returns the number unchanged.
        """
        self.ensure_one()
        return document_number

    def name_get(self):
        """Prepend the document code, when set, to the displayed name."""
        return [
            (record.id,
             '(%s) %s' % (record.code, record.name) if record.code else record.name)
            for record in self
        ]

    @api.model
    def _name_search(self, name, args=None, operator='ilike', limit=100, name_get_uid=None):
        """Match the search term against both ``name`` and ``code``."""
        extra_domain = args or []
        if not (name or '').strip() and operator == 'ilike':
            # Empty search term: no extra filtering.
            domain = []
        else:
            domain = ['|', ('name', 'ilike', name), ('code', 'ilike', name)]
        return self._search(
            expression.AND([domain, extra_domain]),
            limit=limit, access_rights_uid=name_get_uid)

    def _filter_taxes_included(self, taxes):
        """Hook for localizations: among ``taxes``, return the ones to be shown
        as included on reports of this document type.  All taxes are going to
        be discriminated except the ones returned by this method.  The base
        implementation returns an empty ``account.tax`` recordset.
        """
        self.ensure_one()
        return self.env['account.tax']
class ProjectCreateSalesOrder(models.TransientModel):
    """Wizard to make a project billable by creating a new sales order for it,
    or by linking it to an existing one.

    Depending on the project ``pricing_type`` ('fixed_rate' vs
    'employee_rate'), sale order lines are created per product or per
    employee/product/price combination, and existing timesheets are attached
    to the relevant sale order lines.
    """
    _name = 'project.create.sale.order'
    _description = "Create SO from project"

    @api.model
    def default_get(self, fields):
        """Prefill the wizard from the active project.

        Raises a UserError when not launched from a project, or when the
        project already has a sale order.  Default wizard lines are built
        from the project's employee mappings (employee_rate) or from the
        timesheet products of its tasks (other pricing types).
        """
        result = super(ProjectCreateSalesOrder, self).default_get(fields)
        active_model = self._context.get('active_model')
        if active_model != 'project.project':
            raise UserError(
                _("You can only apply this action from a project."))
        active_id = self._context.get('active_id')
        if 'project_id' in fields and active_id:
            project = self.env['project.project'].browse(active_id)
            if project.sale_order_id:
                raise UserError(_("The project has already a sale order."))
            result['project_id'] = active_id
            if not result.get('partner_id', False):
                result['partner_id'] = project.partner_id.id
            if project.bill_type == 'customer_project' and not result.get(
                    'line_ids', False):
                if project.pricing_type == 'employee_rate':
                    # One wizard line per mapped employee; fall back to the
                    # generic timesheet product when none is set on the map.
                    # NOTE(review): if the 'sale_timesheet.time_product' xmlid
                    # is missing, env.ref returns False and .id below would
                    # fail — presumably the record always exists; confirm.
                    default_product = self.env.ref(
                        'sale_timesheet.time_product', False)
                    result['line_ids'] = [(0, 0, {
                        'employee_id': e.employee_id.id,
                        'product_id': e.timesheet_product_id.id or default_product.id,
                        'price_unit': e.price_unit if e.timesheet_product_id else default_product.lst_price
                    }) for e in project.sale_line_employee_ids]
                    # Also propose employees that logged timesheets but are
                    # not yet in the project's employee mapping.
                    employee_from_timesheet = project.task_ids.timesheet_ids.employee_id - project.sale_line_employee_ids.employee_id
                    result['line_ids'] += [(0, 0, {
                        'employee_id': e.id,
                        'product_id': default_product.id,
                        'price_unit': default_product.lst_price
                    }) for e in employee_from_timesheet]
                else:
                    # Fixed rate: one wizard line per distinct timesheet
                    # product found on the project's tasks.
                    result['line_ids'] = [(0, 0, {
                        'product_id': p.id,
                        'price_unit': p.lst_price
                    }) for p in project.task_ids.timesheet_product_id]
        return result

    project_id = fields.Many2one(
        'project.project', "Project", domain=[('sale_line_id', '=', False)],
        help="Project for which we are creating a sales order", required=True)
    company_id = fields.Many2one(related='project_id.company_id')
    partner_id = fields.Many2one('res.partner', string="Customer",
                                 required=True,
                                 help="Customer of the sales order")
    commercial_partner_id = fields.Many2one(
        related='partner_id.commercial_partner_id')
    pricing_type = fields.Selection(related="project_id.pricing_type")
    link_selection = fields.Selection(
        [('create', 'Create a new sales order'),
         ('link', 'Link to an existing sales order')],
        required=True, default='create')
    sale_order_id = fields.Many2one(
        'sale.order', string="Sales Order",
        domain="['|', '|', ('partner_id', '=', partner_id), ('partner_id', 'child_of', commercial_partner_id), ('partner_id', 'parent_of', partner_id)]"
    )
    line_ids = fields.One2many('project.create.sale.order.line', 'wizard_id',
                               string='Lines')
    info_invoice = fields.Char(compute='_compute_info_invoice')

    @api.depends('sale_order_id', 'link_selection')
    def _compute_info_invoice(self):
        """Build the informative message showing how many uninvoiced
        timesheet hours/days will be added to the (new or selected) SO."""
        for line in self:
            tasks = line.project_id.tasks.filtered(
                lambda t: not t.non_allow_billable)
            domain = self.env[
                'sale.order.line']._timesheet_compute_delivered_quantity_domain(
                )
            # Only timesheets not yet linked to a SO line nor invoiced.
            timesheet = self.env['account.analytic.line'].read_group(
                domain + [('task_id', 'in', tasks.ids),
                          ('so_line', '=', False),
                          ('timesheet_invoice_id', '=', False)],
                ['unit_amount'], ['task_id'])
            unit_amount = round(
                sum(t.get('unit_amount', 0)
                    for t in timesheet), 2) if timesheet else 0
            if not unit_amount:
                line.info_invoice = False
                continue
            company_uom = self.env.company.timesheet_encode_uom_id
            label = _("hours")
            if company_uom == self.env.ref('uom.product_uom_day'):
                label = _("days")
            if line.link_selection == 'create':
                line.info_invoice = _(
                    "%(amount)s %(label)s will be added to the new Sales Order.",
                    amount=unit_amount, label=label)
            else:
                line.info_invoice = _(
                    "%(amount)s %(label)s will be added to the selected Sales Order.",
                    amount=unit_amount, label=label)

    @api.onchange('partner_id')
    def _onchange_partner_id(self):
        # The sale order domain depends on the partner: reset the selection.
        self.sale_order_id = False

    def action_link_sale_order(self):
        """Link the project (and its tasks/timesheets) to the selected
        existing sale order instead of creating a new one."""
        task_no_sale_line = self.project_id.tasks.filtered(
            lambda task: not task.sale_line_id)
        # link the project to the SO line (first order line of the SO)
        self.project_id.write({
            'sale_line_id': self.sale_order_id.order_line[0].id,
            'sale_order_id': self.sale_order_id.id,
            'partner_id': self.partner_id.id,
        })
        if self.pricing_type == 'employee_rate':
            # Update or create the employee -> SO line mappings from the
            # wizard lines.
            lines_already_present = dict([
                (l.employee_id.id, l)
                for l in self.project_id.sale_line_employee_ids
            ])
            EmployeeMap = self.env['project.sale.line.employee.map'].sudo()
            for wizard_line in self.line_ids:
                if wizard_line.employee_id.id not in lines_already_present:
                    EmployeeMap.create({
                        'project_id': self.project_id.id,
                        'sale_line_id': wizard_line.sale_line_id.id,
                        'employee_id': wizard_line.employee_id.id,
                    })
                else:
                    lines_already_present[wizard_line.employee_id.id].write(
                        {'sale_line_id': wizard_line.sale_line_id.id})
            self.project_id.tasks.filtered(
                lambda task: task.non_allow_billable).sale_line_id = False
            tasks = self.project_id.tasks.filtered(
                lambda t: not t.non_allow_billable)
            # assign SOL to timesheets, per mapped employee
            for map_entry in self.project_id.sale_line_employee_ids:
                self.env['account.analytic.line'].search([
                    ('task_id', 'in', tasks.ids),
                    ('employee_id', '=', map_entry.employee_id.id),
                    ('so_line', '=', False)
                ]).write({'so_line': map_entry.sale_line_id.id})
        else:
            dict_product_sol = dict([(l.product_id.id, l.id)
                                     for l in self.sale_order_id.order_line])
            # remove SOL for task without product
            # and if a task has a product that match a product from a SOL, we put this SOL on task.
            for task in task_no_sale_line:
                if not task.timesheet_product_id:
                    task.sale_line_id = False
                elif task.timesheet_product_id.id in dict_product_sol:
                    task.write({
                        'sale_line_id':
                        dict_product_sol[task.timesheet_product_id.id]
                    })

    def action_create_sale_order(self):
        """Validate the wizard input, create and confirm the sale order, then
        return a window action opening it in form view.

        :raises UserError: project already billable, no wizard line, missing
            employees (employee_rate) or timesheets already linked to a SO.
        """
        # if project linked to SO line or at least on tasks with SO line, then we consider project as billable.
        if self.project_id.sale_line_id:
            raise UserError(
                _("The project is already linked to a sales order item."))
        # at least one line
        if not self.line_ids:
            raise UserError(_("At least one line should be filled."))

        if self.pricing_type == 'employee_rate':
            # all employee having timesheet should be in the wizard map
            timesheet_employees = self.env['account.analytic.line'].search([
                ('task_id', 'in', self.project_id.tasks.ids)
            ]).mapped('employee_id')
            map_employees = self.line_ids.mapped('employee_id')
            missing_meployees = timesheet_employees - map_employees
            if missing_meployees:
                raise UserError(
                    _('The Sales Order cannot be created because you did not enter some employees that entered timesheets on this project. Please list all the relevant employees before creating the Sales Order.\nMissing employee(s): %s'
                      ) % (', '.join(missing_meployees.mapped('name'))))

        # check here if timesheet already linked to SO line
        timesheet_with_so_line = self.env[
            'account.analytic.line'].search_count([('task_id', 'in',
                                                    self.project_id.tasks.ids),
                                                   ('so_line', '!=', False)])
        if timesheet_with_so_line:
            raise UserError(
                _('The sales order cannot be created because some timesheets of this project are already linked to another sales order.'
                  ))

        # create SO according to the chosen billable type
        sale_order = self._create_sale_order()

        view_form_id = self.env.ref('sale.view_order_form').id
        action = self.env["ir.actions.actions"]._for_xml_id(
            "sale.action_orders")
        action.update({
            'views': [(view_form_id, 'form')],
            'view_mode': 'form',
            'name': sale_order.name,
            'res_id': sale_order.id,
        })
        return action

    def _create_sale_order(self):
        """ Private implementation of generating the sales order.

        Creates the SO, replays the partner/user onchanges, creates the sale
        lines through :meth:`_make_billable`, and confirms the order.
        """
        sale_order = self.env['sale.order'].create({
            'project_id': self.project_id.id,
            'partner_id': self.partner_id.id,
            'analytic_account_id': self.project_id.analytic_account_id.id,
            'client_order_ref': self.project_id.name,
            'company_id': self.project_id.company_id.id,
        })
        sale_order.onchange_partner_id()
        sale_order.onchange_partner_shipping_id()
        # rewrite the user as the onchange_partner_id erases it
        sale_order.write({'user_id': self.project_id.user_id.id})
        sale_order.onchange_user_id()

        # create the sale lines, the map (optional), and assign existing timesheet to sale lines
        self._make_billable(sale_order)

        # confirm SO
        sale_order.action_confirm()
        return sale_order

    def _make_billable(self, sale_order):
        # Dispatch on the project pricing type; 'fixed_rate' bills per
        # product, anything else per employee rate.
        if self.pricing_type == 'fixed_rate':
            self._make_billable_at_project_rate(sale_order)
        else:
            self._make_billable_at_employee_rate(sale_order)

    def _make_billable_at_project_rate(self, sale_order):
        """Create one SO line per wizard line (product/price) and attach the
        matching tasks and their uninvoiced timesheets to it."""
        self.ensure_one()
        task_left = self.project_id.tasks.filtered(
            lambda task: not task.sale_line_id)
        ticket_timesheet_ids = self.env.context.get('ticket_timesheet_ids',
                                                    [])
        for wizard_line in self.line_ids:
            task_ids = self.project_id.tasks.filtered(
                lambda task: not task.sale_line_id and task.
                timesheet_product_id == wizard_line.product_id)
            task_left -= task_ids
            # trying to simulate the SO line created a task, according to the product configuration
            # To avoid, generating a task when confirming the SO
            task_id = False
            if task_ids and wizard_line.product_id.service_tracking in [
                    'task_in_project', 'task_global_project'
            ]:
                task_id = task_ids.ids[0]
            # create SO line
            sale_order_line = self.env['sale.order.line'].create({
                'order_id': sale_order.id,
                'product_id': wizard_line.product_id.id,
                'price_unit': wizard_line.price_unit,
                'project_id': self.project_id.
                id,  # prevent to re-create a project on confirmation
                'task_id': task_id,
                'product_uom_qty': 0.0,
            })

            if ticket_timesheet_ids and not self.project_id.sale_line_id and not task_ids:
                # With pricing = "project rate" in project. When the user wants to create a sale order from a ticket in helpdesk
                # The project cannot contain any tasks. Thus, we need to give the first sale_order_line created to link
                # the timesheet to this first sale order line.
                # link the project to the SO line
                self.project_id.write({
                    'sale_order_id': sale_order.id,
                    'sale_line_id': sale_order_line.id,
                    'partner_id': self.partner_id.id,
                })

            # link the tasks to the SO line
            task_ids.write({
                'sale_line_id': sale_order_line.id,
                'partner_id': sale_order.partner_id.id,
                'email_from': sale_order.partner_id.email,
            })

            # assign SOL to timesheets
            search_domain = [('task_id', 'in', task_ids.ids),
                             ('so_line', '=', False)]
            if ticket_timesheet_ids:
                search_domain = [('id', 'in', ticket_timesheet_ids),
                                 ('so_line', '=', False)]
            self.env['account.analytic.line'].search(search_domain).write(
                {'so_line': sale_order_line.id})
            # Sync the ordered quantity with what was already delivered,
            # without touching the tasks' planned hours.
            sale_order_line.with_context({
                'no_update_planned_hours': True
            }).write({'product_uom_qty': sale_order_line.qty_delivered})

        if ticket_timesheet_ids and self.project_id.sale_line_id and not self.project_id.tasks and len(
                self.line_ids) > 1:
            # Then, we need to give to the project the last sale order line created
            self.project_id.write({'sale_line_id': sale_order_line.id})
        else:
            # Otherwise, we are in the normal behaviour
            # link the project to the SO line
            self.project_id.write({
                'sale_order_id': sale_order.id,
                'sale_line_id': sale_order_line.id,  # we take the last sale_order_line created
                'partner_id': self.partner_id.id,
            })

        # Tasks whose product matched no wizard line stay non-billable.
        if task_left:
            task_left.sale_line_id = False

    def _make_billable_at_employee_rate(self, sale_order):
        """Create one SO line per (product, price) pair, map each wizard
        employee to its SO line, and attach uninvoiced timesheets.

        :return: the ``project.sale.line.employee.map`` records involved.
        """
        # trying to simulate the SO line created a task, according to the product configuration
        # To avoid, generating a task when confirming the SO
        task_id = self.env['project.task'].search(
            [('project_id', '=', self.project_id.id)],
            order='create_date DESC',
            limit=1).id
        project_id = self.project_id.id

        lines_already_present = dict([
            (l.employee_id.id, l)
            for l in self.project_id.sale_line_employee_ids
        ])
        non_billable_tasks = self.project_id.tasks.filtered(
            lambda task: not task.sale_line_id)
        non_allow_billable_tasks = self.project_id.tasks.filtered(
            lambda task: task.non_allow_billable)
        map_entries = self.env['project.sale.line.employee.map']
        EmployeeMap = self.env['project.sale.line.employee.map'].sudo()

        # create SO lines: create on SOL per product/price. So many employee can be linked to the same SOL
        map_product_price_sol = {}  # (product_id, price) --> SOL
        for wizard_line in self.line_ids:
            map_key = (wizard_line.product_id.id, wizard_line.price_unit)
            if map_key not in map_product_price_sol:
                values = {
                    'order_id': sale_order.id,
                    'product_id': wizard_line.product_id.id,
                    'price_unit': wizard_line.price_unit,
                    'product_uom_qty': 0.0,
                }
                if wizard_line.product_id.service_tracking in [
                        'task_in_project', 'task_global_project'
                ]:
                    values['task_id'] = task_id
                if wizard_line.product_id.service_tracking in [
                        'task_in_project', 'project_only'
                ]:
                    values['project_id'] = project_id
                sale_order_line = self.env['sale.order.line'].create(values)
                map_product_price_sol[map_key] = sale_order_line

            if wizard_line.employee_id.id not in lines_already_present:
                map_entries |= EmployeeMap.create({
                    'project_id': self.project_id.id,
                    'sale_line_id': map_product_price_sol[map_key].id,
                    'employee_id': wizard_line.employee_id.id,
                })
            else:
                map_entries |= lines_already_present[
                    wizard_line.employee_id.id]
                lines_already_present[wizard_line.employee_id.id].write(
                    {'sale_line_id': map_product_price_sol[map_key].id})

        # link the project to the SO
        self.project_id.write({
            'sale_order_id': sale_order.id,
            'sale_line_id': sale_order.order_line[0].id,
            'partner_id': self.partner_id.id,
        })

        non_billable_tasks.write({
            'partner_id': sale_order.partner_id.id,
            'email_from': sale_order.partner_id.email,
        })

        non_allow_billable_tasks.sale_line_id = False
        tasks = self.project_id.tasks.filtered(
            lambda t: not t.non_allow_billable)

        # assign SOL to timesheets
        for map_entry in map_entries:
            search_domain = [('employee_id', '=', map_entry.employee_id.id),
                             ('so_line', '=', False)]
            ticket_timesheet_ids = self.env.context.get(
                'ticket_timesheet_ids', [])
            if ticket_timesheet_ids:
                search_domain.append(('id', 'in', ticket_timesheet_ids))
            else:
                search_domain.append(('task_id', 'in', tasks.ids))
            self.env['account.analytic.line'].search(search_domain).write(
                {'so_line': map_entry.sale_line_id.id})
            map_entry.sale_line_id.with_context({
                'no_update_planned_hours': True
            }).write({'product_uom_qty': map_entry.sale_line_id.qty_delivered})

        return map_entries
class SurveyUserInput(models.Model):
    """ Metadata for a set of one user's answers to a particular survey """
    _name = "survey.user_input"
    _rec_name = 'date_create'
    _description = 'Survey User Input'

    survey_id = fields.Many2one('survey.survey', string='Survey', required=True,
                                readonly=True, ondelete='restrict')
    date_create = fields.Datetime('Creation Date', default=fields.Datetime.now,
                                  required=True, readonly=True, copy=False)
    deadline = fields.Datetime('Deadline',
                               help="Date by which the person can open the survey and submit answers",
                               oldname="date_deadline")
    type = fields.Selection([('manually', 'Manually'), ('link', 'Link')],
                            string='Answer Type', default='manually',
                            required=True, readonly=True,
                            oldname="response_type")
    state = fields.Selection([
        ('new', 'Not started yet'),
        ('skip', 'Partially completed'),
        ('done', 'Completed')], string='Status', default='new', readonly=True)
    test_entry = fields.Boolean(readonly=True)
    token = fields.Char('Identification token',
                        default=lambda self: str(uuid.uuid4()),
                        readonly=True, required=True, copy=False)

    # Optional identification of the respondent
    partner_id = fields.Many2one('res.partner', string='Partner', readonly=True)
    email = fields.Char('E-mail', readonly=True)

    # Navigation state while the respondent fills the survey
    last_displayed_page_id = fields.Many2one('survey.page',
                                             string='Last displayed page')
    # The answers themselves
    user_input_line_ids = fields.One2many('survey.user_input_line',
                                          'user_input_id', string='Answers',
                                          copy=True)

    # URLs used to display the answers
    result_url = fields.Char("Public link to the survey results",
                             related='survey_id.result_url')
    print_url = fields.Char("Public link to the empty survey",
                            related='survey_id.print_url')

    quizz_score = fields.Float("Score for the quiz",
                               compute="_compute_quizz_score", default=0.0)

    @api.depends('user_input_line_ids.quizz_mark')
    def _compute_quizz_score(self):
        """Total quiz score = sum of the marks of all answer lines."""
        for record in self:
            record.quizz_score = sum(
                line.quizz_mark for line in record.user_input_line_ids)

    _sql_constraints = [
        ('unique_token', 'UNIQUE (token)', 'A token must be unique!'),
        ('deadline_in_the_past', 'CHECK (deadline >= date_create)',
         'The deadline cannot be in the past')
    ]

    @api.model
    def do_clean_emptys(self):
        """ Remove empty user inputs that have been created manually
            (used as a cronjob declared in data/survey_cron.xml)
        """
        # Only manual inputs, never started, older than one hour.
        cutoff = fields.Datetime.to_string(
            datetime.datetime.now() - datetime.timedelta(hours=1))
        stale_inputs = self.search([
            ('type', '=', 'manually'),
            ('state', '=', 'new'),
            ('date_create', '<', cutoff),
        ])
        stale_inputs.unlink()

    @api.multi
    def action_survey_resend(self):
        """ Send again the invitation """
        self.ensure_one()
        resend_context = dict(
            survey_resent_token=True,
            default_partner_ids=[self.partner_id.id] if self.partner_id else [],
            default_multi_email=self.email or "",
            default_public='email_private',
        )
        return self.survey_id.with_context(resend_context).action_send_survey()

    @api.multi
    def action_view_answers(self):
        """ Open the website page with the survey form """
        self.ensure_one()
        answer_url = '%s/%s' % (self.print_url, self.token)
        return {
            'type': 'ir.actions.act_url',
            'name': "View Answers",
            'target': 'self',
            'url': answer_url,
        }

    @api.multi
    def action_survey_results(self):
        """ Open the website page with the survey results """
        self.ensure_one()
        return {
            'type': 'ir.actions.act_url',
            'name': "Survey Results",
            'target': 'self',
            'url': self.result_url,
        }
class HrWorkEntry(models.Model):
    """A period of work for an employee.

    Work entries can conflict with each other (overlap for the same employee,
    or have no type); conflicting entries are flagged via ``state`` /
    ``conflict`` and must be resolved before validation.
    """
    _name = 'hr.work.entry'
    _description = 'HR Work Entry'
    _order = 'conflict desc,state,date_start'

    name = fields.Char(required=True)
    active = fields.Boolean(default=True)
    employee_id = fields.Many2one(
        'hr.employee', required=True,
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]")
    date_start = fields.Datetime(required=True, string='From')
    date_stop = fields.Datetime(compute='_compute_date_stop', store=True,
                                readonly=False, string='To')
    duration = fields.Float(compute='_compute_duration', store=True,
                            string="Period")
    work_entry_type_id = fields.Many2one('hr.work.entry.type', index=True)
    color = fields.Integer(related='work_entry_type_id.color', readonly=True)
    state = fields.Selection([('draft', 'Draft'), ('validated', 'Validated'),
                              ('conflict', 'Conflict'),
                              ('cancelled', 'Cancelled')],
                             default='draft')
    company_id = fields.Many2one('res.company', string='Company',
                                 readonly=True, required=True,
                                 default=lambda self: self.env.company)
    conflict = fields.Boolean(
        'Conflicts', compute='_compute_conflict',
        store=True)  # Used to show conflicting work entries first

    _sql_constraints = [
        ('_work_entry_has_end', 'check (date_stop IS NOT NULL)',
         'Work entry must end. Please define an end date or a duration.'),
        ('_work_entry_start_before_end', 'check (date_stop > date_start)',
         'Starting time should be before end time.')
    ]

    @api.depends('state')
    def _compute_conflict(self):
        # Mirror of the state, stored so it can drive the default ordering.
        for rec in self:
            rec.conflict = rec.state == 'conflict'

    @api.depends('date_stop', 'date_start')
    def _compute_duration(self):
        for work_entry in self:
            work_entry.duration = work_entry._get_duration(
                work_entry.date_start, work_entry.date_stop)

    @api.depends('date_start', 'duration')
    def _compute_date_stop(self):
        # Only recompute entries that have both a start and a duration;
        # other entries keep their current date_stop (field is readonly=False).
        for work_entry in self.filtered(lambda w: w.date_start and w.duration):
            work_entry.date_stop = work_entry.date_start + relativedelta(
                hours=work_entry.duration)

    def _get_duration(self, date_start, date_stop):
        """Return the duration between two datetimes, in hours (0 when
        either bound is missing)."""
        if not date_start or not date_stop:
            return 0
        dt = date_stop - date_start
        return dt.days * 24 + dt.seconds / 3600  # Number of hours

    def action_validate(self):
        """
        Try to validate work entries.
        If some errors are found, set `state` to conflict for conflicting
        work entries and validation fails.
        :return: True if validation succeded
        """
        work_entries = self.filtered(
            lambda work_entry: work_entry.state != 'validated')
        if not work_entries._check_if_error():
            work_entries.write({'state': 'validated'})
            return True
        return False

    def _check_if_error(self):
        """Flag entries without a type and overlapping entries as conflicts.

        :return: truthy when at least one problem was found.
        """
        if not self:
            return False
        undefined_type = self.filtered(lambda b: not b.work_entry_type_id)
        undefined_type.write({'state': 'conflict'})
        conflict = self._mark_conflicting_work_entries(
            min(self.mapped('date_start')), max(self.mapped('date_stop')))
        return undefined_type or conflict

    @api.model
    def _mark_conflicting_work_entries(self, start, stop):
        """
        Set `state` to `conflict` for overlapping work entries
        between two dates.
        Return True if overlapping work entries were detected.
        """
        # Use the postgresql range type `tsrange` which is a range of timestamp
        # It supports the intersection operator (&&) useful to detect overlap.
        # use '()' to exlude the lower and upper bounds of the range.
        # Filter on date_start and date_stop (both indexed) in the EXISTS clause to
        # limit the resulting set size and fasten the query.
        self.flush(['date_start', 'date_stop', 'employee_id', 'active'])
        query = """
            SELECT b1.id
            FROM hr_work_entry b1
            WHERE b1.date_start <= %s
            AND b1.date_stop >= %s
            AND active = TRUE
            AND EXISTS (
                SELECT 1
                FROM hr_work_entry b2
                WHERE b2.date_start <= %s
                AND b2.date_stop >= %s
                AND active = TRUE
                AND tsrange(b1.date_start, b1.date_stop, '()') && tsrange(b2.date_start, b2.date_stop, '()')
                AND b1.id <> b2.id
                AND b1.employee_id = b2.employee_id);
        """
        self.env.cr.execute(query, (stop, start, stop, start))
        conflicts = [res.get('id') for res in self.env.cr.dictfetchall()]
        self.browse(conflicts).write({
            'state': 'conflict',
        })
        return bool(conflicts)

    @api.model_create_multi
    def create(self, vals_list):
        # Re-run conflict detection on newly created entries.
        work_entries = super().create(vals_list)
        work_entries._check_if_error()
        return work_entries

    def write(self, vals):
        # Conflict re-check can be skipped when none of the fields involved
        # in conflict detection are touched.
        skip_check = not bool({
            'date_start', 'date_stop', 'employee_id', 'work_entry_type_id',
            'active'
        } & vals.keys())
        if 'state' in vals:
            # `state` and `active` are kept in sync: draft <-> active,
            # cancelled <-> archived.
            if vals['state'] == 'draft':
                vals['active'] = True
            elif vals['state'] == 'cancelled':
                vals['active'] = False
                # Cancelling a conflicting entry may resolve the conflict:
                # force the check in that case.
                skip_check &= all(self.mapped(lambda w: w.state != 'conflict'))
        if 'active' in vals:
            vals['state'] = 'draft' if vals['active'] else 'cancelled'
        with self._error_checking(skip=skip_check):
            return super(HrWorkEntry, self).write(vals)

    def unlink(self):
        # Removing entries may resolve conflicts: re-check on exit.
        with self._error_checking():
            return super().unlink()

    def _reset_conflicting_state(self):
        # Back to draft before re-running the conflict detection.
        self.filtered(lambda w: w.state == 'conflict').write(
            {'state': 'draft'})

    @contextmanager
    def _error_checking(self, start=None, stop=None, skip=False):
        """
        Context manager used for conflicts checking.
        When exiting the context manager, conflicts are checked
        for all work entries within a date range. By default, the start and end dates are
        computed according to `self` (min and max respectively) but it can be overwritten by providing
        other values as parameter.
        :param start: datetime to overwrite the default behaviour
        :param stop: datetime to overwrite the default behaviour
        :param skip: If True, no error checking is done
        """
        try:
            skip = skip or self.env.context.get('hr_work_entry_no_check',
                                                False)
            start = start or min(self.mapped('date_start'), default=False)
            stop = stop or max(self.mapped('date_stop'), default=False)
            if not skip and start and stop:
                # Reset all (non-final) entries overlapping the range so the
                # check on exit starts from a clean state.
                work_entries = self.sudo().with_context(
                    hr_work_entry_no_check=True).search([
                        ('date_start', '<', stop),
                        ('date_stop', '>', start),
                        ('state', 'not in', ('validated', 'cancelled')),
                    ])
                work_entries._reset_conflicting_state()
            yield
        except OperationalError:
            # the cursor is dead, do not attempt to use it or we will shadow the root exception
            # with a "psycopg2.InternalError: current transaction is aborted, ..."
            skip = True
            raise
        finally:
            if not skip and start and stop:
                # New work entries are handled in the create method,
                # no need to reload work entries.
                work_entries.exists()._check_if_error()
class StockWarehouseOrderpoint(models.Model): """ Defines Minimum stock rules. """ _name = "stock.warehouse.orderpoint" _description = "Minimum Inventory Rule" _check_company_auto = True _order = "location_id,company_id,id" @api.model def default_get(self, fields): res = super().default_get(fields) warehouse = None if 'warehouse_id' not in res and res.get('company_id'): warehouse = self.env['stock.warehouse'].search( [('company_id', '=', res['company_id'])], limit=1) if warehouse: res['warehouse_id'] = warehouse.id res['location_id'] = warehouse.lot_stock_id.id return res @api.model def _domain_product_id(self): domain = "('type', '=', 'product')" if self.env.context.get('active_model') == 'product.template': product_template_id = self.env.context.get('active_id', False) domain = f"('product_tmpl_id', '=', {product_template_id})" elif self.env.context.get('default_product_id', False): product_id = self.env.context.get('default_product_id', False) domain = f"('id', '=', {product_id})" return f"[{domain}, '|', ('company_id', '=', False), ('company_id', '=', company_id)]" name = fields.Char('Name', copy=False, required=True, readonly=True, default=lambda self: self.env['ir.sequence']. next_by_code('stock.orderpoint')) trigger = fields.Selection([('auto', 'Auto'), ('manual', 'Manual')], string='Trigger', default='auto', required=True) active = fields.Boolean( 'Active', default=True, help= "If the active field is set to False, it will allow you to hide the orderpoint without removing it." 
) snoozed_until = fields.Date('Snoozed', help="Hidden until next scheduler.") warehouse_id = fields.Many2one('stock.warehouse', 'Warehouse', check_company=True, ondelete="cascade", required=True) location_id = fields.Many2one('stock.location', 'Location', index=True, ondelete="cascade", required=True, check_company=True) product_tmpl_id = fields.Many2one('product.template', related='product_id.product_tmpl_id') product_id = fields.Many2one('product.product', 'Product', index=True, domain=lambda self: self._domain_product_id(), ondelete='cascade', required=True, check_company=True) product_category_id = fields.Many2one('product.category', name='Product Category', related='product_id.categ_id', store=True) product_uom = fields.Many2one('uom.uom', 'Unit of Measure', related='product_id.uom_id') product_uom_name = fields.Char(string='Product unit of measure label', related='product_uom.display_name', readonly=True) product_min_qty = fields.Float( 'Min Quantity', digits='Product Unit of Measure', required=True, default=0.0, help= "When the virtual stock equals to or goes below the Min Quantity specified for this field, Flectra generates " "a procurement to bring the forecasted quantity to the Max Quantity.") product_max_qty = fields.Float( 'Max Quantity', digits='Product Unit of Measure', required=True, default=0.0, help= "When the virtual stock goes below the Min Quantity, Flectra generates " "a procurement to bring the forecasted quantity to the Quantity specified as Max Quantity." ) qty_multiple = fields.Float( 'Multiple Quantity', digits='Product Unit of Measure', default=1, required=True, help= "The procurement quantity will be rounded up to this multiple. If it is 0, the exact quantity will be used." ) group_id = fields.Many2one( 'procurement.group', 'Procurement Group', copy=False, help= "Moves created through this orderpoint will be put in this procurement group. If none is given, the moves generated by stock rules will be grouped into one big picking." 
)
# ------------------------------------------------------------------
# NOTE: this is the interior of the stock.warehouse.orderpoint model;
# the class header and the field whose call the lone ')' above closes
# are defined before this excerpt.
# ------------------------------------------------------------------
# Company owning the reordering rule; defaults to the current company.
company_id = fields.Many2one('res.company', 'Company', required=True, index=True, default=lambda self: self.env.company)
# Locations the user may pick for this orderpoint (computed, non-stored).
allowed_location_ids = fields.One2many(comodel_name='stock.location', compute='_compute_allowed_location_ids')
# Stock rules that would fulfil this orderpoint (computed from product/location/route).
rule_ids = fields.Many2many('stock.rule', string='Rules used', compute='_compute_rules')
# JSON payload consumed by the lead-days popover widget in the replenishment view.
json_lead_days_popover = fields.Char(compute='_compute_json_popover')
# Date at which a procurement launched today would be available (today + lead days).
lead_days_date = fields.Date(compute='_compute_lead_days')
# Routes selectable as preferred route (product-selectable routes only).
allowed_route_ids = fields.Many2many('stock.location.route', compute='_compute_allowed_route_ids')
route_id = fields.Many2one('stock.location.route', string='Preferred Route', domain="[('id', 'in', allowed_route_ids)]")
qty_on_hand = fields.Float('On Hand', readonly=True, compute='_compute_qty')
qty_forecast = fields.Float('Forecast', readonly=True, compute='_compute_qty')
# Stored so the replenishment report can search/sort on it; editable by the user.
qty_to_order = fields.Float('To Order', compute='_compute_qty_to_order', store=True, readonly=False)

_sql_constraints = [
    ('qty_multiple_check', 'CHECK( qty_multiple >= 0 )',
     'Qty Multiple must be greater than or equal to zero.'),
]

@api.depends('warehouse_id')
def _compute_allowed_location_ids(self):
    """Restrict selectable locations to internal/view locations that either
    belong to this orderpoint's warehouse or to no warehouse at all, filtered
    by the orderpoint's company."""
    loc_domain = [('usage', 'in', ('internal', 'view'))]
    # We want to keep only the locations
    #  - strictly belonging to our warehouse
    #  - not belonging to any warehouses
    # NOTE(review): loc_domain accumulates across the `for orderpoint in self`
    # iterations (it is initialised once, outside the loop) — with a multi-record
    # recordset later orderpoints inherit earlier exclusions; confirm intended.
    for orderpoint in self:
        other_warehouses = self.env['stock.warehouse'].search([('id', '!=', orderpoint.warehouse_id.id)])
        # Exclude every location subtree owned by another warehouse.
        for view_location_id in other_warehouses.mapped('view_location_id'):
            loc_domain = expression.AND([loc_domain, ['!', ('id', 'child_of', view_location_id.id)]])
        loc_domain = expression.AND([loc_domain, ['|', ('company_id', '=', False), ('company_id', '=', orderpoint.company_id.id)]])
        orderpoint.allowed_location_ids = self.env['stock.location'].search(loc_domain)

@api.depends('warehouse_id', 'location_id')
def _compute_allowed_route_ids(self):
    """Allowed preferred routes are all product-selectable routes (same set
    for every record in ``self``)."""
    route_by_product = self.env['stock.location.route'].search([
        ('product_selectable', '=', True),
    ])
    self.allowed_route_ids = route_by_product.ids

@api.depends('rule_ids', 'product_id.seller_ids', 'product_id.seller_ids.delay')
def _compute_json_popover(self):
    """Build the JSON string feeding the 'stock.leadDaysPopOver' client-side
    popover (lead-days breakdown, forecast/min/max quantities)."""
    FloatConverter = self.env['ir.qweb.field.float']
    for orderpoint in self:
        # Without a product and a location there is nothing meaningful to show.
        if not orderpoint.product_id or not orderpoint.location_id:
            orderpoint.json_lead_days_popover = False
            continue
        dummy, lead_days_description = orderpoint.rule_ids._get_lead_days(orderpoint.product_id)
        orderpoint.json_lead_days_popover = dumps({
            'title': _('Replenishment'),
            'icon': 'fa-area-chart',
            'popoverTemplate': 'stock.leadDaysPopOver',
            'lead_days_date': format_date(self.env, orderpoint.lead_days_date),
            'lead_days_description': lead_days_description,
            'today': format_date(self.env, fields.Date.today()),
            'trigger': orderpoint.trigger,
            # Quantities are rendered with the product UoM decimal precision.
            'qty_forecast': FloatConverter.value_to_html(orderpoint.qty_forecast, {'decimal_precision': 'Product Unit of Measure'}),
            'qty_to_order': FloatConverter.value_to_html(orderpoint.qty_to_order, {'decimal_precision': 'Product Unit of Measure'}),
            'product_min_qty': FloatConverter.value_to_html(orderpoint.product_min_qty, {'decimal_precision': 'Product Unit of Measure'}),
            'product_max_qty': FloatConverter.value_to_html(orderpoint.product_max_qty, {'decimal_precision': 'Product Unit of Measure'}),
            'product_uom_name': orderpoint.product_uom_name,
            # 'virtual' marks orderpoints auto-created by the replenishment report
            # (manual trigger + created by the superuser).
            'virtual': orderpoint.trigger == 'manual' and orderpoint.create_uid.id == SUPERUSER_ID,
        })

@api.depends('rule_ids', 'product_id.seller_ids', 'product_id.seller_ids.delay')
def _compute_lead_days(self):
    """Compute the date a procurement launched now would be delivered:
    today plus the cumulated lead days of the applicable rules."""
    # bypass_delay_description: we only need the number, not the text breakdown.
    for orderpoint in self.with_context(bypass_delay_description=True):
        if not orderpoint.product_id or not orderpoint.location_id:
            orderpoint.lead_days_date = False
            continue
        lead_days, dummy = orderpoint.rule_ids._get_lead_days(orderpoint.product_id)
        lead_days_date = fields.Date.today() + relativedelta.relativedelta(days=lead_days)
        orderpoint.lead_days_date = lead_days_date

@api.depends('route_id', 'product_id', 'location_id', 'company_id', 'warehouse_id', 'product_id.route_ids')
def _compute_rules(self):
    """Resolve the stock rules applicable to each orderpoint's product at its
    location, honouring the preferred route when set."""
    for orderpoint in self:
        if not orderpoint.product_id or not orderpoint.location_id:
            orderpoint.rule_ids = False
            continue
        orderpoint.rule_ids = orderpoint.product_id._get_rules_from_location(orderpoint.location_id, route_ids=orderpoint.route_id)

@api.constrains('product_id')
def _check_product_uom(self):
    ''' Check if the UoM has the same category as the product standard UoM '''
    if any(orderpoint.product_id.uom_id.category_id != orderpoint.product_uom.category_id for orderpoint in self):
        raise ValidationError(_('You have to select a product unit of measure that is in the same category as the default unit of measure of the product'))

@api.onchange('location_id')
def _onchange_location_id(self):
    # Keep the warehouse in sync with the chosen location, when one owns it.
    warehouse = self.location_id.get_warehouse().id
    if warehouse:
        self.warehouse_id = warehouse

@api.onchange('warehouse_id')
def _onchange_warehouse_id(self):
    """ Finds location id for changed warehouse. """
    if self.warehouse_id:
        self.location_id = self.warehouse_id.lot_stock_id.id
    else:
        self.location_id = False

@api.onchange('product_id')
def _onchange_product_id(self):
    # Default the UoM to the product's own unit of measure.
    if self.product_id:
        self.product_uom = self.product_id.uom_id.id

@api.onchange('company_id')
def _onchange_company_id(self):
    # Changing company repoints the rule at that company's first warehouse.
    if self.company_id:
        self.warehouse_id = self.env['stock.warehouse'].search([('company_id', '=', self.company_id.id)], limit=1)

def write(self, vals):
    """Forbid moving an orderpoint to another company; archive + recreate instead."""
    if 'company_id' in vals:
        for orderpoint in self:
            if orderpoint.company_id.id != vals['company_id']:
                raise UserError(_("Changing the company of this record is forbidden at this point, you should rather archive it and create a new one."))
    return super().write(vals)

@api.model
def action_open_orderpoints(self):
    # Entry point of the Replenishment menu item.
    return self._get_orderpoint_action()

def action_replenish(self):
    """Run the procurements for these orderpoints, then clean up the virtual
    (report-generated) ones that are now satisfied. Returns a client
    notification when called on a single record."""
    self._procure_orderpoint_confirm(company_id=self.env.company)
    notification = False
    if len(self) == 1:
        notification = self._get_replenishment_order_notification()
    # Forced to call compute quantity because we don't have a link.
    self._compute_qty()
    # Drop auto-generated (superuser-created, manual-trigger) orderpoints
    # that no longer need anything.
    self.filtered(lambda o: o.create_uid.id == SUPERUSER_ID and o.qty_to_order <= 0.0 and o.trigger == 'manual').unlink()
    return notification

def action_replenish_auto(self):
    # Same as action_replenish but also switches the rule to automatic mode.
    self.trigger = 'auto'
    return self.action_replenish()

@api.depends('product_id', 'location_id', 'product_id.stock_move_ids', 'product_id.stock_move_ids.state', 'product_id.stock_move_ids.product_uom_qty')
def _compute_qty(self):
    """Compute on-hand and forecast quantities, batching products that share
    the same evaluation context (location/to_date) into one availability call."""
    orderpoints_contexts = defaultdict(lambda: self.env['stock.warehouse.orderpoint'])
    for orderpoint in self:
        if not orderpoint.product_id or not orderpoint.location_id:
            orderpoint.qty_on_hand = False
            orderpoint.qty_forecast = False
            continue
        orderpoint_context = orderpoint._get_product_context()
        # frozendict: context dicts must be hashable to serve as grouping keys.
        product_context = frozendict({**self.env.context, **orderpoint_context})
        orderpoints_contexts[product_context] |= orderpoint
    for orderpoint_context, orderpoints_by_context in orderpoints_contexts.items():
        products_qty = orderpoints_by_context.product_id.with_context(orderpoint_context)._product_available()
        products_qty_in_progress = orderpoints_by_context._quantity_in_progress()
        for orderpoint in orderpoints_by_context:
            orderpoint.qty_on_hand = products_qty[orderpoint.product_id.id]['qty_available']
            # Forecast also counts quantities in progress (e.g. draft POs)
            # that are not yet part of virtual_available.
            orderpoint.qty_forecast = products_qty[orderpoint.product_id.id]['virtual_available'] + products_qty_in_progress[orderpoint.id]

@api.depends('qty_multiple', 'qty_forecast', 'product_min_qty', 'product_max_qty')
def _compute_qty_to_order(self):
    """Quantity to procure: when the forecast drops below the minimum, order
    up to the maximum, rounded up to the next multiple of qty_multiple."""
    for orderpoint in self:
        if not orderpoint.product_id or not orderpoint.location_id:
            orderpoint.qty_to_order = False
            continue
        qty_to_order = 0.0
        rounding = orderpoint.product_uom.rounding
        if float_compare(orderpoint.qty_forecast, orderpoint.product_min_qty, precision_rounding=rounding) < 0:
            # max() guards against product_max_qty < product_min_qty.
            qty_to_order = max(orderpoint.product_min_qty, orderpoint.product_max_qty) - orderpoint.qty_forecast
            remainder = orderpoint.qty_multiple > 0 and qty_to_order % orderpoint.qty_multiple or 0.0
            if float_compare(remainder, 0.0, precision_rounding=rounding) > 0:
                qty_to_order += orderpoint.qty_multiple - remainder
        orderpoint.qty_to_order = qty_to_order

def _set_default_route_id(self):
    """ Write the `route_id` field on `self`. This method is intended to be
    called on the orderpoints generated when opening the replenish report.
    """
    # Only fill routes that are still empty.
    self = self.filtered(lambda o: not o.route_id)
    rules_groups = self.env['stock.rule'].read_group([
        ('route_id.product_selectable', '!=', False),
        ('location_id', 'in', self.location_id.ids),
        ('action', 'in', ['pull_push', 'pull'])
    ], ['location_id', 'route_id'], ['location_id', 'route_id'], lazy=False)
    for g in rules_groups:
        if not g.get('route_id'):
            continue
        orderpoints = self.filtered(lambda o: o.location_id.id == g['location_id'][0])
        orderpoints.route_id = g['route_id']

def _get_product_context(self):
    """Used to call `virtual_available` when running an orderpoint."""
    self.ensure_one()
    return {
        'location': self.location_id.id,
        # Forecast horizon: end of the lead-days date.
        'to_date': datetime.combine(self.lead_days_date, time.max)
    }

def _get_orderpoint_action(self):
    """Create manual orderpoints for missing product in each warehouse.
    It also removes orderpoints that have been replenished.

    In order to do it:
    - It uses the report.stock.quantity to find missing quantity per product/warehouse
    - It checks if an orderpoint already exists to refill this location.
    - It checks if other sources exist (e.g. RFQ) that refill the warehouse.
    - It creates the orderpoints for missing quantity that were not refilled by an upper option.

    :return: replenish report ir.actions.act_window
    """
    action = self.env["ir.actions.actions"]._for_xml_id("stock.action_orderpoint_replenish")
    action['context'] = self.env.context
    # Search also with archived ones to avoid to trigger product_location_check SQL constraints later
    # It means that when there will be a archived orderpoint on a location + product, the replenishment
    # report won't take in account this location + product and it won't create any manual orderpoint
    # In master: the active field should be remove
    orderpoints = self.env['stock.warehouse.orderpoint'].with_context(active_test=False).search([])
    # Remove previous automatically created orderpoint that has been refilled.
    to_remove = orderpoints.filtered(lambda o: o.create_uid.id == SUPERUSER_ID and o.qty_to_order <= 0.0 and o.trigger == 'manual')
    to_remove.unlink()
    orderpoints = orderpoints - to_remove
    # (product_id, warehouse_id) -> (negative) quantity missing.
    to_refill = defaultdict(float)
    all_product_ids = []
    all_warehouse_ids = []
    # Take 3 months since it's the max for the forecast report
    to_date = add(fields.date.today(), months=3)
    qty_by_product_warehouse = self.env['report.stock.quantity'].read_group(
        [('date', '=', to_date), ('state', '=', 'forecast')],
        ['product_id', 'product_qty', 'warehouse_id'],
        ['product_id', 'warehouse_id'], lazy=False)
    for group in qty_by_product_warehouse:
        warehouse_id = group.get('warehouse_id') and group['warehouse_id'][0]
        # Only negative forecasts with a known warehouse need refilling.
        if group['product_qty'] >= 0.0 or not warehouse_id:
            continue
        all_product_ids.append(group['product_id'][0])
        all_warehouse_ids.append(warehouse_id)
        to_refill[(group['product_id'][0], warehouse_id)] = group['product_qty']
    if not to_refill:
        return action

    # Recompute the forecasted quantity for missing products, but this time
    # with their real lead days instead of the report's fixed 3 months.
    key_to_remove = []
    # group products by lead_days and warehouse in order to read virtual_available in batch
    pwh_per_day = defaultdict(list)
    for (product, warehouse), quantity in to_refill.items():
        product = self.env['product.product'].browse(product).with_prefetch(all_product_ids)
        warehouse = self.env['stock.warehouse'].browse(warehouse).with_prefetch(all_warehouse_ids)
        rules = product._get_rules_from_location(warehouse.lot_stock_id)
        lead_days = rules.with_context(bypass_delay_description=True)._get_lead_days(product)[0]
        pwh_per_day[(lead_days, warehouse)].append(product.id)
    for (days, warehouse), p_ids in pwh_per_day.items():
        products = self.env['product.product'].browse(p_ids)
        qties = products.with_context(warehouse=warehouse.id, to_date=fields.datetime.now() + relativedelta.relativedelta(days=days)).read(['virtual_available'])
        for qty in qties:
            # NOTE(review): `product` here is the leftover variable from the
            # previous loop, not the product matching `qty` — the rounding used
            # may belong to a different product; verify against upstream.
            if float_compare(qty['virtual_available'], 0, precision_rounding=product.uom_id.rounding) >= 0:
                key_to_remove.append((qty['id'], warehouse.id))
            else:
                to_refill[(qty['id'], warehouse.id)] = qty['virtual_available']
    for key in key_to_remove:
        del to_refill[key]
    if not to_refill:
        return action

    # Remove incoming quantity from other origin than moves (e.g RFQ)
    product_ids, warehouse_ids = zip(*to_refill)
    dummy, qty_by_product_wh = self.env['product.product'].browse(product_ids)._get_quantity_in_progress(warehouse_ids=warehouse_ids)
    rounding = self.env['decimal.precision'].precision_get('Product Unit of Measure')
    # Group orderpoint by product-warehouse
    orderpoint_by_product_warehouse = self.env['stock.warehouse.orderpoint'].read_group(
        [('id', 'in', orderpoints.ids)],
        ['product_id', 'warehouse_id', 'qty_to_order:sum'],
        ['product_id', 'warehouse_id'], lazy=False)
    orderpoint_by_product_warehouse = {
        (record.get('product_id')[0], record.get('warehouse_id')[0]): record.get('qty_to_order')
        for record in orderpoint_by_product_warehouse
    }
    for (product, warehouse), product_qty in to_refill.items():
        qty_in_progress = qty_by_product_wh.get((product, warehouse)) or 0.0
        qty_in_progress += orderpoint_by_product_warehouse.get((product, warehouse), 0.0)
        # Add qty to order for other orderpoint under this warehouse.
        if not qty_in_progress:
            continue
        to_refill[(product, warehouse)] = product_qty + qty_in_progress
    # Keep only the pairs that are still missing quantity after all offsets.
    to_refill = {k: v for k, v in to_refill.items() if float_compare(v, 0.0, precision_digits=rounding) < 0.0}

    lot_stock_id_by_warehouse = self.env['stock.warehouse'].search_read([('id', 'in', [g[1] for g in to_refill.keys()])], ['lot_stock_id'])
    lot_stock_id_by_warehouse = {w['id']: w['lot_stock_id'][0] for w in lot_stock_id_by_warehouse}

    # With archived ones to avoid `product_location_check` SQL constraints
    orderpoint_by_product_location = self.env['stock.warehouse.orderpoint'].with_context(active_test=False).read_group(
        [('id', 'in', orderpoints.ids)],
        ['product_id', 'location_id', 'ids:array_agg(id)'],
        ['product_id', 'location_id'], lazy=False)
    orderpoint_by_product_location = {
        (record.get('product_id')[0], record.get('location_id')[0]): record.get('ids')[0]
        for record in orderpoint_by_product_location
    }
    orderpoint_values_list = []
    for (product, warehouse), product_qty in to_refill.items():
        lot_stock_id = lot_stock_id_by_warehouse[warehouse]
        orderpoint_id = orderpoint_by_product_location.get((product, lot_stock_id))
        if orderpoint_id:
            # An orderpoint already covers this product/location: just bump
            # its forecast by the missing quantity.
            self.env['stock.warehouse.orderpoint'].browse(orderpoint_id).qty_forecast += product_qty
        else:
            orderpoint_values = self.env['stock.warehouse.orderpoint']._get_orderpoint_values(product, lot_stock_id)
            orderpoint_values.update({
                'name': _('Replenishment Report'),
                'warehouse_id': warehouse,
                'company_id': self.env['stock.warehouse'].browse(warehouse).company_id.id,
            })
            orderpoint_values_list.append(orderpoint_values)
    # Created as superuser: these virtual orderpoints are recognised (and later
    # cleaned up) by their create_uid == SUPERUSER_ID.
    orderpoints = self.env['stock.warehouse.orderpoint'].with_user(SUPERUSER_ID).create(orderpoint_values_list)
    for orderpoint in orderpoints:
        orderpoint.route_id = orderpoint.product_id.route_ids[:1]
    orderpoints.filtered(lambda o: not o.route_id)._set_default_route_id()
    return action

@api.model
def _get_orderpoint_values(self, product, location):
    """Default values for a report-generated (virtual) orderpoint."""
    return {
        'product_id': product,
        'location_id': location,
        'product_max_qty': 0.0,
        'product_min_qty': 0.0,
        'trigger': 'manual',
    }

def _get_replenishment_order_notification(self):
    # Hook: localizations/modules may return a client notification action.
    return False

def _quantity_in_progress(self):
    """Return Quantities that are not yet in virtual stock but should be deduced from orderpoint rule
    (example: purchases created from orderpoints)"""
    return dict(self.mapped(lambda x: (x.id, 0.0)))

def _prepare_procurement_values(self, date=False, group=False):
    """ Prepare specific key for moves or other components that will be created from a stock rule
    coming from an orderpoint. This method could be overridden in order to add other custom keys
    that could be used in move/po creation.

    :param date: planned date for the procurement (defaults to today)
    :param group: procurement group to use (defaults to the orderpoint's own)
    """
    date_planned = date or fields.Date.today()
    return {
        'route_ids': self.route_id,
        'date_planned': date_planned,
        'date_deadline': date or False,
        'warehouse_id': self.warehouse_id,
        'orderpoint_id': self,
        'group_id': group or self.group_id,
    }

def _procure_orderpoint_confirm(self, use_new_cursor=False, company_id=None, raise_user_error=True):
    """ Create procurements based on orderpoints.

    :param bool use_new_cursor: if set, use a dedicated cursor and auto-commit after processing
        1000 orderpoints. This is appropriate for batch jobs only.
    :param company_id: company to run the procurements under.
    :param bool raise_user_error: forwarded to procurement.group.run(); when False,
        errors are collected instead of raised.
    """
    self = self.with_company(company_id)
    # Plain read of ids only: avoids prefetching full records for the whole set.
    orderpoints_noprefetch = self.read(['id'])
    orderpoints_noprefetch = [orderpoint['id'] for orderpoint in orderpoints_noprefetch]
    for orderpoints_batch in split_every(1000, orderpoints_noprefetch):
        if use_new_cursor:
            # Dedicated cursor so each batch can be committed independently.
            cr = registry(self._cr.dbname).cursor()
            self = self.with_env(self.env(cr=cr))
        orderpoints_batch = self.env['stock.warehouse.orderpoint'].browse(orderpoints_batch)
        orderpoints_exceptions = []
        # Retry loop: on ProcurementException the failing orderpoints are removed
        # from the batch and the remainder is re-run.
        while orderpoints_batch:
            procurements = []
            for orderpoint in orderpoints_batch:
                if float_compare(orderpoint.qty_to_order, 0.0, precision_rounding=orderpoint.product_uom.rounding) == 1:
                    date = datetime.combine(orderpoint.lead_days_date, time.min)
                    values = orderpoint._prepare_procurement_values(date=date)
                    procurements.append(self.env['procurement.group'].Procurement(
                        orderpoint.product_id, orderpoint.qty_to_order, orderpoint.product_uom,
                        orderpoint.location_id, orderpoint.name, orderpoint.name,
                        orderpoint.company_id, values))

            try:
                # Savepoint so a failed run leaves the transaction usable.
                with self.env.cr.savepoint():
                    self.env['procurement.group'].with_context(from_orderpoint=True).run(procurements, raise_user_error=raise_user_error)
            except ProcurementException as errors:
                for procurement, error_msg in errors.procurement_exceptions:
                    orderpoints_exceptions += [(procurement.values.get('orderpoint_id'), error_msg)]
                failed_orderpoints = self.env['stock.warehouse.orderpoint'].concat(*[o[0] for o in orderpoints_exceptions])
                if not failed_orderpoints:
                    _logger.error('Unable to process orderpoints')
                    break
                orderpoints_batch -= failed_orderpoints
            except OperationalError:
                # DB-level error (e.g. serialization failure): with a private
                # cursor we can roll back and move on; otherwise propagate.
                if use_new_cursor:
                    cr.rollback()
                    continue
                else:
                    raise
            else:
                orderpoints_batch._post_process_scheduler()
                break

        # Log an activity on product template for failed orderpoints.
        for orderpoint, error_msg in orderpoints_exceptions:
            existing_activity = self.env['mail.activity'].search([
                ('res_id', '=', orderpoint.product_id.product_tmpl_id.id),
                ('res_model_id', '=', self.env.ref('product.model_product_template').id),
                ('note', '=', error_msg)
            ])
            # Avoid stacking duplicate warnings for the same error text.
            if not existing_activity:
                orderpoint.product_id.product_tmpl_id.activity_schedule(
                    'mail.mail_activity_data_warning',
                    note=error_msg,
                    user_id=orderpoint.product_id.responsible_id.id or SUPERUSER_ID,
                )

        if use_new_cursor:
            cr.commit()
            cr.close()
    return {}

def _post_process_scheduler(self):
    # Hook called after a successful scheduler batch; overridden elsewhere.
    return True
class MailComposer(models.TransientModel): """ Generic message composition wizard. You may inherit from this wizard at model and view levels to provide specific features. The behavior of the wizard depends on the composition_mode field: - 'comment': post on a record. The wizard is pre-populated via ``get_record_data`` - 'mass_mail': wizard in mass mailing mode where the mail details can contain template placeholders that will be merged with actual data before being sent to each recipient. """ _name = 'mail.compose.message' _inherit = 'mail.message' _description = 'Email composition wizard' _log_access = True _batch_size = 500 @api.model def default_get(self, fields): """ Handle composition mode. Some details about context keys: - comment: default mode, model and ID of a record the user comments - default_model or active_model - default_res_id or active_id - reply: active_id of a message the user replies to - default_parent_id or message_id or active_id: ID of the mail.message we reply to - message.res_model or default_model - message.res_id or default_res_id - mass_mail: model and IDs of records the user mass-mails - active_ids: record IDs - default_model or active_model """ result = super(MailComposer, self).default_get(fields) # v6.1 compatibility mode result['composition_mode'] = result.get('composition_mode', self._context.get('mail.compose.message.mode', 'comment')) result['model'] = result.get('model', self._context.get('active_model')) result['res_id'] = result.get('res_id', self._context.get('active_id')) result['parent_id'] = result.get('parent_id', self._context.get('message_id')) if 'no_auto_thread' not in result and (result['model'] not in self.env or not hasattr(self.env[result['model']], 'message_post')): result['no_auto_thread'] = True # default values according to composition mode - NOTE: reply is deprecated, fall back on comment if result['composition_mode'] == 'reply': result['composition_mode'] = 'comment' vals = {} if 'active_domain' in 
self._context: # not context.get() because we want to keep global [] domains vals['active_domain'] = '%s' % self._context.get('active_domain') if result['composition_mode'] == 'comment': vals.update(self.get_record_data(result)) for field in vals: if field in fields: result[field] = vals[field] # TDE HACK: as mailboxes used default_model='res.users' and default_res_id=uid # (because of lack of an accessible pid), creating a message on its own # profile may crash (res_users does not allow writing on it) # Posting on its own profile works (res_users redirect to res_partner) # but when creating the mail.message to create the mail.compose.message # access rights issues may rise # We therefore directly change the model and res_id if result['model'] == 'res.users' and result['res_id'] == self._uid: result['model'] = 'res.partner' result['res_id'] = self.env.user.partner_id.id if fields is not None: [result.pop(field, None) for field in list(result) if field not in fields] return result @api.model def _get_composition_mode_selection(self): return [('comment', 'Post on a document'), ('mass_mail', 'Email Mass Mailing'), ('mass_post', 'Post on Multiple Documents')] composition_mode = fields.Selection(selection=_get_composition_mode_selection, string='Composition mode', default='comment') partner_ids = fields.Many2many( 'res.partner', 'mail_compose_message_res_partner_rel', 'wizard_id', 'partner_id', 'Additional Contacts') use_active_domain = fields.Boolean('Use active domain') active_domain = fields.Text('Active domain', readonly=True) attachment_ids = fields.Many2many( 'ir.attachment', 'mail_compose_message_ir_attachments_rel', 'wizard_id', 'attachment_id', 'Attachments') is_log = fields.Boolean('Log an Internal Note', help='Whether the message is an internal note (comment mode only)') subject = fields.Char(default=False) # mass mode options notify = fields.Boolean('Notify followers', help='Notify followers of the document (mass post only)') auto_delete = 
fields.Boolean('Delete Emails', help='Delete sent emails (mass mailing only)') keep_days = fields.Integer('Keep days', default=-1, help="This value defines the no. of days " "the emails should be recorded " "in the system: \n -1 = Email will be deleted " "immediately once it is send \n greater than 0 = Email " "will be deleted after " "the no. of days are met.") auto_delete_message = fields.Boolean('Delete Message Copy', help='Do not keep a copy of the email in the document communication history (mass mailing only)') template_id = fields.Many2one( 'mail.template', 'Use template', index=True, domain="[('model', '=', model)]") # mail_message updated fields message_type = fields.Selection(default="comment") subtype_id = fields.Many2one(default=lambda self: self.sudo().env.ref('mail.mt_comment', raise_if_not_found=False).id) @api.multi def check_access_rule(self, operation): """ Access rules of mail.compose.message: - create: if - model, no res_id, I create a message in mass mail mode - then: fall back on mail.message acces rules """ # Author condition (CREATE (mass_mail)) if operation == 'create' and self._uid != SUPERUSER_ID: # read mail_compose_message.ids to have their values message_values = {} self._cr.execute('SELECT DISTINCT id, model, res_id FROM "%s" WHERE id = ANY (%%s) AND res_id = 0' % self._table, (self.ids,)) for mid, rmod, rid in self._cr.fetchall(): message_values[mid] = {'model': rmod, 'res_id': rid} # remove from the set to check the ids that mail_compose_message accepts author_ids = [mid for mid, message in message_values.items() if message.get('model') and not message.get('res_id')] self = self.browse(list(set(self.ids) - set(author_ids))) # not sure slef = ... return super(MailComposer, self).check_access_rule(operation) @api.multi def _notify(self, force_send=False, user_signature=True): """ Override specific notify method of mail.message, because we do not want that feature in the wizard. 
""" return @api.model def get_record_data(self, values): """ Returns a defaults-like dict with initial values for the composition wizard when sending an email related a previous email (parent_id) or a document (model, res_id). This is based on previously computed default values. """ result, subject = {}, False if values.get('parent_id'): parent = self.env['mail.message'].browse(values.get('parent_id')) result['record_name'] = parent.record_name, subject = tools.ustr(parent.subject or parent.record_name or '') if not values.get('model'): result['model'] = parent.model if not values.get('res_id'): result['res_id'] = parent.res_id partner_ids = values.get('partner_ids', list()) + [(4, id) for id in parent.partner_ids.ids] if self._context.get('is_private') and parent.author_id: # check message is private then add author also in partner list. partner_ids += [(4, parent.author_id.id)] result['partner_ids'] = partner_ids elif values.get('model') and values.get('res_id'): doc_name_get = self.env[values.get('model')].browse(values.get('res_id')).name_get() result['record_name'] = doc_name_get and doc_name_get[0][1] or '' subject = tools.ustr(result['record_name']) re_prefix = _('Re:') if subject and not (subject.startswith('Re:') or subject.startswith(re_prefix)): subject = "%s %s" % (re_prefix, subject) result['subject'] = subject return result #------------------------------------------------------ # Wizard validation and send #------------------------------------------------------ # action buttons call with positionnal arguments only, so we need an intermediary function # to ensure the context is passed correctly @api.multi def send_mail_action(self): # TDE/ ??? return self.send_mail() @api.multi def send_mail(self, auto_commit=False): """ Process the wizard content and proceed with sending the related email(s), rendering any template patterns on the fly if needed. """ for wizard in self: # Duplicate attachments linked to the email.template. 
# Indeed, basic mail.compose.message wizard duplicates attachments in mass # mailing mode. But in 'single post' mode, attachments of an email template # also have to be duplicated to avoid changing their ownership. if wizard.attachment_ids and wizard.composition_mode != 'mass_mail' and wizard.template_id: new_attachment_ids = [] for attachment in wizard.attachment_ids: if attachment in wizard.template_id.attachment_ids: new_attachment_ids.append(attachment.copy({'res_model': 'mail.compose.message', 'res_id': wizard.id}).id) else: new_attachment_ids.append(attachment.id) wizard.write({'attachment_ids': [(6, 0, new_attachment_ids)]}) # Mass Mailing mass_mode = wizard.composition_mode in ('mass_mail', 'mass_post') Mail = self.env['mail.mail'] ActiveModel = self.env[wizard.model if wizard.model else 'mail.thread'] if wizard.template_id: # template user_signature is added when generating body_html # mass mailing: use template auto_delete value -> note, for emails mass mailing only Mail = Mail.with_context(mail_notify_user_signature=False) ActiveModel = ActiveModel.with_context(mail_notify_user_signature=False, mail_auto_delete=wizard.template_id.auto_delete, mail_keep_days=wizard.template_id.keep_days) if not hasattr(ActiveModel, 'message_post'): ActiveModel = self.env['mail.thread'].with_context(thread_model=wizard.model) if wizard.composition_mode == 'mass_post': # do not send emails directly but use the queue instead # add context key to avoid subscribing the author ActiveModel = ActiveModel.with_context(mail_notify_force_send=False, mail_create_nosubscribe=True) # wizard works in batch mode: [res_id] or active_ids or active_domain if mass_mode and wizard.use_active_domain and wizard.model: res_ids = self.env[wizard.model].search(safe_eval(wizard.active_domain)).ids elif mass_mode and wizard.model and self._context.get('active_ids'): res_ids = self._context['active_ids'] else: res_ids = [wizard.res_id] batch_size = 
int(self.env['ir.config_parameter'].sudo().get_param('mail.batch_size')) or self._batch_size sliced_res_ids = [res_ids[i:i + batch_size] for i in range(0, len(res_ids), batch_size)] if wizard.composition_mode == 'mass_mail' or wizard.is_log or (wizard.composition_mode == 'mass_post' and not wizard.notify): # log a note: subtype is False subtype_id = False elif wizard.subtype_id: subtype_id = wizard.subtype_id.id else: subtype_id = self.sudo().env.ref('mail.mt_comment', raise_if_not_found=False).id for res_ids in sliced_res_ids: batch_mails = Mail all_mail_values = wizard.get_mail_values(res_ids) for res_id, mail_values in all_mail_values.items(): if wizard.composition_mode == 'mass_mail': batch_mails |= Mail.create(mail_values) else: ActiveModel.browse(res_id).message_post( message_type=wizard.message_type, subtype_id=subtype_id, **mail_values) if wizard.composition_mode == 'mass_mail': batch_mails.send(auto_commit=auto_commit) return {'type': 'ir.actions.act_window_close'} @api.multi def get_mail_values(self, res_ids): """Generate the values that will be used by send_mail to create mail_messages or mail_mails. 
""" self.ensure_one() results = dict.fromkeys(res_ids, False) rendered_values = {} mass_mail_mode = self.composition_mode == 'mass_mail' # render all template-based value at once if mass_mail_mode and self.model: rendered_values = self.render_message(res_ids) # compute alias-based reply-to in batch reply_to_value = dict.fromkeys(res_ids, None) if mass_mail_mode and not self.no_auto_thread: # reply_to_value = self.env['mail.thread'].with_context(thread_model=self.model).browse(res_ids).message_get_reply_to(default=self.email_from) reply_to_value = self.env['mail.thread'].with_context(thread_model=self.model).message_get_reply_to(res_ids, default=self.email_from) for res_id in res_ids: # static wizard (mail.message) values mail_values = { 'subject': self.subject, 'body': self.body or '', 'parent_id': self.parent_id and self.parent_id.id, 'partner_ids': [partner.id for partner in self.partner_ids], 'attachment_ids': [attach.id for attach in self.attachment_ids], 'author_id': self.author_id.id, 'email_from': self.email_from, 'record_name': self.record_name, 'no_auto_thread': self.no_auto_thread, 'mail_server_id': self.mail_server_id.id, 'mail_activity_type_id': self.mail_activity_type_id.id, } # mass mailing: rendering override wizard static values if mass_mail_mode and self.model: if self.model in self.env and hasattr(self.env[self.model], 'message_get_email_values'): mail_values.update(self.env[self.model].browse(res_id).message_get_email_values()) # keep a copy unless specifically requested, reset record name (avoid browsing records) mail_values.update(notification=not self.auto_delete_message, model=self.model, res_id=res_id, record_name=False) # auto deletion of mail_mail if self.auto_delete or self.template_id.auto_delete: mail_values['auto_delete'] = True mail_values['keep_days'] = \ self.keep_days or self.template_id.keep_days # rendered values using template email_dict = rendered_values[res_id] mail_values['partner_ids'] += email_dict.pop('partner_ids', []) 
mail_values.update(email_dict) if not self.no_auto_thread: mail_values.pop('reply_to') if reply_to_value.get(res_id): mail_values['reply_to'] = reply_to_value[res_id] if self.no_auto_thread and not mail_values.get('reply_to'): mail_values['reply_to'] = mail_values['email_from'] # mail_mail values: body -> body_html, partner_ids -> recipient_ids mail_values['body_html'] = mail_values.get('body', '') mail_values['recipient_ids'] = [(4, id) for id in mail_values.pop('partner_ids', [])] # process attachments: should not be encoded before being processed by message_post / mail_mail create mail_values['attachments'] = [(name, base64.b64decode(enc_cont)) for name, enc_cont in email_dict.pop('attachments', list())] attachment_ids = [] for attach_id in mail_values.pop('attachment_ids'): new_attach_id = self.env['ir.attachment'].browse(attach_id).copy({'res_model': self._name, 'res_id': self.id}) attachment_ids.append(new_attach_id.id) mail_values['attachment_ids'] = self.env['mail.thread']._message_preprocess_attachments( mail_values.pop('attachments', []), attachment_ids, 'mail.message', 0) results[res_id] = mail_values return results #------------------------------------------------------ # Template methods #------------------------------------------------------ @api.multi @api.onchange('template_id') def onchange_template_id_wrapper(self): self.ensure_one() values = self.onchange_template_id(self.template_id.id, self.composition_mode, self.model, self.res_id)['value'] for fname, value in values.items(): setattr(self, fname, value) @api.multi def onchange_template_id(self, template_id, composition_mode, model, res_id): """ - mass_mailing: we cannot render, so return the template values - normal mode: return rendered values /!\ for x2many field, this onchange return command instead of ids """ if template_id and composition_mode == 'mass_mail': template = self.env['mail.template'].browse(template_id) fields = ['subject', 'body_html', 'email_from', 'reply_to', 
'mail_server_id'] values = dict((field, getattr(template, field)) for field in fields if getattr(template, field)) if template.attachment_ids: values['attachment_ids'] = [att.id for att in template.attachment_ids] if template.mail_server_id: values['mail_server_id'] = template.mail_server_id.id if template.user_signature and 'body_html' in values: signature = self.env.user.signature values['body_html'] = tools.append_content_to_html(values['body_html'], signature, plaintext=False) elif template_id: values = self.generate_email_for_composer(template_id, [res_id])[res_id] # transform attachments into attachment_ids; not attached to the document because this will # be done further in the posting process, allowing to clean database if email not send Attachment = self.env['ir.attachment'] for attach_fname, attach_datas in values.pop('attachments', []): data_attach = { 'name': attach_fname, 'datas': attach_datas, 'datas_fname': attach_fname, 'res_model': 'mail.compose.message', 'res_id': 0, 'type': 'binary', # override default_type from context, possibly meant for another model! } values.setdefault('attachment_ids', list()).append(Attachment.create(data_attach).id) else: default_values = self.with_context(default_composition_mode=composition_mode, default_model=model, default_res_id=res_id).default_get(['composition_mode', 'model', 'res_id', 'parent_id', 'partner_ids', 'subject', 'body', 'email_from', 'reply_to', 'attachment_ids', 'mail_server_id']) values = dict((key, default_values[key]) for key in ['subject', 'body', 'partner_ids', 'email_from', 'reply_to', 'attachment_ids', 'mail_server_id'] if key in default_values) if values.get('body_html'): values['body'] = values.pop('body_html') # This onchange should return command instead of ids for x2many field. 
# ORM handle the assignation of command list on new onchange (api.v8),
        # this force the complete replacement of x2many field with
        # command and is compatible with onchange api.v7
        values = self._convert_to_write(values)
        return {'value': values}

    @api.multi
    def save_as_template(self):
        """ Hit "save as template" button: the current form values become a new
            mail.template attached to the current document model.

            The new template is linked back on the wizard (``template_id``) and
            the onchange is replayed so the form reflects the saved template.
            Returns an action reopening the wizard (via ``_reopen``).
        """
        for record in self:
            # 'mail.message' is the fallback model when the wizard has none.
            model = self.env['ir.model']._get(record.model or 'mail.message')
            model_name = model.name or ''
            template_name = "%s: %s" % (model_name, tools.ustr(record.subject))
            values = {
                'name': template_name,
                'subject': record.subject or False,
                'body_html': record.body or False,
                'model_id': model.id or False,
                # (6, 0, ids) command: replace template attachments with the
                # wizard's current attachments.
                'attachment_ids': [(6, 0, [att.id for att in record.attachment_ids])],
            }
            template = self.env['mail.template'].create(values)
            # generate the saved template
            record.write({'template_id': template.id})
            record.onchange_template_id_wrapper()
            # NOTE: returns inside the loop — only the first record of a
            # multi-record wizard is processed before reopening the form.
            return _reopen(self, record.id, record.model, context=self._context)

    #------------------------------------------------------
    # Template rendering
    #------------------------------------------------------

    @api.multi
    def render_message(self, res_ids):
        """Generate template-based values of wizard, for the document records given
        by res_ids. This method is meant to be inherited by email_template that
        will produce a more complete dictionary, using Jinja2 templates.

        Each template is generated for all res_ids, allowing to parse the template
        once, and render it multiple times. This is useful for mass mailing where
        template rendering represent a significant part of the process.

        Default recipients are also computed, based on mail_thread method
        message_get_default_recipients. This allows to ensure a mass mailing has
        always some recipients specified.

        :param browse wizard: current mail.compose.message browse record
        :param list res_ids: list of record ids (or a single id; the return
            shape then collapses to the single result dict)

        :return dict results: for each res_id, the generated template values for
                              subject, body, email_from and reply_to
        """
        self.ensure_one()
        # Accept a bare id for convenience; remember the shape for the return.
        multi_mode = True
        if isinstance(res_ids, pycompat.integer_types):
            multi_mode = False
            res_ids = [res_ids]

        # Render each wizard field once per res_id (body gets post-processing).
        subjects = self.render_template(self.subject, self.model, res_ids)
        bodies = self.render_template(self.body, self.model, res_ids, post_process=True)
        emails_from = self.render_template(self.email_from, self.model, res_ids)
        replies_to = self.render_template(self.reply_to, self.model, res_ids)
        default_recipients = {}
        if not self.partner_ids:
            # No explicit recipients on the wizard: fall back to the document's
            # default recipients so a mass mailing never goes out empty.
            default_recipients = self.env['mail.thread'].message_get_default_recipients(res_model=self.model, res_ids=res_ids)

        results = dict.fromkeys(res_ids, False)
        for res_id in res_ids:
            results[res_id] = {
                'subject': subjects[res_id],
                'body': bodies[res_id],
                'email_from': emails_from[res_id],
                'reply_to': replies_to[res_id],
            }
            results[res_id].update(default_recipients.get(res_id, dict()))

        # generate template-based values
        if self.template_id:
            template_values = self.generate_email_for_composer(
                self.template_id.id, res_ids,
                fields=['email_to', 'partner_to', 'email_cc', 'attachment_ids', 'mail_server_id'])
        else:
            template_values = {}

        for res_id in res_ids:
            if template_values.get(res_id):
                # recipients are managed by the template
                results[res_id].pop('partner_ids')
                results[res_id].pop('email_to')
                results[res_id].pop('email_cc')
                # remove attachments from template values as they should not be rendered
                template_values[res_id].pop('attachment_ids', None)
            else:
                template_values[res_id] = dict()
            # update template values by composer values (composer wins)
            template_values[res_id].update(results[res_id])
        return multi_mode and template_values or template_values[res_ids[0]]

    @api.model
    def generate_email_for_composer(self, template_id, res_ids, fields=None):
        """ Call email_template.generate_email(), get fields relevant for
            mail.compose.message, transform email_cc and email_to into partner_ids

            :param int template_id: id of the mail.template to render
            :param res_ids: list of record ids, or a single id (return shape
                collapses accordingly)
            :param fields: template fields to generate; defaults to the full
                set used by the composer
            :return: dict res_id -> generated values ('body_html' is renamed
                to 'body' for the composer)
        """
        multi_mode = True
        if isinstance(res_ids, pycompat.integer_types):
            multi_mode = False
            res_ids = [res_ids]

        if fields is None:
            fields = ['subject', 'body_html', 'email_from', 'email_to', 'partner_to', 'email_cc', 'reply_to', 'attachment_ids', 'mail_server_id']
        # partner_ids / attachments are produced by generate_email() itself
        # (tpl_partners_only turns emails into partners), not requested fields.
        returned_fields = fields + ['partner_ids', 'attachments']
        values = dict.fromkeys(res_ids, False)

        template_values = self.env['mail.template'].with_context(tpl_partners_only=True).browse(template_id).generate_email(res_ids, fields=fields)
        for res_id in res_ids:
            # Keep only truthy generated values relevant to the composer.
            res_id_values = dict((field, template_values[res_id][field]) for field in returned_fields if template_values[res_id].get(field))
            res_id_values['body'] = res_id_values.pop('body_html', '')
            values[res_id] = res_id_values
        return multi_mode and values or values[res_ids[0]]

    @api.model
    def render_template(self, template, model, res_ids, post_process=False):
        """Thin proxy to mail.template rendering for the given model/records."""
        return self.env['mail.template'].render_template(template, model, res_ids, post_process=post_process)
class ReportMembership(models.Model):
    '''Membership Analysis

    Read-only reporting model (``_auto = False``) backed by a SQL view that
    aggregates membership lines per partner/product/period.  ``STATE`` is the
    membership state selection defined elsewhere in this module — presumably
    the same list used on res.partner; verify against the module's constants.
    '''
    _name = 'report.membership'
    _description = 'Membership Analysis'
    _auto = False  # no table is created; init() builds the view instead
    _rec_name = 'start_date'

    start_date = fields.Date(string='Start Date', readonly=True)
    date_to = fields.Date(string='End Date', readonly=True, help="End membership date")
    num_waiting = fields.Integer(string='# Waiting', readonly=True)
    num_invoiced = fields.Integer(string='# Invoiced', readonly=True)
    num_paid = fields.Integer(string='# Paid', readonly=True)
    tot_pending = fields.Float(string='Pending Amount', digits=0, readonly=True)
    tot_earned = fields.Float(string='Earned Amount', digits=0, readonly=True)
    partner_id = fields.Many2one('res.partner', string='Member', readonly=True)
    associate_member_id = fields.Many2one('res.partner', string='Associate Member', readonly=True)
    membership_id = fields.Many2one('product.product', string='Membership Product', readonly=True)
    membership_state = fields.Selection(STATE, string='Current Membership State', readonly=True)
    user_id = fields.Many2one('res.users', string='Salesperson', readonly=True)
    company_id = fields.Many2one('res.company', string='Company', readonly=True)
    quantity = fields.Integer(readonly=True)

    def init(self):
        '''Create the view

        Inner query: one row per (partner, membership line) with the line id
        bucketed into num_waiting / num_invoiced / num_paid by state, and the
        invoice-line subtotal split into pending vs earned amounts.
        Outer query: COUNT/SUM those buckets per partner, product and period.
        NOTE(review): ``p.active = 'true'`` compares a boolean column to a
        string literal — PostgreSQL casts it, but ``p.active = true`` would be
        the conventional spelling; left unchanged here.
        '''
        tools.drop_view_if_exists(self._cr, self._table)
        self._cr.execute("""
        CREATE OR REPLACE VIEW %s AS (
        SELECT
        MIN(id) AS id,
        partner_id,
        count(membership_id) as quantity,
        user_id,
        membership_state,
        associate_member_id,
        membership_amount,
        date_to,
        start_date,
        COUNT(num_waiting) AS num_waiting,
        COUNT(num_invoiced) AS num_invoiced,
        COUNT(num_paid) AS num_paid,
        SUM(tot_pending) AS tot_pending,
        SUM(tot_earned) AS tot_earned,
        membership_id,
        company_id
        FROM
        (SELECT
            MIN(p.id) AS id,
            p.id AS partner_id,
            p.user_id AS user_id,
            p.membership_state AS membership_state,
            p.associate_member AS associate_member_id,
            p.membership_amount AS membership_amount,
            p.membership_stop AS date_to,
            p.membership_start AS start_date,
            CASE WHEN ml.state = 'waiting'  THEN ml.id END AS num_waiting,
            CASE WHEN ml.state = 'invoiced' THEN ml.id END AS num_invoiced,
            CASE WHEN ml.state = 'paid'     THEN ml.id END AS num_paid,
            CASE WHEN ml.state IN ('waiting', 'invoiced') THEN SUM(aml.price_subtotal) ELSE 0 END AS tot_pending,
            CASE WHEN ml.state = 'paid' OR p.membership_state = 'old' THEN SUM(aml.price_subtotal) ELSE 0 END AS tot_earned,
            ml.membership_id AS membership_id,
            p.company_id AS company_id
            FROM res_partner p
            LEFT JOIN membership_membership_line ml ON (ml.partner = p.id)
            LEFT JOIN account_move_line aml ON (ml.account_invoice_line = aml.id)
            LEFT JOIN account_move am ON (aml.move_id = am.id)
            WHERE p.membership_state != 'none' and p.active = 'true'
            GROUP BY
              p.id,
              p.user_id,
              p.membership_state,
              p.associate_member,
              p.membership_amount,
              p.membership_start,
              ml.membership_id,
              p.company_id,
              ml.state,
              ml.id
        ) AS foo
        GROUP BY
            start_date,
            date_to,
            partner_id,
            user_id,
            membership_id,
            company_id,
            membership_state,
            associate_member_id,
            membership_amount
        )""" % (self._table,))
class Namespace(models.Model):
    """An OpenAPI integration namespace: groups model accesses under one
    ``/api/v1/<name>`` endpoint, holds the access token, logging options and
    the generated Swagger 2.0 specification."""

    _name = "openapi.namespace"
    _description = "Integration"

    active = fields.Boolean("Active", default=True)
    name = fields.Char(
        "Name",
        required=True,
        help="""Integration name, e.g. ebay, amazon, magento, etc.
        The name is used in api endpoint""",
    )
    description = fields.Char("Description")
    log_ids = fields.One2many("openapi.log", "namespace_id", string="Logs")
    log_count = fields.Integer("Log count", compute="_compute_log_count")
    log_request = fields.Selection(
        [("disabled", "Disabled"), ("info", "Short"), ("debug", "Full")],
        "Log Requests",
        default="disabled",
    )
    log_response = fields.Selection(
        [("disabled", "Disabled"), ("error", "Errors only"), ("debug", "Full")],
        "Log Responses",
        default="error",
    )
    last_log_date = fields.Datetime(compute="_compute_last_used", string="Latest usage")
    access_ids = fields.One2many(
        "openapi.access",
        "namespace_id",
        string="Accesses",
        context={"active_test": False},
    )
    user_ids = fields.Many2many("res.users", string="Allowed Users", default=lambda self: self.env.user)
    token = fields.Char(
        "Identification token",
        default=lambda self: str(uuid.uuid4()),
        readonly=True,
        required=True,
        copy=False,
        help=
        "Token passed by a query string parameter to access the specification.",
    )
    spec_url = fields.Char("Specification Link", compute="_compute_spec_url")

    _sql_constraints = [(
        "name_uniq",
        "unique (name)",
        "A namespace already exists with this name. "
        "Namespace's name must be unique!",
    )]

    def name_get(self):
        """Display records as their endpoint path, e.g. ``/api/v1/ebay (desc)``."""
        return [(
            record.id,
            "/api/v1/%s%s" % (
                record.name,
                " (%s)" % record.description if record.description else "",
            ),
        ) for record in self]

    @api.model
    def _fix_name(self, vals):
        # Namespace names end up in URLs: lowercase and percent-encode them.
        if "name" in vals:
            vals["name"] = urlparse.quote_plus(vals["name"].lower())
        return vals

    @api.model
    def create(self, vals):
        """Create a namespace, normalizing its name for URL use."""
        vals = self._fix_name(vals)
        return super(Namespace, self).create(vals)

    def write(self, vals):
        """Write values, normalizing any new name for URL use."""
        vals = self._fix_name(vals)
        return super(Namespace, self).write(vals)

    def get_OAS(self):
        """Build the Swagger 2.0 (OAS) specification dict for this namespace.

        Starts from a skeleton containing the two built-in report endpoints
        (PDF and HTML), common error definitions/responses and basic-auth
        security, then merges in the per-model parts produced by each active
        ``openapi.access`` record via ``pinguin.update``.
        """
        current_host = self.env["ir.config_parameter"].sudo().get_param(
            "web.base.url")
        parsed_current_host = urlparse.urlparse(current_host)

        # Path parameters shared by both report endpoints.
        report_parameters = [
            {
                "name": "report_external_id",
                "in": "path",
                "description": "Report xml id or report name",
                "required": True,
                "type": "string",
            },
            {
                "name": "docids",
                "in": "path",
                "description": "One identifier or several identifiers separated by commas",
                "required": True,
                "type": "string",
            },
        ]
        # OrderedDict: key order is preserved in the serialized specification.
        spec = collections.OrderedDict([
            ("swagger", "2.0"),
            ("info", {
                "title": self.name,
                "version": self.write_date  # spec version tracks last edit
            }),
            ("host", parsed_current_host.netloc),
            ("basePath", "/api/v1/%s" % self.name),
            ("schemes", [parsed_current_host.scheme]),
            (
                "consumes",
                ["multipart/form-data", "application/x-www-form-urlencoded"],
            ),
            ("produces", ["application/json"]),
            (
                "paths",
                {
                    "/report/pdf/{report_external_id}/{docids}": {
                        "get": {
                            "summary":
                            "Get PDF report file for %s namespace" % self.name,
                            "description":
                            "Returns PDF report file for %s namespace" % self.name,
                            "operationId":
                            "getPdfReportFileFor%sNamespace" % self.name.capitalize(),
                            "produces": ["application/pdf"],
                            "responses": {
                                "200": {
                                    "description":
                                    "A PDF report file for %s namespace." % self.name,
                                    "schema": {
                                        "type": "file"
                                    },
                                }
                            },
                            "parameters": report_parameters,
                            "tags": ["report"],
                        }
                    },
                    "/report/html/{report_external_id}/{docids}": {
                        "get": {
                            "summary":
                            "Get HTML report file for %s namespace" % self.name,
                            "description":
                            "Returns HTML report file for %s namespace" % self.name,
                            "operationId":
                            "getHtmlReportFileFor%sNamespace" % self.name.capitalize(),
                            # NOTE(review): HTML endpoint advertises a PDF MIME
                            # type — looks like a copy-paste; confirm intended
                            # media type before changing the published spec.
                            "produces": ["application/pdf"],
                            "responses": {
                                "200": {
                                    "description":
                                    "A HTML report file for %s namespace." % self.name,
                                    "schema": {
                                        "type": "file"
                                    },
                                }
                            },
                            "parameters": report_parameters,
                            "tags": ["report"],
                        }
                    },
                },
            ),
            (
                "definitions",
                {
                    "ErrorResponse": {
                        "type": "object",
                        "required": ["error", "error_descrip"],
                        "properties": {
                            "error": {
                                "type": "string"
                            },
                            "error_descrip": {
                                "type": "string"
                            },
                        },
                    },
                },
            ),
            (
                "responses",
                {
                    "400": {
                        "description": "Invalid Data",
                        "schema": {
                            "$ref": "#/definitions/ErrorResponse"
                        },
                    },
                    "401": {
                        "description":
                        "Authentication information is missing or invalid",
                        "schema": {
                            "$ref": "#/definitions/ErrorResponse"
                        },
                    },
                    "500": {
                        "description": "Server Error",
                        "schema": {
                            "$ref": "#/definitions/ErrorResponse"
                        },
                    },
                },
            ),
            ("securityDefinitions", {
                "basicAuth": {
                    "type": "basic"
                }
            }),
            ("security", [{
                "basicAuth": []
            }]),
            ("tags", []),
        ])

        # Merge in each active model access; its "tag" entry goes into the
        # top-level tag list, the rest is deep-merged into the spec.
        for openapi_access in self.access_ids.filtered("active"):
            OAS_part_for_model = openapi_access.get_OAS_part()
            spec["tags"].append(OAS_part_for_model["tag"])
            del OAS_part_for_model["tag"]
            pinguin.update(spec, OAS_part_for_model)

        return spec

    @api.depends("name", "token")
    def _compute_spec_url(self):
        """Expose a direct link to the swagger.json, token and db included."""
        for record in self:
            record.spec_url = "/api/v1/{}/swagger.json?token={}&db={}".format(
                record.name,
                record.token,
                self._cr.dbname,
            )

    def reset_token(self):
        """Replace each record's token with a fresh UUID4, retrying on the
        (unlikely) collision with an existing token."""
        for record in self:
            token = str(uuid.uuid4())
            while self.search([("token", "=", token)]).exists():
                token = str(uuid.uuid4())
            record.write({"token": token})

    def action_show_logs(self):
        """Open the log list filtered on this namespace."""
        return {
            "name": "Logs",
            "view_mode": "tree,form",
            "res_model": "openapi.log",
            "type": "ir.actions.act_window",
            "domain": [["namespace_id", "=", self.id]],
        }

    def _compute_last_used(self):
        # Latest log entry per namespace (highest id with a create_date);
        # yields False when the namespace has no logs.
        for s in self:
            s.last_log_date = (s.env["openapi.log"].search(
                [("namespace_id", "=", s.id), ("create_date", "!=", False)],
                limit=1,
                order="id desc",
            ).create_date)

    def _compute_log_count(self):
        # NOTE(review): uses self.id / single assignment, so this compute only
        # works for a singleton recordset — a multi-record read would raise;
        # consider iterating over self. Left unchanged here.
        self._cr.execute(
            "SELECT COUNT(*) FROM openapi_log WHERE namespace_id=(%s);",
            [str(self.id)])
        self.log_count = self._cr.dictfetchone()["count"]
class EventMailScheduler(models.Model):
    """ Event automated mailing. This model replaces all existing fields and
    configuration allowing to send emails on events since Flectra 9. A cron
    exists that periodically checks for mailing to run. """
    _name = 'event.mail'
    _rec_name = 'event_id'
    _description = 'Event Automated Mailing'

    event_id = fields.Many2one('event.event', string='Event', required=True, ondelete='cascade')
    sequence = fields.Integer('Display order')
    interval_nbr = fields.Integer('Interval', default=1)
    interval_unit = fields.Selection([('now', 'Immediately'), ('hours', 'Hour(s)'),
                                      ('days', 'Day(s)'), ('weeks', 'Week(s)'),
                                      ('months', 'Month(s)')],
                                     string='Unit', default='hours', required=True)
    interval_type = fields.Selection([('after_sub', 'After each registration'),
                                      ('before_event', 'Before the event'),
                                      ('after_event', 'After the event')],
                                     string='Trigger ', default="before_event", required=True)
    template_id = fields.Many2one(
        'mail.template', string='Email Template',
        domain=[('model', '=', 'event.registration')], required=True,
        ondelete='restrict',
        help=
        'This field contains the template of the mail that will be automatically sent'
    )
    scheduled_date = fields.Datetime('Scheduled Sent Mail', compute='_compute_scheduled_date', store=True)
    mail_registration_ids = fields.One2many('event.mail.registration', 'scheduler_id')
    mail_sent = fields.Boolean('Mail Sent on Event')
    done = fields.Boolean('Sent', compute='_compute_done', store=True)

    @api.one
    @api.depends('mail_sent', 'interval_type', 'event_id.registration_ids', 'mail_registration_ids')
    def _compute_done(self):
        """A scheduler is done when its single event mail was sent
        (before/after-event triggers) or, for per-registration triggers, when
        every registration has a sent mail line."""
        if self.interval_type in ['before_event', 'after_event']:
            self.done = self.mail_sent
        else:
            self.done = len(self.mail_registration_ids) == len(
                self.event_id.registration_ids) and all(
                    mail.mail_sent for mail in self.mail_registration_ids)

    @api.one
    @api.depends('event_id.state', 'event_id.date_begin', 'interval_type',
                 'interval_unit', 'interval_nbr')
    def _compute_scheduled_date(self):
        """Anchor date depends on the trigger: event creation (after_sub),
        event start (before_event, counted backwards) or event end; then shift
        by interval_nbr units via the module-level _INTERVALS table."""
        if self.event_id.state not in ['confirm', 'done']:
            # Unconfirmed events never schedule anything.
            self.scheduled_date = False
        else:
            if self.interval_type == 'after_sub':
                date, sign = self.event_id.create_date, 1
            elif self.interval_type == 'before_event':
                date, sign = self.event_id.date_begin, -1
            else:
                date, sign = self.event_id.date_end, 1
            self.scheduled_date = datetime.strptime(
                date, tools.DEFAULT_SERVER_DATETIME_FORMAT) + _INTERVALS[
                    self.interval_unit](sign * self.interval_nbr)

    @api.one
    def execute(self):
        """Run this scheduler now: create missing per-registration lines and
        fire the due ones (after_sub), or send the single event-level mail
        unless it was already sent or the event is already over."""
        now = fields.Datetime.now()
        if self.interval_type == 'after_sub':
            # update registration lines
            lines = [(0, 0, {
                'registration_id': registration.id
            }) for registration in (
                self.event_id.registration_ids -
                self.mapped('mail_registration_ids.registration_id'))]
            if lines:
                self.write({'mail_registration_ids': lines})
            # execute scheduler on registrations
            self.mail_registration_ids.filtered(
                lambda reg: reg.scheduled_date and reg.scheduled_date <= now
            ).execute()
        else:
            # Do not send emails if the mailing was scheduled before the event but the event is over
            if not self.mail_sent and (self.interval_type != 'before_event'
                                       or self.event_id.date_end > now):
                self.event_id.mail_attendees(self.template_id.id)
                self.write({'mail_sent': True})
        return True

    @api.model
    def _warn_template_error(self, scheduler, exception):
        """Best-effort email warning to the event organizer/responsible and
        the template's last writer when a scheduler raised."""
        # We warn ~ once by hour ~ instead of every 10 min if the interval unit is more than 'hours'.
        if random.random() < 0.1666 or scheduler.interval_unit in ('now', 'hours'):
            ex_s = exception_to_unicode(exception)
            try:
                event, template = scheduler.event_id, scheduler.template_id
                # set() deduplicates overlapping recipient addresses.
                emails = list(
                    set([
                        event.organizer_id.email, event.user_id.email,
                        template.write_uid.email
                    ]))
                # NOTE(review): values are interpolated BEFORE _() is applied,
                # so these strings are not actually translatable as written.
                subject = _(
                    "WARNING: Event Scheduler Error for event: %s" % event.name)
                body = _("""Event Scheduler for:
  - Event: %s (%s)
  - Scheduled: %s
  - Template: %s (%s)

Failed with error:
  - %s

You receive this email because you are:
  - the organizer of the event,
  - or the responsible of the event,
  - or the last writer of the template."""
                         % (event.name, event.id, scheduler.scheduled_date,
                            template.name, template.id, ex_s))
                email = self.env['ir.mail_server'].build_email(
                    email_from=self.env.user.email,
                    email_to=emails,
                    subject=subject,
                    body=body,
                )
                self.env['ir.mail_server'].send_email(email)
            except Exception as e:
                # Never let the warning itself break the cron.
                _logger.error(
                    "Exception while sending traceback by email: %s.\n Original Traceback:\n%s",
                    e, exception)
                pass

    @api.model
    def run(self, autocommit=False):
        """Cron entry point: execute every due, not-yet-done scheduler.

        Each scheduler runs inside a savepoint so one failure rolls back only
        its own work; failures are logged and reported via
        _warn_template_error. With autocommit=True each success is committed
        immediately (cron mode).
        """
        schedulers = self.search([
            ('done', '=', False),
            ('scheduled_date', '<=',
             datetime.strftime(fields.datetime.now(),
                               tools.DEFAULT_SERVER_DATETIME_FORMAT))
        ])
        for scheduler in schedulers:
            try:
                with self.env.cr.savepoint():
                    scheduler.execute()
            except Exception as e:
                _logger.exception(e)
                # Drop caches possibly poisoned by the rolled-back work.
                self.invalidate_cache()
                self._warn_template_error(scheduler, e)
            else:
                if autocommit:
                    self.env.cr.commit()
        return True
class AccountFrFec(models.TransientModel):
    """Wizard generating the French FEC export file (Fichier des Ecritures
    Comptables): a pipe-delimited CSV of opening balances plus all journal
    items in the selected period."""

    _name = 'account.fr.fec'
    _description = 'Ficher Echange Informatise'

    date_from = fields.Date(string='Start Date', required=True)
    date_to = fields.Date(string='End Date', required=True)
    fec_data = fields.Binary('FEC File', readonly=True)
    filename = fields.Char(string='Filename', size=256, readonly=True)
    export_type = fields.Selection([
        ('official', 'Official FEC report (posted entries only)'),
        ('nonofficial', 'Non-official FEC report (posted and unposted entries)'),
    ], string='Export Type', required=True, default='official')

    def do_query_unaffected_earnings(self):
        ''' Compute the sum of ending balances for all accounts that are of a type that does not bring forward the balance in new fiscal years.
            This is needed because we have to display only one line for the initial balance of all expense/revenue accounts in the FEC.

            :return: one FEC row (list of column strings) aggregating the
                prior-years result on the conventional 120/129 account pair.
        '''

        sql_query = '''
        SELECT
        'OUV' AS JournalCode,
        'Balance initiale' AS JournalLib,
        'OUVERTURE/' || %s AS EcritureNum,
        %s AS EcritureDate,
        '120/129' AS CompteNum,
        'Benefice (perte) reporte(e)' AS CompteLib,
        '' AS CompAuxNum,
        '' AS CompAuxLib,
        '-' AS PieceRef,
        %s AS PieceDate,
        '/' AS EcritureLib,
        replace(CASE WHEN COALESCE(sum(aml.balance), 0) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Debit,
        replace(CASE WHEN COALESCE(sum(aml.balance), 0) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Credit,
        '' AS EcritureLet,
        '' AS DateLet,
        %s AS ValidDate,
        '' AS Montantdevise,
        '' AS Idevise
        FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
        WHERE
        am.date < %s
        AND am.company_id = %s
        AND aat.include_initial_balance = 'f'
        AND (aml.debit != 0 OR aml.credit != 0)
        '''

        # For official report: only use posted entries
        if self.export_type == "official":
            sql_query += '''
            AND am.state = 'posted'
            '''

        company = self.env.user.company_id
        formatted_date_from = self.date_from.replace('-', '')
        date_from = datetime.strptime(self.date_from, DEFAULT_SERVER_DATE_FORMAT)
        formatted_date_year = date_from.year
        self._cr.execute(
            sql_query, (formatted_date_year, formatted_date_from,
                        formatted_date_from, formatted_date_from,
                        self.date_from, company.id))
        listrow = []
        row = self._cr.fetchone()
        listrow = list(row)
        return listrow

    @api.multi
    def generate_fec(self):
        """Build the FEC file and return an act_url action to download it.

        Sections written, in order: header row; opening balances for
        balance-carrying accounts (with prior-years earnings merged in);
        opening balances per partner for receivable/payable accounts; then
        every journal item of the period. Official exports restrict all
        sections to posted entries.
        """
        self.ensure_one()
        # We choose to implement the flat file instead of the XML
        # file for 2 reasons :
        # 1) the XSD file impose to have the label on the account.move
        # but Flectra has the label on the account.move.line, so that's a
        # problem !
        # 2) CSV files are easier to read/use for a regular accountant.
        # So it will be easier for the accountant to check the file before
        # sending it to the fiscal administration
        header = [
            u'JournalCode',    # 0
            u'JournalLib',     # 1
            u'EcritureNum',    # 2
            u'EcritureDate',   # 3
            u'CompteNum',      # 4
            u'CompteLib',      # 5
            u'CompAuxNum',     # 6  We use partner.id
            u'CompAuxLib',     # 7
            u'PieceRef',       # 8
            u'PieceDate',      # 9
            u'EcritureLib',    # 10
            u'Debit',          # 11
            u'Credit',         # 12
            u'EcritureLet',    # 13
            u'DateLet',        # 14
            u'ValidDate',      # 15
            u'Montantdevise',  # 16
            u'Idevise',        # 17
        ]

        company = self.env.user.company_id
        # NOTE(review): raising the builtin ``Warning`` works but Odoo-style
        # code usually raises UserError here; confirm the intended exception.
        if not company.vat:
            raise Warning(
                _("Missing VAT number for company %s") % company.name)
        if company.vat[0:2] != 'FR':
            raise Warning(_("FEC is for French companies only !"))

        fecfile = io.BytesIO()
        w = pycompat.csv_writer(fecfile, delimiter='|')
        w.writerow(header)

        # INITIAL BALANCE
        unaffected_earnings_xml_ref = self.env.ref(
            'account.data_unaffected_earnings')
        unaffected_earnings_line = True  # used to make sure that we add the unaffected earning initial balance only once
        if unaffected_earnings_xml_ref:
            #compute the benefit/loss of last year to add in the initial balance of the current year earnings account
            unaffected_earnings_results = self.do_query_unaffected_earnings()
            unaffected_earnings_line = False

        sql_query = '''
        SELECT
        'OUV' AS JournalCode,
        'Balance initiale' AS JournalLib,
        'OUVERTURE/' || %s AS EcritureNum,
        %s AS EcritureDate,
        MIN(aa.code) AS CompteNum,
        replace(replace(MIN(aa.name), '|', '/'), '\t', '') AS CompteLib,
        '' AS CompAuxNum,
        '' AS CompAuxLib,
        '-' AS PieceRef,
        %s AS PieceDate,
        '/' AS EcritureLib,
        replace(CASE WHEN sum(aml.balance) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Debit,
        replace(CASE WHEN sum(aml.balance) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Credit,
        '' AS EcritureLet,
        '' AS DateLet,
        %s AS ValidDate,
        '' AS Montantdevise,
        '' AS Idevise,
        MIN(aa.id) AS CompteID
        FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
        WHERE
        am.date < %s
        AND am.company_id = %s
        AND aat.include_initial_balance = 't'
        AND (aml.debit != 0 OR aml.credit != 0)
        '''

        # For official report: only use posted entries
        if self.export_type == "official":
            sql_query += '''
            AND am.state = 'posted'
            '''

        sql_query += '''
        GROUP BY aml.account_id, aat.type
        HAVING sum(aml.balance) != 0
        AND aat.type not in ('receivable', 'payable')
        '''
        formatted_date_from = self.date_from.replace('-', '')
        date_from = datetime.strptime(self.date_from, DEFAULT_SERVER_DATE_FORMAT)
        formatted_date_year = date_from.year
        self._cr.execute(
            sql_query, (formatted_date_year, formatted_date_from,
                        formatted_date_from, formatted_date_from,
                        self.date_from, company.id))

        for row in self._cr.fetchall():
            listrow = list(row)
            account_id = listrow.pop()  # trailing CompteID column, not exported
            if not unaffected_earnings_line:
                account = self.env['account.account'].browse(account_id)
                if account.user_type_id.id == self.env.ref(
                        'account.data_unaffected_earnings').id:
                    #add the benefit/loss of previous fiscal year to the first unaffected earnings account found.
                    unaffected_earnings_line = True
                    # Columns 11/12 are Debit/Credit as comma-decimal strings;
                    # merge the prior-years result into this account's balance.
                    current_amount = float(listrow[11].replace(
                        ',', '.')) - float(listrow[12].replace(',', '.'))
                    unaffected_earnings_amount = float(
                        unaffected_earnings_results[11].replace(
                            ',', '.')) - float(
                                unaffected_earnings_results[12].replace(
                                    ',', '.'))
                    listrow_amount = current_amount + unaffected_earnings_amount
                    if listrow_amount > 0:
                        listrow[11] = str(listrow_amount).replace('.', ',')
                        listrow[12] = '0,00'
                    else:
                        listrow[11] = '0,00'
                        listrow[12] = str(-listrow_amount).replace('.', ',')
            w.writerow(listrow)

        #if the unaffected earnings account wasn't in the selection yet: add it manually
        if (not unaffected_earnings_line and unaffected_earnings_results and
            (unaffected_earnings_results[11] != '0,00'
             or unaffected_earnings_results[12] != '0,00')):
            #search an unaffected earnings account
            unaffected_earnings_account = self.env['account.account'].search(
                [('user_type_id', '=',
                  self.env.ref('account.data_unaffected_earnings').id)],
                limit=1)
            if unaffected_earnings_account:
                unaffected_earnings_results[
                    4] = unaffected_earnings_account.code
                unaffected_earnings_results[
                    5] = unaffected_earnings_account.name
            w.writerow(unaffected_earnings_results)

        # INITIAL BALANCE - receivable/payable
        # Same opening-balance query, but grouped per partner (CompAuxNum/Lib).
        sql_query = '''
        SELECT
        'OUV' AS JournalCode,
        'Balance initiale' AS JournalLib,
        'OUVERTURE/' || %s AS EcritureNum,
        %s AS EcritureDate,
        MIN(aa.code) AS CompteNum,
        replace(MIN(aa.name), '|', '/') AS CompteLib,
        CASE WHEN rp.ref IS null OR rp.ref = ''
        THEN COALESCE('ID ' || rp.id, '')
        ELSE replace(rp.ref, '|', '/')
        END
        AS CompAuxNum,
        COALESCE(replace(rp.name, '|', '/'), '') AS CompAuxLib,
        '-' AS PieceRef,
        %s AS PieceDate,
        '/' AS EcritureLib,
        replace(CASE WHEN sum(aml.balance) <= 0 THEN '0,00' ELSE to_char(SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Debit,
        replace(CASE WHEN sum(aml.balance) >= 0 THEN '0,00' ELSE to_char(-SUM(aml.balance), '000000000000000D99') END, '.', ',') AS Credit,
        '' AS EcritureLet,
        '' AS DateLet,
        %s AS ValidDate,
        '' AS Montantdevise,
        '' AS Idevise,
        MIN(aa.id) AS CompteID
        FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        LEFT JOIN res_partner rp ON rp.id=aml.partner_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN account_account_type aat ON aa.user_type_id = aat.id
        WHERE
        am.date < %s
        AND am.company_id = %s
        AND aat.include_initial_balance = 't'
        AND (aml.debit != 0 OR aml.credit != 0)
        '''

        # For official report: only use posted entries
        if self.export_type == "official":
            sql_query += '''
            AND am.state = 'posted'
            '''

        sql_query += '''
        GROUP BY aml.account_id, aat.type, rp.ref, rp.id
        HAVING sum(aml.balance) != 0
        AND aat.type in ('receivable', 'payable')
        '''
        self._cr.execute(
            sql_query, (formatted_date_year, formatted_date_from,
                        formatted_date_from, formatted_date_from,
                        self.date_from, company.id))

        for row in self._cr.fetchall():
            listrow = list(row)
            account_id = listrow.pop()
            # NOTE(review): this loop encodes cells to bytes while the other
            # sections write str rows — verify against pycompat.csv_writer's
            # expected cell type; left unchanged here.
            w.writerow([s.encode("utf-8") for s in listrow])

        # LINES
        sql_query = '''
        SELECT
        replace(replace(aj.code, '|', '/'), '\t', '') AS JournalCode,
        replace(replace(aj.name, '|', '/'), '\t', '') AS JournalLib,
        replace(replace(am.name, '|', '/'), '\t', '') AS EcritureNum,
        TO_CHAR(am.date, 'YYYYMMDD') AS EcritureDate,
        aa.code AS CompteNum,
        replace(replace(aa.name, '|', '/'), '\t', '') AS CompteLib,
        CASE WHEN rp.ref IS null OR rp.ref = ''
        THEN COALESCE('ID ' || rp.id, '')
        ELSE replace(rp.ref, '|', '/')
        END
        AS CompAuxNum,
        COALESCE(replace(replace(rp.name, '|', '/'), '\t', ''), '') AS CompAuxLib,
        CASE WHEN am.ref IS null OR am.ref = ''
        THEN '-'
        ELSE replace(replace(am.ref, '|', '/'), '\t', '')
        END
        AS PieceRef,
        TO_CHAR(am.date, 'YYYYMMDD') AS PieceDate,
        CASE WHEN aml.name IS NULL THEN '/'
        ELSE replace(replace(aml.name, '|', '/'), '\t', '')
        END AS EcritureLib,
        replace(CASE WHEN aml.debit = 0 THEN '0,00' ELSE to_char(aml.debit, '000000000000000D99') END, '.', ',') AS Debit,
        replace(CASE WHEN aml.credit = 0 THEN '0,00' ELSE to_char(aml.credit, '000000000000000D99') END, '.', ',') AS Credit,
        CASE WHEN rec.name IS NULL THEN '' ELSE rec.name END AS EcritureLet,
        CASE WHEN aml.full_reconcile_id IS NULL THEN '' ELSE TO_CHAR(rec.create_date, 'YYYYMMDD') END AS DateLet,
        TO_CHAR(am.date, 'YYYYMMDD') AS ValidDate,
        CASE
            WHEN aml.amount_currency IS NULL OR aml.amount_currency = 0 THEN ''
            ELSE replace(to_char(aml.amount_currency, '000000000000000D99'), '.', ',')
        END AS Montantdevise,
        CASE WHEN aml.currency_id IS NULL THEN '' ELSE rc.name END AS Idevise
        FROM
        account_move_line aml
        LEFT JOIN account_move am ON am.id=aml.move_id
        LEFT JOIN res_partner rp ON rp.id=aml.partner_id
        JOIN account_journal aj ON aj.id = am.journal_id
        JOIN account_account aa ON aa.id = aml.account_id
        LEFT JOIN res_currency rc ON rc.id = aml.currency_id
        LEFT JOIN account_full_reconcile rec ON rec.id = aml.full_reconcile_id
        WHERE
        am.date >= %s
        AND am.date <= %s
        AND am.company_id = %s
        AND (aml.debit != 0 OR aml.credit != 0)
        '''

        # For official report: only use posted entries
        if self.export_type == "official":
            sql_query += '''
            AND am.state = 'posted'
            '''

        sql_query += '''
        ORDER BY
        am.date,
        am.name,
        aml.id
        '''
        self._cr.execute(sql_query, (self.date_from, self.date_to, company.id))

        for row in self._cr.fetchall():
            w.writerow(list(row))

        # French VAT: positions 5-13 hold the SIREN company identifier.
        siren = company.vat[4:13]
        end_date = self.date_to.replace('-', '')
        suffix = ''
        if self.export_type == "nonofficial":
            suffix = '-NONOFFICIAL'

        fecvalue = fecfile.getvalue()
        self.write({
            # NOTE(review): base64.encodestring is deprecated (removed in
            # Python 3.9) in favour of encodebytes; left unchanged here.
            'fec_data': base64.encodestring(fecvalue),
            # Filename = <siren>FECYYYYMMDD where YYYYMMDD is the closing date
            'filename': '%sFEC%s%s.csv' % (siren, end_date, suffix),
        })
        fecfile.close()

        action = {
            'name': 'FEC',
            'type': 'ir.actions.act_url',
            'url': "web/content/?model=account.fr.fec&id=" + str(self.id) +
            "&filename_field=filename&field=fec_data&download=true&filename=" +
            self.filename,
            'target': 'self',
        }
        return action
class IrAttachment(models.Model):
    """Attachments are used to link binary files or url to any openerp document.

    External attachment storage
    ---------------------------
    The computed field ``datas`` is implemented using ``_file_read``,
    ``_file_write`` and ``_file_delete``, which can be overridden to implement
    other storage engines. Such methods should check for other location pseudo
    uri (example: hdfs://hadoopserver).

    The default implementation is the file:dirname location that stores files
    on the local filesystem using name based on their sha1 hash
    """
    _name = 'ir.attachment'
    _order = 'id desc'

    @api.depends('res_model', 'res_id')
    def _compute_res_name(self):
        """Display name of the record the attachment points to."""
        # NOTE(review): attachments without res_model/res_id leave res_name
        # unassigned by this compute — confirm this is intended.
        for attachment in self:
            if attachment.res_model and attachment.res_id:
                record = self.env[attachment.res_model].browse(attachment.res_id)
                attachment.res_name = record.display_name

    @api.model
    def _storage(self):
        # storage backend key: 'file' (filesystem, default) or 'db'
        return self.env['ir.config_parameter'].sudo().get_param('ir_attachment.location', 'file')

    @api.model
    def _filestore(self):
        # root directory of the filestore for the current database
        return config.filestore(self._cr.dbname)

    @api.model
    def force_storage(self):
        """Force all attachments to be stored in the currently configured storage"""
        if not self.env.user._is_admin():
            raise AccessError(_('Only administrators can execute this action.'))

        # domain to retrieve the attachments to migrate
        domain = {
            'db': [('store_fname', '!=', False)],
            'file': [('db_datas', '!=', False)],
        }[self._storage()]

        # rewriting 'datas' runs _inverse_datas, which moves the payload to
        # the currently configured location
        for attach in self.search(domain):
            attach.write({'datas': attach.datas})
        return True

    @api.model
    def _full_path(self, path):
        """Return the absolute filesystem path for a filestore-relative path."""
        # sanitize path: strip dots and leading separators to keep the result
        # inside the filestore directory
        path = re.sub('[.]', '', path)
        path = path.strip('/\\')
        return os.path.join(self._filestore(), path)

    @api.model
    def _get_path(self, bin_data, sha):
        """Return (relative fname, absolute path) for content with hash ``sha``,
        creating the parent directory if needed."""
        # retro compatibility: old layout used a 3-char prefix directory
        fname = sha[:3] + '/' + sha
        full_path = self._full_path(fname)
        if os.path.isfile(full_path):
            return fname, full_path        # keep existing path

        # scatter files across 256 dirs
        # we use '/' in the db (even on windows)
        fname = sha[:2] + '/' + sha
        full_path = self._full_path(fname)
        dirname = os.path.dirname(full_path)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        return fname, full_path

    @api.model
    def _file_read(self, fname, bin_size=False):
        """Read a filestore entry; returns base64 content, or a human-readable
        size string when ``bin_size`` is set. Read errors are logged and yield ''."""
        full_path = self._full_path(fname)
        r = ''
        try:
            if bin_size:
                r = human_size(os.path.getsize(full_path))
            else:
                r = base64.b64encode(open(full_path,'rb').read())
        except (IOError, OSError):
            _logger.info("_read_file reading %s", full_path, exc_info=True)
        return r

    @api.model
    def _file_write(self, value, checksum):
        """Store base64 ``value`` in the filestore under its checksum; returns
        the relative fname. Existing identical content is reused (dedup by hash)."""
        bin_value = base64.b64decode(value)
        fname, full_path = self._get_path(bin_value, checksum)
        if not os.path.exists(full_path):
            try:
                with open(full_path, 'wb') as fp:
                    fp.write(bin_value)
                # add fname to checklist, in case the transaction aborts
                self._mark_for_gc(fname)
            except IOError:
                _logger.info("_file_write writing %s", full_path, exc_info=True)
        return fname

    @api.model
    def _file_delete(self, fname):
        # simply add fname to checklist, it will be garbage-collected later
        self._mark_for_gc(fname)

    def _mark_for_gc(self, fname):
        """ Add ``fname`` in a checklist for the filestore garbage collection. """
        # we use a spooldir: add an empty file in the subdirectory 'checklist'
        full_path = os.path.join(self._full_path('checklist'), fname)
        if not os.path.exists(full_path):
            dirname = os.path.dirname(full_path)
            if not os.path.isdir(dirname):
                with tools.ignore(OSError):
                    os.makedirs(dirname)
            open(full_path, 'ab').close()

    @api.model
    def _file_gc(self):
        """ Perform the garbage collection of the filestore. """
        if self._storage() != 'file':
            return

        # Continue in a new transaction. The LOCK statement below must be the
        # first one in the current transaction, otherwise the database snapshot
        # used by it may not contain the most recent changes made to the table
        # ir_attachment! Indeed, if concurrent transactions create attachments,
        # the LOCK statement will wait until those concurrent transactions end.
        # But this transaction will not see the new attachements if it has done
        # other requests before the LOCK (like the method _storage() above).
        cr = self._cr
        cr.commit()

        # prevent all concurrent updates on ir_attachment while collecting!
        cr.execute("LOCK ir_attachment IN SHARE MODE")

        # retrieve the file names from the checklist
        # (NOTE: the `_` loop variable shadows any module-level `_` alias
        # inside this method; no translation call appears below, so harmless)
        checklist = {}
        for dirpath, _, filenames in os.walk(self._full_path('checklist')):
            dirname = os.path.basename(dirpath)
            for filename in filenames:
                fname = "%s/%s" % (dirname, filename)
                checklist[fname] = os.path.join(dirpath, filename)

        # determine which files to keep among the checklist
        whitelist = set()
        for names in cr.split_for_in_conditions(checklist):
            cr.execute("SELECT store_fname FROM ir_attachment WHERE store_fname IN %s", [names])
            whitelist.update(row[0] for row in cr.fetchall())

        # remove garbage files, and clean up checklist
        removed = 0
        for fname, filepath in checklist.items():
            if fname not in whitelist:
                try:
                    os.unlink(self._full_path(fname))
                    removed += 1
                except (OSError, IOError):
                    _logger.info("_file_gc could not unlink %s", self._full_path(fname), exc_info=True)
            # drop the checklist marker whether or not the file was removed
            with tools.ignore(OSError):
                os.unlink(filepath)

        # commit to release the lock
        cr.commit()
        _logger.info("filestore gc %d checked, %d removed", len(checklist), removed)

    @api.depends('store_fname', 'db_datas')
    def _compute_datas(self):
        """Serve the binary payload from the filestore or the database column."""
        bin_size = self._context.get('bin_size')
        for attach in self:
            if attach.store_fname:
                attach.datas = self._file_read(attach.store_fname, bin_size)
            else:
                attach.datas = attach.db_datas

    def _inverse_datas(self):
        """Persist the binary payload to the configured storage and refresh the
        derived columns (size, checksum, index content)."""
        location = self._storage()
        for attach in self:
            # compute the fields that depend on datas
            value = attach.datas
            bin_data = base64.b64decode(value) if value else b''
            vals = {
                'file_size': len(bin_data),
                'checksum': self._compute_checksum(bin_data),
                'index_content': self._index(bin_data, attach.datas_fname, attach.mimetype),
                'store_fname': False,
                'db_datas': value,
            }
            if value and location != 'db':
                # save it to the filestore
                vals['store_fname'] = self._file_write(value, vals['checksum'])
                vals['db_datas'] = False

            # take current location in filestore to possibly garbage-collect it
            fname = attach.store_fname
            # write as superuser, as user probably does not have write access
            super(IrAttachment, attach.sudo()).write(vals)
            if fname:
                self._file_delete(fname)

    def _compute_checksum(self, bin_data):
        """ compute the checksum for the given datas
            :param bin_data : datas in its binary form
        """
        # an empty file has a checksum too (for caching)
        return hashlib.sha1(bin_data or b'').hexdigest()

    def _compute_mimetype(self, values):
        """ compute the mimetype of the given values
            :param values : dict of values to create or write an ir_attachment
            :return mime : string indicating the mimetype, or application/octet-stream by default
        """
        # precedence: explicit mimetype > filename guess > url guess > content sniff
        mimetype = None
        if values.get('mimetype'):
            mimetype = values['mimetype']
        if not mimetype and values.get('datas_fname'):
            mimetype = mimetypes.guess_type(values['datas_fname'])[0]
        if not mimetype and values.get('url'):
            mimetype = mimetypes.guess_type(values['url'])[0]
        if values.get('datas') and (not mimetype or mimetype == 'application/octet-stream'):
            mimetype = guess_mimetype(base64.b64decode(values['datas']))
        return mimetype or 'application/octet-stream'

    def _check_contents(self, values):
        """Neutralize potentially active (HTML/XML-like) content uploaded by
        non-admin users by forcing a text/plain mimetype."""
        mimetype = values['mimetype'] = self._compute_mimetype(values)
        xml_like = 'ht' in mimetype or 'xml' in mimetype  # hta, html, xhtml, etc.
        force_text = (xml_like and (not self.env.user._is_admin() or
                      self.env.context.get('attachments_mime_plainxml')))
        if force_text:
            values['mimetype'] = 'text/plain'
        return values

    @api.model
    def _index(self, bin_data, datas_fname, file_type):
        """ compute the index content of the given filename, or binary data.
            This is a python implementation of the unix command 'strings'.
            :param bin_data : datas in binary form
            :return index_content : string containing all the printable character of the binary data
        """
        index_content = False
        if file_type:
            index_content = file_type.split('/')[0]
            if index_content == 'text':  # compute index_content only for text type
                words = re.findall(b"[\x20-\x7E]{4,}", bin_data)
                index_content = b"\n".join(words).decode('ascii')
        return index_content

    name = fields.Char('Attachment Name', required=True)
    datas_fname = fields.Char('File Name')
    description = fields.Text('Description')
    res_name = fields.Char('Resource Name', compute='_compute_res_name', store=True)
    res_model = fields.Char('Resource Model', readonly=True,
                            help="The database object this attachment will be attached to.")
    res_field = fields.Char('Resource Field', readonly=True)
    res_id = fields.Integer('Resource ID', readonly=True,
                            help="The record id this is attached to.")
    create_date = fields.Datetime('Date Created', readonly=True)
    create_uid = fields.Many2one('res.users', string='Owner', readonly=True)
    company_id = fields.Many2one('res.company', string='Company', change_default=True,
                                 default=lambda self: self.env['res.company']._company_default_get('ir.attachment'))
    type = fields.Selection([('url', 'URL'), ('binary', 'File')],
                            string='Type', required=True, default='binary', change_default=True,
                            help="You can either upload a file from your computer or copy/paste an internet link to your file.")
    url = fields.Char('Url', index=True, size=1024)
    public = fields.Boolean('Is public document')

    # for external access
    access_token = fields.Char('Access Token', groups="base.group_user")

    # the field 'datas' is computed and may use the other fields below
    datas = fields.Binary(string='File Content', compute='_compute_datas', inverse='_inverse_datas')
    db_datas = fields.Binary('Database Data')
    store_fname = fields.Char('Stored Filename')
    file_size = fields.Integer('File Size', readonly=True)
    checksum = fields.Char("Checksum/SHA1", size=40, index=True, readonly=True)
    mimetype = fields.Char('Mime Type', readonly=True)
    index_content = fields.Text('Indexed Content', readonly=True, prefetch=False)

    @api.model_cr_context
    def _auto_init(self):
        # composite index speeds up the frequent (res_model, res_id) lookups
        res = super(IrAttachment, self)._auto_init()
        tools.create_index(self._cr, 'ir_attachment_res_idx',
                           self._table, ['res_model', 'res_id'])
        return res

    @api.model
    def check(self, mode, values=None):
        """Restricts the access to an ir.attachment, according to referred model
        In the 'document' module, it is overriden to relax this hard rule, since
        more complex ones apply there.
        """
        # collect the records to check (by model)
        model_ids = defaultdict(set)            # {model_name: set(ids)}
        require_employee = False
        if self:
            self._cr.execute('SELECT res_model, res_id, create_uid, public FROM ir_attachment WHERE id IN %s', [tuple(self.ids)])
            for res_model, res_id, create_uid, public in self._cr.fetchall():
                if public and mode == 'read':
                    continue
                if not (res_model and res_id):
                    # unattached attachment: only its creator gets free access
                    if create_uid != self._uid:
                        require_employee = True
                    continue
                model_ids[res_model].add(res_id)
        if values and values.get('res_model') and values.get('res_id'):
            model_ids[values['res_model']].add(values['res_id'])

        # check access rights on the records
        for res_model, res_ids in model_ids.items():
            # ignore attachments that are not attached to a resource anymore
            # when checking access rights (resource was deleted but attachment
            # was not)
            if res_model not in self.env:
                require_employee = True
                continue
            records = self.env[res_model].browse(res_ids).exists()
            if len(records) < len(res_ids):
                require_employee = True
            # For related models, check if we can write to the model, as unlinking
            # and creating attachments can be seen as an update to the model
            records.check_access_rights('write' if mode in ('create', 'unlink') else mode)
            records.check_access_rule(mode)

        if require_employee:
            if not (self.env.user._is_admin() or self.env.user.has_group('base.group_user')):
                raise AccessError(_("Sorry, you are not allowed to access this document."))

    @api.model
    def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
        """Search override: hides field-attached attachments by default and
        filters out attachments whose linked document the user cannot read."""
        # add res_field=False in domain if not present; the arg[0] trick below
        # works for domain items and '&'/'|'/'!' operators too
        if not any(arg[0] in ('id', 'res_field') for arg in args):
            args.insert(0, ('res_field', '=', False))

        ids = super(IrAttachment, self)._search(args, offset=offset, limit=limit, order=order,
                                                count=False, access_rights_uid=access_rights_uid)

        if self._uid == SUPERUSER_ID:
            # rules do not apply for the superuser
            return len(ids) if count else ids

        if not ids:
            return 0 if count else []

        # Work with a set, as list.remove() is prohibitive for large lists of documents
        # (takes 20+ seconds on a db with 100k docs during search_count()!)
        orig_ids = ids
        ids = set(ids)

        # For attachments, the permissions of the document they are attached to
        # apply, so we must remove attachments for which the user cannot access
        # the linked document.
        # Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs),
        # and the permissions are checked in super() and below anyway.
        model_attachments = defaultdict(lambda: defaultdict(set))   # {res_model: {res_id: set(ids)}}
        self._cr.execute("""SELECT id, res_model, res_id, public FROM ir_attachment WHERE id IN %s""", [tuple(ids)])
        for row in self._cr.dictfetchall():
            if not row['res_model'] or row['public']:
                continue
            # model_attachments = {res_model: {res_id: set(ids)}}
            model_attachments[row['res_model']][row['res_id']].add(row['id'])

        # To avoid multiple queries for each attachment found, checks are
        # performed in batch as much as possible.
        for res_model, targets in model_attachments.items():
            if res_model not in self.env:
                continue
            if not self.env[res_model].check_access_rights('read', False):
                # remove all corresponding attachment ids
                ids.difference_update(itertools.chain(*targets.values()))
                continue
            # filter ids according to what access rules permit
            target_ids = list(targets)
            allowed = self.env[res_model].with_context(active_test=False).search([('id', 'in', target_ids)])
            for res_id in set(target_ids).difference(allowed.ids):
                ids.difference_update(targets[res_id])

        # sort result according to the original sort ordering
        result = [id for id in orig_ids if id in ids]
        return len(result) if count else list(result)

    @api.multi
    def read(self, fields=None, load='_classic_read'):
        # enforce attachment-level access before standard read
        self.check('read')
        return super(IrAttachment, self).read(fields, load=load)

    @api.multi
    def write(self, vals):
        self.check('write', values=vals)
        # remove computed field depending of datas
        for field in ('file_size', 'checksum'):
            vals.pop(field, False)
        if 'mimetype' in vals or 'datas' in vals:
            vals = self._check_contents(vals)
        return super(IrAttachment, self).write(vals)

    @api.multi
    def copy(self, default=None):
        # duplicating an attachment requires write-level access on the source
        self.check('write')
        return super(IrAttachment, self).copy(default)

    @api.multi
    def unlink(self):
        self.check('unlink')

        # First delete in the database, *then* in the filesystem if the
        # database allowed it. Helps avoid errors when concurrent transactions
        # are deleting the same file, and some of the transactions are
        # rolled back by PostgreSQL (due to concurrent updates detection).
        to_delete = set(attach.store_fname for attach in self if attach.store_fname)
        res = super(IrAttachment, self).unlink()
        for file_path in to_delete:
            self._file_delete(file_path)

        return res

    @api.model
    def create(self, values):
        # remove computed field depending of datas
        for field in ('file_size', 'checksum'):
            values.pop(field, False)
        values = self._check_contents(values)
        self.browse().check('write', values=values)
        return super(IrAttachment, self).create(values)

    @api.one
    def generate_access_token(self):
        """Return the existing access token, or generate and store a new one."""
        if self.access_token:
            return self.access_token
        access_token = str(uuid.uuid4())
        self.write({'access_token': access_token})
        return access_token

    @api.model
    def action_get(self):
        return self.env['ir.actions.act_window'].for_xml_id('base', 'action_attachment')
class ApplicantEducation(models.Model):
    """Education history line attached to a recruitment applicant."""

    _name = "applicant.education"
    _description = "Applicant Education"
    _rec_name = "from_date"
    _order = "from_date"

    from_date = fields.Date(string='From Date')
    to_date = fields.Date(string='To Date')
    education_rank = fields.Char('Education Rank')
    school_name = fields.Char(string='School Name', size=256)
    grade = fields.Char('Education Field/Major')
    field = fields.Char(string='Major/Field of Education', size=128)
    illiterate = fields.Boolean('Illiterate')
    active = fields.Boolean(string='Active', default=True)
    applicant_id = fields.Many2one(
        'hr.applicant', 'Applicant Ref', ondelete='cascade')
    edu_type = fields.Selection(
        [('Local', 'Local'), ('Abroad', 'Abroad')],
        string='School Location', default="Local")
    country_id = fields.Many2one('res.country', 'Country')
    state_id = fields.Many2one('res.country.state', 'State')
    province = fields.Char("Province")

    @api.onchange('edu_type')
    def onchange_edu_type(self):
        """Reset location fields that do not apply to the chosen school location.

        BUGFIX: the previous implementation assigned ``abroad_country_id``,
        ``local_province_id`` and ``local_district_id`` — none of these fields
        exist on this model, so switching the school location raised an error.
        The reset is now mapped onto the fields actually declared here.
        """
        for rec in self:
            if rec.edu_type == 'Local':
                # local schooling: the foreign country is irrelevant
                rec.country_id = False
            else:
                # abroad schooling: drop the local administrative subdivisions
                rec.state_id = False
                rec.province = False

    @api.onchange('illiterate')
    def onchange_illiterate(self):
        """Blank out every education detail when the applicant is flagged illiterate."""
        for rec in self:
            rec.from_date = False
            rec.to_date = False
            rec.education_rank = ''
            rec.school_name = ''
            rec.grade = ''
            rec.field = ''
            rec.edu_type = ''
            rec.country_id = False
            rec.state_id = False
            rec.province = ''

    @api.model
    def create(self, vals):
        """Create the line, auto-linking it to the ``hr.applicant`` active in context."""
        if (self._context.get('active_model') == 'hr.applicant' and
                self._context.get('active_id')):
            vals.update({'applicant_id': self._context.get('active_id')})
        return super(ApplicantEducation, self).create(vals)

    @api.onchange('from_date', 'to_date')
    def onchange_date(self):
        """Validate the study period.

        ``to_date`` must be strictly in the past and after ``from_date``;
        on violation the field is reset and a client warning is returned.
        """
        # dates are DEFAULT_SERVER_DATE_FORMAT strings in this (old) API,
        # hence the strptime before comparing with today
        if self.to_date and datetime.strptime(
                self.to_date, DEFAULT_SERVER_DATE_FORMAT) >= datetime.today():
            warning = {'title': _('User Alert !'),
                       'message': _('To date must be less than today!')}
            self.to_date = False
            return {'warning': warning}
        if self.from_date and self.to_date and self.from_date > self.to_date:
            warning = {'title': _('User Alert !'),
                       'message': _('To Date must be greater than From Date !')}
            self.to_date = False
            return {'warning': warning}
class ApplicantMedicalDetails(models.Model):
    """Medical-examination record captured for a recruitment applicant."""

    _name = "hr.applicant.medical.details"
    _description = "Applicant Medical Details"
    _rec_name = 'medical_examination'

    medical_examination = fields.Char('Medical Examination')
    vital_sign = fields.Char('Vital sign')
    # examination date: defaults to today and is not editable afterwards
    date = fields.Date(
        'Date', default=fields.Date.context_today, readonly=True)
    doc_comment = fields.Char('Doctor’s Comments')
    # Per-organ findings: each is a simple Normal/Abnormal flag
    head_face_scalp = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')], 'Head, Face, Scalp')
    nose_sinuses = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')], 'Nose/Sinuses')
    mouth_throat = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')], 'Mouth/Throat')
    ears_tms = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')], 'Ears/TMs')
    eyes_pupils_ocular = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')],
        'Eyes/Pupils/Ocular Motility')
    heart_vascular_system = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')],
        'Heart/Vascular System')
    lungs = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')], 'Lungs')
    abdomen_hernia = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')], 'Abdomen/Hernia')
    msk_strengh = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')], 'MSK-Strength')
    neurological = fields.Selection(
        [('Abnormal', 'Abnormal'), ('Normal', 'Normal')],
        'Neurological (Reflexes, Sensation)')
    glasses_needed = fields.Boolean('Glasses Needed?')
    urine_drug_serene = fields.Selection(
        [('Negative', 'Negative'), ('Positive', 'Positive')],
        'Urine Drug Serene')
    # Yes/no questionnaire flags
    fit_for_full_duty = fields.Boolean('Fully Fit for Duty?')
    good_health = fields.Boolean('Good Health?')
    serious_illness = fields.Boolean('Series Illness or Disease?')
    broken_bones = fields.Boolean('Broken Bones or Surgery?')
    medications = fields.Boolean('Medications at this time?')
    serious_wound = fields.Boolean('Seriously Wounded?')
    allergic = fields.Boolean('Allergic to any medication?')
    epilepsy = fields.Boolean('Epilepsy')
    history_drug_use = fields.Boolean('Any History of drug use?')
    applicant_id = fields.Many2one(
        'hr.applicant', 'Applicant Ref', ondelete='cascade')
    active = fields.Boolean(string='Active', default=True)
    blood_name = fields.Selection(
        [('A', 'A'), ('B', 'B'), ('O', 'O'), ('AB', 'AB')], "Blood Type")
    # NOTE(review): label duplicates blood_name's "Blood Type"; this one is
    # presumably the Rh factor — confirm before relabelling.
    blood_type = fields.Selection([('+', '+'), ('-', '-')], 'Blood Type')

    @api.model
    def create(self, vals):
        """Create the record, auto-linking it to the ``hr.applicant`` active in context."""
        if (self._context.get('active_model') == 'hr.applicant' and
                self._context.get('active_id')):
            vals.update({'applicant_id': self._context.get('active_id')})
        return super(ApplicantMedicalDetails, self).create(vals)
class HelpdeskStage(models.Model):
    """Kanban stage for helpdesk tickets, optionally shared across teams."""

    _name = 'helpdesk.stage'
    _order = 'sequence, id'
    _description = 'Helpdesk Stage'

    def _default_team_ids(self):
        # pre-link the new stage (command 4) to the team it is created from, if any
        team_id = self.env.context.get('default_team_id')
        if team_id:
            return [(4, team_id, 0)]

    name = fields.Char('Stage Name', required=True, translate=True)
    description = fields.Text(translate=True)
    sequence = fields.Integer('Sequence', default=10)
    stage_type = fields.Selection([('draft', 'Draft'), ('new', 'New'),
                                   ('in_progress', 'In Progress'),
                                   ('done', 'Done')],
                                  string='Stage Type', required=True)
    is_close = fields.Boolean(
        'Closing Stage',
        help='Tickets in this stage are considered as done. This is used notably when '
             'computing SLAs and KPIs on tickets.')
    fold = fields.Boolean(
        'Folded in Kanban',
        help='This stage is folded in the kanban view when there are no records in that stage to display.')
    team_ids = fields.Many2many(
        'helpdesk.team', relation='team_stage_rel', string='Team',
        default=_default_team_ids,
        help='Specific team that uses this stage. Other teams will not be able to see or use this stage.')
    template_id = fields.Many2one(
        'mail.template', 'Email Template',
        domain="[('model', '=', 'helpdesk.ticket')]",
        help="Automated email sent to the ticket's customer when the ticket reaches this stage.")
    legend_blocked = fields.Char(
        'Red Kanban Label', default=lambda s: _('Blocked'), translate=True, required=True,
        help='Override the default value displayed for the blocked state for kanban selection, when the task or issue is in that stage.')
    legend_done = fields.Char(
        'Green Kanban Label', default=lambda s: _('Ready'), translate=True, required=True,
        help='Override the default value displayed for the done state for kanban selection, when the task or issue is in that stage.')
    legend_normal = fields.Char(
        'Grey Kanban Label', default=lambda s: _('In Progress'), translate=True, required=True,
        help='Override the default value displayed for the normal state for kanban selection, when the task or issue is in that stage.')

    def unlink(self):
        """Delete stages.

        When invoked from a team's context (``default_team_id``), stages shared
        with other teams are detached from that team (Many2many command 3)
        instead of being deleted, provided no ticket of that team still uses
        them; shared stages are then excluded from the actual unlink.
        """
        stages = self
        default_team_id = self.env.context.get('default_team_id')
        if default_team_id:
            # stages used by more than one team, including the current one
            shared_stages = self.filtered(
                lambda x: len(x.team_ids) > 1 and default_team_id in x.team_ids.ids)
            tickets = self.env['helpdesk.ticket'].with_context(
                active_test=False).search([('team_id', '=', default_team_id),
                                           ('stage_id', 'in', self.ids)])
            if shared_stages and not tickets:
                # no ticket of this team uses them: just detach the team
                shared_stages.write({'team_ids': [(3, default_team_id)]})
            stages = self.filtered(lambda x: x not in shared_stages)
        return super(HelpdeskStage, stages).unlink()
class ReportProjectTaskUser(models.Model):
    """Read-only task-analysis reporting model backed by a SQL view."""

    _name = "report.project.task.user"
    _description = "Tasks Analysis"
    _order = 'name desc, project_id'
    # SQL-view-backed model: the ORM creates no table (see init())
    _auto = False

    name = fields.Char(string='Task Title', readonly=True)
    user_id = fields.Many2one('res.users', string='Assigned To', readonly=True)
    date_assign = fields.Datetime(string='Assignment Date', readonly=True)
    date_end = fields.Datetime(string='Ending Date', readonly=True)
    date_deadline = fields.Date(string='Deadline', readonly=True)
    date_last_stage_update = fields.Datetime(string='Last Stage Update', readonly=True)
    project_id = fields.Many2one('project.project', string='Project', readonly=True)
    working_days_close = fields.Float(
        string='# Working Days to Close',
        digits=(16, 2), readonly=True, group_operator="avg",
        help="Number of Working Days to close the task")
    working_days_open = fields.Float(
        string='# Working Days to Assign',
        digits=(16, 2), readonly=True, group_operator="avg",
        help="Number of Working Days to Open the task")
    delay_endings_days = fields.Float(string='# Days to Deadline', digits=(16, 2), readonly=True)
    nbr = fields.Integer(
        '# of Tasks', readonly=True)  # TDE FIXME master: rename into nbr_tasks
    priority = fields.Selection([('0', 'Low'), ('1', 'Normal'), ('2', 'High')],
                                readonly=True, string="Priority")
    state = fields.Selection([('normal', 'In Progress'), ('blocked', 'Blocked'),
                              ('done', 'Ready for next stage')],
                             string='Kanban State', readonly=True)
    company_id = fields.Many2one('res.company', string='Company', readonly=True)
    partner_id = fields.Many2one('res.partner', string='Customer', readonly=True)
    stage_id = fields.Many2one('project.task.type', string='Stage', readonly=True)

    def _select(self):
        """Return the SELECT part of the view: one row per task, with a constant
        1 as ``nbr`` so aggregations count tasks."""
        select_str = """
             SELECT
                    (select 1 ) AS nbr,
                    t.id as id,
                    t.date_assign as date_assign,
                    t.date_end as date_end,
                    t.date_last_stage_update as date_last_stage_update,
                    t.date_deadline as date_deadline,
                    t.user_id,
                    t.project_id,
                    t.priority,
                    t.name as name,
                    t.company_id,
                    t.partner_id,
                    t.stage_id as stage_id,
                    t.kanban_state as state,
                    t.working_days_close as working_days_close,
                    t.working_days_open as working_days_open,
                    (extract('epoch' from (t.date_deadline-(now() at time zone 'UTC'))))/(3600*24) as delay_endings_days
        """
        return select_str

    def _group_by(self):
        """Return the GROUP BY clause matching the non-aggregated columns of _select()."""
        group_by_str = """
                GROUP BY
                    t.id,
                    t.create_date,
                    t.write_date,
                    t.date_assign,
                    t.date_end,
                    t.date_deadline,
                    t.date_last_stage_update,
                    t.user_id,
                    t.project_id,
                    t.priority,
                    t.name,
                    t.company_id,
                    t.partner_id,
                    t.stage_id
        """
        return group_by_str

    def init(self):
        """(Re)create the SQL view backing this model, restricted to active tasks."""
        tools.drop_view_if_exists(self._cr, self._table)
        self._cr.execute("""
            CREATE view %s as
              %s
              FROM project_task t
                WHERE t.active = 'true'
                %s
        """ % (self._table, self._select(), self._group_by()))
class ir_cron(models.Model):
    """ Model describing cron jobs (also called actions or tasks).
    """

    # TODO: perhaps in the future we could consider a flag on ir.cron jobs
    # that would cause database wake-up even if the database has not been
    # loaded yet or was already unloaded (e.g. 'force_db_wakeup' or something)
    # See also flectra.cron

    _name = "ir.cron"
    _order = 'cron_name'
    _description = 'Scheduled Actions'

    # delegate=True: each cron record owns (and inherits fields from) a server action
    ir_actions_server_id = fields.Many2one(
        'ir.actions.server', 'Server action',
        delegate=True, ondelete='restrict', required=True)
    cron_name = fields.Char('Name', related='ir_actions_server_id.name', store=True)
    user_id = fields.Many2one('res.users', string='Scheduler User',
                              default=lambda self: self.env.user, required=True)
    active = fields.Boolean(default=True)
    interval_number = fields.Integer(default=1, help="Repeat every x.")
    interval_type = fields.Selection([('minutes', 'Minutes'),
                                      ('hours', 'Hours'),
                                      ('days', 'Days'),
                                      ('weeks', 'Weeks'),
                                      ('months', 'Months')],
                                     string='Interval Unit', default='months')
    numbercall = fields.Integer(
        string='Number of Calls', default=1,
        help='How many times the method is called,\na negative number indicates no limit.')
    doall = fields.Boolean(
        string='Repeat Missed',
        help="Specify if missed occurrences should be executed when the server restarts.")
    nextcall = fields.Datetime(
        string='Next Execution Date', required=True,
        default=fields.Datetime.now,
        help="Next planned execution date for this job.")
    priority = fields.Integer(
        default=5,
        help='The priority of the job, as an integer: 0 means higher priority, 10 means lower priority.')

    @api.model
    def create(self, values):
        # tag the delegated server action so it is recognizable as cron-owned
        values['usage'] = 'ir_cron'
        return super(ir_cron, self).create(values)

    @api.multi
    def method_direct_trigger(self):
        """Run the jobs immediately, as each job's scheduler user."""
        self.check_access_rights('write')
        for cron in self:
            self.sudo(user=cron.user_id.id).ir_actions_server_id.run()
        return True

    @api.model
    def _handle_callback_exception(self, cron_name, server_action_id, job_id, job_exception):
        """ Method called when an exception is raised by a job.

        Simply logs the exception and rollback the transaction. """
        self._cr.rollback()

    @api.model
    def _callback(self, cron_name, server_action_id, job_id):
        """ Run the method associated to a given job. It takes care of logging
        and exception handling. Note that the user running the server action
        is the user calling this method. """
        try:
            if self.pool != self.pool.check_signaling():
                # the registry has changed, reload self in the new registry
                self.env.reset()
                self = self.env()[self._name]

            log_depth = (None if _logger.isEnabledFor(logging.DEBUG) else 1)
            flectra.netsvc.log(
                _logger, logging.DEBUG, 'cron.object.execute',
                (self._cr.dbname, self._uid, '*', cron_name, server_action_id),
                depth=log_depth)
            # timing is only measured when DEBUG logging is enabled
            start_time = False
            if _logger.isEnabledFor(logging.DEBUG):
                start_time = time.time()
            self.env['ir.actions.server'].browse(server_action_id).run()
            if start_time and _logger.isEnabledFor(logging.DEBUG):
                end_time = time.time()
                _logger.debug('%.3fs (cron %s, server action %d with uid %d)',
                              end_time - start_time, cron_name, server_action_id, self.env.uid)
            self.pool.signal_changes()
        except Exception as e:
            self.pool.reset_changes()
            _logger.exception(
                "Call from cron %s for server action #%s failed in Job #%s",
                cron_name, server_action_id, job_id)
            self._handle_callback_exception(cron_name, server_action_id, job_id, e)

    @classmethod
    def _process_job(cls, job_cr, job, cron_cr):
        """ Run a given job taking care of the repetition.

        :param job_cr: cursor to use to execute the job, safe to commit/rollback
        :param job: job to be run (as a dictionary).
        :param cron_cr: cursor holding lock on the cron job row, to use to
            update the next exec date, must not be committed/rolled back!
        """
        try:
            with api.Environment.manage():
                cron = api.Environment(job_cr, job['user_id'], {})[cls._name]
                # Use the user's timezone to compare and compute datetimes,
                # otherwise unexpected results may appear. For instance, adding
                # 1 month in UTC to July 1st at midnight in GMT+2 gives July 30
                # instead of August 1st!
                now = fields.Datetime.context_timestamp(cron, datetime.now())
                nextcall = fields.Datetime.context_timestamp(
                    cron, fields.Datetime.from_string(job['nextcall']))
                numbercall = job['numbercall']

                # catch up every missed occurrence up to now; with doall the
                # callback runs once per missed slot, otherwise only once
                ok = False
                while nextcall < now and numbercall:
                    if numbercall > 0:
                        numbercall -= 1
                    if not ok or job['doall']:
                        cron._callback(job['cron_name'], job['ir_actions_server_id'], job['id'])
                    if numbercall:
                        nextcall += _intervalTypes[job['interval_type']](
                            job['interval_number'])
                    ok = True
                addsql = ''
                if not numbercall:
                    # call budget exhausted: deactivate the job
                    addsql = ', active=False'
                cron_cr.execute(
                    "UPDATE ir_cron SET nextcall=%s, numbercall=%s" + addsql + " WHERE id=%s",
                    (fields.Datetime.to_string(
                        nextcall.astimezone(pytz.UTC)), numbercall, job['id']))
                cron.invalidate_cache()

        finally:
            job_cr.commit()
            cron_cr.commit()

    @classmethod
    def _process_jobs(cls, db_name):
        """ Try to process all cron jobs.

        This selects in database all the jobs that should be processed. It then
        tries to lock each of them and, if it succeeds, run the cron job (if it
        doesn't succeed, it means the job was already locked to be taken care
        of by another thread) and return.

        :raise BadVersion: if the version is different from the worker's
        :raise BadModuleState: if modules are to install/upgrade/remove
        """
        db = flectra.sql_db.db_connect(db_name)
        threading.current_thread().dbname = db_name
        try:
            with db.cursor() as cr:
                # Make sure the database has the same version as the code of
                # base and that no module must be installed/upgraded/removed
                cr.execute(
                    "SELECT latest_version FROM ir_module_module WHERE name=%s",
                    ['base'])
                (version, ) = cr.fetchone()
                cr.execute(
                    "SELECT COUNT(*) FROM ir_module_module WHERE state LIKE %s",
                    ['to %'])
                (changes, ) = cr.fetchone()
                if version is None:
                    raise BadModuleState()
                elif version != BASE_VERSION:
                    raise BadVersion()
                # Careful to compare timestamps with 'UTC' - everything is UTC as of v6.1.
                cr.execute("""SELECT * FROM ir_cron
                              WHERE numbercall != 0
                                  AND active AND nextcall <= (now() at time zone 'UTC')
                              ORDER BY priority""")
                jobs = cr.dictfetchall()

            if changes:
                if not jobs:
                    raise BadModuleState()
                # nextcall is never updated if the cron is not executed,
                # it is used as a sentinel value to check whether cron jobs
                # have been locked for a long time (stuck)
                parse = fields.Datetime.from_string
                oldest = min([parse(job['nextcall']) for job in jobs])
                if datetime.now() - oldest > MAX_FAIL_TIME:
                    flectra.modules.reset_modules_state(db_name)
                else:
                    raise BadModuleState()

            for job in jobs:
                lock_cr = db.cursor()
                try:
                    # Try to grab an exclusive lock on the job row from within the task transaction
                    # Restrict to the same conditions as for the search since the job may have already
                    # been run by an other thread when cron is running in multi thread
                    lock_cr.execute("""SELECT *
                                       FROM ir_cron
                                       WHERE numbercall != 0
                                          AND active
                                          AND nextcall <= (now() at time zone 'UTC')
                                          AND id=%s
                                       FOR UPDATE NOWAIT""",
                                    (job['id'], ), log_exceptions=False)
                    locked_job = lock_cr.fetchone()
                    if not locked_job:
                        _logger.debug(
                            "Job `%s` already executed by another process/thread. skipping it",
                            job['cron_name'])
                        continue
                    # Got the lock on the job row, run its code
                    _logger.info('Starting job `%s`.', job['cron_name'])
                    job_cr = db.cursor()
                    try:
                        registry = flectra.registry(db_name)
                        registry[cls._name]._process_job(job_cr, job, lock_cr)
                    except Exception:
                        _logger.exception(
                            'Unexpected exception while processing cron job %r', job)
                    finally:
                        job_cr.close()

                except psycopg2.OperationalError as e:
                    if e.pgcode == '55P03':
                        # Class 55: Object not in prerequisite state; 55P03: lock_not_available
                        _logger.debug(
                            'Another process/thread is already busy executing job `%s`, skipping it.',
                            job['cron_name'])
                        continue
                    else:
                        # Unexpected OperationalError
                        raise
                finally:
                    # we're exiting due to an exception while acquiring the lock
                    lock_cr.close()

        finally:
            if hasattr(threading.current_thread(), 'dbname'):
                del threading.current_thread().dbname

    @classmethod
    def _acquire_job(cls, db_name):
        """ Try to process all cron jobs.

        This selects in database all the jobs that should be processed. It then
        tries to lock each of them and, if it succeeds, run the cron job (if it
        doesn't succeed, it means the job was already locked to be taken care
        of by another thread) and return.

        This method hides most exceptions related to the database's version, the
        modules' state, and such.
        """
        try:
            cls._process_jobs(db_name)
        except BadVersion:
            _logger.warning(
                'Skipping database %s as its base version is not %s.',
                db_name, BASE_VERSION)
        except BadModuleState:
            _logger.warning(
                'Skipping database %s because of modules to install/upgrade/remove.',
                db_name)
        except psycopg2.ProgrammingError as e:
            if e.pgcode == '42P01':
                # Class 42 — Syntax Error or Access Rule Violation; 42P01: undefined_table
                # The table ir_cron does not exist; this is probably not an OpenERP database.
                _logger.warning(
                    'Tried to poll an undefined table on database %s.',
                    db_name)
            else:
                raise
        except Exception:
            _logger.warning('Exception in cron:', exc_info=True)

    @api.multi
    def _try_lock(self):
        """Try to grab a dummy exclusive write-lock to the rows with the given ids,
           to make sure a following write() or unlink() will not block due
           to a process currently executing those cron tasks"""
        try:
            self._cr.execute(
                """SELECT id FROM "%s" WHERE id IN %%s FOR UPDATE NOWAIT""" % self._table,
                [tuple(self.ids)], log_exceptions=False)
        except psycopg2.OperationalError:
            self._cr.rollback(
            )  # early rollback to allow translations to work for the user feedback
            raise UserError(
                _("Record cannot be modified right now: "
                  "This cron task is currently being executed and may not be modified "
                  "Please try again in a few minutes"))

    @api.multi
    def write(self, vals):
        # refuse the write while the job is being executed elsewhere
        self._try_lock()
        return super(ir_cron, self).write(vals)

    @api.multi
    def unlink(self):
        # refuse the unlink while the job is being executed elsewhere
        self._try_lock()
        return super(ir_cron, self).unlink()

    @api.multi
    def try_write(self, values):
        """Best-effort write: silently skip (returning False) if the rows are
        currently locked by a running job."""
        try:
            with self._cr.savepoint():
                self._cr.execute(
                    """SELECT id FROM "%s" WHERE id IN %%s FOR UPDATE NOWAIT""" % self._table,
                    [tuple(self.ids)], log_exceptions=False)
        except psycopg2.OperationalError:
            pass
        else:
            return super(ir_cron, self).write(values)
        return False

    @api.model
    def toggle(self, model, domain):
        """Activate the jobs iff ``model`` has records matching ``domain``."""
        active = bool(self.env[model].search_count(domain))
        return self.try_write({'active': active})
class PaymentAcquirerStripe(models.Model):
    """payment.acquirer extension registering Stripe as a provider."""
    _inherit = 'payment.acquirer'

    provider = fields.Selection(selection_add=[('stripe', 'Stripe')])
    stripe_secret_key = fields.Char(required_if_provider='stripe',
                                    groups='base.group_user')
    stripe_publishable_key = fields.Char(required_if_provider='stripe',
                                         groups='base.group_user')
    stripe_image_url = fields.Char(
        "Checkout Image URL", groups='base.group_user',
        help="A relative or absolute URL pointing to a square image of your "
             "brand or product. As defined in your Stripe profile. See: "
             "https://stripe.com/docs/checkout")

    @api.multi
    def stripe_form_generate_values(self, tx_values):
        """Return the rendering values of the Stripe checkout form.

        :param dict tx_values: generic transaction rendering values.
        :return: a copy of ``tx_values`` enriched with the Stripe-specific
            entries (company, amount, currency, partner contact details and
            the ``returndata`` redirection payload).
        """
        self.ensure_one()
        rendering_values = dict(tx_values)
        currency = tx_values.get('currency')
        country = tx_values.get('partner_country')
        stripe_specific = {
            'company': self.company_id.name,
            'amount': tx_values.get('amount'),
            'currency': (currency.name or '') if currency else '',
            'currency_id': (currency.id or '') if currency else '',
            'address_line1': tx_values.get('partner_address'),
            'address_city': tx_values.get('partner_city'),
            'address_country': (country.name or '') if country else '',
            'email': tx_values.get('partner_email'),
            'address_zip': tx_values.get('partner_zip'),
            'name': tx_values.get('partner_name'),
            'phone': tx_values.get('partner_phone'),
            # Stripe posts this back to us once the checkout is done.
            'returndata': rendering_values.pop('return_url', ''),
        }
        rendering_values.update(stripe_specific)
        return rendering_values

    @api.model
    def _get_stripe_api_url(self):
        """Return the (scheme-less) base URL of the Stripe REST API."""
        return 'api.stripe.com/v1'

    @api.model
    def stripe_s2s_form_process(self, data):
        """Create and return a payment.token from the raw s2s form data."""
        token_values = {
            field: data[field]
            for field in ('cc_number', 'cc_holder_name', 'cc_expiry',
                          'cc_brand', 'cvc')
        }
        token_values['acquirer_id'] = int(data['acquirer_id'])
        token_values['partner_id'] = int(data['partner_id'])
        return self.env['payment.token'].sudo().create(token_values)

    @api.multi
    def stripe_s2s_form_validate(self, data):
        """Return True when every mandatory card field is present in data."""
        self.ensure_one()
        mandatory = ("cc_number", "cvc", "cc_holder_name", "cc_expiry",
                     "cc_brand")
        return all(data.get(field_name) for field_name in mandatory)

    def _get_feature_support(self):
        """Get advanced feature support by provider.

        Each provider should add its technical in the corresponding key for
        the following features:
            * fees: support payment fees computations
            * authorize: support authorizing payment (separates authorization
              and capture)
            * tokenize: support saving payment data in a payment.tokenize
              object
        """
        res = super(PaymentAcquirerStripe, self)._get_feature_support()
        res['tokenize'].append('stripe')
        return res
class sale_order_line(models.Model):
    _inherit = 'sale.order.line'

    @api.depends('state', 'product_uom_qty', 'qty_delivered', 'qty_to_invoice',
                 'qty_invoiced')
    def _compute_invoice_status(self):
        """ Compute the invoice status of a SO line. Possible statuses:
        - no: if the SO is not in status 'sale' or 'done', we consider that
          there is nothing to invoice. This is also the default value if the
          conditions of no other status is met.
        - to invoice: we refer to the quantity to invoice of the line. Refer
          to method `_get_to_invoice_qty()` for more information on how this
          quantity is calculated.
        - upselling: this is possible only for a product invoiced on ordered
          quantities for which we delivered more than expected. This could
          arise if, for example, a project took more time than expected but
          we decided not to invoice the extra cost to the client. This occurs
          only in state 'sale', so that when a SO is set to done, the
          upselling opportunity is removed from the list.
        - invoiced: the quantity invoiced is larger or equal to the quantity
          ordered.
        """
        precision = self.env['decimal.precision'].precision_get(
            'Product Unit of Measure')
        for line in self:
            # The invoicing policy set on the sale order takes precedence;
            # when the order does not define one, fall back on the product's
            # policy. The two original copy/pasted branches only differed by
            # this lookup, so they are merged here.
            policy = line.order_id.invoice_policy or line.product_id.invoice_policy
            if line.state not in ('sale', 'done'):
                line.invoice_status = 'no'
            elif not float_is_zero(line.qty_to_invoice,
                                   precision_digits=precision):
                line.invoice_status = 'to invoice'
            elif line.state == 'sale' and policy == 'order' and\
                    float_compare(line.qty_delivered, line.product_uom_qty,
                                  precision_digits=precision) == 1:
                line.invoice_status = 'upselling'
            elif float_compare(line.qty_invoiced, line.product_uom_qty,
                               precision_digits=precision) >= 0:
                line.invoice_status = 'invoiced'
            else:
                line.invoice_status = 'no'

    @api.depends('qty_invoiced', 'qty_delivered', 'product_uom_qty',
                 'order_id.state')
    def _get_to_invoice_qty(self):
        """ Compute the quantity to invoice. If the invoice policy is order,
        the quantity to invoice is calculated from the ordered quantity.
        Otherwise, the quantity delivered is used.
        """
        for line in self:
            # Same order-then-product precedence as _compute_invoice_status.
            policy = line.order_id.invoice_policy or line.product_id.invoice_policy
            if line.order_id.state in ['sale', 'done']:
                if policy == 'order':
                    line.qty_to_invoice = line.product_uom_qty - line.qty_invoiced
                elif line.product_id.type == 'service':
                    # Services invoiced on delivery: invoice up to the ordered
                    # quantity first, then any extra delivered quantity.
                    if line.product_uom_qty - line.qty_invoiced > 0.0:
                        line.qty_to_invoice = line.product_uom_qty - line.qty_invoiced
                    else:
                        line.qty_to_invoice = line.qty_delivered - line.qty_invoiced
                # NOTE(review): non-service products invoiced on delivery are
                # not reassigned here, mirroring the original code — confirm
                # this is intended.
            else:
                line.qty_to_invoice = 0

    invoice_status = fields.Selection(
        [('upselling', 'Upselling Opportunity'),
         ('invoiced', 'Fully Invoiced'),
         ('to invoice', 'To Invoice'),
         ('no', 'Nothing to Invoice')],
        string='Invoice Status', compute='_compute_invoice_status',
        store=True, readonly=True, default='no')
    qty_to_invoice = fields.Float(
        compute='_get_to_invoice_qty', string='To Invoice', store=True,
        readonly=True, digits=dp.get_precision('Product Unit of Measure'),
        default=0.0)
class LandedCost(models.Model):
    """Landed cost sheet: extra costs (freight, customs, ...) spread over the
    stock moves of the selected pickings and posted as a journal entry."""
    _name = 'stock.landed.cost'
    _description = 'Stock Landed Cost'
    _inherit = 'mail.thread'

    name = fields.Char('Name', default=lambda self: _('New'), copy=False,
                       readonly=True, track_visibility='always')
    date = fields.Date('Date', default=fields.Date.context_today, copy=False,
                       required=True, states={'done': [('readonly', True)]},
                       track_visibility='onchange')
    picking_ids = fields.Many2many('stock.picking', string='Pickings',
                                   copy=False,
                                   states={'done': [('readonly', True)]})
    cost_lines = fields.One2many('stock.landed.cost.lines', 'cost_id',
                                 'Cost Lines', copy=True,
                                 states={'done': [('readonly', True)]})
    valuation_adjustment_lines = fields.One2many(
        'stock.valuation.adjustment.lines', 'cost_id',
        'Valuation Adjustments', states={'done': [('readonly', True)]})
    description = fields.Text('Item Description',
                              states={'done': [('readonly', True)]})
    amount_total = fields.Float('Total', compute='_compute_total_amount',
                                digits=0, store=True,
                                track_visibility='always')
    state = fields.Selection([('draft', 'Draft'), ('done', 'Posted'),
                              ('cancel', 'Cancelled')], 'State',
                             default='draft', copy=False, readonly=True,
                             track_visibility='onchange')
    account_move_id = fields.Many2one('account.move', 'Journal Entry',
                                      copy=False, readonly=True)
    account_journal_id = fields.Many2one('account.journal', 'Account Journal',
                                         required=True,
                                         states={'done': [('readonly', True)]})

    @api.one
    @api.depends('cost_lines.price_unit')
    def _compute_total_amount(self):
        # The sheet total is the plain sum of its cost lines.
        self.amount_total = sum(line.price_unit for line in self.cost_lines)

    @api.model
    def create(self, vals):
        # Assign a sequence-generated reference when none was provided.
        if vals.get('name', _('New')) == _('New'):
            vals['name'] = self.env['ir.sequence'].next_by_code(
                'stock.landed.cost')
        return super(LandedCost, self).create(vals)

    @api.multi
    def unlink(self):
        # Cancel first: button_cancel() raises for posted sheets, so a
        # validated landed cost can never be deleted.
        self.button_cancel()
        return super(LandedCost, self).unlink()

    @api.multi
    def _track_subtype(self, init_values):
        # Use the dedicated subtype when the sheet gets posted.
        if 'state' in init_values and self.state == 'done':
            return 'stock_landed_costs.mt_stock_landed_cost_open'
        return super(LandedCost, self)._track_subtype(init_values)

    @api.multi
    def button_cancel(self):
        if any(cost.state == 'done' for cost in self):
            raise UserError(
                _('Validated landed costs cannot be cancelled, but you could create negative landed costs to reverse them'
                  ))
        return self.write({'state': 'cancel'})

    @api.multi
    def button_validate(self):
        """Post the landed costs: spread the additional costs over the
        impacted stock moves and create/post the journal entry.

        :raise UserError: for non-draft sheets, missing valuation lines, or
            when the adjustment lines do not sum up to the cost lines.
        """
        if any(cost.state != 'draft' for cost in self):
            raise UserError(_('Only draft landed costs can be validated'))
        if any(not cost.valuation_adjustment_lines for cost in self):
            raise UserError(
                _('No valuation adjustments lines. You should maybe recompute the landed costs.'
                  ))
        if not self._check_sum():
            raise UserError(
                _('Cost and adjustments lines do not match. You should maybe recompute the landed costs.'
                  ))

        for cost in self:
            move = self.env['account.move']
            move_vals = {
                'journal_id': cost.account_journal_id.id,
                'date': cost.date,
                'ref': cost.name,
                'line_ids': [],
            }
            for line in cost.valuation_adjustment_lines.filtered(
                    lambda line: line.move_id):
                # Prorate the value at what's still in stock
                cost_to_add = (
                    line.move_id.remaining_qty /
                    line.move_id.product_qty) * line.additional_landed_cost

                new_landed_cost_value = line.move_id.landed_cost_value + \
                    line.additional_landed_cost
                line.move_id.write({
                    'landed_cost_value': new_landed_cost_value,
                    'value': line.move_id.value + cost_to_add,
                    'remaining_value':
                        line.move_id.remaining_value + cost_to_add,
                    'price_unit': (line.move_id.value + cost_to_add) /
                        line.move_id.product_qty,
                })
                # `remaining_qty` is negative if the move is out and delivered
                # products that were not in stock.
                qty_out = 0
                if line.move_id._is_in():
                    qty_out = line.move_id.product_qty - \
                        line.move_id.remaining_qty
                elif line.move_id._is_out():
                    qty_out = line.move_id.product_qty
                move_vals['line_ids'] += line._create_accounting_entries(
                    move, qty_out)

            move = move.create(move_vals)
            cost.write({'state': 'done', 'account_move_id': move.id})
            move.post()
        return True

    def _check_sum(self):
        """ Check if each cost line its valuation lines sum to the correct
        amount and if the overall total amount is correct also """
        prec_digits = self.env['decimal.precision'].precision_get('Account')
        for landed_cost in self:
            # Overall consistency: adjustments must add up to the sheet total.
            total_amount = sum(
                landed_cost.valuation_adjustment_lines.mapped(
                    'additional_landed_cost'))
            if not tools.float_compare(total_amount,
                                       landed_cost.amount_total,
                                       precision_digits=prec_digits) == 0:
                return False

            # Per cost line: its valuation lines must add up to its amount.
            val_to_cost_lines = defaultdict(lambda: 0.0)
            for val_line in landed_cost.valuation_adjustment_lines:
                val_to_cost_lines[
                    val_line.cost_line_id] += val_line.additional_landed_cost
            if any(
                    tools.float_compare(cost_line.price_unit, val_amount,
                                        precision_digits=prec_digits) != 0
                    for cost_line, val_amount in val_to_cost_lines.items()):
                return False
        return True

    def get_valuation_lines(self):
        """Build the valuation-line values for each eligible stock move of
        the linked pickings.

        :return: list of dicts suitable for creating
            stock.valuation.adjustment.lines records.
        :raise UserError: when pickings are selected but none of their moves
            is valuated in real time with the FIFO cost method.
        """
        lines = []
        for move in self.mapped('picking_ids').mapped('move_lines'):
            # it doesn't make sense to make a landed cost for a product that
            # isn't set as being valuated in real time at real cost
            if move.product_id.valuation != 'real_time' or move.product_id.cost_method != 'fifo':
                continue
            vals = {
                'product_id': move.product_id.id,
                'move_id': move.id,
                'quantity': move.product_qty,
                'former_cost': move.value,
                'weight': move.product_id.weight * move.product_qty,
                'volume': move.product_id.volume * move.product_qty
            }
            lines.append(vals)

        if not lines and self.mapped('picking_ids'):
            raise UserError(
                _('The selected picking does not contain any move that would be impacted by landed costs. Landed costs are only possible for products configured in real time valuation with real price costing method. Please make sure it is the case, or you selected the correct picking'
                  ))
        return lines

    @api.multi
    def compute_landed_cost(self):
        """(Re)generate the valuation adjustment lines and dispatch each cost
        line over them according to its split method."""
        AdjustementLines = self.env['stock.valuation.adjustment.lines']
        # Start from a clean slate: drop previously computed adjustments.
        AdjustementLines.search([('cost_id', 'in', self.ids)]).unlink()

        digits = dp.get_precision('Product Price')(self._cr)
        towrite_dict = {}
        for cost in self.filtered(lambda cost: cost.picking_ids):
            total_qty = 0.0
            total_cost = 0.0
            total_weight = 0.0
            total_volume = 0.0
            total_line = 0.0
            all_val_line_values = cost.get_valuation_lines()
            for val_line_values in all_val_line_values:
                # One adjustment line is created per (move, cost line) pair.
                for cost_line in cost.cost_lines:
                    val_line_values.update({
                        'cost_id': cost.id,
                        'cost_line_id': cost_line.id
                    })
                    self.env['stock.valuation.adjustment.lines'].create(
                        val_line_values)
                total_qty += val_line_values.get('quantity', 0.0)
                total_weight += val_line_values.get('weight', 0.0)
                total_volume += val_line_values.get('volume', 0.0)

                former_cost = val_line_values.get('former_cost', 0.0)
                # round this because former_cost on the valuation lines is
                # also rounded
                total_cost += tools.float_round(
                    former_cost,
                    precision_digits=digits[1]) if digits else former_cost

                total_line += 1

            for line in cost.cost_lines:
                value_split = 0.0
                for valuation in cost.valuation_adjustment_lines:
                    value = 0.0
                    if valuation.cost_line_id and valuation.cost_line_id.id == line.id:
                        # Share of this valuation line, depending on the
                        # chosen split method (equal split is the fallback).
                        if line.split_method == 'by_quantity' and total_qty:
                            per_unit = (line.price_unit / total_qty)
                            value = valuation.quantity * per_unit
                        elif line.split_method == 'by_weight' and total_weight:
                            per_unit = (line.price_unit / total_weight)
                            value = valuation.weight * per_unit
                        elif line.split_method == 'by_volume' and total_volume:
                            per_unit = (line.price_unit / total_volume)
                            value = valuation.volume * per_unit
                        elif line.split_method == 'equal':
                            value = (line.price_unit / total_line)
                        elif line.split_method == 'by_current_cost_price' and total_cost:
                            per_unit = (line.price_unit / total_cost)
                            value = valuation.former_cost * per_unit
                        else:
                            value = (line.price_unit / total_line)

                        if digits:
                            value = tools.float_round(
                                value, precision_digits=digits[1],
                                rounding_method='UP')
                        # Cap each rounded-up chunk by what remains to
                        # dispatch so the split sums exactly to the cost line
                        # amount (max instead of min for negative costs).
                        fnc = min if line.price_unit > 0 else max
                        value = fnc(value, line.price_unit - value_split)
                        value_split += value

                        if valuation.id not in towrite_dict:
                            towrite_dict[valuation.id] = value
                        else:
                            towrite_dict[valuation.id] += value
        for key, value in towrite_dict.items():
            AdjustementLines.browse(key).write(
                {'additional_landed_cost': value})
        return True
class Applicant(models.Model):
    _name = "hr.applicant"
    _description = "Applicant"
    _order = "priority desc, id desc"
    _inherit = ['mail.thread.cc', 'mail.activity.mixin', 'utm.mixin']

    name = fields.Char("Subject / Application Name", required=True,
                       help="Email subject for applications sent via email")
    active = fields.Boolean(
        "Active", default=True,
        help="If the active field is set to false, it will allow you to hide the case without removing it.")
    description = fields.Text("Description")
    email_from = fields.Char("Email", size=128, help="Applicant email",
                             compute='_compute_partner_phone_email',
                             inverse='_inverse_partner_email', store=True)
    probability = fields.Float("Probability")
    partner_id = fields.Many2one('res.partner', "Contact", copy=False)
    create_date = fields.Datetime("Creation Date", readonly=True, index=True)
    stage_id = fields.Many2one(
        'hr.recruitment.stage', 'Stage', ondelete='restrict', tracking=True,
        compute='_compute_stage', store=True, readonly=False,
        domain="['|', ('job_ids', '=', False), ('job_ids', '=', job_id)]",
        copy=False, index=True, group_expand='_read_group_stage_ids')
    last_stage_id = fields.Many2one(
        'hr.recruitment.stage', "Last Stage",
        help="Stage of the applicant before being in the current stage. Used for lost cases analysis.")
    categ_ids = fields.Many2many('hr.applicant.category', string="Tags")
    company_id = fields.Many2one('res.company', "Company",
                                 compute='_compute_company', store=True,
                                 readonly=False, tracking=True)
    user_id = fields.Many2one('res.users', "Recruiter",
                              compute='_compute_user', tracking=True,
                              store=True, readonly=False)
    date_closed = fields.Datetime("Closed", compute='_compute_date_closed',
                                  store=True, index=True)
    date_open = fields.Datetime("Assigned", readonly=True, index=True)
    date_last_stage_update = fields.Datetime("Last Stage Update", index=True,
                                             default=fields.Datetime.now)
    priority = fields.Selection(AVAILABLE_PRIORITIES, "Appreciation",
                                default='0')
    job_id = fields.Many2one(
        'hr.job', "Applied Job",
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        tracking=True)
    salary_proposed_extra = fields.Char(
        "Proposed Salary Extra",
        help="Salary Proposed by the Organisation, extra advantages",
        tracking=True)
    salary_expected_extra = fields.Char(
        "Expected Salary Extra",
        help="Salary Expected by Applicant, extra advantages",
        tracking=True)
    salary_proposed = fields.Float("Proposed Salary", group_operator="avg",
                                   help="Salary Proposed by the Organisation",
                                   tracking=True)
    salary_expected = fields.Float("Expected Salary", group_operator="avg",
                                   help="Salary Expected by Applicant",
                                   tracking=True)
    availability = fields.Date(
        "Availability",
        help="The date at which the applicant will be available to start working",
        tracking=True)
    partner_name = fields.Char("Applicant's Name")
    partner_phone = fields.Char("Phone", size=32,
                                compute='_compute_partner_phone_email',
                                inverse='_inverse_partner_phone', store=True)
    partner_mobile = fields.Char("Mobile", size=32,
                                 compute='_compute_partner_phone_email',
                                 inverse='_inverse_partner_mobile',
                                 store=True)
    type_id = fields.Many2one('hr.recruitment.degree', "Degree")
    department_id = fields.Many2one(
        'hr.department', "Department", compute='_compute_department',
        store=True, readonly=False,
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        tracking=True)
    day_open = fields.Float(compute='_compute_day', string="Days to Open",
                            compute_sudo=True)
    day_close = fields.Float(compute='_compute_day', string="Days to Close",
                             compute_sudo=True)
    delay_close = fields.Float(compute="_compute_day",
                               string='Delay to Close', readonly=True,
                               group_operator="avg",
                               help="Number of days to close", store=True)
    color = fields.Integer("Color Index", default=0)
    emp_id = fields.Many2one('hr.employee', string="Employee",
                             help="Employee linked to the applicant.",
                             copy=False)
    user_email = fields.Char(related='user_id.email', string="User Email",
                             readonly=True)
    attachment_number = fields.Integer(compute='_get_attachment_number',
                                       string="Number of Attachments")
    employee_name = fields.Char(related='emp_id.name', string="Employee Name",
                                readonly=False, tracking=False)
    attachment_ids = fields.One2many(
        'ir.attachment', 'res_id',
        domain=[('res_model', '=', 'hr.applicant')], string='Attachments')
    kanban_state = fields.Selection(
        [('normal', 'Grey'), ('done', 'Green'), ('blocked', 'Red')],
        string='Kanban State', copy=False, default='normal', required=True)
    legend_blocked = fields.Char(related='stage_id.legend_blocked',
                                 string='Kanban Blocked')
    legend_done = fields.Char(related='stage_id.legend_done',
                              string='Kanban Valid')
    legend_normal = fields.Char(related='stage_id.legend_normal',
                                string='Kanban Ongoing')
    application_count = fields.Integer(compute='_compute_application_count',
                                       help='Applications with the same email')
    meeting_count = fields.Integer(compute='_compute_meeting_count',
                                   help='Meeting Count')
    refuse_reason_id = fields.Many2one('hr.applicant.refuse.reason',
                                       string='Refuse Reason', tracking=True)

    @api.depends('date_open', 'date_closed')
    def _compute_day(self):
        """Compute the assignation/closing KPIs, in fractional days."""
        for applicant in self:
            if applicant.date_open:
                date_create = applicant.create_date
                date_open = applicant.date_open
                applicant.day_open = (
                    date_open - date_create).total_seconds() / (24.0 * 3600)
            else:
                applicant.day_open = False

            if applicant.date_closed:
                date_create = applicant.create_date
                date_closed = applicant.date_closed
                applicant.day_close = (
                    date_closed - date_create).total_seconds() / (24.0 * 3600)
                applicant.delay_close = applicant.day_close - applicant.day_open
            else:
                applicant.day_close = False
                applicant.delay_close = False

    @api.depends('email_from')
    def _compute_application_count(self):
        """Count the OTHER applications sharing this applicant's email."""
        application_data = self.env['hr.applicant'].with_context(
            active_test=False).read_group(
                [('email_from', 'in', list(set(self.mapped('email_from'))))],
                ['email_from'], ['email_from'])
        application_data_mapped = dict(
            (data['email_from'], data['email_from_count'])
            for data in application_data)

        applicants = self.filtered(lambda applicant: applicant.email_from)
        for applicant in applicants:
            # Subtract 1 so the current application is not counted.
            applicant.application_count = application_data_mapped.get(
                applicant.email_from, 1) - 1
        (self - applicants).application_count = False

    def _compute_meeting_count(self):
        """Count the calendar events linked to each applicant."""
        if self.ids:
            meeting_data = self.env['calendar.event'].sudo().read_group(
                [('applicant_id', 'in', self.ids)], ['applicant_id'],
                ['applicant_id'])
            mapped_data = {
                m['applicant_id'][0]: m['applicant_id_count']
                for m in meeting_data
            }
        else:
            mapped_data = dict()
        for applicant in self:
            applicant.meeting_count = mapped_data.get(applicant.id, 0)

    def _get_attachment_number(self):
        """Count the attachments linked to each applicant."""
        read_group_res = self.env['ir.attachment'].read_group(
            [('res_model', '=', 'hr.applicant'), ('res_id', 'in', self.ids)],
            ['res_id'], ['res_id'])
        attach_data = dict(
            (res['res_id'], res['res_id_count']) for res in read_group_res)
        for record in self:
            record.attachment_number = attach_data.get(record.id, 0)

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        # retrieve job_id from the context and write the domain: ids +
        # contextual columns (job or default)
        job_id = self._context.get('default_job_id')
        search_domain = [('job_ids', '=', False)]
        if job_id:
            search_domain = ['|', ('job_ids', '=', job_id)] + search_domain
        if stages:
            search_domain = ['|', ('id', 'in', stages.ids)] + search_domain

        stage_ids = stages._search(search_domain, order=order,
                                   access_rights_uid=SUPERUSER_ID)
        return stages.browse(stage_ids)

    @api.depends('job_id', 'department_id')
    def _compute_company(self):
        """Default the company from the department, then the job."""
        for applicant in self:
            company_id = False
            if applicant.department_id:
                company_id = applicant.department_id.company_id.id
            if not company_id and applicant.job_id:
                company_id = applicant.job_id.company_id.id
            applicant.company_id = company_id or self.env.company.id

    @api.depends('job_id')
    def _compute_department(self):
        for applicant in self:
            applicant.department_id = applicant.job_id.department_id.id

    @api.depends('job_id')
    def _compute_stage(self):
        """Default the stage to the first unfolded stage of the job."""
        for applicant in self:
            if applicant.job_id:
                if not applicant.stage_id:
                    stage_ids = self.env['hr.recruitment.stage'].search(
                        ['|', ('job_ids', '=', False),
                         ('job_ids', '=', applicant.job_id.id),
                         ('fold', '=', False)],
                        order='sequence asc', limit=1).ids
                    applicant.stage_id = stage_ids[0] if stage_ids else False
            else:
                applicant.stage_id = False

    @api.depends('job_id')
    def _compute_user(self):
        for applicant in self:
            applicant.user_id = applicant.job_id.user_id.id or self.env.uid

    @api.depends('partner_id')
    def _compute_partner_phone_email(self):
        """Mirror the contact's coordinates on the applicant."""
        for applicant in self:
            applicant.partner_phone = applicant.partner_id.phone
            applicant.partner_mobile = applicant.partner_id.mobile
            applicant.email_from = applicant.partner_id.email

    def _inverse_partner_email(self):
        # Only fill in a missing email on the contact; never overwrite it.
        for applicant in self.filtered(
                lambda a: a.partner_id and a.email_from and
                not a.partner_id.email):
            applicant.partner_id.email = applicant.email_from

    def _inverse_partner_phone(self):
        for applicant in self.filtered(
                lambda a: a.partner_id and a.partner_phone and
                not a.partner_id.phone):
            applicant.partner_id.phone = applicant.partner_phone

    def _inverse_partner_mobile(self):
        for applicant in self.filtered(
                lambda a: a.partner_id and a.partner_mobile and
                not a.partner_id.mobile):
            applicant.partner_id.mobile = applicant.partner_mobile

    @api.depends('stage_id')
    def _compute_date_closed(self):
        """An applicant is closed when it reaches a folded stage."""
        for applicant in self:
            if applicant.stage_id and applicant.stage_id.fold:
                # NOTE(review): `fields.datetime` (lowercase) presumably is
                # the stdlib datetime class re-exported by the fields module;
                # `fields.Datetime.now()` is the conventional spelling —
                # confirm.
                applicant.date_closed = fields.datetime.now()
            else:
                applicant.date_closed = False

    @api.model
    def create(self, vals):
        if vals.get('department_id'
                    ) and not self._context.get('default_department_id'):
            self = self.with_context(
                default_department_id=vals.get('department_id'))
        if vals.get('user_id'):
            vals['date_open'] = fields.Datetime.now()
        if vals.get('email_from'):
            vals['email_from'] = vals['email_from'].strip()
        return super(Applicant, self).create(vals)

    def write(self, vals):
        """Override to maintain date_open, last_stage_id and kanban_state."""
        # user_id change: update date_open
        if vals.get('user_id'):
            vals['date_open'] = fields.Datetime.now()
        if vals.get('email_from'):
            vals['email_from'] = vals['email_from'].strip()
        # stage_id: track last stage before update
        if 'stage_id' in vals:
            vals['date_last_stage_update'] = fields.Datetime.now()
            if 'kanban_state' not in vals:
                vals['kanban_state'] = 'normal'
            for applicant in self:
                vals['last_stage_id'] = applicant.stage_id.id
                # BUGFIX: write on the single `applicant`, not on the whole
                # recordset `self` — the original rewrote every record on
                # each iteration, clobbering everyone's last_stage_id with
                # the last applicant's stage.
                res = super(Applicant, applicant).write(vals)
        else:
            res = super(Applicant, self).write(vals)
        return res

    def get_empty_list_help(self, help):
        """Customize the empty-list help with the job's incoming email alias."""
        if 'active_id' in self.env.context and self.env.context.get(
                'active_model') == 'hr.job':
            alias_id = self.env['hr.job'].browse(
                self.env.context['active_id']).alias_id
        else:
            alias_id = False

        nocontent_values = {
            'help_title': _('No application yet'),
            'para_1': _('Let people apply by email to save time.'),
            'para_2': _('Attachments, like resumes, get indexed automatically.'),
        }
        nocontent_body = """
<p class="o_view_nocontent_empty_folder">%(help_title)s</p>
<p>%(para_1)s<br/>%(para_2)s</p>"""

        if alias_id and alias_id.alias_domain and alias_id.alias_name:
            email = alias_id.display_name
            email_link = "<a href='mailto:%s'>%s</a>" % (email, email)
            nocontent_values['email_link'] = email_link
            nocontent_body += """<p class="o_copy_paste_email">%(email_link)s</p>"""

        return nocontent_body % nocontent_values

    def action_makeMeeting(self):
        """ This opens Meeting's calendar view to schedule meeting on current applicant
            @return: Dictionary value for created Meeting view
        """
        self.ensure_one()
        partners = self.partner_id | self.user_id.partner_id | \
            self.department_id.manager_id.user_id.partner_id

        category = self.env.ref('hr_recruitment.categ_meet_interview')
        res = self.env['ir.actions.act_window']._for_xml_id(
            'calendar.action_calendar_event')
        res['context'] = {
            'default_partner_ids': partners.ids,
            'default_user_id': self.env.uid,
            'default_name': self.name,
            'default_categ_ids': category and [category.id] or False,
        }
        return res

    def action_get_attachment_tree_view(self):
        """Open the attachments of the selected applicants."""
        action = self.env['ir.actions.act_window']._for_xml_id(
            'base.action_attachment')
        action['context'] = {
            'default_res_model': self._name,
            'default_res_id': self.ids[0]
        }
        action['domain'] = str(
            ['&', ('res_model', '=', self._name), ('res_id', 'in', self.ids)])
        action['search_view_id'] = (self.env.ref(
            'hr_recruitment.ir_attachment_view_search_inherit_hr_recruitment'
        ).id, )
        return action

    def action_applications_email(self):
        """Open all applications (archived included) sharing these emails."""
        return {
            'type': 'ir.actions.act_window',
            'name': _('Applications'),
            'res_model': self._name,
            'view_mode': 'kanban,tree,form,pivot,graph,calendar,activity',
            'domain': [('email_from', 'in', self.mapped('email_from'))],
            'context': {
                'active_test': False
            },
        }

    def _track_template(self, changes):
        res = super(Applicant, self)._track_template(changes)
        applicant = self[0]
        # Stage changes may trigger the stage's configured email template.
        if 'stage_id' in changes and applicant.stage_id.template_id:
            res['stage_id'] = (applicant.stage_id.template_id, {
                'auto_delete_message': True,
                'subtype_id':
                    self.env['ir.model.data'].xmlid_to_res_id('mail.mt_note'),
                'email_layout_xmlid': 'mail.mail_notification_light'
            })
        return res

    def _creation_subtype(self):
        return self.env.ref('hr_recruitment.mt_applicant_new')

    def _track_subtype(self, init_values):
        record = self[0]
        if 'stage_id' in init_values and record.stage_id:
            return self.env.ref('hr_recruitment.mt_applicant_stage_changed')
        return super(Applicant, self)._track_subtype(init_values)

    def _notify_get_reply_to(self, default=None, records=None, company=None,
                             doc_names=None):
        """ Override to set alias of applicants to their job definition if any. """
        aliases = self.mapped('job_id')._notify_get_reply_to(
            default=default, records=None, company=company, doc_names=None)
        res = {app.id: aliases.get(app.job_id.id) for app in self}
        leftover = self.filtered(lambda rec: not rec.job_id)
        if leftover:
            res.update(
                super(Applicant, leftover)._notify_get_reply_to(
                    default=default, records=None, company=company,
                    doc_names=doc_names))
        return res

    def _message_get_suggested_recipients(self):
        recipients = super(Applicant, self)._message_get_suggested_recipients()
        for applicant in self:
            if applicant.partner_id:
                applicant._message_add_suggested_recipient(
                    recipients, partner=applicant.partner_id,
                    reason=_('Contact'))
            elif applicant.email_from:
                email_from = applicant.email_from
                if applicant.partner_name:
                    email_from = tools.formataddr(
                        (applicant.partner_name, email_from))
                applicant._message_add_suggested_recipient(
                    recipients, email=email_from, reason=_('Contact Email'))
        return recipients

    @api.model
    def message_new(self, msg, custom_values=None):
        """ Overrides mail_thread message_new that is called by the mailgateway
            through message_process.
            This override updates the document according to the email.
        """
        # remove default author when going through the mail gateway. Indeed
        # we do not want to explicitly set user_id to False; however we do
        # not want the gateway user to be responsible if no other responsible
        # is found.
        self = self.with_context(default_user_id=False)
        val = msg.get('from').split('<')[0]
        defaults = {
            'name': msg.get('subject') or _("No Subject"),
            'partner_name': val,
            'email_from': msg.get('from'),
            'partner_id': msg.get('author_id', False),
        }
        if msg.get('priority'):
            defaults['priority'] = msg.get('priority')
        if custom_values:
            defaults.update(custom_values)
        return super(Applicant, self).message_new(msg, custom_values=defaults)

    def _message_post_after_hook(self, message, msg_vals):
        if self.email_from and not self.partner_id:
            # we consider that posting a message with a specified recipient
            # (not a follower, a specific one) on a document without customer
            # means that it was created through the chatter using suggested
            # recipients. This heuristic allows to avoid ugly hacks in JS.
            new_partner = message.partner_ids.filtered(
                lambda partner: partner.email == self.email_from)
            if new_partner:
                if new_partner.create_date.date() == fields.Date.today():
                    new_partner.write({
                        'type': 'private',
                        'phone': self.partner_phone,
                        'mobile': self.partner_mobile,
                    })
                self.search([('partner_id', '=', False),
                             ('email_from', '=', new_partner.email),
                             ('stage_id.fold', '=', False)
                             ]).write({'partner_id': new_partner.id})
        return super(Applicant, self)._message_post_after_hook(
            message, msg_vals)

    def create_employee_from_applicant(self):
        """ Create an hr.employee from the hr.applicants """
        for applicant in self:
            contact_name = False
            if applicant.partner_id:
                address_id = applicant.partner_id.address_get(
                    ['contact'])['contact']
                contact_name = applicant.partner_id.display_name
            else:
                if not applicant.partner_name:
                    raise UserError(
                        _('You must define a Contact Name for this applicant.'))
                new_partner_id = self.env['res.partner'].create({
                    'is_company': False,
                    'type': 'private',
                    'name': applicant.partner_name,
                    'email': applicant.email_from,
                    'phone': applicant.partner_phone,
                    'mobile': applicant.partner_mobile
                })
                applicant.partner_id = new_partner_id
                address_id = new_partner_id.address_get(
                    ['contact'])['contact']
            if applicant.partner_name or contact_name:
                employee_data = {
                    'default_name': applicant.partner_name or contact_name,
                    'default_job_id': applicant.job_id.id,
                    'default_job_title': applicant.job_id.name,
                    'address_home_id': address_id,
                    'default_department_id':
                        applicant.department_id.id or False,
                    'default_address_id':
                        applicant.company_id and
                        applicant.company_id.partner_id and
                        applicant.company_id.partner_id.id or False,
                    'default_work_email':
                        applicant.department_id and
                        applicant.department_id.company_id and
                        applicant.department_id.company_id.email or False,
                    'default_work_phone':
                        applicant.department_id.company_id.phone,
                    'form_view_initial_mode': 'edit',
                    'default_applicant_id': applicant.ids,
                }
        dict_act_window = self.env['ir.actions.act_window']._for_xml_id(
            'hr.open_view_employee_list')
        dict_act_window['context'] = employee_data
        return dict_act_window

    def archive_applicant(self):
        """Open the refuse-reason wizard for the selected applicants."""
        return {
            'type': 'ir.actions.act_window',
            'name': _('Refuse Reason'),
            'res_model': 'applicant.get.refuse.reason',
            'view_mode': 'form',
            'target': 'new',
            'context': {
                'default_applicant_ids': self.ids,
                'active_test': False
            },
            'views': [[False, 'form']]
        }

    def reset_applicant(self):
        """ Reinsert the applicant into the recruitment pipe in the first stage"""
        default_stage = dict()
        for job_id in self.mapped('job_id'):
            default_stage[job_id.id] = self.env['hr.recruitment.stage'].search(
                ['|', ('job_ids', '=', False), ('job_ids', '=', job_id.id),
                 ('fold', '=', False)],
                order='sequence asc', limit=1).id
        for applicant in self:
            applicant.write({
                'stage_id':
                    applicant.job_id.id and default_stage[applicant.job_id.id],
                'refuse_reason_id': False
            })

    def toggle_active(self):
        res = super(Applicant, self).toggle_active()
        # Un-archived applicants go back to the first stage of the pipe;
        # archived ones go through the refuse-reason wizard.
        applicant_active = self.filtered(lambda applicant: applicant.active)
        if applicant_active:
            applicant_active.reset_applicant()
        applicant_inactive = self.filtered(
            lambda applicant: not applicant.active)
        if applicant_inactive:
            return applicant_inactive.archive_applicant()
        return res
class StockPackageLevel(models.Model):
    """Operational line tying a whole package (stock.quant.package) to a
    picking, so the package can be moved as one unit. Mirrors its contents
    as stock.move / stock.move.line records and derives a state from them.
    """
    _name = 'stock.package_level'
    _description = 'Stock Package Level'
    _check_company_auto = True

    package_id = fields.Many2one(
        'stock.quant.package', 'Package', required=True, check_company=True,
        domain="[('location_id', 'child_of', parent.location_id), '|', ('company_id', '=', False), ('company_id', '=', company_id)]")
    picking_id = fields.Many2one('stock.picking', 'Picking', check_company=True)
    move_ids = fields.One2many('stock.move', 'package_level_id')
    move_line_ids = fields.One2many('stock.move.line', 'package_level_id')
    location_id = fields.Many2one('stock.location', 'From', compute='_compute_location_id', check_company=True)
    location_dest_id = fields.Many2one(
        'stock.location', 'To', check_company=True,
        domain="[('id', 'child_of', parent.location_dest_id), '|', ('company_id', '=', False), ('company_id', '=', company_id)]")
    is_done = fields.Boolean('Done', compute='_compute_is_done', inverse='_set_is_done')
    state = fields.Selection([
        ('draft', 'Draft'),
        ('confirmed', 'Confirmed'),
        ('assigned', 'Reserved'),
        ('new', 'New'),
        ('done', 'Done'),
        ('cancel', 'Cancelled'),
    ], string='State', compute='_compute_state')
    is_fresh_package = fields.Boolean(compute='_compute_fresh_pack')
    picking_type_code = fields.Selection(related='picking_id.picking_type_code')
    show_lots_m2o = fields.Boolean(compute='_compute_show_lot')
    show_lots_text = fields.Boolean(compute='_compute_show_lot')
    company_id = fields.Many2one('res.company', 'Company', required=True, index=True)

    @api.depends('move_line_ids', 'move_line_ids.qty_done')
    def _compute_is_done(self):
        """A level is "done" when it is a freshly built package, or when its
        move lines' qty_done exactly matches the package's quant contents."""
        for package_level in self:
            # A fresh (not-yet-existing) package has nothing to compare
            # against, so it is considered done as soon as it exists.
            if package_level.is_fresh_package:
                package_level.is_done = True
            else:
                package_level.is_done = package_level._check_move_lines_map_quant_package(
                    package_level.package_id)

    def _set_is_done(self):
        """Inverse of ``is_done``: ticking it fills qty_done on the move lines
        from the package's quants (creating missing lines); unticking resets
        qty_done and drops zero-quantity lines."""
        for package_level in self:
            if package_level.is_done:
                if not package_level.is_fresh_package:
                    ml_update_dict = defaultdict(float)
                    for quant in package_level.package_id.quant_ids:
                        corresponding_ml = package_level.move_line_ids.filtered(
                            lambda ml: ml.product_id == quant.product_id and ml.lot_id == quant.lot_id)
                        if corresponding_ml:
                            # Accumulate first, write once after the loop: several
                            # quants may map onto the same move line.
                            ml_update_dict[corresponding_ml[0]] += quant.quantity
                        else:
                            # No matching move line: create one for this quant,
                            # attached to the first move of the same product (if any).
                            corresponding_move = package_level.move_ids.filtered(
                                lambda m: m.product_id == quant.product_id)[:1]
                            self.env['stock.move.line'].create({
                                'location_id': package_level.location_id.id,
                                'location_dest_id': package_level.location_dest_id.id,
                                'picking_id': package_level.picking_id.id,
                                'product_id': quant.product_id.id,
                                'qty_done': quant.quantity,
                                'product_uom_id': quant.product_id.uom_id.id,
                                'lot_id': quant.lot_id.id,
                                'package_id': package_level.package_id.id,
                                'result_package_id': package_level.package_id.id,
                                'package_level_id': package_level.id,
                                'move_id': corresponding_move.id,
                                'owner_id': quant.owner_id.id,
                            })
                    for rec, quant in ml_update_dict.items():
                        rec.qty_done = quant
            else:
                package_level.move_line_ids.filtered(
                    lambda ml: ml.product_qty == 0).unlink()
                package_level.move_line_ids.filtered(
                    lambda ml: ml.product_qty != 0).write({'qty_done': 0})

    @api.depends('move_line_ids', 'move_line_ids.package_id', 'move_line_ids.result_package_id')
    def _compute_fresh_pack(self):
        """A package is "fresh" when at least one of its move lines is not a
        plain same-package transfer (i.e. the package is being built here)."""
        for package_level in self:
            if not package_level.move_line_ids or all(
                    ml.package_id and ml.package_id == ml.result_package_id
                    for ml in package_level.move_line_ids):
                package_level.is_fresh_package = False
            else:
                package_level.is_fresh_package = True

    @api.depends('move_ids', 'move_ids.state', 'move_line_ids', 'move_line_ids.state')
    def _compute_state(self):
        """Derive the level's state from its moves and move lines; 'assigned'
        additionally requires the reserved lines to map the package contents."""
        for package_level in self:
            if not package_level.move_ids and not package_level.move_line_ids:
                package_level.state = 'draft'
            elif not package_level.move_line_ids and package_level.move_ids.filtered(
                    lambda m: m.state not in ('done', 'cancel')):
                package_level.state = 'confirmed'
            elif package_level.move_line_ids and not package_level.move_line_ids.filtered(
                    lambda ml: ml.state == 'done'):
                if package_level.is_fresh_package:
                    package_level.state = 'new'
                elif package_level._check_move_lines_map_quant_package(
                        package_level.package_id, 'product_uom_qty'):
                    package_level.state = 'assigned'
                else:
                    package_level.state = 'confirmed'
            elif package_level.move_line_ids.filtered(lambda ml: ml.state == 'done'):
                package_level.state = 'done'
            elif package_level.move_line_ids.filtered(
                    lambda ml: ml.state == 'cancel') or package_level.move_ids.filtered(
                    lambda m: m.state == 'cancel'):
                package_level.state = 'cancel'
            else:
                package_level.state = 'draft'

    def _compute_show_lot(self):
        """Choose how lots are displayed for this level: many2one to existing
        lots, free-text for lots being created, or nothing if untracked."""
        for package_level in self:
            if any(ml.product_id.tracking != 'none' for ml in package_level.move_line_ids):
                if package_level.picking_id.picking_type_id.use_existing_lots or package_level.state == 'done':
                    package_level.show_lots_m2o = True
                    package_level.show_lots_text = False
                else:
                    # BUG FIX: this condition used ``self.picking_id`` instead of
                    # ``package_level.picking_id`` — on a multi-record recordset
                    # it read the wrong (or an invalid) record, while every other
                    # branch correctly uses the loop variable.
                    if package_level.picking_id.picking_type_id.use_create_lots and package_level.state != 'done':
                        package_level.show_lots_m2o = False
                        package_level.show_lots_text = True
                    else:
                        package_level.show_lots_m2o = False
                        package_level.show_lots_text = False
            else:
                package_level.show_lots_m2o = False
                package_level.show_lots_text = False

    def _generate_moves(self):
        """Create one stock.move per quant of the package, linked back to this
        package level and its picking."""
        for package_level in self:
            if package_level.package_id:
                for quant in package_level.package_id.quant_ids:
                    self.env['stock.move'].create({
                        'picking_id': package_level.picking_id.id,
                        'name': quant.product_id.display_name,
                        'product_id': quant.product_id.id,
                        'product_uom_qty': quant.quantity,
                        'product_uom': quant.product_id.uom_id.id,
                        'location_id': package_level.location_id.id,
                        'location_dest_id': package_level.location_dest_id.id,
                        'package_level_id': package_level.id,
                        'company_id': package_level.company_id.id,
                    })

    @api.model
    def create(self, vals):
        # Propagate an explicit destination to the already-linked moves/lines.
        result = super(StockPackageLevel, self).create(vals)
        if vals.get('location_dest_id'):
            result.mapped('move_line_ids').write(
                {'location_dest_id': vals['location_dest_id']})
            result.mapped('move_ids').write(
                {'location_dest_id': vals['location_dest_id']})
        return result

    def write(self, vals):
        # Same propagation as create(): keep moves/lines in sync with the level.
        result = super(StockPackageLevel, self).write(vals)
        if vals.get('location_dest_id'):
            self.mapped('move_line_ids').write(
                {'location_dest_id': vals['location_dest_id']})
            self.mapped('move_ids').write(
                {'location_dest_id': vals['location_dest_id']})
        return result

    def unlink(self):
        # Detach moves and clear result packages before deleting the level so
        # the underlying operations survive the deletion.
        self.mapped('move_ids').write({'package_level_id': False})
        self.mapped('move_line_ids').write({'result_package_id': False})
        return super(StockPackageLevel, self).unlink()

    def _check_move_lines_map_quant_package(self, package, field='qty_done'):
        """Return True when this level's move lines (summed on ``field``) match
        the package's quant quantities exactly, grouped by (product, lot).

        should compare in good uom
        """
        all_in = True
        pack_move_lines = self.move_line_ids
        keys = ['product_id', 'lot_id']

        def sorted_key(record):
            # Renamed from ``object`` (shadowed the builtin). Sort key must
            # match the groupby key below: (product id, lot id).
            record.ensure_one()
            return [record.product_id.id, record.lot_id.id]

        grouped_quants = {}
        for k, g in groupby(sorted(package.quant_ids, key=sorted_key), key=itemgetter(*keys)):
            grouped_quants[k] = sum(
                self.env['stock.quant'].concat(*list(g)).mapped('quantity'))
        grouped_ops = {}
        for k, g in groupby(sorted(pack_move_lines, key=sorted_key), key=itemgetter(*keys)):
            grouped_ops[k] = sum(
                self.env['stock.move.line'].concat(*list(g)).mapped(field))
        # Any (product, lot) quantity mismatch, in either direction, fails the check.
        if any(grouped_quants.get(key, 0) - grouped_ops.get(key, 0) != 0 for key in grouped_quants) \
                or any(grouped_ops.get(key, 0) - grouped_quants.get(key, 0) != 0 for key in grouped_ops):
            all_in = False
        return all_in

    @api.depends('package_id', 'state', 'is_fresh_package', 'move_ids', 'move_line_ids')
    def _compute_location_id(self):
        """Source location: unknown for fresh packages, else the package's own
        location, else derived from moves/lines, else the picking's source."""
        for pl in self:
            if pl.state == 'new' or pl.is_fresh_package:
                pl.location_id = False
            elif pl.package_id:
                pl.location_id = pl.package_id.location_id
            elif pl.state == 'confirmed' and pl.move_ids:
                pl.location_id = pl.move_ids[0].location_id
            elif pl.state in ('assigned', 'done') and pl.move_line_ids:
                pl.location_id = pl.move_line_ids[0].location_id
            else:
                pl.location_id = pl.picking_id.location_id

    def action_show_package_details(self):
        """Open this level's content in a read-only dialog; prefer the edit
        view when it exists, fall back to the plain form view."""
        self.ensure_one()
        view = self.env.ref('stock.package_level_form_edit_view',
                            raise_if_not_found=False) or self.env.ref(
                                'stock.package_level_form_view')
        return {
            'name': _('Package Content'),
            'type': 'ir.actions.act_window',
            'view_mode': 'form',
            'res_model': 'stock.package_level',
            'views': [(view.id, 'form')],
            'view_id': view.id,
            'target': 'new',
            'res_id': self.id,
            'flags': {
                'mode': 'readonly'
            },
        }
class MembershipLine(models.Model):
    """One membership subscription for a partner.

    Links a partner to a membership product and, once billed, to the invoice
    line that charges for it. The ``state`` field summarizes where the
    subscription stands based on that invoice's state.
    """
    _name = 'membership.membership_line'
    _rec_name = 'partner'
    _order = 'id desc'

    # Member this line belongs to; the line disappears with the partner.
    partner = fields.Many2one('res.partner', string='Partner', ondelete='cascade', index=True)
    # Membership product that was subscribed to.
    membership_id = fields.Many2one('product.product', string="Membership", required=True)
    # Validity period of the membership.
    date_from = fields.Date(string='From', readonly=True)
    date_to = fields.Date(string='To', readonly=True)
    date_cancel = fields.Date(string='Cancel date')
    date = fields.Date(string='Join Date', help="Date on which member has joined the membership")
    member_price = fields.Float(string='Membership Fee', digits=dp.get_precision('Product Price'), required=True, help='Amount for the membership')
    # Invoice line billing this membership; invoice and company are related
    # through it (company is stored so it can be searched/grouped on).
    account_invoice_line = fields.Many2one('account.invoice.line', string='Account Invoice line', readonly=True, ondelete='cascade')
    account_invoice_id = fields.Many2one(
        'account.invoice', related='account_invoice_line.invoice_id', string='Invoice', readonly=True)
    company_id = fields.Many2one(
        'res.company', related='account_invoice_line.invoice_id.company_id', string="Company", readonly=True, store=True)
    # STATE is a module-level selection defined elsewhere in this file.
    state = fields.Selection(
        STATE, compute='_compute_state', string='Membership Status', store=True,
        help="It indicates the membership status.\n"
             "-Non Member: A member who has not applied for any membership.\n"
             "-Cancelled Member: A member who has cancelled his membership.\n"
             "-Old Member: A member whose membership date has expired.\n"
             "-Waiting Member: A member who has applied for the membership and whose invoice is going to be created.\n"
             "-Invoiced Member: A member whose invoice has been created.\n"
             "-Paid Member: A member who has paid the membership amount.")

    @api.depends('account_invoice_line.invoice_id.state',
                 'account_invoice_line.invoice_id.payment_ids',
                 'account_invoice_line.invoice_id.payment_ids.invoice_ids.type')
    def _compute_state(self):
        """Compute the state lines """
        # Maps the linked invoice's state onto the membership state:
        # no invoice -> canceled, draft -> waiting, open -> invoiced,
        # paid -> paid (downgraded to canceled if a refund is among the
        # payments' invoices), cancel -> canceled, anything else -> none.
        # NOTE(review): the invoice state is fetched with raw SQL, one query
        # per line, rather than through the ORM relation — presumably to
        # bypass the record cache during recomputation; confirm before
        # changing this to an ORM read.
        Invoice = self.env['account.invoice']
        for line in self:
            self._cr.execute('''
                SELECT i.state, i.id
                FROM account_invoice i
                WHERE i.id = (
                    SELECT l.invoice_id
                    FROM account_invoice_line l
                    WHERE l.id = (
                        SELECT ml.account_invoice_line
                        FROM membership_membership_line ml
                        WHERE ml.id = %s
                    )
                )
            ''', (line.id, ))
            fetched = self._cr.fetchone()
            if not fetched:
                # No invoice linked at all: treated as canceled.
                line.state = 'canceled'
                continue
            istate = fetched[0]
            if istate == 'draft':
                line.state = 'waiting'
            elif istate == 'open':
                line.state = 'invoiced'
            elif istate == 'paid':
                line.state = 'paid'
                # A refund among the payments' invoices cancels the membership.
                invoices = Invoice.browse(fetched[1]).payment_ids.mapped('invoice_ids')
                if invoices.filtered(lambda invoice: invoice.type == 'out_refund'):
                    line.state = 'canceled'
            elif istate == 'cancel':
                line.state = 'canceled'
            else:
                line.state = 'none'
class Alias(models.Model):
    """A Mail Alias is a mapping of an email address with a given Flectra Document
       model. It is used by Flectra's mail gateway when processing incoming emails
       sent to the system. If the recipient address (To) of the message matches
       a Mail Alias, the message will be either processed following the rules
       of that alias. If the message is a reply it will be attached to the
       existing discussion on the corresponding record, otherwise a new
       record of the corresponding model will be created.

       This is meant to be used in combination with a catch-all email configuration
       on the company's mail server, so that as soon as a new mail.alias is
       created, it becomes immediately usable and Flectra will accept email for it.
    """
    _name = 'mail.alias'
    _description = "Email Aliases"
    _rec_name = 'alias_name'
    _order = 'alias_model_id, alias_name'

    alias_name = fields.Char('Alias Name', help="The name of the email alias, e.g. 'jobs' if you want to catch emails for <*****@*****.**>")
    alias_model_id = fields.Many2one('ir.model', 'Aliased Model', required=True, ondelete="cascade",
                                     help="The model (Flectra Document Kind) to which this alias "
                                          "corresponds. Any incoming email that does not reply to an "
                                          "existing record will cause the creation of a new record "
                                          "of this model (e.g. a Project Task)",
                                     # hack to only allow selecting mail_thread models (we might
                                     # (have a few false positives, though)
                                     domain="[('field_id.name', '=', 'message_ids')]")
    # BUG FIX: the keyword was misspelled ``defaults=``; the field constructor
    # takes ``default=``, so the "current user as owner" default was silently
    # never applied.
    alias_user_id = fields.Many2one('res.users', 'Owner', default=lambda self: self.env.user,
                                    help="The owner of records created upon receiving emails on this alias. "
                                         "If this field is not set the system will attempt to find the right owner "
                                         "based on the sender (From) address, or will use the Administrator account "
                                         "if no system user is found for that address.")
    alias_defaults = fields.Text('Default Values', required=True, default='{}',
                                 help="A Python dictionary that will be evaluated to provide "
                                      "default values when creating new records for this alias.")
    alias_force_thread_id = fields.Integer(
        'Record Thread ID',
        help="Optional ID of a thread (record) to which all incoming messages will be attached, even "
             "if they did not reply to it. If set, this will disable the creation of new records completely.")
    alias_domain = fields.Char('Alias domain', compute='_get_alias_domain',
                               default=lambda self: self.env["ir.config_parameter"].sudo().get_param("mail.catchall.domain"))
    alias_parent_model_id = fields.Many2one(
        'ir.model', 'Parent Model',
        help="Parent model holding the alias. The model holding the alias reference "
             "is not necessarily the model given by alias_model_id "
             "(example: project (parent_model) and task (model))")
    alias_parent_thread_id = fields.Integer('Parent Record Thread ID', help="ID of the parent record holding the alias (example: project holding the task creation alias)")
    alias_contact = fields.Selection([
        ('everyone', 'Everyone'),
        ('partners', 'Authenticated Partners'),
        ('followers', 'Followers only')], default='everyone',
        string='Alias Contact Security', required=True,
        help="Policy to post a message on the document using the mailgateway.\n"
             "- everyone: everyone can post\n"
             "- partners: only authenticated partners\n"
             "- followers: only followers of the related document or members of following channels\n")

    _sql_constraints = [
        ('alias_unique', 'UNIQUE(alias_name)', 'Unfortunately this email alias is already used, please choose a unique one')
    ]

    @api.multi
    def _get_alias_domain(self):
        # The catch-all domain is a global config parameter, identical for
        # every alias, so fetch it once outside the loop.
        alias_domain = self.env["ir.config_parameter"].sudo().get_param("mail.catchall.domain")
        for record in self:
            record.alias_domain = alias_domain

    @api.one
    @api.constrains('alias_defaults')
    def _check_alias_defaults(self):
        # alias_defaults must be a literal Python dict; it is evaluated with
        # safe_eval when the gateway creates records for this alias.
        try:
            dict(safe_eval(self.alias_defaults))
        except Exception:
            raise ValidationError(_('Invalid expression, it must be a literal python dictionary definition e.g. "{\'field\': \'value\'}"'))

    @api.model
    def create(self, vals):
        """ Creates an email.alias record according to the values provided in ``vals``,
            with 2 alterations: the ``alias_name`` value may be suffixed in order to
            make it unique (and certain unsafe characters replaced), and
            he ``alias_model_id`` value will set to the model ID of the ``model_name``
            context value, if provided.
        """
        model_name = self._context.get('alias_model_name')
        parent_model_name = self._context.get('alias_parent_model_name')
        if vals.get('alias_name'):
            vals['alias_name'] = self._clean_and_make_unique(vals.get('alias_name'))
        if model_name:
            model = self.env['ir.model']._get(model_name)
            vals['alias_model_id'] = model.id
        if parent_model_name:
            model = self.env['ir.model']._get(parent_model_name)
            vals['alias_parent_model_id'] = model.id
        return super(Alias, self).create(vals)

    @api.multi
    def write(self, vals):
        """"give a unique alias name if given alias name is already assigned"""
        if vals.get('alias_name') and self.ids:
            vals['alias_name'] = self._clean_and_make_unique(vals.get('alias_name'), alias_ids=self.ids)
        return super(Alias, self).write(vals)

    @api.multi
    def name_get(self):
        """Return the mail alias display alias_name, including the implicit
           mail catchall domain if exists from config otherwise "New Alias".
           e.g. `[email protected]` or `jobs` or 'New Alias'
        """
        res = []
        for record in self:
            if record.alias_name and record.alias_domain:
                res.append((record['id'], "%s@%s" % (record.alias_name, record.alias_domain)))
            elif record.alias_name:
                res.append((record['id'], "%s" % (record.alias_name)))
            else:
                res.append((record['id'], _("Inactive Alias")))
        return res

    @api.model
    def _find_unique(self, name, alias_ids=False):
        """Find a unique alias name similar to ``name``. If ``name`` is already
           taken, make a variant by adding an integer suffix until an unused
           alias is found.

           :param alias_ids: optional list of alias ids to exclude from the
               uniqueness check (used when renaming existing aliases).
        """
        sequence = None
        while True:
            new_name = "%s%s" % (name, sequence) if sequence is not None else name
            domain = [('alias_name', '=', new_name)]
            if alias_ids:
                domain += [('id', 'not in', alias_ids)]
            if not self.search(domain):
                break
            # First clash gets suffix 2, then 3, 4, ...
            sequence = (sequence + 1) if sequence else 2
        return new_name

    @api.model
    def _clean_and_make_unique(self, name, alias_ids=False):
        # when an alias name appears to already be an email, we keep the local part only
        name = remove_accents(name).lower().split('@')[0]
        # Replace every run of characters outside [word, '+', '.'] with a dash.
        name = re.sub(r'[^\w+.]+', '-', name)
        return self._find_unique(name, alias_ids=alias_ids)

    @api.multi
    def open_document(self):
        """Open the record all incoming mail is forced onto, if any."""
        if not self.alias_model_id or not self.alias_force_thread_id:
            return False
        return {
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': self.alias_model_id.model,
            'res_id': self.alias_force_thread_id,
            'type': 'ir.actions.act_window',
        }

    @api.multi
    def open_parent_document(self):
        """Open the parent record that holds this alias, if any."""
        if not self.alias_parent_model_id or not self.alias_parent_thread_id:
            return False
        return {
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': self.alias_parent_model_id.model,
            'res_id': self.alias_parent_thread_id,
            'type': 'ir.actions.act_window',
        }
class Partner(models.Model): _description = 'Contact' _inherit = ['format.address.mixin'] _name = "res.partner" _order = "display_name" def _default_category(self): return self.env['res.partner.category'].browse( self._context.get('category_id')) def _default_company(self): return self.env['res.company']._company_default_get('res.partner') def _split_street_with_params(self, street_raw, street_format): return {'street': street_raw} name = fields.Char(index=True) display_name = fields.Char(compute='_compute_display_name', store=True, index=True) date = fields.Date(index=True) title = fields.Many2one('res.partner.title') parent_id = fields.Many2one('res.partner', string='Related Company', index=True) parent_name = fields.Char(related='parent_id.name', readonly=True, string='Parent name') child_ids = fields.One2many( 'res.partner', 'parent_id', string='Contacts', domain=[('active', '=', True) ]) # force "active_test" domain to bypass _search() override ref = fields.Char(string='Internal Reference', index=True) lang = fields.Selection( _lang_get, string='Language', default=lambda self: self.env.lang, help= "If the selected language is loaded in the system, all documents related to " "this contact will be printed in this language. If not, it will be English." ) tz = fields.Selection( _tz_get, string='Timezone', default=lambda self: self._context.get('tz'), help= "The partner's timezone, used to output proper date and time values " "inside printed reports. It is important to set a value for this field. " "You should use the same timezone that is otherwise used to pick and " "render date and time values: your computer's timezone.") tz_offset = fields.Char(compute='_compute_tz_offset', string='Timezone offset', invisible=True) user_id = fields.Many2one( 'res.users', string='Salesperson', help= 'The internal user that is in charge of communicating with this contact if any.' ) vat = fields.Char(string='TIN', help="Tax Identification Number. 
" "Fill it if the company is subjected to taxes. " "Used by the some of the legal statements.") bank_ids = fields.One2many('res.partner.bank', 'partner_id', string='Banks') website = fields.Char(help="Website of Partner or Company") comment = fields.Text(string='Notes') category_id = fields.Many2many('res.partner.category', column1='partner_id', column2='category_id', string='Tags', default=_default_category) credit_limit = fields.Float(string='Credit Limit') barcode = fields.Char(oldname='ean13') active = fields.Boolean(default=True) customer = fields.Boolean( string='Is a Customer', default=True, help="Check this box if this contact is a customer.") supplier = fields.Boolean( string='Is a Vendor', help="Check this box if this contact is a vendor. " "If it's not checked, purchase people will not see it when encoding a purchase order." ) employee = fields.Boolean( help="Check this box if this contact is an Employee.") function = fields.Char(string='Job Position') type = fields.Selection( [ ('contact', 'Contact'), ('invoice', 'Invoice address'), ('delivery', 'Shipping address'), ('other', 'Other address'), ("private", "Private Address"), ], string='Address Type', default='contact', help= "Used to select automatically the right address according to the context in sales and purchases documents." 
) street = fields.Char() street2 = fields.Char() zip = fields.Char(change_default=True) city = fields.Char() state_id = fields.Many2one("res.country.state", string='State', ondelete='restrict') country_id = fields.Many2one('res.country', string='Country', ondelete='restrict') email = fields.Char() email_formatted = fields.Char( 'Formatted Email', compute='_compute_email_formatted', help='Format email address "Name <email@domain>"') phone = fields.Char() mobile = fields.Char() is_company = fields.Boolean( string='Is a Company', default=False, help="Check if the contact is a company, otherwise it is a person") industry_id = fields.Many2one('res.partner.industry', 'Industry') # company_type is only an interface field, do not use it in business logic company_type = fields.Selection(string='Company Type', selection=[('person', 'Individual'), ('company', 'Company')], compute='_compute_company_type', inverse='_write_company_type') company_id = fields.Many2one('res.company', 'Company', index=True, default=_default_company) color = fields.Integer(string='Color Index', default=0) user_ids = fields.One2many('res.users', 'partner_id', string='Users', auto_join=True) partner_share = fields.Boolean( 'Share Partner', compute='_compute_partner_share', store=True, help= "Either customer (no user), either shared user. 
Indicated the current partner is a customer without " "access or with a limited access created for sharing data.") contact_address = fields.Char(compute='_compute_contact_address', string='Complete Address') # technical field used for managing commercial fields commercial_partner_id = fields.Many2one( 'res.partner', compute='_compute_commercial_partner', string='Commercial Entity', store=True, index=True) commercial_partner_country_id = fields.Many2one( 'res.country', related='commercial_partner_id.country_id', store=True, string="Commercial Entity's Country") commercial_company_name = fields.Char( 'Company Name Entity', compute='_compute_commercial_company_name', store=True) company_name = fields.Char('Company Name') # image: all image fields are base64 encoded and PIL-supported image = fields.Binary( "Image", attachment=True, help= "This field holds the image used as avatar for this contact, limited to 1024x1024px", ) image_medium = fields.Binary("Medium-sized image", attachment=True, help="Medium-sized image of this contact. It is automatically "\ "resized as a 128x128px image, with aspect ratio preserved. "\ "Use this field in form views or some kanban views.") image_small = fields.Binary("Small-sized image", attachment=True, help="Small-sized image of this contact. It is automatically "\ "resized as a 64x64px image, with aspect ratio preserved. 
"\ "Use this field anywhere a small image is required.") # hack to allow using plain browse record in qweb views, and used in ir.qweb.field.contact self = fields.Many2one(comodel_name=_name, compute='_compute_get_ids') _sql_constraints = [ ('check_name', "CHECK( (type='contact' AND name IS NOT NULL) or (type!='contact') )", 'Contacts require a name.'), ] @api.depends('is_company', 'name', 'parent_id.name', 'type', 'company_name') def _compute_display_name(self): diff = dict(show_address=None, show_address_only=None, show_email=None) names = dict(self.with_context(**diff).name_get()) for partner in self: partner.display_name = names.get(partner.id) @api.depends('tz') def _compute_tz_offset(self): for partner in self: partner.tz_offset = datetime.datetime.now( pytz.timezone(partner.tz or 'GMT')).strftime('%z') @api.depends('user_ids.share') def _compute_partner_share(self): for partner in self: partner.partner_share = not partner.user_ids or any( user.share for user in partner.user_ids) @api.depends(lambda self: self._display_address_depends()) def _compute_contact_address(self): for partner in self: partner.contact_address = partner._display_address() @api.one def _compute_get_ids(self): self.self = self.id @api.depends('is_company', 'parent_id.commercial_partner_id') def _compute_commercial_partner(self): for partner in self: if partner.is_company or not partner.parent_id: partner.commercial_partner_id = partner else: partner.commercial_partner_id = partner.parent_id.commercial_partner_id @api.depends('company_name', 'parent_id.is_company', 'commercial_partner_id.name') def _compute_commercial_company_name(self): for partner in self: p = partner.commercial_partner_id partner.commercial_company_name = p.is_company and p.name or partner.company_name @api.model def _get_default_image(self, partner_type, is_company, parent_id): if getattr(threading.currentThread(), 'testing', False) or self._context.get('install_mode'): return False colorize, img_path, image = False, 
False, False if partner_type in ['other'] and parent_id: parent_image = self.browse(parent_id).image image = parent_image and base64.b64decode(parent_image) or None if not image and partner_type == 'invoice': img_path = get_module_resource('base', 'static/src/img', 'money.png') elif not image and partner_type == 'delivery': img_path = get_module_resource('base', 'static/src/img', 'truck.png') elif not image and is_company: img_path = get_module_resource('base', 'static/src/img', 'company_image.png') elif not image: img_path = get_module_resource('base', 'static/src/img', 'avatar.png') colorize = True if img_path: with open(img_path, 'rb') as f: image = f.read() if image and colorize: image = tools.image_colorize(image) return tools.image_resize_image_big(base64.b64encode(image)) @api.model def _fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False): if (not view_id) and (view_type == 'form') and self._context.get('force_email'): view_id = self.env.ref('base.view_partner_simple_form').id res = super(Partner, self)._fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu) if view_type == 'form': res['arch'] = self._fields_view_get_address(res['arch']) return res @api.constrains('parent_id') def _check_parent_id(self): if not self._check_recursion(): raise ValidationError( _('You cannot create recursive Partner hierarchies.')) @api.multi def copy(self, default=None): self.ensure_one() chosen_name = default.get('name') if default else '' new_name = chosen_name or _('%s (copy)') % self.name default = dict(default or {}, name=new_name) return super(Partner, self).copy(default) @api.onchange('parent_id') def onchange_parent_id(self): # return values in result, as this method is used by _fields_sync() if not self.parent_id: return result = {} partner = getattr(self, '_origin', self) if partner.parent_id and partner.parent_id != self.parent_id: result['warning'] = { 'title': _('Warning'), 'message': _('Changing the 
company of a contact should only be done if it ' 'was never correctly set. If an existing contact starts working for a new ' 'company then a new contact should be created under that new ' 'company. You can use the "Discard" button to abandon this change.' ) } if partner.type == 'contact' or self.type == 'contact': # for contacts: copy the parent address, if set (aka, at least one # value is set in the address: otherwise, keep the one from the # contact) address_fields = self._address_fields() if any(self.parent_id[key] for key in address_fields): def convert(value): return value.id if isinstance(value, models.BaseModel) else value result['value'] = { key: convert(self.parent_id[key]) for key in address_fields } return result @api.onchange('country_id') def _onchange_country_id(self): if self.country_id: return { 'domain': { 'state_id': [('country_id', '=', self.country_id.id)] } } else: return {'domain': {'state_id': []}} @api.onchange('email') def onchange_email(self): if not self.image and self._context.get( 'gravatar_image') and self.email: self.image = self._get_gravatar_image(self.email) @api.depends('name', 'email') def _compute_email_formatted(self): for partner in self: partner.email_formatted = tools.formataddr( (partner.name or u"False", partner.email or u"False")) @api.depends('is_company') def _compute_company_type(self): for partner in self: partner.company_type = 'company' if partner.is_company else 'person' def _write_company_type(self): for partner in self: partner.is_company = partner.company_type == 'company' @api.onchange('company_type') def onchange_company_type(self): self.is_company = (self.company_type == 'company') @api.multi def _update_fields_values(self, fields): """ Returns dict of write() values for synchronizing ``fields`` """ values = {} for fname in fields: field = self._fields[fname] if field.type == 'many2one': values[fname] = self[fname].id elif field.type == 'one2many': raise AssertionError( _('One2Many fields cannot be 
synchronized as part of `commercial_fields` or `address fields`' )) elif field.type == 'many2many': values[fname] = [(6, 0, self[fname].ids)] else: values[fname] = self[fname] return values @api.model def _address_fields(self): """Returns the list of address fields that are synced from the parent.""" return list(ADDRESS_FIELDS) @api.multi def update_address(self, vals): addr_vals = { key: vals[key] for key in self._address_fields() if key in vals } if addr_vals: return super(Partner, self).write(addr_vals) @api.model def _commercial_fields(self): """ Returns the list of fields that are managed by the commercial entity to which a partner belongs. These fields are meant to be hidden on partners that aren't `commercial entities` themselves, and will be delegated to the parent `commercial entity`. The list is meant to be extended by inheriting classes. """ return ['vat', 'credit_limit'] @api.multi def _commercial_sync_from_company(self): """ Handle sync of commercial fields when a new parent commercial entity is set, as if they were related fields """ commercial_partner = self.commercial_partner_id if commercial_partner != self: sync_vals = commercial_partner.with_prefetch( )._update_fields_values(self._commercial_fields()) self.write(sync_vals) @api.multi def _commercial_sync_to_children(self): """ Handle sync of commercial fields to descendants """ commercial_partner = self.commercial_partner_id sync_vals = commercial_partner._update_fields_values( self._commercial_fields()) sync_children = self.child_ids.filtered(lambda c: not c.is_company) for child in sync_children: child._commercial_sync_to_children() res = sync_children.write(sync_vals) sync_children._compute_commercial_partner() return res @api.multi def _fields_sync(self, values): """ Sync commercial fields and address fields from company and to children after create/update, just as if those were all modeled as fields.related to the parent """ # 1. 
From UPSTREAM: sync from parent if values.get('parent_id') or values.get('type') == 'contact': # 1a. Commercial fields: sync if parent changed if values.get('parent_id'): self._commercial_sync_from_company() # 1b. Address fields: sync if parent or use_parent changed *and* both are now set if self.parent_id and self.type == 'contact': onchange_vals = self.onchange_parent_id().get('value', {}) self.update_address(onchange_vals) # 2. To DOWNSTREAM: sync children if self.child_ids: # 2a. Commercial Fields: sync if commercial entity if self.commercial_partner_id == self: commercial_fields = self._commercial_fields() if any(field in values for field in commercial_fields): self._commercial_sync_to_children() for child in self.child_ids.filtered(lambda c: not c.is_company): if child.commercial_partner_id != self.commercial_partner_id: self._commercial_sync_to_children() break # 2b. Address fields: sync if address changed address_fields = self._address_fields() if any(field in values for field in address_fields): contacts = self.child_ids.filtered( lambda c: c.type == 'contact') contacts.update_address(values) @api.multi def _handle_first_contact_creation(self): """ On creation of first contact for a company (or root) that has no address, assume contact address was meant to be company address """ parent = self.parent_id address_fields = self._address_fields() if (parent.is_company or not parent.parent_id) and len(parent.child_ids) == 1 and \ any(self[f] for f in address_fields) and not any(parent[f] for f in address_fields): addr_vals = self._update_fields_values(address_fields) parent.update_address(addr_vals) def _clean_website(self, website): url = urls.url_parse(website) if not url.scheme: if not url.netloc: url = url.replace(netloc=url.path, path='') website = url.replace(scheme='http').to_url() return website @api.multi def write(self, vals): # res.partner must only allow to set the company_id of a partner if it # is the same as the company of all users that inherit 
from this partner # (this is to allow the code from res_users to write to the partner!) or # if setting the company_id to False (this is compatible with any user # company) if vals.get('website'): vals['website'] = self._clean_website(vals['website']) if vals.get('parent_id'): vals['company_name'] = False if vals.get('company_id'): company = self.env['res.company'].browse(vals['company_id']) for partner in self: if partner.user_ids: companies = set(user.company_id for user in partner.user_ids) if len(companies) > 1 or company not in companies: raise UserError( _("You can not change the company as the partner/user has multiple user linked with different companies." )) tools.image_resize_images(vals) result = True # To write in SUPERUSER on field is_company and avoid access rights problems. if 'is_company' in vals and self.user_has_groups( 'base.group_partner_manager' ) and not self.env.uid == SUPERUSER_ID: result = super(Partner, self.sudo()).write( {'is_company': vals.get('is_company')}) del vals['is_company'] result = result and super(Partner, self).write(vals) for partner in self: if any( u.has_group('base.group_user') for u in partner.user_ids if u != self.env.user): self.env['res.users'].check_access_rights('write') partner._fields_sync(vals) return result @api.model def create(self, vals): if vals.get('website'): vals['website'] = self._clean_website(vals['website']) if vals.get('parent_id'): vals['company_name'] = False # compute default image in create, because computing gravatar in the onchange # cannot be easily performed if default images are in the way if not vals.get('image'): vals['image'] = self._get_default_image(vals.get('type'), vals.get('is_company'), vals.get('parent_id')) tools.image_resize_images(vals) partner = super(Partner, self).create(vals) partner._fields_sync(vals) partner._handle_first_contact_creation() return partner @api.multi def create_company(self): self.ensure_one() if self.company_name: # Create parent company values = 
dict(name=self.company_name, is_company=True, vat=self.vat) values.update(self._update_fields_values(self._address_fields())) new_company = self.create(values) # Set new company as my parent self.write({ 'parent_id': new_company.id, 'child_ids': [(1, partner_id, dict(parent_id=new_company.id)) for partner_id in self.child_ids.ids] }) return True @api.multi def open_commercial_entity(self): """ Utility method used to add an "Open Company" button in partner views """ self.ensure_one() return { 'type': 'ir.actions.act_window', 'res_model': 'res.partner', 'view_mode': 'form', 'res_id': self.commercial_partner_id.id, 'target': 'current', 'flags': { 'form': { 'action_buttons': True } } } @api.multi def open_parent(self): """ Utility method used to add an "Open Parent" button in partner views """ self.ensure_one() address_form_id = self.env.ref('base.view_partner_address_form').id return { 'type': 'ir.actions.act_window', 'res_model': 'res.partner', 'view_mode': 'form', 'views': [(address_form_id, 'form')], 'res_id': self.parent_id.id, 'target': 'new', 'flags': { 'form': { 'action_buttons': True } } } @api.multi def name_get(self): res = [] for partner in self: name = partner.name or '' if partner.company_name or partner.parent_id: if not name and partner.type in [ 'invoice', 'delivery', 'other' ]: name = dict( self.fields_get(['type' ])['type']['selection'])[partner.type] if not partner.is_company: name = "%s, %s" % (partner.commercial_company_name or partner.parent_id.name, name) if self._context.get('show_address_only'): name = partner._display_address(without_company=True) if self._context.get('show_address'): name = name + "\n" + partner._display_address( without_company=True) name = name.replace('\n\n', '\n') name = name.replace('\n\n', '\n') if self._context.get('show_email') and partner.email: name = "%s <%s>" % (name, partner.email) if self._context.get('html_format'): name = name.replace('\n', '<br/>') res.append((partner.id, name)) return res def 
_parse_partner_name(self, text, context=None): """ Supported syntax: - 'Raoul <*****@*****.**>': will find name and email address - otherwise: default, everything is set as the name """ emails = tools.email_split(text.replace(' ', ',')) if emails: email = emails[0] name = text[:text.index(email)].replace('"', '').replace('<', '').strip() else: name, email = text, '' return name, email @api.model def name_create(self, name): """ Override of orm's name_create method for partners. The purpose is to handle some basic formats to create partners using the name_create. If only an email address is received and that the regex cannot find a name, the name will have the email value. If 'force_email' key in context: must find the email address. """ name, email = self._parse_partner_name(name) if self._context.get('force_email') and not email: raise UserError( _("Couldn't create contact without email address!")) if not name and email: name = email partner = self.create({ self._rec_name: name or email, 'email': email or self.env.context.get('default_email', False) }) return partner.name_get()[0] @api.model def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None): """ Override search() to always show inactive children when searching via ``child_of`` operator. The ORM will always call search() with a simple domain of the form [('parent_id', 'in', [ids])]. 
""" # a special ``domain`` is set on the ``child_ids`` o2m to bypass this logic, as it uses similar domain expressions if len(args) == 1 and len(args[0]) == 3 and args[0][:2] == ('parent_id','in') \ and args[0][2] != [False]: self = self.with_context(active_test=False) return super(Partner, self)._search(args, offset=offset, limit=limit, order=order, count=count, access_rights_uid=access_rights_uid) @api.model def name_search(self, name, args=None, operator='ilike', limit=100): if args is None: args = [] if name and operator in ('=', 'ilike', '=ilike', 'like', '=like'): self.check_access_rights('read') where_query = self._where_calc(args) self._apply_ir_rules(where_query, 'read') from_clause, where_clause, where_clause_params = where_query.get_sql( ) from_str = from_clause if from_clause else 'res_partner' where_str = where_clause and (" WHERE %s AND " % where_clause) or ' WHERE ' # search on the name of the contacts and of its company search_name = name if operator in ('ilike', 'like'): search_name = '%%%s%%' % name if operator in ('=ilike', '=like'): operator = operator[1:] unaccent = get_unaccent_wrapper(self.env.cr) query = """SELECT res_partner.id FROM {from_str} {where} ({email} {operator} {percent} OR {display_name} {operator} {percent} OR {reference} {operator} {percent} OR {vat} {operator} {percent}) -- don't panic, trust postgres bitmap ORDER BY {display_name} {operator} {percent} desc, {display_name} """.format( from_str=from_str, where=where_str, operator=operator, email=unaccent('res_partner.email'), display_name=unaccent('res_partner.display_name'), reference=unaccent('res_partner.ref'), percent=unaccent('%s'), vat=unaccent('res_partner.vat'), ) where_clause_params += [search_name] * 5 if limit: query += ' limit %s' where_clause_params.append(limit) self.env.cr.execute(query, where_clause_params) partner_ids = [row[0] for row in self.env.cr.fetchall()] if partner_ids: return self.browse(partner_ids).name_get() else: return [] return super(Partner, 
self).name_search(name, args, operator=operator, limit=limit) @api.model def find_or_create(self, email): """ Find a partner with the given ``email`` or use :py:method:`~.name_create` to create one :param str email: email-like string, which should contain at least one email, e.g. ``"Raoul Grosbedon <*****@*****.**>"``""" assert email, 'an email is required for find_or_create to work' emails = tools.email_split(email) if emails: email = emails[0] partners = self.search([('email', '=ilike', email)], limit=1) return partners.id or self.name_create(email)[0] def _get_gravatar_image(self, email): email_hash = hashlib.md5(email.lower().encode('utf-8')).hexdigest() url = "https://www.gravatar.com/avatar/" + email_hash try: res = requests.get(url, params={'d': '404', 's': '128'}, timeout=5) if res.status_code != requests.codes.ok: return False except requests.exceptions.ConnectionError as e: return False except requests.exceptions.Timeout as e: return False return base64.b64encode(res.content) @api.multi def _email_send(self, email_from, subject, body, on_error=None): for partner in self.filtered('email'): tools.email_send(email_from, [partner.email], subject, body, on_error) return True @api.multi def address_get(self, adr_pref=None): """ Find contacts/addresses of the right type(s) by doing a depth-first-search through descendants within company boundaries (stop at entities flagged ``is_company``) then continuing the search at the ancestors that are within the same company boundaries. Defaults to partners of type ``'default'`` when the exact type is not found, or to the provided partner itself if no type ``'default'`` is found either. 
""" adr_pref = set(adr_pref or []) if 'contact' not in adr_pref: adr_pref.add('contact') result = {} visited = set() for partner in self: current_partner = partner while current_partner: to_scan = [current_partner] # Scan descendants, DFS while to_scan: record = to_scan.pop(0) visited.add(record) if record.type in adr_pref and not result.get(record.type): result[record.type] = record.id if len(result) == len(adr_pref): return result to_scan = [ c for c in record.child_ids if c not in visited if not c.is_company ] + to_scan # Continue scanning at ancestor if current_partner is not a commercial entity if current_partner.is_company or not current_partner.parent_id: break current_partner = current_partner.parent_id # default to type 'contact' or the partner itself default = result.get('contact', self.id or False) for adr_type in adr_pref: result[adr_type] = result.get(adr_type) or default return result @api.model def view_header_get(self, view_id, view_type): res = super(Partner, self).view_header_get(view_id, view_type) if res: return res if not self._context.get('category_id'): return False return _('Partners: ') + self.env['res.partner.category'].browse( self._context['category_id']).name @api.model @api.returns('self') def main_partner(self): ''' Return the main partner ''' return self.env.ref('base.main_partner') @api.model def _get_default_address_format(self): return "%(street)s\n%(street2)s\n%(city)s %(state_code)s %(zip)s\n%(country_name)s" @api.multi def _display_address(self, without_company=False): ''' The purpose of this function is to build and return an address formatted accordingly to the standards of the country where it belongs. 
:param address: browse record of the res.partner to format :returns: the address formatted in a display that fit its country habits (or the default ones if not country is specified) :rtype: string ''' # get the information that will be injected into the display format # get the address format address_format = self.country_id.address_format or \ self._get_default_address_format() args = { 'state_code': self.state_id.code or '', 'state_name': self.state_id.name or '', 'country_code': self.country_id.code or '', 'country_name': self.country_id.name or '', 'company_name': self.commercial_company_name or '', } for field in self._address_fields(): args[field] = getattr(self, field) or '' if without_company: args['company_name'] = '' elif self.commercial_company_name: address_format = '%(company_name)s\n' + address_format return address_format % args def _display_address_depends(self): # field dependencies of method _display_address() return self._address_fields() + [ 'country_id.address_format', 'country_id.code', 'country_id.name', 'company_name', 'state_id.code', 'state_id.name', ]
class crm_claim(models.Model):
    """ Customer claim (complaint / after-sale issue), tracked through
    configurable stages (``crm.claim.stage``) and integrated with the mail
    gateway via ``mail.thread`` (claims can be created from inbound email).
    """
    _name = "crm.claim"
    _description = "Claim"
    _order = "priority,date desc"
    _inherit = ['mail.thread']

    @api.multi
    def _get_default_stage_id(self):
        """ Return the default stage for a new claim: the first-sequence stage
        of the user's default sales team. """
        team_id = self.env['crm.team'].sudo()._get_default_team_id()
        return self._stage_find(team_id=team_id.id,
                                domain=[('sequence', '=', '1')])

    id = fields.Integer('ID', readonly=True)
    name = fields.Char('Claim Subject', required=True)
    # default=True instead of the old `lambda *a: 1` (same truthy value,
    # idiomatic and cheaper)
    active = fields.Boolean('Active', default=True)
    action_next = fields.Char('Next Action')
    date_action_next = fields.Datetime('Next Action Date')
    description = fields.Text('Description')
    resolution = fields.Text('Resolution')
    create_date = fields.Datetime('Creation Date', readonly=True)
    write_date = fields.Datetime('Update Date', readonly=True)
    date_deadline = fields.Date('Deadline')
    date_closed = fields.Datetime('Closed', readonly=True)
    # `select=True` was an inert leftover from the pre-8.0 API; `index=True`
    # is the supported spelling and actually creates the DB index
    date = fields.Datetime('Claim Date', index=True,
                           default=lambda self: self._context.get(
                               'date', fields.Date.context_today(self)))
    categ_id = fields.Many2one('crm.claim.category', 'Category')
    priority = fields.Selection([('0', 'Low'), ('1', 'Normal'),
                                 ('2', 'High')], 'Priority', default='1')
    type_action = fields.Selection([('correction', 'Corrective Action'),
                                    ('prevention', 'Preventive Action')],
                                   'Action Type')
    user_id = fields.Many2one('res.users', 'Responsible',
                              track_visibility='always',
                              default=lambda self: self.env.user)
    user_fault = fields.Char('Trouble Responsible')
    team_id = fields.Many2one(
        'crm.team', 'Sales Team', oldname='section_id', index=True,
        help="Responsible sales team. Define Responsible user and Email"
             " account for mail gateway.")
    company_id = fields.Many2one(
        'res.company', 'Company',
        default=lambda self: self.env['res.company']._company_default_get(
            'crm.case'))
    partner_id = fields.Many2one('res.partner', 'Partner')
    email_cc = fields.Text(
        'Watchers Emails', size=252,
        help="These email addresses will be added to the CC field of all"
             " inbound and outbound emails for this record before being sent."
             " Separate multiple email addresses with a comma")
    email_from = fields.Char('Email', size=128,
                             help="Destination email for email gateway.")
    partner_phone = fields.Char('Phone')
    stage_id = fields.Many2one(
        'crm.claim.stage', 'Stage', track_visibility='onchange',
        domain="['|', ('team_ids', '=', team_id), ('case_default', '=', True)]"
    )  # ,default=lambda self:self.env['crm.claim']._get_default_stage_id()
    cause = fields.Text('Root Cause')

    @api.multi
    @api.onchange('partner_id')
    def onchange_partner_id(self, email=False):
        """ Propagate the partner's email and phone onto the claim.

        :param email: unused, kept for backward compatibility with callers
        :return: onchange-style ``{'value': {...}}`` dict
        """
        if not self.partner_id:
            return {'value': {'email_from': False, 'partner_phone': False}}
        # Bug fix: the old code called the removed pre-8.0 API
        # `self.pool.get('res.partner').browse(self.partner_id)`, passing a
        # record where an id was expected. In the new API `self.partner_id`
        # already *is* the res.partner record.
        address = self.partner_id
        return {
            'value': {
                'email_from': address.email,
                'partner_phone': address.phone
            }
        }

    @api.model
    def create(self, vals):
        """ Inject the claim's team as ``default_team_id`` in the creation
        context so that stage defaulting picks the right team. """
        context = dict(self._context or {})
        if vals.get('team_id') and not self._context.get('default_team_id'):
            context['default_team_id'] = vals.get('team_id')
        # context: no_log, because subtype already handle this
        # Bug fix: the updated context was computed but never used — the
        # original called super() on `self`, silently dropping
        # `default_team_id`. Propagate it with with_context().
        return super(crm_claim, self.with_context(context)).create(vals)

    @api.multi
    def message_new(self, msg, custom_values=None):
        """ Mail-gateway hook: create a claim from an inbound email, mapping
        subject/body/addresses onto claim fields. Explicit ``custom_values``
        win over the defaults derived from the message. """
        if custom_values is None:
            custom_values = {}
        desc = html2plaintext(msg.get('body')) if msg.get('body') else ''
        defaults = {
            'name': msg.get('subject') or _("No Subject"),
            'description': desc,
            'email_from': msg.get('from'),
            'email_cc': msg.get('cc'),
            'partner_id': msg.get('author_id', False),
        }
        if msg.get('priority'):
            defaults['priority'] = msg.get('priority')
        defaults.update(custom_values)
        return super(crm_claim, self).message_new(msg, custom_values=defaults)
class Rating(models.Model):
    """ Rating attached to an arbitrary document (``res_model``/``res_id``):
    a 0-10 score, optional feedback text, and an access token used to fill in
    the rating from outside (typically from an email link). """
    _name = "rating.rating"
    _description = "Rating"
    _order = 'write_date desc'
    _rec_name = 'res_name'
    _sql_constraints = [
        ('rating_range', 'check(rating >= 0 and rating <= 10)',
         'Rating should be between 0 to 10'),
    ]

    @api.depends('res_model', 'res_id')
    def _compute_res_name(self):
        """ Display name of the rated record; falls back on "model/id" when
        the record has no name. (Deprecated ``@api.one`` replaced by the
        standard multi-record loop — same per-record behavior.) """
        for rating in self:
            name = self.env[rating.res_model].sudo().browse(
                rating.res_id).name_get()
            rating.res_name = name and name[0][1] or ('%s/%s') % (
                rating.res_model, rating.res_id)

    @api.model
    def new_access_token(self):
        """ Return a fresh random hex token granting write access to the
        rating from outside (email links). """
        return uuid.uuid4().hex

    res_name = fields.Char(string='Resource name',
                           compute='_compute_res_name', store=True,
                           help="The name of the rated resource.")
    res_model_id = fields.Many2one('ir.model', 'Related Document Model',
                                   index=True, ondelete='cascade',
                                   help='Model of the followed resource')
    res_model = fields.Char(string='Document Model',
                            related='res_model_id.model', store=True,
                            index=True, readonly=True)
    res_id = fields.Integer(string='Document', required=True,
                            help="Identifier of the rated object", index=True)
    parent_res_name = fields.Char('Parent Document Name',
                                  compute='_compute_parent_res_name',
                                  store=True)
    parent_res_model_id = fields.Many2one('ir.model',
                                          'Parent Related Document Model',
                                          index=True)
    parent_res_model = fields.Char('Parent Document Model', store=True,
                                   related='parent_res_model_id.model',
                                   index=True)
    parent_res_id = fields.Integer('Parent Document', index=True)
    rated_partner_id = fields.Many2one('res.partner', string="Rated person",
                                       help="Owner of the rated resource")
    partner_id = fields.Many2one('res.partner', string='Customer',
                                 help="Author of the rating")
    rating = fields.Float(string="Rating", group_operator="avg", default=0,
                          help="Rating value: 0=Unhappy, 10=Happy")
    rating_image = fields.Binary('Image', compute='_compute_rating_image')
    rating_text = fields.Selection(
        [('satisfied', 'Satisfied'), ('not_satisfied', 'Not satisfied'),
         ('highly_dissatisfied', 'Highly dissatisfied'),
         ('no_rating', 'No Rating yet')], string='Rating', store=True,
        compute='_compute_rating_text', readonly=True)
    feedback = fields.Text('Comment', help="Reason of the rating")
    message_id = fields.Many2one(
        'mail.message', string="Linked message",
        help="Associated message when posting a review. Mainly used in"
             " website addons.",
        index=True)
    access_token = fields.Char(
        'Security Token', default=new_access_token,
        help="Access token to set the rating of the value")
    consumed = fields.Boolean(string="Filled Rating",
                              help="Enabled if the rating has been filled.")

    @api.depends('parent_res_model', 'parent_res_id')
    def _compute_parent_res_name(self):
        """ Same display-name fallback logic as _compute_res_name, for the
        parent document (if any). """
        for rating in self:
            name = False
            if rating.parent_res_model and rating.parent_res_id:
                name = self.env[rating.parent_res_model].sudo().browse(
                    rating.parent_res_id).name_get()
                name = name and name[0][1] or ('%s/%s') % (
                    rating.parent_res_model, rating.parent_res_id)
            rating.parent_res_name = name

    @api.multi
    @api.depends('rating')
    def _compute_rating_image(self):
        """ Load the static smiley image matching the (truncated) rating
        value; False when the image file cannot be read. """
        for rating in self:
            try:
                image_path = get_resource_path(
                    'rating', 'static/src/img',
                    'rating_%s.png' % (int(rating.rating), ))
                # Bug fix: the original `open(...).read()` never closed the
                # file handle; use a context manager so it is always released.
                with open(image_path, 'rb') as image_file:
                    rating.rating_image = base64.b64encode(image_file.read())
            except (IOError, OSError):
                rating.rating_image = False

    @api.depends('rating')
    def _compute_rating_text(self):
        """ Bucket the numeric rating into a human-readable selection using
        the module-level RATING_LIMIT_* thresholds. """
        for rating in self:
            if rating.rating >= RATING_LIMIT_SATISFIED:
                rating.rating_text = 'satisfied'
            elif rating.rating > RATING_LIMIT_OK:
                rating.rating_text = 'not_satisfied'
            elif rating.rating >= RATING_LIMIT_MIN:
                rating.rating_text = 'highly_dissatisfied'
            else:
                rating.rating_text = 'no_rating'

    @api.model
    def create(self, values):
        """ On creation, resolve the parent document from the rated record
        when both model and id are provided. """
        if values.get('res_model_id') and values.get('res_id'):
            values.update(self._find_parent_data(values))
        return super(Rating, self).create(values)

    @api.multi
    def write(self, values):
        """ Keep the parent document in sync when the rated record changes. """
        if values.get('res_model_id') and values.get('res_id'):
            values.update(self._find_parent_data(values))
        return super(Rating, self).write(values)

    def _find_parent_data(self, values):
        """ Determine the parent res_model/res_id based on the values being
        created or written, delegating to the rated record's
        ``rating_get_parent_model_name`` / ``rating_get_parent_id`` hooks
        when it defines them. """
        current_model_name = self.env['ir.model'].sudo().browse(
            values['res_model_id']).model
        current_record = self.env[current_model_name].browse(values['res_id'])
        data = {
            'parent_res_model_id': False,
            'parent_res_id': False,
        }
        if hasattr(current_record, 'rating_get_parent_model_name'):
            parent_res_model = current_record.rating_get_parent_model_name(
                values)
            data['parent_res_model_id'] = self.env['ir.model']._get(
                parent_res_model).id
            data['parent_res_id'] = current_record.rating_get_parent_id()
        return data

    @api.multi
    def reset(self):
        """ Blank out the ratings and issue a fresh access token for each
        record, so they can be filled in again. """
        for record in self:
            record.write({
                'rating': 0,
                'access_token': record.new_access_token(),
                'feedback': False,
                'consumed': False,
            })

    def action_open_rated_object(self):
        """ Window action opening the form view of the rated document. """
        self.ensure_one()
        return {
            'type': 'ir.actions.act_window',
            'res_model': self.res_model,
            'res_id': self.res_id,
            'views': [[False, 'form']]
        }
class SurveyQuestion(models.Model):
    """ Questions that will be asked in a survey.

        Each question can have one of more suggested answers (eg. in case of
        dropdown choices, multi-answer checkboxes, radio buttons...).
    """
    _name = 'survey.question'
    _description = 'Survey Question'
    _rec_name = 'question'
    _order = 'sequence,id'

    # Model fields #

    # Question metadata
    page_id = fields.Many2one('survey.page', string='Survey page',
                              ondelete='cascade', required=True,
                              default=lambda self: self.env.context.get('page_id'))
    survey_id = fields.Many2one('survey.survey', related='page_id.survey_id',
                                string='Survey')
    sequence = fields.Integer('Sequence', default=10)

    # Question
    question = fields.Char('Question Name', required=True, translate=True)
    description = fields.Html('Description', help="Use this field to add \
        additional explanations about your question", translate=True,
        oldname='descriptive_text')

    # Answer
    type = fields.Selection([
        ('free_text', 'Multiple Lines Text Box'),
        ('textbox', 'Single Line Text Box'),
        ('numerical_box', 'Numerical Value'),
        ('date', 'Date'),
        ('simple_choice', 'Multiple choice: only one answer'),
        ('multiple_choice', 'Multiple choice: multiple answers allowed'),
        ('matrix', 'Matrix')], string='Type of Question',
        default='free_text', required=True)
    matrix_subtype = fields.Selection([('simple', 'One choice per row'),
                                       ('multiple', 'Multiple choices per row')],
                                      string='Matrix Type', default='simple')
    labels_ids = fields.One2many('survey.label', 'question_id',
                                 string='Types of answers',
                                 oldname='answer_choice_ids', copy=True)
    labels_ids_2 = fields.One2many('survey.label', 'question_id_2',
                                   string='Rows of the Matrix', copy=True)
    # labels are used for proposed choices
    # if question.type == simple choice | multiple choice
    #   -> only labels_ids is used
    # if question.type == matrix
    #   -> labels_ids are the columns of the matrix
    #   -> labels_ids_2 are the rows of the matrix

    # Display options
    column_nb = fields.Selection([('12', '1'), ('6', '2'), ('4', '3'),
                                  ('3', '4'), ('2', '6')],
                                 'Number of columns', default='12')
    # These options refer to col-xx-[12|6|4|3|2] classes in Bootstrap
    display_mode = fields.Selection([('columns', 'Radio Buttons'),
                                     ('dropdown', 'Selection Box')],
                                    default='columns')

    # Comments
    comments_allowed = fields.Boolean('Show Comments Field',
                                      oldname="allow_comment")
    comments_message = fields.Char('Comment Message', translate=True,
                                   default=lambda self: _("If other, please specify:"))
    comment_count_as_answer = fields.Boolean('Comment Field is an Answer Choice',
                                             oldname='make_comment_field')

    # Validation
    validation_required = fields.Boolean('Validate entry',
                                         oldname='is_validation_require')
    validation_email = fields.Boolean('Input must be an email')
    validation_length_min = fields.Integer('Minimum Text Length')
    validation_length_max = fields.Integer('Maximum Text Length')
    validation_min_float_value = fields.Float('Minimum value')
    validation_max_float_value = fields.Float('Maximum value')
    validation_min_date = fields.Date('Minimum Date')
    validation_max_date = fields.Date('Maximum Date')
    validation_error_msg = fields.Char('Validation Error message',
                                       oldname='validation_valid_err_msg',
                                       translate=True,
                                       default=lambda self: _("The answer you entered has an invalid format."))

    # Constraints on number of answers (matrices)
    constr_mandatory = fields.Boolean('Mandatory Answer',
                                      oldname="is_require_answer")
    constr_error_msg = fields.Char('Error message',
                                   oldname='req_error_msg',
                                   translate=True,
                                   default=lambda self: _("This question requires an answer."))
    user_input_line_ids = fields.One2many('survey.user_input_line',
                                          'question_id', string='Answers',
                                          domain=[('skipped', '=', False)])

    _sql_constraints = [
        ('positive_len_min', 'CHECK (validation_length_min >= 0)',
         'A length must be positive!'),
        ('positive_len_max', 'CHECK (validation_length_max >= 0)',
         'A length must be positive!'),
        ('validation_length',
         'CHECK (validation_length_min <= validation_length_max)',
         'Max length cannot be smaller than min length!'),
        ('validation_float',
         'CHECK (validation_min_float_value <= validation_max_float_value)',
         'Max value cannot be smaller than min value!'),
        ('validation_date', 'CHECK (validation_min_date <= validation_max_date)',
         'Max date cannot be smaller than min date!')
    ]

    @api.onchange('validation_email')
    def onchange_validation_email(self):
        # Email-format validation and length/range validation are mutually
        # exclusive in the UI: ticking one clears the other.
        if self.validation_email:
            self.validation_required = False

    # Validation methods
    @api.multi
    def validate_question(self, post, answer_tag):
        """ Validate question, depending on question type and parameters.

        Dispatches to ``validate_<type>`` (e.g. validate_textbox) and returns
        its error dict; returns {} when no validator exists for the type.

        :param dict post: submitted form values
        :param str answer_tag: key of this question's answer in ``post``
        :return: dict mapping answer_tag -> error message (empty if valid)
        """
        self.ensure_one()
        try:
            checker = getattr(self, 'validate_' + self.type)
        except AttributeError:
            _logger.warning(self.type + ": This type of question has no validation method")
            return {}
        else:
            return checker(post, answer_tag)

    @api.multi
    def validate_free_text(self, post, answer_tag):
        """ Multi-line text: only the mandatory constraint applies. """
        self.ensure_one()
        errors = {}
        answer = post[answer_tag].strip()
        # Empty answer to mandatory question
        if self.constr_mandatory and not answer:
            errors.update({answer_tag: self.constr_error_msg})
        return errors

    @api.multi
    def validate_textbox(self, post, answer_tag):
        """ Single-line text: mandatory, optional email format, optional
        length range. """
        self.ensure_one()
        errors = {}
        answer = post[answer_tag].strip()
        # Empty answer to mandatory question
        if self.constr_mandatory and not answer:
            errors.update({answer_tag: self.constr_error_msg})
        # Email format validation
        # Note: this validation is very basic:
        #     all the strings of the form
        #     <something>@<anything>.<extension>
        #     will be accepted
        if answer and self.validation_email:
            if not email_validator.match(answer):
                errors.update({answer_tag: _('This answer must be an email address')})
        # Answer validation (if properly defined)
        # Length of the answer must be in a range
        if answer and self.validation_required:
            if not (self.validation_length_min <= len(answer) <= self.validation_length_max):
                errors.update({answer_tag: self.validation_error_msg})
        return errors

    @api.multi
    def validate_numerical_box(self, post, answer_tag):
        """ Numerical value: mandatory, must parse as float, optional
        min/max range. """
        self.ensure_one()
        errors = {}
        answer = post[answer_tag].strip()
        # Empty answer to mandatory question
        if self.constr_mandatory and not answer:
            errors.update({answer_tag: self.constr_error_msg})
        # Checks if user input is a number
        if answer:
            try:
                floatanswer = float(answer)
            except ValueError:
                errors.update({answer_tag: _('This is not a number')})
        # Answer validation (if properly defined)
        if answer and self.validation_required:
            # Answer is not in the right range
            with tools.ignore(Exception):
                floatanswer = float(answer)  # check that it is a float has been done hereunder
                if not (self.validation_min_float_value <= floatanswer <= self.validation_max_float_value):
                    errors.update({answer_tag: self.validation_error_msg})
        return errors

    @api.multi
    def validate_date(self, post, answer_tag):
        """ Date value: mandatory, must parse as a date (early return on
        parse failure), optional min/max date range. """
        self.ensure_one()
        errors = {}
        answer = post[answer_tag].strip()
        # Empty answer to mandatory question
        if self.constr_mandatory and not answer:
            errors.update({answer_tag: self.constr_error_msg})
        # Checks if user input is a date
        if answer:
            try:
                dateanswer = fields.Date.from_string(answer)
            except ValueError:
                errors.update({answer_tag: _('This is not a date')})
                return errors
        # Answer validation (if properly defined)
        if answer and self.validation_required:
            # Answer is not in the right range
            try:
                date_from_string = fields.Date.from_string
                dateanswer = date_from_string(answer)
                min_date = date_from_string(self.validation_min_date)
                max_date = date_from_string(self.validation_max_date)

                if min_date and max_date and not (min_date <= dateanswer <= max_date):
                    # If Minimum and Maximum Date are entered
                    errors.update({answer_tag: self.validation_error_msg})
                elif min_date and not min_date <= dateanswer:
                    # If only Minimum Date is entered and not Define Maximum Date
                    errors.update({answer_tag: self.validation_error_msg})
                elif max_date and not dateanswer <= max_date:
                    # If only Maximum Date is entered and not Define Minimum Date
                    errors.update({answer_tag: self.validation_error_msg})
            except ValueError:  # check that it is a date has been done hereunder
                pass
        return errors

    @api.multi
    def validate_simple_choice(self, post, answer_tag):
        """ Single-choice question: mandatory constraint, with the special
        "-1" value meaning the free-text comment is the chosen answer. """
        self.ensure_one()
        errors = {}
        if self.comments_allowed:
            comment_tag = "%s_%s" % (answer_tag, 'comment')
        # Empty answer to mandatory self
        if self.constr_mandatory and answer_tag not in post:
            errors.update({answer_tag: self.constr_error_msg})
        if self.constr_mandatory and answer_tag in post and not post[answer_tag].strip():
            errors.update({answer_tag: self.constr_error_msg})
        # Answer is a comment and is empty
        if self.constr_mandatory and answer_tag in post and post[answer_tag] == "-1" and self.comment_count_as_answer and comment_tag in post and not post[comment_tag].strip():
            errors.update({answer_tag: self.constr_error_msg})
        return errors

    @api.multi
    def validate_multiple_choice(self, post, answer_tag):
        """ Multi-choice question: mandatory constraint over the set of
        posted ``answer_tag_*`` keys, optionally counting the comment field
        as an answer. """
        self.ensure_one()
        errors = {}
        if self.constr_mandatory:
            answer_candidates = dict_keys_startswith(post, answer_tag)
            comment_flag = answer_candidates.pop(("%s_%s" % (answer_tag, -1)), None)
            if self.comments_allowed:
                comment_answer = answer_candidates.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
            # Preventing answers with blank value
            if all(not answer.strip() for answer in answer_candidates.values()) and answer_candidates:
                errors.update({answer_tag: self.constr_error_msg})
            # There is no answer neither comments (if comments count as answer)
            if not answer_candidates and self.comment_count_as_answer and (not comment_flag or not comment_answer):
                errors.update({answer_tag: self.constr_error_msg})
            # There is no answer at all
            if not answer_candidates and not self.comment_count_as_answer:
                errors.update({answer_tag: self.constr_error_msg})
        return errors

    @api.multi
    def validate_matrix(self, post, answer_tag):
        """ Matrix question: when mandatory, every row (labels_ids_2) must
        have at least one answer; row count is derived from the posted key
        prefixes for the 'multiple' subtype. """
        self.ensure_one()
        errors = {}
        if self.constr_mandatory:
            lines_number = len(self.labels_ids_2)
            answer_candidates = dict_keys_startswith(post, answer_tag)
            answer_candidates.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
            # Number of lines that have been answered
            if self.matrix_subtype == 'simple':
                answer_number = len(answer_candidates)
            elif self.matrix_subtype == 'multiple':
                answer_number = len({sk.rsplit('_', 1)[0] for sk in answer_candidates})
            else:
                raise RuntimeError("Invalid matrix subtype")
            # Validate that each line has been answered
            if answer_number != lines_number:
                errors.update({answer_tag: self.constr_error_msg})
        return errors
class PaymentAcquirerOgone(models.Model):
    """Ogone (Ingenico) payment acquirer: form-based payment with SHA
    signatures and optional alias (token) storage."""
    _inherit = 'payment.acquirer'

    provider = fields.Selection(selection_add=[('ogone', 'Ogone')])
    # Credentials for the Ogone account (restricted to internal users).
    ogone_pspid = fields.Char('PSPID', required_if_provider='ogone', groups='base.group_user')
    ogone_userid = fields.Char('API User ID', required_if_provider='ogone', groups='base.group_user')
    ogone_password = fields.Char('API User Password', required_if_provider='ogone', groups='base.group_user')
    # Shared secrets used to sign outgoing ('in') and verify incoming
    # ('out') messages — see _ogone_generate_shasign.
    ogone_shakey_in = fields.Char('SHA Key IN', size=32, required_if_provider='ogone', groups='base.group_user')
    ogone_shakey_out = fields.Char('SHA Key OUT', size=32, required_if_provider='ogone', groups='base.group_user')
    ogone_alias_usage = fields.Char(
        'Alias Usage', default="Allow saving my payment data",
        help="If you want to use Ogone Aliases, this default "
             "Alias Usage will be presented to the customer as the "
             "reason you want to keep his payment data")

    def _get_feature_support(self):
        """Get advanced feature support by provider.

        Each provider should add its technical in the corresponding
        key for the following features:
            * fees: support payment fees computations
            * authorize: support authorizing payment (separates
                         authorization and capture)
            * tokenize: support saving payment data in a
                        payment.tokenize object
        """
        res = super(PaymentAcquirerOgone, self)._get_feature_support()
        # Ogone supports saving payment data as aliases/tokens.
        res['tokenize'].append('ogone')
        return res

    def _get_ogone_urls(self, environment):
        """Return the Ogone endpoint URLs for the given environment
        (e.g. 'test' or 'prod' — the value is interpolated into the path).

        - standard order: POST address for form-based payment
        """
        return {
            'ogone_standard_order_url': 'https://secure.ogone.com/ncol/%s/orderstandard_utf8.asp' % (environment, ),
            'ogone_direct_order_url': 'https://secure.ogone.com/ncol/%s/orderdirect_utf8.asp' % (environment, ),
            'ogone_direct_query_url': 'https://secure.ogone.com/ncol/%s/querydirect_utf8.asp' % (environment, ),
            'ogone_afu_agree_url': 'https://secure.ogone.com/ncol/%s/AFU_agree.asp' % (environment, ),
        }

    def _ogone_generate_shasign(self, inout, values):
        """ Generate the shasign for incoming or outgoing communications.

        :param string inout: 'in' (flectra contacting ogone) or 'out' (ogone
                             contacting flectra). In this last case only some
                             fields should be contained (see e-Commerce basic)
        :param dict values: transaction values
        :return string: shasign
        """
        assert inout in ('in', 'out')
        assert self.provider == 'ogone'
        # Pick the matching shared secret (ogone_shakey_in / ogone_shakey_out).
        key = getattr(self, 'ogone_shakey_' + inout)

        def filter_key(key):
            # Outgoing ('in') signatures cover every non-empty field;
            # incoming ('out') signatures only cover the documented SHA-OUT
            # parameter whitelist below.
            if inout == 'in':
                return True
            else:
                # SHA-OUT keys
                # source https://viveum.v-psp.com/Ncol/Viveum_e-Com-BAS_EN.pdf
                keys = [
                    'AAVADDRESS', 'AAVCHECK', 'AAVMAIL', 'AAVNAME',
                    'AAVPHONE', 'AAVZIP', 'ACCEPTANCE', 'ALIAS', 'AMOUNT',
                    'BIC', 'BIN', 'BRAND', 'CARDNO', 'CCCTY', 'CN',
                    'COMPLUS', 'CREATION_STATUS', 'CURRENCY', 'CVCCHECK',
                    'DCC_COMMPERCENTAGE', 'DCC_CONVAMOUNT', 'DCC_CONVCCY',
                    'DCC_EXCHRATE', 'DCC_EXCHRATESOURCE', 'DCC_EXCHRATETS',
                    'DCC_INDICATOR', 'DCC_MARGINPERCENTAGE',
                    'DCC_VALIDHOURS', 'DIGESTCARDNO', 'ECI', 'ED',
                    'ENCCARDNO', 'FXAMOUNT', 'FXCURRENCY', 'IBAN', 'IP',
                    'IPCTY', 'NBREMAILUSAGE', 'NBRIPUSAGE',
                    'NBRIPUSAGE_ALLTX', 'NBRUSAGE', 'NCERROR',
                    'NCERRORCARDNO', 'NCERRORCN', 'NCERRORCVC', 'NCERRORED',
                    'ORDERID', 'PAYID', 'PAYIDSUB', 'PM', 'SCO_CATEGORY',
                    'SCORING', 'STATUS', 'SUBBRAND', 'SUBSCRIPTION_ID',
                    'TRXDATE', 'VC'
                ]
                return key.upper() in keys

        # Ogone's scheme: sort KEY=value pairs alphabetically, append the
        # secret after each pair, then SHA-1 the concatenation. Empty values
        # are excluded. The exact byte order matters — do not reorder.
        items = sorted((k.upper(), v) for k, v in values.items())
        sign = ''.join('%s=%s%s' % (k, v, key) for k, v in items if v and filter_key(k))
        sign = sign.encode("utf-8")
        shasign = sha1(sign).hexdigest()
        return shasign

    def ogone_form_generate_values(self, values):
        """Build the field dict POSTed to Ogone's form endpoint, including
        the SHASIGN computed over the Ogone-specific subset of fields.

        :param dict values: transaction values (reference, amount, currency,
            partner_* entries, optional return_url / alias_usage)
        :return dict: values plus the signed Ogone parameters
        """
        base_url = self.env['ir.config_parameter'].sudo().get_param(
            'web.base.url')
        ogone_tx_values = dict(values)
        # Only the keys below participate in the signature; they are merged
        # back into the full dict at the end.
        temp_ogone_tx_values = {
            'PSPID': self.ogone_pspid,
            'ORDERID': values['reference'],
            # Ogone expects the amount in cents, as an integer string.
            'AMOUNT': float_repr(float_round(values['amount'], 2) * 100, 0),
            'CURRENCY': values['currency'] and values['currency'].name or '',
            'LANGUAGE': values.get('partner_lang'),
            'CN': values.get('partner_name'),
            'EMAIL': values.get('partner_email'),
            'OWNERZIP': values.get('partner_zip'),
            'OWNERADDRESS': values.get('partner_address'),
            'OWNERTOWN': values.get('partner_city'),
            'OWNERCTY': values.get('partner_country') and values.get('partner_country').code or '',
            'OWNERTELNO': values.get('partner_phone'),
            'ACCEPTURL': urls.url_join(base_url, OgoneController._accept_url),
            'DECLINEURL': urls.url_join(base_url, OgoneController._decline_url),
            'EXCEPTIONURL': urls.url_join(base_url, OgoneController._exception_url),
            'CANCELURL': urls.url_join(base_url, OgoneController._cancel_url),
            # return_url is popped so it is not duplicated in the final dict;
            # False values are skipped by the signature generator.
            'PARAMPLUS': 'return_url=%s' % ogone_tx_values.pop('return_url') if ogone_tx_values.get('return_url') else False,
        }
        if self.save_token in ['ask', 'always']:
            temp_ogone_tx_values.update({
                'ALIAS': 'FLECTRA-NEW-ALIAS-%s' % time.time(),  # something unique,
                'ALIASUSAGE': values.get('alias_usage') or self.ogone_alias_usage,
            })
        shasign = self._ogone_generate_shasign('in', temp_ogone_tx_values)
        temp_ogone_tx_values['SHASIGN'] = shasign
        ogone_tx_values.update(temp_ogone_tx_values)
        return ogone_tx_values

    def ogone_get_form_action_url(self):
        """Return the form POST target for the acquirer's environment."""
        return self._get_ogone_urls(
            self.environment)['ogone_standard_order_url']

    def ogone_s2s_form_validate(self, data):
        """Check that all mandatory card fields are present in `data`.

        :return bool: True when every mandatory field is non-empty
        """
        error = dict()
        mandatory_fields = [
            "cc_number", "cc_cvc", "cc_holder_name", "cc_expiry", "cc_brand"
        ]
        # Validation
        for field_name in mandatory_fields:
            if not data.get(field_name):
                error[field_name] = 'missing'
        return False if error else True

    def ogone_s2s_form_process(self, data):
        """Create and return a payment.token record from posted card data.

        NOTE(review): card data is stored on the token model here —
        presumably the token model handles the actual tokenization with
        Ogone; verify PCI handling in payment.token.create.
        """
        values = {
            'cc_number': data.get('cc_number'),
            'cc_cvc': int(data.get('cc_cvc')),
            'cc_holder_name': data.get('cc_holder_name'),
            'cc_expiry': data.get('cc_expiry'),
            'cc_brand': data.get('cc_brand'),
            'acquirer_id': int(data.get('acquirer_id')),
            'partner_id': int(data.get('partner_id'))
        }
        pm_id = self.env['payment.token'].sudo().create(values)
        return pm_id
class SurveyUserInputLine(models.Model):
    """One answer line of a survey user input: a single (question, value)
    pair, typed via `answer_type`, or a skip marker."""
    _name = 'survey.user_input_line'
    _description = 'Survey User Input Line'
    _rec_name = 'date_create'

    user_input_id = fields.Many2one('survey.user_input', string='User Input', ondelete='cascade', required=True)
    question_id = fields.Many2one('survey.question', string='Question', ondelete='restrict', required=True)
    page_id = fields.Many2one(related='question_id.page_id', string="Page")
    survey_id = fields.Many2one(related='user_input_id.survey_id', string='Survey', store=True)
    date_create = fields.Datetime('Create Date', default=fields.Datetime.now, required=True)
    skipped = fields.Boolean('Skipped')
    # Which value_* field below actually carries the answer.
    answer_type = fields.Selection([
        ('text', 'Text'),
        ('number', 'Number'),
        ('date', 'Date'),
        ('free_text', 'Free Text'),
        ('suggestion', 'Suggestion')], string='Answer Type')
    value_text = fields.Char('Text answer')
    value_number = fields.Float('Numerical answer')
    value_date = fields.Date('Date answer')
    value_free_text = fields.Text('Free Text answer')
    value_suggested = fields.Many2one('survey.label', string="Suggested answer")
    value_suggested_row = fields.Many2one('survey.label', string="Row answer")
    quizz_mark = fields.Float('Score given for this choice')

    @api.constrains('skipped', 'answer_type')
    def _answered_or_skipped(self):
        # A line must be exactly one of: answered (answer_type set) or
        # skipped — never both, never neither.
        for uil in self:
            if not uil.skipped != bool(uil.answer_type):
                raise ValidationError(_('A question cannot be unanswered and skipped'))

    @api.constrains('answer_type')
    def _check_answer_type(self):
        # The value_* field matching answer_type must be filled
        # (0 is a valid numerical answer).
        for uil in self:
            fields_type = {
                'text': bool(uil.value_text),
                'number': (bool(uil.value_number) or uil.value_number == 0),
                'date': bool(uil.value_date),
                'free_text': bool(uil.value_free_text),
                'suggestion': bool(uil.value_suggested)
            }
            if not fields_type.get(uil.answer_type, True):
                raise ValidationError(_('The answer must be in the right type'))

    def _get_mark(self, value_suggested):
        """Return the quizz mark of the suggested label id, 0.0 if the
        label does not exist."""
        label = self.env['survey.label'].browse(int(value_suggested))
        mark = label.quizz_mark if label.exists() else 0.0
        return mark

    @api.model
    def create(self, vals):
        # Keep quizz_mark in sync with the chosen suggestion.
        value_suggested = vals.get('value_suggested')
        if value_suggested:
            vals.update({'quizz_mark': self._get_mark(value_suggested)})
        return super(SurveyUserInputLine, self).create(vals)

    @api.multi
    def write(self, vals):
        # Keep quizz_mark in sync with the chosen suggestion.
        value_suggested = vals.get('value_suggested')
        if value_suggested:
            vals.update({'quizz_mark': self._get_mark(value_suggested)})
        return super(SurveyUserInputLine, self).write(vals)

    @api.model
    def save_lines(self, user_input_id, question, post, answer_tag):
        """ Save answers to questions, depending on question type

        If an answer already exists for question and user_input_id, it will be
        overwritten (in order to maintain data consistency).

        Dispatches to save_line_<question.type>; returns False (and logs)
        when no saver exists for the question type.
        """
        try:
            saver = getattr(self, 'save_line_' + question.type)
        except AttributeError:
            _logger.error(question.type + ": This type of question has no saving function")
            return False
        else:
            saver(user_input_id, question, post, answer_tag)

    @api.model
    def save_line_free_text(self, user_input_id, question, post, answer_tag):
        """Save (or overwrite) a free-text answer; empty post marks skipped."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False,
        }
        if answer_tag in post and post[answer_tag].strip():
            vals.update({'answer_type': 'free_text', 'value_free_text': post[answer_tag]})
        else:
            vals.update({'answer_type': None, 'skipped': True})
        # Overwrite the existing line for this (input, question) if any.
        old_uil = self.search([
            ('user_input_id', '=', user_input_id),
            ('survey_id', '=', question.survey_id.id),
            ('question_id', '=', question.id)
        ])
        if old_uil:
            old_uil.write(vals)
        else:
            # create on the empty recordset == model-level create.
            old_uil.create(vals)
        return True

    @api.model
    def save_line_textbox(self, user_input_id, question, post, answer_tag):
        """Save (or overwrite) a single-line text answer."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        if answer_tag in post and post[answer_tag].strip():
            vals.update({'answer_type': 'text', 'value_text': post[answer_tag]})
        else:
            vals.update({'answer_type': None, 'skipped': True})
        old_uil = self.search([
            ('user_input_id', '=', user_input_id),
            ('survey_id', '=', question.survey_id.id),
            ('question_id', '=', question.id)
        ])
        if old_uil:
            old_uil.write(vals)
        else:
            old_uil.create(vals)
        return True

    @api.model
    def save_line_numerical_box(self, user_input_id, question, post, answer_tag):
        """Save (or overwrite) a numerical answer.

        NOTE(review): float(post[answer_tag]) raises ValueError on
        non-numeric input — presumably validate_numerical_box ran first;
        confirm the controller always validates before saving.
        """
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        if answer_tag in post and post[answer_tag].strip():
            vals.update({'answer_type': 'number', 'value_number': float(post[answer_tag])})
        else:
            vals.update({'answer_type': None, 'skipped': True})
        old_uil = self.search([
            ('user_input_id', '=', user_input_id),
            ('survey_id', '=', question.survey_id.id),
            ('question_id', '=', question.id)
        ])
        if old_uil:
            old_uil.write(vals)
        else:
            old_uil.create(vals)
        return True

    @api.model
    def save_line_date(self, user_input_id, question, post, answer_tag):
        """Save (or overwrite) a date answer."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        if answer_tag in post and post[answer_tag].strip():
            vals.update({'answer_type': 'date', 'value_date': post[answer_tag]})
        else:
            vals.update({'answer_type': None, 'skipped': True})
        old_uil = self.search([
            ('user_input_id', '=', user_input_id),
            ('survey_id', '=', question.survey_id.id),
            ('question_id', '=', question.id)
        ])
        if old_uil:
            old_uil.write(vals)
        else:
            old_uil.create(vals)
        return True

    @api.model
    def save_line_simple_choice(self, user_input_id, question, post, answer_tag):
        """Save a simple-choice answer: delete previous lines, then record
        the suggestion and/or the free-form comment."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        # Unlike the text/number savers, choice answers are replaced
        # wholesale: old lines are removed, new ones created.
        old_uil = self.search([
            ('user_input_id', '=', user_input_id),
            ('survey_id', '=', question.survey_id.id),
            ('question_id', '=', question.id)
        ])
        old_uil.sudo().unlink()
        if answer_tag in post and post[answer_tag].strip():
            vals.update({'answer_type': 'suggestion', 'value_suggested': post[answer_tag]})
        else:
            vals.update({'answer_type': None, 'skipped': True})
        # '-1' indicates 'comment count as an answer' so do not need to record it
        if post.get(answer_tag) and post.get(answer_tag) != '-1':
            self.create(vals)
        comment_answer = post.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
        if comment_answer:
            # The comment is stored as an extra 'text' line.
            vals.update({'answer_type': 'text', 'value_text': comment_answer, 'skipped': False, 'value_suggested': False})
            self.create(vals)
        return True

    @api.model
    def save_line_multiple_choice(self, user_input_id, question, post, answer_tag):
        """Save a multiple-choice answer: one 'suggestion' line per ticked
        choice, plus an optional 'text' line for the comment."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        old_uil = self.search([
            ('user_input_id', '=', user_input_id),
            ('survey_id', '=', question.survey_id.id),
            ('question_id', '=', question.id)
        ])
        old_uil.sudo().unlink()
        ca_dict = dict_keys_startswith(post, answer_tag + '_')
        comment_answer = ca_dict.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
        if len(ca_dict) > 0:
            for key in ca_dict:
                # '-1' indicates 'comment count as an answer' so do not need to record it
                if key != ('%s_%s' % (answer_tag, '-1')):
                    vals.update({'answer_type': 'suggestion', 'value_suggested': ca_dict[key]})
                    self.create(vals)
        if comment_answer:
            vals.update({'answer_type': 'text', 'value_text': comment_answer, 'value_suggested': False})
            self.create(vals)
        if not ca_dict and not comment_answer:
            # Nothing ticked and no comment: record a skip line.
            vals.update({'answer_type': None, 'skipped': True})
            self.create(vals)
        return True

    @api.model
    def save_line_matrix(self, user_input_id, question, post, answer_tag):
        """Save a matrix answer: one 'suggestion' line per (row[, column])
        cell, plus an optional comment line; a skip line when empty."""
        vals = {
            'user_input_id': user_input_id,
            'question_id': question.id,
            'survey_id': question.survey_id.id,
            'skipped': False
        }
        old_uil = self.search([
            ('user_input_id', '=', user_input_id),
            ('survey_id', '=', question.survey_id.id),
            ('question_id', '=', question.id)
        ])
        old_uil.sudo().unlink()
        no_answers = True
        ca_dict = dict_keys_startswith(post, answer_tag + '_')
        comment_answer = ca_dict.pop(("%s_%s" % (answer_tag, 'comment')), '').strip()
        if comment_answer:
            vals.update({'answer_type': 'text', 'value_text': comment_answer})
            self.create(vals)
            no_answers = False
        if question.matrix_subtype == 'simple':
            # One choice per row: keys look like "<tag>_<row_id>".
            for row in question.labels_ids_2:
                a_tag = "%s_%s" % (answer_tag, row.id)
                if a_tag in ca_dict:
                    no_answers = False
                    vals.update({'answer_type': 'suggestion', 'value_suggested': ca_dict[a_tag], 'value_suggested_row': row.id})
                    self.create(vals)
        elif question.matrix_subtype == 'multiple':
            # Several choices per row: keys look like "<tag>_<row_id>_<col_id>".
            for col in question.labels_ids:
                for row in question.labels_ids_2:
                    a_tag = "%s_%s_%s" % (answer_tag, row.id, col.id)
                    if a_tag in ca_dict:
                        no_answers = False
                        vals.update({'answer_type': 'suggestion', 'value_suggested': col.id, 'value_suggested_row': row.id})
                        self.create(vals)
        if no_answers:
            vals.update({'answer_type': None, 'skipped': True})
            self.create(vals)
        return True
class AcquirerSips(models.Model):
    """Worldline Sips (Atos) payment acquirer: form-based payment with a
    SHA-256 seal over the pipe-separated ``Data`` field."""
    _inherit = 'payment.acquirer'

    provider = fields.Selection(selection_add=[('sips', 'Sips')])
    sips_merchant_id = fields.Char('Merchant ID', help="Used for production only", required_if_provider='sips', groups='base.group_user')
    sips_secret = fields.Char('Secret Key', size=64, required_if_provider='sips', groups='base.group_user')
    # FIX: the default endpoints were swapped — the "simu" host is
    # Worldline's simulation (test) server and must back the *test* URL,
    # while the live host backs the *prod* URL.
    sips_test_url = fields.Char(
        "Test's url", required_if_provider='sips', groups='base.group_no_one',
        default='https://payment-webinit.simu.sips-atos.com/paymentInit')
    sips_prod_url = fields.Char(
        "Prod's url", required_if_provider='sips', groups='base.group_no_one',
        default='https://payment-webinit.sips-atos.com/paymentInit')
    sips_version = fields.Char("Interface Version", required_if_provider='sips', groups='base.group_no_one', default='HP_2.3')

    def _sips_generate_shasign(self, values):
        """ Generate the shasign for incoming or outgoing communications.

        :param dict values: transaction values; only values['Data'] is signed
        :return string: shasign (hex SHA-256 of Data + secret key)
        :raise ValidationError: if called on a non-Sips acquirer
        """
        if self.provider != 'sips':
            raise ValidationError(_('Incorrect payment acquirer provider'))
        data = values['Data']
        # Test key provided by Worldline; the real secret is only used in
        # the production environment.
        key = u'002001000000001_KEY1'
        if self.environment == 'prod':
            key = getattr(self, 'sips_secret')
        shasign = sha256((data + key).encode('utf-8'))
        return shasign.hexdigest()

    @api.multi
    def sips_form_generate_values(self, values):
        """Build the Sips POST fields: the pipe-separated ``Data`` payload,
        the interface version and the SHA-256 ``Seal``.

        :param dict values: transaction values (amount, currency_id,
            reference, optional return_url)
        :return dict: values plus Data / InterfaceVersion / Seal
        :raise ValidationError: when the currency is not supported by Sips
        """
        self.ensure_one()
        base_url = self.env['ir.config_parameter'].sudo().get_param(
            'web.base.url')
        currency = self.env['res.currency'].sudo().browse(
            values['currency_id'])
        # Sips identifies currencies by numeric ISO code, not by name.
        currency_code = CURRENCY_CODES.get(currency.name, False)
        if not currency_code:
            # FIX: brand name was misspelled ('Wordline').
            raise ValidationError(_('Currency not supported by Worldline'))
        # Amount is expressed in cents.
        amount = int(values['amount'] * 100)
        if self.environment == 'prod':
            # For production environment, key version 2 is required
            merchant_id = getattr(self, 'sips_merchant_id')
            key_version = self.env['ir.config_parameter'].sudo().get_param(
                'sips.key_version', '2')
        else:
            # Test key provided by Atos Worldline works only with version 1
            merchant_id = '002001000000001'
            key_version = '1'
        sips_tx_values = dict(values)
        sips_tx_values.update({
            'Data': u'amount=%s|' % amount +
                    u'currencyCode=%s|' % currency_code +
                    u'merchantId=%s|' % merchant_id +
                    u'normalReturnUrl=%s|' % urls.url_join(base_url, SipsController._return_url) +
                    u'automaticResponseUrl=%s|' % urls.url_join(base_url, SipsController._return_url) +
                    u'transactionReference=%s|' % values['reference'] +
                    u'statementReference=%s|' % values['reference'] +
                    u'keyVersion=%s' % key_version,
            'InterfaceVersion': self.sips_version,
        })
        # Round-trip the return_url and reference through returnContext so
        # the controller can restore them from Sips' response.
        return_context = {}
        if sips_tx_values.get('return_url'):
            return_context[u'return_url'] = u'%s' % sips_tx_values.pop(
                'return_url')
        return_context[u'reference'] = u'%s' % sips_tx_values['reference']
        sips_tx_values['Data'] += u'|returnContext=%s' % (
            json.dumps(return_context))
        # Seal must be computed after Data is fully assembled.
        shasign = self._sips_generate_shasign(sips_tx_values)
        sips_tx_values['Seal'] = shasign
        return sips_tx_values

    @api.multi
    def sips_get_form_action_url(self):
        """Return the form POST target for the acquirer's environment."""
        self.ensure_one()
        return self.environment == 'prod' and self.sips_prod_url or self.sips_test_url