def _interval_dates(self, frequency, company):
    """Compute the theoretical date from which account move lines should be fetched.

    :param str frequency: a valid value of the selection field on the object
        (daily, monthly, annually); frequencies are literal (daily means
        24 hours and so on)
    :param recordset company: the company for which the closing is done
    :return dict: 'interval_from' — the theoretical date from which account
        move lines are fetched; 'date_stop' — date to which the move lines
        are fetched, always now(); 'name_interval' — label of the closing.
        Dates are in their Odoo Database string representation.
    """
    import calendar  # local import: only needed for day clamping below

    date_stop = datetime.utcnow()
    interval_from = None
    name_interval = ''
    if frequency == 'daily':
        interval_from = date_stop - timedelta(days=1)
        name_interval = _('Daily Closing')
    elif frequency == 'monthly':
        # Go back one month, wrapping to December of the previous year.
        month_target = date_stop.month - 1 or 12
        year_target = date_stop.year if month_target < 12 else date_stop.year - 1
        # Clamp the day so that e.g. March 31st maps to the last day of
        # February instead of raising ValueError from .replace().
        last_day = calendar.monthrange(year_target, month_target)[1]
        interval_from = date_stop.replace(
            year=year_target, month=month_target, day=min(date_stop.day, last_day))
        name_interval = _('Monthly Closing')
    elif frequency == 'annually':
        year_target = date_stop.year - 1
        # Clamp Feb 29th to the 28th when the previous year is not a leap year.
        last_day = calendar.monthrange(year_target, date_stop.month)[1]
        interval_from = date_stop.replace(year=year_target, day=min(date_stop.day, last_day))
        name_interval = _('Annual Closing')
    return {'interval_from': FieldDateTime.to_string(interval_from),
            'date_stop': FieldDateTime.to_string(date_stop),
            'name_interval': name_interval}
def create_employee_from_applicant(self):
    """ Create an hr.employee from the hr.applicants """
    employee = False
    for applicant in self:
        address_id = contact_name = False
        if applicant.partner_id:
            # Default the home address and the contact name from the linked partner.
            address_id = applicant.partner_id.address_get(['contact'])['contact']
            contact_name = applicant.partner_id.name_get()[0][1]
        if applicant.job_id and (applicant.partner_name or contact_name):
            # Bump the hired counter on the job position.
            applicant.job_id.write({'no_of_hired_employee': applicant.job_id.no_of_hired_employee + 1})
            # Work address/email/phone come from the applicant's company/department company.
            employee = self.env['hr.employee'].create({'name': applicant.partner_name or contact_name,
                                                       'job_id': applicant.job_id.id,
                                                       'address_home_id': address_id,
                                                       'department_id': applicant.department_id.id or False,
                                                       'address_id': applicant.company_id and applicant.company_id.partner_id and applicant.company_id.partner_id.id or False,
                                                       'work_email': applicant.department_id and applicant.department_id.company_id and applicant.department_id.company_id.email or False,
                                                       'work_phone': applicant.department_id and applicant.department_id.company_id and applicant.department_id.company_id.phone or False})
            applicant.write({'emp_id': employee.id})
            applicant.job_id.message_post(
                body=_('New Employee %s Hired') % applicant.partner_name if applicant.partner_name else applicant.name,
                subtype="hr_recruitment.mt_job_applicant_hired")
            employee._broadcast_welcome()
        else:
            raise UserError(_('You must define an Applied Job and a Contact Name for this applicant.'))

    # Open the employee list action, focused on the last created employee.
    employee_action = self.env.ref('hr.open_view_employee_list')
    dict_act_window = employee_action.read([])[0]
    if employee:
        dict_act_window['res_id'] = employee.id
    dict_act_window['view_mode'] = 'form,tree'
    return dict_act_window
def _bootstrap_homepage(self):
    """Create this website's specific homepage view (an empty editable area)
    and its top menu with a single 'Home' entry."""
    standard_homepage = self.env.ref('website.homepage', raise_if_not_found=False)
    if not standard_homepage:
        return
    # Website-specific copy of the homepage template: layout + empty snippet area.
    new_homepage_view = '''<t name="Homepage" t-name="website.homepage%s">
        <t t-call="website.layout">
            <t t-set="pageName" t-value="'homepage'"/>
            <div id="wrap" class="oe_structure oe_empty"/>
        </t>
    </t>''' % (self.id)
    standard_homepage.with_context(website_id=self.id).arch_db = new_homepage_view
    # Fetch the website-specific page sharing the homepage key (created by the
    # arch_db write above — presumably via view copy-on-write; confirm).
    self.homepage_id = self.env['website.page'].search([('website_id', '=', self.id),
                                                        ('key', '=', standard_homepage.key)])
    top_menu = self.env['website.menu'].create({
        'name': _('Top Menu for website %s') % self.id,
        'website_id': self.id,
        'sequence': 0
    })
    self.menu_id = top_menu.id
    self.env['website.menu'].create({
        'name': _('Home'),
        'url': '/',
        'website_id': self.id,
        'parent_id': top_menu.id,
        'sequence': 10
    })
def action_validate(self):
    """Validate leave requests: final approval step (second approval when
    the leave type requires both)."""
    if not self.env.user.has_group('hr_holidays.group_hr_holidays_user'):
        raise UserError(_('Only an HR Officer or Manager can approve leave requests.'))
    if any(not holiday.can_approve for holiday in self):
        raise UserError(_('Only your manager can approve your leave requests'))
    current_employee = self.env['hr.employee'].search([('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state not in ['confirm', 'validate1']:
            raise UserError(_('Leave request must be confirmed in order to approve it.'))
        if holiday.state == 'validate1' and not holiday.env.user.has_group('hr_holidays.group_hr_holidays_manager'):
            raise UserError(_('Only an HR Manager can apply the second approval on leave requests.'))
        holiday.write({'state': 'validate'})
        # Record who performed this (first or second) approval step.
        if holiday.validation_type == 'both':
            holiday.write({'second_approver_id': current_employee.id})
        else:
            holiday.write({'first_approver_id': current_employee.id})
        if holiday.holiday_type == 'employee':
            holiday._validate_leave_request()
        elif holiday.holiday_type in ['category', 'department']:
            # Fan out: create one child leave per employee of the category/department.
            leaves = self.env['hr.leave']
            employees = holiday.category_id.employee_ids if holiday.holiday_type == 'category' else holiday.department_id.member_ids
            for employee in employees:
                values = holiday._prepare_holiday_values(employee)
                leaves += self.with_context(mail_notify_force_send=False).create(values)
            # TODO is it necessary to interleave the calls?
            leaves.action_approve()
            if leaves and leaves[0].validation_type == 'both':
                leaves.action_validate()
    self.activity_update()
    return True
def _bootstrap_homepage(self):
    """Create this website's specific homepage view (rendered from the
    default homepage template) and its top menu with a 'Home' entry."""
    standard_homepage = self.env.ref('website.homepage', raise_if_not_found=False)
    if not standard_homepage:
        return
    # Render the default homepage body and wrap it in a website-specific t-name.
    new_homepage_view = '''<t name="Homepage" t-name="website.homepage%s">
        <t t-call="website.layout">
            %s
        </t>
    </t>''' % (self.id, self.env['ir.ui.view'].render_template('website.default_homepage', values={'website': self}).decode())
    standard_homepage.with_context(website_id=self.id).arch_db = new_homepage_view
    # Fetch the website-specific page sharing the homepage key (created by the
    # arch_db write above — presumably via view copy-on-write; confirm).
    self.homepage_id = self.env['website.page'].search([('website_id', '=', self.id),
                                                        ('key', '=', standard_homepage.key)])
    top_menu = self.env['website.menu'].create({
        'name': _('Top Menu for website %s') % self.id,
        'website_id': self.id,
        'sequence': 0
    })
    self.menu_id = top_menu.id
    self.env['website.menu'].create({
        'name': _('Home'),
        'url': '/',
        'website_id': self.id,
        'parent_id': top_menu.id,
        'sequence': 10
    })
def _parse_import_data(self, data, import_fields, options):
    """Normalize date/datetime and float columns of ``data`` in place.

    Fixes vs. the previous version: iterate with ``items()`` instead of the
    Python-2-only ``iteritems()``, and format the caught exception object
    itself instead of the long-deprecated ``e.message`` attribute (which
    does not exist on Python 3 exceptions).
    """
    # Get fields of type date/datetime
    all_fields = self.env[self.res_model].fields_get()
    for name, field in all_fields.items():
        if field['type'] in ('date', 'datetime') and name in import_fields:
            # Parse date
            index = import_fields.index(name)
            dt = datetime.datetime
            server_format = DEFAULT_SERVER_DATE_FORMAT if field['type'] == 'date' else DEFAULT_SERVER_DATETIME_FORMAT
            if options.get('%s_format' % field['type'], server_format) != server_format:
                # NOTE(review): the .encode('utf-8') calls assume a Python 2
                # byte-string strptime; on Python 3 they would break — confirm
                # the target runtime before removing them.
                user_format = ustr(options.get('%s_format' % field['type'])).encode('utf-8')
                for num, line in enumerate(data):
                    if line[index]:
                        try:
                            line[index] = dt.strftime(dt.strptime(ustr(line[index]).encode('utf-8'), user_format), server_format)
                        except ValueError as e:
                            raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, num + 1, ustr(e)))
                        except Exception as e:
                            raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, num + 1, ustr(e)))
        elif field['type'] in ('float', 'monetary') and name in import_fields:
            # Parse float, sometimes float values from file have currency symbol or () to denote a negative value
            # We should be able to manage both case
            index = import_fields.index(name)
            self._parse_float_from_data(data, index, name, options)
    return data
def _parse_import_data_recursive(self, model, prefix, data, import_fields, options):
    """Normalize date/datetime and float columns of ``data`` in place,
    recursing into relational sub-fields (``field/subfield`` columns)."""
    # Get fields of type date/datetime
    all_fields = self.env[model].fields_get()
    for name, field in all_fields.items():
        name = prefix + name
        if field['type'] in ('date', 'datetime') and name in import_fields:
            # Parse date
            index = import_fields.index(name)
            dt = datetime.datetime
            server_format = DEFAULT_SERVER_DATE_FORMAT if field['type'] == 'date' else DEFAULT_SERVER_DATETIME_FORMAT
            if options.get('%s_format' % field['type'], server_format) != server_format:
                # datetime.str[fp]time takes *native strings* in both
                # versions, for both data and pattern
                user_format = pycompat.to_native(options.get('%s_format' % field['type']))
                for num, line in enumerate(data):
                    if line[index]:
                        try:
                            line[index] = dt.strftime(dt.strptime(pycompat.to_native(line[index].strip()), user_format), server_format)
                        except ValueError as e:
                            raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, num + 1, e))
                        except Exception as e:
                            raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, num + 1, e))
        # Check if the field is in import_field and is a relational (followed by /)
        # Also verify that the field name exactly match the import_field at the correct level.
        elif any(name + '/' in import_field and name == import_field.split('/')[prefix.count('/')] for import_field in import_fields):
            # Recursive call with the relational as new model and add the field name to the prefix
            self._parse_import_data_recursive(field['relation'], name + '/', data, import_fields, options)
        elif field['type'] in ('float', 'monetary') and name in import_fields:
            # Parse float, sometimes float values from file have currency symbol or () to denote a negative value
            # We should be able to manage both case
            index = import_fields.index(name)
            self._parse_float_from_data(data, index, name, options)
    return data
def get_error_messages(self):
    """Build the list of human-readable error messages for portal wizard
    users whose email is empty, duplicated within the wizard, or already
    used as the login of an existing user.

    :return list: translated error messages (empty when everything is valid)
    """
    emails = []
    partners_error_empty = self.env['res.partner']
    partners_error_emails = self.env['res.partner']
    partners_error_user = self.env['res.partner']
    # Only wizard lines granting portal access to partners without a user yet.
    for wizard_user in self.with_context(active_test=False).filtered(lambda w: w.in_portal and not w.partner_id.user_ids):
        email = extract_email(wizard_user.email)
        if not email:
            partners_error_empty |= wizard_user.partner_id
        elif email in emails:
            partners_error_emails |= wizard_user.partner_id
        user = self.env['res.users'].sudo().with_context(active_test=False).search([('login', '=', email)])
        if user:
            partners_error_user |= wizard_user.partner_id
        emails.append(email)
    error_msg = []
    if partners_error_empty:
        error_msg.append("%s\n- %s" % (_("Some contacts don't have a valid email: "),
                         '\n- '.join(partners_error_empty.mapped('display_name'))))
    if partners_error_emails:
        error_msg.append("%s\n- %s" % (_("Several contacts have the same email: "),
                         '\n- '.join(partners_error_emails.mapped('email'))))
    if partners_error_user:
        # NOTE(review): this branch was corrupted in the source; reconstructed
        # to list the partners whose email collides with an existing user login.
        error_msg.append("%s\n- %s" % (_("Some contacts have the same email as an existing portal user:"),
                         '\n- '.join(['%s <%s>' % (p.display_name, p.email) for p in partners_error_user])))
    if error_msg:
        error_msg.append(_("To resolve this error, you can: \n"
                           "- Correct the emails of the relevant contacts\n"
                           "- Grant access only to contacts with unique emails"))
    return error_msg
def _check_approval_update(self, state):
    """ Check if target state is achievable. """
    current_employee = self.env['hr.employee'].search([('user_id', '=', self.env.uid)], limit=1)
    is_team_leader = self.env.user.has_group('hr_holidays.group_hr_holidays_team_leader')
    is_manager = self.env.user.has_group('hr_holidays.group_hr_holidays_manager')
    for holiday in self:
        val_type = holiday.holiday_status_id.validation_type
        if state == 'confirm':
            # Anyone may (re)confirm their own request.
            continue
        if state == 'draft':
            # Only managers can reset other people's requests to draft.
            if holiday.employee_id != current_employee and not is_manager:
                raise UserError(_('Only a Time Off Manager can reset other people time off.'))
            continue
        # Remaining states (validate1/validate/refuse) need at least team leader rights.
        if not is_team_leader:
            raise UserError(_('Only a Team Leader, Time Off Officer or Manager can approve or refuse time off requests.'))
        if is_team_leader:
            # use ir.rule based first access check: department, members, ... (see security.xml)
            holiday.check_access_rule('write')
        if holiday.employee_id == current_employee and not is_manager:
            raise UserError(_('Only a Time Off Manager can approve its own requests.'))
        if (state == 'validate1' and val_type == 'both') or (state == 'validate' and val_type == 'manager'):
            # First approval: must be the employee's manager (or a Time Off Manager).
            manager = holiday.employee_id.parent_id or holiday.employee_id.department_id.manager_id
            if (manager and manager != current_employee) and not self.env.user.has_group('hr_holidays.group_hr_holidays_manager'):
                raise UserError(_('You must be either %s\'s manager or Time Off Manager to approve this time off') % (holiday.employee_id.name))
        if state == 'validate' and val_type == 'both':
            # Second approval is reserved to Time Off Managers.
            if not self.env.user.has_group('hr_holidays.group_hr_holidays_manager'):
                raise UserError(_('Only an Time Off Manager can apply the second approval on time off requests.'))
def _warn_template_error(self, scheduler, exception):
    """Warn by email the people concerned by a failing event mail scheduler.

    Fix: format arguments are now applied *after* the ``_()`` lookup
    (``_("... %s") % value``). Interpolating inside the call
    (``_("... %s" % value)``) defeats translation, since the looked-up
    source string then never matches any catalog entry.
    """
    # We warn ~ once by hour ~ instead of every 10 min if the interval unit is more than 'hours'.
    if random.random() < 0.1666 or scheduler.interval_unit in ('now', 'hours'):
        ex_s = exception_to_unicode(exception)
        try:
            event, template = scheduler.event_id, scheduler.template_id
            emails = list(set([event.organizer_id.email, event.user_id.email, template.write_uid.email]))
            subject = _("WARNING: Event Scheduler Error for event: %s") % event.name
            body = _("""Event Scheduler for:
  - Event: %s (%s)
  - Scheduled: %s
  - Template: %s (%s)

Failed with error:
  - %s

You receive this email because you are:
  - the organizer of the event,
  - or the responsible of the event,
  - or the last writer of the template.""") % (event.name, event.id, scheduler.scheduled_date, template.name, template.id, ex_s)
            email = self.env['ir.mail_server'].build_email(
                email_from=self.env.user.email,
                email_to=emails,
                subject=subject, body=body,
            )
            self.env['ir.mail_server'].send_email(email)
        except Exception as e:
            # Best effort: never let the warning email itself crash the cron.
            _logger.error("Exception while sending traceback by email: %s.\n Original Traceback:\n%s", e, exception)
            pass
def action_validate(self):
    """Validate holiday requests: final approval step (second approval
    when double validation is enabled)."""
    self._check_security_action_validate()

    current_employee = self.env['hr.employee'].search([('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state not in ['confirm', 'validate1']:
            raise UserError(_('Leave request must be confirmed in order to approve it.'))
        if holiday.state == 'validate1' and not holiday.env.user.has_group('hr_holidays.group_hr_holidays_manager'):
            raise UserError(_('Only an HR Manager can apply the second approval on leave requests.'))

        holiday.write({'state': 'validate'})
        # Record who performed this (first or second) approval step.
        if holiday.double_validation:
            holiday.write({'second_approver_id': current_employee.id})
        else:
            holiday.write({'first_approver_id': current_employee.id})
        if holiday.holiday_type == 'employee' and holiday.type == 'remove':
            holiday._validate_leave_request()
        elif holiday.holiday_type == 'category':
            # Fan out: create one child leave per employee of the category.
            leaves = self.env['hr.holidays']
            for employee in holiday.category_id.employee_ids:
                values = holiday._prepare_create_by_category(employee)
                leaves += self.with_context(mail_notify_force_send=False).create(values)
            # TODO is it necessary to interleave the calls?
            leaves.action_approve()
            if leaves and leaves[0].double_validation:
                leaves.action_validate()
    return True
def _parse_date_from_data(self, data, index, name, field_type, options):
    """Convert, in place, the values of column ``index`` of ``data`` to the
    server date/datetime format, using the user-supplied patterns found in
    ``options`` ('date_format' / 'datetime_format').

    :raises ValueError: with a translated message when a cell cannot be parsed
    """
    strptime = datetime.datetime.strptime
    to_server = fields.Date.to_string if field_type == 'date' else fields.Datetime.to_string
    date_pattern = options.get('date_format')
    datetime_pattern = options.get('datetime_format')
    for row_no, row in enumerate(data):
        cell = row[index]
        if not cell:
            continue
        value = cell.strip()
        try:
            # A datetime column may carry a dedicated datetime pattern;
            # try it first and fall back to the plain date pattern.
            if datetime_pattern and field_type == 'datetime':
                try:
                    row[index] = to_server(strptime(value, datetime_pattern))
                    continue
                except ValueError:
                    pass
            row[index] = to_server(strptime(value, date_pattern))
        except ValueError as exc:
            raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, row_no + 1, exc))
        except Exception as exc:
            raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, row_no + 1, exc))
def _compute_duration_display(self):
    """Format the allocation duration as '<amount> hours' or '<amount> days'
    depending on the request unit of the record."""
    for record in self:
        if record.type_request_unit == 'hour':
            amount = float_round(record.number_of_hours_display, precision_digits=2)
            unit = _('hours')
        else:
            amount = float_round(record.number_of_days_display, precision_digits=2)
            unit = _('days')
        record.duration_display = '%g %s' % (amount, unit)
def page_search_dependencies(self, page_id=False):
    """ Search dependencies just for information. It will not catch 100%
        of dependencies and False positive is more than possible
        Each module could add dependences in this dict
        :returns a dictionnary where key is the 'categorie' of object related to the given
            view, and the value is the list of text and link to the resource using given page
    """
    dependencies = {}
    if not page_id:
        return dependencies

    page = self.env['website.page'].browse(int(page_id))
    website = self.env['website'].browse(self._context.get('website_id'))
    url = page.url

    # search for website_page with link
    website_page_search_dom = [('view_id.arch_db', 'ilike', url)] + website.website_domain()
    pages = self.env['website.page'].search(website_page_search_dom)
    page_key = _('Page')
    if len(pages) > 1:
        page_key = _('Pages')
    page_view_ids = []
    for page in pages:
        dependencies.setdefault(page_key, [])
        dependencies[page_key].append({
            'text': _('Page <b>%s</b> contains a link to this page') % page.url,
            'item': page.name,
            'link': page.url,
        })
        page_view_ids.append(page.view_id.id)

    # search for ir_ui_view (not from a website_page) with link
    page_search_dom = [('arch_db', 'ilike', url), ('id', 'not in', page_view_ids)] + website.website_domain()
    views = self.env['ir.ui.view'].search(page_search_dom)
    view_key = _('Template')
    if len(views) > 1:
        view_key = _('Templates')
    for view in views:
        dependencies.setdefault(view_key, [])
        dependencies[view_key].append({
            'text': _('Template <b>%s (id:%s)</b> contains a link to this page') % (view.key or view.name, view.id),
            'link': '/web#id=%s&view_type=form&model=ir.ui.view' % view.id,
            'item': _('%s (id:%s)') % (view.key or view.name, view.id),
        })

    # search for menu with link
    menu_search_dom = [('url', 'ilike', '%s' % url)] + website.website_domain()
    menus = self.env['website.menu'].search(menu_search_dom)
    menu_key = _('Menu')
    if len(menus) > 1:
        menu_key = _('Menus')
    for menu in menus:
        dependencies.setdefault(menu_key, []).append({
            'text': _('This page is in the menu <b>%s</b>') % menu.name,
            'link': '/web#id=%s&view_type=form&model=website.menu' % menu.id,
            'item': menu.name,
        })

    return dependencies
def name_get(self):
    """Display leave requests as '<employee> on <type> : N day(s)' and
    allocation requests as 'Allocation of <type> : N day(s) To <employee>'."""
    result = []
    for record in self:
        if record.type == 'remove':
            label = _("%s on %s : %.2f day(s)") % (
                record.employee_id.name or record.category_id.name,
                record.holiday_status_id.name,
                record.number_of_days_temp)
        else:
            label = _("Allocation of %s : %.2f day(s) To %s") % (
                record.holiday_status_id.name,
                record.number_of_days_temp,
                record.employee_id.name)
        result.append((record.id, label))
    return result
def _message_notification_recipients(self, message, recipients):
    """Adapt the notification button title to the record type: leave
    ('remove') vs. allocation."""
    result = super(Holidays, self)._message_notification_recipients(message, recipients)
    record_type = self.env[message.model].browse(message.res_id).type
    if record_type == 'remove':
        button_title = _("See Leave")
    else:
        button_title = _("See Allocation")
    for group in result:
        access = result[group].get('button_access')
        if access:
            access['title'] = button_title
    return result
def name_get(self):
    """Short form '<name> : N day(s)' when 'short_name' is in the context,
    otherwise '<employee> on <type> : N day(s)'."""
    use_short_name = self.env.context.get('short_name')
    result = []
    for record in self:
        if use_short_name:
            label = _("%s : %.2f day(s)") % (
                record.name or record.holiday_status_id.name,
                record.number_of_days_temp)
        else:
            label = _("%s on %s : %.2f day(s)") % (
                record.employee_id.name or record.category_id.name,
                record.holiday_status_id.name,
                record.number_of_days_temp)
        result.append((record.id, label))
    return result
def name_get(self):
    """Display allocations in hours or days depending on the request unit."""
    result = []
    for record in self:
        if record.type_request_unit == 'hour':
            label = _("Allocation of %s : %.2f hour(s) To %s") % (
                record.holiday_status_id.name,
                record.number_of_hours,
                record.employee_id.name)
        else:
            label = _("Allocation of %s : %.2f day(s) To %s") % (
                record.holiday_status_id.name,
                record.number_of_days_temp,
                record.employee_id.name)
        result.append((record.id, label))
    return result
def action_validate(self):
    """Validate holiday requests: final approval step (second approval when
    double validation), creating the blocking calendar meeting for leaves."""
    if not self.env.user.has_group('hr_holidays.group_hr_holidays_user'):
        raise UserError(_('Only an HR Officer or Manager can approve leave requests.'))

    manager = self.env['hr.employee'].search([('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state not in ['confirm', 'validate1']:
            raise UserError(_('Leave request must be confirmed in order to approve it.'))
        if holiday.state == 'validate1' and not holiday.env.user.has_group('hr_holidays.group_hr_holidays_manager'):
            raise UserError(_('Only an HR Manager can apply the second approval on leave requests.'))

        holiday.write({'state': 'validate'})
        # Record which manager performed this (first or second) approval step.
        if holiday.double_validation:
            holiday.write({'manager_id2': manager.id})
        else:
            holiday.write({'manager_id': manager.id})
        if holiday.holiday_type == 'employee' and holiday.type == 'remove':
            # Block the leave period in the calendar with a confidential meeting.
            meeting_values = {
                'name': holiday.display_name,
                'categ_ids': [(6, 0, [holiday.holiday_status_id.categ_id.id])] if holiday.holiday_status_id.categ_id else [],
                'duration': holiday.number_of_days_temp * HOURS_PER_DAY,
                'description': holiday.notes,
                'user_id': holiday.user_id.id,
                'start': holiday.date_from,
                'stop': holiday.date_to,
                'allday': False,
                'state': 'open',  # to block that meeting date in the calendar
                'privacy': 'confidential'
            }
            # Add the partner_id (if exist) as an attendee
            if holiday.user_id and holiday.user_id.partner_id:
                meeting_values['partner_ids'] = [(4, holiday.user_id.partner_id.id)]
            meeting = self.env['calendar.event'].with_context(no_mail_to_attendees=True).create(meeting_values)
            holiday._create_resource_leave()
            holiday.write({'meeting_id': meeting.id})
        elif holiday.holiday_type == 'category':
            # Fan out: create one child leave per employee in the category.
            leaves = self.env['hr.holidays']
            for employee in holiday.category_id.employee_ids:
                values = {
                    'name': holiday.name,
                    'type': holiday.type,
                    'holiday_type': 'employee',
                    'holiday_status_id': holiday.holiday_status_id.id,
                    'date_from': holiday.date_from,
                    'date_to': holiday.date_to,
                    'notes': holiday.notes,
                    'number_of_days_temp': holiday.number_of_days_temp,
                    'parent_id': holiday.id,
                    'employee_id': employee.id
                }
                leaves += self.with_context(mail_notify_force_send=False).create(values)
            # TODO is it necessary to interleave the calls?
            leaves.action_approve()
            if leaves and leaves[0].double_validation:
                leaves.action_validate()
    return True
def copy_doc(self, res_id, template_id, name_gdocs, res_model):
    """Copy a Google Drive template and attach the copy to a record.

    :param res_id: id of the record the new document is linked to
    :param template_id: Google Drive id of the template document to copy
    :param name_gdocs: name given to the copied document
    :param res_model: model of the record
    :return dict: attachment 'id' and document 'url' when the copy succeeded,
        empty dict otherwise
    """
    google_web_base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
    access_token = self.get_access_token()
    # Copy template in to drive with help of new access token
    request_url = "https://www.googleapis.com/drive/v2/files/%s?fields=parents/id&access_token=%s" % (template_id, access_token)
    headers = {"Content-type": "application/x-www-form-urlencoded"}
    try:
        req = urllib2.Request(request_url, None, headers)
        parents = urllib2.urlopen(req, timeout=TIMEOUT).read()
    except urllib2.HTTPError:
        raise UserError(_("The Google Template cannot be found. Maybe it has been deleted."))
    parents_dict = json.loads(parents)

    # The copy keeps the template's parent folders and links back to the record.
    record_url = "Click on link to open Record in Odoo\n %s/?db=%s#id=%s&model=%s" % (google_web_base_url, self._cr.dbname, res_id, res_model)
    data = {
        "title": name_gdocs,
        "description": record_url,
        "parents": parents_dict['parents']
    }
    request_url = "https://www.googleapis.com/drive/v2/files/%s/copy?access_token=%s" % (template_id, access_token)
    headers = {
        'Content-type': 'application/json',
        'Accept': 'text/plain'
    }
    data_json = json.dumps(data)
    # resp, content = Http().request(request_url, "POST", data_json, headers)
    req = urllib2.Request(request_url, data_json, headers)
    content = urllib2.urlopen(req, timeout=TIMEOUT).read()
    content = json.loads(content)
    res = {}
    if content.get('alternateLink'):
        # Store the document as an URL attachment on the record.
        res['id'] = self.env["ir.attachment"].create({
            'res_model': res_model,
            'name': name_gdocs,
            'res_id': res_id,
            'type': 'url',
            'url': content['alternateLink']
        }).id
        # Commit in order to attach the document to the current object instance, even if the permissions has not been written.
        self._cr.commit()
        res['url'] = content['alternateLink']
        key = self._get_key_from_url(res['url'])
        # Share the document: anyone with the link gets writer access.
        request_url = "https://www.googleapis.com/drive/v2/files/%s/permissions?emailMessage=This+is+a+drive+file+created+by+Odoo&sendNotificationEmails=false&access_token=%s" % (key, access_token)
        data = {'role': 'writer', 'type': 'anyone', 'value': '', 'withLink': True}
        try:
            req = urllib2.Request(request_url, json.dumps(data), headers)
            urllib2.urlopen(req, timeout=TIMEOUT)
        except urllib2.HTTPError:
            raise self.env['res.config.settings'].get_config_warning(_("The permission 'reader' for 'anyone with the link' has not been written on the document"))
        # Best effort: also grant explicit writer access to the current user.
        if self.env.user.email:
            data = {'role': 'writer', 'type': 'user', 'value': self.env.user.email}
            try:
                req = urllib2.Request(request_url, json.dumps(data), headers)
                urllib2.urlopen(req, timeout=TIMEOUT)
            except urllib2.HTTPError:
                pass
    return res
def _get_type_selection(self):
    """Extend the barcode rule types with the point-of-sale specific ones.

    Fix: use the built-in ``set`` instead of ``sets.Set`` — the ``sets``
    module has been deprecated since Python 2.6 and was removed in Python 3.
    """
    types = set(super(BarcodeRule, self)._get_type_selection())
    types.update([
        ('weight', _('Weighted Product')),
        ('price', _('Priced Product')),
        ('discount', _('Discounted Product')),
        ('client', _('Client')),
        ('cashier', _('Cashier'))
    ])
    return list(types)
def _parse_import_data_recursive(self, model, prefix, data, import_fields, options):
    """Normalize date/datetime, float and image-URL columns of ``data`` in
    place, recursing into relational sub-fields (``field/subfield`` columns)."""
    # Get fields of type date/datetime
    all_fields = self.env[model].fields_get()
    for name, field in all_fields.items():
        name = prefix + name
        if field['type'] in ('date', 'datetime') and name in import_fields:
            # Parse date
            index = import_fields.index(name)
            dt = datetime.datetime
            server_format = DEFAULT_SERVER_DATE_FORMAT if field['type'] == 'date' else DEFAULT_SERVER_DATETIME_FORMAT
            if options.get('%s_format' % field['type'], server_format) != server_format:
                # datetime.str[fp]time takes *native strings* in both
                # versions, for both data and pattern
                user_format = pycompat.to_native(options.get('%s_format' % field['type']))
                for num, line in enumerate(data):
                    if line[index]:
                        line[index] = line[index].strip()
                    if line[index]:
                        try:
                            line[index] = dt.strftime(dt.strptime(pycompat.to_native(line[index]), user_format), server_format)
                        except ValueError as e:
                            try:
                                # Allow to import date in datetime fields
                                if field['type'] == 'datetime':
                                    user_format = pycompat.to_native(options.get('date_format'))
                                    line[index] = dt.strftime(dt.strptime(pycompat.to_native(line[index]), user_format), server_format)
                            except ValueError as e:
                                raise ValueError(_("Column %s contains incorrect values. Error in line %d: %s") % (name, num + 1, e))
                        except Exception as e:
                            raise ValueError(_("Error Parsing Date [%s:L%d]: %s") % (name, num + 1, e))
        # Check if the field is in import_field and is a relational (followed by /)
        # Also verify that the field name exactly match the import_field at the correct level.
        elif any(name + '/' in import_field and name == import_field.split('/')[prefix.count('/')] for import_field in import_fields):
            # Recursive call with the relational as new model and add the field name to the prefix
            self._parse_import_data_recursive(field['relation'], name + '/', data, import_fields, options)
        elif field['type'] in ('float', 'monetary') and name in import_fields:
            # Parse float, sometimes float values from file have currency symbol or () to denote a negative value
            # We should be able to manage both case
            index = import_fields.index(name)
            self._parse_float_from_data(data, index, name, options)
        elif field['type'] == 'binary' and field.get('attachment') and any(f in name for f in IMAGE_FIELDS) and name in import_fields:
            index = import_fields.index(name)
            # Download images referenced by URL (requires the dedicated
            # access right checked per line).
            with requests.Session() as session:
                session.stream = True
                for num, line in enumerate(data):
                    if re.match(config.get("import_image_regex", DEFAULT_IMAGE_REGEX), line[index]):
                        if not self.env.user._can_import_remote_urls():
                            raise AccessError(_("You can not import images via URL, check with your administrator or support for the reason."))
                        line[index] = self._import_image_by_url(line[index], session, name, num)
    return data
def open_rating(self, token, rate, **kwargs):
    """Consume a rating access token: store the chosen rate and render the
    external submission page."""
    assert rate in (1, 5, 10), "Incorrect rating"
    rating = request.env["rating.rating"].sudo().search([("access_token", "=", token)])
    if not rating:
        return request.not_found()
    rate_to_label = {1: _("highly dissatisfied"), 5: _("not satisfied"), 10: _("satisfied")}
    rating.sudo().write({"rating": rate, "consumed": True})
    render_values = {"rating": rating, "token": token, "rate_name": rate_to_label[rate], "rate": rate}
    return request.render("rating.rating_external_page_submit", render_values)
def page_search_dependencies(self, view_id=False):
    """ Search dependencies just for information. It will not catch 100%
        of dependencies and False positive is more than possible
        Each module could add dependences in this dict
        :returns a dictionnary where key is the 'categorie' of object related to the given
            view, and the value is the list of text and link to the resource using given page
    """
    dependencies = {}
    if not view_id:
        return dependencies

    view = self.env['ir.ui.view'].browse(view_id)
    website_id = self._context.get('website_id')
    # Pages are referenced both with and without the 'website.' prefix.
    name = view.key.replace("website.", "")
    fullname = "website.%s" % name

    if view.page:
        # search for page with link
        page_search_dom = [
            '|', ('website_id', '=', website_id), ('website_id', '=', False),
            '|', ('arch_db', 'ilike', '/page/%s' % name), ('arch_db', 'ilike', '/page/%s' % fullname)
        ]
        page_key = _('Page')
        pages = self.env['ir.ui.view'].search(page_search_dom)
        for page in pages:
            dependencies.setdefault(page_key, [])
            if page.page:
                dependencies[page_key].append({
                    'text': _('Page <b>%s</b> contains a link to this page') % page.key,
                    'link': '/page/%s' % page.key
                })
            else:
                dependencies[page_key].append({
                    'text': _('Template <b>%s (id:%s)</b> contains a link to this page') % (page.key, page.id),
                    'link': '#'
                })

        # search for menu with link
        menu_search_dom = [
            '|', ('website_id', '=', website_id), ('website_id', '=', False),
            '|', ('url', 'ilike', '/page/%s' % name), ('url', 'ilike', '/page/%s' % fullname)
        ]
        menu_key = _('Menu')
        menus = self.env['website.menu'].search(menu_search_dom)
        for menu in menus:
            dependencies.setdefault(menu_key, []).append({
                'text': _('This page is in the menu <b>%s</b>') % menu.name,
                'link': False
            })

    return dependencies
def action_approve(self):
    """First approval step: double-validation requests move to 'validate1',
    single-validation requests are validated directly."""
    # if double_validation: this method is the first approval approval
    # if not double_validation: this method calls action_validate() below
    if not self.env.user.has_group('hr_holidays.group_hr_holidays_user'):
        raise UserError(_('Only an HR Officer or Manager can approve leave requests.'))

    approver = self.env['hr.employee'].search([('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state != 'confirm':
            raise UserError(_('Leave request must be confirmed ("To Approve") in order to approve it.'))

    double_validated = self.filtered(lambda h: h.double_validation)
    double_validated.write({'state': 'validate1', 'first_approver_id': approver.id})
    (self - double_validated).action_validate()
    return True
def action_approve(self):
    """First approval step.

    - if double_validation: move the request to 'validate1' and record the approver
    - if not double_validation: call action_validate() directly

    Fix: the original ``return holiday.write(...)`` inside the loop exited on
    the *first* double-validation holiday, silently skipping the remaining
    records of a multi-record set; every record is now processed.
    """
    if not self.env.user.has_group('hr_holidays.group_hr_holidays_user'):
        raise UserError(_('Only an HR Officer or Manager can approve leave requests.'))

    manager = self.env['hr.employee'].search([('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state != 'confirm':
            raise UserError(_('Leave request must be confirmed ("To Approve") in order to approve it.'))

        if holiday.double_validation:
            holiday.write({'state': 'validate1', 'manager_id': manager.id if manager else False})
        else:
            holiday.action_validate()
    return True
def open_rating(self, token, rate, **kwargs):
    """Record the rating chosen from an email link and show the feedback page."""
    assert rate in (1, 5, 10), "Incorrect rating"
    rating = request.env['rating.rating'].sudo().search([('access_token', '=', token)])
    if not rating:
        return request.not_found()
    labels = {
        1: _("highly dissatisfied"),
        5: _("not satisfied"),
        10: _("satisfied"),
    }
    rating.sudo().write({'rating': rate, 'consumed': True})
    values = {
        'rating': rating,
        'token': token,
        'rate_name': labels[rate],
        'rate': rate,
    }
    return request.render('rating.rating_external_page_submit', values)
def action_draft(self):
    """Reset refused/to-approve requests back to draft, clearing approvers
    and recursively resetting, then deleting, the linked requests."""
    for holiday in self:
        if not holiday.can_reset:
            raise UserError(_('Only an HR Manager or the concerned employee can reset to draft.'))
        if holiday.state not in ('confirm', 'refuse'):
            raise UserError(_('Leave request state must be "Refused" or "To Approve" in order to reset to Draft.'))
        holiday.write({
            'state': 'draft',
            'manager_id': False,
            'manager_id2': False,
        })
        linked = holiday.mapped('linked_request_ids')
        for request in linked:
            request.action_draft()
        linked.unlink()
    return True
def get_google_drive_config(self, res_model, res_id):
    '''
    Function called by the js, when no google doc are yet associated with a record,
    with the aim to create one. It will first seek for a google.docs.config
    associated with the model `res_model` to find out what's the template of
    google doc to copy (this is usefull if you want to start with a non-empty
    document, a type or a name different than the default values). If no config
    is associated with the `res_model`, then a blank text document with a
    default name is created.
    :param res_model: the object for which the google doc is created
    :param ids: the list of ids of the objects for which the google doc is
        created. This list is supposed to have a length of 1 element only
        (batch processing is not supported in the code, though nothing
        really prevent it)
    :return: the config id and config name
    '''
    if not res_id:
        raise UserError(_("Creating google drive may only be done by one at a time."))
    # check if a model is configured with a template
    configs = self.search([('model_id', '=', res_model)])
    config_values = []
    for config in configs:
        if config.filter_id:
            # Skip filters that are private to another user.
            if config.filter_id.user_id and config.filter_id.user_id.id != self.env.user.id:
                #Private
                continue
            # SECURITY NOTE(review): eval() on the stored filter domain and
            # context executes arbitrary Python from the database; Odoo's
            # safe_eval should be used for these stored expressions.
            domain = [('id', 'in', [res_id])] + eval(config.filter_id.domain)
            additionnal_context = eval(config.filter_id.context)
            # Offer this config only when the record matches the filter.
            google_doc_configs = self.env[config.filter_id.model_id].with_context(
                **additionnal_context).search(domain)
            if google_doc_configs:
                config_values.append({'id': config.id, 'name': config.name})
        else:
            # Configs without a filter always apply.
            config_values.append({'id': config.id, 'name': config.name})
    return config_values
def write(self, values):
    """Override write: guard forbidden state transitions, then subscribe
    the (possibly new) employee as a follower after the write."""
    new_employee_id = values.get('employee_id', False)
    if not self._check_state_access_right(values):
        raise AccessError(_('You cannot set a leave request as \'%s\'. Contact a human resource manager.') % values.get('state'))
    res = super(Holidays, self).write(values)
    self.add_follower(new_employee_id)
    return res
class CurrencyRateUpdateService(models.Model):
    """Per-company configuration: which web service updates which currencies.

    NOTE: this model still uses the legacy OpenERP API (``_defaults``,
    ``_constraints`` and cr/uid method signatures).
    """
    _name = "currency.rate.update.service"
    _description = "Currency Rate Update"

    # List of web services; each selection key is a getter class name
    # resolved at runtime by the currency getter factory.
    service = fields.Selection(
        [
            ('Admin_ch_getter', 'Admin.ch'),
            ('ECB_getter', 'European Central Bank'),
            # ('NYFB_getter','Federal Reserve Bank of NY'),
            # ('Google_getter','Google Finance'),
            ('Yahoo_getter', 'Yahoo Finance '),
            ('PL_NBP_getter', 'Narodowy Bank Polski'),  # Added for polish rates
            ('Banxico_getter', 'Banco de México'),  # Added for mexican rates
            # Bank of Canada is using RSS-CB http://www.cbwiki.net/wiki/index.php/Specification_1.1 :
            # This RSS format is used by other national banks (Thailand, Malaysia, Mexico...)
            ('CA_BOC_getter', 'Bank of Canada - noon rates' ),  # Added for canadian rates
        ],
        "Webservice to use",
        required=True)
    # Currencies refreshed by this service.
    currency_to_update = fields.Many2many(
        'res.currency',
        'res_curreny_auto_udate_rel',
        'service_id',
        'currency_id',
        'currency to update with this service',
    )
    # Back-reference to the owning company.
    company_id = fields.Many2one(
        'res.company',
        'linked company',
    )
    # Free-text field used as a log of past update runs.
    note = fields.Text('update notice')
    max_delta_days = fields.Integer(
        'Max delta days',
        required=True,
        help=
        "If the time delta between the rate date given by the webservice and the current date exeeds this value, then the currency rate is not updated in OpenERP."
    )

    _defaults = {
        'max_delta_days': lambda *a: 4,
    }
    _sql_constraints = [('curr_service_unique',
                         'unique (service, company_id)',
                         _('You can use a service one time per company !'))]

    def _check_max_delta_days(self, cr, uid, ids):
        # Legacy-API constraint helper: max_delta_days must be >= 0.
        for company in self.read(cr, uid, ids, ['max_delta_days']):
            if company['max_delta_days'] >= 0:
                continue
            else:
                return False
        return True

    _constraints = [
        (_check_max_delta_days,
         "'Max delta days' must be >= 0",
         ['max_delta_days']),
    ]
def make_order(self):
    """Create a quotation (sale.order) from the active CRM lead and return
    an act_window pointing at it.

    The lead id comes from ``active_ids`` in the context (only the first
    id is used). When ``update_quotation`` is set, the lines of the
    lead's existing quotations are wiped first.

    NOTE(review): in the deliverables loop below, each iteration
    overwrites the same ``sale_order_values`` dict and only ONE order is
    created after the loop (from the last deliverable's values). Also the
    pricelist condition tests ``crm_id.account_id.pricelist_id`` but then
    reads ``deliverable.pricelist_id`` — confirm both are intended.
    """
    context = self.env.context
    case_id = context and context.get('active_ids', []) or []
    case_id = case_id and case_id[0] or False
    crm_id = self.env['crm.lead'].browse(case_id)

    # Optionally wipe existing quotation lines before regenerating.
    if self.update_quotation and crm_id and crm_id.order_ids:
        for order in crm_id.order_ids:
            if order.order_line:
                order.order_line.unlink()

    if crm_id and crm_id.account_id:
        partner = crm_id.partner_id
        sale_order = self.env['sale.order']
        pricelist = partner.property_product_pricelist.id
        partner_address = partner.address_get(
            [
                'default',
                'invoice',
                'delivery',
                'contact'
            ]
        )
        sale_order_values = {
            'partner_id': partner.id,
            'opportunity_id': crm_id.id,
            'partner_invoice_id': partner_address['invoice'],
            'partner_shipping_id': partner_address['delivery'],
            'date_order': fields.datetime.now(),
        }
        for deliverable in crm_id.account_id.deliverable_ids:
            sale_order_values.update({
                'client_order_ref': (
                    deliverable.account_id.complete_wbs_name),
                'origin': deliverable.account_id.complete_wbs_code,
                'account_id': deliverable.account_id.id
            })
            if deliverable and crm_id.account_id.pricelist_id:
                sale_order_values.update({
                    'pricelist_id': deliverable.pricelist_id.id
                })
            else:
                sale_order_values.update({
                    'pricelist_id': pricelist
                })
        order_id = sale_order.create(sale_order_values)
        order_lines = self.prepare_sale_order_line(case_id, order_id.id)
        self.create_sale_order_line(order_lines)
        return {
            'domain': str([('id', 'in', [order_id.id])]),
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'sale.order',
            'view_id': False,
            'type': 'ir.actions.act_window',
            'name': _('Quotation'),
            'res_id': order_id.id
        }
    # Lead without analytic account: just show already-linked orders.
    # (If there are none either, the method implicitly returns None.)
    if crm_id and crm_id.order_ids:
        return {
            'domain': str([('id', 'in', crm_id.order_ids.ids)]),
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'sale.order',
            'view_id': False,
            'type': 'ir.actions.act_window',
            'name': _('Quotation'),
            # NOTE(review): 'res_ids' is not a standard act_window key;
            # probably meant 'res_id' — confirm against the client code.
            'res_ids': crm_id.order_ids.ids
        }
class WebsiteForm(http.Controller):
    """Public controller receiving website form submissions and storing
    them as records of a whitelisted target model."""

    # Check and insert values from the form on the model <model>
    @http.route('/website_form/<string:model_name>', type='http', auth="public", methods=['POST'], website=True)
    def website_form(self, model_name, **kwargs):
        # Only models explicitly opened to website forms are accepted.
        model_record = request.env['ir.model'].sudo().search([
            ('model', '=', model_name),
            ('website_form_access', '=', True)
        ])
        if not model_record:
            return json.dumps(False)

        try:
            data = self.extract_data(model_record, request.params)
        # If we encounter an issue while extracting data
        except ValidationError as e:
            # I couldn't find a cleaner way to pass data to an exception
            return json.dumps({'error_fields': e.args[0]})

        try:
            id_record = self.insert_record(request, model_record, data['record'], data['custom'], data.get('meta'))
            if id_record:
                self.insert_attachment(model_record, id_record, data['attachments'])

        # Some fields have additional SQL constraints that we can't check generically
        # Ex: crm.lead.probability which is a float between 0 and 1
        # TODO: How to get the name of the erroneous field ?
        except IntegrityError:
            return json.dumps(False)

        # Remember the last submission so the form builder can show it.
        request.session['form_builder_model_model'] = model_record.model
        request.session['form_builder_model'] = model_record.name
        request.session['form_builder_id'] = id_record

        return json.dumps({'id': id_record})

    # Constants string to make custom info and metadata readable on a text field
    _custom_label = "%s\n___________\n\n" % _("Custom infos")  # Title for custom fields
    _meta_label = "%s\n________\n\n" % _("Metadata")  # Title for meta data

    # Dict of dynamically called filters following type of field to be fault tolerent

    def identity(self, field_label, field_input):
        # Pass-through for char/text/html/selection values.
        return field_input

    def integer(self, field_label, field_input):
        return int(field_input)

    def floating(self, field_label, field_input):
        return float(field_input)

    def boolean(self, field_label, field_input):
        return bool(field_input)

    def date(self, field_label, field_input):
        # Parse in the user's language date format, store in server format.
        lang = request.env['ir.qweb.field'].user_lang()
        return datetime.strptime(
            field_input, lang.date_format).strftime(DEFAULT_SERVER_DATE_FORMAT)

    def datetime(self, field_label, field_input):
        # Parse in the user's timezone and format, convert to UTC, store in
        # server datetime format.
        lang = request.env['ir.qweb.field'].user_lang()
        strftime_format = (u"%s %s" % (lang.date_format, lang.time_format))
        user_tz = pytz.timezone(
            request.context.get('tz') or request.env.user.tz or 'UTC')
        dt = user_tz.localize(datetime.strptime(
            field_input, strftime_format)).astimezone(pytz.utc)
        return dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)

    def binary(self, field_label, field_input):
        return base64.b64encode(field_input.read())

    def one2many(self, field_label, field_input):
        # "1,2,3" -> [1, 2, 3]
        return [int(i) for i in field_input.split(',')]

    def many2many(self, field_label, field_input, *args):
        # Wrap the id list into an ORM command tuple, (6, 0, ids) by default.
        return [(args[0] if args else (6, 0)) + (self.one2many(field_label, field_input), )]

    _input_filters = {
        'char': identity,
        'text': identity,
        'html': identity,
        'date': date,
        'datetime': datetime,
        'many2one': integer,
        'one2many': one2many,
        'many2many': many2many,
        'selection': identity,
        'boolean': boolean,
        'integer': integer,
        'float': floating,
        'binary': binary,
    }

    # Extract all data sent by the form and sort its on several properties
    def extract_data(self, model, values):
        data = {
            'record': {},        # Values to create record
            'attachments': [],   # Attached files
            'custom': '',        # Custom fields values
            'meta': '',          # Add metadata if enabled
        }

        authorized_fields = model.sudo()._get_form_writable_fields()
        error_fields = []

        for field_name, field_value in values.items():
            # If the value of the field if a file
            if hasattr(field_value, 'filename'):
                # Undo file upload field name indexing
                field_name = field_name.rsplit('[', 1)[0]

                # If it's an actual binary field, convert the input file
                # If it's not, we'll use attachments instead
                if field_name in authorized_fields and authorized_fields[
                        field_name]['type'] == 'binary':
                    data['record'][field_name] = base64.b64encode(
                        field_value.read())
                else:
                    field_value.field_name = field_name
                    data['attachments'].append(field_value)

            # If it's a known field
            elif field_name in authorized_fields:
                try:
                    input_filter = self._input_filters[
                        authorized_fields[field_name]['type']]
                    data['record'][field_name] = input_filter(
                        self, field_name, field_value)
                except ValueError:
                    error_fields.append(field_name)

            # If it's a custom field
            elif field_name != 'context':
                data['custom'] += u"%s : %s\n" % (field_name, field_value)

        # Add metadata if enabled
        environ = request.httprequest.headers.environ
        if (request.website.website_form_enable_metadata):
            data['meta'] += "%s : %s\n%s : %s\n%s : %s\n%s : %s\n" % (
                "IP", environ.get("REMOTE_ADDR"),
                "USER_AGENT", environ.get("HTTP_USER_AGENT"),
                "ACCEPT_LANGUAGE", environ.get("HTTP_ACCEPT_LANGUAGE"),
                "REFERER", environ.get("HTTP_REFERER"))

        # This function can be defined on any model to provide
        # a model-specific filtering of the record values
        # Example:
        # def website_form_input_filter(self, values):
        #     values['name'] = '%s\'s Application' % values['partner_name']
        #     return values
        dest_model = request.env[model.sudo().model]
        if hasattr(dest_model, "website_form_input_filter"):
            data['record'] = dest_model.website_form_input_filter(
                request, data['record'])

        missing_required_fields = [
            label for label, field in authorized_fields.items()
            if field['required'] and not label in data['record']
        ]
        # NOTE(review): required-field violations are only reported when at
        # least one value failed conversion (error_fields non-empty); a
        # submission with only missing required fields is not rejected here.
        # Confirm this is intended.
        if any(error_fields):
            raise ValidationError(error_fields + missing_required_fields)

        return data

    def insert_record(self, request, model, values, custom, meta=None):
        # Create the record with mail auto-subscription disabled.
        model_name = model.sudo().model
        record = request.env[model_name].sudo().with_context(
            mail_create_nosubscribe=True).create(values)

        if custom or meta:
            default_field = model.website_form_default_field_id
            default_field_data = values.get(default_field.name, '')
            custom_content = (default_field_data + "\n\n" if default_field_data else '') \
                + (self._custom_label + custom + "\n\n" if custom else '') \
                + (self._meta_label + meta if meta else '')

            # If there is a default field configured for this model, use it.
            # If there isn't, put the custom data in a message instead
            if default_field.name:
                if default_field.ttype == 'html' or model_name == 'mail.mail':
                    custom_content = nl2br(custom_content)
                record.update({default_field.name: custom_content})
            else:
                values = {
                    'body': nl2br(custom_content),
                    'model': model_name,
                    'message_type': 'comment',
                    'no_auto_thread': False,
                    'res_id': record.id,
                }
                mail_id = request.env['mail.message'].sudo().create(values)

        return record.id

    # Link all files attached on the form
    def insert_attachment(self, model, id_record, files):
        orphan_attachment_ids = []
        model_name = model.sudo().model
        record = model.env[model_name].browse(id_record)
        authorized_fields = model.sudo()._get_form_writable_fields()
        for file in files:
            custom_field = file.field_name not in authorized_fields
            attachment_value = {
                'name': file.field_name if custom_field else file.filename,
                # NOTE(review): base64.encodestring is deprecated (removed in
                # Python 3.9); newer code should use base64.encodebytes.
                'datas': base64.encodestring(file.read()),
                'datas_fname': file.filename,
                'res_model': model_name,
                'res_id': record.id,
            }
            attachment_id = request.env['ir.attachment'].sudo().create(
                attachment_value)
            if attachment_id and not custom_field:
                # The file maps to a real binary field: link it directly.
                record.sudo()[file.field_name] = [(4, attachment_id.id)]
            else:
                orphan_attachment_ids.append(attachment_id.id)

        # If some attachments didn't match a field on the model,
        # we create a mail.message to link them to the record
        if orphan_attachment_ids:
            if model_name != 'mail.mail':
                values = {
                    'body': _('<p>Attached files : </p>'),
                    'model': model_name,
                    'message_type': 'comment',
                    'no_auto_thread': False,
                    'res_id': id_record,
                    'attachment_ids': [(6, 0, orphan_attachment_ids)],
                }
                mail_id = request.env['mail.message'].sudo().create(values)
            else:
                # If the model is mail.mail then we have no other choice but to
                # attach the custom binary field files on the attachment_ids field.
                for attachment_id_id in orphan_attachment_ids:
                    record.attachment_ids = [(4, attachment_id_id)]
def get_fields(self, model, depth=FIELDS_RECURSION_LIMIT):
    """Return the importable fields of ``model`` as a list of dicts.

    Each entry carries ``id``, ``name``, ``string`` (human label),
    ``required``, ``type`` and ``fields`` (sub-fields). The pseudo-field
    'id' (External ID) always comes first. m2o/m2m fields expose an
    External ID and a Database ID sub-field; o2m fields recurse into the
    related model up to ``depth`` levels. Deprecated fields and
    unconditionally read-only fields are skipped.

    :param str model: name of the model to get fields form
    :param int depth: depth of recursion into o2m fields
    """
    Model = self.env[model]
    fields_list = [{
        'id': 'id',
        'name': 'id',
        'string': _("External ID"),
        'required': False,
        'fields': [],
        'type': 'id',
    }]
    if not depth:
        return fields_list

    skip = set(models.MAGIC_COLUMNS) | {Model.CONCURRENCY_CHECK_FIELD}
    for fname, fdef in Model.fields_get().items():
        if fname in skip:
            continue
        # an empty string means the field is deprecated; @deprecated must
        # be absent or False to mean not-deprecated
        if fdef.get('deprecated', False) is not False:
            continue
        if fdef.get('readonly'):
            # A readonly field is importable only if some state makes it
            # writable: states = {state: [(attr, value), ...], ...}
            state_attrs = fdef.get('states')
            if not state_attrs:
                continue
            pairs = itertools.chain.from_iterable(state_attrs.values())
            if not any(attr == 'readonly' and value is False
                       for attr, value in pairs):
                continue
        entry = {
            'id': fname,
            'name': fname,
            'string': fdef['string'],
            # 'required' may be missing from fields_get output
            'required': bool(fdef.get('required')),
            'fields': [],
            'type': fdef['type'],
        }
        if fdef['type'] in ('many2many', 'many2one'):
            entry['fields'] = [
                dict(entry, name='id', string=_("External ID"), type='id'),
                dict(entry, name='.id', string=_("Database ID"), type='id'),
            ]
        elif fdef['type'] == 'one2many':
            entry['fields'] = self.get_fields(fdef['relation'], depth=depth - 1)
            if self.user_has_groups('base.group_no_one'):
                # Developer mode also exposes the raw database id.
                entry['fields'].append({
                    'id': '.id',
                    'name': '.id',
                    'string': _("Database ID"),
                    'required': False,
                    'fields': [],
                    'type': 'id',
                })
        fields_list.append(entry)
    # TODO: cache on model?
    return fields_list
def database_name_checking(self, patren, name):
    """Validate ``name`` against the regular expression ``patren``.

    The pattern is matched from the start of the string; a mismatch
    raises a ValidationError, a match returns None.
    """
    matched = re.match(patren, name)
    if matched:
        return
    raise ValidationError(
        _('Invalid database name. Only alphanumerical characters, underscore, hyphen and dot are allowed.'
          ))
def create_move(self):
    """Create and post the VAT statement account move for each statement.

    One move line is created per non-zero debit/credit VAT account line,
    per carried-over amount (previous credit/debit VAT, tax credits, due
    interests), per generic line, and finally one or more lines toward
    the Tax Authority account (split by payment term when configured).

    :return: True
    """
    move_obj = self.env['account.move']
    for statement in self:
        statement_date = fields.Date.to_string(statement.date)
        move = move_obj.create({
            'name': _('VAT statement') + ' - ' + statement_date,
            'date': statement_date,
            'journal_id': statement.journal_id.id,
        })
        move_id = move.id
        statement.write({'move_id': move_id})

        def _line_vals(name, account_id, amount, debit_if_positive):
            # Build one move-line dict; the sign of ``amount`` combined
            # with ``debit_if_positive`` decides which side it lands on.
            vals = {
                'name': name,
                'account_id': account_id,
                'move_id': move_id,
                'journal_id': statement.journal_id.id,
                'debit': 0.0,
                'credit': 0.0,
                'date': statement_date,
                'company_id': statement.company_id.id,
            }
            is_debit = amount > 0 if debit_if_positive else amount < 0
            vals['debit' if is_debit else 'credit'] = math.fabs(amount)
            return vals

        lines_to_create = []
        for debit_line in statement.debit_vat_account_line_ids:
            if debit_line.amount != 0.0:
                lines_to_create.append((0, 0, _line_vals(
                    _('Debit VAT'), debit_line.account_id.id,
                    debit_line.amount, True)))
        for credit_line in statement.credit_vat_account_line_ids:
            if credit_line.amount != 0.0:
                lines_to_create.append((0, 0, _line_vals(
                    _('Credit VAT'), credit_line.account_id.id,
                    credit_line.amount, False)))
        if statement.previous_credit_vat_amount:
            lines_to_create.append((0, 0, _line_vals(
                _('Previous Credits VAT'),
                statement.previous_credit_vat_account_id.id,
                statement.previous_credit_vat_amount, False)))
        if statement.tax_credit_amount:
            lines_to_create.append((0, 0, _line_vals(
                _('Tax Credits'), statement.tax_credit_account_id.id,
                statement.tax_credit_amount, False)))
        if statement.previous_debit_vat_amount:
            lines_to_create.append((0, 0, _line_vals(
                _('Previous Debits VAT'),
                statement.previous_debit_vat_account_id.id,
                statement.previous_debit_vat_amount, True)))
        if statement.interests_debit_vat_amount:
            lines_to_create.append((0, 0, _line_vals(
                _('Due interests'),
                statement.interests_debit_vat_account_id.id,
                statement.interests_debit_vat_amount, True)))
        for generic_line in statement.generic_vat_account_line_ids:
            lines_to_create.append((0, 0, _line_vals(
                _('Other VAT Credits / Debits'), generic_line.account_id.id,
                generic_line.amount, False)))

        # Counterpart toward the Tax Authority.
        end_debit_vat_data = {
            'name': _('Tax Authority VAT'),
            'account_id': statement.authority_vat_account_id.id,
            'partner_id': statement.authority_partner_id.id,
            'move_id': move_id,
            'journal_id': statement.journal_id.id,
            'date': statement_date,
            'company_id': statement.company_id.id,
        }
        if statement.authority_vat_amount > 0:
            end_debit_vat_data['debit'] = 0.0
            end_debit_vat_data['credit'] = math.fabs(
                statement.authority_vat_amount)
            if statement.payment_term_id:
                # One line per payment-term due date.
                due_list = statement.payment_term_id.compute(
                    statement.authority_vat_amount, statement_date)[0]
                for term in due_list:
                    # BUG FIX: copy the template dict. Previously every
                    # appended tuple aliased the SAME dict, so all term
                    # lines ended up with the last term's credit amount
                    # and maturity date.
                    current_line = dict(end_debit_vat_data)
                    current_line['credit'] = term[1]
                    current_line['date_maturity'] = term[0]
                    lines_to_create.append((0, 0, current_line))
            else:
                lines_to_create.append((0, 0, end_debit_vat_data))
        elif statement.authority_vat_amount < 0:
            end_debit_vat_data['debit'] = math.fabs(
                statement.authority_vat_amount)
            end_debit_vat_data['credit'] = 0.0
            lines_to_create.append((0, 0, end_debit_vat_data))

        move.line_ids = lines_to_create
        move.post()
        statement.state = 'confirmed'
    return True
def producteca_bind_to(self, account, binding_product_tmpl_id=False):
    """Bind the product variants in ``self`` to the given Producteca
    ``account``, creating or updating producteca.binding.product records.

    NOTE(review): the binding work is performed inside the loop over the
    product's EXISTING bindings — a product with no binding at all is
    silently skipped, and a product with several foreign bindings repeats
    the work once per binding. Confirm this is intended.

    :param account: Producteca connection account record
    :param binding_product_tmpl_id: optional template binding to reuse
    :return: the last variant binding processed, or None
    """
    pv_bind = None
    for product in self:
        for bind in product.producteca_bindings:
            # Already bound to this account: nothing to do for this one.
            if (account.id in bind.connection_account.ids):
                _logger.info("No need to add")
                continue
            _logger.info(
                _("Adding product %s to %s") %
                (product.display_name, account.name))
            try:
                prod_binding = {
                    "connection_account": account.id,
                    "product_tmpl_id": product.product_tmpl_id.id,
                    "product_id": product.id,
                    "name": product.name,
                    "description": product.description_sale or "",
                    "sku": product.default_code or product.barcode or "",
                    #"price": product.ocapi_price(account),
                    #"stock": product.ocapi_stock(account),
                }
                #Binding Product Template if missing
                if binding_product_tmpl_id:
                    prod_binding[
                        "binding_product_tmpl_id"] = binding_product_tmpl_id.id
                else:
                    # Reuse an existing template binding when available.
                    pt_bind = self.env[
                        "producteca.binding.product_template"].search([
                            ("product_tmpl_id", "=", product.product_tmpl_id.id),
                            ("connection_account", "=", account.id)
                        ])
                    if pt_bind and len(pt_bind):
                        prod_binding["binding_product_tmpl_id"] = pt_bind[0].id
                    else:
                        # Otherwise create the template binding first,
                        # without recursing into its variants.
                        pt_bind = product.product_tmpl_id.producteca_bind_to(
                            account, bind_variants=False)
                        if pt_bind and len(pt_bind):
                            prod_binding["binding_product_tmpl_id"] = pt_bind[
                                0].id
                #Binding Variant finally
                pv_bind = self.env["producteca.binding.product"].search([
                    ("product_id", "=", product.id),
                    ("product_tmpl_id", "=", product.product_tmpl_id.id),
                    ("connection_account", "=", account.id)
                ])
                if len(pv_bind):
                    # Update the existing variant binding in place.
                    pv_bind = pv_bind[0]
                    _logger.info(prod_binding)
                    pv_bind.write(prod_binding)
                else:
                    _logger.info("Create variant binding:" + str(prod_binding))
                    pv_bind = self.env["producteca.binding.product"].create(
                        [prod_binding])
                if pv_bind:
                    product.producteca_bindings = [(4, pv_bind.id)]
                else:
                    _logger.error("Error creating variant binding: " +
                                  str(prod_binding))
            except Exception as e:
                # Log with traceback, then propagate to abort the batch.
                _logger.info(e, exc_info=True)
                raise e
    return pv_bind
def copy(self, default=None):
    """Duplicate the view, forcing the copy's name to '<name> (copy)'."""
    self.ensure_one()
    vals = dict(default or {})
    vals['name'] = _("%s (copy)") % self.name
    return super(BveView, self).copy(vals)
def copy_data(self, default=None):
    """Forbid duplication of leave requests.

    :raises UserError: always — a leave must be created anew.
    """
    raise UserError(_('A leave cannot be duplicated.'))
def _parse_data(data): try: return json.loads(data) except ValueError: raise ValidationError(_("Data should be a valid JSON"))
def explode_kit_from_name(self):
    ''' Explode kit product from name (raise error)

    The kit's default_code is a '#'-separated list of component codes.
    Existing components are wiped and recreated; missing component
    templates are created on the fly as services.
    '''
    # Pool used:
    template_pool = self.env['product.template']
    component_pool = self.env['product.template.kit.bom']

    # ---------------------------------------------------------------------
    # Check if is a kit syntax:
    # ---------------------------------------------------------------------
    if not self.default_code or '#' not in self.default_code:
        # NOTE(review): exceptions.Warning is deprecated in recent Odoo
        # versions in favour of UserError.
        raise exceptions.Warning(_('No "#" char present in default code'))
    self.is_kit = True  # Always update is_kit if present

    # ---------------------------------------------------------------------
    # Code in default_code of the kit:
    # ---------------------------------------------------------------------
    # Clean extra special char (\t \n ' ')
    default_code = (self.default_code or '').strip()
    code_list = default_code.split('#')

    # ---------------------------------------------------------------------
    # Search product by code (only present):
    # ---------------------------------------------------------------------
    components = self.search([('default_code', 'in', code_list)])
    template_db = {}  # default_code -> template ID
    for template in components:
        template_db[template.default_code] = template.id

    # ---------------------------------------------------------------------
    # Generate all component extracting from default_code
    # ---------------------------------------------------------------------
    # Static fields:
    product_id = self.id
    partner_id = self.company_id.partner_id.id

    # Delete all components (ORM command (5,..) clears the o2m):
    self.component_ids = [(5, False, False)]

    # Re-create all components:
    for default_code in code_list:
        # -----------------------------------------------------------------
        # Get template ID for component:
        # -----------------------------------------------------------------
        if default_code in template_db:
            # Product yet present:
            template_id = template_db[default_code]
        else:
            # Product not found: create a service placeholder template.
            template_id = template_pool.create({
                'name': default_code,
                'default_code': default_code,
                'default_supplier_id': partner_id,
                'type': 'service',
            }).id
            _logger.warning(
                'Create a new template as a service: %s' % default_code)

        # -----------------------------------------------------------------
        # Create component in this kit:
        # -----------------------------------------------------------------
        component_pool.create({
            'sequence': 10,
            'product_id': product_id,  # parent
            'component_id': template_id,  # component
        })
def unlink(self):
    """Allow deletion only for draft/cancelled Consumo de Folios records."""
    protected = self.filtered(lambda cf: cf.state not in ('draft', 'cancel'))
    if protected:
        raise UserError(
            _('You cannot delete a Validated Consumo de Folios.'))
    return super(ConsumoFolios, self).unlink()
def _validar(self):
    """Build, sign and queue the 'Consumo de Folios' XML envelope for the
    Chilean SII, storing the result as a draft sii.xml.envio record."""
    cant_doc_batch = 0  # NOTE(review): assigned but never used here
    company_id = self.company_id
    dte_service = company_id.dte_service_provider  # NOTE(review): unused here
    # A usable digital signature for the current user is mandatory.
    signature_id = self.env.user.get_digital_signature(self.company_id)
    if not signature_id:
        raise UserError(
            _('''There is no Signer Person with an \
authorized signature for you in the system. Please make sure that \
'user_signature_key' module has been installed and enable a digital \
signature, for you or make the signer to authorize you to use his \
signature.'''))
    resumenes, TpoDocs = self._get_resumenes(marc=True)
    Resumen = []
    # Keys are emitted in this exact order in each <Resumen> node.
    listado = [
        'TipoDocumento', 'MntNeto', 'MntIva', 'TasaIVA', 'MntExento',
        'MntTotal', 'FoliosEmitidos', 'FoliosAnulados', 'FoliosUtilizados',
        'itemUtilizados'
    ]
    # Default summary used when there are no document summaries at all.
    xml = '<Resumen><TipoDocumento>39</TipoDocumento><MntTotal>0</MntTotal><FoliosEmitidos>0</FoliosEmitidos><FoliosAnulados>0</FoliosAnulados><FoliosUtilizados>0</FoliosUtilizados></Resumen>'
    if resumenes:
        for r, value in resumenes.items():
            ordered = collections.OrderedDict()
            for i in listado:
                if i in value:
                    ordered[i] = value[i]
                elif i == 'itemUtilizados':
                    # Expand the used/voided folio ranges for this doc type.
                    Rangos = value[str(r) + '_folios']
                    folios = []
                    if 'itemUtilizados' in Rangos:
                        utilizados = []
                        for rango in Rangos['itemUtilizados']:
                            utilizados.append({'RangoUtilizados': rango})
                        folios.append({'itemUtilizados': utilizados})
                    if 'itemAnulados' in Rangos:
                        anulados = []
                        for rango in Rangos['itemAnulados']:
                            anulados.append({'RangoAnulados': rango})
                        folios.append({'itemAnulados': anulados})
                    ordered[str(r) + '_folios'] = folios
            Resumen.extend([{'Resumen': ordered}])
        dte = collections.OrderedDict({'item': Resumen})
        xml = dicttoxml.dicttoxml(dte, root=False, attr_type=False).decode()
    resol_data = self.get_resolution_data(company_id)
    RUTEmisor = self.format_vat(company_id.vat)
    RUTRecep = "60803000-K"  # RUT SII
    doc_id = 'CF_' + self.date
    Correlativo = self.correlativo
    SecEnvio = self.sec_envio
    cf = self.create_template_envio(
        RUTEmisor, resol_data['dte_resolution_date'],
        resol_data['dte_resolution_number'], self.fecha_inicio,
        self.fecha_final, Correlativo, SecEnvio, xml,
        signature_id.subject_serial_number, doc_id)
    xml = self.create_template_env(cf)
    root = etree.XML(xml)
    # Strip the dicttoxml wrapper tags so the output matches the SII schema.
    xml_pret = etree.tostring(root, pretty_print=True).decode()\
        .replace('<item>', '\n').replace('</item>', '')\
        .replace('<itemNoRec>', '').replace('</itemNoRec>', '\n')\
        .replace('<itemOtrosImp>', '').replace('</itemOtrosImp>', '\n')\
        .replace('<itemUtilizados>', '').replace('</itemUtilizados>', '\n')\
        .replace('<itemAnulados>', '').replace('</itemAnulados>', '\n')
    for TpoDoc in TpoDocs:
        xml_pret = xml_pret.replace(
            '<key name="' + str(TpoDoc) + '_folios">',
            '').replace('</key>', '\n').replace(
                '<key name="' + str(TpoDoc) + '_folios"/>', '\n')
    envio_dte = self.env['account.invoice'].sign_full_xml(
        xml_pret, doc_id, 'consu')
    doc_id += '.xml'
    self.sii_xml_request = self.env['sii.xml.envio'].create({
        'xml_envio': '<?xml version="1.0" encoding="ISO-8859-1"?>\n%s' %
        envio_dte,
        'name': doc_id,
        'company_id': self.company_id.id,
        'state': 'draft',
    }).id
class AccountInvoice(models.Model):
    """Finnish localization: reference number, check digit and bank
    barcode string for invoices."""
    _inherit = 'account.invoice'

    @api.one
    @api.depends('number', 'state')
    def _compute_ref_number(self):
        # Finnish bank reference: the digits of the invoice number plus a
        # 7-3-1 weighted check digit computed right-to-left.
        if self.number:
            invoice_number = re.sub(r'\D', '', self.number)
            checksum = sum((7, 3, 1)[idx % 3] * int(val)
                           for idx, val in enumerate(invoice_number[::-1]))
            self.ref_number = invoice_number + str((10 - (checksum % 10)) % 10)
            self.invoice_number = invoice_number
        else:
            self.invoice_number = False
            self.ref_number = False

    @api.one
    def _compute_barcode_string(self):
        # Finnish bank barcode (version '4'): receiver account + amount +
        # zero-padded reference + due date (YYMMDD).
        displayed_bank_accounts = self.company_id.partner_id.bank_ids.filtered(
            'journal_id.include_on_invoice')
        primary_bank_account = self.partner_bank_id or \
            displayed_bank_accounts and displayed_bank_accounts[0]
        if (self.amount_total and primary_bank_account.acc_number
                and self.ref_number and self.date_due):
            amount_total_string = str(self.amount_total)
            # NOTE(review): assumes amount_total renders with one or two
            # decimals; an integer amount (no '.') would yield a malformed
            # barcode — confirm amounts are always decimal-formatted.
            if amount_total_string[-2:-1] == '.':
                amount_total_string += '0'
            amount_total_string = amount_total_string.zfill(9)
            receiver_bank_account = re\
                .sub("[^0-9]", "", str(primary_bank_account.acc_number))
            ref_number_filled = self.ref_number.zfill(20)
            # Drop the decimal point: euros part + two-digit cents part.
            self.barcode_string = '4' \
                + receiver_bank_account \
                + amount_total_string[:-3] \
                + amount_total_string[-2:] \
                + "000" + ref_number_filled \
                + self.date_due[2:4] \
                + self.date_due[5:-3] \
                + self.date_due[-2:]
        else:
            self.barcode_string = False

    invoice_number = fields.Char(
        'Invoice number',
        compute='_compute_ref_number',
        store=True,
        help=_('Identifier number used to refer to this invoice in '
               'accordance with https://www.fkl.fi/teemasivut/sepa/'
               'tekninen_dokumentaatio/Dokumentit/kotimaisen_viitte'
               'en_rakenneohje.pdf'))
    ref_number = fields.Char(
        'Reference Number',
        compute='_compute_ref_number',
        store=True,
        help=_('Invoice reference number in accordance with https://'
               'www.fkl.fi/teemasivut/sepa/tekninen_dokumentaatio/Do'
               'kumentit/kotimaisen_viitteen_rakenneohje.pdf'))
    date_delivered = fields.Date(
        'Date delivered',
        help=_('The date when the invoiced product or service was considered '
               'delivered, for taxation purposes.'))
    barcode_string = fields.Char(
        'Barcode String',
        compute='_compute_barcode_string',
        help=_('https://www.fkl.fi/teemasivut/sepa/tekninen_dokumentaatio/Dok'
               'umentit/Pankkiviivakoodi-opas.pdf'))

    @api.multi
    def invoice_print(self):
        """ Print the invoice and mark it as sent, so that we can see more
            easily the next step of the workflow
        """
        assert len(self) == 1, \
            'This option should only be used for a single id at a time.'
        # noinspection PyAttributeOutsideInit
        self.sent = True
        return self.env.ref(
            'l10n_fi_invoice.report_invoice_finnish').report_action(self)
def run_currency_update(self, cr, uid):
    """Update currency rates for every opted-in company (old-style API).

    For each company with ``auto_currency_up`` enabled, the base
    currency (flagged ``base=True``, rate must be 1.00) is resolved,
    then each configured rate service is queried and today's
    ``res.currency.rate`` records are created or updated. Failures for
    one service are logged into that service's ``note`` field instead
    of aborting the whole run.

    :param cr: database cursor
    :param uid: current user id
    :raises orm.except_orm: if no base currency exists or its rate is
        not 1.00
    """
    factory = CurrencyGetterFactory()
    curr_obj = self.pool.get('res.currency')
    rate_obj = self.pool.get('res.currency.rate')
    companies = self.pool.get('res.company').search(cr, uid, [])
    for comp in self.pool.get('res.company').browse(cr, uid, companies):
        # The multi-company currency can be set or not, so we handle
        # both cases; skip companies without automatic update enabled.
        if not comp.auto_currency_up:
            continue
        # We fetch the main currency looking for the currency with
        # base = True. The main rate should be set at 1.00.
        main_curr_ids = curr_obj.search(
            cr, uid,
            [('base', '=', True), ('company_id', '=', comp.id)])
        if not main_curr_ids:
            # If we can not find a base currency for this company we
            # look for one with no company set (shared base currency).
            main_curr_ids = curr_obj.search(
                cr, uid,
                [('base', '=', True), ('company_id', '=', False)])
        if main_curr_ids:
            main_curr_rec = curr_obj.browse(cr, uid, main_curr_ids[0])
        else:
            raise orm.except_orm(_('Error!'),
                                 ('There is no base currency set!'))
        if main_curr_rec.rate != 1:
            raise orm.except_orm(_('Error!'),
                                 ('Base currency rate should be 1.00!'))
        main_curr = main_curr_rec.name
        for service in comp.services_to_use:
            note = service.note or ''
            try:
                # We initialize the class that will handle the request
                # and return a dict of rates keyed by currency name.
                getter = factory.register(service.service)
                curr_to_fetch = map(lambda x: x.name,
                                    service.currency_to_update)
                res, log_info = getter.get_updated_currency(
                    curr_to_fetch, main_curr, service.max_delta_days)
                rate_name = time.strftime('%Y-%m-%d')
                for curr in service.currency_to_update:
                    # Never rewrite the base currency's own rate.
                    if curr.name == main_curr:
                        continue
                    do_create = True
                    # Update today's rate in place if it already exists.
                    for rate in curr.rate_ids:
                        if rate.name == rate_name:
                            rate.write({'rate': res[curr.name]})
                            do_create = False
                            break
                    if do_create:
                        vals = {
                            'currency_id': curr.id,
                            'rate': res[curr.name],
                            'name': rate_name
                        }
                        rate_obj.create(
                            cr,
                            uid,
                            vals,
                        )
                # show the most recent note at the top
                note = "\n%s currency updated. " \
                    % (datetime.strftime(datetime.today(),
                                         '%Y-%m-%d %H:%M:%S')) \
                    + note
                note = (log_info or '') + note
                service.write({'note': note})
            except Exception as e:
                # Best-effort: record the failure on the service and
                # carry on with the remaining services/companies.
                error_msg = "\n%s ERROR : %s" \
                    % (datetime.strftime(datetime.today(),
                                         '%Y-%m-%d %H:%M:%S'),
                       str(e)) \
                    + note
                _logger.info(str(e))
                service.write({'note': error_msg})
def action_validate(self):
    """Approve the leave requests in ``self`` (final validation step).

    Requires the HR Officer group; the second approval of a
    double-validation flow additionally requires the HR Manager group.
    For employee leaves of type 'remove', a confidential calendar
    meeting is created to block the dates and a resource leave is
    registered. For category leaves, one child leave per employee of
    the category is created and approved/validated in cascade.

    :return: True
    :raises UserError: on missing access rights or wrong source state
    """
    if not self.env.user.has_group('hr_holidays.group_hr_holidays_user'):
        raise UserError(
            _('Only an HR Officer or Manager can approve leave requests.'))
    # The approving manager is the employee linked to the current user.
    manager = self.env['hr.employee'].search(
        [('user_id', '=', self.env.uid)], limit=1)
    for holiday in self:
        if holiday.state not in ['confirm', 'validate1']:
            raise UserError(
                _('Leave request must be confirmed in order to approve it.'
                  ))
        if holiday.state == 'validate1' and not holiday.env.user.has_group(
                'hr_holidays.group_hr_holidays_manager'):
            raise UserError(
                _('Only an HR Manager can apply the second approval on leave requests.'
                  ))
        holiday.write({'state': 'validate'})
        # Record who approved: second approver for double validation,
        # first approver otherwise.
        if holiday.double_validation:
            holiday.write({'manager_id2': manager.id})
        else:
            holiday.write({'manager_id': manager.id})
        if holiday.holiday_type == 'employee' and holiday.type == 'remove':
            meeting_values = {
                'name': holiday.display_name,
                'categ_ids': [(6, 0, [holiday.holiday_status_id.categ_id.id])]
                if holiday.holiday_status_id.categ_id else [],
                'duration': holiday.number_of_days_temp * HOURS_PER_DAY,
                'description': holiday.notes,
                'user_id': holiday.user_id.id,
                'start': holiday.date_from,
                'stop': holiday.date_to,
                'allday': False,
                'state': 'open',  # to block that meeting date in the calendar
                'privacy': 'confidential'
            }
            # Add the partner_id (if exist) as an attendee
            if holiday.user_id and holiday.user_id.partner_id:
                meeting_values['partner_ids'] = [
                    (4, holiday.user_id.partner_id.id)
                ]
            meeting = self.env['calendar.event'].with_context(
                no_mail_to_attendees=True).create(meeting_values)
            holiday._create_resource_leave()
            holiday.write({'meeting_id': meeting.id})
        elif holiday.holiday_type == 'category':
            # Fan out: one child leave per employee in the category.
            leaves = self.env['hr.holidays']
            for employee in holiday.category_id.employee_ids:
                values = {
                    'name': holiday.name,
                    'type': holiday.type,
                    'holiday_type': 'employee',
                    'holiday_status_id': holiday.holiday_status_id.id,
                    'date_from': holiday.date_from,
                    'date_to': holiday.date_to,
                    'notes': holiday.notes,
                    'number_of_days_temp': holiday.number_of_days_temp,
                    'parent_id': holiday.id,
                    'employee_id': employee.id
                }
                leaves += self.with_context(
                    mail_notify_force_send=False).create(values)
            # TODO is it necessary to interleave the calls?
            leaves.action_approve()
            if leaves and leaves[0].double_validation:
                leaves.action_validate()
    return True
def action_confirm(self): if self.filtered(lambda holiday: holiday.state != 'draft'): raise UserError( _('Leave request must be in Draft state ("To Submit") in order to confirm it.' )) return self.write({'state': 'confirm'})
def apply_inheritance_specs(source, specs_tree, inherit_branding=False,
                            pre_locate=lambda s: True):
    """ Apply an inheriting view (a descendant of the base view)

    Apply to a source architecture all the spec nodes (i.e. nodes describing
    where and what changes to apply to some parent architecture) given by an
    inheriting view.

    :param Element source: a parent architecture to modify
    :param Element specs_tree: a modifying architecture in an inheriting view
    :param bool inherit_branding:
    :param pre_locate: function that is executed before locating a node.
                       This function receives an arch as argument.
                       This is required by studio to properly handle
                       group_ids.
    :return: a modified source where the specs are applied
    :rtype: Element
    """
    # Queue of specification nodes (i.e. nodes describing where and
    # changes to apply to some parent architecture).
    specs = specs_tree if isinstance(specs_tree, list) else [specs_tree]

    def extract(spec):
        """
        Utility function that locates a node given a specification, remove
        it from the source and returns it.
        """
        # A 'move' spec must be a leaf: it only points at the node to move.
        if len(spec):
            raise ValueError(
                _("Invalid specification for moved nodes: %r",
                  etree.tostring(spec, encoding='unicode')))
        pre_locate(spec)
        to_extract = locate_node(source, spec)
        if to_extract is not None:
            remove_element(to_extract)
            return to_extract
        else:
            raise ValueError(
                _("Element %r cannot be located in parent view",
                  etree.tostring(spec, encoding='unicode')))

    while len(specs):
        spec = specs.pop(0)
        # Skip comments/processing instructions in the spec tree.
        if isinstance(spec, SKIPPED_ELEMENT_TYPES):
            continue
        # A <data> wrapper just queues its children as further specs.
        if spec.tag == 'data':
            specs += [c for c in spec]
            continue
        pre_locate(spec)
        node = locate_node(source, spec)
        if node is not None:
            pos = spec.get('position', 'inside')
            if pos == 'replace':
                mode = spec.get('mode', 'outer')
                if mode == "outer":
                    # '$0' placeholders in the spec are replaced by a
                    # deep copy of the node being replaced.
                    for loc in spec.xpath(".//*[text()='$0']"):
                        loc.text = ''
                        loc.append(copy.deepcopy(node))
                    if node.getparent() is None:
                        # Replacing the root node: the first non-comment
                        # child of the spec becomes the new root.
                        spec_content = None
                        comment = None
                        for content in spec:
                            if content.tag is not etree.Comment:
                                spec_content = content
                                break
                            else:
                                comment = content
                        source = copy.deepcopy(spec_content)
                        # only keep the t-name of a template root node
                        t_name = node.get('t-name')
                        if t_name:
                            source.set('t-name', t_name)
                        if comment is not None:
                            # Preserve a leading comment before the new root.
                            text = source.text
                            source.text = None
                            comment.tail = text
                            source.insert(0, comment)
                    else:
                        replaced_node_tag = None
                        for child in spec:
                            if child.get('position') == 'move':
                                child = extract(child)
                            if inherit_branding and not replaced_node_tag and child.tag is not etree.Comment:
                                # To make a correct branding, we need to
                                # - know exactly which node has been replaced
                                # - store it before anything else has altered the Tree
                                # Do it exactly here :D
                                child.set('meta-oe-xpath-replacing', node.tag)
                                # We just store the replaced node tag on the first
                                # child of the xpath replacing it
                                replaced_node_tag = node.tag
                            node.addprevious(child)
                        node.getparent().remove(node)
                elif mode == "inner":
                    # Replace the entire content of an element
                    for child in node:
                        node.remove(child)
                    node.text = None
                    for child in spec:
                        node.append(copy.deepcopy(child))
                    node.text = spec.text
                else:
                    raise ValueError(
                        _("Invalid mode attribute:") + " '%s'" % mode)
            elif pos == 'attributes':
                for child in spec.getiterator('attribute'):
                    attribute = child.get('name')
                    value = child.text or ''
                    if child.get('add') or child.get('remove'):
                        # Incremental edit of a list-valued attribute
                        # (e.g. classes): add/remove tokens instead of
                        # overwriting the whole value.
                        assert not child.text
                        separator = child.get('separator', ',')
                        if separator == ' ':
                            separator = None  # squash spaces
                        to_add = (
                            s for s in
                            (s.strip()
                             for s in child.get('add', '').split(separator))
                            if s)
                        to_remove = {
                            s.strip()
                            for s in child.get('remove', '').split(separator)
                        }
                        values = (
                            s.strip()
                            for s in node.get(attribute, '').split(separator))
                        value = (separator or ' ').join(
                            itertools.chain(
                                (v for v in values if v not in to_remove),
                                to_add))
                    if value:
                        node.set(attribute, value)
                    elif attribute in node.attrib:
                        # Empty value means: drop the attribute.
                        del node.attrib[attribute]
            elif pos == 'inside':
                add_text_inside(node, spec.text)
                for child in spec:
                    if child.get('position') == 'move':
                        child = extract(child)
                    node.append(child)
            elif pos == 'after':
                # add a sentinel element right after node, insert content of
                # spec before the sentinel, then remove the sentinel element
                sentinel = E.sentinel()
                node.addnext(sentinel)
                add_text_before(sentinel, spec.text)
                for child in spec:
                    if child.get('position') == 'move':
                        child = extract(child)
                    sentinel.addprevious(child)
                remove_element(sentinel)
            elif pos == 'before':
                add_text_before(node, spec.text)
                for child in spec:
                    if child.get('position') == 'move':
                        child = extract(child)
                    node.addprevious(child)
            else:
                raise ValueError(_("Invalid position attribute: '%s'") % pos)
        else:
            attrs = ''.join([
                ' %s="%s"' % (attr, html_escape(spec.get(attr)))
                for attr in spec.attrib if attr != 'position'
            ])
            tag = "<%s%s>" % (spec.tag, attrs)
            raise ValueError(
                _("Element '%s' cannot be located in parent view", tag))
    return source
def _compute_sale_subscr_name(self): for subs in self: subs.name = subs.name or subs.analytic_account_id.name or _('New')
def customers(self, country=None, industry=None, page=0, **post):
    """Render the public customer-references page.

    Builds a domain from the published/assigned base filters plus the
    optional free-text search, tag, industry and country filters, then
    computes the industry and country facet counts, the pager and the
    partner page, and renders ``website_customer.index``.

    :param country: optional ``res.country`` record to filter on
    :param industry: optional industry record to filter on
    :param int page: pager page number
    :param post: extra query-string parameters (``search``, ``tag_id``)
    """
    Tag = request.env['res.partner.tag']
    Partner = request.env['res.partner']
    search_value = post.get('search')

    # Base filter: only published partners with an assigned partner.
    domain = [('website_published', '=', True),
              ('assigned_partner_id', '!=', False)]
    if search_value:
        domain += [
            '|', '|',
            ('name', 'ilike', search_value),
            ('website_description', 'ilike', search_value),
            ('industry_id.name', 'ilike', search_value),
        ]

    tag_id = post.get('tag_id')
    if tag_id:
        # tag_id arrives slugified; unslug yields (name, id).
        tag_id = unslug(tag_id)[1] or 0
        domain += [('website_tag_ids', 'in', tag_id)]

    # group by industry, based on customers found with the search(domain)
    industries = Partner.sudo().read_group(domain, ["id", "industry_id"],
                                           groupby="industry_id",
                                           orderby="industry_id")
    partners_count = Partner.sudo().search_count(domain)

    if industry:
        domain.append(('industry_id', '=', industry.id))
        # Make sure the selected industry appears in the facet list
        # even when it matched no partner.
        if industry.id not in (x['industry_id'][0]
                               for x in industries if x['industry_id']):
            if industry.exists():
                industries.append({
                    'industry_id_count': 0,
                    'industry_id': (industry.id, industry.name)
                })
                industries.sort(
                    key=lambda d: (d.get('industry_id') or (0, ''))[1])
    # "All Industries" entry always first, with the unfiltered count.
    industries.insert(0, {
        'industry_id_count': partners_count,
        'industry_id': (0, _("All Industries"))
    })

    # group by country, based on customers found with the search(domain)
    countries = Partner.sudo().read_group(domain, ["id", "country_id"],
                                          groupby="country_id",
                                          orderby="country_id")
    country_count = Partner.sudo().search_count(domain)

    if country:
        domain += [('country_id', '=', country.id)]
        # Same trick as for industries: keep the selected country in
        # the facet list even at zero matches.
        if country.id not in (x['country_id'][0]
                              for x in countries if x['country_id']):
            if country.exists():
                countries.append({
                    'country_id_count': 0,
                    'country_id': (country.id, country.name)
                })
                countries.sort(
                    key=lambda d: (d['country_id'] or (0, ""))[1])
    countries.insert(0, {
        'country_id_count': country_count,
        'country_id': (0, _("All Countries"))
    })

    # search customers to display
    partner_count = Partner.sudo().search_count(domain)

    # pager
    url = '/customers'
    if industry:
        url += '/industry/%s' % industry.id
    if country:
        url += '/country/%s' % country.id
    pager = request.website.pager(url=url, total=partner_count, page=page,
                                  step=self._references_per_page, scope=7,
                                  url_args=post)

    partners = Partner.sudo().search(domain, offset=pager['offset'],
                                     limit=self._references_per_page)
    google_map_partner_ids = ','.join(str(it) for it in partners.ids)
    google_maps_api_key = request.website.google_maps_api_key

    # Only tags carried by the currently displayed partners.
    tags = Tag.search([('website_published', '=', True),
                       ('partner_ids', 'in', partners.ids)],
                      order='classname, name ASC')
    tag = tag_id and Tag.browse(tag_id) or False

    values = {
        'countries': countries,
        'current_country_id': country.id if country else 0,
        'current_country': country or False,
        'industries': industries,
        'current_industry_id': industry.id if industry else 0,
        'current_industry': industry or False,
        'partners': partners,
        'google_map_partner_ids': google_map_partner_ids,
        'pager': pager,
        'post': post,
        'search_path': "?%s" % werkzeug.urls.url_encode(post),
        'tag': tag,
        'tags': tags,
        'google_maps_api_key': google_maps_api_key,
    }
    return request.render("website_customer.index", values)
def get_fields(self, model, depth=FIELDS_RECURSION_LIMIT): Model = self.env['sps.vendor_offer_automation.template'] importable_fields = [{ 'id': 'id', 'name': 'id', 'string': _("External ID"), 'required': False, 'fields': [], 'type': 'id', }] model_fields = Model.fields_get() blacklist = models.MAGIC_COLUMNS + [Model.CONCURRENCY_CHECK_FIELD] for name, field in model_fields.items(): if name in blacklist: continue # an empty string means the field is deprecated, @deprecated must # be absent or False to mean not-deprecated if field.get('deprecated', False) is not False: continue if field.get('readonly'): states = field.get('states') if not states: continue # states = {state: [(attr, value), (attr2, value2)], state2:...} if not any(attr == 'readonly' and value is False for attr, value in itertools.chain.from_iterable( states.values())): continue if not name.startswith('mf_'): continue field_value = { 'id': name, 'name': name, 'string': field['string'], # Y U NO ALWAYS HAS REQUIRED 'required': bool(field.get('required')), 'fields': [], 'type': field['type'], } if field['type'] in ('many2many', 'many2one'): field_value['fields'] = [ dict(field_value, name='id', string=_("External ID"), type='id'), dict(field_value, name='.id', string=_("Database ID"), type='id'), ] elif field['type'] == 'one2many' and depth: field_value['fields'] = self.get_fields(field['relation'], depth=depth - 1) if self.user_has_groups('base.group_no_one'): field_value['fields'].append({ 'id': '.id', 'name': '.id', 'string': _("Database ID"), 'required': False, 'fields': [], 'type': 'id' }) importable_fields.append(field_value) # TODO: cache on model? return importable_fields
def my_openacademy_sessions(self, page=1, date_begin=None, date_end=None,
                            sortby=None, search=None, search_in='content',
                            **kw):
    """Portal page listing the sessions the current user instructs.

    Supports sorting, date-range archive filtering, and free-text
    search over several targets; results are paginated and the visited
    ids are stored in the web session for breadcrumb navigation.

    :param int page: pager page number
    :param date_begin: lower bound (exclusive) on create_date
    :param date_end: upper bound (inclusive) on create_date
    :param sortby: key into ``searchbar_sortings`` (defaults to 'date')
    :param search: free-text search term
    :param search_in: which target(s) to search (see searchbar_inputs)
    """
    values = self._prepare_portal_layout_values()
    user = request.env.user
    # Only sessions taught by the logged-in user.
    domain = [('instructor_id', '=', request.env.user.partner_id.id)]

    searchbar_sortings = {
        'date': {'label': _('Newest'), 'order': 'create_date desc'},
        'name': {'label': _('Subject'), 'order': 'name'},
    }
    searchbar_inputs = {
        'content': {'input': 'content',
                    'label': _('Search <span class="nolabel"> (in Content)</span>')},
        'message': {'input': 'message', 'label': _('Search in Messages')},
        'customer': {'input': 'customer', 'label': _('Search in Customer')},
        'id': {'input': 'id', 'label': _('Search ID')},
        'all': {'input': 'all', 'label': _('Search in All')},
    }

    # default sort by value
    if not sortby:
        sortby = 'date'
    order = searchbar_sortings[sortby]['order']

    # archive groups - Default Group By 'create_date'
    archive_groups = self._get_archive_groups('openacademy.session', domain)
    if date_begin and date_end:
        domain += [('create_date', '>', date_begin),
                   ('create_date', '<=', date_end)]

    # search: OR together the sub-domains for each requested target.
    if search and search_in:
        search_domain = []
        if search_in in ('id', 'all'):
            search_domain = OR([search_domain, [('id', '=', search)]])
        if search_in in ('content', 'all'):
            search_domain = OR([search_domain,
                                ['|', ('name', 'ilike', search),
                                 ('description', 'ilike', search)]])
        if search_in in ('customer', 'all'):
            search_domain = OR([search_domain,
                                [('partner_id', 'ilike', search)]])
        if search_in in ('message', 'all'):
            search_domain = OR([search_domain,
                                [('message_ids.body', 'ilike', search)]])
        domain += search_domain

    # pager
    sessions_count = request.env['openacademy.session'].search_count(domain)
    pager = portal_pager(
        url="/my/sessions",
        url_args={'date_begin': date_begin, 'date_end': date_end,
                  'sortby': sortby},
        total=sessions_count,
        page=page,
        step=self._items_per_page
    )
    sessions = request.env['openacademy.session'].search(
        domain, order=order, limit=self._items_per_page,
        offset=pager['offset'])
    # Remember the last page of results for portal prev/next navigation.
    request.session['my_sessions_history'] = sessions.ids[:100]

    values.update({
        'date': date_begin,
        'sessions': sessions,
        'page_name': 'session',
        'default_url': '/my/sessions',
        'pager': pager,
        'archive_groups': archive_groups,
        'searchbar_sortings': searchbar_sortings,
        'searchbar_inputs': searchbar_inputs,
        'sortby': sortby,
        'search_in': search_in,
        'search': search,
    })
    return request.render("openacademy.portal_openacademy_session", values)
def create_employee_from_applicant(self): """ Create an hr.employee from the hr.applicants """ employee = False for applicant in self: contact_name = False if applicant.partner_id: address_id = applicant.partner_id.address_get(['contact' ])['contact'] contact_name = applicant.partner_id.display_name else: if not applicant.partner_name: raise UserError( _('You must define a Contact Name for this applicant.') ) new_partner_id = self.env['res.partner'].create({ 'is_company': False, 'type': 'private', 'name': applicant.partner_name, 'email': applicant.email_from, 'phone': applicant.partner_phone, 'mobile': applicant.partner_mobile }) applicant.partner_id = new_partner_id address_id = new_partner_id.address_get(['contact'])['contact'] if applicant.partner_name or contact_name: employee_data = { 'default_name': applicant.partner_name or contact_name, 'default_job_id': applicant.job_id.id, 'default_job_title': applicant.job_id.name, 'address_home_id': address_id, 'default_department_id': applicant.department_id.id or False, 'default_address_id': applicant.company_id and applicant.company_id.partner_id and applicant.company_id.partner_id.id or False, 'default_work_email': applicant.department_id and applicant.department_id.company_id and applicant.department_id.company_id.email or False, 'default_work_phone': applicant.department_id.company_id.phone, 'form_view_initial_mode': 'edit', 'default_applicant_id': applicant.ids, } dict_act_window = self.env['ir.actions.act_window']._for_xml_id( 'hr.open_view_employee_list') dict_act_window['context'] = employee_data return dict_act_window
def _check_autostaging_idle_timeout(self): if self.autostaging_enabled and self.autostaging_idle_timeout <= 0: raise ValidationError(_( "Days limit field value must be greater than 0"))
def _get_step_from_code(self, code): step = self.env['shopinvader.cart.step'].search([('code', '=', code)]) if not step: raise UserError(_('Invalid step code %s') % code) else: return step
class BveView(models.Model):
    """BI View Editor: user-defined reporting views.

    A ``bve.view`` record holds a JSON-ish field selection (``data``)
    from which a SQL view, a dynamic ``x_bve.*`` model, access rules
    and tree/graph/pivot UI views plus a window action are generated.
    """

    _name = 'bve.view'
    _description = 'BI View Editor'

    @api.depends('group_ids')
    @api.multi
    def _compute_users(self):
        """Users allowed to see the view: members of the configured
        groups, or every user when no group is set."""
        for bve_view in self:
            group_ids = bve_view.sudo().group_ids
            if group_ids:
                bve_view.user_ids = group_ids.mapped('users')
            else:
                bve_view.user_ids = self.env['res.users'].sudo().search([])

    name = fields.Char(required=True, copy=False)
    # Technical name of the generated model (x_bve.<slugified name>).
    model_name = fields.Char()
    note = fields.Text(string='Notes')
    state = fields.Selection([('draft', 'Draft'), ('created', 'Created')],
                             default='draft',
                             copy=False)
    data = fields.Text(
        help="Use the special query builder to define the query "
             "to generate your report dataset. "
             "NOTE: Te be edited, the query should be in 'Draft' status.")
    action_id = fields.Many2one('ir.actions.act_window', string='Action')
    view_id = fields.Many2one('ir.ui.view', string='View')
    group_ids = fields.Many2many(
        'res.groups',
        string='Groups',
        help="User groups allowed to see the generated report; "
             "if NO groups are specified the report will be public "
             "for everyone.")
    user_ids = fields.Many2many('res.users',
                                string='Users',
                                compute=_compute_users,
                                store=True)

    _sql_constraints = [
        ('name_uniq', 'unique(name)',
         _('Custom BI View names must be unique!')),
    ]

    @api.multi
    def action_reset(self):
        """Tear down everything generated for this view (action, UI
        views, dynamic model, SQL view) and return to draft."""
        self.ensure_one()
        if self.action_id:
            if self.action_id.view_id:
                self.action_id.view_id.sudo().unlink()
            self.action_id.sudo().unlink()
        models = self.env['ir.model'].sudo().search([('model', '=',
                                                      self.model_name)])
        for model in models:
            model.sudo().unlink()
        table_name = self.model_name.replace('.', '_')
        tools.drop_view_if_exists(self.env.cr, table_name)
        self.state = 'draft'

    @api.multi
    def _create_view_arch(self):
        """Return the <field> arch snippets for every field used as a
        row, column or measure in the stored field selection."""
        self.ensure_one()
        fields_info = json.loads(self._get_format_data(self.data))
        view_fields = []
        for field_info in fields_info:
            is_row = field_info['row']
            is_column = field_info['column']
            is_measure = field_info['measure']
            if is_row or is_column or is_measure:
                field_def = self._get_field_def(field_info)
                view_fields.append(field_def)
        return view_fields

    @api.model
    def _get_field_def(self, field_info):
        """Render one <field> element; type priority: row > col >
        measure."""
        name = field_info['name']
        row = field_info['row'] and 'row'
        column = field_info['column'] and 'col'
        measure = field_info['measure'] and 'measure'
        field_def = """<field name="x_{}" type="{}" />""".format(
            name, row or column or measure)
        return field_def

    @api.model
    def _get_format_data(self, data):
        """Normalize the stored python-repr data into parseable JSON
        (single->double quotes, strip u-string prefixes)."""
        data = data.replace('\'', '"')
        data = data.replace(': u"', ':"')
        return data

    @api.multi
    def action_create(self):
        """Materialize the view: SQL view + dynamic model + access
        rules, registry reload, then the UI views and action."""
        self.ensure_one()

        self._create_bve_object()
        self._force_registry_reload()
        self._create_bve_view()

    def _force_registry_reload(self):
        # setup models: this automatically adds model in registry
        self.pool.setup_models(self._cr, partial=(not self.pool.ready))
        RegistryManager.signal_registry_change(self.env.cr.dbname)

    def _create_bve_view(self):
        """Create the pivot/graph/tree UI views and the window action
        for the generated model, replacing any stale views."""
        # create views
        View = self.env['ir.ui.view']
        old_views = View.sudo().search([('model', '=', self.model_name)])
        old_views.sudo().unlink()

        # create Pivot view
        View.sudo().create({
            'name': 'Pivot Analysis',
            'type': 'pivot',
            'model': self.model_name,
            'priority': 16,
            'arch': """<?xml version="1.0"?>
                       <pivot string="Pivot Analysis">
                       {}
                       </pivot>
                    """.format("".join(self._create_view_arch()))
        })

        # create Graph view
        View.sudo().create({
            'name': 'Graph Analysis',
            'type': 'graph',
            'model': self.model_name,
            'priority': 16,
            'arch': """<?xml version="1.0"?>
                       <graph string="Graph Analysis"
                              type="bar" stacked="True">
                       {}
                       </graph>
                    """.format("".join(self._create_view_arch()))
        })

        # create Tree view
        tree_view = View.sudo().create({
            'name': 'Tree Analysis',
            'type': 'tree',
            'model': self.model_name,
            'priority': 16,
            'arch': """<?xml version="1.0"?>
                       <tree string="List Analysis" create="false">
                       {}
                       </tree>
                    """.format("".join(self._create_view_arch()))
        })

        # set the Tree view as the default one
        action_vals = {
            'name': self.name,
            'res_model': self.model_name,
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'tree,graph,pivot',
            'view_id': tree_view.id,
            'context': "{'service_name': '%s'}" % self.name,
        }

        ActWindow = self.env['ir.actions.act_window']
        action_id = ActWindow.sudo().create(action_vals)
        self.write({
            'action_id': action_id.id,
            'view_id': tree_view.id,
            'state': 'created'
        })

    def _create_bve_object(self):
        """Build the backing SQL view, the dynamic ``ir.model`` and its
        access rules from the stored field selection."""

        def _get_fields_info(fields_data):
            # Resolve each selected ir.model.fields id into the table,
            # alias, column and (optional) join node it contributes.
            fields_info = []
            for field_data in fields_data:
                field = self.env['ir.model.fields'].browse(field_data['id'])
                vals = {
                    'table': self.env[field.model_id.model]._table,
                    'table_alias': field_data['table_alias'],
                    'select_field': field.name,
                    'as_field': 'x_' + field_data['name'],
                    'join': False,
                    'model': field.model_id.model
                }
                if field_data.get('join_node'):
                    vals.update({'join': field_data['join_node']})
                fields_info.append(vals)
            return fields_info

        def _build_query():
            # Assemble and execute the CREATE OR REPLACE VIEW statement
            # from the selected columns, tables and join conditions.
            data = self.data
            if not data:
                raise UserError(_('No data to process.'))

            formatted_data = json.loads(self._get_format_data(data))
            info = _get_fields_info(formatted_data)
            fields = [("{}.{}".format(f['table_alias'], f['select_field']),
                       f['as_field']) for f in info if 'join_node' not in f]
            tables = set([(f['table'], f['table_alias']) for f in info])
            join_nodes = [(f['table_alias'], f['join'], f['select_field'])
                          for f in info if f['join'] is not False]

            table_name = self.model_name.replace('.', '_')
            tools.drop_view_if_exists(self.env.cr, table_name)

            # Standard bookkeeping columns aliased from the first table.
            basic_fields = [("t0.id", "id"), ("t0.write_uid", "write_uid"),
                            ("t0.write_date", "write_date"),
                            ("t0.create_uid", "create_uid"),
                            ("t0.create_date", "create_date")]

            q = """CREATE or REPLACE VIEW %s as (
                SELECT %s
                FROM  %s
                WHERE %s
                )""" % (table_name, ','.join([
                "{} AS {}".format(f[0], f[1])
                for f in basic_fields + fields
            ]), ','.join([
                "{} AS {}".format(t[0], t[1]) for t in list(tables)
            ]), " AND ".join(
                ["{}.{} = {}.id".format(j[0], j[2], j[1])
                 for j in join_nodes] + ["TRUE"]))

            self.env.cr.execute(q)

        def _prepare_field(field_data):
            # Translate one selected field into an ir.model.fields vals
            # dict for the dynamic model (monetary collapses to float;
            # unset selections are copied from the source model).
            if not field_data['custom']:
                field = self.env['ir.model.fields'].browse(field_data['id'])
                vals = {
                    'name': 'x_' + field_data['name'],
                    'complete_name': field.complete_name,
                    'model': self.model_name,
                    'relation': field.relation,
                    'field_description': field_data.get(
                        'description', field.field_description),
                    'ttype': field.ttype,
                    'selection': field.selection,
                    'size': field.size,
                    'state': 'manual'
                }
                if vals['ttype'] == 'monetary':
                    vals.update({'ttype': 'float'})
                if field.ttype == 'selection' and not field.selection:
                    model_obj = self.env[field.model_id.model]
                    selection = model_obj._columns[field.name].selection
                    selection_domain = str(selection)
                    vals.update({'selection': selection_domain})
                return vals

        def _prepare_object():
            data = json.loads(self._get_format_data(self.data))
            return {
                'name': self.name,
                'model': self.model_name,
                'field_id': [(0, 0, _prepare_field(field))
                             for field in data if 'join_node' not in field]
            }

        def _build_object():
            vals = _prepare_object()
            Model = self.env['ir.model']
            res_id = Model.sudo().with_context(bve=True).create(vals)
            return res_id

        def group_ids_with_access(model_name, access_mode):
            # Ids of groups holding the given access mode on the model.
            self.env.cr.execute(
                '''SELECT g.id
                FROM ir_model_access a
                    JOIN ir_model m ON (a.model_id=m.id)
                    JOIN res_groups g ON (a.group_id=g.id)
                    LEFT JOIN ir_module_category c ON (c.id=g.category_id)
                WHERE m.model=%s AND
                      a.active IS True AND
                      a.perm_''' + access_mode, (model_name, ))
            return [x[0] for x in self.env.cr.fetchall()]

        def _build_access_rules(obj):
            # Read access only for groups that can read every source
            # model; read-write for the explicitly configured groups.
            info = json.loads(self._get_format_data(self.data))
            models = list(set([f['model'] for f in info]))
            read_groups = set.intersection(*[
                set(group_ids_with_access(model, 'read')) for model in models
            ])

            # read access
            for group in read_groups:
                self.env['ir.model.access'].sudo().create({
                    'name': 'read access to ' + self.model_name,
                    'model_id': obj.id,
                    'group_id': group,
                    'perm_read': True,
                })

            # read and write access
            for group in self.group_ids:
                self.env['ir.model.access'].sudo().create({
                    'name': 'read-write access to ' + self.model_name,
                    'model_id': obj.id,
                    'group_id': group.id,
                    'perm_read': True,
                    'perm_write': True,
                })
            return

        # Derive the technical model name from the display name.
        self.model_name = 'x_bve.' + ''.join([
            x for x in self.name.lower() if x.isalnum()
        ]).replace('_', '.').replace(' ', '.')
        _build_query()
        obj = _build_object()
        _build_access_rules(obj)
        self.env.cr.commit()

    @api.multi
    def open_view(self):
        """Open the generated model in tree/graph/pivot mode."""
        self.ensure_one()
        return {
            'type': 'ir.actions.act_window',
            'res_model': self.model_name,
            'view_type': 'form',
            'view_mode': 'tree,graph,pivot',
        }

    @api.multi
    def copy(self, default=None):
        """Duplicate with a '(copy)' suffix to satisfy the unique-name
        constraint."""
        self.ensure_one()
        default = dict(default or {}, name=_("%s (copy)") % self.name)
        return super(BveView, self).copy(default=default)

    @api.multi
    def unlink(self):
        """Forbid deleting a materialized view; it must be reset to
        draft first so the generated artifacts are torn down."""
        for view in self:
            if view.state == 'created':
                raise UserError(
                    _('You cannot delete a created view! '
                      'Reset the view to draft first.'))
        return super(BveView, self).unlink()
class Applicant(models.Model):
    """A candidate's application to a job position (recruitment pipeline)."""
    _name = "hr.applicant"
    _description = "Applicant"
    _order = "priority desc, id desc"
    _inherit = ['mail.thread', 'utm.mixin']
    _mail_mass_mailing = _('Applicants')

    def _default_stage_id(self):
        """Return the first non-folded stage of the contextual job, if any."""
        if self._context.get('default_job_id'):
            ids = self.env['hr.recruitment.stage'].search([
                '|',
                ('job_id', '=', False),
                ('job_id', '=', self._context['default_job_id']),
                ('fold', '=', False)
            ], order='sequence asc', limit=1).ids
            if ids:
                return ids[0]
        return False

    def _default_company_id(self):
        """Company of the contextual department, else the user's default company."""
        company_id = False
        if self._context.get('default_department_id'):
            department = self.env['hr.department'].browse(
                self._context['default_department_id'])
            company_id = department.company_id.id
        if not company_id:
            company_id = self.env['res.company']._company_default_get(
                'hr.applicant')
        return company_id

    name = fields.Char("Subject / Application Name", required=True)
    active = fields.Boolean(
        "Active", default=True,
        help="If the active field is set to false, it will allow you to hide the case without removing it.")
    description = fields.Text("Description")
    email_from = fields.Char("Email", size=128,
                             help="These people will receive email.")
    email_cc = fields.Text(
        "Watchers Emails", size=252,
        help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma")
    probability = fields.Float("Probability")
    partner_id = fields.Many2one('res.partner', "Contact")
    create_date = fields.Datetime("Creation Date", readonly=True, index=True)
    write_date = fields.Datetime("Update Date", readonly=True)
    stage_id = fields.Many2one(
        'hr.recruitment.stage', 'Stage', track_visibility='onchange',
        domain="['|', ('job_id', '=', False), ('job_id', '=', job_id)]",
        copy=False, index=True,
        group_expand='_read_group_stage_ids',
        default=_default_stage_id)
    last_stage_id = fields.Many2one(
        'hr.recruitment.stage', "Last Stage",
        help="Stage of the applicant before being in the current stage. Used for lost cases analysis.")
    categ_ids = fields.Many2many('hr.applicant.category', string="Tags")
    company_id = fields.Many2one('res.company', "Company",
                                 default=_default_company_id)
    user_id = fields.Many2one('res.users', "Responsible",
                              track_visibility="onchange",
                              default=lambda self: self.env.uid)
    date_closed = fields.Datetime("Closed", readonly=True, index=True)
    date_open = fields.Datetime("Assigned", readonly=True, index=True)
    date_last_stage_update = fields.Datetime("Last Stage Update", index=True,
                                             default=fields.Datetime.now)
    date_action = fields.Date("Next Action Date")
    title_action = fields.Char("Next Action", size=64)
    priority = fields.Selection(AVAILABLE_PRIORITIES, "Appreciation",
                                default='0')
    job_id = fields.Many2one('hr.job', "Applied Job")
    salary_proposed_extra = fields.Char(
        "Proposed Salary Extra",
        help="Salary Proposed by the Organisation, extra advantages")
    salary_expected_extra = fields.Char(
        "Expected Salary Extra",
        help="Salary Expected by Applicant, extra advantages")
    salary_proposed = fields.Float("Proposed Salary",
                                   help="Salary Proposed by the Organisation")
    salary_expected = fields.Float("Expected Salary",
                                   help="Salary Expected by Applicant")
    availability = fields.Date(
        "Availability",
        help="The date at which the applicant will be available to start working")
    partner_name = fields.Char("Applicant's Name")
    partner_phone = fields.Char("Phone", size=32)
    partner_mobile = fields.Char("Mobile", size=32)
    type_id = fields.Many2one('hr.recruitment.degree', "Degree")
    department_id = fields.Many2one('hr.department', "Department")
    reference = fields.Char("Referred By")
    day_open = fields.Float(compute='_compute_day', string="Days to Open")
    day_close = fields.Float(compute='_compute_day', string="Days to Close")
    color = fields.Integer("Color Index", default=0)
    emp_id = fields.Many2one('hr.employee', string="Employee",
                             track_visibility="onchange",
                             help="Employee linked to the applicant.")
    user_email = fields.Char(related='user_id.email', type="char",
                             string="User Email", readonly=True)
    attachment_number = fields.Integer(compute='_get_attachment_number',
                                       string="Number of Attachments")
    employee_name = fields.Char(related='emp_id.name', string="Employee Name")
    attachment_ids = fields.One2many(
        'ir.attachment', 'res_id',
        domain=[('res_model', '=', 'hr.applicant')],
        string='Attachments')

    @api.depends('date_open', 'date_closed')
    @api.one
    def _compute_day(self):
        """Compute elapsed days between creation and assignment / closing."""
        if self.date_open:
            date_create = datetime.strptime(
                self.create_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            date_open = datetime.strptime(
                self.date_open, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            self.day_open = (date_open - date_create).total_seconds() / (24.0 * 3600)
        if self.date_closed:
            date_create = datetime.strptime(
                self.create_date, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            date_closed = datetime.strptime(
                self.date_closed, tools.DEFAULT_SERVER_DATETIME_FORMAT)
            self.day_close = (date_closed - date_create).total_seconds() / (24.0 * 3600)

    @api.multi
    def _get_attachment_number(self):
        """Count attachments per applicant with a single read_group query."""
        read_group_res = self.env['ir.attachment'].read_group(
            [('res_model', '=', 'hr.applicant'), ('res_id', 'in', self.ids)],
            ['res_id'], ['res_id'])
        attach_data = dict(
            (res['res_id'], res['res_id_count']) for res in read_group_res)
        for record in self:
            record.attachment_number = attach_data.get(record.id, 0)

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        # retrieve job_id from the context and write the domain:
        # ids + contextual columns (job or default)
        job_id = self._context.get('default_job_id')
        search_domain = [('job_id', '=', False)]
        if job_id:
            search_domain = ['|', ('job_id', '=', job_id)] + search_domain
        if stages:
            search_domain = ['|', ('id', 'in', stages.ids)] + search_domain
        stage_ids = stages._search(search_domain, order=order,
                                   access_rights_uid=SUPERUSER_ID)
        return stages.browse(stage_ids)

    @api.onchange('job_id')
    def onchange_job_id(self):
        """Propagate the job's department/responsible/stage onto the form."""
        vals = self._onchange_job_id_internal(self.job_id.id)
        self.department_id = vals['value']['department_id']
        self.user_id = vals['value']['user_id']
        self.stage_id = vals['value']['stage_id']

    def _onchange_job_id_internal(self, job_id):
        """Return the department, responsible and stage implied by ``job_id``."""
        department_id = False
        user_id = False
        stage_id = self.stage_id.id
        if job_id:
            job = self.env['hr.job'].browse(job_id)
            department_id = job.department_id.id
            user_id = job.user_id.id
            if not self.stage_id:
                stage_ids = self.env['hr.recruitment.stage'].search([
                    '|',
                    ('job_id', '=', False),
                    ('job_id', '=', job.id),
                    ('fold', '=', False)
                ], order='sequence asc', limit=1).ids
                stage_id = stage_ids[0] if stage_ids else False
        return {'value': {
            'department_id': department_id,
            'user_id': user_id,
            'stage_id': stage_id
        }}

    @api.onchange('partner_id')
    def onchange_partner_id(self):
        """Copy the contact's phone/mobile/email onto the application."""
        self.partner_phone = self.partner_id.phone
        self.partner_mobile = self.partner_id.mobile
        self.email_from = self.partner_id.email

    @api.onchange('stage_id')
    def onchange_stage_id(self):
        """Set the closing date when the applicant reaches a folded stage."""
        vals = self._onchange_stage_id_internal(self.stage_id.id)
        if vals['value'].get('date_closed'):
            self.date_closed = vals['value']['date_closed']

    def _onchange_stage_id_internal(self, stage_id):
        """date_closed is set on folded stages, reset otherwise."""
        if not stage_id:
            return {'value': {}}
        stage = self.env['hr.recruitment.stage'].browse(stage_id)
        if stage.fold:
            return {'value': {'date_closed': fields.datetime.now()}}
        return {'value': {'date_closed': False}}

    @api.model
    def create(self, vals):
        """Apply job/stage defaults before creating, without chatter log."""
        if vals.get('department_id') and not self._context.get(
                'default_department_id'):
            self = self.with_context(
                default_department_id=vals.get('department_id'))
        if vals.get('job_id') or self._context.get('default_job_id'):
            job_id = vals.get('job_id') or self._context.get('default_job_id')
            # .items() works on both Python 2 and 3; the original
            # .iteritems() crashes under Python 3.
            for key, value in self._onchange_job_id_internal(
                    job_id)['value'].items():
                if key not in vals:
                    vals[key] = value
        if vals.get('user_id'):
            vals['date_open'] = fields.Datetime.now()
        if 'stage_id' in vals:
            vals.update(
                self._onchange_stage_id_internal(vals.get('stage_id'))['value'])
        return super(Applicant,
                     self.with_context(mail_create_nolog=True)).create(vals)

    @api.multi
    def write(self, vals):
        # user_id change: update date_open
        if vals.get('user_id'):
            vals['date_open'] = fields.Datetime.now()
        # stage_id: track last stage before update
        if 'stage_id' in vals:
            vals['date_last_stage_update'] = fields.Datetime.now()
            vals.update(
                self._onchange_stage_id_internal(vals.get('stage_id'))['value'])
            for applicant in self:
                vals['last_stage_id'] = applicant.stage_id.id
                res = super(Applicant, self).write(vals)
        else:
            res = super(Applicant, self).write(vals)
        return res

    @api.model
    def get_empty_list_help(self, help):
        """Customize the empty-kanban helper to point at the contextual job."""
        return super(Applicant, self.with_context(
            empty_list_help_model='hr.job',
            empty_list_help_id=self.env.context.get('default_job_id'),
            empty_list_help_document_name=_(
                "job applicants"))).get_empty_list_help(help)

    @api.multi
    def action_get_created_employee(self):
        """Open the employee record created from this applicant."""
        self.ensure_one()
        action = self.env['ir.actions.act_window'].for_xml_id(
            'hr', 'open_view_employee_list')
        action['res_id'] = self.mapped('emp_id').ids[0]
        return action

    @api.multi
    def action_makeMeeting(self):
        """ This opens Meeting's calendar view to schedule meeting on current applicant
            @return: Dictionary value for created Meeting view
        """
        self.ensure_one()
        partners = (self.partner_id | self.user_id.partner_id |
                    self.department_id.manager_id.user_id.partner_id)
        category = self.env.ref('hr_recruitment.categ_meet_interview')
        res = self.env['ir.actions.act_window'].for_xml_id(
            'calendar', 'action_calendar_event')
        res['context'] = {
            'search_default_partner_ids': self.partner_id.name,
            'default_partner_ids': partners.ids,
            'default_user_id': self.env.uid,
            'default_name': self.name,
            'default_categ_ids': category and [category.id] or False,
        }
        return res

    @api.multi
    def action_get_attachment_tree_view(self):
        """Open the attachments related to the selected applicants."""
        attachment_action = self.env.ref('base.action_attachment')
        action = attachment_action.read()[0]
        action['context'] = {
            'default_res_model': self._name,
            'default_res_id': self.ids[0]
        }
        action['domain'] = str(
            ['&', ('res_model', '=', self._name), ('res_id', 'in', self.ids)])
        action['search_view_id'] = (self.env.ref(
            'hr_recruitment.ir_attachment_view_search_inherit_hr_recruitment').
            id, )
        return action

    @api.multi
    def _track_template(self, tracking):
        """Use the stage's mail template when the stage changed."""
        res = super(Applicant, self)._track_template(tracking)
        applicant = self[0]
        changes, dummy = tracking[applicant.id]
        if 'stage_id' in changes and applicant.stage_id.template_id:
            res['stage_id'] = (applicant.stage_id.template_id, {
                'composition_mode': 'mass_mail'
            })
        return res

    @api.multi
    def _track_subtype(self, init_values):
        """Pick the chatter subtype matching the change (hired/new/stage)."""
        record = self[0]
        if 'emp_id' in init_values and record.emp_id:
            return 'hr_recruitment.mt_applicant_hired'
        elif 'stage_id' in init_values and record.stage_id and \
                record.stage_id.sequence <= 1:
            return 'hr_recruitment.mt_applicant_new'
        elif 'stage_id' in init_values and record.stage_id and \
                record.stage_id.sequence > 1:
            return 'hr_recruitment.mt_applicant_stage_changed'
        return super(Applicant, self)._track_subtype(init_values)

    @api.model
    def message_get_reply_to(self, ids, default=None):
        """ Override to get the reply_to of the parent project. """
        applicants = self.sudo().browse(ids)
        aliases = self.env['hr.job'].message_get_reply_to(
            applicants.mapped('job_id').ids, default=default)
        return dict(
            (applicant.id,
             aliases.get(applicant.job_id and applicant.job_id.id or 0, False))
            for applicant in applicants)

    @api.multi
    def message_get_suggested_recipients(self):
        """Suggest the applicant's contact (or raw email) as a recipient."""
        recipients = super(Applicant, self).message_get_suggested_recipients()
        for applicant in self:
            if applicant.partner_id:
                applicant._message_add_suggested_recipient(
                    recipients, partner=applicant.partner_id,
                    reason=_('Contact'))
            elif applicant.email_from:
                applicant._message_add_suggested_recipient(
                    recipients, email=applicant.email_from,
                    reason=_('Contact Email'))
        return recipients

    @api.model
    def message_new(self, msg, custom_values=None):
        """ Overrides mail_thread message_new that is called by the
            mailgateway through message_process.
            This override updates the document according to the email.
        """
        # remove default author when going through the mail gateway. Indeed we
        # do not want to explicitly set user_id to False; however we do not
        # want the gateway user to be responsible if no other responsible is
        # found.
        self = self.with_context(default_user_id=False)
        val = msg.get('from').split('<')[0]
        defaults = {
            'name': msg.get('subject') or _("No Subject"),
            'partner_name': val,
            'email_from': msg.get('from'),
            'email_cc': msg.get('cc'),
            'partner_id': msg.get('author_id', False),
        }
        if msg.get('priority'):
            defaults['priority'] = msg.get('priority')
        if custom_values:
            defaults.update(custom_values)
        return super(Applicant, self).message_new(msg, custom_values=defaults)

    @api.multi
    def create_employee_from_applicant(self):
        """ Create an hr.employee from the hr.applicants """
        employee = False
        for applicant in self:
            address_id = contact_name = False
            if applicant.partner_id:
                address_id = applicant.partner_id.address_get(
                    ['contact'])['contact']
                contact_name = applicant.partner_id.name_get()[0][1]
            if applicant.job_id and (applicant.partner_name or contact_name):
                applicant.job_id.write({
                    'no_of_hired_employee':
                    applicant.job_id.no_of_hired_employee + 1
                })
                employee = self.env['hr.employee'].create({
                    'name': applicant.partner_name or contact_name,
                    'job_id': applicant.job_id.id,
                    'address_home_id': address_id,
                    'department_id': applicant.department_id.id or False,
                    'address_id': applicant.company_id and
                    applicant.company_id.partner_id and
                    applicant.company_id.partner_id.id or False,
                    'work_email': applicant.department_id and
                    applicant.department_id.company_id and
                    applicant.department_id.company_id.email or False,
                    'work_phone': applicant.department_id and
                    applicant.department_id.company_id and
                    applicant.department_id.company_id.phone or False
                })
                applicant.write({'emp_id': employee.id})
                # BUG FIX: parenthesize the conditional expression. Without the
                # parentheses the whole formatted message was replaced by the
                # bare applicant name whenever partner_name was not set.
                applicant.job_id.message_post(
                    body=_('New Employee %s Hired') % (
                        applicant.partner_name if applicant.partner_name
                        else applicant.name),
                    subtype="hr_recruitment.mt_job_applicant_hired")
                employee._broadcast_welcome()
            else:
                raise UserError(
                    _('You must define an Applied Job and a Contact Name for this applicant.'
                      ))
        employee_action = self.env.ref('hr.open_view_employee_list')
        dict_act_window = employee_action.read([])[0]
        if employee:
            dict_act_window['res_id'] = employee.id
        dict_act_window['view_mode'] = 'form,tree'
        return dict_act_window

    @api.multi
    def archive_applicant(self):
        """Hide the applicant without deleting it."""
        self.write({'active': False})

    @api.multi
    def reset_applicant(self):
        """ Reinsert the applicant into the recruitment pipe in the first stage"""
        default_stage_id = self._default_stage_id()
        self.write({'active': True, 'stage_id': default_stage_id})
def update_transporter(self):
    ''' Assign (or re-assign) the wizard's transporter to the linked event.

    If a transporter is already assigned, a confirmation warning is returned
    (vendors are blocked with an error). Otherwise the allocation history is
    created/updated, the event is marked 'allocated' and the notification
    template is sent.
    '''
    res = True
    self = self.with_context(transporter=[])
    mod_obj = self.env['ir.model.data']
    cur_obj = self
    event = cur_obj.event_id
    user = self.env.user
    if event:
        if event.transporter_id:
            # A vendor may not override an existing assignment.
            if user.user_type and user.user_type == 'vendor':
                raise UserError(
                    _('The Transporter has already been assigned to this event'
                      ))
            title = "Transporter Already Assigned"
            message = " The Transporter '%s %s' has been already assigned to the event. Do you want to change it?" % (
                event.transporter_id.name,
                event.transporter_id.last_name or '')
            self = self.with_context(event_id=event.id,
                                     history_id=cur_obj.history_id.id)
            return self.env['warning.transporter'].warning(title, message)
        else:
            # Values shared by the create and write branches below
            # (previously duplicated verbatim).
            history_vals = {
                'partner_id':
                event.partner_id and event.partner_id.id or False,
                'name':
                cur_obj.history_id and cur_obj.history_id.transporter_id and
                cur_obj.history_id.transporter_id.id or False,
                'event_id': event.id,
                'event_date': event.event_date,
                'event_start': event.event_start,
                'event_end': event.event_end,
                'state': 'allocated',
                'company_id':
                event.company_id and event.company_id.id or False,
                'allocate_date': time.strftime('%Y-%m-%d %H:%M:%S'),
                'language_id': event.language_id.id
            }
            if not event.history_id2:
                history_id2 = self.env['transporter.alloc.history'].sudo(
                ).create(history_vals)
            else:
                # BUG FIX: write() returns a boolean, not a record. The
                # original bound that boolean to history_id2 and then
                # crashed on history_id2.id below; keep the existing
                # record instead.
                event.history_id2.sudo().write(history_vals)
                history_id2 = event.history_id2
            res = event.sudo().write({
                'transporter_id':
                cur_obj.transporter_id and cur_obj.transporter_id.id or False,
                'state': 'allocated',
                'schedule_event_time': time.strftime('%Y-%m-%d %H:%M:%S'),
                'history_id2': history_id2.id
            })
            res = cur_obj.history_id.sudo().write({'state': 'assigned'})
            # Notify the transporter about the allocation.
            template_id1 = mod_obj.sudo().get_object_reference(
                'bista_iugroup', 'event_allocation_transporter')[1]
            if template_id1:
                if user.user_type and user.user_type == 'vendor':
                    self.env['mail.template'].sudo().browse(
                        template_id1).send_mail(event.id)
                else:
                    # NOTE(review): action_mail_send is a project helper;
                    # signature (record, model name, template id) kept as-is
                    # — verify against its definition.
                    res = event.sudo().action_mail_send(
                        event, 'event', template_id1)
    return res
def show_recipients(self):
    """Find interpreter partners matching the wizard's filters and store them.

    Filters: state, zip (exact, or within ``radius`` miles via zcdb), gender,
    age range, and language (optionally restricted by certification level
    and/or simultaneous capability). Matching partner ids are written to
    ``interpreter_ids`` and the label/flag fields are toggled accordingly.
    """
    zip2 = self.zip
    language_id2 = self.language_id.id
    state_id2 = self.state_id.id
    radius2 = self.radius
    notify_pids = []
    partner_obj = self.env['res.partner']
    logger = logging.getLogger('test2')

    # ---- base search domain -------------------------------------------
    domain = [('cust_type', '=', 'interpreter'), ('name', '!=', ''),
              ('is_interpretation_active', '=', True)]
    if state_id2:
        domain.append(('state_id', '=', state_id2))
    if zip2 and not radius2:
        domain.append(('zip', '=', zip2))
    if self.gender:
        domain.append(('gender', '=', self.gender))
    if self.age_from and self.age_to:
        cur = ((datetime.datetime.now()) - relativedelta.relativedelta(
            years=self.age_from)).strftime('%Y-%m-%d')
        domain.append(('dob', '<=', cur))
    if self.age_from and not self.age_to:
        raise UserError(_('Please specify Age to as well'))
    if not self.age_from and self.age_to:
        raise UserError(_('Please specify Age From as well'))
    if self.age_to and self.age_from:
        if self.age_to < self.age_from:
            raise UserError(_('Age to should be greater than Age From'))
        else:
            cur = ((datetime.datetime.now()) - relativedelta.relativedelta(
                years=self.age_to)).strftime('%Y-%m-%d')
            domain.append(('dob', '>=', cur))
    logger.info("This is notify pids------->%s " % str((domain)))
    partner_ids = partner_obj.search(domain)

    # ---- helpers ------------------------------------------------------
    def _zip_match_ids(partners):
        """Ids of partners whose zip lies within radius2 of zip2.

        The zcdb lookup is done once (lazily, so no partner with a zip
        means no lookup, matching the original behavior) instead of once
        per partner.
        """
        matched = []
        zips = None
        for partner in partners:
            if not partner.zip:
                continue
            if zips is None:
                try:
                    zips = [
                        z.zip for z in zcdb.get_zipcodes_around_radius(
                            zip2, radius2)
                    ]
                except ZipNotFoundException:
                    raise UserError(_('Please Enter valid ZIPCODE'))
            # substring test kept from the original implementation
            if any(partner.zip in myzip for myzip in zips):
                matched.append(partner.id)
        return matched

    def _language_match_ids(partners):
        """Ids of partners with a language line matching language/cert/simul.

        Replaces four near-identical branches of the original; each partner
        is appended at most once (the original's ``continue`` could append
        the same partner once per matching language line).
        """
        need_cert = self.certification_level_id
        need_simul = self.is_simultaneous
        matched = []
        for partner in partners:
            for rec in partner.language_lines:
                if language_id2 != rec.name.id:
                    continue
                if need_cert and rec.certification_level_id != need_cert:
                    continue
                if need_simul and not rec.is_simultaneous:
                    continue
                matched.append(partner.id)
                break
        return matched

    # ---- dispatch on the active filters -------------------------------
    if partner_ids:
        if not radius2 and not language_id2:
            notify_pids = partner_ids.ids
        elif zip2 and radius2 and not language_id2:
            notify_pids = _zip_match_ids(partner_ids)
        elif not radius2 and language_id2:
            notify_pids = _language_match_ids(partner_ids)
        elif zip2 and radius2 and language_id2:
            notify_pids = _language_match_ids(
                partner_obj.browse(_zip_match_ids(partner_ids)))

    # ---- persist result ----------------------------------------------
    if not notify_pids:
        logger.info("This is notify pids no------->%s " %
                    str(len(notify_pids)))
        self.write({'label_flag': True, 'flag': False})
        # NOTE(review): explicit commit kept from the original — presumably
        # so the flags survive a later rollback; confirm it is intended.
        self._cr.commit()
    else:
        logger.info("This is notify pids yes------->%s " %
                    str(len(notify_pids)))
        result = self.write({
            'interpreter_ids': [(6, 0, notify_pids)],
            'label_flag': False,
            'flag': True
        })
        self._cr.commit()
    return True
def web_login(self, redirect=None, **kw):
    """Standard web login, extended to email the user a login alert when the
    account is accessed from a new OS / browser / IP fingerprint (only for
    users in the ``receive_login_notification`` group with an email set).
    """
    def _esc(value):
        # Minimal HTML escaping for header-derived values: the user agent is
        # attacker-controlled and is interpolated into an HTML email body.
        return value.replace('&', '&amp;').replace('<', '&lt;').replace(
            '>', '&gt;')

    main.ensure_db()
    request.params['login_success'] = False
    if request.httprequest.method == 'GET' and redirect and \
            request.session.uid:
        return http.redirect_with_hash(redirect)

    if not request.uid:
        request.uid = odoo.SUPERUSER_ID

    values = request.params.copy()
    try:
        values['databases'] = http.db_list()
    except odoo.exceptions.AccessDenied:
        values['databases'] = None

    if request.httprequest.method == 'POST':
        old_uid = request.uid
        uid = request.session.authenticate(request.session.db,
                                           request.params['login'],
                                           request.params['password'])
        if uid is not False:
            user_rec = request.env['res.users'].sudo().search([('id', '=',
                                                                uid)])
            if user_rec.partner_id.email and user_rec.has_group(
                    'user_login_alert.receive_login_notification'):
                agent = request.httprequest.environ.get('HTTP_USER_AGENT')
                agent_details = httpagentparser.detect(agent)
                # Robustness: httpagentparser may fail to detect os/browser
                # for unknown user agents; the original KeyError'd here.
                user_os = agent_details.get('os', {}).get('name', 'Unknown')
                browser_name = agent_details.get('browser',
                                                 {}).get('name', 'Unknown')
                ip_address = request.httprequest.environ['REMOTE_ADDR']
                # Alert when no fingerprint is stored yet, or when any part
                # of the stored fingerprint differs from the current one.
                had_fingerprint = (user_rec.last_logged_ip and
                                   user_rec.last_logged_browser and
                                   user_rec.last_logged_os)
                changed = (user_rec.last_logged_ip != ip_address or
                           user_rec.last_logged_browser != browser_name or
                           user_rec.last_logged_os != user_os)
                send_mail = not had_fingerprint or changed
                if send_mail:
                    user_rec.last_logged_ip = ip_address
                    user_rec.last_logged_browser = browser_name
                    user_rec.last_logged_os = user_os
                    email_to = user_rec.partner_id.email
                    current_date_time = strftime("%Y-%m-%d %H:%M:%S",
                                                 gmtime())
                    message_body = 'Hi ' + user_rec.name + \
                        ' , Your account has been ' \
                        'accessed successfully. The details of the ' \
                        'system from which the account is accessed ...,'
                    message_body += ('<table border="1" width="100%" '
                                     'cellpadding="0" bgcolor="#ededed">')
                    for label, value in (('OS', user_os),
                                         ('Browser', browser_name),
                                         ('IP Address', ip_address)):
                        message_body += ('<tr><td>' + label + '</td>'
                                         '<td>' + _esc(value) +
                                         '</td></tr>')
                    message_body += '</table>'
                    message_body += 'Thank you'
                    mail = request.env['mail.mail'].create({
                        'subject': 'Login Alert : ' + current_date_time,
                        'body_html': message_body,
                        'email_from': request.env.user.company_id.email,
                        'email_to': email_to
                    })
                    # BUG FIX: send() must be invoked on the created mail
                    # record. The original called it on the empty model
                    # recordset (template_obj.send(template_id)), which
                    # swallowed the record as a positional argument and
                    # never sent anything.
                    mail.send()
            request.params['login_success'] = True
            if not redirect:
                redirect = '/web'
            return http.redirect_with_hash(redirect)
        request.uid = old_uid
        values['error'] = _("Wrong login/password")
    return request.render('web.login', values)