class base_action_rule(osv.osv):
    """ Base Action Rules.

    An action rule watches a document model and runs server actions /
    record updates when a trigger fires: record creation, update,
    deletion, a form onchange, or a timed condition checked by the
    scheduler.
    """
    _name = 'base.action.rule'
    _description = 'Action Rules'
    _order = 'sequence'

    _columns = {
        'name': fields.char('Rule Name', required=True),
        'model_id': fields.many2one(
            'ir.model', 'Related Document Model', required=True,
            domain=[('transient', '=', False)]),
        'model': fields.related('model_id', 'model', type="char", string='Model'),
        'create_date': fields.datetime('Create Date', readonly=1),
        'active': fields.boolean(
            'Active',
            help="When unchecked, the rule is hidden and will not be executed."),
        'sequence': fields.integer(
            'Sequence',
            help="Gives the sequence order when displaying a list of rules."),
        'kind': fields.selection(
            [('on_create', 'On Creation'),
             ('on_write', 'On Update'),
             ('on_create_or_write', 'On Creation & Update'),
             ('on_unlink', 'On Deletion'),
             ('on_change', 'Based on Form Modification'),
             ('on_time', 'Based on Timed Condition')],
            string='When to Run'),
        'trg_date_id': fields.many2one(
            'ir.model.fields', string='Trigger Date',
            help="When should the condition be triggered. If present, will be checked by the scheduler. If empty, will be checked at creation and update.",
            domain="[('model_id', '=', model_id), ('ttype', 'in', ('date', 'datetime'))]"),
        'trg_date_range': fields.integer(
            'Delay after trigger date',
            help="Delay after the trigger date."
                 "You can put a negative number if you need a delay before the"
                 "trigger date, like sending a reminder 15 minutes before a meeting."),
        'trg_date_range_type': fields.selection(
            [('minutes', 'Minutes'), ('hour', 'Hours'),
             ('day', 'Days'), ('month', 'Months')], 'Delay type'),
        'trg_date_calendar_id': fields.many2one(
            'resource.calendar', 'Use Calendar',
            help='When calculating a day-based timed condition, it is possible to use a calendar to compute the date based on working days.',
            ondelete='set null',
        ),
        'act_user_id': fields.many2one('res.users', 'Set Responsible'),
        'act_followers': fields.many2many("res.partner", string="Add Followers"),
        'server_action_ids': fields.many2many(
            'ir.actions.server', string='Server Actions',
            domain="[('model_id', '=', model_id)]",
            help="Examples: email reminders, call object service, etc."),
        'filter_pre_id': fields.many2one(
            'ir.filters', string='Before Update Filter',
            ondelete='restrict',
            domain="[('model_id', '=', model_id.model)]",
            help="If present, this condition must be satisfied before the update of the record."),
        'filter_pre_domain': fields.char(
            string='Before Update Domain',
            help="If present, this condition must be satisfied before the update of the record."),
        'filter_id': fields.many2one(
            'ir.filters', string='Filter',
            ondelete='restrict',
            domain="[('model_id', '=', model_id.model)]",
            help="If present, this condition must be satisfied before executing the action rule."),
        'filter_domain': fields.char(
            string='Domain',
            help="If present, this condition must be satisfied before executing the action rule."),
        'last_run': fields.datetime('Last Run', readonly=1, copy=False),
        'on_change_fields': fields.char(
            string="On Change Fields Trigger",
            help="Comma-separated list of field names that triggers the onchange."),
    }

    # which fields have an impact on the registry (changing them forces a
    # registry reload so the patched create/write/unlink hooks stay in sync)
    CRITICAL_FIELDS = ['model_id', 'active', 'kind', 'on_change_fields']

    _defaults = {
        'active': True,
        'trg_date_range_type': 'day',
    }

    def onchange_kind(self, cr, uid, ids, kind, context=None):
        """Clear the fields that are irrelevant for the selected trigger kind.

        NOTE(review): 'on_create_or_write' also appears in the second branch,
        but the first branch already matches it, so the elif never fires for
        that value — kept as-is to preserve upstream behavior.
        """
        clear_fields = []
        if kind in ['on_create', 'on_create_or_write', 'on_unlink']:
            clear_fields = ['filter_pre_id', 'filter_pre_domain', 'trg_date_id',
                            'trg_date_range', 'trg_date_range_type']
        elif kind in ['on_write', 'on_create_or_write']:
            clear_fields = ['trg_date_id', 'trg_date_range', 'trg_date_range_type']
        elif kind == 'on_time':
            clear_fields = ['filter_pre_id', 'filter_pre_domain']
        return {'value': dict.fromkeys(clear_fields, False)}

    def onchange_filter_pre_id(self, cr, uid, ids, filter_pre_id, context=None):
        """Copy the selected pre-filter's domain into filter_pre_domain."""
        ir_filter = self.pool['ir.filters'].browse(cr, uid, filter_pre_id, context=context)
        return {'value': {'filter_pre_domain': ir_filter.domain}}

    def onchange_filter_id(self, cr, uid, ids, filter_id, context=None):
        """Copy the selected filter's domain into filter_domain."""
        ir_filter = self.pool['ir.filters'].browse(cr, uid, filter_id, context=context)
        return {'value': {'filter_domain': ir_filter.domain}}

    @ecore.api.model
    def _get_actions(self, records, kinds):
        """ Return the actions of the given kinds for records' model. The
            returned actions' context contain an object to manage processing.
        """
        if '__action_done' not in self._context:
            self = self.with_context(__action_done={})
        domain = [('model', '=', records._name), ('kind', 'in', kinds)]
        actions = self.with_context(active_test=True).search(domain)
        return actions.with_env(self.env)

    @ecore.api.model
    def _get_eval_context(self):
        """ Prepare the context used when evaluating python code

            :returns: dict -- evaluation context given to (safe_)eval
        """
        return {
            'datetime': DT,
            'dateutil': dateutil,
            'time': time,
            'uid': self.env.uid,
            'user': self.env.user,
        }

    @ecore.api.model
    def _filter_pre(self, records):
        """ Filter the records that satisfy the precondition of action ``self``. """
        if self.filter_pre_id and records:
            eval_context = self._get_eval_context()
            domain = [('id', 'in', records.ids)] + eval(self.filter_pre_id.domain, eval_context)
            ctx = eval(self.filter_pre_id.context)
            return records.with_context(**ctx).search(domain).with_env(records.env)
        elif self.filter_pre_domain and records:
            eval_context = self._get_eval_context()
            domain = [('id', 'in', records.ids)] + eval(self.filter_pre_domain, eval_context)
            return records.search(domain)
        else:
            return records

    @ecore.api.model
    def _filter_post(self, records):
        """ Filter the records that satisfy the postcondition of action ``self``. """
        if self.filter_id and records:
            eval_context = self._get_eval_context()
            domain = [('id', 'in', records.ids)] + eval(self.filter_id.domain, eval_context)
            ctx = eval(self.filter_id.context)
            return records.with_context(**ctx).search(domain).with_env(records.env)
        elif self.filter_domain and records:
            eval_context = self._get_eval_context()
            domain = [('id', 'in', records.ids)] + eval(self.filter_domain, eval_context)
            return records.search(domain)
        else:
            return records

    @ecore.api.multi
    def _process(self, records):
        """ Process action ``self`` on the ``records`` that have not been done yet. """
        # filter out the records on which self has already been done, then mark
        # remaining records as done (to avoid recursive processing)
        action_done = self._context['__action_done']
        records -= action_done.setdefault(self, records.browse())
        if not records:
            return
        action_done[self] |= records

        # modify records
        values = {}
        if 'date_action_last' in records._fields:
            values['date_action_last'] = ecore.fields.Datetime.now()
        if self.act_user_id and 'user_id' in records._fields:
            values['user_id'] = self.act_user_id.id
        if values:
            records.write(values)

        # subscribe followers
        if self.act_followers and hasattr(records, 'message_subscribe'):
            records.message_subscribe(self.act_followers.ids)

        # execute server actions
        if self.server_action_ids:
            for record in records:
                ctx = {'active_model': record._name,
                       'active_ids': record.ids,
                       'active_id': record.id}
                self.server_action_ids.with_context(**ctx).run()

    def _register_hook(self, cr):
        """ Patch models that should trigger action rules based on creation,
            modification, deletion of records and form onchanges.
        """
        #
        # Note: the patched methods must be defined inside another function,
        # otherwise their closure may be wrong. For instance, the function
        # create refers to the outer variable 'create', which you expect to be
        # bound to create itself. But that expectation is wrong if create is
        # defined inside a loop; in that case, the variable 'create' is bound to
        # the last function defined by the loop.
        #

        def make_create():
            """ Instanciate a create method that processes action rules. """
            @ecore.api.model
            def create(self, vals):
                # retrieve the action rules to possibly execute
                actions = self.env['base.action.rule']._get_actions(self, ['on_create', 'on_create_or_write'])
                # call original method
                record = create.origin(self.with_env(actions.env), vals)
                # check postconditions, and execute actions on the records that satisfy them
                for action in actions.with_context(old_values=None):
                    action._process(action._filter_post(record))
                return record.with_env(self.env)
            return create

        def make_write():
            """ Instanciate a _write method that processes action rules. """
            #
            # Note: we patch method _write() instead of write() in order to
            # catch updates made by field recomputations.
            #
            @ecore.api.multi
            def _write(self, vals):
                # retrieve the action rules to possibly execute
                actions = self.env['base.action.rule']._get_actions(self, ['on_write', 'on_create_or_write'])
                records = self.with_env(actions.env)
                # check preconditions on records
                pre = {action: action._filter_pre(records) for action in actions}
                # read old values before the update
                old_values = {
                    old_vals.pop('id'): old_vals
                    for old_vals in records.read(list(vals))
                }
                # call original method
                _write.origin(records, vals)
                # check postconditions, and execute actions on the records that satisfy them
                for action in actions.with_context(old_values=old_values):
                    action._process(action._filter_post(pre[action]))
                return True
            return _write

        def make_unlink():
            """ Instanciate an unlink method that processes action rules. """
            @ecore.api.multi
            def unlink(self, **kwargs):
                # retrieve the action rules to possibly execute
                actions = self.env['base.action.rule']._get_actions(self, ['on_unlink'])
                records = self.with_env(actions.env)
                # check conditions, and execute actions on the records that satisfy them
                # BUGFIX: this previously referenced `pre[action]`, a dict that
                # only exists inside the _write() patch; on deletion there is no
                # precondition, so the postcondition filter applies to the
                # records being deleted.
                for action in actions:
                    action._process(action._filter_post(records))
                # call original method
                return unlink.origin(self, **kwargs)
            return unlink

        def make_onchange(action_rule_id):
            """ Instanciate an onchange method for the given action rule. """
            def base_action_rule_onchange(self):
                action_rule = self.env['base.action.rule'].browse(action_rule_id)
                server_actions = action_rule.server_action_ids.with_context(
                    active_model=self._name, onchange_self=self)
                result = {}
                for server_action in server_actions:
                    res = server_action.run()
                    if res and 'value' in res:
                        res['value'].pop('id', None)
                        self.update(self._convert_to_cache(res['value'], validate=False))
                    if res and 'domain' in res:
                        result.setdefault('domain', {}).update(res['domain'])
                    if res and 'warning' in res:
                        result['warning'] = res['warning']
                return result
            return base_action_rule_onchange

        patched_models = defaultdict(set)

        def patch(model, name, method):
            """ Patch method `name` on `model`, unless it has been patched already. """
            if model not in patched_models[name]:
                patched_models[name].add(model)
                model._patch_method(name, method)

        # retrieve all actions, and patch their corresponding model
        ids = self.search(cr, SUPERUSER_ID, [])
        for action_rule in self.browse(cr, SUPERUSER_ID, ids):
            model = action_rule.model_id.model
            model_obj = self.pool.get(model)
            if not model_obj:
                continue
            if action_rule.kind == 'on_create':
                patch(model_obj, 'create', make_create())
            elif action_rule.kind == 'on_create_or_write':
                patch(model_obj, 'create', make_create())
                patch(model_obj, '_write', make_write())
            elif action_rule.kind == 'on_write':
                patch(model_obj, '_write', make_write())
            elif action_rule.kind == 'on_unlink':
                patch(model_obj, 'unlink', make_unlink())
            elif action_rule.kind == 'on_change':
                # register an onchange method for the action_rule
                method = make_onchange(action_rule.id)
                for field_name in action_rule.on_change_fields.split(","):
                    field_name = field_name.strip()
                    model_obj._onchange_methods[field_name].append(method)

    def _update_cron(self, cr, uid, context=None):
        """ Activate the cron job depending on whether there exists action rules
            based on time conditions.
        """
        try:
            cron = self.pool['ir.model.data'].get_object(
                cr, uid, 'base_action_rule', 'ir_cron_crm_action', context=context)
        except ValueError:
            return False
        return cron.toggle(model=self._name, domain=[('kind', '=', 'on_time')])

    def _update_registry(self, cr, uid, context=None):
        """ Update the registry after a modification on action rules. """
        if self.pool.ready:
            # for the sake of simplicity, simply force the registry to reload
            cr.commit()
            ecore.api.Environment.reset()
            RegistryManager.new(cr.dbname)
            RegistryManager.signal_registry_change(cr.dbname)

    def create(self, cr, uid, vals, context=None):
        res_id = super(base_action_rule, self).create(cr, uid, vals, context=context)
        self._update_cron(cr, uid, context=context)
        self._update_registry(cr, uid, context=context)
        return res_id

    def write(self, cr, uid, ids, vals, context=None):
        super(base_action_rule, self).write(cr, uid, ids, vals, context=context)
        # only reload cron/registry when a field affecting them changed
        if set(vals) & set(self.CRITICAL_FIELDS):
            self._update_cron(cr, uid, context=context)
            self._update_registry(cr, uid, context=context)
        return True

    def unlink(self, cr, uid, ids, context=None):
        res = super(base_action_rule, self).unlink(cr, uid, ids, context=context)
        self._update_cron(cr, uid, context=context)
        self._update_registry(cr, uid, context=context)
        return res

    def onchange_model_id(self, cr, uid, ids, model_id, context=None):
        """Reset the filters when the target model changes."""
        data = {'model': False, 'filter_pre_id': False, 'filter_id': False}
        if model_id:
            model = self.pool.get('ir.model').browse(cr, uid, model_id, context=context)
            data.update({'model': model.model})
        return {'value': data}

    def _check_delay(self, cr, uid, action, record, record_dt, context=None):
        """Return the datetime at which the timed action should run for a record.

        Uses the rule's working-time calendar for day-based delays when one is
        set, otherwise a plain timedelta-style offset.
        """
        if action.trg_date_calendar_id and action.trg_date_range_type == 'day':
            start_dt = get_datetime(record_dt)
            action_dt = self.pool['resource.calendar'].schedule_days_get_date(
                cr, uid, action.trg_date_calendar_id.id, action.trg_date_range,
                day_date=start_dt, compute_leaves=True, context=context
            )
        else:
            delay = DATE_RANGE_FUNCTION[action.trg_date_range_type](action.trg_date_range)
            action_dt = get_datetime(record_dt) + delay
        return action_dt

    def _check(self, cr, uid, automatic=False, use_new_cursor=False, context=None):
        """ This Function is called by scheduler. """
        context = context or {}
        # retrieve all the action rules to run based on a timed condition
        action_dom = [('kind', '=', 'on_time')]
        action_ids = self.search(cr, uid, action_dom, context=dict(context, active_test=True))
        eval_context = self._get_eval_context(cr, uid, context=context)
        for action in self.browse(cr, uid, action_ids, context=context):
            now = datetime.now()
            if action.last_run:
                last_run = get_datetime(action.last_run)
            else:
                last_run = datetime.utcfromtimestamp(0)

            # retrieve all the records that satisfy the action's condition
            model = self.pool[action.model_id.model]
            domain = []
            ctx = dict(context)
            if action.filter_domain is not False:
                domain = eval(action.filter_domain, eval_context)
            elif action.filter_id:
                domain = eval(action.filter_id.domain, eval_context)
                ctx.update(eval(action.filter_id.context))
                if 'lang' not in ctx:
                    # Filters might be language-sensitive, attempt to reuse creator lang
                    # as we are usually running this as super-user in background
                    [filter_meta] = action.filter_id.get_metadata()
                    user_id = filter_meta['write_uid'] and filter_meta['write_uid'][0] or \
                        filter_meta['create_uid'][0]
                    ctx['lang'] = self.pool['res.users'].browse(cr, uid, user_id).lang
            record_ids = model.search(cr, uid, domain, context=ctx)

            # determine when action should occur for the records
            date_field = action.trg_date_id.name
            if date_field == 'date_action_last' and 'create_date' in model._fields:
                get_record_dt = lambda record: record[date_field] or record.create_date
            else:
                get_record_dt = lambda record: record[date_field]

            # process action on the records that should be executed
            for record in model.browse(cr, uid, record_ids, context=context):
                record_dt = get_record_dt(record)
                if not record_dt:
                    continue
                action_dt = self._check_delay(cr, uid, action, record, record_dt, context=context)
                if last_run <= action_dt < now:
                    try:
                        context = dict(context or {}, action=True)
                        # NOTE(review): _process() is declared with the new API
                        # (self, records); this old-style call goes through the
                        # api bridge — confirm the argument mapping is intended.
                        self._process(cr, uid, action, [record.id], context=context)
                    except Exception:
                        import traceback
                        _logger.error(traceback.format_exc())

            action.write({'last_run': now.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})

            if automatic:
                # auto-commit for batch processing
                cr.commit()
class ir_filters(osv.osv):
    """Saved search filters (per user or shared, optionally tied to an action)."""
    _name = 'ir.filters'
    _description = 'Filters'

    def _list_all_models(self, cr, uid, context=None):
        """Return (model, name) pairs for every installed model, for the selection field."""
        cr.execute("SELECT model, name FROM ir_model ORDER BY name")
        return cr.fetchall()

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a filter, suffixing its name with '(copy)'.

        BUGFIX: ``default`` defaults to None, so calling ``default.update(...)``
        directly crashed whenever no override dict was supplied. Build a fresh
        dict instead (this also avoids mutating the caller's dict).
        """
        name = self.read(cr, uid, [id], ['name'])[0]['name']
        default = dict(default or {}, name=_('%s (copy)') % name)
        return super(ir_filters, self).copy(cr, uid, id, default, context)

    def _get_action_domain(self, cr, uid, action_id=None):
        """Return a domain component for matching filters that are visible in the
           same context (menu/view) as the given action."""
        if action_id:
            # filters specific to this menu + global ones
            return [('action_id', 'in', [action_id, False])]
        # only global ones
        return [('action_id', '=', False)]

    def get_filters(self, cr, uid, model, action_id=None, context=None):
        """Obtain the list of filters available for the user on the given model.

        :param action_id: optional ID of action to restrict filters to this action
            plus global filters. If missing only global filters are returned.
            The action does not have to correspond to the model, it may only be
            a contextual action.
        :return: list of :meth:`~osv.read`-like dicts containing the
            ``name``, ``is_default``, ``domain``, ``user_id`` (m2o tuple),
            ``action_id`` (m2o tuple) and ``context`` of the matching ``ir.filters``.
        """
        # available filters: private filters (user_id=uid) and public filters (uid=NULL),
        # and filters for the action (action_id=action_id) or global (action_id=NULL)
        # NOTE: the incoming context is deliberately replaced with the user's own
        # context so filter names are read with the user's language.
        context = self.pool['res.users'].context_get(cr, uid)
        action_domain = self._get_action_domain(cr, uid, action_id)
        filter_ids = self.search(cr, uid,
                                 action_domain + [('model_id', '=', model), ('user_id', 'in', [uid, False])])
        my_filters = self.read(cr, uid, filter_ids,
                               ['name', 'is_default', 'domain', 'context', 'user_id', 'sort'],
                               context=context)
        return my_filters

    def _check_global_default(self, cr, uid, vals, matching_filters, context=None):
        """ _check_global_default(cursor, UID, dict, list(dict), dict) -> None

        Checks if there is a global default for the model_id requested.

        If there is, and the default is different than the record being written
        (-> we're not updating the current global default), raise an error
        to avoid users unknowingly overwriting existing global defaults (they
        have to explicitly remove the current default before setting a new one)

        This method should only be called if ``vals`` is trying to set
        ``is_default``

        :raises ecore.exceptions.Warning: if there is an existing default and
                                          we're not updating it
        """
        action_domain = self._get_action_domain(cr, uid, vals.get('action_id'))
        existing_default = self.search(cr, uid, action_domain + [
            ('model_id', '=', vals['model_id']),
            ('user_id', '=', False),
            ('is_default', '=', True)], context=context)
        if not existing_default:
            return
        if matching_filters and \
                (matching_filters[0]['id'] == existing_default[0]):
            return

        raise exceptions.Warning(
            _("There is already a shared filter set as default for %(model)s, delete or change it before setting a new default") % {
                'model': vals['model_id']
            })

    def create_or_replace(self, cr, uid, vals, context=None):
        """Create a filter, or overwrite the existing one with the same
        (name, model, user) triple. Returns the filter's id."""
        lower_name = vals['name'].lower()
        action_id = vals.get('action_id')
        current_filters = self.get_filters(cr, uid, vals['model_id'], action_id)
        matching_filters = [f for f in current_filters
                            if f['name'].lower() == lower_name
                            # next line looks for matching user_ids (specific or global), i.e.
                            # f.user_id is False and vals.user_id is False or missing,
                            # or f.user_id.id == vals.user_id
                            if (f['user_id'] and f['user_id'][0]) == vals.get('user_id', False)]

        if vals.get('is_default'):
            if vals.get('user_id'):
                # Setting new default: any other default that belongs to the user
                # should be turned off
                action_domain = self._get_action_domain(cr, uid, action_id)
                act_ids = self.search(cr, uid, action_domain + [
                    ('model_id', '=', vals['model_id']),
                    ('user_id', '=', vals['user_id']),
                    ('is_default', '=', True),
                ], context=context)
                if act_ids:
                    self.write(cr, uid, act_ids, {'is_default': False}, context=context)
            else:
                # propagate the caller's context (was hard-coded to None)
                self._check_global_default(
                    cr, uid, vals, matching_filters, context=context)

        # When a filter exists for the same (name, model, user) triple, we simply
        # replace its definition (considering action_id irrelevant here)
        if matching_filters:
            self.write(cr, uid, matching_filters[0]['id'], vals, context)
            return matching_filters[0]['id']

        return self.create(cr, uid, vals, context)

    _sql_constraints = [
        # Partial constraint, complemented by unique index (see below)
        # Still useful to keep because it provides a proper error message when a violation
        # occurs, as it shares the same prefix as the unique index.
        ('name_model_uid_unique', 'unique (name, model_id, user_id, action_id)',
         'Filter names must be unique'),
    ]

    def _auto_init(self, cr, context=None):
        result = super(ir_filters, self)._auto_init(cr, context)
        # Use unique index to implement unique constraint on the lowercase name (not possible using a constraint)
        cr.execute("DROP INDEX IF EXISTS ir_filters_name_model_uid_unique_index")  # drop old index w/o action
        cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = 'ir_filters_name_model_uid_unique_action_index'")
        if not cr.fetchone():
            cr.execute("""CREATE UNIQUE INDEX "ir_filters_name_model_uid_unique_action_index" ON ir_filters
                            (lower(name), model_id, COALESCE(user_id,-1), COALESCE(action_id,-1))""")
        return result

    _columns = {
        'name': fields.char('Filter Name', translate=True, required=True),
        'user_id': fields.many2one(
            'res.users', 'User', ondelete='cascade',
            help="The user this filter is private to. When left empty the filter is public "
                 "and available to all users."),
        'domain': fields.text('Domain', required=True),
        'context': fields.text('Context', required=True),
        'sort': fields.text('Sort', required=True),
        'model_id': fields.selection(_list_all_models, 'Model', required=True),
        'is_default': fields.boolean('Default filter'),
        'action_id': fields.many2one(
            'ir.actions.actions', 'Action', ondelete='cascade',
            help="The menu action this filter applies to. "
                 "When left empty the filter applies to all menus "
                 "for this model."),
        'active': fields.boolean('Active'),
    }

    _defaults = {
        'domain': '[]',
        'context': '{}',
        'sort': '[]',
        'user_id': lambda self, cr, uid, context=None: uid,
        'is_default': False,
        'active': True,
    }

    _order = 'model_id, name, id desc'
class ir_import(orm.TransientModel): _name = 'base_import.import' # allow imports to survive for 12h in case user is slow _transient_max_hours = 12.0 _columns = { 'res_model': fields.char('Model'), 'file': fields.binary( 'File', help="File to check and/or import, raw binary (not base64)"), 'file_name': fields.char('File Name'), 'file_type': fields.char(string='File Type'), } def get_fields(self, cr, uid, model, context=None, depth=FIELDS_RECURSION_LIMIT): """ Recursively get fields for the provided model (through fields_get) and filter them according to importability The output format is a list of ``Field``, with ``Field`` defined as: .. class:: Field .. attribute:: id (str) A non-unique identifier for the field, used to compute the span of the ``required`` attribute: if multiple ``required`` fields have the same id, only one of them is necessary. .. attribute:: name (str) The field's logical (eCore) name within the scope of its parent. .. attribute:: string (str) The field's human-readable name (``@string``) .. attribute:: required (bool) Whether the field is marked as required in the model. Clients must provide non-empty import values for all required fields or the import will error out. .. attribute:: fields (list(Field)) The current field's subfields. The database and external identifiers for m2o and m2m fields; a filtered and transformed fields_get for o2m fields (to a variable depth defined by ``depth``). Fields with no sub-fields will have an empty list of sub-fields. 
:param str model: name of the model to get fields form :param int landing: depth of recursion into o2m fields """ model_obj = self.pool[model] fields = [{ 'id': 'id', 'name': 'id', 'string': _("External ID"), 'required': False, 'fields': [], }] fields_got = model_obj.fields_get(cr, uid, context=context) blacklist = orm.MAGIC_COLUMNS + [model_obj.CONCURRENCY_CHECK_FIELD] for name, field in fields_got.iteritems(): if name in blacklist: continue # an empty string means the field is deprecated, @deprecated must # be absent or False to mean not-deprecated if field.get('deprecated', False) is not False: continue if field.get('readonly'): states = field.get('states') if not states: continue # states = {state: [(attr, value), (attr2, value2)], state2:...} if not any(attr == 'readonly' and value is False for attr, value in itertools.chain.from_iterable( states.itervalues())): continue f = { 'id': name, 'name': name, 'string': field['string'], # Y U NO ALWAYS HAS REQUIRED 'required': bool(field.get('required')), 'fields': [], } if field['type'] in ('many2many', 'many2one'): f['fields'] = [ dict(f, name='id', string=_("External ID")), dict(f, name='.id', string=_("Database ID")), ] elif field['type'] == 'one2many' and depth: f['fields'] = self.get_fields( cr, uid, field['relation'], context=context, depth=depth-1) if self.user_has_groups(cr, uid, 'base.group_no_one'): f['fields'].append({'id' : '.id', 'name': '.id', 'string': _("Database ID"), 'required': False, 'fields': []}) fields.append(f) # TODO: cache on model? return fields def _read_file(self, file_type, record, options): (file_extension, handler, req) = FILE_TYPE_DICT.get(file_type, (None, None, None)) if handler: return getattr(self, '_read_' + file_extension)(record, options) # fallback on file extensions as mime types can be unreliable (e.g. 
# software setting incorrect mime types, or non-installed software # leading to browser not sending mime types) if record.file_name: p, ext = os.path.splitext(record.file_name) if ext and EXTENSIONS.get(ext): return getattr(self, '_read_' + ext[1:])(record, options) if req: raise ImportError(_("Unable to load \"{extension}\" file: requires Python module \"{modname}\"").format(extension=file_extension, modname=req)) raise ValueError(_("Unsupported file format \"{}\", import only supports CSV, ODS, XLS and XLSX").format(file_type)) def _read_xls(self, record, options): book = xlrd.open_workbook(file_contents=record.file) sheet = book.sheet_by_index(0) # emulate Sheet.get_rows for pre-0.9.4 for row in itertools.imap(sheet.row, range(sheet.nrows)): values = [] for cell in row: if cell.ctype is xlrd.XL_CELL_NUMBER: is_float = cell.value % 1 != 0.0 values.append( unicode(cell.value) if is_float else unicode(int(cell.value)) ) elif cell.ctype is xlrd.XL_CELL_DATE: is_datetime = cell.value % 1 != 0.0 # emulate xldate_as_datetime for pre-0.9.3 dt = datetime.datetime(*xlrd.xldate.xldate_as_tuple( cell.value, book.datemode)) values.append( dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT) if is_datetime else dt.strftime(DEFAULT_SERVER_DATE_FORMAT) ) elif cell.ctype is xlrd.XL_CELL_BOOLEAN: values.append(u'True' if cell.value else u'False') elif cell.ctype is xlrd.XL_CELL_ERROR: raise ValueError( _("Error cell found while reading XLS/XLSX file: %s") % xlrd.error_text_from_code.get( cell.value, "unknown error code %s" % cell.value) ) else: values.append(cell.value) if any(x for x in values if x.strip()): yield values _read_xlsx = _read_xls def _read_ods(self, record, options): doc = odf_ods_reader.ODSReader(file=io.BytesIO(record.file)) return ( row for row in doc.getFirstSheet() if any(x for x in row if x.strip()) ) def _read_csv(self, record, options): """ Returns a CSV-parsed iterator of all empty lines in the file :throws csv.Error: if an error is detected during CSV parsing 
:throws UnicodeDecodeError: if ``options.encoding`` is incorrect """ csv_iterator = csv.reader( StringIO(record.file), quotechar=str(options['quoting']), delimiter=str(options['separator'])) # TODO: guess encoding with chardet? Or https://github.com/aadsm/jschardet encoding = options.get('encoding', 'utf-8') return ( [item.decode(encoding) for item in row] for row in csv_iterator if any(x for x in row if x.strip()) ) def _match_header(self, header, fields, options): """ Attempts to match a given header to a field of the imported model. :param str header: header name from the CSV file :param fields: :param dict options: :returns: an empty list if the header couldn't be matched, or all the fields to traverse :rtype: list(Field) """ string_match = None for field in fields: # FIXME: should match all translations & original # TODO: use string distance (levenshtein? hamming?) if header.lower() == field['name'].lower(): return [field] if header.lower() == field['string'].lower(): # matching string are not reliable way because # strings have no unique constraint string_match = field if string_match: # this behavior is only applied if there is no matching field['name'] return [string_match] if '/' not in header: return [] # relational field path traversal = [] subfields = fields # Iteratively dive into fields tree for section in header.split('/'): # Strip section in case spaces are added around '/' for # readability of paths match = self._match_header(section.strip(), subfields, options) # Any match failure, exit if not match: return [] # prep subfields for next iteration within match[0] field = match[0] subfields = field['fields'] traversal.append(field) return traversal def _match_headers(self, rows, fields, options): """ Attempts to match the imported model's fields to the titles of the parsed CSV file, if the file is supposed to have headers. Will consume the first line of the ``rows`` iterator. 
Returns a pair of (None, None) if headers were not requested or the list of headers and a dict mapping cell indices to key paths in the ``fields`` tree :param Iterator rows: :param dict fields: :param dict options: :rtype: (None, None) | (list(str), dict(int: list(str))) """ if not options.get('headers'): return None, None headers = next(rows) return headers, { index: [field['name'] for field in self._match_header(header, fields, options)] or None for index, header in enumerate(headers) } def parse_preview(self, cr, uid, id, options, count=10, context=None): """ Generates a preview of the uploaded files, and performs fields-matching between the import's file data and the model's columns. If the headers are not requested (not options.headers), ``matches`` and ``headers`` are both ``False``. :param id: identifier of the import :param int count: number of preview lines to generate :param options: format-specific options. CSV: {encoding, quoting, separator, headers} :type options: {str, str, str, bool} :returns: {fields, matches, headers, preview} | {error, preview} :rtype: {dict(str: dict(...)), dict(int, list(str)), list(str), list(list(str))} | {str, str} """ (record,) = self.browse(cr, uid, [id], context=context) fields = self.get_fields(cr, uid, record.res_model, context=context) try: rows = self._read_file(record.file_type, record, options) headers, matches = self._match_headers(rows, fields, options) # Match should have consumed the first row (iif headers), get # the ``count`` next rows for preview preview = list(itertools.islice(rows, count)) assert preview, "CSV file seems to have no content" return { 'fields': fields, 'matches': matches or False, 'headers': headers or False, 'preview': preview, } except Exception, e: # Due to lazy generators, UnicodeDecodeError (for # instance) may only be raised when serializing the # preview to a list in the return. 
_logger.debug("Error during parsing preview", exc_info=True) preview = None if record.file_type == 'text/csv': preview = record.file[:ERROR_PREVIEW_BYTES].decode('iso-8859-1') return { 'error': str(e), # iso-8859-1 ensures decoding will always succeed, # even if it yields non-printable characters. This is # in case of UnicodeDecodeError (or csv.Error # compounded with UnicodeDecodeError) 'preview': preview, }
class hr_job(osv.Model):
    """Job position (hr.job): holds the description of a position, the
    employees occupying it, and the recruitment bookkeeping
    (expected/hired counters, open/closed recruitment state)."""

    def _get_nbr_employees(self, cr, uid, ids, name, args, context=None):
        """Function-field computer (multi): for each job, return the current
        headcount and the headcount expected once the planned recruitment
        is completed."""
        res = {}
        for job in self.browse(cr, uid, ids, context=context):
            nb_employees = len(job.employee_ids or [])
            res[job.id] = {
                'no_of_employee': nb_employees,
                'expected_employees': nb_employees + job.no_of_recruitment,
            }
        return res

    def _get_job_position(self, cr, uid, ids, context=None):
        """Store trigger: map modified hr.employee ids to the hr.job ids
        whose stored function fields must be recomputed."""
        res = []
        for employee in self.pool.get('hr.employee').browse(cr, uid, ids, context=context):
            if employee.job_id:
                res.append(employee.job_id.id)
        return res

    _name = "hr.job"
    _description = "Job Position"
    _inherit = ['mail.thread']
    _columns = {
        'name': fields.char('Job Name', required=True, select=True, translate=True),
        # Stored function field; recomputed when the job's own
        # no_of_recruitment changes or when an employee's job_id changes.
        'expected_employees': fields.function(
            _get_nbr_employees, string='Total Forecasted Employees',
            help='Expected number of employees for this job position after new recruitment.',
            store={
                'hr.job': (lambda self, cr, uid, ids, c=None: ids, ['no_of_recruitment'], 10),
                'hr.employee': (_get_job_position, ['job_id'], 10),
            },
            type='integer',
            multi='_get_nbr_employees'),
        # Stored function field; recomputed when an employee's job_id changes.
        'no_of_employee': fields.function(
            _get_nbr_employees, string="Current Number of Employees",
            help='Number of employees currently occupying this job position.',
            store={
                'hr.employee': (_get_job_position, ['job_id'], 10),
            },
            type='integer',
            multi='_get_nbr_employees'),
        'no_of_recruitment': fields.integer(
            'Expected New Employees', copy=False,
            help='Number of new employees you expect to recruit.'),
        'no_of_hired_employee': fields.integer(
            'Hired Employees', copy=False,
            help='Number of hired employees for this job position during recruitment phase.'),
        'employee_ids': fields.one2many('hr.employee', 'job_id', 'Employees', groups='base.group_user'),
        'description': fields.text('Job Description'),
        'requirements': fields.text('Requirements'),
        'department_id': fields.many2one('hr.department', 'Department'),
        'company_id': fields.many2one('res.company', 'Company'),
        'state': fields.selection(
            [('recruit', 'Recruitment in Progress'), ('open', 'Recruitment Closed')],
            string='Status', readonly=True, required=True,
            track_visibility='always', copy=False,
            help="Set whether the recruitment process is open or closed for this job position."),
        'write_date': fields.datetime('Update Date', readonly=True),
    }
    _defaults = {
        'company_id': lambda self, cr, uid, ctx=None: self.pool.get('res.company')._company_default_get(cr, uid, 'hr.job', context=ctx),
        'state': 'recruit',
        'no_of_recruitment': 1,
    }
    _sql_constraints = [
        ('name_company_uniq', 'unique(name, company_id, department_id)',
         'The name of the job position must be unique per department in company!'),
    ]

    def set_recruit(self, cr, uid, ids, context=None):
        """Open recruitment on the jobs: switch state to 'recruit' and make
        sure at least one position is expected (a 0 count is bumped to 1)."""
        for job in self.browse(cr, uid, ids, context=context):
            no_of_recruitment = job.no_of_recruitment == 0 and 1 or job.no_of_recruitment
            self.write(cr, uid, [job.id], {
                'state': 'recruit',
                'no_of_recruitment': no_of_recruitment
            }, context=context)
        return True

    def set_open(self, cr, uid, ids, context=None):
        """Close recruitment on the jobs: switch state to 'open' and reset
        the expected/hired counters."""
        self.write(cr, uid, ids, {
            'state': 'open',
            'no_of_recruitment': 0,
            'no_of_hired_employee': 0
        }, context=context)
        return True

    # TDE note: done in new api, because called with new api -> context is a
    # frozendict -> error when trying to manipulate it
    @api.model
    def create(self, values):
        # mail_create_nosubscribe: do not auto-subscribe the creator as
        # follower of the new job position.
        return super(hr_job, self.with_context(mail_create_nosubscribe=True)).create(values)

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate a job position, suffixing the name with '(copy)' unless
        a name is explicitly provided in ``default``."""
        if default is None:
            default = {}
        if 'name' not in default:
            job = self.browse(cr, uid, id, context=context)
            default['name'] = _("%s (copy)") % (job.name)
        return super(hr_job, self).copy(cr, uid, id, default=default, context=context)

    # ----------------------------------------
    # Compatibility methods
    # ----------------------------------------
    _no_of_employee = _get_nbr_employees  # v7 compatibility
    job_open = set_open  # v7 compatibility
    job_recruitment = set_recruit  # v7 compatibility
class ir_translation(osv.osv):
    """Storage and lookup of translated terms (ir.translation): one row per
    (lang, type, name, res_id, src) combination, plus helpers to query,
    synchronize and load translations from .po files."""
    _name = "ir.translation"
    _log_access = False  # no create_uid/write_uid bookkeeping on this table

    def _get_language(self, cr, uid, context):
        # Selection provider for the 'lang' column: all installed
        # (translatable) languages as (code, name) pairs.
        lang_model = self.pool.get('res.lang')
        lang_ids = lang_model.search(cr, uid, [('translatable', '=', True)], context=context)
        lang_data = lang_model.read(cr, uid, lang_ids, ['code', 'name'], context=context)
        return [(d['code'], d['name']) for d in lang_data]

    def _get_src(self, cr, uid, ids, name, arg, context=None):
        ''' Get source name for the translation. If object type is model then
        return the value store in db. Otherwise return value store in src field
        '''
        if context is None:
            context = {}
        res = dict.fromkeys(ids, False)
        for record in self.browse(cr, uid, ids, context=context):
            res[record.id] = record.src
            if record.type == 'model':
                # 'name' is "<model>,<field>"; read the real stored value.
                model_name, field_name = record.name.split(',')
                model = self.pool.get(model_name)
                if model is None:
                    continue
                field = model._fields.get(field_name)
                if field is None:
                    continue
                if not callable(field.translate):
                    # Pass context without lang, need to read real stored field, not translation
                    context_no_lang = dict(context, lang=None)
                    result = model.read(cr, uid, [record.res_id], [field_name], context=context_no_lang)
                    res[record.id] = result[0][field_name] if result else False
        return res

    def _set_src(self, cr, uid, id, name, value, args, context=None):
        ''' When changing source term of a translation, change its value in db
        for the associated object, and the src field
        '''
        if context is None:
            context = {}
        record = self.browse(cr, uid, id, context=context)
        if record.type == 'model':
            model_name, field_name = record.name.split(',')
            model = self.pool.get(model_name)
            field = model._fields[field_name]
            if not callable(field.translate):
                # Make a context without language information, because we want
                # to write on the value stored in db and not on the one
                # associated with the current language. Also not removing lang
                # from context trigger an error when lang is different.
                context_wo_lang = context.copy()
                context_wo_lang.pop('lang', None)
                model.write(cr, uid, [record.res_id], {field_name: value}, context=context_wo_lang)
        return self.write(cr, uid, id, {'src': value}, context=context)

    def _search_src(self, cr, uid, obj, name, args, context):
        ''' the source term is stored on 'src' field '''
        # Rewrite searches on the computed 'source' field onto 'src'.
        res = []
        for field, operator, value in args:
            res.append(('src', operator, value))
        return res

    _columns = {
        'name': fields.char('Translated field', required=True),
        'res_id': fields.integer('Record ID', select=True),
        'lang': fields.selection(_get_language, string='Language'),
        'type': fields.selection(TRANSLATION_TYPE, string='Type', select=True),
        'src': fields.text('Internal Source'),  # stored in database, kept for backward compatibility
        # Computed view over 'src' that also reads/writes the underlying
        # model field for type='model' translations.
        'source': fields.function(_get_src, fnct_inv=_set_src, fnct_search=_search_src, type='text', string='Source term'),
        'value': fields.text('Translation Value'),
        'module': fields.char('Module', help="Module this term belongs to", select=True),
        'state': fields.selection(
            [('to_translate', 'To Translate'),
             ('inprogress', 'Translation in Progress'),
             ('translated', 'Translated')],
            string="Status",
            help="Automatically set to let administators find new terms that might need to be translated"),
        # aka gettext extracted-comments - we use them to flag ecore-web translation
        # cfr: http://www.gnu.org/savannah-checkouts/gnu/gettext/manual/html_node/PO-Files.html
        'comments': fields.text('Translation comments', select=True),
    }
    _defaults = {
        'state': 'to_translate',
    }
    _sql_constraints = [
        ('lang_fkey_res_lang', 'FOREIGN KEY(lang) REFERENCES res_lang(code)',
         'Language code of translation item must be among known languages'),
    ]

    def _auto_init(self, cr, context=None):
        # Maintain the custom index set on ir_translation: drop legacy
        # indexes and (re)create the md5/name-lang-type ones.
        super(ir_translation, self)._auto_init(cr, context)
        cr.execute("SELECT indexname FROM pg_indexes WHERE indexname LIKE 'ir_translation_%'")
        indexes = [row[0] for row in cr.fetchall()]

        # Removed because there is a size limit on btree indexed values (problem with column src):
        # cr.execute('CREATE INDEX ir_translation_ltns ON ir_translation (name, lang, type, src)')
        # cr.execute('CREATE INDEX ir_translation_lts ON ir_translation (lang, type, src)')
        #
        # Removed because hash indexes are not compatible with postgres streaming replication:
        # cr.execute('CREATE INDEX ir_translation_src_hash_idx ON ir_translation USING hash (src)')
        if set(indexes) & set(['ir_translation_ltns', 'ir_translation_lts', 'ir_translation_src_hash_idx']):
            cr.execute('DROP INDEX IF EXISTS ir_translation_ltns, ir_translation_lts, ir_translation_src_hash_idx')
            cr.commit()

        # Add separate md5 index on src (no size limit on values, and good performance).
        if 'ir_translation_src_md5' not in indexes:
            cr.execute('CREATE INDEX ir_translation_src_md5 ON ir_translation (md5(src))')
            cr.commit()

        if 'ir_translation_ltn' not in indexes:
            cr.execute('CREATE INDEX ir_translation_ltn ON ir_translation (name, lang, type)')
            cr.commit()

    def _check_selection_field_value(self, cr, uid, field, value, context=None):
        # The 'lang' selection is dynamic (computed from res.lang), so the
        # static selection validation is skipped for it.
        if field == 'lang':
            return
        return super(ir_translation, self)._check_selection_field_value(cr, uid, field, value, context=context)

    def _get_ids(self, cr, uid, name, tt, lang, ids):
        """Read translations of field ``name`` (type ``tt``, language
        ``lang``) for the given res_ids; returns {res_id: value or False}."""
        translations = dict.fromkeys(ids, False)
        if ids:
            cr.execute('select res_id,value '
                       'from ir_translation '
                       'where lang=%s '
                       'and type=%s '
                       'and name=%s '
                       'and res_id IN %s',
                       (lang, tt, name, tuple(ids)))
            for res_id, value in cr.fetchall():
                translations[res_id] = value
        return translations

    def _set_ids(self, cr, uid, name, tt, lang, ids, value, src=None):
        """Write translation ``value`` (and optional ``src``) for the given
        res_ids, updating existing rows and creating the missing ones."""
        self.clear_caches()
        cr.execute('update ir_translation '
                   'set value=%s '
                   ' , src=%s '
                   ' , state=%s '
                   'where lang=%s '
                   'and type=%s '
                   'and name=%s '
                   'and res_id IN %s '
                   'returning res_id',
                   (value, src, 'translated', lang, tt, name, tuple(ids),))

        # RETURNING tells us which rows existed; create the rest.
        existing_ids = [x[0] for x in cr.fetchall()]
        for id in list(set(ids) - set(existing_ids)):
            self.create(cr, uid, {
                'lang': lang,
                'type': tt,
                'name': name,
                'res_id': id,
                'value': value,
                'src': src,
                'state': 'translated'
            })
        return len(ids)

    def _get_source_query(self, cr, uid, name, types, lang, source, res_id):
        # Build the SQL and parameters used by __get_source below.
        if source:
            # Note: the extra test on md5(src) is a hint for postgres to use the
            # index ir_translation_src_md5
            query = """SELECT value
                       FROM ir_translation
                       WHERE lang=%s
                        AND type in %s
                        AND src=%s AND md5(src)=md5(%s)"""
            source = tools.ustr(source)
            params = (lang or '', types, source, source)
            if res_id:
                query += " AND res_id in %s"
                params += (res_id,)
            if name:
                query += " AND name=%s"
                params += (tools.ustr(name),)
        else:
            query = """SELECT value
                       FROM ir_translation
                       WHERE lang=%s
                        AND type in %s
                        AND name=%s"""
            params = (lang or '', types, tools.ustr(name))
        return (query, params)

    @tools.ormcache('name', 'types', 'lang', 'source', 'res_id')
    def __get_source(self, cr, uid, name, types, lang, source, res_id):
        # res_id is a tuple or None, otherwise ormcache cannot cache it!
        query, params = self._get_source_query(cr, uid, name, types, lang, source, res_id)
        cr.execute(query, params)
        res = cr.fetchone()
        trad = res and res[0] or u''
        if source and not trad:
            # Fall back on the untranslated source term.
            return tools.ustr(source)
        return trad

    def _get_source(self, cr, uid, name, types, lang, source=None, res_id=None):
        """ Returns the translation for the given combination of name, type,
        language and source. All values passed to this method should be
        unicode (not byte strings), especially ``source``.

        :param name: identification of the term to translate, such as field name (optional if source is passed)
        :param types: single string defining type of term to translate (see ``type`` field on ir.translation), or sequence of allowed types (strings)
        :param lang: language code of the desired translation
        :param source: optional source term to translate (should be unicode)
        :param res_id: optional resource id or a list of ids to translate (if used, ``source`` should be set)
        :rtype: unicode
        :return: the request translation, or an empty unicode string if no translation was found and `source` was not passed
        """
        # FIXME: should assert that `source` is unicode and fix all callers to
        # always pass unicode so we can remove the string encoding/decoding.
        if not lang:
            return tools.ustr(source or '')
        if isinstance(types, basestring):
            types = (types,)
        if res_id:
            # Normalize to a tuple so __get_source's ormcache can key on it.
            if isinstance(res_id, (int, long)):
                res_id = (res_id,)
            else:
                res_id = tuple(res_id)
        return self.__get_source(cr, uid, name, types, lang, source, res_id)

    @api.model
    def _get_terms_query(self, field, records):
        """ Utility function that makes the query for field terms. """
        query = """ SELECT * FROM ir_translation
                    WHERE lang=%s AND type=%s AND name=%s AND res_id IN %s """
        name = "%s,%s" % (field.model_name, field.name)
        params = (records.env.lang, 'model', name, tuple(records.ids))
        return query, params

    @api.model
    def _get_terms_mapping(self, field, records):
        """ Return a function mapping a ir_translation row (dict) to a value.
        This method is called before querying the database for translations.
        """
        return lambda data: data['value']

    @api.model
    def _get_terms_translations(self, field, records):
        """ Return the terms and translations of a given `field` on `records`.

        :return: {record_id: {source: value}}
        """
        result = {rid: {} for rid in records.ids}
        if records:
            map_trans = self._get_terms_mapping(field, records)
            query, params = self._get_terms_query(field, records)
            self._cr.execute(query, params)
            for data in self._cr.dictfetchall():
                result[data['res_id']][data['src']] = map_trans(data)
        return result

    @api.model
    def _sync_terms_translations(self, field, records):
        """ Synchronize the translations to the terms to translate, after the
        English value of a field is modified. The algorithm tries to match
        existing translations to the terms to translate, provided the distance
        between modified strings is not too large. It allows to not retranslate
        data where a typo has been fixed in the English value.
        """
        if not callable(getattr(field, 'translate', None)):
            return

        trans = self.env['ir.translation']
        outdated = trans  # rows to reset to 'to_translate'
        discarded = trans  # rows to delete

        for record in records:
            # get field value and terms to translate
            value = record[field.name]
            terms = set(field.get_trans_terms(value))
            record_trans = trans.search([
                ('type', '=', 'model'),
                ('name', '=', "%s,%s" % (field.model_name, field.name)),
                ('res_id', '=', record.id),
            ])

            if not terms:
                # discard all translations for that field
                discarded += record_trans
                continue

            # remap existing translations on terms when possible
            # NOTE(review): the loop variable below shadows the outer
            # ``trans`` recordset; search() still resolves (it is a
            # model-level method), but the shadowing is fragile.
            for trans in record_trans:
                if trans.src == trans.value:
                    # Untranslated row (value == source): safe to drop.
                    discarded += trans
                elif trans.src not in terms:
                    # Source term changed; remap to the closest new term
                    # if it is similar enough, otherwise mark outdated.
                    matches = get_close_matches(trans.src, terms, 1, 0.9)
                    if matches:
                        trans.write({'src': matches[0], 'state': trans.state})
                    else:
                        outdated += trans

        # process outdated and discarded translations
        outdated.write({'state': 'to_translate'})
        discarded.unlink()

    @api.model
    @tools.ormcache_context('model_name', keys=('lang',))
    def get_field_string(self, model_name):
        """ Return the translation of fields strings in the context's language.
        Note that the result contains the available translations only.

        :param model_name: the name of a model
        :return: the model's fields' strings as a dictionary `{field_name: field_string}`
        """
        fields = self.env['ir.model.fields'].search([('model', '=', model_name)])
        return {field.name: field.field_description for field in fields}

    @api.model
    @tools.ormcache_context('model_name', keys=('lang',))
    def get_field_help(self, model_name):
        """ Return the translation of fields help in the context's language.
        Note that the result contains the available translations only.

        :param model_name: the name of a model
        :return: the model's fields' help as a dictionary `{field_name: field_help}`
        """
        fields = self.env['ir.model.fields'].search([('model', '=', model_name)])
        return {field.name: field.help for field in fields}

    @api.multi
    def check(self, mode):
        """ Check access rights of operation ``mode`` on ``self`` for the
        current user. Raise an AccessError in case conditions are not met.
        """
        if self.env.user._is_admin():
            return

        # collect translated field records (model_ids) and other translations
        trans_ids = []
        model_ids = defaultdict(list)
        model_fields = defaultdict(list)
        for trans in self:
            if trans.type == 'model':
                mname, fname = trans.name.split(',')
                model_ids[mname].append(trans.res_id)
                model_fields[mname].append(fname)
            else:
                trans_ids.append(trans.id)

        # check for regular access rights on other translations
        if trans_ids:
            records = self.browse(trans_ids)
            records.check_access_rights(mode)
            records.check_access_rule(mode)

        # check for read/write access on translated field records
        fmode = 'read' if mode == 'read' else 'write'
        for mname, ids in model_ids.iteritems():
            records = self.env[mname].browse(ids)
            records.check_access_rights(fmode)
            records.check_field_access_rights(fmode, model_fields[mname])
            records.check_access_rule(fmode)

    @api.model
    def create(self, vals):
        if vals.get('type') == 'model' and vals.get('value'):
            # check and sanitize value
            mname, fname = vals['name'].split(',')
            field = self.env[mname]._fields[fname]
            vals['value'] = field.check_trans_value(vals['value'])
        # Created as superuser, then access-checked on the resulting record.
        record = super(ir_translation, self.sudo()).create(vals).with_env(self.env)
        record.check('create')
        self.clear_caches()
        return record

    @api.multi
    def write(self, vals):
        if vals.get('value'):
            vals.setdefault('state', 'translated')
            ttype = vals.get('type') or self[:1].type
            if ttype == 'model':
                # check and sanitize value
                name = vals.get('name') or self[:1].name
                mname, fname = name.split(',')
                field = self.env[mname]._fields[fname]
                vals['value'] = field.check_trans_value(vals['value'])
        elif vals.get('src') or not vals.get('value', True):
            vals.setdefault('state', 'to_translate')
        # Access is checked both before and after the write, so neither the
        # old nor the new values may escape the user's rights.
        self.check('write')
        result = super(ir_translation, self.sudo()).write(vals)
        self.check('write')
        self.clear_caches()
        return result

    @api.multi
    def unlink(self):
        self.check('unlink')
        self.clear_caches()
        return super(ir_translation, self.sudo()).unlink()

    @api.model
    def insert_missing(self, field, records):
        """ Insert missing translations for `field` on `records`. """
        records = records.with_context(lang=None)
        external_ids = records.get_external_id()  # if no xml_id, empty string

        if callable(field.translate):
            # insert missing translations for each term in src
            query = """ INSERT INTO ir_translation (lang, type, name, res_id, src, value, module)
                        SELECT l.code, 'model', %(name)s, %(res_id)s, %(src)s, %(src)s, %(module)s
                        FROM res_lang l
                        WHERE NOT EXISTS (
                            SELECT 1 FROM ir_translation
                            WHERE lang=l.code AND type='model' AND name=%(name)s AND res_id=%(res_id)s AND src=%(src)s AND module=%(module)s
                        );
                    """
            for record in records:
                module = external_ids[record.id].split('.')[0]
                src = record[field.name] or None
                for term in set(field.get_trans_terms(src)):
                    self._cr.execute(query, {
                        'name': "%s,%s" % (field.model_name, field.name),
                        'res_id': record.id,
                        'src': term,
                        'module': module
                    })
        else:
            # insert missing translations for src
            query = """ INSERT INTO ir_translation (lang, type, name, res_id, src, value, module)
                        SELECT l.code, 'model', %(name)s, %(res_id)s, %(src)s, %(src)s, %(module)s
                        FROM res_lang l
                        WHERE l.code != 'en_US' AND NOT EXISTS (
                            SELECT 1 FROM ir_translation
                            WHERE lang=l.code AND type='model' AND name=%(name)s AND res_id=%(res_id)s AND module=%(module)s
                        );

                        UPDATE ir_translation SET src=%(src)s
                        WHERE type='model' AND name=%(name)s AND res_id=%(res_id)s AND module=%(module)s;
                    """
            for record in records:
                module = external_ids[record.id].split('.')[0]
                self._cr.execute(query, {
                    'name': "%s,%s" % (field.model_name, field.name),
                    'res_id': record.id,
                    'src': record[field.name] or None,
                    'module': module
                })
        self.clear_caches()

    @api.model
    def translate_fields(self, model, id, field=None):
        """ Open a view for translating the field(s) of the record (model, id). """
        main_lang = 'en_US'
        if not self.env['res.lang'].search_count([('code', '!=', main_lang)]):
            raise UserError(_("Translation features are unavailable until you install an extra translation."))

        # determine domain for selecting translations
        record = self.env[model].with_context(lang=main_lang).browse(id)
        domain = ['&', ('res_id', '=', id), ('name', '=like', model + ',%')]

        def make_domain(fld, rec):
            name = "%s,%s" % (fld.model_name, fld.name)
            return ['&', ('res_id', '=', rec.id), ('name', '=', name)]

        # insert missing translations, and extend domain for related fields
        for name, fld in record._fields.items():
            if not getattr(fld, 'translate', False):
                continue

            rec = record
            if fld.related:
                try:
                    # traverse related fields up to their data source
                    while fld.related:
                        rec, fld = fld.traverse_related(rec)
                    if rec:
                        domain = ['|'] + domain + make_domain(fld, rec)
                except AccessError:
                    continue

            assert fld.translate and rec._name == fld.model_name
            self.insert_missing(fld, rec)

        action = {
            'name': 'Translate',
            'res_model': 'ir.translation',
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'tree,form',
            'domain': domain,
        }
        if field:
            # Pre-filter the view on the requested field (non-related only).
            fld = record._fields[field]
            if not fld.related:
                action['context'] = {
                    'search_default_name': "%s,%s" % (fld.model_name, fld.name),
                }
        return action

    def _get_import_cursor(self, cr, uid, context=None):
        """ Return a cursor-like object for fast inserting translations """
        return ir_translation_import_cursor(cr, uid, self, context=context)

    def load_module_terms(self, cr, modules, langs, context=None):
        """Load the .po files of ``modules`` for each language in ``langs``,
        base language first (es.po before es_CL.po), i18n before i18n_extra."""
        context = dict(context or {})  # local copy
        for module_name in modules:
            modpath = ecore.modules.get_module_path(module_name)
            if not modpath:
                continue
            for lang in langs:
                lang_code = tools.get_iso_codes(lang)
                base_lang_code = None
                if '_' in lang_code:
                    base_lang_code = lang_code.split('_')[0]

                # Step 1: for sub-languages, load base language first (e.g. es_CL.po is loaded over es.po)
                if base_lang_code:
                    base_trans_file = ecore.modules.get_module_resource(module_name, 'i18n', base_lang_code + '.po')
                    if base_trans_file:
                        _logger.info('module %s: loading base translation file %s for language %s', module_name, base_lang_code, lang)
                        tools.trans_load(cr, base_trans_file, lang, verbose=False, module_name=module_name, context=context)
                        context['overwrite'] = True  # make sure the requested translation will override the base terms later

                    # i18n_extra folder is for additional translations handle manually (eg: for l10n_be)
                    base_trans_extra_file = ecore.modules.get_module_resource(module_name, 'i18n_extra', base_lang_code + '.po')
                    if base_trans_extra_file:
                        _logger.info('module %s: loading extra base translation file %s for language %s', module_name, base_lang_code, lang)
                        tools.trans_load(cr, base_trans_extra_file, lang, verbose=False, module_name=module_name, context=context)
                        context['overwrite'] = True  # make sure the requested translation will override the base terms later

                # Step 2: then load the main translation file, possibly overriding the terms coming from the base language
                trans_file = ecore.modules.get_module_resource(module_name, 'i18n', lang_code + '.po')
                if trans_file:
                    _logger.info('module %s: loading translation file (%s) for language %s', module_name, lang_code, lang)
                    tools.trans_load(cr, trans_file, lang, verbose=False, module_name=module_name, context=context)
                elif lang_code != 'en_US':
                    _logger.info('module %s: no translation for language %s', module_name, lang_code)

                trans_extra_file = ecore.modules.get_module_resource(module_name, 'i18n_extra', lang_code + '.po')
                if trans_extra_file:
                    _logger.info('module %s: loading extra translation file (%s) for language %s', module_name, lang_code, lang)
                    tools.trans_load(cr, trans_extra_file, lang, verbose=False, module_name=module_name, context=context)
        return True
class TxPaypal(osv.Model):
    """Paypal-specific behaviour of payment.transaction: parsing and
    validation of form (IPN) feedback, plus experimental server-to-server
    REST calls."""
    _inherit = 'payment.transaction'

    _columns = {
        'paypal_txn_type': fields.char('Transaction type'),
    }

    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------

    def _paypal_form_get_tx_from_data(self, cr, uid, data, context=None):
        """Find the unique transaction matching the IPN feedback ``data``
        (by 'item_number' reference); raise ValidationError otherwise."""
        reference, txn_id = data.get('item_number'), data.get('txn_id')
        if not reference or not txn_id:
            error_msg = _('Paypal: received data with missing reference (%s) or txn_id (%s)') % (reference, txn_id)
            _logger.info(error_msg)
            raise ValidationError(error_msg)

        # find tx -> @TDENOTE use txn_id ?
        tx_ids = self.pool['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context)
        if not tx_ids or len(tx_ids) > 1:
            error_msg = 'Paypal: received data for reference %s' % (reference)
            if not tx_ids:
                error_msg += '; no order found'
            else:
                error_msg += '; multiple order found'
            _logger.info(error_msg)
            raise ValidationError(error_msg)
        return self.browse(cr, uid, tx_ids[0], context=context)

    def _paypal_form_get_invalid_parameters(self, cr, uid, tx, data, context=None):
        """Compare the IPN feedback ``data`` against transaction ``tx`` and
        return a list of (name, received, expected) mismatches."""
        invalid_parameters = []
        # NOTE(review): ``[0]`` takes the FIRST CHARACTER of the version
        # string, so the comparison with '3.4' can never be equal and this
        # warning fires for every notification; the message also mentions
        # "2.6" while the code tests '3.4'. Either the index or the message
        # looks wrong — confirm against actual IPN payloads before changing.
        if data.get('notify_version')[0] != '3.4':
            _logger.warning(
                'Received a notification from Paypal with version %s instead of 2.6. This could lead to issues when managing it.' % data.get('notify_version')
            )
        if data.get('test_ipn'):
            _logger.warning(
                'Received a notification from Paypal using sandbox'
            ),  # NOTE(review): stray trailing comma (harmless tuple expression)

        # TODO: txn_id: should be false at draft, set afterwards, and verified with txn details
        if tx.acquirer_reference and data.get('txn_id') != tx.acquirer_reference:
            invalid_parameters.append(('txn_id', data.get('txn_id'), tx.acquirer_reference))
        # check what is buyed
        if float_compare(float(data.get('mc_gross', '0.0')), (tx.amount + tx.fees), 2) != 0:
            invalid_parameters.append(('mc_gross', data.get('mc_gross'), '%.2f' % tx.amount))  # mc_gross is amount + fees
        if data.get('mc_currency') != tx.currency_id.name:
            invalid_parameters.append(('mc_currency', data.get('mc_currency'), tx.currency_id.name))
        if 'handling_amount' in data and float_compare(float(data.get('handling_amount')), tx.fees, 2) != 0:
            invalid_parameters.append(('handling_amount', data.get('handling_amount'), tx.fees))
        # check buyer
        if tx.payment_method_id and data.get('payer_id') != tx.payment_method_id.acquirer_ref:
            invalid_parameters.append(('payer_id', data.get('payer_id'), tx.payment_method_id.acquirer_ref))
        # check seller
        if data.get('receiver_id') and tx.acquirer_id.paypal_seller_account and data['receiver_id'] != tx.acquirer_id.paypal_seller_account:
            invalid_parameters.append(('receiver_id', data.get('receiver_id'), tx.acquirer_id.paypal_seller_account))
        if not data.get('receiver_id') or not tx.acquirer_id.paypal_seller_account:
            # Check receiver_email only if receiver_id was not checked.
            # In Paypal, this is possible to configure as receiver_email a different email than the business email (the login email)
            # In eCore, there is only one field for the Paypal email: the business email. This isn't possible to set a receiver_email
            # different than the business email. Therefore, if you want such a configuration in your Paypal, you are then obliged to fill
            # the Merchant ID in the Paypal payment acquirer in eCore, so the check is performed on this variable instead of the receiver_email.
            # At least one of the two checks must be done, to avoid fraudsters.
            if data.get('receiver_email') != tx.acquirer_id.paypal_email_account:
                invalid_parameters.append(('receiver_email', data.get('receiver_email'), tx.acquirer_id.paypal_email_account))

        return invalid_parameters

    def _paypal_form_validate(self, cr, uid, tx, data, context=None):
        """Map the IPN 'payment_status' onto the transaction state
        (done / pending / error) and store the Paypal references."""
        status = data.get('payment_status')
        res = {
            'acquirer_reference': data.get('txn_id'),
            'paypal_txn_type': data.get('payment_type'),
        }
        if status in ['Completed', 'Processed']:
            _logger.info('Validated Paypal payment for tx %s: set as done' % (tx.reference))
            res.update(state='done', date_validate=data.get('payment_date', fields.datetime.now()))
            return tx.write(res)
        elif status in ['Pending', 'Expired']:
            _logger.info('Received notification for Paypal payment %s: set as pending' % (tx.reference))
            res.update(state='pending', state_message=data.get('pending_reason', ''))
            return tx.write(res)
        else:
            error = 'Received unrecognized status for Paypal payment %s: %s, set as error' % (tx.reference, status)
            _logger.info(error)
            res.update(state='error', state_message=error)
            return tx.write(res)

    # --------------------------------------------------
    # SERVER2SERVER RELATED METHODS
    # --------------------------------------------------

    def _paypal_try_url(self, request, tries=3, context=None):
        """ Try to contact Paypal. Due to some issues, internal service errors
        seem to be quite frequent. Several tries are done before considering
        the communication as failed.

        .. versionadded:: pre-v8 saas-3
        .. warning:: Experimental code. You should not use it before eCore v8 official release.
        """
        done, res = False, None
        while (not done and tries):
            try:
                res = urllib2.urlopen(request)
                done = True
            except urllib2.HTTPError as e:
                res = e.read()
                e.close()
                if tries and res and json.loads(res)['name'] == 'INTERNAL_SERVICE_ERROR':
                    _logger.warning('Failed contacting Paypal, retrying (%s remaining)' % tries)
                tries = tries - 1
        if not res:
            pass
            # raise ecore.exceptions.
        # NOTE(review): if every attempt raised HTTPError, ``res`` holds the
        # error body (a str) and res.read() below raises AttributeError;
        # the ``if not res: pass`` above also silently swallows total
        # failure. A deliberate error path is needed — confirm intended
        # behaviour before fixing.
        result = res.read()
        res.close()
        return result

    def _paypal_s2s_send(self, cr, uid, values, cc_values, context=None):
        """Create a transaction and post it to the Paypal REST payment API
        (credit-card payer when ``cc_values`` is given, Paypal payer
        otherwise). Returns (tx_id, raw API response).

        .. versionadded:: pre-v8 saas-3
        .. warning:: Experimental code. You should not use it before eCore v8 official release.
        """
        tx_id = self.create(cr, uid, values, context=context)
        tx = self.browse(cr, uid, tx_id, context=context)

        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer %s' % tx.acquirer_id._paypal_s2s_get_access_token()[tx.acquirer_id.id],
        }
        data = {
            'intent': 'sale',
            'transactions': [{
                'amount': {
                    'total': '%.2f' % tx.amount,
                    'currency': tx.currency_id.name,
                },
                'description': tx.reference,
            }]
        }
        if cc_values:
            data['payer'] = {
                'payment_method': 'credit_card',
                'funding_instruments': [{
                    'credit_card': {
                        'number': cc_values['number'],
                        'type': cc_values['brand'],
                        'expire_month': cc_values['expiry_mm'],
                        'expire_year': cc_values['expiry_yy'],
                        'cvv2': cc_values['cvc'],
                        'first_name': tx.partner_name,
                        'last_name': tx.partner_name,
                        'billing_address': {
                            'line1': tx.partner_address,
                            'city': tx.partner_city,
                            'country_code': tx.partner_country_id.code,
                            'postal_code': tx.partner_zip,
                        }
                    }
                }]
            }
        else:
            # TODO: complete redirect URLs
            # NOTE(review): the trailing comma below makes redirect_urls a
            # one-element TUPLE containing the dict, which json.dumps
            # serializes as a list — probably unintended; confirm before
            # fixing.
            data['redirect_urls'] = {
                # 'return_url': 'http://example.com/your_redirect_url/',
                # 'cancel_url': 'http://example.com/your_cancel_url/',
            },
            data['payer'] = {
                'payment_method': 'paypal',
            }
        data = json.dumps(data)

        request = urllib2.Request('https://api.sandbox.paypal.com/v1/payments/payment', data, headers)
        result = self._paypal_try_url(request, tries=3, context=context)
        return (tx_id, result)

    def _paypal_s2s_get_invalid_parameters(self, cr, uid, tx, data, context=None):
        """No server-to-server parameter validation implemented yet.

        .. versionadded:: pre-v8 saas-3
        .. warning:: Experimental code. You should not use it before eCore v8 official release.
        """
        invalid_parameters = []
        return invalid_parameters

    def _paypal_s2s_validate(self, cr, uid, tx, data, context=None):
        """Map the REST payment 'state' onto the transaction state.

        .. versionadded:: pre-v8 saas-3
        .. warning:: Experimental code. You should not use it before eCore v8 official release.
        """
        values = json.loads(data)
        status = values.get('state')
        if status in ['approved']:
            _logger.info('Validated Paypal s2s payment for tx %s: set as done' % (tx.reference))
            tx.write({
                'state': 'done',
                # NOTE(review): 'udpate_time' looks like a typo for Paypal's
                # 'update_time' field, so this always falls back to now();
                # confirm against the REST API response before changing.
                'date_validate': values.get('udpate_time', fields.datetime.now()),
                'paypal_txn_id': values['id'],
            })
            return True
        elif status in ['pending', 'expired']:
            _logger.info('Received notification for Paypal s2s payment %s: set as pending' % (tx.reference))
            tx.write({
                'state': 'pending',
                # 'state_message': data.get('pending_reason', ''),
                'paypal_txn_id': values['id'],
            })
            return True
        else:
            error = 'Received unrecognized status for Paypal s2s payment %s: %s, set as error' % (tx.reference, status)
            _logger.info(error)
            tx.write({
                'state': 'error',
                # 'state_message': error,
                'paypal_txn_id': values['id'],
            })
            return False

    def _paypal_s2s_get_tx_status(self, cr, uid, tx, context=None):
        """Fetch the current status of a payment from the Paypal REST API
        and feed it back into the transaction.

        .. versionadded:: pre-v8 saas-3
        .. warning:: Experimental code. You should not use it before eCore v8 official release.
        """
        # TDETODO: check tx.paypal_txn_id is set
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer %s' % tx.acquirer_id._paypal_s2s_get_access_token()[tx.acquirer_id.id],
        }
        url = 'https://api.sandbox.paypal.com/v1/payments/payment/%s' % (tx.paypal_txn_id)
        request = urllib2.Request(url, headers=headers)
        data = self._paypal_try_url(request, tries=3, context=context)
        return self.s2s_feedback(cr, uid, tx.id, data, context=context)
class payroll_advice(osv.osv):
    '''
    Bank Advice: a batch payment advice sent to the bank so that employee
    salaries (the 'NET' line of each done payslip) can be transferred.
    '''
    _name = 'hr.payroll.advice'
    _description = 'Bank Advice'
    _columns = {
        # Operational fields are read-only once the advice leaves 'draft'.
        'name': fields.char(
            'Name', readonly=True, required=True,
            states={'draft': [('readonly', False)]},
        ),
        'note': fields.text('Description'),
        'date': fields.date('Date', readonly=True, required=True,
                            states={'draft': [('readonly', False)]},
                            help="Advice Date is used to search Payslips"),
        'state': fields.selection([
            ('draft', 'Draft'),
            ('confirm', 'Confirmed'),
            ('cancel', 'Cancelled'),
        ], 'Status', select=True, readonly=True),
        'number': fields.char('Reference', readonly=True),
        'line_ids': fields.one2many('hr.payroll.advice.line', 'advice_id', 'Employee Salary',
                                    states={'draft': [('readonly', False)]}, readonly=True, copy=True),
        'chaque_nos': fields.char('Cheque Numbers'),
        'neft': fields.boolean(
            'NEFT Transaction',
            help="Check this box if your company use online transfer for salary"
        ),
        'company_id': fields.many2one('res.company', 'Company', required=True,
                                      readonly=True, states={'draft': [('readonly', False)]}),
        'bank_id': fields.many2one(
            'res.bank', 'Bank', readonly=True,
            states={'draft': [('readonly', False)]},
            help="Select the Bank from which the salary is going to be paid"),
        'batch_id': fields.many2one('hr.payslip.run', 'Batch', readonly=True)
    }
    _defaults = {
        'date': lambda *a: time.strftime('%Y-%m-%d'),
        'state': lambda *a: 'draft',
        'company_id': lambda self, cr, uid, context:
            self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id,
        'note': "Please make the payroll transfer from above account number to the below mentioned account numbers towards employee salaries:"
    }

    def compute_advice(self, cr, uid, ids, context=None):
        """ Advice - Create Advice lines in Payment Advice and compute Advice
        lines from the 'done' payslips whose period covers the advice date.

        @param cr: the current row, from the database cursor,
        @param uid: the current user's ID for security checks,
        @param ids: List of Advice's IDs
        @param context: A standard dictionary for contextual values
        @return: True
        """
        payslip_pool = self.pool.get('hr.payslip')
        advice_line_pool = self.pool.get('hr.payroll.advice.line')
        payslip_line_pool = self.pool.get('hr.payslip.line')

        for advice in self.browse(cr, uid, ids, context=context):
            # Drop lines from any previous computation so the advice always
            # reflects the current payslips.
            old_line_ids = advice_line_pool.search(
                cr, uid, [('advice_id', '=', advice.id)], context=context)
            if old_line_ids:
                advice_line_pool.unlink(cr, uid, old_line_ids, context=context)
            slip_ids = payslip_pool.search(
                cr, uid,
                [('date_from', '<=', advice.date), ('date_to', '>=', advice.date), ('state', '=', 'done')],
                context=context)
            for slip in payslip_pool.browse(cr, uid, slip_ids, context=context):
                # BUGFIX: was `and`, which only raised when the employee had no
                # bank account record at all; a bank account without an account
                # number slipped through and produced an advice line whose
                # 'name' (account number) was False.  Either missing piece must
                # block the computation.
                if not slip.employee_id.bank_account_id or not slip.employee_id.bank_account_id.acc_number:
                    raise UserError(
                        _('Please define bank account for the %s employee') % (slip.employee_id.name, ))
                # 'NET' is the net-salary rule line: the amount to transfer.
                line_ids = payslip_line_pool.search(
                    cr, uid, [('slip_id', '=', slip.id), ('code', '=', 'NET')], context=context)
                if line_ids:
                    line = payslip_line_pool.browse(cr, uid, line_ids, context=context)[0]
                    advice_line = {
                        'advice_id': advice.id,
                        'name': slip.employee_id.bank_account_id.acc_number,
                        'employee_id': slip.employee_id.id,
                        'bysal': line.total
                    }
                    advice_line_pool.create(cr, uid, advice_line, context=context)
            payslip_pool.write(cr, uid, slip_ids, {'advice_id': advice.id}, context=context)
        return True

    def confirm_sheet(self, cr, uid, ids, context=None):
        """ confirm Advice - confirmed Advice after computing Advice Lines:
        assign a 'PAY/<mm-YYYY>/<seq>' reference and move to 'confirm'.

        @param cr: the current row, from the database cursor,
        @param uid: the current user's ID for security checks,
        @param ids: List of confirm Advice's IDs
        @param context: A standard dictionary for contextual values
        @return: True
        """
        seq_obj = self.pool.get('ir.sequence')
        for advice in self.browse(cr, uid, ids, context=context):
            if not advice.line_ids:
                raise UserError(
                    _('You can not confirm Payment advice without advice lines.'))
            # NOTE(review): DATETIME_FORMAT is defined at module level and not
            # visible here — confirm it matches the stored '%Y-%m-%d' format
            # of a date field.
            advice_date = datetime.strptime(advice.date, DATETIME_FORMAT)
            # Despite the name, this is a 'month-year' fragment of the reference.
            advice_year = advice_date.strftime('%m') + '-' + advice_date.strftime('%Y')
            number = seq_obj.next_by_code(cr, uid, 'payment.advice')
            sequence_num = 'PAY' + '/' + advice_year + '/' + number
            self.write(cr, uid, [advice.id], {
                'number': sequence_num,
                'state': 'confirm'
            }, context=context)
        return True

    def set_to_draft(self, cr, uid, ids, context=None):
        """Resets Advice as draft.
        """
        return self.write(cr, uid, ids, {'state': 'draft'}, context=context)

    def cancel_sheet(self, cr, uid, ids, context=None):
        """Marks Advice as cancelled.
        """
        return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)

    def onchange_company_id(self, cr, uid, ids, company_id=False, context=None):
        """Propose the company's first bank account's bank when the company
        changes; empty value dict otherwise.
        """
        res = {}
        if company_id:
            company = self.pool.get('res.company').browse(
                cr, uid, [company_id], context=context)[0]
            if company.partner_id.bank_ids:
                res.update(
                    {'bank_id': company.partner_id.bank_ids[0].bank_id.id})
        return {'value': res}
class PaymentTransaction(osv.Model):
    """ Transaction Model. Each specific acquirer can extend the model by adding
    its own fields.

    Methods that can be added in an acquirer-specific implementation:

     - ``<name>_create``: method receiving values used when creating a new
       transaction and that returns a dictionary that will update those values.
       This method can be used to tweak some transaction values.

    Methods defined for convention, depending on your controllers:

     - ``<name>_form_feedback(self, cr, uid, data, context=None)``: method that
       handles the data coming from the acquirer after the transaction. It will
       generally receives data posted by the acquirer after the transaction.
    """
    _name = 'payment.transaction'
    _description = 'Payment Transaction'
    _order = 'id desc'
    _rec_name = 'reference'

    def _lang_get(self, cr, uid, context=None):
        # Selection provider for 'partner_lang': all installed languages.
        lang_ids = self.pool['res.lang'].search(cr, uid, [], context=context)
        languages = self.pool['res.lang'].browse(cr, uid, lang_ids, context=context)
        return [(language.code, language.name) for language in languages]

    def _default_partner_country_id(self, cr, uid, context=None):
        # Default country: the context company's country (company id 1 if unset).
        comp = self.pool['res.company'].browse(cr, uid, context.get('company_id', 1), context=context)
        return comp.country_id.id

    _columns = {
        'create_date': fields.datetime('Creation Date', readonly=True),
        'date_validate': fields.datetime('Validation Date'),
        'acquirer_id': fields.many2one(
            'payment.acquirer', 'Acquirer',
            required=True,
        ),
        'type': fields.selection(
            [('server2server', 'Server To Server'), ('form', 'Form'), ('form_save', 'Form with credentials storage')],
            string='Type', required=True),
        'state': fields.selection(
            [('draft', 'Draft'), ('pending', 'Pending'),
             ('done', 'Done'), ('error', 'Error'),
             ('cancel', 'Canceled')
             ], 'Status', required=True,
            track_visibility='onchange', copy=False),
        'state_message': fields.text('Message',
                                     help='Field used to store error and/or validation messages for information'),
        # payment
        'amount': fields.float('Amount', required=True,
                               digits=(16, 2),
                               track_visibility='always', help='Amount'),
        'fees': fields.float('Fees', digits=(16, 2),
                             track_visibility='always',
                             help='Fees amount; set by the system because depends on the acquirer'),
        'currency_id': fields.many2one('res.currency', 'Currency', required=True),
        'reference': fields.char('Reference', required=True, help='Internal reference of the TX'),
        'acquirer_reference': fields.char('Acquirer Reference', help='Reference of the TX as stored in the acquirer database'),
        # duplicate partner / transaction data to store the values at transaction time
        'partner_id': fields.many2one('res.partner', 'Partner', track_visibility='onchange',),
        'partner_name': fields.char('Partner Name'),
        'partner_lang': fields.selection(_lang_get, 'Language'),
        'partner_email': fields.char('Email'),
        'partner_zip': fields.char('Zip'),
        'partner_address': fields.char('Address'),
        'partner_city': fields.char('City'),
        'partner_country_id': fields.many2one('res.country', 'Country', required=True),
        'partner_phone': fields.char('Phone'),
        'html_3ds': fields.char('3D Secure HTML'),
        'callback_eval': fields.char('S2S Callback', help="""\
        Will be safe_eval with `self` being the current transaction.
        i.e.: self.env['my.model'].payment_validated(self)""", oldname="s2s_cb_eval"),
        'payment_method_id': fields.many2one('payment.method', 'Payment Method', domain="[('acquirer_id', '=', acquirer_id)]"),
    }

    def _check_reference(self, cr, uid, ids, context=None):
        # Constraint: an active (non-cancel/error) transaction's reference must
        # be unique.  NOTE(review): only checks ids[0] — presumably constraints
        # are invoked one record at a time here; confirm.
        transaction = self.browse(cr, uid, ids[0], context=context)
        if transaction.state not in ['cancel', 'error']:
            if self.search(cr, uid, [('reference', '=', transaction.reference), ('id', '!=', transaction.id)], context=context, count=True):
                return False
        return True

    _constraints = [
        (_check_reference, 'The payment transaction reference must be unique!', ['reference', 'state']),
    ]

    _defaults = {
        'type': 'form',
        'state': 'draft',
        'partner_lang': 'en_US',
        'partner_country_id': _default_partner_country_id,
        'reference': lambda s, c, u, ctx=None: s.pool['ir.sequence'].next_by_code(c, u, 'payment.transaction', context=ctx),
    }

    def create(self, cr, uid, values, context=None):
        """ Override: fill partner data from partner_id, compute acquirer fees
        and run the acquirer-specific ``<provider>_create`` hook before the
        actual create; default the reference to the new record's id. """
        Acquirer = self.pool['payment.acquirer']

        if values.get('partner_id'):  # @TDENOTE: not sure
            values.update(self.on_change_partner_id(cr, uid, None, values.get('partner_id'), context=context)['value'])

        # call custom create method if defined (i.e. ogone_create for ogone)
        if values.get('acquirer_id'):
            acquirer = self.pool['payment.acquirer'].browse(cr, uid, values.get('acquirer_id'), context=context)

            # compute fees
            custom_method_name = '%s_compute_fees' % acquirer.provider
            if hasattr(Acquirer, custom_method_name):
                fees = getattr(Acquirer, custom_method_name)(
                    cr, uid, acquirer.id, values.get('amount', 0.0), values.get('currency_id'), values.get('partner_country_id'), context=None)
                values['fees'] = float_round(fees, 2)

            # custom create
            custom_method_name = '%s_create' % acquirer.provider
            if hasattr(self, custom_method_name):
                values.update(getattr(self, custom_method_name)(cr, uid, values, context=context))

        # Default value of reference is tx_id
        tx_id = super(PaymentTransaction, self).create(cr, uid, values, context=context)
        if not values.get('reference'):
            self.write(cr, uid, [tx_id], {'reference': str(tx_id)}, context=context)
        return tx_id

    def write(self, cr, uid, ids, values, context=None):
        """ Override: when the acquirer or the amount changes and fees are not
        explicitly provided, recompute the fees per record via the acquirer's
        ``<provider>_compute_fees`` hook. """
        Acquirer = self.pool['payment.acquirer']
        if ('acquirer_id' in values or 'amount' in values) and 'fees' not in values:
            # The acquirer or the amount has changed, and the fees are not explicitely forced. Fees must be recomputed.
            if isinstance(ids, (int, long)):
                ids = [ids]
            for txn_id in ids:
                vals = dict(values)
                vals['fees'] = 0.0
                transaction = self.browse(cr, uid, txn_id, context=context)
                if 'acquirer_id' in values:
                    acquirer = Acquirer.browse(cr, uid, values['acquirer_id'], context=context) if values['acquirer_id'] else None
                else:
                    acquirer = transaction.acquirer_id
                if acquirer:
                    custom_method_name = '%s_compute_fees' % acquirer.provider
                    if hasattr(Acquirer, custom_method_name):
                        # Fall back to the stored record values for anything
                        # not present in the written values.
                        amount = (values['amount'] if 'amount' in values else transaction.amount) or 0.0
                        currency_id = values.get('currency_id') or transaction.currency_id.id
                        country_id = values.get('partner_country_id') or transaction.partner_country_id.id
                        fees = getattr(Acquirer, custom_method_name)(cr, uid, acquirer.id, amount, currency_id, country_id, context=None)
                        vals['fees'] = float_round(fees, 2)
                res = super(PaymentTransaction, self).write(cr, uid, txn_id, vals, context=context)
            return res
        return super(PaymentTransaction, self).write(cr, uid, ids, values, context=context)

    def on_change_partner_id(self, cr, uid, ids, partner_id, context=None):
        """ Onchange: copy the partner's contact data onto the transaction
        (values are duplicated at transaction time — see _columns comment). """
        partner = None
        if partner_id:
            partner = self.pool['res.partner'].browse(cr, uid, partner_id, context=context)
            return {'value': {
                'partner_name': partner and partner.name or False,
                'partner_lang': partner and partner.lang or 'en_US',
                'partner_email': partner and partner.email or False,
                'partner_zip': partner and partner.zip or False,
                'partner_address': _partner_format_address(partner and partner.street or '', partner and partner.street2 or ''),
                'partner_city': partner and partner.city or False,
                'partner_country_id': partner and partner.country_id.id or self._default_partner_country_id(cr, uid, context=context),
                'partner_phone': partner and partner.phone or False,
            }}
        return {}

    def get_next_reference(self, cr, uid, reference, context=None):
        """ Return ``reference`` made unique by appending '-1', '-2', ... while
        a transaction with that reference already exists. """
        ref_suffix = 1
        init_ref = reference
        while self.pool['payment.transaction'].search_count(cr, uid, [('reference', '=', reference)], context=context):
            reference = init_ref + '-' + str(ref_suffix)
            ref_suffix += 1
        return reference

    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------

    def render(self, cr, uid, id, context=None):
        """ Delegate form rendering to the acquirer, passing the transaction's
        duplicated partner/amount data as rendering values. """
        tx = self.browse(cr, uid, id, context=context)
        values = {
            'reference': tx.reference,
            'amount': tx.amount,
            'currency_id': tx.currency_id.id,
            'currency': tx.currency_id,
            'partner': tx.partner_id,
            'partner_name': tx.partner_name,
            'partner_lang': tx.partner_lang,
            'partner_email': tx.partner_email,
            'partner_zip': tx.partner_zip,
            'partner_address': tx.partner_address,
            'partner_city': tx.partner_city,
            'partner_country_id': tx.partner_country_id.id,
            'partner_country': tx.partner_country_id,
            'partner_phone': tx.partner_phone,
            'partner_state': None,
        }
        return tx.acquirer_id.render(None, None, None, values=values)

    def form_feedback(self, cr, uid, data, acquirer_name, context=None):
        """ Dispatch acquirer feedback to the provider-specific hooks:
        ``_<name>_form_get_tx_from_data``, ``_<name>_form_get_invalid_parameters``
        and ``_<name>_form_validate``.  Returns False (and logs) when invalid
        parameters are reported; True when no validate hook exists. """
        invalid_parameters, tx = None, None

        tx_find_method_name = '_%s_form_get_tx_from_data' % acquirer_name
        if hasattr(self, tx_find_method_name):
            tx = getattr(self, tx_find_method_name)(cr, uid, data, context=context)

        invalid_param_method_name = '_%s_form_get_invalid_parameters' % acquirer_name
        if hasattr(self, invalid_param_method_name):
            invalid_parameters = getattr(self, invalid_param_method_name)(cr, uid, tx, data, context=context)

        if invalid_parameters:
            _error_message = '%s: incorrect tx data:\n' % (acquirer_name)
            for item in invalid_parameters:
                _error_message += '\t%s: received %s instead of %s\n' % (item[0], item[1], item[2])
            _logger.error(_error_message)
            return False

        feedback_method_name = '_%s_form_validate' % acquirer_name
        if hasattr(self, feedback_method_name):
            return getattr(self, feedback_method_name)(cr, uid, tx, data, context=context)

        return True

    # --------------------------------------------------
    # SERVER2SERVER RELATED METHODS
    # --------------------------------------------------

    def s2s_create(self, cr, uid, values, cc_values, context=None):
        """ Create and send a server-to-server transaction, then feed the
        acquirer's response back; returns the new transaction id. """
        tx_id, tx_result = self.s2s_send(cr, uid, values, cc_values, context=context)
        self.s2s_feedback(cr, uid, tx_id, tx_result, context=context)
        return tx_id

    def s2s_do_transaction(self, cr, uid, id, context=None, **kwargs):
        """ Delegate to the acquirer's ``<provider>_s2s_do_transaction`` hook
        (returns None when the hook is not defined). """
        tx = self.browse(cr, uid, id, context=context)
        custom_method_name = '%s_s2s_do_transaction' % tx.acquirer_id.provider
        if hasattr(self, custom_method_name):
            return getattr(self, custom_method_name)(cr, uid, id, context=context, **kwargs)

    def s2s_get_tx_status(self, cr, uid, tx_id, context=None):
        """ Get the tx status via the acquirer's
        ``_<provider>_s2s_get_tx_status`` hook; True when no hook exists. """
        tx = self.browse(cr, uid, tx_id, context=context)

        invalid_param_method_name = '_%s_s2s_get_tx_status' % tx.acquirer_id.provider
        if hasattr(self, invalid_param_method_name):
            return getattr(self, invalid_param_method_name)(cr, uid, tx, context=context)

        return True
class mrp_repair(osv.osv):
    """ Repair Order: tracks the repair of a product (operation lines and
    fees), its invoicing (before/after repair or none) and the stock moves
    generated when the repair is done. """
    _name = 'mrp.repair'
    _inherit = 'mail.thread'
    _description = 'Repair Order'

    def _amount_untaxed(self, cr, uid, ids, field_name, arg, context=None):
        """ Calculates untaxed amount.
        @param self: The object pointer
        @param cr: The current row, from the database cursor,
        @param uid: The current user ID for security checks
        @param ids: List of selected IDs
        @param field_name: Name of field.
        @param arg: Argument
        @param context: A standard dictionary for contextual values
        @return: Dictionary of values.
        """
        res = {}
        cur_obj = self.pool.get('res.currency')

        for repair in self.browse(cr, uid, ids, context=context):
            res[repair.id] = 0.0
            # Untaxed total = operation lines + fee lines subtotals,
            # rounded in the pricelist currency.
            for line in repair.operations:
                res[repair.id] += line.price_subtotal
            for line in repair.fees_lines:
                res[repair.id] += line.price_subtotal
            cur = repair.pricelist_id.currency_id
            res[repair.id] = cur_obj.round(cr, uid, cur, res[repair.id])
        return res

    def _amount_tax(self, cr, uid, ids, field_name, arg, context=None):
        """ Calculates taxed amount.
        @param field_name: Name of field.
        @param arg: Argument
        @return: Dictionary of values.
        """
        res = {}
        #return {}.fromkeys(ids, 0)
        cur_obj = self.pool.get('res.currency')
        tax_obj = self.pool.get('account.tax')
        for repair in self.browse(cr, uid, ids, context=context):
            val = 0.0
            cur = repair.pricelist_id.currency_id
            # Only lines flagged to_invoice and carrying taxes contribute.
            for line in repair.operations:
                #manage prices with tax included use compute_all instead of compute
                if line.to_invoice and line.tax_id:
                    tax_calculate = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, cur, line.product_uom_qty, line.product_id.id, repair.partner_id.id)
                    for c in tax_calculate['taxes']:
                        val += c['amount']
            for line in repair.fees_lines:
                if line.to_invoice and line.tax_id:
                    tax_calculate = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, cur, line.product_uom_qty, line.product_id.id, repair.partner_id.id)
                    for c in tax_calculate['taxes']:
                        val += c['amount']
            res[repair.id] = cur_obj.round(cr, uid, cur, val)
        return res

    def _amount_total(self, cr, uid, ids, field_name, arg, context=None):
        """ Calculates total amount.
        @param field_name: Name of field.
        @param arg: Argument
        @return: Dictionary of values.
        """
        res = {}
        untax = self._amount_untaxed(cr, uid, ids, field_name, arg, context=context)
        tax = self._amount_tax(cr, uid, ids, field_name, arg, context=context)
        cur_obj = self.pool.get('res.currency')
        for id in ids:
            repair = self.browse(cr, uid, id, context=context)
            cur = repair.pricelist_id.currency_id
            res[id] = cur_obj.round(cr, uid, cur, untax.get(id, 0.0) + tax.get(id, 0.0))
        return res

    def _get_default_address(self, cr, uid, ids, field_name, arg, context=None):
        # Function field: the partner's 'contact' address, or False when the
        # repair has no partner.
        res = {}
        partner_obj = self.pool.get('res.partner')
        for data in self.browse(cr, uid, ids, context=context):
            adr_id = False
            if data.partner_id:
                adr_id = partner_obj.address_get(cr, uid, [data.partner_id.id], ['contact'])['contact']
            res[data.id] = adr_id
        return res

    def _get_lines(self, cr, uid, ids, context=None):
        # store-trigger: repair orders whose operation lines are in `ids`.
        return self.pool['mrp.repair'].search(cr, uid, [('operations', 'in', ids)], context=context)

    def _get_fee_lines(self, cr, uid, ids, context=None):
        # store-trigger: repair orders whose fee lines are in `ids`.
        return self.pool['mrp.repair'].search(cr, uid, [('fees_lines', 'in', ids)], context=context)

    _columns = {
        'name': fields.char('Repair Reference', required=True, states={'confirmed': [('readonly', True)]}, copy=False),
        'product_id': fields.many2one('product.product', string='Product to Repair', required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'product_qty': fields.float('Product Quantity', digits_compute=dp.get_precision('Product Unit of Measure'),
                                    required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True, readonly=True, states={'draft': [('readonly', False)]}),
        'partner_id': fields.many2one('res.partner', 'Partner', select=True, help='Choose partner for whom the order will be invoiced and delivered.', states={'confirmed': [('readonly', True)]}),
        'address_id': fields.many2one('res.partner', 'Delivery Address', domain="[('parent_id','=',partner_id)]", states={'confirmed': [('readonly', True)]}),
        'default_address_id': fields.function(_get_default_address, type="many2one", relation="res.partner"),
        'state': fields.selection([
            ('draft', 'Quotation'),
            ('cancel', 'Cancelled'),
            ('confirmed', 'Confirmed'),
            ('under_repair', 'Under Repair'),
            ('ready', 'Ready to Repair'),
            ('2binvoiced', 'To be Invoiced'),
            ('invoice_except', 'Invoice Exception'),
            ('done', 'Repaired')
            ], 'Status', readonly=True, track_visibility='onchange', copy=False,
            help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed repair order. \
            \n* The \'Confirmed\' status is used when a user confirms the repair order. \
            \n* The \'Ready to Repair\' status is used to start to repairing, user can start repairing only after repair order is confirmed. \
            \n* The \'To be Invoiced\' status is used to generate the invoice before or after repairing done. \
            \n* The \'Done\' status is set when repairing is completed.\
            \n* The \'Cancelled\' status is used when user cancel repair order.'),
        'location_id': fields.many2one('stock.location', 'Current Location', select=True, required=True, readonly=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}),
        'location_dest_id': fields.many2one('stock.location', 'Delivery Location', readonly=True, required=True, states={'draft': [('readonly', False)], 'confirmed': [('readonly', True)]}),
        'lot_id': fields.many2one('stock.production.lot', 'Repaired Lot', domain="[('product_id','=', product_id)]", help="Products repaired are all belonging to this lot", oldname="prodlot_id"),
        'guarantee_limit': fields.date('Warranty Expiration', states={'confirmed': [('readonly', True)]}),
        'operations': fields.one2many('mrp.repair.line', 'repair_id', 'Operation Lines', readonly=True, states={'draft': [('readonly', False)]}, copy=True),
        'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', help='Pricelist of the selected partner.'),
        'partner_invoice_id': fields.many2one('res.partner', 'Invoicing Address'),
        'invoice_method': fields.selection([
            ("none", "No Invoice"),
            ("b4repair", "Before Repair"),
            ("after_repair", "After Repair")
            ], "Invoice Method", select=True, required=True, states={'draft': [('readonly', False)]}, readonly=True, help='Selecting \'Before Repair\' or \'After Repair\' will allow you to generate invoice before or after the repair is done respectively. \'No invoice\' means you don\'t want to generate invoice for this repair order.'),
        'invoice_id': fields.many2one('account.invoice', 'Invoice', readonly=True, track_visibility="onchange", copy=False),
        'move_id': fields.many2one('stock.move', 'Move', readonly=True, help="Move created by the repair order", track_visibility="onchange", copy=False),
        'fees_lines': fields.one2many('mrp.repair.fee', 'repair_id', 'Fees', readonly=True, states={'draft': [('readonly', False)]}, copy=True),
        'internal_notes': fields.text('Internal Notes'),
        'quotation_notes': fields.text('Quotation Notes'),
        'company_id': fields.many2one('res.company', 'Company'),
        'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
        'repaired': fields.boolean('Repaired', readonly=True, copy=False),
        # Stored function fields, recomputed when the order's lines or any of
        # the price-relevant fields on its operation/fee lines change.
        'amount_untaxed': fields.function(_amount_untaxed, string='Untaxed Amount',
                                          store={
                                              'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
                                              'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
                                              'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
                                          }),
        'amount_tax': fields.function(_amount_tax, string='Taxes',
                                      store={
                                          'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
                                          'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
                                          'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
                                      }),
        'amount_total': fields.function(_amount_total, string='Total',
                                        store={
                                            'mrp.repair': (lambda self, cr, uid, ids, c={}: ids, ['operations', 'fees_lines'], 10),
                                            'mrp.repair.line': (_get_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
                                            'mrp.repair.fee': (_get_fee_lines, ['price_unit', 'price_subtotal', 'product_id', 'tax_id', 'product_uom_qty', 'product_uom'], 10),
                                        }),
    }

    def _default_stock_location(self, cr, uid, context=None):
        # Default location: the main warehouse's stock location.
        # NOTE(review): bare except silently swallows everything (including
        # programming errors) and falls back to False — consider narrowing.
        try:
            warehouse = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'warehouse0')
            return warehouse.lot_stock_id.id
        except:
            return False

    _defaults = {
        'state': lambda *a: 'draft',
        'name': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').next_by_code(cr, uid, 'mrp.repair'),
        'invoice_method': lambda *a: 'none',
        'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'mrp.repair', context=context),
        # NOTE(review): search(...)[0] raises IndexError if no pricelist
        # exists — confirm a pricelist is guaranteed by module data.
        'pricelist_id': lambda self, cr, uid, context: self.pool['product.pricelist'].search(cr, uid, [], limit=1)[0],
        'product_qty': 1.0,
        'location_id': _default_stock_location,
    }

    _sql_constraints = [
        ('name', 'unique (name)', 'The name of the Repair Order must be unique!'),
    ]

    def onchange_product_id(self, cr, uid, ids, product_id=None):
        """ On change of product sets some values.
        @param product_id: Changed product
        @return: Dictionary of values.
        """
        product = False
        if product_id:
            product = self.pool.get("product.product").browse(cr, uid, product_id)
        return {'value': {
            'guarantee_limit': False,
            'lot_id': False,
            'product_uom': product and product.uom_id.id or False,
        }
        }

    def onchange_product_uom(self, cr, uid, ids, product_id, product_uom, context=None):
        """ Warn and reset the UoM when the chosen UoM's category differs from
        the product's own UoM category. """
        res = {'value': {}}
        if not product_uom or not product_id:
            return res
        product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
        uom = self.pool.get('product.uom').browse(cr, uid, product_uom, context=context)
        if uom.category_id.id != product.uom_id.category_id.id:
            res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
            res['value'].update({'product_uom': product.uom_id.id})
        return res

    def onchange_location_id(self, cr, uid, ids, location_id=None):
        """ On change of location: mirror it into the destination location. """
        return {'value': {'location_dest_id': location_id}}

    def button_dummy(self, cr, uid, ids, context=None):
        # No-op button (used by the view to trigger onchange recomputation).
        return True

    def onchange_partner_id(self, cr, uid, ids, part, address_id):
        """ On change of partner sets the values of partner address,
        partner invoice address and pricelist.
        @param part: Changed id of partner.
        @param address_id: Address id from current record.
        @return: Dictionary of values.
        """
        part_obj = self.pool.get('res.partner')
        pricelist_obj = self.pool.get('product.pricelist')
        if not part:
            return {'value': {
                'address_id': False,
                'partner_invoice_id': False,
                'pricelist_id': pricelist_obj.search(cr, uid, [], limit=1)[0]
            }
            }
        addr = part_obj.address_get(cr, uid, [part], ['delivery', 'invoice', 'contact'])
        partner = part_obj.browse(cr, uid, part)
        pricelist = partner.property_product_pricelist and partner.property_product_pricelist.id or False
        return {'value': {
            'address_id': addr['delivery'] or addr['contact'],
            'partner_invoice_id': addr['invoice'],
            'pricelist_id': pricelist
        }
        }

    def action_cancel_draft(self, cr, uid, ids, *args):
        """ Cancels repair order when it is in 'Draft' state.
        @param *arg: Arguments
        @return: True
        """
        if not len(ids):
            return False
        mrp_line_obj = self.pool.get('mrp.repair.line')
        for repair in self.browse(cr, uid, ids):
            mrp_line_obj.write(cr, uid, [l.id for l in repair.operations], {'state': 'draft'})
        self.write(cr, uid, ids, {'state': 'draft'})
        # Restart the workflow instance for the reset orders.
        return self.create_workflow(cr, uid, ids)

    def action_confirm(self, cr, uid, ids, *args):
        """ Repair order state is set to 'To be invoiced' when invoice method is
        'Before repair' else state becomes 'Confirmed'.
        @param *arg: Arguments
        @return: True
        """
        mrp_line_obj = self.pool.get('mrp.repair.line')
        for o in self.browse(cr, uid, ids):
            if (o.invoice_method == 'b4repair'):
                self.write(cr, uid, [o.id], {'state': '2binvoiced'})
            else:
                self.write(cr, uid, [o.id], {'state': 'confirmed'})
                # Tracked products need a lot before the repair can proceed.
                for line in o.operations:
                    if line.product_id.tracking != 'none' and not line.lot_id:
                        raise UserError(_("Serial number is required for operation line with product '%s'") % (line.product_id.name))
                mrp_line_obj.write(cr, uid, [l.id for l in o.operations], {'state': 'confirmed'})
        return True

    def action_cancel(self, cr, uid, ids, context=None):
        """ Cancels repair order.
        @return: True
        """
        mrp_line_obj = self.pool.get('mrp.repair.line')
        for repair in self.browse(cr, uid, ids, context=context):
            if not repair.invoiced:
                mrp_line_obj.write(cr, uid, [l.id for l in repair.operations], {'state': 'cancel'}, context=context)
            else:
                raise UserError(_('Repair order is already invoiced.'))
        return self.write(cr, uid, ids, {'state': 'cancel'})

    def wkf_invoice_create(self, cr, uid, ids, *args):
        # Workflow wrapper around action_invoice_create (workflows need True).
        self.action_invoice_create(cr, uid, ids)
        return True

    def action_invoice_create(self, cr, uid, ids, group=False, context=None):
        """ Creates invoice(s) for repair order.
        @param group: It is set to true when group invoice is to be generated.
        @return: Invoice Ids.
        """
        res = {}
        invoices_group = {}
        inv_line_obj = self.pool.get('account.invoice.line')
        inv_obj = self.pool.get('account.invoice')
        repair_line_obj = self.pool.get('mrp.repair.line')
        repair_fee_obj = self.pool.get('mrp.repair.fee')
        for repair in self.browse(cr, uid, ids, context=context):
            res[repair.id] = False
            # Skip orders that cannot or need not be invoiced.
            if repair.state in ('draft', 'cancel') or repair.invoice_id:
                continue
            if not (repair.partner_id.id and repair.partner_invoice_id.id):
                raise UserError(_('You have to select a Partner Invoice Address in the repair form!'))
            comment = repair.quotation_notes
            if (repair.invoice_method != 'none'):
                if group and repair.partner_invoice_id.id in invoices_group:
                    # Grouping: append this order onto the partner's existing
                    # invoice (merge name/origin/comment).
                    inv_id = invoices_group[repair.partner_invoice_id.id]
                    invoice = inv_obj.browse(cr, uid, inv_id)
                    invoice_vals = {
                        'name': invoice.name + ', ' + repair.name,
                        'origin': invoice.origin + ', ' + repair.name,
                        'comment': (comment and (invoice.comment and invoice.comment + "\n" + comment or comment)) or (invoice.comment and invoice.comment or ''),
                    }
                    inv_obj.write(cr, uid, [inv_id], invoice_vals, context=context)
                else:
                    if not repair.partner_id.property_account_receivable_id:
                        raise UserError(_('No account defined for partner "%s".') % repair.partner_id.name)
                    account_id = repair.partner_id.property_account_receivable_id.id
                    inv = {
                        'name': repair.name,
                        'origin': repair.name,
                        'type': 'out_invoice',
                        'account_id': account_id,
                        'partner_id': repair.partner_invoice_id.id or repair.partner_id.id,
                        'currency_id': repair.pricelist_id.currency_id.id,
                        'comment': repair.quotation_notes,
                        'fiscal_position_id': repair.partner_id.property_account_position_id.id
                    }
                    inv_id = inv_obj.create(cr, uid, inv)
                    invoices_group[repair.partner_invoice_id.id] = inv_id
                self.write(cr, uid, repair.id, {'invoiced': True, 'invoice_id': inv_id})

                # One invoice line per operation line flagged to_invoice.
                for operation in repair.operations:
                    if operation.to_invoice:
                        if group:
                            name = repair.name + '-' + operation.name
                        else:
                            name = operation.name

                        # Income account: product's, else its category's.
                        if operation.product_id.property_account_income_id:
                            account_id = operation.product_id.property_account_income_id.id
                        elif operation.product_id.categ_id.property_account_income_categ_id:
                            account_id = operation.product_id.categ_id.property_account_income_categ_id.id
                        else:
                            raise UserError(_('No account defined for product "%s".') % operation.product_id.name)

                        invoice_line_id = inv_line_obj.create(cr, uid, {
                            'invoice_id': inv_id,
                            'name': name,
                            'origin': repair.name,
                            'account_id': account_id,
                            'quantity': operation.product_uom_qty,
                            'invoice_line_tax_ids': [(6, 0, [x.id for x in operation.tax_id])],
                            'uom_id': operation.product_uom.id,
                            'price_unit': operation.price_unit,
                            'price_subtotal': operation.product_uom_qty * operation.price_unit,
                            'product_id': operation.product_id and operation.product_id.id or False
                        })
                        repair_line_obj.write(cr, uid, [operation.id], {'invoiced': True, 'invoice_line_id': invoice_line_id})

                # Same for fee lines flagged to_invoice.
                for fee in repair.fees_lines:
                    if fee.to_invoice:
                        if group:
                            name = repair.name + '-' + fee.name
                        else:
                            name = fee.name
                        if not fee.product_id:
                            raise UserError(_('No product defined on Fees!'))

                        if fee.product_id.property_account_income_id:
                            account_id = fee.product_id.property_account_income_id.id
                        elif fee.product_id.categ_id.property_account_income_categ_id:
                            account_id = fee.product_id.categ_id.property_account_income_categ_id.id
                        else:
                            raise UserError(_('No account defined for product "%s".') % fee.product_id.name)

                        invoice_fee_id = inv_line_obj.create(cr, uid, {
                            'invoice_id': inv_id,
                            'name': name,
                            'origin': repair.name,
                            'account_id': account_id,
                            'quantity': fee.product_uom_qty,
                            'invoice_line_tax_ids': [(6, 0, [x.id for x in fee.tax_id])],
                            'uom_id': fee.product_uom.id,
                            'product_id': fee.product_id and fee.product_id.id or False,
                            'price_unit': fee.price_unit,
                            'price_subtotal': fee.product_uom_qty * fee.price_unit
                        })
                        repair_fee_obj.write(cr, uid, [fee.id], {'invoiced': True, 'invoice_line_id': invoice_fee_id})
                #inv_obj.button_reset_taxes(cr, uid, inv_id, context=context)
                res[repair.id] = inv_id
        return res

    def action_repair_ready(self, cr, uid, ids, context=None):
        """ Writes repair order state to 'Ready'
        @return: True
        """
        for repair in self.browse(cr, uid, ids, context=context):
            self.pool.get('mrp.repair.line').write(cr, uid, [l.id for l in repair.operations], {'state': 'confirmed'}, context=context)
            self.write(cr, uid, [repair.id], {'state': 'ready'})
        return True

    def action_repair_start(self, cr, uid, ids, context=None):
        """ Writes repair order state to 'Under Repair'
        @return: True
        """
        repair_line = self.pool.get('mrp.repair.line')
        for repair in self.browse(cr, uid, ids, context=context):
            repair_line.write(cr, uid, [l.id for l in repair.operations], {'state': 'confirmed'}, context=context)
            repair.write({'state': 'under_repair'})
        return True

    def action_repair_end(self, cr, uid, ids, context=None):
        """ Writes repair order state to 'To be invoiced' if invoice method is
        After repair else state is set to 'Ready'.
        @return: True
        """
        for order in self.browse(cr, uid, ids, context=context):
            val = {}
            val['repaired'] = True
            if (not order.invoiced and order.invoice_method == 'after_repair'):
                val['state'] = '2binvoiced'
            elif (not order.invoiced and order.invoice_method == 'b4repair'):
                val['state'] = 'ready'
            else:
                pass
            self.write(cr, uid, [order.id], val)
        return True

    def wkf_repair_done(self, cr, uid, ids, *args):
        # Workflow wrapper around action_repair_done (workflows need True).
        self.action_repair_done(cr, uid, ids)
        return True

    def action_repair_done(self, cr, uid, ids, context=None):
        """ Creates stock move for operation and stock move for final product of repair order.
        @return: Move ids of final products
        """
        res = {}
        move_obj = self.pool.get('stock.move')
        repair_line_obj = self.pool.get('mrp.repair.line')
        for repair in self.browse(cr, uid, ids, context=context):
            move_ids = []
            # One stock move per operation line, marked done on the line.
            for move in repair.operations:
                move_id = move_obj.create(cr, uid, {
                    'name': move.name,
                    'product_id': move.product_id.id,
                    'restrict_lot_id': move.lot_id.id,
                    'product_uom_qty': move.product_uom_qty,
                    'product_uom': move.product_uom.id,
                    'partner_id': repair.address_id and repair.address_id.id or False,
                    'location_id': move.location_id.id,
                    'location_dest_id': move.location_dest_id.id,
                })
                move_ids.append(move_id)
                repair_line_obj.write(cr, uid, [move.id], {'move_id': move_id, 'state': 'done'}, context=context)
            # Plus one move for the repaired product itself.
            move_id = move_obj.create(cr, uid, {
                'name': repair.name,
                'product_id': repair.product_id.id,
                'product_uom': repair.product_uom.id or repair.product_id.uom_id.id,
                'product_uom_qty': repair.product_qty,
                'partner_id': repair.address_id and repair.address_id.id or False,
                'location_id': repair.location_id.id,
                'location_dest_id': repair.location_dest_id.id,
                'restrict_lot_id': repair.lot_id.id,
            })
            move_ids.append(move_id)
            move_obj.action_done(cr, uid, move_ids, context=context)
            self.write(cr, uid, [repair.id], {'state': 'done', 'move_id': move_id}, context=context)
            res[repair.id] = move_id
        return res
class actions_server(osv.Model):
    """ Add website option in server actions. """
    _name = 'ir.actions.server'
    _inherit = ['ir.actions.server']

    def _compute_website_url(self, cr, uid, id, website_path, xml_id, context=None):
        """Build the public URL of a server action.

        The link fragment is, in order of preference, the custom website
        path, the action's XML external id, or its database id. Returns ''
        when either the system base url or the link fragment is missing.
        """
        base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url', context=context)
        link = website_path or xml_id or (id and '%d' % id) or ''
        if base_url and link:
            path = '%s/%s' % ('/website/action', link)
            return '%s' % urlparse.urljoin(base_url, path)
        return ''

    def _get_website_url(self, cr, uid, ids, name, args, context=None):
        """Function-field getter for ``website_url``.

        Only published actions in 'code' state get a URL; all others keep
        the default False.
        """
        res = dict.fromkeys(ids, False)
        for action in self.browse(cr, uid, ids, context=context):
            if action.state == 'code' and action.website_published:
                res[action.id] = self._compute_website_url(cr, uid, action.id, action.website_path, action.xml_id, context=context)
        return res

    _columns = {
        'xml_id': fields.function(osv.osv.get_xml_id, type='char', string="External ID",
                                  help="ID of the action if defined in a XML file"),
        'website_path': fields.char('Website Path'),
        'website_url': fields.function(
            _get_website_url, type='char', string='Website URL',
            help='The full URL to access the server action through the website.'),
        'website_published': fields.boolean(
            'Available on the Website', copy=False,
            help='A code server action can be executed from the website, using a dedicated'
                 'controller. The address is <base>/website/action/<website_path>.'
                 'Set this field as True to allow users to run this action. If it'
                 'set to is False the action cannot be run through the website.'),
    }

    def on_change_website_path(self, cr, uid, ids, website_path, xml_id, context=None):
        """Onchange handler: recompute the displayed website_url live in the form."""
        values = {
            'website_url': self._compute_website_url(cr, uid, ids and ids[0] or None, website_path, xml_id, context=context)
        }
        return {'value': values}

    def _get_eval_context(self, cr, uid, action, context=None):
        """ Override to add the request object in eval_context. """
        eval_context = super(actions_server, self)._get_eval_context(cr, uid, action, context=context)
        if action.state == 'code':
            # expose the current HTTP request to the executed server-action code
            eval_context['request'] = request
        return eval_context

    def run_action_code_multi(self, cr, uid, action, eval_context=None, context=None):
        """ Override to allow returning response the same way action is already
        returned by the basic server action behavior. Note that response has
        priority over action, avoid using both. """
        res = super(actions_server, self).run_action_code_multi(cr, uid, action, eval_context, context)
        if 'response' in eval_context:
            return eval_context['response']
        return res
class mrp_repair_line(osv.osv, ProductChangeMixin):
    _name = 'mrp.repair.line'
    _description = 'Repair Line'

    def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
        """ Calculates amount.
        @param field_name: Name of field.
        @param arg: Argument
        @return: Dictionary of values.
        """
        res = {}
        tax_obj = self.pool.get('account.tax')
        # cur_obj = self.pool.get('res.currency')
        for line in self.browse(cr, uid, ids, context=context):
            if line.to_invoice:
                # unit price is expressed in the repair order's pricelist currency
                cur = line.repair_id.pricelist_id.currency_id
                taxes = tax_obj.compute_all(cr, uid, line.tax_id, line.price_unit, cur.id, line.product_uom_qty, line.product_id.id, line.repair_id.partner_id.id)
                #res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
                # subtotal is the tax-included amount computed by account.tax
                res[line.id] = taxes['total_included']
            else:
                # lines not flagged for invoicing do not contribute any amount
                res[line.id] = 0
        return res

    _columns = {
        'name': fields.char('Description', required=True),
        'repair_id': fields.many2one('mrp.repair', 'Repair Order Reference', ondelete='cascade', select=True),
        # 'add': part consumed by the repair; 'remove': part taken off the product
        'type': fields.selection([('add', 'Add'), ('remove', 'Remove')], 'Type', required=True),
        'to_invoice': fields.boolean('To Invoice'),
        'product_id': fields.many2one('product.product', 'Product', required=True),
        'invoiced': fields.boolean('Invoiced', readonly=True, copy=False),
        'price_unit': fields.float('Unit Price', required=True, digits_compute=dp.get_precision('Product Price')),
        'price_subtotal': fields.function(_amount_line, string='Subtotal', digits=0),
        'tax_id': fields.many2many('account.tax', 'repair_operation_line_tax', 'repair_operation_line_id', 'tax_id', 'Taxes'),
        'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
        'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
        'invoice_line_id': fields.many2one('account.invoice.line', 'Invoice Line', readonly=True, copy=False),
        'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True),
        'location_dest_id': fields.many2one('stock.location', 'Dest. Location', required=True, select=True),
        'move_id': fields.many2one('stock.move', 'Inventory Move', readonly=True, copy=False),
        'lot_id': fields.many2one('stock.production.lot', 'Lot'),
        'state': fields.selection([
            ('draft', 'Draft'),
            ('confirmed', 'Confirmed'),
            ('done', 'Done'),
            ('cancel', 'Cancelled')], 'Status', required=True, readonly=True, copy=False,
            help=' * The \'Draft\' status is set automatically as draft when repair order in draft status. \
                \n* The \'Confirmed\' status is set automatically as confirm when repair order in confirm status. \
                \n* The \'Done\' status is set automatically when repair order is completed.\
                \n* The \'Cancelled\' status is set automatically when user cancel repair order.'),
    }

    _defaults = {
        'state': lambda *a: 'draft',
        'product_uom_qty': lambda *a: 1,
    }

    def onchange_operation_type(self, cr, uid, ids, type, guarantee_limit, company_id=False, context=None):
        """ On change of operation type it sets source location, destination location
        and to invoice field.
        @param product: Changed operation type.
        @param guarantee_limit: Guarantee limit of current record.
        @return: Dictionary of values.
        """
        if not type:
            return {'value': {
                'location_id': False,
                'location_dest_id': False
            }}
        location_obj = self.pool.get('stock.location')
        warehouse_obj = self.pool.get('stock.warehouse')
        # production location is the counterpart for both add and remove lines
        location_id = location_obj.search(cr, uid, [('usage', '=', 'production')], context=context)
        location_id = location_id and location_id[0] or False
        if type == 'add':
            # TOCHECK: Find stock location for user's company warehouse or
            # repair order's company's warehouse (company_id field is added in fix of lp:831583)
            args = company_id and [('company_id', '=', company_id)] or []
            warehouse_ids = warehouse_obj.search(cr, uid, args, context=context)
            stock_id = False
            if warehouse_ids:
                stock_id = warehouse_obj.browse(cr, uid, warehouse_ids[0], context=context).lot_stock_id.id
            # invoice the added part only when the guarantee has expired
            # NOTE(review): guarantee_limit is assumed to be a 'YYYY-MM-DD' string — confirm with callers
            to_invoice = (guarantee_limit and datetime.strptime(guarantee_limit, '%Y-%m-%d') < datetime.now())
            return {'value': {
                'to_invoice': to_invoice,
                'location_id': stock_id,
                'location_dest_id': location_id
            }}
        # 'remove' lines: removed parts go from production to a scrap location
        scrap_location_ids = location_obj.search(cr, uid, [('scrap_location', '=', True)], context=context)
        return {'value': {
            'to_invoice': False,
            'location_id': location_id,
            'location_dest_id': scrap_location_ids and scrap_location_ids[0] or False,
        }}
class crm_opportunity_report(osv.Model):
    """ CRM Opportunity Analysis

    Read-only reporting model backed by the SQL view created in
    :meth:`init` (``_auto = False``: no table is created for it).
    """
    _name = "crm.opportunity.report"
    _auto = False
    _description = "CRM Opportunity Analysis"
    _rec_name = 'date_deadline'
    _inherit = ["utm.mixin"]
    _columns = {
        'date_deadline': fields.date('Expected Closing', readonly=True),
        'create_date': fields.datetime('Creation Date', readonly=True),
        'opening_date': fields.datetime('Assignation Date', readonly=True),
        'date_closed': fields.datetime('Close Date', readonly=True),
        'date_last_stage_update': fields.datetime('Last Stage Update', readonly=True),
        'active': fields.boolean('Active', readonly=True),

        # durations (in days, averaged when grouped)
        'delay_open': fields.float('Delay to Assign', digits=(16, 2), readonly=True, group_operator="avg", help="Number of Days to open the case"),
        'delay_close': fields.float('Delay to Close', digits=(16, 2), readonly=True, group_operator="avg", help="Number of Days to close the case"),
        'delay_expected': fields.float('Overpassed Deadline', digits=(16, 2), readonly=True, group_operator="avg"),

        'user_id': fields.many2one('res.users', 'User', readonly=True),
        'team_id': fields.many2one('crm.team', 'Sales Team', oldname='section_id', readonly=True),
        'nbr_activities': fields.integer('# of Activities', readonly=True),
        'country_id': fields.many2one('res.country', 'Country', readonly=True),
        # BUGFIX: 'company_id' used to appear twice in this dict literal with
        # identical definitions; the second entry silently overrode the first.
        # The duplicate key has been removed.
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'probability': fields.float('Probability', digits=(16, 2), readonly=True, group_operator="avg"),
        'total_revenue': fields.float('Total Revenue', digits=(16, 2), readonly=True),
        'expected_revenue': fields.float('Expected Revenue', digits=(16, 2), readonly=True),
        'stage_id': fields.many2one('crm.stage', 'Stage', readonly=True, domain="[('team_ids', '=', team_id)]"),
        'stage_name': fields.char('Stage Name', readonly=True),
        'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
        'priority': fields.selection(crm_stage.AVAILABLE_PRIORITIES, 'Priority'),
        'type': fields.selection(
            [
                ('lead', 'Lead'),
                ('opportunity', 'Opportunity'),
            ], 'Type', help="Type is used to separate Leads and Opportunities"),
        'lost_reason': fields.many2one('crm.lost.reason', 'Lost Reason', readonly=True),
        'date_conversion': fields.datetime('Conversion Date', readonly=True),
    }

    def init(self, cr):
        """(Re)create the crm_opportunity_report SQL view this model maps onto.

        Aggregates crm_lead rows with the count of their mail messages
        (nbr_activities) and the stage name, and derives expected revenue
        and the open/close/deadline delays in days.
        """
        tools.drop_view_if_exists(cr, 'crm_opportunity_report')
        cr.execute("""
            CREATE OR REPLACE VIEW crm_opportunity_report AS (
                SELECT
                    c.id,
                    c.date_deadline,

                    c.date_open as opening_date,
                    c.date_closed as date_closed,
                    c.date_last_stage_update as date_last_stage_update,

                    c.user_id,
                    c.probability,
                    c.stage_id,
                    stage.name as stage_name,
                    c.type,
                    c.company_id,
                    c.priority,
                    c.team_id,
                    activity.nbr_activities,
                    c.active,
                    c.campaign_id,
                    c.source_id,
                    c.medium_id,
                    c.partner_id,
                    c.country_id,
                    c.planned_revenue as total_revenue,
                    c.planned_revenue*(c.probability/100) as expected_revenue,
                    c.create_date as create_date,
                    extract('epoch' from (c.date_closed-c.create_date))/(3600*24) as  delay_close,
                    abs(extract('epoch' from (c.date_deadline - c.date_closed))/(3600*24)) as  delay_expected,
                    extract('epoch' from (c.date_open-c.create_date))/(3600*24) as  delay_open,
                    c.lost_reason,
                    c.date_conversion as date_conversion
                FROM
                    "crm_lead" c
                LEFT JOIN (
                    SELECT m.res_id, COUNT(*) nbr_activities
                    FROM "mail_message" m
                    WHERE m.model = 'crm.lead'
                    GROUP BY m.res_id ) activity
                ON
                    (activity.res_id = c.id)
                LEFT JOIN "crm_stage" stage
                ON stage.id = c.stage_id
                GROUP BY c.id, activity.nbr_activities, stage.name
            )""")
class gamification_goal_definition(osv.Model):
    """Goal definition

    A goal definition contains the way to evaluate an objective
    Each module wanting to be able to set goals to the users needs to create
    a new gamification_goal_definition
    """
    _name = 'gamification.goal.definition'
    _description = 'Gamification goal definition'

    def _get_suffix(self, cr, uid, ids, field_name, arg, context=None):
        """Function-field getter for ``full_suffix``.

        Prepends the current user's company currency symbol to the plain
        suffix when the goal is monetary; otherwise returns the suffix alone.
        """
        res = dict.fromkeys(ids, '')
        for goal in self.browse(cr, uid, ids, context=context):
            if goal.suffix and not goal.monetary:
                res[goal.id] = goal.suffix
            elif goal.monetary:
                # use the current user's company currency
                user = self.pool.get('res.users').browse(cr, uid, uid, context)
                if goal.suffix:
                    res[goal.id] = "%s %s" % (user.company_id.currency_id.symbol, goal.suffix)
                else:
                    res[goal.id] = user.company_id.currency_id.symbol
            else:
                res[goal.id] = ""
        return res

    _columns = {
        'name': fields.char('Goal Definition', required=True, translate=True),
        'description': fields.text('Goal Description'),
        'monetary': fields.boolean('Monetary Value', help="The target and current value are defined in the company currency."),
        'suffix': fields.char('Suffix', help="The unit of the target and current values", translate=True),
        'full_suffix': fields.function(_get_suffix, type="char", string="Full Suffix", help="The currency and suffix field"),
        'computation_mode': fields.selection([
                ('manually', 'Recorded manually'),
                ('count', 'Automatic: number of records'),
                ('sum', 'Automatic: sum on a field'),
                ('python', 'Automatic: execute a specific Python code'),
            ],
            string="Computation Mode",
            help="Defined how will be computed the goals. The result of the operation will be stored in the field 'Current'.",
            required=True),
        'display_mode': fields.selection([
                ('progress', 'Progressive (using numerical values)'),
                ('boolean', 'Exclusive (done or not-done)'),
            ],
            string="Displayed as", required=True),
        'model_id': fields.many2one('ir.model',
            string='Model',
            help='The model object for the field to evaluate'),
        'model_inherited_model_ids': fields.related('model_id', 'inherited_model_ids', type="many2many", obj="ir.model",
            string="Inherited models", readonly="True"),
        'field_id': fields.many2one('ir.model.fields',
            string='Field to Sum',
            help='The field containing the value to evaluate'),
        'field_date_id': fields.many2one('ir.model.fields',
            string='Date Field',
            help='The date to use for the time period evaluated'),
        'domain': fields.char("Filter Domain",
            help="Domain for filtering records. General rule, not user depending, e.g. [('state', '=', 'done')]. The expression can contain reference to 'user' which is a browse record of the current user if not in batch mode.",
            required=True),

        'batch_mode': fields.boolean('Batch Mode',
            help="Evaluate the expression in batch instead of once for each user"),
        'batch_distinctive_field': fields.many2one('ir.model.fields',
            string="Distinctive field for batch user",
            help="In batch mode, this indicates which field distinct one user form the other, e.g. user_id, partner_id..."),
        'batch_user_expression': fields.char("Evaluted expression for batch mode",
            help="The value to compare with the distinctive field. The expression can contain reference to 'user' which is a browse record of the current user, e.g. user.id, user.partner_id.id..."),
        'compute_code': fields.text('Python Code',
            help="Python code to be executed for each user. 'result' should contains the new current value. Evaluated user can be access through object.user_id."),
        'condition': fields.selection([
                ('higher', 'The higher the better'),
                ('lower', 'The lower the better')
            ],
            string='Goal Performance',
            help='A goal is considered as completed when the current value is compared to the value to reach',
            required=True),
        'action_id': fields.many2one('ir.actions.act_window', string="Action",
            help="The action that will be called to update the goal value."),
        'res_id_field': fields.char("ID Field of user",
            help="The field name on the user profile (res.users) containing the value for res_id for action."),
    }

    _defaults = {
        'condition': 'higher',
        'computation_mode': 'manually',
        'domain': "[]",
        'monetary': False,
        'display_mode': 'progress',
    }

    def number_following(self, cr, uid, model_name="mail.thread", context=None):
        """Return the number of 'model_name' objects the user is following

        The model specified in 'model_name' must inherit from mail.thread
        """
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        return self.pool.get('mail.followers').search(cr, uid, [('res_model', '=', model_name), ('partner_id', '=', user.partner_id.id)], count=True, context=context)

    def _check_domain_validity(self, cr, uid, ids, context=None):
        """Validate the 'domain' of automatic (count/sum) definitions.

        Evaluates the stored domain with the superuser as 'user' and runs a
        dummy search; raises UserError with the parse/eval message when the
        domain is invalid. Manual/python definitions are skipped.
        """
        # take admin as should always be present
        superuser = self.pool['res.users'].browse(cr, uid, SUPERUSER_ID, context=context)
        for definition in self.browse(cr, uid, ids, context=context):
            if definition.computation_mode not in ('count', 'sum'):
                continue

            obj = self.pool[definition.model_id.model]
            try:
                domain = safe_eval(definition.domain, {'user': superuser})
                # dummy search to make sure the domain is valid
                obj.search(cr, uid, domain, context=context, count=True)
            except (ValueError, SyntaxError), e:
                msg = e.message or (e.msg + '\n' + e.text)
                raise UserError(_("The domain for the definition %s seems incorrect, please check it.\n\n%s" % (definition.name, msg)))
        return True
class payslip_report(osv.osv):
    """Payslip analysis: read-only report backed by the SQL view in init()."""
    _name = "payslip.report"
    _description = "Payslip Analysis"
    # no table is created for this model; it maps onto the view below
    _auto = False
    _columns = {
        'name': fields.char('Name', readonly=True),
        'date_from': fields.date(
            'Date From',
            readonly=True,
        ),
        'date_to': fields.date(
            'Date To',
            readonly=True,
        ),
        'year': fields.char('Year', size=4, readonly=True),
        'month': fields.selection([('01', 'January'), ('02', 'February'), ('03', 'March'), ('04', 'April'),
                                   ('05', 'May'), ('06', 'June'), ('07', 'July'), ('08', 'August'), ('09', 'September'),
                                   ('10', 'October'), ('11', 'November'), ('12', 'December')], 'Month', readonly=True),
        'day': fields.char('Day', size=128, readonly=True),
        'state': fields.selection([
            ('draft', 'Draft'),
            ('done', 'Done'),
            ('cancel', 'Rejected'),
        ], 'Status', readonly=True),
        'employee_id': fields.many2one('hr.employee', 'Employee', readonly=True),
        'nbr': fields.integer('# Payslip lines', readonly=True),
        'number': fields.char('Number', readonly=True),
        'struct_id': fields.many2one('hr.payroll.structure', 'Structure', readonly=True),
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'paid': fields.boolean('Made Payment Order ? ', readonly=True),
        'total': fields.float('Total', readonly=True),
        'category_id': fields.many2one('hr.salary.rule.category', 'Category', readonly=True),
    }

    def init(self, cr):
        """(Re)create the payslip_report SQL view.

        One row per (payslip, line name, category, employee, ...) group,
        summing the payslip-line totals; year/month/day columns are derived
        from the payslip date_from for grouping in the analysis views.
        """
        tools.drop_view_if_exists(cr, 'payslip_report')
        cr.execute("""
            create or replace view payslip_report as (
                select
                    min(l.id) as id,
                    l.name,
                    p.struct_id,
                    p.state,
                    p.date_from,
                    p.date_to,
                    p.number,
                    p.company_id,
                    p.paid,
                    l.category_id,
                    l.employee_id,
                    sum(l.total) as total,
                    to_char(p.date_from, 'YYYY') as year,
                    to_char(p.date_from, 'MM') as month,
                    to_char(p.date_from, 'YYYY-MM-DD') as day,
                    to_char(p.date_to, 'YYYY') as to_year,
                    to_char(p.date_to, 'MM') as to_month,
                    to_char(p.date_to, 'YYYY-MM-DD') as to_day,
                    1 AS nbr
                from
                    hr_payslip as p
                    left join hr_payslip_line as l on (p.id=l.slip_id)
                where
                    l.employee_id IS NOT NULL
                group by
                    p.number,l.name,p.date_from,p.date_to,p.state,p.company_id,p.paid,
                    l.employee_id,p.struct_id,l.category_id
            )
        """)
class res_users(osv.Model): _inherit = 'res.users' _columns = { 'oauth_provider_id': fields.many2one('auth.oauth.provider', 'OAuth Provider'), 'oauth_uid': fields.char('OAuth User ID', help="Oauth Provider user_id", copy=False), 'oauth_access_token': fields.char('OAuth Access Token', readonly=True, copy=False), } _sql_constraints = [ ('uniq_users_oauth_provider_oauth_uid', 'unique(oauth_provider_id, oauth_uid)', 'OAuth UID must be unique per provider'), ] def _auth_oauth_rpc(self, cr, uid, endpoint, access_token, context=None): params = werkzeug.url_encode({'access_token': access_token}) if urlparse.urlparse(endpoint)[4]: url = endpoint + '&' + params else: url = endpoint + '?' + params f = urllib2.urlopen(url) response = f.read() return json.loads(response) def _auth_oauth_validate(self, cr, uid, provider, access_token, context=None): """ return the validation data corresponding to the access token """ p = self.pool.get('auth.oauth.provider').browse(cr, uid, provider, context=context) validation = self._auth_oauth_rpc(cr, uid, p.validation_endpoint, access_token) if validation.get("error"): raise Exception(validation['error']) if p.data_endpoint: data = self._auth_oauth_rpc(cr, uid, p.data_endpoint, access_token) validation.update(data) return validation def _generate_signup_values(self, cr, uid, provider, validation, params, context=None): oauth_uid = validation['user_id'] email = validation.get('email', 'provider_%s_user_%s' % (provider, oauth_uid)) name = validation.get('name', email) return { 'name': name, 'login': email, 'email': email, 'oauth_provider_id': provider, 'oauth_uid': oauth_uid, 'oauth_access_token': params['access_token'], 'active': True, } def _auth_oauth_signin(self, cr, uid, provider, validation, params, context=None): """ retrieve and sign in the user corresponding to provider and validated access token :param provider: oauth provider id (int) :param validation: result of validation of access token (dict) :param params: oauth parameters 
(dict) :return: user login (str) :raise: ecore.exceptions.AccessDenied if signin failed This method can be overridden to add alternative signin methods. """ try: oauth_uid = validation['user_id'] user_ids = self.search(cr, uid, [("oauth_uid", "=", oauth_uid), ('oauth_provider_id', '=', provider)]) if not user_ids: raise ecore.exceptions.AccessDenied() assert len(user_ids) == 1 user = self.browse(cr, uid, user_ids[0], context=context) user.write({'oauth_access_token': params['access_token']}) return user.login except ecore.exceptions.AccessDenied, access_denied_exception: if context and context.get('no_user_creation'): return None state = json.loads(params['state']) token = state.get('t') values = self._generate_signup_values(cr, uid, provider, validation, params, context=context) try: _, login, _ = self.signup(cr, uid, values, token, context=context) return login except SignupError: raise access_denied_exception
class PaymentAcquirer(osv.Model):
    """ Acquirer Model. Each specific acquirer can extend the model by adding
    its own fields, using the acquirer_name as a prefix for the new fields.
    Using the required_if_provider='<name>' attribute on fields it is possible
    to have required fields that depend on a specific acquirer.

    Each acquirer has a link to an ir.ui.view record that is a template of
    a button used to display the payment form. See examples in ``payment_ogone``
    and ``payment_paypal`` modules.

    Methods that should be added in an acquirer-specific implementation:

     - ``<name>_form_generate_values(self, cr, uid, id, reference, amount, currency,
       partner_id=False, partner_values=None, tx_custom_values=None, context=None)``:
       method that generates the values used to render the form button template.
     - ``<name>_get_form_action_url(self, cr, uid, id, context=None):``: method
       that returns the url of the button form. It is used for example in
       ecommerce application, if you want to post some data to the acquirer.
     - ``<name>_compute_fees(self, cr, uid, id, amount, currency_id, country_id,
       context=None)``: computed the fees of the acquirer, using generic fields
       defined on the acquirer model (see fields definition).

    Each acquirer should also define controllers to handle communication between
    eCore and the acquirer. It generally consists in return urls given to the
    button form and that the acquirer uses to send the customer back after the
    transaction, with transaction details given as a POST request.
    """
    _name = 'payment.acquirer'
    _description = 'Payment Acquirer'
    _order = 'sequence'

    def _get_providers(self, cr, uid, context=None):
        # base list is empty; each payment_* module appends its provider
        return []

    # indirection to ease inheritance
    _provider_selection = lambda self, *args, **kwargs: self._get_providers(*args, **kwargs)

    _columns = {
        'name': fields.char('Name', required=True, translate=True),
        'provider': fields.selection(_provider_selection, string='Provider', required=True),
        'company_id': fields.many2one('res.company', 'Company', required=True),
        'pre_msg': fields.html('Help Message', translate=True,
                               help='Message displayed to explain and help the payment process.'),
        'post_msg': fields.html('Thanks Message',
                                help='Message displayed after having done the payment process.'),
        'view_template_id': fields.many2one('ir.ui.view', 'Form Button Template', required=True),
        'registration_view_template_id': fields.many2one('ir.ui.view', 'S2S Form Template',
                                                         domain=[('type', '=', 'qweb')],
                                                         help="Template for method registration"),
        'environment': fields.selection(
            [('test', 'Test'), ('prod', 'Production')],
            string='Environment', oldname='env'),
        'website_published': fields.boolean(
            'Visible in Portal / Website', copy=False,
            help="Make this payment acquirer available (Customer invoices, etc.)"),
        'auto_confirm': fields.selection(
            [('none', 'No automatic confirmation'),
             ('at_pay_confirm', 'At payment with acquirer confirmation'),
             ('at_pay_now', 'At payment no acquirer confirmation needed')],
            string='Order Confirmation', required=True),
        'pending_msg': fields.html('Pending Message', translate=True,
                                   help='Message displayed, if order is in pending state after having done the payment process.'),
        'done_msg': fields.html('Done Message', translate=True,
                                help='Message displayed, if order is done successfully after having done the payment process.'),
        'cancel_msg': fields.html('Cancel Message', translate=True,
                                  help='Message displayed, if order is cancel during the payment process.'),
        'error_msg': fields.html('Error Message', translate=True,
                                 help='Message displayed, if error is occur during the payment process.'),
        # Fees
        'fees_active': fields.boolean('Add Extra Fees'),
        'fees_dom_fixed': fields.float('Fixed domestic fees'),
        'fees_dom_var': fields.float('Variable domestic fees (in percents)'),
        'fees_int_fixed': fields.float('Fixed international fees'),
        'fees_int_var': fields.float('Variable international fees (in percents)'),
        'sequence': fields.integer('Sequence', help="Determine the display order"),
    }

    # new-API image fields: 'image' is the master, the two sizes are derived
    image = ecore.fields.Binary("Image", attachment=True,
                                help="This field holds the image used for this provider, limited to 1024x1024px")
    image_medium = ecore.fields.Binary("Medium-sized image",
                                       compute='_compute_images', inverse='_inverse_image_medium', store=True, attachment=True,
                                       help="Medium-sized image of this provider. It is automatically "\
                                            "resized as a 128x128px image, with aspect ratio preserved. "\
                                            "Use this field in form views or some kanban views.")
    image_small = ecore.fields.Binary("Small-sized image",
                                      compute='_compute_images', inverse='_inverse_image_small', store=True, attachment=True,
                                      help="Small-sized image of this provider. It is automatically "\
                                           "resized as a 64x64px image, with aspect ratio preserved. "\
                                           "Use this field anywhere a small image is required.")

    @ecore.api.depends('image')
    def _compute_images(self):
        # derive the two smaller sizes from the master image
        for rec in self:
            rec.image_medium = ecore.tools.image_resize_image_medium(rec.image)
            rec.image_small = ecore.tools.image_resize_image_small(rec.image)

    def _inverse_image_medium(self):
        # writing the medium image replaces the master image
        for rec in self:
            rec.image = ecore.tools.image_resize_image_big(rec.image_medium)

    def _inverse_image_small(self):
        # writing the small image replaces the master image
        for rec in self:
            rec.image = ecore.tools.image_resize_image_big(rec.image_small)

    _defaults = {
        'company_id': lambda self, cr, uid, obj, ctx=None: self.pool['res.users'].browse(cr, uid, uid).company_id.id,
        'environment': 'prod',
        'website_published': False,
        'auto_confirm': 'at_pay_confirm',
        'pending_msg': '<i>Pending,</i> Your online payment has been successfully processed. But your order is not validated yet.',
        'done_msg': '<i>Done,</i> Your online payment has been successfully processed. Thank you for your order.',
        'cancel_msg': '<i>Cancel,</i> Your payment has been cancelled.',
        'error_msg': "<i>Error,</i> Please be aware that an error occurred during the transaction. The order has been confirmed but won't be paid. Don't hesitate to contact us if you have any questions on the status of your order."
    }

    def _check_required_if_provider(self, cr, uid, ids, context=None):
        """ If the field has 'required_if_provider="<provider>"' attribute, then it
        required if record.provider is <provider>. """
        for acquirer in self.browse(cr, uid, ids, context=context):
            if any(getattr(f, 'required_if_provider', None) == acquirer.provider and not acquirer[k] for k, f in self._fields.items()):
                return False
        return True

    _constraints = [
        (_check_required_if_provider, 'Required fields not filled', ['required for this provider']),
    ]

    def get_form_action_url(self, cr, uid, id, context=None):
        """ Returns the form action URL, for form-based acquirer implementations. """
        acquirer = self.browse(cr, uid, id, context=context)
        # delegate to the provider-specific hook when the module defines one
        if hasattr(self, '%s_get_form_action_url' % acquirer.provider):
            return getattr(self, '%s_get_form_action_url' % acquirer.provider)(cr, uid, id, context=context)
        return False

    def render(self, cr, uid, id, reference, amount, currency_id, partner_id=False, values=None, context=None):
        """ Renders the form template of the given acquirer as a qWeb template.
        :param string reference: the transaction reference
        :param float amount: the amount the buyer has to pay
        :param currency_id: currency id
        :param dict partner_id: optional partner_id to fill values
        :param dict values: a dictionary of values for the transction that is
        given to the acquirer-specific method generating the form values
        :param dict context: eCore context

        All templates will receive:

         - acquirer: the payment.acquirer browse record
         - user: the current user browse record
         - currency_id: id of the transaction currency
         - amount: amount of the transaction
         - reference: reference of the transaction
         - partner_*: partner-related values
         - partner: optional partner browse record
         - 'feedback_url': feedback URL, controler that manage answer of the acquirer (without base url) -> FIXME
         - 'return_url': URL for coming back after payment validation (wihout base url) -> FIXME
         - 'cancel_url': URL if the client cancels the payment -> FIXME
         - 'error_url': URL if there is an issue with the payment -> FIXME
         - context: eCore context dictionary

        """
        if context is None:
            context = {}
        if values is None:
            values = {}
        acquirer = self.browse(cr, uid, id, context=context)

        # reference and amount
        values.setdefault('reference', reference)
        amount = float_round(amount, 2)
        values.setdefault('amount', amount)

        # currency id
        currency_id = values.setdefault('currency_id', currency_id)
        if currency_id:
            currency = self.pool['res.currency'].browse(cr, uid, currency_id, context=context)
        else:
            # no currency given: fall back to the current user's company currency
            currency = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id.currency_id
        values['currency'] = currency

        # Fill partner_* using values['partner_id'] or partner_id arguement
        partner_id = values.get('partner_id', partner_id)
        if partner_id:
            partner = self.pool['res.partner'].browse(cr, uid, partner_id, context=context)
            values.update({
                'partner': partner,
                'partner_id': partner_id,
                'partner_name': partner.name,
                'partner_lang': partner.lang,
                'partner_email': partner.email,
                'partner_zip': partner.zip,
                'partner_city': partner.city,
                'partner_address': _partner_format_address(partner.street, partner.street2),
                'partner_country_id': partner.country_id.id,
                'partner_country': partner.country_id,
                'partner_phone': partner.phone,
                'partner_state': partner.state_id,
            })
        if values.get('partner_name'):
            values.update({
                'partner_first_name': _partner_split_name(values.get('partner_name'))[0],
                'partner_last_name': _partner_split_name(values.get('partner_name'))[1],
            })

        # Fix address, country fields
        if not values.get('partner_address'):
            values['address'] = _partner_format_address(values.get('partner_street', ''), values.get('partner_street2', ''))
        if not values.get('partner_country') and values.get('partner_country_id'):
            values['country'] = self.pool['res.country'].browse(cr, uid, values.get('partner_country_id'), context=context)

        # compute fees
        fees_method_name = '%s_compute_fees' % acquirer.provider
        if hasattr(self, fees_method_name):
            fees = getattr(self, fees_method_name)(cr, uid, id, values['amount'], values['currency_id'], values['partner_country_id'], context=None)
            values['fees'] = float_round(fees, 2)

        # call <name>_form_generate_values to update the tx dict with acqurier specific values
        cust_method_name = '%s_form_generate_values' % (acquirer.provider)
        if hasattr(self, cust_method_name):
            method = getattr(self, cust_method_name)
            values = method(cr, uid, id, values, context=context)

        values.update({
            'tx_url': context.get('tx_url', self.get_form_action_url(cr, uid, id, context=context)),
            'submit_class': context.get('submit_class', 'btn btn-link'),
            'submit_txt': context.get('submit_txt'),
            'acquirer': acquirer,
            'user': self.pool.get("res.users").browse(cr, uid, uid, context=context),
            'context': context,
            'type': values.get('type') or 'form',
        })
        values.setdefault('return_url', False)

        # because render accepts view ids but not qweb -> need to use the xml_id
        return self.pool['ir.ui.view'].render(cr, uid, acquirer.view_template_id.xml_id, values, engine='ir.qweb', context=context)

    def _registration_render(self, cr, uid, id, partner_id, qweb_context=None, context=None):
        """Render the acquirer's S2S registration form template, letting the
        provider-specific hook enrich the qweb rendering context first."""
        acquirer = self.browse(cr, uid, id, context=context)
        if qweb_context is None:
            qweb_context = {}
        qweb_context.update(id=id, partner_id=partner_id)
        method_name = '_%s_registration_form_generate_values' % (acquirer.provider,)
        if hasattr(self, method_name):
            method = getattr(self, method_name)
            qweb_context.update(method(cr, uid, id, qweb_context, context=context))
        return self.pool['ir.ui.view'].render(cr, uid, acquirer.registration_view_template_id.xml_id, qweb_context, engine='ir.qweb', context=context)

    def s2s_process(self, cr, uid, id, data, context=None):
        """Validate then process server-to-server form data through the
        provider-specific '<provider>_s2s_form_process' hook."""
        acquirer = self.browse(cr, uid, id, context=context)
        cust_method_name = '%s_s2s_form_process' % (acquirer.provider)
        if not self.s2s_validate(cr, uid, id, data, context=context):
            return False
        if hasattr(self, cust_method_name):
            method = getattr(self, cust_method_name)
            return method(cr, uid, data, context=context)
        return True

    def s2s_validate(self, cr, uid, id, data, context=None):
        """Validate server-to-server form data via the provider-specific
        '<provider>_s2s_form_validate' hook; providers without one accept all."""
        acquirer = self.browse(cr, uid, id, context=context)
        cust_method_name = '%s_s2s_form_validate' % (acquirer.provider)
        if hasattr(self, cust_method_name):
            method = getattr(self, cust_method_name)
            return method(cr, uid, id, data, context=context)
        return True
class restaurant_table(osv.osv):
    """A physical table of a restaurant floor, as drawn in the POS floor plan."""
    _name = 'restaurant.table'

    _columns = {
        'name': fields.char('Table Name', size=32, required=True, help='An internal identification of a table'),
        'floor_id': fields.many2one('restaurant.floor', 'Floor'),
        'shape': fields.selection([('square', 'Square'), ('round', 'Round')], 'Shape', required=True),
        'position_h': fields.float('Horizontal Position', help="The table's horizontal position from the left side to the table's center, in pixels"),
        'position_v': fields.float('Vertical Position', help="The table's vertical position from the top to the table's center, in pixels"),
        'width': fields.float('Width', help="The table's width in pixels"),
        'height': fields.float('Height', help="The table's height in pixels"),
        'seats': fields.integer('Seats', help="The default number of customer served at this table."),
        'color': fields.char('Color', help="The table's color, expressed as a valid 'background' CSS property value"),
        'active': fields.boolean('Active', help='If false, the table is deactivated and will not be available in the point of sale'),
        'pos_order_ids': fields.one2many('pos.order', 'table_id', 'Pos Orders', help='The orders served at this table'),
    }

    _defaults = {
        'shape': 'square',
        'seats': 1,
        'position_h': 10,
        'position_v': 10,
        'height': 50,
        'width': 50,
        'active': True,
    }

    def create_from_ui(self, cr, uid, table, context=None):
        """Create or update a table from the point of sale UI.

        ``table`` holds the table's field values; when it carries an ``id``
        the matching record is updated instead of created. Returns the id
        of the created/updated table.
        """
        # The UI sends many2one values as [id, display_name] pairs; keep the id only.
        if table.get('floor_id'):
            table['floor_id'] = table['floor_id'][0]
        existing_id = table.pop('id', False)
        if existing_id:
            # Modify the existing table
            self.write(cr, uid, [existing_id], table, context=context)
            return existing_id
        return self.create(cr, uid, table, context=context)
class hr_timesheet_sheet(osv.osv):
    """Weekly/monthly timesheet combining analytic timesheet lines and
    sign-in/sign-out attendances for one employee, with a simple
    new -> draft -> confirm -> done validation workflow."""
    _name = "hr_timesheet_sheet.sheet"
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    _table = 'hr_timesheet_sheet_sheet'
    _order = "id desc"
    _description = "Timesheet"

    def _total(self, cr, uid, ids, name, args, context=None):
        """ Compute the attendances, analytic lines timesheets and differences between them
            for all the days of a timesheet and the current day
        """
        # NOTE(review): dict.fromkeys shares ONE default dict among all ids;
        # harmless here only because update() below replaces entries wholesale.
        res = dict.fromkeys(ids, {
            'total_attendance': 0.0,
            'total_timesheet': 0.0,
            'total_difference': 0.0,
        })
        # Totals are pre-aggregated per day in the hr_timesheet_sheet_sheet_day SQL view.
        cr.execute("""
            SELECT sheet_id as id,
                   sum(total_attendance) as total_attendance,
                   sum(total_timesheet) as total_timesheet,
                   sum(total_difference) as total_difference
            FROM hr_timesheet_sheet_sheet_day
            WHERE sheet_id IN %s
            GROUP BY sheet_id
        """, (tuple(ids),))
        res.update(dict((x.pop('id'), x) for x in cr.dictfetchall()))
        return res

    def check_employee_attendance_state(self, cr, uid, sheet_id, context=None):
        """Ensure the sheet holds as many sign-ins as sign-outs; raise otherwise."""
        ids_signin = self.pool.get('hr.attendance').search(cr, uid, [('sheet_id', '=', sheet_id), ('action', '=', 'sign_in')])
        ids_signout = self.pool.get('hr.attendance').search(cr, uid, [('sheet_id', '=', sheet_id), ('action', '=', 'sign_out')])
        if len(ids_signin) != len(ids_signout):
            raise UserError(_('The timesheet cannot be validated as it does not contain an equal number of sign ins and sign outs.'))
        return True

    def copy(self, cr, uid, ids, *args, **argv):
        # Duplicating a timesheet is deliberately forbidden.
        raise UserError(_('You cannot duplicate a timesheet.'))

    def create(self, cr, uid, vals, context=None):
        """Create a sheet; the employee must be linked to a user, and incoming
        attendance commands are date-sorted to satisfy the alternation constraint."""
        if 'employee_id' in vals:
            if not self.pool.get('hr.employee').browse(cr, uid, vals['employee_id'], context=context).user_id:
                raise UserError(_('In order to create a timesheet for this employee, you must link him/her to a user.'))
        if vals.get('attendances_ids'):
            # If attendances, we sort them by date asc before writing them, to satisfy the alternance constraint
            vals['attendances_ids'] = self.sort_attendances(cr, uid, vals['attendances_ids'], context=context)
        return super(hr_timesheet_sheet, self).create(cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        """Write with extra checks: employee must have a user and a product,
        sheets must not overlap, and attendances must alternate in/out."""
        if 'employee_id' in vals:
            new_user_id = self.pool.get('hr.employee').browse(cr, uid, vals['employee_id'], context=context).user_id.id or False
            if not new_user_id:
                raise UserError(_('In order to create a timesheet for this employee, you must link him/her to a user.'))
            if not self._sheet_date(cr, uid, ids, forced_user_id=new_user_id, context=context):
                raise UserError(_('You cannot have 2 timesheets that overlap!\nYou should use the menu \'My Timesheet\' to avoid this problem.'))
            if not self.pool.get('hr.employee').browse(cr, uid, vals['employee_id'], context=context).product_id:
                raise UserError(_('In order to create a timesheet for this employee, you must link the employee to a product.'))
        if vals.get('attendances_ids'):
            # If attendances, we sort them by date asc before writing them, to satisfy the alternance constraint
            # In addition to the date order, deleting attendances are done before inserting attendances
            vals['attendances_ids'] = self.sort_attendances(cr, uid, vals['attendances_ids'], context=context)
        res = super(hr_timesheet_sheet, self).write(cr, uid, ids, vals, context=context)
        if vals.get('attendances_ids'):
            # Re-check alternation on the stored records after the write.
            for timesheet in self.browse(cr, uid, ids):
                if not self.pool['hr.attendance']._altern_si_so(cr, uid, [att.id for att in timesheet.attendances_ids]):
                    raise UserError(_('Error ! Sign in (resp. Sign out) must follow Sign out (resp. Sign in)'))
        return res

    def sort_attendances(self, cr, uid, attendance_tuples, context=None):
        """Sort one2many commands on attendances by attendance date (ascending),
        with deletion commands (2/3) placed before create/update commands so
        the alternation constraint holds at every intermediate state."""
        date_attendances = []
        for att_tuple in attendance_tuples:
            # Command codes: 0=create, 1=update, 2=delete, 3=unlink, 4=link.
            if att_tuple[0] in [0, 1, 4]:
                if att_tuple[0] in [0, 1]:
                    # has_key: Python 2 idiom kept as-is.
                    if att_tuple[2] and att_tuple[2].has_key('name'):
                        name = att_tuple[2]['name']
                    else:
                        name = self.pool['hr.attendance'].browse(cr, uid, att_tuple[1]).name
                else:
                    name = self.pool['hr.attendance'].browse(cr, uid, att_tuple[1]).name
                date_attendances.append((1, name, att_tuple))
            elif att_tuple[0] in [2, 3]:
                # Deletions sort first (key 0 < 1).
                date_attendances.append((0, self.pool['hr.attendance'].browse(cr, uid, att_tuple[1]).name, att_tuple))
            else:
                date_attendances.append((0, False, att_tuple))
        date_attendances.sort()
        return [att[2] for att in date_attendances]

    def button_confirm(self, cr, uid, ids, context=None):
        """Subscribe the manager, check attendance consistency and fire the
        'confirm' workflow signal when the difference stays within the
        company tolerance."""
        for sheet in self.browse(cr, uid, ids, context=context):
            if sheet.employee_id and sheet.employee_id.parent_id and sheet.employee_id.parent_id.user_id:
                self.message_subscribe_users(cr, uid, [sheet.id], user_ids=[sheet.employee_id.parent_id.user_id.id], context=context)
            self.check_employee_attendance_state(cr, uid, sheet.id, context=context)
            di = sheet.user_id.company_id.timesheet_max_difference
            # A tolerance of 0 (falsy) means "no limit".
            if (abs(sheet.total_difference) < di) or not di:
                sheet.signal_workflow('confirm')
            else:
                raise UserError(_('Please verify that the total difference of the sheet is lower than %.2f.') % (di,))
        return True

    def attendance_action_change(self, cr, uid, ids, context=None):
        """Toggle sign-in/sign-out for the (distinct) employees of the sheets."""
        hr_employee = self.pool.get('hr.employee')
        employee_ids = []
        for sheet in self.browse(cr, uid, ids, context=context):
            if sheet.employee_id.id not in employee_ids:
                employee_ids.append(sheet.employee_id.id)
        return hr_employee.attendance_action_change(cr, uid, employee_ids, context=context)

    def _count_attendances(self, cr, uid, ids, field_name, arg, context=None):
        """Functional field: number of attendance records per sheet."""
        res = dict.fromkeys(ids, 0)
        attendances_groups = self.pool['hr.attendance'].read_group(cr, uid, [('sheet_id', 'in', ids)], ['sheet_id'], 'sheet_id', context=context)
        for attendances in attendances_groups:
            res[attendances['sheet_id'][0]] = attendances['sheet_id_count']
        return res

    _columns = {
        'name': fields.char('Note', select=1, states={'confirm': [('readonly', True)], 'done': [('readonly', True)]}),
        'employee_id': fields.many2one('hr.employee', 'Employee', required=True),
        'user_id': fields.related('employee_id', 'user_id', type="many2one", relation="res.users", store=True, string="User", required=False, readonly=True),  # fields.many2one('res.users', 'User', required=True, select=1, states={'confirm':[('readonly', True)], 'done':[('readonly', True)]}),
        'date_from': fields.date('Date from', required=True, select=1, readonly=True, states={'new': [('readonly', False)]}),
        'date_to': fields.date('Date to', required=True, select=1, readonly=True, states={'new': [('readonly', False)]}),
        'timesheet_ids': fields.one2many('account.analytic.line', 'sheet_id', 'Timesheet lines', readonly=True, states={'draft': [('readonly', False)], 'new': [('readonly', False)]}),
        'attendances_ids': fields.one2many('hr.attendance', 'sheet_id', 'Attendances'),
        'state': fields.selection([('new', 'New'), ('draft', 'Open'), ('confirm', 'Waiting Approval'), ('done', 'Approved')], 'Status', select=True, required=True, readonly=True, track_visibility='onchange',
            help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed timesheet. \
\n* The \'Confirmed\' status is used for to confirm the timesheet by user. \
\n* The \'Done\' status is used when users timesheet is accepted by his/her senior.'),
        'state_attendance': fields.related('employee_id', 'state', type='selection', selection=[('absent', 'Absent'), ('present', 'Present')], string='Current Status', readonly=True),
        'total_attendance': fields.function(_total, method=True, string='Total Attendance', multi="_total"),
        'total_timesheet': fields.function(_total, method=True, string='Total Timesheet', multi="_total"),
        'total_difference': fields.function(_total, method=True, string='Difference', multi="_total"),
        'period_ids': fields.one2many('hr_timesheet_sheet.sheet.day', 'sheet_id', 'Period', readonly=True),
        'account_ids': fields.one2many('hr_timesheet_sheet.sheet.account', 'sheet_id', 'Analytic accounts', readonly=True),
        'company_id': fields.many2one('res.company', 'Company'),
        'department_id': fields.many2one('hr.department', 'Department'),
        'attendance_count': fields.function(_count_attendances, type='integer', string="Attendances"),
    }

    def _default_date_from(self, cr, uid, context=None):
        """Default start date: beginning of the company's timesheet range
        (month/week/year), falling back to today."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        r = user.company_id and user.company_id.timesheet_range or 'month'
        if r == 'month':
            return time.strftime('%Y-%m-01')
        elif r == 'week':
            # Monday of the current week.
            return (datetime.today() + relativedelta(weekday=0, days=-6)).strftime('%Y-%m-%d')
        elif r == 'year':
            return time.strftime('%Y-01-01')
        return fields.date.context_today(self, cr, uid, context)

    def _default_date_to(self, cr, uid, context=None):
        """Default end date: end of the company's timesheet range
        (month/week/year), falling back to today."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        r = user.company_id and user.company_id.timesheet_range or 'month'
        if r == 'month':
            # Last day of the current month.
            return (datetime.today() + relativedelta(months=+1, day=1, days=-1)).strftime('%Y-%m-%d')
        elif r == 'week':
            # Sunday of the current week.
            return (datetime.today() + relativedelta(weekday=6)).strftime('%Y-%m-%d')
        elif r == 'year':
            return time.strftime('%Y-12-31')
        return fields.date.context_today(self, cr, uid, context)

    def _default_employee(self, cr, uid, context=None):
        """Default employee: the one linked to the current user, if any."""
        emp_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context)
        return emp_ids and emp_ids[0] or False

    _defaults = {
        'date_from': _default_date_from,
        'date_to': _default_date_to,
        'state': 'new',
        'employee_id': _default_employee,
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'hr_timesheet_sheet.sheet', context=c)
    }

    def _sheet_date(self, cr, uid, ids, forced_user_id=False, context=None):
        """Constraint helper: False when another sheet of the same user
        overlaps the [date_from, date_to] interval."""
        for sheet in self.browse(cr, uid, ids, context=context):
            new_user_id = forced_user_id or sheet.employee_id.user_id and sheet.employee_id.user_id.id
            if new_user_id:
                cr.execute('SELECT id \
                    FROM hr_timesheet_sheet_sheet \
                    WHERE (date_from <= %s and %s <= date_to) \
                        AND user_id=%s \
                        AND id <> %s', (sheet.date_to, sheet.date_from, new_user_id, sheet.id))
                if cr.fetchall():
                    return False
        return True

    _constraints = [
        (_sheet_date, 'You cannot have 2 timesheets that overlap!\nPlease use the menu \'My Current Timesheet\' to avoid this problem.', ['date_from', 'date_to']),
    ]

    def action_set_to_draft(self, cr, uid, ids, *args):
        """Reset the sheet to draft and restart its workflow instance."""
        self.write(cr, uid, ids, {'state': 'draft'})
        self.create_workflow(cr, uid, ids)
        return True

    def name_get(self, cr, uid, ids, context=None):
        """Display sheets as 'Week <n>' based on date_from."""
        if not ids:
            return []
        if isinstance(ids, (long, int)):
            ids = [ids]
        # week number according to ISO 8601 Calendar
        return [(r['id'], _('Week ') + str(datetime.strptime(r['date_from'], '%Y-%m-%d').isocalendar()[1])) \
                for r in self.read(cr, uid, ids, ['date_from'], context=context, load='_classic_write')]

    def unlink(self, cr, uid, ids, context=None):
        """Forbid deletion of confirmed/approved sheets or sheets holding
        attendances; cascade-delete the analytic timesheet lines."""
        sheets = self.read(cr, uid, ids, ['state', 'total_attendance'], context=context)
        for sheet in sheets:
            if sheet['state'] in ('confirm', 'done'):
                raise UserError(_('You cannot delete a timesheet which is already confirmed.'))
            elif sheet['total_attendance'] <> 0.00:
                raise UserError(_('You cannot delete a timesheet which have attendance entries.'))
        toremove = []
        analytic_timesheet = self.pool.get('account.analytic.line')
        for sheet in self.browse(cr, uid, ids, context=context):
            for timesheet in sheet.timesheet_ids:
                toremove.append(timesheet.id)
        analytic_timesheet.unlink(cr, uid, toremove, context=context)
        return super(hr_timesheet_sheet, self).unlink(cr, uid, ids, context=context)

    def onchange_employee_id(self, cr, uid, ids, employee_id, context=None):
        """Propagate the employee's department and linked user to the sheet."""
        department_id = False
        user_id = False
        if employee_id:
            empl_id = self.pool.get('hr.employee').browse(cr, uid, employee_id, context=context)
            department_id = empl_id.department_id.id
            user_id = empl_id.user_id.id
        return {'value': {'department_id': department_id, 'user_id': user_id, }}

    # ------------------------------------------------
    # OpenChatter methods and notifications
    # ------------------------------------------------

    def _track_subtype(self, cr, uid, ids, init_values, context=None):
        """Pick the mail subtype matching the state transition being tracked."""
        record = self.browse(cr, uid, ids[0], context=context)
        if 'state' in init_values and record.state == 'confirm':
            return 'hr_timesheet_sheet.mt_timesheet_confirmed'
        elif 'state' in init_values and record.state == 'done':
            return 'hr_timesheet_sheet.mt_timesheet_approved'
        return super(hr_timesheet_sheet, self)._track_subtype(cr, uid, ids, init_values, context=context)

    def _needaction_domain_get(self, cr, uid, context=None):
        """Sheets needing action for the current user: confirmed sheets of
        his/her direct subordinates."""
        emp_obj = self.pool.get('hr.employee')
        empids = emp_obj.search(cr, uid, [('parent_id.user_id', '=', uid)], context=context)
        if not empids:
            return False
        dom = ['&', ('state', '=', 'confirm'), ('employee_id', 'in', empids)]
        return dom
class res_company(osv.osv):
    """Company model: delegates identity/address data to the linked
    res.partner and carries the RML report header/footer configuration."""
    _name = "res.company"
    _description = 'Companies'
    _order = 'name'

    def _get_address_data(self, cr, uid, ids, field_names, arg, context=None):
        """ Read the 'address' functional fields. """
        result = {}
        part_obj = self.pool.get('res.partner')
        for company in self.browse(cr, uid, ids, context=context):
            result[company.id] = {}.fromkeys(field_names, False)
            if company.partner_id:
                # Read as superuser: the company partner may not be visible to uid.
                address_data = part_obj.address_get(cr, ecore.SUPERUSER_ID, [company.partner_id.id], adr_pref=['contact'])
                if address_data['contact']:
                    address = part_obj.read(cr, ecore.SUPERUSER_ID, [address_data['contact']], field_names, context=context)[0]
                    for field in field_names:
                        result[company.id][field] = address[field] or False
        return result

    def _set_address_data(self, cr, uid, company_id, name, value, arg, context=None):
        """ Write the 'address' functional fields. """
        company = self.browse(cr, uid, company_id, context=context)
        if company.partner_id:
            part_obj = self.pool.get('res.partner')
            address_data = part_obj.address_get(cr, uid, [company.partner_id.id], adr_pref=['contact'])
            address = address_data['contact']
            if address:
                part_obj.write(cr, uid, [address], {name: value or False}, context=context)
            else:
                # No contact address yet: create one under the company partner.
                part_obj.create(cr, uid, {name: value or False, 'parent_id': company.partner_id.id}, context=context)
        return True

    def _get_logo_web(self, cr, uid, ids, _field_name, _args, context=None):
        """Functional field: company logo resized to 180px width for web use."""
        result = dict.fromkeys(ids, False)
        for record in self.browse(cr, uid, ids, context=context):
            size = (180, None)
            result[record.id] = image_resize_image(record.partner_id.image, size)
        return result

    def _get_companies_from_partner(self, cr, uid, ids, context=None):
        # store trigger: companies to recompute when a partner image changes.
        return self.pool['res.company'].search(cr, uid, [('partner_id', 'in', ids)], context=context)

    _columns = {
        'name': fields.related('partner_id', 'name', string='Company Name', size=128, required=True, store=True, type='char'),
        'parent_id': fields.many2one('res.company', 'Parent Company', select=True),
        'child_ids': fields.one2many('res.company', 'parent_id', 'Child Companies'),
        'partner_id': fields.many2one('res.partner', 'Partner', required=True),
        'rml_header': fields.text('RML Header', required=True),
        'rml_header1': fields.char('Company Tagline', help="Appears by default on the top right corner of your printed documents (report header)."),
        'rml_header2': fields.text('RML Internal Header', required=True),
        'rml_header3': fields.text('RML Internal Header for Landscape Reports', required=True),
        'rml_footer': fields.text('Report Footer', help="Footer text displayed at the bottom of all reports."),
        'rml_footer_readonly': fields.related('rml_footer', type='text', string='Report Footer', readonly=True),
        'custom_footer': fields.boolean('Custom Footer', help="Check this to define the report footer manually. Otherwise it will be filled in automatically."),
        'font': fields.many2one('res.font', string="Font", domain=[('mode', 'in', ('Normal', 'Regular', 'all', 'Book'))], help="Set the font into the report header, it will be used as default font in the RML reports of the user company"),
        'logo': fields.related('partner_id', 'image', string="Logo", type="binary"),
        # logo_web: do not store in attachments, since the image is retrieved in SQL for
        # performance reasons (see addons/web/controllers/main.py, Binary.company_logo)
        'logo_web': fields.function(_get_logo_web, string="Logo Web", type="binary", store={
            'res.company': (lambda s, c, u, i, x: i, ['partner_id'], 10),
            'res.partner': (_get_companies_from_partner, ['image'], 10),
        }),
        'currency_id': fields.many2one('res.currency', 'Currency', required=True),
        'user_ids': fields.many2many('res.users', 'res_company_users_rel', 'cid', 'user_id', 'Accepted Users'),
        'account_no': fields.char('Account No.'),
        'street': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street", multi='address'),
        'street2': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street2", multi='address'),
        'zip': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="Zip", multi='address'),
        'city': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="City", multi='address'),
        'state_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country.state', string="Fed. State", multi='address'),
        'country_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country', string="Country", multi='address'),
        'email': fields.related('partner_id', 'email', size=64, type='char', string="Email", store=True),
        'phone': fields.related('partner_id', 'phone', size=64, type='char', string="Phone", store=True),
        'fax': fields.function(_get_address_data, fnct_inv=_set_address_data, size=64, type='char', string="Fax", multi='address'),
        'website': fields.related('partner_id', 'website', string="Website", type="char", size=64),
        'vat': fields.related('partner_id', 'vat', string="Tax ID", type="char", size=32),
        'company_registry': fields.char('Company Registry', size=64),
        'rml_paper_format': fields.selection([('a4', 'A4'), ('us_letter', 'US Letter')], "Paper Format", required=True, oldname='paper_format'),
    }

    _sql_constraints = [('name_uniq', 'unique (name)', 'The company name must be unique !')]

    @api.onchange('custom_footer', 'phone', 'fax', 'email', 'website', 'vat', 'company_registry')
    def onchange_footer(self):
        """Recompute the automatic report footer from the contact fields
        unless the user opted for a custom footer."""
        if not self.custom_footer:
            # first line (notice that missing elements are filtered out before the join)
            res = ' | '.join(filter(bool, [
                self.phone and '%s: %s' % (_('Phone'), self.phone),
                self.fax and '%s: %s' % (_('Fax'), self.fax),
                self.email and '%s: %s' % (_('Email'), self.email),
                self.website and '%s: %s' % (_('Website'), self.website),
                self.vat and '%s: %s' % (_('TIN'), self.vat),
                self.company_registry and '%s: %s' % (_('Reg'), self.company_registry),
            ]))
            self.rml_footer_readonly = res
            self.rml_footer = res

    def onchange_state(self, cr, uid, ids, state_id, context=None):
        """Set the country from the chosen federal state."""
        if state_id:
            return {'value': {'country_id': self.pool.get('res.country.state').browse(cr, uid, state_id, context).country_id.id}}
        return {}

    def onchange_font_name(self, cr, uid, ids, font, rml_header, rml_header2, rml_header3, context=None):
        """ To change default header style of all <para> and drawstring. """

        def _change_header(header, font):
            """ Replace default fontname use in header and setfont tag """
            default_para = re.sub('fontName.?=.?".*"', 'fontName="%s"' % font, header)
            return re.sub('(<setFont.?name.?=.?)(".*?")(.)', '\g<1>"%s"\g<3>' % font, default_para)

        if not font:
            return True
        fontname = self.pool.get('res.font').browse(cr, uid, font, context=context).name
        return {'value': {'rml_header': _change_header(rml_header, fontname), 'rml_header2': _change_header(rml_header2, fontname), 'rml_header3': _change_header(rml_header3, fontname)}}

    def on_change_country(self, cr, uid, ids, country_id, context=None):
        """Restrict the state domain to the country and propose its currency
        (EUR when no country is set)."""
        res = {'domain': {'state_id': []}}
        currency_id = self._get_euro(cr, uid, context=context)
        if country_id:
            currency_id = self.pool.get('res.country').browse(cr, uid, country_id, context=context).currency_id.id
            res['domain'] = {'state_id': [('country_id', '=', country_id)]}
        res['value'] = {'currency_id': currency_id}
        return res

    def name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100):
        """Name search; with context key 'user_preference' the candidates are
        limited to the user's own companies, bypassing record rules."""
        context = dict(context or {})
        if context.pop('user_preference', None):
            # We browse as superuser. Otherwise, the user would be able to
            # select only the currently visible companies (according to rules,
            # which are probably to allow to see the child companies) even if
            # she belongs to some other companies.
            user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
            cmp_ids = list(set([user.company_id.id] + [cmp.id for cmp in user.company_ids]))
            uid = SUPERUSER_ID
            args = (args or []) + [('id', 'in', cmp_ids)]
        return super(res_company, self).name_search(cr, uid, name=name, args=args, operator=operator, context=context, limit=limit)

    @api.returns('self')
    def _company_default_get(self, cr, uid, object=False, field=False, context=None):
        """ Returns the default company (the user's company)
            The 'object' and 'field' arguments are ignored but left here for
            backward compatibility and potential override.
        """
        return self.pool['res.users']._get_company(cr, uid, context=context)

    @tools.ormcache('uid', 'company')
    def _get_company_children(self, cr, uid=None, company=None):
        """Cached list of ids of the company and all its descendants."""
        if not company:
            return []
        ids = self.search(cr, uid, [('parent_id', 'child_of', [company])])
        return ids

    def _get_partner_hierarchy(self, cr, uid, company_id, context=None):
        """Partner ids of the whole company tree, starting from the topmost parent."""
        if company_id:
            parent_id = self.browse(cr, uid, company_id)['parent_id']
            if parent_id:
                return self._get_partner_hierarchy(cr, uid, parent_id.id, context)
            else:
                return self._get_partner_descendance(cr, uid, company_id, [], context)
        return []

    def _get_partner_descendance(self, cr, uid, company_id, descendance, context=None):
        """Accumulate (recursively) the partner ids of a company subtree."""
        descendance.append(self.browse(cr, uid, company_id).partner_id.id)
        for child_id in self._get_company_children(cr, uid, company_id):
            if child_id != company_id:
                descendance = self._get_partner_descendance(cr, uid, child_id, descendance)
        return descendance

    #
    # This function restart the cache on the _get_company_children method
    #
    def cache_restart(self, cr):
        self._get_company_children.clear_cache(self)

    def create(self, cr, uid, vals, context=None):
        """Create a company; when no partner is supplied, auto-create a
        company-type partner carrying the name and logo."""
        if not vals.get('name', False) or vals.get('partner_id', False):
            self.cache_restart(cr)
            return super(res_company, self).create(cr, uid, vals, context=context)
        obj_partner = self.pool.get('res.partner')
        partner_id = obj_partner.create(cr, uid, {'name': vals['name'], 'is_company': True, 'image': vals.get('logo', False)}, context=context)
        vals.update({'partner_id': partner_id})
        self.cache_restart(cr)
        company_id = super(res_company, self).create(cr, uid, vals, context=context)
        # Link the auto-created partner back to its company.
        obj_partner.write(cr, uid, [partner_id], {'company_id': company_id}, context=context)
        return company_id

    def write(self, cr, uid, ids, values, context=None):
        # Invalidate the children cache before any hierarchy change.
        self.cache_restart(cr)
        return super(res_company, self).write(cr, uid, ids, values, context=context)

    def _get_euro(self, cr, uid, context=None):
        """Default currency: the one whose rate is exactly 1 (the base currency)."""
        rate_obj = self.pool.get('res.currency.rate')
        rate_id = rate_obj.search(cr, uid, [('rate', '=', 1)], context=context)
        return rate_id and rate_obj.browse(cr, uid, rate_id[0], context=context).currency_id.id or False

    def _get_logo(self, cr, uid, ids):
        # Default logo, base64-encoded (Python 2 str.encode('base64')).
        return open(os.path.join(tools.config['root_path'], 'addons', 'base', 'res', 'res_company_logo.png'), 'rb').read().encode('base64')

    def _get_font(self, cr, uid, ids):
        """Default report font: Helvetica in 'all' mode, when registered."""
        font_obj = self.pool.get('res.font')
        res = font_obj.search(cr, uid, [('family', '=', 'Helvetica'), ('mode', '=', 'all')], limit=1)
        return res and res[0] or False

    # RML page template for internal headers; %s placeholders are filled by
    # _header2 (portrait) and _header3 (landscape) below.
    _header = """
<header>
<pageTemplate>
    <frame id="first" x1="28.0" y1="28.0" width="%s" height="%s"/>
    <stylesheet>
       <!-- Set here the default font to use for all <para> tags -->
       <paraStyle name='Normal' fontName="DejaVuSans"/>
    </stylesheet>
    <pageGraphics>
        <fill color="black"/>
        <stroke color="black"/>
        <setFont name="DejaVuSans" size="8"/>
        <drawString x="%s" y="%s"> [[ formatLang(time.strftime("%%Y-%%m-%%d"), date=True) ]]  [[ time.strftime("%%H:%%M") ]]</drawString>
        <setFont name="DejaVuSans-Bold" size="10"/>
        <drawCentredString x="%s" y="%s">[[ company.partner_id.name ]]</drawCentredString>
        <stroke color="#000000"/>
        <lines>%s</lines>
        <!-- Set here the default font to use for all <drawString> tags -->
        <!-- don't forget to change the 2 other occurence of <setFont> above if needed -->
        <setFont name="DejaVuSans" size="8"/>
    </pageGraphics>
</pageTemplate>
</header>"""

    _header2 = _header % (539, 772, "1.0cm", "28.3cm", "11.1cm", "28.3cm", "1.0cm 28.1cm 20.1cm 28.1cm")
    _header3 = _header % (786, 525, 25, 555, 440, 555, "25 550 818 550")

    def _get_header(self, cr, uid, ids):
        """Default external header: the bundled corporate RML header file,
        falling back to the built-in A4 template."""
        try:
            header_file = tools.file_open(os.path.join('base', 'report', 'corporate_rml_header.rml'))
            try:
                return header_file.read()
            finally:
                header_file.close()
        except:
            return self._header_a4

    # RML page template for the customer-facing header; %s placeholders are
    # vertical positions filled by _header_a4 / _header_letter below.
    _header_main = """
<header>
    <pageTemplate>
        <frame id="first" x1="1.3cm" y1="3.0cm" height="%s" width="19.0cm"/>
        <stylesheet>
            <!-- Set here the default font to use for all <para> tags -->
            <paraStyle name='Normal' fontName="DejaVuSans"/>
            <paraStyle name="main_footer" fontSize="8.0" alignment="CENTER"/>
            <paraStyle name="main_header" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
        </stylesheet>
        <pageGraphics>
            <!-- Set here the default font to use for all <drawString> tags -->
            <setFont name="DejaVuSans" size="8"/>
            <!-- You Logo - Change X,Y,Width and Height -->
            <image x="1.3cm" y="%s" height="40.0" >[[ company.logo or removeParentNode('image') ]]</image>
            <fill color="black"/>
            <stroke color="black"/>
            <!-- page header -->
            <lines>1.3cm %s 20cm %s</lines>
            <drawRightString x="20cm" y="%s">[[ company.rml_header1 ]]</drawRightString>
            <drawString x="1.3cm" y="%s">[[ company.partner_id.name ]]</drawString>
            <place x="1.3cm" y="%s" height="1.8cm" width="15.0cm">
                <para style="main_header">[[ display_address(company.partner_id) or '' ]]</para>
            </place>
            <drawString x="1.3cm" y="%s">Phone:</drawString>
            <drawRightString x="7cm" y="%s">[[ company.partner_id.phone or '' ]]</drawRightString>
            <drawString x="1.3cm" y="%s">Mail:</drawString>
            <drawRightString x="7cm" y="%s">[[ company.partner_id.email or '' ]]</drawRightString>
            <lines>1.3cm %s 7cm %s</lines>
            <!-- left margin -->
            <rotate degrees="90"/>
            <fill color="grey"/>
            <drawString x="2.65cm" y="-0.4cm">generated by eCore.com</drawString>
            <fill color="black"/>
            <rotate degrees="-90"/>
            <!--page bottom-->
            <lines>1.2cm 2.65cm 19.9cm 2.65cm</lines>
            <place x="1.3cm" y="0cm" height="2.55cm" width="19.0cm">
                <para style="main_footer">[[ company.rml_footer ]]</para>
                <para style="main_footer">Contact : [[ user.name ]] - Page: <pageNumber/></para>
            </place>
        </pageGraphics>
    </pageTemplate>
</header>"""

    _header_a4 = _header_main % ('21.7cm', '27.7cm', '27.7cm', '27.7cm', '27.8cm', '27.3cm', '25.3cm', '25.0cm', '25.0cm', '24.6cm', '24.6cm', '24.5cm', '24.5cm')
    _header_letter = _header_main % ('20cm', '26.0cm', '26.0cm', '26.0cm', '26.1cm', '25.6cm', '23.6cm', '23.3cm', '23.3cm', '22.9cm', '22.9cm', '22.8cm', '22.8cm')

    def onchange_rml_paper_format(self, cr, uid, ids, rml_paper_format, context=None):
        """Swap the RML header template when the paper format changes."""
        if rml_paper_format == 'us_letter':
            return {'value': {'rml_header': self._header_letter}}
        return {'value': {'rml_header': self._header_a4}}

    def act_discover_fonts(self, cr, uid, ids, context=None):
        """Trigger a scan of the fonts available on the system."""
        return self.pool.get("res.font").font_scan(cr, uid, context=context)

    _defaults = {
        'currency_id': _get_euro,
        'rml_paper_format': 'a4',
        'rml_header': _get_header,
        'rml_header2': _header2,
        'rml_header3': _header3,
        'logo': _get_logo,
        'font': _get_font,
    }

    _constraints = [
        (osv.osv._check_recursion, 'Error! You can not create recursive companies.', ['parent_id'])
    ]
class crm_claim_report(osv.osv):
    """ CRM Claim Report

    Read-only reporting model (_auto = False) backed by the SQL view created
    in :meth:`init`, aggregating claims per date/user/team/stage/etc.
    """
    _name = "crm.claim.report"
    _auto = False
    _description = "CRM Claim Report"

    _columns = {
        'user_id': fields.many2one('res.users', 'User', readonly=True),
        'team_id': fields.many2one('crm.team', 'Team', oldname='section_id', readonly=True),
        'nbr': fields.integer('# of Claims', readonly=True),  # TDE FIXME master: rename into nbr_claims
        # Fix: 'company_id' was declared twice in this dict; the first
        # (identical) entry was dead and has been removed.
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True, select=True),
        'claim_date': fields.datetime('Claim Date', readonly=True),
        'delay_close': fields.float('Delay to close', digits=(16, 2), readonly=True, group_operator="avg", help="Number of Days to close the case"),
        'stage_id': fields.many2one('crm.claim.stage', 'Stage', readonly=True, domain="[('team_ids','=',team_id)]"),
        'categ_id': fields.many2one('crm.claim.category', 'Category', readonly=True),
        'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
        'priority': fields.selection(AVAILABLE_PRIORITIES, 'Priority'),
        'type_action': fields.selection([('correction', 'Corrective Action'), ('prevention', 'Preventive Action')], 'Action Type'),
        'date_closed': fields.datetime('Close Date', readonly=True, select=True),
        'date_deadline': fields.date('Deadline', readonly=True, select=True),
        'delay_expected': fields.float('Overpassed Deadline', digits=(16, 2), readonly=True, group_operator="avg"),
        'email': fields.integer('# Emails', size=128, readonly=True),
        'subject': fields.char('Claim Subject', readonly=True),
    }

    def init(self, cr):
        """ Display Number of cases And Team Name

        (Re)create the crm_claim_report SQL view backing this model.
        @param cr: the current row, from the database cursor,
        """
        tools.drop_view_if_exists(cr, 'crm_claim_report')
        cr.execute("""
            create or replace view crm_claim_report as (
                select
                    min(c.id) as id,
                    c.date as claim_date,
                    c.date_closed as date_closed,
                    c.date_deadline as date_deadline,
                    c.user_id,
                    c.stage_id,
                    c.team_id,
                    c.partner_id,
                    c.company_id,
                    c.categ_id,
                    c.name as subject,
                    count(*) as nbr,
                    c.priority as priority,
                    c.type_action as type_action,
                    c.create_date as create_date,
                    avg(extract('epoch' from (c.date_closed-c.create_date)))/(3600*24) as delay_close,
                    (SELECT count(id) FROM mail_message WHERE model='crm.claim' AND res_id=c.id) AS email,
                    extract('epoch' from (c.date_deadline - c.date_closed))/(3600*24) as delay_expected
                from
                    crm_claim c
                group by c.date,
                        c.user_id,c.team_id, c.stage_id,
                        c.categ_id,c.partner_id,c.company_id,c.create_date,
                        c.priority,c.type_action,c.date_deadline,c.date_closed,c.id
            )""")
class board_create(osv.osv_memory):
    """Wizard creating an empty custom dashboard: a board.board form view,
    a window action opening it, and a menu entry under the chosen parent."""

    def board_create(self, cr, uid, ids, context=None):
        # Wizard button: always invoked on exactly one wizard record.
        assert len(ids) == 1
        this = self.browse(cr, uid, ids[0], context=context)

        # Minimal two-column board layout.
        # NOTE(review): this.name is interpolated unescaped into the XML
        # arch — a name containing XML special characters would produce an
        # invalid view; confirm whether upstream sanitizes it.
        view_arch = dedent("""<?xml version="1.0"?>
                              <form string="%s" version="7.0">
                              <board style="2-1">
                                  <column/>
                                  <column/>
                              </board>
                              </form>
                           """.strip() % (this.name,))

        view_id = self.pool.get('ir.ui.view').create(cr, uid, {
            'name': this.name,
            'model': 'board.board',
            'priority': 16,
            'type': 'form',
            'arch': view_arch,
        }, context=context)

        action_id = self.pool.get('ir.actions.act_window').create(cr, uid, {
            'name': this.name,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'board.board',
            'usage': 'menu',
            'view_id': view_id,
            'help': dedent('''<div class="oe_empty_custom_dashboard">
                  <p>
                    <b>This dashboard is empty.</b>
                  </p><p>
                    To add the first report into this dashboard, go to any
                    menu, switch to list or graph view, and click <i>'Add to
                    Dashboard'</i> in the extended search options.
                  </p><p>
                    You can filter and group data before inserting into the
                    dashboard using the search options.
                  </p>
                </div>
            ''')
        }, context=context)

        # Menu creation needs superuser rights.
        menu_id = self.pool.get('ir.ui.menu').create(cr, SUPERUSER_ID, {
            'name': this.name,
            'parent_id': this.menu_parent_id.id,
            'action': 'ir.actions.act_window,%s' % (action_id,)
        }, context=context)

        # Invalidate the cached list of boards so the new one shows up.
        self.pool.get('board.board')._clear_list_cache()

        # Full client reload so the new menu entry appears immediately.
        return {
            'type': 'ir.actions.client',
            'tag': 'reload',
            'params': {'menu_id': menu_id},
        }

    def _default_menu_parent_id(self, cr, uid, context=None):
        # Default parent: the standard "Dashboards" reporting menu.
        _, menu_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'menu_reporting_dashboard')
        return menu_id

    _name = "board.create"
    _description = "Board Creation"

    _columns = {
        'name': fields.char('Board Name', required=True),
        'menu_parent_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
    }

    _defaults = {
        'menu_parent_id': _default_menu_parent_id,
    }
class AcquirerPaypal(osv.Model):
    """Paypal payment acquirer.

    Extends payment.acquirer with the Paypal-specific configuration fields,
    fee computation, the generation of the values posted to the Paypal form,
    and the server-to-server REST token handling.
    """
    _inherit = 'payment.acquirer'

    def _get_paypal_urls(self, cr, uid, environment, context=None):
        """Return the Paypal endpoints (form + REST) for the environment.

        ``environment`` is 'prod' for the live endpoints; anything else maps
        to the sandbox.
        """
        if environment == 'prod':
            return {
                'paypal_form_url': 'https://www.paypal.com/cgi-bin/webscr',
                'paypal_rest_url': 'https://api.paypal.com/v1/oauth2/token',
            }
        else:
            return {
                'paypal_form_url': 'https://www.sandbox.paypal.com/cgi-bin/webscr',
                'paypal_rest_url': 'https://api.sandbox.paypal.com/v1/oauth2/token',
            }

    def _get_providers(self, cr, uid, context=None):
        # Register 'paypal' in the provider selection field.
        providers = super(AcquirerPaypal, self)._get_providers(cr, uid, context=context)
        providers.append(['paypal', 'Paypal'])
        return providers

    _columns = {
        'paypal_email_account': fields.char('Paypal Email ID', required_if_provider='paypal'),
        'paypal_seller_account': fields.char(
            'Paypal Merchant ID',
            help='The Merchant ID is used to ensure communications coming from Paypal are valid and secured.'),
        'paypal_use_ipn': fields.boolean('Use IPN', help='Paypal Instant Payment Notification'),
        # Server 2 server (REST API) credentials
        'paypal_api_enabled': fields.boolean('Use Rest API'),
        'paypal_api_username': fields.char('Rest API Username'),
        'paypal_api_password': fields.char('Rest API Password'),
        'paypal_api_access_token': fields.char('Access Token'),
        'paypal_api_access_token_validity': fields.datetime('Access Token Validity'),
    }

    _defaults = {
        'paypal_use_ipn': True,
        # Paypal standard fees as of the time of writing (domestic /
        # international, fixed part + percentage).
        'fees_active': False,
        'fees_dom_fixed': 0.35,
        'fees_dom_var': 3.4,
        'fees_int_fixed': 0.35,
        'fees_int_var': 3.9,
        'paypal_api_enabled': False,
    }

    def _migrate_paypal_account(self, cr, uid, context=None):
        """Migration helper: move the legacy res_company.paypal_account
        value into a per-company paypal acquirer record (creating one when
        none exists yet)."""
        # Raw SQL because the paypal_account column may no longer be a
        # declared field on res.company.
        cr.execute('SELECT id, paypal_account FROM res_company')
        res = cr.fetchall()
        for (company_id, company_paypal_account) in res:
            if company_paypal_account:
                company_paypal_ids = self.search(cr, uid, [('company_id', '=', company_id), ('provider', '=', 'paypal')], limit=1, context=context)
                if company_paypal_ids:
                    self.write(cr, uid, company_paypal_ids, {'paypal_email_account': company_paypal_account}, context=context)
                else:
                    paypal_view = self.pool['ir.model.data'].get_object(cr, uid, 'payment_paypal', 'paypal_acquirer_button')
                    self.create(cr, uid, {
                        'name': 'Paypal',
                        'provider': 'paypal',
                        'paypal_email_account': company_paypal_account,
                        'view_template_id': paypal_view.id,
                    }, context=context)
        return True

    def paypal_compute_fees(self, cr, uid, id, amount, currency_id, country_id, context=None):
        """ Compute paypal fees.

            :param float amount: the amount to pay
            :param integer country_id: an ID of a res.country, or None. This is
                                       the customer's country, to be compared to
                                       the acquirer company country.
            :return float fees: computed fees
        """
        acquirer = self.browse(cr, uid, id, context=context)
        if not acquirer.fees_active:
            return 0.0
        country = self.pool['res.country'].browse(cr, uid, country_id, context=context)
        # Domestic rates apply when the customer country matches the
        # acquirer company's country; otherwise international rates.
        if country and acquirer.company_id.country_id.id == country.id:
            percentage = acquirer.fees_dom_var
            fixed = acquirer.fees_dom_fixed
        else:
            percentage = acquirer.fees_int_var
            fixed = acquirer.fees_int_fixed
        # Fee such that (amount + fees) * percentage + fixed == fees.
        fees = (percentage / 100.0 * amount + fixed ) / (1 - percentage / 100.0)
        return fees

    def paypal_form_generate_values(self, cr, uid, id, values, context=None):
        """Build the dict of POST values for the Paypal redirection form."""
        base_url = self.pool['ir.config_parameter'].get_param(cr, SUPERUSER_ID, 'web.base.url')
        acquirer = self.browse(cr, uid, id, context=context)

        paypal_tx_values = dict(values)
        paypal_tx_values.update({
            'cmd': '_xclick',
            'business': acquirer.paypal_email_account,
            'item_name': '%s: %s' % (acquirer.company_id.name, values['reference']),
            'item_number': values['reference'],
            'amount': values['amount'],
            'currency_code': values['currency'] and values['currency'].name or '',
            'address1': values.get('partner_address'),
            'city': values.get('partner_city'),
            'country': values.get('partner_country') and values.get('partner_country').name or '',
            'state': values.get('partner_state') and values.get('partner_state').name or '',
            'email': values.get('partner_email'),
            'zip_code': values.get('partner_zip'),
            'first_name': values.get('partner_first_name'),
            'last_name': values.get('partner_last_name'),
            'paypal_return': '%s' % urlparse.urljoin(base_url, PaypalController._return_url),
            'notify_url': '%s' % urlparse.urljoin(base_url, PaypalController._notify_url),
            'cancel_return': '%s' % urlparse.urljoin(base_url, PaypalController._cancel_url),
            # 'handling' carries the pre-computed fees when fees are active.
            'handling': '%.2f' % paypal_tx_values.pop('fees', 0.0) if acquirer.fees_active else False,
            # Paypal echoes 'custom' back in the IPN; used to restore the
            # return URL on our side.
            'custom': json.dumps({'return_url': '%s' % paypal_tx_values.pop('return_url')}) if paypal_tx_values.get('return_url') else False,
        })
        return paypal_tx_values

    def paypal_get_form_action_url(self, cr, uid, id, context=None):
        # URL the payment form posts to (live or sandbox).
        acquirer = self.browse(cr, uid, id, context=context)
        return self._get_paypal_urls(cr, uid, acquirer.environment, context=context)['paypal_form_url']

    def _paypal_s2s_get_access_token(self, cr, uid, ids, context=None):
        """Fetch an OAuth2 access token per acquirer via the REST endpoint.

        Note: see http://stackoverflow.com/questions/2407126/python-urllib2-basic-auth-problem
        for explanation why we use Authorization header instead of urllib2
        password manager.

        :return: dict mapping acquirer id -> access token (or False).
        """
        res = dict.fromkeys(ids, False)
        parameters = werkzeug.url_encode({'grant_type': 'client_credentials'})

        for acquirer in self.browse(cr, uid, ids, context=context):
            tx_url = self._get_paypal_urls(cr, uid, acquirer.environment)['paypal_rest_url']
            request = urllib2.Request(tx_url, parameters)

            # add other headers (https://developer.paypal.com/webapps/developer/docs/integration/direct/make-your-first-call/)
            request.add_header('Accept', 'application/json')
            request.add_header('Accept-Language', 'en_US')

            # add authorization header (HTTP basic auth, base64 of user:password)
            base64string = base64.encodestring('%s:%s' % (
                acquirer.paypal_api_username,
                acquirer.paypal_api_password)
            ).replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)

            request = urllib2.urlopen(request)
            result = request.read()
            res[acquirer.id] = json.loads(result).get('access_token')
            request.close()
        return res
class payment_advice_report(osv.osv):
    """Read-only reporting model over payroll payment advices.

    Backed by a PostgreSQL view (``_auto = False``) aggregating
    hr_payroll_advice / hr_payroll_advice_line; see init() for the SQL.
    """
    _name = "payment.advice.report"
    _description = "Payment Advice Analysis"
    _auto = False

    _columns = {
        'name': fields.char('Name', readonly=True),
        'date': fields.date(
            'Date',
            readonly=True,
        ),
        'year': fields.char('Year', size=4, readonly=True),
        'month': fields.selection([('01', 'January'), ('02', 'February'),
                                   ('03', 'March'), ('04', 'April'),
                                   ('05', 'May'), ('06', 'June'),
                                   ('07', 'July'), ('08', 'August'),
                                   ('09', 'September'), ('10', 'October'),
                                   ('11', 'November'), ('12', 'December')],
                                  'Month', readonly=True),
        'day': fields.char('Day', size=128, readonly=True),
        'state': fields.selection([
            ('draft', 'Draft'),
            ('confirm', 'Confirmed'),
            ('cancel', 'Cancelled'),
        ], 'Status', select=True, readonly=True),
        'employee_id': fields.many2one('hr.employee', 'Employee', readonly=True),
        'nbr': fields.integer('# Payment Lines', readonly=True),
        'number': fields.char('Number', readonly=True),
        'bysal': fields.float('By Salary', readonly=True),
        'bank_id': fields.many2one('res.bank', 'Bank', readonly=True),
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'cheque_nos': fields.char('Cheque Numbers', readonly=True),
        'neft': fields.boolean('NEFT Transaction', readonly=True),
        'ifsc_code': fields.char('IFSC Code', size=32, readonly=True),
        # BUGFIX: was `required=True`, which makes no sense on a read-only
        # SQL-view model (nothing is ever written) and is inconsistent with
        # every other column above; declared readonly like its siblings.
        'employee_bank_no': fields.char('Employee Bank Account', readonly=True),
    }

    def init(self, cr):
        """(Re)create the SQL view backing this model."""
        tools.drop_view_if_exists(cr, 'payment_advice_report')
        # NOTE: `p.chaque_nos` is the actual (misspelled) column name on
        # hr_payroll_advice; it is aliased to the correctly-spelled field.
        cr.execute("""
            create or replace view payment_advice_report as (
                select
                    min(l.id) as id,
                    sum(l.bysal) as bysal,
                    p.name,
                    p.state,
                    p.date,
                    p.number,
                    p.company_id,
                    p.bank_id,
                    p.chaque_nos as cheque_nos,
                    p.neft,
                    l.employee_id,
                    l.ifsc_code,
                    l.name as employee_bank_no,
                    to_char(p.date, 'YYYY') as year,
                    to_char(p.date, 'MM') as month,
                    to_char(p.date, 'YYYY-MM-DD') as day,
                    1 as nbr
                from
                    hr_payroll_advice as p
                    left join hr_payroll_advice_line as l on (p.id=l.advice_id)
                where
                    l.employee_id IS NOT NULL
                group by
                    p.number,p.name,p.date,p.state,p.company_id,p.bank_id,p.chaque_nos,p.neft,
                    l.employee_id,l.advice_id,l.bysal,l.ifsc_code, l.name
            )
        """)
class hr_employee(osv.osv):
    """Employee record.

    Delegates name/company/user data to resource.resource via ``_inherits``
    and plugs into the chatter via mail.thread.
    """
    _name = "hr.employee"
    _description = "Employee"
    _order = 'name_related'
    _inherits = {'resource.resource': "resource_id"}
    _inherit = ['mail.thread']
    _mail_post_access = 'read'

    _columns = {
        # we need a related field in order to be able to sort the employee by name
        'name_related': fields.related('resource_id', 'name', type='char', string='Name', readonly=True, store=True),
        'country_id': fields.many2one('res.country', 'Nationality (Country)'),
        'birthday': fields.date("Date of Birth"),
        'ssnid': fields.char('SSN No', help='Social Security Number'),
        'sinid': fields.char('SIN No', help="Social Insurance Number"),
        'identification_id': fields.char('Identification No'),
        'gender': fields.selection([('male', 'Male'), ('female', 'Female'), ('other', 'Other')], 'Gender'),
        'marital': fields.selection([('single', 'Single'), ('married', 'Married'), ('widower', 'Widower'), ('divorced', 'Divorced')], 'Marital Status'),
        'department_id': fields.many2one('hr.department', 'Department'),
        'address_id': fields.many2one('res.partner', 'Working Address'),
        'address_home_id': fields.many2one('res.partner', 'Home Address'),
        'bank_account_id': fields.many2one('res.partner.bank', 'Bank Account Number',
                                           domain="[('partner_id','=',address_home_id)]",
                                           help="Employee bank salary account"),
        'work_phone': fields.char('Work Phone', readonly=False),
        'mobile_phone': fields.char('Work Mobile', readonly=False),
        'work_email': fields.char('Work Email', size=240),
        'work_location': fields.char('Work Location'),
        'notes': fields.text('Notes'),
        'parent_id': fields.many2one('hr.employee', 'Manager'),
        'category_ids': fields.many2many('hr.employee.category', 'employee_category_rel',
                                         'emp_id', 'category_id', 'Tags'),
        'child_ids': fields.one2many('hr.employee', 'parent_id', 'Subordinates'),
        'resource_id': fields.many2one('resource.resource', 'Resource',
                                       ondelete='cascade', required=True, auto_join=True),
        'coach_id': fields.many2one('hr.employee', 'Coach'),
        'job_id': fields.many2one('hr.job', 'Job Title'),
        'passport_id': fields.char('Passport No'),
        'color': fields.integer('Color Index'),
        'city': fields.related('address_id', 'city', type='char', string='City'),
        'login': fields.related('user_id', 'login', type='char', string='Login', readonly=1),
        'last_login': fields.related('user_id', 'date', type='datetime', string='Latest Connection', readonly=1),
    }

    # image: all image fields are base64 encoded and PIL-supported
    # (new-API fields mixed into an old-API model)
    image = ecore.fields.Binary(
        "Photo", attachment=True,
        help="This field holds the image used as photo for the employee, limited to 1024x1024px.")
    image_medium = ecore.fields.Binary(
        "Medium-sized photo", compute='_compute_images',
        inverse='_inverse_image_medium', store=True, attachment=True,
        help="Medium-sized photo of the employee. It is automatically "
             "resized as a 128x128px image, with aspect ratio preserved. "
             "Use this field in form views or some kanban views.")
    image_small = ecore.fields.Binary(
        "Small-sized photo", compute='_compute_images',
        inverse='_inverse_image_small', store=True, attachment=True,
        help="Small-sized photo of the employee. It is automatically "
             "resized as a 64x64px image, with aspect ratio preserved. "
             "Use this field anywhere a small image is required.")

    @api.depends('image')
    def _compute_images(self):
        # Derive the two resized variants from the master image.
        for rec in self:
            rec.image_medium = tools.image_resize_image_medium(rec.image)
            rec.image_small = tools.image_resize_image_small(rec.image)

    def _inverse_image_medium(self):
        # Writing the medium variant updates the master image (upscaled).
        for rec in self:
            rec.image = tools.image_resize_image_big(rec.image_medium)

    def _inverse_image_small(self):
        # Writing the small variant updates the master image (upscaled).
        for rec in self:
            rec.image = tools.image_resize_image_big(rec.image_small)

    def _get_default_image(self, cr, uid, context=None):
        # Default avatar shipped with the hr module, base64-encoded.
        image_path = get_module_resource('hr', 'static/src/img', 'default_image.png')
        return tools.image_resize_image_big(
            open(image_path, 'rb').read().encode('base64'))

    # BUGFIX: this dict was previously named `defaults`, which neither the
    # old nor the new ORM API reads — the default image/active/color were
    # silently never applied. The recognized attribute is `_defaults`.
    _defaults = {
        'active': 1,
        'image': _get_default_image,
        'color': 0,
    }

    def unlink(self, cr, uid, ids, context=None):
        """Delete the employees and their delegated resource records."""
        resource_ids = []
        for employee in self.browse(cr, uid, ids, context=context):
            resource_ids.append(employee.resource_id.id)
        super(hr_employee, self).unlink(cr, uid, ids, context=context)
        # _inherits does not cascade the delete; remove the resources too.
        return self.pool.get('resource.resource').unlink(cr, uid, resource_ids, context=context)

    def onchange_address_id(self, cr, uid, ids, address, context=None):
        # Copy phone numbers from the selected working address.
        if address:
            address = self.pool.get('res.partner').browse(cr, uid, address, context=context)
            return {
                'value': {
                    'work_phone': address.phone,
                    'mobile_phone': address.mobile
                }
            }
        return {'value': {}}

    def onchange_company(self, cr, uid, ids, company, context=None):
        # Default working address: the company's 'contact' address.
        address_id = False
        if company:
            company_id = self.pool.get('res.company').browse(cr, uid, company, context=context)
            address = self.pool.get('res.partner').address_get(
                cr, uid, [company_id.partner_id.id], ['contact'])
            address_id = address and address['contact'] or False
        return {'value': {'address_id': address_id}}

    def onchange_department_id(self, cr, uid, ids, department_id, context=None):
        # Default manager: the manager of the selected department.
        value = {'parent_id': False}
        if department_id:
            department = self.pool.get('hr.department').browse(cr, uid, department_id)
            value['parent_id'] = department.manager_id.id
        return {'value': value}

    def onchange_user(self, cr, uid, ids, name, image, user_id, context=None):
        # Prefill name/email/image from the linked res.users record,
        # keeping any value already typed by the user.
        if user_id:
            user = self.pool['res.users'].browse(cr, uid, user_id, context=context)
            values = {
                'name': name or user.name,
                'work_email': user.email,
                'image': image or user.image,
            }
            return {'value': values}

    def action_follow(self, cr, uid, ids, context=None):
        """ Wrapper because message_subscribe_users take a user_ids=None
            that receive the context without the wrapper. """
        return self.message_subscribe_users(cr, uid, ids, context=context)

    def action_unfollow(self, cr, uid, ids, context=None):
        """ Wrapper because message_unsubscribe_users take a user_ids=None
            that receive the context without the wrapper. """
        return self.message_unsubscribe_users(cr, uid, ids, context=context)

    def _message_get_auto_subscribe_fields(self, cr, uid, updated_fields,
                                           auto_follow_fields=None, context=None):
        """ Overwrite of the original method to always follow user_id field,
            even when not track_visibility so that a user will follow it's
            employee """
        if auto_follow_fields is None:
            auto_follow_fields = ['user_id']
        user_field_lst = []
        for name, field in self._fields.items():
            if name in auto_follow_fields and name in updated_fields and field.comodel_name == 'res.users':
                user_field_lst.append(name)
        return user_field_lst

    _constraints = [
        (osv.osv._check_recursion,
         _('Error! You cannot create recursive hierarchy of Employee(s).'),
         ['parent_id']),
    ]
class base_language_export(osv.osv_memory):
    """Wizard exporting translation terms (CSV / PO / TGZ) for a set of modules."""
    _name = "base.language.export"

    def _get_languages(self, cr, uid, context):
        # Selection values: the pseudo "new language" entry (empty .pot
        # template) followed by every translatable language installed.
        lang_obj = self.pool.get('res.lang')
        lang_ids = lang_obj.search(cr, uid, [('translatable', '=', True)])
        translatable = lang_obj.browse(cr, uid, lang_ids)
        choices = [(record.code, record.name) for record in translatable]
        return [(NEW_LANG_KEY, _('New Language (Empty translation template)'))] + choices

    _columns = {
        'name': fields.char('File Name', readonly=True),
        'lang': fields.selection(_get_languages, 'Language', required=True),
        'format': fields.selection([('csv', 'CSV File'), ('po', 'PO File'),
                                    ('tgz', 'TGZ Archive')], 'File Format', required=True),
        'modules': fields.many2many('ir.module.module', 'rel_modules_langexport',
                                    'wiz_id', 'module_id', 'Apps To Export',
                                    domain=[('state', '=', 'installed')]),
        'data': fields.binary('File', readonly=True),
        'state': fields.selection([
            ('choose', 'choose'),  # choose language
            ('get', 'get')
        ])  # get the file
    }

    _defaults = {
        'state': 'choose',
        'lang': NEW_LANG_KEY,
        'format': 'csv',
    }

    def act_getfile(self, cr, uid, ids, context=None):
        """Build the export file, attach it to the wizard and re-open it."""
        wizard = self.browse(cr, uid, ids, context=context)[0]
        # NEW_LANG_KEY means "no language": export an empty template.
        export_lang = wizard.lang if wizard.lang != NEW_LANG_KEY else False
        module_names = sorted(module.name for module in wizard.modules) or ['all']

        # Render the export into an in-memory buffer, then base64-encode it
        # for storage in the binary field.
        with contextlib.closing(cStringIO.StringIO()) as stream:
            tools.trans_export(export_lang, module_names, stream, wizard.format, cr)
            payload = base64.encodestring(stream.getvalue())

        # File name: language code when exporting a language, the module
        # name when exporting a single module, 'new' otherwise.
        if export_lang:
            basename = get_iso_codes(export_lang)
        elif len(module_names) == 1:
            basename = module_names[0]
        else:
            basename = 'new'

        # An untranslated PO export is conventionally a .pot template.
        extension = wizard.format
        if extension == 'po' and not export_lang:
            extension = 'pot'

        wizard.write({
            'state': 'get',
            'data': payload,
            'name': "%s.%s" % (basename, extension),
        })
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'base.language.export',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': wizard.id,
            'views': [(False, 'form')],
            'target': 'new',
        }
class hr_department(osv.osv):
    """HR department, organized as a tree via parent_id/child_ids."""
    _name = "hr.department"
    _description = "HR Department"
    _inherit = ['mail.thread', 'ir.needaction_mixin']

    def _dept_name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
        # Function field backing 'complete_name': the "Parent / Child"
        # display name produced by name_get().
        res = self.name_get(cr, uid, ids, context=context)
        return dict(res)

    _columns = {
        'name': fields.char('Department Name', required=True),
        'complete_name': fields.function(_dept_name_get_fnc, type="char", string='Name'),
        'company_id': fields.many2one('res.company', 'Company', select=True, required=False),
        'parent_id': fields.many2one('hr.department', 'Parent Department', select=True),
        'child_ids': fields.one2many('hr.department', 'parent_id', 'Child Departments'),
        'manager_id': fields.many2one('hr.employee', 'Manager', track_visibility='onchange'),
        'member_ids': fields.one2many('hr.employee', 'department_id', 'Members', readonly=True),
        'jobs_ids': fields.one2many('hr.job', 'department_id', 'Jobs'),
        'note': fields.text('Note'),
        'color': fields.integer('Color Index'),
    }

    _defaults = {
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company').
        _company_default_get(cr, uid, 'hr.department', context=c),
    }

    _constraints = [(osv.osv._check_recursion,
                     _('Error! You cannot create recursive departments.'),
                     ['parent_id'])]

    def name_get(self, cr, uid, ids, context=None):
        """Display name including the direct parent: "Parent / Name"."""
        if not ids:
            return []
        if isinstance(ids, (int, long)):
            ids = [ids]
        if context is None:
            context = {}
        reads = self.read(cr, uid, ids, ['name', 'parent_id'], context=context)
        res = []
        for record in reads:
            name = record['name']
            if record['parent_id']:
                # parent_id read() result is a (id, display_name) pair.
                name = record['parent_id'][1] + ' / ' + name
            res.append((record['id'], name))
        return res

    def create(self, cr, uid, vals, context=None):
        if context is None:
            context = {}
        # Avoid auto-subscribing the creator to the new department thread.
        context['mail_create_nosubscribe'] = True
        # TDE note: auto-subscription of manager done by hand, because currently
        # the tracking allows to track+subscribe fields linked to a res.user record
        # An update of the limited behavior should come, but not currently done.
        manager_id = vals.get("manager_id")
        new_id = super(hr_department, self).create(cr, uid, vals, context=context)
        if manager_id:
            employee = self.pool.get('hr.employee').browse(cr, uid, manager_id, context=context)
            if employee.user_id:
                self.message_subscribe_users(cr, uid, [new_id],
                                             user_ids=[employee.user_id.id],
                                             context=context)
        return new_id

    def write(self, cr, uid, ids, vals, context=None):
        # TDE note: auto-subscription of manager done by hand, because currently
        # the tracking allows to track+subscribe fields linked to a res.user record
        # An update of the limited behavior should come, but not currently done.
        if isinstance(ids, (int, long)):
            ids = [ids]
        employee_ids = []
        if 'manager_id' in vals:
            manager_id = vals.get("manager_id")
            if manager_id:
                # Subscribe the new manager's user to the thread.
                employee = self.pool['hr.employee'].browse(cr, uid, manager_id, context=context)
                if employee.user_id:
                    self.message_subscribe_users(
                        cr, uid, ids, user_ids=[employee.user_id.id], context=context)
            # Re-parent the department members that reported to the old
            # manager so they now report to the new one.
            for department in self.browse(cr, uid, ids, context=context):
                employee_ids += self.pool['hr.employee'].search(
                    cr, uid,
                    [('id', '!=', manager_id),
                     ('department_id', '=', department.id),
                     ('parent_id', '=', department.manager_id.id)],
                    context=context)
            self.pool['hr.employee'].write(cr, uid, employee_ids,
                                           {'parent_id': manager_id},
                                           context=context)
        return super(hr_department, self).write(cr, uid, ids, vals, context=context)
class config(osv.Model):
    """Google Drive templates configuration.

    Each record maps a model (optionally restricted by an ir.filters record)
    to a Google Drive template document; the template is copied on demand,
    shared, and attached to the record as a URL-type ir.attachment.
    """
    _name = 'google.drive.config'
    _description = "Google Drive templates config"

    def get_google_drive_url(self, cr, uid, config_id, res_id, template_id, context=None):
        """Return the Drive URL for ``res_id``, copying the template on first use.

        :param config_id: id of this google.drive.config record
        :param res_id: id of the business record the document is attached to
        :param template_id: Drive file id of the template to copy
        :return: the document URL (str) or False
        :raises UserError: when the name pattern references an unknown field
        """
        config = self.browse(cr, SUPERUSER_ID, config_id, context=context)
        model = config.model_id
        filter_name = config.filter_id and config.filter_id.name or False
        record = self.pool.get(model.model).read(cr, uid, [res_id], context=context)[0]
        record.update({'model': model.name, 'filter': filter_name})
        name_gdocs = config.name_template
        try:
            # name_template is a %-pattern over the record values,
            # e.g. 'Document %(name)s'.
            name_gdocs = name_gdocs % record
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed; a bad pattern raises KeyError/ValueError.
            raise UserError(
                _("At least one key cannot be found in your Google Drive name pattern"))

        # Reuse an existing attachment when the document was already copied.
        attach_pool = self.pool.get("ir.attachment")
        attach_ids = attach_pool.search(cr, uid, [('res_model', '=', model.model),
                                                  ('name', '=', name_gdocs),
                                                  ('res_id', '=', res_id)])
        url = False
        if attach_ids:
            attachment = attach_pool.browse(cr, uid, attach_ids[0], context)
            url = attachment.url
        else:
            url = self.copy_doc(cr, uid, res_id, template_id, name_gdocs,
                                model.model, context).get('url')
        return url

    def get_access_token(self, cr, uid, scope=None, context=None):
        """Exchange the stored refresh token for a fresh OAuth2 access token.

        :param scope: OAuth scope to request (defaults to full Drive access)
        :return: the access token string
        :raises RedirectWarning: for admin users, pointing at the settings panel
        :raises UserError: for non-admin users when Drive is not configured
        """
        ir_config = self.pool['ir.config_parameter']
        google_drive_refresh_token = ir_config.get_param(
            cr, SUPERUSER_ID, 'google_drive_refresh_token')
        # BUGFIX: computed once up front. The original code referenced an
        # undefined `user_is_admin` inside the HTTPError handler below,
        # raising NameError instead of the intended warning.
        user_is_admin = self.pool['res.users']._is_admin(cr, uid, [uid])
        if not google_drive_refresh_token:
            if user_is_admin:
                model, action_id = self.pool['ir.model.data'].get_object_reference(
                    cr, uid, 'base_setup', 'action_general_configuration')
                msg = _("You haven't configured 'Authorization Code' generated from google, Please generate and configure it .")
                raise ecore.exceptions.RedirectWarning(
                    msg, action_id, _('Go to the configuration panel'))
            else:
                raise UserError(
                    _("Google Drive is not yet configured. Please contact your administrator."))
        google_drive_client_id = ir_config.get_param(cr, SUPERUSER_ID,
                                                     'google_drive_client_id')
        google_drive_client_secret = ir_config.get_param(
            cr, SUPERUSER_ID, 'google_drive_client_secret')
        # For Getting New Access Token With help of old Refresh Token
        data = werkzeug.url_encode(
            dict(client_id=google_drive_client_id,
                 refresh_token=google_drive_refresh_token,
                 client_secret=google_drive_client_secret,
                 grant_type="refresh_token",
                 scope=scope or 'https://www.googleapis.com/auth/drive'))
        headers = {"Content-type": "application/x-www-form-urlencoded"}
        try:
            req = urllib2.Request('https://accounts.google.com/o/oauth2/token',
                                  data, headers)
            content = urllib2.urlopen(req, timeout=TIMEOUT).read()
        except urllib2.HTTPError:
            if user_is_admin:
                model, action_id = self.pool['ir.model.data'].get_object_reference(
                    cr, uid, 'base_setup', 'action_general_configuration')
                msg = _("Something went wrong during the token generation. Please request again an authorization code .")
                raise ecore.exceptions.RedirectWarning(
                    msg, action_id, _('Go to the configuration panel'))
            else:
                raise UserError(
                    _("Google Drive is not yet configured. Please contact your administrator."))
        content = json.loads(content)
        return content.get('access_token')

    def copy_doc(self, cr, uid, res_id, template_id, name_gdocs, res_model, context=None):
        """Copy the Drive template, share it, and attach its URL to the record.

        :return: dict with 'id' (the ir.attachment id, when created) and 'url'
        :raises UserError: when the template cannot be found on Drive
        """
        ir_config = self.pool['ir.config_parameter']
        google_web_base_url = ir_config.get_param(cr, SUPERUSER_ID, 'web.base.url')
        access_token = self.get_access_token(cr, uid, context=context)
        # Copy template in to drive with help of new access token
        request_url = "https://www.googleapis.com/drive/v2/files/%s?fields=parents/id&access_token=%s" % (
            template_id, access_token)
        headers = {"Content-type": "application/x-www-form-urlencoded"}
        try:
            req = urllib2.Request(request_url, None, headers)
            parents = urllib2.urlopen(req, timeout=TIMEOUT).read()
        except urllib2.HTTPError:
            raise UserError(
                _("The Google Template cannot be found. Maybe it has been deleted."))
        parents_dict = json.loads(parents)

        # The copy keeps the template's parent folder(s); the description
        # links back to the originating record.
        record_url = "Click on link to open Record in eCore\n %s/?db=%s#id=%s&model=%s" % (
            google_web_base_url, cr.dbname, res_id, res_model)
        data = {
            "title": name_gdocs,
            "description": record_url,
            "parents": parents_dict['parents']
        }
        request_url = "https://www.googleapis.com/drive/v2/files/%s/copy?access_token=%s" % (
            template_id, access_token)
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        data_json = json.dumps(data)
        req = urllib2.Request(request_url, data_json, headers)
        content = urllib2.urlopen(req, timeout=TIMEOUT).read()
        content = json.loads(content)
        res = {}
        if content.get('alternateLink'):
            attach_pool = self.pool.get("ir.attachment")
            attach_vals = {
                'res_model': res_model,
                'name': name_gdocs,
                'res_id': res_id,
                'type': 'url',
                'url': content['alternateLink']
            }
            res['id'] = attach_pool.create(cr, uid, attach_vals)
            # Commit in order to attach the document to the current object
            # instance, even if the permissions has not been written.
            cr.commit()
            res['url'] = content['alternateLink']
            key = self._get_key_from_url(res['url'])
            request_url = "https://www.googleapis.com/drive/v2/files/%s/permissions?emailMessage=This+is+a+drive+file+created+by+eCore&sendNotificationEmails=false&access_token=%s" % (
                key, access_token)
            # Grant link-based write access to anyone with the URL.
            data = {
                'role': 'writer',
                'type': 'anyone',
                'value': '',
                'withLink': True
            }
            try:
                req = urllib2.Request(request_url, json.dumps(data), headers)
                urllib2.urlopen(req, timeout=TIMEOUT)
            except urllib2.HTTPError:
                raise self.pool.get('res.config.settings').get_config_warning(
                    cr,
                    _("The permission 'reader' for 'anyone with the link' has not been written on the document"),
                    context=context)
            # Best effort: also grant the current user explicit write access.
            user = self.pool['res.users'].browse(cr, uid, uid, context=context)
            if user.email:
                data = {'role': 'writer', 'type': 'user', 'value': user.email}
                try:
                    req = urllib2.Request(request_url, json.dumps(data), headers)
                    urllib2.urlopen(req, timeout=TIMEOUT)
                except urllib2.HTTPError:
                    pass
        return res

    def get_google_drive_config(self, cr, uid, res_model, res_id, context=None):
        '''
        Function called by the js, when no google doc are yet associated with a record, with the aim to create one. It
        will first seek for a google.docs.config associated with the model `res_model` to find out what's the template
        of google doc to copy (this is usefull if you want to start with a non-empty document, a type or a name
        different than the default values). If no config is associated with the `res_model`, then a blank text document
        with a default name is created.
          :param res_model: the object for which the google doc is created
          :param ids: the list of ids of the objects for which the google doc is created. This list is supposed to have
            a length of 1 element only (batch processing is not supported in the code, though nothing really prevent it)
          :return: the config id and config name
        '''
        if not res_id:
            raise UserError(
                _("Creating google drive may only be done by one at a time."))
        # check if a model is configured with a template
        config_ids = self.search(cr, uid, [('model_id', '=', res_model)], context=context)
        configs = []
        for config in self.browse(cr, uid, config_ids, context=context):
            if config.filter_id:
                if (config.filter_id.user_id
                        and config.filter_id.user_id.id != uid):
                    # Private filter of another user: skip.
                    continue
                # NOTE(security): the filter domain/context come from trusted
                # ir.filters records, but they are eval'd here — do not feed
                # untrusted data into these fields.
                domain = [('id', 'in', [res_id])] + eval(config.filter_id.domain)
                local_context = context and context.copy() or {}
                local_context.update(eval(config.filter_id.context))
                google_doc_configs = self.pool.get(
                    config.filter_id.model_id).search(cr, uid, domain,
                                                      context=local_context)
                if google_doc_configs:
                    configs.append({'id': config.id, 'name': config.name})
            else:
                configs.append({'id': config.id, 'name': config.name})
        return configs

    def _get_key_from_url(self, url):
        # Extract the Drive file key from either URL style
        # (legacy '?key=...' or modern '/d/<key>').
        mo = re.search("(key=|/d/)([A-Za-z0-9-_]+)", url)
        if mo:
            return mo.group(2)
        return None

    def _resource_get(self, cr, uid, ids, name, arg, context=None):
        # Function field: the Drive file key parsed out of the template URL.
        result = {}
        for data in self.browse(cr, uid, ids, context):
            mo = self._get_key_from_url(data.google_drive_template_url)
            if mo:
                result[data.id] = mo
            else:
                raise UserError(_("Please enter a valid Google Document URL."))
        return result

    def _client_id_get(self, cr, uid, ids, name, arg, context=None):
        # Function field: expose the configured OAuth client id to the view.
        result = {}
        client_id = self.pool['ir.config_parameter'].get_param(
            cr, SUPERUSER_ID, 'google_drive_client_id')
        for config_id in ids:
            result[config_id] = client_id
        return result

    _columns = {
        'name': fields.char('Template Name', required=True),
        'model_id': fields.many2one('ir.model', 'Model', ondelete='set null', required=True),
        'model': fields.related('model_id', 'model', type='char', string='Model', readonly=True),
        'filter_id': fields.many2one('ir.filters', 'Filter',
                                     domain="[('model_id', '=', model)]"),
        'google_drive_template_url': fields.char('Template URL', required=True, size=1024),
        'google_drive_resource_id': fields.function(_resource_get, type="char", string='Resource Id'),
        'google_drive_client_id': fields.function(_client_id_get, type="char", string='Google Client '),
        'name_template': fields.char(
            'Google Drive Name Pattern',
            help='Choose how the new google drive will be named, on google side. Eg. gdoc_%(field_name)s',
            required=True),
        'active': fields.boolean('Active'),
    }

    def onchange_model_id(self, cr, uid, ids, model_id, context=None):
        # Keep the related 'model' char in sync; clear the filter when the
        # model is removed.
        res = {}
        if model_id:
            model = self.pool['ir.model'].browse(cr, uid, model_id, context=context)
            res['value'] = {'model': model.model}
        else:
            res['value'] = {'filter_id': False, 'model': False}
        return res

    _defaults = {
        'name_template': 'Document %(name)s',
        'active': True,
    }

    def _check_model_id(self, cr, uid, ids, context=None):
        # The filter (if any) must target the same model as the template.
        config_id = self.browse(cr, uid, ids[0], context=context)
        if config_id.filter_id and config_id.model_id.model != config_id.filter_id.model_id:
            return False
        return True

    _constraints = [
        (_check_model_id,
         'Model of selected filter is not matching with model of current template.',
         ['model_id', 'filter_id']),
    ]

    def get_google_scope(self):
        # Scopes required to copy templates and manage the created files.
        return 'https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.file'
class stock_picking_wave(osv.osv):
    """Picking wave: a named batch of stock.picking records processed together."""
    _inherit = "mail.thread"
    _name = "stock.picking.wave"
    _description = "Picking Wave"
    _order = "name desc"

    _columns = {
        'name': fields.char('Picking Wave Name', required=True,
                            help='Name of the picking wave', copy=False),
        'user_id': fields.many2one('res.users', 'Responsible',
                                   track_visibility='onchange',
                                   help='Person responsible for this wave'),
        'picking_ids': fields.one2many('stock.picking', 'wave_id', 'Pickings',
                                       help='List of picking associated to this wave'),
        'state': fields.selection([('draft', 'Draft'),
                                   ('in_progress', 'Running'),
                                   ('done', 'Done'),
                                   ('cancel', 'Cancelled')],
                                  string="State", track_visibility='onchange',
                                  required=True, copy=False),
    }

    _defaults = {
        # '/' is a placeholder replaced by a sequence number in create().
        'name': '/',
        'state': 'draft',
    }

    def confirm_picking(self, cr, uid, ids, context=None):
        """Start the wave and reserve (action_assign) all its pickings."""
        picking_todo = self.pool.get('stock.picking').search(
            cr, uid, [('wave_id', 'in', ids)], context=context)
        self.write(cr, uid, ids, {'state': 'in_progress'}, context=context)
        return self.pool.get('stock.picking').action_assign(cr, uid, picking_todo, context=context)

    def cancel_picking(self, cr, uid, ids, context=None):
        """Cancel all pickings of the wave, then the wave itself."""
        picking_todo = self.pool.get('stock.picking').search(
            cr, uid, [('wave_id', 'in', ids)], context=context)
        self.pool.get('stock.picking').action_cancel(cr, uid, picking_todo, context=context)
        return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)

    def print_picking(self, cr, uid, ids, context=None):
        '''
        This function print the report for all picking_ids associated to the picking wave
        '''
        context = dict(context or {})
        picking_ids = []
        for wave in self.browse(cr, uid, ids, context=context):
            picking_ids += [picking.id for picking in wave.picking_ids]
        if not picking_ids:
            raise UserError(_('Nothing to print.'))
        # The report action reads its targets from the context.
        context['active_ids'] = picking_ids
        context['active_model'] = 'stock.picking'
        return self.pool.get("report").get_action(cr, uid, [], 'stock.report_picking', context=context)

    def create(self, cr, uid, vals, context=None):
        # Replace the '/' placeholder with the next 'picking.wave' sequence.
        if vals.get('name', '/') == '/':
            vals['name'] = self.pool.get('ir.sequence').next_by_code(
                cr, uid, 'picking.wave') or '/'
        return super(stock_picking_wave, self).create(cr, uid, vals, context=context)

    def done(self, cr, uid, ids, context=None):
        """Transfer every assigned picking of the wave and mark the wave done.

        Raises when a picking is still waiting for goods; already done or
        cancelled pickings are skipped.
        """
        picking_todo = set()
        for wave in self.browse(cr, uid, ids, context=context):
            for picking in wave.picking_ids:
                if picking.state in ('cancel', 'done'):
                    continue
                if picking.state != 'assigned':
                    raise UserError(
                        _('Some pickings are still waiting for goods. Please check or force their availability before setting this wave to done.'))
                # Leave a trace on each picking pointing back to the wave.
                message_body = "<b>%s:</b> %s <a href=#id=%s&view_type=form&model=stock.picking.wave>%s</a>" % (
                    _("Transferred by"), _("Picking Wave"), wave.id, wave.name)
                picking.message_post(body=message_body)
                picking_todo.add(picking.id)
        if picking_todo:
            self.pool.get('stock.picking').action_done(cr, uid, list(picking_todo), context=context)
        return self.write(cr, uid, ids, {'state': 'done'}, context=context)

    def _track_subtype(self, cr, uid, ids, init_values, context=None):
        # Use the dedicated subtype when the state changes so followers get
        # the right notification.
        if 'state' in init_values:
            return 'stock_picking_wave.mt_wave_state'
        return super(stock_picking_wave, self)._track_subtype(cr, uid, ids, init_values, context=context)
class project_issue(osv.Model):
    """Project Issue: a trackable ticket/problem attached to a project.

    Inherits mail.thread (chatter) and ir.needaction_mixin; several methods
    below override the mail gateway to create/update issues from e-mails.
    """
    _name = "project.issue"
    _description = "Project Issue"
    _order = "priority desc, create_date desc"
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    # Posting a message only requires read access on the issue.
    _mail_post_access = 'read'

    def _get_default_partner(self, cr, uid, context=None):
        """Default partner: the partner of the project found in the context, if any."""
        if context is None:
            context = {}
        if 'default_project_id' in context:
            project = self.pool.get('project.project').browse(
                cr, uid, context['default_project_id'], context=context)
            if project and project.partner_id:
                return project.partner_id.id
        return False

    def _get_default_stage_id(self, cr, uid, context=None):
        """ Gives default stage_id """
        if context is None:
            context = {}
        # First unfolded stage of the context's default project (if any).
        return self.stage_find(cr, uid, [], context.get('default_project_id'),
                               [('fold', '=', False)], context=context)

    def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None,
                              access_rights_uid=None, context=None):
        """Group-expander for the kanban view: list the stages to show as columns.

        Returns (name_get result, {stage_id: folded}) so empty stages of the
        current project still appear as columns.
        """
        if context is None:
            context = {}
        access_rights_uid = access_rights_uid or uid
        stage_obj = self.pool.get('project.task.type')
        order = stage_obj._order
        # lame hack to allow reverting search, should just work in the trivial case
        if read_group_order == 'stage_id desc':
            order = "%s desc" % order
        # retrieve team_id from the context, add them to already fetched columns (ids)
        if 'default_project_id' in context:
            search_domain = [
                '|', ('project_ids', '=', context['default_project_id']),
                ('id', 'in', ids)
            ]
        else:
            search_domain = [('id', 'in', ids)]
        # perform search
        stage_ids = stage_obj._search(cr, uid, search_domain, order=order,
                                      access_rights_uid=access_rights_uid,
                                      context=context)
        result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context)
        # restore order of the search
        result.sort(
            lambda x, y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
        fold = {}
        for stage in stage_obj.browse(cr, access_rights_uid, stage_ids, context=context):
            fold[stage.id] = stage.fold or False
        return result, fold

    def _compute_day(self, cr, uid, ids, fields, args, context=None):
        """ @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: List of Openday’s IDs
        @return: difference between current date and log date
        @param context: A standard dictionary for contextual values """
        # NOTE(review): the `fields` parameter shadows the `fields` module for
        # the duration of this method; here it is the list of requested
        # function-field names (multi='compute_day').
        Calendar = self.pool['resource.calendar']
        res = dict((res_id, {}) for res_id in ids)
        for issue in self.browse(cr, uid, ids, context=context):
            values = {
                'day_open': 0.0,
                'day_close': 0.0,
                'working_hours_open': 0.0,
                'working_hours_close': 0.0,
                'days_since_creation': 0.0,
                'inactivity_days': 0.0,
            }
            # if the working hours on the project are not defined, use default ones (8 -> 12 and 13 -> 17 * 5), represented by None
            calendar_id = None
            if issue.project_id and issue.project_id.resource_calendar_id:
                calendar_id = issue.project_id.resource_calendar_id.id
            dt_create_date = datetime.strptime(issue.create_date,
                                               DEFAULT_SERVER_DATETIME_FORMAT)
            if issue.date_open:
                dt_date_open = datetime.strptime(
                    issue.date_open, DEFAULT_SERVER_DATETIME_FORMAT)
                # Calendar days (float) between creation and assignation.
                values['day_open'] = (dt_date_open - dt_create_date
                                      ).total_seconds() / (24.0 * 3600)
                values['working_hours_open'] = Calendar._interval_hours_get(
                    cr, uid, calendar_id, dt_create_date, dt_date_open,
                    timezone_from_uid=issue.user_id.id or uid,
                    exclude_leaves=False, context=context)
            if issue.date_closed:
                dt_date_closed = datetime.strptime(
                    issue.date_closed, DEFAULT_SERVER_DATETIME_FORMAT)
                # Calendar days (float) between creation and closing.
                values['day_close'] = (dt_date_closed - dt_create_date
                                       ).total_seconds() / (24.0 * 3600)
                values['working_hours_close'] = Calendar._interval_hours_get(
                    cr, uid, calendar_id, dt_create_date, dt_date_closed,
                    timezone_from_uid=issue.user_id.id or uid,
                    exclude_leaves=False, context=context)
            days_since_creation = datetime.today() - dt_create_date
            values['days_since_creation'] = days_since_creation.days
            # Inactivity: days since the most recent of last action,
            # last stage change, or creation.
            if issue.date_action_last:
                inactive_days = datetime.today() - datetime.strptime(
                    issue.date_action_last, DEFAULT_SERVER_DATETIME_FORMAT)
            elif issue.date_last_stage_update:
                inactive_days = datetime.today() - datetime.strptime(
                    issue.date_last_stage_update, DEFAULT_SERVER_DATETIME_FORMAT)
            else:
                inactive_days = datetime.today() - datetime.strptime(
                    issue.create_date, DEFAULT_SERVER_DATETIME_FORMAT)
            values['inactivity_days'] = inactive_days.days
            # filter only required values
            for field in fields:
                res[issue.id][field] = values[field]
        return res

    def on_change_project(self, cr, uid, ids, project_id, context=None):
        """Onchange: follow the project's partner into partner_id (cleared otherwise)."""
        if project_id:
            project = self.pool.get('project.project').browse(cr, uid, project_id,
                                                              context=context)
            if project and project.partner_id:
                return {'value': {'partner_id': project.partner_id.id}}
        return {'value': {'partner_id': False}}

    _columns = {
        'id': fields.integer('ID', readonly=True),
        'name': fields.char('Issue', required=True),
        'active': fields.boolean('Active', required=False),
        'create_date': fields.datetime('Creation Date', readonly=True, select=True),
        'write_date': fields.datetime('Update Date', readonly=True),
        'days_since_creation': fields.function(_compute_day, string='Days since creation date', \
                                               multi='compute_day', type="integer", help="Difference in days between creation date and current date"),
        'date_deadline': fields.date('Deadline'),
        'team_id': fields.many2one('crm.team', 'Sales Team', oldname='section_id',\
                        select=True, help='Sales team to which Case belongs to.\
                             Define Responsible user and Email account for mail gateway.'),
        'partner_id': fields.many2one('res.partner', 'Contact', select=1),
        'company_id': fields.many2one('res.company', 'Company'),
        'description': fields.text('Private Note'),
        'kanban_state': fields.selection([('normal', 'Normal'),('blocked', 'Blocked'),('done', 'Ready for next stage')], 'Kanban State',
                                         track_visibility='onchange',
                                         help="A Issue's kanban state indicates special situations affecting it:\n"
                                              " * Normal is the default situation\n"
                                              " * Blocked indicates something is preventing the progress of this issue\n"
                                              " * Ready for next stage indicates the issue is ready to be pulled to the next stage",
                                         required=True),
        'email_from': fields.char('Email', size=128, help="These people will receive email.", select=1),
        'email_cc': fields.char('Watchers Emails', size=256, help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. Separate multiple email addresses with a comma"),
        'date_open': fields.datetime('Assigned', readonly=True, select=True),
        # Project Issue fields
        'date_closed': fields.datetime('Closed', readonly=True, select=True),
        'date': fields.datetime('Date'),
        'date_last_stage_update': fields.datetime('Last Stage Update', select=True),
        'channel': fields.char('Channel', help="Communication channel."),
        'tag_ids': fields.many2many('project.tags', string='Tags'),
        'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority', select=True),
        'stage_id': fields.many2one ('project.task.type', 'Stage',
                                     track_visibility='onchange', select=True,
                                     domain="[('project_ids', '=', project_id)]", copy=False),
        'project_id': fields.many2one('project.project', 'Project', track_visibility='onchange', select=True),
        'duration': fields.float('Duration'),
        'task_id': fields.many2one('project.task', 'Task', domain="[('project_id','=',project_id)]",
                                   help="You can link this issue to an existing task or directly create a new one from here"),
        # The four stored function fields below all share _compute_day
        # (multi='compute_day') and are recomputed when their trigger field changes.
        'day_open': fields.function(_compute_day, string='Days to Assign',
                                    multi='compute_day', type="float",
                                    store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_open'], 10)}),
        'day_close': fields.function(_compute_day, string='Days to Close',
                                     multi='compute_day', type="float",
                                     store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_closed'], 10)}),
        'user_id': fields.many2one('res.users', 'Assigned to', required=False, select=1, track_visibility='onchange'),
        'working_hours_open': fields.function(_compute_day, string='Working Hours to assign the Issue',
                                              multi='compute_day', type="float",
                                              store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_open'], 10)}),
        'working_hours_close': fields.function(_compute_day, string='Working Hours to close the Issue',
                                               multi='compute_day', type="float",
                                               store={'project.issue': (lambda self, cr, uid, ids, c={}: ids, ['date_closed'], 10)}),
        'inactivity_days': fields.function(_compute_day, string='Days since last action',
                                           multi='compute_day', type="integer", help="Difference in days between last action and current date"),
        'color': fields.integer('Color Index'),
        'user_email': fields.related('user_id', 'email', type='char', string='User Email', readonly=True),
        'date_action_last': fields.datetime('Last Action', readonly=1),
        'date_action_next': fields.datetime('Next Action', readonly=1),
        'legend_blocked': fields.related("stage_id", "legend_blocked", type="char", string='Kanban Blocked Explanation'),
        'legend_done': fields.related("stage_id", "legend_done", type="char", string='Kanban Valid Explanation'),
        'legend_normal': fields.related("stage_id", "legend_normal", type="char", string='Kanban Ongoing Explanation'),
    }

    _defaults = {
        'active': 1,
        'team_id': lambda s, cr, uid, c: s.pool['crm.team']._get_default_team_id(
            cr, uid, context=c),
        'stage_id': lambda s, cr, uid, c: s._get_default_stage_id(cr, uid, c),
        'company_id': lambda s, cr, uid, c: s.pool['res.users']._get_company(
            cr, uid, context=c),
        'priority': '0',
        'kanban_state': 'normal',
        'date_last_stage_update': fields.datetime.now,
        'user_id': lambda obj, cr, uid, context: uid,
    }

    # Kanban grouping on stage uses the custom expander defined above.
    _group_by_full = {'stage_id': _read_group_stage_ids}

    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate an issue, suffixing the name with '(copy)'."""
        issue = self.read(cr, uid, [id], ['name'], context=context)[0]
        if not default:
            default = {}
        default = default.copy()
        default.update(name=_('%s (copy)') % (issue['name']))
        return super(project_issue, self).copy(cr, uid, id, default=default,
                                               context=context)

    def create(self, cr, uid, vals, context=None):
        """Create an issue; propagate project to the context, stamp date_open
        on assignation and apply the stage onchange side effects."""
        context = dict(context or {})
        if vals.get('project_id') and not context.get('default_project_id'):
            context['default_project_id'] = vals.get('project_id')
        if vals.get('user_id') and not vals.get('date_open'):
            vals['date_open'] = fields.datetime.now()
        if 'stage_id' in vals:
            vals.update(
                self.onchange_stage_id(cr, uid, None, vals.get('stage_id'),
                                       context=context)['value'])
        # context: no_log, because subtype already handle this
        create_context = dict(context, mail_create_nolog=True)
        return super(project_issue, self).create(cr, uid, vals,
                                                 context=create_context)

    def write(self, cr, uid, ids, vals, context=None):
        """Write override: maintain date_last_stage_update, kanban_state
        reset on stage change, and date_open on (re)assignation."""
        # stage change: update date_last_stage_update
        if 'stage_id' in vals:
            vals.update(
                self.onchange_stage_id(cr, uid, ids, vals.get('stage_id'),
                                       context=context)['value'])
            vals['date_last_stage_update'] = fields.datetime.now()
            if 'kanban_state' not in vals:
                vals['kanban_state'] = 'normal'
        # user_id change: update date_open
        if vals.get('user_id') and 'date_open' not in vals:
            vals['date_open'] = fields.datetime.now()
        return super(project_issue, self).write(cr, uid, ids, vals, context)

    def onchange_task_id(self, cr, uid, ids, task_id, context=None):
        """Onchange: copy the linked task's assignee onto the issue."""
        if not task_id:
            return {'value': {}}
        task = self.pool.get('project.task').browse(cr, uid, task_id,
                                                    context=context)
        return {
            'value': {
                'user_id': task.user_id.id,
            }
        }

    def onchange_partner_id(self, cr, uid, ids, partner_id, context=None):
        """ This function returns value of partner email address based on partner
        :param part: Partner's id """
        if partner_id:
            partner = self.pool['res.partner'].browse(cr, uid, partner_id, context)
            return {'value': {'email_from': partner.email}}
        return {'value': {'email_from': False}}

    def get_empty_list_help(self, cr, uid, help, context=None):
        """Customize the no-content helper shown on empty issue lists."""
        context = dict(context or {})
        context['empty_list_help_model'] = 'project.project'
        context['empty_list_help_id'] = context.get('default_project_id')
        context['empty_list_help_document_name'] = _("issues")
        return super(project_issue, self).get_empty_list_help(cr, uid, help,
                                                              context=context)

    # -------------------------------------------------------
    # Stage management
    # -------------------------------------------------------

    def onchange_stage_id(self, cr, uid, ids, stage_id, context=None):
        """Onchange: set/clear date_closed depending on whether the new stage is folded."""
        if not stage_id:
            return {'value': {}}
        stage = self.pool['project.task.type'].browse(cr, uid, stage_id,
                                                      context=context)
        if stage.fold:
            return {'value': {'date_closed': fields.datetime.now()}}
        return {'value': {'date_closed': False}}

    def stage_find(self, cr, uid, cases, team_id, domain=[], order='sequence',
                   context=None):
        """ Override of the base.stage method
            Parameter of the stage search taken from the issue:
            - type: stage type must be the same or 'both'
            - team_id: if set, stages must belong to this team or
              be a default case
        """
        # NOTE(review): despite the 'team' naming, the ids collected below are
        # matched against the stage's project_ids — they behave as project ids.
        if isinstance(cases, (int, long)):
            cases = self.browse(cr, uid, cases, context=context)
        # collect all team_ids
        team_ids = []
        if team_id:
            team_ids.append(team_id)
        for task in cases:
            if task.project_id:
                team_ids.append(task.project_id.id)
        # OR all team_ids and OR with case_default
        search_domain = []
        if team_ids:
            search_domain += [('|')] * (len(team_ids) - 1)
            for team_id in team_ids:
                search_domain.append(('project_ids', '=', team_id))
        search_domain += list(domain)
        # perform search, return the first found
        stage_ids = self.pool.get('project.task.type').search(cr, uid,
                                                              search_domain,
                                                              order=order,
                                                              context=context)
        if stage_ids:
            return stage_ids[0]
        return False

    # -------------------------------------------------------
    # Mail gateway
    # -------------------------------------------------------

    def _track_subtype(self, cr, uid, ids, init_values, context=None):
        """Pick the mail subtype matching the change recorded in init_values.

        Only the first record of ids is inspected (tracking is per record).
        """
        record = self.browse(cr, uid, ids[0], context=context)
        if 'kanban_state' in init_values and record.kanban_state == 'blocked':
            return 'project_issue.mt_issue_blocked'
        elif 'kanban_state' in init_values and record.kanban_state == 'done':
            return 'project_issue.mt_issue_ready'
        elif 'user_id' in init_values and record.user_id:  # assigned -> new
            return 'project_issue.mt_issue_new'
        elif 'stage_id' in init_values and record.stage_id and record.stage_id.sequence <= 1:  # start stage -> new
            return 'project_issue.mt_issue_new'
        elif 'stage_id' in init_values:
            return 'project_issue.mt_issue_stage'
        return super(project_issue, self)._track_subtype(cr, uid, ids,
                                                         init_values,
                                                         context=context)

    def _notification_group_recipients(self, cr, uid, ids, message, recipients,
                                       done_ids, group_data, context=None):
        """ Override the mail.thread method to handle project users and officers
        recipients. Indeed those will have specific action in their notification
        emails: creating tasks, assigning it. """
        group_project_user = self.pool['ir.model.data'].xmlid_to_res_id(
            cr, uid, 'project.group_project_user')
        for recipient in recipients:
            if recipient.id in done_ids:
                continue
            if recipient.user_ids and group_project_user in recipient.user_ids[
                    0].groups_id.ids:
                group_data['group_project_user'] |= recipient
                done_ids.add(recipient.id)
        return super(project_issue, self)._notification_group_recipients(
            cr, uid, ids, message, recipients, done_ids, group_data,
            context=context)

    def _notification_get_recipient_groups(self, cr, uid, ids, message,
                                           recipients, context=None):
        """Attach 'take it' / 'new issue' action links for project users.

        Only the first record of ids is inspected for the assignment check.
        """
        res = super(project_issue, self)._notification_get_recipient_groups(
            cr, uid, ids, message, recipients, context=context)
        new_action_id = self.pool['ir.model.data'].xmlid_to_res_id(
            cr, uid, 'project_issue.project_issue_categ_act0')
        take_action = self._notification_link_helper(cr, uid, ids, 'assign',
                                                     context=context)
        new_action = self._notification_link_helper(cr, uid, ids, 'new',
                                                    context=context,
                                                    action_id=new_action_id)
        task_record = self.browse(cr, uid, ids[0], context=context)
        actions = []
        if not task_record.user_id:
            actions.append({'url': take_action, 'title': _('I take it')})
        else:
            actions.append({'url': new_action, 'title': _('New Issue')})
        res['group_project_user'] = {'actions': actions}
        return res

    @api.cr_uid_context
    def message_get_reply_to(self, cr, uid, ids, default=None, context=None):
        """ Override to get the reply_to of the parent project. """
        # Browsed as superuser: the reply-to alias must resolve even when the
        # current user cannot read the project.
        issues = self.browse(cr, SUPERUSER_ID, ids, context=context)
        project_ids = set(
            [issue.project_id.id for issue in issues if issue.project_id])
        aliases = self.pool['project.project'].message_get_reply_to(
            cr, uid, list(project_ids), default=default, context=context)
        return dict(
            (issue.id,
             aliases.get(issue.project_id and issue.project_id.id or 0, False))
            for issue in issues)

    def message_get_suggested_recipients(self, cr, uid, ids, context=None):
        """Suggest the customer (partner or raw email) as follower candidate."""
        recipients = super(project_issue, self).message_get_suggested_recipients(
            cr, uid, ids, context=context)
        try:
            for issue in self.browse(cr, uid, ids, context=context):
                if issue.partner_id:
                    issue._message_add_suggested_recipient(
                        recipients, partner=issue.partner_id,
                        reason=_('Customer'))
                elif issue.email_from:
                    issue._message_add_suggested_recipient(
                        recipients, email=issue.email_from,
                        reason=_('Customer Email'))
        except AccessError:  # no read access rights -> just ignore suggested recipients because this imply modifying followers
            pass
        return recipients

    def email_split(self, cr, uid, ids, msg, context=None):
        """Split To/Cc addresses of msg, dropping the projects' own aliases."""
        email_list = tools.email_split((msg.get('to') or '') + ',' +
                                       (msg.get('cc') or ''))
        # check left-part is not already an alias
        issue_ids = self.browse(cr, uid, ids, context=context)
        aliases = [
            issue.project_id.alias_name for issue in issue_ids
            if issue.project_id
        ]
        return filter(lambda x: x.split('@')[0] not in aliases, email_list)

    def message_new(self, cr, uid, msg, custom_values=None, context=None):
        """ Overrides mail_thread message_new that is called by the mailgateway
            through message_process.
            This override updates the document according to the email. """
        if custom_values is None:
            custom_values = {}
        context = dict(context or {}, state_to='draft')
        defaults = {
            'name': msg.get('subject') or _("No Subject"),
            'email_from': msg.get('from'),
            'email_cc': msg.get('cc'),
            'partner_id': msg.get('author_id', False),
            'user_id': False,
        }
        defaults.update(custom_values)
        res_id = super(project_issue, self).message_new(cr, uid, msg,
                                                        custom_values=defaults,
                                                        context=context)
        # Subscribe every non-alias correspondent (partners are created on the fly).
        email_list = self.email_split(cr, uid, [res_id], msg, context=context)
        partner_ids = self._find_partner_from_emails(cr, uid, [res_id],
                                                     email_list,
                                                     force_create=True,
                                                     context=context)
        self.message_subscribe(cr, uid, [res_id], partner_ids, context=context)
        return res_id

    def message_update(self, cr, uid, ids, msg, update_vals=None, context=None):
        """ Override to update the issue according to the email. """
        email_list = self.email_split(cr, uid, ids, msg, context=context)
        partner_ids = self._find_partner_from_emails(cr, uid, ids, email_list,
                                                     force_create=True,
                                                     context=context)
        self.message_subscribe(cr, uid, ids, partner_ids, context=context)
        return super(project_issue, self).message_update(cr, uid, ids, msg,
                                                         update_vals=update_vals,
                                                         context=context)

    @api.cr_uid_ids_context
    @api.returns('mail.message', lambda value: value.id)
    def message_post(self, cr, uid, thread_id, subtype=None, context=None,
                     **kwargs):
        """ Overrides mail_thread message_post so that we can set the date of
        last action field when a new message is posted on the issue. """
        if context is None:
            context = {}
        res = super(project_issue, self).message_post(cr, uid, thread_id,
                                                      subtype=subtype,
                                                      context=context, **kwargs)
        if thread_id and subtype:
            # Superuser write: posting requires only read access (_mail_post_access).
            self.write(cr, SUPERUSER_ID, thread_id,
                       {'date_action_last': fields.datetime.now()},
                       context=context)
        return res
class stock_history(osv.osv):
    """Read-only reporting model backed by the SQL view built in init().

    Each row is one quant movement into (+qty) or out of (-qty) internal/transit
    stock; inventory_value prices it at the historical cost.
    """
    _name = 'stock.history'
    _auto = False  # table is a view, not managed by the ORM
    _order = 'date asc'

    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None,
                   context=None, orderby=False, lazy=True):
        """Extend read_group to aggregate 'inventory_value' per group.

        The value cannot be SUMmed by SQL because each line must be priced by
        its product's cost method ('real' -> quant cost, otherwise the cost
        recorded in product_price_history at context['history_date']).
        """
        res = super(stock_history, self).read_group(cr, uid, domain, fields,
                                                    groupby, offset=offset,
                                                    limit=limit,
                                                    context=context,
                                                    orderby=orderby, lazy=lazy)
        if context is None:
            context = {}
        date = context.get('history_date', datetime.now())
        if 'inventory_value' in fields:
            # Map each group's domain (stringified, as dict key) to its line ids.
            group_lines = {}
            for line in res:
                domain = line.get('__domain', [])
                group_lines.setdefault(
                    str(domain), self.search(cr, uid, domain, context=context))
            line_ids = set()
            for ids in group_lines.values():
                for product_id in ids:
                    line_ids.add(product_id)
            line_ids = list(line_ids)
            lines_rec = {}
            if line_ids:
                cr.execute(
                    'SELECT id, product_id, price_unit_on_quant, company_id, quantity FROM stock_history WHERE id in %s',
                    (tuple(line_ids), ))
                lines_rec = cr.dictfetchall()
            lines_dict = dict((line['id'], line) for line in lines_rec)
            product_ids = list(
                set(line_rec['product_id'] for line_rec in lines_rec))
            products_rec = self.pool['product.product'].read(
                cr, uid, product_ids, ['cost_method', 'id'], context=context)
            products_dict = dict(
                (product['id'], product) for product in products_rec)
            # Products not priced per quant need a historical cost lookup.
            cost_method_product_ids = list(
                set(product['id'] for product in products_rec
                    if product['cost_method'] != 'real'))
            histories = []
            if cost_method_product_ids:
                # DISTINCT ON + ORDER BY ... datetime DESC keeps, per
                # (product, company), the latest cost at or before `date`.
                cr.execute(
                    'SELECT DISTINCT ON (product_id, company_id) product_id, company_id, cost FROM product_price_history WHERE product_id in %s AND datetime <= %s ORDER BY product_id, company_id, datetime DESC',
                    (tuple(cost_method_product_ids), date))
                histories = cr.dictfetchall()
            histories_dict = {}
            for history in histories:
                histories_dict[(history['product_id'],
                                history['company_id'])] = history['cost']
            # Price every line of every group and sum per group.
            for line in res:
                inv_value = 0.0
                lines = group_lines.get(str(line.get('__domain', [])))
                for line_id in lines:
                    line_rec = lines_dict[line_id]
                    product = products_dict[line_rec['product_id']]
                    if product['cost_method'] == 'real':
                        price = line_rec['price_unit_on_quant']
                    else:
                        price = histories_dict.get(
                            (product['id'], line_rec['company_id']), 0.0)
                    inv_value += price * line_rec['quantity']
                line['inventory_value'] = inv_value
        return res

    def _get_inventory_value(self, cr, uid, ids, name, attr, context=None):
        """Function-field getter: quantity * unit cost per line.

        'real' cost method uses the quant cost; otherwise the product's
        historical price at context['history_date'] (or now when unset).
        """
        if context is None:
            context = {}
        date = context.get('history_date')
        product_obj = self.pool.get("product.product")
        res = {}
        for line in self.browse(cr, uid, ids, context=context):
            if line.product_id.cost_method == 'real':
                res[line.id] = line.quantity * line.price_unit_on_quant
            else:
                res[line.id] = line.quantity * product_obj.get_history_price(
                    cr, uid, line.product_id.id, line.company_id.id, date=date,
                    context=context)
        return res

    _columns = {
        'move_id': fields.many2one('stock.move', 'Stock Move', required=True),
        'location_id': fields.many2one('stock.location', 'Location', required=True),
        'company_id': fields.many2one('res.company', 'Company'),
        'product_id': fields.many2one('product.product', 'Product', required=True),
        'product_categ_id': fields.many2one('product.category', 'Product Category', required=True),
        'quantity': fields.float('Product Quantity'),
        'date': fields.datetime('Operation Date'),
        'price_unit_on_quant': fields.float('Value'),
        'inventory_value': fields.function(_get_inventory_value, string="Inventory Value", type='float', readonly=True),
        'source': fields.char('Source'),
        'product_template_id': fields.many2one('product.template', 'Product Template', required=True),
        'serial_number': fields.char('Serial Number', required=True),
    }

    def init(self, cr):
        # (Re)create the backing view: moves INTO internal/transit stock count
        # positive (id = move id), moves OUT count negative (id = -move id).
        tools.drop_view_if_exists(cr, 'stock_history')
        cr.execute("""
            CREATE OR REPLACE VIEW stock_history AS (
              SELECT MIN(id) as id,
                move_id,
                location_id,
                company_id,
                product_id,
                product_categ_id,
                product_template_id,
                SUM(quantity) as quantity,
                date,
                price_unit_on_quant,
                source,
                serial_number
                FROM
                ((SELECT
                    stock_move.id AS id,
                    stock_move.id AS move_id,
                    dest_location.id AS location_id,
                    dest_location.company_id AS company_id,
                    stock_move.product_id AS product_id,
                    product_template.id AS product_template_id,
                    product_template.categ_id AS product_categ_id,
                    quant.qty AS quantity,
                    stock_move.date AS date,
                    quant.cost as price_unit_on_quant,
                    stock_move.origin AS source,
                    stock_production_lot.name AS serial_number
                FROM
                    stock_quant as quant
                JOIN
                    stock_quant_move_rel ON stock_quant_move_rel.quant_id = quant.id
                JOIN
                    stock_move ON stock_move.id = stock_quant_move_rel.move_id
                LEFT JOIN
                    stock_production_lot ON stock_production_lot.id = quant.lot_id
                JOIN
                    stock_location dest_location ON stock_move.location_dest_id = dest_location.id
                JOIN
                    stock_location source_location ON stock_move.location_id = source_location.id
                JOIN
                    product_product ON product_product.id = stock_move.product_id
                JOIN
                    product_template ON product_template.id = product_product.product_tmpl_id
                WHERE quant.qty>0 AND stock_move.state = 'done' AND dest_location.usage in ('internal', 'transit')
                AND (
                    (source_location.company_id is null and dest_location.company_id is not null) or
                    (source_location.company_id is not null and dest_location.company_id is null) or
                    source_location.company_id != dest_location.company_id or
                    source_location.usage not in ('internal', 'transit'))
                ) UNION ALL
                (SELECT
                    (-1) * stock_move.id AS id,
                    stock_move.id AS move_id,
                    source_location.id AS location_id,
                    source_location.company_id AS company_id,
                    stock_move.product_id AS product_id,
                    product_template.id AS product_template_id,
                    product_template.categ_id AS product_categ_id,
                    - quant.qty AS quantity,
                    stock_move.date AS date,
                    quant.cost as price_unit_on_quant,
                    stock_move.origin AS source,
                    stock_production_lot.name AS serial_number
                FROM
                    stock_quant as quant
                JOIN
                    stock_quant_move_rel ON stock_quant_move_rel.quant_id = quant.id
                JOIN
                    stock_move ON stock_move.id = stock_quant_move_rel.move_id
                LEFT JOIN
                    stock_production_lot ON stock_production_lot.id = quant.lot_id
                JOIN
                    stock_location source_location ON stock_move.location_id = source_location.id
                JOIN
                    stock_location dest_location ON stock_move.location_dest_id = dest_location.id
                JOIN
                    product_product ON product_product.id = stock_move.product_id
                JOIN
                    product_template ON product_template.id = product_product.product_tmpl_id
                WHERE quant.qty>0 AND stock_move.state = 'done' AND source_location.usage in ('internal', 'transit')
                AND (
                    (dest_location.company_id is null and source_location.company_id is not null) or
                    (dest_location.company_id is not null and source_location.company_id is null) or
                    dest_location.company_id != source_location.company_id or
                    dest_location.usage not in ('internal', 'transit'))
                ))
                AS foo
                GROUP BY move_id, location_id, company_id, product_id, product_categ_id, date, price_unit_on_quant, source, product_template_id, serial_number
            )""")
    # NOTE(review): tail of a function truncated at this chunk's boundary —
    # its `def` line is not visible here. Presumably the body of a selection
    # callable (see `selection_fn` referenced in `models` below); kept verbatim.
    return list(enumerate(["Corge", "Grault", "Wheee", "Moog"]))


def function_fn(model, cr, uid, ids, field_name, arg, context):
    # Function-field getter used by the export test models: maps every
    # requested id to the constant 3.
    return dict((id, 3) for id in ids)


def function_fn_write(model, cr, uid, id, field_name, field_value,
                      fnct_inv_arg, context):
    """ just so CreatorCase.export can be used """
    # Intentional no-op inverse function for the fields.function below.
    pass


# (field-name, field-definition) pairs used to build one test model per entry.
# NOTE(review): this list literal is truncated at the chunk boundary — it is
# closed somewhere past the last visible line.
models = [
    ('boolean', fields.boolean()),
    ('integer', fields.integer()),
    ('float', fields.float()),
    ('decimal', fields.float(digits=(16, 3))),
    ('string.bounded', fields.char('unknown', size=16)),
    ('string.required', fields.char('unknown', size=None, required=True)),
    ('string', fields.char('unknown', size=None)),
    ('date', fields.date()),
    ('datetime', fields.datetime()),
    ('text', fields.text()),
    ('selection', fields.selection([(1, "Foo"), (2, "Bar"), (3, "Qux"), (4, '')])),
    # here use size=-1 to store the values as integers instead of strings
    ('selection.function', fields.selection(selection_fn, size=-1)),
    # just relate to an integer
    ('many2one', fields.many2one('export.integer')),
    ('one2many', fields.one2many('export.one2many.child', 'parent_id')),
    ('many2many', fields.many2many('export.many2many.other')),
    ('function', fields.function(function_fn, fnct_inv=function_fn_write, type="integer")),
    # related: specialization of fields.function, should work the same way
    # TODO: reference