class base_partner_merge(osv.osv_memory):
    """Merge two partners"""
    _name = 'base.partner.merge'
    _description = 'Merges two partners'
    _columns = {
        'container': fields.serialized('Fields Container'),
    }
    _values = {}

    def _build_form(self, cr, uid, field_datas, value1, value2):
        update_values = {}
        update_fields = {}
        columns = {}
        form_grp = etree.Element('group', colspan="4", col="2")
        orm.setup_modifiers(form_grp)
        for fid, fname, fdescription, ttype, required, relation, readonly in field_datas:
            val1 = value1[fname]
            val2 = value2[fname]
            my_selection = []
            size = 24
            if (val1 and val2) and (val1 == val2):
                if ttype == 'many2one':
                    update_values.update({fname: val1.id})
                elif ttype == 'many2many':
                    update_values.update(
                        {fname: [(6, 0, map(lambda x: x.id, val1))]})
                else:
                    update_values.update({fname: val1})
            if (val1 and val2) and (val1 != val2) and not readonly:
                if ttype in ('char', 'text', 'selection'):
                    my_selection = [(val1, val1), (val2, val2)]
                    size = max(len(val1), len(val2))
                if ttype in ('float', 'integer'):
                    my_selection = [(str(val1), str(val1)),
                                    (str(val2), str(val2))]
                if ttype == 'many2one':
                    my_selection = [(str(val1.id), val1.name),
                                    (str(val2.id), val2.name)]
                if ttype == 'many2many':
                    update_values.update({
                        fname: [(6, 0, list(set(map(lambda x: x.id,
                                                    val1 + val2))))],
                    })
                if my_selection:
                    if not required:
                        my_selection.append((False, ''))
                    columns.update({
                        fname: fields.sparse('container',
                                             type='selection',
                                             selection=my_selection,
                                             string=fdescription,
                                             required=required,
                                             size=size),
                    })
                    update_fields.update({
                        fname: {
                            'string': fdescription,
                            'type': 'selection',
                            'selection': my_selection,
                            'required': required,
                        }
                    })
                    f = etree.SubElement(form_grp, 'field', name=fname)
                    orm.setup_modifiers(f, field=update_fields[fname])
            if fname == 'ref':
                if val1 == False or val2 == False:
                    update_values.update({fname: val1 or val2})
                else:
                    my_selection = [(val1, val1), (val2, val2)]
                    size = max(len(val1), len(val2))
                    if my_selection:
                        columns.update({
                            fname: fields.sparse('container',
                                                 type='selection',
                                                 selection=my_selection,
                                                 string=fdescription,
                                                 required=required,
                                                 size=size),
                        })
                        update_fields.update({
                            fname: {
                                'string': fdescription,
                                'type': 'selection',
                                'selection': my_selection,
                                'required': required,
                            }
                        })
                        f = etree.SubElement(form_grp, 'field', name=fname)
                        orm.setup_modifiers(f, field=update_fields[fname])
            if (val1 and not val2) or (not val1 and val2):
                if ttype == 'many2one':
                    update_values.update(
                        {fname: val1 and val1.id or val2 and val2.id})
                elif ttype == 'many2many':
                    update_values.update(
                        {fname: [(6, 0, map(lambda x: x.id, val1 or val2))]})
                elif ttype == 'one2many':
                    # skip one2many values
                    pass
                else:
                    update_values.update({fname: val1 or val2})
        return form_grp, update_fields, update_values, columns

    def check_partners(self, cr, uid, partner_ids, context):
        """Check validity of the selected partners.

        Inherit to add other checks.
        """
        if not len(partner_ids) == 2:
            raise osv.except_osv(_('Error!'),
                                 _('You must select exactly two partners'))
        return True

    def fields_view_get(self, cr, uid, view_id=None, view_type='form',
                        context=None, toolbar=False, submenu=False):
        if context is None:
            context = {}
        res = super(base_partner_merge, self).fields_view_get(
            cr, uid, view_id, view_type, context=context,
            toolbar=toolbar, submenu=submenu)
        partner_ids = context.get('active_ids') or []
        self.check_partners(cr, uid, partner_ids, context)
        if not len(partner_ids) == 2:
            return res
        partner_obj = self.pool.get('res.partner')
        cr.execute("SELECT id, name, field_description, ttype, "
                   "required, relation, readonly "
                   "FROM ir_model_fields "
                   "WHERE model = 'res.partner'")
        field_datas = cr.fetchall()
        partner1 = partner_obj.browse(cr, uid, partner_ids[0], context=context)
        partner2 = partner_obj.browse(cr, uid, partner_ids[1], context=context)
        form_xml, merge_fields, self._values, columns = self._build_form(
            cr, uid, field_datas, partner1, partner2)
        self._columns.update(columns)
        eview = etree.fromstring(res['arch'])
        placeholder = eview.xpath("//label[@string='placeholder']")[0]
        placeholder.getparent().replace(placeholder, form_xml)
        sep_diff = eview.xpath("//separator[@name='sep_diff']")[0]
        if merge_fields:
            sep_txt = "Select which data to use for the new record"
        else:
            sep_txt = "Merge Records"
        sep_diff.set('string', _(sep_txt))
        res['arch'] = etree.tostring(eview, pretty_print=True)
        res['fields'] = merge_fields
        return res

    def cast_many2one_fields(self, cr, uid, data_record, context=None):
        """Cast many2one values to integers.

        Some fields are many2one and the ORM expects an integer id for
        them. As those fields are displayed as selections in the view
        (and thus read back as strings), we cast them to integers.
        """
        cr.execute("SELECT name "
                   "FROM ir_model_fields "
                   "WHERE model = 'res.partner' "
                   "AND ttype = 'many2one'")
        field_names = cr.fetchall()
        for field in field_names:
            if data_record.get(field[0], False):
                data_record[field[0]] = int(data_record[field[0]])
        return data_record

    def action_merge(self, cr, uid, ids, context=None):
        """Merge two partners: create a third one and update the
        references of the old partners to the new one.

        @param self: the object pointer
        @param cr: the current row, from the database cursor
        @param uid: the current user's ID for security checks
        @param ids: id of the wizard
        @param context: a standard dictionary for contextual values
        @return: empty dict
        """
        record_id = context and context.get('active_id', False) or False
        pool = self.pool
        if not record_id:
            return {}
        res = self.read(cr, uid, ids, context=context)[0]
        res.update(self._values)
        partner_pool = pool.get('res.partner')
        partner_ids = context.get('active_ids') or []
        if not len(partner_ids) == 2:
            raise osv.except_osv(_('Error!'),
                                 _('You must select exactly two partners'))
        self.check_partners(cr, uid, partner_ids, context)
        part1 = partner_ids[0]
        part2 = partner_ids[1]

        remove_field = {}
        unique_fields = []
        if hasattr(partner_pool, '_sql_constraints'):
            # for uniqueness constraints (vat number for example)...
            c_names = []
            for const in partner_pool._sql_constraints:
                c_names.append('res_partner_' + const[0])
            if c_names:
                # quote and join the constraint names so the IN clause
                # works whatever the number of constraints
                c_names = map(lambda x: "'" + x + "'", c_names)
                cr.execute("SELECT column_name "
                           "FROM information_schema.constraint_column_usage u "
                           "JOIN pg_constraint p "
                           "ON (p.conname = u.constraint_name) "
                           "WHERE u.constraint_name IN (%s) "
                           "AND p.contype = 'u'" % ", ".join(c_names))
                for i in cr.fetchall():
                    remove_field[i[0]] = None
                    unique_fields.append(i[0])
        unique_fields.append('name')
        unique_data = partner_pool.read(cr, uid, [part1, part2], unique_fields)
        str_unq = '---------------------------------------\n'
        for u in unique_data:
            for key, value in u.items():
                if key == 'id' or not value:
                    continue
                str_unq += key + ': ' + str(value) + '\n'
        if res.get('comment'):
            res['comment'] += '\n' + str_unq
        else:
            res['comment'] = str_unq
        list_partn = [part1, part2]
        count_default_address = 0
        for partner in partner_pool.browse(cr, uid, list_partn):
            for address in partner.address:
                if address.type == 'default':
                    count_default_address += 1
        if count_default_address > 1:
            raise osv.except_osv(
                _('Error!'),
                _('You have more than one default type in your addresses.\n'
                  'Please change it and try again!'))
        remove_field.update({'active': False})
        try:
            partner_pool.write(cr, uid, [part1, part2], remove_field,
                               context=context)
        except Exception:
            raise osv.except_osv(
                _('Error!'),
                _('You have to change the type of the default address of one '
                  'of the partners to avoid having two default addresses'))
        res = self.cast_many2one_fields(cr, uid, res, context)
        part_id = partner_pool.create(cr, uid, res, context=context)
        self.custom_updates(cr, uid, part_id, [part1, part2], context)
        # Repoint many2one fields referencing res.partner on other models
        cr.execute(
            "SELECT IMF.name, IMF.model "
            "FROM ir_model_fields IMF "
            "INNER JOIN pg_catalog.pg_class PGC "
            "ON PGC.relname = replace(IMF.model, '.', '_') "
            "WHERE IMF.relation = 'res.partner' "
            "AND IMF.ttype NOT IN ('many2many', 'one2many') "
            "AND PGC.relkind = 'r';")
        for name, model_raw in cr.fetchall():
            if hasattr(pool.get(model_raw), '_check_time'):
                continue
            if hasattr(pool.get(model_raw), '_columns'):
                if pool.get(model_raw)._columns.get(name, False) \
                        and isinstance(pool.get(model_raw)._columns[name],
                                       fields.many2one):
                    model = model_raw.replace('.', '_')
                    if name not in ('relation_partner_answer',):
                        cr.execute("UPDATE " + model +
                                   " SET " + name + "=" + str(part_id) +
                                   " WHERE " + tools.ustr(name) + " IN (" +
                                   tools.ustr(part1) + ", " +
                                   tools.ustr(part2) + ")")
        return {}

    def custom_updates(self, cr, uid, partner_id, old_partner_ids, context):
        """Hook for special updates on the old partners and the new partner"""
        pass
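
# Illustrative sketch (assumption, not part of the original module): the
# wizard above expects to be opened from the partner list with exactly two
# partners selected, i.e. with 'active_ids' in the context. A server-side
# call would look roughly like the commented example below; the variable
# names are hypothetical.
#
#   wizard_obj = self.pool.get('base.partner.merge')
#   ctx = dict(context or {}, active_model='res.partner',
#              active_id=partner_id_1,
#              active_ids=[partner_id_1, partner_id_2])
#   wiz_id = wizard_obj.create(cr, uid, {}, context=ctx)
#   wizard_obj.action_merge(cr, uid, [wiz_id], context=ctx)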
class report_print_actions(osv.osv_memory):
    _name = 'aeroo.print_actions'
    _description = 'Aeroo reports print wizard'

    def check_report(self, report_name):
        # check that the report is registered in the report register
        if 'report.%s' % report_name not in netsvc.Service._services:
            raise osv.except_osv(
                _('System Error !'),
                _('Report was not registered in system or deactivated !'))
        return True

    def _reopen(self, res_id, model):
        return {'type': 'ir.actions.act_window',
                'view_mode': 'form',
                'view_type': 'form',
                'res_id': res_id,
                'res_model': self._name,
                'target': 'new',
                }

    def check_if_deferred(self, report_xml, print_ids):
        extras = report_xml.extras.split(',')
        if 'deferred_processing' in extras \
                and report_xml.deferred != 'off' \
                and len(print_ids) >= report_xml.deferred_limit:
            return True
        return False

    def start_deferred(self, cr, uid, ids, context=None):
        this = self.browse(cr, uid, ids[0], context=context)
        report_xml = self.pool.get('ir.actions.report.xml').browse(
            cr, uid, context['report_action_id'])
        deferred_proc_obj = self.pool.get('deferred_processing.task')
        process_id = deferred_proc_obj.create(
            cr, uid, {'name': report_xml.name}, context=context)
        deferred_proc_obj.new_process(cr, uid, process_id, context=context)
        deferred_proc_obj.start_process_report(
            cr, uid, process_id, this.print_ids,
            context['report_action_id'], context=context)
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        mod_id = mod_obj.search(cr, uid, [
            ('name', '=',
             'action_deferred_processing_task_deferred_processing')])[0]
        res_id = mod_obj.read(cr, uid, mod_id, ['res_id'])['res_id']
        act_win = act_obj.read(cr, uid, res_id,
                               ['name', 'type', 'view_id', 'res_model',
                                'view_type', 'search_view_id', 'view_mode',
                                'target', 'context'])
        act_win['res_id'] = process_id
        act_win['view_type'] = 'form'
        act_win['view_mode'] = 'form,tree'
        return act_win

    def simple_print(self, cr, uid, ids, context):
        this = self.browse(cr, uid, ids[0], context=context)
        report_xml = self.pool.get('ir.actions.report.xml').browse(
            cr, uid, context['report_action_id'])
        data = {'model': report_xml.model,
                'ids': this.print_ids,
                'id': context['active_id'],
                'report_type': 'aeroo'}
        if str(report_xml.out_format.id) != this.out_format:
            report_xml.write({'out_format': this.out_format}, context=context)
        return {
            'type': 'ir.actions.report.xml',
            'report_name': report_xml.report_name,
            'datas': data,
            'context': context,
        }

    def to_print(self, cr, uid, ids, context=None):
        this = self.browse(cr, uid, ids[0], context=context)
        report_xml = self.pool.get('ir.actions.report.xml').browse(
            cr, uid, context['report_action_id'])
        self.check_report(report_xml.report_name)
        print_ids = []
        if this.copies <= 0:
            print_ids = this.print_ids
        else:
            while this.copies:
                print_ids.extend(this.print_ids)
                this.copies -= 1
        if str(report_xml.out_format.id) != this.out_format:
            report_xml.write({'out_format': this.out_format}, context=context)
        if self.check_if_deferred(report_xml, print_ids):
            this.write({
                'state': 'confirm',
                'message': _("This process may take too long for interactive "
                             "processing. It is advisable to defer the "
                             "process in background. Do you want to start a "
                             "deferred process?"),
                'print_ids': print_ids,
            }, context=context)
            return self._reopen(this.id, this._model)
        ##### Simple print #####
        data = {'model': report_xml.model,
                'ids': print_ids,
                'id': context['active_id'],
                'report_type': 'aeroo'}
        return {
            'type': 'ir.actions.report.xml',
            'report_name': report_xml.report_name,
            'datas': data,
            'context': context,
        }

    def _out_format_get(self, cr, uid, context={}):
        obj = self.pool.get('report.mimetypes')
        report_action_id = context.get('report_action_id', False)
        if report_action_id:
            in_format = self.pool.get('ir.actions.report.xml').read(
                cr, uid, report_action_id, ['in_format'])['in_format']
            ids = obj.search(cr, uid,
                             [('compatible_types', '=', in_format)],
                             context=context)
            res = obj.read(cr, uid, ids, ['name'], context)
            return [(str(r['id']), r['name']) for r in res]
        else:
            return []

    _columns = {
        'out_format': fields.selection(_out_format_get, 'Output format',
                                       required=True),
        'out_format_code': fields.char('Output format code', size=16,
                                       required=False, readonly=True),
        'copies': fields.integer('Number of copies', required=True),
        'message': fields.text('Message'),
        'state': fields.selection([
            ('draft', 'Draft'),
            ('confirm', 'Confirm'),
            ('done', 'Done'),
        ], 'State', select=True, readonly=True),
        'print_ids': fields.serialized(),
    }

    def onchange_out_format(self, cr, uid, ids, out_format_id):
        if not out_format_id:
            return {}
        out_format = self.pool.get('report.mimetypes').read(
            cr, uid, int(out_format_id), ['code'])
        return {'value': {'out_format_code': out_format['code']}}

    def _get_default_outformat(field):
        def get_default_outformat(self, cr, uid, context):
            report_action_id = context.get('report_action_id', False)
            if report_action_id:
                report_xml = self.pool.get('ir.actions.report.xml').browse(
                    cr, uid, report_action_id)
                return str(getattr(report_xml.out_format, field))
            return False
        return get_default_outformat

    def _get_default_number_of_copies(self, cr, uid, context):
        report_action_id = context.get('report_action_id', False)
        if not report_action_id:
            return False
        report_xml = self.pool.get('ir.actions.report.xml').browse(
            cr, uid, context['report_action_id'])
        return report_xml.copies

    _defaults = {
        'out_format': _get_default_outformat('id'),
        'out_format_code': _get_default_outformat('code'),
        'copies': _get_default_number_of_copies,
        'state': 'draft',
        'print_ids': lambda self, cr, uid, ctx: ctx.get('active_ids'),
    }
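
# Illustrative sketch (assumption, not part of the original module): this
# wizard is normally opened by the Aeroo print hook with a context carrying
# the keys that the defaults and to_print() above rely on. A manual call
# would look roughly like the commented example below; the variable names
# are hypothetical.
#
#   ctx = {
#       'report_action_id': report_xml_id,  # id of the ir.actions.report.xml
#       'active_id': record_ids[0],
#       'active_ids': record_ids,           # becomes 'print_ids'
#   }
#   wiz_obj = self.pool.get('aeroo.print_actions')
#   wiz_id = wiz_obj.create(cr, uid, {}, context=ctx)
#   action = wiz_obj.to_print(cr, uid, [wiz_id], context=ctx)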
class external_report_lines(osv.osv):
    _name = 'external.report.line'
    _description = 'External Report Lines'
    _rec_name = 'res_id'
    _order = 'date desc'

    _columns = {
        'res_model': fields.char('Resource Object', size=64,
                                 required=True, readonly=True),
        'res_id': fields.integer('Resource Id', readonly=True),
        'action': fields.char('Action', size=32, required=True,
                              readonly=True),
        'date': fields.datetime('Date', required=True, readonly=True),
        'external_id': fields.char('External ID', size=64, readonly=True),
        'error_message': fields.text('Error Message', readonly=True),
        'traceback': fields.text('Traceback', readonly=True),
        'exception_type': fields.char('Exception Type', size=128,
                                      readonly=True),
        'data_record': fields.serialized('External Data', readonly=True),
        'origin_defaults': fields.serialized('Defaults', readonly=True),
        'origin_context': fields.serialized('Context', readonly=True),
        'referential_id': fields.many2one('external.referential',
                                          'External Referential',
                                          required=True, readonly=True),
    }

    _defaults = {
        "date": lambda *a: time.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
    }

    def _prepare_log_vals(self, cr, uid, model, action, res_id, external_id,
                          referential_id, data_record, context=None):
        return {
            'res_model': model,
            'action': action,
            'res_id': res_id,
            'external_id': external_id,
            'referential_id': referential_id,
            'data_record': data_record,
        }

    def _prepare_log_info(self, cr, uid, origin_defaults, origin_context,
                          context=None):
        exc_type, exc_value, exc_traceback = sys.exc_info()
        return {
            'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
            'origin_defaults': origin_defaults,
            'origin_context': origin_context,
            'exception_type': exc_type,
            'error_message': exc_value,
            'traceback': ''.join(traceback.format_exception(
                exc_type, exc_value, exc_traceback)),
        }

    def log_failed(self, cr, uid, model, action, referential_id, res_id=None,
                   external_id=None, data_record=None, defaults=None,
                   context=None):
        defaults = defaults or {}
        context = context or {}
        existing_line_id = context.get('retry_report_line_id', False)
        # If the log records a failure, re-raise so the import does not
        # continue. This ensures backward compatibility: the synchronization
        # keeps working exactly the same way if 'use_external_log' is not in
        # the context.
        if not (existing_line_id or context.get('use_external_log', False)):
            raise
        log_cr = pooler.get_db(cr.dbname).cursor()
        try:
            origin_defaults = defaults.copy()
            origin_context = context.copy()
            # The connection object cannot be kept in a text field.
            # FIXME: see if we have problems with other objects and maybe
            # remove from the context all objects which are not string,
            # boolean, list, dict, integer, float, ...
            if origin_context.get('conn_obj', False):
                del origin_context['conn_obj']
            info = self._prepare_log_info(log_cr, uid, origin_defaults,
                                          origin_context, context=context)
            if existing_line_id:
                self.write(log_cr, uid, existing_line_id, info,
                           context=context)
            else:
                vals = self._prepare_log_vals(log_cr, uid, model, action,
                                              res_id, external_id,
                                              referential_id, data_record,
                                              context=context)
                vals.update(info)
                existing_line_id = self.create(log_cr, uid, vals,
                                               context=context)
        except:
            log_cr.rollback()
            raise
        else:
            log_cr.commit()
        finally:
            log_cr.close()
        return existing_line_id

    def log_success(self, cr, uid, model, action, referential_id, res_id=None,
                    external_id=None, context=None):
        if res_id is None and external_id is None:
            raise ValueError('Missing res_id or external_id')
        domain = [
            ('res_model', '=', model),
            ('action', '=', action),
            ('referential_id', '=', referential_id),
        ]
        if res_id is not None:
            domain.append(('res_id', '=', res_id))
        if external_id is not None:
            domain.append(('external_id', '=', external_id))
        log_cr = pooler.get_db(cr.dbname).cursor()
        try:
            log_ids = self.search(log_cr, uid, domain, context=context)
            self.unlink(log_cr, uid, log_ids, context=context)
        except:
            log_cr.rollback()
            raise
        else:
            log_cr.commit()
        finally:
            log_cr.close()
        return True

    def retry(self, cr, uid, ids, context=None):
        if isinstance(ids, (int, long)):
            ids = [ids]
        for log in self.browse(cr, uid, ids, context=context):
            mapping = self.pool.get(log.res_model).report_action_mapping(
                cr, uid, context=context)
            method = mapping.get(log.action, False)
            if not method:
                raise Exception("No python method defined for action %s"
                                % (log.action, ))
            kwargs = {}
            for field, value in method['fields'].items():
                kwargs[field] = safe_eval(value, {'log': log, 'self': self})
            if not kwargs.get('context', False):
                kwargs['context'] = {}
            # keep the id of the line to update it with the result
            kwargs['context']['retry_report_line_id'] = log.id
            # force the export of the resource
            kwargs['context']['force_export'] = True
            kwargs['context']['force'] = True
            method['method'](cr, uid, **kwargs)
        return True
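
# Illustrative sketch (assumption, not part of the original module): retry()
# above expects every logged model to implement report_action_mapping(),
# returning, per action name, the method to call again and the expressions
# (evaluated with safe_eval against 'log' and 'self') used to rebuild its
# keyword arguments. The method and field names below are hypothetical.
#
#   def report_action_mapping(self, cr, uid, context=None):
#       return {
#           'export': {
#               'method': self.export_resource,
#               'fields': {
#                   'ids': '[log.res_id]',
#                   'referential_id': 'log.referential_id.id',
#                   'context': 'dict(log.origin_context)',
#               },
#           },
#       }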
class base_partner_merge_address_values(osv.osv_memory):
    """Merge two addresses"""
    _name = 'base.partner.merge.address.values'
    _description = 'Merges two Addresses'
    _columns = {
        'container': fields.serialized('Fields Container'),
    }
    _values = {}

    def check_addresses(self, cr, uid, add_data, context):
        """Check validity of the selected addresses.

        Inherit to add other checks.
        """
        if add_data.address_id1 == add_data.address_id2:
            raise osv.except_osv(
                _("Error!"),
                _("The same address is selected in both fields."))
        return True

    def _get_previous_wizard(self, cr, uid, context=None):
        if context is None:
            context = {}
        # get the address data from the previous wizard
        merge_obj = self.pool.get('base.partner.merge.address')
        base_wiz_id = context.get('active_model') == \
            'base.partner.merge.address' and context.get('active_id')
        if not base_wiz_id:
            return False
        return merge_obj.browse(cr, uid, base_wiz_id, context=context)

    def fields_view_get(self, cr, uid, view_id=None, view_type='form',
                        context=None, toolbar=False, submenu=False):
        res = super(base_partner_merge_address_values, self).fields_view_get(
            cr, uid, view_id, view_type, context=context,
            toolbar=toolbar, submenu=submenu)
        cr.execute("SELECT id, name, field_description, ttype, "
                   "required, relation, readonly "
                   "FROM ir_model_fields "
                   "WHERE model = 'res.partner.address'")
        field_datas = cr.fetchall()
        prev_wiz = self._get_previous_wizard(cr, uid, context=context)
        self.check_addresses(cr, uid, prev_wiz, context)
        form_xml, merge_fields, self._values, columns = self.pool.get(
            'base.partner.merge')._build_form(
                cr, uid, field_datas,
                prev_wiz.address_id1, prev_wiz.address_id2)
        self._columns.update(columns)
        eview = etree.fromstring(res['arch'])
        placeholder = eview.xpath("//label[@string='placeholder']")[0]
        placeholder.getparent().replace(placeholder, form_xml)
        sep_diff = eview.xpath("//separator[@name='sep_diff']")[0]
        if merge_fields:
            sep_txt = "Select which data to use for the new record"
        else:
            sep_txt = "Merge Records"
        sep_diff.set('string', _(sep_txt))
        res['arch'] = etree.tostring(eview, pretty_print=True)
        res['fields'] = merge_fields
        return res

    def cast_many2one_fields(self, cr, uid, data_record, context=None):
        """Cast many2one values to integers.

        Some fields are many2one and the ORM expects an integer id for
        them. As those fields are displayed as selections in the view
        (and thus read back as strings), we cast them to integers.
        """
        cr.execute("SELECT name FROM ir_model_fields "
                   "WHERE model = 'res.partner.address' "
                   "AND ttype = 'many2one'")
        field_names = cr.fetchall()
        for field in field_names:
            if data_record.get(field[0], False):
                data_record[field[0]] = int(data_record[field[0]])
        return data_record

    def action_merge(self, cr, uid, ids, context=None):
        pool = self.pool
        address_obj = pool.get('res.partner.address')
        prev_wiz = self._get_previous_wizard(cr, uid, context=context)
        add1 = prev_wiz.address_id1.id
        add2 = prev_wiz.address_id2.id
        res = self.read(cr, uid, ids, context=context)[0]
        res.update(self._values)
        remove_field = {}
        if hasattr(address_obj, '_sql_constraints'):
            # for uniqueness constraints (vat number for example)...
            c_names = []
            for const in address_obj._sql_constraints:
                c_names.append('res_partner_address_' + const[0])
            if c_names:
                # quote and join the constraint names so the IN clause
                # works whatever the number of constraints
                c_names = map(lambda x: "'" + x + "'", c_names)
                cr.execute("SELECT column_name "
                           "FROM information_schema.constraint_column_usage u "
                           "JOIN pg_constraint p "
                           "ON (p.conname = u.constraint_name) "
                           "WHERE u.constraint_name IN (%s) "
                           "AND p.contype = 'u'" % ", ".join(c_names))
                for i in cr.fetchall():
                    remove_field[i[0]] = None
        remove_field.update({'active': False})
        address_obj.write(cr, uid, [add1, add2], remove_field,
                          context=context)
        res = self.cast_many2one_fields(cr, uid, res, context)
        add_id = address_obj.create(cr, uid, res, context=context)
        self.custom_updates(cr, uid, add_id, [add1, add2], context)
        # Repoint many2one fields referencing res.partner.address
        # on other models
        cr.execute("SELECT name, model FROM ir_model_fields "
                   "WHERE relation = 'res.partner.address' "
                   "AND ttype NOT IN ('many2many', 'one2many');")
        for name, model_raw in cr.fetchall():
            if hasattr(pool.get(model_raw), '_auto'):
                if not pool.get(model_raw)._auto:
                    continue
                elif hasattr(pool.get(model_raw), '_check_time'):
                    continue
                else:
                    if hasattr(pool.get(model_raw), '_columns'):
                        from osv import fields
                        if pool.get(model_raw)._columns.get(name, False) \
                                and isinstance(
                                    pool.get(model_raw)._columns[name],
                                    fields.many2one):
                            model = model_raw.replace('.', '_')
                            cr.execute("UPDATE " + model +
                                       " SET " + name + "=" + str(add_id) +
                                       " WHERE " + str(name) + " IN (" +
                                       str(add1) + ", " + str(add2) + ")")
        return {}

    def custom_updates(self, cr, uid, address_id, old_address_ids, context):
        """Hook for special updates on the old addresses and the new address"""
        pass
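
# Illustrative note (assumption, not part of the original module): this
# wizard is opened as the second step of a 'base.partner.merge.address'
# wizard, which is expected to carry the two selected addresses in its
# 'address_id1' and 'address_id2' many2one fields; _get_previous_wizard()
# above retrieves that record through 'active_model' / 'active_id' in the
# context.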
class IrActionsServerExecution(orm.Model):
    _name = 'ir.actions.server.execution'
    _description = "Server Action Execution"
    _table = 'ir_act_server_execution'
    _rec_name = 'action_id'

    _columns = {
        'action_id': fields.many2one('ir.actions.server', "Server Action",
                                     readonly=True, required=True,
                                     ondelete="restrict"),
        'locked': fields.boolean('Locked', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True),
        'state': fields.selection([('draft', 'To Do'), ('done', 'Done')],
                                  "State", readonly=True),
        'launch': fields.selection([('manual', 'manually'),
                                    ('trigger', 'by trigger')],
                                   "Launched", readonly=True),
        'args': fields.serialized("Arguments", help="", readonly=True),
    }

    _defaults = {
        'state': 'draft',
        'launch': 'manual',
    }

    def _check_locked_action(self, cr, uid, ids, context=None):
        if isinstance(ids, (int, long)):
            ids = [ids]
        executions = self.browse(cr, uid, ids, context=None)
        action_ids = [execution.action_id.id for execution in executions]
        locked_execution_ids = self.search(cr, uid, [
            ('id', 'not in', ids),
            ('state', '=', 'draft'),
            ('action_id', 'in', action_ids),
            ('locked', '=', True),
        ], context=context)
        if locked_execution_ids:
            return False
        return True

    _constraints = [
        (_check_locked_action,
         'This action is under execution!',
         ['action_id']),
    ]

    def auto_execute(self, cr, uid, context=None):
        action_obj = self.pool.get('ir.actions.server')
        ids = self.search(cr, uid, [('state', '=', 'draft'),
                                    ('locked', '=', False)], context=context)
        for execution in self.browse(cr, uid, ids, context):
            action_obj.run(cr, **execution.args)
        return True
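
# Illustrative sketch (assumption, not part of the original module): a draft
# execution is typically queued by another module and later picked up by
# auto_execute() above (e.g. from a scheduled action). The 'args' dict must
# hold the keyword arguments that ir.actions.server.run() will receive; the
# exact keys below are an assumption about the caller's conventions.
#
#   execution_obj = self.pool.get('ir.actions.server.execution')
#   execution_obj.create(cr, uid, {
#       'action_id': action_id,
#       'launch': 'trigger',
#       'args': {'uid': uid, 'ids': [record_id], 'context': context},
#   }, context=context)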
class external_report_lines(osv.osv):
    _name = 'external.report.line'
    _description = 'External Report Lines'
    _rec_name = 'res_id'
    _order = 'date desc'

    _columns = {
        'external_report_id': fields.many2one('external.report',
                                              'External Report',
                                              required=True, readonly=True,
                                              ondelete='restrict'),
        'state': fields.selection((('success', 'Success'),
                                   ('fail', 'Failed')),
                                  'Status', required=True, readonly=True),
        'res_model': fields.char('Resource Object', size=64,
                                 required=True, readonly=True),
        'res_id': fields.integer('Resource Id', readonly=True),
        'action': fields.char('Action', size=32, required=True,
                              readonly=True),
        'date': fields.datetime('Date', required=True, readonly=True),
        'external_id': fields.char('External ID', size=64, readonly=True),
        'error_message': fields.text('Error Message', readonly=True),
        'data_record': fields.serialized('External Data', readonly=True),
        'origin_defaults': fields.serialized('Defaults', readonly=True),
        'origin_context': fields.serialized('Context', readonly=True),
    }

    _defaults = {
        "date": lambda *a: time.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
    }

    def _log_base(self, cr, uid, model, action, state=None, res_id=None,
                  external_id=None, exception=None, data_record=None,
                  defaults=None, context=None):
        defaults = defaults or {}
        context = context or {}
        existing_line_id = context.get('retry_report_line_id', False)
        # We do not log any action if no report is started.
        # If the log records a failure, re-raise so the import does not
        # continue. This ensures backward compatibility: the synchronization
        # keeps working exactly the same way if no report is started.
        if not (existing_line_id or context.get('external_report_id', False)):
            if state == 'fail':
                raise
            return False
        external_report_id = context['external_report_id']
        log_cr = pooler.get_db(cr.dbname).cursor()
        try:
            origin_defaults = defaults.copy()
            origin_context = context.copy()
            # The connection object cannot be kept in a text field.
            # FIXME: see if we have problems with other objects and maybe
            # remove from the context all objects which are not string,
            # boolean, list, dict, integer, float, ...
            if origin_context.get('conn_obj', False):
                del origin_context['conn_obj']
            if existing_line_id:
                # TODO create a _prepare method
                self.write(log_cr, uid, existing_line_id, {
                    'state': state,
                    'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
                    'error_message': exception and str(exception) or False,
                    'origin_defaults': origin_defaults,
                    'origin_context': origin_context,
                })
            else:
                # TODO create a _prepare method
                existing_line_id = self.create(log_cr, uid, {
                    'external_report_id': external_report_id,
                    'state': state,
                    'res_model': model,
                    'action': action,
                    'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
                    'res_id': res_id,
                    'external_id': external_id,
                    'error_message': exception and str(exception) or False,
                    'data_record': data_record,
                    'origin_defaults': origin_defaults,
                    'origin_context': origin_context,
                })
            log_cr.commit()
        finally:
            log_cr.close()
        return existing_line_id

    # Deprecated
    def log_failed(self, cr, uid, model, action, res_id=None,
                   external_id=None, exception=None, data_record=None,
                   defaults=None, context=None):
        return self._log_base(cr, uid, model, action, 'fail',
                              res_id=res_id, external_id=external_id,
                              exception=exception, data_record=data_record,
                              defaults=defaults, context=context)

    # Deprecated
    def log_success(self, cr, uid, model, action, res_id=None,
                    external_id=None, exception=None, data_record=None,
                    defaults=None, context=None):
        return self._log_base(cr, uid, model, action, 'success',
                              res_id=res_id, external_id=external_id,
                              exception=exception, data_record=data_record,
                              defaults=defaults, context=context)

    def retry(self, cr, uid, ids, context=None):
        if isinstance(ids, (int, long)):
            ids = [ids]
        for log in self.browse(cr, uid, ids, context=context):
            mapping = self.pool.get(log.res_model).report_action_mapping(
                cr, uid, context=context)
            method = mapping.get(log.action, False)
            if not method:
                raise Exception("No python method defined for action %s"
                                % (log.action, ))
            kwargs = {}
            for field, value in method['fields'].items():
                kwargs[field] = safe_eval(value, {'log': log, 'self': self})
            if not kwargs.get('context', False):
                kwargs['context'] = {}
            # keep the id of the line to update it with the result
            kwargs['context']['retry_report_line_id'] = log.id
            # force the export of the resource
            kwargs['context']['force_export'] = True
            kwargs['context']['force'] = True
            # TODO remove: not needed since Magento 6.1
            kwargs['context']['do_not_update_date'] = True
            method['method'](cr, uid, **kwargs)
        return True

    def aggregate_actions(self, cr, uid, ids, context=None):
        res = {}
        for line in self.browse(cr, uid, ids, context=context):
            state = line.state
            model = line.res_model
            action = line.action
            if not res.get((state, model, action), False):
                res[(state, model, action)] = 0
            res[(state, model, action)] += 1
        return res
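
# Illustrative note (assumption, not part of the original module):
# aggregate_actions() above returns a dict keyed by (state, model, action)
# tuples, counting the matching report lines, e.g.:
#
#   {('fail', 'product.product', 'export'): 3,
#    ('success', 'product.product', 'export'): 42}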