def button_cancel(self, cr, uid, ids, context=None):
    cost = self.browse(cr, uid, ids, context=context)
    if cost.state == 'done':
        raise UserError(_('Validated landed costs cannot be cancelled, '
                          'but you could create negative landed costs to reverse them'))
    return cost.write({'state': 'cancel'})

def unlink(self, cr, uid, ids, context=None):
    for rec in self.browse(cr, uid, ids, context=context):
        if rec.state not in ['draft', 'cancel', 'confirm']:
            raise UserError(_('You cannot delete a leave which is in %s state.') % (rec.state,))
    return super(hr_holidays, self).unlink(cr, uid, ids, context)

def default_get(self, cr, uid, fields, context=None):
    """ To get default values for the object.
    @param self: The object pointer.
    @param cr: A database cursor
    @param uid: ID of the user currently logged in
    @param fields: List of fields for which we want default values
    @param context: A standard dictionary
    @return: A dictionary with default values for all fields in ``fields``
    """
    result1 = []
    if context is None:
        context = {}
    if context and context.get('active_ids', False):
        if len(context.get('active_ids')) > 1:
            raise osv.except_osv(_('Warning!'), _("You may only return one picking at a time!"))
    res = super(stock_return_picking, self).default_get(cr, uid, fields, context=context)

    record_id = context and context.get('active_id', False) or False
    uom_obj = self.pool.get('product.uom')
    pick_obj = self.pool.get('stock.picking')
    pick = pick_obj.browse(cr, uid, record_id, context=context)
    quant_obj = self.pool.get("stock.quant")
    chained_move_exist = False
    if pick:
        if pick.state != 'done':
            raise UserError(_("You may only return pickings that are Done!"))

        for move in pick.move_lines:
            if move.move_dest_id:
                chained_move_exist = True
            # Sum the quants in that location that can be returned (they should have been
            # moved by the moves that were included in the returned picking)
            qty = 0
            quant_search = quant_obj.search(cr, uid, [('history_ids', 'in', move.id),
                                                      ('qty', '>', 0.0),
                                                      ('location_id', 'child_of', move.location_dest_id.id)],
                                            context=context)
            for quant in quant_obj.browse(cr, uid, quant_search, context=context):
                if not quant.reservation_id or quant.reservation_id.origin_returned_move_id.id != move.id:
                    qty += quant.qty
            qty = uom_obj._compute_qty(cr, uid, move.product_id.uom_id.id, qty, move.product_uom.id)
            result1.append((0, 0, {'product_id': move.product_id.id, 'quantity': qty, 'move_id': move.id}))

        if len(result1) == 0:
            raise UserError(_("No products to return (only lines in Done state and not fully returned yet can be returned)!"))
        if 'product_return_moves' in fields:
            res.update({'product_return_moves': result1})
        if 'move_dest_exists' in fields:
            res.update({'move_dest_exists': chained_move_exist})
        if 'parent_location_id' in fields and pick.location_id.usage == 'internal':
            res.update({'parent_location_id': pick.picking_type_id.warehouse_id and pick.picking_type_id.warehouse_id.view_location_id.id or pick.location_id.location_id.id})
        if 'original_location_id' in fields:
            res.update({'original_location_id': pick.location_id.id})
        if 'location_id' in fields:
            res.update({'location_id': pick.location_id.id})
    return res

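# Illustrative sketch (not part of the wizard above): how the default quantity to return per move
# is derived. The quant tuples and the conversion factor are made-up; the real code sums the
# stock.quant records still sitting in the move's destination location (skipping quants already
# reserved for this return) and then converts the total with product.uom._compute_qty().
def _returnable_qty_sketch(quants, uom_factor=1.0):
    # quants: list of (qty, reserved_for_this_return) tuples
    qty = sum(q for q, reserved in quants if not reserved)
    return qty * uom_factor

# _returnable_qty_sketch([(8.0, False), (2.0, True)]) -> 8.0
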
def action_forward(self, cr, uid, ids, context=None):
    lead_obj = self.pool.get('crm.lead')
    record = self.browse(cr, uid, ids[0], context=context)
    email_template_obj = self.pool.get('mail.template')
    try:
        template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', 'email_template_lead_forward_mail')[1]
    except ValueError:
        raise UserError(_('The Forward Email Template is not in the database'))
    try:
        portal_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'group_portal')[1]
    except ValueError:
        raise UserError(_('The Portal group cannot be found'))

    local_context = context.copy()
    if not (record.forward_type == 'single'):
        no_email = set()
        for lead in record.assignation_lines:
            if lead.partner_assigned_id and not lead.partner_assigned_id.email:
                no_email.add(lead.partner_assigned_id.name)
        if no_email:
            raise UserError(_('Set an email address for the partner(s): %s') % ", ".join(no_email))
    if record.forward_type == 'single' and not record.partner_id.email:
        raise UserError(_('Set an email address for the partner %s') % record.partner_id.name)

    partners_leads = {}
    for lead in record.assignation_lines:
        partner = record.forward_type == 'single' and record.partner_id or lead.partner_assigned_id
        lead_details = {
            'lead_link': lead.lead_link,
            'lead_id': lead.lead_id,
        }
        if partner:
            partner_leads = partners_leads.get(partner.id)
            if partner_leads:
                partner_leads['leads'].append(lead_details)
            else:
                partners_leads[partner.id] = {'partner': partner, 'leads': [lead_details]}

    stage_id = False
    if record.assignation_lines and record.assignation_lines[0].lead_id.type == 'lead':
        try:
            stage_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', 'stage_portal_lead_assigned')[1]
        except ValueError:
            pass

    for partner_id, partner_leads in partners_leads.items():
        in_portal = False
        for contact in (partner.child_ids or [partner]):
            if contact.user_ids:
                in_portal = portal_id in [g.id for g in contact.user_ids[0].groups_id]

        local_context['partner_id'] = partner_leads['partner']
        local_context['partner_leads'] = partner_leads['leads']
        local_context['partner_in_portal'] = in_portal
        email_template_obj.send_mail(cr, uid, template_id, ids[0], context=local_context)
        lead_ids = [lead['lead_id'].id for lead in partner_leads['leads']]
        values = {'partner_assigned_id': partner_id, 'user_id': partner_leads['partner'].user_id.id}
        if stage_id:
            values['stage_id'] = stage_id
        lead_obj.write(cr, uid, lead_ids, values)
        self.pool.get('crm.lead').message_subscribe(cr, uid, lead_ids, [partner_id], context=context)
    return True

def generate_bbacomm(self, cr, uid, ids, type, reference_type, partner_id, reference, context=None):
    partner_obj = self.pool.get('res.partner')
    reference = reference or ''
    algorithm = False
    if partner_id:
        algorithm = partner_obj.browse(cr, uid, partner_id, context=context).out_inv_comm_algorithm
    algorithm = algorithm or 'random'
    if (type == 'out_invoice'):
        if reference_type == 'bba':
            if algorithm == 'date':
                if not self.check_bbacomm(reference):
                    doy = time.strftime('%j')
                    year = time.strftime('%Y')
                    seq = '001'
                    seq_ids = self.search(cr, uid,
                                          [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
                                           ('reference', 'like', '+++%s/%s/%%' % (doy, year))],
                                          order='reference')
                    if seq_ids:
                        prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
                        if prev_seq < 999:
                            seq = '%03d' % (prev_seq + 1)
                        else:
                            raise UserError(_('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communication has been exceeded!'
                                              '\nPlease manually create a unique BBA Structured Communication.'))
                    bbacomm = doy + year + seq
                    base = int(bbacomm)
                    mod = base % 97 or 97
                    reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod)
            elif algorithm == 'partner_ref':
                if not self.check_bbacomm(reference):
                    partner_ref = self.pool.get('res.partner').browse(cr, uid, partner_id).ref
                    partner_ref_nr = re.sub('\D', '', partner_ref or '')
                    if (len(partner_ref_nr) < 3) or (len(partner_ref_nr) > 7):
                        raise UserError(_('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!'
                                          '\nPlease correct the Partner record.'))
                    else:
                        partner_ref_nr = partner_ref_nr.ljust(7, '0')
                        seq = '001'
                        seq_ids = self.search(cr, uid,
                                              [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
                                               ('reference', 'like', '+++%s/%s/%%' % (partner_ref_nr[:3], partner_ref_nr[3:]))],
                                              order='reference')
                        if seq_ids:
                            prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
                            if prev_seq < 999:
                                seq = '%03d' % (prev_seq + 1)
                            else:
                                raise UserError(_('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communication has been exceeded!'
                                                  '\nPlease manually create a unique BBA Structured Communication.'))
                        bbacomm = partner_ref_nr + seq
                        base = int(bbacomm)
                        mod = base % 97 or 97
                        reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod)
            elif algorithm == 'random':
                if not self.check_bbacomm(reference):
                    base = random.randint(1, 9999999999)
                    bbacomm = str(base).rjust(10, '0')
                    base = int(bbacomm)
                    mod = base % 97 or 97
                    mod = str(mod).rjust(2, '0')
                    reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod)
            else:
                raise UserError(_("Unsupported Structured Communication Type Algorithm '%s' !"
                                  "\nPlease contact your eCore support channel.") % algorithm)
    return {'value': {'reference': reference}}

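# Illustrative sketch only (not part of the model above): the "+++xxx/xxxx/xxxxx+++" Belgian BBA
# structured communication ends with a modulo-97 check on the first ten digits, with 97 used in
# place of 0, exactly as computed in the three branches above. bba_reference() is a hypothetical
# helper name.
def bba_reference(digits10):
    base = int(digits10)
    mod = base % 97 or 97
    return '+++%s/%s/%s%02d+++' % (digits10[:3], digits10[3:7], digits10[7:], mod)

# bba_reference('0123456789') -> '+++012/3456/78939+++'   (123456789 % 97 == 39)
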
def unlink(self):
    if any(rec.state != 'draft' for rec in self):
        raise UserError(_("You can not delete a payment that is already posted"))
    return super(account_payment, self).unlink()

def do_print_checks(self):
    """ This method is a hook for l10n_xx_check_printing modules to implement actual check printing capabilities """
    raise UserError(_("There is no check layout configured.\nMake sure the proper check printing module is installed"
                      " and its configuration (in company settings > 'Configuration' tab) is correct."))

            _logger.info("%s - %s" % (e.code, e.reason), exc_info=True)
            raise UserError("%s - %s" % (e.code, e.reason) + ':' +
                            self._get_twitter_exception_message(e.code, context))
        except URLError, e:
            _logger.info(_('We failed to reach a twitter server.'), exc_info=True)
            raise UserError(_('Internet connection refused') + ' ' +
                            _('We failed to reach a twitter server.'))
        except Exception, e:
            _logger.info(_('Please double-check your Twitter API Key and Secret!'), exc_info=True)
            raise UserError(_('Twitter authorization error!') + ' ' +
                            _('Please double-check your Twitter API Key and Secret!'))

    def create(self, cr, uid, vals, context=None):
        res_id = super(twitter_config_settings, self).create(cr, uid, vals, context=context)
        if vals.get('twitter_api_key') or vals.get('twitter_api_secret') or vals.get('twitter_screen_name'):
            self._check_twitter_authorization(cr, uid, res_id, context=context)
        return res_id

    def write(self, cr, uid, ids, vals, context=None):
        res_id = super(twitter_config_settings, self).write(cr, uid, ids, vals, context=context)

def reverse_anonymize_database(self, cr, uid, ids, context=None):
    """Set the 'clear' state to defined fields"""
    ir_model_fields_anonymization_model = self.pool.get('ir.model.fields.anonymization')
    anonymization_history_model = self.pool.get('ir.model.fields.anonymization.history')

    # create a new history record:
    vals = {
        'date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        'state': 'started',
        'direction': 'anonymized -> clear',
    }
    history_id = anonymization_history_model.create(cr, uid, vals)

    # check that all the defined fields are in the 'anonymized' state
    state = ir_model_fields_anonymization_model._get_global_state(cr, uid, context=context)
    if state == 'clear':
        raise UserError(_("The database is not currently anonymized, you cannot reverse the anonymization."))
    elif state == 'unstable':
        msg = _("The database anonymization is currently in an unstable state. Some fields are anonymized,"
                " while some fields are not anonymized. You should try to solve this problem before trying to do anything.")
        raise UserError(msg)

    wizards = self.browse(cr, uid, ids, context=context)
    for wizard in wizards:
        if not wizard.file_import:
            msg = _("It is not possible to reverse the anonymization process without supplying the anonymization export file.")
            self._raise_after_history_update(cr, uid, history_id, 'Error !', msg)

        # reverse the anonymization:
        # load the pickle file content into a data structure:
        data = pickle.loads(base64.decodestring(wizard.file_import))

        migration_fix_obj = self.pool.get('ir.model.fields.anonymization.migration.fix')
        fix_ids = migration_fix_obj.search(cr, uid, [('target_version', '=', '8.0')])
        fixes = migration_fix_obj.read(cr, uid, fix_ids, ['model_name', 'field_name', 'query', 'query_type', 'sequence'])
        fixes = group(fixes, ('model_name', 'field_name'))

        for line in data:
            queries = []
            table_name = self.pool[line['model_id']]._table if line['model_id'] in self.pool else None

            # check if custom sql exists:
            key = (line['model_id'], line['field_id'])
            custom_updates = fixes.get(key)
            if custom_updates:
                custom_updates.sort(key=itemgetter('sequence'))
                queries = [(record['query'], record['query_type']) for record in custom_updates if record['query_type']]
            elif table_name:
                queries = [("update %(table)s set %(field)s = %%(value)s where id = %%(id)s" % {
                    'table': table_name,
                    'field': line['field_id'],
                }, 'sql')]

            for query in queries:
                if query[1] == 'sql':
                    sql = query[0]
                    cr.execute(sql, {
                        'value': line['value'],
                        'id': line['id'],
                    })
                elif query[1] == 'python':
                    raw_code = query[0]
                    code = raw_code % line
                    eval(code)
                else:
                    raise Exception("Unknown query type '%s'. Valid types are: sql, python." % (query[1],))

    # update the anonymization fields:
    ir_model_fields_anonymization_model = self.pool.get('ir.model.fields.anonymization')
    field_ids = ir_model_fields_anonymization_model.search(cr, uid, [('state', '<>', 'not_existing')], context=context)
    values = {
        'state': 'clear',
    }
    ir_model_fields_anonymization_model.write(cr, uid, field_ids, values, context=context)

    # add a result message in the wizard:
    msg = '\n'.join(["Successfully reversed the anonymization.", ""])
    self.write(cr, uid, ids, {'msg': msg})

    # update the history record:
    anonymization_history_model.write(cr, uid, history_id, {
        'field_ids': [[6, 0, field_ids]],
        'msg': msg,
        'filepath': False,
        'state': 'done',
    })

    # handle the view:
    view_id = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'anonymization.view_ir_model_fields_anonymize_wizard_form')

    return {
        'res_id': ids[0],
        'view_id': [view_id],
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'ir.model.fields.anonymize.wizard',
        'type': 'ir.actions.act_window',
        'context': {'step': 'just_desanonymized'},
        'target': 'new',
    }

def open_statement(self, cr, uid, ids, context=None):
    """ Open the statements
    @param self: The object pointer.
    @param cr: A database cursor
    @param uid: ID of the user currently logged in
    @param context: A standard dictionary
    @return: Blank dictionary
    """
    data = {}
    mod_obj = self.pool.get('ir.model.data')
    statement_obj = self.pool.get('account.bank.statement')
    sequence_obj = self.pool.get('ir.sequence')
    journal_obj = self.pool.get('account.journal')
    if context is None:
        context = {}
    st_ids = []

    j_ids = journal_obj.search(cr, uid, [('journal_user', '=', 1)], context=context)
    if not j_ids:
        raise UserError(_('You have to define which payment method must be available in the point of sale by reusing existing bank and cash through "Accounting / Configuration / Journals / Journals". Select a journal and check the field "PoS Payment Method" from the "Point of Sale" tab. You can also create new payment methods directly from menu "PoS Backend / Configuration / Payment Methods".'))

    for journal in journal_obj.browse(cr, uid, j_ids, context=context):
        ids = statement_obj.search(cr, uid, [('state', '!=', 'confirm'), ('user_id', '=', uid), ('journal_id', '=', journal.id)], context=context)
        if journal.sequence_id:
            number = sequence_obj.next_by_id(cr, uid, journal.sequence_id.id, context=context)
        else:
            raise UserError(_("No sequence defined on the journal"))
        data.update({
            'journal_id': journal.id,
            'user_id': uid,
            'name': number,
        })
        statement_id = statement_obj.create(cr, uid, data, context=context)
        st_ids.append(int(statement_id))

    tree_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_bank_statement_tree')
    tree_id = tree_res and tree_res[1] or False
    form_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_bank_statement_form')
    form_id = form_res and form_res[1] or False
    search_res = mod_obj.get_object_reference(cr, uid, 'account', 'view_bank_statement_search')
    search_id = search_res and search_res[1] or False

    return {
        'type': 'ir.actions.act_window',
        'name': _('List of Cash Registers'),
        'view_type': 'form',
        'view_mode': 'tree,form',
        'res_model': 'account.bank.statement',
        'domain': str([('id', 'in', st_ids)]),
        'views': [(tree_id, 'tree'), (form_id, 'form')],
        'search_view_id': search_id,
    }

def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
    state = self.pool.get('ir.model.fields.anonymization')._get_global_state(cr, uid, context=context)
    if context is None:
        context = {}
    step = context.get('step', 'new_window')
    res = super(ir_model_fields_anonymize_wizard, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
    eview = etree.fromstring(res['arch'])
    placeholder = eview.xpath("group[@name='placeholder1']")
    if len(placeholder):
        placeholder = placeholder[0]
        if step == 'new_window' and state == 'clear':
            # clicked in the menu and the fields are not anonymized: warn the admin that backing up the db is very important
            placeholder.addnext(etree.Element('field', {'name': 'msg', 'colspan': '4', 'nolabel': '1'}))
            placeholder.addnext(etree.Element('newline'))
            placeholder.addnext(etree.Element('label', {'string': 'Warning'}))
            eview.remove(placeholder)
        elif step == 'new_window' and state == 'anonymized':
            # clicked in the menu and the fields are already anonymized
            placeholder.addnext(etree.Element('newline'))
            placeholder.addnext(etree.Element('field', {'name': 'file_import', 'required': "1"}))
            placeholder.addnext(etree.Element('label', {'string': 'Anonymization file'}))
            eview.remove(placeholder)
        elif step == 'just_anonymized':
            # we just ran the anonymization process, we need the file export field
            placeholder.addnext(etree.Element('newline'))
            placeholder.addnext(etree.Element('field', {'name': 'file_export'}))
            # we need to remove the button:
            buttons = eview.xpath("button")
            for button in buttons:
                eview.remove(button)
            # and add a message:
            placeholder.addnext(etree.Element('field', {'name': 'msg', 'colspan': '4', 'nolabel': '1'}))
            placeholder.addnext(etree.Element('newline'))
            placeholder.addnext(etree.Element('label', {'string': 'Result'}))
            # remove the placeholder:
            eview.remove(placeholder)
        elif step == 'just_desanonymized':
            # we just reversed the anonymization process, we don't need any field
            # we need to remove the button
            buttons = eview.xpath("button")
            for button in buttons:
                eview.remove(button)
            # and add a message:
            placeholder.addnext(etree.Element('field', {'name': 'msg', 'colspan': '4', 'nolabel': '1'}))
            placeholder.addnext(etree.Element('newline'))
            placeholder.addnext(etree.Element('label', {'string': 'Result'}))
            # remove the placeholder:
            eview.remove(placeholder)
        else:
            msg = _("The database anonymization is currently in an unstable state. Some fields are anonymized,"
                    " while some fields are not anonymized. You should try to solve this problem before trying to do anything else.")
            raise UserError(msg)
        res['arch'] = etree.tostring(eview)
    return res

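# Illustrative sketch (standalone, not part of the wizard above): lxml's addnext() inserts the new
# element immediately after the placeholder, so successive addnext() calls end up in reverse order.
# That is why the code above adds the field first and the label last, yet the rendered arch reads
# label, newline, field.
from lxml import etree

arch = etree.fromstring("<form><group name='placeholder1'/></form>")
placeholder = arch.xpath("group[@name='placeholder1']")[0]
placeholder.addnext(etree.Element('field', {'name': 'msg'}))
placeholder.addnext(etree.Element('newline'))
placeholder.addnext(etree.Element('label', {'string': 'Warning'}))
arch.remove(placeholder)
print(etree.tostring(arch))
# -> <form><label string="Warning"/><newline/><field name="msg"/></form>
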
def tender_in_progress(self, cr, uid, ids, context=None):
    if not all(obj.line_ids for obj in self.pool['purchase.requisition'].browse(cr, uid, ids, context=context)):
        raise UserError(_('You cannot confirm the call because there is no product line.'))
    return self.write(cr, uid, ids, {'state': 'in_progress'}, context=context)

def copy(self, cr, uid, id, values, context=None):
    raise UserError(_("Duplicating workflows is not possible, please create a new workflow"))

                                smtp_server.smtp_port,
                                user=smtp_server.smtp_user,
                                password=smtp_server.smtp_pass,
                                encryption=smtp_server.smtp_encryption,
                                smtp_debug=smtp_server.smtp_debug)
        except Exception, e:
            raise UserError(_("Connection Test Failed! Here is what we got instead:\n %s") % tools.ustr(e))
        finally:
            try:
                if smtp:
                    smtp.quit()
            except Exception:
                # ignored, just a consequence of the previous exception
                pass
        raise UserError(_("Connection Test Succeeded! Everything seems properly set up!"))

    def connect(self, host, port, user=None, password=None, encryption=False, smtp_debug=False):
        """Returns a new SMTP connection to the given SMTP server, authenticated
        with ``user`` and ``password`` if provided, and encrypted as requested
        by the ``encryption`` parameter.

        :param host: host or IP of SMTP server to connect to
        :param int port: SMTP port to connect to
        :param user: optional username to authenticate with
def do_change_standard_price(self, cr, uid, ids, new_price, context=None):
    """ Changes the Standard Price of Product and creates an account move accordingly."""
    location_obj = self.pool.get('stock.location')
    move_obj = self.pool.get('account.move')
    if context is None:
        context = {}
    user_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
    loc_ids = location_obj.search(cr, uid, [('usage', '=', 'internal'), ('company_id', '=', user_company_id)])
    for rec_id in ids:
        datas = self.get_product_accounts(cr, uid, rec_id, context=context)
        for location in location_obj.browse(cr, uid, loc_ids, context=context):
            c = context.copy()
            c.update({'location': location.id, 'compute_child': False})
            product = self.browse(cr, uid, rec_id, context=c)

            diff = product.standard_price - new_price
            if not diff:
                raise UserError(_("No difference between standard price and new price!"))
            for prod_variant in product.product_variant_ids:
                qty = prod_variant.qty_available
                if qty:
                    # Accounting Entries
                    amount_diff = abs(diff * qty)
                    if diff * qty > 0:
                        debit_account_id = datas['expense'].id
                        credit_account_id = datas['stock_valuation'].id
                    else:
                        debit_account_id = datas['stock_valuation'].id
                        credit_account_id = datas['expense'].id

                    lines = [(0, 0, {'name': _('Standard Price changed'),
                                     'account_id': debit_account_id,
                                     'debit': amount_diff,
                                     'credit': 0,
                                     }),
                             (0, 0, {'name': _('Standard Price changed'),
                                     'account_id': credit_account_id,
                                     'debit': 0,
                                     'credit': amount_diff,
                                     })]
                    move_vals = {
                        'journal_id': datas['stock_journal'].id,
                        'company_id': location.company_id.id,
                        'line_ids': lines,
                    }
                    move_id = move_obj.create(cr, uid, move_vals, context=context)
                    move_obj.post(cr, uid, [move_id], context=context)
        self.write(cr, uid, rec_id, {'standard_price': new_price})
    return True

def send_email(self, cr, uid, message, mail_server_id=None, smtp_server=None, smtp_port=None,
               smtp_user=None, smtp_password=None, smtp_encryption=None, smtp_debug=False,
               context=None):
    """Sends an email directly (no queuing).

    No retries are done, the caller should handle MailDeliveryException in order to ensure
    that the mail is never lost.

    If the mail_server_id is provided, sends using this mail server, ignoring other smtp_* arguments.
    If mail_server_id is None and smtp_server is None, use the default mail server (highest priority).
    If mail_server_id is None and smtp_server is not None, use the provided smtp_* arguments.
    If both mail_server_id and smtp_server are None, look for an 'smtp_server' value in server config,
    and fail if not found.

    :param message: the email.message.Message to send. The envelope sender will be extracted from the
                    ``Return-Path`` (if present), or will be set to the default bounce address.
                    The envelope recipients will be extracted from the combined list of ``To``,
                    ``CC`` and ``BCC`` headers.
    :param mail_server_id: optional id of ir.mail_server to use for sending. overrides other smtp_* arguments.
    :param smtp_server: optional hostname of SMTP server to use
    :param smtp_encryption: optional TLS mode, one of 'none', 'starttls' or 'ssl' (see ir.mail_server fields for explanation)
    :param smtp_port: optional SMTP port, if mail_server_id is not passed
    :param smtp_user: optional SMTP user, if mail_server_id is not passed
    :param smtp_password: optional SMTP password to use, if mail_server_id is not passed
    :param smtp_debug: optional SMTP debug flag, if mail_server_id is not passed
    :return: the Message-ID of the message that was just sent, if successfully sent,
             otherwise raises MailDeliveryException and logs root cause.
    """
    # Use the default bounce address **only if** no Return-Path was
    # provided by caller. Caller may be using Variable Envelope Return
    # Path (VERP) to detect no-longer valid email addresses.
    smtp_from = message['Return-Path']
    if not smtp_from:
        smtp_from = self._get_default_bounce_address(cr, uid, context=context)
    if not smtp_from:
        smtp_from = message['From']
    assert smtp_from, "The Return-Path or From header is required for any outbound email"

    # The email's "Envelope From" (Return-Path), and all recipient addresses must only contain ASCII characters.
    from_rfc2822 = extract_rfc2822_addresses(smtp_from)
    assert from_rfc2822, ("Malformed 'Return-Path' or 'From' address: %r - "
                          "It should contain one valid plain ASCII email") % smtp_from
    # use last extracted email, to support rarities like 'Support@MyComp <*****@*****.**>'
    smtp_from = from_rfc2822[-1]
    email_to = message['To']
    email_cc = message['Cc']
    email_bcc = message['Bcc']

    smtp_to_list = filter(None, tools.flatten(map(extract_rfc2822_addresses, [email_to, email_cc, email_bcc])))
    assert smtp_to_list, self.NO_VALID_RECIPIENT

    x_forge_to = message['X-Forge-To']
    if x_forge_to:
        # `To:` header forged, e.g. for posting on mail.channels, to avoid confusion
        del message['X-Forge-To']
        del message['To']           # avoid multiple To: headers!
        message['To'] = x_forge_to

    # Do not actually send emails in testing mode!
    if getattr(threading.currentThread(), 'testing', False):
        _test_logger.info("skip sending email in test mode")
        return message['Message-Id']

    # Get SMTP Server Details from Mail Server
    mail_server = None
    if mail_server_id:
        mail_server = self.browse(cr, SUPERUSER_ID, mail_server_id)
    elif not smtp_server:
        mail_server_ids = self.search(cr, SUPERUSER_ID, [], order='sequence', limit=1)
        if mail_server_ids:
            mail_server = self.browse(cr, SUPERUSER_ID, mail_server_ids[0])

    if mail_server:
        smtp_server = mail_server.smtp_host
        smtp_user = mail_server.smtp_user
        smtp_password = mail_server.smtp_pass
        smtp_port = mail_server.smtp_port
        smtp_encryption = mail_server.smtp_encryption
        smtp_debug = smtp_debug or mail_server.smtp_debug
    else:
        # we were passed an explicit smtp_server or nothing at all
        smtp_server = smtp_server or tools.config.get('smtp_server')
        smtp_port = tools.config.get('smtp_port', 25) if smtp_port is None else smtp_port
        smtp_user = smtp_user or tools.config.get('smtp_user')
        smtp_password = smtp_password or tools.config.get('smtp_password')
        if smtp_encryption is None and tools.config.get('smtp_ssl'):
            smtp_encryption = 'starttls'  # STARTTLS is the new meaning of the smtp_ssl flag as of v7.0

    if not smtp_server:
        raise UserError(_("Missing SMTP Server") + "\n" +
                        _("Please define at least one SMTP server, or provide the SMTP parameters explicitly."))

    try:
        message_id = message['Message-Id']

        # Add email in Maildir if smtp_server contains maildir.
        if smtp_server.startswith('maildir:/'):
            from mailbox import Maildir
            maildir_path = smtp_server[8:]
            mdir = Maildir(maildir_path, factory=None, create=True)
            mdir.add(message.as_string(True))
            return message_id

        smtp = None
        try:
            smtp = self.connect(smtp_server, smtp_port, smtp_user, smtp_password, smtp_encryption or False, smtp_debug)
            smtp.sendmail(smtp_from, smtp_to_list, message.as_string())
        finally:
            if smtp is not None:
                smtp.quit()
    except Exception, e:
        msg = _("Mail delivery failed via SMTP server '%s'.\n%s: %s") % (tools.ustr(smtp_server),
                                                                         e.__class__.__name__,
                                                                         tools.ustr(e))
        _logger.info(msg)
        raise MailDeliveryException(_("Mail Delivery Failed"), msg)
    return message_id

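# Hedged usage sketch (not from the source): how a caller might hand a stdlib message to the
# send_email() method above through the old-style API. `registry`, `cr` and SUPERUSER_ID are
# assumed to be available from the caller's environment; the addresses are made up.
from email.mime.text import MIMEText

def send_plain_mail_sketch(registry, cr, SUPERUSER_ID):
    msg = MIMEText("Hello from the sketch", _charset='utf-8')
    msg['Subject'] = 'Connection test'
    msg['From'] = 'noreply@example.com'
    msg['To'] = 'someone@example.com'
    # The envelope sender falls back to the bounce address because no Return-Path header is set;
    # envelope recipients are extracted from the To/Cc/Bcc headers.
    return registry['ir.mail_server'].send_email(cr, SUPERUSER_ID, msg)
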
def copy(self, cr, uid, id, default=None, context=None):
    raise UserError(_('Duplicating campaigns is not supported.'))

class twitter_config_settings(osv.TransientModel):
    _inherit = 'website.config.settings'

    _columns = {
        'twitter_api_key': fields.related(
            'website_id', 'twitter_api_key', type="char",
            string='Twitter API Key',
            help="Twitter API key; you can get it from https://apps.twitter.com/app/new"),
        'twitter_api_secret': fields.related(
            'website_id', 'twitter_api_secret', type="char",
            string='Twitter API secret',
            help="Twitter API secret; you can get it from https://apps.twitter.com/app/new"),
        'twitter_tutorial': fields.dummy(
            type="boolean",
            string="Show me how to obtain the Twitter API Key and Secret"),
        'twitter_screen_name': fields.related(
            'website_id', 'twitter_screen_name', type="char",
            string='Get favorites from this screen name',
            help="Screen Name of the Twitter Account from which you want to load favorites. "
                 "It does not have to match the API Key/Secret."),
    }

    def _get_twitter_exception_message(self, error_code, context=None):
        TWITTER_EXCEPTION = {
            304: _('There was no new data to return.'),
            400: _('The request was invalid or cannot be otherwise served. Requests without authentication are considered invalid and will yield this response.'),
            401: _('Authentication credentials were missing or incorrect. Maybe screen name tweets are protected.'),
            403: _('The request is understood, but it has been refused or access is not allowed. Please check your Twitter API Key and Secret.'),
            429: _("Request cannot be served due to the application's rate limit having been exhausted for the resource."),
            500: _('Twitter seems broken. Please retry later. You may consider posting an issue on Twitter forums to get help.'),
            502: _('Twitter is down or being upgraded.'),
            503: _('The Twitter servers are up, but overloaded with requests. Try again later.'),
            504: _('The Twitter servers are up, but the request could not be serviced due to some failure within our stack. Try again later.'),
        }
        if error_code in TWITTER_EXCEPTION:
            return TWITTER_EXCEPTION[error_code]
        else:
            return _('HTTP Error: Something is misconfigured')

    def _check_twitter_authorization(self, cr, uid, config_id, context=None):
        website_obj = self.pool['website']
        website_config = self.browse(cr, uid, config_id, context=context)
        try:
            website_obj.fetch_favorite_tweets(cr, uid, [website_config.website_id.id], context=context)
        except HTTPError, e:
            _logger.info("%s - %s" % (e.code, e.reason), exc_info=True)
            raise UserError("%s - %s" % (e.code, e.reason) + ':' +
                            self._get_twitter_exception_message(e.code, context))
        except URLError, e:
            _logger.info(_('We failed to reach a twitter server.'), exc_info=True)
            raise UserError(_('Internet connection refused') + ' ' +
                            _('We failed to reach a twitter server.'))

def send_mail(self, cr, uid, ids, context=None):
    author_id = self.pool['res.users'].browse(cr, uid, uid, context=context).partner_id.id
    for mailing in self.browse(cr, uid, ids, context=context):
        # instantiate an email composer + send emails
        res_ids = self.get_remaining_recipients(cr, uid, mailing, context=context)
        if not res_ids:
            raise UserError(_('Please select recipients.'))

        if context:
            comp_ctx = dict(context, active_ids=res_ids)
        else:
            comp_ctx = {'active_ids': res_ids}

        # Convert links into absolute URLs before the application of the shortener
        self.write(cr, uid, [mailing.id], {
            'body_html': self.pool['mail.template']._replace_local_links(cr, uid, mailing.body_html, context)
        }, context=context)

        composer_values = {
            'author_id': author_id,
            'attachment_ids': [(4, attachment.id) for attachment in mailing.attachment_ids],
            'body': self.convert_links(cr, uid, [mailing.id], context=context)[mailing.id],
            'subject': mailing.name,
            'model': mailing.mailing_model,
            'email_from': mailing.email_from,
            'record_name': False,
            'composition_mode': 'mass_mail',
            'mass_mailing_id': mailing.id,
            'mailing_list_ids': [(4, l.id) for l in mailing.contact_list_ids],
            'no_auto_thread': mailing.reply_to_mode != 'thread',
        }
        if mailing.reply_to_mode == 'email':
            composer_values['reply_to'] = mailing.reply_to

        composer_id = self.pool['mail.compose.message'].create(cr, uid, composer_values, context=comp_ctx)
        self.pool['mail.compose.message'].send_mail(cr, uid, [composer_id], auto_commit=True, context=comp_ctx)
        self.write(cr, uid, [mailing.id], {'state': 'done'}, context=context)
    return True

def unlink(self, cr, uid, ids, context=None):
    for record in self.browse(cr, uid, ids, context or {}):
        if record.state == "running":
            raise UserError(_('You cannot delete an active subscription!'))
    return super(subscription_subscription, self).unlink(cr, uid, ids, context)

def copy(self, cr, uid, id, values, context=None): raise UserError(_("Cannot duplicate configuration!"), "")
def _create_returns(self, cr, uid, ids, context=None):
    if context is None:
        context = {}
    record_id = context and context.get('active_id', False) or False
    move_obj = self.pool.get('stock.move')
    pick_obj = self.pool.get('stock.picking')
    uom_obj = self.pool.get('product.uom')
    data_obj = self.pool.get('stock.return.picking.line')
    pick = pick_obj.browse(cr, uid, record_id, context=context)
    data = self.read(cr, uid, ids[0], context=context)
    returned_lines = 0

    # Cancel assignment of existing chained assigned moves
    moves_to_unreserve = []
    for move in pick.move_lines:
        to_check_moves = [move.move_dest_id] if move.move_dest_id.id else []
        while to_check_moves:
            current_move = to_check_moves.pop()
            if current_move.state not in ('done', 'cancel') and current_move.reserved_quant_ids:
                moves_to_unreserve.append(current_move.id)
            split_move_ids = move_obj.search(cr, uid, [('split_from', '=', current_move.id)], context=context)
            if split_move_ids:
                to_check_moves += move_obj.browse(cr, uid, split_move_ids, context=context)

    if moves_to_unreserve:
        move_obj.do_unreserve(cr, uid, moves_to_unreserve, context=context)
        # break the link between moves in order to be able to fix them later if needed
        move_obj.write(cr, uid, moves_to_unreserve, {'move_orig_ids': False}, context=context)

    # Create new picking for returned products
    pick_type_id = pick.picking_type_id.return_picking_type_id and pick.picking_type_id.return_picking_type_id.id or pick.picking_type_id.id
    new_picking = pick_obj.copy(cr, uid, pick.id, {
        'move_lines': [],
        'picking_type_id': pick_type_id,
        'state': 'draft',
        'origin': pick.name,
        'location_id': pick.location_dest_id.id,
        'location_dest_id': data['location_id'] and data['location_id'][0] or pick.location_id.id,
    }, context=context)

    for data_get in data_obj.browse(cr, uid, data['product_return_moves'], context=context):
        move = data_get.move_id
        if not move:
            raise UserError(_("You have manually created product lines, please delete them to proceed"))
        new_qty = data_get.quantity
        if new_qty:
            # The return of a return should be linked with the original's destination move if it was not cancelled
            if move.origin_returned_move_id.move_dest_id.id and move.origin_returned_move_id.move_dest_id.state != 'cancel':
                move_dest_id = move.origin_returned_move_id.move_dest_id.id
            else:
                move_dest_id = False

            returned_lines += 1
            location_id = data['location_id'] and data['location_id'][0] or move.location_id.id
            move_obj.copy(cr, uid, move.id, {
                'product_id': data_get.product_id.id,
                'product_uom_qty': new_qty,
                'picking_id': new_picking,
                'state': 'draft',
                'location_id': move.location_dest_id.id,
                'location_dest_id': location_id,
                'picking_type_id': pick_type_id,
                'warehouse_id': pick.picking_type_id.warehouse_id.id,
                'origin_returned_move_id': move.id,
                'procure_method': 'make_to_stock',
                'move_dest_id': move_dest_id,
            })

    if not returned_lines:
        raise UserError(_("Please specify at least one non-zero quantity."))

    pick_obj.action_confirm(cr, uid, [new_picking], context=context)
    pick_obj.action_assign(cr, uid, [new_picking], context=context)
    return new_picking, pick_type_id

def get_bban(self):
    if self.acc_type != 'iban':
        raise UserError(_("Cannot compute the BBAN because the account number is not an IBAN."))
    return get_bban_from_iban(self.acc_number)

def _check_children_scope(self):
    if not all(child.type_tax_use in ('none', self.type_tax_use) for child in self.children_tax_ids):
        raise UserError(_('The application scope of taxes in a group must be either the same as the group or "None".'))

def _run_wkhtmltopdf(self, cr, uid, headers, footers, bodies, landscape, paperformat,
                     spec_paperformat_args=None, save_in_attachment=None, set_viewport_size=False):
    """Execute wkhtmltopdf as a subprocess in order to convert the html given
    in input into a pdf document.

    :param headers: list of string containing the headers
    :param footers: list of string containing the footers
    :param bodies: list of string containing the reports
    :param landscape: boolean to force the pdf to be rendered under a landscape format
    :param paperformat: ir.actions.report.paperformat to generate the wkhtmltopdf arguments
    :param spec_paperformat_args: dict of prioritized paperformat arguments
    :param save_in_attachment: dict of reports to save/load in/from the db
    :returns: Content of the pdf as a string
    """
    if not save_in_attachment:
        save_in_attachment = {}

    command_args = []
    if set_viewport_size:
        command_args.extend(['--viewport-size', landscape and '1024x1280' or '1280x1024'])

    # Passing the cookie to wkhtmltopdf in order to resolve internal links.
    try:
        if request:
            command_args.extend(['--cookie', 'session_id', request.session.sid])
    except AttributeError:
        pass

    # Wkhtmltopdf arguments
    command_args.extend(['--quiet'])  # Less verbose error messages
    if paperformat:
        # Convert the paperformat record into arguments
        command_args.extend(self._build_wkhtmltopdf_args(paperformat, spec_paperformat_args))

    # Force the landscape orientation if necessary
    if landscape and '--orientation' in command_args:
        command_args_copy = list(command_args)
        for index, elem in enumerate(command_args_copy):
            if elem == '--orientation':
                del command_args[index]
                del command_args[index]
                command_args.extend(['--orientation', 'landscape'])
    elif landscape and '--orientation' not in command_args:
        command_args.extend(['--orientation', 'landscape'])

    # Execute WKhtmltopdf
    pdfdocuments = []
    temporary_files = []

    for index, reporthtml in enumerate(bodies):
        local_command_args = []
        pdfreport_fd, pdfreport_path = tempfile.mkstemp(suffix='.pdf', prefix='report.tmp.')
        temporary_files.append(pdfreport_path)

        # Directly load the document if we already have it
        if save_in_attachment and save_in_attachment['loaded_documents'].get(reporthtml[0]):
            with closing(os.fdopen(pdfreport_fd, 'w')) as pdfreport:
                pdfreport.write(save_in_attachment['loaded_documents'][reporthtml[0]])
            pdfdocuments.append(pdfreport_path)
            continue
        else:
            os.close(pdfreport_fd)

        # Wkhtmltopdf handles header/footer as separate pages. Create them if necessary.
        if headers:
            head_file_fd, head_file_path = tempfile.mkstemp(suffix='.html', prefix='report.header.tmp.')
            temporary_files.append(head_file_path)
            with closing(os.fdopen(head_file_fd, 'w')) as head_file:
                head_file.write(headers[index])
            local_command_args.extend(['--header-html', head_file_path])
        if footers:
            foot_file_fd, foot_file_path = tempfile.mkstemp(suffix='.html', prefix='report.footer.tmp.')
            temporary_files.append(foot_file_path)
            with closing(os.fdopen(foot_file_fd, 'w')) as foot_file:
                foot_file.write(footers[index])
            local_command_args.extend(['--footer-html', foot_file_path])

        # Body stuff
        content_file_fd, content_file_path = tempfile.mkstemp(suffix='.html', prefix='report.body.tmp.')
        temporary_files.append(content_file_path)
        with closing(os.fdopen(content_file_fd, 'w')) as content_file:
            content_file.write(reporthtml[1])

        try:
            wkhtmltopdf = [_get_wkhtmltopdf_bin()] + command_args + local_command_args
            wkhtmltopdf += [content_file_path] + [pdfreport_path]

            process = subprocess.Popen(wkhtmltopdf, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = process.communicate()

            if process.returncode not in [0, 1]:
                raise UserError(_('Wkhtmltopdf failed (error code: %s). '
                                  'Message: %s') % (str(process.returncode), err))

            # Save the pdf in attachment if marked
            if reporthtml[0] is not False and save_in_attachment.get(reporthtml[0]):
                with open(pdfreport_path, 'rb') as pdfreport:
                    attachment = {
                        'name': save_in_attachment.get(reporthtml[0]),
                        'datas': base64.encodestring(pdfreport.read()),
                        'datas_fname': save_in_attachment.get(reporthtml[0]),
                        'res_model': save_in_attachment.get('model'),
                        'res_id': reporthtml[0],
                    }
                    try:
                        self.pool['ir.attachment'].create(cr, uid, attachment)
                    except AccessError:
                        _logger.info("Cannot save PDF report %r as attachment", attachment['name'])
                    else:
                        _logger.info('The PDF document %s is now saved in the database', attachment['name'])

            pdfdocuments.append(pdfreport_path)
        except:
            raise

    # Return the entire document
    if len(pdfdocuments) == 1:
        entire_report_path = pdfdocuments[0]
    else:
        entire_report_path = self._merge_pdf(pdfdocuments)
        temporary_files.append(entire_report_path)

    with open(entire_report_path, 'rb') as pdfdocument:
        content = pdfdocument.read()

    # Manual cleanup of the temporary files
    for temporary_file in temporary_files:
        try:
            os.unlink(temporary_file)
        except (OSError, IOError):
            _logger.error('Error when trying to remove file %s' % temporary_file)

    return content

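# Illustrative sketch (standalone, not the report engine above): the minimal wkhtmltopdf
# invocation that the command_args handling above builds up, with made-up temp-file paths.
# Header and footer are passed as separate HTML files and landscape is forced through
# --orientation; 'wkhtmltopdf' is assumed to be on PATH.
import subprocess

command = [
    'wkhtmltopdf', '--quiet',
    '--orientation', 'landscape',
    '--header-html', '/tmp/report.header.tmp.html',
    '--footer-html', '/tmp/report.footer.tmp.html',
    '/tmp/report.body.tmp.html',   # input body
    '/tmp/report.tmp.pdf',         # output document
]
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
# wkhtmltopdf exits with 0 on success and 1 on non-fatal warnings; anything else is an error.
if process.returncode not in (0, 1):
    raise RuntimeError('wkhtmltopdf failed: %s' % err)
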
def execute_cr(cr, uid, obj, method, *args, **kw):
    object = ecore.registry(cr.dbname).get(obj)
    if object is None:
        raise UserError(_("Object %s doesn't exist") % obj)
    return getattr(object, method)(cr, uid, *args, **kw)

def process_sheet(self, cr, uid, ids, context=None):
    move_pool = self.pool.get('account.move')
    hr_payslip_line_pool = self.pool['hr.payslip.line']
    precision = self.pool.get('decimal.precision').precision_get(cr, uid, 'Payroll')
    timenow = time.strftime('%Y-%m-%d')

    for slip in self.browse(cr, uid, ids, context=context):
        line_ids = []
        debit_sum = 0.0
        credit_sum = 0.0
        date = timenow

        name = _('Payslip of %s') % (slip.employee_id.name)
        move = {
            'narration': name,
            'ref': slip.number,
            'journal_id': slip.journal_id.id,
            'date': date,
        }
        for line in slip.details_by_salary_rule_category:
            amt = slip.credit_note and -line.total or line.total
            if float_is_zero(amt, precision_digits=precision):
                continue
            debit_account_id = line.salary_rule_id.account_debit.id
            credit_account_id = line.salary_rule_id.account_credit.id

            if debit_account_id:
                debit_line = (0, 0, {
                    'name': line.name,
                    'partner_id': hr_payslip_line_pool._get_partner_id(cr, uid, line, credit_account=False, context=context),
                    'account_id': debit_account_id,
                    'journal_id': slip.journal_id.id,
                    'date': date,
                    'debit': amt > 0.0 and amt or 0.0,
                    'credit': amt < 0.0 and -amt or 0.0,
                    'analytic_account_id': line.salary_rule_id.analytic_account_id and line.salary_rule_id.analytic_account_id.id or False,
                    'tax_line_id': line.salary_rule_id.account_tax_id and line.salary_rule_id.account_tax_id.id or False,
                })
                line_ids.append(debit_line)
                debit_sum += debit_line[2]['debit'] - debit_line[2]['credit']

            if credit_account_id:
                credit_line = (0, 0, {
                    'name': line.name,
                    'partner_id': hr_payslip_line_pool._get_partner_id(cr, uid, line, credit_account=True, context=context),
                    'account_id': credit_account_id,
                    'journal_id': slip.journal_id.id,
                    'date': date,
                    'debit': amt < 0.0 and -amt or 0.0,
                    'credit': amt > 0.0 and amt or 0.0,
                    'analytic_account_id': line.salary_rule_id.analytic_account_id and line.salary_rule_id.analytic_account_id.id or False,
                    'tax_line_id': line.salary_rule_id.account_tax_id and line.salary_rule_id.account_tax_id.id or False,
                })
                line_ids.append(credit_line)
                credit_sum += credit_line[2]['credit'] - credit_line[2]['debit']

        if float_compare(credit_sum, debit_sum, precision_digits=precision) == -1:
            acc_id = slip.journal_id.default_credit_account_id.id
            if not acc_id:
                raise UserError(_('The Expense Journal "%s" has not properly configured the Credit Account!') % (slip.journal_id.name))
            adjust_credit = (0, 0, {
                'name': _('Adjustment Entry'),
                'partner_id': False,
                'account_id': acc_id,
                'journal_id': slip.journal_id.id,
                'date': date,
                'debit': 0.0,
                'credit': debit_sum - credit_sum,
            })
            line_ids.append(adjust_credit)

        elif float_compare(debit_sum, credit_sum, precision_digits=precision) == -1:
            acc_id = slip.journal_id.default_debit_account_id.id
            if not acc_id:
                raise UserError(_('The Expense Journal "%s" has not properly configured the Debit Account!') % (slip.journal_id.name))
            adjust_debit = (0, 0, {
                'name': _('Adjustment Entry'),
                'partner_id': False,
                'account_id': acc_id,
                'journal_id': slip.journal_id.id,
                'date': date,
                'debit': credit_sum - debit_sum,
                'credit': 0.0,
            })
            line_ids.append(adjust_debit)

        move.update({'line_ids': line_ids})
        move_id = move_pool.create(cr, uid, move, context=context)
        self.write(cr, uid, [slip.id], {'move_id': move_id, 'date': date}, context=context)
        move_pool.post(cr, uid, [move_id], context=context)
    return super(hr_payslip, self).process_sheet(cr, uid, [slip.id], context=context)

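# Illustrative arithmetic only (not part of the payslip model above): how the adjustment entry
# balances the move. The figures and the 2-digit precision are made-up stand-ins for the
# 'Payroll' decimal precision.
debit_sum, credit_sum, precision = 1520.00, 1480.00, 2
if round(credit_sum - debit_sum, precision) < 0:
    # credits fall short: book the difference on the journal's default credit account
    adjustment = {'debit': 0.0, 'credit': debit_sum - credit_sum}   # 40.00
elif round(debit_sum - credit_sum, precision) < 0:
    # debits fall short: book the difference on the journal's default debit account
    adjustment = {'debit': credit_sum - debit_sum, 'credit': 0.0}
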
def _create_invoice(self, order, so_line, amount):
    inv_obj = self.env['account.invoice']
    ir_property_obj = self.env['ir.property']

    account_id = False
    if self.product_id.id:
        account_id = self.product_id.property_account_income_id.id
    if not account_id:
        prop = ir_property_obj.get('property_account_income_categ_id', 'product.category')
        prop_id = prop and prop.id or False
        account_id = order.fiscal_position_id.map_account(prop_id)
    if not account_id:
        raise UserError(
            _('There is no income account defined for this product: "%s". You may have to install a chart of account from Accounting app, settings menu.') %
            (self.product_id.name,))

    if self.amount <= 0.00:
        raise UserError(_('The value of the down payment amount must be positive.'))
    if self.advance_payment_method == 'percentage':
        amount = order.amount_untaxed * self.amount / 100
        name = _("Down payment of %s%%") % (self.amount,)
    else:
        amount = self.amount
        name = _('Down Payment')

    invoice = inv_obj.create({
        'name': order.client_order_ref or order.name,
        'origin': order.name,
        'type': 'out_invoice',
        'reference': False,
        'account_id': order.partner_id.property_account_receivable_id.id,
        'partner_id': order.partner_invoice_id.id,
        'invoice_line_ids': [(0, 0, {
            'name': name,
            'origin': order.name,
            'account_id': account_id,
            'price_unit': amount,
            'quantity': 1.0,
            'discount': 0.0,
            'uom_id': self.product_id.uom_id.id,
            'product_id': self.product_id.id,
            'sale_line_ids': [(6, 0, [so_line.id])],
            'invoice_line_tax_ids': [(6, 0, [x.id for x in self.product_id.taxes_id])],
            'account_analytic_id': order.project_id.id or False,
        })],
        'currency_id': order.pricelist_id.currency_id.id,
        'payment_term_id': order.payment_term_id.id,
        'fiscal_position_id': order.fiscal_position_id.id or order.partner_id.property_account_position_id.id,
        'team_id': order.team_id.id,
    })
    invoice.compute_taxes()
    return invoice

def compute_refund(self, mode='refund'):
    inv_obj = self.env['account.invoice']
    inv_tax_obj = self.env['account.invoice.tax']
    inv_line_obj = self.env['account.invoice.line']
    context = dict(self._context or {})
    xml_id = False

    for form in self:
        created_inv = []
        date = False
        description = False
        for inv in inv_obj.browse(context.get('active_ids')):
            if inv.state in ['draft', 'proforma2', 'cancel']:
                raise UserError(_('Cannot refund draft/proforma/cancelled invoice.'))
            if inv.reconciled and mode in ('cancel', 'modify'):
                raise UserError(_('Cannot refund invoice which is already reconciled, invoice should be unreconciled first. You can only refund this invoice.'))

            date = form.date or False
            description = form.description or inv.name
            refund = inv.refund(form.date_invoice, date, description, inv.journal_id.id)
            refund.compute_taxes()

            created_inv.append(refund.id)
            if mode in ('cancel', 'modify'):
                movelines = inv.move_id.line_ids
                to_reconcile_ids = {}
                to_reconcile_lines = self.env['account.move.line']
                for line in movelines:
                    if line.account_id.id == inv.account_id.id:
                        to_reconcile_lines += line
                        to_reconcile_ids.setdefault(line.account_id.id, []).append(line.id)
                    if line.reconciled:
                        line.remove_move_reconcile()
                refund.signal_workflow('invoice_open')
                for tmpline in refund.move_id.line_ids:
                    if tmpline.account_id.id == inv.account_id.id:
                        to_reconcile_lines += tmpline
                to_reconcile_lines.reconcile()
                if mode == 'modify':
                    invoice = inv.read(['name', 'type', 'number', 'reference', 'comment', 'date_due',
                                        'partner_id', 'partner_insite', 'partner_contact', 'partner_ref',
                                        'payment_term_id', 'account_id', 'currency_id', 'invoice_line_ids',
                                        'tax_line_ids', 'journal_id', 'date'])
                    invoice = invoice[0]
                    del invoice['id']
                    invoice_lines = inv_line_obj.browse(invoice['invoice_line_ids'])
                    invoice_lines = inv_obj._refund_cleanup_lines(invoice_lines)
                    tax_lines = inv_tax_obj.browse(invoice['tax_line_ids'])
                    tax_lines = inv_obj._refund_cleanup_lines(tax_lines)
                    invoice.update({
                        'type': inv.type,
                        'date_invoice': date,
                        'state': 'draft',
                        'number': False,
                        'invoice_line_ids': invoice_lines,
                        'tax_line_ids': tax_lines,
                        'date': date,
                        'name': description,
                    })
                    for field in ('partner_id', 'account_id', 'currency_id', 'payment_term_id', 'journal_id'):
                        invoice[field] = invoice[field] and invoice[field][0]
                    inv_refund = inv_obj.create(invoice)
                    if inv_refund.payment_term_id.id:
                        inv_refund._onchange_payment_term_date_invoice()
                    created_inv.append(inv_refund.id)

            xml_id = (inv.type in ['out_refund', 'out_invoice']) and 'action_invoice_tree1' or \
                     (inv.type in ['in_refund', 'in_invoice']) and 'action_invoice_tree2'
            # Put the reason in the chatter
            subject = _("Invoice refund")
            body = description
            refund.message_post(body=body, subject=subject)

    if xml_id:
        result = self.env.ref('account.%s' % (xml_id)).read()[0]
        invoice_domain = eval(result['domain'])
        invoice_domain.append(('id', 'in', created_inv))
        result['domain'] = invoice_domain
        return result
    return True
