def get_record_data(self, values):
    """ Returns a defaults-like dict with initial values for the composition
    wizard when sending an email related to a previous email (parent_id) or
    a document (model, res_id). This is based on previously computed default
    values.

    :param dict values: precomputed default values (may hold parent_id,
                        model, res_id, partner_ids)
    :return: dict with record_name, model, res_id, partner_ids and subject
    """
    result, subject = {}, False
    if values.get('parent_id'):
        parent = self.env['mail.message'].browse(values.get('parent_id'))
        # BUGFIX: the original line ended with a stray comma, storing a
        # 1-element tuple instead of the record name string.
        result['record_name'] = parent.record_name
        subject = tools.ustr(parent.subject or parent.record_name or '')
        if not values.get('model'):
            result['model'] = parent.model
        if not values.get('res_id'):
            result['res_id'] = parent.res_id
        # Link (command 4) every follower of the parent message.
        partner_ids = values.get('partner_ids', list()) + [(4, pid) for pid in parent.partner_ids.ids]
        if self._context.get('is_private') and parent.author_id:
            # check message is private then add author also in partner list.
            partner_ids += [(4, parent.author_id.id)]
        result['partner_ids'] = partner_ids
    elif values.get('model') and values.get('res_id'):
        doc_name_get = self.env[values.get('model')].browse(values.get('res_id')).name_get()
        result['record_name'] = doc_name_get and doc_name_get[0][1] or ''
        subject = tools.ustr(result['record_name'])
    re_prefix = _('Re:')
    # Prefix with "Re:" unless the subject already carries it (translated or not).
    if subject and not (subject.startswith('Re:') or subject.startswith(re_prefix)):
        subject = "%s %s" % (re_prefix, subject)
    result['subject'] = subject
    return result
def amount_to_text(self, amount):
    """Spell out ``amount`` in words for this currency, using the unit and
    subunit labels (e.g. "Forty-Two Dollars and Five Cents"). Returns an
    empty string when the num2words library is not installed."""
    self.ensure_one()

    def _spell(value, lang):
        # Fall back to English when the target language is unsupported.
        try:
            return num2words(value, lang=lang).title()
        except NotImplementedError:
            return num2words(value, lang='en').title()

    if num2words is None:
        logging.getLogger(__name__).warning("The library 'num2words' is missing, cannot render textual amounts.")
        return ""

    # Round to the currency precision, then split whole and fractional parts.
    rendered = "%.{0}f".format(self.decimal_places) % amount
    int_part, _sep, frac_part = rendered.partition('.')
    whole = int(int_part)
    cents = int(frac_part or 0)

    code = self.env.context.get('lang') or self.env.user.lang
    language = self.env['res.lang'].search([('code', '=', code)])

    words = tools.ustr('{amt_value} {amt_word}').format(
        amt_value=_spell(whole, lang=language.iso_code),
        amt_word=self.currency_unit_label,
    )
    # Only mention the subunit when there actually is a fractional part.
    if not self.is_zero(amount - whole):
        words += ' ' + _('and') + tools.ustr(' {amt_value} {amt_word}').format(
            amt_value=_spell(cents, lang=language.iso_code),
            amt_word=self.currency_subunit_label,
        )
    return words
def _process_text(self, txt):
    """Translate ``txt`` according to the language in the local context,
    replace dynamic ``[[expr]]`` with their real value, then escape the
    result for XML.

    :param str txt: original text to translate (must NOT be XML-escaped)
    :return: translated text, with dynamic expressions evaluated and
             with special XML characters escaped (``&,<,>``).
    """
    if not self.localcontext:
        return str2xml(txt)
    if not txt:
        return ''
    result = ''
    sps = _regex.split(txt)
    while sps:
        # Even-indexed chunks are plain text to translate.
        to_translate = tools.ustr(sps.pop(0))
        result += tools.ustr(self.localcontext.get('translate', lambda x: x)(to_translate))
        if sps:
            # Odd-indexed chunks are [[ expressions ]] to evaluate;
            # failures are logged and the expression is dropped.
            txt = None
            try:
                expr = sps.pop(0)
                txt = safe_eval(expr, self.localcontext)
                if txt and isinstance(txt, basestring):
                    txt = tools.ustr(txt)
            except Exception:
                _logger.info("Failed to evaluate expression [[ %s ]] with context %r while rendering report, ignored.", expr, self.localcontext)
            if isinstance(txt, basestring):
                result += txt
            elif txt:
                # CLEANUP: `txt and (txt is not None) and (txt is not False)`
                # was redundant (truthiness already excludes None/False), and
                # bare `ustr` is replaced by `tools.ustr` for consistency.
                result += tools.ustr(txt)
    return str2xml(result)
def get_or_create_user(self, conf, login, ldap_entry):
    """ Retrieve an active resource of model res_users with the specified
    login. Create the user if it is not initially found.

    :param dict conf: LDAP configuration
    :param login: the user's login
    :param tuple ldap_entry: single LDAP result (dn, attrs)
    :return: res_users id
    :rtype: int
    """
    user_id = False
    login = tools.ustr(login.lower().strip())
    self.env.cr.execute("SELECT id, active FROM res_users WHERE lower(login)=%s", (login,))
    res = self.env.cr.fetchone()
    if res:
        # Existing (possibly archived) user: only reuse it when active.
        if res[1]:
            user_id = res[0]
    elif conf['create_user']:
        # FIX: pass `login` lazily to the logger instead of eager %-formatting.
        _logger.debug('Creating new Odoo user "%s" from LDAP', login)
        values = self.map_ldap_attributes(conf, login, ldap_entry)
        SudoUser = self.env['res.users'].sudo()
        if conf['user']:
            # Use the configured template user as the base for the new account.
            values['active'] = True
            user_id = SudoUser.browse(conf['user'][0]).copy(default=values).id
        else:
            user_id = SudoUser.create(values).id
    return user_id
def encode_header(header_text):
    """Returns an appropriate representation of the given header value,
    suitable for direct assignment as a header value in an
    email.message.Message. RFC2822 assumes that headers contain only 7-bit
    characters, so we ensure it is the case, using RFC2047 encoding when
    needed.

    :param header_text: unicode or utf-8 encoded string with header value
    :rtype: string | email.header.Header
    :return: if ``header_text`` represents a plain ASCII string, return the
             same 7-bit string, otherwise returns an email.header.Header
             that will perform the appropriate RFC2047 encoding of
             non-ASCII values.
    """
    if not header_text:
        return ""
    # Convert anything to utf-8, suitable for testing ASCIIness, as 7-bit
    # chars are encoded as ASCII in utf-8.
    as_utf8 = ustr(header_text).encode('utf-8')
    ascii_value = try_coerce_ascii(as_utf8)
    if ascii_value:
        return ascii_value
    # Non-ASCII content: wrap it in a Header that takes care of
    # RFC2047-encoding it as a 7-bit string.
    return Header(as_utf8, 'utf-8')
def change_product_qty(self):
    """ Changes the Product Quantity by making a Physical Inventory. """
    Inventory = self.env['stock.inventory']
    for wizard in self:
        product = wizard.product_id.with_context(location=wizard.location_id.id, lot_id=wizard.lot_id.id)
        theoretical = product.qty_available
        # A single inventory line adjusting this product/lot to the new quantity.
        line_vals = {
            'product_qty': wizard.new_quantity,
            'location_id': wizard.location_id.id,
            'product_id': wizard.product_id.id,
            'product_uom_id': wizard.product_id.uom_id.id,
            'theoretical_qty': theoretical,
            'prod_lot_id': wizard.lot_id.id,
        }
        # A specific lot forces the 'none' filter; a bare product uses 'product'.
        if wizard.product_id.id and wizard.lot_id.id:
            scope = 'none'
        elif wizard.product_id.id:
            scope = 'product'
        else:
            scope = 'none'
        inventory = Inventory.create({
            'name': _('INV: %s') % tools.ustr(wizard.product_id.name),
            'filter': scope,
            'product_id': wizard.product_id.id,
            'location_id': wizard.location_id.id,
            'lot_id': wizard.lot_id.id,
            'line_ids': [(0, 0, line_vals)],
        })
        inventory.action_done()
    return {'type': 'ir.actions.act_window_close'}
def write(self, vals):
    """Extend write() to sync challenge users from ``user_domain``,
    subscribe report recipients, and react to state transitions."""
    if vals.get('user_domain'):
        matched = self._get_challenger_users(ustr(vals.get('user_domain')))
        if not vals.get('user_ids'):
            vals['user_ids'] = []
        vals['user_ids'].extend((4, user.id) for user in matched)

    write_res = super(Challenge, self).write(vals)

    if vals.get('report_message_frequency', 'never') != 'never':
        # _recompute_challenge_users does not set users for challenges with
        # no reports, so subscribe them now.
        for challenge in self:
            challenge.message_subscribe([user.partner_id.id for user in challenge.user_ids])

    new_state = vals.get('state')
    if new_state == 'inprogress':
        self._recompute_challenge_users()
        self._generate_goals_from_challenge()
    elif new_state == 'done':
        self._check_challenge_reward(force=True)
    elif new_state == 'draft':
        # Resetting progress is forbidden while goals are still running.
        if self.env['gamification.goal'].search([('challenge_id', 'in', self.ids), ('state', '=', 'inprogress')], limit=1):
            raise exceptions.UserError(_("You can not reset a challenge with unfinished goals."))

    return write_res
def remove_accents(input_str):
    """Suboptimal-but-better-than-nothing way to replace accented latin
    letters by an ASCII equivalent. Will obviously change the meaning of
    input_str and work only for some cases."""
    # NFKD decomposition splits each accented letter into a base character
    # followed by combining marks, which we then drop.
    decomposed = unicodedata.normalize('NFKD', ustr(input_str))
    return u''.join(ch for ch in decomposed if not unicodedata.combining(ch))
def _get_source(self, name, types, lang, source=None, res_id=None): """ Return the translation for the given combination of ``name``, ``type``, ``language`` and ``source``. All values passed to this method should be unicode (not byte strings), especially ``source``. :param name: identification of the term to translate, such as field name (optional if source is passed) :param types: single string defining type of term to translate (see ``type`` field on ir.translation), or sequence of allowed types (strings) :param lang: language code of the desired translation :param source: optional source term to translate (should be unicode) :param res_id: optional resource id or a list of ids to translate (if used, ``source`` should be set) :rtype: unicode :return: the request translation, or an empty unicode string if no translation was found and `source` was not passed """ # FIXME: should assert that `source` is unicode and fix all callers to # always pass unicode so we can remove the string encoding/decoding. if not lang: return tools.ustr(source or '') if isinstance(types, basestring): types = (types,) if res_id: if isinstance(res_id, (int, long)): res_id = (res_id,) else: res_id = tuple(res_id) return self.__get_source(name, types, lang, source, res_id)
def initialize_sys_path():
    """
    Setup an import-hook to be able to import OpenERP addons from the
    different addons paths.

    This ensures something like ``import crm`` (or even
    ``import odoo.addons.crm``) works even if the addons are not in the
    PYTHONPATH.
    """
    global ad_paths
    global hooked

    def _register(path):
        # Keep ad_paths duplicate-free while preserving insertion order.
        if path not in ad_paths:
            ad_paths.append(path)

    _register(tools.config.addons_data_dir)
    for entry in tools.config['addons_path'].split(','):
        _register(os.path.abspath(tools.ustr(entry.strip())))
    # add base module path
    _register(os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'addons')))

    if not hooked:
        sys.meta_path.append(AddonsHook())
        sys.meta_path.append(OdooHook())
        hooked = True
def create_from_ui(self, orders):
    """Create pos orders after the session checkout, and from each pos
    order generate the corresponding delivery/return orders and the
    payment receipt. (Docstring translated from Chinese.)

    :param orders: list of order payloads coming from the POS UI
    :return: list of created pos order ids
    """
    order_ids = []
    for order in orders:
        order_data = order.get('data')
        pos_order_data = self.data_handling(order_data)
        pos_order = self.create(pos_order_data)
        order_ids.append(pos_order.id)
        prec_amt = self.env['decimal.precision'].precision_get('Amount')
        # Register every non-zero payment line on the new order.
        for payments in order_data.get('statement_ids'):
            if not float_is_zero(payments[2].get('amount'), precision_digits=prec_amt):
                pos_order.add_payment(self._payment_fields(payments[2]))
        try:
            pos_order.action_pos_order_paid()
        except psycopg2.OperationalError:
            # do not hide transactional errors, the order(s) won't be saved!
            raise
        except Exception as e:
            # Log message (Chinese): "Could not fully process the POS order: %s"
            _logger.error(u'不能完整地处理POS 订单: %s', tools.ustr(e))
        # Create the sell delivery and confirm it.
        records = pos_order.create_sell_delivery()
        invoice_ids = [record.invoice_id for record in records]
        # Create the payment/money order and confirm it.
        pos_order.create_money_order(invoice_ids, pos_order.payment_line_ids)
    return order_ids
def onchange_employee(self):
    """Populate the payslip name, company, contract, structure and
    worked-days lines from the selected employee and period."""
    if not (self.employee_id and self.date_from and self.date_to):
        return
    employee = self.employee_id
    date_from = self.date_from
    date_to = self.date_to

    period_start = datetime.combine(fields.Date.from_string(date_from), time.min)
    locale = self.env.context.get('lang') or 'en_US'
    self.name = _('Salary Slip of %s for %s') % (
        employee.name,
        tools.ustr(babel.dates.format_date(date=period_start, format='MMMM-y', locale=locale)))
    self.company_id = employee.company_id

    contracts = self.env['hr.contract']
    if not self.env.context.get('contract') or not self.contract_id:
        # No contract forced by the context: take the employee's contracts
        # covering the period, defaulting to the first one.
        contracts = employee._get_contracts(date_from, date_to)
        if not contracts:
            return
        self.contract_id = contracts[0]

    if not self.contract_id.struct_id:
        return
    self.struct_id = self.contract_id.struct_id

    # computation of the salary worked days
    worked_day_vals = self.get_worked_day_lines(contracts, date_from, date_to)
    new_lines = self.worked_days_line_ids.browse([])
    for vals in worked_day_vals:
        new_lines += new_lines.new(vals)
    self.worked_days_line_ids = new_lines
    return
def exp_render_report(db, uid, object, ids, datas=None, context=None):
    """Render report ``object`` for ``ids`` and store the outcome (result
    or DeferredException) in the global ``self_reports`` registry under a
    freshly allocated report id.

    :param db: database name
    :param uid: user id the report is rendered as
    :param object: report service name
    :param ids: record ids to print
    :param datas: optional report data dict
    :param context: optional context dict
    """
    if not datas:
        datas = {}
    if not context:
        context = {}
    # Allocate a unique report id under the protection of the global lock.
    self_id_protect.acquire()
    global self_id
    self_id += 1
    id = self_id
    self_id_protect.release()

    self_reports[id] = {'uid': uid, 'result': False, 'state': False, 'exception': None}

    cr = odoo.registry(db).cursor()
    try:
        result, format = odoo.report.render_report(cr, uid, ids, object, datas, context)
        if not result:
            tb = sys.exc_info()
            self_reports[id]['exception'] = odoo.exceptions.DeferredException('RML is not available at specified location or not enough data to print!', tb)
        self_reports[id]['result'] = result
        self_reports[id]['format'] = format
        self_reports[id]['state'] = True
    except Exception as exception:
        # MODERNIZED: `except Exception, exception` -> `as` (valid on 2.6+/3).
        _logger.exception('Exception: %s\n', exception)
        if hasattr(exception, 'name') and hasattr(exception, 'value'):
            self_reports[id]['exception'] = odoo.exceptions.DeferredException(tools.ustr(exception.name), tools.ustr(exception.value))
        else:
            tb = sys.exc_info()
            self_reports[id]['exception'] = odoo.exceptions.DeferredException(tools.exception_to_unicode(exception), tb)
        self_reports[id]['state'] = True
    finally:
        # BUGFIX: the cursor was never closed, leaking one DB connection
        # per rendered report.
        cr.close()
def pack_jobs_request(self, term_ids, context=None):
    '''prepare the terms that will be requested to gengo and returns them in a dictionary with following format
        {'jobs': {
            'term1.id': {...}
            'term2.id': {...}
        }}'''
    base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
    IrTranslation = self.env['ir.translation']
    user = self.env.user
    auto_approve = 1 if user.company_id.gengo_auto_approve else 0
    jobs = {}
    for term in term_ids:
        # Skip terms without any word character: nothing to translate.
        if not re.search(r"\w", term.src or ""):
            continue
        comment = user.company_id.gengo_comment or ''
        if term.gengo_comment:
            comment += '\n' + term.gengo_comment
        # Timestamped key keeps job identifiers unique across requests.
        job_key = time.strftime('%Y%m%d%H%M%S') + '-' + str(term.id)
        jobs[job_key] = {
            'type': 'text',
            'slug': 'Single :: English to ' + term.lang,
            'tier': tools.ustr(term.gengo_translation),
            'custom_data': str(term.id),
            'body_src': term.src,
            'lc_src': 'en',
            'lc_tgt': IrTranslation._get_gengo_corresponding_language(term.lang),
            'auto_approve': auto_approve,
            'comment': comment,
            'callback_url': "%s/website/gengo_callback?pgk=%s&db=%s" % (base_url, self.get_gengo_key(), self.env.cr.dbname),
        }
    return {'jobs': jobs, 'as_group': 0}
def create_from_ui(self, orders):
    """Create pos orders from UI payloads, skipping references already
    saved, then mark each new order paid and optionally invoice it."""
    # Keep only new orders
    submitted_references = [entry['data']['name'] for entry in orders]
    existing = self.search([('pos_reference', 'in', submitted_references)])
    existing_references = {rec['pos_reference'] for rec in existing.read(['pos_reference'])}

    order_ids = []
    for entry in orders:
        if entry['data']['name'] in existing_references:
            continue
        to_invoice = entry['to_invoice']
        order = entry['data']
        if to_invoice:
            self._match_payment_to_invoice(order)
        pos_order = self._process_order(order)
        order_ids.append(pos_order.id)
        try:
            pos_order.signal_workflow('paid')
        except psycopg2.OperationalError:
            # do not hide transactional errors, the order(s) won't be saved!
            raise
        except Exception as e:
            _logger.error('Could not fully process the POS Order: %s', tools.ustr(e))
        if to_invoice:
            pos_order.action_invoice()
            pos_order.invoice_id.sudo().signal_workflow('invoice_open')
    return order_ids
def decode(text):
    """Return the unicode conversion of the given encoded SMTP header text."""
    # TDE proposal: move to tools ?
    if not text:
        return
    parts = decode_header(text.replace('\r', ''))
    # The joining space will not be needed as of Python 3.3
    # See https://hg.python.org/cpython/rev/8c03fe231877
    return ' '.join(tools.ustr(payload, charset) for payload, charset in parts)
def try_report(cr, uid, rname, ids, data=None, context=None, our_module=None, report_type=None):
    """ Try to render a report <rname> with contents of ids

    This function should also check for common pitfalls of reports.

    :return: True when the report rendered and passed the sanity checks,
             False for chunk formats that cannot be examined
    :raises RuntimeError: if render_report does not return a (data, format) tuple
    :raises ValueError: if the report is empty or a PDF lacks its header
    """
    if data is None:
        data = {}
    if context is None:
        context = {}
    # Report service names may or may not carry the 'report.' prefix.
    if rname.startswith('report.'):
        rname_s = rname[7:]
    else:
        rname_s = rname
    _test_logger.info(" - Trying %s.create(%r)", rname, ids)
    res = odoo.report.render_report(cr, uid, ids, rname_s, data, context=context)
    if not isinstance(res, tuple):
        raise RuntimeError("Result of %s.create() should be a (data,format) tuple, now it is a %s" %
                           (rname, type(res)))
    (res_data, res_format) = res
    if not res_data:
        raise ValueError("Report %s produced an empty result!" % rname)
    if tools.config['test_report_directory']:
        # BUGFIX: the file handle was previously leaked; the context manager
        # guarantees it is flushed and closed.
        with open(os.path.join(tools.config['test_report_directory'], rname + '.' + res_format), 'wb+') as fh:
            fh.write(res_data)
    _logger.debug("Have a %s report for %s, will examine it", res_format, rname)
    if res_format == 'pdf':
        if res_data[:5] != '%PDF-':
            raise ValueError("Report %s produced a non-pdf header, %r" % (rname, res_data[:10]))
        res_text = False
        try:
            # Dump the PDF to a temp file so pdftotext can read it back.
            fd, rfname = tempfile.mkstemp(suffix=res_format)
            os.write(fd, res_data)
            os.close(fd)
            proc = Popen(['pdftotext', '-enc', 'UTF-8', '-nopgbrk', rfname, '-'], shell=False, stdout=PIPE)
            stdout, stderr = proc.communicate()
            res_text = tools.ustr(stdout)
            os.unlink(rfname)
        except Exception:
            _logger.debug("Unable to parse PDF report: install pdftotext to perform automated tests.")
        if res_text is not False:
            for line in res_text.split('\n'):
                if ('[[' in line) or ('[ [' in line):
                    # NOTE(review): `line[80:]` looks like it should be
                    # `line[:80]` (a truncated preview) — confirm upstream.
                    _logger.error("Report %s may have bad expression near: \"%s\".", rname, line[80:])
                # TODO more checks, what else can be a sign of a faulty report?
    elif res_format == 'html':
        pass
    else:
        _logger.warning("Report %s produced a \"%s\" chunk, cannot examine it", rname, res_format)
        return False
    _test_logger.info(" + Report %s produced correctly.", rname)
    return True
def _ogone_s2s_validate_tree(self, tree, tries=2):
    """Apply an Ogone server-to-server status ``tree`` to this transaction.

    Maps the numeric Ogone STATUS to the transaction lifecycle (done /
    cancel / pending / retry / error), saving the acquirer reference and,
    when an ALIAS is returned, a reusable payment token.

    :param tree: parsed Ogone response (dict-like, supports .get())
    :param int tries: remaining polls for transient 'wait' statuses
    :return: True on success, False otherwise (or the recursive retry result)
    """
    if self.state not in ['draft', 'pending']:
        _logger.info('Ogone: trying to validate an already validated tx (ref %s)', self.reference)
        return True
    status = int(tree.get('STATUS') or 0)
    if status in self._ogone_valid_tx_status:
        self.write({
            'date': datetime.date.today().strftime(DEFAULT_SERVER_DATE_FORMAT),
            'acquirer_reference': tree.get('PAYID'),
        })
        # Store a payment token when the acquirer returned a card ALIAS and
        # the customer asked (or the acquirer is configured) to save it.
        if tree.get('ALIAS') and self.partner_id and \
                (self.type == 'form_save' or self.acquirer_id.save_token == 'always')\
                and not self.payment_token_id:
            pm = self.env['payment.token'].create({
                'partner_id': self.partner_id.id,
                'acquirer_id': self.acquirer_id.id,
                'acquirer_ref': tree.get('ALIAS'),
                'name': tree.get('CARDNO'),
            })
            self.write({'payment_token_id': pm.id})
        if self.payment_token_id:
            self.payment_token_id.verified = True
        self._set_transaction_done()
        self.execute_callback()
        # if this transaction is a validation one, then we refund the money we just withdrawn
        if self.type == 'validation':
            self.s2s_do_refund()
        return True
    elif status in self._ogone_cancel_tx_status:
        self.write({'acquirer_reference': tree.get('PAYID')})
        self._set_transaction_cancel()
    elif status in self._ogone_pending_tx_status:
        vals = {
            'acquirer_reference': tree.get('PAYID'),
        }
        if status == 46:  # HTML 3DS
            vals['html_3ds'] = ustr(base64.b64decode(tree.HTML_ANSWER.text))
        self.write(vals)
        self._set_transaction_pending()
    elif status in self._ogone_wait_tx_status and tries > 0:
        # Transient status: wait briefly and poll Ogone again.
        time.sleep(0.5)
        self.write({'acquirer_reference': tree.get('PAYID')})
        tree = self._ogone_s2s_get_tx_status()
        return self._ogone_s2s_validate_tree(tree, tries - 1)
    else:
        error = 'Ogone: feedback error: %(error_str)s\n\n%(error_code)s: %(error_msg)s' % {
            'error_str': tree.get('NCERRORPLUS'),
            'error_code': tree.get('NCERROR'),
            'error_msg': ogone.OGONE_ERROR_MAP.get(tree.get('NCERROR')),
        }
        _logger.info(error)
        self.write({
            'state_message': error,
            'acquirer_reference': tree.get('PAYID'),
        })
        self._set_transaction_cancel()
    return False
def extract_rfc2822_addresses(text):
    """Returns a list of valid RFC2822 addresses that can be found in
    ``source``, ignoring malformed ones and non-ASCII ones.
    """
    if not text:
        return []
    # Keep only the pattern matches that are pure ASCII.
    return [candidate for candidate in address_pattern.findall(ustr(text)) if is_ascii(candidate)]
def button_confirm_login(self):
    """Test the configured connection of each server record; mark the
    server 'done' on success or raise a UserError on failure."""
    for server in self:
        try:
            # connect() is only called to validate the credentials; the
            # returned connection object itself is not used afterwards.
            connection = server.connect()
            server.write({'state': 'done'})
        except Exception, err:  # Python 2 except syntax
            _logger.info("Failed to connect to %s server %s.", server.type, server.name, exc_info=True)
            raise UserError(_("Connection test failed: %s") % tools.ustr(err))
        finally:
            # NOTE(review): the finally body appears truncated in this view —
            # confirm the cleanup (presumably closing `connection`) against
            # the full source file.
def _geoip_setup_resolver(cls):
    """Lazily initialize the shared GeoIP resolver on first use."""
    if odoo._geoip_resolver is not None:
        return
    db_path = config.get('geoip_database')
    try:
        # `or False` keeps the sentinel non-None so we do not retry the
        # (possibly failing) open on every request.
        odoo._geoip_resolver = GeoIPResolver.open(db_path) or False
    except Exception as e:
        _logger.warning('Cannot load GeoIP: %s', ustr(e))
def _geo_query_address_default(self, street=None, zip=None, city=None, state=None, country=None):
    """Build a comma-separated geocoding query string from the address
    parts, dropping every empty component."""
    components = [
        street,
        ("%s %s" % (zip or '', city or '')).strip(),
        state,
        country,
    ]
    return tools.ustr(', '.join(part for part in components if part))
def extract_rfc2822_addresses(text):
    """Returns a list of valid RFC2822 addresses that can be found in
    ``source``, ignoring malformed ones and non-ASCII ones.
    """
    if not text:
        return []
    # Work on the UTF-8 byte form (Python 2) so the ASCII coercion below
    # can test each candidate.
    candidates = address_pattern.findall(ustr(text).encode('utf-8'))
    # Python 2 filter() returns the list of addresses that coerced to ASCII.
    return filter(try_coerce_ascii, candidates)
def __get_source(self, name, types, lang, source, res_id):
    """Low-level translation lookup; falls back to ``source`` when no
    translation row matches."""
    # res_id is a tuple or None, otherwise ormcache cannot cache it!
    query, params = self._get_source_query(name, types, lang, source, res_id)
    self._cr.execute(query, params)
    row = self._cr.fetchone()
    translation = row[0] if row and row[0] else u''
    if source and not translation:
        return tools.ustr(source)
    return translation
def geo_query_address(street=None, zip=None, city=None, state=None, country=None):
    """Assemble a comma-separated geocoding query from the address parts,
    skipping empty components.

    :return: unicode query string suitable for a geocoding service
    """
    if country and ',' in country and (country.endswith(' of') or country.endswith(' of the')):
        # put country qualifier in front, otherwise GMap gives wrong results,
        # e.g. 'Congo, Democratic Republic of the' => 'Democratic Republic of the Congo'
        # BUGFIX: strip both halves — the old swap kept the space following
        # the comma, producing ' Democratic Republic of the Congo'.
        name, qualifier = country.split(',', 1)
        country = '%s %s' % (qualifier.strip(), name.strip())
    return tools.ustr(', '.join(
        field
        for field in [street, ("%s %s" % (zip or '', city or '')).strip(), state, country]
        if field
    ))
def onchange_employee_id(self, date_from, date_to, employee_id=False, contract_id=False):
    """Old-API onchange: rebuild payslip defaults (name, company, contract,
    structure, worked-days and input lines) for the given employee/period.

    :param date_from: period start as a '%Y-%m-%d' string
    :param date_to: period end as a '%Y-%m-%d' string
    :param employee_id: id of the hr.employee, or False
    :param contract_id: id of a specific hr.contract to use, or False
    :return: dict with a 'value' key holding the field updates
    """
    # defaults
    res = {
        'value': {
            'line_ids': [],
            # delete old input lines (command 2 = remove record)
            'input_line_ids': [(2, x,) for x in self.input_line_ids.ids],
            # delete old worked days lines
            'worked_days_line_ids': [(2, x,) for x in self.worked_days_line_ids.ids],
            # 'details_by_salary_head':[], TODO put me back
            'name': '',
            'contract_id': False,
            'struct_id': False,
        }
    }
    if (not employee_id) or (not date_from) or (not date_to):
        return res
    ttyme = datetime.fromtimestamp(time.mktime(time.strptime(date_from, "%Y-%m-%d")))
    employee = self.env['hr.employee'].browse(employee_id)
    locale = self.env.context.get('lang') or 'en_US'
    res['value'].update({
        'name': _('Salary Slip of %s for %s') % (employee.name, tools.ustr(babel.dates.format_date(date=ttyme, format='MMMM-y', locale=locale))),
        'company_id': employee.company_id.id,
    })
    if not self.env.context.get('contract'):
        # fill with the first contract of the employee
        contract_ids = self.get_contract(employee, date_from, date_to)
    else:
        if contract_id:
            # set the list of contract for which the input have to be filled
            contract_ids = [contract_id]
        else:
            # if we don't give the contract, then the inputs to fill should
            # be for all current contracts of the employee
            contract_ids = self.get_contract(employee, date_from, date_to)
    if not contract_ids:
        return res
    contract = self.env['hr.contract'].browse(contract_ids[0])
    res['value'].update({
        'contract_id': contract.id
    })
    struct = contract.struct_id
    if not struct:
        return res
    res['value'].update({
        'struct_id': struct.id,
    })
    # computation of the salary input
    contracts = self.env['hr.contract'].browse(contract_ids)
    worked_days_line_ids = self.get_worked_day_lines(contracts, date_from, date_to)
    input_line_ids = self.get_inputs(contracts, date_from, date_to)
    res['value'].update({
        'worked_days_line_ids': worked_days_line_ids,
        'input_line_ids': input_line_ids,
    })
    return res
def _ogone_s2s_validate_tree(self, tree, tries=2):
    """Apply an Ogone server-to-server status ``tree`` to this transaction
    (older state-field variant, with 'refunding' support).

    :param tree: parsed Ogone response (dict-like, supports .get())
    :param int tries: remaining polls for transient 'wait' statuses
    :return: True on success, False otherwise (or the recursive retry result)
    """
    if self.state not in ('draft', 'pending', 'refunding'):
        _logger.info('Ogone: trying to validate an already validated tx (ref %s)', self.reference)
        return True
    status = int(tree.get('STATUS') or 0)
    if status in self._ogone_valid_tx_status:
        # A valid status on a refunding tx completes the refund; otherwise
        # the payment itself is done.
        new_state = 'refunded' if self.state == 'refunding' else 'done'
        self.write({
            'state': new_state,
            'date_validate': datetime.date.today().strftime(DEFAULT_SERVER_DATE_FORMAT),
            'acquirer_reference': tree.get('PAYID'),
        })
        # Store a payment token when the acquirer returned a card ALIAS and
        # the customer asked (or the acquirer is configured) to save it.
        if tree.get('ALIAS') and self.partner_id and \
                (self.type == 'form_save' or self.acquirer_id.save_token == 'always')\
                and not self.payment_token_id:
            pm = self.env['payment.token'].create({
                'partner_id': self.partner_id.id,
                'acquirer_id': self.acquirer_id.id,
                'acquirer_ref': tree.get('ALIAS'),
                'name': tree.get('CARDNO'),
            })
            self.write({'payment_token_id': pm.id})
        if self.payment_token_id:
            self.payment_token_id.verified = True
        self.execute_callback()
        return True
    elif status in self._ogone_cancel_tx_status:
        self.write({
            'state': 'cancel',
            'acquirer_reference': tree.get('PAYID'),
        })
    elif status in self._ogone_pending_tx_status:
        new_state = 'refunding' if self.state == 'refunding' else 'pending'
        self.write({
            'state': new_state,
            'acquirer_reference': tree.get('PAYID'),
            'html_3ds': ustr(base64.b64decode(tree.HTML_ANSWER.text)),
        })
    elif status in self._ogone_wait_tx_status and tries > 0:
        # Transient status: wait briefly and poll Ogone again.
        time.sleep(0.5)
        self.write({'acquirer_reference': tree.get('PAYID')})
        tree = self._ogone_s2s_get_tx_status()
        return self._ogone_s2s_validate_tree(tree, tries - 1)
    else:
        error = 'Ogone: feedback error: %(error_str)s\n\n%(error_code)s: %(error_msg)s' % {
            'error_str': tree.get('NCERRORPLUS'),
            'error_code': tree.get('NCERROR'),
            'error_msg': ogone.OGONE_ERROR_MAP.get(tree.get('NCERROR')),
        }
        _logger.info(error)
        self.write({
            'state': 'error',
            'state_message': error,
            'acquirer_reference': tree.get('PAYID'),
        })
    return False
def _get_title_from_url(self, url):
    """Fetch ``url`` and return the text of its HTML <title> tag, falling
    back to the URL itself on any failure (best-effort helper).

    :param str url: URL to fetch (5-second timeout)
    :return: page title, or ``url`` when it cannot be determined
    """
    try:
        page = urlopen(url, timeout=5)
        # Re-encode to UTF-8 so lxml's HTMLParser decodes consistently.
        p = html.fromstring(ustr(page.read()).encode('utf-8'), parser=html.HTMLParser(encoding='utf-8'))
        title = p.find('.//title').text
    except Exception:
        # BUGFIX: narrowed the bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit) to Exception; any HTTP, parsing or
        # missing-<title> error still falls back to the raw URL.
        title = url
    return title
def render_template(self, template_txt, model, res_ids, post_process=False):
    """ Render the given template text, replace mako expressions ``${expr}``
    with the result of evaluating these expressions with an evaluation
    context containing:

        - ``user``: Model of the current user
        - ``object``: record of the document record this mail is related to
        - ``context``: the context passed to the mail composition wizard

    :param str template_txt: the template text to render
    :param str model: model name of the document record this mail is
                      related to.
    :param res_ids: id or list of ids of document records those mails are
                    related to (an int input switches off multi mode).
    """
    multi_mode = True
    if isinstance(res_ids, pycompat.integer_types):
        # Single id: remember to unwrap the result dict before returning.
        multi_mode = False
        res_ids = [res_ids]

    results = dict.fromkeys(res_ids, u"")

    # try to load the template
    try:
        # The 'safe' context flag selects the sandboxed mako environment.
        mako_env = mako_safe_template_env if self.env.context.get('safe') else mako_template_env
        template = mako_env.from_string(tools.ustr(template_txt))
    except Exception:
        _logger.info("Failed to load template %r", template_txt, exc_info=True)
        return multi_mode and results or results[res_ids[0]]

    # prepare template variables
    records = self.env[model].browse(it for it in res_ids if it)  # filter to avoid browsing [None]
    res_to_rec = dict.fromkeys(res_ids, None)
    for record in records:
        res_to_rec[record.id] = record
    variables = {
        'format_date': lambda date, format=False, context=self._context: format_date(self.env, date, format),
        'format_tz': lambda dt, tz=False, format=False, context=self._context: format_tz(self.env, dt, tz, format),
        'format_amount': lambda amount, currency, context=self._context: format_amount(self.env, amount, currency),
        'user': self.env.user,
        'ctx': self._context,  # context kw would clash with mako internals
    }
    # Render once per record, exposing it as `object` in the template.
    for res_id, record in res_to_rec.items():
        variables['object'] = record
        try:
            render_result = template.render(variables)
        except Exception:
            _logger.info("Failed to render template %r using values %r" % (template, variables), exc_info=True)
            raise UserError(_("Failed to render template %r using values %r") % (template, variables))
        if render_result == u"False":
            # mako renders a None expression as the string "False"; blank it.
            render_result = u""
        results[res_id] = render_result

    if post_process:
        for res_id, result in results.items():
            results[res_id] = self.render_post_process(result)
    return multi_mode and results or results[res_ids[0]]
def schema_valid(arch):
    """ Get RNG validator and validate RNG file."""
    validator = relaxng(arch.tag)
    if not validator or validator.validate(arch):
        # No validator available for this tag, or validation passed.
        return True
    # Validation failed: log every reported error. Note that when the
    # error log is empty we still return True, matching the original logic.
    ok = True
    for err in validator.error_log:
        _logger.error(tools.ustr(err))
        ok = False
    return ok
def export_bank_reconciliation_report(self): """Method to export bank reconciliation report.""" cr, uid, context = self.env.args wiz_exported_obj = self.env['wiz.bank.reconciliation.report.exported'] move_l_obj = self.env['account.move.line'] bank_st_obj = self.env['account.bank.statement'] bank_st_l_obj = self.env['account.bank.statement.line'] # sheet Development file_path = 'Bank Reconciliation Report.xlsx' workbook = xlsxwriter.Workbook('/tmp/' + file_path) # num_format = workbook.add_format({'num_format': 'dd/mm/yy'}) header_cell_fmat = workbook.add_format({ 'font_name': 'Arial', 'font_size': 10, 'bold': 1, # 'fg_color': '#96c5f4', 'align': 'center', 'border': 1, # 'valign': 'vcenter' 'text_wrap': True, 'bg_color': '#d3d3d3' }) header_cell_l_fmat = workbook.add_format({ 'font_name': 'Arial', 'font_size': 10, 'bold': 1, # 'fg_color': '#96c5f4', 'align': 'left', # 'border': 1, # 'valign': 'vcenter' 'text_wrap': True }) header_cell_r_fmat = workbook.add_format({ 'font_name': 'Arial', 'font_size': 10, 'bold': 1, # 'fg_color': '#96c5f4', 'align': 'right', 'border': 1, # 'valign': 'vcenter' 'text_wrap': True, 'bg_color': '#d3d3d3' }) cell_l_fmat = workbook.add_format({ 'font_name': 'Arial', 'font_size': 10, 'align': 'left', # 'valign': 'vcenter', 'text_wrap': True 'text_wrap': True }) cell_r_fmat = workbook.add_format({ 'font_name': 'Arial', 'font_size': 10, 'align': 'right', # 'valign': 'vcenter' 'text_wrap': True, 'num_format': '#,##,###' }) cell_r_bold_noborder = workbook.add_format({ 'font_name': 'Arial', 'font_size': 10, 'align': 'right', # 'valign': 'vcenter' 'text_wrap': True, 'bold': 1 }) cell_c_fmat = workbook.add_format({ 'font_name': 'Arial', 'font_size': 10, 'align': 'center', # 'valign': 'vcenter' 'text_wrap': True }) cell_c_head_fmat = workbook.add_format({ 'font_name': 'Arial', 'font_size': 14, 'align': 'center', 'bold': True, 'border': 1, 'text_wrap': True }) # bold = workbook.add_format({'bold': True}) # to_date = '' # from_date = '' # 
prev_year_from_date = self.date_from # prev_year_to_date = self.date_to # if self.date_from: # from_date = datetime.strftime(self.date_from, '%d/%m/%Y') # from_dt = self.date_from # from_year = self.date_from.year # f_dt = self.date_from # prev_year_from_date = \ # from_dt.replace(day=1, month=1, year=from_year - 1) # # prev_year_from_date = datetime.strftime( # # prev_year_from_date, '%d/%m/%Y') # prev_year_to_date = f_dt.replace(day=31, # month=12, year=from_year - 1) # if self.date_to: # to_date = datetime.strftime(self.date_to, '%d/%m/%Y') # f_dt = self.date_to # f_year = self.date_to.year # prev_year_to_date = f_dt.replace(day=31, # month=12, year=f_year - 1) company = self.company_id or False company_name = company and company.name or '' from_date = datetime.strftime(self.bnk_st_date.sudo().date, '%d/%m/%Y') # for journal in self.journal_ids: for journal in self.journal_id: currency_id = journal.sudo().currency_id or False currency_symbol = journal.sudo().currency_id and \ journal.sudo().currency_id.symbol or \ journal.sudo().company_id and \ journal.sudo().company_id.currency_id and \ journal.sudo().company_id.currency_id.symbol or '' currency_position = journal.sudo().currency_id and \ journal.sudo().currency_id.position or \ journal.sudo().company_id and \ journal.sudo().company_id.currency_id and \ journal.sudo().company_id.currency_id.position or '' bank_st_id = bank_st_obj.search( [('date', '=', self.bnk_st_date.sudo().date), ('journal_id', '=', journal.id), ('company_id', '=', company.id)], limit=1) # last_bank_st_id = self.env['account.bank.statement'].search([ # ('journal_id', '=', journal.id), # ('date', '<=', self.bnk_st_date.sudo().date), # ('company_id', '=', company.id)], # order="date desc, id desc", limit=1) last_bank_st_id = bank_st_obj.search( [('date', '<', self.bnk_st_date.sudo().date), ('journal_id', '=', journal.id), ('company_id', '=', company.id)], limit=1) last_st_balance = last_bank_st_id.balance_end last_reconcile_date_str = '' 
last_reconcile_date = '' # last_reconcile_amount = 0.0 # curr_bal = bank_st_id and bank_st_id.balance_end or 0.0 last_reconcile_bal = last_bank_st_id and \ last_bank_st_id.balance_end or 0.0 if last_bank_st_id: last_reconcile_date = last_bank_st_id.date last_reconcile_date_str = \ datetime.strftime(last_reconcile_date, '%d/%m/%Y') # last_reconcile_lst = bank_st_l_obj.search([ # # ('date', '=', self.bnk_st_date.sudo().date), # ('statement_id', '=', last_bank_st_id and \ # last_bank_st_id.id or False), # ('statement_id.journal_id', '=', journal.id), # ('statement_id.company_id', '=', company.id), # ('journal_entry_ids', '!=', False), # # ('state', '=', 'confirm') # ]).mapped('amount') # last_reconcile_amount = sum(last_reconcile_lst) reconcile_cust_bnk_st_lines = bank_st_l_obj.search([ # ('date', '=', self.bnk_st_date.sudo().date), ('statement_id', '=', bank_st_id and bank_st_id.id or False), ('statement_id.journal_id', '=', journal.id), ('statement_id.company_id', '=', company.id), ('journal_entry_ids', '!=', False), # ('amount', '>', 0.0) # ('state', '=', 'confirm') ]) # tot_reconcile_cust_lines = \ # sum(reconcile_cust_bnk_st_lines.mapped('amount')) reconcile_vend_bnk_st_lines = bank_st_l_obj.search([ # ('date', '=', self.bnk_st_date.sudo().date), ('statement_id', '=', bank_st_id and bank_st_id.id or False), ('statement_id.journal_id', '=', journal.id), ('statement_id.company_id', '=', company.id), ('journal_entry_ids', '!=', False), # ('amount', '<', 0.0) # ('state', '=', 'confirm') ]) # tot_reconcile_vend_lines = \ # sum(reconcile_vend_bnk_st_lines.mapped('amount')) # unreconcile_cust_bnk_st_lines = bank_st_l_obj.search([ # # ('date', '=', self.bnk_st_date.sudo().date), # ('statement_id', '=', bank_st_id and bank_st_id.id or False), # ('statement_id.journal_id', '=', journal.id), # ('statement_id.company_id', '=', company.id), # ('journal_entry_ids', '=', False), # # ('amount', '>', 0.0) # # ('state', '=', 'confirm') # ]) # tot_unreconcile_cust_lines = \ # 
sum(unreconcile_cust_bnk_st_lines.mapped('amount')) system_stf_fy_dt = self.bnk_st_date.sudo().date.\ replace(year=2018, month=7, day=1) tot_virtual_gl_bal = 0.0 account_ids = list( set([ journal.default_debit_account_id.id, journal.default_credit_account_id.id ]) - {False}) lines_already_accounted = move_l_obj.search([ ('account_id', 'in', account_ids), ('date', '<=', self.bnk_st_date.sudo().date), ('company_id', '=', company.id) ]) odoo_balance = sum(lines_already_accounted.mapped('balance')) # Bank statement lines not reconciled with a payment bank_st_positiove_l = bank_st_l_obj.search([ ('statement_id.journal_id', '=', journal.id), ('date', '<=', self.bnk_st_date.sudo().date), ('journal_entry_ids', '=', False), ('amount', '>', 0), ('company_id', '=', company.id) ]) outstanding_plus_tot = sum(bank_st_positiove_l.mapped('amount')) bank_st_minus_l = bank_st_l_obj.search([ ('statement_id.journal_id', '=', journal.id), ('date', '<=', self.bnk_st_date.sudo().date), ('journal_entry_ids', '=', False), ('amount', '<', 0), ('company_id', '=', company.id) ]) outstanding_minus_tot = sum(bank_st_minus_l.mapped('amount')) unreconcile_checks_payments = move_l_obj.search([ '|', '&', ('move_id.journal_id.type', 'in', ['cash', 'bank']), ('move_id.journal_id', '=', journal.id), '&', ('move_id.journal_id.type', 'not in', ['cash', 'bank']), ('move_id.journal_id', '=', journal.id), '|', ('statement_line_id', '=', False), ('statement_line_id.date', '>', self.bnk_st_date.sudo().date), ('user_type_id.type', '=', 'liquidity'), ('full_reconcile_id', '=', False), ('date', '<=', self.bnk_st_date.sudo().date), '&', ('company_id', '=', company.id), ('date', '>=', system_stf_fy_dt) ]) unrec_tot = sum(unreconcile_checks_payments.mapped('balance')) tot_virtual_gl_bal = odoo_balance + outstanding_plus_tot + \ outstanding_minus_tot + unrec_tot difference = tot_virtual_gl_bal - last_st_balance # unreconcile_vend_bnk_st_lines = bank_st_l_obj.search([ # # ('date', '=', self.bnk_st_date.sudo().date), 
# ('statement_id', '=', bank_st_id and bank_st_id.id or False), # ('statement_id.journal_id', '=', journal.id), # ('statement_id.company_id', '=', company.id), # ('journal_entry_ids', '=', False), # ('amount', '<', 0.0) # # ('state', '=', 'confirm') # ]) # tot_unreconcile_vend_lines = \ # sum(unreconcile_vend_bnk_st_lines.mapped('amount')) worksheet = workbook.add_worksheet(journal.name) # worksheet.set_column(0, 4, 20) # worksheet.set_column(6, 6, 5) worksheet.set_column(0, 0, 5) worksheet.set_column(1, 1, 13) worksheet.set_column(2, 2, 10) worksheet.set_column(3, 3, 35) worksheet.set_column(4, 4, 35) worksheet.set_column(5, 5, 20) worksheet.set_column(6, 6, 15) worksheet.set_row(1, 20) worksheet.merge_range(1, 0, 1, 5, company_name, cell_c_head_fmat) worksheet.merge_range(2, 0, 2, 5, 'Reconciliation Details - ' + journal.name, cell_c_head_fmat) worksheet.merge_range(3, 0, 3, 5, 'As of ' + ustr(from_date), cell_c_head_fmat) row = 5 col = 0 worksheet.write(row, col, 'ID', header_cell_fmat) col += 1 worksheet.write(row, col, 'Transaction Type', header_cell_fmat) col += 1 worksheet.write(row, col, 'Date', header_cell_fmat) col += 1 # worksheet.write(row, col, 'Document Number', header_cell_fmat) # col += 1 # worksheet.write(row, col, 'Payment Type', header_cell_fmat) # col += 1 # worksheet.write(row, col, 'Partner Type', header_cell_fmat) # col += 1 worksheet.write(row, col, 'Customer/Partner Name', header_cell_fmat) col += 1 worksheet.write(row, col, 'Lable/Memo', header_cell_fmat) col += 1 worksheet.write(row, col, 'Balance', header_cell_r_fmat) row += 1 worksheet.merge_range(row, 0, row, 1, 'Reconciled', header_cell_l_fmat) row += 1 worksheet.merge_range(row, 1, row, 4, 'Cleared Deposits and Other Credits', header_cell_l_fmat) col = 0 row += 1 tot_cust_payment = 0.0 for cust_pay_line in reconcile_cust_bnk_st_lines: account_ids = list( set([ journal.default_debit_account_id.id, journal.default_credit_account_id.id ]) - {False}) move_lines = move_l_obj.search([ 
('statement_line_id', '=', cust_pay_line.id), ('payment_id', '!=', False), ('payment_id.payment_type', 'in', ['inbound', 'transfer']), # ('credit', '>', 0.0), ('balance', '>=', 0.0), ('account_id', 'in', account_ids) ]) for move_l in move_lines: balance = move_l and move_l.balance or 0.0 if currency_id: balance = move_l and move_l.amount_currency or 0.0 if balance in [0.0, -0.0]: balance = move_l and move_l.balance or 0.0 tot_cust_payment = tot_cust_payment + balance or 0.0 payment_date = '' if move_l and move_l.date: payment_date = datetime.strftime( move_l.date, '%d-%m-%Y') payment = move_l and move_l.payment_id or False name = payment and payment.partner_id and \ payment.partner_id.name or '' pay_no = payment and payment.name or '' pay_reference = payment and payment.payment_reference or '' pay_memo = payment and payment.communication or '' batch_pay_no = payment and payment.batch_payment_id and \ payment.batch_payment_id.name or '' pay_method = payment and payment.batch_payment_id and \ payment.batch_payment_id.payment_method_id and \ payment.batch_payment_id.payment_method_id.name or '' jou_entry_ref = move_l and move_l.move_id and \ move_l.move_id.name or '' partner = "Partner Name : " + name + '\n' partner += "Batch Payment Number : " + batch_pay_no + '\n' partner += "Payment Method : " + pay_method + '\n' partner += "Payment Number : " + pay_no + '\n' partner += "Payment Reference : " + pay_reference + '\n' partner += "Journal Entry Number : " + jou_entry_ref + '\n' partner += "Invoice Reference : " + pay_memo + '\n' cust_pay_memo = cust_pay_line.name or '' worksheet.write(row, col, ' ', cell_c_fmat) col += 1 worksheet.write(row, col, 'Payment', cell_c_fmat) col += 1 worksheet.write(row, col, payment_date, cell_c_fmat) col += 1 # worksheet.write(row, col, # cust_pay_name or '', cell_l_fmat) # col += 1 # worksheet.write(row, col, # PAY_TYPE.get(cust_pay.payment_type, ''), # cell_l_fmat) # col += 1 # worksheet.write(row, col, # 
PARTNER_TYPE.get(cust_pay.partner_type, ''), # cell_l_fmat) # col += 1 worksheet.set_row(row, 90) worksheet.write(row, col, partner, cell_l_fmat) col += 1 worksheet.write(row, col, cust_pay_memo or '', cell_l_fmat) col += 1 bal_str = balance if currency_position == 'after': bal_str = ustr(bal_str) + ustr(currency_symbol) else: bal_str = ustr(currency_symbol) + ustr(bal_str) worksheet.write(row, col, bal_str or ustr(0.0), cell_r_fmat) col = 0 row += 1 worksheet.set_row(row, 40) row += 1 worksheet.set_row(row, 40) tot_cust_payment_str = round(tot_cust_payment, 2) if currency_position == 'after': tot_cust_payment_str = ustr(tot_cust_payment_str) + ustr( currency_symbol) else: tot_cust_payment_str = ustr(currency_symbol) + ustr( tot_cust_payment_str) worksheet.merge_range( row, 1, row, 4, 'Total - Cleared Deposits and Other Credits', header_cell_l_fmat) worksheet.write(row, 5, tot_cust_payment_str or 0.0, cell_r_bold_noborder) row += 1 worksheet.set_row(row, 40) worksheet.merge_range(row, 1, row, 4, 'Cleared Checks and Payments', header_cell_l_fmat) col = 0 row += 1 tot_vend_payment = 0.0 for vend_pay_line in reconcile_vend_bnk_st_lines: account_ids = list( set([ journal.default_debit_account_id.id, journal.default_credit_account_id.id ]) - {False}) move_lines = move_l_obj.search([ ('statement_line_id', '=', vend_pay_line.id), ('payment_id', '!=', False), ('payment_id.payment_type', 'in', ['outbound', 'transfer']), # ('debit', '>', 0.0) ('balance', '<=', 0.0), ('account_id', 'in', account_ids) ]) for move_l in move_lines: balance = move_l and move_l.balance or 0.0 if currency_id: balance = move_l and move_l.amount_currency or 0.0 if balance in [0.0, -0.0]: balance = move_l and move_l.balance or 0.0 tot_vend_payment = tot_vend_payment + balance or 0.0 payment_date = '' if move_l and move_l.date: payment_date = datetime.strftime( move_l.date, '%d-%m-%Y') payment = move_l and move_l.payment_id or False name = payment and payment.partner_id and \ payment.partner_id.name or 
'' pay_no = payment and payment.name or '' pay_reference = payment and payment.payment_reference or '' pay_memo = payment and payment.communication or '' batch_pay_no = payment and payment.batch_payment_id and \ payment.batch_payment_id.name or '' pay_method = payment and payment.batch_payment_id and \ payment.batch_payment_id.payment_method_id and \ payment.batch_payment_id.payment_method_id.name or '' jou_entry_ref = move_l and move_l.move_id and \ move_l.move_id.name or '' partner = "Partner Name : " + name + '\n' partner += "Batch Payment Number : " + batch_pay_no + '\n' partner += "Payment Method : " + pay_method + '\n' partner += "Payment Number : " + pay_no + '\n' partner += "Payment Reference : " + pay_reference + '\n' partner += "Journal Entry Number : " + jou_entry_ref + '\n' partner += "Invoice Reference : " + pay_memo + '\n' vend_pay_memo = move_l.name or '' worksheet.write(row, col, ' ', cell_c_fmat) col += 1 worksheet.write(row, col, 'Bill Payment', cell_c_fmat) col += 1 worksheet.write(row, col, payment_date, cell_c_fmat) col += 1 # worksheet.write(row, col, # vend_pay_name or '', cell_l_fmat) # col += 1 # worksheet.write(row, col, # PAY_TYPE.get(ven_pay.payment_type, ''), # cell_l_fmat) # col += 1 # worksheet.write(row, col, # PARTNER_TYPE.get(ven_pay.partner_type, ''), # cell_l_fmat) # col += 1 worksheet.set_row(row, 90) worksheet.write(row, col, partner, cell_l_fmat) col += 1 worksheet.write(row, col, vend_pay_memo or '', cell_l_fmat) col += 1 bal_str = balance if currency_position == 'after': bal_str = ustr(bal_str) + ustr(currency_symbol) else: bal_str = ustr(currency_symbol) + ustr(bal_str) worksheet.write(row, col, bal_str or ustr(0.0), cell_r_fmat) col = 0 row += 1 row += 1 tot_vend_pay_str = round(tot_vend_payment, 2) if currency_position == 'after': tot_vend_pay_str = ustr(tot_vend_pay_str) + \ ustr(currency_symbol) else: tot_vend_pay_str = ustr(currency_symbol) + \ ustr(tot_vend_pay_str) worksheet.merge_range(row, 1, row, 4, 'Total - 
Cleared Checks and Payments', header_cell_l_fmat) worksheet.write(row, 5, tot_vend_pay_str, cell_r_bold_noborder) row += 1 worksheet.merge_range(row, 0, row, 3, 'Total - Reconciled', header_cell_l_fmat) filter_bal = tot_cust_payment + tot_vend_payment filter_bal_str = round(filter_bal, 2) if currency_position == 'after': filter_bal_str = ustr(filter_bal_str) + ustr(currency_symbol) else: filter_bal_str = ustr(currency_symbol) + ustr(filter_bal_str) worksheet.write(row, 5, filter_bal_str, cell_r_bold_noborder) row += 1 worksheet.merge_range( row, 0, row, 3, 'Last Reconciled Statement Balance - ' + ustr(last_reconcile_date_str), header_cell_l_fmat) last_recon_bal_str = round(last_reconcile_bal, 2) if currency_position == 'after': last_recon_bal_str = ustr(last_recon_bal_str) + ustr( currency_symbol) else: last_recon_bal_str = ustr(currency_symbol) + ustr( last_recon_bal_str) worksheet.write(row, 5, last_recon_bal_str, cell_r_bold_noborder) row += 1 worksheet.merge_range(row, 0, row, 3, 'Current Reconciled Balance', header_cell_l_fmat) worksheet.write(row, 5, filter_bal_str, cell_r_bold_noborder) row += 1 worksheet.merge_range( row, 0, row, 3, 'Reconcile Statement Balance - ' + ustr(from_date), header_cell_l_fmat) re_st_bal_tot = filter_bal + last_reconcile_bal # worksheet.write(row, 5, round(curr_bal, 2), cell_r_bold_noborder) re_st_bal_tot_str = round(re_st_bal_tot, 2) unrec_tot_str = round(unrec_tot, 2) difference_str = round(0.0, 2) if currency_position == 'after': re_st_bal_tot_str = ustr(re_st_bal_tot_str) + \ ustr(currency_symbol) unrec_tot_str = ustr(unrec_tot_str) + ustr(currency_symbol) difference_str = ustr(difference_str) + ustr(currency_symbol) else: re_st_bal_tot_str = ustr(currency_symbol) + \ ustr(re_st_bal_tot_str) unrec_tot_str = ustr(currency_symbol) + ustr(unrec_tot_str) difference_str = ustr(currency_symbol) + ustr(difference_str) worksheet.write(row, 5, re_st_bal_tot_str, cell_r_bold_noborder) row += 1 worksheet.merge_range(row, 0, row, 3, 
'Difference', header_cell_l_fmat) worksheet.write(row, 5, difference_str, cell_r_bold_noborder) row += 1 worksheet.merge_range(row, 0, row, 3, 'Unreconciled', header_cell_l_fmat) worksheet.write(row, 5, unrec_tot_str, cell_r_bold_noborder) row += 1 worksheet.merge_range(row, 0, row, 3, 'Uncleared Checks and Payments', header_cell_l_fmat) # worksheet.write(row, 5, 0.0, cell_r_bold_noborder) col = 0 row += 1 tot_unreconcile_cust_payment = 0.0 for cust_unrecon_l in unreconcile_checks_payments: trns_type = 'Payment' if cust_unrecon_l.payment_id and \ cust_unrecon_l.payment_id.payment_type: if cust_unrecon_l.payment_id.payment_type in \ ['outbound', 'transfer']: trns_type = 'Bill Payment' cust_balance = cust_unrecon_l and cust_unrecon_l.balance or 0.0 if currency_id: cust_balance = cust_unrecon_l and \ cust_unrecon_l.amount_currency or 0.0 if cust_balance in [0.0, -0.0]: cust_balance = cust_unrecon_l and \ cust_unrecon_l.balance or 0.0 tot_unreconcile_cust_payment = tot_unreconcile_cust_payment + \ cust_balance or 0.0 # journal = cust_pay.journal_id and \ # cust_pay.journal_id.name or '' payment_date = '' if cust_unrecon_l.date: payment_date = datetime.strftime(cust_unrecon_l.date, '%d-%m-%Y') # cust_unrecon_pay_name = cust_unrecon_l.name or '' partner = cust_unrecon_l.partner_id and \ cust_unrecon_l.partner_id.name or '' cust_unrecon_pay_memo = cust_unrecon_l.name or '' # if cust_unrecon_l.payment_id: # cust_unrecon_pay_name = \ # cust_unrecon_l.payment_id.name or '' # cust_unrecon_pay_memo = \ # cust_unrecon_l.payment_id.communication or '' # if cust_unrecon_l.payment_id.partner_id: # partner = cust_unrecon_l.payment_id and \ # cust_unrecon_l.payment_id.partner_id and \ # cust_unrecon_l.payment_id.partner_id.name or '' worksheet.write(row, col, ' ', cell_c_fmat) col += 1 worksheet.write(row, col, trns_type, cell_c_fmat) col += 1 worksheet.write(row, col, payment_date, cell_c_fmat) col += 1 # worksheet.write(row, col, cust_unrecon_pay_name or '', # cell_l_fmat) # col 
+= 1 # worksheet.write(row, col, # PAY_TYPE.get(cust_pay.payment_type, ''), # cell_l_fmat) # col += 1 # worksheet.write(row, col, # PARTNER_TYPE.get(cust_pay.partner_type, ''), # cell_l_fmat) # col += 1 worksheet.set_row(row, 40) worksheet.write(row, col, partner, cell_l_fmat) col += 1 worksheet.write(row, col, cust_unrecon_pay_memo or '', cell_l_fmat) col += 1 cust_balance = cust_unrecon_l and cust_unrecon_l.balance or 0.0 if currency_id: cust_balance = cust_unrecon_l and \ cust_unrecon_l.amount_currency or 0.0 if cust_balance in [0.0, -0.0]: cust_balance = cust_unrecon_l and \ cust_unrecon_l.balance or 0.0 cust_bal_str = round(cust_balance, 2) if currency_position == 'after': cust_bal_str = ustr(cust_bal_str) + ustr(currency_symbol) else: cust_bal_str = ustr(currency_symbol) + ustr(cust_bal_str) worksheet.write(row, col, cust_bal_str, cell_r_fmat) col = 0 row += 1 row += 1 tot_unrec_cust_pay_str = round(tot_unreconcile_cust_payment, 2) if currency_position == 'after': tot_unrec_cust_pay_str = ustr(tot_unrec_cust_pay_str) + ustr( currency_symbol) else: tot_unrec_cust_pay_str = ustr(currency_symbol) + ustr( tot_unrec_cust_pay_str) worksheet.merge_range(row, 1, row, 4, 'Total - Uncleared Checks and Payments', header_cell_l_fmat) worksheet.write(row, 5, tot_unrec_cust_pay_str, cell_r_bold_noborder) worksheet.merge_range(row, 1, row, 4, 'Total - Unreconciled', header_cell_l_fmat) worksheet.write(row, 5, tot_unrec_cust_pay_str, cell_r_bold_noborder) row += 1 worksheet.merge_range(row, 0, row, 3, 'Total as of ' + ustr(from_date), header_cell_l_fmat) # worksheet.write(row, 5, round(curr_bal, 2), cell_r_bold_noborder) worksheet.write(row, 5, re_st_bal_tot_str, cell_r_bold_noborder) workbook.close() buf = base64.encodestring(open('/tmp/' + file_path, 'rb').read()) try: if buf: os.remove(file_path + '.xlsx') except OSError: pass wiz_rec = wiz_exported_obj.create({ 'file': buf, 'name': 'Bank Reconciliation Report.xlsx' }) form_view = self.env.ref( 
'account_reports_extended.wiz_bank_reconcil_rep_exported_form') if wiz_rec and form_view: return { 'type': 'ir.actions.act_window', 'view_type': 'form', 'view_mode': 'form', 'res_id': wiz_rec.id, 'res_model': 'wiz.bank.reconciliation.report.exported', 'views': [(form_view.id, 'form')], 'view_id': form_view.id, 'target': 'new', } else: return {}
def load_modules(db, force_demo=False, status=None, update_module=False): initialize_sys_path() force = [] if force_demo: force.append('demo') upg_registry = {} cr = db.cursor() try: if not odoo.modules.db.is_initialized(cr): _logger.info("init db") odoo.modules.db.initialize(cr) update_module = True # process auto-installed modules tools.config["init"]["all"] = 1 tools.config['update']['all'] = 1 if not tools.config['without_demo']: tools.config["demo"]['all'] = 1 # This is a brand new registry, just created in # odoo.modules.registry.Registry.new(). registry = odoo.registry(cr.dbname) env = api.Environment(cr, SUPERUSER_ID, {}) if 'base' in tools.config['update'] or 'all' in tools.config['update']: cr.execute( "update ir_module_module set state=%s where name=%s and state=%s", ('to upgrade', 'base', 'installed')) # STEP 1: LOAD BASE (must be done before module dependencies can be computed for later steps) graph = odoo.modules.graph.Graph() graph.add_module(cr, 'base', force) if not graph: _logger.critical( 'module base cannot be loaded! (hint: verify addons-path)') raise ImportError( 'Module `base` cannot be loaded! 
(hint: verify addons-path)') # processed_modules: for cleanup step after install # loaded_modules: to avoid double loading report = registry._assertion_report loaded_modules, processed_modules = load_module_graph( cr, graph, status, perform_checks=update_module, report=report, upg_registry=upg_registry) load_lang = tools.config.pop('load_language') if load_lang or update_module: # some base models are used below, so make sure they are set up registry.setup_models(cr, partial=True) if load_lang: for lang in load_lang.split(','): tools.load_language(cr, lang) # STEP 2: Mark other modules to be loaded/updated if update_module: Module = env['ir.module.module'] if ('base' in tools.config['init']) or ('base' in tools.config['update']): _logger.info('updating modules list') Module.update_list() _check_module_names( cr, itertools.chain(tools.config['init'].keys(), tools.config['update'].keys())) module_names = [k for k, v in tools.config['init'].items() if v] if module_names: modules = Module.search([('state', '=', 'uninstalled'), ('name', 'in', module_names)]) if modules: modules.button_install() module_names = [k for k, v in tools.config['update'].items() if v] if module_names: # OpenUpgrade: in standard Odoo, '--update all' just means: # '--update base + upward (installed) dependencies. This breaks # the chain when new glue modules are encountered. # E.g. purchase in 8.0 depends on stock_account and report, # both of which are new. They may be installed, but purchase as # an upward dependency is not selected for upgrade. # Therefore, explicitely select all installed modules for # upgrading in OpenUpgrade in that case. 
domain = [('state', '=', 'installed')] if 'all' not in module_names: domain.append(('name', 'in', module_names)) modules = Module.search(domain) if modules: modules.button_upgrade() cr.execute("update ir_module_module set state=%s where name=%s", ('installed', 'base')) Module.invalidate_cache(['state']) # STEP 3: Load marked modules (skipping base which was done in STEP 1) # IMPORTANT: this is done in two parts, first loading all installed or # partially installed modules (i.e. installed/to upgrade), to # offer a consistent system to the second part: installing # newly selected modules. # We include the modules 'to remove' in the first step, because # they are part of the "currently installed" modules. They will # be dropped in STEP 6 later, before restarting the loading # process. # IMPORTANT 2: We have to loop here until all relevant modules have been # processed, because in some rare cases the dependencies have # changed, and modules that depend on an uninstalled module # will not be processed on the first pass. # It's especially useful for migrations. 
previously_processed = -1 while previously_processed < len(processed_modules): previously_processed = len(processed_modules) processed_modules += load_marked_modules( cr, graph, ['installed', 'to upgrade', 'to remove'], force, status, report, loaded_modules, update_module, upg_registry) if update_module: processed_modules += load_marked_modules( cr, graph, ['to install'], force, status, report, loaded_modules, update_module, upg_registry) registry.setup_models(cr) # STEP 3.5: execute migration end-scripts migrations = odoo.modules.migration.MigrationManager(cr, graph) for package in graph: migrations.migrate_module(package, 'end') # STEP 4: Finish and cleanup installations if processed_modules: cr.execute( """select model,name from ir_model where id NOT IN (select distinct model_id from ir_model_access)""" ) for (model, name) in cr.fetchall(): if model in registry and not registry[ model]._abstract and not registry[model]._transient: _logger.warning( 'The model %s has no access rules, consider adding one. E.g. access_%s,access_%s,model_%s,,1,0,0,0', model, model.replace('.', '_'), model.replace('.', '_'), model.replace('.', '_')) # Temporary warning while we remove access rights on osv_memory objects, as they have # been replaced by owner-only access rights cr.execute( """select distinct mod.model, mod.name from ir_model_access acc, ir_model mod where acc.model_id = mod.id""" ) for (model, name) in cr.fetchall(): if model in registry and registry[model]._transient: _logger.warning( 'The transient model %s (%s) should not have explicit access rules!', model, name) cr.execute("SELECT model from ir_model") for (model, ) in cr.fetchall(): if model in registry: env[model]._check_removed_columns(log=True) elif _logger.isEnabledFor( logging.INFO): # more an info that a warning... _logger.warning( "Model %s is declared but cannot be loaded! 
(Perhaps a module was partially removed or renamed)", model) # Cleanup orphan records env['ir.model.data']._process_end(processed_modules) for kind in ('init', 'demo', 'update'): tools.config[kind] = {} cr.commit() # STEP 5: Uninstall modules to remove if update_module: # Remove records referenced from ir_model_data for modules to be # removed (and removed the references from ir_model_data). cr.execute("SELECT name, id FROM ir_module_module WHERE state=%s", ('to remove', )) modules_to_remove = dict(cr.fetchall()) if modules_to_remove: pkgs = reversed( [p for p in graph if p.name in modules_to_remove]) for pkg in pkgs: uninstall_hook = pkg.info.get('uninstall_hook') if uninstall_hook: py_module = sys.modules['odoo.addons.%s' % (pkg.name, )] getattr(py_module, uninstall_hook)(cr, registry) Module = env['ir.module.module'] Module.browse(modules_to_remove.values()).module_uninstall() # Recursive reload, should only happen once, because there should be no # modules to remove next time cr.commit() _logger.info( 'Reloading registry once more after uninstalling modules') api.Environment.reset() return odoo.modules.registry.Registry.new( cr.dbname, force_demo, status, update_module) # STEP 6: verify custom views on every model if update_module: View = env['ir.ui.view'] for model in registry: try: View._validate_custom_views(model) except Exception as e: _logger.warning('invalid custom view(s) for model %s: %s', model, tools.ustr(e)) if report.failures: _logger.error('At least one test failed when loading the modules.') else: _logger.info('Modules loaded.') # STEP 8: call _register_hook on every model for model in env.values(): model._register_hook() # STEP 9: Run the post-install tests cr.commit() t0 = time.time() t0_sql = odoo.sql_db.sql_counter if odoo.tools.config['test_enable']: if update_module: cr.execute( "SELECT name FROM ir_module_module WHERE state='installed' and name = ANY(%s)", (processed_modules, )) else: cr.execute( "SELECT name FROM ir_module_module WHERE 
state='installed'" ) for module_name in cr.fetchall(): report.record_result( odoo.modules.module.run_unit_tests( module_name[0], cr.dbname, position=runs_post_install)) _logger.log(25, "All post-tested in %.2fs, %s queries", time.time() - t0, odoo.sql_db.sql_counter - t0_sql) finally: cr.close()
def fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False):
    """Dynamically build the mass-editing wizard form view.

    For every field configured on the ``mass.object`` record referenced by
    ``context['mass_editing_object']``, a ``selection__<field>`` pseudo-field
    (Set/Remove[/Add]) plus the field itself are injected into the view arch
    and into the returned ``fields`` dict.  The layout branch taken depends
    on the field's ``ttype``.
    """
    result = super(MassEditingWizard, self).fields_view_get(
        view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu)
    context = self._context
    if context.get('mass_editing_object'):
        mass_obj = self.env['mass.object']
        editing_data = mass_obj.browse(context.get('mass_editing_object'))
        all_fields = {}
        # Root <form> element; groups below hold the generated widgets.
        xml_form = etree.Element('form', {
            'string': tools.ustr(editing_data.name)
        })
        xml_group = etree.SubElement(xml_form, 'group', {
            'colspan': '6',
            'col': '6',
        })
        etree.SubElement(xml_group, 'label', {
            'string': '',
            'colspan': '2',
        })
        # Second group becomes the container that per-field widgets attach to.
        xml_group = etree.SubElement(xml_form, 'group', {
            'colspan': '6',
            'col': '6',
        })
        model_obj = self.env[context.get('active_model')]
        field_info = model_obj.fields_get()
        for field in editing_data.field_ids:
            if field.ttype == "many2many":
                # m2m gets a third action: 'add' (append to the relation).
                all_fields[field.name] = field_info[field.name]
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'),
                                  ('remove_m2m', 'Remove'),
                                  ('add', 'Add')]
                }
                # NOTE: xml_group is rebound here, so subsequent fields nest
                # inside this new sub-group (preserved from original code).
                xml_group = etree.SubElement(xml_group, 'group', {
                    'colspan': '6',
                    'col': '6',
                })
                etree.SubElement(xml_group, 'separator', {
                    'string': field_info[field.name]['string'],
                    'colspan': '6',
                })
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '6',
                    'nolabel': '1'
                })
                etree.SubElement(xml_group, 'field', {
                    'name': field.name,
                    'colspan': '6',
                    'nolabel': '1',
                    'attrs': ("{'invisible': [('selection__" + field.name +
                              "', '=', 'remove_m2m')]}"),
                })
            elif field.ttype == "one2many":
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')],
                }
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                    'relation': field.relation,
                }
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '4',
                })
                # NOTE(review): invisibility is keyed on 'remove_o2m' but the
                # selection above only offers 'remove' — the field therefore
                # never hides for o2m.  Looks like a latent bug; confirm
                # against the upstream mass_editing module before changing.
                etree.SubElement(xml_group, 'field', {
                    'name': field.name,
                    'colspan': '6',
                    'nolabel': '1',
                    'attrs': ("{'invisible':[('selection__" + field.name +
                              "', '=', 'remove_o2m')]}"),
                })
            elif field.ttype == "many2one":
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')],
                }
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                    'relation': field.relation,
                }
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '2',
                })
                etree.SubElement(xml_group, 'field', {
                    'name': field.name,
                    'nolabel': '1',
                    'colspan': '4',
                    'attrs': ("{'invisible':[('selection__" + field.name +
                              "', '=', 'remove')]}"),
                })
            elif field.ttype == "char":
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')],
                }
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                    # fall back to 256 when the source field has no size
                    'size': field.size or 256,
                }
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '2',
                })
                etree.SubElement(xml_group, 'field', {
                    'name': field.name,
                    'nolabel': '1',
                    'attrs': ("{'invisible':[('selection__" + field.name +
                              "','=','remove')]}"),
                    'colspan': '4',
                })
            elif field.ttype == 'selection':
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')]
                }
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '2',
                })
                etree.SubElement(xml_group, 'field', {
                    'name': field.name,
                    'nolabel': '1',
                    'colspan': '4',
                    'attrs': ("{'invisible':[('selection__" + field.name +
                              "', '=', 'remove')]}"),
                })
                # reuse the target field's own selection values
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                    'selection': field_info[field.name]['selection'],
                }
            else:
                # Fallback for every other ttype (text, numeric, date, ...).
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                }
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')]
                }
                if field.ttype == 'text':
                    # text fields get their own group + separator, full width
                    xml_group = etree.SubElement(xml_group, 'group', {
                        'colspan': '6',
                        'col': '6',
                    })
                    etree.SubElement(xml_group, 'separator', {
                        'string': all_fields[field.name]['string'],
                        'colspan': '6',
                    })
                    etree.SubElement(xml_group, 'field', {
                        'name': "selection__" + field.name,
                        'colspan': '6',
                        'nolabel': '1',
                    })
                    etree.SubElement(xml_group, 'field', {
                        'name': field.name,
                        'colspan': '6',
                        'nolabel': '1',
                        'attrs': ("{'invisible':[('selection__" + field.name +
                                  "','=','remove')]}"),
                    })
                else:
                    # redundant re-assignment kept from original code
                    all_fields["selection__" + field.name] = {
                        'type': 'selection',
                        'string': field_info[field.name]['string'],
                        'selection': [('set', 'Set'), ('remove', 'Remove')]
                    }
                    etree.SubElement(xml_group, 'field', {
                        'name': "selection__" + field.name,
                        'colspan': '2',
                    })
                    etree.SubElement(xml_group, 'field', {
                        'name': field.name,
                        'nolabel': '1',
                        'attrs': ("{'invisible':[('selection__" + field.name +
                                  "','=','remove')]}"),
                        'colspan': '4',
                    })
        # Patch fields with required extra data
        for field in all_fields.values():
            field.setdefault("views", {})
        etree.SubElement(xml_form, 'separator', {
            'string': '',
            'colspan': '6',
            'col': '6',
        })
        # Footer with the wizard action buttons.
        xml_group3 = etree.SubElement(xml_form, 'footer', {})
        etree.SubElement(xml_group3, 'button', {
            'string': 'Apply',
            'class': 'btn-primary',
            'type': 'object',
            'name': 'action_apply',
        })
        etree.SubElement(xml_group3, 'button', {
            'string': 'Close',
            'class': 'btn-default',
            'special': 'cancel',
        })
        root = xml_form.getroottree()
        result['arch'] = etree.tostring(root)
        result['fields'] = all_fields
    return result
def execute(self):
    """Build and dispatch the partner/invoice/journal report.

    Validates the wizard options, assembles a search domain plus a
    human-readable ``filters`` list, collects per-invoice (overview) or
    per-line (detailed) rows into ``print_data``, then renders via QWeb
    (pdf/html) or xlwt (excel).

    NOTE(review): this method contains Python-2-only constructs
    (``print data`` statement, ``dict.iterkeys``) and two spans corrupted by
    a ``******`` redaction artifact (the User/SalesTeam filter lines below).
    The source as stored does not parse; the corrupted spans are preserved
    verbatim and flagged inline — reconstruct them from the original module
    before use.
    """
    # --- option validation -------------------------------------------------
    if not self.draft and not self.open and not self.paid:
        raise UserError(_("you should select at least 1 State"))
    if not self.invoice_type and not self.refund_type:
        raise UserError(_("you should select at least 1 Invoice Type"))
    if self.print_by == 'html' and self.artmode == 'detailed':
        raise UserError(
            _("To run this report in the Detailed version you cannot choose 'HTML' "
              "as the printing Option. Please choose PDF or xlsx and try again"
              ))
    # --- domain + filter-description assembly ------------------------------
    filters = list()
    l = ""
    inv_obj = self.env['account.invoice']
    domain = [('date_invoice', '>=', self.date_from),
              ('date_invoice', '<=', self.date_to)]
    if self.type == 'customer' and self.partner_ids.ids:
        domain.append(('partner_id', 'in', list(self.partner_ids.ids)))
        l = _("Partners: ")
        for p in self.partner_ids:
            l += p.name + ","
        filters.append(l)
    if self.type == 'supplier' and self.partner2_ids.ids:
        domain.append(('partner_id', 'in', list(self.partner2_ids.ids)))
        l = _("Partners: ")
        for p in self.partner2_ids:
            l += p.name + ","
        filters.append(l)
    if self.categ_ids:
        domain.append(
            ('partner_id.category_id', 'in', self.categ_ids._ids))
        l = _("Categories: ")
        for p in self.categ_ids:
            l += p.name + ","
        filters.append(l)
    if self.user_id:
        domain.append(('user_id', '=', self.user_id.id))
        # NOTE(review): the next line is corrupted source (redaction
        # artifact).  It originally appended the user filter label and — in a
        # separate, now-lost `if self.salesteam_id:` branch — the sales-team
        # filter.  Restore from the original module; kept verbatim here.
        filters.append(_("User: "******"SalesTeam: ") + self.salesteam_id.name)
    if self.journal_ids:
        domain.append(('journal_id', 'in', self.journal_ids._ids))
        l = _("Journals: ")
        for p in self.journal_ids:
            l += p.name + ","
        filters.append(l)
    if self.paym_term_id:
        domain.append(('payment_term_id', '=', self.paym_term_id.id))
        filters.append(_("Payment Term: ") + self.paym_term_id.name)
    if self.company_id:
        domain.append(('company_id', '=', self.company_id.id))
        filters.append(_("Company: ") + self.company_id.name)
    # invoice states selected on the wizard
    state = list()
    if self.open:
        state.append('open')
    if self.draft:
        state.append('draft')
    if self.paid:
        state.append('paid')
    domain.append(('state', 'in', state))
    filters.append(_("States: ") + tools.ustr(state))
    # invoice types (NB: `type` shadows the builtin — kept as in original)
    l = "["
    type = list()
    if self.type == 'customer':
        if self.invoice_type:
            type.append('out_invoice')
            l += _("Sales Invoices") + ','
        if self.refund_type:
            type.append('out_refund')
            l += _("Sales Credit Notes") + ','
    if self.type == 'supplier':
        if self.invoice_type:
            type.append('in_invoice')
            l += _("Purchase Invoices") + ','
        if self.refund_type:
            type.append('in_refund')
            l += _("Purchase Credit Notes") + ','
    l += "]"
    domain.append(('type', 'in', type))
    filters.append(_("Types: ") + l)
    filters.append(_("Period: ") + self.date_from + ':' + self.date_to)
    # --- row collection ----------------------------------------------------
    print_data = dict()
    inv_qty = dict()
    footer_cur = dict()
    footer_comp_cur = dict()
    curtotals = dict()
    list_titles = list()
    list_align = list()
    list_data = list()
    oldindex = -1
    tot_cur = 0.0
    invoices = inv_obj.search(domain, order=self.sort_by)
    for index, inv in enumerate(invoices):
        # refunds are reported with negative sign
        if inv.type in ('out_invoice', 'in_invoice'):
            sign = 1
        if inv.type in ('out_refund', 'in_refund'):
            sign = -1
        if self.artmode == 'overview':
            # one row per invoice, optionally filtered by analytic tags
            showf = True
            if self.hanal_tag_ids:
                if not self.setinset(self.hanal_tag_ids._ids,
                                     inv.analytic_tag_ids._ids):
                    showf = False
            if showf:
                list_data = list()
                list_titles = list()
                list_align = list()
                if inv.display_name2:
                    list_data.append(inv.display_name2)
                else:
                    list_data.append("BORRADOR")
                list_titles.append(_("Invoice Nr"))
                list_align.append('center')
                list_data.append(
                    datetime.strptime(inv.date_invoice,
                                      "%Y-%m-%d").strftime("%d-%m-%Y"))
                list_titles.append(_("Invoice Date"))
                list_align.append('center')
                list_data.append(inv.partner_id.name[:30])
                list_titles.append(_("Customer"))
                list_align.append('center')
                if inv.type in ('out_invoice', 'in_invoice'):
                    list_data.append(_("FC"))
                    list_titles.append(_("Invoice Type"))
                    list_align.append('center')
                if inv.type in ('out_refund', 'in_refund'):
                    list_data.append("NC")
                    list_titles.append(_("Invoice Type"))
                    list_align.append('center')
                list_data.append(inv.amount_untaxed * sign)
                list_titles.append(_("Untaxed Amount"))
                list_align.append('right')
                if self.show_inv_taxes:
                    list_data.append(inv.amount_tax * sign)
                    list_titles.append(_("Taxes"))
                    list_align.append('right')
                list_data.append(inv.amount_total * sign)
                list_titles.append(_("Total Amount (incl Tax)"))
                list_align.append('right')
                if self.show_exch_rates:
                    list_data.append(inv.currency_rate)
                    list_titles.append(_("Exchange Rate"))
                    list_align.append('center')
                    list_data.append(inv.currency_id.name)
                    list_titles.append(_("Currency"))
                    list_align.append('center')
                if self.show_comp_currency:
                    list_data.append(inv.amount_total * inv.currency_rate * sign)
                    list_titles.append(_("Total In Company Currency"))
                    list_align.append('right')
                    tot_cur += inv.amount_total * inv.currency_rate * sign
                # sortable key "<invoice index>:<line index>" (zero-padded)
                key = '{:>010s}:{:>010s}'.format(str(index), '0')
                print_data.update({key: list_data})
                # per-currency footer totals
                if not inv.currency_id.name in footer_cur.keys():
                    footer_cur.update(
                        {inv.currency_id.name: inv.amount_total})
                else:
                    footer_cur[inv.currency_id.name] += inv.amount_total
                if self.show_comp_currency:
                    if not inv.company_id.currency_id.name in footer_comp_cur.keys(
                            ):
                        footer_comp_cur.update({
                            inv.company_id.currency_id.name:
                            (inv.amount_total * inv.currency_rate * sign)
                        })
                    else:
                        footer_comp_cur[inv.company_id.currency_id.
                                        name] += (inv.amount_total *
                                                  inv.currency_rate * sign)
                if not inv.currency_id.name in curtotals.keys():
                    curtotals.update(
                        {inv.currency_id.name: inv.amount_total * sign})
                else:
                    curtotals[
                        inv.currency_id.name] += inv.amount_total * sign
                if not _("Invoice Qty") in inv_qty.keys():
                    inv_qty.update({_("Invoice Qty"): 1})
                else:
                    inv_qty[_("Invoice Qty")] += 1
        if self.artmode == 'detailed':
            # one row per invoice line, optionally filtered by analytic
            # tags / analytic account
            for indexl, line in enumerate(inv.invoice_line_ids):
                showf = True
                if self.ranal_tag_ids:
                    if not self.setinset(self.ranal_tag_ids._ids,
                                         line.analytic_tag_ids._ids):
                        showf = False
                if self.anal_account_id:
                    if self.anal_account_id.id != line.account_analytic_id.id:
                        showf = False
                if showf:
                    # count each invoice only once across its lines
                    if oldindex != index:
                        oldindex = index
                        if not _("Invoice Qty") in inv_qty.keys():
                            inv_qty.update({_("Invoice Qty"): 1})
                        else:
                            inv_qty[_("Invoice Qty")] += 1
                    list_data = list()
                    list_titles = list()
                    val = 0.0
                    # unit price net of line discount; taxes recomputed
                    price_unit = line.price_unit * (
                        1 - (line.discount or 0.0) / 100.0)
                    taxes = line.invoice_line_tax_ids.compute_all(
                        price_unit, inv.currency_id, line.quantity,
                        line.product_id, inv.partner_id)['taxes']
                    for tax in taxes:
                        vals = inv._prepare_tax_line_vals(line, tax)
                        val += vals['amount']
                    if inv.display_name2:
                        list_data.append(inv.display_name2)
                    else:
                        list_data.append("BORRADOR")
                    list_titles.append(_("Invoice Nr"))
                    list_align.append('center')
                    list_data.append(
                        datetime.strptime(inv.date_invoice,
                                          "%Y-%m-%d").strftime("%d-%m-%Y"))
                    list_titles.append(_("Invoice Date"))
                    list_align.append('center')
                    list_data.append(inv.partner_id.name[:30])
                    list_titles.append(_("Customer"))
                    list_align.append('center')
                    if inv.type in ('out_invoice', 'in_invoice'):
                        list_data.append(_("FC"))
                        list_titles.append(_("Invoice Type"))
                        list_align.append('center')
                    if inv.type in ('out_refund', 'in_refund'):
                        list_data.append("NC")
                        list_titles.append(_("Invoice Type"))
                        list_align.append('center')
                    list_data.append(line.product_id.name)
                    list_titles.append(_("Product"))
                    list_align.append('center')
                    list_data.append(line.quantity)
                    list_titles.append(_("Qty"))
                    list_align.append('right')
                    list_data.append(price_unit * sign)
                    list_titles.append(_("Unit Price"))
                    list_align.append('right')
                    list_data.append(line.price_subtotal * sign)
                    list_titles.append(_("Untaxed Amount"))
                    list_align.append('right')
                    if self.show_inv_taxes:
                        list_data.append(val * sign)
                        list_titles.append(_("Taxes"))
                        list_align.append('right')
                    list_data.append(line.price_subtotal * sign + val * sign)
                    list_titles.append(_("Total Amount (incl Tax)"))
                    list_align.append('right')
                    if self.show_exch_rates:
                        list_data.append(inv.currency_rate)
                        list_titles.append(_("Exchange Rate"))
                        list_align.append('right')
                        list_data.append(inv.currency_id.name)
                        list_titles.append(_("Currency"))
                        list_align.append('center')
                    if self.show_comp_currency:
                        list_data.append((line.price_subtotal + val) *
                                         inv.currency_rate * sign)
                        list_titles.append(_("Total In Company Currency"))
                        list_align.append('right')
                    key = '{:>010s}:{:>010s}'.format(
                        str(index), str(indexl))
                    print_data.update({key: list_data})
                    if not inv.currency_id.name in curtotals.keys():
                        curtotals.update({
                            inv.currency_id.name:
                            line.price_subtotal * sign
                        })
                    else:
                        curtotals[inv.currency_id.
                                  name] += line.price_subtotal * sign
                    # NOTE(review): adds the whole invoice total once per
                    # visible LINE — probably over-counts; confirm intent.
                    if self.show_comp_currency:
                        tot_cur += inv.amount_total * inv.currency_rate * sign
    # QWeb reports cannot handle huge datasets
    if len(print_data) > 5000 and self.print_by in ['html', 'pdf']:
        raise UserError(
            _("report has more than 5000 lines, please use Excel instead"))
    #print print_data
    #print list_titles
    # --- rendering ---------------------------------------------------------
    if self.print_by in ['html', 'pdf']:
        datas = {
            'print_data': print_data,
            'inv_qty': inv_qty,
            'footer_cur': footer_cur,
            'footer_comp_cur': footer_comp_cur,
            'curtotals': curtotals,
            'list_titles': list_titles,
            'filters2': filters,
            'list_align': list_align,
            'type': self.type,
            'tot_cur': tot_cur,
        }
        if self.print_by == 'pdf':
            return self.env['report'].with_context(
                landscape=True).get_action(
                    self, 'partners_invoices_journals.repoort_pdf',
                    data=datas)
        if self.print_by == 'html':
            return self.env['report'].with_context(
                landscape=True).get_action(
                    self, 'partners_invoices_journals.repoort_html',
                    data=datas)
    if self.print_by == 'excel':
        context = self._context
        filename = _('partners_invoices_journals.xls')
        workbook = xlwt.Workbook(encoding="UTF-8")
        worksheet = workbook.add_sheet(_('Detail'))
        worksheet.write(
            0, 0, _('Nombre del Informe: Partners Invoices Journals'))
        worksheet.write(1, 0, _('Empresa: ') + self.env.user.company_id.name)
        line = 3
        row = 0
        for title in list_titles:
            worksheet.write(line, row, title)
            row += 1
        # NOTE(review): `iterkeys` and the bare `print` statement below are
        # Python-2 only; this block does not run under Python 3 as-is.
        for data in sorted(print_data.iterkeys()):
            row = 0
            print data
            line += 1
            for index, pos in enumerate(list_titles):
                worksheet.write(line, row, print_data[data][index])
                row += 1
        if tot_cur > 0:
            line += 1
            row -= 1
            worksheet.write(line, row, tot_cur)
            row = 0
        line += 3
        worksheet.write(line, row, "Totales por Moneda")
        for cur in curtotals:
            line += 1
            row = 0
            worksheet.write(line, row, cur)
            row += 1
            worksheet.write(line, row, curtotals[cur])
        fp = StringIO()
        workbook.save(fp)
        export_id = self.env['excel.extended'].create({
            'excel_file': base64.encodestring(fp.getvalue()),
            'file_name': filename
        }).id
        fp.close()
        return {
            'view_mode': 'form',
            'res_id': export_id,
            'res_model': 'excel.extended',
            'view_type': 'form',
            'type': 'ir.actions.act_window',
            'context': context,
            'target': 'new',
        }
def onchange_employee_id(self, date_from, date_to, employee_id=False,
                         contract_id=False):
    """Recompute payslip defaults when the employee or period changes.

    :param date_from: period start as a ``%Y-%m-%d`` string
    :param date_to: period end as a ``%Y-%m-%d`` string
    :param employee_id: id of the selected ``hr.employee`` (False = none)
    :param contract_id: optional forced ``hr.contract`` id
    :return: onchange-style ``{'value': {...}}`` dict with the slip name,
        contract, structure and recomputed worked-days / input lines.
    """
    # defaults: clear computed lines and schedule deletion of the existing
    # input / worked-days lines via (2, id) unlink commands.
    # FIX: the original built these with map(lambda ...); under Python 3
    # that yields a lazy map object the ORM cannot consume as a commands
    # list.  List comprehensions are identical under Python 2 and remain a
    # concrete list under Python 3.
    res = {
        'value': {
            'line_ids': [],
            # delete old input lines
            'input_line_ids': [(2, x) for x in self.input_line_ids.ids],
            # delete old worked days lines
            'worked_days_line_ids': [
                (2, x) for x in self.worked_days_line_ids.ids
            ],
            #'details_by_salary_head':[], TODO put me back
            'name': '',
            'contract_id': False,
            'struct_id': False,
        }
    }
    if (not employee_id) or (not date_from) or (not date_to):
        return res

    # slip title uses the month/year of the period start
    ttyme = datetime.fromtimestamp(
        time.mktime(time.strptime(date_from, "%Y-%m-%d")))
    employee = self.env['hr.employee'].browse(employee_id)
    res['value'].update({
        'name': _('Salary Slip of %s for %s') % (
            employee.name, tools.ustr(ttyme.strftime('%B-%Y'))),
        'company_id': employee.company_id.id
    })

    if not self.env.context.get('contract'):
        # fill with the first contract of the employee
        contract_ids = self.get_contract(employee, date_from, date_to)
    else:
        if contract_id:
            # set the list of contract for which the input have to be filled
            contract_ids = [contract_id]
        else:
            # if we don't give the contract, then the input to fill should
            # be for all current contracts of the employee
            contract_ids = self.get_contract(employee, date_from, date_to)

    if not contract_ids:
        return res
    contract = self.env['hr.contract'].browse(contract_ids[0])
    res['value'].update({'contract_id': contract.id})

    struct = contract.struct_id
    if not struct:
        return res
    res['value'].update({
        'struct_id': struct.id,
    })

    # computation of the salary input
    worked_days_line_ids = self.get_worked_day_lines(
        contract_ids, date_from, date_to)
    input_line_ids = self.get_inputs(contract_ids, date_from, date_to)
    res['value'].update({
        'worked_days_line_ids': worked_days_line_ids,
        'input_line_ids': input_line_ids,
    })
    return res
def import_task_apply(self):
    """Import project tasks from the uploaded CSV or Excel file.

    Four branches, selected by ``import_type`` (csv/excel) and
    ``import_method`` (default / proj_user_wise):

    * ``default``: columns are project, user, task name, description,
      deadline, planned hours; project/user are resolved by name.
    * ``proj_user_wise``: columns are task name, planned hours, deadline,
      description; project/user come from the wizard itself.

    Rows that fail validation are collected in ``skipped_line_no`` and
    reported via ``show_success_msg``.

    NOTE(review): the skipped-row keys mix ``counter + 2`` (data rows) and
    plain ``counter`` (exception handler) — the reported line numbers are
    therefore inconsistent; confirm intended numbering before changing.
    """
    project_task_obj = self.env['project.task']
    project_obj = self.env['project.project']
    user_obj = self.env['res.users']
    #perform import task using by default method...
    if self and self.file:
        #For CSV
        #default import
        if self.import_type == 'csv' and self.import_method == 'default':
            counter = 0
            skipped_line_no = {}
            try:
                file = str(base64.decodestring(self.file).decode('utf-8'))
                myreader = csv.reader(file.splitlines())
                skip_header = True
                for row in myreader:
                    try:
                        if skip_header:
                            # first row is the column-header row
                            skip_header = False
                            continue
                        if row[2] != '':
                            final_deadline_date = None
                            if row[4] != '':
                                cd = row[4]
                                cd = str(
                                    datetime.strptime(cd, '%Y-%m-%d').date())
                                final_deadline_date = cd
                            search_project_id = False
                            if row[0] != '':
                                # resolve project by exact name; skip row if
                                # not found
                                search_project = project_obj.search(
                                    [('name', '=', row[0])], limit=1)
                                if search_project:
                                    search_project_id = search_project.id
                                else:
                                    search_project_id = False
                                    skipped_line_no[
                                        str(counter + 2)] = " - Project not found. "
                                    counter = counter + 1
                                    continue
                            search_user_id = False
                            if row[1] != '':
                                # unknown user is tolerated (task unassigned)
                                search_user = user_obj.search(
                                    [('name', '=', row[1])], limit=1)
                                if search_user:
                                    search_user_id = search_user.id
                                else:
                                    search_user_id = False
                            vals = {
                                'name': row[2],
                                'date_deadline': final_deadline_date,
                                'description': row[3],
                                'project_id': search_project_id,
                                'user_id': search_user_id,
                                'planned_hours': row[5],
                            }
                            created_pt = project_task_obj.create(vals)
                            counter = counter + 1
                        else:
                            skipped_line_no[str(
                                counter + 2)] = " - Task name is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(
                            counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your csv file does not match with our format"
                      ))
            if counter == 0:
                raise UserError(_("Something went wrong"))
            elif counter >= 1:
                completed_task = counter - len(skipped_line_no)
                res = self.show_success_msg(completed_task, skipped_line_no)
                return res
        #project and user wise import.
        if self.import_type == 'csv' and self.import_method == 'proj_user_wise' and self.user_id and self.project_id:
            counter = 0
            skipped_line_no = {}
            try:
                file = str(base64.decodestring(self.file).decode('utf-8'))
                myreader = csv.reader(file.splitlines())
                skip_header = True
                for row in myreader:
                    try:
                        if skip_header:
                            skip_header = False
                            continue
                        if row[0] != '':
                            final_deadline_date = None
                            if row[2] != '':
                                cd = row[2]
                                cd = str(
                                    datetime.strptime(cd, '%Y-%m-%d').date())
                                final_deadline_date = cd
                            # project/user are fixed from the wizard here
                            vals = {
                                'name': row[0],
                                'planned_hours': row[1],
                                'date_deadline': final_deadline_date,
                                'description': row[3],
                                'project_id': self.project_id.id,
                                'user_id': self.user_id.id,
                            }
                            created_pt = project_task_obj.create(vals)
                            counter = counter + 1
                        else:
                            skipped_line_no[str(
                                counter + 2)] = " - Task name is empty. "
                            counter = counter + 1
                            continue
                    except Exception as e:
                        skipped_line_no[str(
                            counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your csv file does not match with our format"
                      ))
            if counter == 0:
                raise UserError(_("Something went wrong"))
            elif counter >= 1:
                completed_task = counter - len(skipped_line_no)
                res = self.show_success_msg(completed_task, skipped_line_no)
                return res
        #For Excel
        #default import
        if self.import_type == 'excel' and self.import_method == 'default':
            counter = 0
            skipped_line_no = {}
            try:
                wb = xlrd.open_workbook(
                    file_contents=base64.decodestring(self.file))
                sheet = wb.sheet_by_index(0)
                skip_header = True
                for row in range(sheet.nrows):
                    try:
                        if skip_header:
                            skip_header = False
                            continue
                        if sheet.cell(row, 2).value != '':
                            final_deadline_date = None
                            if sheet.cell(row, 4).value != '':
                                cd = sheet.cell(row, 4).value
                                cd = str(
                                    datetime.strptime(cd, '%Y-%m-%d').date())
                                final_deadline_date = cd
                            search_project_id = False
                            if sheet.cell(row, 0).value != '':
                                search_project = project_obj.search(
                                    [('name', '=', sheet.cell(row, 0).value)],
                                    limit=1)
                                if search_project:
                                    search_project_id = search_project.id
                                else:
                                    search_project_id = False
                                    skipped_line_no[
                                        str(counter + 2)] = " - Project not found. "
                                    counter = counter + 1
                                    continue
                            search_user_id = False
                            if sheet.cell(row, 1).value != '':
                                search_user = user_obj.search([
                                    ('name', '=', sheet.cell(row, 1).value)
                                ], limit=1)
                                if search_user:
                                    search_user_id = search_user.id
                                else:
                                    search_user_id = False
                            vals = {
                                'name': sheet.cell(row, 2).value,
                                'date_deadline': final_deadline_date,
                                'description': sheet.cell(row, 3).value,
                                'project_id': search_project_id,
                                'user_id': search_user_id,
                                'planned_hours': sheet.cell(row, 5).value,
                            }
                            created_pt = project_task_obj.create(vals)
                            counter = counter + 1
                        else:
                            skipped_line_no[str(
                                counter + 2)] = " - Task name is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(
                            counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your excel file does not match with our format"
                      ))
            if counter == 0:
                raise UserError(_("Something went wrong"))
            elif counter >= 1:
                completed_task = counter - len(skipped_line_no)
                res = self.show_success_msg(completed_task, skipped_line_no)
                return res
        #Project and user wise import
        if self.import_type == 'excel' and self.import_method == 'proj_user_wise' and self.user_id and self.project_id:
            counter = 0
            skipped_line_no = {}
            try:
                wb = xlrd.open_workbook(
                    file_contents=base64.decodestring(self.file))
                sheet = wb.sheet_by_index(0)
                skip_header = True
                for row in range(sheet.nrows):
                    try:
                        if skip_header:
                            skip_header = False
                            continue
                        if sheet.cell(row, 0).value != '':
                            final_deadline_date = None
                            if sheet.cell(row, 2).value != '':
                                cd = sheet.cell(row, 2).value
                                cd = str(
                                    datetime.strptime(cd, '%Y-%m-%d').date())
                                final_deadline_date = cd
                            vals = {
                                'name': sheet.cell(row, 0).value,
                                'planned_hours': sheet.cell(row, 1).value,
                                'date_deadline': final_deadline_date,
                                'description': sheet.cell(row, 3).value,
                                'project_id': self.project_id.id,
                                'user_id': self.user_id.id,
                            }
                            created_pt = project_task_obj.create(vals)
                            counter = counter + 1
                        else:
                            skipped_line_no[str(
                                counter + 2)] = " - Task name is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(
                            counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your excel file does not match with our format"
                      ))
            if counter == 0:
                raise UserError(_("Something went wrong"))
            elif counter >= 1:
                completed_task = counter - len(skipped_line_no)
                res = self.show_success_msg(completed_task, skipped_line_no)
                return res
def create_from_ui(self, orders, kitchen=False):
    """Persist POS orders pushed from the point-of-sale UI.

    :param orders: list of UI order payloads (``{'data': {...},
        'to_invoice': bool}``)
    :param kitchen: when True, orders are only forwarded to
        ``_process_order(order, True)`` (kitchen flow) without payment or
        invoicing.
    :return: a single order id in the kitchen flow, otherwise the list of
        processed order ids.

    NOTE(review): several oddities preserved from the original — the
    kitchen branch returns inside the loop (only the FIRST order is
    processed); when no new orders exist, ALL submitted orders (including
    already-known references) are re-processed; and the indentation of the
    ``to_invoice`` handling relative to the ``offline_delete_order`` guard
    was ambiguous in the mangled source (kept nested under the guard here —
    confirm against the module's history).
    """
    # Keep only new orders
    submitted_references = [o['data']['name'] for o in orders]
    existing_order_ids = self.search(
        [('pos_reference', 'in', submitted_references)]).sudo().ids
    existing_orders = self.browse(
        existing_order_ids).read(['pos_reference'])
    # existing_orders = self.read(existing_order_ids, ['pos_reference'])
    existing_references = set([o['pos_reference'] for o in existing_orders])
    orders_to_save = [o for o in orders
                      if o['data']['name'] not in existing_references]
    existing_orders_to_save = [
        o for o in orders if o['data']['name'] in existing_references]
    kitchen_order = [o for o in orders]
    order_ids = []
    if kitchen:
        for tmp_order in kitchen_order:
            order = tmp_order['data']
            order_id = self._process_order(order, True)
            # returns on the first iteration (preserved from original)
            return order_id
    elif not orders_to_save:
        # nothing new: (re-)process every submitted order
        for tmp_order in kitchen_order:
            to_invoice = tmp_order['to_invoice']
            order = tmp_order['data']
            order_id = self._process_order(order)
            order_ids.append(order_id)
            if not order.get('offline_delete_order', False):
                try:
                    self.browse([order_id]).action_pos_order_paid()
                except Exception as e:
                    # best-effort: log and keep processing remaining orders
                    _logger.error(
                        'Could not fully process the POS Order: %s',
                        tools.ustr(e))
                if to_invoice:
                    pos_order = self.browse([order_id])
                    pos_order.action_pos_order_invoice()
                    pos_order.invoice_id.sudo().action_invoice_open()
                    pos_order.account_move = pos_order.invoice_id.move_id
        return order_ids
    else:
        # process orders already known by reference first ...
        if existing_orders_to_save:
            for tmp_order in existing_orders_to_save:
                to_invoice = tmp_order['to_invoice']
                order = tmp_order['data']
                order_id = self._process_order(order)
                order_ids.append(order_id)
                if not order.get('offline_delete_order', False):
                    try:
                        self.browse([order_id]).action_pos_order_paid()
                    except Exception as e:
                        _logger.error(
                            'Could not fully process the POS Order: %s',
                            tools.ustr(e))
                    if to_invoice:
                        pos_order = self.browse([order_id])
                        pos_order.action_pos_order_invoice()
                        pos_order.invoice_id.sudo().action_invoice_open()
                        pos_order.account_move = pos_order.invoice_id.move_id
        # ... then the genuinely new orders
        for tmp_order in orders_to_save:
            to_invoice = tmp_order['to_invoice']
            order = tmp_order['data']
            order_id = self._process_order(order)
            order_ids.append(order_id)
            if not order.get('offline_delete_order', False):
                try:
                    self.browse([order_id]).action_pos_order_paid()
                except Exception as e:
                    _logger.error(
                        'Could not fully process the POS Order: %s',
                        tools.ustr(e))
                if to_invoice:
                    pos_order = self.browse([order_id])
                    pos_order.action_pos_order_invoice()
                    pos_order.invoice_id.sudo().action_invoice_open()
                    pos_order.account_move = pos_order.invoice_id.move_id
        return order_ids
def schedule_backup(self):
    """Cron entry point: dump every configured database, optionally push
    the dump to an SFTP server, and purge out-of-date backup files.

    For each ``db.backup`` configuration record:
    1. dump the database into ``rec.folder``;
    2. if ``sftp_write``, mirror the folder's dumps to ``sftp_path`` and
       delete remote dumps older than ``days_to_keep_sftp``;
    3. if ``autoremove``, delete local dumps older than ``days_to_keep``.

    NOTE(review): the failure e-mail body below contains a ``******``
    redaction artifact (the Username/Password lines) and does not parse as
    stored — restore that string from the original auto_backup module.
    """
    conf_ids = self.search([])
    for rec in conf_ids:
        db_list = self.get_db_list(rec.host, rec.port)
        if rec.name in db_list:
            try:
                if not os.path.isdir(rec.folder):
                    os.makedirs(rec.folder)
            except:
                raise
            # Create name for dumpfile.
            bkp_file = '%s_%s.%s' % (time.strftime('%Y_%m_%d_%H_%M_%S'),
                                     rec.name, rec.backup_type)
            file_path = os.path.join(rec.folder, bkp_file)
            uri = 'http://' + rec.host + ':' + rec.port
            # NOTE(review): conn/bkp are set but never used afterwards.
            conn = xmlrpclib.ServerProxy(uri + '/xmlrpc/db')
            bkp = ''
            try:
                # try to backup database and write it away
                fp = open(file_path, 'wb')
                odoo.service.db.dump_db(rec.name, fp, rec.backup_type)
                fp.close()
            except Exception as error:
                _logger.debug(
                    "Couldn't backup database %s. Bad database administrator password for server running at http://%s:%s"
                    % (rec.name, rec.host, rec.port))
                _logger.debug("Exact error from the exception: " +
                              str(error))
                continue
        else:
            _logger.debug("database %s doesn't exist on http://%s:%s" %
                          (rec.name, rec.host, rec.port))
        # Check if user wants to write to SFTP or not.
        if rec.sftp_write is True:
            try:
                # Store all values in variables
                dir = rec.folder
                pathToWriteTo = rec.sftp_path
                ipHost = rec.sftp_host
                portHost = rec.sftp_port
                usernameLogin = rec.sftp_user
                passwordLogin = rec.sftp_password
                _logger.debug('sftp remote path: %s' % pathToWriteTo)
                try:
                    s = paramiko.SSHClient()
                    s.set_missing_host_key_policy(paramiko.AutoAddPolicy())
                    s.connect(ipHost, portHost, usernameLogin, passwordLogin,
                              timeout=20)
                    sftp = s.open_sftp()
                except Exception as error:
                    # NOTE(review): connect errors are only logged; `sftp`
                    # stays unbound and the NameError below is swallowed by
                    # the outer except.
                    _logger.critical(
                        'Error connecting to remote server! Error: ' +
                        str(error))
                try:
                    sftp.chdir(pathToWriteTo)
                except IOError:
                    # Create directory and subdirs if they do not exist.
                    currentDir = ''
                    for dirElement in pathToWriteTo.split('/'):
                        currentDir += dirElement + '/'
                        try:
                            sftp.chdir(currentDir)
                        except:
                            _logger.info(
                                '(Part of the) path didn\'t exist. Creating it now at '
                                + currentDir)
                            # Make directory and then navigate into it
                            sftp.mkdir(currentDir, 777)
                            sftp.chdir(currentDir)
                            pass
                sftp.chdir(pathToWriteTo)
                # Loop over all files in the directory.
                for f in os.listdir(dir):
                    if rec.name in f:
                        fullpath = os.path.join(dir, f)
                        if os.path.isfile(fullpath):
                            try:
                                sftp.stat(os.path.join(pathToWriteTo, f))
                                _logger.debug(
                                    'File %s already exists on the remote FTP Server ------ skipped'
                                    % fullpath)
                            # This means the file does not exist (remote) yet!
                            except IOError:
                                try:
                                    # sftp.put(fullpath, pathToWriteTo)
                                    sftp.put(
                                        fullpath,
                                        os.path.join(pathToWriteTo, f))
                                    _logger.info(
                                        'Copying File % s------ success' %
                                        fullpath)
                                except Exception as err:
                                    _logger.critical(
                                        'We couldn\'t write the file to the remote server. Error: '
                                        + str(err))
                # Navigate in to the correct folder.
                sftp.chdir(pathToWriteTo)
                # Loop over all files in the directory from the back-ups.
                # We will check the creation date of every back-up.
                for file in sftp.listdir(pathToWriteTo):
                    if rec.name in file:
                        # Get the full path
                        fullpath = os.path.join(pathToWriteTo, file)
                        # Get the timestamp from the file on the external server
                        timestamp = sftp.stat(fullpath).st_atime
                        createtime = datetime.datetime.fromtimestamp(
                            timestamp)
                        now = datetime.datetime.now()
                        delta = now - createtime
                        # If the file is older than the days_to_keep_sftp (the days to keep that the user filled in on the Odoo form it will be removed.
                        if delta.days >= rec.days_to_keep_sftp:
                            # Only delete files, no directories!
                            if sftp.isfile(fullpath) and (
                                    ".dump" in file or '.zip' in file):
                                _logger.info(
                                    "Delete too old file from SFTP servers: "
                                    + file)
                                sftp.unlink(file)
                # Close the SFTP session.
                sftp.close()
            except Exception as e:
                _logger.debug(
                    'Exception! We couldn\'t back up to the FTP server..')
                # At this point the SFTP backup failed. We will now check if the user wants
                # an e-mail notification about this.
                if rec.send_mail_sftp_fail:
                    try:
                        ir_mail_server = self.env['ir.mail_server']
                        # NOTE(review): corrupted source (redaction artifact)
                        # kept verbatim below — the original interpolated
                        # rec.sftp_user / rec.sftp_password here.
                        message = "Dear,\n\nThe backup for the server " + rec.host + " (IP: " + rec.sftp_host + ") failed.Please check the following details:\n\nIP address SFTP server: " + rec.sftp_host + "\nUsername: "******"\nPassword: "******"\n\nError details: " + tools.ustr(
                            e) + "\n\nWith kind regards"
                        msg = ir_mail_server.build_email(
                            "auto_backup@" + rec.name + ".com",
                            [rec.email_to_notify],
                            "Backup from " + rec.host + "(" + rec.sftp_host +
                            ") failed", message)
                        ir_mail_server.send_email(self._cr, self._uid, msg)
                    except Exception:
                        pass
        """
        Remove all old files (on local server) in case this is configured..
        """
        if rec.autoremove:
            dir = rec.folder
            # Loop over all files in the directory.
            for f in os.listdir(dir):
                fullpath = os.path.join(dir, f)
                # Only delete the ones wich are from the current database
                # (Makes it possible to save different databases in the same folder)
                if rec.name in fullpath:
                    timestamp = os.stat(fullpath).st_ctime
                    createtime = datetime.datetime.fromtimestamp(timestamp)
                    now = datetime.datetime.now()
                    delta = now - createtime
                    if delta.days >= rec.days_to_keep:
                        # Only delete files (which are .dump and .zip), no directories.
                        if os.path.isfile(fullpath) and (".dump" in f
                                                         or '.zip' in f):
                            _logger.info(
                                "Delete local out-of-date file: " + fullpath)
                            os.remove(fullpath)
def import_attendance_apply(self):
    """Import hr.attendance records from the uploaded CSV or Excel file.

    Columns 0-2 are fixed (badge/employee id, check-in, check-out); columns
    3+ are dynamic hr.attendance fields resolved from the header row.
    Returns the wizard action produced by ``show_success_msg`` summarizing
    created vs skipped rows.
    """
    hr_attendance_obj = self.env['hr.attendance']
    ir_model_fields_obj = self.env['ir.model.fields']
    # perform import lead
    if self and self.file:
        # For CSV
        if self.import_type == 'csv':
            counter = 1
            # line number -> human-readable skip reason
            skipped_line_no = {}
            # column index -> metadata of the dynamic field in that column
            row_field_dic = {}
            # header label -> error (unknown/non-stored field names)
            row_field_error_dic = {}
            try:
                file = str(base64.decodebytes(self.file).decode('utf-8'))
                myreader = csv.reader(file.splitlines())
                skip_header = True
                for row in myreader:
                    try:
                        if skip_header:
                            # First row: resolve dynamic field columns (3+).
                            skip_header = False
                            for i in range(3, len(row)):
                                name_field = row[i]
                                name_m2o = False
                                # "field@subfield" means match m2o by that subfield
                                if '@' in row[i]:
                                    list_field_str = name_field.split('@')
                                    name_field = list_field_str[0]
                                    name_m2o = list_field_str[1]
                                search_field = ir_model_fields_obj.sudo().search([
                                    ("model", "=", "hr.attendance"),
                                    ("name", "=", name_field),
                                    ("store", "=", True),
                                ], limit=1)
                                if search_field:
                                    field_dic = {
                                        'name': name_field,
                                        'ttype': search_field.ttype,
                                        'required': search_field.required,
                                        'name_m2o': name_m2o
                                    }
                                    row_field_dic.update({i: field_dic})
                                else:
                                    row_field_error_dic.update(
                                        {row[i]: " - field not found"})
                            counter = counter + 1
                            continue
                        if row_field_error_dic:
                            # Abort whole import when any header field is unknown.
                            res = self.show_success_msg(0, row_field_error_dic)
                            return res
                        vals = {}
                        if self.attendance_by == 'badge':
                            badge = False
                            if row[0] != '':
                                badge = self.env['hr.employee'].sudo().search(
                                    [('barcode', '=', row[0])], limit=1)
                                if badge:
                                    badge = badge.id
                                else:
                                    skipped_line_no[str(counter)] = " - Badge not found. "
                                    counter = counter + 1
                                    continue
                            check_in_time = None
                            if row[1] != '':
                                if row[1]:
                                    check_in_time = row[1]
                            else:
                                skipped_line_no[str(counter)] = " - Check in Date and Time not found. "
                                counter = counter + 1
                                continue
                            check_out_time = None
                            if row[2] != '':
                                if row[2]:
                                    check_out_time = row[2]
                            else:
                                skipped_line_no[str(counter)] = " - Check out Date and Time not found. "
                                counter = counter + 1
                                continue
                            vals.update({
                                'employee_id': badge,
                                'check_in': check_in_time,
                                'check_out': check_out_time,
                            })
                        elif self.attendance_by == 'employee_id':
                            employee_id = False
                            if row[0] != '':
                                employee_id = self.env['hr.employee'].sudo().search(
                                    [('id', '=', int(row[0]))], limit=1)
                                if employee_id:
                                    employee_id = employee_id.id
                                else:
                                    skipped_line_no[str(counter)] = " - Employee not found. "
                                    counter = counter + 1
                                    continue
                            check_in_time = None
                            if row[1] != '':
                                if row[1]:
                                    check_in_time = row[1]
                            else:
                                skipped_line_no[str(counter)] = " - Check in Date and Time not found. "
                                counter = counter + 1
                                continue
                            check_out_time = None
                            if row[2] != '':
                                if row[2]:
                                    check_out_time = row[2]
                            else:
                                skipped_line_no[str(counter)] = " - Check out Date and Time not found. "
                                counter = counter + 1
                                continue
                            vals.update({
                                'employee_id': employee_id,
                                'check_in': check_in_time,
                                'check_out': check_out_time,
                            })
                        # Validate and collect the dynamic (3+) columns.
                        is_any_error_in_dynamic_field = False
                        for k_row_index, v_field_dic in row_field_dic.items():
                            field_name = v_field_dic.get("name")
                            field_ttype = v_field_dic.get("ttype")
                            field_value = row[k_row_index]
                            field_required = v_field_dic.get("required")
                            field_name_m2o = v_field_dic.get("name_m2o")
                            dic = self.validate_field_value(
                                field_name, field_ttype, field_value,
                                field_required, field_name_m2o)
                            if dic.get("error", False):
                                skipped_line_no[str(counter)] = dic.get("error")
                                is_any_error_in_dynamic_field = True
                                break
                            else:
                                vals.update(dic)
                        if is_any_error_in_dynamic_field:
                            counter = counter + 1
                            continue
                        hr_attendance_obj.create(vals)
                        counter = counter + 1
                    except Exception as e:
                        # Row-level failure: record reason and move on.
                        skipped_line_no[str(counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your csv file does not match with our format"))
            if counter > 1:
                completed_attendance = (counter - len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_attendance, skipped_line_no)
                return res
        # # For Excel
        if self.import_type == 'excel':
            counter = 1
            skipped_line_no = {}
            row_field_dic = {}
            row_field_error_dic = {}
            try:
                wb = xlrd.open_workbook(
                    file_contents=base64.decodebytes(self.file))
                sheet = wb.sheet_by_index(0)
                skip_header = True
                for row in range(sheet.nrows):
                    try:
                        if skip_header:
                            # First row: resolve dynamic field columns (3+).
                            skip_header = False
                            for i in range(3, sheet.ncols):
                                name_field = sheet.cell(row, i).value
                                name_m2o = False
                                if '@' in sheet.cell(row, i).value:
                                    list_field_str = name_field.split('@')
                                    name_field = list_field_str[0]
                                    name_m2o = list_field_str[1]
                                search_field = ir_model_fields_obj.sudo().search([
                                    ("model", "=", "hr.attendance"),
                                    ("name", "=", name_field),
                                    ("store", "=", True),
                                ], limit=1)
                                if search_field:
                                    field_dic = {
                                        'name': name_field,
                                        'ttype': search_field.ttype,
                                        'required': search_field.required,
                                        'name_m2o': name_m2o
                                    }
                                    row_field_dic.update({i: field_dic})
                                else:
                                    row_field_error_dic.update({
                                        sheet.cell(row, i).value:
                                        " - field not found"
                                    })
                            counter = counter + 1
                            continue
                        if row_field_error_dic:
                            res = self.show_success_msg(0, row_field_error_dic)
                            return res
                        if self.attendance_by == 'badge':
                            badge = False
                            if sheet.cell(row, 0).value != '':
                                badge_int = int(sheet.cell(row, 0).value)
                                badge_str = str(badge_int)
                                badge = self.env['hr.employee'].search(
                                    [('barcode', '=', badge_str)], limit=1)
                                if badge:
                                    badge = badge.id
                                else:
                                    skipped_line_no[str(counter)] = " - Badge not found. "
                                    counter = counter + 1
                                    continue
                            check_in_time = None
                            if sheet.cell(row, 1).value != '':
                                # Excel serial date -> epoch seconds (25569 = 1970-01-01)
                                float_date_time = sheet.cell(row, 1).value
                                seconds = (float_date_time - 25569) * 86400.0
                                check_in_time = datetime.datetime.utcfromtimestamp(seconds)
                            else:
                                skipped_line_no[str(counter)] = " - Check in Date and Time not found. "
                                counter = counter + 1
                                continue
                            check_out_time = None
                            if sheet.cell(row, 2).value != '':
                                float_date_time = sheet.cell(row, 2).value
                                seconds = (float_date_time - 25569) * 86400.0
                                check_out_time = datetime.datetime.utcfromtimestamp(seconds)
                            else:
                                skipped_line_no[str(counter)] = " - Check out Date and Time not found. "
                                counter = counter + 1
                                continue
                            vals = {
                                'employee_id': badge,
                                'check_in': check_in_time,
                                'check_out': check_out_time,
                            }
                            is_any_error_in_dynamic_field = False
                            for k_row_index, v_field_dic in row_field_dic.items():
                                field_name = v_field_dic.get("name")
                                field_ttype = v_field_dic.get("ttype")
                                field_value = sheet.cell(row, k_row_index).value
                                field_required = v_field_dic.get("required")
                                field_name_m2o = v_field_dic.get("name_m2o")
                                dic = self.validate_field_value(
                                    field_name, field_ttype, field_value,
                                    field_required, field_name_m2o)
                                if dic.get("error", False):
                                    skipped_line_no[str(counter)] = dic.get("error")
                                    is_any_error_in_dynamic_field = True
                                    break
                                else:
                                    vals.update(dic)
                            if is_any_error_in_dynamic_field:
                                counter = counter + 1
                                continue
                            hr_attendance_obj.create(vals)
                            counter = counter + 1
                        elif self.attendance_by == 'employee_id':
                            employee_id = False
                            if sheet.cell(row, 0).value != '':
                                employee_int = int(sheet.cell(row, 0).value)
                                employee_id = self.env['hr.employee'].search(
                                    [('id', '=', employee_int)], limit=1)
                                if employee_id:
                                    employee_id = employee_id.id
                                else:
                                    skipped_line_no[str(counter)] = " - Employee not found. "
                                    counter = counter + 1
                                    continue
                            check_in_time = None
                            if sheet.cell(row, 1).value != '':
                                float_date_time = sheet.cell(row, 1).value
                                seconds = (float_date_time - 25569) * 86400.0
                                check_in_time = datetime.datetime.utcfromtimestamp(seconds)
                            else:
                                skipped_line_no[str(counter)] = " - Check in Date and Time not found. "
                                counter = counter + 1
                                continue
                            check_out_time = None
                            if sheet.cell(row, 2).value != '':
                                float_date_time = sheet.cell(row, 2).value
                                seconds = (float_date_time - 25569) * 86400.0
                                check_out_time = datetime.datetime.utcfromtimestamp(seconds)
                            else:
                                skipped_line_no[str(counter)] = " - Check out Date and Time not found. "
                                counter = counter + 1
                                continue
                            vals = {
                                'employee_id': employee_id,
                                'check_in': check_in_time,
                                'check_out': check_out_time,
                            }
                            is_any_error_in_dynamic_field = False
                            for k_row_index, v_field_dic in row_field_dic.items():
                                field_name = v_field_dic.get("name")
                                field_ttype = v_field_dic.get("ttype")
                                field_value = sheet.cell(row, k_row_index).value
                                field_required = v_field_dic.get("required")
                                field_name_m2o = v_field_dic.get("name_m2o")
                                dic = self.validate_field_value(
                                    field_name, field_ttype, field_value,
                                    field_required, field_name_m2o)
                                if dic.get("error", False):
                                    skipped_line_no[str(counter)] = dic.get("error")
                                    is_any_error_in_dynamic_field = True
                                    break
                                else:
                                    vals.update(dic)
                            if is_any_error_in_dynamic_field:
                                counter = counter + 1
                                continue
                            hr_attendance_obj.create(vals)
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your excel file does not match with our format"))
            if counter > 1:
                completed_attendance = (counter - len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_attendance, skipped_line_no)
                return res
def load_modules(db, force_demo=False, status=None, update_module=False):
    """Load, install, upgrade and uninstall modules for database ``db``.

    This is the Odoo server's registry bootstrap: it initializes a fresh
    database when needed, loads ``base`` first, marks modules to
    install/upgrade from the command-line config, loads them in dependency
    order, then performs post-install checks and uninstalls modules flagged
    'to remove'. Returns the new registry only on the uninstall-and-reload
    path; otherwise the caller keeps the registry it already obtained.
    """
    initialize_sys_path()
    force = []
    if force_demo:
        force.append('demo')
    models_to_check = set()
    with db.cursor() as cr:
        if not odoo.modules.db.is_initialized(cr):
            if not update_module:
                _logger.error(
                    "Database %s not initialized, you can force it with `-i base`",
                    cr.dbname)
                return
            _logger.info("init db")
            odoo.modules.db.initialize(cr)
            update_module = True  # process auto-installed modules
            tools.config["init"]["all"] = 1
            tools.config['update']['all'] = 1
            if not tools.config['without_demo']:
                tools.config["demo"]['all'] = 1

        # This is a brand new registry, just created in
        # odoo.modules.registry.Registry.new().
        registry = odoo.registry(cr.dbname)

        if 'base' in tools.config['update'] or 'all' in tools.config['update']:
            cr.execute(
                "update ir_module_module set state=%s where name=%s and state=%s",
                ('to upgrade', 'base', 'installed'))

        # STEP 1: LOAD BASE (must be done before module dependencies can be computed for later steps)
        graph = odoo.modules.graph.Graph()
        graph.add_module(cr, 'base', force)
        if not graph:
            _logger.critical(
                'module base cannot be loaded! (hint: verify addons-path)')
            raise ImportError(
                'Module `base` cannot be loaded! (hint: verify addons-path)')

        # processed_modules: for cleanup step after install
        # loaded_modules: to avoid double loading
        report = registry._assertion_report
        loaded_modules, processed_modules = load_module_graph(
            cr, graph, status, perform_checks=update_module,
            report=report, models_to_check=models_to_check)

        load_lang = tools.config.pop('load_language')
        if load_lang or update_module:
            # some base models are used below, so make sure they are set up
            registry.setup_models(cr)

        if load_lang:
            for lang in load_lang.split(','):
                tools.load_language(cr, lang)

        # STEP 2: Mark other modules to be loaded/updated
        if update_module:
            env = api.Environment(cr, SUPERUSER_ID, {})
            Module = env['ir.module.module']
            _logger.info('updating modules list')
            Module.update_list()

            _check_module_names(
                cr,
                itertools.chain(tools.config['init'], tools.config['update']))

            module_names = [k for k, v in tools.config['init'].items() if v]
            if module_names:
                modules = Module.search([('state', '=', 'uninstalled'),
                                         ('name', 'in', module_names)])
                if modules:
                    modules.button_install()

            module_names = [k for k, v in tools.config['update'].items() if v]
            if module_names:
                modules = Module.search([('state', '=', 'installed'),
                                         ('name', 'in', module_names)])
                if modules:
                    modules.button_upgrade()

            cr.execute("update ir_module_module set state=%s where name=%s",
                       ('installed', 'base'))
            Module.invalidate_cache(['state'])

        # STEP 3: Load marked modules (skipping base which was done in STEP 1)
        # IMPORTANT: this is done in two parts, first loading all installed or
        #            partially installed modules (i.e. installed/to upgrade), to
        #            offer a consistent system to the second part: installing
        #            newly selected modules.
        #            We include the modules 'to remove' in the first step, because
        #            they are part of the "currently installed" modules. They will
        #            be dropped in STEP 6 later, before restarting the loading
        #            process.
        # IMPORTANT 2: We have to loop here until all relevant modules have been
        #              processed, because in some rare cases the dependencies have
        #              changed, and modules that depend on an uninstalled module
        #              will not be processed on the first pass.
        #              It's especially useful for migrations.
        previously_processed = -1
        while previously_processed < len(processed_modules):
            previously_processed = len(processed_modules)
            processed_modules += load_marked_modules(
                cr, graph, ['installed', 'to upgrade', 'to remove'], force,
                status, report, loaded_modules, update_module, models_to_check)
            if update_module:
                processed_modules += load_marked_modules(
                    cr, graph, ['to install'], force, status, report,
                    loaded_modules, update_module, models_to_check)

        registry.loaded = True
        registry.setup_models(cr)

        # STEP 3.5: execute migration end-scripts
        migrations = odoo.modules.migration.MigrationManager(cr, graph)
        for package in graph:
            migrations.migrate_module(package, 'end')

        # STEP 4: Finish and cleanup installations
        if processed_modules:
            env = api.Environment(cr, SUPERUSER_ID, {})
            # Warn about concrete models with no access rules at all.
            cr.execute(
                """select model,name from ir_model where id NOT IN (select distinct model_id from ir_model_access)"""
            )
            for (model, name) in cr.fetchall():
                if model in registry and not registry[
                        model]._abstract and not registry[model]._transient:
                    _logger.warning(
                        'The model %s has no access rules, consider adding one. E.g. access_%s,access_%s,model_%s,base.group_user,1,0,0,0',
                        model, model.replace('.', '_'),
                        model.replace('.', '_'), model.replace('.', '_'))

            # Temporary warning while we remove access rights on osv_memory objects, as they have
            # been replaced by owner-only access rights
            cr.execute(
                """select distinct mod.model, mod.name from ir_model_access acc, ir_model mod where acc.model_id = mod.id"""
            )
            for (model, name) in cr.fetchall():
                if model in registry and registry[model]._transient:
                    _logger.warning(
                        'The transient model %s (%s) should not have explicit access rules!',
                        model, name)

            cr.execute("SELECT model from ir_model")
            for (model, ) in cr.fetchall():
                if model in registry:
                    env[model]._check_removed_columns(log=True)
                elif _logger.isEnabledFor(
                        logging.INFO):  # more an info that a warning...
                    _logger.warning(
                        "Model %s is declared but cannot be loaded! (Perhaps a module was partially removed or renamed)",
                        model)

            # Cleanup orphan records
            env['ir.model.data']._process_end(processed_modules)

        for kind in ('init', 'demo', 'update'):
            tools.config[kind] = {}

        # STEP 5: Uninstall modules to remove
        if update_module:
            # Remove records referenced from ir_model_data for modules to be
            # removed (and removed the references from ir_model_data).
            cr.execute("SELECT name, id FROM ir_module_module WHERE state=%s",
                       ('to remove', ))
            modules_to_remove = dict(cr.fetchall())
            if modules_to_remove:
                env = api.Environment(cr, SUPERUSER_ID, {})
                # Run uninstall hooks in reverse dependency order.
                pkgs = reversed(
                    [p for p in graph if p.name in modules_to_remove])
                for pkg in pkgs:
                    uninstall_hook = pkg.info.get('uninstall_hook')
                    if uninstall_hook:
                        py_module = sys.modules['odoo.addons.%s' % (pkg.name, )]
                        getattr(py_module, uninstall_hook)(cr, registry)

                Module = env['ir.module.module']
                Module.browse(modules_to_remove.values()).module_uninstall()
                # Recursive reload, should only happen once, because there should be no
                # modules to remove next time
                cr.commit()
                _logger.info(
                    'Reloading registry once more after uninstalling modules')
                api.Environment.reset()
                registry = odoo.modules.registry.Registry.new(
                    cr.dbname, force_demo, status, update_module)
                registry.check_tables_exist(cr)
                cr.commit()
                return registry

        # STEP 5.5: Verify extended fields on every model
        # This will fix the schema of all models in a situation such as:
        #   - module A is loaded and defines model M;
        #   - module B is installed/upgraded and extends model M;
        #   - module C is loaded and extends model M;
        #   - module B and C depend on A but not on each other;
        # The changes introduced by module C are not taken into account by the upgrade of B.
        if models_to_check:
            registry.init_models(cr, list(models_to_check),
                                 {'models_to_check': True})

        # STEP 6: verify custom views on every model
        if update_module:
            env = api.Environment(cr, SUPERUSER_ID, {})
            View = env['ir.ui.view']
            for model in registry:
                try:
                    View._validate_custom_views(model)
                except Exception as e:
                    _logger.warning('invalid custom view(s) for model %s: %s',
                                    model, tools.ustr(e))

        if report.failures:
            _logger.error('At least one test failed when loading the modules.')
        else:
            _logger.info('Modules loaded.')

        # STEP 8: call _register_hook on every model
        env = api.Environment(cr, SUPERUSER_ID, {})
        for model in env.values():
            model._register_hook()

        # STEP 9: save installed/updated modules for post-install tests
        registry.updated_modules += processed_modules
def create(self, values):
    """Create a mail.message, normalizing defaults and extracting inline images.

    Fills in email_from/message_id/reply_to/record_name when absent, converts
    base64 data-URL images embedded in ``body`` into ir.attachment records
    (rewriting the body to reference ``/web/image/<id>``), and defers the
    write of tracking values until after creation (as sudo) to avoid access
    rights issues.
    """
    # coming from mail.js that does not have pid in its values
    if self.env.context.get('default_starred'):
        self = self.with_context({
            'default_starred_partner_ids': [(4, self.env.user.partner_id.id)]
        })

    if 'email_from' not in values:  # needed to compute reply_to
        values['email_from'] = self._get_default_from()
    if not values.get('message_id'):
        values['message_id'] = self._get_message_id(values)
    if 'reply_to' not in values:
        values['reply_to'] = self._get_reply_to(values)
    if 'record_name' not in values and 'default_record_name' not in self.env.context:
        values['record_name'] = self._get_record_name(values)

    if 'attachment_ids' not in values:
        values.setdefault('attachment_ids', [])

    # extract base64 images
    if 'body' in values:
        Attachments = self.env['ir.attachment']
        # data-URL payload -> attachment URL, so duplicate images share one attachment
        data_to_url = {}

        def base64_to_boundary(match):
            key = match.group(2)
            if not data_to_url.get(key):
                name = 'image%s' % len(data_to_url)
                attachment = Attachments.create({
                    'name': name,
                    'datas': match.group(2),
                    'datas_fname': name,
                    'res_model': 'mail.message',
                })
                values['attachment_ids'].append((4, attachment.id))
                data_to_url[key] = '/web/image/%s' % attachment.id
            # NOTE(review): ``name`` is only bound on the first occurrence of a
            # given image; a repeated image would hit an UnboundLocalError here
            # — confirm against upstream before relying on duplicate images.
            return '%s%s alt="%s"' % (data_to_url[key], match.group(3), name)

        values['body'] = _image_dataurl.sub(base64_to_boundary,
                                            tools.ustr(values['body']))

    # delegate creation of tracking after the create as sudo to avoid access rights issues
    tracking_values_cmd = values.pop('tracking_value_ids', False)
    message = super(Message, self).create(values)
    if tracking_values_cmd:
        message.sudo().write({'tracking_value_ids': tracking_values_cmd})

    message._invalidate_documents()

    if not self.env.context.get('message_create_from_mail_mail'):
        message._notify(force_send=self.env.context.get(
            'mail_notify_force_send', True),
                        user_signature=self.env.context.get(
                            'mail_notify_user_signature', True))
    return message
def render_template(self, template_txt, model, res_ids, post_process=False):
    """ Render the given template text, replace mako expressions ``${expr}``
    with the result of evaluating these expressions with an evaluation
    context containing:

        - ``user``: browse_record of the current user
        - ``object``: record of the document record this mail is related to
        - ``context``: the context passed to the mail composition wizard

    :param str template_txt: the template text to render
    :param str model: model name of the document record this mail is related to.
    :param res_ids: list of ids of document records those mails are related
        to, or a single id (in which case a single rendered string is
        returned instead of a dict).
    :param bool post_process: whether to run ``render_post_process`` on each
        rendered result.
    :return: dict mapping res_id -> rendered text, or a single string when a
        scalar ``res_ids`` was given.
    """
    multi_mode = True
    # FIX: the original tested isinstance(res_ids, (int, long)) and used
    # dict.iteritems(), both Python-2-only; they raise NameError /
    # AttributeError under Python 3 (which the rest of this file targets,
    # e.g. base64.decodebytes).
    if isinstance(res_ids, int):
        multi_mode = False
        res_ids = [res_ids]

    results = dict.fromkeys(res_ids, u"")

    # try to load the template
    try:
        mako_env = mako_safe_template_env if self.env.context.get(
            'safe') else mako_template_env
        template = mako_env.from_string(tools.ustr(template_txt))
    except Exception:
        _logger.info("Failed to load template %r",
                     template_txt,
                     exc_info=True)
        return multi_mode and results or results[res_ids[0]]

    # prepare template variables; drop falsy ids to avoid browsing [None]
    records = self.env[model].browse([it for it in res_ids if it])
    res_to_rec = dict.fromkeys(res_ids, None)
    for record in records:
        res_to_rec[record.id] = record
    variables = {
        'format_tz':
        lambda dt, tz=False, format=False, context=self._context: format_tz(
            self.env, dt, tz, format),
        'user':
        self.env.user,
        'ctx':
        self._context,  # context kw would clash with mako internals
    }
    for res_id, record in res_to_rec.items():
        variables['object'] = record
        try:
            render_result = template.render(variables)
        except Exception:
            _logger.info("Failed to render template %r using values %r" %
                         (template, variables),
                         exc_info=True)
            raise UserError(
                _("Failed to render template %r using values %r") %
                (template, variables))
        # Mako renders a bare False expression as the string "False"
        if render_result == u"False":
            render_result = u""
        results[res_id] = render_result

    if post_process:
        for res_id, result in results.items():
            results[res_id] = self.render_post_process(result)

    return multi_mode and results or results[res_ids[0]]
def import_inv_apply(self):
    """Import account.move records (invoices/bills/credit notes) from the
    uploaded CSV or Excel file.

    Rows are grouped by column 0 (invoice number): a new move is created when
    the number changes, subsequent rows add lines to the running move.
    Columns: 0=number, 1=partner name, 2=date, 3=product, 4=description,
    5=qty, 6=UoM, 7=unit price, 8=comma-separated tax names.
    Returns the action produced by ``show_success_msg``.
    """
    inv_obj = self.env['account.move']
    # perform import lead
    if self and self.file:
        # For CSV
        if self.import_type == 'csv':
            counter = 1
            skipped_line_no = {}
            try:
                file = str(base64.decodebytes(self.file).decode('utf-8'))
                myreader = csv.reader(file.splitlines())
                skip_header = True
                running_inv = None
                created_inv = False
                created_inv_list_for_validate = []
                created_inv_list = []
                for row in myreader:
                    try:
                        if skip_header:
                            skip_header = False
                            counter = counter + 1
                            continue
                        if row[0] not in (None, "") and row[3] not in (None, ""):
                            vals = {}
                            if row[0] != running_inv:
                                # New invoice number: create the move header.
                                running_inv = row[0]
                                inv_vals = {}
                                if row[1] not in (None, ""):
                                    partner_obj = self.env["res.partner"]
                                    partner = partner_obj.search(
                                        [('name', '=', row[1])], limit=1)
                                    if partner:
                                        inv_vals.update({'partner_id': partner.id})
                                    else:
                                        skipped_line_no[str(counter)] = " - Customer/Vendor not found. "
                                        counter = counter + 1
                                        continue
                                else:
                                    skipped_line_no[str(counter)] = " - Customer/Vendor field is empty. "
                                    counter = counter + 1
                                    continue
                                if row[2] not in (None, ""):
                                    cd = row[2]
                                    cd = str(datetime.strptime(cd, '%Y-%m-%d').date())
                                    inv_vals.update({'invoice_date': cd})
                                if self.inv_no_type == 'as_per_sheet':
                                    inv_vals.update({"name": row[0]})
                                created_inv = False
                                if self.invoice_type == 'inv':
                                    inv_vals.update({"move_type": "out_invoice"})
                                    created_inv = inv_obj.with_context(
                                        default_move_type='out_invoice').create(inv_vals)
                                elif self.invoice_type == 'bill':
                                    inv_vals.update({"move_type": "in_invoice"})
                                    created_inv = inv_obj.with_context(
                                        default_move_type='in_invoice').create(inv_vals)
                                elif self.invoice_type == 'ccn':
                                    inv_vals.update({"move_type": "out_refund"})
                                    created_inv = inv_obj.with_context(
                                        default_move_type='out_refund').create(inv_vals)
                                elif self.invoice_type == 'vcn':
                                    inv_vals.update({"move_type": "in_refund"})
                                    created_inv = inv_obj.with_context(
                                        default_move_type='in_refund').create(inv_vals)
                                invoice_line_ids = []
                                created_inv_list_for_validate.append(created_inv.id)
                                created_inv_list.append(created_inv.id)
                            if created_inv:
                                # Resolve the product by the configured key.
                                field_nm = 'name'
                                if self.product_by == 'name':
                                    field_nm = 'name'
                                elif self.product_by == 'int_ref':
                                    field_nm = 'default_code'
                                elif self.product_by == 'barcode':
                                    field_nm = 'barcode'
                                search_product = self.env['product.product'].search(
                                    [(field_nm, '=', row[3])], limit=1)
                                if search_product:
                                    vals.update({'product_id': search_product.id})
                                    if row[4] != '':
                                        vals.update({'name': row[4]})
                                    else:
                                        product = None
                                        name = ''
                                        # NOTE(review): nesting reconstructed from a
                                        # whitespace-mangled source; ``product`` can stay
                                        # None (partner without lang) and partner_ref
                                        # would then raise — confirm against upstream.
                                        if created_inv.partner_id:
                                            if created_inv.partner_id.lang:
                                                product = search_product.with_context(
                                                    lang=created_inv.partner_id.lang)
                                        else:
                                            product = search_product
                                        name = product.partner_ref
                                        if created_inv.move_type in (
                                                'in_invoice', 'in_refund') and product:
                                            if product.description_purchase:
                                                name += '\n' + product.description_purchase
                                        elif product:
                                            if product.description_sale:
                                                name += '\n' + product.description_sale
                                        vals.update({'name': name})
                                    accounts = search_product.product_tmpl_id.get_product_accounts(
                                        created_inv.fiscal_position_id)
                                    account = False
                                    if created_inv.move_type in ('out_invoice', 'out_refund'):
                                        account = accounts['income']
                                    else:
                                        account = accounts['expense']
                                    if not account:
                                        skipped_line_no[str(counter)] = " - Account not found. "
                                        counter = counter + 1
                                        # Drop the move from auto-validation on any line error.
                                        if created_inv.id in created_inv_list_for_validate:
                                            created_inv_list_for_validate.remove(created_inv.id)
                                        continue
                                    else:
                                        vals.update({'account_id': account.id})
                                    if row[5] != '':
                                        vals.update({'quantity': row[5]})
                                    else:
                                        vals.update({'quantity': 1})
                                    if row[6] in (None, ""):
                                        if created_inv.move_type in (
                                                'in_invoice', 'in_refund'
                                        ) and search_product.uom_po_id:
                                            vals.update({
                                                'product_uom_id': search_product.uom_po_id.id
                                            })
                                        elif search_product.uom_id:
                                            vals.update({
                                                'product_uom_id': search_product.uom_id.id
                                            })
                                    else:
                                        search_uom = self.env['uom.uom'].search(
                                            [('name', '=', row[6])], limit=1)
                                        if search_uom:
                                            vals.update({'product_uom_id': search_uom.id})
                                        else:
                                            skipped_line_no[str(counter)] = " - Unit of Measure not found. "
                                            counter = counter + 1
                                            if created_inv.id in created_inv_list_for_validate:
                                                created_inv_list_for_validate.remove(created_inv.id)
                                            continue
                                    if row[7] in (None, ""):
                                        if created_inv.move_type in ('in_invoice', 'in_refund'):
                                            vals.update({
                                                'price_unit': search_product.standard_price
                                            })
                                        else:
                                            vals.update({
                                                'price_unit': search_product.lst_price
                                            })
                                    else:
                                        vals.update({'price_unit': row[7]})
                                    if row[8].strip() in (None, ""):
                                        if created_inv.move_type in (
                                                'in_invoice', 'in_refund'
                                        ) and search_product.supplier_taxes_id:
                                            vals.update({
                                                'tax_ids': [(6, 0, search_product.supplier_taxes_id.ids)]
                                            })
                                        elif created_inv.move_type in (
                                                'out_invoice', 'out_refund'
                                        ) and search_product.taxes_id:
                                            vals.update({
                                                'tax_ids': [(6, 0, search_product.taxes_id.ids)]
                                            })
                                    else:
                                        taxes_list = []
                                        some_taxes_not_found = False
                                        for x in row[8].split(','):
                                            x = x.strip()
                                            if x != '':
                                                search_tax = self.env['account.tax'].search(
                                                    [('name', '=', x)], limit=1)
                                                if search_tax:
                                                    taxes_list.append(search_tax.id)
                                                else:
                                                    some_taxes_not_found = True
                                                    skipped_line_no[str(counter)] = " - Taxes " + x + " not found. "
                                                    break
                                        if some_taxes_not_found:
                                            counter = counter + 1
                                            if created_inv.id in created_inv_list_for_validate:
                                                created_inv_list_for_validate.remove(created_inv.id)
                                            continue
                                        else:
                                            vals.update({'tax_ids': [(6, 0, taxes_list)]})
                                    vals.update({'move_id': created_inv.id})
                                    invoice_line_ids.append((0, 0, vals))
                                    vals = {}
                                    counter = counter + 1
                                else:
                                    skipped_line_no[str(counter)] = " - Product not found. "
                                    counter = counter + 1
                                    if created_inv.id in created_inv_list_for_validate:
                                        created_inv_list_for_validate.remove(created_inv.id)
                                    continue
                                created_inv.write({'invoice_line_ids': invoice_line_ids})
                                invoice_line_ids = []
                            else:
                                skipped_line_no[str(counter)] = " - Order not created. "
                                counter = counter + 1
                                continue
                        else:
                            skipped_line_no[str(counter)] = " - Number or Product field is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
                # here call necessary method
                if created_inv_list:
                    invoices = inv_obj.search([('id', 'in', created_inv_list)])
                    if invoices:
                        for invoice in invoices:
                            invoice._onchange_partner_id()
                            invoice._onchange_invoice_line_ids()
                # validate invoice
                if created_inv_list_for_validate and self.is_validate:
                    invoices = inv_obj.search([
                        ('id', 'in', created_inv_list_for_validate)
                    ])
                    if invoices:
                        for invoice in invoices:
                            invoice.action_post()
                else:
                    created_inv_list_for_validate = []
            except Exception as e:
                raise UserError(
                    _("Sorry, Your csv file does not match with our format" + ustr(e)))
            if counter > 1:
                completed_records = len(created_inv_list)
                validate_rec = len(created_inv_list_for_validate)
                res = self.show_success_msg(completed_records, validate_rec,
                                            skipped_line_no)
                return res
        # For Excel
        if self.import_type == 'excel':
            counter = 1
            skipped_line_no = {}
            try:
                wb = xlrd.open_workbook(
                    file_contents=base64.decodebytes(self.file))
                sheet = wb.sheet_by_index(0)
                skip_header = True
                running_inv = None
                created_inv = False
                created_inv_list_for_validate = []
                created_inv_list = []
                for row in range(sheet.nrows):
                    try:
                        if skip_header:
                            skip_header = False
                            counter = counter + 1
                            continue
                        if sheet.cell(row, 0).value not in (None, "") and sheet.cell(
                                row, 3).value not in (None, ""):
                            vals = {}
                            if sheet.cell(row, 0).value != running_inv:
                                # New invoice number: create the move header.
                                running_inv = sheet.cell(row, 0).value
                                inv_vals = {}
                                if sheet.cell(row, 1).value not in (None, ""):
                                    partner_obj = self.env["res.partner"]
                                    partner = partner_obj.search(
                                        [('name', '=', sheet.cell(row, 1).value)],
                                        limit=1)
                                    if partner:
                                        inv_vals.update({'partner_id': partner.id})
                                    else:
                                        skipped_line_no[str(counter)] = " - Customer/Vendor not found. "
                                        counter = counter + 1
                                        continue
                                else:
                                    skipped_line_no[str(counter)] = " - Customer/Vendor field is empty. "
                                    counter = counter + 1
                                    continue
                                if sheet.cell(row, 2).value not in (None, ""):
                                    # NOTE(review): assumes the date cell holds a
                                    # 'YYYY-MM-DD' string, not an Excel serial date.
                                    cd = sheet.cell(row, 2).value
                                    cd = str(datetime.strptime(cd, '%Y-%m-%d').date())
                                    inv_vals.update({'invoice_date': cd})
                                if self.inv_no_type == 'as_per_sheet':
                                    inv_vals.update({"name": sheet.cell(row, 0).value})
                                created_inv = False
                                if self.invoice_type == 'inv':
                                    inv_vals.update({"move_type": "out_invoice"})
                                    created_inv = inv_obj.with_context(
                                        default_move_type='out_invoice').create(inv_vals)
                                elif self.invoice_type == 'bill':
                                    inv_vals.update({"move_type": "in_invoice"})
                                    created_inv = inv_obj.with_context(
                                        default_move_type='in_invoice').create(inv_vals)
                                elif self.invoice_type == 'ccn':
                                    inv_vals.update({"move_type": "out_refund"})
                                    created_inv = inv_obj.with_context(
                                        default_move_type='out_refund').create(inv_vals)
                                elif self.invoice_type == 'vcn':
                                    inv_vals.update({"move_type": "in_refund"})
                                    created_inv = inv_obj.with_context(
                                        default_move_type='in_refund').create(inv_vals)
                                invoice_line_ids = []
                                created_inv_list_for_validate.append(created_inv.id)
                                created_inv_list.append(created_inv.id)
                            if created_inv:
                                field_nm = 'name'
                                if self.product_by == 'name':
                                    field_nm = 'name'
                                elif self.product_by == 'int_ref':
                                    field_nm = 'default_code'
                                elif self.product_by == 'barcode':
                                    field_nm = 'barcode'
                                search_product = self.env['product.product'].search(
                                    [(field_nm, '=', sheet.cell(row, 3).value)],
                                    limit=1)
                                if search_product:
                                    vals.update({'product_id': search_product.id})
                                    if sheet.cell(row, 4).value != '':
                                        vals.update({'name': sheet.cell(row, 4).value})
                                    else:
                                        product = None
                                        name = ''
                                        # NOTE(review): same reconstructed nesting as
                                        # the CSV branch — confirm against upstream.
                                        if created_inv.partner_id:
                                            if created_inv.partner_id.lang:
                                                product = search_product.with_context(
                                                    lang=created_inv.partner_id.lang)
                                        else:
                                            product = search_product
                                        name = product.partner_ref
                                        if created_inv.move_type in (
                                                'in_invoice', 'in_refund') and product:
                                            if product.description_purchase:
                                                name += '\n' + product.description_purchase
                                        elif product:
                                            if product.description_sale:
                                                name += '\n' + product.description_sale
                                        vals.update({'name': name})
                                    accounts = search_product.product_tmpl_id.get_product_accounts(
                                        created_inv.fiscal_position_id)
                                    account = False
                                    if created_inv.move_type in ('out_invoice', 'out_refund'):
                                        account = accounts['income']
                                    else:
                                        account = accounts['expense']
                                    if not account:
                                        skipped_line_no[str(counter)] = " - Account not found. "
                                        counter = counter + 1
                                        if created_inv.id in created_inv_list_for_validate:
                                            created_inv_list_for_validate.remove(created_inv.id)
                                        continue
                                    else:
                                        vals.update({'account_id': account.id})
                                    if sheet.cell(row, 5).value != '':
                                        vals.update({'quantity': sheet.cell(row, 5).value})
                                    else:
                                        vals.update({'quantity': 1})
                                    if sheet.cell(row, 6).value in (None, ""):
                                        if created_inv.move_type in (
                                                'in_invoice', 'in_refund'
                                        ) and search_product.uom_po_id:
                                            vals.update({
                                                'product_uom_id': search_product.uom_po_id.id
                                            })
                                        elif search_product.uom_id:
                                            vals.update({
                                                'product_uom_id': search_product.uom_id.id
                                            })
                                    else:
                                        search_uom = self.env['uom.uom'].search([
                                            ('name', '=', sheet.cell(row, 6).value)
                                        ], limit=1)
                                        if search_uom:
                                            vals.update({'product_uom_id': search_uom.id})
                                        else:
                                            skipped_line_no[str(counter)] = " - Unit of Measure not found. "
                                            counter = counter + 1
                                            if created_inv.id in created_inv_list_for_validate:
                                                created_inv_list_for_validate.remove(created_inv.id)
                                            continue
                                    if sheet.cell(row, 7).value in (None, ""):
                                        if created_inv.move_type in ('in_invoice', 'in_refund'):
                                            vals.update({
                                                'price_unit': search_product.standard_price
                                            })
                                        else:
                                            vals.update({
                                                'price_unit': search_product.lst_price
                                            })
                                    else:
                                        vals.update({
                                            'price_unit': sheet.cell(row, 7).value
                                        })
                                    if sheet.cell(row, 8).value.strip() in (None, ""):
                                        if created_inv.move_type in (
                                                'in_invoice', 'in_refund'
                                        ) and search_product.supplier_taxes_id:
                                            vals.update({
                                                'tax_ids': [(6, 0, search_product.supplier_taxes_id.ids)]
                                            })
                                        elif created_inv.move_type in (
                                                'out_invoice', 'out_refund'
                                        ) and search_product.taxes_id:
                                            vals.update({
                                                'tax_ids': [(6, 0, search_product.taxes_id.ids)]
                                            })
                                    else:
                                        taxes_list = []
                                        some_taxes_not_found = False
                                        for x in sheet.cell(row, 8).value.split(','):
                                            x = x.strip()
                                            if x != '':
                                                search_tax = self.env['account.tax'].search(
                                                    [('name', '=', x)], limit=1)
                                                if search_tax:
                                                    taxes_list.append(search_tax.id)
                                                else:
                                                    some_taxes_not_found = True
                                                    skipped_line_no[str(counter)] = " - Taxes " + x + " not found. "
                                                    break
                                        if some_taxes_not_found:
                                            counter = counter + 1
                                            if created_inv.id in created_inv_list_for_validate:
                                                created_inv_list_for_validate.remove(created_inv.id)
                                            continue
                                        else:
                                            vals.update({'tax_ids': [(6, 0, taxes_list)]})
                                    vals.update({'move_id': created_inv.id})
                                    invoice_line_ids.append((0, 0, vals))
                                    vals = {}
                                    counter = counter + 1
                                else:
                                    skipped_line_no[str(counter)] = " - Product not found. "
                                    counter = counter + 1
                                    if created_inv.id in created_inv_list_for_validate:
                                        created_inv_list_for_validate.remove(created_inv.id)
                                    continue
                                created_inv.write({'invoice_line_ids': invoice_line_ids})
                                invoice_line_ids = []
                            else:
                                skipped_line_no[str(counter)] = " - Order not created. "
                                counter = counter + 1
                                continue
                        else:
                            skipped_line_no[str(counter)] = " - Number or Product field is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
                # here call necessary method
                if created_inv_list:
                    invoices = inv_obj.search([('id', 'in', created_inv_list)])
                    if invoices:
                        for invoice in invoices:
                            invoice._onchange_partner_id()
                            invoice._onchange_invoice_line_ids()
                # validate invoice
                if created_inv_list_for_validate and self.is_validate:
                    invoices = inv_obj.search([
                        ('id', 'in', created_inv_list_for_validate)
                    ])
                    if invoices:
                        for invoice in invoices:
                            invoice.action_post()
                else:
                    created_inv_list_for_validate = []
            except Exception as e:
                raise UserError(
                    _("Sorry, Your excel file does not match with our format" + ustr(e)))
            if counter > 1:
                completed_records = len(created_inv_list)
                validate_rec = len(created_inv_list_for_validate)
                res = self.show_success_msg(completed_records, validate_rec,
                                            skipped_line_no)
                return res
def send_message_on_whatsapp(self):
    """Notify the lead's salesperson on WhatsApp via the chat-api.com API.

    Builds a plain-text summary of the lead (customer, email, phone,
    expected closing date, description), optionally appends the user's
    cleaned signature, and POSTs it to the configured chat-api instance.
    On success (HTTP 200/201) the message may also be logged in the
    chatter, depending on the
    ``group_crm_display_chatter_message`` config parameter.

    :raises UserError: when the salesperson's partner has no mobile
        number or no country (phone code) configured.
    """
    Param = self.env['res.config.settings'].sudo().get_values()
    res_partner_id = self.env['res.partner'].search([
        ('id', '=', self.user_id.partner_id.id)
    ])
    res_user_id = self.env['res.users'].search([('id', '=', self.env.user.id)])
    msg = ''
    if res_partner_id.country_id.phone_code and res_partner_id.mobile:
        # Assemble the lead details; each field is optional.
        if self.partner_id:
            msg += '\n*Customer:* ' + self.partner_id.name
        if self.email_from:
            msg += '\n*Email:* ' + self.email_from
        if self.phone:
            msg += '\n*Phone:* ' + self.phone
        if self.date_deadline:
            msg += '\n*Expected closing date:* ' + str(self.date_deadline)
        if self.description:
            msg += '\n*Description:* ' + self.description
        msg = 'Hello ' + res_partner_id.name + ',' + '\nNew lead assigned to you' + '\n*Lead name:* ' + self.name + "" + msg
        if res_user_id.has_group(
                'pragmatic_odoo_whatsapp_integration.group_crm_enable_signature'
        ):
            user_signature = self.cleanhtml(res_user_id.signature)
            msg += "\n\n" + user_signature
        url = 'https://api.chat-api.com/instance' + Param.get(
            'whatsapp_instance_id') + '/sendMessage?token=' + Param.get(
                'whatsapp_token')
        headers = {
            "Content-Type": "application/json",
        }
        # Normalize the mobile number: strip spaces, drop a leading
        # "+<country code>" if present, then re-prefix it explicitly.
        whatsapp_msg_number = res_partner_id.mobile
        whatsapp_msg_number_without_space = whatsapp_msg_number.replace(" ", "")
        whatsapp_msg_number_without_code = whatsapp_msg_number_without_space.replace(
            '+' + str(res_partner_id.country_id.phone_code), "")
        # BUGFIX: the original dict literal was corrupted by a stray
        # "if self.has_group(...)" fragment spliced between its keys,
        # which is a syntax error; the signature is already appended
        # above via group_crm_enable_signature.
        tmp_dict = {
            "phone": "+" + str(res_partner_id.country_id.phone_code) + "" + whatsapp_msg_number_without_code,
            "body": msg,
        }
        response = requests.post(url, json.dumps(tmp_dict), headers=headers)
        if response.status_code == 201 or response.status_code == 200:
            _logger.info("\nSend Message successfully")
            mail_message_obj = self.env['mail.message']
            if self.env['ir.config_parameter'].sudo().get_param(
                    'pragmatic_odoo_whatsapp_integration.group_crm_display_chatter_message'
            ):
                # Mirror the outgoing message in the lead's chatter with
                # a WhatsApp icon.
                comment = "fa fa-whatsapp"
                body_html = tools.append_content_to_html(
                    '<div class = "%s"></div>' % tools.ustr(comment), msg)
                body_msg = self.convert_to_html(body_html)
                mail_message_id = mail_message_obj.sudo().create({
                    'res_id': self.id,
                    'model': 'crm.lead',
                    'body': body_msg,
                })
    else:
        raise UserError(
            _('Please enter partner mobile number or select country for partner'
              ))
def import_bsl_apply(self):
    """Import bank statement lines from the uploaded CSV or Excel file.

    Expected columns per data row: date (col 0, ``YYYY-MM-DD``),
    label (col 1), optional partner name (col 2), reference (col 3)
    and amount (col 4).  Lines are attached to the bank statement whose
    id is passed through the ``sh_abs_id`` context key.  Rows that fail
    validation are collected in ``skipped_line_no`` (keyed by row
    number) and reported through ``show_success_msg``.

    :raises UserError: when the file cannot be parsed at all.
    """
    absl_obj = self.env['account.bank.statement.line']
    # perform import lead
    if self and self.file and self.env.context.get("sh_abs_id", False):
        # For CSV
        if self.import_type == 'csv':
            counter = 1
            skipped_line_no = {}
            try:
                # BUGFIX: base64.decodestring() was removed in Python 3.9;
                # decodebytes() is the supported equivalent (already used
                # elsewhere in this file).
                file = str(base64.decodebytes(self.file).decode('utf-8'))
                myreader = csv.reader(file.splitlines())
                skip_header = True
                for row in myreader:
                    try:
                        if skip_header:
                            skip_header = False
                            counter = counter + 1
                            continue
                        if row[0] != '' and row[1] != '':
                            final_date = None
                            cd = row[0]
                            cd = str(datetime.strptime(cd, '%Y-%m-%d').date())
                            final_date = cd
                            search_partner_id = False
                            if row[2] != '':
                                search_partner = self.env["res.partner"].search(
                                    [('name', '=', row[2])], limit=1)
                                if search_partner:
                                    search_partner_id = search_partner.id
                                else:
                                    skipped_line_no[str(counter)] = " - Partner not found. "
                                    counter = counter + 1
                                    continue
                            vals = {
                                'date': final_date,
                                'name': row[1],
                                'partner_id': search_partner_id,
                                'ref': row[3],
                                'amount': row[4],
                                'statement_id': self.env.context.get("sh_abs_id")
                            }
                            created_bsl = absl_obj.create(vals)
                            counter = counter + 1
                        else:
                            skipped_line_no[str(counter)] = " - Date or Label is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(_("Sorry, Your csv file does not match with our format"))
            if counter > 1:
                # -2: one for the 1-based start, one for the header row.
                completed_bsl = (counter - len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_bsl, skipped_line_no)
                return res
        # For Excel
        if self.import_type == 'excel':
            counter = 1
            skipped_line_no = {}
            try:
                # BUGFIX: decodestring() -> decodebytes() (removed in 3.9).
                wb = xlrd.open_workbook(file_contents=base64.decodebytes(self.file))
                sheet = wb.sheet_by_index(0)
                skip_header = True
                for row in range(sheet.nrows):
                    try:
                        if skip_header:
                            skip_header = False
                            counter = counter + 1
                            continue
                        if sheet.cell(row, 0).value != '' and sheet.cell(row, 1).value != '':
                            final_date = None
                            cd = sheet.cell(row, 0).value
                            cd = str(datetime.strptime(cd, '%Y-%m-%d').date())
                            final_date = cd
                            search_partner_id = False
                            if sheet.cell(row, 2).value != '':
                                search_partner = self.env["res.partner"].search(
                                    [('name', '=', sheet.cell(row, 2).value)], limit=1)
                                if search_partner:
                                    search_partner_id = search_partner.id
                                else:
                                    skipped_line_no[str(counter)] = " - Partner not found. "
                                    counter = counter + 1
                                    continue
                            vals = {
                                'date': final_date,
                                'name': sheet.cell(row, 1).value,
                                'partner_id': search_partner_id,
                                'ref': sheet.cell(row, 3).value,
                                'amount': sheet.cell(row, 4).value,
                                'statement_id': self.env.context.get("sh_abs_id")
                            }
                            created_bsl = absl_obj.create(vals)
                            counter = counter + 1
                        else:
                            skipped_line_no[str(counter)] = " - Date or Label is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(_("Sorry, Your excel file does not match with our format"))
            if counter > 1:
                completed_lead = (counter - len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_lead, skipped_line_no)
                return res
def import_product_tmpl_apply(self):
    """Import product templates from the uploaded CSV or Excel file.

    Column layout (0-based): 0 name, 1 can-be-sold flag ('FALSE' to
    disable), 2 can-be-purchased flag, 3 product type ('Service' /
    'Stockable Product' / default consumable), 4 category name, 5
    internal reference, 6 barcode, 7 sales price, 8 cost, 9 UoM, 10
    purchase UoM, 11 weight, 12 volume, 13 customer taxes (comma
    separated), 14 vendor taxes, 15 invoicing policy, 16 sale
    description, 17 quantity on hand, 18 image path or URL.

    ``self.method`` selects create vs. update (by barcode or internal
    reference).  Invalid rows are recorded in ``skipped_line_no`` and
    reported via ``show_success_msg``.

    Changes vs. original: ``base64.decodestring``/``encodestring`` were
    removed in Python 3.9 and are replaced by their supported aliases
    ``decodebytes``/``encodebytes``; behavior is otherwise unchanged.

    :raises UserError: when the file cannot be parsed at all.
    """
    product_tmpl_obj = self.env['product.template']
    # perform import lead
    if self and self.file:
        # For CSV
        if self.import_type == 'csv':
            counter = 1
            skipped_line_no = {}
            try:
                # BUGFIX: decodestring() -> decodebytes() (removed in 3.9).
                file = str(base64.decodebytes(self.file).decode('utf-8'))
                myreader = csv.reader(file.splitlines())
                skip_header = True
                for row in myreader:
                    try:
                        if skip_header:
                            skip_header = False
                            counter = counter + 1
                            continue
                        if row[0].strip() not in (None, ""):
                            can_be_sold = True
                            if row[1].strip() == 'FALSE':
                                can_be_sold = False
                            can_be_purchased = True
                            if row[2].strip() == 'FALSE':
                                can_be_purchased = False
                            product_type = 'consu'
                            if row[3].strip() == 'Service':
                                product_type = 'service'
                            elif row[3].strip() == 'Stockable Product':
                                product_type = 'product'
                            # Category: fall back to 'All' when col 4 is empty.
                            categ_id = False
                            if row[4].strip() in (None, ""):
                                search_category = self.env['product.category'].search(
                                    [('name', '=', 'All')], limit=1)
                                if search_category:
                                    categ_id = search_category.id
                                else:
                                    skipped_line_no[str(counter)] = " - Category - All not found. "
                                    counter = counter + 1
                                    continue
                            else:
                                search_category = self.env['product.category'].search(
                                    [('name', '=', row[4].strip())], limit=1)
                                if search_category:
                                    categ_id = search_category.id
                                else:
                                    skipped_line_no[str(counter)] = " - Category not found. "
                                    counter = counter + 1
                                    continue
                            # Unit of measure: fall back to 'Unit(s)'.
                            uom_id = False
                            if row[9].strip() in (None, ""):
                                search_uom = self.env['uom.uom'].search(
                                    [('name', '=', 'Unit(s)')], limit=1)
                                if search_uom:
                                    uom_id = search_uom.id
                                else:
                                    skipped_line_no[str(counter)] = " - Unit of Measure - Unit(s) not found. "
                                    counter = counter + 1
                                    continue
                            else:
                                search_uom = self.env['uom.uom'].search(
                                    [('name', '=', row[9].strip())], limit=1)
                                if search_uom:
                                    uom_id = search_uom.id
                                else:
                                    skipped_line_no[str(counter)] = " - Unit of Measure not found. "
                                    counter = counter + 1
                                    continue
                            uom_po_id = False
                            if row[10].strip() in (None, ""):
                                search_uom_po = self.env['uom.uom'].search(
                                    [('name', '=', 'Unit(s)')], limit=1)
                                if search_uom_po:
                                    uom_po_id = search_uom_po.id
                                else:
                                    skipped_line_no[str(counter)] = " - Purchase Unit of Measure - Unit(s) not found. "
                                    counter = counter + 1
                                    continue
                            else:
                                search_uom_po = self.env['uom.uom'].search(
                                    [('name', '=', row[10].strip())], limit=1)
                                if search_uom_po:
                                    uom_po_id = search_uom_po.id
                                else:
                                    skipped_line_no[str(counter)] = " - Purchase Unit of Measure not found. "
                                    counter = counter + 1
                                    continue
                            # Customer taxes: resolve every name in col 13.
                            customer_taxes_ids_list = []
                            some_taxes_not_found = False
                            if row[13].strip() not in (None, ""):
                                for x in row[13].split(','):
                                    x = x.strip()
                                    if x != '':
                                        search_customer_tax = self.env['account.tax'].search(
                                            [('name', '=', x)], limit=1)
                                        if search_customer_tax:
                                            customer_taxes_ids_list.append(search_customer_tax.id)
                                        else:
                                            some_taxes_not_found = True
                                            skipped_line_no[str(counter)] = " - Customer Taxes " + x + " not found. "
                                            break
                            if some_taxes_not_found:
                                counter = counter + 1
                                continue
                            vendor_taxes_ids_list = []
                            some_taxes_not_found = False
                            if row[14].strip() not in (None, ""):
                                for x in row[14].split(','):
                                    x = x.strip()
                                    if x != '':
                                        search_vendor_tax = self.env['account.tax'].search(
                                            [('name', '=', x)], limit=1)
                                        if search_vendor_tax:
                                            vendor_taxes_ids_list.append(search_vendor_tax.id)
                                        else:
                                            some_taxes_not_found = True
                                            skipped_line_no[str(counter)] = " - Vendor Taxes " + x + " not found. "
                                            break
                            if some_taxes_not_found:
                                counter = counter + 1
                                continue
                            invoicing_policy = 'order'
                            if row[15].strip() == 'Delivered quantities':
                                invoicing_policy = 'delivery'
                            vals = {
                                'name': row[0].strip(),
                                'sale_ok': can_be_sold,
                                'purchase_ok': can_be_purchased,
                                'type': product_type,
                                'categ_id': categ_id,
                                'list_price': row[7],
                                'standard_price': row[8],
                                'uom_id': uom_id,
                                'uom_po_id': uom_po_id,
                                'weight': row[11],
                                'volume': row[12],
                                'taxes_id': [(6, 0, customer_taxes_ids_list)],
                                'supplier_taxes_id': [(6, 0, vendor_taxes_ids_list)],
                                'invoice_policy': invoicing_policy,
                                'description_sale': row[16],
                            }
                            if row[6].strip() not in (None, ""):
                                barcode = row[6].strip()
                                vals.update({'barcode': barcode})
                            if row[5].strip() not in (None, ""):
                                default_code = row[5].strip()
                                vals.update({'default_code': default_code})
                            # Product image: download from URL or read a
                            # server-local file path.
                            if row[18].strip() not in (None, ""):
                                image_path = row[18].strip()
                                if "http://" in image_path or "https://" in image_path:
                                    try:
                                        r = requests.get(image_path)
                                        if r and r.content:
                                            # BUGFIX: encodestring() -> encodebytes().
                                            image_base64 = base64.encodebytes(r.content)
                                            vals.update({'image_medium': image_base64})
                                        else:
                                            skipped_line_no[str(counter)] = " - URL not correct or check your image size. "
                                            counter = counter + 1
                                            continue
                                    except Exception as e:
                                        skipped_line_no[str(counter)] = " - URL not correct or check your image size " + ustr(e)
                                        counter = counter + 1
                                        continue
                                else:
                                    try:
                                        with open(image_path, 'rb') as image:
                                            image.seek(0)
                                            binary_data = image.read()
                                            image_base64 = codecs.encode(binary_data, 'base64')
                                            if image_base64:
                                                vals.update({'image_medium': image_base64})
                                            else:
                                                skipped_line_no[str(counter)] = " - Could not find the image or please make sure it is accessible to this user. "
                                                counter = counter + 1
                                                continue
                                    except Exception as e:
                                        skipped_line_no[str(counter)] = " - Could not find the image or please make sure it is accessible to this user " + ustr(e)
                                        counter = counter + 1
                                        continue
                            # Create or update according to the wizard options.
                            created_product_tmpl = False
                            if self.method == 'create':
                                if row[6].strip() in (None, ""):
                                    created_product_tmpl = product_tmpl_obj.create(vals)
                                    counter = counter + 1
                                else:
                                    search_product_tmpl = product_tmpl_obj.search(
                                        [('barcode', '=', row[6].strip())], limit=1)
                                    if search_product_tmpl:
                                        skipped_line_no[str(counter)] = " - Barcode already exist. "
                                        counter = counter + 1
                                        continue
                                    else:
                                        created_product_tmpl = product_tmpl_obj.create(vals)
                                        counter = counter + 1
                            elif self.method == 'write' and self.product_update_by == 'barcode':
                                if row[6].strip() in (None, ""):
                                    created_product_tmpl = product_tmpl_obj.create(vals)
                                    counter = counter + 1
                                else:
                                    search_product_tmpl = product_tmpl_obj.search(
                                        [('barcode', '=', row[6].strip())], limit=1)
                                    if search_product_tmpl:
                                        created_product_tmpl = search_product_tmpl
                                        search_product_tmpl.write(vals)
                                        counter = counter + 1
                                    else:
                                        created_product_tmpl = product_tmpl_obj.create(vals)
                                        counter = counter + 1
                            elif self.method == 'write' and self.product_update_by == 'int_ref':
                                search_product_tmpl = product_tmpl_obj.search(
                                    [('default_code', '=', row[5].strip())], limit=1)
                                if search_product_tmpl:
                                    if row[6].strip() in (None, ""):
                                        created_product_tmpl = search_product_tmpl
                                        search_product_tmpl.write(vals)
                                        counter = counter + 1
                                    else:
                                        search_product_tmpl_bar = product_tmpl_obj.search(
                                            [('barcode', '=', row[6].strip())], limit=1)
                                        if search_product_tmpl_bar:
                                            skipped_line_no[str(counter)] = " - Barcode already exist. "
                                            counter = counter + 1
                                            continue
                                        else:
                                            created_product_tmpl = search_product_tmpl
                                            search_product_tmpl.write(vals)
                                            counter = counter + 1
                                else:
                                    if row[6].strip() in (None, ""):
                                        created_product_tmpl = product_tmpl_obj.create(vals)
                                        counter = counter + 1
                                    else:
                                        search_product_tmpl_bar = product_tmpl_obj.search(
                                            [('barcode', '=', row[6].strip())], limit=1)
                                        if search_product_tmpl_bar:
                                            skipped_line_no[str(counter)] = " - Barcode already exist. "
                                            counter = counter + 1
                                            continue
                                        else:
                                            created_product_tmpl = product_tmpl_obj.create(vals)
                                            counter = counter + 1
                            # Set initial quantity on hand for stockable products.
                            if created_product_tmpl and created_product_tmpl.product_variant_id and created_product_tmpl.type == 'product' and row[17] != '':
                                stock_vals = {
                                    'product_tmpl_id': created_product_tmpl.id,
                                    'new_quantity': row[17],
                                    'product_id': created_product_tmpl.product_variant_id.id
                                }
                                created_qty_on_hand = self.env['stock.change.product.qty'].create(stock_vals)
                                if created_qty_on_hand:
                                    created_qty_on_hand.change_product_qty()
                        else:
                            skipped_line_no[str(counter)] = " - Name is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(counter)] = " - Value is not valid. " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your csv file does not match with our format"))
            if counter > 1:
                # -2: one for the 1-based start, one for the header row.
                completed_records = (counter - len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_records, skipped_line_no)
                return res
        # For Excel
        if self.import_type == 'excel':
            counter = 1
            skipped_line_no = {}
            try:
                # BUGFIX: decodestring() -> decodebytes() (removed in 3.9).
                wb = xlrd.open_workbook(
                    file_contents=base64.decodebytes(self.file))
                sheet = wb.sheet_by_index(0)
                skip_header = True
                for row in range(sheet.nrows):
                    try:
                        if skip_header:
                            skip_header = False
                            counter = counter + 1
                            continue
                        if sheet.cell(row, 0).value.strip() not in (None, ""):
                            can_be_sold = True
                            if sheet.cell(row, 1).value.strip() == 'FALSE':
                                can_be_sold = False
                            can_be_purchased = True
                            if sheet.cell(row, 2).value.strip() == 'FALSE':
                                can_be_purchased = False
                            product_type = 'consu'
                            if sheet.cell(row, 3).value.strip() == 'Service':
                                product_type = 'service'
                            elif sheet.cell(row, 3).value.strip() == 'Stockable Product':
                                product_type = 'product'
                            categ_id = False
                            if sheet.cell(row, 4).value.strip() in (None, ""):
                                search_category = self.env['product.category'].search(
                                    [('name', '=', 'All')], limit=1)
                                if search_category:
                                    categ_id = search_category.id
                                else:
                                    skipped_line_no[str(counter)] = " - Category - All not found. "
                                    counter = counter + 1
                                    continue
                            else:
                                search_category = self.env['product.category'].search(
                                    [('name', '=', sheet.cell(row, 4).value.strip())], limit=1)
                                if search_category:
                                    categ_id = search_category.id
                                else:
                                    skipped_line_no[str(counter)] = " - Category not found. "
                                    counter = counter + 1
                                    continue
                            uom_id = False
                            if sheet.cell(row, 9).value.strip() in (None, ""):
                                search_uom = self.env['uom.uom'].search(
                                    [('name', '=', 'Unit(s)')], limit=1)
                                if search_uom:
                                    uom_id = search_uom.id
                                else:
                                    skipped_line_no[str(counter)] = " - Unit of Measure - Unit(s) not found. "
                                    counter = counter + 1
                                    continue
                            else:
                                search_uom = self.env['uom.uom'].search(
                                    [('name', '=', sheet.cell(row, 9).value.strip())], limit=1)
                                if search_uom:
                                    uom_id = search_uom.id
                                else:
                                    skipped_line_no[str(counter)] = " - Unit of Measure not found. "
                                    counter = counter + 1
                                    continue
                            uom_po_id = False
                            if sheet.cell(row, 10).value.strip() in (None, ""):
                                search_uom_po = self.env['uom.uom'].search(
                                    [('name', '=', 'Unit(s)')], limit=1)
                                if search_uom_po:
                                    uom_po_id = search_uom_po.id
                                else:
                                    skipped_line_no[str(counter)] = " - Purchase Unit of Measure - Unit(s) not found. "
                                    counter = counter + 1
                                    continue
                            else:
                                search_uom_po = self.env['uom.uom'].search(
                                    [('name', '=', sheet.cell(row, 10).value.strip())], limit=1)
                                if search_uom_po:
                                    uom_po_id = search_uom_po.id
                                else:
                                    skipped_line_no[str(counter)] = " - Purchase Unit of Measure not found. "
                                    counter = counter + 1
                                    continue
                            customer_taxes_ids_list = []
                            some_taxes_not_found = False
                            if sheet.cell(row, 13).value.strip() not in (None, ""):
                                for x in sheet.cell(row, 13).value.split(','):
                                    x = x.strip()
                                    if x != '':
                                        search_customer_tax = self.env['account.tax'].search(
                                            [('name', '=', x)], limit=1)
                                        if search_customer_tax:
                                            customer_taxes_ids_list.append(search_customer_tax.id)
                                        else:
                                            some_taxes_not_found = True
                                            skipped_line_no[str(counter)] = " - Customer Taxes " + x + " not found. "
                                            break
                            if some_taxes_not_found:
                                counter = counter + 1
                                continue
                            vendor_taxes_ids_list = []
                            some_taxes_not_found = False
                            if sheet.cell(row, 14).value.strip() not in (None, ""):
                                for x in sheet.cell(row, 14).value.split(','):
                                    x = x.strip()
                                    if x != '':
                                        search_vendor_tax = self.env['account.tax'].search(
                                            [('name', '=', x)], limit=1)
                                        if search_vendor_tax:
                                            vendor_taxes_ids_list.append(search_vendor_tax.id)
                                        else:
                                            some_taxes_not_found = True
                                            skipped_line_no[str(counter)] = " - Vendor Taxes " + x + " not found. "
                                            break
                            if some_taxes_not_found:
                                counter = counter + 1
                                continue
                            invoicing_policy = 'order'
                            if sheet.cell(row, 15).value.strip() == 'Delivered quantities':
                                invoicing_policy = 'delivery'
                            vals = {
                                'name': sheet.cell(row, 0).value.strip(),
                                'sale_ok': can_be_sold,
                                'purchase_ok': can_be_purchased,
                                'type': product_type,
                                'categ_id': categ_id,
                                'list_price': sheet.cell(row, 7).value,
                                'standard_price': sheet.cell(row, 8).value,
                                'uom_id': uom_id,
                                'uom_po_id': uom_po_id,
                                'weight': sheet.cell(row, 11).value,
                                'volume': sheet.cell(row, 12).value,
                                'taxes_id': [(6, 0, customer_taxes_ids_list)],
                                'supplier_taxes_id': [(6, 0, vendor_taxes_ids_list)],
                                'invoice_policy': invoicing_policy,
                                'description_sale': sheet.cell(row, 16).value,
                            }
                            if sheet.cell(row, 6).value not in (None, ""):
                                barcode = sheet.cell(row, 6).value
                                vals.update({'barcode': barcode})
                            if sheet.cell(row, 5).value not in (None, ""):
                                default_code = sheet.cell(row, 5).value
                                vals.update({'default_code': default_code})
                            if sheet.cell(row, 18).value.strip() not in (None, ""):
                                image_path = sheet.cell(row, 18).value.strip()
                                if "http://" in image_path or "https://" in image_path:
                                    try:
                                        r = requests.get(image_path)
                                        if r and r.content:
                                            # BUGFIX: encodestring() -> encodebytes().
                                            image_base64 = base64.encodebytes(r.content)
                                            vals.update({'image_medium': image_base64})
                                        else:
                                            skipped_line_no[str(counter)] = " - URL not correct or check your image size. "
                                            counter = counter + 1
                                            continue
                                    except Exception as e:
                                        skipped_line_no[str(counter)] = " - URL not correct or check your image size " + ustr(e)
                                        counter = counter + 1
                                        continue
                                else:
                                    try:
                                        with open(image_path, 'rb') as image:
                                            image.seek(0)
                                            binary_data = image.read()
                                            image_base64 = codecs.encode(binary_data, 'base64')
                                            if image_base64:
                                                vals.update({'image_medium': image_base64})
                                            else:
                                                skipped_line_no[str(counter)] = " - Could not find the image or please make sure it is accessible to this user. "
                                                counter = counter + 1
                                                continue
                                    except Exception as e:
                                        skipped_line_no[str(counter)] = " - Could not find the image or please make sure it is accessible to this user " + ustr(e)
                                        counter = counter + 1
                                        continue
                            created_product_tmpl = False
                            if self.method == 'create':
                                if sheet.cell(row, 6).value in (None, ""):
                                    created_product_tmpl = product_tmpl_obj.create(vals)
                                    counter = counter + 1
                                else:
                                    search_product_tmpl = product_tmpl_obj.search(
                                        [('barcode', '=', sheet.cell(row, 6).value)], limit=1)
                                    if search_product_tmpl:
                                        skipped_line_no[str(counter)] = " - Barcode already exist. "
                                        counter = counter + 1
                                        continue
                                    else:
                                        created_product_tmpl = product_tmpl_obj.create(vals)
                                        counter = counter + 1
                            elif self.method == 'write' and self.product_update_by == 'barcode':
                                if sheet.cell(row, 6).value in (None, ""):
                                    created_product_tmpl = product_tmpl_obj.create(vals)
                                    counter = counter + 1
                                else:
                                    search_product_tmpl = product_tmpl_obj.search(
                                        [('barcode', '=', sheet.cell(row, 6).value)], limit=1)
                                    if search_product_tmpl:
                                        created_product_tmpl = search_product_tmpl
                                        search_product_tmpl.write(vals)
                                        counter = counter + 1
                                    else:
                                        created_product_tmpl = product_tmpl_obj.create(vals)
                                        counter = counter + 1
                            elif self.method == 'write' and self.product_update_by == 'int_ref':
                                search_product_tmpl = product_tmpl_obj.search(
                                    [('default_code', '=', sheet.cell(row, 5).value)], limit=1)
                                if search_product_tmpl:
                                    if sheet.cell(row, 6).value in (None, ""):
                                        created_product_tmpl = search_product_tmpl
                                        search_product_tmpl.write(vals)
                                        counter = counter + 1
                                    else:
                                        search_product_tmpl_bar = product_tmpl_obj.search(
                                            [('barcode', '=', sheet.cell(row, 6).value)], limit=1)
                                        if search_product_tmpl_bar:
                                            skipped_line_no[str(counter)] = " - Barcode already exist. "
                                            counter = counter + 1
                                            continue
                                        else:
                                            created_product_tmpl = search_product_tmpl
                                            search_product_tmpl.write(vals)
                                            counter = counter + 1
                                else:
                                    if sheet.cell(row, 6).value in (None, ""):
                                        created_product_tmpl = product_tmpl_obj.create(vals)
                                        counter = counter + 1
                                    else:
                                        search_product_tmpl_bar = product_tmpl_obj.search(
                                            [('barcode', '=', sheet.cell(row, 6).value)], limit=1)
                                        if search_product_tmpl_bar:
                                            skipped_line_no[str(counter)] = " - Barcode already exist. "
                                            counter = counter + 1
                                            continue
                                        else:
                                            created_product_tmpl = product_tmpl_obj.create(vals)
                                            counter = counter + 1
                            if created_product_tmpl and created_product_tmpl.product_variant_id and created_product_tmpl.type == 'product' and sheet.cell(row, 17).value != '':
                                stock_vals = {
                                    'product_tmpl_id': created_product_tmpl.id,
                                    'new_quantity': sheet.cell(row, 17).value,
                                    'product_id': created_product_tmpl.product_variant_id.id
                                }
                                created_qty_on_hand = self.env['stock.change.product.qty'].create(stock_vals)
                                if created_qty_on_hand:
                                    created_qty_on_hand.change_product_qty()
                        else:
                            skipped_line_no[str(counter)] = " - Name is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(counter)] = " - Value is not valid. " + ustr(e)
                        counter = counter + 1
                        continue
            except Exception:
                raise UserError(
                    _("Sorry, Your excel file does not match with our format"))
            if counter > 1:
                completed_records = (counter - len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_records, skipped_line_no)
                return res
def import_ail_apply(self):
    """Import invoice (account.move) lines from the uploaded CSV/Excel file.

    Fixed columns (0-based): 0 product (matched by name / internal
    reference / barcode per ``self.product_by``), 1 description, 2
    quantity, 3 UoM, 4 unit price, 5 taxes (comma separated).  Columns
    from index 6 onward are dynamic: the header cell names a stored
    ``account.move.line`` field (optionally ``field@m2o_match_field``)
    and the values are validated through ``validate_field_value``.
    Lines are appended to the invoice passed via the ``sh_inv_id``
    context key.  Bad rows are collected in ``skipped_line_no`` and
    reported via ``show_success_msg``.
    """
    ir_model_fields_obj = self.env['ir.model.fields']
    #perform import lead
    if self and self.file and self.env.context.get('sh_inv_id', False):
        #For CSV
        if self.import_type == 'csv':
            counter = 1
            skipped_line_no = {}
            row_field_dic = {}      # column index -> dynamic-field descriptor
            row_field_error_dic = {}  # header name -> error text
            active_inv = False
            try:
                file = str(base64.decodebytes(self.file).decode('utf-8'))
                myreader = csv.reader(file.splitlines())
                skip_header = True
                invoice_line_ids = []
                for row in myreader:
                    try:
                        if skip_header:
                            # Header row: build the dynamic-field map from
                            # columns 6..N before any data is processed.
                            skip_header = False
                            for i in range(6, len(row)):
                                name_field = row[i]
                                name_m2o = False
                                if '@' in row[i]:
                                    list_field_str = name_field.split('@')
                                    name_field = list_field_str[0]
                                    name_m2o = list_field_str[1]
                                search_field = ir_model_fields_obj.sudo().search([
                                    ("model", "=", "account.move.line"),
                                    ("name", "=", name_field),
                                    ("store", "=", True),
                                ], limit=1)
                                if search_field:
                                    field_dic = {
                                        'name': name_field,
                                        'ttype': search_field.ttype,
                                        'required': search_field.required,
                                        'name_m2o': name_m2o
                                    }
                                    row_field_dic.update({i: field_dic})
                                else:
                                    row_field_error_dic.update(
                                        {row[i]: " - field not found"})
                            counter = counter + 1
                            continue
                        if row_field_error_dic:
                            # Abort the whole import when any header
                            # column could not be mapped.
                            res = self.show_success_msg(
                                0, row_field_error_dic)
                            return res
                        if row[0] != '':
                            vals = {}
                            # Pick the product lookup field per wizard option.
                            field_nm = 'name'
                            if self.product_by == 'name':
                                field_nm = 'name'
                            elif self.product_by == 'int_ref':
                                field_nm = 'default_code'
                            elif self.product_by == 'barcode':
                                field_nm = 'barcode'
                            search_product = self.env['product.product'].search(
                                [(field_nm, '=', row[0])], limit=1)
                            active_inv = self.env['account.move'].search(
                                [('id', '=', self.env.context.get('sh_inv_id'))],
                                limit=1)
                            if search_product and active_inv:
                                vals.update({'product_id': search_product.id})
                                if row[1] != '':
                                    vals.update({'name': row[1]})
                                else:
                                    # No description given: derive it from the
                                    # product (in the partner's language).
                                    product = None
                                    name = ''
                                    if active_inv and active_inv.partner_id:
                                        if active_inv.partner_id.lang:
                                            product = search_product.with_context(
                                                lang=active_inv.partner_id.lang)
                                        else:
                                            product = search_product
                                        name = product.partner_ref
                                    if active_inv.move_type in (
                                            'in_invoice', 'in_refund') and product:
                                        if product.description_purchase:
                                            name += '\n' + product.description_purchase
                                    elif product:
                                        if product.description_sale:
                                            name += '\n' + product.description_sale
                                    vals.update({'name': name})
                                # Income/expense account from the product,
                                # honoring the invoice's fiscal position.
                                accounts = search_product.product_tmpl_id.get_product_accounts(
                                    active_inv.fiscal_position_id)
                                account = False
                                if active_inv.move_type in ('out_invoice',
                                                            'out_refund'):
                                    account = accounts['income']
                                else:
                                    account = accounts['expense']
                                if not account:
                                    skipped_line_no[
                                        str(counter)] = " - Account not found. "
                                    counter = counter + 1
                                    continue
                                else:
                                    vals.update({'account_id': account.id})
                                if row[2] != '':
                                    vals.update({'quantity': row[2]})
                                else:
                                    # Default quantity when column is blank.
                                    vals.update({'quantity': 1})
                                if row[3] in (None, ""):
                                    # No UoM given: use the product's
                                    # purchase or sale UoM.
                                    if active_inv.move_type in (
                                            'in_invoice', 'in_refund'
                                    ) and search_product.uom_po_id:
                                        vals.update({
                                            'product_uom_id':
                                            search_product.uom_po_id.id
                                        })
                                    elif search_product.uom_id:
                                        vals.update({
                                            'product_uom_id':
                                            search_product.uom_id.id
                                        })
                                else:
                                    search_uom = self.env['uom.uom'].search(
                                        [('name', '=', row[3])], limit=1)
                                    if search_uom:
                                        vals.update({
                                            'product_uom_id': search_uom.id
                                        })
                                    else:
                                        skipped_line_no[str(
                                            counter
                                        )] = " - Unit of Measure not found. "
                                        counter = counter + 1
                                        continue
                                if row[4] in (None, ""):
                                    # No price given: fall back to product
                                    # cost (purchases) or list price (sales).
                                    if active_inv.move_type in (
                                            'in_invoice', 'in_refund'):
                                        vals.update({
                                            'price_unit':
                                            search_product.standard_price
                                        })
                                    else:
                                        vals.update({
                                            'price_unit':
                                            search_product.lst_price
                                        })
                                else:
                                    vals.update({'price_unit': row[4]})
                                if row[5].strip() in (None, ""):
                                    # No taxes given: use the product's
                                    # default supplier/customer taxes.
                                    if active_inv.move_type in (
                                            'in_invoice', 'in_refund'
                                    ) and search_product.supplier_taxes_id:
                                        vals.update({
                                            'tax_ids':
                                            [(6, 0, search_product.
                                              supplier_taxes_id.ids)]
                                        })
                                    elif active_inv.move_type in (
                                            'out_invoice', 'out_refund'
                                    ) and search_product.taxes_id:
                                        vals.update({
                                            'tax_ids':
                                            [(6, 0,
                                              search_product.taxes_id.ids)]
                                        })
                                else:
                                    # Resolve each comma-separated tax name.
                                    taxes_list = []
                                    some_taxes_not_found = False
                                    for x in row[5].split(','):
                                        x = x.strip()
                                        if x != '':
                                            search_tax = self.env[
                                                'account.tax'].search(
                                                    [('name', '=', x)],
                                                    limit=1)
                                            if search_tax:
                                                taxes_list.append(
                                                    search_tax.id)
                                            else:
                                                some_taxes_not_found = True
                                                skipped_line_no[str(
                                                    counter
                                                )] = " - Taxes " + x + " not found. "
                                                break
                                    if some_taxes_not_found:
                                        counter = counter + 1
                                        continue
                                    else:
                                        vals.update({
                                            'tax_ids': [(6, 0, taxes_list)]
                                        })
                            else:
                                skipped_line_no[str(
                                    counter)] = " - Product not found. "
                                counter = counter + 1
                                continue
                            # Validate and merge the dynamic columns (6..N).
                            is_any_error_in_dynamic_field = False
                            for k_row_index, v_field_dic in row_field_dic.items():
                                field_name = v_field_dic.get("name")
                                field_ttype = v_field_dic.get("ttype")
                                field_value = row[k_row_index]
                                field_required = v_field_dic.get("required")
                                field_name_m2o = v_field_dic.get("name_m2o")
                                dic = self.validate_field_value(
                                    field_name, field_ttype, field_value,
                                    field_required, field_name_m2o)
                                if dic.get("error", False):
                                    skipped_line_no[str(
                                        counter)] = dic.get("error")
                                    is_any_error_in_dynamic_field = True
                                    break
                                else:
                                    vals.update(dic)
                            if is_any_error_in_dynamic_field:
                                counter = counter + 1
                                continue
                            vals.update({
                                'move_id':
                                self.env.context.get('sh_inv_id')
                            })
                            invoice_line_ids.append((0, 0, vals))
                            counter = counter + 1
                        else:
                            skipped_line_no[str(
                                counter)] = " - Product is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(
                            counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
                # calculate taxes
                if active_inv:
                    active_inv._onchange_partner_id()
                    active_inv._onchange_invoice_line_ids()
                    active_inv.write({'invoice_line_ids': invoice_line_ids})
            except Exception as e:
                raise UserError(
                    _("Sorry, Your csv file does not match with our format" +
                      ustr(e)))
            if counter > 1:
                # -2: one for the 1-based start, one for the header row.
                completed_records = (counter - len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_records,
                                            skipped_line_no)
                return res
        #For Excel
        if self.import_type == 'excel':
            counter = 1
            skipped_line_no = {}
            row_field_dic = {}
            row_field_error_dic = {}
            active_inv = False
            try:
                wb = xlrd.open_workbook(
                    file_contents=base64.decodebytes(self.file))
                sheet = wb.sheet_by_index(0)
                skip_header = True
                invoice_line_ids = []
                for row in range(sheet.nrows):
                    try:
                        if skip_header:
                            # Header row: map dynamic columns 6..ncols.
                            skip_header = False
                            for i in range(6, sheet.ncols):
                                name_field = sheet.cell(row, i).value
                                name_m2o = False
                                if '@' in sheet.cell(row, i).value:
                                    list_field_str = name_field.split('@')
                                    name_field = list_field_str[0]
                                    name_m2o = list_field_str[1]
                                search_field = ir_model_fields_obj.sudo().search([
                                    ("model", "=", "account.move.line"),
                                    ("name", "=", name_field),
                                    ("store", "=", True),
                                ], limit=1)
                                if search_field:
                                    field_dic = {
                                        'name': name_field,
                                        'ttype': search_field.ttype,
                                        'required': search_field.required,
                                        'name_m2o': name_m2o
                                    }
                                    row_field_dic.update({i: field_dic})
                                else:
                                    row_field_error_dic.update({
                                        sheet.cell(row, i).value:
                                        " - field not found"
                                    })
                            counter = counter + 1
                            continue
                        if row_field_error_dic:
                            res = self.show_success_msg(
                                0, row_field_error_dic)
                            return res
                        if sheet.cell(row, 0).value != '':
                            vals = {}
                            field_nm = 'name'
                            if self.product_by == 'name':
                                field_nm = 'name'
                            elif self.product_by == 'int_ref':
                                field_nm = 'default_code'
                            elif self.product_by == 'barcode':
                                field_nm = 'barcode'
                            search_product = self.env['product.product'].search(
                                [(field_nm, '=', sheet.cell(row, 0).value)],
                                limit=1)
                            active_inv = self.env['account.move'].search(
                                [('id', '=', self.env.context.get('sh_inv_id'))],
                                limit=1)
                            if search_product and active_inv:
                                vals.update({'product_id': search_product.id})
                                if sheet.cell(row, 1).value != '':
                                    vals.update(
                                        {'name': sheet.cell(row, 1).value})
                                else:
                                    # Derive description from the product.
                                    name = ''
                                    product = None
                                    if active_inv and active_inv.partner_id:
                                        if active_inv.partner_id.lang:
                                            product = search_product.with_context(
                                                lang=active_inv.partner_id.lang)
                                        else:
                                            product = search_product
                                        name = product.partner_ref
                                    if active_inv.move_type in (
                                            'in_invoice', 'in_refund') and product:
                                        if product.description_purchase:
                                            name += '\n' + product.description_purchase
                                    elif product:
                                        if product.description_sale:
                                            name += '\n' + product.description_sale
                                    vals.update({'name': name})
                                accounts = search_product.product_tmpl_id.get_product_accounts(
                                    active_inv.fiscal_position_id)
                                account = False
                                if active_inv.move_type in ('out_invoice',
                                                            'out_refund'):
                                    account = accounts['income']
                                else:
                                    account = accounts['expense']
                                if not account:
                                    skipped_line_no[
                                        str(counter)] = " - Account not found. "
                                    counter = counter + 1
                                    continue
                                else:
                                    vals.update({'account_id': account.id})
                                if sheet.cell(row, 2).value != '':
                                    vals.update({
                                        'quantity':
                                        sheet.cell(row, 2).value
                                    })
                                else:
                                    vals.update({'quantity': 1})
                                if sheet.cell(row, 3).value in (None, ""):
                                    if active_inv.move_type in (
                                            'in_invoice', 'in_refund'
                                    ) and search_product.uom_po_id:
                                        vals.update({
                                            'product_uom_id':
                                            search_product.uom_po_id.id
                                        })
                                    elif search_product.uom_id:
                                        vals.update({
                                            'product_uom_id':
                                            search_product.uom_id.id
                                        })
                                else:
                                    search_uom = self.env['uom.uom'].search(
                                        [('name', '=',
                                          sheet.cell(row, 3).value)],
                                        limit=1)
                                    if search_uom:
                                        vals.update({
                                            'product_uom_id': search_uom.id
                                        })
                                    else:
                                        skipped_line_no[str(
                                            counter
                                        )] = " - Unit of Measure not found. "
                                        counter = counter + 1
                                        continue
                                if sheet.cell(row, 4).value in (None, ""):
                                    if active_inv.move_type in (
                                            'in_invoice', 'in_refund'):
                                        vals.update({
                                            'price_unit':
                                            search_product.standard_price
                                        })
                                    else:
                                        vals.update({
                                            'price_unit':
                                            search_product.lst_price
                                        })
                                else:
                                    vals.update({
                                        'price_unit':
                                        sheet.cell(row, 4).value
                                    })
                                if sheet.cell(row, 5).value.strip() in (None, ""):
                                    if active_inv.move_type in (
                                            'in_invoice', 'in_refund'
                                    ) and search_product.supplier_taxes_id:
                                        vals.update({
                                            'tax_ids':
                                            [(6, 0, search_product.
                                              supplier_taxes_id.ids)]
                                        })
                                    elif active_inv.move_type in (
                                            'out_invoice', 'out_refund'
                                    ) and search_product.taxes_id:
                                        vals.update({
                                            'tax_ids':
                                            [(6, 0,
                                              search_product.taxes_id.ids)]
                                        })
                                else:
                                    taxes_list = []
                                    some_taxes_not_found = False
                                    for x in sheet.cell(row, 5).value.split(','):
                                        x = x.strip()
                                        if x != '':
                                            search_tax = self.env[
                                                'account.tax'].search(
                                                    [('name', '=', x)],
                                                    limit=1)
                                            if search_tax:
                                                taxes_list.append(
                                                    search_tax.id)
                                            else:
                                                some_taxes_not_found = True
                                                skipped_line_no[str(
                                                    counter
                                                )] = " - Taxes " + x + " not found. "
                                                break
                                    if some_taxes_not_found:
                                        counter = counter + 1
                                        continue
                                    else:
                                        vals.update({
                                            'tax_ids': [(6, 0, taxes_list)]
                                        })
                            else:
                                skipped_line_no[str(
                                    counter)] = " - Product not found. "
                                counter = counter + 1
                                continue
                            is_any_error_in_dynamic_field = False
                            for k_row_index, v_field_dic in row_field_dic.items():
                                field_name = v_field_dic.get("name")
                                field_ttype = v_field_dic.get("ttype")
                                field_value = sheet.cell(row, k_row_index).value
                                field_required = v_field_dic.get("required")
                                field_name_m2o = v_field_dic.get("name_m2o")
                                dic = self.validate_field_value(
                                    field_name, field_ttype, field_value,
                                    field_required, field_name_m2o)
                                if dic.get("error", False):
                                    skipped_line_no[str(
                                        counter)] = dic.get("error")
                                    is_any_error_in_dynamic_field = True
                                    break
                                else:
                                    vals.update(dic)
                            if is_any_error_in_dynamic_field:
                                counter = counter + 1
                                continue
                            vals.update({
                                'move_id':
                                self.env.context.get('sh_inv_id')
                            })
                            invoice_line_ids.append((0, 0, vals))
                            counter = counter + 1
                        else:
                            skipped_line_no[str(
                                counter)] = " - Product is empty. "
                            counter = counter + 1
                    except Exception as e:
                        skipped_line_no[str(
                            counter)] = " - Value is not valid " + ustr(e)
                        counter = counter + 1
                        continue
                # calculate taxes
                if active_inv:
                    active_inv._onchange_partner_id()
                    active_inv._onchange_invoice_line_ids()
                    active_inv.write({'invoice_line_ids': invoice_line_ids})
            except Exception as e:
                raise UserError(
                    _("Sorry, Your excel file does not match with our format" +
                      ustr(e)))
            if counter > 1:
                completed_records = (counter - len(skipped_line_no)) - 2
                res = self.show_success_msg(completed_records,
                                            skipped_line_no)
                return res
def _run_sql(self):
    """Execute the SQL stored on this record and optionally return rows.

    The query in ``self.code`` is always executed; the fetched rows are
    returned (as a unicode string) only when ``expect_result`` is set,
    otherwise a placeholder string is returned.
    """
    self.ensure_one()
    cursor = self._cr
    cursor.execute(self.code)
    if not self.expect_result:
        return 'No expected result'
    return tools.ustr(cursor.fetchall())
def action_merge(self):
    """Merge the selected records of ``active_model`` into one survivor.

    The surviving record id is taken from the wizard field named by
    ``context['field_to_read']``.  All many2one, many2many and generic
    ``res_model``/``res_id`` references in the database are repointed to
    the survivor via raw SQL, then the losers are archived (or unlinked
    when the model has no ``active`` field and ``delete_if_not_active``
    is set).

    :return: an ``ir.actions.act_window_close`` action dict.
    :raises except_orm: when no active model is in context or no value
        to keep was selected.
    """
    # NOTE(review): raw SQL below is built by string concatenation from
    # ir_model_fields metadata; `str(tuple(object_ids))` produces
    # "(1,)" for a single id, which is invalid SQL IN syntax — verify
    # callers always pass 2+ ids.
    #res = self.read()[0]
    active_model = self._context.get('active_model')
    if not active_model:
        raise osv.osv.except_orm(
            _('Configuration Error!'),
            _('The is no active model defined!'))
    model_pool = self.env[active_model]
    object_ids = self._context.get('active_ids', [])
    field_to_read = self._context.get('field_to_read')
    field_list = field_to_read and [field_to_read] or []
    objectt = self.read(field_list)[0]
    if objectt and field_list and objectt[field_to_read]:
        # many2one read() value is (id, display_name); keep the id
        object_id = objectt[field_to_read][0]
    else:
        raise osv.osv.except_orm(
            _('Configuration Error!'),
            _('Please select one value to keep'))
    # Repoint every stored many2one column referencing active_model.
    self.env.cr.execute(
        "SELECT name, model FROM ir_model_fields WHERE relation=%s AND ttype NOT IN ('many2many', 'one2many');",
        (active_model, ))
    for name, model_raw in self.env.cr.fetchall():
        if getattr(self.env[model_raw], '_auto', None):
            if not self.env[model_raw]._auto:
                continue
        if getattr(self.env[model_raw], '_check_time', None):
            continue
        else:
            if getattr(self.env[model_raw], '_fields', None):
                if self.env[model_raw]._fields.get(name, False) and \
                        (isinstance(self.env[model_raw]._fields[name], fields.Many2one)
                         # or isinstance(self.env[model_raw]._fields[name], self.env[model_raw]._fields[name].compute) \
                         and self.env[model_raw]._fields[name].store):
                    if getattr(self.env[model_raw], '_table', None):
                        # skip abstract/transient models: no real table
                        if self.env[model_raw]._abstract or self.env[
                                model_raw]._transient or not self.env[
                                    model_raw]._auto:
                            continue
                        model = self.env[model_raw]._table
                    else:
                        model = model_raw.replace('.', '_')
                    requete = "UPDATE " + model + " SET " + name + "=" + str(
                        object_id) + " WHERE " + ustr(name) + " IN " + str(
                            tuple(object_ids)) + ";"
                    self.env.cr.execute(requete)
    # Repoint many2many relation tables (keep survivor rows unique).
    self.env.cr.execute(
        "SELECT name, model FROM ir_model_fields WHERE relation=%s AND ttype IN ('many2many');",
        (active_model, ))
    for field, model in self.env.cr.fetchall():
        # NOTE(review): `fields.many2many` (lowercase) looks like an
        # old-API name; on the new API this is `fields.Many2many` and
        # the lowercase attribute would raise AttributeError — verify.
        field_data = self.env[model] and self.env[model]._fields.get(field, False) \
            and (isinstance(self.env[model]._fields[field], fields.many2many)
                 # or isinstance(self.env[model_raw]._fields[name], self.env[model_raw]._fields[name].compute) \
                 and self.env[model]._fields[field].store) \
            and self.env[model]._fields[field] \
            or False
        if field_data:
            model_m2m, rel1, rel2 = field_data._sql_names(self.env[model])
            requete = "UPDATE %s SET %s=%s WHERE %s " \
                "IN %s AND %s NOT IN (SELECT DISTINCT(%s) " \
                "FROM %s WHERE %s = %s);" % (model_m2m, rel2,
                                             str(object_id), ustr(rel2),
                                             str(tuple(object_ids)),
                                             rel1, rel1, model_m2m,
                                             rel2, str(object_id))
            self.env.cr.execute(requete)
    # Repoint generic references stored as (res_model/model, res_id).
    self.env.cr.execute(
        "SELECT name, model FROM ir_model_fields WHERE name IN ('res_model', 'model');"
    )
    for field, model in self.env.cr.fetchall():
        model_obj = self.env[model]
        if not model_obj:
            continue
        if field == 'model' and model_obj._fields.get('res_model', False):
            continue
        res_id = model_obj._fields.get('res_id')
        if res_id:
            requete = False
            if isinstance(res_id, fields.Integer) or isinstance(
                    res_id, fields.Many2one):
                requete = "UPDATE %s SET res_id = %s " \
                    "WHERE res_id IN %s AND " \
                    "%s = '%s';" % (model_obj._table, str(object_id),
                                    str(tuple(object_ids)), field,
                                    active_model)
            # NOTE(review): `fields.char` (lowercase) — same old-API
            # concern as above; new API spells it `fields.Char`.
            elif isinstance(res_id, fields.char):
                requete = "UPDATE %s SET res_id = '%s' " \
                    "WHERE res_id IN %s AND " \
                    "%s = '%s';" % (model_obj._table, str(object_id),
                                    str(tuple([str(x) for x in object_ids])),
                                    field, active_model)
            if requete:
                self.env.cr.execute(requete)
    # Archive (or delete) every merged record except the survivor.
    unactive_object_ids = model_pool.search([('id', 'in', object_ids),
                                             ('id', '<>', object_id)])
    if model_pool._fields.get('active', False):
        for unactive_id in unactive_object_ids:
            unactive_id.write({'active': False})
    else:
        read_data = self.read(['delete_if_not_active'])
        if read_data['delete_if_not_active']:
            model_pool.unlink()
    # Try to limit multiplication of records in mail_followers table:
    requete = "DELETE FROM mail_followers WHERE id IN" \
        "(SELECT id FROM (SELECT COUNT(id) AS total, MAX(id) as id," \
        "res_model,res_id,partner_id FROM mail_followers " \
        "GROUP BY res_model,res_id,partner_id) a WHERE a.total > 1);"
    # One pass per merged loser; each pass removes one duplicate per group.
    for i in range(1, len(object_ids)):
        self.env.cr.execute(requete)
    return {'type': 'ir.actions.act_window_close'}
def export_sales_team_target_report(self):
    """Generate the YTM "Sales Team target" xlsx report.

    Builds a per-country / per-sales-team matrix of monthly sale totals
    (current year vs previous year vs budget target) between
    ``date_from`` and ``date_to``, writes it to a temporary xlsx file
    under /tmp, then wraps the base64-encoded content in a
    ``wiz.sales.team.target.report.exported`` record and returns an
    action opening it.

    :return: ``ir.actions.act_window`` dict for the export wizard, or
        ``{}`` when the wizard/view could not be created.
    :raises Warning: when ``date_from`` is after ``date_to``.
    """
    # sales_team_obj = self.env['crm.team']
    # state_obj = self.env['res.country.state']
    country_obj = self.env['res.country']
    sale_obj = self.env['sale.order']
    wiz_exported_obj = self.env['wiz.sales.team.target.report.exported']
    trg_team_obj = self.env['sales.billed.invoice.target.team']
    if self.date_from > self.date_to:
        raise Warning(_("To date must be greater than \
            or Equals to from date !!"))
    company = self.company_id and self.company_id.id or False
    # Below is the list of dates month wise.
    dates = [dt for dt in rrule(MONTHLY, dtstart=self.date_from,
                                until=self.date_to)]
    file_path = 'YTM Sales Team Report.xlsx'
    workbook = xlsxwriter.Workbook('/tmp/' + file_path)
    worksheet = workbook.add_worksheet("YTM Sales Team Report")
    # Cell formats used throughout the sheet.
    cell_font_fmt = workbook.add_format({
        'font_name': 'Arial',
    })
    cell_left_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'left',
    })
    cell_center_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'center',
        'bg_color': '#548235',
        'color': '#FFFFFF'
    })
    cell_left_color_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'left',
        'color': '#0070C0'
    })
    cell_right_color_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'right',
        'color': '#0070C0',
        'num_format': '#,##0.00'
    })
    cell_right_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'right',
        'num_format': '#,##0.00'
    })
    cell_bg_fmt = workbook.add_format({
        'font_name': 'Arial',
        'bg_color': '#548235',
        'color': '#FFFFFF'
    })
    cell_bg_cou_actual = workbook.add_format({
        'font_name': 'Arial',
        'bg_color': '#a9d18e',
        'color': '#FFFFFF'
    })
    cell_bg_cou_right_actual = workbook.add_format({
        'font_name': 'Arial',
        'bg_color': '#a9d18e',
        'align': 'right',
        'color': '#FFFFFF',
        'num_format': '#,##0.00'
    })
    worksheet.set_column(0, 0, 35)
    worksheet.freeze_panes(0, 1)
    worksheet.freeze_panes(5, 1)
    current_date = datetime.now()
    month_str = current_date.strftime('%b')
    year = current_date.year
    head_str = "YTM " + "(" + ustr(month_str) + " " + ustr(year) + " ) "
    head_str += "Sales Report - Acutal VS Budget & Comprable Period"
    worksheet.write(0, 0, head_str, cell_font_fmt)
    row = 2
    col = 0
    worksheet.write(row, col, "Sum of Sales (Net of Tax) Incl freight",
                    cell_font_fmt)
    row += 1
    worksheet.set_row(row, 25)
    worksheet.write(row, col, "Country/State", cell_bg_fmt)
    tot_balance_dict = {}
    if dates:
        col += 1
        # Header: one merged month block of 3 columns
        # (current year / previous year / variance).
        for month_st_dt in dates:
            month_str = month_st_dt.strftime("%B")
            dt_year = month_st_dt.year
            dt_prev_year = dt_year - 1
            worksheet.merge_range(row, col, row, col + 1,
                                  ustr(month_str), cell_center_fmt)
            # below one line is just fill the color for blank cell
            worksheet.set_row(row, 25)
            worksheet.write(row, col + 2, ' ', cell_center_fmt)
            row += 1
            # Below three lines will update years and month heading.
            worksheet.set_column(row, col, 15)
            worksheet.write(row, col, ustr(dt_year), cell_center_fmt)
            worksheet.set_column(row, col + 1, 15)
            worksheet.write(row, col + 1, ustr(dt_prev_year),
                            cell_center_fmt)
            worksheet.set_column(row, col + 2, 15)
            worksheet.set_row(row, 25)
            worksheet.write(row, col + 2, 'Variance', cell_center_fmt)
            if tot_balance_dict.get(month_st_dt, False):
                tot_balance_dict[month_st_dt].update({
                    'month_str': month_str,
                    'dt_year': ustr(dt_year),
                    'dt_prev_year': ustr(dt_prev_year),
                })
            else:
                tot_balance_dict.update({month_st_dt: {
                    'month_str': month_str,
                    'dt_year': ustr(dt_year),
                    'dt_prev_year': ustr(dt_prev_year),
                }})
            row -= 1
            col += 3
    row += 1
    col = 0
    worksheet.write(row, col, " ", cell_bg_fmt)
    row += 1
    # ---------------------------------------------------------------
    sale_ids = sale_obj.search([
        ('confirmation_date', '>=', self.date_from),
        ('confirmation_date', '<=', self.date_to),
        ('company_id', '=', company),
        ('state', 'in', ['sale', 'done'])])
    country_ids = sale_ids.mapped('partner_id').mapped('country_id')
    for country_id in country_obj.search(
            [('id', 'in', country_ids.ids)], order="name"):
        worksheet.set_row(row, 25)
        # Remember the country header cell: its monthly totals are
        # back-filled after all teams are processed.
        row_for_tot_bal = row
        col_for_tot_bal = col
        worksheet.write(row, col, country_id.name + " Actual",
                        cell_bg_cou_actual)
        row += 1
        sale_country_ids = sale_obj.search([
            ('confirmation_date', '>=', self.date_from),
            ('confirmation_date', '<=', self.date_to),
            ('partner_id.country_id', '=', country_id.id),
            ('company_id', '=', company),
            ('state', 'in', ['sale', 'done'])])
        sale_team_ids = sale_country_ids.mapped('team_id')
        for team_id in sale_team_ids:
            worksheet.set_row(row, 20)
            worksheet.write(row, col, team_id.name + " Actual",
                            cell_left_fmt)
            # Added Budget Team in file.
            row += 1
            worksheet.set_row(row, 20)
            worksheet.write(row, col, team_id.name + " Budget",
                            cell_left_color_fmt)
            row -= 1
            row_col = col + 1
            for month_st_dt in dates:
                month_days = \
                    monthrange(month_st_dt.year, month_st_dt.month)
                month_en_dt = month_st_dt
                month_en_dt = month_en_dt.\
                    replace(day=int(month_days[1]))
                dt_year = month_st_dt.year
                dt_prev_year = dt_year - 1
                prev_year_month_st_dt = month_st_dt
                pre_month_days = monthrange(dt_prev_year,
                                            prev_year_month_st_dt.month)
                prev_year_month_en_dt = month_en_dt
                prev_year_month_st_dt = prev_year_month_st_dt.\
                    replace(year=int(dt_prev_year))
                prev_year_month_en_dt = prev_year_month_en_dt.\
                    replace(day=int(pre_month_days[1]),
                            year=int(dt_prev_year))
                month_en_dt = month_en_dt.strftime("%Y-%m-%d 23:59:59")
                prev_year_month_en_dt = \
                    prev_year_month_en_dt.strftime("%Y-%m-%d 23:59:59")
                # Current-year actuals for this country/team/month.
                sale_team_country_wise_ids = sale_obj.search([
                    ('confirmation_date', '>=', month_st_dt),
                    ('confirmation_date', '<=', month_en_dt),
                    ('partner_id.country_id', '=', country_id.id),
                    ('team_id', '=', team_id.id),
                    ('company_id', '=', company),
                    ('state', 'in', ['sale', 'done'])])
                team_sales_total = \
                    sum(sale_team_country_wise_ids.mapped('amount_total'))
                sale_trg_budget_ids = trg_team_obj.search([
                    ('date_from', '>=', month_st_dt),
                    ('date_to', '<=', month_en_dt),
                    ('team_id', '=', team_id.id),
                    ('company_id', '=', company)])
                team_sales_budget_trg_tot = \
                    sum(sale_trg_budget_ids.mapped('sales_team_target'))
                worksheet.write(row, row_col, round(team_sales_total, 2),
                                cell_right_fmt)
                worksheet.write(row + 1, row_col,
                                round(team_sales_budget_trg_tot, 2),
                                cell_right_color_fmt)
                # Previous-year comparables.
                pre_sale_team_country_ids = sale_obj.search([
                    ('confirmation_date', '>=', prev_year_month_st_dt),
                    ('confirmation_date', '<=', prev_year_month_en_dt),
                    ('partner_id.country_id', '=', country_id.id),
                    ('team_id', '=', team_id.id),
                    ('company_id', '=', company),
                    ('state', 'in', ['sale', 'done'])])
                pre_team_sales_total = \
                    sum(pre_sale_team_country_ids.mapped('amount_total'))
                pre_sale_trg_budget_ids = trg_team_obj.search([
                    ('date_from', '>=', prev_year_month_st_dt),
                    ('date_to', '<=', prev_year_month_en_dt),
                    ('team_id', '=', team_id.id),
                    ('company_id', '=', company)])
                pre_team_sales_budget_trg_tot = \
                    sum(pre_sale_trg_budget_ids.mapped('sales_team_target'))
                row_col += 1
                worksheet.write(row, row_col,
                                round(pre_team_sales_total, 2),
                                cell_right_fmt)
                worksheet.write(row + 1, row_col,
                                round(pre_team_sales_budget_trg_tot, 2),
                                cell_right_color_fmt)
                variance_per = 0.0
                if pre_team_sales_total > 0:
                    variance_per = \
                        (team_sales_total - pre_team_sales_total) / \
                        pre_team_sales_total
                variance_per = round(variance_per, 2)
                budget_variance_per = 0.0
                if team_sales_budget_trg_tot > 0:
                    budget_variance_per = \
                        (team_sales_total - team_sales_budget_trg_tot) / \
                        team_sales_budget_trg_tot
                budget_variance_per = round(budget_variance_per, 2)
                row_col += 1
                worksheet.write(row, row_col, ustr(variance_per) + '%',
                                cell_right_fmt)
                worksheet.write(row + 1, row_col,
                                ustr(budget_variance_per) + '%',
                                cell_right_color_fmt)
                row_col += 1
                # Accumulate the per-country monthly totals.
                if tot_balance_dict.get(month_st_dt, False):
                    if tot_balance_dict[month_st_dt].get(
                            country_id.id, False):
                        tot_balance_dict[month_st_dt][country_id.id].\
                            update({
                                'dt_year_tot_bal':
                                tot_balance_dict[month_st_dt]
                                [country_id.id]['dt_year_tot_bal'] +
                                team_sales_total,
                                'dt_prev_year_tot_bal':
                                tot_balance_dict[month_st_dt]
                                [country_id.id]
                                ['dt_prev_year_tot_bal'] +
                                pre_team_sales_total,
                            })
                    else:
                        tot_balance_dict[month_st_dt].update({
                            country_id.id: {
                                'dt_year_tot_bal': team_sales_total,
                                'dt_prev_year_tot_bal':
                                pre_team_sales_total,
                            }
                        })
            # We added 2 row plus because added Budget and actual team.
            row += 2
        # Back-fill the country header row with the monthly totals.
        for month_st_dt in dates:
            if tot_balance_dict.get(month_st_dt, False) and \
                    tot_balance_dict[month_st_dt].\
                    get(country_id.id, False):
                sale_tot = \
                    tot_balance_dict[month_st_dt][country_id.id].\
                    get('dt_year_tot_bal', 0.0)
                pre_sale_tot = \
                    tot_balance_dict[month_st_dt][country_id.id].\
                    get('dt_prev_year_tot_bal', 0.0)
                worksheet.write(row_for_tot_bal, col_for_tot_bal + 1,
                                round(sale_tot, 2), cell_bg_cou_actual)
                worksheet.write(row_for_tot_bal, col_for_tot_bal + 2,
                                round(pre_sale_tot, 2),
                                cell_bg_cou_actual)
                tot_variance = 0.0
                if pre_sale_tot > 0:
                    tot_variance = \
                        (sale_tot - pre_sale_tot) / pre_sale_tot
                worksheet.write(row_for_tot_bal, col_for_tot_bal + 3,
                                ustr(round(tot_variance, 2)) + '%',
                                cell_bg_cou_right_actual)
                col_for_tot_bal = col_for_tot_bal + 3
    # ---------------------------------------------------------------
    workbook.close()
    # NOTE: base64.encodestring is deprecated (removed in Python 3.9);
    # kept for compatibility with this codebase's Python version.
    buf = base64.encodestring(open('/tmp/' + file_path, 'rb').read())
    try:
        if buf:
            # FIX: remove the actual temp file that was written above.
            # Previously this was os.remove(file_path + '.xlsx'), which
            # pointed at a non-existent name in the cwd, always raised
            # OSError and leaked the file in /tmp.
            os.remove('/tmp/' + file_path)
    except OSError:
        pass
    wiz_rec = wiz_exported_obj.create({
        'file': buf,
        'name': file_path
    })
    form_view = self.env.ref(
        'scs_crm_sales_role.wiz_sales_team_target_report_exported_form')
    if wiz_rec and form_view:
        return {
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_id': wiz_rec.id,
            'res_model': 'wiz.sales.team.target.report.exported',
            'views': [(form_view.id, 'form')],
            'view_id': form_view.id,
            'target': 'new',
        }
    else:
        return {}
def runjob(self, db, job_uuid, **kw):
    """Execute the queued job ``job_uuid`` in database ``db``.

    Locks the queue_job row, loads and performs the job, and maps the
    outcome onto job states: done, pending (postponed/retry) or failed.
    Always returns an empty string as HTTP body.
    """
    http.request.session.db = db
    env = http.request.env(user=odoo.SUPERUSER_ID)

    def retry_postpone(job, message, seconds=None):
        # Re-schedule the job in a fresh cursor so the state change
        # survives even though the job's own transaction is rolled back.
        job.env.clear()
        with odoo.api.Environment.manage():
            with odoo.registry(job.env.cr.dbname).cursor() as new_cr:
                job.env = job.env(cr=new_cr)
                job.postpone(result=message, seconds=seconds)
                job.set_pending(reset_retry=False)
                job.store()
                new_cr.commit()

    # ensure the job to run is in the correct state and lock the record
    env.cr.execute(
        "SELECT state FROM queue_job WHERE uuid=%s AND state=%s FOR UPDATE",
        (job_uuid, ENQUEUED),
    )
    if not env.cr.fetchone():
        _logger.warn(
            "was requested to run job %s, but it does not exist, "
            "or is not in state %s",
            job_uuid,
            ENQUEUED,
        )
        return ""
    job = Job.load(env, job_uuid)
    assert job and job.state == ENQUEUED
    try:
        try:
            self._try_perform_job(env, job)
        except OperationalError as err:
            # Automatically retry the typical transaction serialization
            # errors
            if err.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY:
                raise
            retry_postpone(job, tools.ustr(err.pgerror, errors="replace"),
                           seconds=PG_RETRY)
            _logger.debug("%s OperationalError, postponed", job)
    except NothingToDoJob as err:
        # Job decided there is nothing to do: mark done with its message.
        if str(err):
            msg = str(err)
        else:
            msg = _("Job interrupted and set to Done: nothing to do.")
        job.set_done(msg)
        job.store()
        env.cr.commit()
    except RetryableJobError as err:
        # delay the job later, requeue
        retry_postpone(job, str(err), seconds=err.seconds)
        _logger.debug("%s postponed", job)
    except (FailedJobError, Exception):
        # Any other failure: record the traceback on the job in a fresh
        # cursor, then re-raise so the caller sees the error.
        buff = StringIO()
        traceback.print_exc(file=buff)
        _logger.error(buff.getvalue())
        job.env.clear()
        with odoo.api.Environment.manage():
            with odoo.registry(job.env.cr.dbname).cursor() as new_cr:
                job.env = job.env(cr=new_cr)
                job.set_failed(exc_info=buff.getvalue())
                job.store()
                new_cr.commit()
        raise
    return ""
def create_from_ui(self, orders):
    """Create or update POS orders pushed from the point-of-sale UI.

    Orders whose ``pos_reference`` is unknown are created and paid;
    orders that already exist (draft sync) get their lines diffed
    against the submitted ``lines`` payload: matched lines are updated,
    new lines are added, and lines no longer present are unlinked.

    :param orders: list of dicts, each with ``data`` (the UI order
        payload) and ``to_invoice`` flags.
    :return: list of ids of the processed pos.order records.
    """
    submitted_references = [o['data']['name'] for o in orders]
    pos_order = self.search([('pos_reference', 'in',
                              submitted_references)])
    existing_orders = pos_order.read(['pos_reference'])
    existing_references = set(
        [o['pos_reference'] for o in existing_orders])
    orders_to_save = [
        o for o in orders if o['data']['name'] not in existing_references
    ]
    order_ids = []
    order_to_update = [
        o for o in orders if o['data']['name'] in existing_references
    ]
    # Keep only new orders
    for tmp_order in orders_to_save:
        to_invoice = tmp_order['to_invoice']
        order = tmp_order['data']
        if to_invoice:
            self._match_payment_to_invoice(order)
        pos_order = self._process_order(order)
        order_ids.append(pos_order.id)
        try:
            pos_order.action_pos_order_paid()
        except psycopg2.OperationalError:
            # do not hide transactional errors, the order(s) won't be saved!
            raise
        except Exception as e:
            _logger.error('Could not fully process the POS Order: %s',
                          tools.ustr(e))
        if to_invoice:
            pos_order.action_pos_order_invoice()
            pos_order.invoice_id.sudo().action_invoice_open()
            pos_order.account_move = pos_order.invoice_id.move_id
    # Update draft orders
    # NOTE(review): `pos_order` here is the recordset of pre-existing
    # orders searched above (possibly reassigned by the creation loop
    # when orders_to_save is non-empty) — verify that mixing both flows
    # in one call is intended.
    for tmp_order in order_to_update:
        for order in pos_order:
            if order.pos_reference == tmp_order['data']['name']:
                pos_line_ids = self.env['pos.order.line'].search([
                    ('order_id', '=', order.id)
                ])
                if pos_line_ids:
                    # pos_cid is the client-side line id used to diff
                    # existing lines against the submitted payload.
                    pos_cids = []
                    new_cids = []
                    for line_id in pos_line_ids:
                        pos_cids.append(line_id.pos_cid)
                        for line in tmp_order['data']['lines']:
                            if line_id.pos_cid == line[2].get('pos_cid'):
                                new_cids.append(line[2].get('pos_cid'))
                                # (1, id, vals): update existing line
                                order.write(
                                    {'lines': [(1, line_id.id, line[2])]})
                    for line in tmp_order['data']['lines']:
                        if line[2].get('pos_cid') not in pos_cids:
                            # (0, 0, vals): create missing line
                            order.write({'lines': [(0, 0, line[2])]})
                            pos_cids.append(line[2].get('pos_cid'))
                            new_cids.append(line[2].get('pos_cid'))
                    # Lines present in DB but absent from the payload
                    # get removed.
                    newList = []
                    for item in pos_cids:
                        if item not in new_cids:
                            newList.append(item)
                    order_line_ids = self.env['pos.order.line'].search([
                        ('pos_cid', 'in', newList)
                    ])
                    if order_line_ids:
                        for each_line in order_line_ids:
                            each_line.unlink()
                    to_invoice = tmp_order['to_invoice']
                    order = tmp_order['data']
                    if to_invoice:
                        self._match_payment_to_invoice(order)
                    pos_order = self._process_order(order)
                    order_ids.append(pos_order.id)
                    try:
                        pos_order.action_pos_order_paid()
                    except psycopg2.OperationalError:
                        # do not hide transactional errors, the order(s) won't be saved!
                        raise
                    except Exception as e:
                        _logger.error(
                            'Could not fully process the POS Order: %s',
                            tools.ustr(e))
                    if to_invoice:
                        pos_order.action_pos_order_invoice()
                        pos_order.invoice_id.sudo().action_invoice_open()
                        pos_order.account_move = \
                            pos_order.invoice_id.move_id
    self.broadcast_order_data(True)
    return order_ids
def _ogone_s2s_validate_tree(self, tree, tries=2):
    """Map an Ogone server-to-server status response onto the tx state.

    :param tree: parsed Ogone response; ``STATUS`` drives the outcome.
    :param tries: remaining polls for "wait" statuses (re-query Ogone
        and recurse with ``tries - 1``).
    :return: True when the transaction is validated (or already was),
        False on feedback error, None for cancel/pending branches.
    """
    if self.state not in ['draft', 'pending']:
        _logger.info(
            'Ogone: trying to validate an already validated tx (ref %s)',
            self.reference)
        return True
    status = int(tree.get('STATUS') or 0)
    if status in self._ogone_valid_tx_status:
        self.write({
            'date': datetime.date.today().strftime(
                DEFAULT_SERVER_DATE_FORMAT),
            'acquirer_reference': tree.get('PAYID'),
        })
        # Save the card alias as a reusable payment token when allowed.
        if tree.get('ALIAS') and self.partner_id and \
                (self.type == 'form_save' or
                 self.acquirer_id.save_token == 'always')\
                and not self.payment_token_id:
            pm = self.env['payment.token'].create({
                'partner_id': self.partner_id.id,
                'acquirer_id': self.acquirer_id.id,
                'acquirer_ref': tree.get('ALIAS'),
                'name': tree.get('CARDNO'),
            })
            self.write({'payment_token_id': pm.id})
        if self.payment_token_id:
            self.payment_token_id.verified = True
        self._set_transaction_done()
        self.execute_callback()
        # if this transaction is a validation one, then we refund the
        # money we just withdrawn
        if self.type == 'validation':
            self.s2s_do_refund()
        return True
    elif status in self._ogone_cancel_tx_status:
        self.write({'acquirer_reference': tree.get('PAYID')})
        self._set_transaction_cancel()
    elif status in self._ogone_pending_tx_status:
        vals = {
            'acquirer_reference': tree.get('PAYID'),
        }
        if status == 46:  # HTML 3DS: store the 3-D Secure challenge page
            vals['html_3ds'] = ustr(base64.b64decode(
                tree.HTML_ANSWER.text))
        self.write(vals)
        self._set_transaction_pending()
    elif status in self._ogone_wait_tx_status and tries > 0:
        # Transient status: poll Ogone again after a short delay.
        time.sleep(0.5)
        self.write({'acquirer_reference': tree.get('PAYID')})
        tree = self._ogone_s2s_get_tx_status()
        return self._ogone_s2s_validate_tree(tree, tries - 1)
    else:
        error = 'Ogone: feedback error: %(error_str)s\n\n%(error_code)s: %(error_msg)s' % {
            'error_str': tree.get('NCERRORPLUS'),
            'error_code': tree.get('NCERROR'),
            'error_msg': ogone.OGONE_ERROR_MAP.get(tree.get('NCERROR')),
        }
        _logger.info(error)
        self.write({
            'state_message': error,
            'acquirer_reference': tree.get('PAYID'),
        })
        self._set_transaction_cancel()
        return False
def export_sales_team_target_report(self):
    """Generate the "Non-Parent And Child" member purchase xlsx report.

    For every distinct partner ``member_no`` the sheet shows monthly
    invoiced amounts (account.invoice.line records on 4000-type
    accounts) between ``date_from`` and ``date_to``, plus a YTD total
    per member and grand totals.  The xlsx is written to /tmp, base64
    encoded into a ``wiz.non.parent.child.report.exported`` record and
    returned as a window action.

    :return: ``ir.actions.act_window`` dict for the export wizard, or
        ``{}`` when the wizard/view could not be created.
    :raises Warning: when ``date_from`` is after ``date_to``.
    """
    # sales_team_obj = self.env['crm.team']
    # state_obj = self.env['res.country.state']
    # country_obj = self.env['res.country']
    # sale_obj = self.env['sale.order']
    partner_obj = self.env['res.partner']
    acc_inv_l_obj = self.env['account.invoice.line']
    wiz_exported_obj = self.env['wiz.non.parent.child.report.exported']
    # trg_team_obj = self.env['sales.billed.invoice.target.team']
    # inv_obj = self.env['account.invoice']
    if self.date_from > self.date_to:
        raise Warning(
            _("To date must be greater than \
            or Equals to from date !!"))
    company = self.company_id and self.company_id.id or False
    # Below is the list of dates month wise.
    dates = [
        dt for dt in rrule(
            MONTHLY, dtstart=self.date_from, until=self.date_to)
    ]
    member_ids = partner_obj.search([('company_id', '=', company),
                                     ('member_no', '!=', False)
                                     # ('x_studio_plumber', '=', True),
                                     ])
    member_nos = list(set(member_ids.mapped('member_no')))
    file_path = 'Non-Parent And Child Report.xlsx'
    workbook = xlsxwriter.Workbook('/tmp/' + file_path)
    worksheet = workbook.add_worksheet("Non-Parent And Child Report")
    # Cell formats used throughout the sheet.
    cell_center_head_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'center',
        'bg_color': 'gray',
        'color': '#FFFFFF'
    })
    cell_center_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'center',
        'bg_color': 'white',
        'color': '#003366',
        'font_size': 20,
        'border': 2,
    })
    cell_center_num_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'center',
        'bg_color': '#2F5597',
        'color': '#FFFFFF',
        'num_format': '#,##0.00'
    })
    cell_right_num_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'right',
        'bg_color': 'gray',
        'color': '#FFFFFF',
        'num_format': '#,##0.00'
    })
    cell_center_mem_id_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'center',
        'bg_color': 'ffcc99',
        'color': 'black'
    })
    cell_center_left_mem_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'left',
        'bg_color': 'white',
        'color': 'black',
        'border': 1
    })
    cell_center_bal_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'center',
        'bg_color': 'ffeb9c',
        'color': 'black',
        'num_format': '#,##0.00'
    })
    cell_left_tot_bal_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'left',
        'bg_color': '#DAE3F3',
        'color': 'black'
    })
    cell_right_bal_blue_fmt = workbook.add_format({
        'font_name': 'Arial',
        'align': 'right',
        'bg_color': '#2F5597',
        'color': 'white',
        'num_format': '#,##0.00'
    })
    worksheet.set_column(1, 1, 35)
    worksheet.set_row(1, 25)
    worksheet.merge_range(1, 2, 1, 5,
                          'Plumbing Plus Supplier Purchase Figures',
                          cell_center_fmt)
    curr_dt = datetime.now().date().strftime("%d-%m-%Y")
    worksheet.write(3, 5, "Print Date:", cell_center_left_mem_fmt)
    worksheet.write(3, 6, curr_dt, cell_center_left_mem_fmt)
    worksheet.freeze_panes(0, 2)
    worksheet.freeze_panes(5, 2)
    row = 5
    col = 0
    worksheet.write(row, col, 'MemID', cell_center_head_fmt)
    col += 1
    worksheet.write(row, col, 'MEMBER', cell_center_head_fmt)
    col += 1
    # One header column per month, e.g. "Jan-20".
    if dates:
        for month_st_dt in dates:
            month_str = month_st_dt.strftime("%b")
            year_str = month_st_dt.strftime("%y")
            month_year_str = month_str + '-' + ustr(year_str)
            worksheet.set_column(col, col, 15)
            worksheet.write(row, col, month_year_str,
                            cell_center_head_fmt)
            col += 1
    worksheet.set_column(col, col, 15)
    worksheet.write(row, col, 'YTD Total', cell_center_head_fmt)
    row += 1
    col = 0
    final_ytd_tot = 0.0
    final_totals_dict = {}
    if member_nos:
        for member_no in member_nos:
            filtered_mem_ids = partner_obj.search([
                ('company_id', '=', company),
                ('member_no', '=', member_no),
                ('id', 'in', member_ids.ids)
                # ('x_studio_plumber', '=', True),
            ])
            parent_ids = list(set(filtered_mem_ids.mapped('parent_id')))
            # Prefer the common parent's name when all partners of this
            # member number share a single parent.
            member_name = filtered_mem_ids and \
                filtered_mem_ids[0].name or ''
            if parent_ids and len(parent_ids) == 1:
                member_name = parent_ids[0].name or ''
            worksheet.write(row, col, member_no or ' ',
                            cell_center_mem_id_fmt)
            col += 1
            worksheet.write(row, col, member_name or '',
                            cell_center_left_mem_fmt)
            col += 1
            plumber_tot = 0.0
            if dates:
                for month_st_dt in dates:
                    month_days = monthrange(month_st_dt.year,
                                            month_st_dt.month)
                    month_en_dt = month_st_dt
                    month_en_dt = month_en_dt.\
                        replace(day=int(month_days[1]))
                    dt_year = month_st_dt.year
                    dt_prev_year = dt_year - 1
                    prev_year_month_st_dt = month_st_dt
                    pre_month_days = \
                        monthrange(dt_prev_year,
                                   prev_year_month_st_dt.month)
                    prev_year_month_en_dt = month_en_dt
                    prev_year_month_st_dt = prev_year_month_st_dt.\
                        replace(year=int(dt_prev_year))
                    prev_year_month_en_dt = prev_year_month_en_dt.\
                        replace(day=int(pre_month_days[1]),
                                year=int(dt_prev_year))
                    month_en_dt = month_en_dt.strftime(
                        "%Y-%m-%d 23:59:59")
                    prev_year_month_en_dt = \
                        prev_year_month_en_dt.strftime(
                            "%Y-%m-%d 23:59:59")
                    # Monthly invoiced amount for this member's partners
                    # on revenue (4000) accounts.
                    acc_inv_l_ids = acc_inv_l_obj.search([
                        ('invoice_id.type', 'in',
                         ['out_invoice', 'out_refund']),
                        ('x_studio_invoice_reference_status', 'not in',
                         ['draft', 'cancel']),
                        ('account_id', 'ilike', '4000'),
                        ('partner_id', 'in', filtered_mem_ids.ids),
                        ('x_studio_invoice_date', '>=', month_st_dt),
                        ('x_studio_invoice_date', '<=', month_en_dt)
                    ])
                    sale_tot = sum(
                        acc_inv_l_ids.mapped('x_studio_signed_amount'))
                    worksheet.write(row, col, sale_tot,
                                    cell_center_bal_fmt)
                    if final_totals_dict and \
                            month_st_dt in final_totals_dict.keys():
                        amt = final_totals_dict[month_st_dt] + sale_tot
                        final_totals_dict.update({month_st_dt: amt})
                    else:
                        final_totals_dict.update(
                            {month_st_dt: sale_tot})
                    plumber_tot += sale_tot
                    col += 1
            # YTD Total
            worksheet.write(row, col, plumber_tot,
                            cell_right_bal_blue_fmt)
            final_ytd_tot += plumber_tot
            row += 1
            col = 0
    col = 1
    if final_totals_dict:
        worksheet.write(row, col, 'Total:', cell_left_tot_bal_fmt)
    col += 1
    if dates and final_totals_dict:
        for month_st_dt in dates:
            worksheet.write(row, col, final_totals_dict[month_st_dt],
                            cell_center_num_fmt)
            col += 1
    # YTD Total FINAL
    if final_totals_dict:
        worksheet.write(row, col, final_ytd_tot, cell_right_num_fmt)
    workbook.close()
    # NOTE: base64.encodestring is deprecated (removed in Python 3.9);
    # kept for compatibility with this codebase's Python version.
    buf = base64.encodestring(open('/tmp/' + file_path, 'rb').read())
    try:
        if buf:
            # FIX: remove the actual temp file that was written above.
            # Previously this was os.remove(file_path + '.xlsx'), which
            # pointed at a non-existent name in the cwd, always raised
            # OSError and leaked the file in /tmp.
            os.remove('/tmp/' + file_path)
    except OSError:
        pass
    wiz_rec = wiz_exported_obj.create({'file': buf, 'name': file_path})
    form_view = self.env.ref(
        'scs_crm_sales_role.wiz_non_parent_child_report_exported_form')
    if wiz_rec and form_view:
        return {
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_id': wiz_rec.id,
            'res_model': 'wiz.non.parent.child.report.exported',
            'views': [(form_view.id, 'form')],
            'view_id': form_view.id,
            'target': 'new',
        }
    else:
        return {}
def action_account_moves_all_statement(self):
    """Recompute running balances for the journal's default accounts,
    then open the journal-items list filtered on the debit account.

    For each default credit/debit account a plpgsql function walks the
    account's move lines by date and fills ``current_time_balance`` /
    ``current_time_balance_sequence``; multi-currency accounts
    accumulate ``amount_currency``, others accumulate ``balance``.

    :return: the ``account.action_account_moves_all`` action dict with
        a domain on the default debit account.
    :raises UserError: when the SQL execution fails.
    """
    # compute new balance
    account_ids = []
    if self.default_credit_account_id.id:
        account_ids.append(self.default_credit_account_id.id)
    if self.default_debit_account_id.id:
        account_ids.append(self.default_debit_account_id.id)
    current_company_currency_id = self.env.ref(
        'base.main_company').currency_id.id
    for e in account_ids:
        # check current account use amount currency or not
        currency_id = self.env['account.account'].sudo().browse(
            e).currency_id
        try:
            # NOTE(review): `currency_id != current_company_currency_id`
            # compares a res.currency record with an integer id, so it
            # is effectively always True; likewise `is not None` /
            # `is not False` on a recordset never match — probably the
            # intent was `currency_id.id != current_company_currency_id`.
            # Verify before relying on the multi-currency branch.
            if currency_id is not None and currency_id.id is not False \
                    and currency_id != current_company_currency_id:
                # Foreign-currency account: accumulate amount_currency.
                self.env.cr.execute(
                    """CREATE OR REPLACE FUNCTION update_current_time_balance() RETURNS VOID AS $BODY$
                    DECLARE r RECORD; r_b FLOAT; a int;
                    BEGIN
                    a = 1;
                    FOR r IN select id, amount_currency, debit, credit from account_move_line where account_id = %s order by date asc
                    LOOP
                    if a = 1 then r_b = r.amount_currency; update account_move_line set current_time_balance = r.amount_currency where id = r.id; end if;
                    if a > 1 then r_b = r_b + r.amount_currency; update account_move_line set current_time_balance = r_b where id = r.id; end if;
                    update account_move_line set current_time_balance_sequence = a where id = r.id;
                    a = a + 1;
                    END LOOP;
                    RETURN;
                    END
                    $BODY$ LANGUAGE plpgsql;
                    SELECT update_current_time_balance();""",
                    (str(e), ))
            else:
                # Company-currency account: accumulate balance.
                self.env.cr.execute(
                    """CREATE OR REPLACE FUNCTION update_current_time_balance() RETURNS VOID AS $BODY$
                    DECLARE r RECORD; r_b FLOAT; a int;
                    BEGIN
                    a = 1;
                    FOR r IN select id, balance, debit, credit from account_move_line where account_id = %s order by date asc
                    LOOP
                    if a = 1 then r_b = r.balance; update account_move_line set current_time_balance = r.balance where id = r.id; end if;
                    if a > 1 then r_b = r_b + r.balance; update account_move_line set current_time_balance = r_b where id = r.id; end if;
                    update account_move_line set current_time_balance_sequence = a where id = r.id;
                    a = a + 1;
                    END LOOP;
                    RETURN;
                    END
                    $BODY$ LANGUAGE plpgsql;
                    SELECT update_current_time_balance();""",
                    (str(e), ))
        except Exception as ex:
            raise UserError(_('fetch customers %s') % tools.ustr(ex))
    action = self.env.ref('account.action_account_moves_all').read()[0]
    action['domain'] = [('account_id', '=',
                         self.default_debit_account_id.ids[0])]
    action['context'] = {'current_account_id': '1'}
    return action
def name_get(self):
    """Return ``(id, display name)`` pairs, with the name coerced to
    unicode via ``tools.ustr``."""
    result = []
    for currency in self:
        result.append((currency.id, tools.ustr(currency.name)))
    return result
def _get_exception_message(exception):
    """Return a unicode message for *exception*.

    ``except_orm`` instances carry their human-readable text in
    ``.value``; any other exception is stringified as-is.
    """
    if isinstance(exception, except_orm):
        return tools.ustr(exception.value)
    return tools.ustr(exception)
def send_email(
    self,
    message,
    mail_server_id=None,
    smtp_server=None,
    smtp_port=None,
    smtp_user=None,
    smtp_password=None,
    smtp_encryption=None,
    smtp_debug=False,
    smtp_session=None,
):
    """Override the standard method to fix the issue of using a mail client
    where relaying is disallowed.

    Unlike the standard implementation, this version always opens a fresh
    SMTP connection via :meth:`connect` and rewrites the ``From`` header so
    the envelope sender is the authenticated SMTP user — which is what
    allows sending through servers that refuse relaying for foreign
    sender addresses.

    :param message: an ``email.message.Message`` to send.
    :param smtp_session: if given, suppresses the final ``quit()``.
        NOTE(review): the session itself is *not* reused for sending — a
        new connection is always established; confirm this is intended.
    :return: the message's ``Message-Id`` on success (or immediately, in
        test mode).
    :raises AssertionError: missing/malformed sender or no valid recipient
        (callers such as ``_send`` deliberately catch this and compare
        against ``NO_VALID_RECIPIENT`` — do not convert these to other
        exception types).
    :raises MailDeliveryException: on any SMTP failure other than a
        server disconnect (which is re-raised as-is).
    """
    # Use the default bounce address **only if** no Return-Path was
    # provided by caller. Caller may be using Variable Envelope Return
    # Path (VERP) to detect no-longer valid email addresses.
    smtp_from = (message["Return-Path"] or
                 self._get_default_bounce_address() or
                 message["From"])
    assert (
        smtp_from
    ), "The Return-Path or From header is required for any outbound email"

    # The email's "Envelope From" (Return-Path), and all recipient
    # addresses must only contain ASCII characters.
    from_rfc2822 = extract_rfc2822_addresses(smtp_from)
    assert from_rfc2822, ("Malformed 'Return-Path' or 'From' address: "
                          "%r - "
                          "It should contain one valid plain ASCII "
                          "email") % smtp_from
    # use last extracted email, to support rarities like 'Support@MyComp
    # <*****@*****.**>'
    smtp_from = from_rfc2822[-1]
    email_to = message["To"]
    email_cc = message["Cc"]
    email_bcc = message["Bcc"]
    # Bcc recipients must never appear in the transmitted headers; they are
    # only added to the SMTP envelope recipient list below.
    del message["Bcc"]
    smtp_to_list = [
        address
        for base in [email_to, email_cc, email_bcc]
        for address in extract_rfc2822_addresses(base)
        if address
    ]
    assert smtp_to_list, self.NO_VALID_RECIPIENT

    x_forge_to = message["X-Forge-To"]
    if x_forge_to:
        # `To:` header forged, e.g. for posting on mail.channels,
        # to avoid confusion
        del message["X-Forge-To"]
        del message["To"]  # avoid multiple To: headers!
        message["To"] = x_forge_to

    # Do not actually send emails in testing mode!
    if (getattr(threading.currentThread(), "testing", False)
            or self.env.registry.in_test_mode()):
        _test_logger.info("skip sending email in test mode")
        return message["Message-Id"]

    try:
        message_id = message["Message-Id"]
        # START OF CODE ADDED
        # Always open a new connection with the provided credentials.
        # NOTE(review): `mail_server_id` is accepted but not forwarded to
        # connect() — confirm server-record-based configuration is not
        # expected here.
        smtp = self.connect(
            smtp_server,
            smtp_port,
            smtp_user,
            smtp_password,
            smtp_encryption or False,
            smtp_debug,
        )
        from email.utils import parseaddr, formataddr
        # exact name and address
        (oldname, oldemail) = parseaddr(message["From"])
        # use original name with new address: keep the display name the
        # caller chose, but substitute the authenticated account's address
        # so the relay accepts the mail.
        newfrom = formataddr((oldname, smtp.user))
        # need to use replace_header instead '=' to prevent
        # double field
        message.replace_header("From", newfrom)
        # Envelope sender is the SMTP user as well (not smtp_from).
        smtp.sendmail(smtp.user, smtp_to_list, message.as_string())
        # END OF CODE ADDED
        # do not quit() a pre-established smtp_session
        if not smtp_session:
            smtp.quit()
    except smtplib.SMTPServerDisconnected:
        # Let the caller decide how to handle a dropped session (e.g.
        # retry with a fresh connection).
        raise
    except Exception as e:
        params = (ustr(smtp_server), e.__class__.__name__, ustr(e))
        msg = _(
            "Mail delivery failed via SMTP server '%s'.\n%s: %s") % params
        _logger.info(msg)
        raise MailDeliveryException(_("Mail Delivery Failed"), msg)
    return message_id
def _send(self, auto_commit=False, raise_exception=False, smtp_session=None):
    """Send the mail.mail records in ``self`` immediately.

    Each mail is processed independently: its attachments are collected,
    one outgoing email is built per recipient, and the result (``sent`` or
    ``exception``) is written back on the record and its notifications.

    :param bool auto_commit: commit after each processed mail (use only in
        scheduler/cron context — never from an interactive request).
    :param bool raise_exception: re-raise delivery errors instead of only
        flagging the mail record as failed.
    :param smtp_session: optional pre-established SMTP session, passed
        through to ``ir.mail_server.send_email``.
    :return: True
    """
    IrMailServer = self.env['ir.mail_server']
    IrAttachment = self.env['ir.attachment']
    for mail_id in self.ids:
        success_pids = []
        failure_type = None
        processing_pid = None
        mail = None
        try:
            mail = self.browse(mail_id)
            if mail.state != 'outgoing':
                # Already-processed mails are skipped; stale ones flagged
                # auto_delete are garbage-collected on the way.
                if mail.state != 'exception' and mail.auto_delete:
                    mail.sudo().unlink()
                continue

            # remove attachments if user send the link with the access_token
            body = mail.body_html or ''
            attachments = mail.attachment_ids
            for link in re.findall(r'/web/(?:content|image)/([0-9]+)', body):
                attachments = attachments - IrAttachment.browse(int(link))

            # load attachment binary data with a separate read(), as prefetching all
            # `datas` (binary field) could bloat the browse cache, triggering
            # soft/hard mem limits with temporary data.
            attachments = [(a['name'], base64.b64decode(a['datas']), a['mimetype'])
                           for a in attachments.sudo().read(['name', 'datas', 'mimetype'])
                           if a['datas'] is not False]

            # specific behavior to customize the send email for notified partners
            email_list = []
            if mail.email_to:
                email_list.append(mail._send_prepare_values())
            for partner in mail.recipient_ids:
                values = mail._send_prepare_values(partner=partner)
                values['partner_id'] = partner
                email_list.append(values)

            # headers
            headers = {}
            ICP = self.env['ir.config_parameter'].sudo()
            bounce_alias = ICP.get_param("mail.bounce.alias")
            catchall_domain = ICP.get_param("mail.catchall.domain")
            if bounce_alias and catchall_domain:
                headers['Return-Path'] = '%s@%s' % (bounce_alias, catchall_domain)
            if mail.headers:
                try:
                    headers.update(ast.literal_eval(mail.headers))
                except Exception:
                    # mail.headers is free-form text; ignore unparsable values
                    pass

            # Writing on the mail object may fail (e.g. lock on user) which
            # would trigger a rollback *after* actually sending the email.
            # To avoid sending twice the same email, provoke the failure earlier
            mail.write({
                'state': 'exception',
                # BUGFIX: message text read "due do" instead of "due to"
                'failure_reason': _('Error without exception. Probably due to sending an email without computed recipients.'),
            })
            # Update notification in a transient exception state to avoid concurrent
            # update in case an email bounces while sending all emails related to current
            # mail record.
            notifs = self.env['mail.notification'].search([
                ('notification_type', '=', 'email'),
                ('mail_mail_id', 'in', mail.ids),
                ('notification_status', 'not in', ('sent', 'canceled'))
            ])
            if notifs:
                # BUGFIX: message text read "due do" instead of "due to"
                notif_msg = _('Error without exception. Probably due to concurrent access update of notification records. Please see with an administrator.')
                notifs.sudo().write({
                    'notification_status': 'exception',
                    'failure_type': 'unknown',
                    'failure_reason': notif_msg,
                })
                # `test_mail_bounce_during_send`, force immediate update to obtain the lock.
                # see rev. 56596e5240ef920df14d99087451ce6f06ac6d36
                notifs.flush(fnames=['notification_status', 'failure_type', 'failure_reason'], records=notifs)

            # build an RFC2822 email.message.Message object and send it without queuing
            res = None
            # TDE note: could be great to pre-detect missing to/cc and skip sending it
            # to go directly to failed state update
            for email in email_list:
                msg = IrMailServer.build_email(
                    email_from=mail.email_from,
                    email_to=email.get('email_to'),
                    subject=mail.subject,
                    body=email.get('body'),
                    body_alternative=email.get('body_alternative'),
                    email_cc=tools.email_split(mail.email_cc),
                    reply_to=mail.reply_to,
                    attachments=attachments,
                    message_id=mail.message_id,
                    references=mail.references,
                    object_id=mail.res_id and ('%s-%s' % (mail.res_id, mail.model)),
                    subtype='html',
                    subtype_alternative='plain',
                    headers=headers)
                processing_pid = email.pop("partner_id", None)
                try:
                    res = IrMailServer.send_email(
                        msg, mail_server_id=mail.mail_server_id.id, smtp_session=smtp_session)
                    if processing_pid:
                        success_pids.append(processing_pid)
                    processing_pid = None
                except AssertionError as error:
                    if str(error) == IrMailServer.NO_VALID_RECIPIENT:
                        # if we have a list of void emails for email_list -> email missing, otherwise generic email failure
                        if not email.get('email_to') and failure_type != "RECIPIENT":
                            failure_type = "mail_email_missing"
                        else:
                            failure_type = "mail_email_invalid"
                        # No valid recipient found for this particular
                        # mail item -> ignore error to avoid blocking
                        # delivery to next recipients, if any. If this is
                        # the only recipient, the mail will show as failed.
                        _logger.info("Ignoring invalid recipients for mail.mail %s: %s",
                                     mail.message_id, email.get('email_to'))
                    else:
                        raise
            if res:  # mail has been sent at least once, no major exception occurred
                mail.write({'state': 'sent', 'message_id': res, 'failure_reason': False})
                _logger.info('Mail with ID %r and Message-Id %r successfully sent', mail.id, mail.message_id)
                # /!\ can't use mail.state here, as mail.refresh() will cause an error
                # see revid:[email protected] in 6.1
            mail._postprocess_sent_message(success_pids=success_pids, failure_type=failure_type)
        except MemoryError:
            # prevent catching transient MemoryErrors, bubble up to notify user or abort cron job
            # instead of marking the mail as failed
            _logger.exception(
                'MemoryError while processing mail with ID %r and Msg-Id %r. Consider raising the --limit-memory-hard startup option',
                mail.id, mail.message_id)
            # mail status will stay on ongoing since transaction will be rollback
            raise
        except (psycopg2.Error, smtplib.SMTPServerDisconnected):
            # If an error with the database or SMTP session occurs, chances are that the cursor
            # or SMTP session are unusable, causing further errors when trying to save the state.
            _logger.exception(
                'Exception while processing mail with ID %r and Msg-Id %r.',
                mail.id, mail.message_id)
            raise
        except Exception as e:
            failure_reason = tools.ustr(e)
            _logger.exception('failed sending mail (id: %s) due to %s', mail.id, failure_reason)
            mail.write({'state': 'exception', 'failure_reason': failure_reason})
            mail._postprocess_sent_message(success_pids=success_pids, failure_reason=failure_reason, failure_type='unknown')
            if raise_exception:
                if isinstance(e, (AssertionError, UnicodeEncodeError)):
                    if isinstance(e, UnicodeEncodeError):
                        value = "Invalid text: %s" % e.object
                    else:
                        value = '. '.join(e.args)
                    raise MailDeliveryException(value)
                raise

        if auto_commit is True:
            self._cr.commit()
    return True
def _get_system_logs(self): msg = self._get_message_logs() arguments = {'arg0': ustr(msg), "action": "update"} r = requests.post(API_ENDPOINT, data=arguments, timeout=30) r.raise_for_status() return literal_eval(r.text)