def _get_source(self, cr, uid, name, types, lang, source=None, res_id=None):
    """Look up the translation for a (name, types, lang[, source, res_id]) combination.

    All values passed to this method should be unicode (not byte strings),
    especially ``source``.

    :param name: identification of the term to translate, such as field name
        (optional if source is passed)
    :param types: single string defining type of term to translate (see
        ``type`` field on ir.translation), or sequence of allowed types
    :param lang: language code of the desired translation
    :param source: optional source term to translate (should be unicode)
    :param res_id: optional resource id to translate (if used, ``source``
        should be set)
    :rtype: unicode
    :return: the requested translation, or an empty unicode string if no
        translation was found and ``source`` was not passed
    """
    # FIXME: should assert that `source` is unicode and fix all callers to
    # always pass unicode so we can remove the string encoding/decoding.
    if not lang:
        return tools.ustr(source or '')
    if isinstance(types, basestring):
        types = (types,)
    query, params = self._get_source_query(cr, uid, name, types, lang, source, res_id)
    cr.execute(query, params)
    row = cr.fetchone()
    translation = row[0] if row and row[0] else u''
    if source and not translation:
        # No stored translation: fall back on the source term itself.
        return tools.ustr(source)
    return translation
def go(id, uid, ids, datas, context): with openerp.api.Environment.manage(): cr = openerp.registry(db).cursor() try: result, format = openerp.report.render_report(cr, uid, ids, object, datas, context) if not result: tb = sys.exc_info() self_reports[id]["exception"] = openerp.exceptions.DeferredException( "RML is not available at specified location or not enough data to print!", tb ) self_reports[id]["result"] = result self_reports[id]["format"] = format self_reports[id]["state"] = True except Exception, exception: _logger.exception("Exception: %s\n", exception) if hasattr(exception, "name") and hasattr(exception, "value"): self_reports[id]["exception"] = openerp.exceptions.DeferredException( tools.ustr(exception.name), tools.ustr(exception.value) ) else: tb = sys.exc_info() self_reports[id]["exception"] = openerp.exceptions.DeferredException( tools.exception_to_unicode(exception), tb ) self_reports[id]["state"] = True cr.commit() cr.close()
def default_get(self, cr, uid, fields, context=None):
    """Compute default values for the task-delegation wizard from the
    project task referenced by ``context['active_id']``."""
    res = super(project_task_delegate, self).default_get(cr, uid, fields, context=context)
    if context is None:
        context = {}
    record_id = context and context.get('active_id', False) or False
    if not record_id:
        return res
    task = self.pool.get('project.task').browse(cr, uid, record_id, context=context)
    task_name = tools.ustr(task.name)
    # Strip an existing review marker so it is not duplicated below.
    check_marker = _('CHECK: ')
    if task_name.startswith(check_marker):
        base_name = tools.ustr(task_name).replace(check_marker, '')
    else:
        base_name = tools.ustr(task_name or '')
    if 'project_id' in fields:
        res['project_id'] = int(task.project_id.id) if task.project_id else False
    if 'name' in fields:
        res['name'] = base_name
    if 'planned_hours' in fields:
        res['planned_hours'] = task.remaining_hours or 0.0
    if 'prefix' in fields:
        res['prefix'] = _('CHECK: %s') % base_name
    if 'new_task_description' in fields:
        res['new_task_description'] = task.description
    return res
def from_data(self, uid, fields, rows, model):
    """Build a Printscreen PDF from ``fields`` (column headers) and ``rows``
    (cell values): generate the intermediate XML document, transform it with
    custom_new.xsl and render the resulting RML with trml2pdf.

    Fixed: removed the unused accumulators ``l``, ``t``, ``temp`` and
    ``tsum`` that were never read.

    :return: the parsed PDF object (also stored on ``self.obj``)
    """
    pageSize = [210.0, 297.0]  # A4, in millimetres
    new_doc = etree.Element("report")
    config = etree.SubElement(new_doc, 'config')

    def _append_node(name, text):
        # Helper: append <name>text</name> under <config>.
        n = etree.SubElement(config, name)
        n.text = text

    _append_node('date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
    _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
    # 2.8346 converts millimetres to points (72 / 25.4).
    _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
    _append_node('PageHeight', '%.2f' % (pageSize[1] * 2.8346,))
    _append_node('PageFormat', 'a4')
    _append_node('header-date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
    header = etree.SubElement(new_doc, 'header')
    for f in fields:
        field = etree.SubElement(header, 'field')
        field.text = tools.ustr(f)
    lines = etree.SubElement(new_doc, 'lines')
    for row_lines in rows:
        node_line = etree.SubElement(lines, 'row')
        for row in row_lines:
            col = etree.SubElement(node_line, 'col', para='yes', tree='no')
            col.text = tools.ustr(row)
    transform = etree.XSLT(
        etree.parse(os.path.join(tools.config['root_path'],
                                 'addons/base/report/custom_new.xsl')))
    rml = etree.tostring(transform(new_doc))
    self.obj = trml2pdf.parseNode(rml, title='Printscreen')
    return self.obj
def onchange_workcenter_id(self, workcenter_id):
    """Onchange handler for the workcenter field of a production work line.

    Reads the production order from ``context['default_production_id']`` and
    proposes start/stop times, a line name and cycle/hour estimates computed
    from the workcenter capacity and the BoM factors.
    """
    res = {}
    production_id = self.env.context.get('default_production_id')
    production = self.env['mrp.production'].browse(production_id)
    if workcenter_id:
        def _factor(factor, product_efficiency, product_rounding):
            # Scale by the BoM efficiency, round up to the BoM rounding,
            # never going below one rounding unit.
            factor = factor / (product_efficiency or 1.0)
            factor = _common.ceiling(factor, product_rounding)
            if factor < product_rounding:
                factor = product_rounding
            return factor
        factor = _factor(production.product_qty, production.bom_id.product_efficiency, production.bom_id.product_rounding)
        wc = self.env['mrp.workcenter'].browse(workcenter_id)
        if wc.capacity_per_cycle:
            # Whole cycles needed, rounded up when there is a remainder.
            d, m = divmod(factor, wc.capacity_per_cycle)
            cycle = (d + (m and 1.0 or 0.0))
            hour = wc.time_cycle * cycle
            # NOTE(review): values are only proposed when the workcenter has
            # a capacity_per_cycle — confirm this matches the intended UX.
            res['value'] = {
                'time_start': wc.time_start,
                'time_stop': wc.time_stop,
                'name': tools.ustr(wc.name) + ' - ' + tools.ustr(production.bom_id.product_tmpl_id.name_get()[0][1]),
                'hour': hour,
                'cycle': cycle,
            }
    return res
def go(id, uid, ids, datas, context): cr = pooler.get_db(db).cursor() try: obj = netsvc.LocalService("report." + object) (result, format) = obj.create(cr, uid, ids, datas, context) if not result: tb = sys.exc_info() self._reports[id]["exception"] = openerp.exceptions.DeferredException( "RML is not available at specified location or not enough data to print!", tb ) self._reports[id]["result"] = result self._reports[id]["format"] = format self._reports[id]["state"] = True except Exception, exception: _logger.exception("Exception: %s\n", exception) if hasattr(exception, "name") and hasattr(exception, "value"): self._reports[id]["exception"] = openerp.exceptions.DeferredException( tools.ustr(exception.name), tools.ustr(exception.value) ) else: tb = sys.exc_info() self._reports[id]["exception"] = openerp.exceptions.DeferredException( tools.exception_to_unicode(exception), tb ) self._reports[id]["state"] = True
def _project_search(self, cr, uid, obj, name, args, context=None):
    """ Searches Ids of Projects

    Fixed: the search operator and value from ``args`` were concatenated
    straight into the SQL text (SQL injection). The operator is now checked
    against a whitelist and the value is passed as a bound parameter.

    @return: Ids of Projects
    """
    # Only the comparison operators this search makes sense for are allowed;
    # anything else is rejected instead of being spliced into the SQL.
    operator = tools.ustr(args[0][1])
    if operator not in ('like', 'ilike', 'not like', 'not ilike', '=', '!='):
        raise ValueError("Invalid search operator: %r" % operator)
    pattern = '%' + tools.ustr(args[0][2]) + '%'
    cr.execute("""
        SELECT pp.id,*
        FROM (
            Select node.id, node.name AS short_name,
                --cast ((count(parent.name)) as int) as nivel
                replace( array_to_string( array_agg( parent.name order by parent.nivel asc), ' / ' ), '\n', ' ') as full_name
            from account_analytic_account as node,
                ( SELECT vw.nivel, account_analytic_account.*
                  FROM (
                    Select node.id, node.name AS short_name,
                        cast ((count(parent.name)) as int) as nivel
                        --array_to_string( array_agg( distinct parent.name ), ' / ' ) as full_name
                    from account_analytic_account as node,account_analytic_account as parent
                    where node.parent_left between parent.parent_left and parent.parent_right
                    group by node.name,node.parent_left,node.id
                    order by node.parent_left
                  ) vw
                  inner join account_analytic_account ON vw.id = account_analytic_account.id) as parent
            where node.parent_left between parent.parent_left and parent.parent_right
            group by node.name,node.parent_left,node.id
            order by node.parent_left
        ) vw
        join project_project pp on pp.analytic_account_id = vw.id
        WHERE vw.full_name """ + operator + """ %s """, (pattern,))
    datas = cr.dictfetchall()
    ids = [('id', 'in', [data['id'] for data in datas])]
    return ids
def _get_source_query(self, cr, uid, name, types, lang, source, res_id):
    """Build the (query, params) pair used to look a translation up in
    ir_translation, filtered by language/types and optionally by source
    term, resource id(s) and term name."""
    if not source:
        # No source term: look the translation up by name only.
        query = """SELECT value FROM ir_translation WHERE lang=%s AND type in %s AND name=%s"""
        params = (lang or '', types, tools.ustr(name))
        return (query, params)
    # Search by source term, optionally narrowing by res_id and name.
    query = """SELECT value FROM ir_translation WHERE lang=%s AND type in %s AND src=%s"""
    params = (lang or '', types, tools.ustr(source))
    if res_id:
        # Normalize a scalar id into a tuple for the "in %s" clause.
        if isinstance(res_id, (int, long)):
            res_id = (res_id,)
        else:
            res_id = tuple(res_id)
        query += " AND res_id in %s"
        params += (res_id,)
    if name:
        query += " AND name=%s"
        params += (tools.ustr(name),)
    return (query, params)
def name_get(self, cr, user, ids, context=None):
    """Returns the preferred display value (text representation) for the
    records with the given ``ids``. By default this will be the value of
    the ``name`` column, unless the model implements a custom behavior.

    Can sometimes be seen as the inverse function of :meth:`~.name_search`,
    but it is not guaranteed to be.

    :rtype: list(tuple)
    :return: list of pairs ``(id,text_repr)`` for all records with the
        given ``ids``.
    """
    if not ids:
        return []
    if isinstance(ids, (int, long)):
        ids = [ids]
    rows = self.read(cr, user, ids, [self._rec_name], context, load='_classic_write')
    result = []
    for row in rows:
        missing = not self._rec_name in self._columns and not row.get(self._rec_name, False)
        if missing:
            # Neither a _rec_name column nor a value: the model should have
            # overridden name_get().
            _logger.error(u"Column '{column}' or function name_get() are not defined for table '{table}'".format(column=self._rec_name, table=self._name))
            if config['debug_mode']:
                result.append((row['id'], tools.ustr(row[self._rec_name])))
        else:
            result.append((row['id'], tools.ustr(row.get(self._rec_name, ''))))
    return result
def load_from_file(self, path, dbname, key):
    """Load and return the ``Parser`` class from a report parser file.

    ``path`` is relative to one of the configured addons directories (or to
    a zipped addon); the module is imported under a name derived from the
    database name and ``key`` so different databases do not collide.

    :return: the Parser class, or None when no matching file/class is found.
    """
    class_inst = None
    expected_class = 'Parser'
    try:
        # Candidate roots: every addons_path entry plus <root_path>/addons,
        # de-duplicated.
        ad = os.path.abspath(os.path.join(tools.ustr(config['root_path']), u'addons'))
        mod_path_list = map(lambda m: os.path.abspath(tools.ustr(m.strip())), config['addons_path'].split(','))
        mod_path_list.append(ad)
        mod_path_list = list(set(mod_path_list))
        for mod_path in mod_path_list:
            if os.path.lexists(mod_path+os.path.sep+path.split(os.path.sep)[0]):
                # Plain (unzipped) addon: import the .py/.pyc file directly.
                filepath = mod_path+os.path.sep+path
                filepath = os.path.normpath(filepath)
                # NOTE(review): appends to sys.path on every call — grows
                # unboundedly over time; confirm whether this is acceptable.
                sys.path.append(os.path.dirname(filepath))
                mod_name, file_ext = os.path.splitext(os.path.split(filepath)[-1])
                mod_name = '%s_%s_%s' % (dbname, mod_name, key)
                if file_ext.lower() == '.py':
                    py_mod = imp.load_source(mod_name, filepath)
                elif file_ext.lower() == '.pyc':
                    py_mod = imp.load_compiled(mod_name, filepath)
                if expected_class in dir(py_mod):
                    class_inst = py_mod.Parser
                # Returns None for non-.py/.pyc files or missing Parser class.
                return class_inst
            elif os.path.lexists(mod_path+os.path.sep+path.split(os.path.sep)[0]+'.zip'):
                # Zipped addon: import through zipimport and fetch the
                # Parser class from its `parser` submodule.
                zimp = zipimport.zipimporter(mod_path+os.path.sep+path.split(os.path.sep)[0]+'.zip')
                return zimp.load_module(path.split(os.path.sep)[0]).parser.Parser
    except SyntaxError, e:
        # Surface parser syntax errors to the user as an ORM exception.
        raise orm.except_orm(_('Syntax Error !'), e)
def _get_source_query(self, cr, uid, name, types, lang, source, res_id):
    """Build the (query, params) pair used to look a translation up in
    ir_translation, filtered by language/types and optionally by source
    term, resource id(s) and term name.

    Fixed: a scalar ``res_id`` is now wrapped into a tuple before being
    bound to the ``res_id in %s`` clause — psycopg2 requires a tuple for
    ``in %s``, so a bare integer made the query fail (the sibling variant
    of this method already did this coercion).
    """
    if source:
        # Note: the extra test on md5(src) is a hint for postgres to use the
        # index ir_translation_src_md5
        query = """SELECT value FROM ir_translation WHERE lang=%s AND type in %s AND src=%s AND md5(src)=md5(%s)"""
        source = tools.ustr(source)
        params = (lang or '', types, source, source)
        if res_id:
            # Normalize a scalar id into a tuple for the "in %s" clause.
            if isinstance(res_id, (int, long)):
                res_id = (res_id,)
            else:
                res_id = tuple(res_id)
            query += " AND res_id in %s"
            params += (res_id,)
        if name:
            query += " AND name=%s"
            params += (tools.ustr(name),)
    else:
        query = """SELECT value FROM ir_translation WHERE lang=%s AND type in %s AND name=%s"""
        params = (lang or '', types, tools.ustr(name))
    return (query, params)
def _get_vals(wc_use, operators, operators_n, factor, bom, wc, routing, context=None):
    """Build the values dict for one workcenter line of the routing.

    Relies on ``self``, ``cr`` and ``uid`` from the enclosing scope.
    """
    qty_per_cycle = self.pool.get('product.uom')._compute_qty(cr, uid, wc_use.uom_id.id, wc_use.qty_per_cycle, bom.product_uom.id)
    # Assign the first `operators_n` operators to this line.
    if operators_n and operators:
        assigned = [operators[idx] for idx in range(0, (operators_n))]
    else:
        assigned = []
    user = self.pool.get('res.users').browse(cr, uid, uid, context)
    lang = user and user.lang or u'es_ES'
    # Hours: base time per produced quantity, corrected by the workcenter
    # efficiency, its performance factor and the routing availability.
    hour = (factor * bom.product_qty) * (wc_use.hour_nbr or 1.0) / (qty_per_cycle or 1.0)
    hour = hour * (wc.time_efficiency or 1.0)
    hour = hour / (wc.performance_factor or 1.0)
    hour = hour / (routing.availability_ratio or 1.0)
    hour = float(hour)
    return {
        'name': tools.ustr(wc_use.name) + u' - ' + tools.ustr(bom.product_id.with_context(lang=lang).name),
        'routing_id': routing.id,
        'workcenter_id': wc.id,
        # Always 0 (instead of level + wc_use.sequence) because ordering is
        # handled in the kanban view.
        'sequence': 0,
        'operators_ids': assigned and [(6, 0, assigned)] or False,
        'cycle': wc_use.cycle_nbr * (factor * bom.product_qty),
        'time_start': wc_use.time_start,
        'time_stop': wc_use.time_stop,
        'hour': hour,
        'real_time': hour,
        'availability_ratio': routing.availability_ratio or 1.0,
    }
def get_record_data(self, cr, uid, values, context=None):
    """ Returns a defaults-like dict with initial values for the composition
    wizard when sending an email related to a previous email (parent_id) or
    a document (model, res_id). This is based on previously computed default
    values.

    Fixed: ``record_name`` was assigned ``(parent.record_name,)`` — a
    one-element tuple — instead of the plain value.
    """
    if context is None:
        context = {}
    result, subject = {}, False
    if values.get("parent_id"):
        parent = self.pool.get("mail.message").browse(cr, uid, values.get("parent_id"), context=context)
        result["record_name"] = parent.record_name  # was wrapped in a tuple
        subject = tools.ustr(parent.subject or parent.record_name or "")
        if not values.get("model"):
            result["model"] = parent.model
        if not values.get("res_id"):
            result["res_id"] = parent.res_id
        partner_ids = values.get("partner_ids", list()) + [partner.id for partner in parent.partner_ids]
        if (
            context.get("is_private") and parent.author_id
        ):  # check message is private then add author also in partner list.
            partner_ids += [parent.author_id.id]
        result["partner_ids"] = partner_ids
    elif values.get("model") and values.get("res_id"):
        doc_name_get = self.pool[values.get("model")].name_get(cr, uid, [values.get("res_id")], context=context)
        result["record_name"] = doc_name_get and doc_name_get[0][1] or ""
        subject = tools.ustr(result["record_name"])
    # Prefix the subject with "Re:" (literal or translated) if not already there.
    re_prefix = _("Re:")
    if subject and not (subject.startswith("Re:") or subject.startswith(re_prefix)):
        subject = "%s %s" % (re_prefix, subject)
    result["subject"] = subject
    return result
def _process_text(self, txt):
    """Translate ``txt`` according to the language in the local context,
    replace dynamic ``[[expr]]`` with their real value, then escape the
    result for XML.

    Fixed: the redundant ``txt and (txt is not None) and (txt is not
    False)`` condition is simplified to plain truthiness (equivalent), and
    the bare ``ustr`` call now uses ``tools.ustr`` like the rest of the
    method.

    :param str txt: original text to translate (must NOT be XML-escaped)
    :return: translated text, with dynamic expressions evaluated and with
             special XML characters escaped (``&,<,>``).
    """
    if not self.localcontext:
        return str2xml(txt)
    if not txt:
        return ''
    result = ''
    sps = _regex.split(txt)
    while sps:
        # Even slots are plain text: translate them.
        to_translate = tools.ustr(sps.pop(0))
        result += tools.ustr(self.localcontext.get('translate', lambda x: x)(to_translate))
        if sps:
            # Odd slots are [[ expr ]] bodies: evaluate in the local
            # context; failures are logged and rendered as nothing.
            txt = None
            try:
                expr = sps.pop(0)
                txt = eval(expr, self.localcontext)
                if txt and isinstance(txt, basestring):
                    txt = tools.ustr(txt)
            except Exception:
                _logger.error("Failed to evaluate expression [[ %s ]] with context %r while rendering report, ignored.", expr, self.localcontext)
            if isinstance(txt, basestring):
                result += txt
            elif txt:
                result += tools.ustr(txt)
    return str2xml(result)
def inter_call(self, data):
    """Create the recorded module from ``data`` and return an action that
    opens the "Module Recording" wizard pre-filled with the generated file.

    Temporarily injects an empty ``depends`` dict into the environment
    context so the module-creation step can record dependencies into it.
    """
    cr, uid, context = self.env.args
    context = dict(context)
    context.update({'depends': {}})
    # Re-freeze the modified context back into the environment.
    self.env.args = cr, uid, frozendict(context)
    res = base_module_save._create_module(self, self._cr, self.env.user.id, data, context=context)
    # Look up the form view to display the follow-up wizard with.
    mod_obj = self.env['ir.model.data']
    model_data_ids = mod_obj.search([('model', '=', 'ir.ui.view'), ('name', '=', 'module_create_form_view')])
    resource_id = model_data_ids.read(fields=['res_id'])[0]['res_id']
    context.update(res)
    return {
        'name': _('Module Recording'),
        'context': {
            # Pre-fill the wizard with the generated module archive.
            'default_module_filename': ustr(res['module_filename']),
            'default_module_file': ustr(res['module_file']),
        },
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'base.module.record.objects',
        'views': [(resource_id, 'form')],
        'type': 'ir.actions.act_window',
        'target': 'new',
    }
def get_record_data(self, values):
    """ Returns a defaults-like dict with initial values for the composition
    wizard when sending an email related to a previous email (parent_id) or
    a document (model, res_id). This is based on previously computed default
    values.

    Fixed: a trailing comma (``parent.record_name,``) made ``record_name``
    a one-element tuple instead of the plain value.
    """
    result, subject = {}, False
    if values.get('parent_id'):
        parent = self.env['mail.message'].browse(values.get('parent_id'))
        result['record_name'] = parent.record_name  # trailing comma removed
        subject = tools.ustr(parent.subject or parent.record_name or '')
        if not values.get('model'):
            result['model'] = parent.model
        if not values.get('res_id'):
            result['res_id'] = parent.res_id
        partner_ids = values.get('partner_ids', list()) + [(4, id) for id in parent.partner_ids.ids]
        if self._context.get('is_private') and parent.author_id:  # check message is private then add author also in partner list.
            partner_ids += [(4, parent.author_id.id)]
        result['partner_ids'] = partner_ids
    elif values.get('model') and values.get('res_id'):
        doc_name_get = self.env[values.get('model')].browse(values.get('res_id')).name_get()
        result['record_name'] = doc_name_get and doc_name_get[0][1] or ''
        subject = tools.ustr(result['record_name'])
    # Prefix the subject with "Re:" (literal or translated) if not already there.
    re_prefix = _('Re:')
    if subject and not (subject.startswith('Re:') or subject.startswith(re_prefix)):
        subject = "%s %s" % (re_prefix, subject)
    result['subject'] = subject
    return result
def create_from_ui_old(self, cr, uid, orders, context=None):
    """Persist POS orders submitted from the UI (legacy variant).

    Orders whose ``pos_reference`` already exists are re-processed as
    updates; refund-type orders are delegated to ``create_refund_from_ui``.
    Saved orders are pushed through the 'paid' workflow signal and
    optionally invoiced or turned into quotations.

    Fixed: ``context`` used a mutable default argument (``{}``) that was
    shared across calls and mutated via ``context.update(...)`` below; it
    now defaults to ``None``.

    :return: list of created/updated order ids, or the quotation action
        dict when a quotation must be printed/sent
    """
    context = context or {}
    submitted_references = [o['data']['name'] for o in orders]
    existing_order_ids = self.search(cr, uid, [('pos_reference', 'in', submitted_references)], context=context)
    existing_orders = self.read(cr, uid, existing_order_ids, ['pos_reference'], context=context)
    existing_references = set([o['pos_reference'] for o in existing_orders])
    orders_to_save = [o for o in orders if o['data']['name'] not in existing_references]
    orders_to_update = [o for o in orders if o['data']['name'] in existing_references]
    order_ids = []
    for tmp_order in orders_to_save:
        if tmp_order["data"].get("type", False) == "refund":
            # NOTE(review): returns on the first refund, skipping any
            # remaining submitted orders — preserved as-is.
            return self.create_refund_from_ui(cr, uid, tmp_order, context=context)
        # Remap the session when a different payment session is configured.
        payment_session = self.check_payment_session(cr, uid, tmp_order["data"]["pos_session_id"])
        if payment_session:
            tmp_order["data"]["pos_session_id"] = payment_session
        to_invoice = tmp_order['to_invoice']
        order = tmp_order['data']
        order_id = self._process_order(cr, uid, order, context=context)
        order_ids.append(order_id)
        if to_invoice not in ["print_quotation", "send_quotation"]:
            try:
                self.signal_workflow(cr, uid, [order_id], 'paid')
            except Exception as e:
                # Best effort: keep the order even if the workflow step fails.
                _logger.error('Could not fully process the POS Order: %s', tools.ustr(e))
            if to_invoice == True:
                self.action_invoice(cr, uid, [order_id], context)
        if to_invoice in ["print_quotation", "send_quotation"]:
            context.update(dict(action=to_invoice))
            return self.action_quotation(cr, uid, [order_id], context)
    for tmp_order in orders_to_update:
        payment_session = self.check_payment_session(cr, uid, tmp_order["data"]["pos_session_id"])
        if payment_session:
            tmp_order["data"]["pos_session_id"] = payment_session
        # Existing reference: flag the processing as an update of that order.
        order_id = self.search(cr, uid, [('pos_reference', '=', "Pedido "+tmp_order["id"])])
        context.update(dict(to_update=True, order_id=order_id))
        to_invoice = tmp_order['to_invoice']
        order = tmp_order['data']
        order_id = self._process_order(cr, uid, order, context=context)
        order_ids.append(order_id)
        if to_invoice == True:
            self.action_invoice(cr, uid, [order_id], context)
        elif to_invoice not in ["print_quotation", "send_quotation"]:
            try:
                self.signal_workflow(cr, uid, [order_id], 'paid')
            except Exception as e:
                _logger.error('Could not fully process the POS Order: %s', tools.ustr(e))
        elif to_invoice in ["print_quotation", "send_quotation"]:
            context.update(dict(action=to_invoice))
            return self.action_quotation(cr, uid, [order_id], context)
    return order_ids
def _bom_explode(self, cr, uid, bom, factor, properties=None, addthis=False, level=0, routing_id=False):
    """ Finds Products and Work Centers for related BoM for manufacturing order.
    @param bom: BoM of particular product.
    @param factor: Factor of product UoM.
    @param properties: A List of properties Ids.
    @param addthis: If BoM found then True else False.
    @param level: Depth level to find BoM lines starts from 10.
    @return: result: List of dictionaries containing product details.
             result2: List of dictionaries containing Work Center details.
    """
    routing_obj = self.pool.get('mrp.routing')
    # Scale the factor by the BoM efficiency and round it up to the BoM
    # rounding, never going below one rounding unit.
    factor = factor / (bom.product_efficiency or 1.0)
    factor = rounding(factor, bom.product_rounding)
    if factor < bom.product_rounding:
        factor = bom.product_rounding
    result = []
    result2 = []
    phantom = False
    if bom.type == 'phantom' and not bom.bom_lines:
        # Phantom BoM without lines: look up and explode the BoM of the
        # product itself instead.
        newbom = self._bom_find(cr, uid, bom.product_id.id, bom.product_uom.id, properties)
        if newbom:
            res = self._bom_explode(cr, uid, self.browse(cr, uid, [newbom])[0], factor*bom.product_qty, properties, addthis=True, level=level+10)
            result = result + res[0]
            result2 = result2 + res[1]
            phantom = True
        else:
            phantom = False
    if not phantom:
        if addthis and not bom.bom_lines:
            # Leaf BoM: emit the product line itself.
            result.append(
            {
                'name': bom.product_id.name,
                'product_id': bom.product_id.id,
                'product_qty': bom.product_qty * factor,
                'product_uom': bom.product_uom.id,
                'product_uos_qty': bom.product_uos and bom.product_uos_qty * factor or False,
                'product_uos': bom.product_uos and bom.product_uos.id or False,
            })
        # Work-center lines come from the explicit routing_id when given,
        # otherwise from the BoM's own routing.
        routing = (routing_id and routing_obj.browse(cr, uid, routing_id)) or bom.routing_id or False
        if routing:
            for wc_use in routing.workcenter_lines:
                wc = wc_use.workcenter_id
                # Whole cycles needed, rounded up to the workcenter capacity.
                d, m = divmod(factor, wc_use.workcenter_id.capacity_per_cycle)
                mult = (d + (m and 1.0 or 0.0))
                cycle = mult * wc_use.cycle_nbr
                result2.append({
                    'name': tools.ustr(wc_use.name) + ' - ' + tools.ustr(bom.product_id.name),
                    'workcenter_id': wc.id,
                    'sequence': level+(wc_use.sequence or 0),
                    'cycle': cycle,
                    # Hours = per-cycle hours plus setup/teardown and cycle
                    # time, weighted by the workcenter efficiency.
                    'hour': float(wc_use.hour_nbr*mult + ((wc.time_start or 0.0)+(wc.time_stop or 0.0)+cycle*(wc.time_cycle or 0.0)) * (wc.time_efficiency or 1.0)),
                    'multiple_component': wc_use.multiple_component or False,
                })
        for bom2 in bom.bom_lines:
            # Recurse into each child BoM line, ten levels deeper.
            res = self._bom_explode(cr, uid, bom2, factor, properties, addthis=True, level=level+10)
            result = result + res[0]
            result2 = result2 + res[1]
    return result, result2
def getImage(self, value, height=50, xw=1, rotate=None, extension="PNG"):
    """ Get an image with PIL library
    value       code bar value
    height      height in pixel of the bar code
    extension   image file extension

    Fixed: removed the unused (and deprecated) ``from string import lower,
    upper`` import.

    :return: a PIL greyscale ("L") Image with the bars and digits drawn.
    """
    from PIL import Image, ImageFont, ImageDraw
    import os
    # Get the bar code list
    bits = self.makeCode(value)
    # Get the bar code with the checksum added
    code = ""
    for digit in self.EAN13:
        code += "%d" % digit
    # Create a new image; `position` leaves room for the leading digit.
    position = 8
    im = Image.new("L", (len(bits)+position, height+2))
    # Locate the bundled FreeMonoBold font in one of the addons paths.
    ad = os.path.abspath(os.path.join(ustr(config['root_path']), u'addons'))
    mod_path_list = map(lambda m: os.path.abspath(ustr(m.strip())), config['addons_path'].split(','))
    mod_path_list.append(ad)
    for mod_path in mod_path_list:
        font_file = mod_path+os.path.sep+ \
            "report_aeroo"+os.path.sep+"barcode"+os.path.sep+"FreeMonoBold.ttf"  # "courB08.pil"
        if os.path.lexists(font_file):
            # NOTE(review): `fontsize` is not defined in this function — it
            # must come from an enclosing/module scope; confirm.
            font = ImageFont.truetype(font_file, fontsize)
            #font = ImageFont.load(font_file)
    # NOTE(review): if no font file was found, `font` is unbound and the
    # draw.text calls below raise NameError — confirm intended.
    # Create drawer
    draw = ImageDraw.Draw(im)
    # Erase image (white background)
    draw.rectangle(((0, 0), (im.size[0], im.size[1])), fill=256)
    # Draw first part of number
    draw.text((0, height-9), code[0], font=font, fill=0)
    # Draw the two 6-digit groups under the bars
    draw.text((position+3, height-9), code[1:7], font=font, fill=0)
    draw.text((len(bits)/2+2+position, height-9), code[7:], font=font, fill=0)
    # Draw the bar codes
    for bit in range(len(bits)):
        # Draw normal bar
        if bits[bit] == '1':
            draw.rectangle(((bit+position, 0), (bit+position, height-10)), fill=0)
        # Draw long (guard) bar
        elif bits[bit] == 'L':
            draw.rectangle(((bit+position, 0), (bit+position, height-3)), fill=0)
    # Save the result image
    return im
def create_from_ui(self, cr, uid, orders, context=None):
    """Persist POS orders submitted from the UI.

    Orders whose ``pos_reference`` already exists on the server are
    re-processed as updates; refund-type orders go through
    ``create_refund_from_ui``. Each processed order is pushed through the
    'paid' workflow signal and, when requested, invoiced with its invoice
    immediately opened.

    :return: list of processed pos.order ids
    """
    # Keep only new orders
    context = context or {}
    submitted_references = [o['data']['name'] for o in orders]
    existing_order_ids = self.search(cr, uid, [('pos_reference', 'in', submitted_references)], context=context)
    existing_orders = self.read(cr, uid, existing_order_ids, ['pos_reference'], context=context)
    existing_references = set([o['pos_reference'] for o in existing_orders])
    orders_to_save = [o for o in orders if o['data']['name'] not in existing_references]
    orders_to_update = [o for o in orders if o['data']['name'] in existing_references]
    order_ids = []
    for tmp_order in orders_to_save:
        # Remap the session when a different payment session is configured.
        payment_session = self.check_payment_session(cr, uid, tmp_order["data"]["pos_session_id"])
        if payment_session:
            tmp_order["data"]["pos_session_id"] = payment_session
        to_invoice = tmp_order['to_invoice']
        order = tmp_order['data']
        if tmp_order["data"].get("type", False) == "refund":
            order_id = self.create_refund_from_ui(cr, uid, tmp_order, context=context)
        else:
            order_id = self._process_order(cr, uid, order, context=context)
        order_ids.append(order_id)
        try:
            self.signal_workflow(cr, uid, [order_id], 'paid')
        except Exception as e:
            # Best effort: keep the order even if the workflow step fails.
            _logger.error('Could not fully process the POS Order: %s', tools.ustr(e))
        if to_invoice == True:
            self.action_invoice(cr, uid, [order_id], context)
            order_obj = self.browse(cr, uid, order_id, context)
            # Open the freshly created invoice right away.
            self.pool['account.invoice'].signal_workflow(cr, uid, [order_obj.invoice_id.id], 'invoice_open')
    for tmp_order in orders_to_update:
        # Existing reference: flag the processing as an update of that order.
        order_id = self.search(cr, uid, [('pos_reference', '=', "Pedido "+tmp_order["id"])])
        context.update(dict(to_update=True, order_id=order_id))
        to_invoice = tmp_order['to_invoice']
        order = tmp_order['data']
        order_id = self._process_order(cr, uid, order, context=context)
        order_ids.append(order_id)
        try:
            self.signal_workflow(cr, uid, [order_id], 'paid')
        except Exception as e:
            _logger.error('Could not fully process the POS Order: %s', tools.ustr(e))
        if to_invoice == True:
            self.action_invoice(cr, uid, [order_id], context)
            order_obj = self.browse(cr, uid, order_id, context)
            self.pool['account.invoice'].signal_workflow(cr, uid, [order_obj.invoice_id.id], 'invoice_open')
    return order_ids
def write(self, cr, uid, ids, vals, context=None):
    """ When a project task work gets updated, handle its hr analytic timesheet. """
    if context is None:
        context = {}
    timesheet_obj = self.pool.get('hr.analytic.timesheet')
    uom_obj = self.pool.get('product.uom')
    result = {}
    if isinstance(ids, (long, int)):
        ids = [ids]
    for task in self.browse(cr, uid, ids, context=context):
        line_id = task.hr_analytic_timesheet_id
        if not line_id:
            # if a record is deleted from timesheet, the line_id will become
            # null because of the foreign key on-delete=set null
            continue
        # Mirror the changed task-work fields onto the timesheet line.
        vals_line = {}
        if 'name' in vals:
            vals_line['name'] = '%s: %s' % (tools.ustr(task.task_id.name), tools.ustr(vals['name'] or '/'))
            #vals_line['task_assigned_name'] = '%s' % (tools.ustr(task.task_id.name))
        if 'entries_col' in vals:
            vals_line['task_fixed_entries'] = vals['entries_col']
        if 'user_id' in vals:
            vals_line['user_id'] = vals['user_id']
        if 'date' in vals:
            # Keep only the date part of the datetime string.
            vals_line['date'] = vals['date'][:10]
        if 'hours' in vals:
            vals_line['unit_amount'] = vals['hours']
        prod_id = vals_line.get('product_id', line_id.product_id.id)  # False may be set
        # Put user related details in analytic timesheet values
        details = self.get_user_related_details(cr, uid, vals.get('user_id', task.user_id.id))
        for field in ('product_id', 'general_account_id', 'journal_id', 'product_uom_id'):
            if details.get(field, False):
                vals_line[field] = details[field]
        # Check if user's default UOM differs from product's UOM
        user_default_uom_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.project_time_mode_id.id
        if details.get('product_uom_id', False) and details['product_uom_id'] != user_default_uom_id:
            # NOTE(review): assumes 'hours' is present in vals whenever the
            # UoM differs — a write without 'hours' raises KeyError here.
            vals_line['unit_amount'] = uom_obj._compute_qty(cr, uid, user_default_uom_id, vals['hours'], details['product_uom_id'])
        # Compute based on pricetype
        # NOTE(review): also assumes vals_line contains 'unit_amount' and
        # 'journal_id' at this point — confirm callers always provide them.
        amount_unit = timesheet_obj.on_change_unit_amount(cr, uid, line_id.id,
            prod_id=prod_id, company_id=False,
            unit_amount=vals_line['unit_amount'], unit=False, journal_id=vals_line['journal_id'],
            context=context)
        if amount_unit and 'amount' in amount_unit.get('value',{}):
            vals_line['amount'] = amount_unit['value']['amount']
        if vals_line:
            self.pool.get('hr.analytic.timesheet').write(cr, uid, [line_id.id], vals_line, context=context)
    return super(project_work,self).write(cr, uid, ids, vals, context)
def _message_extract_payload(self, cr, uid, message, save_original=False):
    """Extract body as HTML and attachments from the mail message"""
    attachments = []
    body = u''
    if save_original:
        # Keep a copy of the raw message as an .eml attachment.
        attachments.append(('original_email.eml', message.as_string()))
    if not message.is_multipart() or 'text/' in message.get('content-type', ''):
        # Simple (non-multipart) message: decode the single payload.
        encoding = message.get_content_charset()
        body = message.get_payload(decode=True)
        body = tools.ustr(body, encoding, errors='replace')
        if message.get_content_type() == 'text/plain':
            body = tools.append_content_to_html(u'', body, preserve=True)
    else:
        # With multipart/alternative the html part wins over text/plain.
        alternative = (message.get_content_type() == 'multipart/alternative')
        for part in message.walk():
            if part.get_content_maintype() == 'multipart':
                continue  # skip container parts; walk() yields the leaves
            filename = part.get_filename()
            encoding = part.get_content_charset()
            if filename or part.get('content-disposition', '').strip().startswith('attachment'):
                # Explicit attachment: store it and move on.
                attachments.append((filename or 'attachment', part.get_payload(decode=True)))
                continue
            if part.get_content_type() == 'text/plain' and (not alternative or not body):
                body = tools.append_content_to_html(body, tools.ustr(part.get_payload(decode=True), encoding, errors='replace'), preserve=True)
            elif part.get_content_type() == 'text/html':
                html = tools.ustr(part.get_payload(decode=True), encoding, errors='replace')
                if alternative:
                    # html replaces any previously collected plain-text body.
                    body = html
                else:
                    body = tools.append_content_to_html(body, html, plaintext=False)
            else:
                # Any other content type is treated as an attachment.
                attachments.append((filename or 'attachment', part.get_payload(decode=True)))
    return body, attachments
def get_updated_currency(self, currencies, main_currency, max_delta_days):
    """Implementation of abstract method of Currency_getter_interface

    :param currencies: List of currency name
    :type currencies: list of unicode
    :param main_currency: Name of Company's currency
    :type main_currency: unicode
    :param max_delta_days: Maximum allowable days of unsync
    :type max_delta_days: int
    :return: Updated list of currencies and their rates and log
    :rtype: dict, str
    :raises: orm.except_orm when there is a format error
    """
    error_messages = []
    for currency_name in currencies:
        if currency_name == main_currency:
            # The company currency itself never needs updating.
            continue
        try:
            self.validate_currency(currency_name)
            with BankOfCanadaConnection(currency_name) as conn:
                # Reject responses whose currency pair does not match.
                valid = (conn.base_currency == main_currency
                         and conn.target_currency.startswith(currency_name))
                if not valid:
                    _logger.warn(
                        "Exchange data format error for Bank of Canada -"
                        "%s. Please check provider data format "
                        "and/or source code." % currency_name)
                    raise orm.except_orm(
                        _('Error'),
                        _('Exchange data format error for Bank of Canada'
                          ' - %s') % ustr(currency_name)
                    )
                self.check_rate_date(conn.date_time, max_delta_days)
                self.updated_currency[currency_name] = conn.exchange_rate
                _logger.debug(
                    "BOC Rate retrieved : %s = %s %s"
                    % (main_currency, conn.exchange_rate, currency_name)
                )
        except (orm.except_orm, except_osv) as e:
            error_messages.append(ustr(e.value))
        except Exception as e:
            error_messages.append(ustr(e))
    if error_messages:
        raise orm.except_orm(
            _("Errors occurred during update"),
            "\n".join(error_messages)
        )
    return self.updated_currency, self.log_info
def _get_payroll_user_name(self, cr, uid, context=None):
    """Selection helper: list (id-as-string, name) pairs for every user in
    the l10n_sg payroll admin group, preceded by an empty choice."""
    if context is None:
        context = {}
    data_obj = self.pool.get('ir.model.data')
    # Resolve the XML id of the payroll admin group to its res.groups record.
    xml_record_id = data_obj._get_id(cr, uid, 'l10n_sg_hr_payroll', 'group_hr_payroll_admin')
    group_id = data_obj.browse(cr, uid, xml_record_id, context=context).res_id
    group = self.pool.get('res.groups').browse(cr, uid, group_id, context)
    choices = [(False, '')]
    for user in group.users:
        choices.append((tools.ustr(user.id), tools.ustr(user.name)))
    return choices
def from_data(self, uid, fields, rows, company_name):
    """Build a Printscreen PDF for the given header ``fields`` and data
    ``rows``, skipping columns without ``header_data_id``, then render the
    transformed RML with trml2pdf.

    Fixed: removed the unused accumulators ``l``, ``t``, ``temp`` and
    ``tsum`` that were never read.

    :return: the parsed PDF object (also stored on ``self.obj``)
    """
    pageSize = [210.0, 297.0]  # A4, in millimetres
    new_doc = etree.Element("report")
    config = etree.SubElement(new_doc, 'config')

    def _append_node(name, text):
        # Helper: append <name>text</name> under <config>.
        n = etree.SubElement(config, name)
        n.text = text

    _append_node('date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
    _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
    # 2.8346 converts millimetres to points (72 / 25.4).
    _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
    _append_node('PageHeight', '%.2f' % (pageSize[1] * 2.8346,))
    _append_node('PageFormat', 'a4')
    _append_node('header-date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
    _append_node('company', company_name)
    # Columns without header data are skipped in both header and rows.
    skip_index = []
    header = etree.SubElement(new_doc, 'header')
    i = 0
    for f in fields:
        if f.get('header_data_id', False):
            value = f.get('header_name', "")
            field = etree.SubElement(header, 'field')
            field.text = tools.ustr(value)
        else:
            skip_index.append(i)
        i += 1
    lines = etree.SubElement(new_doc, 'lines')
    for row_lines in rows:
        node_line = etree.SubElement(lines, 'row')
        j = 0
        for row in row_lines:
            if not j in skip_index:
                para = "yes"
                tree = "no"
                value = row.get('data', '')
                if row.get('bold', False):
                    para = "group"
                if row.get('number', False):
                    tree = "float"
                col = etree.SubElement(node_line, 'col', para=para, tree=tree)
                col.text = tools.ustr(value)
            j += 1
    transform = etree.XSLT(
        etree.parse(os.path.join(tools.config['root_path'],
                                 'addons/base/report/custom_new.xsl')))
    rml = etree.tostring(transform(new_doc))
    # Update system font database if it hasn't been called yet, so that system
    # fonts are used by reportlab with better glyph coverage
    registry = openerp.registry(request.cr.dbname)
    registry['res.font'].font_scan(request.cr, SUPERUSER_ID, lazy=True, context=request.context)
    self.obj = trml2pdf.parseNode(rml, title='Printscreen')
    return self.obj
def _create_analytic_entries(self, cr, uid, vals, context):
    """Create the hr analytic timesheet from project task work.

    :param vals: task-work values; must carry ``task_id``, ``name``,
        ``user_id`` and ``hours``; may carry ``date``, ``fecha_inicio``,
        ``fecha_fin`` and ``tipo_trabajo`` which are copied through
    :return: id of the created hr.analytic.timesheet line, or False when the
        task has no project/analytic account (no line is created then)
    """
    timesheet_obj = self.pool['hr.analytic.timesheet']
    task_obj = self.pool['project.task']
    vals_line = {}
    timeline_id = False
    acc_id = False
    # NOTE: task_obj is rebound from the model to the browse record here.
    task_obj = task_obj.browse(cr, uid, vals['task_id'], context=context)
    result = self.get_user_related_details(cr, uid, vals.get('user_id', uid))
    vals_line['name'] = '%s: %s' % (tools.ustr(task_obj.name), tools.ustr(vals['name'] or '/'))
    vals_line['user_id'] = vals['user_id']
    vals_line['product_id'] = result['product_id']
    # Custom fields copied through only when present (presumably added by a
    # localization module — confirm they exist on hr.analytic.timesheet).
    if vals.get('fecha_inicio'):
        vals_line['fecha_inicio'] = vals['fecha_inicio']
    if vals.get('fecha_fin'):
        vals_line['fecha_fin'] = vals['fecha_fin']
    if vals.get('tipo_trabajo'):
        vals_line['tipo_trabajo'] = vals['tipo_trabajo']
    if vals.get('date'):
        # Convert the UTC datetime into the user's timezone, then keep only
        # the date part for the timesheet line.
        timestamp = datetime.datetime.strptime(vals['date'], tools.DEFAULT_SERVER_DATETIME_FORMAT)
        ts = fields.datetime.context_timestamp(cr, uid, timestamp, context)
        vals_line['date'] = ts.strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
    # Calculate quantity based on employee's product's uom
    vals_line['unit_amount'] = vals['hours']
    default_uom = self.pool['res.users'].browse(cr, uid, uid, context=context).company_id.project_time_mode_id.id
    if result['product_uom_id'] != default_uom:
        vals_line['unit_amount'] = self.pool['product.uom']._compute_qty(cr, uid, default_uom, vals['hours'], result['product_uom_id'])
    acc_id = task_obj.project_id and task_obj.project_id.analytic_account_id.id or acc_id
    if acc_id:
        vals_line['account_id'] = acc_id
        res = timesheet_obj.on_change_account_id(cr, uid, False, acc_id)
        if res.get('value'):
            vals_line.update(res['value'])
        vals_line['general_account_id'] = result['general_account_id']
        vals_line['journal_id'] = result['journal_id']
        vals_line['amount'] = 0.0
        vals_line['product_uom_id'] = result['product_uom_id']
        amount = vals_line['unit_amount']
        prod_id = vals_line['product_id']
        unit = False
        timeline_id = timesheet_obj.create(cr, uid, vals=vals_line, context=context)
        # Compute based on pricetype
        amount_unit = timesheet_obj.on_change_unit_amount(cr, uid, timeline_id, prod_id, amount, False, unit, vals_line['journal_id'], context=context)
        if amount_unit and 'amount' in amount_unit.get('value', {}):
            updv = {'amount': amount_unit['value']['amount']}
            timesheet_obj.write(cr, uid, [timeline_id], updv, context=context)
    return timeline_id
def from_data(self, uid, fields, rows, company_name):
    """Render a "printscreen"-style list report to PDF.

    Builds an intermediate <report> XML document from *fields* (column
    descriptors) and *rows* (cell dicts), transforms it with the
    custom_new.xsl stylesheet into RML, and parses that into a PDF node.

    :param uid: user id (unused here, kept for API compatibility)
    :param fields: list of dicts; a column is rendered only when it has a
        truthy ``header_data_id``, otherwise its index is skipped in rows
    :param rows: list of row lists; each cell dict may carry ``data``,
        ``bold`` and ``number`` keys
    :param company_name: text placed in the report header
    :return: the parsed PDF object (also stored on ``self.obj``)
    """
    # Fixed A4 page size in millimetres; 2.8346 converts mm -> points.
    pageSize = [210.0, 297.0]
    new_doc = etree.Element("report")
    config = etree.SubElement(new_doc, "config")

    def _append_node(name, text):
        # Helper: append <name>text</name> under <config>.
        n = etree.SubElement(config, name)
        n.text = text

    _append_node("date", time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace("%y", "%Y"))))
    _append_node("PageSize", "%.2fmm,%.2fmm" % tuple(pageSize))
    _append_node("PageWidth", "%.2f" % (pageSize[0] * 2.8346,))
    _append_node("PageHeight", "%.2f" % (pageSize[1] * 2.8346,))
    _append_node("PageFormat", "a4")
    _append_node("header-date", time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace("%y", "%Y"))))
    _append_node("company", company_name)
    # (removed unused locals l, t, temp, tsum that were never read)
    skip_index = []
    header = etree.SubElement(new_doc, "header")
    i = 0
    for f in fields:
        if f.get("header_data_id", False):
            value = f.get("header_name", "")
            field = etree.SubElement(header, "field")
            field.text = tools.ustr(value)
        else:
            # Column has no header: remember its index so row cells at the
            # same position are skipped below.
            skip_index.append(i)
        i += 1
    lines = etree.SubElement(new_doc, "lines")
    for row_lines in rows:
        node_line = etree.SubElement(lines, "row")
        j = 0
        for row in row_lines:
            if not j in skip_index:
                para = "yes"
                tree = "no"
                value = row.get("data", "")
                if row.get("bold", False):
                    para = "group"
                if row.get("number", False):
                    tree = "float"
                col = etree.SubElement(node_line, "col", para=para, tree=tree)
                col.text = tools.ustr(value)
            j += 1
    transform = etree.XSLT(
        etree.parse(os.path.join(tools.config["root_path"], "addons/base/report/custom_new.xsl"))
    )
    rml = etree.tostring(transform(new_doc))
    self.obj = trml2pdf.parseNode(rml, title="Printscreen")
    return self.obj
def get_month(self, cr, uid, ids, from_date, context):
    """Format *from_date* (a '%Y-%m-%d' string) for report display.

    By default returns 'MonthName-Year' (e.g. 'January-2020'). Context flags
    override the format: ``month`` -> '%m', ``year`` -> '%Y', ``day`` -> '%d';
    when several flags equal 1 the last one in (month, year, day) order wins,
    matching the original cascade of if-statements.
    """
    parsed = datetime.fromtimestamp(time.mktime(time.strptime(from_date, "%Y-%m-%d")))
    fmt = '%B-%Y'
    # Later flags deliberately override earlier ones.
    for flag, flag_fmt in (('month', '%m'), ('year', '%Y'), ('day', '%d')):
        if context.get(flag, '') == 1:
            fmt = flag_fmt
    return tools.ustr(parsed.strftime(fmt))
def get_record_data(self, cr, uid, values, context=None):
    """Prepare composer defaults; in 'forward' mode build a forwarded body.

    When ``context['option'] == 'forward'`` the parent mail.message given by
    ``values['parent_id']`` is re-rendered as a
    "---------- Forwarded message ----------" HTML block (subject prefixed
    with 'Fwd:', original sender/date/recipients quoted) and stored in
    ``result['body']``; otherwise the super() result is returned unchanged.
    """
    if context is None:
        context = {}
    body = subject = from_email = mail_date = to_email_id = ''
    to_partner_ids = attachment_ids = []
    result = super(mail_compose_message, self).get_record_data(cr, uid, values, context=context)
    if context.get('option') == 'forward':
        parent_id = values.get('parent_id')
        for parent in self.pool.get('mail.message').browse(cr, uid, parent_id, context=context):
            active_tz = pytz.timezone(context.get("tz", "UTC") if context else "Asia/Calcutta")  # Its for Synconics Use Only
            # Convert the message creation date (stored in UTC) to the user's
            # timezone for display in the forwarded header.
            attendance_start = datetime.strptime(parent.create_date, DEFAULT_SERVER_DATETIME_FORMAT).replace(tzinfo=pytz.utc).astimezone(active_tz)
            next_attendance_date = datetime.strftime(attendance_start, "%a, %b %d, %Y at %H:%M %p")
            # NOTE(review): strptime/strftime round-trip with the same format
            # is a no-op kept from the original — confirm before removing.
            mail_date = datetime.strptime(next_attendance_date, "%a, %b %d, %Y at %H:%M %p").strftime("%a, %b %d, %Y at %H:%M %p")
            body = tools.ustr(parent.body)
            from_email = parent.email_from
            subject = tools.ustr(parent.subject or parent.record_name or '')
            attachment_ids = [attach.id for attach in parent.attachment_ids]
            if not result['partner_ids']:
                for partner in self.pool.get('res.partner').browse(cr, uid, [parent.partner_ids.id], context=context):
                    # FIX: was partner_obj.id — partner_obj is undefined in
                    # this scope and raised NameError; the loop var is partner.
                    to_partner_ids.append(partner.id)
            else:
                to_partner_ids = result['partner_ids']
                del to_partner_ids[0]
        re_prefix = _('Fwd:')
        if subject and not (subject.startswith('Fwd:') or subject.startswith(re_prefix)):
            subject = "%s %s" % (re_prefix, subject)
        result['subject'] = subject
        result['attachment_ids'] = attachment_ids
        for partner in self.pool.get('res.partner').browse(cr, uid, list(set(to_partner_ids)), context=context):
            # Guard against partners without an e-mail address (email is
            # False then, which would raise TypeError on concatenation).
            to_email_id += partner.name + ' ' + (partner.email or '') + '; '
        from_format = "<br><b>From : </b>"
        date_format = "<br><b>Date : </b>"
        to_format = "<br><b>To : </b>"
        body1_format = "<br><br><br>"
        body2 = "<br><br>---------- Forwarded message ----------<br><b>Subject: </b>"
        # email_from may be stored as False; normalize to an empty string.
        if isinstance(from_email, bool):
            from_email = ''
        # Replace angle brackets so the address renders in HTML.
        from_email2 = from_email.replace("<", "(").replace(">", ")")
        body1 = body2 + subject + from_format + from_email2 + date_format + mail_date + to_format + to_email_id + body1_format + body
        result['body'] = body1
    return result
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
def login(self, db, login, password):
    """Authenticate a user, falling back to the stored OpenID key.

    Tries the standard authentication first; when that fails, the supplied
    password is matched against the user's ``openid_key``, stamping
    ``login_date`` in the same UPDATE. Returns the user id on success,
    False otherwise.
    """
    uid = super(res_users, self).login(db, login, password)
    if uid:
        return uid
    # Standard login failed: try the OpenID key path on a fresh cursor.
    with RegistryManager.get(db).cursor() as cr:
        cr.execute("""UPDATE res_users SET login_date=now() AT TIME ZONE 'UTC' WHERE login=%s AND openid_key=%s AND active=%s RETURNING id""",
                   (tools.ustr(login), tools.ustr(password), True))
        row = cr.fetchone()
        cr.commit()
        if row:
            return row[0]
        return False
def action_create(self, cr, uid, ids, context=None):
    """Create and send notification e-mails to the wizard's recipients.

    For each patient in ``email_to`` the template placeholders
    ([nombre_paciente], [fecha], [hora], [doctor], [esp]) are substituted
    into ``body_html`` and a mail.mail record is queued; if the appointment's
    doctor has an e-mail, a copy is also queued for them. All queued mails
    are then sent at once. Raises when a patient has no e-mail address
    (user-facing message intentionally in Spanish).
    """
    obj = self.browse(cr, uid, ids[0], context=context)
    user = self.pool.get('res.users').browse(cr, uid, uid, context)
    _ids = []
    appointment = None
    if context and context.get('appointment_id', False):
        appointment = self.pool.get('oemedical.appointment').browse(
            cr, uid, context.get('appointment_id'), context)
    for patient in obj.email_to:
        if not patient.email:
            raise osv.except_osv(
                'Error',
                tools.ustr(
                    'El destinatario no tiene definida su dirección de correo electrónico...'
                ))
        # Substitute template placeholders with patient/appointment data.
        body_html = obj.body_html.replace('[nombre_paciente]', tools.ustr(patient.name))
        if appointment:
            body_html = body_html.replace('[fecha]', appointment.appointment_day)
            body_html = body_html.replace(
                '[hora]',
                appointment.appointment_hour + ':' + appointment.appointment_minute)
            body_html = body_html.replace('[doctor]', appointment.doctor.name)
            body_html = body_html.replace(
                '[esp]',
                appointment.doctor.specialty.name
                if appointment.doctor and appointment.doctor.specialty else '')
        vals = {
            'state': 'outgoing',
            'subject': obj.subject,
            'body_html': '<pre>%s</pre>' % body_html,
            'email_to': patient.email,
            'email_from': user.company_id.email or False,
            'auto_delete': True,
            'attachment_ids': [(6, 0, [attach.id for attach in obj.attachment_ids])],
        }
        _ids.append(
            self.pool.get('mail.mail').create(cr, uid, vals, context=context))
        # NOTE(review): doctor copy is queued per patient here (reusing and
        # mutating this patient's vals) — confirm the intended scope.
        if appointment and appointment.doctor and appointment.doctor.physician_id and appointment.doctor.physician_id.email:
            vals['email_to'] = appointment.doctor.physician_id.email
            _ids.append(
                self.pool.get('mail.mail').create(cr, uid, vals, context=context))
    # Send everything that was queued above in one call.
    self.pool.get('mail.mail').send(cr, uid, _ids)
    return True
def onchange_employee_id(self, date_from, date_to, employee_id, contract_id):
    """Recompute payslip defaults when employee/period/contract changes.

    Deletes the slip's existing worked-days and input lines (raw SQL),
    resolves the applicable contract and salary structure, regenerates
    worked-days/input lines, and appends statistics rows for the previous
    and current month (total days, Saturdays, Sundays, weekdays), contract
    days within the current month, and a zero line per leave type not
    already present.

    :return: onchange-style ``{'value': {...}}`` dict
    """
    empolyee_obj = self.env['hr.employee']
    contract_obj = self.env['hr.contract']
    worked_days_obj = self.env['hr.payslip.worked_days']
    input_obj = self.env['hr.payslip.input']
    period_start_date = date_from
    period_end_date = date_to
    #delete old worked days lines
    old_worked_days_ids = []
    if self.id:
        old_worked_days_ids = [worked_days_rec.id for worked_days_rec in worked_days_obj.search([('payslip_id', '=', self.id)])]
    if old_worked_days_ids:
        # Raw SQL delete bypasses ORM hooks — kept as in the original.
        self._cr.execute(""" delete from hr_payslip_worked_days where id in %s""", (tuple(old_worked_days_ids),))
        # worked_days_obj.unlink(self._cr,self._uid,old_worked_days_ids)
    #delete old input lines
    old_input_ids = []
    if self.id:
        old_input_ids = [input_rec.id for input_rec in input_obj.search([('payslip_id', '=', self.id)])]
    if old_input_ids:
        self._cr.execute(""" delete from hr_payslip_input where id in %s""", (tuple(old_input_ids),))
        # input_obj.unlink(old_input_ids)
    #defaults
    res = {'value': {
        'line_ids': [],
        'input_line_ids': [],
        'worked_days_line_ids': [],
        #'details_by_salary_head':[], TODO put me back
        'name': '',
        'contract_id': False,
        'struct_id': False,
    }}
    if (not employee_id) or (not date_from) or (not date_to):
        return res
    ttyme = datetime.fromtimestamp(time.mktime(time.strptime(date_from, "%Y-%m-%d")))
    employeee_id = empolyee_obj.browse(employee_id)
    res['value'].update({
        'name': _('Salary Slip of %s for %s') % (employeee_id.name, tools.ustr(ttyme.strftime('%B-%Y'))),
        'company_id': employeee_id.company_id.id
    })
    if not self._context.get('contract', False):
        #fill with the first contract of the employee
        contract_ids = self.get_contract(employeee_id, date_from, date_to, context=self._context)
    else:
        if contract_id:
            #set the list of contract for which the input have to be filled
            contract_ids = [contract_id]
        else:
            #if we don't give the contract, then the input to fill should be for all current contracts of the employee
            contract_ids = self.get_contract(employeee_id, date_from, date_to, context=self._context)
    if not contract_ids:
        return res
    contract_record = contract_obj.browse(contract_ids[0])
    res['value'].update({
        'contract_id': contract_record and contract_record.id or False
    })
    struct_record = contract_record and contract_record.struct_id or False
    if not struct_record:
        return res
    res['value'].update({
        'struct_id': struct_record.id,
    })
    #computation of the salary input
    worked_days_line_ids = self.get_worked_day_lines(contract_ids, date_from, date_to, context=self._context)
    input_line_ids = self.get_inputs(contract_ids, date_from, date_to, context=self._context)
    res['value'].update({
        'worked_days_line_ids': worked_days_line_ids,
        'input_line_ids': input_line_ids,
    })
    if not employee_id:
        return res
    active_employee = empolyee_obj.browse(employee_id).active
    res['value'].update({'active_employee': active_employee})
    res['value'].update({'employee_id': employee_id, 'date_from': date_from, 'date_to': date_to})
    if date_from and date_to:
        current_date_from = date_from
        current_date_to = date_to
        # Derive the previous month's first and last day from date_from.
        date_from_cur = datetime.strptime(date_from, DEFAULT_SERVER_DATE_FORMAT)
        previous_month_obj = parser.parse(date_from_cur.strftime(DEFAULT_SERVER_DATE_FORMAT)) - relativedelta(months=1)
        total_days = calendar.monthrange(previous_month_obj.year, previous_month_obj.month)[1]
        first_day_of_previous_month = datetime.strptime("1-" + str(previous_month_obj.month) + "-" + str(previous_month_obj.year), '%d-%m-%Y')
        last_day_of_previous_month = datetime.strptime(str(total_days) + "-" + str(previous_month_obj.month) + "-" + str(previous_month_obj.year), '%d-%m-%Y')
        # NOTE: date_from/date_to are rebound to the PREVIOUS month here.
        date_from = datetime.strftime(first_day_of_previous_month, DEFAULT_SERVER_DATE_FORMAT)
        date_to = datetime.strftime(last_day_of_previous_month, DEFAULT_SERVER_DATE_FORMAT)
        # Count Saturdays (weekday 5), Sundays (6) and weekdays over the
        # previous month.
        dates = list(rrule.rrule(rrule.DAILY, dtstart=parser.parse(date_from), until=parser.parse(date_to)))
        sunday = saturday = weekdays = 0
        for day in dates:
            if day.weekday() == 5:
                saturday += 1
            elif day.weekday() == 6:
                sunday += 1
            else:
                weekdays += 1
        new = {'code': 'TTLPREVDAYINMTH', 'name': 'Total number of days for previous month', 'number_of_days': len(dates), 'sequence': 2, 'contract_id': contract_record.id}
        res.get('value').get('worked_days_line_ids').append(new)
        new = {'code': 'TTLPREVSUNINMONTH', 'name': 'Total sundays in previous month', 'number_of_days': sunday, 'sequence': 3, 'contract_id': contract_record.id}
        res.get('value').get('worked_days_line_ids').append(new)
        new = {'code': 'TTLPREVSATINMONTH', 'name': 'Total saturdays in previous month', 'number_of_days': saturday, 'sequence': 4, 'contract_id': contract_record.id}
        res.get('value').get('worked_days_line_ids').append(new)
        new = {'code': 'TTLPREVWKDAYINMTH', 'name': 'Total weekdays in previous month', 'number_of_days': weekdays, 'sequence': 5, 'contract_id': contract_record.id}
        res.get('value').get('worked_days_line_ids').append(new)
        # Same counts over the current period.
        dates = list(rrule.rrule(rrule.DAILY, dtstart=parser.parse(current_date_from), until=parser.parse(current_date_to)))
        sunday = saturday = weekdays = 0
        for day in dates:
            if day.weekday() == 5:
                saturday += 1
            elif day.weekday() == 6:
                sunday += 1
            else:
                weekdays += 1
        new = {'code': 'TTLCURRDAYINMTH', 'name': 'Total number of days for current month', 'number_of_days': len(dates), 'sequence': 2, 'contract_id': contract_record.id}
        res.get('value').get('worked_days_line_ids').append(new)
        new = {'code': 'TTLCURRSUNINMONTH', 'name': 'Total sundays in current month', 'number_of_days': sunday, 'sequence': 3, 'contract_id': contract_record.id}
        res.get('value').get('worked_days_line_ids').append(new)
        new = {'code': 'TTLCURRSATINMONTH', 'name': 'Total saturdays in current month', 'number_of_days': saturday, 'sequence': 4, 'contract_id': contract_record.id}
        res.get('value').get('worked_days_line_ids').append(new)
        new = {'code': 'TTLCURRWKDAYINMTH', 'name': 'Total weekdays in current month', 'number_of_days': weekdays, 'sequence': 5, 'contract_id': contract_record.id}
        res.get('value').get('worked_days_line_ids').append(new)
        # Weekdays actually covered by the contract within the current period
        # (only checked when the contract starts, or else ends, inside it).
        cur_month_weekdays = 0
        if contract_record:
            contract_start_date = contract_record.date_start
            contract_end_date = contract_record.date_end
            if contract_start_date:
                if contract_start_date >= current_date_from and contract_start_date <= current_date_to:
                    current_month_days = list(rrule.rrule(rrule.DAILY, dtstart=parser.parse(contract_start_date), until=parser.parse(current_date_to)))
                    for day in current_month_days:
                        if day.weekday() not in [5, 6]:
                            cur_month_weekdays += 1
            elif contract_end_date:
                if contract_end_date >= current_date_from and contract_end_date <= current_date_to:
                    current_month_days = list(rrule.rrule(rrule.DAILY, dtstart=parser.parse(current_date_from), until=parser.parse(contract_end_date)))
                    for day in current_month_days:
                        if day.weekday() not in [5, 6]:
                            cur_month_weekdays += 1
            if cur_month_weekdays:
                new = {'code': 'TTLCURCONTDAY', 'name': 'Total current contract days in current month', 'number_of_days': cur_month_weekdays, 'sequence': 6, 'contract_id': contract_record.id}
                res.get('value').get('worked_days_line_ids').append(new)
            else:
                # Contract spans the whole period: fall back to the full
                # weekday count computed above.
                new = {'code': 'TTLCURCONTDAY', 'name': 'Total current contract days in current month', 'number_of_days': weekdays, 'sequence': 6, 'contract_id': contract_record.id}
                res.get('value').get('worked_days_line_ids').append(new)
    if employee_id:
        # Ensure every leave type appears as a worked-days line (0.0 days
        # when the employee took none); matching is done on the code field.
        holiday_status_obj = self.env["hr.holidays.status"]
        holiday_status_ids = holiday_status_obj.search([])
        for holiday_status in holiday_status_ids:
            flag = False
            for payslip_data in res["value"].get("worked_days_line_ids"):
                if payslip_data.get("code") == holiday_status.name:
                    flag = True
            if not flag:
                new = {'code': holiday_status.name, 'name': holiday_status.name, 'number_of_days': 0.0, 'sequence': 0, 'contract_id': contract_record.id}
                res.get('value').get('worked_days_line_ids').append(new)
    return res
def write(self, cr, uid, ids, vals, context=None):
    """ When work gets updated, handle its hr analytic timesheet.

    Mirrors changes made on the workcenter-line work record (name, user,
    date, hours) onto its linked hr.analytic.timesheet line, recomputing the
    amount when hours change, then delegates to the standard write().
    """
    if context is None:
        context = {}
    timesheet_obj = self.pool.get('hr.analytic.timesheet')
    uom_obj = self.pool.get('product.uom')
    result = {}
    if isinstance(ids, (long, int)):
        ids = [ids]
    for line_work in self.browse(cr, uid, ids, context=context):
        line_id = line_work.hr_analytic_timesheet_id
        if not line_id:
            # if a record is deleted from timesheet,
            # the line_id will become
            # null because of the foreign key on-delete=set null
            continue
        vals_line = {}
        if 'name' in vals:
            vals_line['name'] = \
                '%s: %s' % (
                    tools.ustr(line_work.workcenter_line_id.name),
                    tools.ustr(vals['name'] or '/')
                )
        if 'user_id' in vals:
            vals_line['user_id'] = vals['user_id']
        if 'date' in vals:
            # Keep only the date part of the datetime string.
            vals_line['date'] = vals['date'][:10]
        if 'hours' in vals:
            vals_line['unit_amount'] = vals['hours']
            prod_id = vals_line.get(
                'product_id', line_id.product_id.id)  # False may be set
            # Put user related details in analytic timesheet values
            details = self.get_user_related_details(
                cr, uid, vals.get('user_id', line_work.user_id.id))
            for field in ('product_id', 'general_account_id', 'journal_id', 'product_uom_id'):
                if details.get(field, False):
                    vals_line[field] = details[field]
            # Check if user's default UOM differs from product's UOM
            user_default_uom_id = self.pool.get('res.users').browse(
                cr, uid, uid).company_id.project_time_mode_id.id
            if details.get('product_uom_id', False) \
                    and details['product_uom_id'] != user_default_uom_id:
                vals_line['unit_amount'] = uom_obj._compute_qty(
                    cr, uid, user_default_uom_id, vals['hours'], details['product_uom_id'])
            # Compute based on pricetype
            amount_unit = timesheet_obj.on_change_unit_amount(
                cr, uid, line_id.id, prod_id=prod_id, company_id=False,
                unit_amount=vals_line['unit_amount'], unit=False,
                journal_id=vals_line['journal_id'], context=context)
            if amount_unit and 'amount' in amount_unit.get('value', {}):
                vals_line['amount'] = amount_unit['value']['amount']
        if vals_line:
            self.pool.get('hr.analytic.timesheet').write(
                cr, uid, [line_id.id], vals_line, context=context)
    return super(mrp_production_workcenter_line_work, self).write(
        cr, uid, ids, vals, context)
def import_data_types(self):
    """Import cenit data types from the uploaded base64 JSON file.

    Decodes ``b_file``, parses it as JSON (raising a user-friendly
    ``UserError`` on malformed input), and for each entry resolves the
    referenced odoo model, namespace and schema (raising ``MissingError``
    when any is absent), then creates or updates the cenit.data_type record
    and rebuilds its domain/trigger/line sub-records before syncing rules.

    :return: True on success
    """
    data_file = self[0].b_file
    irmodel_pool = self.env['ir.model']
    schema_pool = self.env['cenit.schema']
    namespace_pool = self.env['cenit.namespace']
    datatype_pool = self.env['cenit.data_type']
    line_pool = self.env['cenit.data_type.line']
    domain_pool = self.env['cenit.data_type.domain_line']
    trigger_pool = self.env['cenit.data_type.trigger']
    try:
        data_file = base64.decodestring(data_file)
        # (removed leftover debug `print data_file` that dumped the whole
        # decoded upload to stdout)
        json_data = json.loads(data_file)
    except Exception as e:
        _logger.exception('File unsuccessfully imported, due to format mismatch.')
        raise UserError(_('File not imported due to format mismatch or a malformed file. (Valid format is .json)\n\nTechnical Details:\n%s') % tools.ustr(e))
    for data in json_data:
        odoo_model = data['model']
        namespace = data['namespace']
        schema = data['schema']
        # Resolve the odoo model, namespace and schema to record ids; each
        # must already exist.
        domain = [('model', '=', odoo_model)]
        candidates = irmodel_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s module installed" % odoo_model
            )
        odoo_model = candidates.id
        domain = [('name', '=', namespace)]
        candidates = namespace_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s namespace in Namespaces" % namespace
            )
        namespace = candidates.id
        domain = [('name', '=', schema)]
        candidates = schema_pool.search(domain)
        if not candidates:
            raise exceptions.MissingError(
                "There is no %s schema in Schemas" % schema
            )
        schema = candidates.id
        vals = {'name': data['name'], 'model': odoo_model, 'namespace': namespace, 'schema': schema}
        dt = datatype_pool.search([('name', '=', data['name'])])
        updt = False
        if dt:
            dt.write(vals)
            updt = True
        else:
            dt = datatype_pool.create(vals)
        if updt:
            # Updating an existing data type: wipe its sub-records so they
            # can be rebuilt from the file below.
            for d in dt.domain:
                d.unlink()
            for d in dt.triggers:
                d.unlink()
            for d in dt.lines:
                d.unlink()
        for domain in data['domains']:
            vals = {'data_type': dt.id, 'field': domain['field'], 'value': domain['value'], 'op': domain['op']}
            domain_pool.create(vals)
        for trigger in data['triggers']:
            vals = {'data_type': dt.id, 'name': trigger['name'], 'cron_lapse': trigger['cron_lapse'], 'cron_units': trigger['cron_units'], 'cron_restrictions': trigger['cron_restrictions'], 'cron_name': trigger['cron_name']}
            trigger_pool.create(vals)
        for line in data['lines']:
            # A line may reference another data type by name.
            domain = [('name', '=', line['reference'])]
            candidate = datatype_pool.search(domain)
            vals = {
                'data_type': dt.id,
                'name': line['name'],
                'value': line['value'],
                'line_type': line['line_type'],
                'line_cardinality': line['line_cardinality'],
                'primary': line['primary'],
                'inlined': line['inlined'],
                'reference': candidate.id
            }
            line_pool.create(vals)
        dt.sync_rules()
    return True
def _bom_explode(self, cr, uid, bom, factor, properties=None, addthis=False, level=0, routing_id=False):
    """ Finds Products and Work Centers for related BoM for manufacturing order.
    @param bom: BoM of particular product.
    @param factor: Factor of product UoM.
    @param properties: A List of properties Ids.
    @param addthis: If BoM found then True else False.
    @param level: Depth level to find BoM lines starts from 10.
    @return: result: List of dictionaries containing product details.
             result2: List of dictionaries containing Work Center details.
    """
    routing_obj = self.pool.get('mrp.routing')
    # Adjust the factor for efficiency and round up to the BoM rounding,
    # never going below one rounding unit.
    factor = factor / (bom.product_efficiency or 1.0)
    factor = _common.ceiling(factor, bom.product_rounding)
    if factor < bom.product_rounding:
        factor = bom.product_rounding
    result = []
    result2 = []
    phantom = False
    if bom.type == 'phantom' and not bom.bom_lines:
        # Phantom BoM without lines: explode the product's own BoM instead.
        newbom = self._bom_find(cr, uid, bom.product_id.id, bom.product_uom.id, properties)
        if newbom:
            res = self._bom_explode(cr, uid, self.browse(cr, uid, [newbom])[0], factor * bom.product_qty, properties, addthis=True, level=level + 10)
            result = result + res[0]
            result2 = result2 + res[1]
            phantom = True
        else:
            phantom = False
    if not phantom:
        if addthis and not bom.bom_lines:
            # Leaf BoM: add its product as a component.
            result.append({
                'name': bom.product_id.name,
                'product_id': bom.product_id.id,
                'product_qty': bom.product_qty * factor,
                'product_uom': bom.product_uom.id,
                'product_uos_qty': bom.product_uos and bom.product_uos_qty * factor or False,
                'product_uos': bom.product_uos and bom.product_uos.id or False,
            })
        # Explicit routing_id takes precedence over the BoM's own routing.
        routing = (routing_id and routing_obj.browse(
            cr, uid, routing_id)) or bom.routing_id or False
        if routing:
            for wc_use in routing.workcenter_lines:
                wc = wc_use.workcenter_id
                # mult = number of cycles needed given workcenter capacity
                # (rounded up when there is a remainder).
                d, m = divmod(factor, wc_use.workcenter_id.capacity_per_cycle)
                mult = (d + (m and 1.0 or 0.0))
                hour = 0.0
                if wc_use.cycle_type == 'fix':
                    cycle = wc_use.cycle_nbr
                    hour = float(wc_use.hour_nbr + ((wc.time_start or 0.0) + (wc.time_stop or 0.0) + cycle * (wc.time_cycle or 0.0)) * (wc.time_efficiency or 1.0))
                elif wc_use.cycle_type == 'bom-based':
                    # One cycle per BoM line.
                    cycle = len(bom.bom_lines)
                    hour = float(wc_use.hour_nbr + ((wc.time_start or 0.0) + (wc.time_stop or 0.0) + cycle * (wc.time_cycle or 0.0)) * (wc.time_efficiency or 1.0))
                else:
                    cycle = mult * wc_use.cycle_nbr
                    hour = float(wc_use.hour_nbr * mult + ((wc.time_start or 0.0) + (wc.time_stop or 0.0) + cycle * (wc.time_cycle or 0.0)) * (wc.time_efficiency or 1.0))
                result2.append({
                    'name': tools.ustr(wc_use.name) + ' - ' + tools.ustr(bom.product_id.name),
                    'workcenter_id': wc.id,
                    'sequence': level + (wc_use.sequence or 0),
                    'cycle': cycle,
                    'hour': hour,
                })
        # Recurse into child BoM lines, increasing the depth level by 10.
        for bom2 in bom.bom_lines:
            res = self._bom_explode(cr, uid, bom2, factor, properties, addthis=True, level=level + 10)
            result = result + res[0]
            result2 = result2 + res[1]
    return result, result2
def _bom_explode_variants(self, bom, product, factor, properties=None,
                          level=0, routing_id=False, previous_products=None,
                          master_bom=None, production=None):
    """ Finds Products and Work Centers for related BoM for manufacturing
    order.
    @param bom: BoM of particular product template.
    @param product: Select a particular variant of the BoM. If False use
    BoM without variants.
    @param factor: Factor represents the quantity, but in UoM of the BoM,
    taking into account the numbers produced by the BoM
    @param properties: A List of properties Ids.
    @param level: Depth level to find BoM lines starts from 10.
    @param previous_products: List of product previously use by bom
    explore to avoid recursion
    @param master_bom: When recursion, used to display the name of the
    master bom
    @return: result: List of dictionaries containing product details.
             result2: List of dictionaries containing Work Center details.
    """
    routing_id = bom.routing_id.id or routing_id
    uom_obj = self.env["product.uom"]
    routing_obj = self.env['mrp.routing']
    master_bom = master_bom or bom

    def _factor(factor, product_efficiency, product_rounding):
        # Adjust for efficiency and round up to the rounding step,
        # flooring at one rounding unit.
        factor = factor / (product_efficiency or 1.0)
        factor = _common.ceiling(factor, product_rounding)
        if factor < product_rounding:
            factor = product_rounding
        return factor

    factor = _factor(factor, bom.product_efficiency, bom.product_rounding)
    result = []
    result2 = []
    routing = ((routing_id and routing_obj.browse(routing_id)) or
               bom.routing_id or False)
    if routing:
        for wc_use in routing.workcenter_lines:
            wc = wc_use.workcenter_id
            # mult = cycles needed given workcenter capacity (rounded up).
            d, m = divmod(factor, wc_use.workcenter_id.capacity_per_cycle)
            mult = (d + (m and 1.0 or 0.0))
            cycle = mult * wc_use.cycle_nbr
            result2.append({
                'name': (tools.ustr(wc_use.name) + ' - ' +
                         tools.ustr(bom.product_tmpl_id.name_get()[0][1])),
                'workcenter_id': wc.id,
                'sequence': level + (wc_use.sequence or 0),
                'cycle': cycle,
                'hour': float(wc_use.hour_nbr * mult +
                              ((wc.time_start or 0.0) +
                               (wc.time_stop or 0.0) +
                               cycle * (wc.time_cycle or 0.0)) *
                              (wc.time_efficiency or 1.0)),
            })
    for bom_line_id in bom.bom_line_ids:
        # Skip lines outside their validity date window.
        if bom_line_id.date_start and \
                (bom_line_id.date_start > fields.Date.context_today(self))\
                or bom_line_id.date_stop and \
                (bom_line_id.date_stop < fields.Date.context_today(self)):
            continue
        # all bom_line_id variant values must be in the product
        if bom_line_id.attribute_value_ids:
            production_attr_values = []
            if not product and production:
                for attr_value in production.product_attributes:
                    production_attr_values.append(attr_value.value.id)
                if (set(map(int, bom_line_id.attribute_value_ids or [])) -
                        set(map(int, production_attr_values))):
                    continue
            elif not product or\
                    (set(map(int, bom_line_id.attribute_value_ids or [])) -
                     set(map(int, product.attribute_value_ids))):
                continue
        if previous_products and (bom_line_id.product_id.product_tmpl_id.id
                                  in previous_products):
            raise exceptions.Warning(
                _('Invalid Action! BoM "%s" contains a BoM line with a'
                  ' product recursion: "%s".') %
                (master_bom.name, bom_line_id.product_id.name_get()[0][1]))
        quantity = _factor(bom_line_id.product_qty * factor,
                           bom_line_id.product_efficiency,
                           bom_line_id.product_rounding)
        if not bom_line_id.product_id:
            # NOTE(review): `not ... != "phantom"` means type == "phantom";
            # a template BoM is only searched for phantom lines here —
            # confirm this double negative is intended.
            if not bom_line_id.type != "phantom":
                bom_id = self._bom_find(
                    product_tmpl_id=bom_line_id.product_template.id,
                    properties=properties)
            else:
                bom_id = False
        else:
            bom_id = self._bom_find(product_id=bom_line_id.product_id.id,
                                    properties=properties)
        # If BoM should not behave like PhantoM, just add the product,
        # otherwise explode further
        if (bom_line_id.type != "phantom" and
                (not bom_id or self.browse(bom_id).type != "phantom")):
            if not bom_line_id.product_id:
                product_attributes = (bom_line_id.product_template.
                                      _get_product_attributes_inherit_dict(
                                          production.product_attributes))
                # NOTE(review): rebinding the `product` parameter inside the
                # loop — kept from the original; confirm it is intended.
                product = self.env['product.product']._product_find(
                    bom_line_id.product_template, product_attributes)
            else:
                product = bom_line_id.product_id
                product_attributes = (
                    bom_line_id.product_id.
                    _get_product_attributes_values_dict())
            result.append({
                'name': (bom_line_id.product_id.name or
                         bom_line_id.product_template.name),
                'product_id': product and product.id,
                'product_template': (bom_line_id.product_template.id or
                                     bom_line_id.product_id.product_tmpl_id.id),
                'product_qty': quantity,
                'product_uom': bom_line_id.product_uom.id,
                'product_uos_qty': (bom_line_id.product_uos and
                                    _factor((bom_line_id.product_uos_qty *
                                             factor),
                                            bom_line_id.product_efficiency,
                                            bom_line_id.product_rounding) or
                                    False),
                'product_uos': (bom_line_id.product_uos and
                                bom_line_id.product_uos.id or False),
                'product_attributes': map(lambda x: (0, 0, x),
                                          product_attributes),
            })
        elif bom_id:
            all_prod = [bom.product_tmpl_id.id] + (previous_products or [])
            bom2 = self.browse(bom_id)
            # We need to convert to units/UoM of chosen BoM
            factor2 = uom_obj._compute_qty(bom_line_id.product_uom.id,
                                           quantity, bom2.product_uom.id)
            quantity2 = factor2 / bom2.product_qty
            # NOTE(review): recursion goes through _bom_explode with the
            # variant-aware signature — presumably this model overrides
            # _bom_explode accordingly; confirm against the full file.
            res = self._bom_explode(bom2, bom_line_id.product_id, quantity2,
                                    properties=properties, level=level + 10,
                                    previous_products=all_prod,
                                    master_bom=master_bom,
                                    production=production)
            result = result + res[0]
            result2 = result2 + res[1]
        else:
            if not bom_line_id.product_id:
                name = bom_line_id.product_template.name_get()[0][1]
            else:
                name = bom_line_id.product_id.name_get()[0][1]
            raise exceptions.Warning(
                _('Invalid Action! BoM "%s" contains a phantom BoM line'
                  ' but the product "%s" does not have any BoM defined.') %
                (master_bom.name, name))
    return result, result2
def _create_yaml_record(self, model, data, record_id):
    """Serialize a record's values into a YAML-export dict.

    Builds ``{'model': ..., 'id': ..., 'attrs': {...}}`` from *data*,
    skipping values equal to the model's defaults (unless required),
    resolving relational fields to ids via ``self._get_id`` and recursing
    into one2many children. Also marks the module owning the model in the
    environment context's ``depends`` dict.
    """
    record = {'model': model, 'id': str(record_id)}
    model_pool = self.env[model]
    data_pool = self.env['ir.model.data']
    lids = data_pool.search([('model', '=', model)])
    res = lids[:1].read(['module'])
    attrs = {}
    # Inject a 'depends' dict into the (frozen) environment context so the
    # owning module can be recorded as a dependency.
    cr, uid, context = self.env.args
    context = dict(context)
    context.update({'depends': {}})
    depends = context.get('depends')
    self.env.args = cr, uid, frozendict(context)
    if res:
        depends[res[0]['module']] = True
        # if depends is None:
        #     depends = {}
        # self.depends[res[0]['module']]=True
    fields = model_pool.fields_get()
    defaults = {}
    try:
        defaults[model] = model_pool.default_get(data)
    except:
        defaults[model] = {}
    for key, val in data.items():
        # Skip values equal to the default unless the field is required.
        if ((key in defaults[model]) and (val == defaults[model][key])) and not (fields[key].get(
                'required', False)):
            continue
        if fields[key]['type'] in ('integer', 'float'):
            if not val:
                val = 0.0
            attrs[key] = val
        elif not (val or (fields[key]['type'] == 'function')):
            # Empty non-function values are omitted entirely.
            continue
        elif fields[key]['type'] in ('boolean', ):
            if not val:
                continue
            attrs[key] = val
        elif fields[key]['type'] in ('many2one', ):
            # String values are assumed to already be external ids.
            if type(val) in (type(''), type(u'')):
                id = val
            else:
                id, update = self._get_id(fields[key]['relation'], val)
            attrs[key] = str(id)
        elif fields[key]['type'] in ('one2many', ):
            items = [[]]
            for valitem in (val or []):
                # Only (0, _, vals) create and (1, id, vals) update commands
                # carry child values worth recursing into.
                if valitem[0] in (0, 1):
                    if key in model_pool._columns:
                        fname = model_pool._columns[key]._fields_id
                    else:
                        fname = model_pool._inherit_fields[key][
                            2]._fields_id
                    del valitem[2][
                        fname]  #delete parent_field from child's fields list
                    childrecord = self._create_yaml_record(
                        fields[key]['relation'], valitem[2], None)
                    items[0].append(childrecord['attrs'])
            attrs[key] = items
        elif fields[key]['type'] in ('many2many', ):
            if (key in defaults[model]) and (val[0][2] == defaults[model][key]):
                continue
            res = []
            for valitem in (val or []):
                # Only (6, _, ids) replace commands are exported.
                if valitem[0] == 6:
                    for id2 in valitem[2]:
                        id, update = self._get_id(fields[key]['relation'], id2)
                        self.blank_dict[(fields[key]['relation'], id2)] = id
                        res.append(str(id))
            m2m = [res]
            if m2m[0]:
                attrs[key] = m2m
        else:
            # Fallback: stringify, tolerating non-ASCII via ustr.
            try:
                attrs[key] = str(val)
            except:
                # attrs[key]=tools.ustr(val)
                attrs[key] = ustr(val)
            attrs[key] = attrs[key].replace('"', '\'')
    record['attrs'] = attrs
    return record
def signal_confirm(self, cr, uid, ids, context=None): print "\n\n\n\nheloooooo" if context is None: context = {} from openerp.addons.l10n_mx_facturae_lib import facturae_lib msj, app_xsltproc_fullpath, app_openssl_fullpath, app_xmlstarlet_fullpath = facturae_lib.library_openssl_xsltproc_xmlstarlet( cr, uid, ids, context) if msj: raise osv.except_osv(_('Warning'), _(msj)) try: if context is None: context = {} ids = isinstance(ids, (int, long)) and [ids] or ids invoice_obj = self.pool.get('account.invoice') attach = '' msj = '' index_xml = '' attach = self.browse(cr, uid, ids[0]) invoice = attach.invoice_id type = attach.type wf_service = netsvc.LocalService("workflow") save_attach = None if 'cbb' in type: msj = _("Confirmed") save_attach = False elif 'cfdi' in type: fname_invoice = invoice.fname_invoice and invoice.fname_invoice + \ '_V3_2.xml' or '' fname, xml_data = invoice_obj._get_facturae_invoice_xml_data( cr, uid, [invoice.id], context=context) attach = self.pool.get('ir.attachment').create( cr, uid, { 'name': fname_invoice, 'datas': base64.encodestring(xml_data), 'datas_fname': fname_invoice, 'res_model': 'account.invoice', 'res_id': invoice.id, }, context=None) msj = _("Attached Successfully XML CFDI 3.2\n") save_attach = True elif 'cfd' in type and not 'cfdi' in type: fname_invoice = invoice.fname_invoice and invoice.fname_invoice + \ '.xml' or '' fname, xml_data = invoice_obj._get_facturae_invoice_xml_data( cr, uid, [invoice.id], context=context) attach = self.pool.get('ir.attachment').create( cr, uid, { 'name': fname_invoice, 'datas': base64.encodestring(xml_data), 'datas_fname': fname_invoice, 'res_model': 'account.invoice', 'res_id': invoice.id, }, context=None) if attach: index_xml = self.pool.get('ir.attachment').browse( cr, uid, attach).index_content msj = _("Attached Successfully XML CFD 2.2") save_attach = True else: raise osv.except_osv( _("Type Electronic Invoice Unknow!"), _("The Type Electronic Invoice:" + (type or ''))) if save_attach: 
self.write(cr, uid, ids, { 'file_input': attach or False, 'last_date': time.strftime('%Y-%m-%d %H:%M:%S'), 'msj': msj, 'file_xml_sign_index': index_xml }, context=context) wf_service.trg_validate(uid, self._name, ids[0], 'action_confirm', cr) return True except Exception, e: error = tools.ustr(traceback.format_exc()) self.write(cr, uid, ids, {'msj': error}, context=context) _logger.error(error) return False
class db_backup(osv.Model):
    """Configuration records for scheduled database backups, with
    optional mirroring of the dump files to a remote server over SFTP
    and optional auto-removal of old dumps (locally and remotely)."""
    _name = 'db.backup'

    # NOTE(review): mutable default argument ``context={}`` is shared
    # across calls — confirm no caller mutates it.
    def get_db_list(self, cr, user, ids, host, port, context={}):
        """Return the list of database names served by the instance at
        http://host:port, queried through the xmlrpc /db service.
        ``execute`` is a module-level helper wrapping the RPC call."""
        print("Host: " + host)
        print("Port: " + port)
        uri = 'http://' + host + ':' + port
        conn = xmlrpclib.ServerProxy(uri + '/xmlrpc/db')
        db_list = execute(conn, 'list')
        return db_list

    def _get_db_name(self, cr, uid, vals, context=None):
        """Default for the 'name' column: the current cursor's database
        name. (The ir.logging pool lookup below is unused.)"""
        attach_pool = self.pool.get("ir.logging")
        dbName = cr.dbname
        return dbName

    _columns = {
        #Columns local server
        'host': fields.char('Host', size=100, required='True'),
        'port': fields.char('Port', size=10, required='True'),
        'name': fields.char('Database', size=100, required='True',
                            help='Database you want to schedule backups for'),
        'bkp_dir': fields.char('Backup Directory', size=100,
                               help='Absolute path for storing the backups',
                               required='True'),
        'autoremove': fields.boolean(
            'Auto. Remove Backups',
            help=
            "If you check this option you can choose to automaticly remove the backup after xx days"
        ),
        'daystokeep': fields.integer(
            'Remove after x days',
            help=
            "Choose after how many days the backup should be deleted. For example:\nIf you fill in 5 the backups will be removed after 5 days.",
            required=True),
        #Columns for external server (SFTP)
        'sftpwrite': fields.boolean(
            'Write to external server with sftp',
            help=
            "If you check this option you can specify the details needed to write to a remote server with SFTP."
        ),
        'sftppath': fields.char(
            'Path external server',
            help=
            "The location to the folder where the dumps should be written to. For example /odoo/backups/.\nFiles will then be written to /odoo/backups/ on your remote server."
        ),
        'sftpip': fields.char(
            'IP Address SFTP Server',
            help=
            "The IP address from your remote server. For example 192.168.0.1"),
        'sftpport': fields.integer(
            "SFTP Port",
            help="The port on the FTP server that accepts SSH/SFTP calls."),
        'sftpusername': fields.char(
            'Username SFTP Server',
            help=
            "The username where the SFTP connection should be made with. This is the user on the external server."
        ),
        'sftppassword': fields.char(
            'Password User SFTP Server',
            help=
            "The password from the user where the SFTP connection should be made with. This is the password from the user on the external server."
        ),
        'daystokeepsftp': fields.integer(
            'Remove SFTP after x days',
            help=
            "Choose after how many days the backup should be deleted from the FTP server. For example:\nIf you fill in 5 the backups will be removed after 5 days from the FTP server."
        ),
        'sendmailsftpfail': fields.boolean(
            'Auto. E-mail on backup fail',
            help=
            "If you check this option you can choose to automaticly get e-mailed when the backup to the external server failed."
        ),
        'emailtonotify': fields.char(
            'E-mail to notify',
            help=
            "Fill in the e-mail where you want to be notified that the backup failed on the FTP."
        ),
    }

    _defaults = {
        #'bkp_dir' : lambda *a : addons_path,
        'bkp_dir': '/odoo/backups',
        'host': lambda *a: 'localhost',
        'port': lambda *a: '8069',
        'name': _get_db_name,
        'daystokeepsftp': 30,
        'sftpport': 22,
    }

    def _check_db_exist(self, cr, user, ids):
        """Constraint helper: True iff every record's 'name' is among
        the databases listed by its configured host:port."""
        for rec in self.browse(cr, user, ids):
            db_list = self.get_db_list(cr, user, ids, rec.host, rec.port)
            if rec.name in db_list:
                return True
        return False

    _constraints = [(_check_db_exist,
                     _('Error ! No such database exists!'), [])]

    def test_sftp_connection(self, cr, uid, ids, context=None):
        """Try to open (and immediately close) an SFTP connection for
        every db.backup record, then report the outcome to the user by
        raising an except_osv dialog (both on success and failure)."""
        conf_ids = self.search(cr, uid, [])
        confs = self.browse(cr, uid, conf_ids)
        #Check if there is a success or fail and write messages
        messageTitle = ""
        messageContent = ""
        for rec in confs:
            db_list = self.get_db_list(cr, uid, [], rec.host, rec.port)
            try:
                pathToWriteTo = rec.sftppath
                ipHost = rec.sftpip
                portHost = rec.sftpport
                usernameLogin = rec.sftpusername
                passwordLogin = rec.sftppassword
                #Connect with external server over SFTP, so we know sure that everything works.
                srv = pysftp.Connection(host=ipHost,
                                        username=usernameLogin,
                                        password=passwordLogin,
                                        port=portHost)
                srv.close()
                #We have a success.
                messageTitle = "Connection Test Succeeded!"
                messageContent = "Everything seems properly set up for FTP back-ups!"
            except Exception, e:
                messageTitle = "Connection Test Failed!"
                # Heuristic hint for a common misconfiguration.
                if len(rec.sftpip) < 8:
                    messageContent += "\nYour IP address seems to be too short.\n"
                messageContent += "Here is what we got instead:\n"
        # NOTE(review): only the LAST record's outcome survives the
        # loop, and ``e`` is referenced outside the except block —
        # verify this is acceptable with multiple configurations.
        if "Failed" in messageTitle:
            raise osv.except_osv(_(messageTitle),
                                 _(messageContent + "%s") % tools.ustr(e))
        else:
            raise osv.except_osv(_(messageTitle), _(messageContent))

    # NOTE(review): mutable default argument ``context={}`` — see above.
    def schedule_backup(self, cr, user, context={}):
        """Cron entry point: dump every configured database to its
        backup directory, optionally upload the dumps over SFTP (with
        remote retention), and optionally prune old local dumps."""
        conf_ids = self.search(cr, user, [])
        confs = self.browse(cr, user, conf_ids)
        for rec in confs:
            db_list = self.get_db_list(cr, user, [], rec.host, rec.port)
            if rec.name in db_list:
                try:
                    if not os.path.isdir(rec.bkp_dir):
                        os.makedirs(rec.bkp_dir)
                except:
                    raise
                #Create name for dumpfile.
                bkp_file = '%s_%s.dump' % (time.strftime('%d_%m_%Y_%H_%M_%S'),
                                           rec.name)
                file_path = os.path.join(rec.bkp_dir, bkp_file)
                uri = 'http://' + rec.host + ':' + rec.port
                conn = xmlrpclib.ServerProxy(uri + '/xmlrpc/db')
                bkp = ''
                try:
                    # Server-side dump; requires the master password.
                    bkp = execute(conn, 'dump', tools.config['admin_passwd'],
                                  rec.name)
                except:
                    logger.notifyChannel(
                        'backup', netsvc.LOG_INFO,
                        "Couldn't backup database %s. Bad database administrator password for server running at http://%s:%s"
                        % (rec.name, rec.host, rec.port))
                    continue
                # The RPC returns the dump base64-encoded.
                bkp = base64.decodestring(bkp)
                fp = open(file_path, 'wb')
                fp.write(bkp)
                fp.close()
            else:
                logger.notifyChannel(
                    'backup', netsvc.LOG_INFO,
                    "database %s doesn't exist on http://%s:%s" %
                    (rec.name, rec.host, rec.port))
            #Check if user wants to write to SFTP or not.
            if rec.sftpwrite is True:
                try:
                    #Store all values in variables
                    dir = rec.bkp_dir
                    pathToWriteTo = rec.sftppath
                    ipHost = rec.sftpip
                    portHost = rec.sftpport
                    usernameLogin = rec.sftpusername
                    passwordLogin = rec.sftppassword
                    #Connect with external server over SFTP
                    srv = pysftp.Connection(host=ipHost,
                                            username=usernameLogin,
                                            password=passwordLogin,
                                            port=portHost)
                    #Move to the correct directory on external server. If the user made a typo in his path with multiple slashes (/odoo//backups/) it will be fixed by this regex.
                    pathToWriteTo = re.sub('([/]{2,5})+', '/', pathToWriteTo)
                    print(pathToWriteTo)
                    try:
                        srv.chdir(pathToWriteTo)
                    except IOError:
                        #Create directory and subdirs if they do not exist.
                        currentDir = ''
                        for dirElement in pathToWriteTo.split('/'):
                            currentDir += dirElement + '/'
                            try:
                                srv.chdir(currentDir)
                            except:
                                print(
                                    '(Part of the) path didn\'t exist. Creating it now at '
                                    + currentDir)
                                #Make directory and then navigate into it
                                # NOTE(review): mode=777 is decimal, not
                                # octal 0o777 — confirm intended perms.
                                srv.mkdir(currentDir, mode=777)
                                srv.chdir(currentDir)
                                pass
                    srv.chdir(pathToWriteTo)
                    #Loop over all files in the directory.
                    for f in os.listdir(dir):
                        fullpath = os.path.join(dir, f)
                        if os.path.isfile(fullpath):
                            print(fullpath)
                            srv.put(fullpath)
                    #Navigate in to the correct folder.
                    srv.chdir(pathToWriteTo)
                    #Loop over all files in the directory from the back-ups.
                    #We will check the creation date of every back-up.
                    for file in srv.listdir(pathToWriteTo):
                        #Get the full path
                        fullpath = os.path.join(pathToWriteTo, file)
                        #Get the timestamp from the file on the external server
                        timestamp = srv.stat(fullpath).st_atime
                        createtime = datetime.datetime.fromtimestamp(timestamp)
                        now = datetime.datetime.now()
                        delta = now - createtime
                        #If the file is older than the daystokeepsftp (the days to keep that the user filled in on the Odoo form it will be removed.
                        if delta.days >= rec.daystokeepsftp:
                            #Only delete files, no directories!
                            if srv.isfile(fullpath) and ".dump" in file:
                                print("Delete: " + file)
                                srv.unlink(file)
                    #Close the SFTP session.
                    srv.close()
                except Exception, e:
                    _logger.debug(
                        'Exception! We couldn\'t back up to the FTP server..')
                    #At this point the SFTP backup failed. We will now check if the user wants
                    #an e-mail notification about this.
                    if rec.sendmailsftpfail:
                        try:
                            ir_mail_server = self.pool.get('ir.mail_server')
                            # NOTE(review): the "******" fragments below
                            # look like credentials redacted by source
                            # scrubbing; as written this statement is
                            # not valid Python — restore the original
                            # username/password expressions.
                            message = "Dear,\n\nThe backup for the server " + rec.host + " (IP: " + rec.sftpip + ") failed.Please check the following details:\n\nIP address SFTP server: " + rec.sftpip + "\nUsername: "******"\nPassword: "******"\n\nError details: " + tools.ustr(
                                e) + "\n\nWith kind regards"
                            msg = ir_mail_server.build_email(
                                "auto_backup@" + rec.name + ".com",
                                [rec.emailtonotify],
                                "Backup from " + rec.host + "(" + rec.sftpip +
                                ") failed", message)
                            ir_mail_server.send_email(cr, user, msg)
                        except Exception:
                            pass
            """Remove all old files (on local server) in case this is configured..
            This is done after the SFTP writing to prevent unusual behaviour:
            If the user would set local back-ups to be kept 0 days and the SFTP
            to keep backups xx days there wouldn't be any new back-ups added to the
            SFTP.
            If we'd remove the dump files before they're writen to the SFTP there willbe nothing to write. Meaning that if an user doesn't want to keep back-ups locally and only wants them on the SFTP (NAS for example) there wouldn't be any writing to the remote server if this if statement was before the SFTP write method right above this comment.
            """
            if rec.autoremove is True:
                dir = rec.bkp_dir
                #Loop over all files in the directory.
                for f in os.listdir(dir):
                    fullpath = os.path.join(dir, f)
                    timestamp = os.stat(fullpath).st_ctime
                    createtime = datetime.datetime.fromtimestamp(timestamp)
                    now = datetime.datetime.now()
                    delta = now - createtime
                    if delta.days >= rec.daystokeep:
                        #Only delete files (which are .dump), no directories.
                        if os.path.isfile(fullpath) and ".dump" in f:
                            print("Delete: " + fullpath)
                            os.remove(fullpath)
def send(self, auto_commit=False, raise_exception=False):
    """ Sends the selected emails immediately, ignoring their current
        state (mails that have already been sent should not be passed
        unless they should actually be re-sent).
        Emails successfully delivered are marked as 'sent', and those
        that fail to be deliver are marked as 'exception', and the
        corresponding error mail is output in the server logs.

        :param bool auto_commit: whether to force a commit of the mail status
            after sending each mail (meant only for scheduler processing);
            should never be True during normal transactions (default: False)
        :param bool raise_exception: whether to raise an exception if the
            email sending process has failed
        :return: True
    """
    IrMailServer = self.env['ir.mail_server']
    for mail in self:
        try:
            # TDE note: remove me when model_id field is present on mail.message - done here to avoid doing it multiple times in the sub method
            if mail.model:
                model = self.env['ir.model'].sudo().search([
                    ('model', '=', mail.model)
                ])[0]
            else:
                model = None
            if model:
                mail = mail.with_context(model_name=model.name)

            # load attachment binary data with a separate read(), as prefetching all
            # `datas` (binary field) could bloat the browse cache, triggerring
            # soft/hard mem limits with temporary data.
            attachments = [(a['datas_fname'], base64.b64decode(a['datas']))
                           for a in mail.attachment_ids.sudo().read(
                               ['datas_fname', 'datas'])]

            # specific behavior to customize the send email for notified partners
            # One entry per destination: the raw email_to plus one per
            # recipient partner.
            email_list = []
            if mail.email_to:
                email_list.append(mail.send_get_email_dict())
            for partner in mail.recipient_ids:
                email_list.append(
                    mail.send_get_email_dict(partner=partner))

            # headers: build a Return-Path for bounce handling when both
            # the bounce alias and the catchall domain are configured.
            headers = {}
            bounce_alias = self.env['ir.config_parameter'].get_param(
                "mail.bounce.alias")
            catchall_domain = self.env['ir.config_parameter'].get_param(
                "mail.catchall.domain")
            if bounce_alias and catchall_domain:
                if mail.model and mail.res_id:
                    headers['Return-Path'] = '%s-%d-%s-%d@%s' % (
                        bounce_alias, mail.id, mail.model, mail.res_id,
                        catchall_domain)
                else:
                    headers['Return-Path'] = '%s-%d@%s' % (
                        bounce_alias, mail.id, catchall_domain)
            if mail.headers:
                # NOTE(review): eval() of a stored text field executes
                # arbitrary code if the field is attacker-controlled —
                # consider ast.literal_eval.
                try:
                    headers.update(eval(mail.headers))
                except Exception:
                    pass

            # Writing on the mail object may fail (e.g. lock on user) which
            # would trigger a rollback *after* actually sending the email.
            # To avoid sending twice the same email, provoke the failure earlier
            mail.write({
                'state':
                'exception',
                'failure_reason':
                _('Error without exception. Probably due do sending an email without computed recipients.'
                  ),
            })
            mail_sent = False

            # build an RFC2822 email.message.Message object and send it without queuing
            res = None
            for email in email_list:
                msg = IrMailServer.build_email(
                    email_from=mail.email_from,
                    email_to=email.get('email_to'),
                    subject=mail.subject,
                    body=email.get('body'),
                    body_alternative=email.get('body_alternative'),
                    email_cc=tools.email_split(mail.email_cc),
                    reply_to=mail.reply_to,
                    attachments=attachments,
                    message_id=mail.message_id,
                    references=mail.references,
                    object_id=mail.res_id and
                    ('%s-%s' % (mail.res_id, mail.model)),
                    subtype='html',
                    subtype_alternative='plain',
                    headers=headers)
                try:
                    res = IrMailServer.send_email(
                        msg, mail_server_id=mail.mail_server_id.id)
                except AssertionError as error:
                    if error.message == IrMailServer.NO_VALID_RECIPIENT:
                        # No valid recipient found for this particular
                        # mail item -> ignore error to avoid blocking
                        # delivery to next recipients, if any. If this is
                        # the only recipient, the mail will show as failed.
                        _logger.info(
                            "Ignoring invalid recipients for mail.mail %s: %s",
                            mail.message_id, email.get('email_to'))
                    else:
                        raise
            if res:
                # At least one send succeeded: flip the provisional
                # 'exception' state back to 'sent'.
                mail.write({
                    'state': 'sent',
                    'message_id': res,
                    'failure_reason': False
                })
                mail_sent = True

            # /!\ can't use mail.state here, as mail.refresh() will cause an error
            # see revid:[email protected] in 6.1
            if mail_sent:
                _logger.info(
                    'Mail with ID %r and Message-Id %r successfully sent',
                    mail.id, mail.message_id)
            mail._postprocess_sent_message_v9(mail_sent=mail_sent)
        except MemoryError:
            # prevent catching transient MemoryErrors, bubble up to notify user or abort cron job
            # instead of marking the mail as failed
            _logger.exception(
                'MemoryError while processing mail with ID %r and Msg-Id %r. Consider raising the --limit-memory-hard startup option',
                mail.id, mail.message_id)
            raise
        except psycopg2.Error:
            # If an error with the database occurs, chances are that the cursor is unusable.
            # This will lead to an `psycopg2.InternalError` being raised when trying to write
            # `state`, shadowing the original exception and forbid a retry on concurrent
            # update. Let's bubble it.
            raise
        except Exception as e:
            failure_reason = tools.ustr(e)
            _logger.exception('failed sending mail (id: %s) due to %s',
                              mail.id, failure_reason)
            mail.write({
                'state': 'exception',
                'failure_reason': failure_reason
            })
            mail._postprocess_sent_message_v9(mail_sent=False)
            if raise_exception:
                if isinstance(e, AssertionError):
                    # get the args of the original error, wrap into a value and throw a MailDeliveryException
                    # that is an except_orm, with name and value as arguments
                    value = '. '.join(e.args)
                    raise MailDeliveryException(
                        _("Mail Delivery Failed"), value)
                raise

        if auto_commit is True:
            self._cr.commit()
    return True
def fields_view_get(self, view_id=None, view_type='form', toolbar=False,
                    submenu=False):
    """Build the mass-editing wizard form dynamically.

    When the context carries ``mass_editing_object`` (a mass.object
    id), the inherited view is replaced by a generated form: for every
    configured field a ``selection__<field>`` pseudo-field chooses the
    action (Set / Remove / Add for m2m) and the field itself holds the
    new value, hidden via ``attrs`` when 'Remove' is selected.
    Returns the usual fields_view_get result with 'arch' and 'fields'
    overwritten.
    """
    result =\
        super(MassEditingWizard, self).fields_view_get(view_id=view_id,
                                                       view_type=view_type,
                                                       toolbar=toolbar,
                                                       submenu=submenu)
    context = self._context
    if context.get('mass_editing_object'):
        mass_obj = self.env['mass.object']
        editing_data = mass_obj.browse(context.get('mass_editing_object'))
        all_fields = {}
        # Root form + a spacer group, then the group that receives the
        # per-field widgets.
        xml_form = etree.Element('form',
                                 {'string': tools.ustr(editing_data.name)})
        xml_group = etree.SubElement(xml_form, 'group', {
            'colspan': '6',
            'col': '6',
        })
        etree.SubElement(xml_group, 'label', {
            'string': '',
            'colspan': '2',
        })
        xml_group = etree.SubElement(xml_form, 'group', {
            'colspan': '6',
            'col': '6',
        })
        model_obj = self.env[context.get('active_model')]
        field_info = model_obj.fields_get()
        for field in editing_data.field_ids:
            if field.ttype == "many2many":
                # m2m keeps the real field definition and adds an
                # action selector with an extra 'Add' option.
                all_fields[field.name] = field_info[field.name]
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove_m2m', 'Remove'),
                                  ('add', 'Add')]
                }
                xml_group = etree.SubElement(xml_group, 'group', {
                    'colspan': '6',
                    'col': '6',
                })
                etree.SubElement(
                    xml_group, 'separator', {
                        'string': field_info[field.name]['string'],
                        'colspan': '6',
                    })
                etree.SubElement(
                    xml_group, 'field', {
                        'name': "selection__" + field.name,
                        'colspan': '6',
                        'nolabel': '1'
                    })
                etree.SubElement(
                    xml_group, 'field', {
                        'name': field.name,
                        'colspan': '6',
                        'nolabel': '1',
                        'attrs': ("{'invisible': [('selection__" +
                                  field.name + "', '=', 'remove_m2m')]}"),
                    })
            elif field.ttype == "one2many":
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')],
                }
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                    'relation': field.relation,
                }
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '4',
                })
                etree.SubElement(
                    xml_group, 'field', {
                        'name': field.name,
                        'colspan': '6',
                        'nolabel': '1',
                        'attrs': ("{'invisible':[('selection__" +
                                  field.name + "', '=', 'remove_o2m')]}"),
                    })
            elif field.ttype == "many2one":
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')],
                }
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                    'relation': field.relation,
                }
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '2',
                })
                etree.SubElement(
                    xml_group, 'field', {
                        'name': field.name,
                        'nolabel': '1',
                        'colspan': '4',
                        'attrs': ("{'invisible':[('selection__" +
                                  field.name + "', '=', 'remove')]}"),
                    })
            elif field.ttype == "char":
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')],
                }
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                    'size': field.size or 256,
                }
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '2',
                })
                etree.SubElement(
                    xml_group, 'field', {
                        'name': field.name,
                        'nolabel': '1',
                        'attrs': ("{'invisible':[('selection__" +
                                  field.name + "','=','remove')]}"),
                        'colspan': '4',
                    })
            elif field.ttype == 'selection':
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')]
                }
                etree.SubElement(xml_group, 'field', {
                    'name': "selection__" + field.name,
                    'colspan': '2',
                })
                etree.SubElement(
                    xml_group, 'field', {
                        'name': field.name,
                        'nolabel': '1',
                        'colspan': '4',
                        'attrs': ("{'invisible':[('selection__" +
                                  field.name + "', '=', 'remove')]}"),
                    })
                # Copy the selection options from the real field.
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                    'selection': field_info[field.name]['selection'],
                }
            else:
                # Remaining types (text, numeric, boolean, ...).
                all_fields[field.name] = {
                    'type': field.ttype,
                    'string': field.field_description,
                }
                all_fields["selection__" + field.name] = {
                    'type': 'selection',
                    'string': field_info[field.name]['string'],
                    'selection': [('set', 'Set'), ('remove', 'Remove')]
                }
                if field.ttype == 'text':
                    # text gets its own sub-group with a separator.
                    xml_group = etree.SubElement(xml_group, 'group', {
                        'colspan': '6',
                        'col': '6',
                    })
                    etree.SubElement(
                        xml_group, 'separator', {
                            'string': all_fields[field.name]['string'],
                            'colspan': '6',
                        })
                    etree.SubElement(
                        xml_group, 'field', {
                            'name': "selection__" + field.name,
                            'colspan': '6',
                            'nolabel': '1',
                        })
                    etree.SubElement(
                        xml_group, 'field', {
                            'name': field.name,
                            'colspan': '6',
                            'nolabel': '1',
                            'attrs': ("{'invisible':[('selection__" +
                                      field.name + "','=','remove')]}"),
                        })
                else:
                    all_fields["selection__" + field.name] = {
                        'type': 'selection',
                        'string': field_info[field.name]['string'],
                        'selection': [('set', 'Set'), ('remove', 'Remove')]
                    }
                    etree.SubElement(xml_group, 'field', {
                        'name': "selection__" + field.name,
                        'colspan': '2',
                    })
                    etree.SubElement(
                        xml_group, 'field', {
                            'name': field.name,
                            'nolabel': '1',
                            'attrs': ("{'invisible':[('selection__" +
                                      field.name + "','=','remove')]}"),
                            'colspan': '4',
                        })
        etree.SubElement(xml_form, 'separator', {
            'string': '',
            'colspan': '6',
            'col': '6',
        })
        # Footer: Apply / Close buttons.
        xml_group3 = etree.SubElement(xml_form, 'footer', {})
        etree.SubElement(
            xml_group3, 'button', {
                'string': 'Apply',
                'class': 'btn-primary',
                'type': 'object',
                'name': 'action_apply',
            })
        etree.SubElement(xml_group3, 'button', {
            'string': 'Close',
            'class': 'btn-default',
            'special': 'cancel',
        })
        root = xml_form.getroottree()
        result['arch'] = etree.tostring(root)
        result['fields'] = all_fields
    return result
def signal_sign(self, cr, uid, ids, context=None):
    """Sign the electronic-invoice record(s).

    Depending on the record ``type``: 'cbb' needs no attachment,
    'cfd' reuses the already-attached input file, and 'cfdi'
    dispatches to the PAC driver returned by get_driver_fc_sign() to
    sign the invoice XML and attach the result. Each processed record
    gets its result written back and the 'action_sign' workflow signal
    fired.

    :param ids: id or list of ids of the records to sign
    :return: True on success, False when an exception occurred (the
        full traceback is saved in the record's ``msj`` field)
    """
    try:
        if context is None:
            context = {}
        ids = isinstance(ids, (int, long)) and [ids] or ids
        invoice_obj = self.pool.get('account.invoice')
        attachment_obj = self.pool.get('ir.attachment')
        attach = ''
        index_xml = ''
        msj = ''
        for data in self.browse(cr, uid, ids, context=context):
            invoice = data.invoice_id
            type = data.type
            wf_service = netsvc.LocalService("workflow")
            attach_v3_2 = data.file_input and data.file_input.id or False
            if 'cbb' in type:
                msj = _("Signed")
            if 'cfd' in type and not 'cfdi' in type:
                # CFD 2.2: the previously attached input file is the
                # signed document.
                attach = data.file_input and data.file_input.id or False
                index_xml = data.file_xml_sign_index or False
                msj = _("Attached Successfully XML CFD 2.2\n")
            if 'cfdi' in type:
                # upload file in custom module for pac
                type__fc = self.get_driver_fc_sign()
                if type in type__fc.keys():
                    fname_invoice = invoice.fname_invoice and invoice.fname_invoice + \
                        '.xml' or ''
                    fname, xml_data = invoice_obj._get_facturae_invoice_xml_data(
                        cr, uid, [invoice.id], context=context)
                    fdata = base64.encodestring(xml_data)
                    # Delegate the actual signing to the PAC driver.
                    res = type__fc[type](cr, uid, [data.id], fdata,
                                         context=context)
                    msj = tools.ustr(res.get('msg', False))
                    index_xml = res.get('cfdi_xml', False)
                    data_attach = {
                        'name': fname_invoice,
                        'datas':
                        base64.encodestring(res.get('cfdi_xml', False)),
                        'datas_fname': fname_invoice,
                        'description': 'Factura-E XML CFD-I SIGN',
                        'res_model': 'account.invoice',
                        'res_id': invoice.id,
                    }
                    # Context, because use a variable type of our code but we
                    # dont need it.
                    attach = attachment_obj.create(cr, uid, data_attach,
                                                   context=None)
                    if attach_v3_2:
                        # Detach the superseded v3.2 input file from the
                        # invoice.
                        cr.execute(
                            """UPDATE ir_attachment
                            SET res_id = Null
                            WHERE id = %s""", (attach_v3_2, ))
                else:
                    # NOTE(review): interpolating inside _() defeats
                    # translation lookup; should be _("...") % type.
                    msj += _("Unknow driver for %s" % (type))
            # NOTE(review): writes to all ``ids`` on every loop pass
            # (not just data.id) — confirm intended for multi-id calls.
            self.write(cr, uid, ids, {
                'file_xml_sign': attach or False,
                'last_date': time.strftime('%Y-%m-%d %H:%M:%S'),
                'msj': msj,
                'file_xml_sign_index': index_xml
            }, context=context)
            wf_service.trg_validate(uid, self._name, data.id,
                                    'action_sign', cr)
        return True
    except Exception, e:
        # Best-effort error reporting: store the traceback on the
        # record and log it, then signal failure to the caller.
        error = tools.ustr(traceback.format_exc())
        self.write(cr, uid, ids, {'msj': error}, context=context)
        _logger.error(error)
        return False
def create(self, cr, uid, ids, datas, context):
    """Render the "Survey Analysis" PDF report as a raw RML document.

    Builds one RML ``<story>`` per selected survey: a summary table
    (title, started / completed counts) followed, per page and per
    question, by a result table whose layout depends on the question
    type (matrix, multiple choice, textbox, comment, rating scale,
    drop-down matrix, numerical).

    :param cr: database cursor
    :param uid: id of the current user
    :param ids: survey ids (overridden by ``datas['form']['survey_ids']``
                when present)
    :param datas: report data; may carry ``form.survey_ids`` and
                  ``report_type``
    :param context: context dictionary
    :return: tuple ``(document_content, report_type)``
    """
    surv_obj = pooler.get_pool(cr.dbname).get('survey')
    user_obj = pooler.get_pool(cr.dbname).get('res.users')
    rml_obj = report_sxw.rml_parse(cr, uid, surv_obj._name, context)
    company = user_obj.browse(cr, uid, [uid], context)[0].company_id

    # Document header: page template and print date / company banner.
    rml = """<document filename="Survey Analysis Report.pdf">
            <template pageSize="(595.0,842.0)" title="Survey Analysis" author="OpenERP S.A.([email protected])" allowSplitting="20">
                <pageTemplate>
                    <frame id="first" x1="1.3cm" y1="1.5cm" width="18.4cm" height="26.5cm"/>
                    <pageGraphics>
                        <fill color="black"/>
                        <stroke color="black"/>
                        <setFont name="DejaVu Sans" size="8"/>
                        <drawString x="1.3cm" y="28.3cm"> """ + to_xml(rml_obj.formatLang(time.strftime("%Y-%m-%d %H:%M:%S"), date_time=True)) + """</drawString>
                        <setFont name="DejaVu Sans Bold" size="10"/>
                        <drawString x="9.8cm" y="28.3cm">""" + to_xml(company.name) + """</drawString>
                        <stroke color="#000000"/>
                        <lines>1.3cm 28.1cm 20cm 28.1cm</lines>
                    </pageGraphics>
                </pageTemplate>
            </template>
            <stylesheet>
                <blockTableStyle id="Table1">
                    <blockAlignment value="LEFT"/>
                    <blockValign value="TOP"/>
                    <lineStyle kind="LINEBELOW" colorName="#e6e6e6"/>
                </blockTableStyle>
                <blockTableStyle id="Table2">
                    <blockAlignment value="LEFT"/>
                    <blockValign value="TOP"/>
                </blockTableStyle>
                <blockTableStyle id="Table3">
                    <blockAlignment value="LEFT"/>
                    <lineStyle kind="LINEBELOW" colorName="#e6e6e6" start="1,0" stop="2,-1"/>
                    <blockValign value="TOP"/>
                </blockTableStyle>
                <blockTableStyle id="Table4">
                    <blockAlignment value="LEFT"/>
                    <blockValign value="TOP"/>
                    <lineStyle kind="LINEBELOW" colorName="#000000" start="0,-1" stop="1,-1"/>
                </blockTableStyle>
                <blockTableStyle id="Table5">
                    <blockAlignment value="LEFT"/>
                    <blockValign value="TOP"/>
                    <lineStyle kind="LINEBELOW" colorName="#8f8f8f" start="0,-1" stop="1,-1"/>
                </blockTableStyle>
                <blockTableStyle id="Table_heading">
                    <blockAlignment value="LEFT"/>
                    <blockValign value="TOP"/>
                    <lineStyle kind="LINEBEFORE" colorName="#e6e6e6" start="0,0" stop="-1,-1"/>
                    <lineStyle kind="LINEAFTER" colorName="#e6e6e6" start="0,0" stop="-1,-1"/>
                    <lineStyle kind="LINEBELOW" colorName="#e6e6e6" start="0,0" stop="-1,-1"/>
                    <lineStyle kind="LINEABOVE" colorName="#e6e6e6" start="0,0" stop="-1,-1"/>
                </blockTableStyle>
                <blockTableStyle id="Table_head_2">
                    <blockAlignment value="LEFT"/>
                    <blockValign value="TOP"/>
                    <lineStyle kind="LINEBEFORE" colorName="#e6e6e6" start="0,0" stop="-1,-1"/>
                    <lineStyle kind="LINEAFTER" colorName="#e6e6e6" start="0,0" stop="-1,-1"/>
                    <lineStyle kind="LINEBELOW" colorName="#e6e6e6" start="0,0" stop="-1,-1"/>
                    <lineStyle kind="LINEABOVE" colorName="#e6e6e6" start="0,0" stop="-1,-1"/>
                </blockTableStyle>
                <initialize>
                    <paraStyle name="all" alignment="justify"/>
                </initialize>
                <paraStyle name="answer_right" alignment="RIGHT" fontName="Helvetica" fontSize="09.0" leftIndent="2.0"/>
                <paraStyle name="Standard1" fontName="Helvetica-Bold" alignment="RIGHT" fontSize="09.0"/>
                <paraStyle name="Standard" alignment="LEFT" fontName="Helvetica-Bold" fontSize="11.0"/>
                <paraStyle name="header1" fontName="Helvetica" fontSize="11.0"/>
                <paraStyle name="response" fontName="Helvetica-Oblique" fontSize="9.5"/>
                <paraStyle name="response-bold" fontName="Helvetica-Bold" fontSize="9" alignment="RIGHT" />
                <paraStyle name="page" fontName="Helvetica" fontSize="11.0" leftIndent="0.0"/>
                <paraStyle name="question" fontName="Helvetica-BoldOblique" fontSize="10.0" leftIndent="3.0"/>
                <paraStyle name="answer_bold" fontName="Helvetica-Bold" fontSize="09.0" leftIndent="2.0"/>
                <paraStyle name="answer" fontName="Helvetica" fontSize="09.0" leftIndent="2.0"/>
                <paraStyle name="title" fontName="Helvetica" fontSize="20.0" leading="15" spaceBefore="6.0" spaceAfter="6.0" alignment="CENTER"/>
                <paraStyle name="terp_tblheader_General_Centre" fontName="Helvetica-Bold" fontSize="9.0" leading="10" alignment="CENTER" spaceBefore="6.0" spaceAfter="6.0"/>
                <paraStyle name="terp_default_Centre_8" fontName="Helvetica" fontSize="9.0" leading="10" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/>
                <paraStyle name="terp_default_Center_heading" fontName="Helvetica-Bold" fontSize="9.0" leading="10" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/>
                <paraStyle name="P2" fontName="Helvetica" fontSize="14.0" leading="15" spaceBefore="6.0" spaceAfter="6.0"/>
            </stylesheet>
            <images/>
            """
    # The wizard form, when present, overrides the ids to print.
    if datas.has_key('form') and datas['form']['survey_ids']:
        ids = datas['form']['survey_ids']
    for survey in surv_obj.browse(cr, uid, ids):
        # Per-survey summary header: title + started/completed counts.
        rml += """<story>
            <para style="title">Answers Summary</para>
            <para style="Standard"><font></font></para>
            <para style="P2">
                <font color="white"> </font>
            </para>
            <blockTable colWidths="280.0,100.0,120.0" style="Table_heading">
                <tr>
                    <td> <para style="terp_tblheader_General_Centre">Survey Title </para> </td>
                    <td> <para style="terp_tblheader_General_Centre">Total Started Survey </para> </td>
                    <td> <para style="terp_tblheader_General_Centre">Total Completed Survey </para> </td>
                </tr>
            </blockTable>
            <blockTable colWidths="280.0,100.0,120.0" style="Table_head_2">
                <tr>
                    <td> <para style="terp_default_Centre_8">""" + to_xml(tools.ustr(survey.title)) + """</para> </td>
                    <td> <para style="terp_default_Centre_8">""" + str(survey.tot_start_survey) + """</para> </td>
                    <td> <para style="terp_default_Centre_8">""" + str(survey.tot_comp_survey) + """</para> </td>
                </tr>
            </blockTable>
            <para style="P2">
                <font color="white"> </font>
            </para>"""
        for page in survey.page_ids:
            rml += """ <blockTable colWidths="500" style="Table4">
                <tr> <td><para style="page">Page :- """ + to_xml(tools.ustr(page.title)) + """</para></td> </tr>
                </blockTable>"""
            for que in page.question_ids:
                rml += """<blockTable colWidths="500" style="Table5">
                    <tr> <td><para style="question">""" + to_xml(tools.ustr(que.question)) + """</para></td> </tr>
                    </blockTable>"""
                # Column widths are recomputed per question; the first
                # (label) column is fixed at 200pt and the remaining 300pt
                # are shared among the answer columns.
                cols_widhts = []
                if que.type in ['matrix_of_choices_only_one_ans', 'matrix_of_choices_only_multi_ans']:
                    # One row per answer choice, one column per column
                    # heading, plus a trailing "Answer Count" column.
                    cols_widhts.append(200)
                    for col in range(0, len(que.column_heading_ids) + 1):
                        cols_widhts.append(float(300 / (len(que.column_heading_ids) + 1)))
                    colWidths = ",".join(map(tools.ustr, cols_widhts))
                    matrix_ans = [(0, '')]
                    for col in que.column_heading_ids:
                        if col.title not in matrix_ans:
                            matrix_ans.append((col.id, col.title))
                    rml += """<blockTable colWidths=" """ + colWidths + """ " style="Table1"><tr>"""
                    for mat_col in range(0, len(matrix_ans)):
                        rml += """<td><para style="response">""" + to_xml(tools.ustr(matrix_ans[mat_col][1])) + """</para></td>"""
                    rml += """<td><para style="response-bold">Answer Count</para></td>
                        </tr>"""
                    last_col = cols_widhts[-1]
                    for ans in que.answer_choice_ids:
                        rml += """<tr><td><para style="answer">""" + to_xml(tools.ustr(ans.answer)) + """</para></td>"""
                        cr.execute("select count(id) from survey_response_answer sra where sra.answer_id = %s", (ans.id,))
                        tot_res = cr.fetchone()[0]
                        cr.execute("select count(id) ,sra.column_id from survey_response_answer sra where sra.answer_id=%s group by sra.column_id", (ans.id,))
                        calc_res = cr.dictfetchall()
                        for mat_col in range(1, len(matrix_ans)):
                            percantage = 0.0
                            cal_count = 0
                            for cal in calc_res:
                                if cal['column_id'] == matrix_ans[mat_col][0]:
                                    cal_count = cal['count']
                            if tot_res:
                                percantage = round(float(cal_count) * 100 / tot_res, 2)
                            # Bold style when there is at least one answer in
                            # this cell, plain style otherwise.
                            if percantage:
                                rml += """<td color="#FFF435"><para style="answer_bold">""" + tools.ustr(percantage) + "% (" + tools.ustr(cal_count) + """)</para></td>"""
                            else:
                                rml += """<td color="#FFF435"><para style="answer">""" + tools.ustr(percantage) + "% (" + tools.ustr(cal_count) + """)</para></td>"""
                        rml += """<td><para style="answer_right">""" + tools.ustr(tot_res) + """</para></td>
                            </tr>"""
                    rml += """</blockTable>"""
                    if que.is_comment_require:
                        cr.execute("select count(id) from survey_response_line where question_id = %s and comment != ''", (que.id,))
                        tot_res = cr.fetchone()[0]
                        rml += """<blockTable colWidths=" """ + str(500 - last_col) + "," + str(last_col) + """ " style="Table1"><tr><td><para style="answer_right">""" + to_xml(tools.ustr(que.comment_label)) + """</para></td>
                            <td><para style="answer">""" + tools.ustr(tot_res) + """</para></td></tr></blockTable>"""
                elif que.type in ['multiple_choice_only_one_ans', 'multiple_choice_multiple_ans', 'multiple_textboxes', 'date_and_time', 'date', 'multiple_textboxes_diff_type']:
                    # One row per answer choice with a drawn percentage bar.
                    rml += """<blockTable colWidths="240.0,210,50.0" style="Table1">"""
                    rml += """ <tr>
                        <td> <para style="Standard"> </para></td>
                        <td> <para style="terp_default_Center_heading">Answer Percentage</para></td>
                        <td> <para style="response-bold">Answer Count</para></td>
                        </tr>"""
                    for ans in que.answer_choice_ids:
                        # Bar width: 7cm corresponds to 100%.
                        progress = ans.average * 7 / 100
                        rml += """<tr><td><para style="answer">""" + to_xml(tools.ustr(ans.answer)) + """</para></td>
                            <td>
                            <illustration>
                            <stroke color="lightslategray"/>
                            <rect x="0.1cm" y="-0.45cm" width="7.2 cm" height="0.5cm" fill="no" stroke="yes" round="0.1cm"/> """
                        if progress:
                            rml += """<fill color="lightsteelblue"/>
                                <rect x="0.2cm" y="-0.35cm" width='""" + tools.ustr(str(float(progress)) + 'cm') + """' height="0.3cm" fill="yes" stroke="no" round="0.1cm"/>"""
                        rml += """ <fill color="black"/>
                            <setFont name="Helvetica" size="9"/>
                            <drawString x="3.2cm" y="-0.30cm">""" + tools.ustr(ans.average) + """%</drawString></illustration>
                            </td>
                            <td><para style="answer_right">""" + tools.ustr(ans.response) + """</para></td></tr>"""
                    rml += """</blockTable>"""
                    if que.is_comment_require:
                        # if que.make_comment_field:
                        #     cr.execute("select count(id) from survey_response_line where question_id = %s and comment != ''", (que.id,))
                        #     tot_res = cr.fetchone()[0]
                        #     tot_avg = 0.00
                        #     if que.tot_resp:
                        #         tot_avg = round(float(tot_res * 100)/ que.tot_resp,2)
                        #     rml+="""<blockTable colWidths="280.0,120,100.0" style="Table1"><tr><td><para style="answer">""" +to_xml(tools.ustr(que.comment_label)) + """</para></td>
                        #         <td><para style="answer">""" + str(tot_avg) + """%</para></td>
                        #         <td><para style="answer">""" + tools.ustr(tot_res) + """</para></td></tr></blockTable>"""
                        # else:
                        cr.execute("select count(id) from survey_response_line where question_id = %s and comment != ''", (que.id,))
                        tot_res = cr.fetchone()[0]
                        rml += """<blockTable colWidths="450.0,50.0" style="Table1"><tr><td><para style="answer_right">""" + to_xml(tools.ustr(que.comment_label)) + """</para></td>
                            <td><para style="answer_right">""" + tools.ustr(tot_res) + """</para></td></tr></blockTable>"""
                elif que.type in ['single_textbox']:
                    cr.execute("select count(id) from survey_response_line where question_id = %s and single_text!=''", (que.id,))
                    rml += """<blockTable colWidths="400.0,100.0" style="Table1">
                        <tr>
                        <td> <para style="Standard"> </para></td>
                        <td> <para style="response-bold">Answer Count</para></td>
                        </tr>
                        <tr><td><para style="answer"></para></td>
                        <td><para style="answer_right">""" + tools.ustr(cr.fetchone()[0]) + """ </para></td></tr>
                        </blockTable>"""
                elif que.type in ['comment']:
                    cr.execute("select count(id) from survey_response_line where question_id = %s and comment !=''", (que.id,))
                    rml += """<blockTable colWidths="400.0,100.0" style="Table1">
                        <tr>
                        <td> <para style="Standard"> </para></td>
                        <td> <para style="response-bold">Answer Count</para></td>
                        </tr>
                        <tr><td><para style="answer"></para></td>
                        <td><para style="answer_right">""" + tools.ustr(cr.fetchone()[0]) + """ </para></td></tr>
                        </blockTable>"""
                elif que.type in ['rating_scale']:
                    # Matrix of answers x rating columns with a computed
                    # weighted rating average per answer row.
                    cols_widhts.append(200)
                    for col in range(0, len(que.column_heading_ids) + 2):
                        cols_widhts.append(float(300 / (len(que.column_heading_ids) + 2)))
                    colWidths = ",".join(map(tools.ustr, cols_widhts))
                    matrix_ans = [(0, '')]
                    for col in que.column_heading_ids:
                        if col.title not in matrix_ans:
                            matrix_ans.append((col.id, col.title))
                    rml += """<blockTable colWidths=" """ + colWidths + """ " style="Table1"><tr>"""
                    for mat_col in range(0, len(matrix_ans)):
                        rml += """<td><para style="response">""" + to_xml(tools.ustr(matrix_ans[mat_col][1])) + """</para></td>"""
                    rml += """<td><para style="response-bold">Rating Average</para></td>
                        <td><para style="response-bold">Answer Count</para></td>
                        </tr>"""
                    for ans in que.answer_choice_ids:
                        rml += """<tr><td><para style="answer">""" + to_xml(tools.ustr(ans.answer)) + """</para></td>"""
                        res_count = 0
                        rating_weight_sum = 0
                        for mat_col in range(1, len(matrix_ans)):
                            # NOTE(review): the quoted '%s' placeholder used
                            # together with a parameter tuple looks wrong
                            # (psycopg2 already quotes parameters), and the
                            # stray "+ group by" below appears to be inside
                            # the SQL string — both look like latent bugs;
                            # confirm against a live database before touching.
                            cr.execute("select count(sra.answer_id) from survey_response_line sr, survey_response_answer sra\
                                where sr.id = sra.response_id and sra.answer_id = %s and sra.column_id ='%s'", (ans.id, matrix_ans[mat_col][0]))
                            tot_res = cr.fetchone()[0]
                            cr.execute("select count(sra.answer_id),sqc.rating_weight from survey_response_line sr, survey_response_answer sra ,\
                                survey_question_column_heading sqc where sr.id = sra.response_id and \
                                sqc.question_id = sr.question_id and sra.answer_id = %s and sqc.title ='%s'\
                                + group by sra.answer_id,sqc.rating_weight", (ans.id, matrix_ans[mat_col][1]))
                            col_weight = cr.fetchone()
                            if not col_weight:
                                col_weight = (0, 0)
                            elif not col_weight[1]:
                                col_weight = (col_weight[0], 0)
                            res_count = col_weight[0]
                            if tot_res and res_count:
                                rating_weight_sum += int(col_weight[1]) * tot_res
                                tot_per = round((float(tot_res) * 100) / int(res_count), 2)
                            else:
                                tot_per = 0.0
                            if tot_res:
                                rml += """<td><para style="answer_bold">""" + tools.ustr(tot_per) + "%(" + tools.ustr(tot_res) + """)</para></td>"""
                            else:
                                rml += """<td><para style="answer">""" + tools.ustr(tot_per) + "%(" + tools.ustr(tot_res) + """)</para></td>"""
                        # Weighted average over the responses of the last
                        # processed column set.
                        percantage = 0.00
                        if res_count:
                            percantage = round((float(rating_weight_sum) / res_count), 2)
                        rml += """<td><para style="answer_right">""" + tools.ustr(percantage) + """</para></td>
                            <td><para style="answer_right">""" + tools.ustr(res_count) + """</para></td></tr>"""
                    rml += """</blockTable>"""
                elif que.type in ['matrix_of_drop_down_menus']:
                    # One sub-table per column heading; columns are the
                    # menu choices declared on that heading.
                    for column in que.column_heading_ids:
                        rml += """<blockTable colWidths="500" style="Table1"><tr>
                            <td><para style="answer">""" + to_xml(tools.ustr(column.title)) + """</para></td></tr></blockTable>"""
                        menu_choices = column.menu_choice.split('\n')
                        cols_widhts = []
                        cols_widhts.append(200)
                        for col in range(0, len(menu_choices) + 1):
                            cols_widhts.append(float(300 / (len(menu_choices) + 1)))
                        colWidths = ",".join(map(tools.ustr, cols_widhts))
                        rml += """<blockTable colWidths=" """ + colWidths + """ " style="Table1"><tr>
                            <td><para style="response"></para></td>"""
                        for menu in menu_choices:
                            rml += """<td><para style="response">""" + to_xml(tools.ustr(menu)) + """</para></td>"""
                        rml += """<td><para style="response-bold">Answer Count</para></td></tr>"""
                        # NOTE(review): quoted '%s' with a parameter tuple,
                        # same suspected placeholder bug as in rating_scale.
                        cr.execute("select count(id), sra.answer_id from survey_response_answer sra \
                            where sra.column_id='%s' group by sra.answer_id ", (column.id,))
                        res_count = cr.dictfetchall()
                        cr.execute("select count(sra.id),sra.value_choice, sra.answer_id, sra.column_id from survey_response_answer sra \
                            where sra.column_id='%s' group by sra.value_choice ,sra.answer_id, sra.column_id", (column.id,))
                        calc_percantage = cr.dictfetchall()
                        for ans in que.answer_choice_ids:
                            rml += """<tr><td><para style="answer_right">""" + to_xml(tools.ustr(ans.answer)) + """</para></td>"""
                            for mat_col in range(0, len(menu_choices)):
                                calc = 0
                                response = 0
                                for res in res_count:
                                    if res['answer_id'] == ans.id:
                                        response = res['count']
                                for per in calc_percantage:
                                    if ans.id == per['answer_id'] and menu_choices[mat_col] == per['value_choice']:
                                        calc = per['count']
                                percantage = 0.00
                                if calc and response:
                                    percantage = round((float(calc) * 100) / response, 2)
                                if calc:
                                    rml += """<td><para style="answer_bold">""" + tools.ustr(percantage) + "% (" + tools.ustr(calc) + """)</para></td>"""
                                else:
                                    rml += """<td><para style="answer">""" + tools.ustr(percantage) + "% (" + tools.ustr(calc) + """)</para></td>"""
                            response = 0
                            for res in res_count:
                                if res['answer_id'] == ans.id:
                                    response = res['count']
                            rml += """<td><para style="answer_right">""" + tools.ustr(response) + """</para></td></tr>"""
                        rml += """</blockTable>"""
                elif que.type in ['numerical_textboxes']:
                    rml += """<blockTable colWidths="240.0,20,100.0,70,70.0" style="Table1">
                        <tr>
                        <td> <para style="Standard"> </para></td>
                        <td> <para style="Standard"> </para></td>
                        <td> <para style="response">Answer Average</para></td>
                        <td> <para style="response">Answer Total</para></td>
                        <td> <para style="response-bold">Answer Count</para></td>
                        </tr>"""
                    for ans in que.answer_choice_ids:
                        cr.execute("select answer from survey_response_answer where answer_id=%s group by answer", (ans.id,))
                        tot_res = cr.dictfetchall()
                        total = 0
                        for tot in tot_res:
                            total += int(tot['answer'])
                        per = 0.00
                        if len(tot_res):
                            per = round((float(total) / len(tot_res)), 2)
                        rml += """<tr><td><para style="answer">""" + to_xml(tools.ustr(ans.answer)) + """</para></td>
                            <td> <para style="Standard"> </para></td>
                            <td> <para style="answer">""" + tools.ustr(per) + """</para></td>
                            <td><para style="answer">""" + tools.ustr(total) + """</para></td>
                            <td><para style="answer_right">""" + tools.ustr(len(tot_res)) + """</para></td></tr>"""
                    rml += """</blockTable>"""
                # Per-question footer: answered vs skipped counts.
                rml += """<blockTable colWidths="300,100,100.0" style="Table3">
                    <tr>
                    <td><para style="Standard1"></para></td>
                    <td><para style="Standard1">Answered Question</para></td>
                    <td><para style="Standard1">""" + tools.ustr(que.tot_resp) + """</para></td>
                    </tr>
                    <tr>
                    <td><para style="Standard1"></para></td>
                    <td><para style="Standard1">Skipped Question</para></td>
                    <td><para style="Standard1">""" + tools.ustr(survey.tot_start_survey - que.tot_resp) + """</para></td>
                    </tr>
                    </blockTable>"""
        rml += """</story>"""
    rml += """</document>"""
    report_type = datas.get('report_type', 'pdf')
    create_doc = self.generators[report_type]
    self.internal_header = True
    pdf = create_doc(rml, title=self.title)
    return (pdf, report_type)
def _create_table(self, uid, ids, fields, fields_order, results, context, title=''):
    """Build the report table as an XML tree and render it through XSLT.

    Produces a ``<report>`` document (config, header, rows), transforms
    it with ``base_custom/report/custom_new.xsl`` into RML and renders
    it via ``render.rml``.  ``results`` rows are mutated in place while
    formatting (many2one labels, selection labels, locale-formatted
    dates/times, fixed-precision floats); numeric columns are summed
    into a trailing "Total" row.

    :param uid: id of the current user
    :param ids: record ids being printed (not used directly here)
    :param fields: field definitions, keyed by field name
    :param fields_order: ordered list of field names (column order)
    :param results: list of row dicts; may carry grouping metadata keys
                    ``__group``, ``__grouped_by``, ``__no_leaf``,
                    ``__context``
    :param context: context dictionary
    :param title: optional report header title
    :return: True (the rendered document is kept on ``self.obj``)
    """
    # A4 landscape, in millimetres; 2.8346 converts mm to points.
    pageSize = [297.0, 210.0]
    new_doc = etree.Element("report")
    config = etree.SubElement(new_doc, 'config')

    def _append_node(name, text):
        # Append a simple <name>text</name> element to the config node.
        n = etree.SubElement(config, name)
        n.text = text

    #_append_node('date', time.strftime('%d/%m/%Y'))
    # Use the locale's date format, forcing 4-digit years.
    _append_node(
        'date',
        time.strftime(
            str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
    _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
    _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346, ))
    _append_node('PageHeight', '%.2f' % (pageSize[1] * 2.8346, ))
    _append_node('report-header', title)
    _append_node(
        'company',
        pooler.get_pool(self.cr.dbname).get('res.users').browse(
            self.cr, uid, uid).company_id.name)
    # self.dir is the text direction ('ltr'/'rtl'); it decides below
    # which end column carries the "Total" label.
    _append_node('lang', self.dir)
    rpt_obj = pooler.get_pool(self.cr.dbname).get('res.users')
    rml_obj = report_sxw.rml_parse(self.cr, uid, rpt_obj._name, context)
    _append_node(
        'header-date',
        str(rml_obj.formatLang(time.strftime("%Y-%m-%d"), date=True)) +
        ' ' + str(time.strftime("%H:%M")))
    # Column width computation: fixed-width 60pt for date/time/numeric
    # columns, the remaining width shared among text columns in
    # proportion to their declared field size.
    l = []
    t = 0
    strmax = (pageSize[0] - 40) * 2.8346
    temp = []   # temp[i] == 1 marks a numeric (summable) column
    tsum = []   # running totals per column
    for i in range(0, len(fields_order)):
        temp.append(0)
        tsum.append(0)
    ince = -1
    for f in fields_order:
        s = 0
        ince += 1
        if fields[f]['type'] in ('date', 'time', 'datetime', 'float', 'integer'):
            s = 60
            strmax -= s
            if fields[f]['type'] in ('float', 'integer'):
                temp[ince] = 1
        else:
            t += fields[f].get('size', 80) / 28 + 1
        l.append(s)
    for pos in range(len(l)):
        if not l[pos]:
            s = fields[fields_order[pos]].get('size', 80) / 28 + 1
            l[pos] = strmax * s / t
    _append_node('tableSize', ','.join(map(str, l)))
    header = etree.SubElement(new_doc, 'header')
    for f in fields_order:
        field = etree.SubElement(header, 'field')
        field.text = tools.ustr(fields[f]['string'] or '')
    lines = etree.SubElement(new_doc, 'lines')
    for line in results:
        node_line = etree.SubElement(lines, 'row')
        count = -1
        for f in fields_order:
            float_flag = 0
            count += 1
            # many2one values arrive as (id, label) pairs; keep the label.
            if fields[f]['type'] == 'many2one' and line[f]:
                if not line.get('__group'):
                    line[f] = line[f][1]
            # Replace selection keys by their human-readable labels.
            if fields[f]['type'] == 'selection' and line[f]:
                for key, value in fields[f]['selection']:
                    if key == line[f]:
                        line[f] = value
                        break
            # x2many fields are shown as a record count.
            if fields[f]['type'] in ('one2many', 'many2many') and line[f]:
                line[f] = '( ' + tools.ustr(len(line[f])) + ' )'
            # Floats: format at the field's declared decimal precision.
            if fields[f]['type'] == 'float' and line[f]:
                precision = (('digits' in fields[f]) and fields[f]['digits'][1]) or 2
                prec = '%.' + str(precision) + 'f'
                line[f] = prec % (line[f])
                float_flag = 1
            # Dates/times: reformat from server format to the locale
            # format (group header rows are left untouched).
            if fields[f]['type'] == 'date' and line[f]:
                new_d1 = line[f]
                if not line.get('__group'):
                    format = str(
                        locale.nl_langinfo(locale.D_FMT).replace(
                            '%y', '%Y'))
                    d1 = datetime.strptime(line[f], '%Y-%m-%d')
                    new_d1 = d1.strftime(format)
                line[f] = new_d1
            if fields[f]['type'] == 'time' and line[f]:
                new_d1 = line[f]
                if not line.get('__group'):
                    format = str(locale.nl_langinfo(locale.T_FMT))
                    d1 = datetime.strptime(line[f], '%H:%M:%S')
                    new_d1 = d1.strftime(format)
                line[f] = new_d1
            if fields[f]['type'] == 'datetime' and line[f]:
                new_d1 = line[f]
                if not line.get('__group'):
                    format = str(
                        locale.nl_langinfo(locale.D_FMT).replace(
                            '%y', '%Y')) + ' ' + str(
                                locale.nl_langinfo(locale.T_FMT))
                    d1 = datetime.strptime(line[f], '%Y-%m-%d %H:%M:%S')
                    new_d1 = d1.strftime(format)
                line[f] = new_d1
            if line.get('__group'):
                col = etree.SubElement(node_line, 'col', para='group', tree='no')
            else:
                col = etree.SubElement(node_line, 'col', para='yes', tree='no')
            # Prevent empty labels in groups
            if f == line.get('__grouped_by') and line.get(
                    '__group'
            ) and not line[f] and not float_flag and not temp[count]:
                col.text = line[f] = 'Undefined'
                col.set('tree', 'undefined')
            if line[f] != None:
                col.text = tools.ustr(line[f] or '')
                if float_flag:
                    col.set('tree', 'float')
                # Accumulate numeric columns for the Total row; id is
                # never summed.
                if line.get('__no_leaf') and temp[
                        count] == 1 and f != 'id' and not line[
                            '__context']['group_by']:
                    tsum[count] = float(tsum[count]) + float(line[f])
                if not line.get(
                        '__group') and f != 'id' and temp[count] == 1:
                    tsum[count] = float(tsum[count]) + float(line[f])
            else:
                col.text = '/'
    # Trailing Total row: formatted sums for numeric columns, '/' for
    # the rest, with the "Total" label at the text-direction edge.
    node_line = etree.SubElement(lines, 'row')
    for f in range(0, len(fields_order)):
        col = etree.SubElement(node_line, 'col', para='group', tree='no')
        col.set('tree', 'float')
        if tsum[f] != None:
            if tsum[f] != 0.0:
                digits = fields[fields_order[f]].get('digits', (16, 2))
                prec = '%%.%sf' % (digits[1], )
                total = prec % (tsum[f], )
                txt = str(total or '')
            else:
                txt = str(tsum[f] or '')
        else:
            txt = '/'
        if (self.dir == 'rtl' and f == len(fields_order) - 1) or (self.dir == 'ltr' and f == 0):
            txt = _('Total')
            col.set('tree', 'no')
        col.text = tools.ustr(txt or '')
    # Transform the XML document into RML and render it.
    transform = etree.XSLT(
        etree.parse(tools.file_open('base_custom/report/custom_new.xsl')))
    rml = etree.tostring(transform(new_doc))
    self.obj = render.rml(rml, title=self.title)
    self.obj.render()
    return True
def create(self, cr, uid, ids, datas, context): obj = pooler.get_pool(cr.dbname).get('pedido.cliente') for pedido in obj.browse(cr, uid, [datas.get('request_id', False)], context): rml = """ <document filename="test.pdf"> <template pageSize="(595.0,842.0)" title=" """ + _( "Account Invoice") + """ " author="" allowSplitting="20"> <pageTemplate id="page1"> <frame id="first" x1="20.0" y1="30.0" width="560" height="835"/> </pageTemplate> </template>""" rml += """ <stylesheet> <blockTableStyle id="MainTable"> <blockValign value="MIDDLE"/> <blockAlignment value="CENTER"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="0,0" stop="0,0" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="0,0" thickness="0.1"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="-1,0" thickness="0.1"/> </blockTableStyle> <blockTableStyle id="LEFT_RIGHT"> <blockValign value="MIDDLE"/> <blockAlignment value="CENTER"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="0,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> </blockTableStyle> <blockTableStyle id="TwoTables"> <blockValign value="MIDDLE"/> <blockAlignment value="CENTER"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="0,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="3,0" thickness="0.1"/> <lineStyle kind="LINEBELOW" colorName="#000000" start="0,-1" stop="3,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="3,0" stop="3,-1" thickness="0.1"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="5,0" stop="-1,0" thickness="0.1"/> <lineStyle kind="LINEBELOW" colorName="#000000" start="5,-1" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="5,0" stop="5,-1" thickness="0.1"/> <blockSpan 
start="1,0" stop="3,0"/> <blockSpan start="1,1" stop="3,1"/> </blockTableStyle> <blockTableStyle id="CentralTable"> <blockValign value="MIDDLE"/> <blockAlignment value="CENTER"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBELOW" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="0,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <blockSpan start="5,0" stop="6,0"/> </blockTableStyle> <blockTableStyle id="AllBorders"> <blockValign value="MIDDLE"/> <blockAlignment value="CENTER"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBELOW" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="0,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> </blockTableStyle> <blockTableStyle id="TableHeader"> <blockValign value="MIDDLE"/> <blockAlignment value="CENTER"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBELOW" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="0,-1" 
thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <blockSpan start="0,0" stop="0,0"/> <blockSpan start="2,0" stop="3,0"/> <blockSpan start="4,0" stop="4,0"/> <blockSpan start="5,0" stop="5,0"/> <blockSpan start="6,0" stop="6,0"/> <blockSpan start="7,0" stop="7,0"/> <blockSpan start="8,0" stop="8,0"/> <blockSpan start="9,0" stop="9,0"/> <blockSpan start="10,0" stop="10,0"/> </blockTableStyle> <blockTableStyle id="TableX"> <blockValign value="MIDDLE"/> <blockAlignment value="CENTER"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBELOW" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="0,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <blockSpan start="0,0" stop="1,0"/> <blockSpan start="0,1" stop="1,1"/> <blockSpan start="0,2" stop="1,2"/> <blockSpan start="2,0" stop="2,-1"/> </blockTableStyle> <blockTableStyle id="TableY"> <blockValign value="MIDDLE"/> <blockAlignment value="CENTER"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="-1,0" thickness="0.1"/> <lineStyle kind="LINEBELOW" colorName="#000000" start="0,-1" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="-1,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="-1,-1" thickness="0.1"/> </blockTableStyle> <blockTableStyle id="TableZ"> <ALIGNMENT value="LEFT"/> <blockValign value="MIDDLE"/> <blockAlignment value="LEFT"/> <lineStyle kind="LINEABOVE" colorName="#000000" start="0,0" stop="3,-1" thickness="0.1"/> <lineStyle kind="LINEBELOW" colorName="#000000" 
start="0,0" stop="3,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="3,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="4,-1" thickness="0.1"/> <lineStyle kind="LINEBEFORE" colorName="#000000" start="0,0" stop="0,-1" thickness="0.1"/> <lineStyle kind="LINEAFTER" colorName="#000000" start="-1,0" stop="3,-1" thickness="0.1"/> </blockTableStyle> <initialize> <paraStyle name="all" alignment="justify"/> </initialize> <paraStyle name="P14_BOLD_CENTER" fontName="Helvetica-Bold" fontSize="14.0" alignment="CENTER"/> <paraStyle name="P10_BOLD_CENTER" fontName="Helvetica-Bold" fontSize="10.0" alignment="CENTER"/> <paraStyle name="P10_CENTER" fontName="Helvetica" fontSize="10.0" alignment="CENTER"/> <paraStyle name="P8_BOLD_LEFT" fontName="Helvetica-Bold" fontSize="8.0" alignment="LEFT"/> <paraStyle name="P8_BOLD_CENTER" fontName="Helvetica-Bold" fontSize="8.0" alignment="CENTER"/> <paraStyle name="P6_BOLD_CENTER_TITLE" fontName="Helvetica-Bold" fontSize="4.0" leading="4" alignment="CENTER"/> <paraStyle name="P6_BOLD_JUSTIFY_TITLE" fontName="Helvetica-Bold" fontSize="4.0" leading="4" alignment="JUSTIFY"/> <paraStyle name="P6_BOLD_LEFT_TITLE" fontName="Helvetica-Bold" fontSize="4.0" leading="4" alignment="LEFT"/> <paraStyle name="P6_LEFT_TITLE" fontName="Helvetica" fontSize="4.0" leading="4" alignment="LEFT"/> <paraStyle name="P6_BOLD_CENTER" fontName="Helvetica-Bold" fontSize="5.0" alignment="CENTER"/> <paraStyle name="P6_BOLD_LEFT" fontName="Helvetica-Bold" fontSize="6.0" leading="5" alignment="LEFT"/> <paraStyle name="P6_CENTER" fontName="Helvetica" fontSize="6.0" leading="5" alignment="CENTER"/> <paraStyle name="P6_LEFT" fontName="Helvetica" fontSize="6.0" leading="5" alignment="LEFT"/> <paraStyle name="P6_LEFT_1" fontName="Helvetica" fontSize="4.0" leading="4" alignment="RIGHT"/> <paraStyle name="P5_RIGHT" fontName="Helvetica" fontSize="5.0" leading="5" alignment="RIGHT"/> <paraStyle 
name="P5_COURIER_CENTER" fontName="Courier" fontSize="5.0" leading="5" alignment="CENTER"/> <paraStyle name="P5_COURIER_JUSTIFY" fontName="Courier" fontSize="5.0" leading="5" alignment="JUSTIFY"/> <paraStyle name="P5_COURIER_BOLD_CENTER" fontName="Courier-Bold" fontSize="5.0" leading="5" alignment="CENTER"/> <paraStyle name="P5_COURIER_BOLD_JUSTIFY" fontName="Courier-Bold" fontSize="5.0" leading="5" alignment="JUSTIFY"/> </stylesheet>""" rml += """ <story>""" rml += """ <spacer length="1.0 cm"/>""" rml += """ <blockTable colWidths="560.0" rowHeights="60.0" style="MainTable"> <tr><td><para style="P14_BOLD_CENTER">INVOICE PACKING</para></td></tr> </blockTable>""" rml += """ <blockTable colWidths="360.0,30.0,170.0" rowHeights="12.0,12.0,12.0" style="LEFT_RIGHT"> <tr> <td></td> <td></td> <td><para style="P10_CENTER">INVOICE</para></td> </tr> <tr> <td></td> <td></td> <td><para style="P10_CENTER">FACTURA No. """ + ( pedido.account_invoice_ids[0].supplier_invoice_number if len(pedido.account_invoice_ids) and pedido.account_invoice_ids[0].supplier_invoice_number else '') + """</para></td> </tr> <tr> <td><para style="P8_BOLD_LEFT">CONSIGNEE (Consignatario)</para></td> <td></td> <td></td> </tr> </blockTable>""" contact = '' if pedido.partner_id and len( pedido.partner_id.child_ids ) and pedido.partner_id.child_ids[0].name: contact = pedido.partner_id.child_ids[0].name request_date = '' if pedido.request_date: request_date = pedido.request_date str_date = request_date.split('-') xdate = datetime.date(int(str_date[0]), int(str_date[1]), int(str_date[2])) request_date = xdate.strftime("%B %d, %Y") freight_agency = '' if pedido.freight_agency_id and pedido.freight_agency_id.name: freight_agency = pedido.freight_agency_id.name rml += """ <blockTable colWidths="120.0,100.0,40.0,100.0,30.0,70.0,100.0" rowHeights="10.0,10.0,10.0,12.0" style="TwoTables"> <tr> <td><para style="P6_LEFT">NAME:</para></td> <td><para style="P6_BOLD_LEFT">""" + ustr( pedido.partner_id. 
name if pedido.partner_id and pedido.partner_id.name else '' ) + """</para></td> <td><para style="P10_CENTER"></para></td> <td><para style="P10_CENTER"></para></td> <td><para style="P10_CENTER"></para></td> <td><para style="P6_LEFT">Flight Date:</para></td> <td><para style="P6_BOLD_LEFT">""" + request_date + """</para></td> </tr> <tr> <td><para style="P10_CENTER"></para></td> <td><para style="P6_BOLD_LEFT">""" + ustr( pedido.partner_id.street or '') + """</para></td> <td><para style="P10_CENTER"></para></td> <td><para style="P10_CENTER"></para></td> <td><para style="P10_CENTER"></para></td> <td><para style="P6_LEFT">""" + ustr( 'Avb#:') + """</para></td> <td><para style="P6_BOLD_LEFT">""" + ( str(pedido.airline_id.avb_number) if pedido.airline_id and pedido.airline_id.avb_number else '') + ' ' + ( pedido.number if pedido.number else '' ) + """</para></td> </tr> <tr> <td><para style="P6_LEFT">PHONE:</para></td> <td><para style="P6_BOLD_LEFT">""" + ustr( pedido.partner_id.phone or '') + """</para></td> <td><para style="P6_LEFT">FAX:</para></td> <td><para style="P6_BOLD_LEFT">""" + ustr( pedido.partner_id.fax or '') + """</para></td> <td><para style="P6_BOLD_LEFT"></para></td> <td><para style="P6_LEFT">Airline:</para></td> <td><para style="P6_BOLD_LEFT">""" + ( str(pedido.airline_id.name) if pedido.airline_id and pedido.airline_id.name else '') + """</para></td> </tr> <tr> <td><para style="P6_LEFT">CITY-COUNTRY:</para></td> <td><para style="P6_BOLD_LEFT">""" + ustr( pedido.partner_id.city or '') + ustr( pedido.partner_id.country_id.name or '') + """</para></td> <td><para style="P6_LEFT">Contact:</para></td> <td><para style="P6_BOLD_LEFT">""" + ustr( contact) + """</para></td> <td><para style="P6_BOLD_LEFT"></para></td> <td><para style="P6_LEFT">Cargo Agency:</para></td> <td><para style="P6_BOLD_LEFT">""" + freight_agency + """</para></td> </tr> </blockTable>""" rml += """<blockTable colWidths="560.0" rowHeights="12.0" style="LEFT_RIGHT"> <tr><td><para 
style="P8_BOLD_LEFT">COUNTRY OF ORIGIN """ + ustr( '(País de Origen): ') + """ ECUADOR</para></td></tr> </blockTable>""" rml += """<blockTable colWidths="120.0,50.0,32.5,32.5,30.0,40.0,35.0,85.0,75.0,30.0,30.0" rowHeights="12.0,12.0" style="TableHeader"> <tr> <td><para style="P6_BOLD_LEFT_TITLE">VARIETY</para></td> <td><para style="P6_BOLD_CENTER_TITLE">LENGTH</para></td> <td><para style="P6_BOLD_CENTER_TITLE">PIECES/PACKING</para></td> <td><para style="P6_BOLD_CENTER_TITLE"></para></td> <td><para style="P6_BOLD_CENTER_TITLE">UNITS</para></td> <td><para style="P6_BOLD_CENTER_TITLE">STEMS</para></td> <td><para style="P6_BOLD_CENTER_TITLE">BUNCH</para></td> <td><para style="P6_BOLD_LEFT_TITLE">DESCRIPTION</para></td> <td><para style="P6_BOLD_LEFT_TITLE">CLIENT</para></td> <td><para style="P6_BOLD_CENTER_TITLE">UNIT</para></td> <td><para style="P6_BOLD_CENTER_TITLE">TOTAL</para></td> </tr> <tr> <td><para style="P6_BOLD_CENTER_TITLE"></para></td> <td><para style="P6_BOLD_CENTER_TITLE">CM</para></td> <td><para style="P6_BOLD_CENTER_TITLE">HB</para></td> <td><para style="P6_BOLD_CENTER_TITLE">QB</para></td> <td><para style="P6_BOLD_CENTER_TITLE">x HB</para></td> <td><para style="P6_BOLD_CENTER_TITLE">TOTAL</para></td> <td><para style="P6_BOLD_CENTER_TITLE">TOTAL</para></td> <td><para style="P6_BOLD_CENTER_TITLE"></para></td> <td><para style="P6_BOLD_LEFT_TITLE">REMARKS</para></td> <td><para style="P6_BOLD_CENTER_TITLE">PRICE</para></td> <td><para style="P6_BOLD_CENTER_TITLE">PRICE</para></td> </tr> </blockTable>""" cr.execute( """select lines.*, coalesce((select sum(case when t."type" = 'percent' then t.amount * lines.stems else t.amount end) from product_taxes_rel ptt inner JOIN account_tax t on ptt.tax_id = t.id where ptt.prod_id = lines.product_id), 0) as taxes from ( SELECT v."name" as variety, (dl.lengths) as length, round(avg(dl.bunch_type::INT)) as bunch_type, sum(case when dl.is_box_qty = TRUE then dl.qty * dl.bunch_per_box * dl.bunch_type::int else dl.qty end) 
as stems, sum(dl.bunch_per_box) as bunches, min(case when p."type" = 'standing_order' then 'Standing Order' else 'Open Market' end) as description, pp."name" as subclient, sum(case when dl.is_box_qty = TRUE then (dl.qty * dl.bunch_per_box * dl.bunch_type::int * dl.sale_price)::FLOAT else (dl.qty * dl.sale_price)::FLOAT end)/ sum(case when dl.is_box_qty = TRUE then dl.qty * dl.bunch_per_box * dl.bunch_type::int else dl.qty end) as unit_price, sum(case when dl.is_box_qty = TRUE then (dl.qty * dl.bunch_per_box * dl.bunch_type::int * dl.sale_price)::FLOAT else (dl.qty * dl.sale_price)::FLOAT end) as total, dl.product_id, dl.uom, case when dl.box_id is not null then (select sum(dl2.bunch_per_box) from detalle_lines dl2 where dl2.box_id = dl.box_id) else sum(case when dl.is_box_qty = TRUE then dl.qty else dl.qty/(dl.bunch_type::INT * dl.bunch_per_box) end) end as total_bunches, dl.box_id,pp2."name" from detalle_lines dl inner join product_variant v on v."id" = dl.variant_id inner join pedido_cliente p on p.id = dl.pedido_id inner join res_partner pp2 on pp2.id = dl.supplier_id LEFT JOIN res_partner pp on dl.subclient_id = pp."id" where dl.pedido_id = %s and dl.active = true GROUP BY v."name", dl.lengths, pp."name", dl.product_id, dl.uom, dl.box_id,pp2."name" order by v."name", dl.lengths, pp."name" ) lines """, (pedido.id, )) lines = cr.fetchall() summary_supplier = {} summary = {} for l in lines: key = l[0] + ',' + l[1] + ',' + l[5] + ',' + l[6] + ',' + l[10] hb = 0 qb = 0 if l[10] == 'HB': hb = l[4] / l[11] if l[12] else l[11] if l[10] == 'QB': qb = l[4] / l[11] if l[12] else l[11] if l[13] not in summary_supplier: summary_supplier[l[13]] = { 'farm': l[13], 'hb': hb, 'qb': qb, 'fb': hb / 2 + qb / 4 } else: summary_supplier[l[13]]['hb'] += hb summary_supplier[l[13]]['qb'] += qb summary_supplier[l[13]]['fb'] += hb / 2 + qb / 4 if key not in summary: summary[key] = { 'variety': l[0], 'length': l[1], 'hb': hb, 'qb': qb, 'units': l[2], 'stems': l[3], 'bunch': l[4], 'desc': 
l[5], 'client': l[6], 'price': l[7], 'total': l[8], 'taxes': l[14] } else: summary[key]['hb'] += float(hb) summary[key]['qb'] += float(qb) summary[key]['stems'] += float(l[3]) summary[key]['bunch'] += float(l[4]) summary[key]['total'] += float(l[8]) summary[key]['taxes'] += float(l[14]) total_hb = 0 total_qb = 0 total_stems = 0 total_bunch = 0 total_invoice = 0 total_taxes = 0 for line in summary.values(): variety = line['variety'] length = line['length'] unit_per_hb = line['units'] line_hb = line['hb'] qb_cont = line['qb'] stems = line['stems'] bunch = line['bunch'] description = line['desc'] subclient = line['client'] sale_price = line['price'] total = line['total'] total_hb += line_hb total_qb += qb_cont total_stems += stems total_bunch += bunch total_invoice += total total_taxes += line['taxes'] rml += """ <blockTable colWidths="120.0,50.0,32.5,32.5,30.0,40.0,35.0,85.0,75.0,30.0,30.0" rowHeights="10.0" style="AllBorders"> <tr> <td><para style="P6_LEFT_TITLE">""" + ( ustr(variety[0:25] if variety else '')) + """</para></td> <td><para style="P5_COURIER_CENTER">""" + ( ustr(length[0:15])) + """</para></td> <td><para style="P5_COURIER_CENTER">""" + str( round(line_hb, 2)) + """</para></td> <td><para style="P5_COURIER_CENTER">""" + str( round(qb_cont, 2)) + """</para></td> <td><para style="P5_COURIER_CENTER">""" + str( int(unit_per_hb)) + """</para></td> <td><para style="P5_COURIER_CENTER">""" + str( round(stems, 2)) + """</para></td> <td><para style="P5_COURIER_CENTER">""" + str( round(bunch, 2)) + """</para></td> <td><para style="P6_LEFT_TITLE">""" + ( ustr(description[0:20] if description else '') ) + """</para></td> <td><para style="P6_LEFT_TITLE">""" + ustr( subclient or '') + """</para></td> <td><para style="P5_COURIER_CENTER">""" + str( round(sale_price, 2)) + """</para></td> <td><para style="P5_COURIER_CENTER">""" + str( round(total, 2) ) + """</para></td> </tr> </blockTable>""" rml += """ <blockTable 
colWidths="120.0,50.0,32.5,32.5,30.0,40.0,35.0,85.0,75.0,30.0,30.0" rowHeights="10.0" style="AllBorders"> <tr> <td><para style="P5_COURIER_BOLD_JUSTIFY">""" + _( 'Total farm') + """</para></td> <td><para style="P5_COURIER_CENTER"></para></td> <td><para style="P5_COURIER_BOLD_CENTER">""" + str( round(total_hb, 2)) + """</para></td> <td><para style="P5_COURIER_BOLD_CENTER">""" + str( round(total_qb, 2)) + """</para></td> <td><para style="P5_COURIER_CENTER"></para></td> <td><para style="P5_COURIER_BOLD_CENTER">""" + str( round(total_stems, 0)) + """</para></td> <td><para style="P5_COURIER_BOLD_CENTER">""" + str( round(total_bunch, 2)) + """</para></td> <td><para style="P5_COURIER_BOLD_CENTER">""" + str( round( float(total_hb) / 2 + float(total_qb) / 4, 2)) + """ Full Boxes</para></td> <td><para style="P5_COURIER_CENTER"></para></td> <td><para style="P5_COURIER_CENTER"></para></td> <td><para style="P5_COURIER_BOLD_CENTER">""" + str( round(total_invoice, 2)) + """</para></td> </tr> </blockTable>""" res_tipo_neg = pedido.partner_id.tipo_neg_id.name if pedido.partner_id.tipo_neg_id and pedido.partner_id.tipo_neg_id.name else '' tipo_flete = pedido.partner_id.tipo_flete if pedido.partner_id.tipo_flete else '' flete_value = pedido.precio_flete if tipo_flete == 'fob_f_p' else 0.0 rml += """ <blockTable colWidths="242.5,182.5,135.0" rowHeights="" style="TableX"> <tr> <td> <blockTable colWidths="285.0,50.0,225.0" rowHeights="8.0,8.0" style=""> <tr> <td><para style="P5_RIGHT">Gross Weight</para></td> <td><para style="P5_RIGHT">""" + ( datas['gross_weight'] or '' ) + """</para></td> <td></td> </tr> <tr> <td><para style="P5_RIGHT">(Peso Bruto)</para></td> <td></td> <td></td> </tr> </blockTable> </td> <td></td> <td> <blockTable colWidths="70.0,30.0,40.0" rowHeights="" style=""> <tr> <td><para style="P6_LEFT">""" + ustr( 'TOTAL INVOICE') + """</para></td> <td><para style="P6_LEFT_1">USD</para></td> <td><para style="P6_LEFT_1">""" + ustr( '$') + str(round( total_invoice + 
total_taxes, 2)) + """</para></td> </tr> <tr> <td><para style="P6_LEFT">I.V.A</para></td> <td><para style="P6_LEFT_1">USD</para></td> <td><para style="P6_LEFT_1">""" + ustr( '$') + str(round(total_taxes, 2)) + """</para></td> </tr> <tr> <td><para style="P6_LEFT">SUBTOTAL</para></td> <td><para style="P6_LEFT_1">USD</para></td> <td><para style="P6_LEFT_1">""" + ustr( '$') + str(round( total_invoice, 2)) + """</para></td> </tr> <tr> <td></td><td></td><td></td> </tr> <tr> <td><para style="P6_LEFT">""" + _( 'Freight') + ' ' + ( ustr(pedido.freight_agency_id.name) if pedido.freight_agency_id and pedido.freight_agency_id.name else '') + """</para></td> <td><para style="P6_LEFT_1">USD</para></td> <td><para style="P6_LEFT_1">""" + ustr( '$') + str( round(flete_value, 2)) + """</para></td> </tr> <tr> <td><para style="P6_LEFT">TOTAL</para></td> <td><para style="P6_LEFT_1">USD</para></td> <td><para style="P6_LEFT_1">""" + ustr( '$' ) + str( round( total_invoice + total_taxes + flete_value, 2) ) + """</para></td> </tr> </blockTable> </td> </tr> <tr> <td><para style="P6_CENTER">ALL STEMS AND PACKING FROM ECUADOR</para></td> <td></td> <td></td> </tr> <tr> <td><para style="P6_CENTER">""" + ustr( res_tipo_neg ).replace( '&', '&' ) + """</para></td> <td></td> <td></td> </tr> <tr> <td><para style="P6_CENTER">Elaborado por: """ + ( datas['make_by'] or '' ) + """</para></td> <td><para style="P6_CENTER">Despachado por: """ + ( datas['served_by'] or '' ) + """</para></td> <td></td> </tr> </blockTable>""" company_obj = pooler.get_pool(cr.dbname).get('res.company') companies_ids = company_obj.search(cr, uid, []) company = company_obj.browse(cr, uid, companies_ids[0], context) rml += """ <blockTable colWidths="560.0" rowHeights="" style="TableY"> <tr><td><para style="P10_CENTER">Think in flowers????, think about us "INFLOWERS"</para></td></tr> <tr><td><para style="P6_CENTER">""" + ( company.street + ', ' if company.street else '') + (company.street2 + ', ' if company.street2 else '') + ( 
company.city + ', ' if company.city else '') + ( company.state_id.name + ', ' if company.state_id and company.state_id.name else '') + (company.country_id.name if company.country_id and company.country_id.name else '') + """</para></td></tr> <tr><td><para style="P6_CENTER">Phone: """ + ( company.phone + ',' if company.phone else '' ) + """ Mobile: 59399 821-2383</para></td></tr> <tr><td><para style="P6_CENTER">Email/MSN: """ + ( company.email if company.email else '') + """ Skype: Inflowers</para></td></tr> </blockTable>""" rml += """ <spacer length="0.5cm"/>""" rml += """<blockTable colWidths="170.0,30.0,35.0,30.0,295.0" rowHeights="12.0" style="TableZ"> <tr> <td><para style="P6_CENTER">FINCA</para></td> <td><para style="P6_CENTER">HALF</para></td> <td><para style="P6_CENTER">QUART</para></td> <td><para style="P6_CENTER">FULL</para></td> <td></td> </tr>""" total_hb = 0 total_qb = 0 total_fb = 0 for line in summary_supplier.values(): hb = line['hb'] qb = line['qb'] total = line['fb'] total_hb += hb total_qb += qb total_fb += total rml += """<tr>""" rml += """<td><para style="P6_CENTER">""" + ustr( line['farm']) + """</para></td>""" rml += """<td><para style="P6_CENTER">""" + (str(round( hb, 2))) + """</para></td>""" rml += """<td><para style="P6_CENTER">""" + (str(round( qb, 2))) + """</para></td>""" rml += """<td><para style="P6_CENTER">""" + (str( round(total, 2))) + """</para></td>""" rml += """<td></td></tr>""" rml += """<tr> <td><para style="P6_CENTER">TOTAL</para></td> <td><para style="P6_CENTER">""" + str( round(total_hb, 2)) + """</para></td> <td><para style="P6_CENTER">""" + str( round(total_qb, 2)) + """</para></td> <td><para style="P6_CENTER">""" + str( round(total_fb, 2)) + """</para></td> <td></td> </tr>""" rml += """</blockTable>""" rml += """ </story> </document>""" report_type = datas.get('report_type', 'pdf') create_doc = self.generators[report_type] pdf = create_doc(rml, title=self.title) return pdf, report_type
def graph_get(self, cr, uid, id, model, node_obj, conn_obj, src_node, des_node, label, scale, context=None):
    """Compute node positions and transitions for the web diagram widget.

    :param id: id of the record of ``model`` that owns the diagram
               (note: shadows the ``id`` builtin — kept for API compatibility)
    :param model: model name of the diagram container
    :param node_obj: model name holding the nodes (one2many of ``model``)
    :param conn_obj: model name holding the arrows/transitions
    :param src_node: field name on ``conn_obj`` pointing to the source node
    :param des_node: field name on ``conn_obj`` pointing to the destination node
    :param label: string holding a Python list literal of ``conn_obj`` field
                  names to concatenate into each arrow label (eval'ed below)
    :param scale: sequence passed to ``graph.scale`` for layout scaling
    :return: dict with positioned ``nodes``, ``transitions``, arrow ``label``
             strings, ``blank_nodes`` (unconnected), and the inverse relation
             field name as ``node_parent_field``
    """
    nodes=[]
    nodes_name=[]
    transitions=[]
    start=[]
    tres={}
    labels={}
    no_ancester=[]
    blank_nodes = []
    _Model_Obj = self.pool[model]
    _Node_Obj = self.pool[node_obj]
    _Arrow_Obj = self.pool[conn_obj]
    # Discover the one2many field on the container that holds the nodes,
    # plus its inverse many2one field.
    for model_key,model_value in _Model_Obj._columns.items():
        if model_value._type=='one2many':
            if model_value._obj==node_obj:
                _Node_Field=model_key
                _Model_Field=model_value._fields_id
    # Discover the two one2many fields on the node model that point to the
    # arrow model: the first found is taken as outgoing (destination side),
    # the second as incoming (source side) — the ``flag`` bool tracks which
    # one has already been assigned.
    flag=False
    for node_key,node_value in _Node_Obj._columns.items():
        if node_value._type=='one2many':
            if node_value._obj==conn_obj:
                if src_node in _Arrow_Obj._columns and flag:
                    _Source_Field=node_key
                if des_node in _Arrow_Obj._columns and not flag:
                    _Destination_Field=node_key
                    flag = True
    datas = _Model_Obj.read(cr, uid, id, [],context)
    for a in _Node_Obj.read(cr,uid,datas[_Node_Field],[]):
        # Nodes with at least one arrow are laid out; isolated ones are
        # reported separately as blank_nodes.
        if a[_Source_Field] or a[_Destination_Field]:
            nodes_name.append((a['id'],a['name']))
            nodes.append(a['id'])
        else:
            blank_nodes.append({'id': a['id'],'name':a['name']})
        if a.has_key('flow_start') and a['flow_start']:
            start.append(a['id'])
        else:
            # No explicit start flag: nodes without incoming arrows are roots.
            if not a[_Source_Field]:
                no_ancester.append(a['id'])
        for t in _Arrow_Obj.read(cr,uid, a[_Destination_Field],[]):
            transitions.append((a['id'], t[des_node][0]))
            tres[str(t['id'])] = (a['id'],t[des_node][0])
            label_string = ""
            if label:
                # NOTE(review): ``label`` is eval'ed — it is expected to come
                # from trusted view definitions, not user input.
                for lbl in eval(label):
                    if t.has_key(tools.ustr(lbl)) and tools.ustr(t[lbl])=='False':
                        label_string += ' '
                    else:
                        label_string = label_string + " " + tools.ustr(t[lbl])
            labels[str(t['id'])] = (a['id'],label_string)
    g = graph(nodes, transitions, no_ancester)
    g.process(start)
    g.scale(*scale)
    result = g.result_get()
    results = {}
    for node in nodes_name:
        results[str(node[0])] = result[node[0]]
        results[str(node[0])]['name'] = node[1]
    return {'nodes': results, 'transitions': tres, 'label' : labels, 'blank_nodes': blank_nodes, 'node_parent_field': _Model_Field,}
def create_xml(self, cr, uid, ids, data, context):
    """Build the XML payload for the HR holidays summary report.

    Renders a ~60-day calendar grid (starting at the wizard's ``date_from``)
    as ``<dayy>``/``<months>`` elements, plus one row per employee (or per
    department + its employees) produced by ``emp_create_xml``.

    :param data: wizard data dict; uses ``data['form']`` (``date_from``,
                 ``emp``, ``depts``, ``holiday_type``) and ``data['model']``
    :return: complete ``<report>`` XML document as a string
    """
    registry = openerp.registry(cr.dbname)
    obj_dept = registry['hr.department']
    obj_emp = registry['hr.employee']
    depts=[]
    emp_id={}
    rpt_obj = registry['hr.holidays']
    rml_obj=report_sxw.rml_parse(cr, uid, rpt_obj._name,context)
    cr.execute("SELECT name FROM res_company")
    res=cr.fetchone()[0]
    date_xml=[]
    date_today=time.strftime('%Y-%m-%d %H:%M:%S')
    date_xml +=['<res name="%s" today="%s" />' % (to_xml(res),date_today)]
    cr.execute("SELECT id, name, color_name FROM hr_holidays_status ORDER BY id")
    legend=cr.fetchall()
    today=datetime.datetime.today()
    first_date=data['form']['date_from']
    som = strToDate(first_date)
    # The report always spans 60 days from the start date.
    eom = som+datetime.timedelta(59)
    day_diff=eom-som
    name = ''
    if len(data['form'].get('emp', ())) == 1:
        name = obj_emp.read(cr, uid, data['form']['emp'][0], ['name'])['name']
    # Map the wizard's holiday_type selection onto the holiday states to show.
    # NOTE: ``type`` shadows the builtin — kept as-is.
    if data['form']['holiday_type']!='both':
        type=data['form']['holiday_type']
        if data['form']['holiday_type']=='Confirmed':
            holiday_type=('confirm')
        else:
            holiday_type=('validate')
    else:
        type="Confirmed and Approved"
        holiday_type=('confirm','validate')
    date_xml.append('<from>%s</from>\n'% (str(rml_obj.formatLang(som.strftime("%Y-%m-%d"),date=True))))
    date_xml.append('<to>%s</to>\n' %(str(rml_obj.formatLang(eom.strftime("%Y-%m-%d"),date=True))))
    date_xml.append('<type>%s</type>'%(type))
    date_xml.append('<name>%s</name>'%(name))
    # date_xml=[]
    for l in range(0,len(legend)):
        date_xml += ['<legend row="%d" id="%d" name="%s" color="%s" />' % (l+1,legend[l][0],_(legend[l][1]),legend[l][2])]
    date_xml += ['<date month="%s" year="%d" />' % (ustr(som.strftime('%B')), som.year),'<days>']
    cell=1
    # Emit day cells for the first (possibly partial) month.
    if day_diff.days>=30:
        date_xml += ['<dayy number="%d" name="%s" cell="%d"/>' % (x, _(som.replace(day=x).strftime('%a')),x-som.day+1) for x in range(som.day, lengthmonth(som.year, som.month)+1)]
    else:
        if day_diff.days>=(lengthmonth(som.year, som.month)-som.day):
            date_xml += ['<dayy number="%d" name="%s" cell="%d"/>' % (x, _(som.replace(day=x).strftime('%a')),x-som.day+1) for x in range(som.day, lengthmonth(som.year, som.month)+1)]
        else:
            date_xml += ['<dayy number="%d" name="%s" cell="%d"/>' % (x, _(som.replace(day=x).strftime('%a')),x-som.day+1) for x in range(som.day, eom.day+1)]
    # WARNING: relies on Python 2 list-comprehension variable leakage — ``x``
    # here is the last day emitted by one of the comprehensions above.
    cell=x-som.day+1
    day_diff1=day_diff.days-cell+1
    width_dict={}
    month_dict={}
    i=1
    j=1
    year=som.year
    month=som.month
    month_dict[j]=som.strftime('%B')
    width_dict[j]=cell
    # Walk the remaining days month by month, rolling over the year when
    # needed; width_dict/month_dict collect per-month column widths/names.
    while day_diff1>0:
        if month+i<=12:
            if day_diff1 > lengthmonth(year,i+month): # Not on 30 else you have problems when entering 01-01-2009 for example
                som1=datetime.date(year,month+i,1)
                date_xml += ['<dayy number="%d" name="%s" cell="%d"/>' % (x, _(som1.replace(day=x).strftime('%a')),cell+x) for x in range(1, lengthmonth(year,i+month)+1)]
                i=i+1
                j=j+1
                month_dict[j]=som1.strftime('%B')
                cell=cell+x
                width_dict[j]=x
            else:
                som1=datetime.date(year,month+i,1)
                date_xml += ['<dayy number="%d" name="%s" cell="%d"/>' % (x, _(som1.replace(day=x).strftime('%a')),cell+x) for x in range(1, eom.day+1)]
                i=i+1
                j=j+1
                month_dict[j]=som1.strftime('%B')
                cell=cell+x
                width_dict[j]=x
            day_diff1=day_diff1-x
        else:
            # Year rollover: restart month counter at January of next year.
            years=year+1
            year=years
            month=0
            i=1
            if day_diff1>=30:
                som1=datetime.date(years,i,1)
                date_xml += ['<dayy number="%d" name="%s" cell="%d"/>' % (x, _(som1.replace(day=x).strftime('%a')),cell+x) for x in range(1, lengthmonth(years,i)+1)]
                i=i+1
                j=j+1
                month_dict[j]=som1.strftime('%B')
                cell=cell+x
                width_dict[j]=x
            else:
                som1=datetime.date(years,i,1)
                i=i+1
                j=j+1
                month_dict[j]=som1.strftime('%B')
                date_xml += ['<dayy number="%d" name="%s" cell="%d"/>' % (x, _(som1.replace(day=x).strftime('%a')),cell+x) for x in range(1, eom.day+1)]
                cell=cell+x
                width_dict[j]=x
            day_diff1=day_diff1-x
    date_xml.append('</days>')
    date_xml.append('<cols>3.5cm%s,0.4cm</cols>\n' % (',0.4cm' * (60)))
    date_xml = ''.join(date_xml)
    st='<cols_months>3.5cm'
    for m in range(1,len(width_dict)+1):
        st+=',' + str(0.4 *width_dict[m])+'cm'
    st+=',0.4cm</cols_months>\n'
    months_xml =['<months number="%d" name="%s"/>' % (x, _(month_dict[x])) for x in range(1,len(month_dict)+1) ]
    months_xml.append(st)
    emp_xml=''
    row_id=1
    # One row per selected employee, or per department followed by its staff.
    if data['model'] == 'hr.employee':
        for items in obj_emp.read(cr, uid, data['form']['emp'], ['id', 'name']):
            emp_xml += emp_create_xml(self, cr, uid, 0, holiday_type, row_id, items['id'], items['name'], som, eom)
            row_id = row_id +1
    elif data['model']=='ir.ui.menu':
        for dept in obj_dept.browse(cr, uid, data['form']['depts'], context=context):
            cr.execute("SELECT id FROM hr_employee WHERE department_id = %s", (dept.id,))
            emp_ids = [x[0] for x in cr.fetchall()]
            if emp_ids==[]:
                continue
            dept_done=0
            for item in obj_emp.read(cr, uid, emp_ids, ['id', 'name']):
                # Emit the department header row once, before its first employee.
                if dept_done==0:
                    emp_xml += emp_create_xml(self, cr, uid, 1, holiday_type, row_id, dept.id, dept.name, som, eom)
                    row_id = row_id +1
                dept_done=1
                emp_xml += emp_create_xml(self, cr, uid, 0, holiday_type, row_id, item['id'], item['name'], som, eom)
                row_id = row_id +1
    header_xml = '''
    <header>
    <date>%s</date>
    <company>%s</company>
    </header>
    ''' % (str(rml_obj.formatLang(time.strftime("%Y-%m-%d"),date=True))+' ' + str(time.strftime("%H:%M")),to_xml(registry['res.users'].browse(cr,uid,uid).company_id.name))
    # Computing the xml
    xml='''<?xml version="1.0" encoding="UTF-8" ?>
    <report>
    %s
    %s
    %s
    %s
    </report>
    ''' % (header_xml,months_xml,date_xml, ustr(emp_xml))
    return xml
def toxml(value):
    """Return ``value`` as a unicode string safe for embedding in XML text.

    Converts the value to unicode via ``tools.ustr`` and escapes the three
    XML-special characters. ``&`` is escaped first so the entities produced
    for ``<`` and ``>`` are not double-escaped.
    """
    unicode_value = tools.ustr(value)
    # Bug fix: the previous chain replaced each character with itself
    # (no-ops) — it escaped nothing. Escape to proper XML entities.
    return unicode_value.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
def record_objects(self, cr, uid, ids, context=None):
    """Record created/modified records of the selected models for replay.

    Collects, for every model chosen in the wizard, the records created
    and/or modified since ``check_date`` (per ``filter_cond``) into
    ``ir.module.record.recording_data``, then returns the act_window for
    the appropriate follow-up form (YAML preview, info, or message view).
    """
    data = self.read(cr, uid, ids, [], context=context)[0]
    check_date = data['check_date']
    # NOTE: ``filter`` shadows the builtin — kept as-is.
    filter = data['filter_cond']
    user = (self.pool.get('res.users').browse(cr, uid, uid)).login
    mod = self.pool.get('ir.module.record')
    mod_obj = self.pool.get('ir.model')
    mod.recording_data = []
    for id in data['objects']:
        obj_name = (mod_obj.browse(cr, uid, id)).model
        obj_pool = self.pool.get(obj_name)
        # NOTE(review): if filter_cond ever holds a value other than these
        # three, search_condition from the previous iteration (or an
        # UnboundLocalError on the first) would be used — the selection
        # field presumably restricts it; verify against the wizard view.
        if filter == 'created':
            search_condition = [('create_date', '>', check_date)]
        elif filter == 'modified':
            search_condition = [('write_date', '>', check_date)]
        elif filter == 'created_modified':
            search_condition = [ '|', ('create_date', '>', check_date), ('write_date', '>', check_date) ]
        # Models without log access have no create/write dates: record all.
        if '_log_access' in dir(obj_pool):
            if not (obj_pool._log_access):
                search_condition = []
        # Skip abstract / non-table models.
        if '_auto' in dir(obj_pool):
            if not obj_pool._auto:
                continue
        search_ids = obj_pool.search(cr, uid, search_condition)
        for s_id in search_ids:
            args = (cr.dbname, uid, obj_name, 'copy', s_id, {}, context)
            mod.recording_data.append(('query', args, {}, s_id))
    # From here on mod_obj is ir.model.data (reused variable).
    mod_obj = self.pool.get('ir.model.data')
    if len(mod.recording_data):
        if data['info_yaml']:
            mod = self.pool.get('ir.module.record')
            res = base_module_save._create_yaml(self, cr, uid, data, context)
            model_data_ids = mod_obj.search(
                cr, uid, [('model', '=', 'ir.ui.view'), ('name', '=', 'yml_save_form_view')], context=context)
            resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
            return {
                'name': _('Message'),
                'context': { 'default_yaml_file': tools.ustr(res['yaml_file']) },
                'view_type': 'form',
                'view_mode': 'form',
                'res_model': 'base.module.record.objects',
                'views': [(resource_id, 'form')],
                'type': 'ir.actions.act_window',
                'target': 'new',
            }
        else:
            model_data_ids = mod_obj.search(
                cr, uid, [('model', '=', 'ir.ui.view'), ('name', '=', 'info_start_form_view')], context=context)
            resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
            return {
                'name': _('Message'),
                'context': context,
                'view_type': 'form',
                'view_mode': 'form',
                'res_model': 'base.module.record.objects',
                'views': [(resource_id, 'form')],
                'type': 'ir.actions.act_window',
                'target': 'new',
            }
    # Nothing was recorded: show the "recording" message view instead.
    model_data_ids = mod_obj.search(
        cr, uid, [('model', '=', 'ir.ui.view'), ('name', '=', 'module_recording_message_view')], context=context)
    resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
    return {
        'name': _('Message'),
        'context': context,
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'base.module.record.objects',
        'views': [(resource_id, 'form')],
        'type': 'ir.actions.act_window',
        'target': 'new',
    }
def button_confirm_login(self, cr, uid, ids, context=None):
    """Test the connection of each selected server and mark it as 'done'.

    :raises osv.except_osv: when the connection attempt fails; the original
        exception text is included in the message.
    """
    if context is None:
        context = {}
    for server in self.browse(cr, uid, ids, context=context):
        try:
            # Return value is unused; connect() raising is the failure signal.
            connection = server.connect()
            server.write({'state':'done'})
        except Exception, e:
            _logger.exception("Failed to connect to %s server %s.", server.type, server.name)
            raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s.") % tools.ustr(e))
        # NOTE(review): the body of this ``finally`` clause is missing in
        # this chunk (truncated source) — as shown, this is a syntax error.
        finally:
def decode(text):
    """Returns unicode() string conversion of the the given encoded smtp header text"""
    if not text:
        # Falsy input: fall through with no value, as the original did
        # (implicitly returns None).
        return None
    # decode_header() chokes on bare carriage returns; drop them first.
    fragments = decode_header(text.replace('\r', ''))
    return ''.join(tools.ustr(payload, charset) for payload, charset in fragments)
def _create_analytic_entries(self, cr, uid, vals, context):
    """Create the hr analytic timesheet from worcenter actual work

    Builds an hr.analytic.timesheet line from a workcenter operation's
    actual-work values (``vals``: expects ``workcenter_line_id``, ``name``,
    ``hours``, ``user_id``, optionally ``date``), converts the hours to the
    employee product's UoM, links it to the matching timesheet sheet, and
    lets ``on_change_unit_amount`` price it.

    :return: id of the created timesheet line, or False when the work order's
             production has no analytic account
    :raises orm.except_orm: when no open timesheet sheet covers the date
    """
    timesheet_obj = self.pool['hr.analytic.timesheet']
    sheet_obj = self.pool['hr_timesheet_sheet.sheet']
    workorder_line_obj = self.pool['mrp.production.workcenter.line']
    vals_line = {}
    timeline_id = False
    acc_id = False
    workorder_line = workorder_line_obj.browse(
        cr, uid, vals['workcenter_line_id'], context=context)
    result = self.get_user_related_details(cr, uid, vals.get('user_id', uid))
    vals_line['name'] = '%s: %s' % (tools.ustr(workorder_line.name),
                                    tools.ustr(vals['name'] or '/'))
    vals_line['user_id'] = vals['user_id']
    vals_line['product_id'] = result['product_id']
    if vals.get('date'):
        # Convert the server datetime to the user's timezone, keep the date part.
        timestamp = datetime.datetime.strptime(
            vals['date'], tools.DEFAULT_SERVER_DATETIME_FORMAT)
        ts = fields.datetime.context_timestamp(cr, uid, timestamp, context)
        vals_line['date'] = ts.strftime(tools.DEFAULT_SERVER_DATE_FORMAT)
    # Calculate quantity based on employee's product's uom
    vals_line['unit_amount'] = vals['hours']
    default_uom = self.pool['res.users'].browse(
        cr, uid, uid, context=context).company_id.project_time_mode_id.id
    if result['product_uom_id'] != default_uom:
        vals_line['unit_amount'] = self.pool['product.uom']._compute_qty(
            cr, uid, default_uom, vals['hours'], result['product_uom_id'])
    acc_id = workorder_line.production_id.analytic_account_id.id or False
    if acc_id:
        vals_line['account_id'] = acc_id
        res = timesheet_obj.on_change_account_id(cr, uid, False, acc_id)
        if res.get('value'):
            vals_line.update(res['value'])
        vals_line['general_account_id'] = result['general_account_id']
        vals_line['journal_id'] = result['journal_id']
        vals_line['amount'] = 0.0
        vals_line['product_uom_id'] = result['product_uom_id']
        amount = vals_line['unit_amount']
        prod_id = vals_line['product_id']
        unit = False
        # NOTE(review): assumes vals_line['date'] was set above, i.e. that
        # callers always pass 'date' in vals — confirm against callers.
        sheet_ids = sheet_obj.search(cr, uid,
                                     [('date_to', '>=', vals_line['date']),
                                      ('date_from', '<=', vals_line['date']),
                                      ('employee_id.user_id', '=',
                                       vals_line['user_id'])],
                                     context=context)
        if sheet_ids:
            vals_line['sheet_id'] = sheet_ids[0]
        else:
            raise orm.except_orm(_('Error!'),
                                 _('Employees must have an active '
                                   'Timesheets for the date entered.'
                                   ))
        timeline_id = timesheet_obj.create(cr, uid, vals=vals_line,
                                           context=context)
        # Compute based on pricetype
        amount_unit = timesheet_obj.on_change_unit_amount(
            cr, uid, timeline_id, prod_id, amount, False, unit,
            vals_line['journal_id'], context=context)
        if amount_unit and 'amount' in amount_unit.get('value', {}):
            updv = {'amount': amount_unit['value']['amount']}
            timesheet_obj.write(cr, uid, [timeline_id], updv, context=context)
    return timeline_id
def import_budget(self):
    """Import an invest-asset budget plan from an uploaded .xlsx file.

    Reads the 'Assets' worksheet of ``self.input_file``, validates that the
    sheet belongs to the active plan, replaces the plan's item lines with the
    sheet rows, attaches the uploaded file and logs an import history entry.

    :return: True on success
    :raises ValidationError: on unreadable/wrong file, non-draft plan, or a
        plan-id mismatch between the sheet and the active record
    """
    active_ids = self.env.context.get('active_ids', [])
    active_model = self.env.context.get('active_model', False)
    imp_file = self.input_file.decode('base64')
    stream = cStringIO.StringIO(imp_file)
    try:
        workbook = openpyxl.load_workbook(stream)
    except IOError as e:
        raise ValidationError(_(e.strerror))
    except Exception:
        # Anything else (bad zip, wrong extension, ...) is a format problem.
        raise ValidationError(
            _('Wrong file format. Please enter .xlsx file.'))
    Plans = self.env[active_model].browse(active_ids)
    for plan in Plans:
        if plan.state != 'draft':
            raise ValidationError(
                _('You can update budget plan only in draft state!'))
        Asset_Sheet = workbook.get_sheet_by_name('Assets')
        vals = {}
        plan_id = Asset_Sheet.cell(row=1, column=5).value
        # If we are trying to import the sheet of another record, raise.
        if plan.id != plan_id:
            raise ValidationError(
                _('Please import the correct file for this plan'))
        # Header area: fiscal year, export date, responsible user.
        fiscal_year = Asset_Sheet.cell(row=1, column=2).value
        fiscal_year_id = self.env['account.fiscalyear'].search([
            ('name', '=', tools.ustr(fiscal_year))
        ])
        if fiscal_year_id:
            vals.update({'fiscalyear_id': fiscal_year_id.id})
        export_date = Asset_Sheet.cell(row=3, column=2).value
        if export_date:
            export_date = datetime.strftime(
                datetime.strptime(export_date, '%d-%m-%Y'), '%Y-%m-%d')
            vals.update({'date': export_date})
        responsible_by = Asset_Sheet.cell(row=5, column=2).value
        responsible_by_id = self.env['res.users'].search([
            ('name', '=', tools.ustr(responsible_by))
        ])
        if responsible_by_id:
            vals.update({'creating_user_id': responsible_by_id.id})
        # Detail lines start at row 8.
        line_row = 8
        lines_to_create = []
        # NOTE(review): range(line_row, max_row) excludes row == max_row;
        # openpyxl's max_row is the last populated row, so the final sheet
        # row is skipped — presumably a totals row; confirm with a sample file.
        max_row = Asset_Sheet.max_row
        for row in range(line_row, max_row):
            line_vals = {}
            program_group = Asset_Sheet.cell(row=row, column=2).value
            program_group_id = self.env['res.program.group'].search([
                ('name', '=', tools.ustr(program_group))
            ])
            if program_group_id:
                line_vals.update({'program_group_id': program_group_id.id})
            asset_category = Asset_Sheet.cell(row=row, column=3).value
            asset_category_id =\
                self.env['res.invest.asset.category'].search(
                    [('name', '=', tools.ustr(asset_category))])
            if asset_category_id:
                line_vals.update(
                    {'invest_asset_categ_id': asset_category_id.id})
            # Rows with neither a program group nor a category are blank/noise.
            if not program_group_id and not asset_category_id:
                continue
            # '=FALSE()' is what empty formula cells export as — treat as empty.
            asset_common_name = Asset_Sheet.cell(row=row, column=4).value
            if asset_common_name == '=FALSE()':
                asset_common_name = ''
            line_vals.update({'asset_common_name': asset_common_name})
            asset_name = Asset_Sheet.cell(row=row, column=5).value
            if asset_name == '=FALSE()':
                asset_name = ''
            line_vals.update({'asset_name': asset_name})
            # Requester cell embeds the employee code at positions 1..6.
            requester = Asset_Sheet.cell(row=row, column=6).value
            requester = tools.ustr(requester)
            requester_code = requester[1:7]
            requester_id = self.env['hr.employee'].search([
                ('employee_code', '=', requester_code)
            ])
            if requester_id:
                line_vals.update({'request_user_id': requester_id.id})
            section = Asset_Sheet.cell(row=row, column=7).value
            section_id = self.env['res.section'].search([
                ('name', '=', tools.ustr(section))
            ])
            if section_id:
                line_vals.update({'section_id': section_id.id})
            division = Asset_Sheet.cell(row=row, column=8).value
            division_id = self.env['res.division'].search([
                ('name', '=', tools.ustr(division))
            ])
            # Bug fix: this was guarded by ``if section_id`` (copy-paste),
            # crashing when a section matched but no division did, and
            # dropping the division when no section matched.
            if division_id:
                line_vals.update({'division_id': division_id.id})
            location = Asset_Sheet.cell(row=row, column=9).value
            if location == '=FALSE()':
                location = ''
            line_vals.update({'location': location})
            quantity = Asset_Sheet.cell(row=row, column=10).value or 0.0
            line_vals.update({'quantity': quantity})
            price_unit = Asset_Sheet.cell(row=row, column=11).value or 0.0
            line_vals.update({'price_unit': price_unit})
            price_other = Asset_Sheet.cell(row=row, column=13).value or 0.0
            line_vals.update({'price_other': price_other})
            reason_purchase = Asset_Sheet.cell(row=row, column=15).value
            if reason_purchase:
                if tools.ustr(reason_purchase) == 'Replacement':
                    line_vals.update({'reason_purchase': 'replace'})
                elif tools.ustr(reason_purchase) == 'Extra':
                    line_vals.update({'reason_purchase': 'extra'})
                else:
                    line_vals.update({'reason_purchase': 'new'})
            reason_purchase_text =\
                Asset_Sheet.cell(row=row, column=16).value
            if reason_purchase_text == '=FALSE()':
                reason_purchase_text = ''
            line_vals.update(
                {'reason_purchase_text': reason_purchase_text})
            planned_utilization =\
                Asset_Sheet.cell(row=row, column=18).value
            if planned_utilization == '=FALSE()':
                planned_utilization = ''
            line_vals.update({'planned_utilization': planned_utilization})
            quotation_document = Asset_Sheet.cell(row=row, column=19).value
            if quotation_document == '=FALSE()':
                quotation_document = ''
            line_vals.update({'quotation_document': quotation_document})
            specification_summary =\
                Asset_Sheet.cell(row=row, column=20).value
            if specification_summary == '=FALSE()':
                specification_summary = ''
            line_vals.update(
                {'specification_summary': specification_summary})
            # Commitment / actual amounts.
            pr_commitment =\
                Asset_Sheet.cell(row=row, column=23).value or 0.0
            line_vals.update({'pr_commitment': pr_commitment})
            po_commitment =\
                Asset_Sheet.cell(row=row, column=24).value or 0.0
            line_vals.update({'po_commitment': po_commitment})
            exp_commitment =\
                Asset_Sheet.cell(row=row, column=25).value or 0.0
            line_vals.update({'exp_commitment': exp_commitment})
            total_commitment =\
                pr_commitment + po_commitment + exp_commitment
            line_vals.update({'total_commitment': total_commitment})
            actual_amount =\
                Asset_Sheet.cell(row=row, column=26).value or 0.0
            line_vals.update({'actual_amount': actual_amount})
            budget_residual =\
                Asset_Sheet.cell(row=row, column=28).value or 0.0
            line_vals.update({'budget_residual': budget_residual})
            lines_to_create.append((0, 0, line_vals))
        vals.update({'item_ids': lines_to_create})
        # Replace, not merge: drop the plan's existing lines first.
        existing_lines = self.env['invest.asset.plan.item'].\
            search([('plan_id', '=', plan.id)])
        if existing_lines:
            existing_lines.unlink()
        plan.write(vals)
        # Keep the uploaded file as an attachment and log the import.
        attachement_id = self.env['ir.attachment'].create({
            'name': self.datas_fname,
            'datas': stream.getvalue().encode('base64'),
            'datas_fname': self.datas_fname,
            'res_model': 'invest.asset.plan',
            'res_id': plan.id,
            'invest_asset_plan_id': plan.id,
            'description': 'Import',
        })
        self.env['budget.plan.history'].create({
            'user_id': self.env.user.id,
            'operation_date': fields.Datetime.now(),
            'operation_type': 'import',
            'invest_asset_plan_id': plan.id,
            'attachement_id': attachement_id.id
        })
    return True
else: try: html = body_mako_tpl.render(helper=helper, css=css, _=self.translate_call, **self.parser_instance.localcontext) htmls.append(html) except Exception, e: msg = exceptions.text_error_template().render() _logger.error(msg) raise except_osv(_('Webkit render'), msg) # NO html footer and header because we write them as text with wkhtmltopdf head = foot = False if report_xml.webkit_debug: try: deb = body_mako_tpl.render(helper=helper, css=css, _debug=tools.ustr("\n".join(htmls)), _=self.translate_call, **self.parser_instance.localcontext) except Exception, e: msg = exceptions.text_error_template().render() _logger.error(msg) raise except_osv(_('Webkit render'), msg) return (deb, 'html') bin = self.get_lib(cursor, uid) pdf = self.generate_pdf(bin, report_xml, head, foot, htmls) return (pdf, 'pdf')
def signal_printable(self, cr, uid, ids, context=None): print "· · · · · · · · · · · · · · · · · · ·" print "Entrando a def signal_printable" try: if context is None: context = {} aids = '' msj = '' index_pdf = '' attachment_obj = self.pool.get('ir.attachment') invoice = self.browse(cr, uid, ids)[0].invoice_id invoice_obj = self.pool.get('account.invoice') type = self.browse(cr, uid, ids)[0].type wf_service = netsvc.LocalService("workflow") (fileno, fname) = tempfile.mkstemp( '.pdf', 'openerp_' + (invoice.fname_invoice or '') + '__facturae__') os.close(fileno) #~ report = invoice_obj.create_report(cr, uid, [invoice.id], #~ "account.invoice.facturae.webkit", #~ fname) report_multicompany_obj = self.pool.get('report.multicompany') report_ids = report_multicompany_obj.search( cr, uid, [('model', '=', 'account.invoice')], limit=1) or False report_name = "account.report_invoice" if report_ids: report_name = report_multicompany_obj.browse( cr, uid, report_ids[0]).report_name print "invoice.fname_invoice: ", invoice.fname_invoice #report_xml_pool = self.pool.get('ir.actions.report.xml') #report = report_xml_pool.browse(cr, uid, report_ids[0], context) #print "report.report_name: ", report.report_name #report_service = report.report_name #print "report_service: ", report_service result, format = self.pool['report'].get_pdf( cr, uid, [invoice.id], report_name, context=context), 'pdf' result = base64.b64encode(result) report_name = invoice.fname_invoice if not report_name: report_name = 'report.' + report_service ext = "." 
+ format if not report_name.endswith(ext): report_name += ext print "report_name: ", report_name attachment_ids = attachment_obj.search( cr, uid, [('res_model', '=', 'account.invoice'), ('res_id', '=', invoice.id), ('datas_fname', '=', report_name)]) for attachment in self.browse(cr, uid, attachment_ids, context=context): # TODO: aids.append( attachment.id ) but without error in last # write aids = attachment.id attachment_obj.write( cr, uid, [attachment.id], { 'name': invoice.fname_invoice + '.pdf', }, context=context) if aids: msj = _("Attached Successfully PDF\n") else: raise osv.except_osv(_('Warning'), _('Not Attached PDF\n')) self.write(cr, uid, ids, { 'file_pdf': aids or False, 'msj': msj, 'last_date': time.strftime('%Y-%m-%d %H:%M:%S'), 'file_pdf_index': index_pdf }, context=context) wf_service.trg_validate(uid, self._name, ids[0], 'action_printable', cr) #raise osv.except_osv('Pausa','Pausa') return True except Exception, e: error = tools.ustr(traceback.format_exc()) self.write(cr, uid, ids, {'msj': error}, context=context) _logger.error(error) return False
def create_single_pdf(self, cursor, uid, ids, data, report_xml,
                      context=None):
    """Generate the PDF (or debug HTML) for a webkit report.

    Renders the mako body template once per record in precise mode, or
    once for all records otherwise, then hands the HTML fragments to
    wkhtmltopdf.  Header and footer are passed to wkhtmltopdf as text,
    not as HTML.

    :param cursor: database cursor
    :param uid: id of the acting user
    :param ids: ids of the records to print
    :param data: report data dict
    :param report_xml: browse record of ir.actions.report.xml
    :param context: optional context dictionary
    :return: ``(content, format)`` tuple — ``('...', 'pdf')`` normally,
        ``('...', 'html')`` when webkit_debug is set
    :raises except_orm: when no template is found, no header is defined,
        or the mako rendering fails
    """
    if context is None:
        context = {}
    htmls = []
    # Non-webkit reports are delegated to the standard implementation.
    if report_xml.report_type != 'webkit':
        return super(HeaderFooterTextWebKitParser, self
                     ).create_single_pdf(cursor, uid, ids, data,
                                         report_xml, context=context)
    parser_instance = self.parser(cursor, uid, self.name2,
                                  context=context)
    self.pool = pooler.get_pool(cursor.dbname)
    objs = self.getObjects(cursor, uid, ids, context)
    parser_instance.set_context(objs, data, ids, report_xml.report_type)
    # Template resolution order: file on disk, then inline webkit data.
    template = False
    if report_xml.report_file:
        path = get_module_resource(
            *report_xml.report_file.split(os.path.sep))
        if os.path.exists(path):
            # FIX: use a context manager instead of file(path).read(),
            # which leaked the file handle.
            with open(path) as template_file:
                template = template_file.read()
    if not template and report_xml.report_webkit_data:
        template = report_xml.report_webkit_data
    if not template:
        raise except_orm(
            _('Error!'), _('Webkit Report template not found !'))
    header = report_xml.webkit_header.html
    if not header and report_xml.header:
        raise except_orm(
            _('No header defined for this Webkit report!'),
            _('Please set a header in company settings.')
        )
    css = report_xml.webkit_header.css
    if not css:
        css = ''
    translate_call = partial(self.translate_call, parser_instance)
    # default_filters=['unicode', 'entity'] can be used to set global
    # filter
    body_mako_tpl = mako_template(template)
    helper = WebKitHelper(cursor, uid, report_xml.id, context)
    if report_xml.precise_mode:
        # Precise mode: render each record with its own template pass.
        for obj in objs:
            parser_instance.localcontext['objects'] = [obj]
            try:
                html = body_mako_tpl.render(
                    helper=helper, css=css, _=translate_call,
                    **parser_instance.localcontext)
                htmls.append(html)
            except Exception:
                msg = exceptions.text_error_template().render()
                _logger.error(msg)
                raise except_orm(_('Webkit render'), msg)
    else:
        try:
            html = body_mako_tpl.render(
                helper=helper, css=css, _=translate_call,
                **parser_instance.localcontext)
            htmls.append(html)
        except Exception:
            msg = exceptions.text_error_template().render()
            _logger.error(msg)
            raise except_orm(_('Webkit render'), msg)
    # NO html footer and header because we write them as text with
    # wkhtmltopdf
    head = foot = False
    if report_xml.webkit_debug:
        try:
            deb = body_mako_tpl.render(
                helper=helper, css=css,
                _debug=tools.ustr("\n".join(htmls)),
                _=translate_call,
                **parser_instance.localcontext)
        except Exception:
            msg = exceptions.text_error_template().render()
            _logger.error(msg)
            raise except_orm(_('Webkit render'), msg)
        return (deb, 'html')
    # FIX: renamed local 'bin' (shadowed the builtin) to 'webkit_lib'.
    webkit_lib = self.get_lib(cursor, uid)
    pdf = self.generate_pdf(webkit_lib, report_xml, head, foot, htmls,
                            parser_instance=parser_instance)
    return (pdf, 'pdf')
def change_product_qty(self, cr, uid, ids, context=None):
    """Update book copy counts and post a stock inventory adjustment.

    For each wizard line: when the line's parent location matches the
    configured library store, mirror the new quantity onto the
    product's ``total_copies``/``available_copies``, then create and
    validate a one-line ``stock.inventory`` so the stock level matches.

    :param cr: database cursor
    :param uid: id of the acting user
    :param ids: wizard record ids
    :param context: optional context; ``active_id`` is the product id
    :return: empty dict (wizard convention to close the popup)
    :raises Warning: when no store is configured or a quantity is
        negative
    """
    if context is None:
        context = {}
    conf = self.pool.get('ir.values')
    store_conf = conf.get_default(cr, uid, 'library.config.settings',
                                  'store')
    # FIX: validate the configuration before browsing — the original
    # browsed stock.warehouse with a possibly False id first.
    if not store_conf:
        raise Warning(_("Warning"),
                      _("Set a store to library from Library settings"))
    ware_brow = self.pool.get('stock.warehouse').browse(
        cr, uid, store_conf, context=context)
    get_conf_store = ware_brow.code
    inventory_obj = self.pool.get('stock.inventory')
    inventory_line_obj = self.pool.get('stock.inventory.line')
    po_id = context.get('active_id')
    prod_obj = self.pool.get('product.product')
    for data in self.browse(cr, uid, ids, context=context):
        # FIX: reject negative quantities before writing anything; the
        # original updated the product copies first.
        if data.new_quantity < 0:
            raise Warning(_("Warning"),
                          _("Quantity cannot be negative."))
        get_trans_loc = data.location_id.location_id.name
        if get_conf_store == get_trans_loc:
            prod_obj.write(cr, uid, po_id, {
                'total_copies': data.new_quantity,
                'available_copies': data.new_quantity,
            })
        # FIX: renamed local 'filter' (shadowed the builtin).
        if data.product_id.id and data.lot_id.id:
            inv_filter = 'none'
        elif data.product_id.id:
            inv_filter = 'product'
        else:
            inv_filter = 'none'
        inventory_id = inventory_obj.create(cr, uid, {
            'name': _('INV: %s') % tools.ustr(data.product_id.name),
            'filter': inv_filter,
            'product_id': data.product_id.id,
            'location_id': data.location_id.id,
            'lot_id': data.lot_id.id,
        }, context=context)
        # Theoretical quantity at the chosen location/lot.
        product = data.product_id.with_context(
            location=data.location_id.id, lot_id=data.lot_id.id)
        th_qty = product.qty_available
        line_data = {
            'inventory_id': inventory_id,
            'product_qty': data.new_quantity,
            'location_id': data.location_id.id,
            'product_id': data.product_id.id,
            'product_uom_id': data.product_id.uom_id.id,
            'theoretical_qty': th_qty,
            'prod_lot_id': data.lot_id.id,
        }
        inventory_line_obj.create(cr, uid, line_data, context=context)
        inventory_obj.action_done(cr, uid, [inventory_id],
                                  context=context)
    return {}
def signal_send_customer(self, cr, uid, ids, context=None):
    """Email the invoice with its attachments to the customer.

    Finds the outgoing mail server, selects the ``email.template``
    matching the invoice report, composes the message through
    ``mail.compose.message`` and sends it.  On success the workflow
    signal ``action_send_customer`` is triggered and True is returned;
    on any exception the traceback is stored on the record's ``msj``
    field and False is returned.

    :param cr: database cursor
    :param uid: id of the acting user
    :param ids: facturae attachment record ids (only the first is used)
    :param context: optional context dictionary
    """
    try:
        if context is None:
            context = {}
        attachments = []
        msj = ''
        attach_name = ''
        state = ''
        partner_mail = ''
        user_mail = ''
        company_id = self.pool.get('res.users').browse(
            cr, uid, uid, context=context).company_id.id
        invoice = self.browse(cr, uid, ids)[0].invoice_id
        # Invoice address of the partner (fetched but not used below).
        address_id = self.pool.get('res.partner').address_get(
            cr, uid, [invoice.partner_id.id], ['invoice'])['invoice']
        partner_invoice_address = self.pool.get('res.partner').browse(
            cr, uid, address_id, context=context)
        type = self.browse(cr, uid, ids)[0].type
        wf_service = netsvc.LocalService("workflow")
        fname_invoice = invoice.fname_invoice and invoice.fname_invoice or ''
        # Collect every attachment already linked to the invoice so it
        # can be sent along with the email.
        adjuntos = self.pool.get('ir.attachment').search(
            cr, uid, [('res_model', '=', 'account.invoice'),
                      ('res_id', '=', invoice.id)])
        subject = 'Invoice ' + (invoice.number or '')
        for attach in self.pool.get('ir.attachment').browse(
                cr, uid, adjuntos):
            attachments.append(attach.id)
            attach_name += attach.name + ', '
        # NOTE(review): string comparison of version numbers — works for
        # '7'/'8' style versions only; confirm against release.version.
        if release.version >= '7':
            obj_ir_mail_server = self.pool.get('ir.mail_server')
            obj_mail_mail = self.pool.get('mail.mail')
            obj_users = self.pool.get('res.users')
            obj_partner = self.pool.get('res.partner')
            # Prefer a company-specific server, else a shared one.
            mail_server_id = obj_ir_mail_server.search(
                cr, uid, [
                    '|', ('company_id', '=', company_id),
                    ('company_id', '=', False)
                ], limit=1, order='sequence', context=None)
            if mail_server_id:
                for smtp_server in obj_ir_mail_server.browse(
                        cr, uid, mail_server_id, context=context):
                    server_name = smtp_server.name
                    smtp = False
                    # Probe the SMTP connection up front so a broken
                    # configuration fails with a clear message.
                    try:
                        smtp = obj_ir_mail_server.connect(
                            smtp_server.smtp_host,
                            smtp_server.smtp_port,
                            user=smtp_server.smtp_user,
                            password=smtp_server.smtp_pass,
                            encryption=smtp_server.smtp_encryption,
                            smtp_debug=smtp_server.smtp_debug)
                    except Exception, e:
                        raise osv.except_osv(
                            _("Connection test failed!"),
                            _("Configure outgoing mail server named FacturaE:\n %s"
                              ) % tools.ustr(e))
                    mail_compose_message_pool = self.pool.get(
                        'mail.compose.message')
                    email_pool = self.pool.get('email.template')
                    # Per-company report name, when one is configured.
                    report_multicompany_obj = self.pool.get(
                        'report.multicompany')
                    report_ids = report_multicompany_obj.search(
                        cr, uid, [('model', '=', 'account.invoice')],
                        limit=1) or False
                    if report_ids:
                        report_name = report_multicompany_obj.browse(
                            cr, uid, report_ids[0]).report_name
                    # NOTE(review): report_name is unbound when no
                    # report.multicompany record exists — confirm a
                    # default is set elsewhere or this raises NameError.
                    if report_name:
                        tmp_id = email_pool.search(
                            cr, uid,
                            [('model_id.model', '=', 'account.invoice'),
                             ('company_id', '=', company_id),
                             ('mail_server_id', '=', smtp_server.id),
                             ('report_template.report_name', '=',
                              report_name)],
                            limit=1, context=context)
                    else:
                        # Fall back to the facturae webkit report.
                        tmp_id = email_pool.search(
                            cr, uid,
                            [('model_id.model', '=', 'account.invoice'),
                             ('company_id', '=', company_id),
                             ('mail_server_id', '=', smtp_server.id),
                             ('report_template.report_name', '=',
                              'account.invoice.facturae.webkit')],
                            limit=1, context=context)
                    if tmp_id:
                        # Pre-fill the composer from the template.
                        message = mail_compose_message_pool.onchange_template_id(
                            cr, uid, [], template_id=tmp_id[0],
                            composition_mode=None,
                            model='account.invoice',
                            res_id=invoice.id, context=context)
                        mssg = message.get('value', False)
                        user_mail = obj_users.browse(
                            cr, uid, uid, context=None).email
                        partner_id = mssg.get('partner_ids', False)
                        partner_mail = obj_partner.browse(
                            cr, uid, partner_id)[0].email
                        partner_name = obj_partner.browse(
                            cr, uid, partner_id)[0].name
                        if partner_mail:
                            if user_mail:
                                # Convert the template's partner list to
                                # the (6, 0, ids) write command form.
                                if mssg.get('partner_ids', False) and tmp_id:
                                    mssg['partner_ids'] = [
                                        (6, 0, mssg['partner_ids'])
                                    ]
                                mssg['attachment_ids'] = [(6, 0, attachments)]
                                mssg_id = self.pool.get(
                                    'mail.compose.message').create(
                                        cr, uid, mssg, context=None)
                                state = self.pool.get(
                                    'mail.compose.message').send_mail(
                                        cr, uid, [mssg_id], context=context)
                                # If a mail.mail with this subject still
                                # exists in error state, sending failed;
                                # otherwise report success.
                                asunto = mssg['subject']
                                id_mail = obj_mail_mail.search(
                                    cr, uid, [('subject', '=', asunto)])
                                if id_mail:
                                    for mail in obj_mail_mail.browse(
                                            cr, uid, id_mail, context=None):
                                        if mail.state == 'exception':
                                            msj = _(
                                                '\nNot correct email of the user or customer. Check in Menu Configuraci\xc3\xb3n\Tecnico\Email\Emails\n'
                                            )
                                else:
                                    msj = _(
                                        'Email Send Successfully.Attached is sent to %s for Outgoing Mail Server %s'
                                    ) % (partner_mail, server_name)
                                self.write(
                                    cr, uid, ids, {
                                        'msj': msj,
                                        'last_date': time.strftime(
                                            '%Y-%m-%d %H:%M:%S')
                                    })
                                wf_service.trg_validate(
                                    uid, self._name, ids[0],
                                    'action_send_customer', cr)
                                return True
                            else:
                                raise osv.except_osv(
                                    _('Warning'),
                                    _('This user does not have mail'))
                        else:
                            raise osv.except_osv(
                                _('Warning'),
                                _('The customer %s does not have mail')
                                % (partner_name))
                    else:
                        raise osv.except_osv(
                            _('Warning'),
                            _('Check that your template is assigned outgoing mail server named %s.\nAlso the field has report_template = Factura Electronica Report.\nTemplate is associated with the same company'
                              ) % (server_name))
            else:
                raise osv.except_osv(
                    _('Warning'),
                    _('Not Found\ outgoing mail server.Configure the outgoing mail server named "FacturaE"'
                      ))
    except Exception, e:
        # Best effort: persist the traceback and report failure.
        error = tools.ustr(traceback.format_exc())
        self.write(cr, uid, ids, {'msj': error}, context=context)
        _logger.error(error)
        return False
def signal_cancel(self, cr, uid, ids, context=None):
    """Cancel the electronic invoice attachment and its invoice.

    For each record, dispatches on the facturae ``type``:

    * ``cfdi`` — when not already cancel/draft/confirmed, calls the
      type-specific cancel driver from ``get_driver_fc_cancel()``,
      then fires the ``action_cancel`` workflow signal and cancels the
      invoice; otherwise signals/cancels directly.
    * ``cfd`` (non-cfdi) and ``cbb`` — signal and cancel directly.
    * anything else — raises.

    Cancelling the invoice also detaches its ir_attachment rows via
    raw SQL so the attachments survive the invoice workflow.

    :param cr: database cursor
    :param uid: id of the acting user
    :param ids: facturae attachment record ids
    :param context: optional context dictionary
    :return: False on exception; otherwise None.
        NOTE(review): there is no ``return True`` on the success path —
        confirm callers do not rely on a truthy result.
    """
    try:
        invoice_obj = self.pool.get('account.invoice')
        attach_obj = self.pool.get('ir.attachment')
        wf_service = netsvc.LocalService("workflow")
        inv_cancel_status = False
        for ir_attach_facturae_mx_id in self.browse(cr, uid, ids,
                                                    context=context):
            msj = ''
            invoice = ir_attach_facturae_mx_id.invoice_id
            if 'cfdi' in ir_attach_facturae_mx_id.type:
                if not ir_attach_facturae_mx_id.state in [
                        'cancel', 'draft', 'confirmed'
                ]:
                    # Look up the PAC-specific cancel driver keyed by
                    # the facturae type.
                    type__fc = self.get_driver_fc_cancel()
                    if ir_attach_facturae_mx_id.type in type__fc.keys():
                        cfdi_cancel = res = type__fc[
                            ir_attach_facturae_mx_id.type](
                                cr, uid, [ir_attach_facturae_mx_id.id],
                                context=context)
                        msj += tools.ustr(cfdi_cancel.get(
                            'message', False))
                        # TODO, validate cfdi_cancel True or False
                        if cfdi_cancel.get('status', True):
                            wf_service.trg_validate(
                                uid, self._name,
                                ir_attach_facturae_mx_id.id,
                                'action_cancel', cr)
                            if invoice.state != 'cancel':
                                inv_cancel_status = invoice_obj.action_cancel(
                                    cr, uid, [invoice.id],
                                    context=context)
                                # Detach attachments so cancelling the
                                # invoice does not drop them.
                                cr.execute(
                                    """UPDATE ir_attachment
                                    SET res_id = Null
                                    WHERE res_id = %s
                                    and res_model='account.invoice'""",
                                    (invoice.id, ))
                            else:
                                inv_cancel_status = True
                    else:
                        msj += _("Unknow cfdi driver for %s" % (
                            ir_attach_facturae_mx_id.type))
                else:
                    # Already cancel/draft/confirmed: no PAC call, just
                    # signal the workflow and cancel the invoice.
                    wf_service.trg_validate(uid, self._name,
                                            ir_attach_facturae_mx_id.id,
                                            'action_cancel', cr)
                    if invoice.state != 'cancel':
                        inv_cancel_status = invoice_obj.action_cancel(
                            cr, uid, [invoice.id], context=context)
                        cr.execute(
                            """UPDATE ir_attachment
                            SET res_id = Null
                            WHERE res_id = %s
                            and res_model='account.invoice'""",
                            (invoice.id, ))
                    else:
                        inv_cancel_status = True
                    msj = 'cancelled'
            elif 'cfd' in ir_attach_facturae_mx_id.type and not 'cfdi' in ir_attach_facturae_mx_id.type:
                wf_service.trg_validate(uid, self._name,
                                        ir_attach_facturae_mx_id.id,
                                        'action_cancel', cr)
                inv_cancel_status = invoice_obj.action_cancel(
                    cr, uid, [invoice.id], context=context)
                msj = 'cancelled'
                inv_cancel_status = True
            elif 'cbb' in ir_attach_facturae_mx_id.type:
                wf_service.trg_validate(uid, self._name,
                                        ir_attach_facturae_mx_id.id,
                                        'action_cancel', cr)
                inv_cancel_status = invoice_obj.action_cancel(
                    cr, uid, [invoice.id], context=context)
                msj = 'cancelled'
                inv_cancel_status = True
            else:
                raise osv.except_osv(
                    _("Type Electronic Invoice Unknow!"),
                    _("The Type Electronic Invoice:" +
                      (ir_attach_facturae_mx_id.type or '')))
            # Record the outcome message and timestamp on all ids.
            self.write(cr, uid, ids, {
                'last_date': time.strftime('%Y-%m-%d %H:%M:%S'),
                'msj': msj,
            })
    except Exception, e:
        # Best effort: persist the traceback and report failure.
        error = tools.ustr(traceback.format_exc())
        self.write(cr, uid, ids, {'msj': error}, context=context)
        _logger.error(error)
        return False