def get_config(cr, uid, sale=None, logistic_company_id=None, context=None, config=None):
    """Return the UPS configuration relevant to the given object.

    Resolution order: explicit ``config`` argument, the sale order's UPS
    shipper, the logistic company's shipping account, the sale's company
    account, then the current user's company account.  Falls back to
    ``settings.UPS_CONFIG`` when nothing is configured in the database.
    """
    if not config and sale and sale.ups_shipper_id:
        config = sale.ups_shipper_id
    if not config and logistic_company_id:
        # BUG FIX: pooler.get_pool() takes the database name; the model is
        # looked up on the pool, as done everywhere else in this file.
        log_comp = pooler.get_pool(cr.dbname).get('logistic.company').browse(
            cr, uid, logistic_company_id, context=context)
        config = log_comp.ups_account_shipping_id if log_comp else None
    if not config and sale:
        config = sale.company_id.ups_account_shipping_id
    if not config:
        # Just go by uid.
        user_pool = pooler.get_pool(cr.dbname).get("res.users")
        user = user_pool.browse(cr, uid, uid, context=context)
        config = user.company_id.ups_account_shipping_id
    if config:
        return {
            'username': config.userid,
            'password': config.password,
            'access_license': config.access_license,
            'shipper_number': config.acc_no,
            "sandbox": config.sandbox,
            "negotiated_rates": config.negotiated_rates,
        }
    return settings.UPS_CONFIG
def get_config(cr, uid, sale=None, logistic_company_id=None, context=None, config=None, test=False):
    """Return the FedEx configuration relevant to the given object.

    Resolution order: explicit ``config`` argument, the sale order's FedEx
    shipper, the logistic company's shipping account, the sale's company
    account, then the current user's company account.  Falls back to
    ``settings.FEDEX_CONFIG`` when nothing is configured in the database.
    """
    if not config and sale and sale.fedex_shipper_id:
        config = sale.fedex_shipper_id
    if not config and logistic_company_id:
        # BUG FIX: pooler.get_pool() takes the database name; the model is
        # looked up on the pool, as done everywhere else in this file.
        log_comp = pooler.get_pool(cr.dbname).get('logistic.company').browse(
            cr, uid, logistic_company_id, context=context)
        config = log_comp.fedex_account_shipping_id if log_comp else None
    if not config and sale:
        config = sale.company_id.fedex_account_shipping_id
    if not config:
        # Just go by uid.
        user_pool = pooler.get_pool(cr.dbname).get("res.users")
        user = user_pool.browse(cr, uid, uid, context=context)
        config = user.company_id.fedex_account_shipping_id
    if config:
        return FedexConfig(
            config.key,
            config.password,
            account_number=config.account_number,
            meter_number=config.meter_number,
            integrator_id=config.integrator_id,
            # Explicit test flag forces the sandbox endpoint.
            use_test_server=config.sandbox or test,
        )
    return settings.FEDEX_CONFIG
def __init__(self, cr, uid, name, context):
    # Report parser bootstrap for the financial-statement category report.
    super(account_fstr_report, self).__init__(cr, uid, name, context)
    try:
        # When browse() returns a list (ids passed as a list elsewhere),
        # take the first record as the root node of the category tree.
        self.root_node_obj = pooler.get_pool(cr.dbname).get('account_fstr.category').browse(cr, uid, context.get('account_fstr_root_node', context['active_id']), context=context)[0]
        ept_test_obj = self.pool.get('account_fstr.category').browse(cr, uid, [self.root_node_obj.id], context=context)
        # print str(self.root_node_obj.id)
        for o in ept_test_obj:
            digit_rounding = o.digits_round
        self.category_pool = pooler.get_pool(cr.dbname).get('account_fstr.category')
        ids = context['active_ids']
        self.localcontext.update({
            'time': time,
            'template_data': self._get_template_data(cr, uid, ids, [], [self.root_node_obj.id], context=context),
            'date_end': '',
            # e.g. digits_round == 4 -> '00' suffix used by the template.
            'digits_round': '0'*(digit_rounding - 2)
        })
    except:
        # NOTE(review): bare except — intended to fall back to treating the
        # browse result as a single record rather than a list, but it also
        # silently swallows any other failure (missing context keys, etc.).
        self.root_node_obj = pooler.get_pool(cr.dbname).get('account_fstr.category').browse(cr, uid, context.get('account_fstr_root_node', context['active_id']), context=context)
        self.category_pool = pooler.get_pool(cr.dbname).get('account_fstr.category')
        ids = context['active_ids']
        self.localcontext.update({
            'time': time,
            # Here the root node id is passed bare, not wrapped in a list.
            'template_data': self._get_template_data(cr, uid, ids, [], self.root_node_obj.id, context=context),
            'date_end': '',
            'digits_round': '0'*(self.root_node_obj.digits_round - 2)
        })
def create(self, cr, uid, ids, datas, context):
    """Render the report, optionally substituting a custom (CJK) font.

    Returns ``(content, report_type)``: the raw XML when report_type is
    'raw', otherwise the rendered document produced by the matching
    generator (e.g. PDF).
    """
    xml = self.create_xml(cr, uid, ids, datas, context)
    xml = tools.ustr(xml).encode('utf8')
    report_type = datas.get('report_type', 'pdf')
    if report_type == 'raw':
        return xml, report_type
    rml = self.create_rml(cr, xml, uid, context)
    pool = pooler.get_pool(cr.dbname)
    ir_actions_report_xml_obj = pool.get('ir.actions.report.xml')
    report_xml_ids = ir_actions_report_xml_obj.search(
        cr, uid, [('report_name', '=', self.name[7:])], context=context)
    self.title = report_xml_ids and ir_actions_report_xml_obj.browse(
        cr, uid, report_xml_ids)[0].name or 'OpenERP Report'
    create_doc = self.generators[report_type]
    # The font substitution below mutates the module-global CustomTTFonts
    # table; snapshot it so it can always be restored.
    originCustomTTFonts = deepcopy(cfonts.CustomTTFonts)
    try:
        install_ids = pool.get('ir.module.module').search(
            cr, uid, [('name', '=', 'oecn_base_fonts'), ('state', '=', 'installed')])
        if install_ids:
            p1 = re.compile('<setFont name=".*?" ')
            p2 = re.compile('fontName=".*?" ')
            config_parameter_obj = pool.get("ir.config_parameter")
            base_font = config_parameter_obj.get_param(cr, uid, "font_url")
            cjk_wrap = config_parameter_obj.get_param(cr, uid, "cjk_wrap")
            fonts_map = []
            if base_font:
                for font in OE_FONTS:
                    fonts_map += [(font, 'myFont', base_font, 'all')]
                cfonts.CustomTTFonts = fonts_map
            ParagraphStyle.defaults['wordWrap'] = cjk_wrap and 'CJK' or ''
            # Rewrite every font reference in the RML to the custom font.
            rml = p1.sub('<setFont name="' + 'myFont' + '" ', rml)
            rml = p2.sub('fontName="' + 'myFont' + '" ', rml)
        pdf = create_doc(rml, title=self.title)
    finally:
        # BUG FIX: restore the global font table even when rendering raises,
        # so one failed report cannot corrupt font state for later reports.
        cfonts.CustomTTFonts = originCustomTTFonts
    return pdf, report_type
def _get_text(self, stat_line, followup_id, context=None):
    """Return the follow-up letter text for a partner statement line.

    Picks the description of the highest follow-up level the partner's
    open receivable lines have already reached, falling back to the first
    follow-up line that has a description, then substitutes the partner,
    date, company and signature placeholders into the text.
    """
    if context is None:
        context = {}
    # Render the letter in the partner's language.
    context.update({'lang': stat_line.partner_id.lang})
    fp_obj = pooler.get_pool(self.cr.dbname).get('account_followup.followup')
    fp_line = fp_obj.browse(self.cr, self.uid, followup_id, context=context).followup_line
    if not fp_line:
        raise osv.except_osv(_('Error!'),_("The followup plan defined for the current company does not have any followup action."))
    #the default text will be the first fp_line in the sequence with a description.
    default_text = ''
    li_delay = []
    for line in fp_line:
        if not default_text and line.description:
            default_text = line.description
        li_delay.append(line.delay)
    li_delay.sort(reverse=True)
    a = {}
    #look into the lines of the partner that already have a followup level, and take the description of the higher level for which it is available
    partner_line_ids = pooler.get_pool(self.cr.dbname).get('account.move.line').search(self.cr, self.uid, [('partner_id','=',stat_line.partner_id.id),('reconcile_id','=',False),('company_id','=',stat_line.company_id.id),('blocked','=',False),('state','!=','draft'),('debit','!=',False),('account_id.type','=','receivable'),('followup_line_id','!=',False)])
    partner_max_delay = 0
    partner_max_text = ''
    for i in pooler.get_pool(self.cr.dbname).get('account.move.line').browse(self.cr, self.uid, partner_line_ids, context=context):
        if i.followup_line_id.delay > partner_max_delay and i.followup_line_id.description:
            partner_max_delay = i.followup_line_id.delay
            partner_max_text = i.followup_line_id.description
    # Prefer the partner's highest reached level; otherwise the default text.
    text = partner_max_delay and partner_max_text or default_text
    if text:
        # %-substitution: templates may reference any of these placeholders.
        text = text % {
            'partner_name': stat_line.partner_id.name,
            'date': time.strftime('%Y-%m-%d'),
            'company_name': stat_line.company_id.name,
            'user_signature': pooler.get_pool(self.cr.dbname).get('res.users').browse(self.cr, self.uid, self.uid, context).signature or '',
        }
    return text
def _get_tax(self, order_obj):
    """Return tax summary rows for a purchase order.

    Each row is ``{'code': tax name, 'base': untaxed base, 'amount': tax}``
    computed over the order lines the tax applies to.
    """
    # BUG FIX: psycopg2 query parameters must be a sequence; "(order_obj.id)"
    # is just a parenthesised int, so the trailing comma is required.
    self.cr.execute("SELECT DISTINCT tax_id FROM purchase_order_taxe, purchase_order_line, purchase_order \
            WHERE (purchase_order_line.order_id=purchase_order.id) AND (purchase_order.id=%s)", (order_obj.id,))
    res = self.cr.fetchall() or None
    if not res:
        return []
    if isinstance(res, list):
        tax_ids = [t[0] for t in res]
    else:
        tax_ids = res[0]
    tax_obj = pooler.get_pool(self.cr.dbname).get('account.tax')
    res = []
    for tax in tax_obj.browse(self.cr, self.uid, tax_ids):
        # Collect the order lines this tax applies to.
        self.cr.execute("SELECT DISTINCT order_line_id FROM purchase_order_line, purchase_order_taxe \
                WHERE (purchase_order_taxe.tax_id=%s) AND (purchase_order_line.order_id=%s)", (tax.id, order_obj.id))
        lines = self.cr.fetchall() or None
        if lines:
            if isinstance(lines, list):
                line_ids = [l[0] for l in lines]
            else:
                line_ids = lines[0]
            base = 0
            for line in pooler.get_pool(self.cr.dbname).get('purchase.order.line').browse(self.cr, self.uid, line_ids):
                base += line.price_subtotal
            res.append({'code': tax.name, 'base': base, 'amount': base * tax.amount})
    return res
def init(self, cr, uid=1):
    """
    This view will be used in dashboard.
    The reason for writing this code here is that we need to build date
    ranges going back from today to the first date of the fiscal year.
    """
    pool = pooler.get_pool(cr.dbname)
    pool_obj_fy = pool.get('account.fiscalyear')
    today = time.strftime('%Y-%m-%d')
    fy_id = pool_obj_fy.find(cr, uid, exception=False)
    LIST_RANGES = []
    if fy_id:
        fy_start_date = pool_obj_fy.read(cr, uid, fy_id, ['date_start'])['date_start']
        fy_start_date = datetime.strptime(fy_start_date, '%Y-%m-%d')
        last_month_date = datetime.strptime(today, '%Y-%m-%d') - relativedelta(months=1)
        # Walk backwards one month at a time until the fiscal year start.
        while (last_month_date > fy_start_date):
            LIST_RANGES.append(today + " to " + last_month_date.strftime('%Y-%m-%d'))
            today = (last_month_date - relativedelta(days=1)).strftime('%Y-%m-%d')
            last_month_date = datetime.strptime(today, '%Y-%m-%d') - relativedelta(months=1)
        LIST_RANGES.append(today + " to " + fy_start_date.strftime('%Y-%m-%d'))
        cr.execute('delete from temp_range')
        # PERF: hoist the model lookup out of the loop; also avoid shadowing
        # the builtin ``range``.
        temp_range_obj = pool.get('temp.range')
        for range_name in LIST_RANGES:
            temp_range_obj.create(cr, uid, {'name': range_name})
    cr.execute("""
        create or replace view report_aged_receivable as (
            select id, name from temp_range
        )""")
def jasper_witholding(cr, uid, ids, data, context):
    """Build the Jasper parameter dict for the withholding voucher report.

    Splits the partner's and the company's VAT field ('a/b/c/d' format)
    into four parameters each and concatenates the names of the voucher's
    positive lines into the 'invoice' parameter.
    """
    pool = pooler.get_pool(cr.dbname)

    def _split_vat(partner_id):
        # VAT is stored as up to four '/'-separated segments.  Pad with
        # empty strings so a malformed value no longer raises IndexError
        # (previously vat[1]..vat[3] would blow up on short values).
        vat = pool.get('res.partner').browse(cr, uid, partner_id).vat
        parts = str(vat).split("/") if vat else []
        parts += [''] * (4 - len(parts))
        return parts[0], parts[1], parts[2], parts[3]

    voucher = pool.get('account.voucher').browse(cr, uid, ids[0])
    vat1, vat2, vat3, vat4 = _split_vat(voucher.partner_id.id)
    company_id = pool.get('res.company')._company_default_get(cr, uid, 'account.voucher', context=context)
    company = pool.get('res.company').browse(cr, uid, company_id)
    cvat1, cvat2, cvat3, cvat4 = _split_vat(company.partner_id.id)
    cr.execute("select vl.name from account_voucher av \
            left outer join account_voucher_line vl on vl.voucher_id = av.id\
             where vl.amount > 0 and av.id IN %s ", (tuple(ids),))
    invoice_rows = cr.dictfetchall()
    # Join the line names with the same ' ,' separator the report used
    # before (no trailing separator on the last name).
    invo = ' ,'.join(str(row['name']) for row in invoice_rows)
    return {
        'parameters': {
            'voucher_id': ids[0],
            'vat1': vat1,
            'vat2': vat2,
            'vat3': vat3,
            'vat4': vat4,
            'cvat1': cvat1,
            'cvat2': cvat2,
            'cvat3': cvat3,
            'cvat4': cvat4,
            'company': company.name,
            'street': company.street,
            'city': company.city or '',
            'invoice': invo,
        },
    }
def do_action(self, cr, uid, ids, context=None):
    """Register the selected report as a client print action.

    @param cr: database cursor
    @param uid: id of the current user
    @param ids: ids of report.webkit.actions records
    @param context: standard context dictionary
    @return: an act_window on the created ir.values record, or a window
             close action when the user did not ask to open it.
    """
    if context is None:
        context = {}
    report_obj = self.pool.get('ir.actions.report.xml')
    for current in self.browse(cr, uid, ids, context=context):
        report = report_obj.browse(
            cr, uid, context.get('active_id'), context=context
        )
        # The only difference between the two registration modes is the
        # models list: a plain print button binds to the model alone,
        # otherwise the binding also carries the 0 marker.
        bound_models = [report.model] if current.print_button else [report.model, 0]
        ir_values_obj = pooler.get_pool(cr.dbname).get('ir.values')
        res = ir_values_obj.set(
            cr, uid, 'action', 'client_print_multi',
            report.report_name, bound_models,
            'ir.actions.report.xml,%d' % context.get('active_id', False),
            isobject=True
        )
    if res[0]:
        if not current.open_action:
            return {'type': 'ir.actions.act_window_close'}
        return {
            'name': _('Client Actions Connections'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_id': res[0],
            'res_model': 'ir.values',
            'view_id': False,
            'type': 'ir.actions.act_window',
        }
def create_xml(self, cr, uid, ids, data, context):
    """Build the monthly timesheet XML document for the selected employees."""
    # Computing the dates (start of month: som, and end of month: eom)
    som = datetime.date(data["form"]["year"], data["form"]["month"], 1)
    eom = som + datetime.timedelta(lengthmonth(som.year, som.month))
    date_xml = [
        '<date month="%s" year="%d" />' % (self.get_month_name(cr, uid, som.month, context=context), som.year),
        "<days>",
    ]
    date_xml += [
        '<day number="%d" name="%s" weekday="%d" />' % (
            x,
            self.get_weekday_name(cr, uid, som.replace(day=x).weekday() + 1, context=context),
            som.replace(day=x).weekday() + 1,
        )
        for x in range(1, lengthmonth(som.year, som.month) + 1)
    ]
    date_xml.append("</days>")
    date_xml.append("<cols>2.5cm%s,2cm</cols>\n" % (",0.7cm" * lengthmonth(som.year, som.month)))
    emp_xml = ""
    emp_obj = pooler.get_pool(cr.dbname).get("hr.employee")
    for emp_id in data["form"]["employee_ids"]:
        # PERF FIX: browse each employee once instead of twice per id.
        employee = emp_obj.browse(cr, uid, emp_id)
        user = employee.user_id.id
        empl_name = employee.name
        if user:
            emp_xml += emp_create_xml(cr, user, som, eom, empl_name)
    # Computing the xml
    # Without this, report don't show non-ascii characters (TO CHECK)
    date_xml = "\n".join(date_xml)
    rpt_obj = pooler.get_pool(cr.dbname).get("hr.employee")
    rml_obj = report_sxw.rml_parse(cr, uid, rpt_obj._name, context)
    header_xml = """
    <header>
    <date>%s</date>
    <company>%s</company>
    </header>
    """ % (
        str(rml_obj.formatLang(time.strftime("%Y-%m-%d"), date=True)) + " " + str(time.strftime("%H:%M")),
        toxml(pooler.get_pool(cr.dbname).get("res.users").browse(cr, uid, uid).company_id.name),
    )
    xml = """<?xml version="1.0" encoding="UTF-8" ?>
    <report>
    %s
    %s
    %s
    </report>
    """ % (
        header_xml,
        date_xml,
        ustr(emp_xml),
    )
    return xml
def _journal_ids(self, form, user_id):
    """Return the analytic journals with lines for ``user_id`` in the period."""
    pool = pooler.get_pool(self.cr.dbname)
    line_obj = pool.get('account.analytic.line')
    journal_obj = pool.get('account.analytic.journal')
    domain = [
        ('date', '>=', form['date_from']),
        ('date', '<=', form['date_to']),
        ('journal_id', 'in', form['journal_ids'][0][2]),
        ('user_id', '=', user_id),
    ]
    matching_line_ids = line_obj.search(self.cr, self.uid, domain)
    # Deduplicate the journals referenced by the matching lines.
    journal_ids = {line.journal_id.id
                   for line in line_obj.browse(self.cr, self.uid, matching_line_ids)}
    return journal_obj.browse(self.cr, self.uid, list(journal_ids))
def cmis_connect(cr, uid):
    """Connect to the CMIS Server and returns the document repository"""
    pool = pooler.get_pool(cr.dbname)
    user = pool.get('res.users').browse(cr, uid, uid)
    server_url = pool.get('ir.config_parameter').get_param(
        cr, uid, 'document_cmis.server_url')
    if not server_url:
        raise osv.except_osv(
            _('Error!'),
            _("Cannot connect to the CMIS Server: No CMIS Server URL system property found"))
    # Authenticate with the current user's OpenERP credentials.
    client = CmisClient(server_url, user.login, user.password)
    return client.getDefaultRepository()
def _get_defaults(self, cr, uid, data, context=None):
    """Fill the wizard form with default company and fiscal year values."""
    if context is None:
        context = {}
    pool = pooler.get_pool(cr.dbname)
    user = pool.get('res.users').browse(cr, uid, uid, context=context)
    if user.company_id:
        company_id = user.company_id.id
    else:
        # User has no company: fall back to the first top-level company.
        root_company_ids = pool.get('res.company').search(
            cr, uid, [('parent_id', '=', False)])
        company_id = root_company_ids[0]
    form = data['form']
    form['company_id'] = company_id
    form['fiscalyear'] = pool.get('account.fiscalyear').find(cr, uid)
    form['context'] = context
    return form
def _get_order_invoice_line(self, invoice_id, limit_page, offset_page):
    """Return one page of invoice lines for ``invoice_id`` (empty list if none)."""
    pool = pooler.get_pool(self.cr.dbname)
    invoice = pool.get('account.invoice').browse(self.cr, self.uid, invoice_id)
    if not (invoice and invoice.invoice_line):
        return []
    line_obj = pool.get('account.invoice.line')
    # Paginate with the caller-supplied limit/offset.
    page_ids = line_obj.search(
        self.cr, self.uid, [('invoice_id', '=', invoice_id)],
        limit=limit_page, offset=offset_page)
    return line_obj.browse(self.cr, self.uid, page_ids)
def _set_dates(self, form_values, selected_items):
    """Cache the period start/stop dates, report year and VAT registry name."""
    registry_pool = pooler.get_pool(self.cr.dbname)
    period = registry_pool.get('account.period').browse(
        self.cr, self.uid, form_values['period_id'][0])
    self.date_start = period.date_start
    self.date_stop = period.date_stop
    # The year is the leading component of the ISO end date (YYYY-MM-DD).
    self.date_year = self.date_stop.split("-")[0]
    vat_registry = registry_pool.get('vat.registries.isa').browse(
        self.cr, self.uid, form_values['vat_register'][0])
    self.registry_name = vat_registry.name
def _lines_get(self, move):
    """Return the move lines of ``move`` and cache its invoice number.

    Side effect: sets ``self._invoiceNumber`` to the number of the first
    invoice linked to the move, or '' when there is none.
    """
    pool = pooler.get_pool(self.cr.dbname)
    moveline_obj = pool.get('account.move.line')
    moveline_ids = moveline_obj.search(self.cr, self.uid, [('move_id', '=', move.id)])
    movelines = moveline_obj.browse(self.cr, self.uid, moveline_ids)
    # Set Invoice Number to self._invoiceNumber.
    # PERF FIX: browse the invoices once instead of twice.
    invoice_obj = pool.get('account.invoice')
    invoice_ids = invoice_obj.search(self.cr, self.uid, [('move_id', '=', move.id)])
    invoices = invoice_obj.browse(self.cr, self.uid, invoice_ids)
    if len(invoices):
        self._invoiceNumber = invoices[0]['number']
    else:
        self._invoiceNumber = ''
    return movelines
def __init__(self, cr, uid, name, context):
    # Report parser bootstrap for the financial-statement category report.
    super(account_fstr_report, self).__init__(cr, uid, name, context)
    # The root category comes from the wizard when launched from it,
    # otherwise from the current active record.
    category_id = context.get('active_id')
    if context.get('active_model') == 'account_fstr.wizard':
        category_id = context.get('account_fstr_root_node')[0]
    self.root_node_obj = pooler.get_pool(cr.dbname).get('account_fstr.category').browse(cr, uid, category_id, context=context)
    self.category_pool = pooler.get_pool(cr.dbname).get('account_fstr.category')
    ids = context['active_ids']
    self.localcontext.update({
        'time': time,
        # NOTE(review): the browse record itself (not its id) is passed to
        # _get_template_data here — confirm that is what the helper expects.
        'template_data': self._get_template_data(cr, uid, ids, [], self.root_node_obj, context=context),
        'date_end': '',
        # e.g. digits_round == 4 -> '00' suffix used by the template.
        'digits_round': '0' * (self.root_node_obj.digits_round - 2)
    })
def web_login(self, *args, **kw):
    """Wrap the standard login to enforce account lockout after repeated
    failed attempts.

    On a failed POST login, increments the user's wrong-login counter and
    locks the account once the company-configured attempt count is
    reached; the error message shown depends on the configured unlock
    delay.
    """
    ensure_db()
    dbname = request.session.db
    registry = RegistryManager.get(dbname)
    #cr = registry.cursor()
    cr = request.cr
    response = super(LockoutSign, self).web_login(*args, **kw)
    # IDIOM FIX: dict.has_key() is deprecated — use the `in` operator.
    if response.is_qweb and 'error' in response.qcontext:
        error = response.qcontext['error']
        if error and request.httprequest.method == 'POST':
            old_uid = request.uid
            # PERF: look the pool and models up once instead of per call.
            pool = pooler.get_pool(request.session.db)
            company_obj = pool.get('res.company')
            users_obj = pool.get('res.users')
            company_ids = company_obj.search(cr, SUPERUSER_ID, [])
            company = company_obj.browse(cr, SUPERUSER_ID, company_ids[0])
            attempt_cnt = company.attempt_cnt
            unlock_after = company.lockouttime_id.value
            unlock_after_name = company.lockouttime_id.name
            uid = request.session.authenticate(
                request.session.db,
                request.params['login'],
                request.params['password'])
            if uid is False:
                uloginids = users_obj.search(
                    cr, SUPERUSER_ID,
                    [('login', '=', request.params['login'])])
                for lid in users_obj.browse(cr, SUPERUSER_ID, uloginids):
                    if lid.flg_userlocked:
                        if unlock_after == 0:
                            error = 'Your Login is temporarily Locked. Please Contact Administrator to Unlock it.'
                        else:
                            error = 'Your Login is temporarily Locked. Please try after ' + unlock_after_name
                    else:
                        wronglogin_cnt = lid.wronglogin_cnt and lid.wronglogin_cnt + 1 or 1
                        users_obj.write(
                            cr, SUPERUSER_ID, [lid.id],
                            {'wronglogin_cnt': wronglogin_cnt})
                        # NOTE(review): compares the pre-increment counter,
                        # as the original code did — lockout happens one
                        # attempt after the counter reaches the limit.
                        if int(lid.wronglogin_cnt) >= int(attempt_cnt):
                            users_obj.write(
                                cr, SUPERUSER_ID, [lid.id],
                                {
                                    'flg_userlocked': True,
                                    'userlocked_datetime': time.strftime('%Y-%m-%d %H:%M:%S')
                                })
                            if unlock_after == 0:
                                error = 'Your Login is temporarily Locked. Please Contact Administrator to Unlock it.'
                            else:
                                error = 'Your Login is temporarily Locked. Please try after ' + unlock_after_name
        response.qcontext['error'] = error
    return response
def upload_download(self, cr, uid, ids, context=None):
    '''
    Find all objects that are created or modified after the
    synchronize_date and synchronize these objects, then send a summary
    request to the configured user.
    '''
    # CLEANUP: removed leftover debug ``print`` statements.
    start_date = time.strftime('%Y-%m-%d, %Hh %Mm %Ss')
    syn_obj = self.browse(cr, uid, ids, context=context)[0]
    pool = pooler.get_pool(cr.dbname)
    server = pool.get('base.synchro.server').browse(
        cr, uid, syn_obj.server_url.id, context=context)
    for object_ in server.obj_ids:
        dt = time.strftime('%Y-%m-%d %H:%M:%S')
        self.synchronize(cr, uid, server, object_, context=context)
        if object_.action == 'b':
            # Bidirectional sync: small pause so both sides get distinct
            # timestamps.
            time.sleep(1)
        dt = time.strftime('%Y-%m-%d %H:%M:%S')
        self.pool.get('base.synchro.obj').write(
            cr, uid, [object_.id], {'synchronize_date': dt}, context=context)
    end_date = time.strftime('%Y-%m-%d, %Hh %Mm %Ss')
    if syn_obj.user_id:
        request = pooler.get_pool(cr.dbname).get('res.request')
        if not self.report:
            self.report.append('No exception.')
        summary = '''Here is the synchronization report:

Synchronization started: %s
Synchronization finnished: %s

Synchronized records: %d
Records updated: %d
Records created: %d

Exceptions:
        ''' % (start_date, end_date,
               self.report_write + self.report_create,
               self.report_write, self.report_create)
        summary += '\n'.join(self.report)
        request.create(cr, uid, {
            'name': "Synchronization report",
            'act_from': uid,
            'act_to': syn_obj.user_id.id,
            'body': summary,
        }, context=context)
    return True
def __init__(self, cr, uid, name, context):
    """Prepare the cheque remittance slip report: header, footer, helpers."""
    super(BankStatementWebkit, self).__init__(cr, uid, name, context=context)
    self.pool = pooler.get_pool(self.cr.dbname)
    self.cursor = self.cr
    user_record = self.pool.get('res.users').browse(
        self.cr, uid, uid, context=context)
    company = user_record.company_id
    report_title = _('BORDEREAU DE REMISE DE CHEQUES')
    header_report_name = ' - '.join(
        (report_title, company.name, company.currency_id.name))
    # Timestamp printed in the footer, formatted in the user's locale.
    footer_date_time = self.formatLang(
        str(datetime.today())[:19], date_time=True)
    wkhtmltopdf_args = [
        ('--header-font-name', 'Helvetica'),
        ('--footer-font-name', 'Helvetica'),
        ('--header-font-size', '10'),
        ('--footer-font-size', '6'),
        ('--header-left', header_report_name),
        ('--header-spacing', '2'),
        ('--footer-left', footer_date_time),
        ('--footer-right',
         ' '.join((_('Page'), '[page]', _('of'), '[topage]'))),
        ('--footer-line',),
    ]
    self.localcontext.update({
        'cr': cr,
        'uid': uid,
        'get_bank_statement': self._get_bank_statement_data,
        'report_name': report_title,
        'additional_args': wkhtmltopdf_args,
    })
def relation_transform(self, cr, uid, pool_src, pool_dest, object, id, action, context=None):
    """Map a relation id from the source database onto the destination.

    Returns the destination id, or False when the record cannot be
    resolved (the caller will then store a null relation).
    """
    if not id:
        return False
    pool = pooler.get_pool(cr.dbname)
    cr.execute('''select o.id from base_synchro_obj o left join ir_model m on (o.model_id =m.id) where m.model=%s and o.active''', (object,))
    obj = cr.fetchone()
    result = False
    if obj:
        #
        # If the object is synchronised and found, set it
        #
        result = self.get_id(cr, uid, obj[0], id, action, context=context)
    else:
        #
        # If not synchronized, try to find it with name_get/name_search
        #
        names = pool_src.get(object).name_get(cr, uid, [id])[0][1]
        res = pool_dest.get(object).name_search(cr, uid, names, [], 'like')
        if res:
            result = res[0][0]
        else:
            # BUG FIX: the old code printed the return value of append()
            # (always None) to stdout; just record the warning in the report.
            self.report.append('WARNING: Record "%s" on relation %s not found, set to null.' % (names, object))
    return result
def __init__(self, cr, uid, name, context):
    """Register the report helper callables used by the template."""
    super(Parser, self).__init__(cr, uid, name, context=context)
    # CLEANUP: removed an unused local pool lookup (never referenced).
    self.localcontext.update({
        'convert_date': self.convert_date,
        'get_lines': self.get_lines,
    })
def create(self, cursor, uid, ids, data, context=None):
    """We override the create function in order to handle generator
    Code taken from report openoffice. Thanks guys :)
    """
    pool = pooler.get_pool(cursor.dbname)
    ir_obj = pool.get('ir.actions.report.xml')
    report_xml_ids = ir_obj.search(cursor, uid,
            [('report_name', '=', self.name[7:])], context=context)
    if report_xml_ids:
        report_xml = ir_obj.browse(cursor, uid, report_xml_ids[0], context=context)
        # NOTE(review): these assignments mutate the in-memory browse record
        # so the downstream renderer ignores any stored RML/SXW payload —
        # confirm they are intentionally not persisted via write().
        report_xml.report_rml = None
        report_xml.report_rml_content = None
        report_xml.report_sxw_content_data = None
        report_xml.report_sxw_content = None
        report_xml.report_sxw = None
    else:
        # No matching report action: delegate to the standard implementation.
        return super(CvsParser, self).create(cursor, uid, ids, data, context)
    # Deferred import, presumably to avoid a circular dependency at module
    # load time — TODO confirm.
    from ir_report import REPORT_TYPE
    if report_xml.report_type != REPORT_TYPE :
        # Not this parser's report type: fall back to the default behaviour.
        return super(CvsParser, self).create(cursor, uid, ids, data, context)
    result = self.create_source_pdf(cursor, uid, ids, data, report_xml, context)
    if not result:
        return (False,False)
    return result
def _get_invoice_facturae_xml(self, cr, uid, data, context=None):
    """Return the invoice's Factura-e XML (base64) and its file name.

    Reuses the XML already stored as an attachment on the invoice when
    present, otherwise regenerates it; in both cases the invoice is
    (re-)attached before returning.
    """
    # CLEANUP: removed the commented-out attachment-creation block and a
    # stale context.update() line.
    if context is None:
        context = {}
    pool = pooler.get_pool(cr.dbname)
    invoice_obj = pool.get('account.invoice')
    ids = data['ids']
    id = ids[0]
    invoice = invoice_obj.browse(cr, uid, [id], context=context)[0]
    fname_invoice = invoice.fname_invoice and invoice.fname_invoice + \
        '.xml' or ''
    attachment_obj = pool.get('ir.attachment')
    aids = attachment_obj.search(cr, uid, [
        ('datas_fname', '=', invoice.fname_invoice + '.xml'),
        ('res_model', '=', 'account.invoice'),
        ('res_id', '=', id)])
    xml_data = ""
    if aids:
        brow_rec = attachment_obj.browse(cr, uid, aids[0])
        if brow_rec.datas:
            xml_data = base64.decodestring(brow_rec.datas)
    else:
        fname, xml_data = invoice_obj._get_facturae_invoice_xml_data(
            cr, uid, ids, context=context)
    id = invoice_obj._attach_invoice(cr, uid, ids, context=context)
    fdata = base64.encodestring(xml_data)
    return {'facturae': fdata,
            'facturae_fname': fname_invoice,
            }
def __init__(self, cr, uid, model, ids):
    """Hold cursor/user/model/ids and precompute the model's column names."""
    self.cr, self.uid = cr, uid
    self.model, self.ids = model, ids
    registry = pooler.get_pool(cr.dbname)
    self.obj = registry.get(model)
    # Both the model's own columns and those inherited via _inherits.
    own_columns = self.obj._columns.keys()
    inherited_columns = self.obj._inherit_fields.keys()
    self.columns = own_columns + inherited_columns
def __init__(self, cr, name, table, rml=False, parser=False, header=True, store=False):
    # Register an Aeroo report and, when configured, preload its template:
    # if the matching ir.actions.report.xml record has preload_mode set to
    # 'preload', the template (and optional styles) is read once here and a
    # serializer is built from it.
    super(Aeroo_report, self).__init__(
        name, table, rml, parser, header, store, register=False)
    self.logger("registering %s (%s)" % (name, table), logging.INFO)
    self.active_prints = {}
    pool = pooler.get_pool(cr.dbname)
    ir_obj = pool.get('ir.actions.report.xml')
    # Report names may carry a 'report.' prefix; strip it for the lookup.
    name = name.startswith('report.') and name[7:] or name
    try:
        report_xml_ids = ir_obj.search(cr, 1, [('report_name', '=', name)])
        if report_xml_ids:
            report_xml = ir_obj.browse(cr, 1, report_xml_ids[0])
        else:
            report_xml = False
        if report_xml and report_xml.preload_mode == 'preload':
            file_data = report_xml.report_sxw_content
            if not file_data:
                self.logger("template is not defined in %s (%s) !" % (name, table), logging.WARNING)
                template_io = None
            else:
                # The template content is stored base64-encoded on the record.
                template_io = StringIO()
                template_io.write(base64.decodestring(file_data))
            style_io=self.get_styles_file(cr, 1, report_xml)
            if template_io:
                self.serializer = OOSerializer(template_io, oo_styles=style_io)
    # NOTE(review): Python 2 syntax; preload errors are only printed, never
    # re-raised — the report silently falls back to non-preloaded mode.
    except Exception, e:
        print e
def _get_categories(self, products, form):
    """Group the given products by category, pricing each quantity column."""
    self.pricelist = form['price_list']
    self._set_quantity(form)
    pool = pooler.get_pool(self.cr.dbname)
    product_obj = pool.get('product.product')
    all_product_ids = []
    category_ids = []
    for item in products:
        all_product_ids.append(item.id)
        # Preserve first-seen category order while deduplicating.
        if item.categ_id.id not in category_ids:
            category_ids.append(item.categ_id.id)
    named_categories = pool.get('product.category').name_get(
        self.cr, self.uid, category_ids, context=self.localcontext)
    result = []
    if not named_categories:
        return result
    for cat_id, cat_name in named_categories:
        member_ids = product_obj.search(
            self.cr, self.uid,
            [('id', 'in', all_product_ids), ('categ_id', '=', cat_id)],
            context=self.localcontext)
        rows = []
        for record in product_obj.read(
                self.cr, self.uid, member_ids, ['name', 'code'],
                context=self.localcontext):
            row = {
                'id': record['id'],
                'name': record['name'],
                'code': record['code'],
            }
            # One priced column per configured quantity; qty 0 prices as 0.0.
            for col, qty in enumerate(self.quantity, start=1):
                if qty == 0:
                    row['qty' + str(col)] = 0.0
                else:
                    row['qty' + str(col)] = self._get_price(
                        self.pricelist, record['id'], qty)
            rows.append(row)
        result.append({'name': cat_name, 'products': rows})
    return result
def __call__(self, source):
    """Translate ``source`` using the caller's language, if determinable.

    Falls back to returning ``source`` unchanged whenever the calling
    frame, language or cursor cannot be found, or translation fails.
    """
    res = source
    cr = None
    is_new_cr = False
    try:
        # Inspect the caller's stack frame to discover the active language
        # and database cursor.
        frame = inspect.currentframe()
        if frame is None:
            return source
        frame = frame.f_back
        if not frame:
            return source
        lang = self._get_lang(frame)
        if lang:
            cr, is_new_cr = self._get_cr(frame)
            if cr:
                # Try to use ir.translation to benefit from global cache if possible
                pool = pooler.get_pool(cr.dbname)
                res = pool.get('ir.translation')._get_source(cr, SUPERUSER_ID, None, ('code','sql_constraint'), lang, source)
            else:
                _logger.debug('no context cursor detected, skipping translation for "%r"', source)
        else:
            _logger.debug('no translation language detected, skipping translation for "%r" ', source)
    except Exception:
        _logger.debug('translation went wrong for "%r", skipped', source)
        # if so, double-check the root/base translations filenames
    finally:
        # Only close cursors this call opened itself.
        if cr and is_new_cr:
            cr.close()
    return res
def _get_attachment_create_vals(self, cr, uid, report_xml, vals, context=None):
    """Delegate attachment-values preparation to ir.actions.report.xml."""
    if context is None:
        context = {}
    report_model = pooler.get_pool(cr.dbname).get('ir.actions.report.xml')
    return report_model._get_attachment_create_vals(
        cr, uid, report_xml, vals, context=context)
def get_object(self, data):
    """Return the teacher's timetable entries ordered for the report."""
    weekday_names = ["Monday", "Tuesday", "Wednesday", "Thursday",
                     "Friday", "Saturday", "Sunday"]
    timetable_model = pooler.get_pool(self.cr.dbname).get("op.timetable")
    entries = []
    for slot in timetable_model.browse(
            self.cr, self.uid, data["teacher_time_table_ids"]):
        slot_start = datetime.strptime(slot.start_datetime, "%Y-%m-%d %H:%M:%S")
        entries.append({
            "period": slot.period_id.name,
            "period_time": slot.period_id.hour + ":" + slot.period_id.minute + slot.period_id.am_pm,
            "sequence": slot.period_id.sequence,
            # Keep only the time-of-day part of the datetimes.
            "start_datetime": slot.start_datetime[10:],
            "end_datetime": slot.end_datetime[10:],
            "day": weekday_names[datetime.weekday(slot_start)],
            "subject": slot.subject_id.name,
            "course": slot.standard_id.course_id.name,
            "standard": slot.standard_id.name,
        })
    ordered = sorted(entries, key=lambda entry: entry["sequence"])
    return self.sort_tt(ordered)
def __init__(self, cr, uid, name, context):
    """Register the report helper callable used by the template."""
    super(Parser, self).__init__(cr, uid, name, context=context)
    # CLEANUP: removed an unused local pool lookup (never referenced).
    self.localcontext.update({
        'get_phieunhap': self.get_phieunhap,
    })
def create(self, cr, uid, ids, datas, context):
    """Render the "Production Configuration" RML report for an mto.design record.

    Builds the full RML document as one string (page template with a drawn
    border, a stylesheet, then a story listing the design's attribute groups
    and their values as radio/checkbox-style illustrations) and hands it to
    the rml-to-pdf generator.  Only ids[0] is printed.
    Returns a (document, report_type) tuple.
    """
    # data = datas['form']
    design_obj = pooler.get_pool(cr.dbname).get('mto.design')
    data = design_obj.browse(cr, uid, ids[0], context=context)
    # NOTE(review): the two values below are never used in this method.
    _divide_columns_for_matrix = 0.7
    _display_ans_in_rows = 5
    #default A4 size
    _pageSize = ('21.1cm', '29.7cm')
    _frame_width = tools.ustr(_pageSize[0])
    # Frame height and table width leave fixed margins around the page.
    _frame_height = tools.ustr(float(_pageSize[1].replace('cm', '')) - float(1.90)) + 'cm'
    _tbl_widths = tools.ustr(float(_pageSize[0].replace('cm', '')) - float(2.10)) + 'cm'
    # Page template: a single frame plus a 1cm rectangular border drawn with four <lines>.
    rml = """<document filename="Production Configuration.pdf"> <template pageSize="(""" + _pageSize[0] + """,""" + _pageSize[1] + """)" title='Options' author="MTT" allowSplitting="20" > <pageTemplate id="first"> <frame id="first" x1="0.0cm" y1="1.0cm" width='""" + _frame_width + """' height='""" + _frame_height + """'/> <pageGraphics> <lineMode width="1.0"/> <lines>1.0cm """ + tools.ustr(float(_pageSize[1].replace('cm', '')) - float(1.00)) + 'cm' + """ """ + tools.ustr(float(_pageSize[0].replace('cm', '')) - float(1.00)) + 'cm' + """ """ + tools.ustr(float(_pageSize[1].replace('cm', '')) - float(1.00)) + 'cm' + """</lines> <lines>1.0cm """ + tools.ustr(float(_pageSize[1].replace('cm', '')) - float(1.00)) + 'cm' + """ 1.0cm 1.00cm</lines> <lines>""" + tools.ustr(float(_pageSize[0].replace('cm', '')) - float(1.00)) + 'cm' + """ """ + tools.ustr(float(_pageSize[1].replace('cm', '')) - float(1.00)) + 'cm' + """ """ + tools.ustr(float(_pageSize[0].replace('cm', '')) - float(1.00)) + 'cm' + """ 1.00cm</lines> <lines>1.0cm 1.00cm """ + tools.ustr(float(_pageSize[0].replace('cm', '')) - float(1.00)) + 'cm' + """ 1.00cm</lines>"""
    #page number
    rml += """ <fill color="gray"/> <setFont name="Helvetica" size="10"/> <drawRightString x='""" + tools.ustr(float(_pageSize[0].replace('cm', '')) - float(1.00)) + 'cm' + """' y="0.6cm">Page : <pageNumber/> </drawRightString>"""
    # Stylesheet: the block-table styles and paragraph styles used by the story below.
    rml += """</pageGraphics> </pageTemplate> </template> <stylesheet> <blockTableStyle id="Standard_Outline"> <blockAlignment value="LEFT"/> <blockValign value="TOP"/> <blockTopPadding length="0"/> <blockBottomPadding length="0"/> <blockLeftPadding length="0"/> <blockRightPadding length="0"/> </blockTableStyle> <blockTableStyle id="Table1"> <blockAlignment value="LEFT"/> <blockValign value="TOP"/> <blockTopPadding length="1"/> <blockBottomPadding length="1"/> <blockLeftPadding length="1"/> <blockRightPadding length="1"/> </blockTableStyle> <blockTableStyle id="Tableau1"> <blockAlignment value="LEFT"/> <blockValign value="TOP"/> <blockTopPadding length="1"/> <blockBottomPadding length="1"/> <blockLeftPadding length="1"/> <blockRightPadding length="1"/> </blockTableStyle> <blockTableStyle id="table_attr"> <blockAlignment value="LEFT"/> <blockValign value="TOP"/> <lineStyle kind="LINEABOVE" colorName="#e6e6e6" start="0,0" stop="0,0"/> <lineStyle kind="LINEBELOW" colorName="#e6e6e6" start="0,-1" stop="0,-1"/> <lineStyle kind="LINEBELOW" colorName="#e6e6e6" start="0,-1" stop="0,-1"/> <blockTopPadding length="1"/> <blockBottomPadding length="1"/> <blockLeftPadding length="1"/> <blockRightPadding length="1"/> </blockTableStyle> <blockTableStyle id="Table_Outer_Notes"> <blockAlignment value="LEFT"/> <blockValign value="TOP"/> <blockTopPadding length="1"/> <blockBottomPadding length="1"/> <blockLeftPadding length="1"/> <blockRightPadding length="1"/> </blockTableStyle> <blockTableStyle id="Table_options"> <blockAlignment value="LEFT"/> <blockValign value="TOP"/> <blockTopPadding length="1"/> <blockBottomPadding length="1"/> <blockLeftPadding length="1"/> <blockRightPadding length="1"/> </blockTableStyle> <initialize> <paraStyle name="all" alignment="justify"/> </initialize> <paraStyle name="P1" fontName="Helvetica-Bold" fontSize="20.0" leading="25" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="P2" fontName="Helvetica" fontSize="6.0" leading="8" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="P3" fontName="Helvetica" fontSize="16.0" leading="20" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="P4" fontName="Helvetica-Bold" fontSize="18.0" leading="22" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="P5" fontName="Helvetica" fontSize="14.0" leading="17" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="opt_item" fontName="Helvetica" fontSize="10.0" leading="13" alignment="LEFT" spaceBefore="30" spaceAfter="0.0"/> <paraStyle name="Standard" fontName="Helvetica"/> <paraStyle name="Text body" fontName="Helvetica" spaceBefore="0.0" spaceAfter="6.0"/> <paraStyle name="Heading" fontName="Helvetica" fontSize="14.0" leading="17" spaceBefore="12.0" spaceAfter="6.0"/> <paraStyle name="Heading 9" fontName="Helvetica-Bold" fontSize="75%" leading="NaN" spaceBefore="12.0" spaceAfter="6.0"/> <paraStyle name="List" fontName="Helvetica" spaceBefore="0.0" spaceAfter="6.0"/> <paraStyle name="Footer" fontName="Helvetica"/> <paraStyle name="Table Contents" fontName="Helvetica"/> <paraStyle name="Table Heading" fontName="Helvetica" alignment="CENTER"/> <paraStyle name="Caption" fontName="Helvetica" fontSize="12.0" leading="15" spaceBefore="6.0" spaceAfter="6.0"/> <paraStyle name="Index" fontName="Helvetica"/> <paraStyle name="Horizontal Line" fontName="Helvetica" fontSize="6.0" leading="8" spaceBefore="0.0" spaceAfter="14.0"/> <paraStyle name="terp_header" fontName="Helvetica-Bold" fontSize="12.0" leading="15" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_tblheader_General" fontName="Helvetica-Bold" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="6.0" spaceAfter="6.0"/> <paraStyle name="terp_tblheader_Details" fontName="Helvetica-Bold" fontSize="10.0" leading="13" alignment="LEFT" spaceBefore="6.0" spaceAfter="6.0"/> <paraStyle name="terp_default_8" fontName="Helvetica" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_Bold_8" fontName="Helvetica-Bold" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_tblheader_General_Centre" fontName="Helvetica-Bold" fontSize="8.0" leading="10" alignment="CENTER" spaceBefore="6.0" spaceAfter="6.0"/> <paraStyle name="terp_tblheader_General_Right" fontName="Helvetica-Bold" fontSize="8.0" leading="10" alignment="RIGHT" spaceBefore="6.0" spaceAfter="6.0"/> <paraStyle name="terp_tblheader_Details_Centre" fontName="Helvetica-Bold" fontSize="10.0" leading="13" alignment="CENTER" spaceBefore="6.0" spaceAfter="6.0"/> <paraStyle name="terp_tblheader_Details_Right" fontName="Helvetica-Bold" fontSize="10.0" leading="13" alignment="RIGHT" spaceBefore="6.0" spaceAfter="6.0"/> <paraStyle name="terp_default_Right_8" fontName="Helvetica" fontSize="8.0" leading="10" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_Centre_8" fontName="Helvetica" fontSize="8.0" leading="10" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_header_Right" fontName="Helvetica-Bold" fontSize="15.0" leading="19" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_header_Centre" fontName="Helvetica-Bold" fontSize="12.0" leading="15" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_address" fontName="Helvetica" fontSize="10.0" leading="13" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_9" fontName="Helvetica" fontSize="10.0" leading="13" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_Bold_9" fontName="Helvetica-Bold" fontSize="10.0" leading="13" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_Centre_9" fontName="Helvetica" fontSize="9.0" leading="11" alignment="CENTER" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_Right_9" fontName="Helvetica" fontSize="10.0" leading="13" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_Bold_9_Right" fontName="Helvetica-Bold" fontSize="10.0" leading="13" alignment="RIGHT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_8_Italic" fontName="Helvetica-Oblique" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <paraStyle name="terp_default_2" fontName="Helvetica" fontSize="2.0" leading="3" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/> <images/> </stylesheet> <story>"""
    #basic data
    rml += """ <blockTable colWidths="535.0" style="Table1"> <tr> <td> <para style="P1"> <font face="Helvetica" size="20.0">Production Configuration """ + to_xml(tools.ustr(data.name)) + """</font> </para> </td> </tr> </blockTable> <blockTable colWidths="180.0,159.0,195.0" style="Tableau1"> <tr> <td> <para style="P5">Model: """ + to_xml(tools.ustr(data.design_tmpl_id.name)) + """</para> </td> <td> <para style="P5">Price: """ + tools.ustr(data.design_tmpl_id.currency_id.symbol) + tools.ustr(data.list_price) + """</para> </td> <td> <para style="P5">Weight: """ + tools.ustr(data.weight) + """KG</para> </td> </tr> </blockTable> """
    design_tmpl = data.design_tmpl_id
    opt_obj = pooler.get_pool(cr.dbname).get('attribute.option')
    # One bordered table per attribute group; inside it, a label row and a
    # value row for every attribute of the group.
    for attr_group in design_tmpl.attribute_group_ids:
        rml += """ <blockTable colWidths="533.0" style="table_attr"> <tr> <td> <para style="P4">""" + to_xml(tools.ustr(attr_group.name)) + """</para> </td> </tr> """
        for attr in attr_group.attribute_ids:
            attr_label = attr.field_description  # NOTE(review): unused local
            attr_type = attr.attribute_type
            # The attribute's value lives on the design record under the attribute's name.
            attr_val = getattr(data, attr.name)
            #field label
            rml += """ <tr> <td> <para style="P5">%s.%s</para> </td> </tr> """ % (attr.sequence, to_xml(design_obj._get_attr_pw_name(data, attr)), )
            #field value
            if attr_type in ('char', 'text', 'boolean', 'integer', 'date', 'datetime', 'float'):
                if attr_val:
                    rml += """ <tr> <td> <para style="opt_item">%s</para> </td> </tr> """ % (to_xml(attr_val), )
            elif attr_type == 'select':
                # Radio-button rendering: an outlined circle per option,
                # with an inner filled circle for the selected one.
                rml += """ <tr> <td> <blockTable colWidths="18.0,516.0" style="Table_options"> """
                for sel_opt in attr.option_ids:
                    rml += """ <tr> <td> <illustration> <fill color="white"/> <circle x="0.3cm" y="-0.2cm" radius="0.18 cm" fill="yes" stroke="yes" round="0.1cm"/> """
                    if attr_val and attr_val.id == sel_opt.id:
                        rml += """ <fill color="black"/> <circle x="0.3cm" y="-0.2cm" radius="0.13 cm" fill="yes" stroke="no" round="0.1cm"/> """
                    rml += """ </illustration> </td> <td> <para style="opt_item">%s</para> </td> </tr> """ % (to_xml(opt_obj.name_get(cr, uid, sel_opt.id, context)[0][1]), )
                rml += """ </blockTable> </td> </tr> """
            elif attr_type == 'multiselect':
                # Checkbox rendering: an outlined square per option,
                # with an inner filled square for each selected option.
                attr_val_ids = [val.id for val in attr_val]
                rml += """ <tr> <td> <blockTable colWidths="18.0,516.0" style="Table_options"> """
                for sel_opt in attr.option_ids:
                    rml += """ <tr> <td> <illustration> <fill color="white"/> <rect x="0.1cm" y="-0.45cm" width="0.4 cm" height="0.4cm" fill="yes" stroke="yes" round="0.1cm"/> """
                    if sel_opt.id in attr_val_ids:
                        rml += """ <fill color="black"/> <rect x="0.15cm" y="-0.4cm" width="0.3 cm" height="0.3cm" fill="yes" stroke="no" round="0.1cm"/> """
                    rml += """ </illustration> </td> <td> <para style="opt_item">%s</para> </td> </tr> """ % (to_xml(opt_obj.name_get(cr, uid, sel_opt.id, context)[0][1]), )
                rml += """ </blockTable> </td> </tr> """
        # Close this attribute group's table.
        rml += """ </blockTable> """
    #the description
    rml += """ <blockTable colWidths="535.0" style="Table_Outer_Notes"> <tr> <td> <para style="terp_default_9">Description:</para> </td> </tr> <tr> <td> <para style="terp_default_9">%s</para> </td> </tr> </blockTable> """ % (data.description and data.description or '', )
    rml += """</story></document>"""
    report_type = datas.get('report_type', 'pdf')
    create_doc = self.generators[report_type]
    pdf = create_doc(rml, title=self.title)
    return (pdf, report_type)
def exp_authenticate(self, db, login, password, user_agent_env):
    """RPC endpoint wrapper: forward authentication to res.users of `db`."""
    registry = pooler.get_pool(db)
    return registry.get('res.users').authenticate(db, login, password, user_agent_env)
def get_proxy_args(instance, cr, uid, prpt_content, context_vars=None):
    """Return the arguments needed by Pentaho server proxy.

    @return: Tuple with:
        [0]: Has the url for the Pentaho server.
        [1]: Has dict with basic arguments to pass to Pentaho server.
             This includes the connection settings and report definition,
             as well as reserved parameters evaluated according to values
             in the dictionary "context_vars".
    """
    # Fix: the default used to be a shared mutable dict (context_vars={});
    # use a None sentinel so each call gets a fresh dictionary.
    if context_vars is None:
        context_vars = {}
    pool = pooler.get_pool(cr.dbname)
    current_user = pool.get('res.users').browse(cr, uid, uid)
    config_obj = pool.get('ir.config_parameter')
    proxy_url = config_obj.get_param(cr, uid, 'pentaho.server.url', default='http://localhost:8080/pentaho-reports-for-openerp')
    # XML-RPC interface/port the Pentaho server should call back on:
    # config parameter first, then the server's own config, then a fallback.
    xml_interface = config_obj.get_param(cr, uid, 'pentaho.openerp.xml.interface', default='').strip() or config['xmlrpc_interface'] or 'localhost'
    xml_port = config_obj.get_param(cr, uid, 'pentaho.openerp.xml.port', default='').strip() or str(config['xmlrpc_port'])
    # A temporary token is sent instead of the user's real password.
    password_to_use = pool.get('res.users').pentaho_pass_token(cr, uid, uid)
    proxy_argument = {
        'prpt_file_content': xmlrpclib.Binary(prpt_content),
        'connection_settings': {
            'openerp': {
                'host': xml_interface,
                'port': xml_port,
                'db': cr.dbname,
                'login': current_user.login,
                'password': password_to_use,
            }
        },
        # Reserved parameters are included only when their formula evaluates truthy.
        'report_parameters': dict([(param_name, param_formula(instance, cr, uid, context_vars))
                                   for (param_name, param_formula) in RESERVED_PARAMS.iteritems()
                                   if param_formula(instance, cr, uid, context_vars)]),
    }
    # Optional direct-Postgres connection: only passed when fully configured.
    postgresconfig_host = config_obj.get_param(cr, uid, 'pentaho.postgres.host', default='localhost')
    postgresconfig_port = config_obj.get_param(cr, uid, 'pentaho.postgres.port', default='5432')
    postgresconfig_login = config_obj.get_param(cr, uid, 'pentaho.postgres.login')
    postgresconfig_password = config_obj.get_param(cr, uid, 'pentaho.postgres.password')
    if postgresconfig_host and postgresconfig_port and postgresconfig_login and postgresconfig_password:
        proxy_argument['connection_settings'].update({
            'postgres': {
                'host': postgresconfig_host,
                'port': postgresconfig_port,
                'db': cr.dbname,
                'login': postgresconfig_login,
                'password': postgresconfig_password,
            }
        })
    return proxy_url, proxy_argument
def extend_trans_generate(lang, modules, cr):
    """Collect translatable source terms for the given modules.

    Walks ir_model_data records and extracts translatable terms from views,
    wizards, model fields and report definitions, accumulating them as
    (module, source, name, id, type, comments) tuples in _to_translate.
    Extended over the stock exporter with an Aeroo-report branch that pulls
    existing report translations from ir.translation.
    """
    dbname = cr.dbname
    pool = pooler.get_pool(dbname)
    trans_obj = pool.get('ir.translation')
    model_data_obj = pool.get('ir.model.data')
    uid = SUPERUSER_ID
    # NOTE(review): `l` is sorted but never used afterwards — looks like a leftover.
    l = pool.models.items()
    l.sort()

    query = 'SELECT name, model, res_id, module' \
            ' FROM ir_model_data'
    query_models = """SELECT m.id, m.model, imd.module FROM ir_model AS m, ir_model_data AS imd WHERE m.id = imd.res_id AND imd.model = 'ir.model' """
    # NOTE(review): if modules contains 'all_installed' but not 'all', BOTH
    # branches below append a WHERE clause to `query` — confirm intended usage.
    if 'all_installed' in modules:
        query += ' WHERE module IN ( SELECT name FROM ir_module_module WHERE state = \'installed\') '
        query_models += " AND imd.module in ( SELECT name FROM ir_module_module WHERE state = 'installed') "
    query_param = None
    if 'all' not in modules:
        query += ' WHERE module IN %s'
        query_models += ' AND imd.module in %s'
        query_param = (tuple(modules), )
    query += ' ORDER BY module, model, name'
    query_models += ' ORDER BY module, model'

    cr.execute(query, query_param)

    _to_translate = []

    def push_translation(module, type, name, id, source, comments=None):
        # Accumulate one unique translation term.
        tuple = (module, source, name, id, type, comments or [])
        # empty and one-letter terms are ignored, they probably are not meant to be
        # translated, and would be very hard to translate anyway.
        if not source or len(source.strip()) <= 1:
            _logger.debug("Ignoring empty or 1-letter source term: %r", tuple)
            return
        if tuple not in _to_translate:
            _to_translate.append(tuple)

    def encode(s):
        # Python 2: normalize unicode to utf-8 bytes.
        if isinstance(s, unicode):
            return s.encode('utf8')
        return s

    for (xml_name, model, res_id, module) in cr.fetchall():
        module = encode(module)
        model = encode(model)
        xml_name = "%s.%s" % (module, encode(xml_name))

        if not pool.get(model):
            _logger.error("Unable to find object %r", model)
            continue

        exists = pool.get(model).exists(cr, uid, res_id)
        if not exists:
            _logger.warning("Unable to find object %r with id %d", model, res_id)
            continue
        obj = pool.get(model).browse(cr, uid, res_id)

        if model == 'ir.ui.view':
            # Views: every translatable node in the arch.
            d = etree.XML(encode(obj.arch))
            for t in trans_parse_view(d):
                push_translation(module, 'view', encode(obj.model), 0, t)
        elif model == 'ir.actions.wizard':
            # Old-style wizards: field strings, selections, help, arch and buttons
            # for every form-type state.
            service_name = 'wizard.' + encode(obj.wiz_name)
            if netsvc.Service._services.get(service_name):
                obj2 = netsvc.Service._services[service_name]
                for state_name, state_def in obj2.states.iteritems():
                    if 'result' in state_def:
                        result = state_def['result']
                        if result['type'] != 'form':
                            continue
                        name = "%s,%s" % (encode(obj.wiz_name), state_name)

                        def_params = {
                            'string': ('wizard_field', lambda s: [encode(s)]),
                            'selection': ('selection', lambda s: [encode(e[1]) for e in ((not callable(s)) and s or [])]),
                            'help': ('help', lambda s: [encode(s)]),
                        }

                        # export fields
                        if not result.has_key('fields'):
                            _logger.warning("res has no fields: %r", result)
                            continue
                        for field_name, field_def in result['fields'].iteritems():
                            res_name = name + ',' + field_name
                            for fn in def_params:
                                if fn in field_def:
                                    transtype, modifier = def_params[fn]
                                    for val in modifier(field_def[fn]):
                                        push_translation(module, transtype, res_name, 0, val)

                        # export arch
                        arch = result['arch']
                        if arch and not isinstance(arch, UpdateableStr):
                            d = etree.XML(arch)
                            for t in trans_parse_view(d):
                                push_translation(module, 'wizard_view', name, 0, t)

                        # export button labels
                        for but_args in result['state']:
                            button_name = but_args[0]
                            button_label = but_args[1]
                            res_name = name + ',' + button_name
                            push_translation(module, 'wizard_button', res_name, 0, button_label)
        elif model == 'ir.model.fields':
            # Field definitions: label, help, translated values and selections.
            try:
                field_name = encode(obj.name)
            except AttributeError, exc:
                _logger.error("name error in %s: %s", xml_name, str(exc))
                continue
            objmodel = pool.get(obj.model)
            if not objmodel or not field_name in objmodel._columns:
                continue
            field_def = objmodel._columns[field_name]

            name = "%s,%s" % (encode(obj.model), field_name)
            push_translation(module, 'field', name, 0, encode(field_def.string))

            if field_def.help:
                push_translation(module, 'help', name, 0, encode(field_def.help))

            if field_def.translate:
                ids = objmodel.search(cr, uid, [])
                obj_values = objmodel.read(cr, uid, ids, [field_name])
                for obj_value in obj_values:
                    res_id = obj_value['id']
                    if obj.name in ('ir.model', 'ir.ui.menu'):
                        res_id = 0
                    model_data_ids = model_data_obj.search(cr, uid, [
                        ('model', '=', model),
                        ('res_id', '=', res_id),
                    ])
                    if not model_data_ids:
                        push_translation(module, 'model', name, 0, encode(obj_value[field_name]))

            if hasattr(field_def, 'selection') and isinstance(field_def.selection, (list, tuple)):
                for dummy, val in field_def.selection:
                    push_translation(module, 'selection', name, 0, encode(val))
        elif model == 'ir.actions.report.xml':
            name = encode(obj.report_name)
            fname = ""
            ##### Changes for Aeroo ######
            # Aeroo reports keep their terms in ir.translation already; re-export them.
            if obj.report_type == 'aeroo':
                trans_ids = trans_obj.search(cr, uid, [('type', '=', 'report'), ('res_id', '=', obj.id)])
                for t in trans_obj.read(cr, uid, trans_ids, ['name', 'src']):
                    push_translation(module, "report", t['name'], xml_name, encode(t['src']))
            ##############################
            else:
                # Classic RML/XSL reports: parse the report file for terms.
                if obj.report_rml:
                    fname = obj.report_rml
                    parse_func = trans_parse_rml
                    report_type = "report"
                elif obj.report_xsl:
                    fname = obj.report_xsl
                    parse_func = trans_parse_xsl
                    report_type = "xsl"
                if fname and obj.report_type in ('pdf', 'xsl'):
                    try:
                        report_file = file_open(fname)
                        try:
                            d = etree.parse(report_file)
                            for t in parse_func(d.iter()):
                                push_translation(module, report_type, name, 0, t)
                        finally:
                            report_file.close()
                    except (IOError, etree.XMLSyntaxError):
                        _logger.exception("couldn't export translation for report %s %s %s", name, report_type, fname)
def load_modules(db, force_demo=False, status=None, update_module=False):
    """Initialize/upgrade the registry of database `db` by loading its modules.

    Bootstraps the database on first use, loads 'base', marks modules for
    install/upgrade when update_module is set, loads all marked modules,
    runs consistency checks, and finally processes module removals.
    """
    # TODO status['progress'] reporting is broken: used twice (and reset each
    # time to zero) in load_module_graph, not fine-grained enough.
    # It should be a method exposed by the pool.
    initialize_sys_path()
    open_openerp_namespace()

    force = []
    if force_demo:
        force.append('demo')

    cr = db.cursor()
    try:
        if not openerp.modules.db.is_initialized(cr):
            # Brand-new database: create the module tables and schedule
            # everything for init (with demo data unless disabled).
            _logger.info("init db")
            openerp.modules.db.initialize(cr)
            tools.config["init"]["all"] = 1
            tools.config['update']['all'] = 1
            if not tools.config['without_demo']:
                tools.config["demo"]['all'] = 1

        # This is a brand new pool, just created in pooler.get_db_and_pool()
        pool = pooler.get_pool(cr.dbname)

        report = tools.assertion_report()
        if 'base' in tools.config['update'] or 'all' in tools.config['update']:
            cr.execute("update ir_module_module set state=%s where name=%s and state=%s", ('to upgrade', 'base', 'installed'))

        # STEP 1: LOAD BASE (must be done before module dependencies can be computed for later steps)
        graph = openerp.modules.graph.Graph()
        graph.add_module(cr, 'base', force)
        if not graph:
            _logger.critical('module base cannot be loaded! (hint: verify addons-path)')
            raise osv.osv.except_osv(_('Could not load base module'), _('module base cannot be loaded! (hint: verify addons-path)'))

        # processed_modules: for cleanup step after install
        # loaded_modules: to avoid double loading
        loaded_modules, processed_modules = load_module_graph(cr, graph, status, perform_checks=(not update_module), report=report)

        if tools.config['load_language']:
            for lang in tools.config['load_language'].split(','):
                tools.load_language(cr, lang)

        # STEP 2: Mark other modules to be loaded/updated
        if update_module:
            modobj = pool.get('ir.module.module')
            if ('base' in tools.config['init']) or ('base' in tools.config['update']):
                _logger.info('updating modules list')
                modobj.update_list(cr, 1)

            _check_module_names(cr, itertools.chain(tools.config['init'].keys(), tools.config['update'].keys()))

            # Modules requested for install.
            mods = [k for k in tools.config['init'] if tools.config['init'][k]]
            if mods:
                ids = modobj.search(cr, 1, ['&', ('state', '=', 'uninstalled'), ('name', 'in', mods)])
                if ids:
                    modobj.button_install(cr, 1, ids)

            # Modules requested for upgrade.
            mods = [k for k in tools.config['update'] if tools.config['update'][k]]
            if mods:
                ids = modobj.search(cr, 1, ['&', ('state', '=', 'installed'), ('name', 'in', mods)])
                if ids:
                    modobj.button_upgrade(cr, 1, ids)

            cr.execute("update ir_module_module set state=%s where name=%s", ('installed', 'base'))

        # STEP 3: Load marked modules (skipping base which was done in STEP 1)
        # IMPORTANT: this is done in two parts, first loading all installed or
        #            partially installed modules (i.e. installed/to upgrade), to
        #            offer a consistent system to the second part: installing
        #            newly selected modules.
        states_to_load = ['installed', 'to upgrade']
        processed = load_marked_modules(cr, graph, states_to_load, force, status, report, loaded_modules)
        processed_modules.extend(processed)
        if update_module:
            states_to_load = ['to install']
            processed = load_marked_modules(cr, graph, states_to_load, force, status, report, loaded_modules)
            processed_modules.extend(processed)

        # load custom models
        cr.execute('select model from ir_model where state=%s', ('manual', ))
        for model in cr.dictfetchall():
            pool.get('ir.model').instanciate(cr, 1, model['model'], {})

        # STEP 4: Finish and cleanup
        if processed_modules:
            # Warn about regular models without any access rule.
            cr.execute("""select model,name from ir_model where id NOT IN (select distinct model_id from ir_model_access)""")
            for (model, name) in cr.fetchall():
                model_obj = pool.get(model)
                if model_obj and not model_obj.is_transient():
                    _logger.warning('Model %s (%s) has no access rules!', model, name)

            # Temporary warning while we remove access rights on osv_memory objects, as they have
            # been replaced by owner-only access rights
            cr.execute("""select distinct mod.model, mod.name from ir_model_access acc, ir_model mod where acc.model_id = mod.id""")
            for (model, name) in cr.fetchall():
                model_obj = pool.get(model)
                if model_obj and model_obj.is_transient():
                    _logger.warning('The transient model %s (%s) should not have explicit access rules!', model, name)

            cr.execute("SELECT model from ir_model")
            for (model, ) in cr.fetchall():
                obj = pool.get(model)
                if obj:
                    obj._check_removed_columns(cr, log=True)
                else:
                    _logger.warning("Model %s is declared but cannot be loaded! (Perhaps a module was partially removed or renamed)", model)

            # Cleanup orphan records
            pool.get('ir.model.data')._process_end(cr, 1, processed_modules)

        # init/demo/update requests have been honoured; reset them.
        for kind in ('init', 'demo', 'update'):
            tools.config[kind] = {}

        cr.commit()
        if update_module:
            # Remove records referenced from ir_model_data for modules to be
            # removed (and removed the references from ir_model_data).
            cr.execute("select id,name from ir_module_module where state=%s", ('to remove', ))
            for mod_id, mod_name in cr.fetchall():
                cr.execute('select model,res_id from ir_model_data where noupdate=%s and module=%s order by id desc', (False, mod_name, ))
                for rmod, rid in cr.fetchall():
                    uid = 1
                    rmod_module = pool.get(rmod)
                    if rmod_module:
                        # TODO group by module so that we can delete multiple ids in a call
                        rmod_module.unlink(cr, uid, [rid])
                    else:
                        _logger.error('Could not locate %s to remove res=%d' % (rmod, rid))
                cr.execute('delete from ir_model_data where noupdate=%s and module=%s', (False, mod_name, ))
                cr.commit()

            # Remove menu items that are not referenced by any of other
            # (child) menu item, ir_values, or ir_model_data.
            # This code could be a method of ir_ui_menu.
            # TODO: remove menu without actions of children
            while True:
                cr.execute('''delete from ir_ui_menu where (id not IN (select parent_id from ir_ui_menu where parent_id is not null)) and (id not IN (select res_id from ir_values where model='ir.ui.menu')) and (id not IN (select res_id from ir_model_data where model='ir.ui.menu'))''')
                cr.commit()
                if not cr.rowcount:
                    break
                else:
                    _logger.info('removed %d unused menus', cr.rowcount)

            # Pretend that modules to be removed are actually uninstalled.
            cr.execute("update ir_module_module set state=%s where state=%s", ('uninstalled', 'to remove', ))
            cr.commit()

        _logger.info('Modules loaded.')
    finally:
        cr.close()
def create(self, cr, uid, ids, datas, context=None):
    """Render a "print screen" PDF of the records of datas['model'].

    Reconstructs the tree view's columns via fields_view_get, then either
    reads the records directly, or — when a group_by is active in the
    context — recursively expands read_group results into flat rows.
    Returns (document, 'pdf').
    """
    if not context:
        context = {}
    self.cr = cr
    self.context = context
    self.groupby = context.get('group_by', [])
    self.groupby_no_leaf = context.get('group_by_no_leaf', False)
    pool = pooler.get_pool(cr.dbname)
    model = pool.get(datas['model'])
    model_id = pool.get('ir.model').search(cr, uid, [('model', '=', model._name)])
    model_desc = model._description
    if model_id:
        # Prefer the (possibly translated) name stored on ir.model.
        model_desc = pool.get('ir.model').browse(cr, uid, model_id[0], context).name
    self.title = model_desc
    datas['ids'] = ids
    result = model.fields_view_get(cr, uid, view_type='tree', context=context)
    # Column order: the group-by fields first, then the tree view's columns.
    fields_order = self.groupby + self._parse_string(result['arch'])
    if self.groupby:
        rows = []

        # NOTE(review): mutable default arguments on this inner closure — it is
        # only called internally, but confirm nothing relies on the shared dicts.
        def get_groupby_data(groupby=[], domain=[]):
            # Recursively expand each group level; leaves become read() rows
            # unless group_by_no_leaf suppresses them.
            records = model.read_group(cr, uid, domain, fields_order, groupby, 0, None, context)
            for rec in records:
                rec['__group'] = True
                rec['__no_leaf'] = self.groupby_no_leaf
                rec['__grouped_by'] = groupby[0] if (isinstance(groupby, list) and groupby) else groupby
                for f in fields_order:
                    if f not in rec:
                        rec.update({f: False})
                    elif isinstance(rec[f], tuple):
                        # many2one values come back as (id, name); keep the name.
                        rec[f] = rec[f][1]
                rows.append(rec)
                inner_groupby = (rec.get('__context', {})).get('group_by', [])
                inner_domain = rec.get('__domain', [])
                if inner_groupby:
                    get_groupby_data(inner_groupby, inner_domain)
                else:
                    if self.groupby_no_leaf:
                        continue
                    child_ids = model.search(cr, uid, inner_domain)
                    res = model.read(cr, uid, child_ids, result['fields'].keys(), context)
                    # Keep the caller-provided ordering of ids.
                    res.sort(lambda x, y: cmp(ids.index(x['id']), ids.index(y['id'])))
                    rows.extend(res)
        dom = [('id', 'in', ids)]
        if self.groupby_no_leaf and len(ids) and not ids[0]:
            dom = datas.get('_domain', [])
        get_groupby_data(self.groupby, dom)
    else:
        rows = model.read(cr, uid, datas['ids'], result['fields'].keys(), context)
        ids2 = map(itemgetter('id'), rows)  # getting the ids from read result
        if datas['ids'] != ids2:
            # sorted ids were not taken into consideration for print screen
            rows_new = []
            for id in datas['ids']:
                rows_new += [elem for elem in rows if elem['id'] == id]
            rows = rows_new
    # NOTE(review): `res` is unused; _create_table presumably fills self.obj,
    # which is returned below — confirm against the parser base class.
    res = self._create_table(uid, datas['ids'], result['fields'], fields_order, rows, context, model_desc)
    return self.obj.get(), 'pdf'
def change_digit(cr):
    """Return a (16, digits) float precision tuple from decimal.precision."""
    precision_model = pooler.get_pool(cr.dbname).get('decimal.precision')
    digits = precision_model.precision_get(cr, SUPERUSER_ID, application)
    return (16, digits)
def generate_records(self, cr, uid, ids, data, context):
    """Build one payroll-summary dict per employee with payslips.

    For every hr.employee having payslips, sums the NET/BRUT/CNSS/IRPP
    payslip lines, the worked days/hours and the 'AV' (advance) inputs over
    the payslips falling inside the wizard's date range, and returns the
    list of result dictionaries consumed by the report template.
    """
    pool = pooler.get_pool(cr.dbname)
    result = []
    if 'form' in data:
        from_date = data['form']['date_debut']
        to_date = data['form']['date_fin']
        #dateAuj = time.strftime('%d-%m-%Y %H:%M')
        employee_ids = pool.get('hr.employee').search(cr, uid, [('slip_ids', '!=', '')])
        employee_objs = pool.get('hr.employee').browse(cr, uid, employee_ids)
        date = datetime.now().strftime('%d-%m-%Y')
        if employee_objs:
            for employee in employee_objs:
                # Per-employee accumulators, reset for each employee.
                net = 0
                brut = 0
                cnss = 0
                irpp = 0
                avance = 0
                nb_jour = 0
                nb_heure = 0
                nb_heure_supp = 0
                company = pool.get('res.company').browse(cr, uid, employee.company_id.id)
                payslip_ids = pool.get('hr.payslip').search(cr, uid, [('date_from', '>=', from_date), ('date_to', '<=', to_date), ('employee_id', '=', employee.id)])
                payslip_objs = pool.get('hr.payslip').browse(cr, uid, payslip_ids)
                if payslip_objs:
                    for pay in payslip_objs:
                        hr_payslip_line_ids = self.pool.get('hr.payslip.line').search(cr, uid, [('slip_id', '=', pay.id)])
                        hr_payslip_line_obj = self.pool.get('hr.payslip.line').browse(cr, uid, hr_payslip_line_ids)
                        hr_worked_days_ids = self.pool.get('hr.payslip.worked_days').search(cr, uid, [('payslip_id', '=', pay.id)])
                        hr_worked_days_obj = self.pool.get('hr.payslip.worked_days').browse(cr, uid, hr_worked_days_ids)
                        # Sum the salary-rule lines by code.
                        for hr_payslip_line in hr_payslip_line_obj:
                            if hr_payslip_line.code == 'NET':
                                net += hr_payslip_line.total
                            if hr_payslip_line.code == 'BRUT':
                                brut += hr_payslip_line.total
                            if hr_payslip_line.code == 'CNSS':
                                cnss += hr_payslip_line.total
                            if hr_payslip_line.code == 'IRPP':
                                irpp += hr_payslip_line.total
                        # Sum worked days (WORK100) and overtime (HS25/HS50).
                        if hr_worked_days_obj:
                            for worked_days in hr_worked_days_obj:
                                if worked_days.code == 'WORK100':
                                    nb_jour += worked_days.number_of_days
                                    nb_heure += worked_days.number_of_hours
                                if worked_days.code == 'HS25' or worked_days.code == 'HS50':
                                    nb_heure_supp += worked_days.number_of_hours
                        # Sum the advances (code 'AV') taken on this payslip.
                        hr_payslip_input_ids = self.pool.get('hr.payslip.input').search(cr, uid, [('payslip_id', '=', pay.id)])
                        hr_payslip_input_obj = self.pool.get('hr.payslip.input').browse(cr, uid, hr_payslip_input_ids)
                        if hr_payslip_input_obj:
                            for payinput in hr_payslip_input_obj:
                                if payinput.code == 'AV':
                                    avance += payinput.amount
                    # One summary row per employee (note: shadows the `data` parameter).
                    data = {
                        'net': net,
                        'brut': brut,
                        'employee': employee.name,
                        'matricule': employee.num_chezemployeur,
                        'stat_path': os.getcwd() + "/addons/pay_report/",
                        'nb_jour': nb_jour,
                        'nb_heure': nb_heure,
                        'nb_heure_supp': nb_heure_supp,
                        'cnss': cnss,
                        'irpp': irpp,
                        'date': date,
                        'avance': avance,
                        'nap': net - avance,
                        'from_date': from_date,
                        'to_date': to_date,
                        'company_name': company.name,
                        'company_street': company.street,
                        'company_street2': company.street2,
                        'company_city': company.city,
                        'company_zip': company.zip,
                    }
                    result.append(data)
                else:
                    # No payslip in the period: emit a mostly-empty row
                    # (accumulators are still their initial zero values here).
                    data = {
                        'net': '',
                        'brut': '',
                        'employee': employee.name,
                        'matricule': employee.num_chezemployeur,
                        'stat_path': os.getcwd() + "/addons/pay_report/",
                        'nb_jour': '',
                        'nb_heure': '',
                        'nb_heure_supp': '',
                        'cnss': '',
                        'irpp': '',
                        'date': date,
                        'avance': avance,
                        'nap': net - avance,
                        'from_date': from_date,
                        'to_date': to_date,
                        'company_name': company.name,
                        'company_street': company.street,
                        'company_street2': company.street2,
                        'company_city': company.city,
                        'company_zip': company.zip,
                    }
                    result.append(data)
    return result
def graph_get(self, cr, uid, id, res_model, res_id, scale, context=None):
    """Build the data structure used to render a process diagram.

    Collects the nodes and transitions of process ``id``, evaluates each
    node's activity/gray state against the record (res_model, res_id),
    attaches related-resource info, lays the graph out with tools.graph
    and returns a dict of name/resource/state/perm/notes/nodes/transitions.
    """
    pool = pooler.get_pool(cr.dbname)
    process = pool.get('process.process').browse(cr, uid, id, context=context)
    name = process.name
    resource = False
    state = 'N/A'
    expr_context = {}
    states = {}
    perm = False
    if res_model:
        # Selection labels of the model's 'state' field, keyed by value.
        states = dict(
            pool.get(res_model).fields_get(cr, uid, context=context).get(
                'state', {}).get('selection', {}))
    if res_id:
        current_object = pool.get(res_model).browse(cr, uid, res_id,
                                                    context=context)
        current_user = pool.get('res.users').browse(cr, uid, uid,
                                                    context=context)
        # Env wraps the record + user for the eval() calls below.
        expr_context = Env(current_object, current_user)
        resource = current_object.name
        if 'state' in current_object:
            state = states.get(current_object.state, 'N/A')
        perm = pool.get(res_model).perm_read(cr, uid, [res_id],
                                             context=context)[0]
    notes = process.note or "N/A"
    nodes = {}
    start = []
    transitions = {}
    for node in process.node_ids:
        data = {}
        data['name'] = node.name
        data['model'] = (node.model_id or None) and node.model_id.model
        data['kind'] = node.kind
        data['subflow'] = (node.subflow_id or False) and [
            node.subflow_id.id, node.subflow_id.name
        ]
        data['notes'] = node.note
        data['active'] = False
        data['gray'] = False
        data['url'] = node.help_url
        data['model_states'] = node.model_states
        # get assosiated workflow
        if data['model']:
            wkf_ids = self.pool.get('workflow').search(
                cr, uid, [('osv', '=', data['model'])])
            data['workflow'] = (wkf_ids or False) and wkf_ids[0]
        if 'directory_id' in node and node.directory_id:
            data['directory_id'] = node.directory_id.id
            data['directory'] = self.pool.get(
                'document.directory').get_resource_path(
                    cr, uid, data['directory_id'], data['model'], False)
        if node.menu_id:
            data['menu'] = {
                'name': node.menu_id.complete_name,
                'id': node.menu_id.id
            }
        # A node is "gray" (inactive-looking) when any of its conditions
        # evaluates false against the current record; eval failures are
        # deliberately swallowed and leave the node non-gray.
        try:
            gray = True
            for cond in node.condition_ids:
                if cond.model_id and cond.model_id.model == res_model:
                    gray = gray and eval(cond.model_states, expr_context)
            data['gray'] = not gray
        except:
            pass
        if not data['gray']:
            if node.model_id and node.model_id.model == res_model:
                try:
                    data['active'] = eval(node.model_states, expr_context)
                except Exception:
                    pass
        nodes[node.id] = data
        if node.flow_start:
            start.append(node.id)
        for tr in node.transition_out:
            data = {}
            data['name'] = tr.name
            data['source'] = tr.source_node_id.id
            data['target'] = tr.target_node_id.id
            data['notes'] = tr.note
            data['buttons'] = buttons = []
            for b in tr.action_ids:
                button = {}
                button['name'] = b.name
                button['state'] = b.state
                button['action'] = b.action
                buttons.append(button)
            data['groups'] = groups = []
            for r in tr.transition_ids:
                if r.group_id:
                    groups.append({'name': r.group_id.name})
            for r in tr.group_ids:
                groups.append({'name': r.name})
            transitions[tr.id] = data

    # now populate resource information
    def update_relatives(nid, ref_id, ref_model):
        # Recursively attach the (id, model, name, perm) of the record
        # backing node `nid`, then walk its neighbours through relation
        # fields.  NOTE(review): relies on `current_user` being bound,
        # which only happens when res_model and res_id are set — confirm
        # callers never reach here otherwise.
        relatives = []
        for dummy, tr in transitions.items():
            if tr['source'] == nid:
                relatives.append(tr['target'])
            if tr['target'] == nid:
                relatives.append(tr['source'])
        if not ref_id:
            nodes[nid]['res'] = False
            return
        nodes[nid]['res'] = resource = {'id': ref_id, 'model': ref_model}
        refobj = pool.get(ref_model).browse(cr, uid, ref_id, context=context)
        fields = pool.get(ref_model).fields_get(cr, uid, context=context)
        # check for directory_id from inherited from document module
        if nodes[nid].get('directory_id', False):
            resource['directory'] = self.pool.get(
                'document.directory').get_resource_path(
                    cr, uid, nodes[nid]['directory_id'], ref_model, ref_id)
        resource['name'] = pool.get(ref_model).name_get(
            cr, uid, [ref_id], context=context)[0][1]
        resource['perm'] = pool.get(ref_model).perm_read(
            cr, uid, [ref_id], context=context)[0]
        ref_expr_context = Env(refobj, current_user)
        try:
            if not nodes[nid]['gray']:
                nodes[nid]['active'] = eval(nodes[nid]['model_states'],
                                            ref_expr_context)
        except:
            pass
        for r in relatives:
            node = nodes[r]
            if 'res' not in node:
                for n, f in fields.items():
                    if node['model'] == ref_model:
                        update_relatives(r, ref_id, ref_model)
                    elif f.get('relation') == node['model']:
                        rel = refobj[n]
                        if rel and isinstance(rel, list):
                            rel = rel[0]
                        try:
                            # XXX: rel has been reported as string (check it)
                            _id = (rel or False) and rel.id
                            _model = node['model']
                            update_relatives(r, _id, _model)
                        except:
                            pass

    if res_id:
        # Seed the resource walk from the first non-gray node matching
        # the current record's model.
        for nid, node in nodes.items():
            if not node['gray'] and (node['active']
                                     or node['model'] == res_model):
                update_relatives(nid, res_id, res_model)
                break

    # calculate graph layout
    g = tools.graph(
        nodes.keys(),
        map(lambda x: (x['source'], x['target']), transitions.values()))
    g.process(start)
    g.scale(*scale)
    #g.scale(100, 100, 180, 120)
    graph = g.result_get()

    # fix the height problem
    miny = -1
    for k, v in nodes.items():
        x = graph[k]['x']
        y = graph[k]['y']
        if miny == -1:
            miny = y
        miny = min(y, miny)
        v['x'] = x
        v['y'] = y
    for k, v in nodes.items():
        y = v['y']
        v['y'] = min(y - miny + 10, y)
    # Stringify ids for the (JSON) client.
    nodes = dict([str(n_key), n_val]
                 for n_key, n_val in nodes.iteritems())
    transitions = dict([str(t_key), t_val]
                       for t_key, t_val in transitions.iteritems())
    return dict(name=name,
                resource=resource,
                state=state,
                perm=perm,
                notes=notes,
                nodes=nodes,
                transitions=transitions)
def create_xml(self, cr, uid, ids, data, context):
    """Build the XML payload of the per-employee monthly timesheet report.

    Reads the employee chosen in the wizard form, aggregates the user's
    analytic timesheet lines of the selected month by analytic account
    and by day, and returns the <report> XML string fed to the renderer.
    """
    # Get the user id from the selected employee record
    emp_id = data['form']['employee_id']
    emp_obj = pooler.get_pool(cr.dbname).get('hr.employee')
    user_id = emp_obj.browse(cr, uid, emp_id).user_id.id
    empl_name = emp_obj.browse(cr, uid, emp_id).name

    # Computing the dates (start of month: som, and end of month: eom)
    som = datetime.date(data['form']['year'], data['form']['month'], 1)
    eom = som + datetime.timedelta(lengthmonth(som.year, som.month))
    date_xml = [
        '<date month="%s" year="%d" />' % (self.get_month_name(
            cr, uid, som.month, context=context), som.year), '<days>'
    ]
    date_xml += [
        '<day number="%d" name="%s" weekday="%d" />' %
        (x,
         self.get_weekday_name(
             cr, uid, som.replace(day=x).weekday() + 1, context=context),
         som.replace(day=x).weekday() + 1)
        for x in range(1, lengthmonth(som.year, som.month) + 1)
    ]
    date_xml.append('</days>')
    date_xml.append('<cols>2.5cm%s,2cm</cols>\n' %
                    (',0.7cm' * lengthmonth(som.year, som.month)))

    # Sum attendence by account, then by day
    accounts = {}
    header_xml = ''
    if user_id:
        # Computing the attendence by analytical account
        cr.execute(
            "select line.date, (unit_amount / unit.factor) as amount, account_id, account.name "
            "from account_analytic_line as line, hr_analytic_timesheet as hr, "
            "account_analytic_account as account, product_uom as unit "
            "where hr.line_id=line.id and line.account_id=account.id "
            "and product_uom_id = unit.id "
            "and line.user_id=%s and line.date >= %s and line.date < %s "
            "order by line.date",
            (user_id, som.strftime('%Y-%m-%d'), eom.strftime('%Y-%m-%d')))
        for presence in cr.dictfetchall():
            # Day of month taken from the last two chars of the ISO date.
            day = int(presence['date'][-2:])
            account = accounts.setdefault(
                (presence['account_id'], presence['name']), {})
            account[day] = account.get(day, 0.0) + presence['amount']

    # Template for one day cell; rebound to the full report string below.
    xml = '''
    <time-element date="%s">
        <amount>%.2f</amount>
    </time-element>
    '''
    rpt_obj = pooler.get_pool(cr.dbname).get('hr.employee')
    rml_obj = report_sxw.rml_parse(cr, uid, rpt_obj._name, context)
    if user_id:
        header_xml = '''
        <header>
        <date>%s</date>
        <company>%s</company>
        </header>
        ''' % (
            str(rml_obj.formatLang(time.strftime("%Y-%m-%d"), date=True)) +
            ' ' + str(time.strftime("%H:%M")),
            to_xml(
                pooler.get_pool(cr.dbname).get('res.users').browse(
                    cr, uid, user_id).company_id.name))

    account_xml = []
    for account, telems in accounts.iteritems():
        aid, aname = account
        # Re-read the display name through name_get (translated).
        aname = pooler.get_pool(
            cr.dbname).get('account.analytic.account').name_get(
                cr, uid, [aid], context)
        aname = aname[0][1]
        account_xml.append('<account id="%d" name="%s">' %
                           (aid, toxml(aname)))
        account_xml.append('\n'.join(
            [xml % (day, amount) for day, amount in telems.iteritems()]))
        account_xml.append('</account>')

    # Computing the xml
    xml = '''<?xml version="1.0" encoding="UTF-8" ?>
    <report>
    %s
    <employee>%s</employee>
    %s
    </report>
    ''' % (header_xml, ustr(
        toxml(empl_name)), '\n'.join(date_xml) + '\n'.join(account_xml))
    return xml
def migrate(cr, version):
    """Migration hook: (re)create the required property records."""
    create_properties(cr, pooler.get_pool(cr.dbname))
def migrate(cr, version):
    """Migration hook: move the renamed fields to their new home."""
    move_fields(cr, pooler.get_pool(cr.dbname))
def execute_cr(self, cr, uid, model, method, *args, **kw):
    """Expand '@from~to' range values in search domains.

    Intercepts 'search' calls: a condition whose value looks like
    '@<from>~<to>' on a text/date/datetime/float/integer/many2one field
    is replaced in-place by the triple '&', (field >= from),
    (field <= to) before delegating to the parent dispatcher.

    Dates are normalised ('-' -> '/', spaces stripped) via _check_date;
    numbers have spaces and thousand separators removed via _check_number.

    :raises osv.except_osv: when a from/to part cannot be parsed as a
        date or number for the field's type.
    """
    fct_src = super(kderp_search, self).execute_cr
    if method == 'search':
        # NOTE: the original dumped *args to the hard-coded path
        # /home/dnt/a.txt on every search — leftover debug code that
        # crashed on any host without that directory; removed.
        for dom in args[0]:
            if isinstance(dom, list):
                if isinstance(dom[2], str):
                    # Range marker: value contains '@' and a '~' that is
                    # not the first character.
                    if dom[2].find('@') >= 0 and dom[2].find('~') >= 1:
                        mf = pooler.get_pool(
                            cr.dbname).get('ir.model.fields')
                        value_search = dom[2].strip()
                        field_name = dom[0]
                        pos = value_search.find("~")
                        f_ids = mf.search(cr, uid,
                                          [('model', '=', model),
                                           ('name', '=', field_name)],
                                          context={})
                        if f_ids:
                            ttype = mf.read(cr, uid, f_ids[0],
                                            ['ttype'])['ttype']
                            if ttype in ('text', 'char', 'date',
                                         'datetime', 'float', 'integer',
                                         'many2one'):
                                pos_dom = args[0].index(dom)
                                # Drop the '@from~to' condition; the
                                # expanded triple is inserted below at
                                # the same position.
                                args[0].pop(pos_dom)
                                # value_search[0] is the '@' marker.
                                from_value = value_search[1:pos]
                                to_value = value_search[pos + 1:]
                                if ttype == 'date':
                                    from_value = from_value.replace(
                                        ' ', '').replace("-", "/").strip()
                                    to_value = to_value.replace(
                                        ' ', '').replace("-", "/").strip()
                                    from_value = self._check_date(
                                        from_value)
                                    to_value = self._check_date(to_value)
                                    if not from_value or not to_value:
                                        raise osv.except_osv(
                                            _('KDERP Warning'),
                                            _('Input Invalid Date Format, please check %s'
                                              ) % value_search)
                                elif ttype in ('float', 'integer'):
                                    from_value = from_value.replace(
                                        ' ', '').replace(",", "")
                                    to_value = to_value.replace(
                                        ' ', '').replace(",", "")
                                    from_value = self._check_number(
                                        from_value)
                                    to_value = self._check_number(to_value)
                                    # BUGFIX: the original tested
                                    # from_value twice, so an unparsable
                                    # *to* bound slipped through; test
                                    # both bounds.
                                    if isinstance(from_value,
                                                  bool) or isinstance(
                                                      to_value, bool):
                                        raise osv.except_osv(
                                            _('KDERP Warning'),
                                            _('Input Invalid Number Format, please check %s'
                                              ) % value_search)
                                from_args = [
                                    '%s' % field_name, '>=', from_value
                                ]
                                to_args = [
                                    '%s' % field_name, '<=', to_value
                                ]
                                args[0].insert(pos_dom, to_args)
                                args[0].insert(pos_dom, from_args)
                                args[0].insert(pos_dom, '&')
    return fct_src(cr, uid, model, method, *args, **kw)
def getObjects(self, cr, uid, ids, model, context):
    """Browse and return the records of *model* with the given ids."""
    model_pool = pooler.get_pool(cr.dbname).get(model)
    return model_pool.browse(cr, uid, ids, context=context)
def get_sys_logs(self, cr, uid):
    """
    Utility method to send a publisher warranty get logs messages.

    Gathers anonymous usage statistics (db uuid, user counts, version,
    company contact data), POSTs them to the configured
    publisher_warranty_url and returns the server's reply evaluated
    with safe_eval.
    """
    pool = pooler.get_pool(cr.dbname)

    dbuuid = pool.get('ir.config_parameter').get_param(cr, uid,
                                                       'database.uuid')
    db_create_date = pool.get('ir.config_parameter').get_param(
        cr, uid, 'database.create_date')
    # "Active" means logged in during the last 15 days.
    limit_date = datetime.datetime.now()
    limit_date = limit_date - datetime.timedelta(15)
    limit_date_str = limit_date.strftime(misc.DEFAULT_SERVER_DATETIME_FORMAT)
    nbr_users = pool.get("res.users").search(cr, uid, [], count=True)
    nbr_active_users = pool.get("res.users").search(
        cr, uid, [("login_date", ">=", limit_date_str)], count=True)
    nbr_share_users = False
    nbr_active_share_users = False
    # The 'share' column only exists when the share module is installed.
    if "share" in pool.get("res.users")._all_columns:
        nbr_share_users = pool.get("res.users").search(cr,
                                                       uid,
                                                       [("share", "=", True)],
                                                       count=True)
        nbr_active_share_users = pool.get("res.users").search(
            cr,
            uid, [("share", "=", True), ("login_date", ">=", limit_date_str)],
            count=True)
    user = pool.get("res.users").browse(cr, uid, uid)

    web_base_url = self.pool.get('ir.config_parameter').get_param(
        cr, uid, 'web.base.url', 'False')
    msg = {
        "dbuuid": dbuuid,
        "nbr_users": nbr_users,
        "nbr_active_users": nbr_active_users,
        "nbr_share_users": nbr_share_users,
        "nbr_active_share_users": nbr_active_share_users,
        "dbname": cr.dbname,
        "db_create_date": db_create_date,
        "version": release.version,
        "language": user.lang,
        "web_base_url": web_base_url,
    }
    # Company id 1 is assumed to be the main company.
    msg.update(
        pool.get("res.company").read(cr, uid, [1],
                                     ["name", "email", "phone"])[0])

    # urlopen only grew a timeout argument in Python 2.6.
    add_arg = {"timeout": 30} if sys.version_info >= (2, 6) else {}
    arguments = {
        'arg0': msg,
        "action": "update",
    }
    arguments_raw = urllib.urlencode(arguments)

    url = config.get("publisher_warranty_url")

    uo = urllib2.urlopen(url, arguments_raw, **add_arg)
    result = {}
    try:
        submit_result = uo.read()
        result = safe_eval(submit_result)
    finally:
        uo.close()
    return result
def _create_table(self, uid, ids, fields, fields_order, results, context,
                  title=''):
    """Render a list of records as a printable table (print-screen report).

    Builds an intermediate <report> XML tree (page geometry, computed
    column widths, header, one <row> per record plus a totals row),
    transforms it with addons/base/report/custom_new.xsl and hands the
    resulting RML to the renderer.  Always returns True.
    """
    pageSize = [297.0, 210.0]  # landscape A4, in mm
    new_doc = etree.Element("report")
    config = etree.SubElement(new_doc, 'config')

    def _append_node(name, text):
        # Helper: add a <name>text</name> child under <config>.
        n = etree.SubElement(config, name)
        n.text = text

    #_append_node('date', time.strftime('%d/%m/%Y'))
    _append_node(
        'date',
        time.strftime(
            str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
    _append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
    # 2.8346 converts mm to points.
    _append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346, ))
    _append_node('PageHeight', '%.2f' % (pageSize[1] * 2.8346, ))
    _append_node('report-header', title)
    _append_node(
        'company',
        pooler.get_pool(self.cr.dbname).get('res.users').browse(
            self.cr, uid, uid).company_id.name)
    rpt_obj = pooler.get_pool(self.cr.dbname).get('res.users')
    rml_obj = report_sxw.rml_parse(self.cr, uid, rpt_obj._name, context)
    _append_node(
        'header-date',
        str(rml_obj.formatLang(time.strftime("%Y-%m-%d"), date=True)) + ' ' +
        str(time.strftime("%H:%M")))

    # Column width computation: date/number columns get a fixed 60pt;
    # the remaining width is shared among text columns by field size.
    l = []          # per-column widths
    t = 0           # total relative weight of text columns
    strmax = (pageSize[0] - 40) * 2.8346  # usable width in points
    temp = []       # 1 marks numeric columns (summable)
    tsum = []       # running totals per column
    for i in range(0, len(fields_order)):
        temp.append(0)
        tsum.append(0)
    ince = -1
    for f in fields_order:
        s = 0
        ince += 1
        if fields[f]['type'] in ('date', 'time', 'datetime', 'float',
                                 'integer'):
            s = 60
            strmax -= s
            if fields[f]['type'] in ('float', 'integer'):
                temp[ince] = 1
        else:
            t += fields[f].get('size', 80) / 28 + 1
        l.append(s)
    for pos in range(len(l)):
        if not l[pos]:
            s = fields[fields_order[pos]].get('size', 80) / 28 + 1
            l[pos] = strmax * s / t
    _append_node('tableSize', ','.join(map(str, l)))

    header = etree.SubElement(new_doc, 'header')
    for f in fields_order:
        field = etree.SubElement(header, 'field')
        field.text = tools.ustr(fields[f]['string'] or '')

    lines = etree.SubElement(new_doc, 'lines')
    for line in results:
        node_line = etree.SubElement(lines, 'row')
        count = -1
        for f in fields_order:
            float_flag = 0
            count += 1
            # Normalise each cell value to a display string, by type.
            if fields[f]['type'] == 'many2one' and line[f]:
                if not line.get('__group'):
                    line[f] = line[f][1]  # (id, name) -> name
            if fields[f]['type'] == 'selection' and line[f]:
                for key, value in fields[f]['selection']:
                    if key == line[f]:
                        line[f] = value
                        break
            if fields[f]['type'] in ('one2many', 'many2many') and line[f]:
                line[f] = '( ' + tools.ustr(len(line[f])) + ' )'
            if fields[f]['type'] == 'float' and line[f]:
                precision = (('digits' in fields[f])
                             and fields[f]['digits'][1]) or 2
                prec = '%.' + str(precision) + 'f'
                line[f] = prec % (line[f])
                float_flag = 1
            if fields[f]['type'] == 'date' and line[f]:
                new_d1 = line[f]
                if not line.get('__group'):
                    format = str(
                        locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))
                    d1 = datetime.strptime(line[f], '%Y-%m-%d')
                    new_d1 = d1.strftime(format)
                line[f] = new_d1
            if fields[f]['type'] == 'time' and line[f]:
                new_d1 = line[f]
                if not line.get('__group'):
                    format = str(locale.nl_langinfo(locale.T_FMT))
                    d1 = datetime.strptime(line[f], '%H:%M:%S')
                    new_d1 = d1.strftime(format)
                line[f] = new_d1
            if fields[f]['type'] == 'datetime' and line[f]:
                new_d1 = line[f]
                if not line.get('__group'):
                    format = str(
                        locale.nl_langinfo(locale.D_FMT).replace(
                            '%y', '%Y')) + ' ' + str(
                                locale.nl_langinfo(locale.T_FMT))
                    d1 = datetime.strptime(line[f], '%Y-%m-%d %H:%M:%S')
                    new_d1 = d1.strftime(format)
                line[f] = new_d1
            if line.get('__group'):
                col = etree.SubElement(node_line,
                                       'col',
                                       para='group',
                                       tree='no')
            else:
                col = etree.SubElement(node_line,
                                       'col',
                                       para='yes',
                                       tree='no')
            # Prevent empty labels in groups
            if f == line.get('__grouped_by') and line.get(
                    '__group'
            ) and not line[f] and not float_flag and not temp[count]:
                col.text = line[f] = 'Undefined'
                col.set('tree', 'undefined')
            if line[f] is not None:
                col.text = tools.ustr(line[f] or '')
                if float_flag:
                    col.set('tree', 'float')
                # Accumulate numeric columns for the totals row.
                if line.get('__no_leaf') and temp[
                        count] == 1 and f != 'id' and not line[
                            '__context']['group_by']:
                    tsum[count] = float(tsum[count]) + float(line[f])
                if not line.get(
                        '__group') and f != 'id' and temp[count] == 1:
                    tsum[count] = float(tsum[count]) + float(line[f])
            else:
                col.text = '/'

    # Totals row: first column reads 'Total', numeric columns show sums.
    node_line = etree.SubElement(lines, 'row')
    for f in range(0, len(fields_order)):
        col = etree.SubElement(node_line, 'col', para='group', tree='no')
        col.set('tree', 'float')
        if tsum[f] is not None:
            if tsum[f] != 0.0:
                digits = fields[fields_order[f]].get('digits', (16, 2))
                prec = '%%.%sf' % (digits[1], )
                total = prec % (tsum[f], )
                txt = str(total or '')
            else:
                txt = str(tsum[f] or '')
        else:
            txt = '/'
        if f == 0:
            txt = 'Total'
            col.set('tree', 'no')
        col.text = tools.ustr(txt or '')

    transform = etree.XSLT(
        etree.parse(
            os.path.join(tools.config['root_path'],
                         'addons/base/report/custom_new.xsl')))
    rml = etree.tostring(transform(new_doc))
    self.obj = render.rml(rml, title=self.title)
    self.obj.render()
    return True
def clean_proxy_args(instance, cr, uid, prpt_content, proxy_argument):
    """Revoke the temporary Pentaho auth token carried in the proxy args."""
    openerp_settings = proxy_argument.get('connection_settings',
                                          {}).get('openerp', {})
    users_pool = pooler.get_pool(cr.dbname).get('res.users')
    users_pool.pentaho_undo_token(cr, uid, uid,
                                  openerp_settings.get('password', ''))
def create_single_pdf(self, cr, uid, ids, data, report_xml, context=None):
    """Render the report, then staple terms-and-conditions PDFs to it.

    Delegates to the stock create_single_pdf, and for PDF output looks
    up 'term.rule' records matching the report name; rules that pass
    their company / condition checks get their attached PDF pages added
    before ('begin'), after ('end') or alongside ('duplex') the report.
    """
    log = logging.getLogger('agaplan_terms_and_conditions')
    res = openerp_create_single_pdf(self, cr, uid, ids, data, report_xml,
                                    context)
    if report_xml.report_type != 'pdf':
        return res

    pool = pooler.get_pool(cr.dbname)
    # Check conditions to add or not
    rule_obj = pool.get('term.rule')
    rule_ids = rule_obj.search(cr, uid, [
        ('report_name', '=', report_xml.report_name),
    ])
    if not len(rule_ids):
        # No conditions should be added, return regular result
        return res

    valid_rules = []
    for rule in rule_obj.browse(cr, uid, rule_ids, context=context):
        log.debug("Checking rule %s for report %s", rule.term_id.name,
                  report_xml.report_name)
        if rule.company_id:
            # Only apply company-bound rules when the printed record
            # belongs to the same company (ids[0] only is inspected).
            model_obj = pool.get(data['model']).browse(cr,
                                                       uid,
                                                       ids[0],
                                                       context=context)
            if hasattr(model_obj, 'company_id'):
                if rule.company_id.id != model_obj.company_id.id:
                    log.debug("Company id's did not match !")
                    continue
                else:
                    log.debug("Company id's matched !")
        if rule.condition:
            env = {
                'object': pool.get(data['model']).browse(cr,
                                                         uid,
                                                         ids[0],
                                                         context=context),
                'report': report_xml,
                'data': data,
                'date': time.strftime('%Y-%m-%d'),
                'time': time,
                'context': context,
            }
            # User has specified a condition, check it and return res when not met
            if not safe_eval(rule.condition, env):
                log.debug("Term condition not met !")
                continue
            else:
                log.debug("Term condition met !")
        valid_rules += [rule]

    # Rebuild the PDF: 'begin' pages, the report itself, then 'duplex'
    # and 'end' pages.  NOTE(review): 'duplex' pages are appended as a
    # block here, not interleaved per sheet — confirm that is intended.
    output = PdfFileWriter()
    reader = PdfFileReader(StringIO(res[0]))
    for rule in valid_rules:
        if rule.term_id.mode == 'begin':
            att = PdfFileReader(
                StringIO(base64.decodestring(rule.term_id.pdf)))
            [output.addPage(page) for page in att.pages]
    for page in reader.pages:
        output.addPage(page)
    for rule in valid_rules:
        if rule.term_id.mode == 'duplex':
            att = PdfFileReader(
                StringIO(base64.decodestring(rule.term_id.pdf)))
            [output.addPage(page) for page in att.pages]
    for rule in valid_rules:
        if rule.term_id.mode == 'end':
            att = PdfFileReader(
                StringIO(base64.decodestring(rule.term_id.pdf)))
            [output.addPage(page) for page in att.pages]
    buf = StringIO()
    output.write(buf)
    return (buf.getvalue(), report_xml.report_type)
process_sql_file(cr, fp) elif ext == '.yml': tools.convert_yaml_import(cr, module_name, fp, idref, mode, noupdate) else: tools.convert_xml_import(cr, module_name, fp, idref, mode, noupdate, report) finally: fp.close() if status is None: status = {} processed_modules = [] loaded_modules = [] pool = pooler.get_pool(cr.dbname) migrations = openerp.modules.migration.MigrationManager(cr, graph) _logger.debug('loading %d packages...', len(graph)) # get db timestamp cr.execute("select (now() at time zone 'UTC')::timestamp") dt_before_load = cr.fetchone()[0] # register, instantiate and initialize models for each modules for index, package in enumerate(graph): module_name = package.name module_id = package.id if skip_modules and module_name in skip_modules: continue
def export_xls_picking(self, data, token):
    """Export the selected confirmed DDTs (delivery notes) as a DHL CSV.

    Builds one CSV row per confirmed 'stock.ddt' record selected in the
    client (ids in data['rows']); sender fields come from the current
    user's company partner, recipient fields from the DDT's first
    picking.  The file is written to /var/tmp, streamed back as an
    attachment and deleted from disk.
    """
    # Resolve the request environment; company/partner data below is
    # read as SUPERUSER to bypass record rules.
    cr, uid, context, pool = request.cr, request.uid, request.context, request.registry
    # Sender ("mitt.") data: current user's company partner
    # (res_users -> res_company -> res_partner).
    partner_dest = pool['res.users'].browse(
        cr, SUPERUSER_ID, uid, context=context).company_id.partner_id.name
    partner_dest_street = pool['res.users'].browse(
        cr, SUPERUSER_ID, uid, context=context).company_id.partner_id.street
    partner_dest_zip = pool['res.users'].browse(
        cr, SUPERUSER_ID, uid, context=context).company_id.partner_id.zip
    partner_dest_city = pool['res.users'].browse(
        cr, SUPERUSER_ID, uid, context=context).company_id.partner_id.city
    partner_dest_country_id = pool['res.users'].browse(
        cr, SUPERUSER_ID, uid,
        context=context).company_id.partner_id.country_id.code
    if not partner_dest_country_id:
        partner_dest_country_id = 'IT'  # default country: Italy
    partner_dest_email = pool['res.users'].browse(
        cr, SUPERUSER_ID, uid, context=context).company_id.partner_id.email
    partner_dest_phone = pool['res.users'].browse(
        cr, SUPERUSER_ID, uid, context=context).company_id.partner_id.phone

    data = json.loads(data)
    model = data.get('model', [])
    rows = data.get('rows', [])
    rows = [int(x) for x in rows]
    osv_pool = pooler.get_pool(request.db)
    model = osv_pool.get('stock.ddt')
    datenow = datetime.now()
    # DDTs selected in the list view.
    stock_ddt_ids = model.search(request.cr, request.uid,
                                 [('id', 'in', rows)])
    name_file = 'Prebolla_' + datenow.strftime("%Y%m%d_%H:%M:%S") + '.csv'
    for stock_ddt_id in stock_ddt_ids:
        stock_ddt = model.browse(request.cr,
                                 request.uid,
                                 stock_ddt_id,
                                 context=request.context)
        # Only confirmed DDTs are written to the generated file.
        if stock_ddt.state == 'confirmed':
            # DHL product code: strip the trailing 3 characters when set.
            if stock_ddt.product_code_dhl.default_code:
                default_code_ddt = stock_ddt.product_code_dhl.default_code[:
                                                                           -3]
            else:
                default_code_ddt = 0
            ddtDate = fields.Date.from_string(
                stock_ddt.delivery_date).strftime(
                    "%Y%m%d"
                ) if stock_ddt.delivery_date else stock_ddt.ddt_date
            ddtNumber = stock_ddt.name if stock_ddt.name else '0'
            # Recipient data comes from the DDT's first picking
            # (sale order -> picking -> DDT); a DDT may have several
            # pickings and the first one leads to the partner.
            picking_id = stock_ddt.picking_ids[0]
            ragioneSoc = stock_ddt.partner_id.name
            nomeDest = picking_id.partner_id.name
            viaDest = picking_id.partner_id.street
            capDest = picking_id.partner_id.zip
            cittaDest = picking_id.partner_id.city
            nazioneDest = picking_id.partner_id.country_id.code
            emailDest = picking_id.partner_id.email
            telefonoDest = picking_id.partner_id.phone
            #capDestSbagliato = stock_ddt.partner_id.zip
            #cittaDestSbagliata = stock_ddt.partner_id.city
            #nazioneDestSbagliata = stock_ddt.partner_id.country_id.code
            #emailDestSbagliata = stock_ddt.partner_id.email
            #telefonoDestSbagliato = stock_ddt.partner_id.phone
            if not nazioneDest:
                nazioneDest = 'IT'
            if not emailDest:
                emailDest = ''
            if not telefonoDest:
                telefonoDest = '0'
            # One CSV row as label/value pairs; only the values are
            # written out below.
            arrayRow = [['Codice prodotto dhl', default_code_ddt],
                        ['Data spedizione', ddtDate],
                        ['Tipo spedizione', stock_ddt.shipping_type],
                        [
                            'Codice pagante dhl',
                            stock_ddt.user_id.company_id.paying_code_dhl
                        ], [';', ''], [';', ''], [';', ''], [';', ''],
                        ['Riferimento mitt.', ddtNumber],
                        [
                            'Descrizione contenuto',
                            'Integratori/Dispositivi medici'
                        ], ['Mitt.', partner_dest],
                        ['Contatto mitt.', '.'],
                        ['Indirizzo1 mitt.', partner_dest_street],
                        [';', ''], [';', ''],
                        ['Cap mitt.', partner_dest_zip],
                        ['Citta mitt.', partner_dest_city],
                        ['Nazione mitt.', partner_dest_country_id],
                        [';', ''], ['Mail mitt.', partner_dest_email],
                        ['Telefono mitt.', partner_dest_phone],
                        ['Ragione sociale soc.', ragioneSoc],
                        ['Contatto destino', nomeDest],
                        ['Indirizzo1 destino', viaDest], [';', ''],
                        [';', ''], ['Cap destino', capDest],
                        ['Citta destino', cittaDest],
                        ['Nazione destino', nazioneDest],
                        ['Email destinatario', emailDest], [';', ''],
                        ['Telefono dest.', telefonoDest],
                        ['Peso spedizione totale', stock_ddt.weight],
                        ['Colli totali', stock_ddt.parcels],
                        ['Larghezza collo', '1'],
                        ['Altezza collo', '1'],
                        ['Lunghezza collo', '1'], [';', '']]
            with open(('/var/tmp/' + name_file), "a") as testfile:
                # NOTE(review): csv.writer(..., encoding=...) is the
                # unicodecsv API, not stdlib csv — presumably `csv` is
                # bound to unicodecsv at import time; confirm.
                writer = csv.writer(testfile,
                                    delimiter=';',
                                    quotechar='"',
                                    quoting=csv.QUOTE_ALL,
                                    encoding='utf-8')
                writer.writerow([y[1] for y in arrayRow])
        else:
            pass
    # If no confirmed DDT produced a file (only draft/cancelled were
    # selected), emit a CSV containing a single explanatory line.
    if not os.path.isfile('/var/tmp/' + name_file):
        with open(('/var/tmp/' + name_file), "a") as testfile:
            header = ['Non esistono Ddt in stato confermato da elaborare']
            writer = csv.writer(testfile,
                                delimiter=';',
                                quotechar='"',
                                quoting=csv.QUOTE_ALL,
                                encoding='utf-8')
            writer.writerow(header)
    else:
        pass
    # Re-open the generated file as the response payload...
    data = open('/var/tmp/' + name_file)
    # ...then delete it from disk (the open handle keeps it readable).
    os.remove('/var/tmp/' + name_file)
    return request.make_response(
        data,
        headers=[('Content-Disposition',
                  'attachment; filename="%s"' % name_file),
                 ('Content-Type', 'application/csv')],
        cookies={'fileToken': token})
def _create_correos_file(self, cr, uid, data, context):
    """Build the tab-separated address file for the Correos virtual office.

    One fixed-width, tab-separated line per selected partner, using the
    partner's 'delivery' address.  Any missing piece of data (address,
    street, zip, city, state, country) or a selection larger than 300
    partners raises Log, which is caught and turned into a 'failed'
    result dict.

    NOTE(review): the success-path return is not visible in this chunk;
    as shown, the function returns None when no Log is raised — confirm
    against the full file.
    """
    txt_correos = ''
    try:
        if len(data['ids']) > 300:
            # Hard limit imposed by the Correos virtual office.
            raise Log(
                _('User error:\n\n The Virtual Office of Correos only support 300 address. You have selected %s partners.'
                  ) % (len(data['ids'])), True)
        else:
            pool = pooler.get_pool(cr.dbname)
            partner_obj = pool.get('res.partner')
            partners = partner_obj.browse(cr, uid, data['ids'], context)
            for partner in partners:
                address_id = partner_obj.address_get(
                    cr, uid, [partner.id], ['delivery'])['delivery']
                if not address_id:
                    raise Log(
                        _('User error:\n\n The partner %s hasn\'t address.')
                        % (partner.name), True)
                address = pool.get('res.partner.address').browse(
                    cr, uid, [address_id], context)[0]
                # Fixed-width, tab-separated record; leading blank
                # filler fields, then name/street/zip/city/state/country
                # each truncated and left-justified to its width.
                txt_correos += 48 * " " + "\t"
                txt_correos += 48 * " " + "\t"
                txt_correos += to_ascii(partner.name)[0:50].ljust(50) + "\t"
                txt_correos += 44 * " " + "\t"
                if address.street:
                    txt_correos += to_ascii(
                        address.street)[0:50].ljust(50) + "\t"
                else:
                    raise Log(
                        _('User error:\n\n The partner %s hasn\'t street.')
                        % (partner.name), True)
                if address.zip:
                    txt_correos += to_ascii(address.zip)[0:10].ljust(10) + "\t"
                else:
                    raise Log(
                        _('User error:\n\n The partner %s hasn\'t zip.') %
                        (partner.name), True)
                if address.city:
                    txt_correos += to_ascii(
                        address.city)[0:50].ljust(50) + "\t"
                else:
                    raise Log(
                        _('User error:\n\n The partner %s hasn\'t city.') %
                        (partner.name), True)
                if address.state_id:
                    txt_correos += to_ascii(
                        address.state_id.name)[0:50].ljust(50) + "\t"
                else:
                    raise Log(
                        _('User error:\n\n The partner %s hasn\'t state.') %
                        (partner.name), True)
                if address.country_id:
                    txt_correos += to_ascii(
                        address.country_id.code)[0:2].ljust(2) + "\t"
                else:
                    raise Log(
                        _('User error:\n\n The partner %s hasn\'t country.')
                        % (partner.name), True)
                txt_correos += str(data['form']['color']) + "\t"
                txt_correos += str(data['form']['certificate']) + "\t"
                txt_correos += '\r\n'
    except Log, log:
        # Validation failure: surface the accumulated log message.
        return {'note': log(), 'correos': False, 'state': 'failed'}
def create_aeroo_report(self, cr, uid, ids, data, report_xml, context=None,
                        output='odt'):
    """ Returns an aeroo report generated with aeroolib

    Resolves the records and parser, prepares the parser localcontext
    (data, user_lang, 'o', extra functions), loads the ODF template
    from file/database or a custom source, and constructs the aeroolib
    Template with the appropriate serializer.

    NOTE(review): the rendering/return of ``basic`` is not visible in
    this chunk — the function appears truncated here; confirm against
    the full file.
    """
    pool = pooler.get_pool(cr.dbname)
    if not context:
        context = {}
    context = context.copy()
    if self.name == 'report.printscreen.list':
        context['model'] = data['model']
        context['ids'] = ids
    print_id = context.get('print_id', False)
    aeroo_print = self.active_prints[print_id]  # Aeroo print object
    aeroo_print.subreports = []
    #self.oo_subreports[print_id] = []
    objects = self.getObjects_mod(cr, uid, ids, report_xml.report_type,
                                  context) or []
    # Custom parser from the report definition, else the default one.
    parser = report_xml.get_custom_parser(cr, uid, [report_xml.id],
                                          context=context)
    if parser is None:
        oo_parser = self.parser(cr, uid, self.name2, context=context)
    else:
        oo_parser = parser(cr, uid, self.name2, context=context)
    oo_parser.localcontext.update(context)
    oo_parser.set_context(objects, data, ids, report_xml.report_type)
    self.set_xml_data_fields(objects, oo_parser)  # Get/Set XML
    oo_parser.localcontext['data'] = data
    oo_parser.localcontext['user_lang'] = context.get('lang', False)
    if len(objects) > 0:
        # Convenience alias 'o' for single-record templates.
        oo_parser.localcontext['o'] = objects[0]
    xfunc = ExtraFunctions(cr, uid, report_xml.id, oo_parser.localcontext)
    oo_parser.localcontext.update(xfunc.functions)
    #company_id = objects and 'company_id' in objects[0]._table._columns.keys() and \
    #    objects[0].company_id and objects[0].company_id.id or False # for object company usage
    company_id = False
    style_io = self.get_styles_file(cr,
                                    uid,
                                    report_xml,
                                    company=company_id,
                                    context=context)
    # Template source: stored content vs. dynamically supplied template.
    if report_xml.tml_source in ('file', 'database'):
        if not report_xml.report_sxw_content or report_xml.report_sxw_content == 'False':
            raise osv.except_osv(_('Error!'), _('No template found!'))
        file_data = base64.decodestring(report_xml.report_sxw_content)
    else:
        file_data = self.get_other_template(cr, uid, data, oo_parser)
    if not file_data and not report_xml.report_sxw_content:
        self.logger(
            "End process %s (%s), elapsed time: %s" %
            (self.name, self.table, time.time() - aeroo_print.start_time),
            logging.INFO)  # debug mode
        return False, output
    #elif file_data:
    #    template_io = StringIO()
    #    template_io.write(file_data or report_xml.report_sxw_content)
    #    basic = Template(source=template_io, styles=style_io)
    else:
        # Preloaded serializer reuse vs. fresh serializer per run.
        if report_xml.preload_mode == 'preload' and hasattr(
                self, 'serializer'):
            serializer = copy.copy(self.serializer)
            serializer.apply_style(style_io)
            template_io = serializer.template
        else:
            template_io = StringIO()
            template_io.write(
                file_data
                or base64.decodestring(report_xml.report_sxw_content))
            serializer = OOSerializer(template_io, oo_styles=style_io)
        try:
            basic = Template(source=template_io, serializer=serializer)
        except Exception, e:
            self._raise_exception(e, print_id)
def migrate(cr, version):
    """Post-migration: recompute message_last_post for timesheet sheets."""
    registry = pooler.get_pool(cr.dbname)
    openupgrade_80.set_message_last_post(cr, SUPERUSER_ID, registry,
                                         ['hr_timesheet_sheet.sheet'])
def create_source_pdf(self, cr, uid, ids, data, report_xml, context=None):
    """Produce the report PDF, honoring attachments and per-record runs.

    For each record: reuse a stored ir.attachment when allowed,
    otherwise render a fresh single report (optionally storing it back
    as an attachment), then merge all per-record PDFs into one output.

    :returns: ``(binary_content, extension)`` tuple, or False on a
              failed render.
    """
    if not context:
        context = {}
    pool = pooler.get_pool(cr.dbname)
    attach = report_xml.attachment
    aeroo_ooo = aeroo_ooo_test(cr)  # Detect report_aeroo_ooo module
    context['aeroo_ooo'] = aeroo_ooo
    print_id = context.get('print_id', False)
    aeroo_print = self.active_prints[print_id]  # Aeroo print object
    # NOTE precedence: parses as `attach or (aeroo_ooo and process_sep)`.
    if attach or aeroo_ooo and report_xml.process_sep:
        objs = self.getObjects_mod(cr, uid, ids, report_xml.report_type, context)
        deferred = context.get('deferred_process')
        results = []
        for obj in objs:
            aeroo_print.start_time = time.time()
            if deferred:
                deferred.progress_update()
            # 'attach' is an expression from the report definition,
            # evaluated per record to build the attachment name.
            aname = attach and eval(attach, {
                'object': obj,
                'time': time
            }) or False
            result = False
            if report_xml.attachment_use and aname and context.get(
                    'attachment_use', True):
                # Reuse a previously stored attachment (any extension)
                # instead of re-rendering.
                cr.execute(
                    "SELECT id, datas_fname FROM ir_attachment WHERE datas_fname ilike %s and res_model=%s and res_id=%s LIMIT 1",
                    (aname + '.%', self.table, obj.id))
                search_res = cr.dictfetchone()
                if search_res:
                    brow_rec = pool.get('ir.attachment').browse(
                        cr, uid, search_res['id'])
                    if not brow_rec.datas:
                        continue
                    d = base64.decodestring(brow_rec.datas)
                    extension = search_res['datas_fname'].split('.')[1]
                    results.append((d, extension))
                    continue
            result = self.create_single_pdf(cr, uid, [obj.id], data,
                                            report_xml, context)
            if not result:
                return False
            try:
                # Store the freshly rendered document back as an
                # attachment on the record (best effort).
                if attach and aname:
                    name = aname + '.' + result[1]
                    datas = base64.encodestring(result[0])
                    ctx = dict(context)
                    ctx.pop('default_type', None)
                    pool.get('ir.attachment').create(
                        cr, uid, {
                            'name': aname,
                            'datas': datas,
                            'datas_fname': name,
                            'res_model': self.table,
                            'res_id': obj.id,
                            'type': 'binary'
                        },
                        context=ctx)
                    cr.commit()
            except Exception, e:
                # Attachment storage is best-effort: log and continue.
                # NOTE(review): tb_s is computed but never used —
                # presumably meant to be logged instead of str(e).
                tb_s = reduce(
                    lambda x, y: x + y,
                    traceback.format_exception(sys.exc_type, sys.exc_value,
                                               sys.exc_traceback))
                logger.log(logging.ERROR, str(e))
            results.append(result)
        if results and len(results) == 1:
            return results[0]
        if results:
            # Only same-format results can be merged, and only PDFs.
            not_pdf = filter(lambda r: r[1] != 'pdf', results)
            if not_pdf:
                raise osv.except_osv(
                    _('Error!'), _('Unsupported combination of formats!'))
            # Merge every per-record PDF into a single document.
            output = PdfFileWriter()
            for r in results:
                reader = PdfFileReader(StringIO(r[0]))
                for page in range(reader.getNumPages()):
                    output.addPage(reader.getPage(page))
            s = StringIO()
            output.write(s)
            return s.getvalue(), results[0][1]
    # NOTE(review): falls through returning None when neither the
    # attachment nor the process_sep path applies — confirm the method
    # is not truncated here in the original file.
def create_source_odt(self, cr, uid, ids, data, report_xml, context=None):
    """Produce the report in ODT mode, honoring stored attachments.

    Mirrors create_source_pdf: per record, reuse an existing
    ir.attachment when allowed, otherwise render and optionally store
    the result as a new attachment.
    """
    if not context:
        context = {}
    pool = pooler.get_pool(cr.dbname)
    results = []
    attach = report_xml.attachment
    aeroo_ooo = aeroo_ooo_test(cr)  # Detect report_aeroo_ooo module
    context['aeroo_ooo'] = aeroo_ooo
    print_id = context.get('print_id', False)
    aeroo_print = self.active_prints[print_id]  # Aeroo print object
    # NOTE precedence: parses as `attach or (aeroo_ooo and process_sep)`.
    if attach or aeroo_ooo and report_xml.process_sep:
        objs = self.getObjects_mod(cr, uid, ids, report_xml.report_type,
                                   context)
        deferred = context.get('deferred_process')
        for obj in objs:
            aeroo_print.start_time = time.time()
            if deferred:
                deferred.progress_update()
            # 'attach' is an expression from the report definition,
            # evaluated per record to build the attachment name.
            aname = attach and eval(attach, {
                'object': obj,
                'time': time
            }) or False
            result = False
            if report_xml.attachment_use and aname and context.get(
                    'attachment_use', True):
                # Reuse a previously stored attachment (any extension).
                cr.execute(
                    "SELECT id, datas_fname FROM ir_attachment WHERE datas_fname ilike %s and res_model=%s and res_id=%s LIMIT 1",
                    (aname + '.%', self.table, obj.id))
                search_res = cr.dictfetchone()
                if search_res:
                    brow_rec = pool.get('ir.attachment').browse(
                        cr, uid, search_res['id'])
                    if not brow_rec.datas:
                        continue
                    d = base64.decodestring(brow_rec.datas)
                    extension = search_res['datas_fname'].split('.')[1]
                    results.append((d, extension))
                    continue
            # NOTE(review): calls create_single_pdf even though this is
            # the ODT path — upstream aeroo uses create_single_odt here;
            # confirm this is intentional.  Also unlike create_source_pdf
            # there is no `if not result: return False` guard, so a False
            # result is appended after the (silently caught) attachment
            # failure below.
            result = self.create_single_pdf(cr, uid, [obj.id], data,
                                            report_xml, context)
            try:
                # Store the rendered document back as an attachment on
                # the record (best effort).
                if attach and aname:
                    name = aname + '.' + result[1]
                    datas = base64.encodestring(result[0])
                    ctx = dict(context)
                    ctx.pop('default_type', None)
                    pool.get('ir.attachment').create(
                        cr, uid, {
                            'name': aname,
                            'datas': datas,
                            'datas_fname': name,
                            'res_model': self.table,
                            'res_id': obj.id,
                            'type': 'binary'
                        },
                        context=ctx)
                    cr.commit()
            except Exception, e:
                self.logger(
                    _("Create attachment error!") + '\n' + str(e),
                    logging.ERROR)
            results.append(result)
def getObjects_mod(self, cr, uid, ids, rep_type, context):
    """Return browse records of this report's model for *ids*.

    :param rep_type: report type, kept for interface compatibility —
        the original code branched on ``rep_type == 'aeroo'`` but both
        branches performed the exact same ``browse()`` call, so the
        dead branch has been removed.
    """
    table_obj = pooler.get_pool(cr.dbname).get(self.table)
    return table_obj.browse(cr, uid, ids, context=context)
def create_single_pdf(self, cursor, uid, ids, data, report_xml, context=None):
    """Generate the webkit report for *ids*.

    Renders the mako body template (per record in ``precise_mode``,
    once otherwise) and feeds the HTML to wkhtmltopdf.  Header and
    footer are deliberately NOT rendered as HTML here: they are written
    as plain text by ``generate_pdf`` (hence ``head = foot = False``).

    :returns: ``(content, 'pdf')`` normally, or ``(html, 'html')`` when
              the report is flagged ``webkit_debug``.
    :raises except_osv: on missing template, missing header, or any
                        mako rendering error.

    Fixes vs. original: ``file(path).read()`` leaked the file handle
    (now a ``with open(...)``); the identical render/try/except block
    appeared three times and is factored into ``_render``.
    """
    if context is None:
        context = {}
    # Non-webkit reports keep the stock rendering pipeline.
    if report_xml.report_type != 'webkit':
        return super(HeaderFooterTextWebKitParser,
                     self).create_single_pdf(cursor, uid, ids, data,
                                             report_xml, context=context)

    parser_instance = self.parser(cursor, uid, self.name2, context=context)
    self.pool = pooler.get_pool(cursor.dbname)
    objs = self.getObjects(cursor, uid, ids, context)
    parser_instance.set_context(objs, data, ids, report_xml.report_type)

    # Template lookup: module file first, then the database copy.
    template = False
    if report_xml.report_file:
        path = addons.get_module_resource(
            *report_xml.report_file.split(os.path.sep))
        if os.path.exists(path):
            with open(path) as tpl_file:
                template = tpl_file.read()
    if not template and report_xml.report_webkit_data:
        template = report_xml.report_webkit_data
    if not template:
        raise except_osv(_('Error!'),
                         _('Webkit Report template not found !'))

    header = report_xml.webkit_header.html
    if not header and report_xml.header:
        raise except_osv(_('No header defined for this Webkit report!'),
                         _('Please set a header in company settings.'))
    css = report_xml.webkit_header.css
    if not css:
        css = ''
    translate_call = partial(self.translate_call, parser_instance)
    body_mako_tpl = mako_template(template)
    helper = WebKitHelper(cursor, uid, report_xml.id, context)

    def _render(**extra):
        # Render the body template, converting mako failures into a
        # logged except_osv.  *extra* lets the debug pass inject _debug.
        try:
            render_ctx = dict(parser_instance.localcontext, **extra)
            return body_mako_tpl.render(helper=helper, css=css,
                                        _=translate_call, **render_ctx)
        except Exception:
            msg = exceptions.text_error_template().render()
            _logger.error(msg)
            raise except_osv(_('Webkit render'), msg)

    htmls = []
    if report_xml.precise_mode:
        # One render pass per record; the template sees a single-item
        # 'objects' list each time.
        for obj in objs:
            parser_instance.localcontext['objects'] = [obj]
            htmls.append(_render())
    else:
        htmls.append(_render())

    # NO html footer and header because we write them as text with
    # wkhtmltopdf.
    head = foot = False
    if report_xml.webkit_debug:
        deb = _render(_debug=tools.ustr("\n".join(htmls)))
        return (deb, 'html')
    binary = self.get_lib(cursor, uid)
    pdf = self.generate_pdf(binary, report_xml, head, foot, htmls,
                            parser_instance=parser_instance)
    return (pdf, 'pdf')
def _subreport(self, cr, uid, aeroo_print, output='odt', aeroo_ooo=False, context=None):
    """Return a subreport-rendering callable for use inside templates.

    Depending on *output*, returns ``odt_subreport`` (renders a named
    report to a temporary ODT file and returns an ``<insert_doc(...)>``
    marker) or ``raw_subreport`` (returns raw genshi text).  Both close
    over ``cr``/``uid``/``context``.

    Fixes vs. original: the mutable default ``context={}`` was mutated
    (``context['print_id'] = ...``) and therefore leaked state between
    calls; ``raw_subreport`` did ``report, output = f(..., output=output)``
    which made ``output`` a local referenced before assignment
    (UnboundLocalError).
    """
    if context is None:
        context = {}
    pool = pooler.get_pool(cr.dbname)
    ir_obj = pool.get('ir.actions.report.xml')

    #### for odt documents ####
    def odt_subreport(name=None, obj=None):
        if not aeroo_ooo:
            return _("Error! Subreports not available!")
        report_xml_ids = ir_obj.search(cr, uid,
                                       [('report_name', '=', name)],
                                       context=context)
        if not report_xml_ids:
            return None
        service = netsvc.Service._services['report.%s' % name]
        report_xml = ir_obj.browse(cr, uid, report_xml_ids[0],
                                   context=context)
        data = {
            'model': obj._table_name,
            'id': obj.id,
            'report_type': 'aeroo',
            'in_format': 'oo-odt'
        }
        ### Get new printing object ###
        sub_aeroo_print = AerooPrint()
        service.active_prints[sub_aeroo_print.id] = sub_aeroo_print
        context['print_id'] = sub_aeroo_print.id
        ###############################
        sub_aeroo_print.start_time = time.time()
        # change for OpenERP 6.0 - Service class usage
        report, _format = service.create_aeroo_report(
            cr, uid, [obj.id], data, report_xml, context=context,
            output='odt')
        ### Delete printing object ###
        AerooPrint.print_ids.remove(sub_aeroo_print.id)
        del service.active_prints[sub_aeroo_print.id]
        ##############################
        # Spool the rendered document to disk; the file is cleaned up
        # later via aeroo_print.subreports.
        with NamedTemporaryFile(suffix='.odt', prefix='aeroo-report-',
                                delete=False) as temp_file:
            temp_file.write(report)
        aeroo_print.subreports.append(temp_file.name)
        return "<insert_doc('%s')>" % temp_file.name

    #### for text documents ####
    def raw_subreport(name=None, obj=None):
        report_xml_ids = ir_obj.search(cr, uid,
                                       [('report_name', '=', name)],
                                       context=context)
        if not report_xml_ids:
            return None
        report_xml = ir_obj.browse(cr, uid, report_xml_ids[0],
                                   context=context)
        data = {
            'model': obj._table_name,
            'id': obj.id,
            'report_type': 'aeroo',
            'in_format': 'genshi-raw'
        }
        # change for OpenERP 6.0 - Service class usage.  The enclosing
        # 'output' value is passed through untouched; the returned
        # format is deliberately ignored.
        report, _format = netsvc.Service._services[
            'report.%s' % name].create_genshi_raw_report(
                cr, uid, [obj.id], data, report_xml, context=context,
                output=output)
        return report

    if output == 'odt':
        return odt_subreport
    elif output == 'raw':
        return raw_subreport
def generate_records(self, cr, uid, ids, data, context):
    """Build the rows of the supplier ("fiche fournisseur") report.

    For the partner and date range selected in the wizard
    (``data['form']``), collects supplier invoices, supplier refunds
    ("avoirs") and draft delivery invoices ("factures BL"), turning
    each document into one row dict.  Rows are sorted by invoice date;
    refund amounts are rendered as negated strings.  When nothing
    matches, a single empty placeholder row is returned.

    Fixes vs. original: returns ``[]`` instead of raising
    UnboundLocalError when ``data`` carries no ``'form'`` key; debug
    ``print`` statements removed; the triplicated state-label mapping
    and manual total accumulation consolidated.

    :returns: list of row dicts for the report engine.
    """
    pool = pooler.get_pool(cr.dbname)
    if 'form' not in data:
        # No wizard data: nothing to report on.
        return []

    form = data['form']
    partner_id = form['partner_id']  # (id, name) pair from the wizard
    date1 = form['date1']
    date2 = form['date2']
    dateAuj = time.strftime('%d-%m-%Y %H:%M')
    stat_path = os.getcwd() + "/openerp/addons/office_stat/"
    # French labels shown on the report for document states.
    state_labels = {'draft': 'brouillon', 'open': 'ouverte'}

    invoice_pool = pool.get('account.invoice')
    picking_pool = pool.get('invoice.picking')

    # ---- supplier invoices ------------------------------------------
    invoice_ids = invoice_pool.search(cr, uid, [
        ('partner_id', '=', partner_id[0]),
        ('type', '=', 'in_invoice'),
        ('state', '!=', 'cancel'),
        ('date_invoice', '>=', date1),
        ('date_invoice', '<=', date2)])
    inv_objs = invoice_pool.browse(cr, uid, invoice_ids)
    total_amount_total = sum(inv.amount_total for inv in inv_objs)
    amount_untaxed_total = sum(inv.amount_untaxed for inv in inv_objs)

    # ---- supplier refunds (avoirs) ----------------------------------
    avoir_ids = invoice_pool.search(cr, uid, [
        ('partner_id', '=', partner_id[0]),
        ('type', '=', 'in_refund'),
        ('state', '!=', 'cancel'),
        ('date_invoice', '>=', date1),
        ('date_invoice', '<=', date2)])
    avoir_objs = invoice_pool.browse(cr, uid, avoir_ids)
    total_amount_total_avoir = sum(av.amount_total for av in avoir_objs)
    amount_untaxed_total_avoir = sum(av.amount_untaxed for av in avoir_objs)

    # ---- draft delivery invoices (BL) -------------------------------
    picking_ids = picking_pool.search(cr, uid, [
        ('partner_id', '=', partner_id[0]),
        ('state', '=', 'draft'),
        ('type', '=', 'in_invoice'),
        ('date_invoice_picking', '>=', date1),
        ('date_invoice_picking', '<=', date2)])
    picking_objs = picking_pool.browse(cr, uid, picking_ids)
    total_amount_total_bl = sum(bl.amount_total for bl in picking_objs)
    amount_untaxed_total_bl = sum(bl.amount_untaxed for bl in picking_objs)

    # Grand totals: invoices + BL minus refunds.
    total_result = (total_amount_total + total_amount_total_bl
                    - total_amount_total_avoir)
    total_untaxed_result = (amount_untaxed_total + amount_untaxed_total_bl
                            - amount_untaxed_total_avoir)

    # Fields shared by every row.
    common = {
        'stat_path': stat_path,
        'total_result': total_result,
        'total_untaxed_result': total_untaxed_result,
        'dateAuj': str(dateAuj),
    }

    result = []
    # BL rows (label: 'Facture BR').
    for obj in picking_objs:
        row = dict(common)
        row.update({
            'partner_id': partner_id[1],
            'number': " - " if obj.internal_number == False else obj.internal_number,
            'date_invoice': datetime.datetime.strptime(
                obj.date_invoice_picking, "%Y-%m-%d"),
            'type': 'Facture BR',
            'amount_untaxed': obj.amount_untaxed,
            'amount_total': obj.amount_total,
            'etat': state_labels.get(obj.state, obj.state),
        })
        result.append(row)
    # Refund rows: amounts are rendered as negated strings.
    for reg in avoir_objs:
        row = dict(common)
        row.update({
            'partner_id': reg.partner_id["name"],
            'date_invoice': datetime.datetime.strptime(
                reg.date_invoice, "%Y-%m-%d"),
            'amount_total': '-' + str(reg.amount_total),
            'amount_untaxed': '-' + str(reg.amount_untaxed),
            'number': " - " if reg.number == False else reg.number,
            'etat': state_labels.get(reg.state, reg.state),
            'type': 'Avoir',
        })
        result.append(row)
    # Invoice rows.
    for inv in inv_objs:
        row = dict(common)
        row.update({
            'partner_id': inv.partner_id["name"],
            'date_invoice': datetime.datetime.strptime(
                inv.date_invoice, "%Y-%m-%d"),
            'amount_total': inv.amount_total,
            'amount_untaxed': inv.amount_untaxed,
            'number': " - " if inv.number == False else inv.number,
            'etat': state_labels.get(inv.state, inv.state),
            'type': 'Facture',
        })
        result.append(row)

    # Sort chronologically, then format the dates for display.
    rows_by_date = sorted(result, key=itemgetter('date_invoice'))
    for row in rows_by_date:
        row['date_invoice'] = datetime.datetime.strftime(
            row['date_invoice'], "%d-%m-%Y")

    if not rows_by_date:
        # No matching document at all: emit one empty placeholder row.
        rows_by_date.append({
            'partner_id': partner_id[1],
            'date_invoice': '',
            'amount_untaxed': '',
            'amount_total': '',
            'number': '',
            'etat': '',
            'stat_path': stat_path,
            'total_result': '0',
            'total_untaxed_result': '0',
            'dateAuj': dateAuj,
            'type': '',
        })
    return rows_by_date