def create_source_xls(self, cr, uid, ids, data, context=None):
    if not context:
        context = {}
    parser_instance = self.parser(cr, uid, self.name2, context)
    self.parser_instance = parser_instance
    self.context = context
    objs = self.getObjects(cr, uid, ids, context)
    parser_instance.set_context(objs, data, ids, 'xls')
    objs = parser_instance.localcontext['objects']
    n = cStringIO.StringIO()
    wb = xlwt.Workbook(encoding='utf-8')
    _p = AttrDict(parser_instance.localcontext)
    _xs = self.xls_styles
    self.xls_headers = {
        'standard': '',
    }
    report_date = datetime_field.context_timestamp(
        cr, uid, datetime.now(), context)
    report_date = report_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    self.xls_footers = {
        'standard': (
            '&L&%(font_size)s&%(font_style)s' + report_date +
            '&R&%(font_size)s&%(font_style)s&P / &N'
        ) % self.hf_params,
    }
    self.generate_xls_report(_p, _xs, data, objs, wb)
    wb.save(n)
    n.seek(0)
    return (n.read(), 'xls')
def create_xlsx_report(self, ids, data, report):
    _logger.error("in xlsx report with env: %s", str(self.env.cr))
    self.parser_instance = self.parser(
        self.env.cr, self.env.uid, self.name2, self.env.context)
    objs = self.getObjects(self.env.cr, self.env.uid, ids, self.env.context)
    self.parser_instance.set_context(objs, data, ids, 'xlsx')
    _p = AttrDict(self.parser_instance.localcontext)
    objs = self.parser_instance.localcontext['objects']
    file_data = StringIO()
    workbook = Workbook(file_data)
    _xs = self.xls_styles
    self.xls_headers = {
        'standard': '',
    }
    report_date = datetime_field.context_timestamp(
        self.cr, self.uid, datetime.now(), self.context)
    report_date = report_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    self.xls_footers = {'standard': report_date}
    self.generate_xls_report(_p, _xs, data, objs, workbook)
    workbook.close()
    file_data.seek(0)
    return (file_data.read(), 'xlsx')
def formatLang(self, value, digits=None, date=False, date_time=False,
               grouping=True, monetary=False, dp=False, currency_obj=False):
    """
    Assuming 'Account' decimal.precision=3:
        formatLang(value)                         -> digits=2 (default)
        formatLang(value, digits=4)               -> digits=4
        formatLang(value, dp='Account')           -> digits=3
        formatLang(value, digits=5, dp='Account') -> digits=5
    """
    if digits is None:
        if dp:
            digits = self.get_digits(dp=dp)
        else:
            digits = self.get_digits(value)

    if isinstance(value, (str, unicode)) and not value:
        return ''

    if not self.lang_dict_called:
        self._get_lang_dict()
        self.lang_dict_called = True

    if date or date_time:
        if not str(value):
            return ''

        date_format = self.lang_dict['date_format']
        parse_format = DEFAULT_SERVER_DATE_FORMAT
        if date_time:
            value = value.split('.')[0]
            date_format = date_format + " " + self.lang_dict['time_format']
            parse_format = DEFAULT_SERVER_DATETIME_FORMAT
        if isinstance(value, basestring):
            # FIXME: the trimming is probably unreliable if format includes
            # day/month names and those would need to be translated anyway.
            date = datetime.strptime(
                value[:get_date_length(parse_format)], parse_format)
        elif isinstance(value, time.struct_time):
            date = datetime(*value[:6])
        else:
            date = datetime(*value.timetuple()[:6])
        if date_time:
            # Convert datetime values to the expected client/context timezone
            date = datetime_field.context_timestamp(
                self.cr, self.uid, timestamp=date, context=self.localcontext)
        return date.strftime(date_format.encode('utf-8'))

    res = self.lang_dict['lang_obj'].format(
        '%.' + str(digits) + 'f', value, grouping=grouping, monetary=monetary)
    if currency_obj:
        if currency_obj.position == 'after':
            res = '%s %s' % (res, currency_obj.symbol)
        elif currency_obj and currency_obj.position == 'before':
            res = '%s %s' % (currency_obj.symbol, res)
    return res
def create_source_xls(self, cr, uid, ids, data, context=None):
    if not context:
        context = {}
    parser_instance = self.parser(cr, uid, self.name2, context)
    self.parser_instance = parser_instance
    self.context = context
    objs = self.getObjects(cr, uid, ids, context)
    parser_instance.set_context(objs, data, ids, 'xls')
    objs = parser_instance.localcontext['objects']
    n = cStringIO.StringIO()
    # prevent style make error
    # http://stackoverflow.com/questions/17130516/xlwt-set-style-making-error-more-than-4094-xfs-styles
    wb = xlwt.Workbook(encoding='utf-8', style_compression=2)
    _p = AttrDict(parser_instance.localcontext)
    _xs = self.xls_styles
    self.xls_headers = {
        'standard': '',
    }
    report_date = datetime_field.context_timestamp(
        cr, uid, datetime.now(), context)
    report_date = report_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    self.xls_footers = {
        'standard': (
            '&L&%(font_size)s&%(font_style)s' + report_date +
            '&R&%(font_size)s&%(font_style)s&P / &N'
        ) % self.hf_params,
    }
    self.generate_xls_report(_p, _xs, data, objs, wb)
    wb.save(n)
    n.seek(0)
    return (n.read(), 'xls')
def set_context(self, objects, data, ids, report_type=None):
    report_date = datetime_field.context_timestamp(
        self.cr, self.uid, datetime.now(),
        self.context).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    self.localcontext.update({'report_date': report_date})
    super(report_financial_parser, self).set_context(
        objects, data, ids, report_type)
def evalStr(self, value):
    if self.evalContext is None:
        t = self.dt and util.strToTime(self.dt) or datetime.now()
        t = datetime_field.context_timestamp(
            self.cr, self.uid, timestamp=t, context={"tz": self.tz})
        sequences = {
            'year': '%Y',
            'month': '%m',
            'day': '%d',
            'y': '%y',
            'doy': '%j',
            'woy': '%W',
            'weekday': '%w',
            'h24': '%H',
            'h12': '%I',
            'min': '%M',
            'sec': '%S',
        }
        self.evalContext = {
            key: t.strftime(sequence)
            for key, sequence in sequences.iteritems()
        }
    if value:
        return value % self.evalContext
    return value
def set_context(self, objects, data, ids, report_type=None):
    cr = self.cr
    uid = self.uid
    context = self.context

    reports = []
    reports.append(self.get_wo(data)[0])
    reports.append(self.get_so(data)[0])
    if not reports:
        raise osv.except_osv(
            ('Warning'), ('Data Report Tidak Ditemukan !'))

    report_date = datetime_field.context_timestamp(
        cr, uid, datetime.now(), context
    ).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    report_info = _('')
    self.localcontext.update({
        'report_info': report_info,
        'report_date': report_date,
        'reports': reports,
    })
    objects = False
    super(dym_report_penjualan_sowo_print, self).set_context(
        objects, data, ids, report_type)
def _convert_datetime_to_client_timezone(self, date_time):
    date_time = dt.strptime(date_time, DTF)
    date_time_new = datetime.context_timestamp(
        self.env.cr, self.env.uid, date_time, context=self.env.context)
    date_time_new_str = date_time_new.strftime(DTF)
    return date_time_new_str
def strToLocalTimeFormat(cr, uid, str_time, format, context):
    # Convert datetime values to the expected client/context timezone
    timestamp = util.strToTime(str_time)
    converted = datetime_field.context_timestamp(
        cr, uid, timestamp=timestamp, context=context)
    return util.dateFormat(converted, format)
def strToLocalDateStr(cr, uid, str_time, context):
    # Convert datetime values to the expected client/context timezone
    timestamp = util.strToTime(str_time)
    converted = datetime_field.context_timestamp(
        cr, uid, timestamp=timestamp, context=context)
    return util.timeToDateStr(converted)
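
# Illustrative sketch, not taken from the snippets above: the helpers in this
# collection all follow the same pattern -- parse a naive UTC datetime string,
# then let openerp.osv.fields.datetime.context_timestamp shift it into the
# timezone taken from context['tz'] (falling back to the user's timezone).
# The helper name and this standalone layout are assumptions for illustration.
from datetime import datetime
from openerp.osv.fields import datetime as datetime_field
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT


def utc_str_to_local_str(cr, uid, utc_str, context=None):
    # parse the server-side (naive, UTC) datetime string
    timestamp = datetime.strptime(utc_str, DEFAULT_SERVER_DATETIME_FORMAT)
    # context_timestamp returns a tz-aware datetime in the client timezone
    local_dt = datetime_field.context_timestamp(
        cr, uid, timestamp=timestamp, context=context)
    return local_dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT)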
def set_context(self, objects, data, ids, report_type=None):
    # _logger.warn('set_context, objects = %s, data = %s, ids = %s',
    #              objects, data, ids)
    data = objects[0]
    cr = data._cr
    uid = data._uid
    context = data._context
    date_balance = data['date_balance']
    journal_ids = [x.id for x in data['journal_ids']]
    if not journal_ids:
        raise Warning(_('No Financial Journals selected!'))

    cr.execute(
        "SELECT s.name AS s_name, s.date AS s_date, j.code AS j_code, "
        "s.balance_end_real AS s_balance, "
        "coalesce(jcu.id,ccu.id) as j_curr_id "
        "FROM account_bank_statement s "
        "INNER JOIN account_journal j ON s.journal_id = j.id "
        "INNER JOIN res_company co ON j.company_id = co.id "
        "LEFT OUTER JOIN res_currency jcu ON j.currency = jcu.id "
        "LEFT OUTER JOIN res_currency ccu ON co.currency_id = ccu.id "
        "INNER JOIN "
        " (SELECT journal_id, max(date) AS max_date "
        "  FROM account_bank_statement "
        "  WHERE date <= %s GROUP BY journal_id) d "
        " ON (s.journal_id = d.journal_id AND s.date = d.max_date) "
        "WHERE s.journal_id in %s "
        "ORDER BY j_curr_id, j.code",
        (date_balance, tuple(journal_ids)))
    lines = cr.dictfetchall()
    [
        x.update(
            {'currency': data.env['res.currency'].browse(x['j_curr_id'])})
        for x in lines
    ]
    currencies = list(set([x['currency'] for x in lines]))
    totals = []
    for currency in currencies:
        lines_currency = filter(lambda x: x['currency'] == currency, lines)
        total_amount = reduce(lambda x, y: x + y,
                              [x['s_balance'] for x in lines_currency])
        totals.append({
            'currency': currency,
            'total_amount': total_amount,
        })
    if not lines:
        raise Warning(_('No records found for your selection!'))
    report_date = datetime_field.context_timestamp(
        cr, uid, datetime.now(), context
    ).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    self.localcontext.update({
        'lines': lines,
        'totals': totals,
        'date_balance': date_balance,
        'report_date': report_date,
    })
    super(bank_statement_balance_report, self).set_context(
        objects, data, ids, report_type=report_type)
def formatLang(self, value, digits=None, date=False, date_time=False,
               grouping=True, monetary=False, dp=False, currency_obj=False):
    """
    Assuming 'Account' decimal.precision=3:
        formatLang(value)                         -> digits=2 (default)
        formatLang(value, digits=4)               -> digits=4
        formatLang(value, dp='Account')           -> digits=3
        formatLang(value, digits=5, dp='Account') -> digits=5
    """
    if not value:
        return ''
    if digits is None:
        if dp:
            digits = self.get_digits(dp=dp)
        else:
            digits = self.get_digits(value)

    if isinstance(value, (str, unicode)) and not value:
        return ''

    if not self.lang_dict_called:
        self._get_lang_dict()
        self.lang_dict_called = True

    if date or date_time:
        if not str(value):
            return ''

        date_format = self.lang_dict['date_format']
        parse_format = DEFAULT_SERVER_DATE_FORMAT
        if date_time:
            value = value.split('.')[0]
            date_format = date_format + " " + self.lang_dict['time_format']
            parse_format = DEFAULT_SERVER_DATETIME_FORMAT
        if isinstance(value, basestring):
            # FIXME: the trimming is probably unreliable if format includes
            # day/month names and those would need to be translated anyway.
            date = datetime.strptime(
                value[:get_date_length(parse_format)], parse_format)
        elif isinstance(value, time.struct_time):
            date = datetime(*value[:6])
        else:
            date = datetime(*value.timetuple()[:6])
        if date_time:
            # Convert datetime values to the expected client/context timezone
            date = datetime_field.context_timestamp(
                self.cr, self.uid, timestamp=date, context=self.localcontext)
        return date.strftime(date_format.encode('utf-8'))

    res = self.lang_dict['lang_obj'].format(
        '%.' + str(digits) + 'f', value, grouping=grouping, monetary=monetary)
    if currency_obj:
        if currency_obj.position == 'after':
            res = '%s %s' % (res, currency_obj.symbol)
        elif currency_obj and currency_obj.position == 'before':
            res = '%s %s' % (currency_obj.symbol, res)
    return res
def formatLang(
    self,
    value,
    digits=None,
    date=False,
    date_time=False,
    grouping=True,
    monetary=False,
    dp=False,
    currency_obj=False,
):
    if digits is None:
        if dp:
            digits = self.get_digits(dp=dp)
        elif currency_obj:
            digits = currency_obj.decimal_places
        else:
            digits = self.get_digits(value)

    if isinstance(value, (str, unicode)) and not value:
        return ""

    if not self.lang_dict_called:
        self._get_lang_dict()
        self.lang_dict_called = True

    if date or date_time:
        if not value:
            return ""

        date_format = self.lang_dict["date_format"]
        parse_format = DEFAULT_SERVER_DATE_FORMAT
        if date_time:
            value = value.split(".")[0]
            date_format = date_format + " " + self.lang_dict["time_format"]
            parse_format = DEFAULT_SERVER_DATETIME_FORMAT
        if isinstance(value, basestring):
            # FIXME: the trimming is probably unreliable if format includes
            # day/month names and those would need to be translated anyway.
            date = datetime.strptime(
                value[:get_date_length(parse_format)], parse_format)
        elif isinstance(value, time.struct_time):
            date = datetime(*value[:6])
        else:
            date = datetime(*value.timetuple()[:6])
        if date_time:
            # Convert datetime values to the expected client/context timezone
            date = datetime_field.context_timestamp(
                self.cr, self.uid, timestamp=date, context=self.localcontext)
        return date.strftime(date_format.encode("utf-8"))

    res = self.lang_dict["lang_obj"].format(
        "%." + str(digits) + "f", value, grouping=grouping, monetary=monetary)
    if currency_obj:
        if currency_obj.position == "after":
            res = u"%s\N{NO-BREAK SPACE}%s" % (res, currency_obj.symbol)
        elif currency_obj and currency_obj.position == "before":
            res = u"%s\N{NO-BREAK SPACE}%s" % (currency_obj.symbol, res)
    return res
def strDateToUTCTimeStr(cr, uid, str_date, context):
    timestamp = util.strToTime(util.dateToTimeStr(str_date))
    converted = datetime_field.context_timestamp(
        cr, uid, timestamp=timestamp, context=context)
    converted = util.strToTime(util.timeToStr(converted))
    diff = (converted - timestamp)
    utcTimestamp = timestamp - diff
    return util.timeToStr(utcTimestamp)
def set_context(self, objects, data, ids, report_type=None):
    report_date = datetime_field.context_timestamp(
        self.cr, self.uid, datetime.now(),
        self.context).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    self.localcontext.update({
        'period_code': data['form']['period_code'],
        'partner_wo_vat': data['form']['partner_wo_vat'],
        'amtsum': data['form']['amtsum'],
        'report_date': report_date,
        'clientlist': data['form']['clientlist'],
    })
    super(vat_intra_print, self).set_context(objects, data, ids)
def _convert_timezone(self, date, format_string='%Y-%m-%d %H:%M:%S'):
    import logging
    logging.info(date)
    date = datetime.strptime(date, format_string)
    new_date = datetime_field.context_timestamp(
        self._cr, self._uid, timestamp=date, context=self._context)
    new_date = datetime.strptime(
        new_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
        DEFAULT_SERVER_DATETIME_FORMAT)
    duration = new_date - date
    seconds = duration.total_seconds()
    hours = seconds // 3600
    date = date + relativedelta(hours=hours)
    return date.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
def _convert_timezone(self, cr, uid, date, context):
    date = datetime.strptime(date, DEFAULT_SERVER_DATETIME_FORMAT)
    new_date = datetime_field.context_timestamp(
        cr, uid, timestamp=date, context=context)
    new_date = datetime.strptime(
        new_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
        DEFAULT_SERVER_DATETIME_FORMAT)
    duration = new_date - date
    seconds = duration.total_seconds()
    hours = seconds // 3600
    date = date + relativedelta(hours=-hours)
    return date.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
def set_context(self, objects, data, ids, report_type=None):
    report_date = datetime_field.context_timestamp(
        self.cr, self.uid, datetime.now(),
        self.context).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    self.localcontext.update({
        'report_date': report_date,
        'period_start': data['form']['period_start'],
        'period_end': data['form']['period_end'],
        'grid_data_list': data['form']['grid_data_list'],
    })
    super(vat_declaration_print, self).set_context(objects, data, ids)
def remove_7_hours(self, cr, uid, date, context=None):
    from openerp.osv.fields import datetime as datetime_field
    date = datetime.strptime(date, DEFAULT_SERVER_DATETIME_FORMAT)
    new_date = datetime_field.context_timestamp(
        cr, uid, timestamp=date, context=context)
    new_date = datetime.strptime(
        new_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
        DEFAULT_SERVER_DATETIME_FORMAT)
    duration = new_date - date
    seconds = duration.total_seconds()
    hours = seconds // 3600
    date = date + relativedelta(hours=-hours)
    return date.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
def set_context(self, objects, data, ids, report_type=None):
    client_datas = data['client_datas']
    report_date = datetime_field.context_timestamp(
        self.cr, self.uid, datetime.now(),
        self.context).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    self.localcontext.update({
        'year': data['year'],
        'sum_turnover': client_datas[-1]['sum_turnover'],
        'sum_tax': client_datas[-1]['sum_tax'],
        'client_list': client_datas,
        'report_date': report_date,
    })
    super(partner_vat_listing_print, self).set_context(objects, data, ids)
def _convert_timezone(self, cr, uid, date, context):
    from datetime import datetime
    from openerp.tools import (
        DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT)
    from openerp.osv.fields import datetime as datetime_field
    from dateutil.relativedelta import relativedelta

    date = datetime.strptime(date, DEFAULT_SERVER_DATETIME_FORMAT)
    new_date = datetime_field.context_timestamp(
        cr, uid, timestamp=date, context=context)
    new_date = datetime.strptime(
        new_date.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
        DEFAULT_SERVER_DATETIME_FORMAT)
    duration = new_date - date
    seconds = duration.total_seconds()
    hours = seconds // 3600
    date = date + relativedelta(hours=-hours)
    return date.date().strftime(DEFAULT_SERVER_DATE_FORMAT)
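
# Illustrative sketch, an assumption rather than part of the modules above:
# the preceding helpers derive the client's UTC offset by round-tripping a
# naive datetime through context_timestamp and diffing the result against the
# input (truncated to whole hours with // 3600), then shifting by that amount.
# The function name below is hypothetical.
from datetime import datetime
from openerp.osv.fields import datetime as datetime_field
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT


def _utc_offset_hours(cr, uid, naive_utc_dt, context=None):
    # localise the naive UTC datetime into the context timezone
    local_dt = datetime_field.context_timestamp(
        cr, uid, timestamp=naive_utc_dt, context=context)
    # drop the tzinfo again so the two datetimes can be subtracted
    local_naive = datetime.strptime(
        local_dt.strftime(DEFAULT_SERVER_DATETIME_FORMAT),
        DEFAULT_SERVER_DATETIME_FORMAT)
    return (local_naive - naive_utc_dt).total_seconds() // 3600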
def generate_info_general_report(self, data, get_account_lines, workbook):
    self.sheet.write("A5", _(u"Target Moves:"), self.format_title_table)
    if data['target_move'] == 'all':
        self.sheet.write("A6", _(u"All Entries"), self.format_default_info)
    elif data['target_move'] == 'posted':
        self.sheet.write(
            "A6", _(u"All Posted Entries"), self.format_default_info)
    if data['date_from']:
        self.sheet.write("B5", _(u"Date from:"), self.format_title_table)
        self.sheet.write(
            "B6", u"%s" % data['date_from'], self.format_default_info)
    if data['date_to']:
        self.sheet.write("C5", _(u"Date to:"), self.format_title_table)
        self.sheet.write(
            "C6", u"%s" % data['date_to'], self.format_default_info)
    self.sheet.write("D5", _(u"Print Date:"), self.format_title_table)
    self.sheet.write(
        "D6",
        u"%s" % datetime_field.context_timestamp(
            self.env.cr, self.env.uid, datetime.now(),
            self.context).strftime("%d/%m/%Y %H:%M") or '',
        self.format_default_info)
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context branch_ids = data['branch_ids'] trx_start_date = data['trx_start_date'] trx_end_date = data['trx_end_date'] title_prefix = '' title_short_prefix = '' report_service_rate = { 'product_codes': [], 'product_supplied': [], 'type': 'payable', 'title': '', 'title_short': 'Laporan Service Rate' } query_start = "SELECT CONCAT(cast(l.id as text),'-',cast(obj.id as text),'-','WO') as id_ai, " \ "COALESCE(b.name,'') as branch_id, " \ "'WO' as tipe_dok, " \ "obj.date as date, " \ "obj.name as number, " \ "COALESCE(t.name,'') as product_code, " \ "COALESCE(t.description,'') as description, " \ "COALESCE(p.default_code,'') as default_code, " \ "COALESCE(l.price_unit,0) as het, " \ "COALESCE(l.product_qty,0)+COALESCE(l.lost_order_qty,0) as qty_demand, " \ "COALESCE(l.product_qty,0) as qty_supply, " \ "COALESCE(l.lost_order_qty,0) as qty_backorder, " \ "(COALESCE(l.price_unit,0)-(COALESCE(l.discount,0)/COALESCE(l.product_qty,0))-(COALESCE(l.discount_program,0)/COALESCE(l.product_qty,0))-COALESCE(s_db.discount_bundle,0)) / 1.1 as net_jual, " \ "((COALESCE(l.price_unit,0)-(COALESCE(l.discount,0)/COALESCE(l.product_qty,0))-(COALESCE(l.discount_program,0)/COALESCE(l.product_qty,0))-COALESCE(s_db.discount_bundle,0)) / 1.1)*(COALESCE(l.product_qty,0)+COALESCE(l.lost_order_qty,0)) as jumlah_demand, " \ "((COALESCE(l.price_unit,0)-(COALESCE(l.discount,0)/COALESCE(l.product_qty,0))-(COALESCE(l.discount_program,0)/COALESCE(l.product_qty,0))-COALESCE(s_db.discount_bundle,0)) / 1.1)*(COALESCE(l.product_qty,0)) as jumlah_supply, " \ "((COALESCE(l.price_unit,0)-(COALESCE(l.discount,0)/COALESCE(l.product_qty,0))-(COALESCE(l.discount_program,0)/COALESCE(l.product_qty,0))-COALESCE(s_db.discount_bundle,0)) / 1.1)*(COALESCE(l.lost_order_qty,0)) as jumlah_order " \ "FROM " \ "dym_work_order_line l " \ "LEFT JOIN dym_work_order obj ON l.work_order_id = obj.id " \ "LEFT JOIN dym_branch b ON b.id = obj.branch_id " \ "LEFT JOIN product_product p ON p.id = l.product_id " \ "LEFT JOIN product_template t ON t.id = p.product_tmpl_id " \ "LEFT JOIN (select db.wo_line_id, sum(db.diskon) as discount_bundle from dym_work_order_bundle db group by db.wo_line_id) s_db ON s_db.wo_line_id = l.id " \ "where 1=1 and obj.state in ('open','done') and l.categ_id = 'Sparepart' " query_start2 = "SELECT CONCAT(cast(l.id as text),'-',cast(obj.id as text),'-','SO') as id_ai, " \ "COALESCE(b.name,'') as branch_id, " \ "'SO' as tipe_dok, " \ "obj.date_order as date, " \ "obj.name as number, " \ "COALESCE(t.name,'') as product_code, " \ "COALESCE(t.description,'') as description, " \ "COALESCE(p.default_code,'') as default_code, " \ "COALESCE(l.price_unit,0) as het, " \ "COALESCE(l.product_uom_qty,0)+COALESCE(l.lost_order_qty,0) as qty_demand, " \ "COALESCE(l.product_uom_qty,0) as qty_supply, " \ "COALESCE(l.lost_order_qty,0) as qty_backorder, " \ "(COALESCE(l.price_unit,0)*(1-COALESCE(l.discount,0)/100)-(COALESCE(l.discount_program,0)/COALESCE(l.product_uom_qty,0))-(COALESCE(l.discount_cash,0)/COALESCE(l.product_uom_qty,0))-(COALESCE(l.discount_lain,0)/COALESCE(l.product_uom_qty,0))) / 1.1 as net_jual, " \ "((COALESCE(l.price_unit,0)*(1-COALESCE(l.discount,0)/100)-(COALESCE(l.discount_program,0)/COALESCE(l.product_uom_qty,0))-(COALESCE(l.discount_cash,0)/COALESCE(l.product_uom_qty,0))-(COALESCE(l.discount_lain,0)/COALESCE(l.product_uom_qty,0))) / 1.1)*(COALESCE(l.product_uom_qty,0)+COALESCE(l.lost_order_qty,0)) as jumlah_demand, " \ 
"((COALESCE(l.price_unit,0)*(1-COALESCE(l.discount,0)/100)-(COALESCE(l.discount_program,0)/COALESCE(l.product_uom_qty,0))-(COALESCE(l.discount_cash,0)/COALESCE(l.product_uom_qty,0))-(COALESCE(l.discount_lain,0)/COALESCE(l.product_uom_qty,0))) / 1.1)*(COALESCE(l.product_uom_qty,0)) as jumlah_supply, " \ "((COALESCE(l.price_unit,0)*(1-COALESCE(l.discount,0)/100)-(COALESCE(l.discount_program,0)/COALESCE(l.product_uom_qty,0))-(COALESCE(l.discount_cash,0)/COALESCE(l.product_uom_qty,0))-(COALESCE(l.discount_lain,0)/COALESCE(l.product_uom_qty,0))) / 1.1)*(COALESCE(l.lost_order_qty,0)) as jumlah_order " \ "FROM " \ "sale_order_line l " \ "LEFT JOIN sale_order obj ON l.order_id = obj.id " \ "LEFT JOIN dym_branch b ON b.id = obj.branch_id " \ "LEFT JOIN product_product p ON p.id = l.product_id " \ "LEFT JOIN product_template t ON t.id = p.product_tmpl_id " \ "where 1=1 and obj.state in ('progress','manual','shipping_except','invoice_except','done') " move_selection = "" report_info = _('') move_selection += "" query_end = "" query_end2 = "" if trx_start_date: query_end += " AND obj.date >= '%s'" % str(trx_start_date) query_end2 += " AND obj.date_order >= '%s'" % str(trx_start_date) if trx_end_date: query_end += " AND obj.date <= '%s 23:59:59'" % str(trx_end_date) query_end2 += " AND obj.date_order <= '%s 23:59:59'" % str( trx_end_date) if branch_ids: query_end += " AND obj.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') query_end2 += " AND obj.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') reports = [report_service_rate] # query_order = "order by cabang" query_order = "" for report in reports: cr.execute(query_start + query_end + query_order + " UNION ALL " + query_start2 + query_end2 + query_order) all_lines = cr.dictfetchall() id_ai = [] if all_lines: p_map = map( lambda x: { 'no': 0, 'id_ai': x['id_ai'] if x['id_ai'] != None else 0, 'branch_id': str(x['branch_id'].encode('ascii', 'ignore').decode( 'ascii')) if x['branch_id'] != None else '', 'tipe_dok': str(x['tipe_dok'].encode('ascii', 'ignore').decode( 'ascii')) if x['tipe_dok'] != None else '', 'date': str(x['date']) if x['date'] != None else '', 'number': str(x['number'].encode('ascii', 'ignore').decode( 'ascii')) if x['number'] != None else '', 'product_code': str(x['product_code'].encode('ascii', 'ignore').decode( 'ascii')) if x['product_code'] != None else '', 'product_name': str(x['description']) if 'description' in x and x[ 'description'] != None else str(x['default_code']) if 'default_code' in x and x['default_code'] != None else '', 'het': x['het'], 'qty_demand': x['qty_demand'], 'qty_supply': x['qty_supply'], 'qty_backorder': x['qty_backorder'], 'net_jual': x['net_jual'], 'jumlah_demand': x['jumlah_demand'], 'jumlah_supply': x['jumlah_supply'], 'jumlah_order': x['jumlah_order'], }, all_lines) for p in p_map: if p['id_ai'] not in map(lambda x: x.get('id_ai', None), id_ai): records = filter(lambda x: x['id_ai'] == p['id_ai'], all_lines) p.update({'lines': records}) id_ai.append(p) if records[0]['product_code'] not in report[ 'product_codes']: report['product_codes'].append( records[0]['product_code']) if records[0]['product_code'] not in report[ 'product_supplied'] and records[0][ 'qty_supply'] > 0: report['product_supplied'].append( records[0]['product_code']) report.update({'id_ai': id_ai}) # report.update({'id_ai': p_map}) reports = filter(lambda x: x.get('id_ai'), reports) if not reports: raise osv.except_osv( _('Data Not Found!'), _('Tidak ditemukan data dari hasil filter report service rate.' 
)) report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) super(dym_report_service_rate_print, self).set_context(objects, data, ids, report_type)
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context branch_ids = data['branch_ids'] date_from = data['date_from'] date_to = data['date_to'] state = data['state'] jenis_penjualan = data['jenis_penjualan'] title_prefix = '' title_short_prefix = '' report_ar = { 'type': 'receivable', 'title': title_prefix + _(''), 'title_short': title_short_prefix + ', ' + _('Dealer_sale_order') } query_start = "select a.id as p_id, " \ "a.name as p_name, " \ "a.state as p_state, " \ "a.date_order as p_date_order, " \ "c.name as p_konsumen, " \ "z.name as p_salesman, " \ "e.name as p_fincoy, " \ "a.customer_dp as p_customer_dp, " \ "ss.name as p_sales_source, " \ "b.cicilan as p_cicilan, " \ "f.name as p_location_id, " \ "g.name_template as p_kode_product, " \ "j.code as p_warna , " \ "k.name as p_mesin, " \ "k.chassis_no as p_rangka, " \ "b.finco_tenor as p_tenor, " \ "b.is_bbn as p_is_bbn, " \ "l.name as p_nama_stnk, " \ "b.uang_muka as p_uang_muka, " \ "b.discount_po as p_pot_pelanggan, " \ "b.price_unit as p_harga, " \ "b.discount_total as p_total_discount, " \ "b.price_bbn as p_harga_bbn, " \ "m.name as p_cabang, " \ "c.default_code as p_default_code " \ "from dealer_sale_order a " \ "left join dealer_sale_order_line b ON a.id = b.dealer_sale_order_line_id " \ "left join res_partner c ON c.id = a.partner_id " \ "left join hr_employee d ON d.id = a.employee_id " \ "left join res_partner z ON z.id = d.partner_id " \ "left join res_partner e ON e.id = a.finco_id " \ "left join stock_location f ON f.id = b.location_id " \ "left join sales_source ss ON ss.id = a.sales_source " \ "LEFT JOIN product_product g ON g.id = b.product_id " \ "LEFT JOIN product_template h ON h.id = g.product_tmpl_id " \ "LEFT JOIN product_attribute_value_product_product_rel i ON i.prod_id = b.product_id " \ "LEFT JOIN product_attribute_value j ON j.id = i.att_id " \ "LEFT JOIN stock_production_lot k ON k.id = b.lot_id " \ "left join res_partner l ON l.id = b.partner_stnk_id " \ "left join dym_branch m ON m.id = a.branch_id " \ query_start += " where a.id is not null " move_selection = "" report_info = _('') move_selection += "" query_end = "" if branch_ids: query_end += " AND a.branch_id in %s " % str( tuple(branch_ids)).replace(',)', ')') if state: query_end += " AND a.state = '%s' " % state if date_from: query_end+= ' AND a.date_order >= ' +"'"+ date_from + "'" + \ 'AND a.date_order <= ' +"'"+ date_to + "'" + \ '' if jenis_penjualan and jenis_penjualan == 'cash': query_end += " AND a.finco_id is null " if jenis_penjualan and jenis_penjualan == 'kredit': query_end += " AND a.finco_id is not null " query_order = "order by p_cabang,p_name " reports = [report_ar] for report in reports: cr.execute(query_start + query_end + query_order) all_lines = cr.dictfetchall() partners = [] if all_lines: def lines_map(x): x.update({'docname': x['p_name']}) map(lines_map, all_lines) for cnt in range(len(all_lines) - 1): if all_lines[cnt]['p_id'] != all_lines[cnt + 1]['p_id']: all_lines[cnt]['draw_line'] = 1 else: all_lines[cnt]['draw_line'] = 0 all_lines[-1]['draw_line'] = 1 p_map = map( lambda x: { 'p_id': x['p_id'], 'p_name': str(x['p_name']), 'p_state': str(x['p_state']), 'p_date_order': str(x['p_date_order']), 'p_konsumen': str(x['p_konsumen']), 'p_salesman': str(x['p_salesman']), 'p_fincoy': str(x['p_fincoy']), 'p_sales_source': str(x['p_sales_source']), 'p_cicilan': str(x['p_cicilan']), 'p_location_id': str(x['p_location_id']), 'p_kode_product': str(x['p_kode_product']), 'p_warna': 
str(x['p_warna']), 'p_cabang': str(x['p_cabang']), 'p_mesin': str(x['p_mesin']), 'p_default_code': str(x['p_default_code']), 'p_rangka': str(x['p_rangka']), 'p_tenor': str(x['p_tenor']), 'p_is_bbn': str(x['p_is_bbn']), 'p_nama_stnk': str(x['p_nama_stnk']), 'p_uang_muka': str(x['p_uang_muka']), 'p_pot_pelanggan': str(x['p_pot_pelanggan']), 'p_harga': str(x['p_harga']), 'p_total_discount': str(x['p_total_discount']), 'p_harga_bbn': str(x['p_harga_bbn']), 'p_customer_dp': str(x['p_customer_dp']) }, all_lines) for p in p_map: if p['p_id'] not in map(lambda x: x.get('p_id', None), partners): partners.append(p) partner_lines = filter( lambda x: x['p_id'] == p['p_id'], all_lines) p.update({'lines': partner_lines}) p.update({'d': 1, 'c': 2, 'b': 3}) report.update({'partners': partners}) reports = filter(lambda x: x.get('partners'), reports) if not reports: raise orm.except_orm(_('No Data Available'), _('No records found for your selection!')) report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) objects = False super(dym_dealer_sale_order_report_print, self).set_context(objects, data, ids, report_type)
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context journal_id = data['journal_id'] branch_id = data['branch_id'] start_date = data['start_date'] end_date = data['end_date'] title_prefix = '' title_short_prefix = '' bank_balance = 0.0 where_analytic = " 1=1 " if branch_id: branch_ids = self.pool.get('dym.branch').browse( cr, uid, branch_id[0]).ids else: branch_ids = self.pool.get('res.users').browse(cr, uid, uid).branch_ids.ids where_account = " 1=1 " if journal_id: journals = self.pool.get('account.journal').browse( cr, uid, journal_id[0]) account_ids = [] sql_query = '' if branch_ids: analytic_branch_ids = self.pool.get( 'account.analytic.account').search( cr, uid, [('segmen', '=', 3), ('branch_id', 'in', branch_ids), ('type', '=', 'normal'), ('state', 'not in', ('close', 'cancelled'))]) analytic_cc_ids = self.pool.get( 'account.analytic.account').search( cr, uid, [('segmen', '=', 4), ('type', '=', 'normal'), ('state', 'not in', ('close', 'cancelled')), ('parent_id', 'child_of', analytic_branch_ids)]) where_analytic = " l.analytic_account_id in %s " % str( tuple(analytic_cc_ids)).replace(',)', ')') sql_query = ' AND l.analytic_account_id in %s' % str( tuple(analytic_cc_ids)).replace(',)', ')') for journal in journals: if journal.default_debit_account_id and journal.default_debit_account_id.id not in account_ids: account_ids.append(journal.default_debit_account_id.id) if journal.default_credit_account_id and journal.default_credit_account_id.id not in account_ids: account_ids.append(journal.default_credit_account_id.id) bal_cr_init = journal.default_credit_account_id.with_context( date_from=start_date, date_to=end_date, initial_bal=True, sql_query=sql_query).balance bal_db_init = journal.default_debit_account_id.with_context( date_from=start_date, date_to=end_date, initial_bal=True, sql_query=sql_query).balance bank_balance_init = bal_cr_init or bal_db_init where_account = " a.id in %s " % str(tuple(account_ids)).replace( ',)', ')') title_short = 'LAPORAN Kas Besar' #if data['projection']: # title_short = title_short + ' (PROYEKSI)' report_kas_besar = { 'type': 'KasBesar', 'title': '', 'title_short': title_short_prefix + ', ' + _(title_short), #'saldo_awal': saldo_awal, #'saldo_awal_projection': saldo_awal_projection, 'start_date': start_date, 'end_date': end_date } where_start_date = " l.date >= '%s' " % start_date where_end_date = " l.date <= '%s' " % end_date where_start_datex = " '%s' " % start_date where_end_datex = " '%s' " % end_date days = { 'Sun': 'Minggu', 'Mon': 'Senin', 'Tue': 'Selasa', 'Wed': 'Rabu', 'Thu': 'Kamis', 'Fri': 'Jumat', 'Sat': 'Sabtu' } query_kas_besar = """ select x.day"date",sum(x.debit) debit,sum(x.credit) credit from ( select day::date,0 debit,0 credit from generate_series(timestamp %s, %s, '1 day') day union all select date,sum(debit) debit,sum(credit) from account_move_line l left join account_account a on a.id = l.account_id WHERE %s and %s and %s and %s GROUP BY l.date) x GROUP BY x.day ORDER BY 1""" % ( where_start_datex, where_end_datex, where_account, where_start_date, where_end_date, where_analytic) #print query_kas_besar move_selection = "" report_info = _('') move_selection += "" reports = [report_kas_besar] for report in reports: a = cr.execute(query_kas_besar) all_lines = cr.dictfetchall() move_lines = [] if all_lines: p_map = map( lambda x: { 'no': 0, 'hari': '', 'date': x['date'] if x['date'] != None else '', 'debit': x['debit'] if x['debit'] > 0 else 0.0, 'credit': x['credit'] if 
x['credit'] > 0 else 0.0, 'saldo_awal': 0.0, 'saldo_akhir': 0.0, 'lebih_setor': 0.0, 'kurang_setor': 0.0, }, all_lines) for p in p_map: if p['date'] not in map(lambda x: x.get('date', None), move_lines): account_move_lines = filter( lambda x: x['date'] == p['date'], all_lines) bal_cr_init = journal.default_credit_account_id.with_context( date_from=p['date'], date_to=p['date'], initial_bal=True, sql_query=sql_query).balance bal_db_init = journal.default_debit_account_id.with_context( date_from=p['date'], date_to=p['date'], initial_bal=True, sql_query=sql_query).balance bank_balance_init = bal_cr_init or bal_db_init bal_cr_init = journal.default_credit_account_id.with_context( date_from=datetime.strptime( p['date'], DSDF).replace(day=1).strftime(DSDF), date_to=datetime.strptime(p['date'], '%Y-%m-%d') - timedelta(days=1), initial_bal=False, sql_query=sql_query).balance bal_db_init = journal.default_debit_account_id.with_context( date_from=datetime.strptime( p['date'], DSDF).replace(day=1).strftime(DSDF), date_to=datetime.strptime(p['date'], '%Y-%m-%d') - timedelta(days=1), initial_bal=False, sql_query=sql_query).balance saldo_awal = bank_balance_init + (bal_cr_init - bal_db_init) saldo_akhir = saldo_awal + p['debit'] - p['credit'] if p['debit'] > saldo_akhir: lebih_setor = p['debit'] - saldo_akhir else: lebih_setor = 0 if p['debit'] < saldo_akhir: kurang_setor = saldo_akhir - p['debit'] else: kurang_setor = 0 date_a = datetime.strptime(p['date'], '%Y-%m-%d') hari = days[date_a.strftime('%a')] p.update({'hari': hari}) p.update({ 'date': datetime.strptime(str(p['date']), '%Y-%m-%d').strftime('%m/%d/%Y') }) p.update({'saldo_awal': saldo_awal}) p.update({'lebih_setor': lebih_setor}) p.update({'kurang_setor': kurang_setor}) p.update({'saldo_akhir': saldo_akhir}) move_lines.append(p) report.update({'move_lines': move_lines}) reports = filter(lambda x: x.get('move_lines'), reports) if not reports: reports = [{ 'type': 'KasBesar', 'title': '', #'title_short': title_short_prefix + ', ' + _(' '.join(['LAPORAN Kas Besar','(PROYEKSI)' if data['projection'] else ''])), 'saldo_awal': saldo_awal, 'start_date': start_date, 'end_date': end_date, 'hari': hari, 'move_lines': [{ 'no': 0, 'debit': 0, 'credit': 0, 'lebih_setor': 0, 'kurang_setor': 0, }], }] report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) objects = False super(dym_kas_besar_report_print, self).set_context(objects, data, ids, report_type)
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context start_date = data['start_date'] end_date = data['end_date'] branch_ids = data['branch_ids'] account_id = data['account_ids'] journal_id = data['journal_ids'] branch_status = False trx_start_date = data['trx_start_date'] trx_end_date = data['trx_end_date'] division = data['division'] partner_ids = data['partner_ids'] title_prefix = '' title_short_prefix = '' report_other_receivable = { 'type': 'payable', 'title': '', 'title_short': title_short_prefix + ', ' + _('Laporan Other Receivable')} query_start = "SELECT av.id as id_ai, " \ "COALESCE(b.name,'') as branch_id, " \ "av.date as date, " \ "av.amount as total, " \ "av.name as memo, " \ "av.reference as ref, " \ "av.division as division, " \ "av.name as name, " \ "av.number as number, " \ "av.state as state, " \ "av.date_due as date_due, " \ "rp.default_code as partner_code, " \ "rp.name as partner_name, " \ "a.code as account_code, " \ "a.name as account_name, " \ "j.name as journal_name " \ "FROM " \ "account_voucher av " \ "LEFT JOIN dym_branch b ON av.branch_id = b.id " \ "LEFT JOIN res_partner rp ON rp.id = av.partner_id " \ "LEFT JOIN account_account a ON a.id = av.account_id " \ "LEFT JOIN account_journal j ON j.id = av.journal_id " \ "where 1=1 and av.state = 'posted' and j.type in ('sale','sale_refund') and av.type = 'sale' " move_selection = "" report_info = _('') move_selection += "" query_end="" if division : query_end +=" AND av.division = '%s'" % str(division) if trx_start_date : query_end +=" AND av.date >= '%s'" % str(trx_start_date) if trx_end_date : query_end +=" AND av.date <= '%s'" % str(trx_end_date) if start_date : query_end +=" AND av.date_due >= '%s'" % str(start_date) if end_date : query_end +=" AND av.date_due <= '%s'" % str(end_date) if partner_ids : query_end +=" AND av.partner_id in %s" % str( tuple(partner_ids)).replace(',)', ')') if branch_ids : query_end +=" AND av.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') if account_id : query_end+=" AND av.account_id in %s" % str( tuple(account_id)).replace(',)', ')') if journal_id : query_end+=" AND av.journal_id in %s" % str( tuple(journal_id)).replace(',)', ')') reports = [report_other_receivable] query_order = "" for report in reports: cr.execute(query_start + query_end + query_order) all_lines = cr.dictfetchall() id_ai = [] if all_lines: p_map = map( lambda x: { 'no': x['id_ai'] if x['id_ai'] != None else '', 'id_ai': x['id_ai'] if x['id_ai'] != None else '', 'branch_id': str(x['branch_id'].encode('ascii','ignore').decode('ascii')) if x['branch_id'] != None else '', 'date': str(x['date']) if x['date'] != None else '', 'number': str(x['number'].encode('ascii','ignore').decode('ascii')) if x['number'] != None else '', 'partner_name': str(x['partner_name'].encode('ascii','ignore').decode('ascii')) if x['partner_name'] != None else '', 'date_due': str(x['date_due']) if x['date_due'] != None else '', 'division': str(x['division']) if x['division'] != None else '', 'partner_code': str(x['partner_code']) if x['partner_code'] != None else '', 'journal_name': str(x['journal_name']) if x['journal_name'] != None else '', 'account_code': str(x['account_code'].encode('ascii','ignore').decode('ascii')) if x['account_code'] != None else '', 'account_name': str(x['account_name'].encode('ascii','ignore').decode('ascii')) if x['account_name'] != None else '', 'memo': str(x['memo']) if x['memo'] != None else '', 'ref': str(x['ref']) if x['ref'] != None else 
'', 'name': str(x['name']) if x['name'] != None else '', 'total': x['total'], 'state': x['state'], 'dpp': 0, 'ppn': 0, 'pph': 0, 'piutang': 0 }, all_lines) for p in p_map: if p['id_ai'] not in map( lambda x: x.get('id_ai', None), id_ai): account_analytic_account = filter( lambda x: x['id_ai'] == p['id_ai'], all_lines) analytic_1 = '' analytic_2 = '' analytic_3 = '' analytic_4 = '' analytic_1_name = '' analytic_2_name = '' analytic_3_name = '' analytic_4_name = '' ai = self.pool.get('account.voucher').browse(cr, uid, account_analytic_account[0]['id_ai']) analytic = ai.analytic_4 or '' branch_name = '' branch = False branch_status_1 = '' branch_name = '' branch_id = '' if analytic: if analytic.type == 'normal': if analytic.segmen == 1 and analytic_1 == '': analytic_1_name = analytic.name analytic_1 = analytic.code if analytic.segmen == 2 and analytic_2 == '': analytic_2_name = analytic.name analytic_2 = analytic.code if analytic.segmen == 3 and analytic_3 == '': analytic_3_name = analytic.name analytic_3 = analytic.code branch = analytic.branch_id branch_name = branch.name branch_status_1 = branch.branch_status branch_id = branch.id if analytic.segmen == 4 and analytic_4 == '': analytic_4_name = analytic.name analytic_4 = analytic.code analytic_id = analytic while (analytic.parent_id): analytic = analytic.parent_id if analytic.type == 'normal': if analytic.segmen == 1 and analytic_1 == '': analytic_1_name = analytic.name analytic_1 = analytic.code if analytic.segmen == 2 and analytic_2 == '': analytic_2_name = analytic.name analytic_2 = analytic.code if analytic.segmen == 3 and analytic_3 == '': analytic_3_name = analytic.name analytic_3 = analytic.code branch = analytic.branch_id branch_name = branch.name branch_status_1 = branch.branch_status branch_id = branch.id if analytic.segmen == 4 and analytic_4 == '': analytic_4_name = analytic.name analytic_4 = analytic.code analytic_id == analytic if (branch and branch_ids and branch.id not in branch_ids) or (branch and branch_status and branch_status != branch.branch_status): continue analytic_2_branch = analytic_2 if analytic_2 in ['210','220','230']: if branch_status_1 == 'H123': analytic_2_branch = analytic_2[:2] + '1' elif branch_status_1 == 'H23': analytic_2_branch = analytic_2[:2] + '2' else: analytic_2_branch = analytic_2 analytic_combination = analytic_1 + '/' + analytic_2_branch + '/' + analytic_3 + '/' + analytic_4 move_line = ai.move_ids.filtered(lambda r: r.account_id.id == ai.account_id.id and r.debit > 0) residual = 0 if move_line: residual = self.pool.get('account.move.line').get_residual_date_based(cr, uid, move_line[0].id, trx_end_date) p.update({'residual': residual}) p.update({'lines': account_analytic_account}) p.update({'analytic_1': analytic_1_name}) p.update({'analytic_2': analytic_2_name}) p.update({'analytic_3': analytic_3_name}) p.update({'analytic_4': analytic_4_name}) p.update({'branch_status': branch_status_1}) p.update({'branch_id': branch_name}) p.update({'analytic_combination': analytic_combination}) id_ai.append(p) av = self.pool.get('account.voucher').browse(cr, uid, p['id_ai']) sum_ppn = av.tax_amount sum_dpp = sum([sales_info.amount for sales_info in av.line_cr_ids]) sum_pph = sum([pph_line.amount for pph_line in av.withholding_ids]) sum_piutang = sum_dpp + sum_ppn - sum_pph p.update({'dpp': sum_dpp}) p.update({'ppn': sum_ppn}) p.update({'pph': sum_pph}) p.update({'piutang': sum_piutang}) report.update({'id_ai': id_ai}) reports = filter(lambda x: x.get('id_ai'), reports) if not reports : reports = 
[{'title_short': 'Laporan Other Receivable', 'type': ['out_invoice','in_invoice','in_refund','out_refund'], 'id_ai': [{'total': 0, 'date': 'NO DATA FOUND', 'branch_id': 'NO DATA FOUND', 'number': 'NO DATA FOUND', 'memo': 'NO DATA FOUND', 'ref': 'NO DATA FOUND', 'division': 'NO DATA FOUND', 'id_ai': 'NO DATA FOUND', 'analytic_1': 'NO DATA FOUND', 'analytic_2': 'NO DATA FOUND', 'analytic_3': 'NO DATA FOUND', 'analytic_4': 'NO DATA FOUND', 'analytic_combination': 'NO DATA FOUND', 'branch_status': 'NO DATA FOUND', 'state': 'NO DATA FOUND', 'partner_code': 'NO DATA FOUND', 'partner_name': 'NO DATA FOUND', 'journal_name': 'NO DATA FOUND', 'date_due': 'NO DATA FOUND', 'account_code': 'NO DATA FOUND', 'account_name': 'NO DATA FOUND', 'residual': 0, 'no': 0, 'name': 'NO DATA FOUND',}], 'title': ''}] report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context ).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) objects=False super(dym_report_other_receivable_print, self).set_context( objects, data, ids, report_type)
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context division = data['division'] date_start_date = data['date_start_date'] date_end_date = data['date_end_date'] branch_id = data['branch_id'] product_ids = data['product_ids'] cabang = branch_id[1] if branch_id else '' title_short_prefix = '' report_stock_mutation = { 'type': 'receivable', 'title': '', 'division':division, 'cabang':cabang, 'title_short': title_short_prefix + '' + _('Laporan Stock Mutation')} where_division = where_division2 = " 1=1 " if division: where_division = " spick.division = '%s'" % str(division) where_division2 = " (spick.division = '%s' or spack.division = '%s')" % (str(division),str(division)) where_picking_type_code = " 1=1 " where_date_start_date = where_date_start_date2 = " 1=1 " where_state = where_state2 = " 1=1 " where_state = " spack.state = 'posted' " where_state2 = " spl.state = 'done' " where_date_end_date = where_date_end_date2 = " 1=1 " if date_end_date: where_date_end_date = " date(spick.date) <= '%s'" % str(date_end_date) where_date_end_date2 = " date(spl.date) <= '%s'" % str(date_end_date) where_min_date_start_date = where_min_date_start_date2 = " 1=1 " where_min_date_end_date = where_min_date_end_date2 = " 1=1 " where_date_done_start_date = where_date_done_start_date2 = " 1=1 " where_date_done_end_date = where_date_done_end_date2 = " 1=1 " where_branch_ids = where_branch_ids2 = " 1=1 " if branch_id: where_branch_ids = " spick.branch_id = %s" % branch_id[0] where_branch_ids2 = " (spl.branch_id = %s or spack.branch_id = %s or spick.branch_id = %s)" % (branch_id[0],branch_id[0],branch_id[0]) where_product_ids = " 1=1 " if product_ids : where_product_ids = " product.id in %s" % str( tuple(product_ids)).replace(',)', ')') where_partner_ids = where_partner_ids2 = " 1=1 " where_categ_ids = " 1=1 " query_stock_mutation = "SELECT CONCAT(cast(spl.id as text),'-dym_stock_packing_line') as id_ai, spick.id as id_picking, 'pack' as object, spl.id as pack_line_id, prod_tmpl.categ_id as categ_id, " \ "b.code as branch_code, b.name as branch_name, spick.division, spt.name as picking_type_name, spack.name as packing_name, spack.date as packing_date, " \ "partner.default_code as partner_code, partner.name as partner_name, expedisi.default_code as ekspedisi_code, expedisi.name as ekspedisi_name, " \ "product.name_template as prod_tmpl, pav.name as color, spl.engine_number as engine, spl.chassis_number as chassis, " \ "spl.tahun_pembuatan as tahun, " \ "CASE WHEN source_sloc.usage not in ('internal','kpb','nrfs','customer','inventory') and sloc.usage in ('internal','kpb','nrfs') THEN spl.quantity " \ " ELSE 0 " \ "END as qty, " \ "CASE WHEN source_sloc.usage = 'customer' and sloc.usage in ('internal','kpb','nrfs') THEN spl.quantity " \ " ELSE 0 " \ "END as qty_retur_jual, " \ "CASE WHEN source_sloc.usage = 'inventory' and sloc.usage in ('internal','kpb','nrfs') THEN spl.quantity " \ " ELSE 0 " \ "END as qty_adjustment_in, " \ "CASE WHEN source_sloc.usage in ('internal','kpb','nrfs') and sloc.usage not in ('internal','kpb','nrfs','supplier','inventory') THEN spl.quantity " \ " ELSE 0 " \ "END as qty_out, " \ "CASE WHEN source_sloc.usage in ('internal','kpb','nrfs') and sloc.usage = 'supplier' THEN spl.quantity " \ " ELSE 0 " \ "END as qty_retur_beli, " \ "CASE WHEN source_sloc.usage in ('internal','kpb','nrfs') and sloc.usage = 'inventory' THEN spl.quantity " \ " ELSE 0 " \ "END as qty_adjustment_out, " \ "COALESCE(pph.old_cost_price,0) as amount_awal, " \ 
"COALESCE(pph.trans_price,0) as amount_trans, " \ "COALESCE(move.price_unit,0) as price_unit, " \ "COALESCE(pph.cost,0) as amount_akhir, " \ "COALESCE(pph.id,0) as pph_id, " \ "spack.state as packing_state, spick.origin as picking_origin, prod_categ.name as categ_name, product.default_code as internal_ref, " \ "COALESCE(spick2.origin,'') as backorder, sloc.name as location, case when spl.ready_for_sale = true then 'RFS' else 'NRFS' end as status_rfs, COALESCE(bs.name, '') as branch_source, parent_sloc.name as parent_location, source_sloc.name as location_source, parent_source_sloc.name as parent_location_source, spl.write_date as write_date, product.id as product_id " \ "FROM " \ "dym_stock_packing spack " \ "inner join dym_stock_packing_line spl ON spack.id = spl.packing_id " \ "left join stock_picking spick ON spick.id = spack.picking_id " \ "left join stock_move move ON spick.id = move.picking_id and move.product_id = spl.product_id " \ "left join consolidate_invoice_line cl ON cl.move_id = move.id " \ "left join consolidate_invoice c ON c.id = cl.consolidate_id " \ "left join product_price_history pph ON ((pph.model_name = 'consolidate.invoice' and pph.trans_id = c.id) or (pph.model_name = 'stock.picking' and pph.trans_id = spick.id)) and pph.product_id = spl.product_id " \ "left join stock_picking spick2 ON spick2.id = spick.backorder_id " \ "left join dym_branch b on b.id = spick.branch_id " \ "left join res_partner partner on partner.id = spick.partner_id " \ "left join res_partner expedisi on expedisi.id = spack.expedition_id " \ "left join product_product product on product.id = spl.product_id " \ "left join product_attribute_value_product_product_rel pavpp ON product.id = pavpp.prod_id " \ "left join product_attribute_value pav ON pavpp.att_id = pav.id " \ "left join stock_picking_type spt ON spt.id = spick.picking_type_id " \ "left join product_template prod_tmpl ON prod_tmpl.id = product.product_tmpl_id " \ "left join product_category prod_categ ON prod_categ.id = prod_tmpl.categ_id " \ "left join stock_location sloc ON sloc.id = spl.destination_location_id " \ "left join stock_location parent_sloc ON parent_sloc.id = sloc.location_id " \ "left join stock_location source_sloc ON source_sloc.id = spl.source_location_id " \ "left join stock_location parent_source_sloc ON parent_source_sloc.id = source_sloc.location_id " \ "left join dym_branch bs ON spack.branch_sender_id = bs.id " \ "where ((spl.engine_number is not null and spick.division = 'Unit') or spick.division != 'Unit') and spl.quantity > 0 and " + where_state + " AND " + where_division + " AND " + where_picking_type_code + " AND " + where_date_start_date + " AND " + where_date_end_date + " AND " + where_min_date_start_date + " AND " + where_min_date_end_date + " AND " + where_date_done_start_date + " AND " + where_date_done_end_date + " AND " + where_branch_ids + " AND " + where_categ_ids + " AND " + where_product_ids + " AND " + where_partner_ids + " " \ " UNION ALL " \ "SELECT CONCAT(cast(spl.id as text),'-stock_move') as id_ai, spick.id as id_picking, 'move' as object, spl.id as pack_line_id, prod_tmpl.categ_id as categ_id, " \ "b.code as branch_code, b.name as branch_name, " \ "CASE WHEN spack.id is not null THEN spack.division " \ " WHEN spick.id is not null THEN spick.division " \ " ELSE '' " \ "END as division, " \ "CASE WHEN spack.id is not null and sloc.usage = 'inventory' THEN 'Delivery Orders' " \ " WHEN spack.id is not null and source_sloc.usage = 'inventory' THEN 'Receipts' " \ " ELSE spt.name " \ "END as 
picking_type_name, " \ "CASE WHEN spack.id is not null THEN spack.name " \ " WHEN spick.id is not null THEN spick.name " \ " ELSE '' " \ "END as packing_name, " \ "date(spl.date) as packing_date, " \ "partner.default_code as partner_code, partner.name as partner_name, '' as ekspedisi_code, '' as ekspedisi_name, " \ "product.name_template as prod_tmpl, pav.name as color, lot.name as engine, CONCAT(lot.chassis_code, lot.chassis_no) as chassis, " \ "lot.tahun as tahun, " \ "CASE WHEN source_sloc.usage not in ('internal','kpb','nrfs','customer','inventory') and sloc.usage in ('internal','kpb','nrfs') THEN spl.product_uom_qty " \ " ELSE 0 " \ "END as qty, " \ "CASE WHEN source_sloc.usage = 'customer' and sloc.usage in ('internal','kpb','nrfs') THEN spl.product_uom_qty " \ " ELSE 0 " \ "END as qty_retur_jual, " \ "CASE WHEN source_sloc.usage = 'inventory' and sloc.usage in ('internal','kpb','nrfs') THEN spl.product_uom_qty " \ " ELSE 0 " \ "END as qty_adjustment_in, " \ "CASE WHEN source_sloc.usage in ('internal','kpb','nrfs') and sloc.usage not in ('internal','kpb','nrfs','supplier','inventory') THEN spl.product_uom_qty " \ " ELSE 0 " \ "END as qty_out, " \ "CASE WHEN source_sloc.usage in ('internal','kpb','nrfs') and sloc.usage = 'supplier' THEN spl.product_uom_qty " \ " ELSE 0 " \ "END as qty_retur_beli, " \ "CASE WHEN source_sloc.usage in ('internal','kpb','nrfs') and sloc.usage = 'inventory' THEN spl.product_uom_qty " \ " ELSE 0 " \ "END as qty_adjustment_out, " \ "COALESCE(pph.old_cost_price,0) as amount_awal, " \ "COALESCE(pph.trans_price,0) as amount_trans, " \ "COALESCE(spl.price_unit,0) as price_unit, " \ "COALESCE(pph.cost,0) as amount_akhir, " \ "COALESCE(pph.id,0) as pph_id, " \ "CASE WHEN spack.id is not null THEN spack.state " \ " WHEN spick.id is not null THEN spick.state " \ " ELSE '' " \ "END as packing_state, " \ "spick.origin as picking_origin, prod_categ.name as categ_name, product.default_code as internal_ref, " \ "COALESCE(spick2.origin,'') as backorder, sloc.name as location, case when sloc.usage = 'internal' then 'RFS' when sloc.usage in ('kpb','nrfs') then 'NRFS' else '' end as status_rfs, '' as branch_source, parent_sloc.name as parent_location, source_sloc.name as location_source, parent_source_sloc.name as parent_location_source, spl.write_date as write_date, product.id as product_id " \ "FROM " \ "stock_move spl " \ "left join stock_inventory spack ON spack.id = spl.inventory_id " \ "left join stock_production_lot lot ON lot.id = spl.restrict_lot_id " \ "left join stock_picking spick ON spick.id = spl.picking_id " \ "left join consolidate_invoice_line cl ON cl.move_id = spl.id " \ "left join consolidate_invoice c ON c.id = cl.consolidate_id " \ "left join product_price_history pph ON ((pph.model_name = 'consolidate.invoice' and pph.trans_id = c.id) or (pph.model_name = 'stock.picking' and pph.trans_id = spick.id) or (pph.model_name = 'stock.inventory' and pph.trans_id = spack.id)) and pph.product_id = spl.product_id " \ "left join stock_picking spick2 ON spick2.id = spick.backorder_id " \ "left join dym_branch b on b.id = spl.branch_id " \ "left join res_partner partner on partner.id = spick.partner_id " \ "left join dym_stock_packing packing on spick.id = packing.picking_id " \ "left join product_product product on product.id = spl.product_id " \ "left join product_attribute_value_product_product_rel pavpp ON product.id = pavpp.prod_id " \ "left join product_attribute_value pav ON pavpp.att_id = pav.id " \ "left join stock_picking_type spt ON spt.id = 
spl.picking_type_id " \ "left join product_template prod_tmpl ON prod_tmpl.id = product.product_tmpl_id " \ "left join product_category prod_categ ON prod_categ.id = prod_tmpl.categ_id " \ "left join stock_location sloc ON sloc.id = spl.location_dest_id " \ "left join stock_location parent_sloc ON parent_sloc.id = sloc.location_id " \ "left join stock_location source_sloc ON source_sloc.id = spl.location_id " \ "left join stock_location parent_source_sloc ON parent_source_sloc.id = source_sloc.location_id " \ "where ((lot.id is not null and (spack.division = 'Unit' or spick.division = 'Unit')) or (spack.division != 'Unit' or spick.division != 'Unit')) and spl.product_uom_qty > 0 and " + where_state2 + " AND " + where_division2 + " AND " + where_picking_type_code + " AND " + where_date_start_date2 + " AND " + where_date_end_date2 + " AND " + where_min_date_start_date2 + " AND " + where_min_date_end_date2 + " AND " + where_date_done_start_date2 + " AND " + where_date_done_end_date2 + " AND " + where_branch_ids2 + " AND " + where_categ_ids + " AND " + where_product_ids + " AND " + where_partner_ids2 + " and packing.id is null " \ " ORDER BY product_id, packing_date, write_date, pph_id " \ move_selection = "" report_info = _('') move_selection += "" reports = [report_stock_mutation] #print query_stock_mutation product_x = '' for report in reports: cr.execute(query_stock_mutation) all_lines = cr.dictfetchall() picking_ids = [] if all_lines: def lines_map(x): x.update({'docname': x['branch_code']}) map(lines_map, all_lines) for cnt in range(len(all_lines)-1): if all_lines[cnt]['id_picking'] != all_lines[cnt+1]['id_picking']: all_lines[cnt]['draw_line'] = 1 else: all_lines[cnt]['draw_line'] = 0 all_lines[-1]['draw_line'] = 1 p_map = map( lambda x: { 'no': 0, 'id_picking': str(x['id_picking']), 'id_ai': str(x['id_ai']), 'object': str(x['object']), 'pack_line_id': x['pack_line_id'], 'branch_code': str(x['branch_code'].encode('ascii','ignore').decode('ascii')) if x['branch_code'] != None else '', 'branch_name': str(x['branch_name'].encode('ascii','ignore').decode('ascii')) if x['branch_name'] != None else '', 'branch_source': str(x['branch_source'].encode('ascii','ignore').decode('ascii')) if x['branch_source'] != None else '', 'division': str(x['division'].encode('ascii','ignore').decode('ascii')) if x['division'] != None else '', 'picking_type_name': str(x['picking_type_name'].encode('ascii','ignore').decode('ascii')) if x['picking_type_name'] != None else '', 'internal_ref': str(x['internal_ref'].encode('ascii','ignore').decode('ascii')) if x['internal_ref'] != None else '', 'categ_name': str(x['categ_name'].encode('ascii','ignore').decode('ascii')) if x['categ_name'] != None else '', 'packing_name': str(x['packing_name'].encode('ascii','ignore').decode('ascii')) if x['packing_name'] != None else '', 'packing_date': str(x['packing_date'].encode('ascii','ignore').decode('ascii')) if x['packing_date'] != None else '', 'partner_code': str(x['partner_code'].encode('ascii','ignore').decode('ascii')) if x['partner_code'] != None else '', 'partner_name': str(x['partner_name'].encode('ascii','ignore').decode('ascii')) if x['partner_name'] != None else '', 'ekspedisi_code': str(x['ekspedisi_code'].encode('ascii','ignore').decode('ascii')) if x['ekspedisi_code'] != None else '', 'ekspedisi_name': str(x['ekspedisi_name'].encode('ascii','ignore').decode('ascii')) if x['ekspedisi_name'] != None else '', 'prod_tmpl': str(x['prod_tmpl'].encode('ascii','ignore').decode('ascii')) if x['prod_tmpl'] != None else 
'', 'color': str(x['color'].encode('ascii','ignore').decode('ascii')) if x['color'] != None else '', 'engine': str(x['engine'].encode('ascii','ignore').decode('ascii')) if x['engine'] != None else '', 'chassis': str(x['chassis'].encode('ascii','ignore').decode('ascii')) if x['chassis'] != None else '', 'tahun': str(x['tahun'].encode('ascii','ignore').decode('ascii')) if x['tahun'] != None else '', 'qty': x['qty'], 'qty_out': x['qty_out'], 'qty_retur_beli': x['qty_retur_beli'], 'qty_retur_jual': x['qty_retur_jual'], 'qty_adjustment_in': x['qty_adjustment_in'], 'qty_adjustment_out': x['qty_adjustment_out'], 'amount': 0, 'amount_out': 0, 'amount_retur_beli': 0, 'amount_retur_jual': 0, 'amount_adjustment_in': 0, 'amount_adjustment_out': 0, 'amount_awal': x['amount_awal'], 'amount_trans': x['amount_trans'], 'price_unit': x['price_unit'], 'amount_akhir': x['amount_akhir'], 'packing_state': str(x['packing_state'].encode('ascii','ignore').decode('ascii')) if x['packing_state'] != None else '', 'picking_origin': str(x['picking_origin'].encode('ascii','ignore').decode('ascii')) if x['picking_origin'] != None else '', 'backorder': str(x['backorder'].encode('ascii','ignore').decode('ascii')) if x['backorder'] != None else '', 'location': str(x['location'].encode('ascii','ignore').decode('ascii')) if x['location'] != None else '', 'parent_location': str(x['parent_location'].encode('ascii','ignore').decode('ascii')) if x['parent_location'] != None else '', 'location_source': str(x['location_source'].encode('ascii','ignore').decode('ascii')) if x['location_source'] != None else '', 'parent_location_source': str(x['parent_location_source'].encode('ascii','ignore').decode('ascii')) if x['parent_location_source'] != None else '', 'status_rfs': str(x['status_rfs'].encode('ascii','ignore').decode('ascii')) if x['status_rfs'] != None else '', 'product_id': str(x['product_id']), }, all_lines) qty_awal = 0 amount_awal = 0 qty_akhir = 0 amount_akhir = 0 product_id = False flag = True for p in p_map: if p['id_ai'] not in map( lambda x: x.get('id_ai', None), picking_ids): packing_line = filter( lambda x: x['id_ai'] == p['id_ai'], all_lines) #print product_x,packing_line[0]['product_id'],packing_line[0]['packing_date'],packing_line[0]['internal_ref'],qty_awal,packing_line[0]['qty'],packing_line[0]['qty_retur_jual'],packing_line[0]['qty_adjustment_in'],packing_line[0]['qty_out'],packing_line[0]['qty_retur_beli'],packing_line[0]['qty_adjustment_out'] if packing_line[0]['packing_date'] < str(date_start_date) and date_start_date: if product_x == packing_line[0]['product_id']: qty_awal += packing_line[0]['qty'] qty_awal += packing_line[0]['qty_retur_jual'] qty_awal += packing_line[0]['qty_adjustment_in'] qty_awal -= packing_line[0]['qty_out'] qty_awal -= packing_line[0]['qty_retur_beli'] qty_awal -= packing_line[0]['qty_adjustment_out'] # print 'x',qty_awal else: qty_awal = packing_line[0]['qty'] qty_awal += packing_line[0]['qty_retur_jual'] qty_awal += packing_line[0]['qty_adjustment_in'] qty_awal -= packing_line[0]['qty_out'] qty_awal -= packing_line[0]['qty_retur_beli'] qty_awal -= packing_line[0]['qty_adjustment_out'] #print 'y', qty_awal product_x = packing_line[0]['product_id'] #print 'a------------' else: flag = True qty_akhir = qty_awal if packing_line[0]['qty'] > 0: if product_x == packing_line[0]['product_id']: qty_akhir += packing_line[0]['qty'] else: qty_akhir = packing_line[0]['qty'] p.update({'amount': packing_line[0]['amount_trans'] or packing_line[0]['price_unit']}) if packing_line[0]['qty_retur_jual'] > 
0: if product_x == packing_line[0]['product_id']: qty_akhir += packing_line[0]['qty_retur_jual'] else: qty_akhir += packing_line[0]['qty_retur_jual'] p.update({'amount_retur_jual': packing_line[0]['amount_trans'] or packing_line[0]['price_unit']}) if packing_line[0]['qty_adjustment_in'] > 0: if product_x == packing_line[0]['product_id']: qty_akhir += packing_line[0]['qty_adjustment_in'] else: qty_akhir += packing_line[0]['qty_adjustment_in'] p.update({'amount_adjustment_in': packing_line[0]['amount_trans'] or packing_line[0]['price_unit']}) if packing_line[0]['qty_out'] > 0: if product_x == packing_line[0]['product_id']: qty_akhir -= packing_line[0]['qty_out'] else: qty_akhir -= packing_line[0]['qty_out'] p.update({'amount_out': packing_line[0]['amount_trans'] or packing_line[0]['price_unit']}) if packing_line[0]['qty_retur_beli'] > 0: if product_x == packing_line[0]['product_id']: qty_akhir -= packing_line[0]['qty_retur_beli'] else: qty_akhir -= packing_line[0]['qty_retur_beli'] p.update({'amount_retur_beli': packing_line[0]['amount_trans'] or packing_line[0]['price_unit']}) if packing_line[0]['qty_adjustment_out'] > 0: if product_x == packing_line[0]['product_id']: qty_akhir -= packing_line[0]['qty_adjustment_out'] else: qty_akhir =- packing_line[0]['qty_adjustment_out'] p.update({'amount_adjustment_out': packing_line[0]['amount_trans'] or packing_line[0]['price_unit']}) p.update({'qty_awal': qty_awal}) p.update({'qty_akhir': qty_akhir}) p.update({'lines': packing_line}) picking_ids.append(p) qty_awal = qty_akhir amount_awal = amount_akhir product_x = packing_line[0]['product_id'] #print 'b------------' report.update({'cabang': cabang}) report.update({'picking_ids': picking_ids}) reports = filter(lambda x: x.get('picking_ids'), reports) if not reports : reports = [{'picking_ids': [{ 'no': 0, 'branch_code': 'NO DATA FOUND', 'branch_name': 'NO DATA FOUND', 'branch_source': 'NO DATA FOUND', 'division': 'NO DATA FOUND', 'categ_name': 'NO DATA FOUND', 'internal_ref': 'NO DATA FOUND', 'picking_type_name': 'NO DATA FOUND', 'packing_name': 'NO DATA FOUND', 'packing_date': 'NO DATA FOUND', 'partner_code': 'NO DATA FOUND', 'partner_name': 'NO DATA FOUND', 'ekspedisi_code': 'NO DATA FOUND', 'ekspedisi_name': 'NO DATA FOUND', 'prod_tmpl': 'NO DATA FOUND', 'color': 'NO DATA FOUND', 'engine': 'NO DATA FOUND', 'chassis': 'NO DATA FOUND', 'tahun': 'NO DATA FOUND', 'location': 'NO DATA FOUND', 'parent_location': 'NO DATA FOUND', 'location_source': 'NO DATA FOUND', 'parent_location_source': 'NO DATA FOUND', 'status_rfs': 'NO DATA FOUND', 'qty': 0, 'qty_out': 0, 'qty_retur_beli': 0, 'qty_retur_jual': 0, 'qty_adjustment_in': 0, 'qty_adjustment_out': 0, 'amount': 0, 'amount_out': 0, 'amount_retur_beli': 0, 'amount_retur_jual': 0, 'amount_adjustment_in': 0, 'amount_adjustment_out': 0, 'qty_awal': 0, 'qty_akhir': 0, 'amount_awal': 0, 'amount_akhir': 0, 'packing_state': 'NO DATA FOUND', 'picking_origin': 'NO DATA FOUND', 'backorder': 'NO DATA FOUND',}], 'title_short': 'Laporan Stock Mutation', 'type': 'receivable', 'title': '', 'division':division, 'cabang': cabang}] report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context ).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) objects = False super(dym_report_stock_mutation_print, self).set_context( objects, data, ids, report_type)
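# --- Illustrative sketch, not part of the original report code. ---
# The loop above rolls an opening/closing quantity per product: rows dated
# before date_start_date accumulate into qty_awal, later rows raise or lower
# qty_akhir, and the closing balance carries over as the next row's opening
# balance while product_id stays the same (the ORDER BY of the query keeps
# the rows grouped that way).  A minimal standalone version of that
# rolling-balance idea; the helper name and its return shape are assumptions
# for illustration only, not something this module defines.
def _roll_product_balances(rows, date_start):
    balances = []
    qty_awal = qty_akhir = 0.0
    current_product = None
    for row in rows:
        if row['product_id'] != current_product:
            # new product: restart the running balance
            qty_awal = qty_akhir = 0.0
            current_product = row['product_id']
        delta = (row['qty'] + row['qty_retur_jual'] + row['qty_adjustment_in']
                 - row['qty_out'] - row['qty_retur_beli']
                 - row['qty_adjustment_out'])
        if date_start and row['packing_date'] < date_start:
            # movement before the reporting window only feeds the opening balance
            qty_awal += delta
            qty_akhir = qty_awal
        else:
            qty_akhir += delta
        balances.append((row['product_id'], qty_awal, qty_akhir))
        # the closing balance becomes the next row's opening balance
        qty_awal = qty_akhir
    return balances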
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context start_date = data['start_date'] end_date = data['end_date'] branch_ids = data['branch_ids'] account_id = data['account_ids'] # segmen = data['segmen'] branch_status = False trx_start_date = data['trx_start_date'] trx_end_date = data['trx_end_date'] division = data['division'] partner_ids = data['partner_ids'] title_prefix = '' title_short_prefix = '' report_loan_piutang = { 'type': 'payable', 'title': '', 'title_short': title_short_prefix + ', ' + _('Laporan Loan Piutang')} query_start = "SELECT loan.id as id_ai, " \ "COALESCE(b.name,'') as branch_id, " \ "loan.date as date, " \ "loan.jumlah_loan as total, " \ "loan.loan_type as loan_type, " \ "loan.memo as memo, " \ "loan.reference as ref, " \ "loan.division as division, " \ "loan.name as number, " \ "loan.state as state, " \ "loan.first_due_date as date_due, " \ "loan.effective_date as effective_date, " \ "rp.default_code as partner_code, " \ "rp.name as partner_name, " \ "a.code as account_code, " \ "a.name as account_name, " \ "av.number as rv_pv " \ "FROM " \ "dym_loan loan " \ "LEFT JOIN account_voucher av ON loan.voucher_id = av.id " \ "LEFT JOIN dym_branch b ON loan.branch_id = b.id " \ "LEFT JOIN res_partner rp ON rp.id = loan.partner_id " \ "LEFT JOIN account_account a ON a.id = loan.account_id " \ "where 1=1 and loan.state in ('approved','done') and loan.loan_type = 'Piutang' " move_selection = "" report_info = _('') move_selection += "" query_end="" if division : query_end +=" AND loan.division = '%s'" % str(division) if trx_start_date : query_end +=" AND loan.date >= '%s'" % str(trx_start_date) if trx_end_date : query_end +=" AND loan.date <= '%s'" % str(trx_end_date) if start_date : query_end +=" AND loan.date_due >= '%s'" % str(start_date) if end_date : query_end +=" AND loan.date_due <= '%s'" % str(end_date) if partner_ids : query_end +=" AND loan.partner_id in %s" % str( tuple(partner_ids)).replace(',)', ')') if branch_ids : query_end +=" AND loan.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') if account_id : query_end+=" AND loan.account_id in %s" % str( tuple(account_id)).replace(',)', ')') reports = [report_loan_piutang] # query_order = "order by cabang" query_order = "" for report in reports: cr.execute(query_start + query_end + query_order) all_lines = cr.dictfetchall() id_ai = [] if all_lines: # def lines_map(x): # x.update({'docname': x['cabang']}) # map(lines_map, all_lines) # for cnt in range(len(all_lines)-1): # if all_lines[cnt]['id_aml'] != all_lines[cnt+1]['id_aml']: # all_lines[cnt]['draw_line'] = 1 # else: # all_lines[cnt]['draw_line'] = 0 # all_lines[-1]['draw_line'] = 1 p_map = map( lambda x: { 'no': x['id_ai'] if x['id_ai'] != None else '', 'id_ai': x['id_ai'] if x['id_ai'] != None else '', 'branch_id': str(x['branch_id'].encode('ascii','ignore').decode('ascii')) if x['branch_id'] != None else '', 'date': str(x['date']) if x['date'] != None else '', 'number': str(x['number'].encode('ascii','ignore').decode('ascii')) if x['number'] != None else '', 'partner_name': str(x['partner_name'].encode('ascii','ignore').decode('ascii')) if x['partner_name'] != None else '', 'date_due': str(x['date_due']) if x['date_due'] != None else '', 'division': str(x['division']) if x['division'] != None else '', 'partner_code': str(x['partner_code']) if x['partner_code'] != None else '', 'tipe_pinjaman': str(x['tipe_pinjaman']) if x['tipe_pinjaman'] != None else '', 'account_code': 
str(x['account_code'].encode('ascii','ignore').decode('ascii')) if x['account_code'] != None else '', 'account_name': str(x['account_name'].encode('ascii','ignore').decode('ascii')) if x['account_name'] != None else '', 'memo': str(x['memo']) if x['memo'] != None else '', 'ref': str(x['ref']) if x['ref'] != None else '', 'rv_pv': str(x['rv_pv']) if x['rv_pv'] != None else '', 'effective_date': str(x['effective_date']) if x['effective_date'] != None else '', 'loan_type': str(x['loan_type']) if x['loan_type'] != None else '', 'total': x['total'], 'state': x['state'],}, all_lines) for p in p_map: if p['id_ai'] not in map( lambda x: x.get('id_ai', None), id_ai): account_analytic_account = filter( lambda x: x['id_ai'] == p['id_ai'], all_lines) analytic_1 = '' analytic_2 = '' analytic_3 = '' analytic_4 = '' analytic_1_name = '' analytic_2_name = '' analytic_3_name = '' analytic_4_name = '' ai = self.pool.get('dym.loan').browse(cr, uid, account_analytic_account[0]['id_ai']) analytic = ai.analytic_4 or '' branch_name = '' branch = False branch_status_1 = '' branch_name = '' branch_id = '' if analytic: if analytic.type == 'normal': if analytic.segmen == 1 and analytic_1 == '': analytic_1_name = analytic.name analytic_1 = analytic.code if analytic.segmen == 2 and analytic_2 == '': analytic_2_name = analytic.name analytic_2 = analytic.code if analytic.segmen == 3 and analytic_3 == '': analytic_3_name = analytic.name analytic_3 = analytic.code branch = analytic.branch_id branch_name = branch.name branch_status_1 = branch.branch_status branch_id = branch.id if analytic.segmen == 4 and analytic_4 == '': analytic_4_name = analytic.name analytic_4 = analytic.code analytic_id = analytic while (analytic.parent_id): analytic = analytic.parent_id if analytic.type == 'normal': if analytic.segmen == 1 and analytic_1 == '': analytic_1_name = analytic.name analytic_1 = analytic.code if analytic.segmen == 2 and analytic_2 == '': analytic_2_name = analytic.name analytic_2 = analytic.code if analytic.segmen == 3 and analytic_3 == '': analytic_3_name = analytic.name analytic_3 = analytic.code branch = analytic.branch_id branch_name = branch.name branch_status_1 = branch.branch_status branch_id = branch.id if analytic.segmen == 4 and analytic_4 == '': analytic_4_name = analytic.name analytic_4 = analytic.code analytic_id == analytic if (branch and branch_ids and branch.id not in branch_ids) or (branch and branch_status and branch_status != branch.branch_status): continue analytic_2_branch = analytic_2 if analytic_2 in ['210','220','230']: if branch_status_1 == 'H123': analytic_2_branch = analytic_2[:2] + '1' elif branch_status_1 == 'H23': analytic_2_branch = analytic_2[:2] + '2' else: analytic_2_branch = analytic_2 analytic_combination = analytic_1 + '/' + analytic_2_branch + '/' + analytic_3 + '/' + analytic_4 residual = 0 move_line_id = self.pool.get('account.move.line').search(cr, uid, [ ('dym_loan_id','=',ai.id), ('debit','>',0) ]) residual = 0 if move_line_id: residual = self.pool.get('account.move.line').get_residual_date_based(cr, uid, move_line_id, trx_end_date) p.update({'residual': residual}) p.update({'lines': account_analytic_account}) p.update({'analytic_1': analytic_1_name}) p.update({'analytic_2': analytic_2_name}) p.update({'analytic_3': analytic_3_name}) p.update({'analytic_4': analytic_4_name}) p.update({'branch_status': branch_status_1}) p.update({'branch_id': branch_name}) p.update({'analytic_combination': analytic_combination}) id_ai.append(p) report.update({'id_ai': id_ai}) # for p in p_map: 
            report.update({'id_ai': id_ai})
            #     if p['id_aml'] not in map(
            #             lambda x: x.get('id_aml', None), ids_aml):
            #         ids_aml.append(p)
            #         lines = filter(
            #             lambda x: x['id_aml'] == p['id_aml'], all_lines)
            #         p.update({'lines': lines})
            #         p.update(
            #             {'d': 1,
            #              'c': 2,
            #              'b': 3})
            # report.update({'id_ai': p_map})
        reports = filter(lambda x: x.get('id_ai'), reports)
        if not reports:
            reports = [{
                'title_short': 'Laporan Loan Piutang',
                'type': ['out_invoice', 'in_invoice', 'in_refund', 'out_refund'],
                'id_ai': [{
                    'total': 0,
                    'date': 'NO DATA FOUND',
                    'branch_id': 'NO DATA FOUND',
                    'number': 'NO DATA FOUND',
                    'memo': 'NO DATA FOUND',
                    'ref': 'NO DATA FOUND',
                    'division': 'NO DATA FOUND',
                    'id_ai': 'NO DATA FOUND',
                    'analytic_1': 'NO DATA FOUND',
                    'analytic_2': 'NO DATA FOUND',
                    'analytic_3': 'NO DATA FOUND',
                    'analytic_4': 'NO DATA FOUND',
                    'analytic_combination': 'NO DATA FOUND',
                    'branch_status': 'NO DATA FOUND',
                    'state': 'NO DATA FOUND',
                    'partner_code': 'NO DATA FOUND',
                    'partner_name': 'NO DATA FOUND',
                    'journal_name': 'NO DATA FOUND',
                    'date_due': 'NO DATA FOUND',
                    'account_code': 'NO DATA FOUND',
                    'account_name': 'NO DATA FOUND',
                    'residual': 0,
                    'no': 0,
                    'loan_type': 'NO DATA FOUND',
                    'rv_pv': 'NO DATA FOUND',
                    'effective_date': 'NO DATA FOUND',
                    'tipe_pinjaman': 'NO DATA FOUND',
                }],
                'title': '',
            }]
        report_date = datetime_field.context_timestamp(
            cr, uid, datetime.now(), context
        ).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
        self.localcontext.update({
            'report_info': report_info,
            'report_date': report_date,
            'reports': reports,
        })
        super(dym_report_loan_piutang_print, self).set_context(
            objects, data, ids, report_type)
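# --- Illustrative sketch, not part of the original report code. ---
# query_end above is built by interpolating the wizard values directly into
# the SQL text with % and str().  The same filters can be expressed with
# psycopg2 placeholders so cr.execute() handles the quoting; the helper
# below is an assumption for illustration (the column names mirror the WHERE
# fragments used above, and tuples are passed for the IN clauses, which
# psycopg2 adapts to value lists).
def _loan_piutang_filters(division, trx_start_date, trx_end_date,
                          start_date, end_date, partner_ids, branch_ids,
                          account_ids):
    clauses, params = [], []
    if division:
        clauses.append("loan.division = %s")
        params.append(division)
    if trx_start_date:
        clauses.append("loan.date >= %s")
        params.append(trx_start_date)
    if trx_end_date:
        clauses.append("loan.date <= %s")
        params.append(trx_end_date)
    if start_date:
        clauses.append("loan.date_due >= %s")
        params.append(start_date)
    if end_date:
        clauses.append("loan.date_due <= %s")
        params.append(end_date)
    if partner_ids:
        clauses.append("loan.partner_id IN %s")
        params.append(tuple(partner_ids))
    if branch_ids:
        clauses.append("loan.branch_id IN %s")
        params.append(tuple(branch_ids))
    if account_ids:
        clauses.append("loan.account_id IN %s")
        params.append(tuple(account_ids))
    query_end = "".join(" AND " + clause for clause in clauses)
    return query_end, params
# Usage sketch: query_end, params = _loan_piutang_filters(...)
#               cr.execute(query_start + query_end, params)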
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context period_obj = self.pool['account.period'] posted = (data['target_move'] == 'posted') and True or False result_selection = data['result_selection'] company_id = data['company_id'] period_id = data['period_id'] period = period_obj.browse(cr, uid, period_id, context=context) period_code = period.code title_prefix = _('Period') + ' %s : ' % period_code title_short_prefix = period_code digits = self.pool['decimal.precision'].precision_get( cr, uid, 'Account') # perform query on selected period as well as preceding periods. period_date_start = period.date_start period_query_ids = period_obj.search( cr, uid, [('date_stop', '<=', period_date_start), ('company_id', '=', company_id)]) period_query_ids += [period_id] # find periods to select move_lines # that are reconciled after period next_period_ids = period_obj.search( cr, uid, [('date_stop', '>', period.date_stop), ('company_id', '=', company_id)]) report_ar = { 'type': 'receivable', 'title': title_prefix + _('Open Receivables'), 'title_short': title_short_prefix + ', ' + _('AR')} report_ap = { 'type': 'payable', 'title': title_prefix + _('Open Payables'), 'title_short': title_short_prefix + ', ' + _('AP')} # CASE statement on due date since standard Odoo accounting # allows to change the date_maturity in the accounting entries # on confirmed invoices (when using the account_cancel module). # The CASE statement gives accounting entries priority # over the invoice field. query_start = "SELECT l.move_id AS m_id, l.id AS l_id, " \ "l.date AS l_date, " \ "m.name AS move_name, m.date AS m_date, " \ "a.id AS a_id, a.code AS a_code, a.type AS a_type, " \ "j.id AS j_id, j.code AS j_code, j.type AS j_type, " \ "p.id AS p_id, p.name AS p_name, p.ref AS p_ref, " \ "l.name AS l_name, " \ "l.debit, l.credit, " \ "(CASE WHEN l.date_maturity IS NOT NULL THEN l.date_maturity " \ "ELSE ai.date_due END) AS date_due," \ "l.reconcile_id, r.name AS r_name, " \ "l.reconcile_partial_id, rp.name AS rp_name, " \ "ai.internal_number AS inv_number, b.name AS st_number, " \ "v.number AS voucher_number " \ "FROM account_move_line l " \ "INNER JOIN account_journal j ON l.journal_id = j.id " \ "INNER JOIN account_move m ON l.move_id = m.id " \ "INNER JOIN account_account a ON l.account_id = a.id " \ "INNER JOIN account_period ON l.period_id = account_period.id " \ "LEFT OUTER JOIN account_invoice ai ON ai.move_id = m.id " \ "LEFT OUTER JOIN account_voucher v ON v.move_id = m.id " \ "LEFT OUTER JOIN account_bank_statement b " \ "ON l.statement_id = b.id " \ "LEFT OUTER JOIN res_partner p ON l.partner_id = p.id " \ "LEFT OUTER JOIN account_move_reconcile r " \ "ON l.reconcile_id = r.id " \ "LEFT OUTER JOIN account_move_reconcile rp " \ "ON l.reconcile_partial_id = rp.id " if posted: move_selection = "AND m.state = 'posted' " report_info = _('All Posted Entries') else: move_selection = '' report_info = _('All Entries') move_selection += "AND account_period.id in %s" % str( tuple(period_query_ids)).replace(',)', ')') # define subquery to select move_lines within FY/period # that are reconciled after FY/period if next_period_ids: subquery = "OR reconcile_id IN " \ "(SELECT reconcile_id FROM account_move_line " \ "WHERE period_id IN %s " \ "AND reconcile_id IS NOT NULL)" % str( tuple(next_period_ids)).replace(',)', ')') else: subquery = None query_end = 'WHERE m.company_id = %s ' \ 'AND a.type = %s ' + move_selection + \ 'AND (l.reconcile_id IS NULL ' + (subquery or '') + 
') ' \ 'AND (l.debit+l.credit) != 0 ' \ 'ORDER BY a_code, p_name, p_id, l_date' if result_selection == 'customer': reports = [report_ar] elif result_selection == 'supplier': reports = [report_ap] else: reports = [report_ar, report_ap] for report in reports: cr.execute(query_start + query_end, (company_id, report['type'])) all_lines = cr.dictfetchall() partners = [] if all_lines: # add reference of corresponding legal document def lines_map(x): if x['j_type'] in ['sale', 'sale_refund', 'purchase', 'purchase_refund']: x.update({ 'docname': x['inv_number'] or x['voucher_number'] }) elif x['j_type'] in ['bank', 'cash']: x.update({ 'docname': x['st_number'] or x['voucher_number'] }) else: x.update({'docname': x['move_name']}) map(lines_map, all_lines) # insert a flag in every line to indicate the end of a partner # this flag can be used to draw a full line between partners for cnt in range(len(all_lines)-1): if all_lines[cnt]['p_id'] != all_lines[cnt+1]['p_id']: all_lines[cnt]['draw_line'] = 1 else: all_lines[cnt]['draw_line'] = 0 all_lines[-1]['draw_line'] = 1 p_map = map( lambda x: { 'p_id': x['p_id'], 'p_name': x['p_name'], 'p_ref': x['p_ref']}, all_lines) for p in p_map: # remove duplicates while preserving list order if p['p_id'] not in map( lambda x: x.get('p_id', None), partners): partners.append(p) partner_lines = filter( lambda x: x['p_id'] == p['p_id'], all_lines) p.update({'lines': partner_lines}) debits = map( lambda x: x['debit'] or 0.0, partner_lines) sum_debit = reduce(lambda x, y: x + y, debits) sum_debit = round(sum_debit, digits) credits = map( lambda x: x['credit'] or 0.0, partner_lines) sum_credit = reduce(lambda x, y: x + y, credits) sum_credit = round(sum_credit, digits) balance = sum_debit - sum_credit p.update( {'d': sum_debit, 'c': sum_credit, 'b': balance}) report.update({'partners': partners}) sum_debit = 0.0 sum_credit = 0.0 acc_lines = filter( lambda x: x['a_type'] == report['type'], all_lines) debits = map(lambda x: x['debit'] or 0.0, acc_lines) if debits: sum_debit = reduce(lambda x, y: x + y, debits) sum_debit = round(sum_debit, digits) credits = map(lambda x: x['credit'] or 0.0, acc_lines) if credits: sum_credit = reduce(lambda x, y: x + y, credits) sum_credit = round(sum_credit, digits) balance = sum_debit - sum_credit report.update({'d': sum_debit, 'c': sum_credit, 'b': balance}) reports = filter(lambda x: x.get('partners'), reports) if not reports: raise orm.except_orm( _('No Data Available'), _('No records found for your selection!')) report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context ).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) super(partner_open_arap_print, self).set_context( objects, data, ids, report_type=report_type)
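# --- Illustrative sketch, not part of the original report code. ---
# The partner block above deduplicates p_map and then uses filter/map/reduce
# to total debit, credit and balance per partner.  The same aggregation
# written as a single pass with a plain accumulator, assuming the rows carry
# the aliases from query_start (p_id, debit, credit); the helper name and
# the reduced key set are illustrative assumptions.
def _sum_by_partner(all_lines, digits=2):
    totals = {}
    order = []
    for line in all_lines:
        key = line['p_id']
        if key not in totals:
            totals[key] = {'d': 0.0, 'c': 0.0, 'lines': []}
            order.append(key)
        totals[key]['d'] += line['debit'] or 0.0
        totals[key]['c'] += line['credit'] or 0.0
        totals[key]['lines'].append(line)
    partners = []
    for key in order:
        debit = round(totals[key]['d'], digits)
        credit = round(totals[key]['c'], digits)
        partners.append({'p_id': key,
                         'lines': totals[key]['lines'],
                         'd': debit,
                         'c': credit,
                         'b': debit - credit})
    return partners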
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context division = data['division'] hutang_piutang_ksu = data['hutang_piutang_ksu'] picking_type_code = data['picking_type_code'] date_start_date = data['date_start_date'] date_end_date = data['date_end_date'] min_date_start_date = data['min_date_start_date'] min_date_end_date = data['min_date_end_date'] date_done_start_date = data['date_done_start_date'] date_done_end_date = data['date_done_end_date'] branch_ids = data['branch_ids'] categ_ids = data['categ_ids'] product_ids = data['product_ids'] partner_ids = data['partner_ids'] user_brw = self.pool.get('res.users').browse(cr, uid, uid) user_branch_type = user_brw.branch_type title_short_prefix = '' report_stock_movement = { 'type': 'receivable', 'title': '', 'division':division, 'hutang_piutang_ksu':hutang_piutang_ksu, 'title_short': title_short_prefix + '' + _('Laporan Stock Movement')} if hutang_piutang_ksu == 'hutang': report_stock_movement['title_short'] = title_short_prefix + '' + 'Laporan Hutang KSU' elif hutang_piutang_ksu == 'piutang': report_stock_movement['title_short'] = title_short_prefix + '' + 'Laporan Piutang KSU' where_division = where_division2 = " 1=1 " if division and not hutang_piutang_ksu : where_division = " spick.division = '%s'" % str(division) where_division2 = " (spick.division = '%s' or spack.division = '%s')" % (str(division),str(division)) where_picking_type_code = " 1=1 " if picking_type_code and not hutang_piutang_ksu: if picking_type_code == 'all' : where_picking_type_code = " (spt.code in ('incoming','outgoing','internal','interbranch_in','interbranch_out') or spack.id is not null)" elif picking_type_code == 'in' : where_picking_type_code = " (spt.code in ('incoming','interbranch_in') or (spack.id is not null and source_sloc.usage = 'inventory'))" elif picking_type_code == 'out' : where_picking_type_code = " (spt.code in ('outgoing','interbranch_out') or (spack.id is not null and sloc.usage = 'inventory'))" elif picking_type_code == 'incoming' : where_picking_type_code = " (spt.code = 'incoming' or (spack.id is not null and source_sloc.usage = 'inventory'))" elif picking_type_code == 'outgoing' : where_picking_type_code = " (spt.code = 'outgoing' or (spack.id is not null and sloc.usage = 'inventory'))" else : where_picking_type_code = " spt.code = '%s'" % str(picking_type_code) elif hutang_piutang_ksu == 'hutang': where_picking_type_code = " (spt.code in ('outgoing','interbranch_out') or (spack.id is not null and sloc.usage = 'inventory'))" elif hutang_piutang_ksu == 'piutang': where_picking_type_code = " (spt.code in ('incoming','interbranch_in') or (spack.id is not null and source_sloc.usage = 'inventory'))" where_date_start_date = where_date_start_date2 = " 1=1 " if date_start_date and not hutang_piutang_ksu: where_date_start_date = " date(spick.date) >= '%s'" % str(date_start_date) where_date_start_date2 = " date(spl.date) >= '%s'" % str(date_start_date) elif hutang_piutang_ksu == 'hutang': where_date_start_date = " (date(spick.date_done) >= '%s' or spick.date_done is null) and date(spick.create_date) <= '%s'" % (str(date_start_date),str(date_start_date)) where_date_start_date2 = " ((date(spl.date) >= '%s' and spl.state = 'done') or spl.state not in ('done','cancel')) and date(spl.create_date) <= '%s'" % (str(date_start_date),str(date_start_date)) elif hutang_piutang_ksu == 'piutang': where_date_start_date = " (date(spick.date_done) >= '%s' or spick.date_done is null) and date(spick.create_date) <= 
'%s'" % (str(date_start_date),str(date_start_date)) where_date_start_date2 = " ((date(spl.date) >= '%s' and spl.state = 'done') or spl.state not in ('done','cancel')) and date(spl.create_date) <= '%s'" % (str(date_start_date),str(date_start_date)) where_state = where_state2 = " 1=1 " if not hutang_piutang_ksu: where_state = " spack.state = 'posted' " where_state2 = " spl.state = 'done' " where_date_end_date = where_date_end_date2 = " 1=1 " if date_end_date and not hutang_piutang_ksu: where_date_end_date = " date(spick.date) <= '%s'" % str(date_end_date) where_date_end_date2 = " date(spl.date) <= '%s'" % str(date_end_date) where_min_date_start_date = where_min_date_start_date2 = " 1=1 " if min_date_start_date and not hutang_piutang_ksu: where_min_date_start_date = " date(spick.min_date) >= '%s'" % str(min_date_start_date) where_min_date_start_date2 = " date(spl.date_expected) >= '%s'" % str(min_date_start_date) where_min_date_end_date = where_min_date_end_date2 = " 1=1 " if min_date_end_date and not hutang_piutang_ksu: where_min_date_end_date = " date(spick.min_date) <= '%s'" % str(min_date_end_date) where_min_date_end_date2 = " date(spl.date_expected) <= '%s'" % str(min_date_end_date) where_date_done_start_date = where_date_done_start_date2 = " 1=1 " if date_done_start_date and not hutang_piutang_ksu: where_date_done_start_date = " date(spick.date_done) >= '%s'" % str(date_done_start_date) where_date_done_start_date2 = " date(spl.date) >= '%s' and spl.state = 'done'" % str(date_done_start_date) where_date_done_end_date = where_date_done_end_date2 = " 1=1 " if date_done_end_date and not hutang_piutang_ksu: where_date_done_end_date = " date(spick.date_done) <= '%s'" % str(date_done_end_date) where_date_done_end_date2 = " date(spl.date) <= '%s' and spl.state = 'done'" % str(date_done_end_date) where_branch_ids = where_branch_ids2 = " 1=1 " if branch_ids : where_branch_ids = " spick.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') where_branch_ids2 = " spl.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') where_product_ids = " 1=1 " if product_ids : where_product_ids = " product.id in %s" % str( tuple(product_ids)).replace(',)', ')') where_partner_ids = " 1=1 " if partner_ids : where_partner_ids = " spick.partner_id in %s" % str( tuple(partner_ids)).replace(',)', ')') where_categ_ids = " 1=1 " if categ_ids and where_product_ids == " 1=1 " : where_categ_ids = " prod_categ.id in %s" % str( tuple(categ_ids)).replace(',)', ')') query_stock_movement = "SELECT CONCAT(cast(spl.id as text),'-dym_stock_packing_line') as id_ai, spick.id as id_picking, 'pack' as object, spl.id as pack_line_id, prod_tmpl.categ_id as categ_id, " \ "b.code as branch_code, b.name as branch_name, spick.division, spt.name as picking_type_name, spack.name as packing_name, spack.date as packing_date, " \ "partner.default_code as partner_code, partner.name as partner_name, expedisi.default_code as ekspedisi_code, expedisi.name as ekspedisi_name, " \ "product.name_template as prod_tmpl, pav.code as color, spl.engine_number as engine, spl.chassis_number as chassis, " \ "spl.tahun_pembuatan as tahun, spl.quantity as qty, spack.state as packing_state, spick.origin as picking_origin, prod_categ.name as categ_name, product.default_code as internal_ref, " \ "COALESCE(spick2.origin,'') as backorder, sloc.name as location, case when spl.ready_for_sale = true then 'RFS' else 'NRFS' end as status_rfs, COALESCE(bs.name, '') as branch_source, parent_sloc.name as parent_location, source_sloc.name as location_source, 
parent_source_sloc.name as parent_location_source " \ "FROM " \ "dym_stock_packing spack " \ "inner join dym_stock_packing_line spl ON spack.id = spl.packing_id " \ "left join stock_picking spick ON spick.id = spack.picking_id " \ "left join stock_picking spick2 ON spick2.id = spick.backorder_id " \ "left join dym_branch b on b.id = spick.branch_id " \ "left join res_partner partner on partner.id = spick.partner_id " \ "left join res_partner expedisi on expedisi.id = spack.expedition_id " \ "left join product_product product on product.id = spl.product_id " \ "left join product_attribute_value_product_product_rel pavpp ON product.id = pavpp.prod_id " \ "left join product_attribute_value pav ON pavpp.att_id = pav.id " \ "left join stock_picking_type spt ON spt.id = spick.picking_type_id " \ "left join product_template prod_tmpl ON prod_tmpl.id = product.product_tmpl_id " \ "left join product_category prod_categ ON prod_categ.id = prod_tmpl.categ_id " \ "left join stock_location sloc ON sloc.id = spl.destination_location_id " \ "left join stock_location parent_sloc ON parent_sloc.id = sloc.location_id " \ "left join stock_location source_sloc ON source_sloc.id = spl.source_location_id " \ "left join stock_location parent_source_sloc ON parent_source_sloc.id = source_sloc.location_id " \ "left join dym_branch bs ON spack.branch_sender_id = bs.id " \ "where ((spl.engine_number is not null and spick.division = 'Unit') or spick.division != 'Unit') and spl.quantity > 0 and " + where_state + " AND " + where_division + " AND " + where_picking_type_code + " AND " + where_date_start_date + " AND " + where_date_end_date + " AND " + where_min_date_start_date + " AND " + where_min_date_end_date + " AND " + where_date_done_start_date + " AND " + where_date_done_end_date + " AND " + where_branch_ids + " AND " + where_categ_ids + " AND " + where_product_ids + " AND " + where_partner_ids + " " \ " UNION ALL " \ "SELECT CONCAT(cast(spl.id as text),'-stock_move') as id_ai, spick.id as id_picking, 'move' as object, spl.id as pack_line_id, prod_tmpl.categ_id as categ_id, " \ "b.code as branch_code, b.name as branch_name, " \ "CASE WHEN spack.id is not null THEN spack.division " \ " WHEN spick.id is not null THEN spick.division " \ " ELSE '' " \ "END as division, " \ "CASE WHEN spack.id is not null and sloc.usage = 'inventory' THEN 'Delivery Orders' " \ " WHEN spack.id is not null and source_sloc.usage = 'inventory' THEN 'Receipts' " \ " ELSE spt.name " \ "END as picking_type_name, " \ "CASE WHEN spack.id is not null THEN spack.name " \ " WHEN spick.id is not null THEN spick.name " \ " ELSE '' " \ "END as packing_name, " \ "spl.date as packing_date, " \ "partner.default_code as partner_code, partner.name as partner_name, '' as ekspedisi_code, '' as ekspedisi_name, " \ "product.name_template as prod_tmpl, pav.code as color, lot.name as engine, CONCAT(lot.chassis_code, lot.chassis_no) as chassis, " \ "lot.tahun as tahun, spl.product_uom_qty as qty, " \ "CASE WHEN spack.id is not null THEN spack.state " \ " WHEN spick.id is not null THEN spick.state " \ " ELSE '' " \ "END as packing_state, " \ "spick.origin as picking_origin, prod_categ.name as categ_name, product.default_code as internal_ref, " \ "COALESCE(spick2.origin,'') as backorder, sloc.name as location, case when sloc.usage = 'internal' then 'RFS' when sloc.usage in ('kpb','nrfs') then 'NRFS' else '' end as status_rfs, '' as branch_source, parent_sloc.name as parent_location, source_sloc.name as location_source, parent_source_sloc.name as 
parent_location_source " \ "FROM " \ "stock_move spl " \ "left join stock_inventory spack ON spack.id = spl.inventory_id " \ "left join stock_production_lot lot ON lot.id = spl.restrict_lot_id " \ "left join stock_picking spick ON spick.id = spl.picking_id " \ "left join stock_picking spick2 ON spick2.id = spick.backorder_id " \ "left join dym_branch b on b.id = spl.branch_id " \ "left join res_partner partner on partner.id = spick.partner_id " \ "left join dym_stock_packing packing on spick.id = packing.picking_id " \ "left join product_product product on product.id = spl.product_id " \ "left join product_attribute_value_product_product_rel pavpp ON product.id = pavpp.prod_id " \ "left join product_attribute_value pav ON pavpp.att_id = pav.id " \ "left join stock_picking_type spt ON spt.id = spl.picking_type_id " \ "left join product_template prod_tmpl ON prod_tmpl.id = product.product_tmpl_id " \ "left join product_category prod_categ ON prod_categ.id = prod_tmpl.categ_id " \ "left join stock_location sloc ON sloc.id = spl.location_dest_id " \ "left join stock_location parent_sloc ON parent_sloc.id = sloc.location_id " \ "left join stock_location source_sloc ON source_sloc.id = spl.location_id " \ "left join stock_location parent_source_sloc ON parent_source_sloc.id = source_sloc.location_id " \ "where ((lot.id is not null and (spack.division = 'Unit' or spick.division = 'Unit')) or (spack.division != 'Unit' or spick.division != 'Unit')) and spl.product_uom_qty > 0 and " + where_state2 + " AND " + where_division2 + " AND " + where_picking_type_code + " AND " + where_date_start_date2 + " AND " + where_date_end_date2 + " AND " + where_min_date_start_date2 + " AND " + where_min_date_end_date2 + " AND " + where_date_done_start_date2 + " AND " + where_date_done_end_date2 + " AND " + where_branch_ids2 + " AND " + where_categ_ids + " AND " + where_product_ids + " AND " + where_partner_ids + " and packing.id is null " \ " ORDER BY branch_code " \ move_selection = "" report_info = _('') move_selection += "" reports = [report_stock_movement] for report in reports: cr.execute(query_stock_movement) all_lines = cr.dictfetchall() picking_ids = [] if all_lines: def lines_map(x): x.update({'docname': x['branch_code']}) map(lines_map, all_lines) for cnt in range(len(all_lines)-1): if all_lines[cnt]['id_picking'] != all_lines[cnt+1]['id_picking']: all_lines[cnt]['draw_line'] = 1 else: all_lines[cnt]['draw_line'] = 0 all_lines[-1]['draw_line'] = 1 p_map = map( lambda x: { 'no': 0, 'id_picking': str(x['id_picking']), 'id_ai': str(x['id_ai']), 'object': str(x['object']), 'pack_line_id': x['pack_line_id'], 'branch_code': str(x['branch_code'].encode('ascii','ignore').decode('ascii')) if x['branch_code'] != None else '', 'branch_name': str(x['branch_name'].encode('ascii','ignore').decode('ascii')) if x['branch_name'] != None else '', 'branch_source': str(x['branch_source'].encode('ascii','ignore').decode('ascii')) if x['branch_source'] != None else '', 'division': str(x['division'].encode('ascii','ignore').decode('ascii')) if x['division'] != None else '', 'picking_type_name': str(x['picking_type_name'].encode('ascii','ignore').decode('ascii')) if x['picking_type_name'] != None else '', 'internal_ref': str(x['internal_ref'].encode('ascii','ignore').decode('ascii')) if x['internal_ref'] != None else '', 'categ_name': str(x['categ_name'].encode('ascii','ignore').decode('ascii')) if x['categ_name'] != None else '', 'packing_name': str(x['packing_name'].encode('ascii','ignore').decode('ascii')) if x['packing_name'] 
!= None else '', 'packing_date': str(x['packing_date'].encode('ascii','ignore').decode('ascii')) if x['packing_date'] != None else '', 'partner_code': str(x['partner_code'].encode('ascii','ignore').decode('ascii')) if x['partner_code'] != None else '', 'partner_name': str(x['partner_name'].encode('ascii','ignore').decode('ascii')) if x['partner_name'] != None else '', 'ekspedisi_code': str(x['ekspedisi_code'].encode('ascii','ignore').decode('ascii')) if x['ekspedisi_code'] != None else '', 'ekspedisi_name': str(x['ekspedisi_name'].encode('ascii','ignore').decode('ascii')) if x['ekspedisi_name'] != None else '', 'prod_tmpl': str(x['prod_tmpl'].encode('ascii','ignore').decode('ascii')) if x['prod_tmpl'] != None else '', 'color': str(x['color'].encode('ascii','ignore').decode('ascii')) if x['color'] != None else '', 'engine': str(x['engine'].encode('ascii','ignore').decode('ascii')) if x['engine'] != None else '', 'chassis': str(x['chassis'].encode('ascii','ignore').decode('ascii')) if x['chassis'] != None else '', 'tahun': str(x['tahun'].encode('ascii','ignore').decode('ascii')) if x['tahun'] != None else '', 'qty': x['qty'], 'packing_state': str(x['packing_state'].encode('ascii','ignore').decode('ascii')) if x['packing_state'] != None else '', 'picking_origin': str(x['picking_origin'].encode('ascii','ignore').decode('ascii')) if x['picking_origin'] != None else '', 'backorder': str(x['backorder'].encode('ascii','ignore').decode('ascii')) if x['backorder'] != None else '', 'location': str(x['location'].encode('ascii','ignore').decode('ascii')) if x['location'] != None else '', 'parent_location': str(x['parent_location'].encode('ascii','ignore').decode('ascii')) if x['parent_location'] != None else '', 'location_source': str(x['location_source'].encode('ascii','ignore').decode('ascii')) if x['location_source'] != None else '', 'parent_location_source': str(x['parent_location_source'].encode('ascii','ignore').decode('ascii')) if x['parent_location_source'] != None else '', 'status_rfs': str(x['status_rfs'].encode('ascii','ignore').decode('ascii')) if x['status_rfs'] != None else '', }, all_lines) for p in p_map: if p['id_ai'] not in map( lambda x: x.get('id_ai', None), picking_ids): picking_ids.append(p) packing_line = filter( lambda x: x['id_ai'] == p['id_ai'], all_lines) p.update({'lines': packing_line}) if packing_line[0]['object'] == 'pack': line = self.pool.get('dym.stock.packing.line').browse(cr, uid, packing_line[0]['pack_line_id']) elif packing_line[0]['object'] == 'move': if user_branch_type == 'HO': line = self.pool.get('stock.move').browse(cr, SUPERUSER_ID, packing_line[0]['pack_line_id']) else: line = self.pool.get('stock.move').browse(cr, uid, packing_line[0]['pack_line_id']) if line.product_id.categ_id: category = line.product_id.categ_id if division == 'Unit': while (category.parent_id and category.parent_id.bisnis_unit == False): category = category.parent_id else: flag = False while (category.parent_id and flag == False): category = category.parent_id if category.bisnis_unit == True: flag = True p.update({'categ_name': category.name}) report.update({'picking_ids': picking_ids}) reports = filter(lambda x: x.get('picking_ids'), reports) if not reports : reports = [{'picking_ids': [{ 'no': 0, 'branch_code': 'NO DATA FOUND', 'branch_name': 'NO DATA FOUND', 'branch_source': 'NO DATA FOUND', 'division': 'NO DATA FOUND', 'categ_name': 'NO DATA FOUND', 'internal_ref': 'NO DATA FOUND', 'picking_type_name': 'NO DATA FOUND', 'packing_name': 'NO DATA FOUND', 'packing_date': 'NO DATA 
FOUND',
                    'partner_code': 'NO DATA FOUND',
                    'partner_name': 'NO DATA FOUND',
                    'ekspedisi_code': 'NO DATA FOUND',
                    'ekspedisi_name': 'NO DATA FOUND',
                    'prod_tmpl': 'NO DATA FOUND',
                    'color': 'NO DATA FOUND',
                    'engine': 'NO DATA FOUND',
                    'chassis': 'NO DATA FOUND',
                    'tahun': 'NO DATA FOUND',
                    'location': 'NO DATA FOUND',
                    'parent_location': 'NO DATA FOUND',
                    'location_source': 'NO DATA FOUND',
                    'parent_location_source': 'NO DATA FOUND',
                    'status_rfs': 'NO DATA FOUND',
                    'qty': 0,
                    'packing_state': 'NO DATA FOUND',
                    'picking_origin': 'NO DATA FOUND',
                    'backorder': 'NO DATA FOUND',
                }],
                'title_short': 'Laporan Stock Movement',
                'type': 'receivable',
                'title': '',
                'division': division,
                'hutang_piutang_ksu': hutang_piutang_ksu,
            }]
        report_date = datetime_field.context_timestamp(
            cr, uid, datetime.now(), context
        ).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
        self.localcontext.update({
            'report_info': report_info,
            'report_date': report_date,
            'reports': reports,
        })
        objects = False
        super(dym_report_stock_movement_print, self).set_context(
            objects, data, ids, report_type)
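# --- Illustrative sketch, not part of the original report code. ---
# Every text column in the p_map lambdas above repeats the same expression:
# str(value.encode('ascii', 'ignore').decode('ascii')) when the value is not
# None, else ''.  A small helper keeps that ASCII-stripping rule in one
# place; the helper name is an assumption, and the code targets the Python 2
# runtime this module already relies on (unicode/basestring).
def _ascii_or_blank(value):
    if value is None:
        return ''
    if not isinstance(value, basestring):
        value = str(value)
    if isinstance(value, unicode):
        # drop any character that cannot be represented in ASCII
        value = value.encode('ascii', 'ignore')
    return value
# Usage sketch: 'branch_code': _ascii_or_blank(x['branch_code']),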
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context division = data['division'] start_date = data['start_date'] end_date = data['end_date'] trx_start_date = data['trx_start_date'] trx_end_date = data['trx_end_date'] status = data['status'] branch_ids = data['branch_ids'] partner_ids = data['partner_ids'] account_ids = data['account_ids'] journal_ids = data['journal_ids'] segmen = data['segmen'] branch_status = data['branch_status'] detail_pembayaran = data['detail_pembayaran'] where_end_datex = "av.date <= '%s' " % trx_end_date title_prefix = '' title_short_prefix = '' report_hutang = { 'detail_pembayaran': detail_pembayaran, 'type': 'payable', 'title': '', 'title_short': title_short_prefix + ', ' + _('Laporan Hutang') } #print "-----------", '"""+ str(start_date) + """' query_start = """SELECT distinct aml.id as id_aml, aml.division as division, rp.default_code as partner_code, rp.name as partner_name, a.code as account_code, '' as account_sap, aml.date as date_aml, aml.date_maturity as due_date, CURRENT_DATE - aml.date_maturity as overdue, aml.reconcile_id as status, aml.reconcile_partial_id as partial, aml.debit as debit, aml.credit as credit, aml.name as name, aml.ref as reference, j.name as journal_name, m.name as invoice_name, '' as branch_code, ai.supplier_invoice_number as supplier_invoice_number, ai.document_date as supplier_invoice_date, CASE WHEN aml.reconcile_id IS NOT NULL and aml4.account_id in (2533) and aml2.amount - aml4.amount < 0 then aml2.amount WHEN aml.reconcile_id IS NOT NULL and aml4.account_id in (2533) and aml2.amount - aml4.amount = 0 then aml2.amount - aml4.amount WHEN aml.reconcile_id IS NOT NULL then aml2.amount WHEN aml.reconcile_id IS NOT NULL and av.state is null THEN 0.0 WHEN aml.reconcile_partial_id IS NULL THEN aml.credit - aml.debit WHEN count_credit <> 0 THEN (aml3.credit - aml3.debit) / count_credit ELSE 0 END as residual, pt.name as payment_term, loa.branch as branch_x FROM account_move_line aml LEFT JOIN (SELECT aml2.reconcile_partial_id, (select count(aml4.credit) from account_move_line aml4 where aml2.reconcile_partial_id = aml4.reconcile_partial_id and aml4.credit > 0 and aml4.date <='""" + str( trx_end_date ) + """') as count_credit, SUM(aml2.debit) as debit, SUM(aml2.credit) as credit FROM account_move_line aml2 WHERE aml2.reconcile_partial_id is not Null and aml2.date <='""" + str( trx_end_date ) + """' GROUP BY aml2.reconcile_partial_id) aml3 on aml.reconcile_partial_id = aml3.reconcile_partial_id LEFT JOIN account_move m ON m.id = aml.move_id LEFT JOIN res_partner rp ON rp.id = aml.partner_id LEFT JOIN account_account a ON a.id = aml.account_id LEFT JOIN account_journal j ON j.id = aml.journal_id LEFT JOIN account_invoice ai ON ai.move_id = aml.move_id LEFT JOIN account_payment_term pt ON ai.payment_term = pt.id LEFT JOIN account_voucher_line avl on aml.id = avl.move_line_id LEFT JOIN account_voucher av ON avl.voucher_id = av.id LEFT JOIN (select sum(credit- debit) amount,reconcile_id from account_move_line where date <= '""" + str( trx_end_date ) + """' and state <> 'cancel' group by reconcile_id) aml2 on aml.reconcile_id = aml2.reconcile_id LEFT JOIN (select sum(debit- credit) amount,move_id,account_id from account_move_line where date <= '""" + str( trx_end_date ) + """' and state <> 'cancel' and account_id in (2533) group by move_id,account_id) aml4 on aml.move_id = aml4.move_id LEFT JOIN (select dl.name,aa.name branch from dym_loan dl left join account_analytic_account aa on 
aa.id = dl.analytic_3) loa on m.name = loa.name where right(m.name,10) <> '(Reversed)' AND m.state = 'posted'""" move_selection = "" report_info = _('') move_selection += "" query_end = "" if division: query_end += " AND aml.division = '%s'" % str(division) if start_date: query_end += " AND aml.date_maturity >= '%s'" % str(start_date) if end_date: query_end += " AND aml.date_maturity <= '%s'" % str(end_date) if trx_start_date: query_end += " AND aml.date >= '%s'" % str(trx_start_date) if trx_end_date: query_end += " AND aml.date <= '%s'" % str(trx_end_date) if status == 'reconciled': query_end += " AND aml.reconcile_id is not Null" elif status == 'outstanding': query_end += " AND aml.reconcile_id is Null" if partner_ids: query_end += " AND aml.partner_id in %s" % str( tuple(partner_ids)).replace(',)', ')') if account_ids: query_end += " AND aml.account_id in %s" % str( tuple(account_ids)).replace(',)', ')') if journal_ids: query_end += " AND aml.journal_id in %s" % str( tuple(journal_ids)).replace(',)', ')') reports = [report_hutang] query_order = "" add_line_x = "" print "=================", query_start + query_end + query_order for report in reports: cr.execute(query_start + query_end + query_order) all_lines = cr.dictfetchall() ids_aml = [] if all_lines: p_map = map( lambda x: { 'no': 0, 'id_aml': x['id_aml'] if x['id_aml'] != None else '', 'payment_term': x['payment_term'] if x['payment_term'] != None else '', 'branch_code': x['branch_code'] if x['branch_code'] != None else '', 'division': str(x['division'].encode('ascii', 'ignore').decode( 'ascii')) if x['division'] != None else '', 'partner_code': str(x['partner_code'].encode('ascii', 'ignore').decode( 'ascii')) if x['partner_code'] != None else '', 'partner_name': str(x['partner_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['partner_name'] != None else '', 'account_code': str(x['account_code'].encode('ascii', 'ignore').decode( 'ascii')) if x['account_code'] != None else '', 'invoice_name': str(x['invoice_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['invoice_name'] != None else '', 'name': str(x['name'].encode('ascii', 'ignore').decode('ascii') ) if x['name'] != None else '', 'supplier_invoice_date': str(x['supplier_invoice_date'].encode( 'ascii', 'ignore').decode('ascii')) if x['supplier_invoice_date'] != None else '', 'supplier_invoice_number': str(x['supplier_invoice_number'].encode( 'ascii', 'ignore').decode('ascii')) if x['supplier_invoice_number'] != None else '', 'date_aml': str(x['date_aml']) if x['date_aml'] != None else '', 'due_date': str(x['due_date']) if x['due_date'] != None else '', 'overdue': str(x['overdue']) if x['overdue'] != None and x[ 'residual'] != None and x['residual'] > 0 else '', 'status': 'Outstanding' if str(x['status']) == 'None' else 'Reconciled', 'tot_invoice': x['credit'] - x['debit'], #'saldo_awal': 0, 'amount_residual': x['residual'] if x['residual'] != None else False, 'belum_jatuh_tempo': (x['residual'] if x['residual'] != None else False) if x['overdue'] <= 0 or x['overdue' ] == None else False, 'overdue_1_30': (x['residual'] if x['residual'] != None else False) if x['overdue'] > 0 and x['overdue'] < 31 else False, 'overdue_31_60': (x['residual'] if x['residual'] != None else False) if x['overdue'] > 30 and x['overdue'] < 61 else False, 'overdue_61_90': (x['residual'] if x['residual'] != None else False) if x['overdue'] > 60 and x['overdue'] < 91 else False, 'overdue_91_n': (x['residual'] if x['residual'] != None else False) if x['overdue'] > 91 else False, 'reference': 
str(x['reference'].encode('ascii', 'ignore').decode( 'ascii')) if x['reference'] != None else '', 'branch_x': str(x['branch_x'].encode('ascii', 'ignore').decode( 'ascii')) if x['branch_x'] != None else '', 'journal_name': str(x['journal_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['journal_name'] != None else '', }, all_lines) for p in p_map: if p['id_aml'] not in map(lambda x: x.get('id_aml', None), ids_aml): account_move_lines = filter( lambda x: x['id_aml'] == p['id_aml'], all_lines) analytic_1 = '' analytic_2 = '' analytic_3 = '' analytic_4 = '' analytic_1_name = '' analytic_2_name = '' analytic_3_name = '' analytic_4_name = '' saldo_awal = 0 am = self.pool.get('account.move.line').browse( cr, uid, account_move_lines[0]['id_aml']) if am.debit > 0: continue analytic = am.analytic_account_id or '' saldo_awal = 0 branch_code = '' branch_name = '' branch = False branch_status_1 = '' branch_name = '' branch_code = '' if analytic: if analytic.type == 'normal': if analytic.segmen == 1 and analytic_1 == '': analytic_1_name = analytic.name analytic_1 = analytic.code if analytic.segmen == 2 and analytic_2 == '': analytic_2_name = analytic.name analytic_2 = analytic.code if analytic.segmen == 3 and analytic_3 == '': analytic_3_name = analytic.name analytic_3 = analytic.code branch = analytic.sudo().branch_id branch_name = branch.name branch_code = branch.code branch_status_1 = branch.branch_status branch_id = branch.id if analytic.segmen == 4 and analytic_4 == '': analytic_4_name = analytic.name analytic_4 = analytic.code analytic_id = analytic while (analytic.parent_id): analytic = analytic.parent_id if analytic.type == 'normal': if analytic.segmen == 1 and analytic_1 == '': analytic_1_name = analytic.name analytic_1 = analytic.code if analytic.segmen == 2 and analytic_2 == '': analytic_2_name = analytic.name analytic_2 = analytic.code if analytic.segmen == 3 and analytic_3 == '': analytic_3_name = analytic.name analytic_3 = analytic.code branch = analytic.sudo().branch_id branch_name = branch.name branch_code = branch.code branch_status_1 = branch.branch_status branch_id = branch.id if analytic.segmen == 4 and analytic_4 == '': analytic_4_name = analytic.name analytic_4 = analytic.code analytic_id == analytic if (branch and branch_ids and branch.id not in branch_ids) or ( branch and branch_status and branch_status != branch.branch_status): continue analytic_2_branch = analytic_2 if analytic_2 in ['210', '220', '230']: if branch_status_1 == 'H123': analytic_2_branch = analytic_2[:2] + '1' elif branch_status_1 == 'H23': analytic_2_branch = analytic_2[:2] + '2' else: analytic_2_branch = analytic_2 analytic_combination = analytic_1 + '/' + analytic_2_branch + '/' + analytic_3 + '/' + analytic_4 p.update({'lines': account_move_lines}) p.update({'analytic_1': analytic_1_name}) p.update({'analytic_2': analytic_2_name}) p.update({'analytic_3': analytic_3_name}) p.update({'analytic_4': analytic_4_name}) p.update({'branch_status': branch_status_1}) p.update({'branch_name': branch_name}) p.update({'branch_code': branch_code}) p.update( {'analytic_combination': analytic_combination}) acc_number = '' bank = '' an_rek = '' if am.partner_id.bank_ids: acc_number = am.partner_id.bank_ids[ 0].acc_number bank = am.partner_id.bank_ids[0].bank.name an_rek = am.partner_id.bank_ids[0].owner_name p.update({'acc_number': acc_number}) p.update({'bank': bank}) p.update({'an_rek': an_rek}) tgl_retur = '' no_retur = '' total_retur = 0 p.update({'tgl_retur': tgl_retur}) p.update({'no_retur': no_retur}) 
#p.update({'total_retur': total_retur}) if am.sudo().invoice: rb_ids = self.pool.get( 'dym.retur.beli').search( cr, uid, [('consolidate_id.invoice_id', '=', am.sudo().invoice.id), ('state', 'in', [ 'approved', 'except_picking', 'except_invoice', 'done' ])]) if rb_ids: rb = self.pool.get( 'dym.retur.beli').browse( cr, uid, rb_ids) tgl_retur = ', '.join(rb.mapped('date')) no_retur = ', '.join(rb.mapped('name')) total_retur = sum(x.amount_total for x in rb) if total_retur > 0: if p['tot_invoice'] > total_retur: p.update({'tot_invoice': p['tot_invoice']}) #p.update({'tot_invoice': p['tot_invoice'] - total_retur}) if p['overdue_1_30'] > total_retur: p.update({ 'overdue_1_30': p['overdue_1_30'] - total_retur }) if p['overdue_31_60'] > total_retur: p.update({ 'overdue_31_60': p['overdue_31_60'] - total_retur }) if p['overdue_61_90'] > total_retur: p.update({ 'overdue_61_90': p['overdue_61_90'] - total_retur }) if p['overdue_91_n'] > total_retur: p.update({ 'overdue_91_n': p['overdue_91_n'] - total_retur }) p.update({'total_retur': total_retur}) p.update({'pay_no': ''}) p.update({'pay_date': ''}) p.update({'pay_amount': 0}) p.update({'pay_pindahan': 0}) p.update({'pay_retur': 0}) p.update({'saldo_awal': 0}) #9999p.update({'branch_x': ''}) if "OPBAL" in p[ 'journal_name'] and p['tot_invoice'] > 0: p.update({'saldo_awal': p['tot_invoice']}) p.update({'tot_invoice': 0}) else: p.update({'saldo_awal': 0}) ids_aml.append(p) index = len(ids_aml) - 1 partial_lines = lines = self.pool.get( 'account.move.line').browse(cr, uid, []) if am.sudo().reconcile_id: lines |= am.sudo().reconcile_id.line_id elif am.sudo().reconcile_partial_id: lines |= am.sudo( ).reconcile_partial_id.line_partial_ids partial_lines += am payments = (lines - partial_lines).sorted().filtered( lambda r: r.debit > 0) if payments and detail_pembayaran == True: add_line = [] move_ids = [] for pay in payments: if pay.move_id.id in move_ids: continue move_ids.append(pay.move_id.id) voucher_id = self.pool.get( 'account.voucher').search( cr, uid, [('move_id', '=', pay.move_id.id)]) if voucher_id: bayar = 0 pindahan = 0 retur = 0 voucher = self.pool.get( 'account.voucher').browse( cr, uid, voucher_id) if voucher.amount != 0: if voucher.type == 'receipt': bayar += voucher.amount * -1 else: bayar += voucher.amount if voucher.amount == pay.debit: pay_amount_res = self.get_pay_array( cr, uid, voucher.number, voucher.date, 1 * voucher.amount, 0, 0, account_move_lines) else: pay_amount_res = self.get_pay_array( cr, uid, voucher.number, voucher.date, 1 * pay.debit, 0, 0, account_move_lines) add_line.append(pay_amount_res) for voucher_line in voucher.line_ids.filtered( lambda r: r.type == 'cr' and r. amount > 0): rb_name = voucher_line.move_line_id.sudo( ).invoice.origin or '' retur_beli_id = self.pool.get( 'dym.retur.beli').search( cr, uid, [('name', 'in', rb_name.split(' '))]) if not retur_beli_id: pindahan += voucher_line.amount pay_amount_res = self.get_pay_array( cr, uid, voucher_line. 
move_line_id.move_id.name, voucher.date, voucher_line.amount, 0, 0, account_move_lines) add_line.append(pay_amount_res) else: if pay.move_id.model == 'dym.loan': pay_amount_res = self.get_pay_array( cr, uid, pay.move_id.name, pay.date, 0, pay.debit, 0, account_move_lines) add_line.append(pay_amount_res) else: pay_amount_res = self.get_pay_array( cr, uid, pay.move_id.name, pay.date, pay.debit, 0, 0, account_move_lines) add_line.append(pay_amount_res) #print 'vvvvvvvvvvvv',p['reference'] if p['reference'][:3] == 'LOA': ref = p['reference'] + ' (Reversed)' acc = str( tuple(account_ids)).replace( ',)', ')') move_a = self.pool.get( 'account.move.line').search( cr, uid, [('ref', '=', ref), ('debit', '>', 0)], limit=1) move_x = self.pool.get( 'account.move.line').browse( cr, uid, move_a) #print 'ddddddddddddd', move_x.id,ref,acc if move_x.id: pay_amount_res = self.get_pay_array( cr, uid, move_x.ref, move_x.date, -1 * move_x.debit, 0, 0, account_move_lines) add_line.append(pay_amount_res) move_a = self.pool.get( 'account.move').search( cr, uid, [('reverse_from_id', '=', pay.move_id.id)]) move_x = self.pool.get( 'account.move').browse( cr, uid, move_a) #print pay.move_id.id, move_x.ids if move_x.id: move_rec = self.pool.get( 'account.move.line').search( cr, uid, [('move_id', '=', move_x.id)]) move_y = self.pool.get( 'account.move.line').browse( cr, uid, move_rec) for moves in move_y: #print moves.name,'eeeeeeeeeeeee' if moves.credit > 0: #print moves.analytic_account_id.id, 'zzzz', pay.analytic_account_id.id if moves.reconcile_id.id != pay.reconcile_id.id and moves.analytic_account_id.id == pay.analytic_account_id.id: pay_amount_res = self.get_pay_array( cr, uid, move_x.name, move_x.date, -1 * pay.debit, 0, 0, account_move_lines) add_line.append( pay_amount_res) move_rec2 = self.pool.get( 'account.move.line' ).search( cr, uid, [('reconcile_id', '=', moves. 
reconcile_id.id), ('credit', '>', 0) ], limit=1, order='id desc') move_y2 = self.pool.get( 'account.move.line' ).browse( cr, uid, move_rec2) pay_amount_res = self.get_pay_array( cr, uid, move_y2.ref, move_y2.date, pay.debit, 0, 0, account_move_lines) add_line.append( pay_amount_res) if add_line: if str(trx_end_date) >= pay.date: ids_aml += add_line report.update({'ids_aml': ids_aml}) reports = filter(lambda x: x.get('ids_aml'), reports) if not reports: reports = [{ 'title_short': 'Laporan Hutang', 'type': 'payable', 'ids_aml': [{ 'reference': 'NO DATA FOUND', 'acc_number': 'NO DATA FOUND', 'tgl_retur': 'NO DATA FOUND', 'no_retur': 'NO DATA FOUND', 'payment_term': 'NO DATA FOUND', 'total_retur': 0, 'bank': 'NO DATA FOUND', 'an_rek': 'NO DATA FOUND', 'supplier_invoice_number': 'NO DATA FOUND', 'supplier_invoice_date': 'NO DATA FOUND', 'tot_invoice': 0, 'saldo_awal': 0, 'date_aml': 'NO DATA FOUND', 'partner_code': 'NO DATA FOUND', 'no': 0, 'branch_code': 'NO DATA FOUND', 'branch_name': 'NO DATA FOUND', 'amount_residual': 0, 'journal_name': 'NO DATA FOUND', 'status': 'NO DATA FOUND', 'division': 'NO DATA FOUND', 'belum_jatuh_tempo': 0, 'id_aml': 'NO DATA FOUND', 'due_date': 'NO DATA FOUND', 'overdue_31_60': 0, 'partner_name': 'NO DATA FOUND', 'overdue_1_30': 0, 'overdue_61_90': 0, 'overdue_91_n': 0, 'invoice_name': 0, 'pay_no': 'NO DATA FOUND', 'pay_date': 'NO DATA FOUND', 'pay_amount': 0, 'pay_pindahan': 0, 'pay_retur': 0, 'name': 'NO DATA FOUND', 'account_code': 0, 'analytic_1': 'NO DATA FOUND', 'analytic_2': 'NO DATA FOUND', 'analytic_3': 'NO DATA FOUND', 'analytic_4': 'NO DATA FOUND', 'analytic_combination': 'NO DATA FOUND', 'branch_status': 'NO DATA FOUND', 'overdue': 'NO DATA FOUND', 'branch_x': 'NO DATA FOUND' }], 'title': '', 'detail_pembayaran': detail_pembayaran }] report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) objects = False super(dym_report_hutang_print, self).set_context(objects, data, ids, report_type)
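# dym_cash_non_status_report_print.set_context: builds the journal/account
# filter from the chosen option (All Non Petty Cash, Cash, EDC, Bank, Petty
# Cash), limits branches to the wizard selection or the user's own branches,
# shifts am.create_date by the user's timezone offset, and maps the SQL rows
# into 'move_lines' (tunai / bank_check / edc / total) for the XLS renderer.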
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context branch_ids = data['branch_ids'] journal_ids = data['journal_ids'] status = data['status'] start_date = data['start_date'] end_date = data['end_date'] option = data['option'] title_prefix = '' title_short_prefix = '' report_cash_non_status = { 'type': 'Cash_non_status', 'title': '', 'title_short': _('laporan'), 'start_date': start_date, 'end_date': end_date, 'option': option } where_option = " 1=1 " where_type = " 1=1 " if option == 'All Non Petty Cash': where_option = " j.type in ('bank','cash','edc','situation') " where_type = " a.type = 'liquidity' " elif option == 'Cash': where_option = " j.type in ('cash','situation') " where_type = " a.type = 'liquidity' " elif option == 'EDC': where_option = " j.type in ('edc','situation') " where_type = " a.type = 'receivable' " elif option == 'Bank': where_option = " j.type in ('bank','situation') " where_type = " a.type = 'liquidity' " elif option == 'Petty Cash': where_option = " j.type in ('pettycash','situation') " where_type = " a.type = 'liquidity' " where_branch = " 1=1 " if branch_ids: where_branch = " b.id in %s " % str(tuple(branch_ids)).replace( ',)', ')') else: area_user = self.pool.get('res.users').browse(cr, uid, uid).branch_ids branch_ids_user = [b.id for b in area_user] where_branch = " b.id in %s " % str(tuple(branch_ids_user)) where_journal = " 1=1 " if journal_ids: where_journal = " j.id in %s " % str(tuple(journal_ids)).replace( ',)', ')') where_status = " 1=1 " if status == 'outstanding': where_status = " aml.reconcile_id is Null " elif status == 'reconcile': where_status = " aml.reconcile_id is not Null " where_start_date = " 1=1 " where_end_date = " 1=1 " if start_date: where_start_date = " aml.date >= '%s' " % start_date if end_date: where_end_date = " aml.date <= '%s' " % end_date user = self.pool.get('res.users').browse(cr, uid, uid) timezone = user.tz or 'Asia/Jakarta' if timezone == 'Asia/Jayapura': tz = '9' elif timezone == 'Asia/Pontianak': tz = '8' else: tz = '7' query_cash_non_status = "SELECT aml.date as tanggal, b.code as branch_code, to_char(am.create_date + interval '"+tz+" hours', 'HH12:MI AM') as Jam, "\ "k.name as kwitansi_name, a.code as account_code, aml.name as keterangan, aml.debit - aml.credit as balance, "\ "j.type as journal_type, am.name as scr "\ "FROM account_move_line aml "\ "LEFT JOIN account_move am ON am.id = aml.move_id "\ "LEFT JOIN account_journal j ON j.id = aml.journal_id "\ "LEFT JOIN account_account a ON a.id = aml.account_id "\ "LEFT JOIN dym_register_kwitansi_line k ON k.id = aml.kwitansi_id "\ "LEFT JOIN dym_branch b ON b.id = aml.branch_id "\ "WHERE "+where_branch+" AND "+where_type+" AND "+where_journal+" AND "+where_status+" AND "+where_start_date+" AND "+where_end_date+" AND "+where_option+" "\ "ORDER BY b.code, aml.date " move_selection = "" report_info = _('') move_selection += "" reports = [report_cash_non_status] for report in reports: cr.execute(query_cash_non_status) all_lines = cr.dictfetchall() move_lines = [] if all_lines: p_map = map( lambda x: { 'no': 0, 'branch_code': x['branch_code'], 'tanggal': x['tanggal'], 'jam': x['jam'], 'kwitansi_name': x['kwitansi_name'], 'account_code': x['account_code'], 'keterangan': x['keterangan'].encode('ascii', 'ignore').decode( 'ascii') if x['keterangan'] != None else '', 'tunai': x['balance'] if x['journal_type'] == 'cash' else 0.0, 'bank_check': x['balance'] if x['journal_type'] == 'bank' else 0.0, 'edc': x['balance'] if 
x['journal_type'] == 'edc' else 0.0, 'total': x['balance'], 'move_name': x['scr'], }, all_lines) report.update({'move_lines': p_map}) reports = filter(lambda x: x.get('move_lines'), reports) if not reports: reports = [{ 'title_short': 'laporan', 'type': 'Cash_non_status', 'start_date': start_date, 'end_date': end_date, 'option': option, 'move_lines': [{ 'no': 0, 'branch_code': 'NO DATA FOUND', 'tanggal': 'NO DATA FOUND', 'jam': 'NO DATA FOUND', 'kwitansi_name': 'NO DATA FOUND', 'account_code': 0, 'keterangan': 'NO DATA FOUND', 'tunai': 0.0, 'bank_check': 0.0, 'edc': 0.0, 'total': 0.0, 'move_name': 'NO DATA FOUND', }], 'title': '' }] report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) objects = False super(dym_cash_non_status_report_print, self).set_context(objects, data, ids, report_type)
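# dym_report_penjualantax_print.set_context: joins dealer_sale_order with its
# lines, aggregated discount lines, picking, invoice, analytic and partner
# data; maps each SQL row to a report dict, resolves the branch analytic
# combination per order, attaches voucher/invoice payment details, then splits
# orders holding several lots into one row per lot ('tampung').
# FIXME: the proposal filter is written to `where_proposal_id_id` (doubled
# suffix) while the query concatenates `where_proposal_id`, so selecting a
# proposal currently has no effect.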
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context section_id = data['section_id'] user_id = data['user_id'] product_ids = data['product_ids'] start_date = data['start_date'] end_date = data['end_date'] partner_komisi_id = data['partner_komisi_id'] proposal_id = data['proposal_id'] hutang_komisi_id = data['hutang_komisi_id'] state = data['state'] branch_ids = data['branch_ids'] finco_ids = data['finco_ids'] segmen = data['segmen'] branch_status = data['branch_status'] title_short_prefix = '' report_penjualantax = { 'type': '', 'title': '', 'title_short': title_short_prefix + ', ' + _('Laporan Penjualan Tax') } where_section_id = " 1=1 " if section_id: where_section_id = " dso.section_id = '%s'" % str(section_id) where_user_id = " 1=1 " if user_id: where_user_id = " dso.employee_id = '%s'" % str(user_id) where_partner_komisi_id = " 1=1 " if partner_komisi_id: where_partner_komisi_id = " dso.partner_komisi_id = '%s'" % str( partner_komisi_id) where_hutang_komisi_id = " 1=1 " if hutang_komisi_id: where_hutang_komisi_id = " dso.hutang_komisi_id = '%s'" % str( hutang_komisi_id) where_proposal_id = " 1=1 " if proposal_id: where_proposal_id_id = " dso.proposal_id = '%s'" % str(proposal_id) where_product_ids = " 1=1 " if product_ids: where_product_ids = " dsol.product_id in %s" % str( tuple(product_ids)).replace(',)', ')') where_start_date = " 1=1 " if start_date: where_start_date = " dso.date_order >= '%s'" % str(start_date) where_end_date = " 1=1 " if end_date: where_end_date = " dso.date_order <= '%s'" % str(end_date) where_state = " 1=1 " if state in ['progress', 'done']: where_state = " dso.state = '%s'" % str(state) else: where_state = " dso.state in ('progress','done')" where_branch_ids = " 1=1 " if branch_ids: where_branch_ids = " dso.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') where_finco_ids = " 1=1 " if finco_ids: where_finco_ids = " dso.finco_id in %s" % str( tuple(finco_ids)).replace(',)', ')') query_penjualantax = "select dso.id as id_dso, " \ "COALESCE(drsl.name, '') as no_registrasi, " \ "round(COALESCE(dsol.price_subtotal,0) + round(dsol.price_subtotal*0.1,2) + COALESCE(dsol.price_bbn,0)) as piutang_total, " \ "round(COALESCE(dsol.price_subtotal,0) + round(dsol.price_subtotal*0.1,2)) as total, " \ "COALESCE(b.branch_status,'') as branch_status, " \ "COALESCE(b.code,'') as branch_code, " \ "COALESCE(spk.name,'') as spk_name, " \ "COALESCE(dcp.name,'') as cabang_partner, " \ "COALESCE(b.name,'') as branch_name, " \ "COALESCE(md.default_code,'') as md_code, " \ "COALESCE(dso.name,'') as name, " \ "CASE WHEN dso.state = 'progress' THEN 'Sales Memo' " \ " WHEN dso.state = 'done' THEN 'Done' " \ " WHEN dso.state IS NULL THEN '' " \ " ELSE dso.state " \ "END as state, " \ "dso.date_order as date_order, " \ "COALESCE(finco.name,'Cash') as finco_code, " \ "CASE WHEN dso.is_cod = TRUE THEN 'COD' " \ " ELSE 'Reguler' " \ "END as is_cod, " \ "sm.id as stock_move_non_unit_id, " \ "COALESCE(sales_koor.name,'') as sales_koor_name, " \ "COALESCE(sales.name,'') as sales_name, " \ "COALESCE(job.name,'') as job_name, " \ "COALESCE(cust.default_code,'') as cust_code, " \ "COALESCE(cust.name,'') as cust_name, " \ "COALESCE(cust.npwp,'Non PKP') as pkp, " \ "COALESCE(product.name_template,'') as product_name, COALESCE(pav.code,'') as pav_code, COALESCE(dsol.product_qty,0) as product_qty, " \ "COALESCE(lot.name,'') as lot_name, COALESCE(lot.chassis_no,'') as lot_chassis, " \ "COALESCE(dsol.price_unit,0) as price_unit, " \ 
"COALESCE(dsol.discount_po,0) as discount_po, COALESCE(dsol_disc.ps_dealer,0) as ps_dealer, COALESCE(dsol_disc.ps_ahm,0) as ps_ahm, COALESCE(dsol_disc.ps_md,0) as ps_md, COALESCE(dsol_disc.ps_finco,0) as ps_finco, " \ "COALESCE(dsol_disc.ps_dealer,0)+COALESCE(dsol_disc.ps_ahm,0)+COALESCE(dsol_disc.ps_md,0)+COALESCE(dsol_disc.ps_finco,0) as ps_total, " \ "COALESCE(dsol.price_unit/1.1,0) as sales, " \ "COALESCE(dsol.discount_po/1.1,0) as disc_reg, COALESCE(dsol_disc.discount_pelanggan/1.1,0) as disc_quo, COALESCE(dsol_disc.discount_pelanggan) as disc_quo_incl_tax, " \ "COALESCE(dsol.discount_po/1.1,0)+COALESCE(dsol_disc.discount_pelanggan/1.1,0) as disc_total, " \ "COALESCE(dsol.price_subtotal,0) as price_subtotal, round(dsol.price_subtotal*0.1,2) as PPN, COALESCE(dsol.force_cogs,0) as force_cogs, " \ "COALESCE(dso.customer_dp,0) as piutang_dp, " \ "CASE WHEN finco.name = 'PT. ADIRA DINAMIKA MULTI FINANCE TBK.' THEN round((COALESCE(dsol.price_unit,0) + COALESCE(dsol.price_bbn,0) - COALESCE(dsol.discount_po,0))) - COALESCE(dso.customer_dp,0) " \ " ELSE round(COALESCE(dsol.price_unit,0) + COALESCE(dsol.price_bbn,0) - COALESCE(dsol.discount_po,0) - COALESCE(dsol_disc.discount_pelanggan,0)) - COALESCE(dso.customer_dp,0) " \ "END as piutang, " \ "COALESCE(dsol.price_subtotal,0)-COALESCE(dsol.force_cogs,0) as gp_dpp_minus_hpp, " \ "COALESCE(dsol.price_subtotal,0)-COALESCE(dsol.force_cogs,0)+COALESCE(dsol_disc.ps_ahm,0)+COALESCE(dsol_disc.ps_md,0)+COALESCE(dsol_disc.ps_finco,0) as gp_unit, " \ "COALESCE(dsol.price_bbn,0) as price_bbn, COALESCE(dsol.price_bbn_beli,0) as price_bbn_beli, COALESCE(dsol.price_bbn,0)-COALESCE(dsol.price_bbn_beli,0) as gp_bbn, " \ "(COALESCE(dsol.price_subtotal,0)-COALESCE(dsol.force_cogs,0)+COALESCE(dsol_disc.ps_ahm,0)+COALESCE(dsol_disc.ps_md,0)+COALESCE(dsol_disc.ps_finco,0))+(COALESCE(dsol.price_bbn,0)-COALESCE(dsol.price_bbn_beli,0)) as gp_total, " \ "0 as pph_komisi, " \ "COALESCE(dsol.amount_hutang_komisi,0) as amount_hutang_komisi, " \ "COALESCE(dsol.insentif_finco/1.1,0) as insentif_finco, insentif_finco as dpp_insentif_finco, " \ "COALESCE(dsol.discount_po/1.1,0)+COALESCE(dsol_disc.ps_dealer/1.1,0)+COALESCE(amount_hutang_komisi,0) as beban_cabang, " \ "COALESCE(prod_category.name,'') as categ_name, " \ "COALESCE(prod_category2.name,'') as categ2_name, " \ "COALESCE(prod_template.series,'') as prod_series, " \ "COALESCE(fp.name,'') as faktur_pajak, " \ "COALESCE(medi.name,'') as partner_komisi_id, " \ "COALESCE(hk.name,'') as hutang_komisi_id, " \ "CONCAT(pro.number, ' ', pro.name) as proposal_id, " \ "CASE WHEN sp.state = 'draft' THEN 'Draft' " \ " WHEN sp.state = 'cancel' THEN 'Cancelled' " \ " WHEN sp.state = 'waiting' THEN 'Waiting Another Operation' " \ " WHEN sp.state = 'confirmed' THEN 'Waiting Availability' " \ " WHEN sp.state = 'partially_available' THEN 'Partially Available' " \ " WHEN sp.state = 'assigned' THEN 'Ready to Transfer' " \ " WHEN sp.state = 'done' THEN 'Transferred' " \ " ELSE sp.state " \ "END as state_picking, " \ "sp.name as oos_number " \ "from dealer_sale_order dso " \ "inner join dealer_spk spk on spk.dealer_sale_order_id = dso.id " \ "inner join dealer_sale_order_line dsol on dsol.dealer_sale_order_line_id = dso.id " \ "left join stock_picking sp on sp.origin = dso.name " \ "left join stock_move sm ON sm.dealer_sale_order_line_id = dsol.id and sm.state not in ('done','cancel','draft') and sm.product_id != dsol.product_id " \ "left join dym_hutang_komisi hk ON dsol.hutang_komisi_id = hk.id " \ "left join res_partner medi ON 
dso.partner_komisi_id = medi.id " \ "left join dym_proposal_event pro ON dso.proposal_id = pro.id " \ "left join dym_branch b ON dso.branch_id = b.id " \ "left join res_partner md ON b.default_supplier_id = md.id " \ "left join res_partner finco ON dso.finco_id = finco.id " \ "left join hr_employee employee ON dso.employee_id = employee.id " \ "left join resource_resource sales ON employee.resource_id = sales.id " \ "left join hr_job job ON employee.job_id = job.id " \ "left join crm_case_section sales_team ON dso.section_id = sales_team.id " \ "left join hr_employee sales_leader ON sales_leader.id = sales_team.user_id " \ "left join resource_resource sales_koor ON sales_leader.resource_id = sales_koor.id " \ "left join res_partner cust ON dso.partner_id = cust.id " \ "left join product_product product ON dsol.product_id = product.id " \ "left join product_attribute_value_product_product_rel pavpp ON product.id = pavpp.prod_id " \ "left join product_attribute_value pav ON pavpp.att_id = pav.id " \ "left join product_template prod_template ON product.product_tmpl_id = prod_template.id " \ "left join product_category prod_category ON prod_template.categ_id = prod_category.id " \ "left join product_category prod_category2 ON prod_category.parent_id = prod_category2.id " \ "left join stock_production_lot lot ON dsol.lot_id = lot.id " \ "left join dym_faktur_pajak_out fp ON dso.faktur_pajak_id = fp.id " \ "left join dym_cabang_partner dcp ON dcp.id = spk.partner_cabang " \ "left join ( " \ "select dealer_sale_order_line_discount_line_id, sum(ps_finco) as ps_finco, sum(ps_ahm) as ps_ahm, sum(ps_md) as ps_md, sum(ps_dealer) as ps_dealer, sum(ps_others) as ps_others, " \ "sum(discount) as discount, sum(discount_pelanggan) as discount_pelanggan " \ "from dealer_sale_order_line_discount_line " \ "group by dealer_sale_order_line_discount_line_id " \ ") dsol_disc ON dsol_disc.dealer_sale_order_line_discount_line_id = dsol.id " \ "left join dealer_register_spk_line drsl ON drsl.id = dso.register_spk_id " \ "WHERE " + where_section_id + " AND " + where_user_id + " AND " + where_product_ids + " AND " + where_start_date + " AND " + where_partner_komisi_id + " AND " + where_hutang_komisi_id + " AND " +where_proposal_id + " AND " + where_end_date + " AND " + where_state + " AND " + where_branch_ids + " AND " + where_finco_ids + " " \ "order by b.code, dso.date_order" move_selection = "" report_info = _('') move_selection += "" reports = [report_penjualantax] for report in reports: cr.execute(query_penjualantax) all_lines = cr.dictfetchall() dso_ids = [] if all_lines: def lines_map(x): x.update({'docname': x['branch_code']}) map(lines_map, all_lines) for cnt in range(len(all_lines) - 1): if all_lines[cnt]['id_dso'] != all_lines[cnt + 1]['id_dso']: all_lines[cnt]['draw_line'] = 1 else: all_lines[cnt]['draw_line'] = 0 all_lines[-1]['draw_line'] = 1 p_map = map( lambda x: { 'no': 0, 'id_dso': x['id_dso'], 'state_ksu': 'Undelivered' if x['state_picking'] == None else 'Delivered', 'state_picking': x['state_picking'], 'oos_number': x['oos_number'], 'no_registrasi': str(x['no_registrasi'].encode('ascii', 'ignore'). decode('ascii')) if x['no_registrasi'] != None else '', 'branch_status': str(x['branch_status'].encode('ascii', 'ignore'). 
decode('ascii')) if x['branch_status'] != None else '', 'branch_code': str(x['branch_code'].encode('ascii', 'ignore').decode( 'ascii')) if x['branch_code'] != None else '', 'branch_name': str(x['branch_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['branch_name'] != None else '', 'md_code': str(x['md_code'].encode('ascii', 'ignore').decode( 'ascii')) if x['md_code'] != None else '', 'spk_name': str(x['spk_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['spk_name'] != None else '', 'name': str(x['name'].encode('ascii', 'ignore').decode('ascii') ) if x['name'] != None else '', 'state': str(x['state'].encode('ascii', 'ignore').decode( 'ascii')) if x['state'] != None else '', 'date_order': str(x['date_order']) if x['date_order'] != None else False, 'finco_code': str(x['finco_code'].encode('ascii', 'ignore').decode( 'ascii')) if x['finco_code'] != None else '', 'is_cod': str(x['is_cod'].encode('ascii', 'ignore').decode( 'ascii')) if x['is_cod'] != None else '', 'sales_koor_name': str(x['sales_koor_name'].encode('ascii', 'ignore'). decode('ascii')) if x['sales_koor_name'] != None else '', 'sales_name': str(x['sales_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['sales_name'] != None else '', 'job_name': str(x['job_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['job_name'] != None else '', 'cust_code': str(x['cust_code'].encode('ascii', 'ignore').decode( 'ascii')) if x['cust_code'] != None else '', 'cust_name': str(x['cust_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['cust_name'] != None else '', 'pkp': str(x['pkp'].encode('ascii', 'ignore').decode('ascii')) if x['pkp'] != None else 'Non PKP', 'product_name': str(x['product_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['product_name'] != None else '', 'pav_code': str(x['pav_code'].encode('ascii', 'ignore').decode( 'ascii')) if x['pav_code'] != None else '', 'product_qty': x['product_qty'], 'lot_name': str(x['lot_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['lot_name'] != None else '', 'lot_chassis': str(x['lot_chassis'].encode('ascii', 'ignore').decode( 'ascii')) if x['lot_chassis'] != None else '', 'price_unit': x['price_unit'], 'discount_po': x['discount_po'], 'ps_dealer': x['ps_dealer'], 'ps_ahm': x['ps_ahm'], 'ps_md': x['ps_md'], 'cabang_partner': x['cabang_partner'], 'ps_finco': x['ps_finco'], 'ps_total': x['ps_total'], 'sales': x['sales'] if x['sales'] != 0 else 0, 'disc_reg': x['disc_reg'] if x['disc_reg'] != 0 else 0, 'disc_quo': x['disc_quo'] if x['disc_quo'] != 0 else 0, 'disc_quo_incl_tax': x['disc_quo_incl_tax'] if x['disc_quo_incl_tax'] != 0 else 0, 'disc_total': x['disc_total'] if x['disc_total'] != 0 else 0, 'price_subtotal': x['price_subtotal'], 'piutang_dp': x['piutang_dp'], 'piutang': x['piutang'], 'piutang_total': x['piutang_total'], 'PPN': x['ppn'], 'total': x['total'], 'force_cogs': x['force_cogs'], 'gp_dpp_minus_hpp': x['gp_dpp_minus_hpp'], 'gp_unit': x['gp_unit'], 'amount_hutang_komisi': x['amount_hutang_komisi'], 'dpp_insentif_finco': x['dpp_insentif_finco'] if x['dpp_insentif_finco'] != 0 else 0, 'price_bbn': x['price_bbn'], 'price_bbn_beli': x['price_bbn_beli'], 'pph_komisi': x['amount_hutang_komisi'] * 3 / 100, 'gp_bbn': x['gp_bbn'], 'gp_total': x['gp_total'], 'beban_cabang': x['beban_cabang'], 'categ_name': str(x['categ_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['categ_name'] != None else '', 'categ2_name': str(x['categ2_name'].encode('ascii', 'ignore').decode( 'ascii')) if x['categ2_name'] != None else '', 'prod_series': 
str(x['prod_series'].encode('ascii', 'ignore').decode( 'ascii')) if x['prod_series'] != None else '', 'faktur_pajak': str(x['faktur_pajak'].encode('ascii', 'ignore').decode( 'ascii')) if x['faktur_pajak'] != None else '', 'partner_komisi_id': x['partner_komisi_id'], 'hutang_komisi_id': x['hutang_komisi_id'], 'proposal_id': x['proposal_id'] }, all_lines) for p in p_map: if p['id_dso'] not in map(lambda x: x.get('id_dso', None), dso_ids): records = filter(lambda x: x['id_dso'] == p['id_dso'], all_lines) dso = self.pool.get('dealer.sale.order').browse( cr, uid, records[0]['id_dso']) analytic_1_id, analytic_2_id, analytic_3_id, analytic_4_id = self.pool.get( 'account.analytic.account').get_analytical( cr, uid, dso.branch_id, 'Unit', False, 4, 'Sales') analytic_1_name = self.pool.get( 'account.analytic.account').browse( cr, uid, analytic_1_id).name or '' analytic_1 = self.pool.get( 'account.analytic.account').browse( cr, uid, analytic_1_id).code or '' analytic_2_name = self.pool.get( 'account.analytic.account').browse( cr, uid, analytic_2_id).name or '' analytic_2 = self.pool.get( 'account.analytic.account').browse( cr, uid, analytic_2_id).code or '' analytic_3_name = self.pool.get( 'account.analytic.account').browse( cr, uid, analytic_3_id).name or '' analytic_3 = self.pool.get( 'account.analytic.account').browse( cr, uid, analytic_3_id).code or '' branch = dso.branch_id branch_name = dso.branch_id.name branch_status_1 = dso.branch_id.branch_status branch_id = dso.branch_id.id analytic_4_name = self.pool.get( 'account.analytic.account').browse( cr, uid, analytic_4_id).name or '' analytic_4 = self.pool.get( 'account.analytic.account').browse( cr, uid, analytic_4_id).code or '' if (branch and branch_ids and branch.id not in branch_ids) or ( branch and branch_status and branch_status != branch.branch_status): continue analytic_2_branch = analytic_2 if analytic_2 in ['210', '220', '230']: if branch_status_1 == 'H123': analytic_2_branch = analytic_2[:2] + '1' elif branch_status_1 == 'H23': analytic_2_branch = analytic_2[:2] + '2' else: analytic_2_branch = analytic_2 analytic_combination = analytic_1 + '/' + analytic_2_branch + '/' + analytic_3 + '/' + analytic_4 p.update({'lines': records}) p.update({'analytic_1': analytic_1_name}) p.update({'analytic_2': analytic_2_name}) p.update({'analytic_3': analytic_3_name}) p.update({'analytic_4': analytic_4_name}) p.update({'branch_id': branch_id}) p.update({'branch_status': branch_status_1}) p.update({'branch_name': branch_name}) p.update( {'analytic_combination': analytic_combination}) p.update({'or_name': ''}) #p.update({'or_amount': '0'}) p.update({'invoice_number': ''}) p.update({'invoice_date': ''}) p.update({'invoice_status': ''}) p.update({'ar_days': '0'}) p.update({'tgl_lunas': ''}) voucher_ids = self.pool.get('account.voucher').search( cr, uid, [ '|', ('name', 'ilike', dso.name), ('reference', 'ilike', dso.name) ]) if voucher_ids: vouchers = self.pool.get('account.voucher').browse( cr, uid, voucher_ids) p.update({ 'or_name': ', '.join(vouchers.mapped('number')) }) #p.update({'or_amount': ', '.join([str(i) for i in vouchers.mapped('amount')])}) invoice_ids = self.pool.get('account.invoice').search( cr, uid, [('origin', 'ilike', dso.name), ('type', '=', 'out_invoice')], limit=1) if invoice_ids: invoices = self.pool.get('account.invoice').browse( cr, uid, invoice_ids) p.update({ 'invoice_number': ', '.join(invoices.mapped('internal_number')) }) p.update({ 'invoice_date': invoices.date_invoice and ', '.join(invoices.mapped('date_invoice')) or None }) 
p.update({'invoice_status': invoices.state}) if invoices.state == 'cancel': p.update({ 'product_qty': 0, 'price_unit': 0, 'discount_po': 0, 'ps_dealer': 0, 'ps_ahm': 0, 'ps_md': 0, 'ps_finco': 0, 'ps_total': 0, 'sales': 0, 'disc_reg': 0, 'disc_quo': 0, 'disc_quo_incl_tax': 0, 'disc_total': 0, 'piutang_dp': 0, 'piutang': 0, 'piutang_total': 0, 'total': 0, 'price_subtotal': 0, 'PPN': 0, 'force_cogs': 0, 'gp_dpp_minus_hpp': 0, 'gp_unit': 0, 'amount_hutang_komisi': 0, 'dpp_insentif_finco': 0, 'price_bbn': 0, 'price_bbn_beli': 0, 'gp_bbn': 0, 'gp_total': 0, 'beban_cabang': 0, 'pph_komisi': 0, }) inv_cust = invoices if len(invoices) > 1: inv_cust = invoices.filtered( lambda r: r.tipe == 'customer') if inv_cust and inv_cust[ 0].state == 'paid' and inv_cust[ 0].payment_ids: paid_date = inv_cust[0].payment_ids.sorted( key=lambda r: r.date)[ len(inv_cust[0].payment_ids) - 1].date paid_date = datetime.strptime( paid_date, '%Y-%m-%d') paid_date = datetime.date(paid_date) str_paid_date = paid_date.strftime('%Y-%m-%d') else: paid_date = date.today() str_paid_date = '' inv_date = inv_cust[0].date_invoice if inv_date: inv_date = datetime.strptime( inv_date, '%Y-%m-%d') inv_date = datetime.date(inv_date) date_diff = int((paid_date - inv_date).days) p.update({'ar_days': str(date_diff)}) p.update({'tgl_lunas': str_paid_date}) else: p.update({'ar_days': None}) p.update({'tgl_lunas': None}) dso_ids.append(p) dso_ids = sorted(dso_ids, key=lambda k: k['invoice_number']) report.update({'dso_ids': dso_ids}) reports = filter(lambda x: x.get('dso_ids'), reports) #print reports[0]['dso_ids'][0] tampung = [] for x in reports: for y in x['dso_ids']: if len(y['lines']) == 1: tampung.append(y) else: temp_str = '' temp = y for z in y['lines']: if z['lot_name'] != temp_str: temp_str = z['lot_name'] test_tampung = { 'discount_po': z['discount_po'], 'no_registrasi': temp['no_registrasi'], 'pkp': temp['pkp'], 'price_unit': z['price_unit'], 'ps_md': temp['ps_md'], 'id_dso': temp['id_dso'], 'cabang_partner': z['cabang_partner'], 'product_qty': temp['product_qty'], 'piutang_total': z['piutang_total'], 'state_picking': temp['state_picking'], 'oos_number': temp['oos_number'], 'total': z['total'], 'tgl_lunas': temp['tgl_lunas'], 'branch_id': temp['branch_id'], 'analytic_4': temp['analytic_4'], 'analytic_2': temp['analytic_2'], 'analytic_3': temp['analytic_3'], 'analytic_1': temp['analytic_1'], 'piutang_dp': temp['piutang_dp'], 'job_name': temp['job_name'], 'disc_quo': z['disc_quo'], 'cust_code': temp['cust_code'], 'hutang_komisi_id': temp['hutang_komisi_id'], 'ps_total': z['ps_total'], 'sales': z['sales'], 'branch_name': temp['branch_name'], 'gp_bbn': z['gp_bbn'], 'invoice_number': temp['invoice_number'], 'name': temp['name'], 'pph_komisi': z['pph_komisi'], 'disc_total': z['disc_total'], 'faktur_pajak': temp['faktur_pajak'], 'cust_name': temp['cust_name'], 'piutang': z['piutang'], 'prod_series': temp['prod_series'], 'disc_quo_incl_tax': temp['disc_quo_incl_tax'], 'sales_name': temp['sales_name'], 'pav_code': z['pav_code'], 'sales_koor_name': temp['sales_koor_name'], 'branch_status': temp['branch_status'], 'md_code': temp['md_code'], 'is_cod': temp['is_cod'], 'ps_finco': z['ps_finco'], 'state_ksu': temp['state_ksu'], 'lines': temp['lines'], 'price_subtotal': z['price_subtotal'], 'finco_code': temp['finco_code'], 'beban_cabang': z['beban_cabang'], 'ar_days': temp['ar_days'], 'date_order': temp['date_order'], 'disc_reg': z['disc_reg'], 'price_bbn': z['price_bbn'], 'no': temp['no'], 'PPN': temp['PPN'], 'branch_code': 
temp['branch_code'], 'state': temp['state'], 'ps_dealer': temp['ps_dealer'], 'amount_hutang_komisi': temp['amount_hutang_komisi'], 'proposal_id': temp['proposal_id'], 'product_name': z['product_name'], 'analytic_combination': temp['analytic_combination'], 'gp_total': z['gp_total'], 'price_bbn_beli': z['price_bbn_beli'], 'ps_ahm': z['ps_ahm'], 'invoice_date': temp['invoice_date'], 'force_cogs': z['force_cogs'], 'categ2_name': z['categ2_name'], 'lot_chassis': z['lot_chassis'], 'invoice_status': temp['invoice_status'], 'partner_komisi_id': temp['partner_komisi_id'], 'lot_name': z['lot_name'], 'dpp_insentif_finco': z['dpp_insentif_finco'], 'categ_name': z['categ_name'], 'gp_unit': z['gp_unit'], 'or_name': temp['or_name'], # 'or_amount' : str(temp['or_amount']), 'spk_name': z['spk_name'], 'gp_dpp_minus_hpp': z['gp_dpp_minus_hpp'], } tampung.append(test_tampung) del reports[0]['dso_ids'] report.update({'dso_ids': tampung}) if not reports: reports = [{ 'dso_ids': [{ 'no': 'NO DATA FOUND', 'state_ksu': 'NO DATA FOUND', 'invoice_number': 'NO DATA FOUND', 'invoice_status': 'NO DATA FOUND', 'invoice_date': 'NO DATA FOUND', 'state_ksu': 'NO DATA FOUND', 'state_picking': 'NO DATA FOUND', 'oos_number': 'NO DATA FOUND', 'branch_status': 'NO DATA FOUND', 'no_registrasi': 'NO DATA FOUND', 'branch_code': 'NO DATA FOUND', 'branch_name': 'NO DATA FOUND', 'analytic_1': 'NO DATA FOUND', 'analytic_2': 'NO DATA FOUND', 'analytic_3': 'NO DATA FOUND', 'analytic_4': 'NO DATA FOUND', 'analytic_combination': 'NO DATA FOUND', 'md_code': 'NO DATA FOUND', 'spk_name': 'NO DATA FOUND', 'name': 'NO DATA FOUND', 'state': 'NO DATA FOUND', 'date_order': 'NO DATA FOUND', 'finco_code': 'NO DATA FOUND', 'is_cod': 'NO DATA FOUND', 'sales_koor_name': 'NO DATA FOUND', 'sales_name': 'NO DATA FOUND', 'job_name': 'NO DATA FOUND', 'cust_code': 'NO DATA FOUND', 'cust_name': 'NO DATA FOUND', 'proposal_id': 'NO DATA FOUND', 'hutang_komisi_id': 'NO DATA FOUND', 'partner_komisi_id': 'NO DATA FOUND', 'or_name': 'NO DATA FOUND', # 'or_amount': 0, 'product_name': 'NO DATA FOUND', 'pav_code': 'NO DATA FOUND', 'product_qty': 0, 'cabang_partner': 'NO DATA FOUND', 'lot_name': 'NO DATA FOUND', 'lot_chassis': 'NO DATA FOUND', 'tgl_lunas': 'NO DATA FOUND', 'ar_days': '0', 'price_unit': 0, 'discount_po': 0, 'ps_dealer': 0, 'ps_ahm': 0, 'ps_md': 0, 'ps_finco': 0, 'ps_total': 0, 'sales': 0, 'piutang_dp': 0, 'piutang': 0, 'piutang_total': 0, 'total': 0, 'disc_reg': 0, 'disc_quo': 0, 'disc_quo_incl_tax': 0, 'disc_total': 0, 'price_subtotal': 0, 'PPN': 0, 'force_cogs': 0, 'gp_dpp_minus_hpp': 0, 'gp_unit': 0, 'amount_hutang_komisi': 0, 'dpp_insentif_finco': 0, 'price_bbn': 0, 'price_bbn_beli': 0, 'gp_bbn': 0, 'gp_total': 0, 'beban_cabang': 0, 'pph_komisi': 0, 'categ_name': 'NO DATA FOUND', 'pkp': 'NO DATA FOUND', 'categ2_name': 'NO DATA FOUND', 'prod_series': 'NO DATA FOUND', 'faktur_pajak': 'NO DATA FOUND' }], 'title_short': 'Laporan Penjualan Tax', 'type': '', 'title': '' }] report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) super(dym_report_penjualantax_print, self).set_context(objects, data, ids, report_type)
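# dym_bank_book_report_print.set_context: derives the account filter from the
# selected journal's default debit/credit accounts, reads the opening balance
# as of start_date, pulls account_move_line rows with value dates taken from
# the related alokasi titipan / clearing giro / voucher / bank transfer /
# advance payment documents, and attaches the analytic (branch) breakdown.
# NOTE: `analytic_id == analytic` in the parent-analytic loop below is a
# comparison, not an assignment, so it has no effect.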
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context journal_id = data['journal_id'] start_date = data['start_date'] end_date = data['end_date'] start_value_date = data['start_value_date'] end_value_date = data['end_value_date'] title_prefix = '' title_short_prefix = '' bank_balance = 0 where_account = " 1=1 " if journal_id: journals = self.pool.get('account.journal').browse( cr, uid, journal_id[0]) account_ids = [] for journal in journals: if journal.default_debit_account_id and journal.default_debit_account_id.id not in account_ids: account_ids.append(journal.default_debit_account_id.id) if journal.default_credit_account_id and journal.default_credit_account_id.id not in account_ids: account_ids.append(journal.default_credit_account_id.id) bank_balance = journal.default_credit_account_id.with_context( date_from=start_date, date_to=start_date, initial_bal=True ).balance or journal.default_debit_account_id.with_context( date_from=start_date, date_to=start_date, initial_bal=True).balance where_account = " a.id in %s " % str(tuple(account_ids)).replace( ',)', ')') saldo_awal = bank_balance report_bank_book = { 'type': 'BankBook', 'title': '', 'title_short': title_short_prefix + ', ' + _('LAPORAN BANK BOOK'), 'saldo_awal': saldo_awal, 'start_date': start_date, 'end_date': end_date } area_user = self.pool.get('res.users').browse(cr, uid, uid).branch_ids branch_ids_user = [b.id for b in area_user] branch_ids = branch_ids_user where_start_date = " l.date >= '%s' " % start_date where_end_date = " l.date <= '%s' " % end_date where_value_start = " 1=1 " if start_value_date: where_value_start = " (at.value_date >= '%s' OR " % start_value_date where_value_start += " cg.value_date >= '%s' OR " % start_value_date where_value_start += " av.value_date >= '%s' OR " % start_value_date where_value_start += " bt.value_date >= '%s') " % start_value_date where_value_end = " 1=1 " if end_value_date: where_value_end = " (at.value_date <= '%s' OR " % end_value_date where_value_end += " cg.value_date <= '%s' OR " % end_value_date where_value_end += " av.value_date <= '%s' OR " % end_value_date where_value_end += " bt.value_date <= '%s') " % end_value_date query_bank_book = "SELECT l.id, l.analytic_account_id, l.name, l.ref, p.name as partner_name, a.name as account_name, a.code as account_code, l.account_id, l.date, l.debit, l.credit, fc.name as finance_company, at.value_date as at_value_date, cg.value_date as cg_value_date, av.value_date as av_value_date, bt.value_date as bt_value_date, avp.date as avp_value_date FROM account_move_line l LEFT JOIN account_move m on m.id = l.move_id LEFT JOIN account_account a on a.id = l.account_id LEFT JOIN res_partner p on p.id = l.partner_id LEFT JOIN res_partner fc on fc.id = l.finco_id LEFT JOIN dym_alokasi_titipan at on at.name = m.name LEFT JOIN dym_clearing_giro cg on cg.name = m.name LEFT JOIN account_voucher av on av.number = m.name LEFT JOIN dym_bank_transfer bt on bt.name = m.name LEFT JOIN dym_advance_payment avp on avp.name = m.name WHERE %s and %s and %s and %s and %s ORDER BY l.id asc" % ( where_account, where_start_date, where_end_date, where_value_start, where_value_end) move_selection = "" report_info = _('') move_selection += "" reports = [report_bank_book] for report in reports: a = cr.execute(query_bank_book) all_lines = cr.dictfetchall() move_lines = [] if all_lines: p_map = map( lambda x: { 'no': 0, 'id': x['id'], 'date': x['date'] if x['date'] != None else '', 'value_date': x['at_value_date'] if 
x['at_value_date'] and x[ 'at_value_date'] != None else x['cg_value_date'] if x['cg_value_date'] and x['cg_value_date'] != None else x['av_value_date'] if x['av_value_date'] and x[ 'av_value_date'] != None else x['bt_value_date'] if x['bt_value_date'] and x[ 'bt_value_date'] != None else x['avp_value_date'] if x['avp_value_date'] and x['avp_value_date' ] != None else '', 'debit': x['debit'] if x['debit'] > 0 else 0.0, 'credit': x['credit'] if x['credit'] > 0 else 0.0, 'account_code': x['account_code'].encode('ascii', 'ignore').decode( 'ascii') if x['account_code'] != None else '', 'account_name': x['account_name'].encode('ascii', 'ignore').decode( 'ascii') if x['account_name'] != None else '', 'partner_name': x['partner_name'].encode('ascii', 'ignore').decode( 'ascii') if x['partner_name'] != None else '', 'finance_company': x['finance_company'].encode('ascii', 'ignore').decode( 'ascii') if x['finance_company'] != None else '', 'name': x['name'].encode('ascii', 'ignore').decode('ascii') if x['name'] != None else '', 'ref': x['ref'].encode('ascii', 'ignore').decode('ascii') if x['ref'] != None else '', 'analytic_4': x['analytic_account_id'] if x['analytic_account_id'] != None else '', }, all_lines) for p in p_map: if p['id'] not in map(lambda x: x.get('id', None), move_lines): account_move_lines = filter( lambda x: x['id'] == p['id'], all_lines) analytic_1 = '' analytic_2 = '' analytic_3 = '' analytic_4 = '' analytic_1_name = '' analytic_2_name = '' analytic_3_name = '' analytic_4_name = '' analytic = self.pool.get( 'account.analytic.account').browse( cr, uid, account_move_lines[0] ['analytic_account_id']) or '' branch_name = '' branch = False branch_status_1 = '' branch_name = '' branch_id = '' if analytic: if analytic.type == 'normal': if analytic.segmen == 1 and analytic_1 == '': analytic_1_name = analytic.name analytic_1 = analytic.code if analytic.segmen == 2 and analytic_2 == '': analytic_2_name = analytic.name analytic_2 = analytic.code if analytic.segmen == 3 and analytic_3 == '': analytic_3_name = analytic.name analytic_3 = analytic.code branch = analytic.sudo().branch_id branch_name = branch.name branch_status_1 = branch.branch_status branch_id = branch.id if analytic.segmen == 4 and analytic_4 == '': analytic_4_name = analytic.name analytic_4 = analytic.code analytic_id = analytic while (analytic.parent_id): analytic = analytic.parent_id if analytic.type == 'normal': if analytic.segmen == 1 and analytic_1 == '': analytic_1_name = analytic.name analytic_1 = analytic.code if analytic.segmen == 2 and analytic_2 == '': analytic_2_name = analytic.name analytic_2 = analytic.code if analytic.segmen == 3 and analytic_3 == '': analytic_3_name = analytic.name analytic_3 = analytic.code branch = analytic.sudo().branch_id branch_name = branch.name branch_status_1 = branch.branch_status branch_id = branch.id if analytic.segmen == 4 and analytic_4 == '': analytic_4_name = analytic.name analytic_4 = analytic.code analytic_id == analytic if (branch and branch_ids and branch.id not in branch_ids): continue analytic_2_branch = analytic_2 if analytic_2 in ['210', '220', '230']: if branch_status_1 == 'H123': analytic_2_branch = analytic_2[:2] + '1' elif branch_status_1 == 'H23': analytic_2_branch = analytic_2[:2] + '2' else: analytic_2_branch = analytic_2 analytic_1_code = analytic_1 analytic_2_code = analytic_2_branch analytic_3_code = analytic_3 analytic_4_code = analytic_4 analytic_combination = analytic_1 + '/' + analytic_2_branch + '/' + analytic_3 + '/' + analytic_4 p.update({'lines': 
account_move_lines}) p.update({'analytic_1': analytic_1_code}) p.update({'analytic_2': analytic_2_code}) p.update({'analytic_3': analytic_3_code}) p.update({'analytic_4': analytic_4_code}) p.update({'branch_id': branch_id}) p.update({'branch_status': branch_status_1}) p.update({'branch_name': branch_name}) p.update( {'analytic_combination': analytic_combination}) move_lines.append(p) report.update({'move_lines': move_lines}) reports = filter(lambda x: x.get('move_lines'), reports) if not reports: reports = [{ 'type': 'BankBook', 'title': '', 'title_short': title_short_prefix + ', ' + _('LAPORAN BANK BOOK'), 'saldo_awal': saldo_awal, 'start_date': start_date, 'end_date': end_date, 'move_lines': [{ 'no': 0, 'branch_status': 'NO DATA FOUND', 'branch_name': 'NO DATA FOUND', 'account_code': 'NO DATA FOUND', 'account_name': 'NO DATA FOUND', 'partner_name': 'NO DATA FOUND', 'finance_company': 'NO DATA FOUND', 'name': 0, 'ref': 0, 'date': 'NO DATA FOUND', 'value_date': 'NO DATA FOUND', 'debit': 0.0, 'credit': 0.0, 'analytic_1': 'NO DATA FOUND', 'analytic_2': 'NO DATA FOUND', 'analytic_3': 'NO DATA FOUND', 'analytic_4': 'NO DATA FOUND', 'analytic_combination': 'NO DATA FOUND', }], }] report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) objects = False super(dym_bank_book_report_print, self).set_context(objects, data, ids, report_type)
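# dym_report_program_subsidi_print.set_context: selects dealer_sale_order
# discount lines with their subsidy program, branch, product, lot and customer
# invoice, computes tax-inclusive and /1.1 ("real") subsidy amounts per source
# (MD, AHM, finco, dealer, others), and zeroes disc_pelanggan /
# disc_pelanggan_real on repeated order numbers so the customer discount is
# counted only once per order.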
def set_context(self, objects, data, ids, report_type=None): cr = self.cr uid = self.uid context = self.context branch_ids = data['branch_ids'] product_ids = data['product_ids'] trx_start_date = data['trx_start_date'] trx_end_date = data['trx_end_date'] division = data['division'] title_prefix = '' title_short_prefix = '' report_program_subsidi = { 'type': 'payable', 'title': '', 'title_short': title_short_prefix + ', ' + _('Laporan Program Subsidi')} query_start = """ SELECT d.id as id_ai, COALESCE(b.name,'') as branch_id, dso.division as division, dso.name as number, dso.date_order as date, COALESCE(inv.number,'') as invoice_number, inv.date_invoice as invoice_date, COALESCE(t.name,'') as type, COALESCE(pav.code,'') as warna, COALESCE(lot.name,'') as engine_number, COALESCE(p.name,'') as program, COALESCE(d.ps_md,0) as ps_md, COALESCE(d.ps_ahm,0) as ps_ahm, case when COALESCE(d.ps_finco,0) = COALESCE(d.discount_pelanggan,0) then COALESCE(d.ps_finco,0) else case when COALESCE(d.ps_finco,0) = 0 then COALESCE(d.ps_finco,0) else COALESCE(d.discount_pelanggan,0) end end as ps_finco, COALESCE(d.ps_dealer,0) as ps_dealer, COALESCE(d.ps_others,0) as ps_others, case when COALESCE(d.ps_finco,0) = COALESCE(d.discount_pelanggan,0) then COALESCE(d.ps_finco,0) else COALESCE(d.discount_pelanggan,0) end as discount, --COALESCE(d.discount_pelanggan,0) as disc_pelanggan COALESCE(l.discount_po,0) as disc_pelanggan, round(COALESCE(d.ps_md,0)/1.1) as ps_md_real, round(COALESCE(d.ps_ahm,0)/1.1) as ps_ahm_real, case when COALESCE(d.ps_finco,0) = COALESCE(d.discount_pelanggan,0) then round(COALESCE(d.ps_finco,0)/1.1) else case when COALESCE(d.ps_finco,0) = 0 then round(COALESCE(d.ps_finco,0)/1.1) else round(COALESCE(d.discount_pelanggan,0)/1.1) end end as ps_finco_real, round(COALESCE(d.ps_dealer,0)/1.1) as ps_dealer_real, round(COALESCE(d.ps_others,0)/1.1) as ps_others_real, case when COALESCE(d.ps_finco,0) = COALESCE(d.discount_pelanggan,0) then round(COALESCE(d.ps_finco,0)/1.1) else round(COALESCE(d.discount_pelanggan,0)/1.1) end as discount_real, --COALESCE(d.discount_pelanggan,0) as disc_pelanggan round(COALESCE(l.discount_po,0)/1.1) as disc_pelanggan_real FROM dealer_sale_order dso LEFT JOIN dealer_sale_order_line l ON l.dealer_sale_order_line_id = dso.id left join dealer_sale_order_line_discount_line d on l.id = d.dealer_sale_order_line_discount_line_id LEFT JOIN dym_program_subsidi p ON p.id = d.program_subsidi LEFT JOIN account_invoice inv ON inv.origin = dso.name and inv.type = 'out_invoice' /*dealer_sale_order_line_discount_line d LEFT JOIN dym_program_subsidi p ON p.id = d.program_subsidi LEFT JOIN dealer_sale_order_line l ON l.id = d.dealer_sale_order_line_discount_line_id LEFT JOIN dealer_sale_order dso ON l.dealer_sale_order_line_id = dso.id LEFT JOIN account_invoice inv ON inv.origin = dso.name and inv.type = 'out_invoice' */ LEFT JOIN dym_branch b ON b.id = dso.branch_id LEFT JOIN product_product pr ON l.product_id = pr.id LEFT JOIN product_template t ON t.id = pr.product_tmpl_id LEFT JOIN stock_production_lot lot ON lot.id = l.lot_id LEFT JOIN product_attribute_value_product_product_rel pavpp ON pr.id = pavpp.prod_id LEFT JOIN product_attribute_value pav ON pavpp.att_id = pav.id where dso.state in ('progress','done') """ move_selection = "" report_info = _('') move_selection += "" query_end="" if division : query_end +=" AND dso.division = '%s'" % str(division) if trx_start_date : query_end +=" AND dso.date_order >= '%s'" % str(trx_start_date) if trx_end_date : query_end +=" AND dso.date_order 
<= '%s'" % str(trx_end_date) if product_ids : query_end +=" AND l.product_id in %s" % str( tuple(product_ids)).replace(',)', ')') if branch_ids : query_end +=" AND dso.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') reports = [report_program_subsidi] query_order = "order by 2,4" #query_order = "" for report in reports: cr.execute(query_start + query_end + query_order) # --print query_start + query_end + query_order all_lines = cr.dictfetchall() id_ai = [] if all_lines: # def lines_map(x): # x.update({'docname': x['cabang']}) # map(lines_map, all_lines) # for cnt in range(len(all_lines)-1): # if all_lines[cnt]['id_aml'] != all_lines[cnt+1]['id_aml']: # all_lines[cnt]['draw_line'] = 1 # else: # all_lines[cnt]['draw_line'] = 0 # all_lines[-1]['draw_line'] = 1 p_map = map( lambda x: { 'no': 0, 'id_ai': x['id_ai'] if x['id_ai'] != None else 0, 'branch_id': str(x['branch_id'].encode('ascii','ignore').decode('ascii')) if x['branch_id'] != None else '', 'division': str(x['division']) if x['division'] != None else '', 'invoice_number': str(x['invoice_number'].encode('ascii','ignore').decode('ascii')) if x['invoice_number'] != None else '', 'invoice_date': str(x['invoice_date'].encode('ascii','ignore').decode('ascii')) if x['invoice_date'] != None else '', 'number': str(x['number'].encode('ascii','ignore').decode('ascii')) if x['number'] != None else '', 'date': str(x['date']) if x['date'] != None else '', 'type': str(x['type'].encode('ascii','ignore').decode('ascii')) if x['type'] != None else '', 'warna': str(x['warna'].encode('ascii','ignore').decode('ascii')) if x['warna'] != None else '', 'engine_number': str(x['engine_number'].encode('ascii','ignore').decode('ascii')) if x['engine_number'] != None else '', 'program': str(x['program'].encode('ascii','ignore').decode('ascii')) if x['program'] != None else '', 'ps_md': x['ps_md'], 'ps_ahm': x['ps_ahm'], 'ps_finco': x['ps_finco'], 'ps_dealer': x['ps_dealer'], 'ps_others': x['ps_others'], 'discount': x['discount'], 'disc_pelanggan': x['disc_pelanggan'], 'ps_md_real': x['ps_md_real'], 'ps_ahm_real': x['ps_ahm_real'], 'ps_finco_real': x['ps_finco_real'], 'ps_dealer_real': x['ps_dealer_real'], 'ps_others_real': x['ps_others_real'], 'discount_real': x['discount_real'], 'disc_pelanggan_real': x['disc_pelanggan_real'],}, all_lines) ps = [] for p in p_map: if p['number'] in ps: p['disc_pelanggan'] = 0 p['disc_pelanggan_real'] = 0 else: ps.append(p['number']) report.update({'id_ai': p_map}) reports = filter(lambda x: x.get('id_ai'), reports) if not reports : reports = [{'title_short': 'Laporan Program Subsidi', 'type': ['out_invoice','in_invoice','in_refund','out_refund'], 'id_ai': [{'no': 0, 'branch_id': 'NO DATA FOUND', 'division': 'NO DATA FOUND', 'number': 'NO DATA FOUND', 'date': 'NO DATA FOUND', 'invoice_number': 'NO DATA FOUND', 'invoice_date': 'NO DATA FOUND', 'type': 'NO DATA FOUND', 'warna': 'NO DATA FOUND', 'engine_number': 'NO DATA FOUND', 'program': 'NO DATA FOUND', 'ps_md': 0, 'ps_ahm': 0, 'ps_finco': 0, 'ps_dealer': 0, 'ps_others': 0, 'discount': 0, 'disc_pelanggan': 0, 'ps_md_real': 0, 'ps_ahm_real': 0, 'ps_finco_real': 0, 'ps_dealer_real': 0, 'ps_others_real': 0, 'discount_real': 0, 'disc_pelanggan_real': 0, }], 'title': ''}] report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context ).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) super(dym_report_program_subsidi_print, self).set_context( objects, data, 
ids, report_type)
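# dym_stock_unit_report_pertgl_print.set_context: snapshots stock_quant per lot
# as of the selected date (in_date <= date 23:59:59, Unit categories only) and
# classifies each unit as intransit, undelivered, reserved, RFS or NRFS from
# its quant history and the linked dealer_sale_order, adding a 'Sub Total'
# pseudo-row whenever the product code changes; raises 'No Data Available'
# when nothing matches the filters.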
def set_context(self, objects, data, ids, report_type=None): locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') cr = self.cr uid = self.uid context = self.context branch_ids = data['branch_ids'] product_ids = data['product_ids'] location_ids = data['location_ids'] date = data['date'] title_prefix = '' title_short_prefix = '' report_stock_unit = { 'date': date, 'type': 'receivable', 'title': '', 'title_short': title_short_prefix + ', ' + _('Report Stock Unit Per Tanggal') } query_start = "select a.id as p_id, " \ "c.name as p_name , " \ "e.name_template as p_kode_product, " \ "x.description as p_default_code, " \ "g.code as p_warna, " \ "d.name as p_mesin, " \ "d.chassis_no as p_rangka, " \ "a.lot_id as lot_id, " \ "'"+str(date)+"' - d.receive_date as p_umur, "\ "date_part('days', '"+str(date)+" 23:59:59' - MIN(a.in_date) ) as p_umur_quant, "\ "d.tahun as p_tahun, " \ "d.state as p_state, " \ "b.name as p_nama_lokasi " \ "From " \ "stock_quant a " \ "LEFT JOIN stock_location b ON b.id = a.location_id " \ "LEFT JOIN dym_branch c ON c.id = b.branch_id " \ "LEFT JOIN stock_production_lot d ON d.id = a.lot_id " \ "LEFT JOIN product_product e ON e.id = a.product_id " \ "LEFT JOIN product_template x ON x.id = e.product_tmpl_id " \ "LEFT JOIN product_attribute_value_product_product_rel f ON f.prod_id = a.product_id " \ "LEFT JOIN product_attribute_value g ON g.id = f.att_id " \ "LEFT JOIN product_category h ON h.id = x.categ_id " \ "LEFT JOIN stock_warehouse w ON b.warehouse_id = w.id " \ "LEFT JOIN stock_picking r ON r.id = d.picking_id " \ "LEFT JOIN dym_stock_packing p ON r.id = p.picking_id and p.branch_sender_id is not null " \ "LEFT JOIN dym_branch bs ON p.branch_sender_id = bs.id " \ "where a.lot_id is not null and a.in_date <= '"+str(date)+" 23:59:59' " categ_ids = self.pool.get('product.category').get_child_ids( cr, uid, ids, 'Unit') query_start += "and h.id in %s" % str(tuple(categ_ids)).replace( ',)', ')') move_selection = "" report_info = _('') move_selection += "" query_end = "" if branch_ids: query_end += " AND b.branch_id in %s" % str( tuple(branch_ids)).replace(',)', ')') if product_ids: query_end += "AND a.product_id in %s" % str( tuple(product_ids)).replace(',)', ')') if location_ids: query_end += "AND a.location_id in %s" % str( tuple(location_ids)).replace(',)', ')') reports = [report_stock_unit] query_group = " group by a.id,p_name,p_kode_product,p_default_code,p_warna,p_mesin,p_rangka,lot_id,p_umur,p_tahun,p_state,p_nama_lokasi " query_order = "order by p_kode_product " for report in reports: cr.execute(query_start + query_end + query_group + query_order) all_lines = cr.dictfetchall() partners = [] if all_lines: def lines_map(x): x.update({'docname': x['p_name']}) map(lines_map, all_lines) for cnt in range(len(all_lines) - 1): if all_lines[cnt]['p_id'] != all_lines[cnt + 1]['p_id']: all_lines[cnt]['draw_line'] = 1 else: all_lines[cnt]['draw_line'] = 0 all_lines[-1]['draw_line'] = 1 p_map = map( lambda x: { 'p_id': x['p_id'], 'lot_id': x['lot_id'], 'p_name': x['p_name'], 'p_parent_category': 'Unit', 'p_branch_name': x['p_name'], 'p_kode_product': x['p_kode_product'], 'p_warna': x['p_warna'], 'p_umur': str(x['p_umur']) if x['p_umur'] != None and x['p_umur'] >= 0 else str(x['p_umur_quant']).split('.')[0] if x['p_umur_quant'] != None else '', 'p_nama_lokasi': x['p_nama_lokasi'], 'p_mesin': x['p_mesin'], 'p_tahun': x['p_tahun'], 'p_rangka': x['p_rangka'], 'p_default_code': x['p_default_code'], }, all_lines) date_time = str(date) + ' 23:59:59' lot_ids = [] product_code = '' 
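# Per-product-code subtotal counters; a 'Sub Total' pseudo-row is appended each
# time p_kode_product changes. The grand_total_* variables are initialised
# below but never accumulated further in this method.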
total_intransit = 0 total_reserved = 0 total_undelivered = 0 total_rfs = 0 total_nrfs = 0 grand_total_intransit = 0 grand_total_reserved = 0 grand_total_undelivered = 0 grand_total_rfs = 0 grand_total_nrfs = 0 for p in p_map: if p['p_id'] not in map(lambda x: x.get('p_id', None), partners): partner_lines = filter( lambda x: x['p_id'] == p['p_id'], all_lines) quant = self.pool.get('stock.quant').browse( cr, SUPERUSER_ID, partner_lines[0]['p_id']) filtered_history_quant_before = quant.history_ids.filtered( lambda r: r.date <= date_time).sorted( key=lambda r: r.date, reverse=True) if filtered_history_quant_before and filtered_history_quant_before[ 0].location_dest_id.usage not in ('internal', 'nrfs'): continue elif partner_lines[0]['lot_id'] in lot_ids: continue if product_code not in ( '', partner_lines[0]['p_kode_product'] ) and (total_nrfs > 0 or total_rfs > 0 or total_intransit > 0 or total_reserved > 0 or total_undelivered > 0): partners.append({ 'p_id': '', 'lot_id': '', 'p_name': '', 'p_parent_category': '', 'p_branch_name': '', 'p_kode_product': '', 'p_warna': '', 'p_umur': '', 'p_nama_lokasi': '', 'p_mesin': '', 'p_tahun': 'Sub Total', 'p_rangka': '', 'p_default_code': '', 'lines': partner_lines, 'intransit': total_intransit, 'rfs': total_rfs, 'nrfs': total_nrfs, 'reserved': total_reserved, 'undelivered': total_undelivered }) total_intransit = 0 total_reserved = 0 total_undelivered = 0 total_rfs = 0 total_nrfs = 0 product_code = partner_lines[0]['p_kode_product'] if product_code == '': product_code = partner_lines[0]['p_kode_product'] p.update({'lines': partner_lines}) intransit = 0 rfs = 0 nrfs = 0 reserved = 0 undelivered = 0 location = partner_lines[0]['p_nama_lokasi'] if quant.in_date <= date_time and ( (quant.consolidated_date and quant.consolidated_date > date_time) or not quant.consolidated_date): intransit = 1 location = quant.history_ids.filtered( lambda r: r.date <= date_time).sorted( key=lambda r: r.date )[0].location_dest_id.name elif filtered_history_quant_before and quant.lot_id.dealer_sale_order_id and quant.lot_id.dealer_sale_order_id.confirm_date and quant.lot_id.dealer_sale_order_id.confirm_date <= date_time and filtered_history_quant_before[ 0].location_dest_id.usage != 'customer': undelivered = 1 location = quant.lot_id.dealer_sale_order_id.dealer_sale_order_line.filtered( lambda r: r.lot_id == quant.lot_id ).location_id.name elif quant.lot_id.dealer_sale_order_id and quant.lot_id.dealer_sale_order_id.create_date <= date_time and ( (quant.lot_id.dealer_sale_order_id.confirm_date and quant.lot_id.dealer_sale_order_id.confirm_date > date_time) or not quant.lot_id. 
dealer_sale_order_id.confirm_date): reserved = 1 location = quant.lot_id.dealer_sale_order_id.dealer_sale_order_line.filtered( lambda r: r.lot_id == quant.lot_id ).location_id.name elif filtered_history_quant_before and filtered_history_quant_before[ 0].location_dest_id.usage == 'internal': rfs = 1 location = filtered_history_quant_before[ 0].location_dest_id.name elif filtered_history_quant_before and filtered_history_quant_before[ 0].location_dest_id.usage == 'nrfs': nrfs = 1 location = filtered_history_quant_before[ 0].location_dest_id.name p.update({ 'p_nama_lokasi': location, 'intransit': intransit, 'rfs': rfs, 'nrfs': nrfs, 'reserved': reserved, 'undelivered': undelivered }) total_intransit += intransit total_rfs += rfs total_nrfs += nrfs total_reserved += reserved total_undelivered += undelivered if nrfs > 0 or rfs > 0 or intransit > 0 or reserved > 0 or undelivered > 0: lot_ids.append(partner_lines[0]['lot_id']) partners.append(p) if partners and (total_nrfs > 0 or total_rfs > 0 or total_intransit > 0 or total_reserved > 0 or total_undelivered > 0): partners.append({ 'p_id': '', 'lot_id': '', 'p_name': '', 'p_parent_category': '', 'p_branch_name': '', 'p_kode_product': '', 'p_warna': '', 'p_umur': '', 'p_nama_lokasi': '', 'p_mesin': '', 'p_tahun': 'Sub Total', 'p_rangka': '', 'p_default_code': '', 'lines': partner_lines, 'intransit': total_intransit, 'rfs': total_rfs, 'nrfs': total_nrfs, 'reserved': total_reserved, 'undelivered': total_undelivered }) report.update({'partners': partners}) reports = filter(lambda x: x.get('partners'), reports) if not reports: raise orm.except_orm(_('No Data Available'), _('No records found for your selection!')) report_date = datetime_field.context_timestamp( cr, uid, datetime.now(), context).strftime(DEFAULT_SERVER_DATETIME_FORMAT) self.localcontext.update({ 'report_info': report_info, 'report_date': report_date, 'reports': reports, }) objects = False super(dym_stock_unit_report_pertgl_print, self).set_context(objects, data, ids, report_type)
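# Illustrative sketch only (helper name is hypothetical, not used by the
# reports above): every set_context() here assembles its WHERE fragments with
# string interpolation such as str(tuple(branch_ids)).replace(',)', ')'), which
# assumes the wizard always supplies well-formed ids and dates. With the same
# psycopg2 cursor, the filters could instead be expressed as query parameters:


def _in_clause(column, record_ids):
    """Return (sql_fragment, params) for an IN filter on `column`.

    psycopg2 adapts a Python tuple passed as a parameter, so a fragment like
    "b.id in %s" executed with params=(tuple(record_ids),) is quoted by the
    driver itself; an empty selection falls back to the neutral " 1=1 ".
    """
    if record_ids:
        return " %s in %%s " % column, (tuple(record_ids),)
    return " 1=1 ", ()

# Example usage (hypothetical): frag, params = _in_clause("b.id", branch_ids)
#                               cr.execute(base_query + " AND " + frag, params)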