Exemple #1
0
    def _get_orig_line(self, cr, uid, line, context=None):
        """Return the original sale.order.line that *line* replaces.

        :param cr: db cursor
        :param uid: user_id
        :param line: browse_record of sale.order.line (the replacement line)
        :param context: context dict; key ``no_control_qty`` disables the
            quantity check below
        :return: the browse record of original line for the replacement.
        :raises exceptions.MissingError: when no original line is found, or
            the replacement qty exceeds what is left to replace
        """
        # BUG FIX: the default was a mutable ``context={}`` shared across
        # calls; use ``None`` plus a local fallback instead.
        if context is None:
            context = {}
        sale_id = line.orig_sale.id
        orig_line_id = self.search(cr, uid,
                                   [('order_id', '=', sale_id),
                                    ('product_id', '=',
                                     line.product_id.id),
                                    ('is_all_replacement', '=', False)],
                                   context=context)
        if not orig_line_id:
            raise exceptions.MissingError(
                _('Not found the original line of replacement'))
        orig_line = self.browse(cr, uid, orig_line_id[0], context)
        # Refuse the replacement when the remaining replaceable quantity on
        # the original line is smaller than the replacement quantity,
        # unless the caller explicitly disabled the check.
        if (orig_line.product_uom_qty - orig_line.qty_replacement) < \
                line.product_uom_qty and not context.get('no_control_qty'):
            raise exceptions.MissingError(
                _('Qty error in replacement.'))

        return orig_line
 def register_payment(self,
                      payment_line,
                      writeoff_acc_id=False,
                      writeoff_journal_id=False):
     """Reconcile the invoices' open payable/receivable move lines
     together with *payment_line*, writing off any remainder to the
     account configured in the accounting settings.

     @attention: Source in
                 https://github.com/OCA/OCB/blob/9.0/addons/
                 account/models/account_invoice.py#L596
     @author:    Authors credited in README.rst
     """
     # Gather every not-yet-reconciled payable/receivable line from the
     # moves of the invoices in self.
     unreconciled = self.env['account.move.line']
     for invoice in self:
         move_lines = invoice.move_id.line_id
         unreconciled += move_lines.filtered(
             lambda r: not r.reconcile_ref and r.account_id.type in
             ('payable', 'receivable'))
     # The write-off account is configured company-wide via ir.values.
     writeoff_account = self.env['ir.values'].get_default(
         'account.config.settings', 'reconciliation_writeoff_account')
     if not writeoff_account:
         raise exceptions.MissingError(
             _('''Set the write-off account
         in Settings -> Configuration -> Invoicing -> Write-Off account'''))
     return (unreconciled + payment_line).reconcile(
         writeoff_journal_id=self.journal_id.id,
         writeoff_period_id=self.env['account.period'].find().id,
         writeoff_acc_id=writeoff_account)
Exemple #3
0
    def prepare_line(self, name, debit_amount, credit_amount, account_code,
                     cresus_tax_code, analytic_account_code):
        """Build the values dict for one account.move.line.

        Resolves the account by its code (raising when absent) and, only
        for accounts that do not carry an initial balance, the optional
        tax (by Cresus mapping) and analytic account (by code).

        :return: dict of move-line values
        :raises exceptions.MissingError: when no account matches the code
        """
        line = {
            'name': name,
            'debit': debit_amount,
            'credit': credit_amount,
        }

        account = self.env['account.account'].search(
            [('code', '=', account_code)], limit=1)
        if not account:
            raise exceptions.MissingError(
                _("No account with code %s") % account_code)
        line['account_id'] = account.id

        # Tax and analytic account only apply to P&L accounts (those not
        # carrying an initial balance).
        if not account.user_type_id.include_initial_balance:
            if cresus_tax_code:
                tax = self.env['account.tax'].search(
                    [('tax_cresus_mapping', '=', cresus_tax_code),
                     ('price_include', '=', True)],
                    limit=1)
                line['tax_line_id'] = tax.id
            if analytic_account_code:
                analytic = self.env['account.analytic.account'].search(
                    [('code', '=', analytic_account_code)], limit=1)
                line['analytic_account_id'] = analytic.id
        return line
Exemple #4
0
    def get_collection_data(self, name, version=None):
        """Fetch exactly one cross shared collection from Cenit.

        :param name: collection name to look up
        :param version: exact ``shared_version`` to fetch; when falsy the
            newest version is taken (sorted by version, limit 1)
        :return: the single matching collection entry as returned by the
            API (under the ``cross_shared_collections`` key)
        :raises exceptions.ValidationError: when the API reply is not a list
        :raises exceptions.MissingError: when zero or several entries match
        """
        cenit_api = self.env['cenit.api']

        args = {
            'name': name,
        }
        if not version:
            args.update({'sort_by': 'shared_version', 'limit': 1})
        else:
            args.update({'shared_version': version})

        path = "/setup/cross_shared_collection"
        rc = cenit_api.get(path, params=args).get("cross_shared_collections",
                                                  False)

        if not isinstance(rc, list):
            raise exceptions.ValidationError(
                "Hey!! something wicked just happened")
        elif len(rc) != 1:
            raise exceptions.MissingError(
                "Required '%s [%s]' not found in Cenit" %
                (name, version or "any"))

        # Dead commented-out code removed (former `data = {...}` draft).
        return rc[0]
Exemple #5
0
    def get_collection_data(self, name, version=None):
        """Look up a shared collection on Cenit and return its id and
        pull parameters.

        :param name: collection name
        :param version: exact version wanted; when falsy the newest one
            is requested instead
        :return: dict with keys ``id`` and ``params``
        """
        api = self.env['cenit.api']

        params = {'name': name}
        if version:
            params['shared_version'] = version
        else:
            # No explicit version: ask the API for the latest one.
            params['sort_by'] = 'shared_version'
            params['limit'] = 1

        rc = api.get("/setup/shared_collection", params=params)

        _logger.info("\n\nRC: %s\n", rc)

        if not isinstance(rc, list):
            raise exceptions.ValidationError(
                "Hey!! something wicked just happened")
        elif len(rc) != 1:
            raise exceptions.MissingError(
                "Required '%s [%s]' not found in Cenit" %
                (name, version or "any"))

        shared = rc[0].get('shared_collection', {})
        data = {
            'id': shared.get('id'),
            'params': shared.get('pull_parameters', []),
        }

        _logger.info("\n\nShared collection data: %s\n", data)

        return data
Exemple #6
0
 def add_attendee(self):
     """Attach the wizard's selected attendees to the active session.

     :raises exceptions.MissingError: when no attendee is selected
     """
     # Guard clause: nothing selected means nothing to attach.
     if not self.attendee_ids:
         raise exceptions.MissingError(_("No/Zero Attendees Selected !"))
     session = self.env['openacademy.session'].browse(
         self.env.context['active_id'])
     session.att_ids = self.attendee_ids
Exemple #7
0
    def remote_call(self, method_type, method_name, **kwargs):
        """POST *kwargs* as JSON to the shop's ERP endpoint and return the
        decoded reply, or a failure dict on HTTP error.

        Uses the first shop in self (or the first shop found at all when
        self is empty) for the host and credentials.
        """
        shops = self if len(self) else self.search([])
        if not shops:
            raise exceptions.MissingError(u'没有定义电商店铺')

        shop = shops[0]
        # Normalise the host into "http://host" with no trailing slash.
        base_url = shop.host if shop.host.startswith('http') else 'http://' + shop.host
        if base_url.endswith('/'):
            base_url = base_url[0:-1]
        service_uri = "%s/erp/%s/%s" % (base_url, method_type, method_name)

        headers = {
            'Auth_Account': shop.user,
            'Auth_Token': shop.pwd,
            'Content-Type': "application/json; charset=utf-8",
        }
        payload = json.dumps(kwargs)
        _logger.info(payload)
        resp = requests.post(service_uri, payload, headers=headers)
        _logger.info(resp)

        res = {}
        if resp.status_code == requests.codes.ok:
            res = json.loads(resp.content)
            _logger.info(res)
            if res.get('result') == 'success':
                _logger.info(u'%s调用成功' % method_name)
            else:
                _logger.error(u'%s调用失败, 原因:%s' % (method_name, res.get('err_msg')))
        else:
            _logger.error(u'http调用失败, 返回码: %s' % resp.status_code)
            res.update({'result': 'failed', 'err_msg': ''})

        return res
    def renumber(self):
        """Renumber all the posted moves on the given journal and periods.

        Resets each affected sequence (or sequence date range) to
        ``self.number_next`` exactly once, then reassigns every posted
        move a fresh number in (date, id) order.

        :return dict:
            Window action to open the renumbered moves, to review them.
        """
        # Accumulators of sequences / date ranges already reset, so each
        # one is reset only once even when shared by many moves.
        reset_sequences = self.env["ir.sequence"]
        reset_ranges = self.env["ir.sequence.date_range"]

        _logger.debug("Searching for account moves to renumber.")
        move_ids = self.env['account.move'].search(
            [('journal_id', 'in', self.journal_ids.ids),
             ('date', '>=', self.date_from), ('date', '<=', self.date_to),
             ('state', '=', 'posted')],
            order='date, id')
        if not move_ids:
            raise exceptions.MissingError(
                _('No records found for your selection!'))

        _logger.debug("Renumbering %d account moves.", len(move_ids))
        for move in move_ids:
            sequence = move.journal_id.sequence_id
            if sequence not in reset_sequences:
                if sequence.use_date_range:
                    # Date-range sequences: reset only the range covering
                    # this move's date, if any and not already reset.
                    date_range = self.env["ir.sequence.date_range"].search([
                        ("sequence_id", "=", sequence.id),
                        ("date_from", "<=", move.date),
                        ("date_to", ">=", move.date)
                    ])
                    if date_range and date_range not in reset_ranges:
                        date_range.number_next = self.number_next
                        reset_ranges |= date_range
                else:
                    sequence.number_next = self.number_next
                    reset_sequences |= sequence

            # Generate (using our own get_id) and write the new move number
            move.name = (sequence.with_context(
                ir_sequence_date=move.date).next_by_id())

        _logger.debug("%d account moves renumbered.", len(move_ids))

        return {
            'type': 'ir.actions.act_window',
            'name': _("Renumbered account moves"),
            'res_model': 'account.move',
            'domain': [("id", "in", move_ids.ids)],
            'view_type': 'form',
            'view_mode': 'tree',
            'context': self.env.context,
            'target': 'current',
        }
Exemple #9
0
    def open_customer_sign(self, cr, uid, ids, context=None):
        """Open the customer-sign flow for an outgoing picking's sale order.

        Validates that a picking id is given, that it links to a sale
        order, and that it is an outgoing picking; then delegates to
        sale.order.prepare_customer_sign with the sale order as active
        record.
        """
        context = dict(context or {})
        pid = ids and ids[0]
        if not pid:
            raise exceptions.MissingError(u'未找到拣货单!')

        picking = self.browse(cr, uid, pid, context=context)
        sale = picking.sale_id
        if not sale:
            raise exceptions.MissingError(u'未找到关联的销售订单!')
        if picking.picking_type_code != 'outgoing':
            raise exceptions.ValidationError(u'非出库单,不能签收!')

        context.update({
            'picking_id': picking.id,
            'active_id': sale.id,
            'active_ids': [sale.id],
        })
        return self.pool.get('sale.order').prepare_customer_sign(
            cr, uid, [sale.id], context=context)
Exemple #10
0
 def remote_logistics_call(self, cr, uid, vals, context=None):
     """POST *vals* to the shop's configured YTO logistics endpoint and
     parse the XML reply.

     :param vals: form data posted to the shop's ``yt_url``
     :return: dict with ``result`` ('success'/'fail'), ``err_msg`` and,
         on success, ``mailNo`` and ``shortAddress``
     :raises exceptions.MissingError: when no shop or no YTO URL is set
     """
     shops = self.search(cr, uid, [])
     shop = shops and shops[0] or False
     if not shops:
         raise exceptions.MissingError(u'没有定义电商店铺')
     shop = self.browse(cr, uid, shop)
     http_url = shop.yt_url or False
     if not http_url:
         raise exceptions.MissingError(u'没有定义圆通接口地址')
     result = {}
     try:
         read = requests.post(http_url, data=vals)
         _logger.info(read)
         _logger.info(read.content)
         res_xml = ET.XML(read.content)
         res = res_xml.find('success').text
         if res == 'true':
             distributeInfo = res_xml.find('distributeInfo')
             shortAddress = distributeInfo.find('shortAddress')
             if shortAddress is not None:
                 mailNo = res_xml.find('mailNo')
                 result.update({
                     'result': 'success',
                     'err_msg': '',
                     'mailNo': mailNo.text,
                     'shortAddress': shortAddress.text
                 })
             else:
                 result.update({'result': 'fail', 'err_msg': u'不能解析的错误地址'})
         else:
             reason = res_xml.find('reason')
             if reason is not None:
                 reason = res_xml.find('reason').text
             else:
                 reason = ''
             result.update({'result': 'fail', 'err_msg': reason})
     # BUG FIX: modern except syntax (the bound exception was unused), and
     # return `result` — the original built it but fell off the end,
     # always returning None to the caller.
     except Exception:
         result.update({'result': 'fail', 'err_msg': u'网络错误或者圆通地址配置错误'})
     return result
Exemple #11
0
 def get_reserved_quants_query(self, product_id, location_id, picking_id):
     """Build the SQL (and its parameter tuple) selecting reservation ids
     of quants of *product_id* stored anywhere under *location_id*,
     optionally excluding moves belonging to *picking_id*.

     :return: (query string, params tuple) ready for cr.execute
     :raises exceptions.MissingError: when the location does not exist
         (or is inactive and thus invisible to search)
     """
     location = self.env['stock.location'].search(
         [('id', '=', location_id)])
     if not location:
         raise exceptions.MissingError(
             u"Can't find the location with id:%s maybe active = False" %
             location_id)
     # parent_left/parent_right bound the location's subtree in the
     # nested-set hierarchy of stock.location.
     query = """
         SELECT sq.reservation_id
         FROM stock_quant sq
             LEFT JOIN stock_location sl ON sq.location_id = sl.id
             LEFT JOIN stock_move sm ON sq.reservation_id = sm.id
         WHERE sq.product_id = %s
             AND sl.parent_left >= %s
             AND sl.parent_left < %s
             AND sq.reservation_id IS NOT NULL
     """
     params = (product_id, location.parent_left, location.parent_right)
     if picking_id:
         query += " AND (sm.picking_id != %s OR sm.picking_id IS NULL)"
         params += (picking_id, )
     return query, params
Exemple #12
0
 def action_generate_lot(self):
     """Create a production lot from the product's lot-code sequence and
     link it to this record via ``mrp_lot_id``.

     :raises exceptions.MissingError: when the product has no lot code
     """
     product = self.product_id
     if not product.lot_code_id:
         raise exceptions.MissingError("You Must Set Lot Code ID for This Product First!")
     code = product.lot_code_id.get_next()[0]
     self.mrp_lot_id = self.env['stock.production.lot'].create({
         'name': code,
         'product_id': product.id,
         'ref': code,
     })
Exemple #13
0
    def import_mappings_data(self, json_data):
        """Create or update cenit.data_type records from *json_data*.

        Each entry must provide ``model``, ``namespace``, ``schema`` and
        ``name`` plus three child collections (``domains``, ``triggers``
        and ``lines``).  An existing data type with the same name is
        updated in place and its children recreated from scratch.

        :param json_data: iterable of mapping dicts
        :return: True
        :raises exceptions.MissingError: when the referenced Odoo model,
            Cenit namespace or schema cannot be found
        """
        irmodel_pool = self.env['ir.model']
        schema_pool = self.env['cenit.schema']
        namespace_pool = self.env['cenit.namespace']
        datatype_pool = self.env['cenit.data_type']
        line_pool = self.env['cenit.data_type.line']
        domain_pool = self.env['cenit.data_type.domain_line']
        trigger_pool = self.env['cenit.data_type.trigger']

        for data in json_data:
            odoo_model = data['model']
            namespace = data['namespace']
            schema = data['schema']

            # Resolve the Odoo model name to its ir.model record id.
            domain = [('model', '=', odoo_model)]
            candidates = irmodel_pool.search(domain)
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s module installed" % odoo_model)
            odoo_model = candidates.id

            # Resolve the namespace name to its record id.
            domain = [('name', '=', namespace)]
            candidates = namespace_pool.search(domain)
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s namespace in Namespaces" % namespace)
            namespace = candidates.id

            # Resolve the schema (scoped to the namespace) to its id.
            domain = [('name', '=', schema), ('namespace', '=', namespace)]
            candidates = schema_pool.search(domain)
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s schema in Schemas" % schema)
            schema = candidates.id

            vals = {
                'name': data['name'],
                'model': odoo_model,
                'namespace': namespace,
                'schema': schema
            }
            # Update a data type with the same name if present, else create.
            dt = datatype_pool.search([('name', '=', data['name'])])
            updt = False
            if dt:
                dt.write(vals)
                updt = True
            else:
                dt = datatype_pool.create(vals)

            if updt:
                # On update, drop all child records first; they are fully
                # rebuilt from the incoming data below.
                for d in dt.domain:
                    d.unlink()
                for d in dt.triggers:
                    d.unlink()
                for d in dt.lines:
                    d.unlink()

            for domain in data['domains']:
                vals = {
                    'data_type': dt.id,
                    'field': domain['field'],
                    'value': domain['value'],
                    'op': domain['op']
                }
                domain_pool.create(vals)

            for trigger in data['triggers']:
                vals = {
                    'data_type': dt.id,
                    'name': trigger['name'],
                    'cron_lapse': trigger['cron_lapse'],
                    'cron_units': trigger['cron_units'],
                    'cron_restrictions': trigger['cron_restrictions'],
                    'cron_name': trigger['cron_name']
                }
                trigger_pool.create(vals)

            for line in data['lines']:
                # Lines may reference another data type by name; note an
                # empty search result yields `candidate.id == False` here.
                domain = [('name', '=', line['reference'])]
                candidate = datatype_pool.search(domain)
                vals = {
                    'data_type': dt.id,
                    'name': line['name'],
                    'value': line['value'],
                    'line_type': line['line_type'],
                    'line_cardinality': line['line_cardinality'],
                    'primary': line['primary'],
                    'inlined': line['inlined'],
                    'reference': candidate.id
                }
                line_pool.create(vals)
            dt.sync_rules()
        return True
    def _standardise_data(self, data, importer):
        """
        This function split one line of the spreadsheet into multiple lines.
        Winbiz just writes one line per move.

        Generator: rows sharing the same Winbiz voucher number (``pièce``)
        are grouped, and one prepared move is yielded per group.

        :param data: iterable of row dicts keyed by Winbiz column names
        :param importer: object providing ``parse_date``
        :raises exceptions.MissingError: unknown account code, journal
            mapping or tax
        """

        tax_obj = self.env['account.tax']
        journal_obj = self.env['account.journal']
        account_obj = self.env['account.account']

        def find_account(code):
            # Resolve an account by exact code; fail loudly when absent.
            res = account_obj.search([('code', '=', code)], limit=1)
            if not res:
                raise exceptions.MissingError(
                    _("No account with code %s") % code)
            return res

        # NOTE(review): prepare_move, prepare_line and account_line_merge
        # appear to be module-level helpers defined elsewhere in this file
        # — confirm before relying on their exact contract.
        if self.enable_account_based_line_merging:
            my_prepare_move = (lambda lines, journal, date, ref: prepare_move(
                account_line_merge(lines), journal, date, ref))
        else:
            my_prepare_move = prepare_move

        # loop
        incomplete = None
        previous_pce = None
        previous_date = None
        previous_journal = None
        previous_tax = None
        lines = []
        for self.index, winbiz_item in enumerate(data, 1):
            # A new voucher number closes the current group: emit its move.
            if previous_pce not in (None, winbiz_item[u'pièce']):
                yield my_prepare_move(lines,
                                      previous_journal,
                                      previous_date,
                                      ref=previous_pce)
                lines = []
                incomplete = None
            previous_pce = winbiz_item[u'pièce']
            previous_date = importer.parse_date(winbiz_item[u'date'])
            journal = journal_obj.search(
                [('winbiz_mapping', '=', winbiz_item[u'journal'])], limit=1)
            if not journal:
                raise exceptions.MissingError(
                    _(u"No journal ‘%s’") % winbiz_item[u'journal'])
            previous_journal = journal

            # tvatyp:  0 no vat was applied (internal transfers for example)
            #          1 there is vat but it's not on this line
            #          2 sales vat
            #          3 purchases vat
            #         -1 pure vat
            tvatyp = int(winbiz_item['ecr_tvatyp'])
            if tvatyp > 1:
                if tvatyp == 2:
                    scope = 'sale'
                else:
                    assert tvatyp == 3
                    scope = 'purchase'
                # ecr_tvabn: 2 means tax included in the amount, 1 excluded.
                tvabn = int(winbiz_item['ecr_tvabn'])
                if tvabn == 2:
                    included = True
                else:
                    assert tvabn == 1
                    included = False
                tax = tax_obj.search(
                    [('amount', '=', winbiz_item['ecr_tvatx']),
                     ('price_include', '=', included),
                     ('type_tax_use', '=', scope)],
                    limit=1)
                if not tax:
                    raise exceptions.MissingError(
                        _("No tax found with amount = %r and type = %r") %
                        (winbiz_item['ecr_tvatx'], scope))
            else:
                tax = None
            # A negative type marks a pure-VAT row, which originates from
            # the previous row's tax.
            if int(winbiz_item['ecr_tvatyp']) < 0:
                assert previous_tax is not None
                originator_tax = previous_tax
            else:
                originator_tax = None
            previous_tax = tax

            amount = float(winbiz_item[u'montant'])
            recto_line = verso_line = None
            # 'Multiple' means the counterpart is split across several
            # rows; amounts then fold into the pending `incomplete` line.
            if winbiz_item[u'cpt_débit'] != 'Multiple':
                account = find_account(winbiz_item[u'cpt_débit'])
                if incomplete is not None and incomplete.account == account:
                    incomplete.amount -= amount
                else:
                    recto_line = prepare_line(
                        name=winbiz_item[u'libellé'].strip(),
                        amount=(-amount),
                        account=account,
                        originator_tax=originator_tax)
                    if winbiz_item['ecr_tvadc'] == 'd':
                        recto_line.tax = tax
                    lines.append(recto_line)

            if winbiz_item[u'cpt_crédit'] != 'Multiple':
                account = find_account(winbiz_item[u'cpt_crédit'])
                if incomplete is not None and incomplete.account == account:
                    incomplete.amount += amount
                else:
                    verso_line = prepare_line(
                        name=winbiz_item[u'libellé'].strip(),
                        amount=amount,
                        account=account,
                        originator_tax=originator_tax)
                    if winbiz_item['ecr_tvadc'] == 'c':
                        verso_line.tax = tax
                    lines.append(verso_line)

            if winbiz_item[u'cpt_débit'] == 'Multiple':
                assert incomplete is None
                incomplete = verso_line
            if winbiz_item[u'cpt_crédit'] == 'Multiple':
                assert incomplete is None
                incomplete = recto_line

        # Emit the final, still-open group.
        yield my_prepare_move(lines,
                              previous_journal,
                              previous_date,
                              ref=previous_pce)
 def find_account(code):
     """Return the single account whose code equals *code*.

     :raises exceptions.MissingError: when no account matches
     """
     found = account_obj.search([('code', '=', code)], limit=1)
     if not found:
         raise exceptions.MissingError(
             _("No account with code %s") % code)
     return found
Exemple #16
0
    def import_data_types(self, context=None):
        """Create cenit.data_type records (plus their domain, trigger and
        line children) from the base64-encoded JSON attachment found in
        ``self._context['attachment']``.

        :param context: unused; kept for API compatibility.  BUG FIX: the
            original default was a mutable ``{}`` shared between calls —
            replaced with ``None``.
        :return: True
        :raises exceptions.MissingError: when a referenced Odoo model,
            Cenit namespace or schema does not exist
        """
        data_file = self._context['attachment']
        irmodel_pool = self.env['ir.model']
        schema_pool = self.env['cenit.schema']
        namespace_pool = self.env['cenit.namespace']
        datatype_pool = self.env['cenit.data_type']
        line_pool = self.env['cenit.data_type.line']
        domain_pool = self.env['cenit.data_type.domain_line']
        trigger_pool = self.env['cenit.data_type.trigger']

        data_file = base64.decodestring(data_file)
        json_data = json.loads(data_file)

        for data in json_data:
            odoo_model = data['model']
            namespace = data['namespace']
            schema = data['schema']

            # Resolve the Odoo model name to its ir.model record id.
            domain = [('model', '=', odoo_model)]
            candidates = irmodel_pool.search(domain)
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s module installed" % odoo_model)
            odoo_model = candidates.id

            # Resolve the namespace name to its record id.
            domain = [('name', '=', namespace)]
            candidates = namespace_pool.search(domain)
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s namespace in Namespaces" % namespace)
            namespace = candidates.id

            # Resolve the schema name to its record id.
            domain = [('name', '=', schema)]
            candidates = schema_pool.search(domain)
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s schema in Schemas" % schema)
            schema = candidates.id

            vals = {
                'name': data['name'],
                'model': odoo_model,
                'namespace': namespace,
                'schema': schema
            }
            created_datatype = datatype_pool.create(vals)

            for domain in data['domains']:
                vals = {
                    'data_type': created_datatype.id,
                    'field': domain['field'],
                    'value': domain['value'],
                    'op': domain['op']
                }
                domain_pool.create(vals)

            for trigger in data['triggers']:
                vals = {
                    'data_type': created_datatype.id,
                    'name': trigger['name'],
                    'cron_lapse': trigger['cron_lapse'],
                    'cron_units': trigger['cron_units'],
                    'cron_restrictions': trigger['cron_restrictions'],
                    'cron_name': trigger['cron_name']
                }
                trigger_pool.create(vals)

            for line in data['lines']:
                # Note: a missing reference yields an empty recordset,
                # whose .id is False.
                domain = [('name', '=', line['reference'])]
                candidate = datatype_pool.search(domain)
                vals = {
                    'data_type': created_datatype.id,
                    'name': line['name'],
                    'value': line['value'],
                    'line_type': line['line_type'],
                    'line_cardinality': line['line_cardinality'],
                    'primary': line['primary'],
                    'inlined': line['inlined'],
                    'reference': candidate.id
                }
                line_pool.create(vals)
        return True
Exemple #17
0
    def prepare_logistics_interface_order(self, cr, uid, ids, context=None):
        """Build the YTO logistics RequestOrder XML for each picking.

        :param ids: stock.picking ids to prepare
        :return: dict mapping picking id -> prepared POST values
        :raises exceptions.MissingError: when no shop or YTO credentials
            are configured
        :raises exceptions.ValidationError: when a picking has no sale
            order or no shipping address
        """
        ebiz_shop = self.pool['ebiz.shop']
        shops = ebiz_shop.search(cr, uid, [])
        shop = shops and shops[0] or False
        if not shops:
            raise exceptions.MissingError(u'没有定义电商店铺')
        shop = ebiz_shop.browse(cr, uid, shop)
        if not (shop.clientid and shop.yt_pwd):
            raise exceptions.MissingError(u'没有定义圆通账号或密钥')
        pickings = []
        error_picking = {}
        # Collect every picking lacking a sale order / shipping address and
        # report them all at once.
        for picking in self.browse(cr, uid, ids, context=context):
            if not (picking.sale_id and picking.sale_id.partner_shipping_id
                    or False):
                error_picking.update({picking.name: u'没有对应销售订单或者对应销售订单没有发货地址'})
            else:
                pickings.append(picking)
        if error_picking:
            err_msg = ''
            for err in error_picking:
                err_msg += '%s:%s\n' % (err, error_picking.get(err))
            raise exceptions.ValidationError(err_msg)
        requese_xml_dict = {}
        for right_picking in pickings:
            RequestOrder = ET.Element('RequestOrder')
            clientID = ET.Element('clientID')
            clientID.text = shop.clientid  # should be made configurable
            logisticProviderID = ET.Element('logisticProviderID')
            logisticProviderID.text = 'YTO'
            # Order reference number
            txLogisticID = ET.Element('txLogisticID')
            txLogisticID.text = right_picking.sale_id.name
            # Receiver information
            receiver = ET.Element('receiver')
            r_name = ET.Element('name')
            r_name.text = right_picking.sale_id.partner_shipping_id.name
            r_prov = ET.Element('prov')
            r_prov.text = right_picking.sale_id.partner_shipping_id.state_id and right_picking.sale_id.partner_shipping_id.state_id.name or ''
            r_mobile = ET.Element('mobile')
            r_mobile.text = right_picking.sale_id.partner_shipping_id.mobile or right_picking.sale_id.partner_shipping_id.phone or ''
            r_city = ET.Element('city')
            r_city.text = right_picking.sale_id.partner_shipping_id.city or ''
            r_address = ET.Element('address')
            r_address.text = (
                right_picking.sale_id.partner_shipping_id.street2 or ''
            ) + (right_picking.sale_id.partner_shipping_id.street or '') or ''
            receiver.append(r_name)
            receiver.append(r_mobile)
            receiver.append(r_prov)
            receiver.append(r_city)
            receiver.append(r_address)

            # Sender information (the shop's warehouse partner)
            sender = ET.Element('sender')
            s_name = ET.Element('name')
            s_name.text = shop.warehouse_id.partner_id.name
            s_prov = ET.Element('prov')
            s_prov.text = shop.warehouse_id.partner_id.state_id and shop.warehouse_id.partner_id.state_id.name or ''
            s_city = ET.Element('city')
            s_city.text = shop.warehouse_id.partner_id.city or ''
            s_mobile = ET.Element('mobile')
            s_mobile.text = shop.warehouse_id.partner_id.mobile or shop.warehouse_id.partner_id.phone or ''
            s_address = ET.Element('address')
            s_address.text = (shop.warehouse_id.partner_id.street2 or '') + (
                shop.warehouse_id.partner_id.street or '') or ''
            sender.append(s_name)
            sender.append(s_prov)
            sender.append(s_mobile)
            sender.append(s_city)
            sender.append(s_address)

            # Goods information: one <item> element per stock move.
            items = ET.Element('items')
            for move in right_picking.move_lines:
                item = ET.Element('item')
                itemName = ET.Element('itemName')
                itemName.text = move.product_id.name
                number = ET.Element('number')
                number.text = str(int(move.product_uom_qty))
                item.append(itemName)
                item.append(number)
                # BUG FIX: append inside the loop. The original appended
                # `item` only after the loop, so just the LAST product was
                # included (and an empty move_lines raised NameError).
                items.append(item)

            customerId = ET.Element('customerId')
            customerId.text = shop.clientid
            orderType = ET.Element('orderType')
            orderType.text = shop.order_type  # should be made configurable
            serviceType = ET.Element('serviceType')
            serviceType.text = shop.servicetype  # should be made configurable

            RequestOrder.append(items)
            RequestOrder.append(logisticProviderID)
            RequestOrder.append(clientID)
            RequestOrder.append(sender)
            RequestOrder.append(receiver)
            RequestOrder.append(orderType)
            RequestOrder.append(txLogisticID)
            RequestOrder.append(customerId)
            RequestOrder = ET.tostring(RequestOrder)
            post_val = self.prepare_post_vals(cr,
                                              uid,
                                              RequestOrder,
                                              shop,
                                              context=context)
            requese_xml_dict.update({right_picking.id: post_val})
        return requese_xml_dict
Exemple #18
0
    def import_data_types(self):
        """Import cenit data types from an uploaded JSON file.

        Decodes the base64-encoded ``b_file`` of the first record, parses
        it as JSON and, for every entry, creates or updates the matching
        ``cenit.data_type`` record together with its domain lines,
        triggers and lines.  When the data type already exists, its
        related records are removed and recreated from the file.

        :return: True on success.
        :raises UserError: if the file is not valid JSON.
        :raises exceptions.MissingError: if a referenced odoo model,
            namespace or schema cannot be found.
        """
        data_file = self[0].b_file
        irmodel_pool = self.env['ir.model']
        schema_pool = self.env['cenit.schema']
        namespace_pool = self.env['cenit.namespace']
        datatype_pool = self.env['cenit.data_type']
        line_pool = self.env['cenit.data_type.line']
        domain_pool = self.env['cenit.data_type.domain_line']
        trigger_pool = self.env['cenit.data_type.trigger']

        try:
            # b64decode is the non-deprecated spelling of decodestring
            # (works on Python 2 and 3); the stray debug ``print``
            # statement is replaced by proper logging.
            data_file = base64.b64decode(data_file)
            _logger.debug("Importing data types from: %s", data_file)
            json_data = json.loads(data_file)
        except Exception as e:
            _logger.exception('File unsuccessfully imported, due to format mismatch.')
            raise UserError(_('File not imported due to format mismatch or a malformed file. (Valid format is .json)\n\nTechnical Details:\n%s') % tools.ustr(e))

        for data in json_data:
            odoo_model = data['model']
            namespace = data['namespace']
            schema = data['schema']

            # Resolve the odoo model, namespace and schema names to
            # database ids, failing loudly when any of them is missing.
            candidates = irmodel_pool.search([('model', '=', odoo_model)])
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s module installed" % odoo_model
                )
            odoo_model = candidates.id

            candidates = namespace_pool.search([('name', '=', namespace)])
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s namespace in Namespaces" % namespace
                )
            namespace = candidates.id

            candidates = schema_pool.search([('name', '=', schema)])
            if not candidates:
                raise exceptions.MissingError(
                    "There is no %s schema in Schemas" % schema
                )
            schema = candidates.id

            vals = {
                'name': data['name'],
                'model': odoo_model,
                'namespace': namespace,
                'schema': schema,
            }
            dt = datatype_pool.search([('name', '=', data['name'])])
            if dt:
                # Update in place, then drop the related records so they
                # can be recreated below from the imported file.
                dt.write(vals)
                for related in (dt.domain, dt.triggers, dt.lines):
                    for record in related:
                        record.unlink()
            else:
                dt = datatype_pool.create(vals)

            for domain in data['domains']:
                domain_pool.create({
                    'data_type': dt.id,
                    'field': domain['field'],
                    'value': domain['value'],
                    'op': domain['op'],
                })

            for trigger in data['triggers']:
                trigger_pool.create({
                    'data_type': dt.id,
                    'name': trigger['name'],
                    'cron_lapse': trigger['cron_lapse'],
                    'cron_units': trigger['cron_units'],
                    'cron_restrictions': trigger['cron_restrictions'],
                    'cron_name': trigger['cron_name'],
                })

            for line in data['lines']:
                # A line may reference another data type by name; an empty
                # search result yields a falsy id, as in the original.
                candidate = datatype_pool.search(
                    [('name', '=', line['reference'])])
                line_pool.create({
                    'data_type': dt.id,
                    'name': line['name'],
                    'value': line['value'],
                    'line_type': line['line_type'],
                    'line_cardinality': line['line_cardinality'],
                    'primary': line['primary'],
                    'inlined': line['inlined'],
                    'reference': candidate.id,
                })
            dt.sync_rules()
        return True
Exemple #19
0
    def create_single_pdf(self, cr, uid, ids, data, report_xml, context=None):
        """ Overide this function to generate our py3o report

        Renders *report_xml* with the py3o templating engine for the
        records *ids* and returns ``(rendered_data, file_extension)``.
        Non-py3o reports are delegated to the parent implementation.

        When no active ``py3o.server`` exists, the template is rendered
        locally (optionally converted via libreoffice when
        ``USE_LOCAL_LIBREOFFICE`` is set); otherwise the fusion server
        performs the rendering/conversion.

        :raises exceptions.MissingError: no fusion server configured and
            local libreoffice conversion is disabled.
        :raises exceptions.Warning: the fusion server returned an error.
        """
        if report_xml.report_type != 'py3o':
            return super(Py3oParser, self).create_single_pdf(
                cr, uid, ids, data, report_xml, context=context
            )

        pool = registry(cr.dbname)
        # Look up the report's xml_id (if any) so extender hooks
        # registered against it can be run on the parser context.
        model_data_ids = pool['ir.model.data'].search(
            cr, uid, [
                ('model', '=', 'ir.actions.report.xml'),
                ('res_id', '=', report_xml.id),
            ]
        )

        xml_id = None
        if model_data_ids:
            model_data = pool['ir.model.data'].browse(
                cr, uid, model_data_ids[0], context=context
            )
            xml_id = '%s.%s' % (model_data.module, model_data.name)

        parser_instance = self.parser(cr, uid, self.name2, context=context)
        parser_instance.set_context(
            self.getObjects(cr, uid, ids, context),
            data, ids, report_xml.report_type
        )

        if xml_id in _extender_functions:
            for fct in _extender_functions[xml_id]:
                fct(pool, cr, uid, parser_instance.localcontext, context)

        tmpl_data = self.get_template(report_xml)

        in_stream = StringIO(tmpl_data)
        out_stream = StringIO()
        template = Template(in_stream, out_stream)
        # Compile the template's user expressions into a structure
        # describing the data the rendering will need.
        expressions = template.get_all_user_python_expression()
        py_expression = template.convert_py3o_to_python_ast(expressions)
        convertor = Py3oConvertor()
        data_struct = convertor(py_expression)

        filetype = report_xml.py3o_fusion_filetype

        datadict = parser_instance.localcontext

        parsed_datadict = data_struct.render(datadict)

        fusion_server_obj = pool.get('py3o.server')
        fusion_server_ids = fusion_server_obj.search(
            cr, uid, [('is_active', '=', True)], context=context, limit=1
        )
        if not fusion_server_ids:
            if filetype.fusion_ext == report_xml.py3o_template_id.filetype:
                # No format conversion is needed, render the template directly
                template.render(parsed_datadict)
                res = out_stream.getvalue()

            else:
                if USE_LOCAL_LIBREOFFICE:
                    import sh
                    import tempfile

                    template.render(parsed_datadict)
                    res = out_stream.getvalue()

                    # Persist the rendered template so libreoffice can
                    # convert it; close the handle before spawning the
                    # converter so all data is flushed to disk.
                    arq = tempfile.NamedTemporaryFile(delete=False)
                    arq.write(res)
                    arq.flush()
                    arq.close()

                    res_arq_name = arq.name + '.' + filetype.fusion_ext

                    sh.libreoffice('--headless', '--invisible',
                                   '--convert-to', filetype.fusion_ext,
                                   '--outdir', '/tmp', arq.name)

                    # Read the converted document in binary mode and close
                    # the handle deterministically (the original used the
                    # Python-2-only file() builtin in text mode and leaked
                    # the file handle).
                    with open(res_arq_name, 'rb') as converted:
                        res = converted.read()

                    os.remove(res_arq_name)
                    os.remove(arq.name)

                else:
                    raise exceptions.MissingError(
                        _(u"No Py3o server configuration found")
                    )

        else:  # Call py3o.server to render the template in the desired format
            fusion_server_id = fusion_server_ids[0]

            fusion_server = fusion_server_obj.browse(
                cr, uid, fusion_server_id, context=context
            )
            in_stream.seek(0)
            files = {
                'tmpl_file': in_stream,
            }
            fields = {
                "targetformat": filetype.fusion_ext,
                "datadict": json.dumps(parsed_datadict),
                "image_mapping": "{}",
            }
            r = requests.post(fusion_server.url, data=fields, files=files)
            if r.status_code != 200:
                # server says we have an issue... let's tell that to enduser
                raise exceptions.Warning(
                    _('Fusion server error %s') % r.text,
                )

            # Here is a little joke about Odoo
            # we do nice chunked reading from the network...
            chunk_size = 1024
            with NamedTemporaryFile(
                    suffix=filetype.human_ext,
                    prefix='py3o-template-'
            ) as fd:
                for chunk in r.iter_content(chunk_size):
                    fd.write(chunk)
                fd.seek(0)
                # ... but odoo wants the whole data in memory anyways :)
                res = fd.read()

        return res, filetype.human_ext