def check_quantity(self):
    """Constraint: every facility line must have a strictly positive quantity.

    Iterates the recordset (like the other constraints in this module) so
    the check is also correct when it fires on a multi-record set.

    :raises ValidationError: if any record's quantity is zero or negative
    """
    for rec in self:
        if rec.quantity <= 0.0:
            raise ValidationError("Enter proper Quantity in Facilities!")
def create_count_sample_table(self, param, lot_size_desc): url = 'http://127.0.0.1:5000/GBT2828.1-2012/read_sample_table' headers = {'content-type': 'application/json'} response = None try: response = requests.post(url, data=json.dumps(param), headers=headers) except Exception as e: print '----Error in method model_sample_plan.create_count_sample_table: the request failed' raise ValidationError('请求服务失败或服务器未开启') if response: # 请求返回的数据 response_data = response.json() if response_data['success']: request_data = response_data['result']['lines'] if request_data is not None: lot_size_desc_len = len(lot_size_desc) lines = list() if self.sample_type_code in [ 'Integer_one_sample', 'Fraction_one_sample_fixed', 'Fraction_one_sample_unfixed' ]: for index in range(lot_size_desc_len): line = { 'lot_size': lot_size_desc[index], 'sample_code': request_data[index][3], 'normal_sample_size': request_data[index][0][2], 'normal_ac': request_data[index][0][0], 'normal_re': request_data[index][0][1], 'tightened_sample_size': request_data[index][1][2], 'tightened_ac': request_data[index][1][0], 'tightened_re': request_data[index][1][1], 'reduced_sample_size': request_data[index][2][2], 'reduced_ac': request_data[index][2][0], 'reduced_re': request_data[index][2][1], } lines.append(line) self.update({'sample_plan_ids': lines}) elif self.sample_type_code == 'Integer_two_sample': for index in range(lot_size_desc_len): # 如果读取的 * ,就显示*, line1 = dict() line2 = dict() line1['lot_size'] = lot_size_desc[index] line1['sample_code'] = request_data[index][3] line1['sample_time'] = '第一' line2['sample_time'] = '第二' line2['lot_size'] = False # r如果要返回样本量和累积样本量,需要重写 查表方法 if request_data[index][0][0] == '*': line1['normal_sample_size'] = False line1['normal_cumulative_sample_size'] = False line1['normal_ac'] = '*' line1['normal_re'] = '*' line2['normal_sample_size'] = False line2['normal_cumulative_sample_size'] = False line2['normal_ac'] = '*' line2['normal_re'] = '*' else: line1['normal_sample_size'] = 
request_data[ index][0][4] line1['normal_cumulative_sample_size'] = int( request_data[index][0][4]) line1['normal_ac'] = request_data[index][0][0] line1['normal_re'] = request_data[index][0][1] line2['normal_sample_size'] = request_data[ index][0][4] line2['normal_cumulative_sample_size'] = int( request_data[index][0][4]) * 2 line2['normal_ac'] = request_data[index][0][2] line2['normal_re'] = request_data[index][0][3] if request_data[index][1][0] == '*': line1['tightened_sample_size'] = False line1['tighted_cumulative_sample_size'] = False line1['tightened_ac'] = '*' line1['tightened_re'] = '*' line2['tightened_sample_size'] = False line2['tighted_cumulative_sample_size'] = False line2['tightened_ac'] = '*' line2['tightened_re'] = '*' else: line1['tightened_sample_size'] = request_data[ index][1][4] line1['tighted_cumulative_sample_size'] = int( request_data[index][1][4]) line1['tightened_ac'] = request_data[index][1][ 0] line1['tightened_re'] = request_data[index][1][ 1] line2['tightened_sample_size'] = request_data[ index][1][4] line2['tighted_cumulative_sample_size'] = int( request_data[index][1][4]) * 2 line2['tightened_ac'] = request_data[index][1][ 2] line2['tightened_re'] = request_data[index][1][ 3] if request_data[index][2][0] == '*': line1['reduced_sample_size'] = False line2['reduced_cumulative_sample_size'] = False line1['reduced_ac'] = '*' line1['reduced_re'] = '*' line2['reduced_sample_size'] = False line2['tighted_cumulative_sample_size'] = False line2['reduced_ac'] = '*' line2['reduced_re'] = '*' else: line1['reduced_sample_size'] = request_data[ index][2][4] line2['reduced_cumulative_sample_size'] = int( request_data[index][2][4]) line1['reduced_ac'] = request_data[index][2][0] line1['reduced_re'] = request_data[index][2][1] line2['reduced_sample_size'] = request_data[ index][2][4] line2['tighted_cumulative_sample_size'] = int( request_data[index][2][4]) * 2 line2['reduced_ac'] = request_data[index][2][2] line2['reduced_re'] = 
request_data[index][2][3] lines.append(line1) lines.append(line2) self.update({'sample_plan_ids': lines}) elif self.sample_type_code == 'Integer_many_sample': for index in range(lot_size_desc_len): # 如果读取的 * ,就显示*, line1 = dict() line2 = dict() line3 = dict() line4 = dict() line5 = dict() line1['lot_size'] = lot_size_desc[index] line2['lot_size'] = False line3['lot_size'] = False line4['lot_size'] = False line5['lot_size'] = False line1['sample_code'] = request_data[index][3] line1['sample_time'] = '第一' line2['sample_time'] = '第二' line3['sample_time'] = '第三' line4['sample_time'] = '第四' line5['sample_time'] = '第五' if request_data[index][0][0] == '*': line1['normal_sample_size'] = False line1['normal_cumulative_sample_size'] = False line1['normal_ac'] = '*' line1['normal_re'] = '*' line2['normal_sample_size'] = False line2['normal_cumulative_sample_size'] = False line2['normal_ac'] = '*' line2['normal_re'] = '*' line3['normal_sample_size'] = False line3['normal_cumulative_sample_size'] = False line3['normal_ac'] = '*' line3['normal_re'] = '*' line3['normal_cumulative_sample_size'] = False line4['normal_sample_size'] = False line4['normal_cumulative_sample_size'] = False line4['normal_ac'] = '*' line4['normal_re'] = '*' line5['normal_sample_size'] = False line5['normal_cumulative_sample_size'] = False line5['normal_ac'] = '*' line5['normal_re'] = '*' elif request_data[index][0][0] == '++': line1['normal_sample_size'] = False line1['normal_cumulative_sample_size'] = False line1['normal_ac'] = '++' line1['normal_re'] = '++' line2['normal_sample_size'] = False line2['normal_cumulative_sample_size'] = False line2['normal_ac'] = '++' line2['normal_re'] = '++' line3['normal_sample_size'] = False line3['normal_cumulative_sample_size'] = False line3['normal_ac'] = '++' line3['normal_re'] = '++' line4['normal_sample_size'] = False line4['normal_cumulative_sample_size'] = False line4['normal_ac'] = '++' line4['normal_re'] = '++' line5['normal_sample_size'] = False 
line5['normal_cumulative_sample_size'] = False line5['normal_ac'] = '++' line5['normal_re'] = '++' else: line1['normal_sample_size'] = request_data[ index][0][10] line1['normal_cumulative_sample_size'] = int( request_data[index][0][10]) * 1 line1['normal_ac'] = '#' if request_data[index][0][0] == -1 else \ request_data[index][0][ 0] line1['normal_re'] = request_data[index][0][1] line2['normal_sample_size'] = request_data[ index][0][10] line2['normal_cumulative_sample_size'] = int( request_data[index][0][10]) * 2 line2['normal_ac'] = request_data[index][0][2] line2['normal_re'] = request_data[index][0][3] line3['normal_sample_size'] = request_data[ index][0][10] line3['normal_cumulative_sample_size'] = int( request_data[index][0][10]) * 3 line3['normal_ac'] = request_data[index][0][4] line3['normal_re'] = request_data[index][0][5] line4['normal_sample_size'] = request_data[ index][0][10] line4['normal_cumulative_sample_size'] = int( request_data[index][0][10]) * 4 line4['normal_ac'] = request_data[index][0][6] line4['normal_re'] = request_data[index][0][7] line5['normal_sample_size'] = request_data[ index][0][10] line5['normal_cumulative_sample_size'] = int( request_data[index][0][10]) * 5 line5['normal_ac'] = request_data[index][0][8] line5['normal_re'] = request_data[index][0][9] if request_data[index][1][0] == '*': line1['tightened_sample_size'] = False line1['tighted_cumulative_sample_size'] = False line1['tightened_ac'] = '*' line1['tightened_re'] = '*' line2['tightened_sample_size'] = False line2['tighted_cumulative_sample_size'] = False line2['tightened_ac'] = '*' line2['tightened_re'] = '*' line3['tightened_sample_size'] = False line3['tighted_cumulative_sample_size'] = False line3['tightened_ac'] = '*' line3['tightened_re'] = '*' line4['tightened_sample_size'] = False line4['tighted_cumulative_sample_size'] = False line4['tightened_ac'] = '*' line4['tightened_re'] = '*' line5['tightened_sample_size'] = False line5['tighted_cumulative_sample_size'] = False 
line5['tightened_ac'] = '*' line5['tightened_re'] = '*' elif request_data[index][1][0] == '++': line1['tightened_sample_size'] = False line1['tighted_cumulative_sample_size'] = False line1['tightened_ac'] = '++' line1['tightened_re'] = '++' line2['tightened_sample_size'] = False line2['tighted_cumulative_sample_size'] = False line2['tightened_ac'] = '++' line2['tightened_re'] = '++' line3['tightened_sample_size'] = False line3['tighted_cumulative_sample_size'] = False line3['tightened_ac'] = '++' line3['tightened_re'] = '++' line4['tightened_sample_size'] = False line4['tighted_cumulative_sample_size'] = False line4['tightened_ac'] = '++' line4['tightened_re'] = '++' line5['tightened_sample_size'] = False line5['tighted_cumulative_sample_size'] = False line5['tightened_ac'] = '++' line5['tightened_re'] = '++' else: line1['tightened_sample_size'] = request_data[ index][1][10] line1['tighted_cumulative_sample_size'] = int( request_data[index][1][10]) * 1 line1['tightened_ac'] = '#' if request_data[index][1][0] == -1 else \ request_data[index][1][0] line1['tightened_re'] = request_data[index][1][ 1] line2['tightened_sample_size'] = request_data[ index][1][10] line2['tighted_cumulative_sample_size'] = int( request_data[index][1][10]) * 2 line2['tightened_ac'] = request_data[index][1][ 2] line2['tightened_re'] = request_data[index][1][ 3] line3['tightened_sample_size'] = request_data[ index][1][10] line3['tighted_cumulative_sample_size'] = int( request_data[index][1][10]) * 3 line3['tightened_ac'] = request_data[index][1][ 4] line3['tightened_re'] = request_data[index][1][ 5] line4['tightened_sample_size'] = request_data[ index][1][10] line4['tighted_cumulative_sample_size'] = int( request_data[index][1][10]) * 4 line4['tightened_ac'] = request_data[index][1][ 6] line4['tightened_re'] = request_data[index][1][ 7] line5['tightened_sample_size'] = request_data[ index][1][10] line5['tighted_cumulative_sample_size'] = int( request_data[index][1][10]) * 5 
line5['tightened_ac'] = request_data[index][1][ 8] line5['tightened_re'] = request_data[index][1][ 9] if request_data[index][2][0] == '*': line1['reduced_sample_size'] = False line1['reduced_cumulative_sample_size'] = False line1['reduced_ac'] = '*' line1['reduced_re'] = '*' line2['reduced_sample_size'] = False line2['reduced_cumulative_sample_size'] = False line2['reduced_ac'] = '*' line2['reduced_re'] = '*' line3['reduced_sample_size'] = False line3['reduced_cumulative_sample_size'] = False line3['reduced_ac'] = '*' line3['reduced_re'] = '*' line4['reduced_sample_size'] = False line4['reduced_cumulative_sample_size'] = False line4['reduced_ac'] = '*' line4['reduced_re'] = '*' line5['reduced_sample_size'] = False line5['reduced_cumulative_sample_size'] = False line5['reduced_ac'] = '*' line5['reduced_re'] = '*' elif request_data[index][2][0] == '++': line1['reduced_sample_size'] = False line1['reduced_cumulative_sample_size'] = False line1['reduced_ac'] = '++' line1['reduced_re'] = '++' line2['reduced_sample_size'] = False line2['reduced_cumulative_sample_size'] = False line2['reduced_ac'] = '++' line2['reduced_re'] = '++' line3['reduced_sample_size'] = False line3['reduced_cumulative_sample_size'] = False line3['reduced_ac'] = '++' line3['reduced_re'] = '++' line4['reduced_sample_size'] = False line4['reduced_cumulative_sample_size'] = False line4['reduced_ac'] = '++' line4['reduced_re'] = '++' line5['reduced_sample_size'] = False line5['reduced_cumulative_sample_size'] = False line5['reduced_ac'] = '++' line5['reduced_re'] = '++' else: line1['reduced_sample_size'] = request_data[ index][2][10] line1['reduced_cumulative_sample_size'] = int( request_data[index][2][10]) * 1 line1['reduced_ac'] = '#' if request_data[index][2][0] == -1 else \ request_data[index][2][0] line1['reduced_re'] = request_data[index][2][1] line2['reduced_sample_size'] = request_data[ index][2][10] line2['reduced_cumulative_sample_size'] = int( request_data[index][2][10]) * 2 
line2['reduced_ac'] = request_data[index][2][2] line2['reduced_re'] = request_data[index][2][3] line3['reduced_sample_size'] = request_data[ index][2][10] line3['reduced_cumulative_sample_size'] = int( request_data[index][2][10]) * 3 line3['reduced_ac'] = request_data[index][2][4] line3['reduced_re'] = request_data[index][2][5] line4['reduced_sample_size'] = request_data[ index][2][10] line4['reduced_cumulative_sample_size'] = int( request_data[index][2][10]) * 4 line4['reduced_ac'] = request_data[index][2][6] line4['reduced_re'] = request_data[index][2][7] line5['reduced_sample_size'] = request_data[ index][2][10] line5['reduced_cumulative_sample_size'] = int( request_data[index][2][10]) * 5 line5['reduced_ac'] = request_data[index][2][8] line5['reduced_re'] = request_data[index][2][9] lines.append(line1) lines.append(line2) lines.append(line3) lines.append(line4) lines.append(line5) self.update({'sample_plan_ids': lines}) else: print '-----------%s----------' % (response_data['message']) raise ValidationError('查询结果返回为 None') else: raise ValidationError('查询结果返回为 None')
def write(self, vals):
    """Guard: a product with nutrition enabled must have a positive norm weight.

    The effective values are taken from ``vals`` when present, falling back
    to the current record values.

    :param vals: dict of field values being written
    :raises ValidationError: if nutrition is enabled while the norm weight
        is not strictly positive
    """
    nutrition = vals.get('nutrition', self.nutrition)
    norm_weight = vals.get('norm_weight', self.norm_weight)
    # BUGFIX: the original only rejected norm_weight == 0; a negative
    # weight is just as invalid per the error message ("greater than 0").
    if nutrition and norm_weight <= 0:
        raise ValidationError("Norm weight must be greater than 0")
    return super(ProductTemplate, self).write(vals)
def compute_rm_req(self):
    """Explode the selected finished goods through their newest BOM and
    open the 'advance raw material' wizard pre-filled with the required
    raw-material lines.

    :returns: act_window dict for the raw-material form view
    :raises ValidationError: when no finished goods are selected, or when
        any selected product has no BOM
    """
    vals = []
    non_bom_list = []
    if not len(self.advance_finishgoods_one2many):
        raise ValidationError("No items Included")
    for record in self.advance_finishgoods_one2many:
        # Newest BOM first; only bom[0] is used below.
        # NOTE(review): record.product_id.id is compared against
        # product_tmpl_id — assumes product_id holds a product.template;
        # confirm against the one2many's model.
        bom = self.env['mrp.bom'].search(
            [('product_tmpl_id', '=', record.product_id.id)],
            order="create_date desc")
        location = self.env.ref('stock.stock_location_stock')
        bom_product = False
        if len(bom):
            bom_product = True
            for bom_line in bom[0].bom_line_ids:
                for prod_id in bom_line.product_id.product_tmpl_id:
                    prod_sup = False
                    # NOTE(review): only the last supplier of the loop is
                    # kept in prod_sup — presumably intentional; confirm.
                    for supplier in prod_id.product_supplier_one2many:
                        prod_sup = self.env['product.supplierinfo'].search(
                            [('name', '=', supplier.name[0].id)])
                    vals.append([
                        0, 0, {
                            'product_id':
                                bom_line.product_id.product_tmpl_id.id,
                            'req_qty':
                                (bom_line.product_qty * record.net_req_qty),
                            'net_req_qty': (bom_line.product_qty),
                            'supplier_id':
                                prod_sup[0].name.id if prod_sup else False,
                            # 'moq':bom_line.product_id.moq,
                            'location_id':
                                location and location.id or False,
                        }
                    ])
        if not bom_product:
            non_bom_list.append(str(record.product_id.name))
    # NOTE(review): the leading "\ " is a literal backslash+space carried
    # over from the original source string.
    bom_string = "\ No BOM available for Following Product(s) :\n\n"
    if non_bom_list:
        print "IIIIIIIIIIIIIIIIIIIIII BOM Not Available"
        raise ValidationError(bom_string + "\n".join(non_bom_list))
    ctx = self._context.copy()
    ctx.update({
        'default_fg_plan': self.id,
        'default_date': self.date,
        'default_average_forecast': self.average_forecast,
        'default_advance_rawmaterial_one2many': vals
    })
    imd = self.env['ir.model.data']
    action = imd.xmlid_to_object('assemble_pro.action_advance_rm_material')
    form_view_id = imd.xmlid_to_res_id(
        'assemble_pro.view_advance_rm_material_form')
    result = {
        'name': action.name,
        'help': action.help,
        'type': action.type,
        'views': [[form_view_id, 'form']],
        'target': action.target,
        'context': ctx,
        'res_model': action.res_model,
    }
    return result
def _check_cantidad(self): for record in self: if record.cantidad < 1: raise ValidationError("La cantidad debe ser mayor que 0")
def _check_sequence(self): for line in self: if line.sequence <= 0: raise ValidationError(_('Sequence must be greater then zero.')) if line.length <= 0: raise ValidationError(_('Length must be greater then zero.'))
def check_key_or_pass(self):
    """Constraint: SSH credentials require either a key file or a password."""
    has_credentials = self.key_filename or self.password
    if not has_credentials:
        raise ValidationError(_('You must set a Key filename or a password'))
def _prepare_account_move_line(self, cr, uid, move, qty, cost,
                               credit_account_id, debit_account_id,
                               context=None):
    """Generate the account.move.line values to post to track the stock
    valuation difference due to the processing of the given quant.

    :param cr: database cursor (old-style API)
    :param uid: id of the acting user
    :param move: stock.move browse record being valued
    :param qty: quantity being processed
    :param cost: unit cost of the processed quantity
    :param credit_account_id: id of the account to credit
    :param debit_account_id: id of the account to debit
    :param context: optional dict; key 'force_valuation_amount' overrides
        the computed valuation amount
    :returns: list of two (0, 0, vals) one2many commands — debit line
        first, credit line second
    :raises ValidationError: when the rounded valuation amount is zero
        (points at a costing-method / standard-price misconfiguration)
    """
    if context is None:
        context = {}
    currency_obj = self.pool.get('res.currency')
    if context.get('force_valuation_amount'):
        valuation_amount = context.get('force_valuation_amount')
    else:
        if move.product_id.cost_method == 'average':
            # Average costing: use the move cost only for external ->
            # internal moves; otherwise value at standard price.
            valuation_amount =\
                cost if move.location_id.usage != 'internal' and\
                move.location_dest_id.usage == 'internal' else\
                move.product_id.standard_price
        else:
            valuation_amount =\
                cost if move.product_id.cost_method ==\
                'real' else move.product_id.standard_price
    # the standard_price of the product may be in another decimal
    # precision, or not compatible with the coinage of
    # the company currency... so we need to use round()
    # before creating the accounting entries.
    valuation_amount = currency_obj.round(cr, uid,
                                          move.company_id.currency_id,
                                          valuation_amount * qty)
    # check that all data is correct
    if move.company_id.currency_id.is_zero(valuation_amount):
        raise ValidationError(
            _("The found valuation amount for product %s is zero. "
              "Which means there is probably a configuration error. "
              "Check the costing method and the "
              "standard price") % (move.product_id.name, ))
    partner_id = (move.picking_id.partner_id and
                  self.pool.get('res.partner')._find_accounting_partner(
                      move.picking_id.partner_id).id) or False
    # Cesar Barron 09 Ago 2016 ####
    # Build the journal-item reference and label depending on the move's
    # origin document (picking, production order, raw-material consumption).
    reference = False
    name = False
    # trace = move.location_id.name + "->" + move.location_dest_id.name
    if move.picking_id and not move.production_id and not move.raw_material_production_id:
        reference = move.picking_id.name  # + " " + trace or False
        name = move.picking_id.name + " " + move.name
    elif move.production_id:
        reference = move.production_id.name  # + " " + trace or False
        name = move.name + \
            ' [' + move.product_id.default_code + '] ' + move.product_id.name
    elif move.raw_material_production_id:
        reference = move.raw_material_production_id.name  # + " " + trace or False
        name = move.name + \
            ' [' + move.product_id.default_code + '] ' + move.product_id.name
    else:
        reference = "W/O Reference "
        name = move.product_id.name  # + trace
    # Analytic account: prefer the source location's, else the destination's.
    analytic_id = move.location_id.account_analytic_id.id or False
    if not analytic_id:
        analytic_id = move.location_dest_id.account_analytic_id.id or False
    # Cesar Barron 09 Ago 2016 ####
    credit_line_vals = {
        'name': name,
        'product_id': move.product_id.id,
        'quantity': qty,
        'product_uom_id': move.product_id.uom_id.id,
        # 'ref': move.picking_id and move.picking_id.name or False,
        'ref': reference,
        'analytic_account_id': analytic_id,
        'partner_id': partner_id,
        'credit': valuation_amount > 0 and valuation_amount or 0,
        'debit': valuation_amount < 0 and -valuation_amount or 0,
        'account_id': credit_account_id,
    }
    # For internal inter-warehouse transfers, the debit line switches to
    # the destination location's analytic account.
    if move.location_id.usage == 'internal' and move.location_dest_id.usage == 'internal' and move.location_id.stock_warehouse_id.id != move.location_dest_id.stock_warehouse_id.id:
        analytic_id = move.location_dest_id.account_analytic_id.id or False
    debit_line_vals = {
        'name': name,
        'product_id': move.product_id.id,
        'quantity': qty,
        'product_uom_id': move.product_id.uom_id.id,
        # 'ref': move.picking_id and move.picking_id.name or False,
        'ref': reference,
        'analytic_account_id': analytic_id,
        'partner_id': partner_id,
        'debit': valuation_amount > 0 and valuation_amount or 0,
        'credit': valuation_amount < 0 and -valuation_amount or 0,
        'account_id': debit_account_id,
    }
    return [(0, 0, debit_line_vals), (0, 0, credit_line_vals)]
def _get_accounting_data_for_valuation(self, cr, uid, move, context=None):
    """Return the accounts and journal to use to post Journal Entries for
    the real-time valuation of the quant.

    :param context: context dictionary that can explicitly mention the
        company to consider via the 'force_company' key
    :returns: journal_id, source account id, destination account id,
        valuation account id
    :raise: openerp.exceptions.UserError if any mandatory account or
        journal is not defined.
    """
    product_obj = self.pool.get('product.template')
    accounts = product_obj.browse(cr, uid,
                                  move.product_id.product_tmpl_id.id,
                                  context).get_product_accounts()
    # Location-level overrides take precedence over the product category's
    # stock input/output accounts.
    if move.location_id.valuation_out_account_id:
        acc_src = move.location_id.valuation_out_account_id.id
    else:
        acc_src = accounts['stock_input'].id
    if move.location_dest_id.valuation_in_account_id:
        acc_dest = move.location_dest_id.valuation_in_account_id.id
    else:
        acc_dest = accounts['stock_output'].id
    # Cesar Barron 09 Ago 2016 ##########
    # acc_valuation = accounts.get('stock_valuation', False)
    # Choose the valuation account from the locations, by picking type.
    pick_type = move.picking_id.picking_type_id.code or False
    if pick_type:
        if pick_type == 'incoming':
            if move.location_id.usage == 'customer':
                # Customer return: counterpart is the customer location's
                # account.
                acc_dest = move.location_id.account_id.id or False
            acc_valuation = move.location_dest_id.account_id or False
        elif pick_type == 'outgoing':
            acc_valuation = move.location_id.account_id or False
            acc_dest = move.location_dest_id.account_id.id or False
            if move.location_dest_id.usage == 'supplier':
                acc_src = move.location_dest_id.account_id.id
            if move.location_id.usage not in (
                    'internal', 'transit', 'customer') and \
                    move.location_dest_id.usage == 'internal':
                # NOTE(review): acc_valuation may be False here, making
                # acc_valuation.id crash — presumably these locations
                # always carry an account; confirm.
                acc_src = acc_valuation.id
                acc_valuation = move.location_dest_id.account_id
        elif pick_type == 'internal':
            acc_valuation = move.location_dest_id.account_id or False
        else:
            acc_valuation = move.location_id.account_id or False
    else:
        acc_valuation = move.location_id.account_id or False
    # Adjustment pickings (move types E4/S4) force the account configured
    # on the adjustment type.
    if move.picking_id.stock_move_type_id:
        move_type = move.picking_id.stock_move_type_id.code
        adjustment = move.picking_id.type_adjustment_id
        if move_type in ('E4', 'S4') and not adjustment:
            raise ValidationError(_('Specify an adjustment type'))
        if move_type == 'E4':
            acc_src = adjustment.account_id.id
        if move_type == 'S4':
            acc_dest = adjustment.account_id.id
    if move.inventory_id:
        # Inventory adjustments value against the destination location.
        acc_valuation = move.location_dest_id.account_id or False
    if not acc_valuation:
        # Fall back to the product category's valuation account.
        acc_valuation = accounts.get('stock_valuation', False)
    # Cesar Barron 09 Ago 2016 ##########
    if acc_valuation:
        acc_valuation = acc_valuation.id
    if not accounts.get('stock_journal', False):
        raise ValidationError(
            _('You don\'t have any stock journal defined on '
              'your product category, check if you have installed '
              'a chart of accounts'))
    if not acc_src:
        raise ValidationError(
            _('Cannot find a stock input account for the '
              'product %s. You must define one on the product '
              'category, or on the location, before processing this '
              'operation.') % (move.product_id.name))
    if not acc_dest:
        raise ValidationError(
            _('Cannot find a stock output account for the '
              'product %s. You must define one on the product category, '
              'or on the location, before processing this '
              'operation.') % (move.product_id.name))
    if not acc_valuation:
        raise ValidationError(
            _('You don\'t have any stock valuation account defined on '
              'your product category. You must define one before '
              'processing this operation.'))
    journal_id = accounts['stock_journal'].id
    return journal_id, acc_src, acc_dest, acc_valuation
def _check_taxbranch(self): if self.taxbranch and len(self.taxbranch) != 5: raise ValidationError(_("Tax Branch must be 5 digits"))
def _check_partner_name(self):
    """Constraint: partner names must be unique.

    Uses ``search_count`` instead of materialising the matching records
    and reading ``_ids``, and iterates ``self`` so the constraint is also
    correct on multi-record sets.

    :raises ValidationError: if more than one partner shares a name
    """
    for rec in self:
        if self.search_count([('name', '=', rec.name)]) > 1:
            raise ValidationError("Partner Name must be unique!")
def _check_vat(self): if self.vat and len(self.vat) != 13: raise ValidationError(_("Tax ID must be 13 digits!"))
def split_row_col(pos):
    """Split a spreadsheet cell reference like ``"AB12"`` into its column
    letters and numeric row, e.g. ``("AB", 12)``.
    """
    parsed = re.match(r"([a-z]+)([0-9]+)", pos, re.I)
    if parsed is None:
        raise ValidationError(_('Position %s is not valid') % pos)
    letters = parsed.group(1)
    digits = parsed.group(2)
    return letters, int(digits)
def _check_recursive_parent_child(self): for rec in self: if rec.id in rec.parent_child_ids.ids: raise ValidationError( 'Error! A code cannot be a child of itself.') return True
def _check_bulstat(self):
    """Constraint: the Bulgarian UIC (BULSTAT) must pass the checksum test."""
    # A legacy eik_checker(self.bulstat) call used to live here.
    uic_is_valid = bg_uic_checker(self.bg_uic)
    if not uic_is_valid:
        raise ValidationError(_("BULSTAT isn't valid"))
def _check_location(self): if self.location_dest_id == self.location_id: raise ValidationError(_('Source and Destination Location ' 'can not be the same location'))
def _validate_approve_user(self): if self.env.user not in \ self.env.user.company_id.bank_account_approver_ids: raise ValidationError( _('You are not allowed to approve / unapprove bank account!')) return
def _check_future_qty(self):
    """Constraint: the ordered quantity may not exceed the forecast quantity."""
    for line in self:
        # float_compare(...) == 1 means product_uom_qty > future_qty
        # at 2 decimal places.
        exceeds = float_compare(line.product_uom_qty, line.future_qty, 2) == 1
        if exceeds:
            raise ValidationError(
                _('%s is not enough!') % line.product_id.name)
def make_invoices(self, ):
    """Create invoices for the selected 'manual' sale orders, honouring
    each invoice partner's grouping preference, and return an act_window
    showing the newly created invoices.

    :returns: ir.actions.act_window dict with a domain on the new
        invoice ids
    :raises ValidationError: when a selected order is not in state 'manual'
    """
    sale_mdl = self.env['sale.order']
    new_invoices = []
    # NOTE(review): self.active_ids is called as a method here but used as
    # an attribute below ('self.active_ids.action_invoice_create') — one
    # of the two spellings is presumably wrong; confirm.
    _logger.debug('Working on orders %s', self.active_ids())
    for sale_order in self.active_ids():
        if sale_order.state != 'manual':
            raise ValidationError(
                "You shouldn't manually invoice the following order %s" %
                (sale_order.name))
    if self.grouped == 'partner_preference':
        # Split the orders by the invoice partner's grouping preference.
        invoice_groups = defaultdict(list)
        ungrouped = []
        for order in self.active_ids():
            if order.partner_invoice_id.invoice_grouping == 'grouped':
                _logger.debug(
                    '%s chose grouped invoicing, adding to invoice_groups',
                    order.partner_invoice_id)
                invoice_groups[order.partner_invoice_id.id].append(
                    order.id)
            else:
                _logger.debug(
                    '%s chose sep invoicing, adding to ungrouped',
                    order.partner_invoice_id)
                ungrouped.append(order.id)
        if len(ungrouped):
            _logger.debug('Creating ungrouped invoices for %s', ungrouped)
            sale_mdl.browse(ungrouped).action_invoice_create(
                grouped=False, date_invoice=self.invoice_date)
        # One grouped invoice per invoice partner.
        for orders in invoice_groups.values():
            _logger.debug('Creating ungrouped invoices for %s', orders)
            sale_mdl.browse(orders).action_invoice_create(
                grouped=True, date_invoice=self.invoice_date)
    else:
        _logger.debug('Not invoicing by partner preference')
        self.active_ids.action_invoice_create(
            grouped=(self.grouped == 'grouped'),
            date_invoice=self.invoice_date,
        )
    # Collect every invoice now linked to the processed orders.
    for sale_order in self.active_ids():
        for invoice in sale_order.invoice_ids:
            new_invoices.append(invoice.id)
    _logger.debug('Created invoices: %s', new_invoices)
    # Dummy call to workflow, will not create another invoice but bind the new invoice to the subflow
    manual_orders = [
        o.id for o in self.active_ids() if o.order_policy == 'manual'
    ]
    sale_mdl.browse(manual_orders).signal_workflow('manual_invoice')
    result = self.env['ir.model.data'].get_object_reference(
        'account',
        'action_invoice_tree1',
    )
    # 'id' shadows the builtin; kept as-is to leave the code unchanged.
    id = result and result[1] or False
    result = self.env['ir.actions.act_window'].browse(id).read()[0]
    result['domain'] = "[('id','in', [" + ','.join(map(
        str, new_invoices)) + "])]"
    _logger.debug('Invoice domain %s', result['domain'])
    return result
def _check_numbers(self): if self.number_from >= self.number_to: raise ValidationError( _(u'El Número desde debe ser menor que Número hasta!'))
def unlink(self):
    """Allow deleting a server only while it is draft or cancelled."""
    deletable_states = ('draft', 'cancel')
    if self.state not in deletable_states:
        raise ValidationError(_(
            'You cannot delete a server which is not draft or cancelled.'))
    return super(server, self).unlink()
def validate_record(self):
    """Validate connector sale records (partner, employee, donee, price,
    product and payment-acquirer data) before further processing.

    :raises ValidationError: on the first rule a record violates
    """
    for r in self:
        logger.info("fson_connector_sale() Validate record %s" % r.id)
        # Validate Partner: company records must not carry person fields.
        if r.is_company:
            if r.firstname or r.name_zwei or r.birthdate_web:
                raise ValidationError(
                    "Fields 'firstname', 'name_zwei' and 'birthdate_web' are not allowed "
                    "if 'is_company' is set!")
        # Validate Employee: employee fields require a company record.
        employee_fields = self.get_fields_by_con_group('employee')
        if any(r[fname] for fname in employee_fields):
            if not r.is_company:
                raise ValidationError(
                    "Employee fields are only allowed if 'is_company' is set!"
                    "These fields must be empty: %s" % employee_fields)
            if not r.e_lastname:
                raise ValidationError(
                    "Field e_lastname must be set for the employee!")
        # Validate Donee
        donee_fields = self.get_fields_by_con_group('donee')
        if any(r[fname] for fname in donee_fields):
            if not r.d_lastname:
                raise ValidationError(
                    "Field d_lastname must be set for the donee!")
        # Validate 'price_donate' and 'price_unit' are not used at the same time!
        # BUGFIX: these checks used `self.…` inside the `for r in self`
        # loop; on a multi-record set that either crashes or validates the
        # wrong record — they now read from `r`.
        if r.price_donate and r.price_unit:
            raise ValidationError(
                _("Field 'price_donate' and 'price_unit' is set! "
                  "Use 'price_donate' for donations and 'price_unit' for regular products!"
                  ))
        # Validate Product
        if r.product_id:
            if not r.product_id.fson_connector_sale:
                raise ValidationError(
                    "This product is not available for the connector!")
            if r.price_donate:
                if not r.product_id.price_donate:
                    raise ValidationError(
                        "This product can not be used with 'price_donate'!"
                    )
                if r.product_id.price_donate_min and r.price_donate < r.product_id.price_donate_min:
                    raise ValidationError(
                        "price_donate must be at least %s" %
                        r.product_id.price_donate_min)
            if r.product_id.payment_interval_lines_ids:
                interval_ids = tuple(
                    l.payment_interval_id.id
                    for l in r.product_id.payment_interval_lines_ids)
                if r.payment_interval_id.id not in interval_ids:
                    raise ValidationError(
                        "This payment interval is not available for the product! "
                        "Allowed payment interval ids for this product: %s"
                        % str(interval_ids))
        # Validate payment transaction
        # - if acquirer is enabled by 'fson_connector_sale'
        # - if only correct fields are used (e.g. frst_iban)
        # TODO:
        # - if all needed partner fields are set for this acquirer - maybe
        #   done by the _*_ methods form_feedb?
        # - check if acquirer recurring-allowed-setting matches with
        #   payment interval
        if r.acquirer_id:
            # Check if provider is enabled
            if not r.acquirer_id.fson_connector_sale:
                raise ValidationError(
                    "This acquirer (id %s) is not enabled for the connector!"
                    % r.acquirer_id.id)
            # Check if the correct payment provider fields are used
            # ATTENTION: This will only work if all payment addons stick
            # to the naming convention!
            all_provider_fields = self.get_fields_by_con_group(
                'all_provider_fields')
            # BUGFIX: used self.acquirer_id.provider and self[k] inside
            # the record loop; now reads from `r`.
            allowed_provider_fields = self.get_fields_by_con_group(
                'payment_' + r.acquirer_id.provider)
            invalid_provider_fields = tuple(
                k for k in all_provider_fields
                if r[k] and k not in allowed_provider_fields)
            if invalid_provider_fields:
                raise ValidationError(
                    'Fields are not valid for the current acquirer (ID %s): %s'
                    '' % (r.acquirer_id.id, invalid_provider_fields))
def _check_name(self): for record in self: if record.name < 1: raise ValidationError("El numero de venta debe ser mayor que 0")
def import_xls(self, model, file, column_name=None, column_value=None):
    """Import an uploaded .xls file into ``model`` through base_import.

    The base64-encoded ``file`` is written to a temporary .xls, converted
    row by row into a temporary .csv (numbers coerced to int, xldate cells
    converted via ``self.xldate_to_datetime``), then fed to Odoo's
    ``base_import.import``.  Both temporary files are removed afterwards.

    :param model: technical name of the target model
    :param file: base64-encoded xls payload
    :param column_name: optional extra column prepended to every row
    :param column_value: value used for the extra column
    :returns: the original ``file`` argument on success
    :raises ValidationError: when the converted csv is empty or the import
        reports errors
    """
    decoded_data = base64.decodestring(file)
    # Temp file name derived from the current time (ms precision).
    ftemp = 'temp' + datetime.utcnow().strftime('%H%M%S%f')[:-3]
    f = open(ftemp + '.xls', 'wb+')
    f.write(decoded_data)
    f.seek(0)
    f.close()
    wb = xlrd.open_workbook(f.name)
    st = wb.sheet_by_index(0)
    csv_file = open(ftemp + '.csv', 'wb')
    csv_out = unicodecsv.writer(csv_file,
                                encoding='utf-8',
                                quoting=unicodecsv.QUOTE_ALL)
    # NOTE(review): _HEADER_FIELDS stays unbound for an empty sheet but is
    # used unconditionally below — presumably imported sheets always have
    # a header row; confirm.
    if st._cell_values:
        _HEADER_FIELDS = st._cell_values[0]
    for nrow in xrange(st.nrows):
        if nrow > 0:
            row_values = st.row_values(nrow)
            for index, val in enumerate(row_values):
                ctype = st.cell(nrow, index).ctype
                # 'type' shadows the builtin; kept to leave code unchanged.
                type = ctype_text.get(ctype, 'unknown type')
                if type == 'empty' or type == 'text' \
                        or type == 'bool' or type == 'error' \
                        or type == 'blank':
                    row_values[index] = st.cell(nrow, index).value
                elif type == 'number':
                    if not val:
                        row_values[index] = 0
                    else:
                        # NOTE(review): values whose repr is not all digits
                        # (e.g. '12.0') are truncated with int(); confirm
                        # fractional values are not expected here.
                        if not str(val).isdigit():
                            row_values[index] = int(val)
                        else:
                            row_values[index] = val
                elif type == 'xldate':
                    str_date = self.xldate_to_datetime(
                        st.cell(nrow, index).value)
                    row_values[index] = str_date
                else:
                    row_values[index] = st.cell(nrow, index).value
            csv_out.writerow(row_values)
        else:
            # Header row (row 0) is written through untouched.
            csv_out.writerow(st.row_values(nrow))
    csv_file.close()
    csv_file = open(ftemp + '.csv', 'r')
    file_txt = csv_file.read()
    csv_file.close()
    # Clean up both temporary files.
    os.unlink(ftemp + '.xls')
    os.unlink(ftemp + '.csv')
    if not file_txt:
        raise ValidationError(_(str("File Not found.")))
    if column_name and column_value:
        # Prepend the constant column to the header and every data row.
        _HEADER_FIELDS.insert(0, str(column_name))
        file_txt = self._add_column(column_name, column_value, file_txt)
    Import = self.env['base_import.import']
    imp = Import.create({
        'res_model': model,
        'file': file_txt,
    })
    [errors] = imp.do(
        _HEADER_FIELDS,
        {'headers': True, 'separator': ',',
         'quoting': '"', 'encoding': 'utf-8'})
    if errors:
        raise ValidationError(_(str(errors[0]['message'])))
    return file
def create_measurement_sample_table(self, param, lot_size_desc):
    """Query the GB/T 6378.1-2008 sample-table service and populate this
    plan's measurement sampling lines.

    :param param: dict payload, JSON-posted to the local lookup service.
    :param lot_size_desc: list of lot-size description strings; one line
        is built per entry, matched by index with the returned rows.
    :raises ValidationError: if the HTTP request fails, or the service
        returns an unsuccessful or empty result.
    """
    url = 'http://127.0.0.1:5000/GBT6378.1-2008/read_sample_table'
    headers = {'content-type': 'application/json'}
    response = None
    try:
        response = requests.post(url, data=json.dumps(param),
                                 headers=headers)
    except Exception:
        # BUGFIX: the log line previously named create_count_sample_table
        # (copy-paste from the counting variant of this method).
        print('----Error in method model_sample_plan.'
              'create_measurement_sample_table: the request failed')
        raise ValidationError('请求服务失败或服务器未开启')
    if not response:
        raise ValidationError('服务器查询结果为空')
    response_data = response.json()
    # Only touch 'result' when the service reports success.
    request_data = (response_data['result']['lines']
                    if response_data['success'] else None)
    if not request_data:
        print('-----------%s----------' % (response_data['message']))
        raise ValidationError('服务器查询结果为空')
    # Per-method layout: target one2many field, and the value keys whose
    # order matches each per-severity row from the service.
    layouts = {
        'one_side_s_method': (
            'measurement_table_one_ids', ('sample_size', 'k')),
        'one_side_sigma_method': (
            'measurement_table_one_ids', ('sample_size', 'k')),
        'both_side_s_method': (
            'measurement_table_two_ids', ('sample_size', 'fs')),
        'both_side_sigma_method': (
            'measurement_table_three_ids',
            ('sample_size', 'k', 'f_sigma')),
    }
    layout = layouts.get(self.sample_type_code)
    if not layout:
        # Unknown sampling method: keep the original silent no-op.
        return
    field_name, value_keys = layout
    severities = ('normal', 'tightened', 'reduced')
    lines = []
    for index in range(len(lot_size_desc)):
        line = {'lot_size': lot_size_desc[index]}
        for sev_idx, severity in enumerate(severities):
            row = request_data[index][sev_idx]
            for pos, key in enumerate(value_keys):
                line['%s_%s' % (severity, key)] = row[pos]
        lines.append(line)
    self.update({field_name: lines})
def _check_date(self): if self.issued_date > self.return_date: raise ValidationError( 'Return Date cannot be set before Issued Date.')
def action_check_approval(self):
    """Advance this document one step along its approval level chain.

    Resolves the document type, loads the matching validation levels for
    the operating unit, and moves ``level_id``/``approver_ids`` to the
    next applicable level (or clears them when the chain is finished).

    :returns: True
    :raises ValidationError: when no approver levels are configured, the
        amount exceeds every configured limit, or the current user is not
        among the allowed approvers.
    """
    self.ensure_one()
    amount = self.amount
    # Resolve the document type used to look up the approval chain.
    doctype = ''
    if self.is_employee_advance:
        doctype = 'employee_advance'
    elif self.is_advance_clearing:
        doctype = 'employee_clearing'
    elif self.pay_to == 'pettycash':
        doctype = 'employee_pettycash'
    elif not self.is_advance_clearing and not self.is_employee_advance:
        doctype = 'employee_expense'
    levels = self.env['level.validation'].search([
        ('operating_unit_id', '=', self.operating_unit_id.id),
        ('doctype', 'like', doctype),
    ]).sorted(key=lambda r: r.level)
    if not levels:
        raise ValidationError(_("This operating unit does not "
                                "set approver."))
    # Split the levels by whether their limit covers the amount.
    levels_lt_amount = levels.filtered(
        lambda r: r.limit_amount < amount)
    levels_gt_amount = levels.filtered(
        lambda r: r.limit_amount >= amount)
    if levels_gt_amount:
        # All lower levels plus the first level whose limit suffices.
        target_levels = levels_lt_amount + levels.filtered(
            lambda r: r.level == min(levels_gt_amount.mapped('level')))
    else:
        target_levels = levels_lt_amount
    if not target_levels.filtered(
            lambda r: r.limit_amount >= amount):
        raise ValidationError(_("Amount is over "
                                "maximum limited amount."))
    if self.approver_ids and self.env.user not in self.approver_ids:
        raise ValidationError(_("Your user is not allow to "
                                "approve this document."))
    if target_levels:
        if self.level_id:
            # Already in the chain: step to the next level number.
            # NOTE(review): `min_level + 1` assumes level numbers are
            # consecutive integers — confirm against level.validation
            # records before relying on it.
            min_level = min(filter(lambda r: r >= self.level_id.level,
                                   target_levels.mapped('level')))
            target_level = target_levels.filtered(
                lambda r: r.level == min_level + 1)
            if target_level:
                self.write({
                    'level_id': target_level.id,
                    'approver_ids': [
                        (6, 0, target_level.user_ids.ids)
                    ],
                })
            else:
                # No further level: the approval chain is complete.
                self.write({
                    'level_id': False,
                    'approver_ids': False,
                })
        else:
            if not self.level_id and not self.approver_ids:
                # First step: start at the lowest applicable level.
                target_level = target_levels.filtered(
                    lambda r: r.level == min(target_levels.mapped('level'))
                )
                self.write({
                    'level_id': target_level.id,
                    'approver_ids': [
                        (6, 0, target_level.user_ids.ids)
                    ],
                })
    return True
def check_actual_return_date(self):
    """Validate that the actual return, when set, is not before issue."""
    actual = self.actual_return_date
    if not actual:
        return
    if self.issued_date > actual:
        raise ValidationError(
            'Actual Return Date cannot be set before Issued Date')
def _compute_results(self):
    """Populate ``self.results`` with asset depreciation report lines.

    Builds a domain from the wizard's status/asset/profile filters,
    resolves the reporting date range, then runs a single SQL query that
    computes, per asset: period depreciation, carried-forward and
    brought-forward accumulated depreciation.

    :raises ValidationError: when no from/to dates can be resolved.
    """
    self.ensure_one()
    dom = []
    status = []
    # Prepare DOM to filter assets
    if self.asset_status_draft:
        status += ['draft']
    if self.asset_status_open:
        status += ['open']
    if self.asset_status_close:
        status += ['close']
    if self.asset_status_removed:
        status += ['removed']
    # The `+ [0]` / `+ ['']` padding keeps the tuples non-empty so the
    # generated SQL `in (...)` clause stays valid.
    if self.asset_ids:
        dom += [('id', 'in', tuple(self.asset_ids.ids + [0]))]
    if self.asset_profile_ids:
        dom += [('profile_id', 'in',
                 tuple(self.asset_profile_ids.ids + [0]))]
    if status:
        dom += [('state', 'in', tuple(status + ['']))]
    # Prepare fixed params
    date_start = False
    date_end = False
    # fiscalyear_start = self.fiscalyear_start_id.name
    if self.filter == 'filter_date':
        date_start = self.date_start
        date_end = self.date_end
    if self.filter == 'filter_period':
        date_start = self.period_start_id.date_start
        date_end = self.period_end_id.date_stop
    if not date_start or not date_end:
        raise ValidationError(_('Please provide from and to dates.'))
    # Accounts whose move lines feed the accumulated / expense figures.
    accum_depre_account_ids = self.env['account.account'].search(
        [('user_type', '=', self.accum_depre_account_type.id)]).ids
    depre_account_ids = self.env['account.account'].search(
        [('user_type', '=', self.depre_account_type.id)]).ids
    where_str = self._domain_to_where_str(dom)
    if where_str:
        where_str = 'and ' + where_str
    # NOTE(review): where_str is concatenated directly before the
    # "order by ..." literal — confirm _domain_to_where_str() ends with
    # whitespace, otherwise the pieces may fuse into invalid SQL.
    self._cr.execute("""
        select a.*, id asset_id,
        -- depreciation
        (select coalesce(sum(debit-credit), 0.0)
         from account_move_line ml
         where account_id in %s -- depreciation account
         and ml.date between %s and %s
         and asset_id = a.id) depreciation,
        -- accumulated_cf
        (select coalesce(sum(credit-debit), 0.0)
         from account_move_line ml
         where account_id in %s -- accumulated account
         and ml.date <= %s -- date end
         and asset_id = a.id) accumulated_cf,
        -- accumulated_bf
        case when a.date_start >= %s then 0
        else (select a.purchase_value - coalesce(sum(credit-debit), 0.0)
              from account_move_line ml
              join account_period ap on ap.id = ml.period_id
              join account_fiscalyear af on af.id = ap.fiscalyear_id
              where account_id in %s -- accumulatedp account
              and ml.date < %s
              and asset_id = a.id)
        end accumulated_bf
        from account_asset a
        where (a.state != 'close' or a.value_depreciated != 0)
        """ + where_str + "order by profile_id, number",
        (tuple(depre_account_ids), date_start, date_end,
         tuple(accum_depre_account_ids), date_end,
         date_start,
         tuple(accum_depre_account_ids), date_start))
    asset_results = self._cr.dictfetchall()
    # Materialise the fetched rows as transient report-line records.
    ReportLine = self.env['asset.view']
    for line in asset_results:
        self.results += ReportLine.new(line)
def _check_name(self): if len(self.search([('name','=', self.name)]))> 1: raise ValidationError("Field year must be unique")