def do_split(self):
    """Split the picking's stock moves according to the wizard lines.

    For each wizard line (``item_ids``):

    * entered quantity >  parent move quantity -> :class:`UserError`
    * entered quantity == parent move quantity -> the whole move is sent
      to the backorder
    * entered quantity <  parent move quantity -> the move is copied with
      the entered quantity, confirmed, replayed through the custom
      workflow states, and added to the backorder; the original move (and
      its linked manufacturing order, if any) keeps the remainder.

    Finally a backorder picking is created for the collected moves and
    confirmed.

    :return: True
    """
    self.ensure_one()
    picking_obj = self.env['stock.picking']
    move_obj = self.env["stock.move"]
    prod_obj = self.env['mrp.production']
    backorder_move_ids = []
    for item in self.item_ids:
        # The entered quantity may never exceed the parent move quantity.
        if item.move_id.product_uom_qty < item.quantity:
            raise exceptions.UserError((
                u"La quantité entrée %s %s est supérieur à celle du mouvement de stock parent."
            ) % (item.quantity, item.product_id.name))
        if item.move_id.product_uom_qty == item.quantity:
            # Full quantity requested: the existing move goes to the
            # backorder unchanged.
            backorder_move_ids.append(item.move_id.id)
        if item.move_id.product_uom_qty > item.quantity:
            # Partial split: duplicate the move with the entered quantity.
            # NOTE(review): copy() returns a recordset, not an id; the
            # code below passes it to browse()/write() and appends it to
            # backorder_move_ids — confirm this mix of records and ids is
            # intended.
            new_move_id = move_obj.browse(item.move_id.id).copy(
                {'product_uom_qty': item.quantity})
            move_obj.action_confirm(new_move_id)
            # Create the linked manufacturing order for the new move
            # (was: "creation ordre de fabrication").
            if item.move_id.id_mo:
                prod_id = prod_obj.browse(item.move_id.id_mo).copy({
                    'product_qty': item.quantity,
                    # 'move_prod_id': new_move_id,
                    'origin': item.move_id.origin,
                })
                move_obj.browse(new_move_id).write({'id_mo': prod_id})
            state_origin = item.move_id.state
            new_move = move_obj.browse(new_move_id)
            # Replay the custom workflow so the copy reaches the same
            # state as its parent move.
            if state_origin == 'contre_mesure':
                new_move.contre_mesure1()
            if state_origin == 'flowsheeting':
                new_move.contre_mesure1()
                new_move.flow_sheet()
            if state_origin == 'assigned':
                new_move.contre_mesure1()
                new_move.flow_sheet()
                new_move.force_assign()
            backorder_move_ids.append(new_move_id)
            # Shrink the original move to the remaining quantity.
            new_qty = item.move_id.product_uom_qty - item.quantity
            move_obj.browse(item.move_id.id).write(
                {'product_uom_qty': new_qty})
            # Keep the linked manufacturing order in sync with the
            # remainder (was: "mise a jour ordre de fabrication lie").
            if item.move_id.id_mo:
                prod_obj.browse(item.move_id.id_mo).write(
                    {'product_qty': new_qty})
    if backorder_move_ids:
        # Create an empty backorder picking and attach the split moves.
        backorder = picking_obj.browse(self.picking_id.id).copy({
            'name': '/',
            'move_lines': [],
            'pack_operation_ids': [],
            'backorder_id': self.picking_id.id,
        })
        picking_obj.browse(self.picking_id.id).message_post(
            body=(u"Reliquat <em>%s</em> <b>créé</b>.") % (backorder.name))
        move_obj.browse(backorder_move_ids).write(
            {'picking_id': backorder.id})
        backorder.action_confirm()
    return True
def _update_info_shopee(self, data=None):
    """Synchronise this product's variants, price and stock with Shopee.

    Recomputes local stock, (re)creates tier variations on Shopee for
    variants that have no ``platform_variant_idn`` yet, pushes the
    item/variation update, refreshes images when they differ remotely,
    and stamps ``_last_info_update``.

    :param dict data: optional extra payload merged into the
        ``update_item`` call; when provided it is mutated in place (kept
        for backward compatibility with existing callers).
    :raises exceptions.UserError: when more than 2 attribute lines are
        configured (Shopee supports at most a 2-level tier variation).
    """
    self.ensure_one()
    if data is None:
        # BUG FIX: the previous signature used ``data={}`` — a single
        # module-level dict shared and mutated across every call.
        data = {}
    self.calculate_stock()
    self.ecomm_product_product_ids.calculate_stock()
    # Variants not yet known to Shopee (no platform id).
    new_v = self.ecomm_product_product_ids.filtered(
        lambda r: not r.platform_variant_idn)
    o = len(self.ecomm_product_product_ids - new_v)
    if self.attribute_line_ids:
        if len(self.attribute_line_ids) > 2:
            raise exceptions.UserError(
                'Tier variation should be under 2 level')
        tier_variation = [{
            'name': line.name,
            'options': line.line_value_ids.mapped('name')
        } for line in self.attribute_line_ids[:2]]
        # Shopee only accepts images on the first tier.
        tier_variation[0]['images_url'] = self._upload_image_shopee(
            self.attribute_line_ids[0].line_value_ids.mapped(
                'ecomm_product_image_ids.image_url'))
        # NOTE(review): ``attr_line_value_ids`` is not defined in this
        # method — presumably it should be an attribute of ``v``; as
        # written this comprehension raises NameError unless the name
        # exists at class/module level. Confirm with the author.
        add_variant_data = [{
            'tier_index': attr_line_value_ids.mapped('sequence'),
            'stock': v.stock,
            'price': v.price,
            'variation_sku': v.sku,
        } for v in new_v]
        if o == 0:
            # Nothing exists on Shopee yet: initialise the tier structure
            # together with the variations.
            resp = self.shop_id._py_client_shopee(
            ).item.init_tier_variation(
                item_id=int(self.platform_item_idn),
                tier_variation=tier_variation,
                variation=add_variant_data)
        else:
            # Tiers already exist remotely: refresh them, then add only
            # the missing variations.
            list_resp = self.shop_id._py_client_shopee(
            ).item.update_tier_variation_list(
                item_id=int(self.platform_item_idn),
                tier_variation=tier_variation)
            resp = self.shop_id._py_client_shopee(
            ).item.add_tier_variation(
                item_id=int(self.platform_item_idn),
                variation=add_variant_data)
        if resp.get('item_id'):
            # Shopee returns variation ids in submission order; store
            # each on the matching local variant.
            for i, v in enumerate(new_v):
                v.platform_variant_idn = resp['variation_id_list'][i][
                    'variation_id']
    data.update({
        'item_id': int(self.platform_item_idn),
        'name': self.name,
        'description': self.description,
        'item_sku': self.sku or ' ',
        'price': self.price,
        'variations': [{
            'variation_id': int(v.platform_variant_idn),
            'variation_sku': v.sku,
        } for v in self.ecomm_product_product_ids]
    })
    resp = self.shop_id._py_client_shopee().item.update_item(data)
    # Re-upload images only when the remote list differs from ours.
    if resp.get('item', {}).get('images') != [
        img.image_url for img in self.ecomm_product_image_ids
        if img.image_url
    ]:
        self._update_image_shopee()
    self._last_info_update = fields.Datetime.now()
def _get_s3_bucket(self, name=None):
    """Connect to S3 and return the bucket

    The following environment variables can be set:
    * ``AWS_HOST``
    * ``AWS_REGION``
    * ``AWS_ACCESS_KEY_ID``
    * ``AWS_SECRET_ACCESS_KEY``
    * ``AWS_BUCKETNAME``

    If a name is provided, we'll read this bucket, otherwise, the bucket
    from the environment variable ``AWS_BUCKETNAME`` will be read.

    Instantiated buckets are cached in ``S3BucketClientRegistry`` so the
    boto3 resource is only built once per bucket name.

    :param name: optional bucket name overriding ``AWS_BUCKETNAME``
    :raises exceptions.UserError: when credentials/bucket name are
        missing, or when the S3 endpoint cannot be reached
    """
    params = self._get_s3_connection_params(bucket_name=name)
    # Pop the bucket_name to avoid TypeError: resource() got an unexpected
    # keyword argument 'bucket_name'
    bucket_name = params.pop("bucket_name")
    if not (
        params["aws_access_key_id"]
        and params["aws_secret_access_key"]
        and bucket_name
    ):
        msg = _('If you want to read from the %s S3 bucket, the following '
                'environment variables must be set:\n'
                '* AWS_ACCESS_KEY_ID\n'
                '* AWS_SECRET_ACCESS_KEY\n'
                'If you want to write in the %s S3 bucket, this variable '
                'must be set as well:\n'
                '* AWS_BUCKETNAME\n'
                'Optionally, the S3 host can be changed with:\n'
                '* AWS_HOST\n'
                ) % (bucket_name, bucket_name)
        raise exceptions.UserError(msg)
    # Return the cached bucket client if one was already instantiated.
    bucket = S3BucketClientRegistry.get_bucket_client(bucket_name)
    if bucket:
        return bucket
    s3 = boto3.resource('s3', **params)
    bucket = s3.Bucket(bucket_name)
    exists = True
    try:
        s3.meta.client.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        # If a client error is thrown, then check that it was a 404 error.
        # If it was a 404 error, then the bucket does not exist.
        error_code = e.response['Error']['Code']
        if error_code == '404':
            exists = False
    except EndpointConnectionError as error:
        # log verbose error from s3, return short message for user
        _logger.exception('Error during connection on S3')
        raise exceptions.UserError(str(error))
    if not exists:
        # Bucket is missing: create it, honouring the configured region.
        region_name = params.get('region_name')
        if not region_name:
            bucket = s3.create_bucket(Bucket=bucket_name)
        else:
            bucket = s3.create_bucket(
                Bucket=bucket_name,
                CreateBucketConfiguration={
                    'LocationConstraint': region_name
                })
    # Cache the instantiated bucket for subsequent calls.
    S3BucketClientRegistry.set_bucket_client(bucket_name, bucket)
    return bucket
def _compute_nber_letters(self):
    """
    Counts the number of scans. If a zip file is given, the number of
    scans inside is counted.

    For manual imports (or records past draft) the standard computation
    is used; otherwise the scans are counted directly on the NAS share
    over SMB, descending into zip archives.
    """
    for letter in self:
        if letter.manual_import or (
                letter.state and letter.state != 'draft'):
            super(ImportLettersHistory, letter)._compute_nber_letters()
        else:
            # files are not selected by user so we find them on NAS
            # folder 'Imports' counter
            tmp = 0
            smb_conn = letter._get_smb_connection()
            share_nas = letter.env.ref(
                'sbc_switzerland.share_on_nas').value
            imported_letter_path = letter.import_folder_path
            if smb_conn and smb_conn.connect(
                    SmbConfig.smb_ip, SmbConfig.smb_port) and \
                    imported_letter_path:
                imported_letter_path = letter.check_path(
                    imported_letter_path)
                try:
                    listPaths = smb_conn.listPath(share_nas,
                                                 imported_letter_path)
                except OperationFailure:
                    # Bad/missing folder on the share: count nothing.
                    logger.info('--------------- PATH NO CORRECT ------'
                                '-----')
                    listPaths = []
                for sharedFile in listPaths:
                    # Plain scan file: count it directly.
                    if func.check_file(sharedFile.filename) == 1:
                        tmp += 1
                    elif func.is_zip(sharedFile.filename):
                        # Zip archive: download it in memory and count
                        # the scan files it contains.
                        logger.info('File to retrieve: {}'.format(
                            imported_letter_path + sharedFile.filename))
                        file_obj = BytesIO()
                        smb_conn.retrieveFile(
                            share_nas,
                            imported_letter_path + sharedFile.filename,
                            file_obj)
                        try:
                            zip_ = zipfile.ZipFile(file_obj, 'r')
                            list_file = zip_.namelist()
                            # loop over all files in zip
                            for tmp_file in list_file:
                                tmp += (func.check_file(tmp_file) == 1)
                        except zipfile.BadZipfile:
                            raise exceptions.UserError(
                                _('Zip file corrupted (' +
                                  sharedFile.filename + ')'))
                        except zipfile.LargeZipFile:
                            raise exceptions.UserError(
                                _('Zip64 is not supported(' +
                                  sharedFile.filename + ')'))
                smb_conn.close()
            else:
                logger.info("""Failed to list files in imported \
folder Imports oh the NAS in emplacement: {}""".format(
                    imported_letter_path))
            letter.nber_letters = tmp
def _create_payment_entry_manual(self, amount):
    """Create the journal entry for a manual payment.

    Delegates to the standard implementation when called from an
    invoice; otherwise validates that the manual lines are balanced,
    creates the move, copies each manual line (converted with the
    counterpart's implied rate when a foreign currency is involved) and
    finally writes the liquidity counterpart line.

    :param amount: payment amount (company currency sign convention of
        the caller)
    :return: the created ``account.move``
    :raises exceptions.UserError: when manual debit and credit totals
        differ
    """
    if self._context.get("active_model", False) == "account.invoice":
        return super(AccountPayment,
                     self)._create_payment_entry_manual(amount)
    # The manually entered lines must balance (2-decimal tolerance).
    manual_debit = round(
        sum([line.debit for line in self.payment_move_ids]), 2)
    manual_credit = round(
        sum([line.credit for line in self.payment_move_ids]), 2)
    if manual_credit != manual_debit:
        raise exceptions.UserError(
            _("You can not create journal entry that is not square."))
    aml_obj = self.env['account.move.line'].with_context(
        check_move_validity=False)
    invoice_currency = False
    if self.invoice_ids and all([
            x.currency_id == self.invoice_ids[0].currency_id
            for x in self.invoice_ids
    ]):
        # if all the invoices selected share the same currency, record
        # the paiement in that currency too
        invoice_currency = self.invoice_ids[0].currency_id
    debit, credit, amount_currency, currency_id = aml_obj.with_context(
        date=self.payment_date).compute_amount_fields(
            amount, self.currency_id, self.company_id.currency_id,
            invoice_currency)
    move = self.env['account.move'].create(self._get_move_vals())
    # Write line corresponding to invoice payment
    counterpart_aml_dict = self._get_shared_move_line_vals(
        debit, credit, amount_currency, move.id, False)
    counterpart_aml_dict.update(
        self._get_counterpart_move_line_vals(self.invoice_ids))
    counterpart_aml_dict.update({'currency_id': currency_id})
    # NOTE(review): the trailing comma makes ``account_id`` a 1-tuple,
    # which the ``not in`` test below happens to handle correctly —
    # likely accidental but behaviour-preserving.
    account_id = self.payment_type in (
        'outbound', 'transfer'
    ) and self.journal_id.default_debit_account_id.id or self.journal_id.default_credit_account_id.id,
    # Manual lines are every payment line that is not on the journal's
    # liquidity account.
    manual_lines = [
        line for line in self.payment_move_ids
        if line.account_id.id not in account_id
    ]
    # Implied conversion rate taken from the counterpart line, when a
    # foreign-currency amount exists.
    rate = False
    if counterpart_aml_dict.get("amount_currency", False):
        rate = counterpart_aml_dict["debit"] / counterpart_aml_dict["amount_currency"] if counterpart_aml_dict[
            "debit"] > 0 else \
            counterpart_aml_dict["credit"] / counterpart_aml_dict["amount_currency"]
    for line in manual_lines:
        line_amount_currency = False
        line_debit = line.debit
        line_credit = line.credit
        if rate:
            # NOTE(review): the entered value is kept as amount_currency
            # while debit/credit are scaled by the rate — confirm this
            # orientation matches compute_amount_fields' convention.
            line_amount_currency = line.debit if line.debit else line.credit
            if self.payment_type == "inbound":
                line_amount_currency = line_amount_currency * -1
            line_debit = line_debit * rate
            line_credit = line_credit * rate
        line_dict = {
            'account_id': line.account_id.id,
            'amount_currency': line_amount_currency,
            'credit': abs(line_credit),
            'currency_id': counterpart_aml_dict["currency_id"],
            'debit': abs(line_debit),
            'invoice_id': counterpart_aml_dict["invoice_id"],
            'journal_id': counterpart_aml_dict["journal_id"],
            'move_id': counterpart_aml_dict["move_id"],
            'name': line.name or counterpart_aml_dict["name"],
            'partner_id': line.partner_id.id
            if line.partner_id else counterpart_aml_dict["partner_id"],
            'product_id': line.product_id.id,
            'analytic_account_id': line.analytic_account_id.id,
            'payment_id': counterpart_aml_dict["payment_id"]
        }
        aml_obj.create(line_dict)
    # Write counterpart lines
    # (double negative: zero the currency amount when the payment is in
    # the company currency)
    if not self.currency_id != self.company_id.currency_id:
        amount_currency = 0
    liquidity_aml_dict = self._get_shared_move_line_vals(
        credit, debit, -amount_currency, move.id, False)
    liquidity_aml_dict.update(self._get_liquidity_move_line_vals(-amount))
    aml_obj.create(liquidity_aml_dict)
    return move
def send_image_message(self, openid, media_id):
    """Send an image message to a WeChat user.

    Wraps the WeChat client call so that transport failures surface as a
    ``UserError`` instead of a raw ``ClientException``.

    :param openid: WeChat user identifier
    :param media_id: uploaded media id of the image to send
    """
    client = self.wxclient
    try:
        client.send_image_message(openid, media_id)
    except ClientException as exc:
        raise exceptions.UserError(u'发送image失败 %s' % exc)
def action_pos_order_paid(self):
    """Mark the POS order as paid.

    Return orders skip the payment-completeness check; any other order
    must pass ``test_paid`` before its state moves to ``paid``.

    :raises exceptions.UserError: when the order is neither fully paid
        nor a return order
    """
    payment_ok = self.test_paid() or self.is_return_order
    if not payment_ok:
        raise exceptions.UserError(_("Order is not paid."))
    self.write({'state': 'paid'})
def _get_serialized_challenge_lines(self, user=(), restrict_goals=(), restrict_top=0):
    """Return a serialised version of the goals information if the user has not completed every goal

    :param user: user retrieving progress (False if no distinction,
                 only for ranking challenges)
    :param restrict_goals: compute only the results for this subset of
                           gamification.goal ids, if False retrieve every
                           goal of current running challenge
    :param int restrict_top: for challenge lines where visibility_mode is
                             ``ranking``, retrieve only the best
                             ``restrict_top`` results and itself, if 0
                             retrieve all restrict_goal_ids has priority
                             over restrict_top

    format list
    # if visibility_mode == 'ranking'
    {
        'name': <gamification.goal.description name>,
        'description': <gamification.goal.description description>,
        'condition': <reach condition {lower,higher}>,
        'computation_mode': <target computation {manually,count,sum,python}>,
        'monetary': <{True,False}>,
        'suffix': <value suffix>,
        'action': <{True,False}>,
        'display_mode': <{progress,boolean}>,
        'target': <challenge line target>,
        'own_goal_id': <gamification.goal id where user_id == uid>,
        'goals': [
            {
                'id': <gamification.goal id>,
                'rank': <user ranking>,
                'user_id': <res.users id>,
                'name': <res.users name>,
                'state': <gamification.goal state {draft,inprogress,reached,failed,canceled}>,
                'completeness': <percentage>,
                'current': <current value>,
            }
        ]
    },
    # if visibility_mode == 'personal'
    {
        'id': <gamification.goal id>,
        'name': <gamification.goal.description name>,
        'description': <gamification.goal.description description>,
        'condition': <reach condition {lower,higher}>,
        'computation_mode': <target computation {manually,count,sum,python}>,
        'monetary': <{True,False}>,
        'suffix': <value suffix>,
        'action': <{True,False}>,
        'display_mode': <{progress,boolean}>,
        'target': <challenge line target>,
        'state': <gamification.goal state {draft,inprogress,reached,failed,canceled}>,
        'completeness': <percentage>,
        'current': <current value>,
    }
    """
    Goals = self.env['gamification.goal']
    (start_date, end_date) = start_end_date_for_period(self.period)
    res_lines = []
    for line in self.line_ids:
        line_data = {
            'name': line.definition_id.name,
            'description': line.definition_id.description,
            'condition': line.definition_id.condition,
            'computation_mode': line.definition_id.computation_mode,
            'monetary': line.definition_id.monetary,
            'suffix': line.definition_id.suffix,
            'action': True if line.definition_id.action_id else False,
            'display_mode': line.definition_id.display_mode,
            'target': line.target_goal,
        }
        domain = [
            ('line_id', '=', line.id),
            ('state', '!=', 'draft'),
        ]
        if restrict_goals:
            # BUG FIX: the domain field was ('ids', ...), which is not a
            # valid field name and made the restriction fail.
            domain.append(('id', 'in', restrict_goals.ids))
        else:
            # if no subset goals, use the dates for restriction
            if start_date:
                domain.append(('start_date', '=', start_date))
            if end_date:
                domain.append(('end_date', '=', end_date))
        if self.visibility_mode == 'personal':
            if not user:
                raise exceptions.UserError(_("Retrieving progress for personal challenge without user information"))
            domain.append(('user_id', '=', user.id))
            goal = Goals.search(domain, limit=1)
            if not goal:
                continue
            # Stop early: an unreached goal means the challenge is not
            # completed, so no reminder/report lines are produced.
            if goal.state != 'reached':
                return []
            line_data.update(goal.read(['id', 'current', 'completeness', 'state'])[0])
            res_lines.append(line_data)
            continue
        # ranking mode
        # BUG FIX: the trailing comma previously stored the tuple
        # ``(False,)`` (truthy!) instead of ``False``.
        line_data['own_goal_id'] = False
        line_data['goals'] = []
        if line.condition == 'higher':
            goals = Goals.search(domain, order="completeness desc, current desc")
        else:
            goals = Goals.search(domain, order="completeness desc, current asc")
        if not goals:
            continue
        for ranking, goal in enumerate(goals):
            if user and goal.user_id == user:
                line_data['own_goal_id'] = goal.id
            elif restrict_top and ranking > restrict_top:
                # not own goal and too low to be in top
                continue
            line_data['goals'].append({
                'id': goal.id,
                'user_id': goal.user_id.id,
                'name': goal.user_id.name,
                'rank': ranking,
                'current': goal.current,
                'completeness': goal.completeness,
                'state': goal.state,
            })
        if len(goals) < 3:
            # display at least the top 3 in the results
            # BUG FIX: ``[{...}] * missing`` replicated ONE shared dict,
            # so every placeholder ended up with the same (last) rank and
            # the list held the same object several times. Build a fresh
            # dict per missing rank instead.
            for ranking in range(len(goals), 3):
                line_data['goals'].append({
                    'id': False,
                    'user_id': False,
                    'name': '',
                    'current': 0,
                    'completeness': 0,
                    'state': False,
                    'rank': ranking,
                })
        res_lines.append(line_data)
    return res_lines
def write(self, values):
    """Forbid any change to the ``project_template`` flag.

    The flag is fixed at creation time; attempting to write it (to any
    value, even the same one) raises.

    :raises exceptions.UserError: when ``project_template`` is present
        in ``values``
    """
    template_touched = 'project_template' in values
    if template_touched:
        raise exceptions.UserError(
            _('The project template cannot be modified.')
        )
    return super().write(values)
def create(self, values):
    """Create a project and enforce that a JIRA key is set.

    :raises exceptions.UserError: when the created record has no
        ``jira_key``
    """
    new_project = super(JiraProjectProject, self).create(values)
    if not new_project.jira_key:
        raise exceptions.UserError(
            _('The JIRA Key is mandatory in order to export a project'))
    return new_project
def unlink(self):
    """Delete the channels, refusing to remove the root channel.

    :raises exceptions.UserError: when any record in ``self`` is the
        root channel
    """
    if any(channel.name == "root" for channel in self):
        raise exceptions.UserError(_("Cannot remove the root channel"))
    return super(JobChannel, self).unlink()
def attendance_action_split_change(self, job=None, wo=None, end=None):
    """ Check In/Check Out action - for Split Time
        Check In: create a new attendance record
        Check Out: modify check_out field of appropriate attendance record

        When checking out, the elapsed time since the open attendance
        line's check-in is split evenly across the given jobs/work
        orders: the open line takes the first slice and one new line is
        created per remaining job. When ``end`` is not the string
        ``'True'``, the attendance itself stays open and a fresh open
        line is started at the current time.

        :param job: list of job ids to split the elapsed time over
        :param wo: list of work order ids, parallel to ``job``
        :param end: ``'True'`` (string) to also close the attendance
        :raises exceptions.UserError: on multi-employee recordsets, on a
            missing job/work-order selection, or when no open check-in
            exists for a check-out
    """
    if len(self) > 1:
        raise exceptions.UserError(_('Cannot perform check in or check out on multiple employees.'))
    action_date = fields.Datetime.now()
    if self.attendance_state != 'checked_in':
        # Check in: open a new attendance plus its first (open) line.
        vals = {
            'employee_id': self.id,
            'check_in': action_date,
        }
        attendance = self.env['hr.attendance'].create(vals)
        vals_l = {
            'employee_id': self.id,
            'check_in': action_date,
            'attendance_id': attendance.id,
        }
        self.env['hr.attendance.line'].create(vals_l)
        return attendance
    else:
        # Check out / split: locate the open attendance and open line.
        attendance = self.env['hr.attendance'].search(
            [('employee_id', '=', self.id),
             ('check_out', '=', False)], limit=1)
        attendance_line = self.env['hr.attendance.line'].search(
            [('employee_id', '=', self.id),
             ('check_out', '=', False)], limit=1)
        if attendance:
            if end == 'True':
                # Final check out: close the attendance record too.
                attendance.check_out = action_date
                if attendance_line:
                    if not job or not wo:
                        raise exceptions.UserError(_('Must choose a job and work order to log time.'))
                    else:
                        # Split the elapsed time evenly over the jobs:
                        # the open line takes slice 0, new lines take
                        # the rest.
                        ct = len(job)
                        delta = (action_date - attendance_line.check_in) / ct
                        check = attendance_line.check_in + delta
                        for key, val in enumerate(job):
                            if key == 0:
                                attendance_line.check_out = check
                                attendance_line.job_id = val
                                attendance_line.workorder_id = wo[key]
                            else:
                                c_in = check
                                check = c_in + delta
                                vals_l = {
                                    'employee_id': self.id,
                                    'check_in': c_in,
                                    'check_out': check,
                                    'attendance_id': attendance.id,
                                    'job_id': val,
                                    'workorder_id': wo[key],
                                }
                                self.env['hr.attendance.line'].create(vals_l)
            else:
                # Intermediate split: close the current line(s) the same
                # way, then start a fresh open line so time keeps
                # accruing on the still-open attendance.
                if attendance_line:
                    if not job or not wo:
                        raise exceptions.UserError(_('Must choose a job and work order to log time.'))
                    else:
                        ct = len(job)
                        delta = (action_date - attendance_line.check_in) / ct
                        check = attendance_line.check_in + delta
                        for key, val in enumerate(job):
                            if key == 0:
                                attendance_line.check_out = check
                                attendance_line.job_id = val
                                attendance_line.workorder_id = wo[key]
                            else:
                                c_in = check
                                check = c_in + delta
                                vals_l = {
                                    'employee_id': self.id,
                                    'check_in': c_in,
                                    'check_out': check,
                                    'attendance_id': attendance.id,
                                    'job_id': val,
                                    'workorder_id': wo[key],
                                }
                                self.env['hr.attendance.line'].create(vals_l)
                vals_l = {
                    'employee_id': self.id,
                    'check_in': action_date,
                    'attendance_id': attendance.id,
                }
                self.env['hr.attendance.line'].create(vals_l)
        else:
            raise exceptions.UserError(_('Cannot perform check out on %(empl_name)s, could not find corresponding check in. '
                                         'Your attendances have probably been modified manually by human resources.') % {'empl_name': self.name, })
        return attendance
def update_move_data(self):
    """Create a manufacturing order for every stock move of the picking.

    For each move of ``self.picking_id``: look up a BoM for the move's
    product, require a linked sale order line (the MO copies many custom
    window-manufacturing fields from it), create the ``mrp.production``
    with the wizard's planned date, then flag the move with the new MO.

    :return: True
    :raises exceptions.UserError: when a move has no linked sale order
        line
    """
    self.ensure_one()
    picking_id = self.picking_id
    picking_obj = self.env["stock.picking"]
    production_obj = self.env['mrp.production']
    # Planned date entered in the wizard, applied to every created MO.
    new_date_planned = self.date_planned
    # NOTE(review): ``self.picking_id`` is passed straight to browse();
    # this assumes the field holds an id (or that browse tolerates the
    # record) — confirm the field type.
    pick = picking_obj.browse(picking_id)
    for move in pick.move_lines:
        # Find a BoM whose product matches the move's product.
        # The query is parameterized (was string-formatted before).
        self.env.cr.execute(
            """SELECT mrp_bom.id
                 FROM mrp_bom
                INNER JOIN product_product
                   ON mrp_bom.product_id = product_product.id
                WHERE product_product.id = %s""",
            (move.product_id.id,))
        res_req = self.env.cr.dictfetchone()
        bom_id = res_req["id"] if res_req else False
        # The MO copies its custom attributes from the sale order line
        # linked to the move.
        if move.sale_line_id:
            sale_line_id = move.sale_line_id
        else:
            raise exceptions.UserError(u"Ce movement de stock n'est lié à aucune ligne de bons de commande (sale.order.line)")
        vals = {
            'origin': move.origin,
            'product_id': move.product_id.id,
            'product_qty': move.product_qty,
            'product_uom_id': move.product_uom.id,
            'location_src_id': move.location_id.id,
            'location_dest_id': move.location_dest_id.id,
            'bom_id': bom_id,
            'date_planned': new_date_planned,
            'company_id': move.company_id.id,
            'largeur': move.largeur,
            'hauteur': move.hauteur,
            'is_printable': move.is_printable,
            'description': move.name,
            'partner_id': move.picking_id.partner_id.id,
            # fields coming from the "mim" sale wizard
            'dimension': sale_line_id.dimension,
            'vitre': sale_line_id.vitre.id,
            'type_vitre': sale_line_id.type_vitre,
            'decoratif': sale_line_id.decoratif.id,
            'poigne': sale_line_id.poigne.id,
            'nb_poigne': sale_line_id.nb_poigne,
            'serr': sale_line_id.serr.id,
            'nb_serr': sale_line_id.nb_serr,
            'oscillo_battant': sale_line_id.oscillo_battant,
            'va_et_vient': sale_line_id.va_et_vient,
            'butoir': sale_line_id.butoir,
            'remplissage_vitre': sale_line_id.remplissage_vitre,
            'type_fixe': sale_line_id.type_fixe,
            'inegalite': sale_line_id.inegalite,
            'cintre': sale_line_id.cintre,
            'triangle': sale_line_id.triangle,
            'division': sale_line_id.division,
            'nb_division': sale_line_id.nb_division,
            'laque': sale_line_id.laque,
            'moustiquaire': sale_line_id.moustiquaire,
            'tms': sale_line_id.tms,
            'type_moustiquaire': sale_line_id.type_moustiquaire,
            'intermediaire': sale_line_id.intermediaire,
        }
        id_mo = production_obj.create(vals)
        # Link the move to its freshly created manufacturing order.
        move.write({
            'id_mo': id_mo,
            'user_id': self.env.user.id,
            'is_mo_created': True,
        })
    return True
def _get_s3_bucket(self, name=None):
    """Connect to S3 and return the bucket

    The following environment variables can be set:
    * ``AWS_HOST``
    * ``AWS_REGION``
    * ``AWS_ACCESS_KEY_ID``
    * ``AWS_SECRET_ACCESS_KEY``
    * ``AWS_BUCKETNAME``

    If a name is provided, we'll read this bucket, otherwise, the bucket
    from the environment variable ``AWS_BUCKETNAME`` will be read.

    The bucket is created on the fly when it does not exist yet.

    :param name: optional bucket name overriding ``AWS_BUCKETNAME``
    :raises exceptions.UserError: when credentials/bucket name are
        missing, or when the S3 endpoint cannot be reached
    """
    host = os.environ.get('AWS_HOST')
    # Ensure host is prefixed with a scheme (use https as default)
    if host and not urlsplit(host).scheme:
        host = 'https://%s' % host
    region_name = os.environ.get('AWS_REGION')
    access_key = os.environ.get('AWS_ACCESS_KEY_ID')
    secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
    bucket_name = name or os.environ.get('AWS_BUCKETNAME')
    params = {
        'aws_access_key_id': access_key,
        'aws_secret_access_key': secret_key,
    }
    if host:
        params['endpoint_url'] = host
    if region_name:
        params['region_name'] = region_name
    if not (access_key and secret_key and bucket_name):
        msg = _('If you want to read from the %s S3 bucket, the following '
                'environment variables must be set:\n'
                '* AWS_ACCESS_KEY_ID\n'
                '* AWS_SECRET_ACCESS_KEY\n'
                'If you want to write in the %s S3 bucket, this variable '
                'must be set as well:\n'
                '* AWS_BUCKETNAME\n'
                'Optionally, the S3 host can be changed with:\n'
                '* AWS_HOST\n') % (bucket_name, bucket_name)
        raise exceptions.UserError(msg)
    s3 = boto3.resource('s3', **params)
    bucket = s3.Bucket(bucket_name)
    exists = True
    try:
        s3.meta.client.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        # If a client error is thrown, then check that it was a 404 error.
        # If it was a 404 error, then the bucket does not exist.
        error_code = e.response['Error']['Code']
        if error_code == '404':
            exists = False
    except EndpointConnectionError as error:
        # log verbose error from s3, return short message for user
        _logger.exception('Error during connection on S3')
        raise exceptions.UserError(str(error))
    if not exists:
        # Bucket is missing: create it, honouring the configured region.
        if not region_name:
            bucket = s3.create_bucket(Bucket=bucket_name)
        else:
            bucket = s3.create_bucket(Bucket=bucket_name,
                                      CreateBucketConfiguration={
                                          'LocationConstraint': region_name
                                      })
    return bucket
def create_warranty_order_by_driver(self):
    """Create a warranty order for each selected warranty plan sheet.

    For every plan sheet in the active selection that has no maintain
    sheet yet: flag the vehicle, create a ``warranty_order`` copying the
    plan data, then populate its project, instruction and available
    product lines from every warranty category found under the sheet's
    category subtree (matched via the ``idpath`` materialised path).

    :raises exceptions.UserError: when a plan sheet has no
        ``report_repair_user``
    """
    active_ids = self._context.get('active_ids')
    plan_sheets = self.env['warranty_plan_order'].browse(active_ids)
    for plan_sheet in plan_sheets:
        if not plan_sheet.report_repair_user:
            raise exceptions.UserError(_("report_repair_user Required!"))
        # Only sheets without an existing maintain sheet are processed.
        if not plan_sheet.maintain_sheet_id:
            plan_sheet.vehicle_id.state = 'warrantly'
            plan = plan_sheet.parent_id
            # Sequence the order name after the existing orders of the
            # same plan.
            maintain_sheets = self.env['warranty_order'].search([('plan_id', '=', plan.id)])
            maintain_sheets_count = len(maintain_sheets)
            maintain_sheet_val = {
                'name': plan.name + '_' + str(maintain_sheets_count + 1),
                # +''+str(maintain_sheets_count)
                'vehicle_id': plan_sheet.vehicle_id.id,
                'vehicle_type': plan_sheet.vehicle_type.id,
                'license_plate': plan_sheet.license_plate,
                'fleet': plan_sheet.fleet.id,
                'operating_mileage': plan_sheet.operating_mileage,
                'warranty_category': plan_sheet.approval_warranty_category.id,
                'planned_date': plan_sheet.planned_date,
                'vin': plan_sheet.vin,
                'average_daily_kilometer': plan_sheet.average_daily_kilometer,
                'line': plan_sheet.line.id,
                'warranty_location': plan_sheet.warranty_location.id,
                'plan_id': plan.id,
                'report_repair_user': plan_sheet.report_repair_user.id
            }
            maintain_sheet = self.env['warranty_order'].create(maintain_sheet_val)
            # Collect every category in the subtree of the order's
            # category using the '/id/' materialised-path column.
            category_id = maintain_sheet.warranty_category.id
            condition = '%/' + str(category_id) + '/%'
            sql_query = """ select id,idpath from warranty_category where idpath like %s order by idpath asc """
            self.env.cr.execute(sql_query, (condition,))
            results = self.env.cr.dictfetchall()
            sheet_items = []
            available_products = []
            sheet_instructions = []
            for line in results:
                category = self.env['warranty_category'].search([('id', '=', line.get('id'))])
                project_ids = category.project_ids
                for project in project_ids:
                    # One work item per (category, project) pair.
                    order_project = {
                        'warranty_order_id': maintain_sheet.id,
                        'category_id': category.id,
                        'project_id': project.id,
                        'sequence': len(sheet_items) + 1,
                        'work_time': project.manhour,
                        'percentage_work': 100,
                        # Vehicle components matching the project's
                        # important products.
                        'component_ids': [(6, 0, plan_sheet.vehicle_id.mapped('component_ids').filtered(lambda x: x.product_id in project.important_product_id).ids)]
                    }
                    sheet_items.append((0, 0, order_project))
                    # Matching instruction line carrying the manual text.
                    sheet_instruction = {
                        'warranty_order_id': maintain_sheet.id,
                        'category_id': category.id,
                        'project_id': project.id,
                        'sequence': len(sheet_instructions) + 1,
                        'operational_manual': project.operational_manual
                    }
                    sheet_instructions.append((0, 0, sheet_instruction))
                    # One available-product line per BoM entry of the
                    # project.
                    warranty_project = self.env['warranty_project'].search([('id', '=', project.id)])
                    boms = warranty_project.avail_ids
                    for bom in boms:
                        available_product = {
                            'sequence': len(available_products) + 1,
                            'warranty_order_id': maintain_sheet.id,
                            'category_id': category.id,
                            'project_id': project.id,
                            'product_id': bom.product_id.id,
                            'change_count': bom.change_count,
                            'max_count': bom.max_count,
                            'require_trans': bom.require_trans
                        }
                        available_products.append((0, 0, available_product))
            maintain_sheet.write({'project_ids': sheet_items,
                                  'available_product_ids': available_products,
                                  'instruction_ids': sheet_instructions})
            plan_sheet.update({'maintain_sheet_id': maintain_sheet.id,
                               'state': 'executing'})
def unlink(self):
    """Delete the projects, refusing any record already exported.

    A record counts as exported when its ``external_id`` is set.

    :raises exceptions.UserError: when at least one record was exported
    """
    exported_flags = self.mapped('external_id')
    if any(exported_flags):
        raise exceptions.UserError(
            _('Exported project cannot be deleted.')
        )
    return super().unlink()
def send_text(self, openid, text):
    """Send a plain-text message to a WeChat user.

    Wraps the WeChat client call so that transport failures surface as a
    ``UserError`` instead of a raw ``ClientException``.

    :param openid: WeChat user identifier
    :param text: message body to send
    """
    client = self.wxclient
    try:
        client.send_text_message(openid, text)
    except ClientException as exc:
        raise exceptions.UserError(u'发送失败 %s' % exc)
def attendance_action_change(self, location):
    """ Check In/Check Out action
        Check In: create a new attendance record
        Check Out: modify check_out field of appropriate attendance record

        The server clock is assumed to be KST; when the reported
        ``location`` contains "China" or "Vietnam" the current time is
        shifted back 1 or 2 hours respectively. Check-in is only allowed
        after the daily cut line (15:00 server time of the first
        check-in day), check-out only 1 hour after the first check-in.

        :param location: human-readable place string recorded on the
            attendance
        :raises exceptions.UserError: on multi-employee recordsets
        :raises UserError: when outside the check-in/check-out windows
    """
    if len(self) > 1:
        raise exceptions.UserError(
            _('Cannot perform check in or check out on multiple employees.'
              ))
    # Current server time.
    present_date = datetime.now()
    # First check-in of this employee, used to derive the cut lines.
    hr_attendance = self.env['hr.attendance'].search(
        [('employee_id', '=', self.id)], limit=1)
    check_in_date = hr_attendance.check_in
    # Initialise the check-in/check-out cut lines.
    check_in_cut_line = ""
    check_out_cut_line = ""
    find_1 = "China"
    find_2 = "Vietnam"
    # BUG FIX: the previous version overwrote the ``location`` argument
    # with a hard-coded Vietnamese address (leftover debug code), which
    # made the China branch unreachable and stored a wrong place on
    # every attendance. The caller's value is now used; guard against a
    # missing location for the substring checks.
    China = (location or "").find(find_1)
    Vietnam = (location or "").find(find_2)
    if China != -1:
        # China is 1 hour behind the server clock.
        present_date = present_date - timedelta(hours=1)
        _logger.warning("date%s" % present_date)
    elif Vietnam != -1:
        # Vietnam is 2 hours behind the server clock.
        present_date = present_date - timedelta(hours=2)
        _logger.warning("date2%s" % present_date)
    # When a first check-in exists, derive the cut lines from it.
    if check_in_date != False:
        # Parse the stored check-in (string field) into a datetime.
        check_in_last_time = datetime.strptime(check_in_date,
                                               '%Y-%m-%d %H:%M:%S')
        # Check-in cut line: 15:00 server time (midnight local, the
        # server being 9 hours behind local time per original comment).
        check_in_cut_line = check_in_last_time
        check_in_cut_line = check_in_cut_line.replace(hour=15,
                                                     minute=0,
                                                     second=0)
        # Check-out cut line: one hour after the check-in.
        check_out_cut_line = check_in_last_time + relativedelta(hours=1)
    _logger.warning(self.attendance_state)
    if self.attendance_state != 'checked_in':
        # Check-in branch.
        if check_in_date != False:
            # Only allow a new check-in after the cut line.
            if present_date > check_in_cut_line:
                vals = {
                    'employee_id': self.id,
                    'check_in': present_date,
                    'check_in_place': location,
                }
                self.attendance_state = 'checked_in'
                return self.env['hr.attendance'].create(vals)
            else:
                raise UserError(_('출근시간이 아닙니다.'))
        else:
            # First ever check-in for this employee: no cut line yet.
            vals = {
                'employee_id': self.id,
                'check_in': present_date,
                'check_in_place': location,
            }
            self.attendance_state = 'checked_in'
            return self.env['hr.attendance'].create(vals)
    else:
        # Check-out branch: find the open attendance of this employee.
        attendance = self.env['hr.attendance'].search(
            [('employee_id', '=', self.id), ('check_out', '=', False),
             ('check_in', '!=', False)], limit=1)
        _logger.warning(attendance)
        if attendance:
            # Only allow check-out after the cut line (1h after
            # check-in).
            if present_date > check_out_cut_line:
                attendance.check_out = present_date
                attendance.check_out_place = location
                self.attendance_state = 'checked_out'
            else:
                raise UserError(_('퇴근시간이 아닙니다.'))
        return attendance
def _process_order(self, pos_order):
    """Create a POS order record from the UI-submitted order dict.

    Extends the standard flow with return-order handling: quantities,
    statement amounts and totals are negated, the original lines'
    returned quantity is incremented, and nothing is counted as paid.
    Also registers the change ("amount_return") as a negative cash
    payment, falling back through the session's journals to find a cash
    journal.

    :param pos_order: the order payload sent by the point of sale UI
    :return: the created ``pos.order`` record
    :raises exceptions.UserError: when change must be returned but no
        cash journal/statement can be found in the session
    """
    prec_acc = self.env['decimal.precision'].precision_get('Account')
    pos_session = self.env['pos.session'].browse(
        pos_order['pos_session_id'])
    # Re-target the order to an open session if its session is closing
    # or closed.
    if pos_session.state == 'closing_control' or pos_session.state == 'closed':
        pos_order['pos_session_id'] = self._get_valid_session(pos_order).id
    if pos_order.get('is_return_order', False):
        # Return order: flip quantities/amounts and track how much of
        # each original line has been returned.
        pos_order['amount_paid'] = 0
        for line in pos_order['lines']:
            line_dict = line[2]
            line_dict['qty'] = line_dict['qty'] * -1
            original_line = self.env['pos.order.line'].browse(
                line_dict.get('original_line_id', False))
            original_line.line_qty_returned += abs(line_dict.get('qty', 0))
        for statement in pos_order['statement_ids']:
            statement_dict = statement[2]
            statement_dict['amount'] = statement_dict['amount'] * -1
        pos_order['amount_tax'] = pos_order['amount_tax'] * -1
        pos_order['amount_return'] = 0
        pos_order['amount_total'] = pos_order['amount_total'] * -1
    order = self.create(self._order_fields(pos_order))
    journal_ids = set()
    for payments in pos_order['statement_ids']:
        if not float_is_zero(payments[2]['amount'],
                             precision_digits=prec_acc):
            order.add_payment(self._payment_fields(payments[2]))
        journal_ids.add(payments[2]['journal_id'])
    # Keep the session's sequence counter ahead of the UI's.
    if pos_session.sequence_number <= pos_order.get('sequence_number', 0):
        pos_session.write(
            {'sequence_number': pos_order['sequence_number'] + 1})
        pos_session.refresh()
    if not float_is_zero(pos_order['amount_return'], prec_acc):
        cash_journal_id = pos_session.cash_journal_id.id
        if not cash_journal_id:
            # Select for change one of the cash journals used in this
            # payment
            cash_journal = self.env['account.journal'].search([
                ('type', '=', 'cash'),
                ('id', 'in', list(journal_ids)),
            ], limit=1)
            if not cash_journal:
                # If none, select for change one of the cash journals of the POS
                # This is used for example when a customer pays by credit card
                # an amount higher than total amount of the order and gets cash back
                cash_journal = [
                    statement.journal_id
                    for statement in pos_session.statement_ids
                    if statement.journal_id.type == 'cash'
                ]
                if not cash_journal:
                    raise exceptions.UserError(
                        _(u"No se encontró ninguna declaración de efectivo para esta sesión. No se puede registrar el efectivo devuelto."
                          ))
            cash_journal_id = cash_journal[0].id
        # Record the change given back as a negative cash payment.
        order.add_payment({
            'amount': -pos_order['amount_return'],
            'payment_date': fields.Datetime.now(),
            'payment_name': _('return'),
            'journal': cash_journal_id,
        })
    return order
def _manage_images_on_page(self, page_node, data_node, exported_resources):
    """- Extract images from page and generate an xml node
       - Replace db id in url with xml id

    Walks every ``<img>`` of ``page_node``; for each image served
    through the known image routes, resolves the ``ir.attachment``
    behind it, rewrites the ``src`` to an xml-id based URL, and emits a
    ``<record>`` under ``data_node`` (once per xml id, tracked via
    ``exported_resources``).

    :param page_node: parsed HTML tree of the help page.
    :param data_node: XML ``<data>`` element records are appended to.
    :param exported_resources: set of already-exported xml ids (mutated).
    :raises exceptions.UserError: if an image is not an ir.attachment.
    """
    img_model = 'ir.attachment'
    urls = self.img_url_map.bind("dummy.org", "/")
    for img_elem in page_node.iter('img'):
        img_src = img_elem.get('src')
        parse_result = urlparse.urlparse(img_src)
        path = parse_result.path
        query_args = parse_result.query
        if urls.test(parse_result.path, "GET"):
            endpoint, kwargs = urls.match(path, "GET",
                                          query_args=query_args)
            kwargs.update(dict(urlparse.parse_qsl(query_args)))
            image = None
            # Resolve the binary record either by xml id or by db id.
            xml_id = kwargs.get('xmlid')
            if xml_id:
                image = self.env.ref(xml_id, False)
            else:
                _id = kwargs.get('id')
                model = kwargs.get('model', 'ir.attachment')
                if _id and model:
                    # Strip the cache-busting "unique" suffix ("<id>_<hash>").
                    _id, _, unique = str(_id).partition('_')
                    image = self.env[model].browse(int(_id))
            if (not image or not image.exists()
                    or image._name != img_model):
                raise exceptions.UserError(
                    _('Only images from ir.attachment are supported when '
                      'exporting help pages'))
            # export_data returns the xml id plus the field values.
            exported_data = image.export_data([
                'id', 'datas', 'datas_fname', 'name', 'res_model',
                'mimetype'
            ], raw_data=False)['datas'][0]
            xml_id = exported_data[0]
            img_elem.attrib['src'] = '/web/image/%s' % xml_id
            if xml_id in exported_resources:
                # Record already emitted for another <img>; only the
                # src rewrite above was needed.
                continue
            # SubElement creates AND attaches the record node; the
            # previous explicit data_node.append(img_node) afterwards
            # duplicated the record in the output (stdlib ElementTree
            # keeps both children) and has been removed.
            img_node = ET.SubElement(data_node, 'record', attrib={
                'id': xml_id,
                'model': image._name
            })
            for field_name, field_text in (
                    ('datas', str(exported_data[1])),
                    ('datas_fname', exported_data[2]),
                    ('name', exported_data[3]),
                    ('res_model', exported_data[4]),
                    ('mimetype', exported_data[5])):
                field_node = ET.SubElement(img_node, 'field',
                                           attrib={'name': field_name})
                field_node.text = field_text
            exported_resources.add(xml_id)
def _check_capacity(self):
    """Constraint: every work center capacity must be > 0."""
    for workcenter in self:
        if workcenter.capacity <= 0.0:
            raise exceptions.UserError(
                _('The capacity must be strictly positive.'))
def import_data_product(self):
    """Import product templates from the 'Product' sheet of the
    uploaded Excel file.

    Expected columns: 0=name, 1=type, 2=default_code, 3=category name,
    4=list price, 5=cost, 6=sale tax name, 8=vendor tax name,
    10=sale_ok flag (1), 11=purchase_ok flag (1). Row 0 is the header.
    Products already matching by name/default_code are skipped.

    :raises exceptions.UserError: if no file was selected.
    :raises UserError: if a referenced tax cannot be found.
    """
    if not self.xls_file:
        raise exceptions.UserError(_('Please Select Excel file'))
    wb = xlrd.open_workbook(
        file_contents=base64.decodestring(self.xls_file))
    final_data_product = []
    for sheet in wb.sheets():
        if sheet.name != 'Product':
            continue
        final_data_product = []
        for row in range(1, sheet.nrows):
            # Parameterized query: the previous code interpolated cell
            # values straight into the SQL string, which broke on
            # quotes and was an injection vector.
            self._cr.execute(
                "SELECT id FROM product_category WHERE name = %s",
                (sheet.cell(row, 3).value,))
            product_categ_id = self._cr.fetchone()
            product_categ = product_categ_id and product_categ_id[0] or False

            # Fixed: the original assignment ended with a stray comma,
            # making s_tax_name a 1-tuple, so an EMPTY tax cell was
            # still truthy and raised a spurious "not available" error.
            s_tax_name = sheet.cell(row, 6).value
            self._cr.execute(
                "SELECT id FROM account_tax WHERE name = %s",
                (s_tax_name,))
            s_tax_id = self._cr.fetchone()
            sale_tax = s_tax_id and s_tax_id[0] or False
            if s_tax_name and not sale_tax:
                msg = 'Sale TAX not Avaiable ' \
                      'Tax name %s !\n ' % (s_tax_name)
                raise UserError(_('Data Not Available !\n' + msg))

            v_tax_name = sheet.cell(row, 8).value
            self._cr.execute(
                "SELECT id FROM account_tax WHERE name = %s",
                (v_tax_name,))
            p_tax_id = self._cr.fetchone()
            purchase_tax = p_tax_id and p_tax_id[0] or False
            if v_tax_name and not purchase_tax:
                # Fixed: the message previously reported the *sale* tax
                # name when the vendor tax was missing.
                msg = 'Vendor TAX not Avaiable ' \
                      'Tax name %s !\n ' % (v_tax_name)
                raise UserError(_('Data Not Available !\n' + msg))

            sale_ok = sheet.cell(row, 10).value == 1
            purchase_ok = sheet.cell(row, 11).value == 1

            name = sheet.cell(row, 0).value
            default_code = str(sheet.cell(row, 2).value)
            product_domain = []
            if name:
                product_domain.append(('name', 'ilike', name))
            if default_code:
                product_domain.append(
                    ('default_code', 'ilike', default_code))
            product_id = False
            if product_domain:
                product_id = self.env['product.template'].search(
                    product_domain, limit=1)
            if not product_id:
                final_data_product.append({
                    'name': name,
                    'type': sheet.cell(row, 1).value,
                    'default_code': default_code,
                    'lst_price': sheet.cell(row, 4).value,
                    'standard_price': sheet.cell(row, 5).value,
                    'sale_ok': sale_ok,
                    'purchase_ok': purchase_ok,
                    # Fall back to the root category (db id 1) when the
                    # spreadsheet category does not exist.
                    'categ_id': product_categ or 1,
                    'taxes_id': [(6, 0, [sale_tax])],
                    'supplier_taxes_id': [(6, 0, [purchase_tax])],
                })
    # Create the collected products once all rows are validated.
    for product in final_data_product:
        self.env['product.template'].create(product)
def import_images(self):
    """Import product images described by the uploaded CSV file.

    Each CSV row is ``default_code;tag;image_path``; the image binary
    comes either from the optional uploaded zip or from the path itself
    (resolved by ``get_image_base64``). Per-row failures are collected
    and reported all at once at the end.

    :raises exceptions.UserError: on bad CSV headers, or with the
        aggregated list of row errors.
    """
    errors = []
    file_csv = StringIO.StringIO(base64.decodestring(self.file_csv))
    # DELIMITER / HEADERS are module-level constants — presumably ';'
    # and the canonical header line; verify at module top (not visible
    # here).
    reader = csv.reader(file_csv, delimiter=DELIMITER)
    headers = next(reader, None)
    if headers != HEADERS.split(DELIMITER):
        raise exceptions.UserError(
            _("Invalid CSV file headers found! Expected: %s" % HEADERS))
    # Allow very large base64 cells (default csv limit is 128 KiB).
    csv.field_size_limit(sys.maxsize)
    file_zip = None
    if self.file_zip:
        file_zip = StringIO.StringIO(base64.decodestring(self.file_zip))
    file_obj = self.env["storage.file"]
    image_obj = self.env["storage.image"]
    relation_obj = self.env["product.image.relation"]
    for row in reader:
        if not row:
            continue
        default_code, tag_name, image_path = row
        try:
            mimetype, image_base64 = self.get_image_base64(
                image_path, file_zip)
        except Exception:
            # Best-effort: record the failure and keep importing the
            # remaining rows.
            errors.append('%s: impossible to retrieve file "%s"' %
                          (default_code, image_path))
            continue
        product = self.env[self.product_model].search([
            ("default_code", "=", default_code)
        ])
        if not product:
            errors.append("Could not find the product '%s'" %
                          default_code)
            continue
        # NOTE(review): os.path.join with a single argument is a no-op;
        # the full path is used as the image name as-is.
        image_name = os.path.join(image_path)
        vals = {
            "data": image_base64,
            "name": image_name,
            "file_type": "image",
            "mimetype": mimetype,
            "backend_id": self.storage_backend_id.id,
        }
        file_id = file_obj.create(vals)
        tag_id = self.get_tag(tag_name)
        image = image_obj.create({
            "file_id": file_id.id,
            "name": image_name,
            "alt_name": image_name,
        })
        if self.overwrite:
            # Drop any pre-existing relation with the same image name,
            # tag and template before linking the fresh one.
            domain = [
                ("image_id.name", "=", image.name),
                ("tag_id", "=", tag_id),
                ("product_tmpl_id", "=", product.id),
            ]
            relation_obj.search(domain).unlink()
        # Relations always attach to the product *template*; a
        # product.product row is mapped to its template.
        if self.product_model == "product.template" and product:
            relation_obj.create({
                "image_id": image.id,
                "tag_id": tag_id,
                "product_tmpl_id": product.id,
            })
        elif self.product_model == "product.product" and product:
            relation_obj.create({
                "image_id": image.id,
                "tag_id": tag_id,
                "product_tmpl_id": product.product_tmpl_id.id,
            })
    if errors:
        raise exceptions.UserError(_("\n".join(errors)))
def import_data_invoice(self):
    """Import and validate invoices from the 'Invoice' sheet of the
    uploaded Excel file.

    Expected columns: 0=invoice number, 1=type (out_invoice /
    in_invoice), 2=partner name, 3=NIT (VAT), 4=invoice date,
    5=origin, 6=product name, 7=quantity, 8=unit price,
    10=comma-separated tax names, 11=analytic account name,
    12=due date. Rows sharing the same invoice number are grouped into
    one invoice with multiple lines. Lines are built through onchanges
    so accounts/taxes are filled as in the UI, then each invoice is
    opened and its number forced back to the spreadsheet value.

    :raises exceptions.UserError: if no file was selected.
    :raises UserError: when a partner, product or tax is not found.
    """
    if not self.xls_file:
        raise exceptions.UserError(_('Please Select Excel file'))
    wb = xlrd.open_workbook(
        file_contents=base64.decodestring(self.xls_file))
    for sheet in wb.sheets():
        if sheet.name == 'Invoice':
            invoice_list = []
            invoice_val = []
            # Pass 1: validate every row and collect flat dicts.
            for row in range(sheet.nrows):
                if row != 0:  # row 0 is the header
                    obj_product = self.env['product.template']
                    in_type = sheet.cell(row, 1).value
                    partner_name = sheet.cell(row, 2).value.strip()
                    nit = sheet.cell(row, 3).value
                    domain = []
                    if partner_name:
                        domain.append(('name', 'ilike', partner_name))
                    if nit:
                        domain.append(('vat', '=', nit))
                    err_msg = ''
                    # Restrict the partner search to customers or
                    # suppliers depending on the invoice type.
                    if in_type == "out_invoice":
                        domain.append(('customer', '=', True))
                        err_msg = "Customer"
                    elif in_type == "in_invoice":
                        domain.append(('supplier', '=', True))
                        err_msg = "Supplier"
                    partner_id = self.env['res.partner'].search(domain,
                                                                limit=1)
                    if not partner_id:
                        msg = '%s not Avaiable ' \
                            'Partner name %s and NIT %s !\n ' % (err_msg,
                                                                 partner_name,
                                                                 nit)
                        raise UserError(_('Data Not Available !\n' + msg))
                    product_name = sheet.cell(row, 6).value
                    product_id = obj_product.search(
                        [('name', 'ilike', product_name)], limit=1)
                    if not product_id:
                        msg = 'Product not Avaiable ' \
                            'Product name _(%s) !\n ' % (product_name)
                        raise UserError(_('Data Not Available !\n' + msg))
                    # Taxes: column 10 may hold several names separated
                    # by commas; all must resolve or the import aborts.
                    tax_data = sheet.cell(row, 10).value
                    tax_ids = []
                    if tax_data:
                        tax_list = tax_data.split(",")
                        tax_ids = self.env['account.tax'].search([
                            ('name', 'in', tax_list)
                        ])
                        if tax_list and not tax_ids:
                            msg = 'Tax not Avaiable '
                            raise UserError(
                                _('Data Not Available !\n' + msg))
                        tax_ids = tax_ids.ids
                    extra_field = sheet.cell(row, 11).value
                    extra_id = False
                    if extra_field:
                        extra_id = self.env[
                            'account.analytic.account'].search(
                                [('name', 'ilike',
                                  extra_field)], limit=1).id
                    # Excel stores dates as floats; convert using the
                    # workbook's date mode.
                    int_date = datetime.datetime(*xlrd.xldate_as_tuple(
                        sheet.cell(row, 4).value, wb.datemode))
                    due_date = datetime.datetime(*xlrd.xldate_as_tuple(
                        sheet.cell(row, 12).value, wb.datemode))
                    invoice_data = {
                        'number': sheet.cell(row, 0).value,
                        'type': in_type,
                        'partner_id': partner_id.id,
                        'date_invoice': int_date,
                        'date_due': due_date,
                        'origin': sheet.cell(row, 5).value,
                        'product_id': product_id.id,
                        'quantity': sheet.cell(row, 7).value,
                        'price_unit': sheet.cell(row, 8).value,
                        'tax': tax_ids,
                        'account_analytic_id': extra_id
                    }
                    invoice_list.append(invoice_data)
            if invoice_list:
                # Pass 2: group rows by invoice number — one header
                # dict per number, then attach its 'lines' list.
                invoice_dict = {}
                for key in invoice_list:
                    in_type = key['type']
                    if key['number'] not in invoice_dict.keys():
                        data = {
                            'number': key['number'],
                            'type': key['type'],
                            'partner_id': key['partner_id'],
                            'date_invoice': key['date_invoice'],
                            'origin': key['origin'],
                            'date_due': key['date_due'],
                        }
                        invoice_dict.update({key['number']: data})
                for key in invoice_dict:
                    lst = []
                    for final_data in invoice_list:
                        if key == final_data['number']:
                            invoice_line_ids = {
                                'product_id': final_data.get('product_id'),
                                'quantity': final_data.get('quantity'),
                                'price_unit': final_data.get('price_unit'),
                                'tax': final_data.get('tax'),
                                'account_analytic_id':
                                final_data.get('account_analytic_id'),
                            }
                            lst.append(invoice_line_ids)
                    if lst and invoice_dict.get(key):
                        invoice_dict.get(key).update(
                            {'lines': lst})
                for d in invoice_dict.values():
                    invoice_val.append(d)
            # Pass 3: create each invoice, build its lines through the
            # UI onchanges, validate, then restore the original number
            # (action_invoice_open assigns a sequence number).
            for inv_data in invoice_val:
                order_data = {
                    'partner_id': inv_data.get('partner_id'),
                    'date_invoice':
                    inv_data.get('date_invoice'),
                    'type': inv_data.get('type'),
                    'number': inv_data.get('number'),
                    'date_due': inv_data.get('date_due'),
                }
                obj_account_inv = self.env['account.invoice']
                invoice_id = obj_account_inv.create(order_data)
                if invoice_id:
                    invoice_lines = invoice_id.invoice_line_ids
                    for line in inv_data.get('lines'):
                        # new() creates an in-memory record so
                        # _onchange_product_id can fill account, name,
                        # etc. exactly as the form view would.
                        invoice_line = invoice_lines.new()
                        invoice_line.invoice_id = invoice_id.id
                        invoice_line.product_id = line.get('product_id')
                        invoice_line._onchange_product_id()
                        invoice_line.quantity = line.get('quantity')
                        invoice_line.price_unit = line.get('price_unit')
                        invoice_line.account_analytic_id = line.get(
                            'account_analytic_id')
                        # Override the onchange-proposed taxes with the
                        # ones from the spreadsheet.
                        invoice_line.invoice_line_tax_ids = []
                        invoice_line.invoice_line_tax_ids = [[
                            6, 0, line.get('tax')
                        ]]
                        invoice_id.invoice_line_ids = invoice_id.invoice_line_ids | invoice_line
                    invoice_id._onchange_invoice_line_ids()
                    invoice_id.action_invoice_open()
                    invoice_id.write({'number': inv_data.get('number')})
def _add_to_shop_shopee(self, data=None):
    """Publish this e-commerce product on Shopee.

    Builds the item payload from the matching product preset, calls the
    Shopee ``item.add`` endpoint, and — if the product has attribute
    lines — initializes tier variations and stores the returned
    variation ids back on the local variant records.

    :param data: optional dict of extra payload values; entries built
        here override any same-named keys passed in.
    :raises exceptions.UserError: if there are more than 2 attribute
        lines (Shopee supports at most a 2-level tier variation).
    """
    self.ensure_one()
    # Preset field name is platform-dependent, e.g.
    # 'shopee_product_preset_id'.
    preset = self.product_tmpl_id.mapped('{}_product_preset_id'.format(
        self.platform_id.platform))
    self.calculate_stock()
    self.ecomm_product_product_ids.calculate_stock()
    data = data or {}
    data.update({
        'category_id': preset.ecomm_categ_id.platform_categ_idn,
        'name': self.name,
        'description': self.description,
        'price': self.price,
        'stock': self.stock,
        'item_sku': self.product_tmpl_id.default_code,
        'images': [{
            'url': url
        } for url in self._upload_image_shopee(
            self.mapped('ecomm_product_image_ids.image_url')) if url],
        'attributes': [{
            'attributes_id': a['idn'],
            'value': a['value']
        } for a in preset.format_attr_values()],
        'logistics': [{
            'logistic_id': l.ecomm_carrier_id.logistic_idn,
            'enabled': l.enable,
        } for l in self.carrier_ids],
        'weight': preset.weight,
        'package_length': int(preset.package_length),
        'package_width': int(preset.package_width),
        'package_height': int(preset.package_height),
        'days_to_ship': preset.days_to_ship,
        'condition': preset.condition,
        'status': preset.status,
        'is_pre_order': preset.is_pre_order,
    })
    # Falsy values are stripped so Shopee does not reject empty fields.
    resp = self.shop_id._py_client_shopee().item.add(
        {k: v for k, v in data.items() if v})
    if resp.get('item_id'):
        write_vals = {
            'platform_item_idn': resp['item_id'],
            'status': resp['item']['status'],
        }
        if self.attribute_line_ids:
            if len(self.attribute_line_ids) > 2:
                raise exceptions.UserError(
                    'Tier variation should be under 2 level')
            tier_variation = [{
                'name': line.name,
                'options': line.line_value_ids.mapped('name')
            } for line in self.attribute_line_ids[:2]]
            # Shopee only accepts option images on the FIRST tier, and
            # only when every option has one.
            image_urls = [
                val.ecomm_product_image_ids[:1].image_url
                for val in self.attribute_line_ids[0].line_value_ids
            ]
            if all(image_urls):
                tier_variation[0][
                    'images_url'] = self._upload_image_shopee(image_urls)
                # Write the (possibly re-hosted) urls back to the
                # option image records.
                write_vals.update({
                    'attribute_line_ids':
                    [(1, self.attribute_line_ids[0].id, {
                        'line_value_ids': [(1, line.id, {
                            'ecomm_product_image_ids':
                            [(1, line.ecomm_product_image_ids[0].id, {
                                'image_url':
                                image_urls[i],
                            })]
                        }) for i, line in enumerate(
                            self.attribute_line_ids[0].line_value_ids)]
                    })],
                })
            variation = []
            for v in self.ecomm_product_product_ids:
                vals = {
                    'tier_index': v.attr_line_value_ids.mapped('sequence'),
                    'stock': v.stock,
                    'price': v.price
                }
                if v.sku:
                    vals['variation_sku'] = v.sku
                variation.append(vals)
            init_data = {
                'item_id': resp['item_id'],
                'tier_variation': tier_variation,
                'variation': variation,
            }
            init_resp = self.shop_id._py_client_shopee(
            ).item.init_tier_variation(**init_data)
            # NOTE(review): assumes Shopee returns variation ids in the
            # same order as the variants were sent — confirm with the
            # API docs.
            if init_resp.get('variation_id_list'):
                write_vals.update({
                    'ecomm_product_product_ids': [(1, v.id, {
                        'platform_variant_idn':
                        init_resp['variation_id_list'][i]['variation_id']
                    }) for i, v in enumerate(self.ecomm_product_product_ids)]
                })
        self.write(write_vals)
def unlink(self):
    """Restrict deletion to reports still in draft or cancelled state."""
    for report in self:
        if report.state not in ('draft', 'cancelled'):
            raise exceptions.UserError(_("Only reports in 'draft' or "
                                         "'cancelled' state can be removed"))
    return super(L10nEsAeatReport, self).unlink()
def _make_preset_shopee(self, context=None, data=None):
    """Create or update the Shopee product preset for this product.

    Reuses the template's existing preset or creates a fresh one, then
    fills it either from the caller-supplied ``data`` dict or from the
    live Shopee item detail (when the product is already published).

    :param context: unused here.
    :param data: optional pre-built preset values; skips the API call.
    :raises exceptions.UserError: if the product has no matched
        template, or neither ``data`` nor a platform item id exists.
    """
    self.ensure_one()
    if not self.product_tmpl_id:
        raise exceptions.UserError(
            'Can not set preset for unmatched product')
    # NOTE(review): the gettext ``_`` is (ab)used below as a throwaway
    # placeholder in (4, id, _) / (0, _, vals) ORM command tuples; the
    # ORM ignores that element for these commands, so it is harmless
    # but confusing — 0 or False would be clearer.
    preset_id = self.product_tmpl_id.shopee_product_preset_id or self.env[
        'shopee.product.preset'].create({
            'platform_id': self.platform_id.id,
            'ecomm_categ_selector_id':
            self.env['ecommerce.category.selector'].create({
                'platform_id': self.platform_id.id,
            }).id,
            'product_tmpl_ids': [(4, self.product_tmpl_id.id, _)],
        })
    if data:
        val = data
    elif self.platform_item_idn:
        # Pull the published item from Shopee and map the scalar fields.
        data = self.shop_id._py_client_shopee().item.get_item_detail(
            item_id=int(self.platform_item_idn)).get('item')
        val = {
            k: data.get(k)
            for k in [
                'name', 'description', 'weight', 'package_length',
                'package_width', 'package_height', 'condition',
                'is_pre_order', 'days_to_ship'
            ]
        }
        # For every category attribute, remember its current value on
        # the item and resolve (or lazily create) the local
        # ecommerce.attribute record.
        # NOTE(review): assumes get_attributes and the item's
        # 'attributes' list are index-aligned — confirm with the API.
        attrs = self.shop_id._py_client_shopee().item.get_attributes(
            category_id=data.get('category_id')).get('attributes')
        for i, attr in enumerate(attrs):
            attr['attribute_value'] = data.get(
                'attributes')[i]['attribute_value']
            attr['attr_id'] = self.env['ecommerce.attribute'].search([
                ('platform_id', '=', self.platform_id.id),
                ('platform_attr_idn', '=', attr['attribute_id'])
            ])[:1].id or self.env['ecommerce.attribute'].create({
                'name': attr['attribute_name'],
                'platform_id': self.platform_id.id,
                'platform_attr_idn': attr['attribute_id'],
                'mandatory': attr['is_mandatory'],
                'attr_type': attr['attribute_type'],
                'input_type': attr['input_type'],
                'value_ids': [(0, _, {
                    'name': option,
                }) for option in attr['options']],
            }).id
        # Attach category, attribute lines and images to the preset.
        val.update({
            'platform_id': self.platform_id.id,
            'ecomm_categ_id': self.env['ecommerce.category'].search([
                ('platform_id', '=', self.platform_id.id),
                ('platform_categ_idn', '=', data.get('category_id'))
            ])[:1].id,
            'ecomm_attribute_lines': [(0, _, {
                'attr_id': attr['attr_id'],
                'res_id': preset_id.id,
                'res_model': preset_id._name,
                'value_id':
                self.env['ecommerce.attribute.value'].search([
                    ('attr_id', '=', attr['attr_id']),
                    ('name', '=',
                     attr['attribute_value'])
                ])[:1].id,
            }) for attr in attrs],
            'ecomm_product_image_ids': [(0, _, {
                'sequence': i.sequence,
                'name': i.name,
                'image_url': i.image_url,
                'res_id': preset_id.id,
                'res_model': preset_id._name,
            }) for i in self.ecomm_product_image_ids]
        })
    else:
        raise exceptions.UserError('No data to set')
    # Keep the category selector in sync with the resolved category.
    preset_id.ecomm_categ_selector_id.write(
        {'ecomm_categ_id': val['ecomm_categ_id']})
    preset_id.write(val)
def _action_cancel(self):
    """Forbid cancelling moves tied to a manufacturing order once any
    material has been consumed.

    :raises exceptions.UserError: if any move in the set belongs to an
        MO (as raw material or as finished product) and already has a
        non-zero ``quantity_done``.
    """
    if any(move.quantity_done and (move.raw_material_production_id
                                   or move.production_id)
           for move in self):
        # Fixed: the original message embedded a backslash
        # line-continuation INSIDE the string literal, leaving a stray
        # '\' and whitespace run in the user-facing text; use implicit
        # string concatenation instead.
        raise exceptions.UserError(_(
            'You cannot cancel a manufacturing order if you have already '
            'consumed material. If you want to cancel this MO, please '
            'change the consumed quantities to 0.'))
    return super(StockMove, self)._action_cancel()
def action_cancel(self):
    """Forbid cancelling stock moves that already consumed material.

    :raises exceptions.UserError: if any move in the recordset has a
        non-zero ``quantity_done``.
    """
    if any(move.quantity_done for move in self):
        # Fixed typo in the user-facing message ("a move move").
        raise exceptions.UserError(
            _('You cannot cancel a move having already consumed material'))
    return super(StockMove, self).action_cancel()
def _check_industries(self):
    """Constraint: a lead's main industry may not appear among its
    secondary industries."""
    offenders = self.filtered(
        lambda lead: lead.industry_id in lead.secondary_industry_ids)
    if offenders:
        raise exceptions.UserError(
            _('The secondary industries must be different from the main '
              'industry.'))