def _read_csv(self, options):
    """ Returns a CSV-parsed iterator of all non-empty lines in the file

    :throws csv.Error: if an error is detected during CSV parsing
    """
    csv_data = self.file or b''
    if not csv_data:
        # nothing uploaded: empty iterator, same contract as the return below
        return iter([])

    encoding = options.get('encoding')
    if not encoding:
        # sniff the encoding and cache it in options for later calls
        encoding = options['encoding'] = chardet.detect(
            csv_data)['encoding'].lower()
        # some versions of chardet (e.g. 2.3.0 but not 3.x) will return
        # utf-(16|32)(le|be), which for python means "ignore / don't strip
        # BOM". We don't want that, so rectify the encoding to non-marked
        # IFF the guessed encoding is LE/BE and csv_data starts with a BOM
        bom = BOM_MAP.get(encoding)
        if bom and csv_data.startswith(bom):
            encoding = options['encoding'] = encoding[:-2]

    if encoding != 'utf-8':
        # the csv reader below expects utf-8 bytes
        csv_data = csv_data.decode(encoding).encode('utf-8')

    separator = options.get('separator')
    if not separator:
        # default for unspecified separator so user gets a message about
        # having to specify it
        separator = ','
        for candidate in (',', ';', '\t', ' ', '|',
                          unicodedata.lookup('unit separator')):
            # pass through the CSV and check if all rows are the same
            # length & at least 2-wide assume it's the correct one
            it = pycompat.csv_reader(io.BytesIO(csv_data),
                                     quotechar=options['quoting'],
                                     delimiter=candidate)
            w = None
            for row in it:
                width = len(row)
                if w is None:
                    w = width
                if width == 1 or width != w:
                    break  # next candidate
            else:  # nobreak: every row parsed to the same width >= 2
                separator = options['separator'] = candidate
                break

    csv_iterator = pycompat.csv_reader(io.BytesIO(csv_data),
                                       quotechar=options['quoting'],
                                       delimiter=separator)
    # rows whose cells are all blank/whitespace are dropped
    return (row for row in csv_iterator if any(x for x in row if x.strip()))
def _read_csv(self, options):
    """Parse the wizard's uploaded file as CSV.

    Returns a generator over every row that has at least one non-blank
    cell. Detected encoding and separator are written back into
    ``options`` so later calls reuse them.

    :throws csv.Error: if an error is detected during CSV parsing
    """
    data = self.file or b''
    if not data:
        return iter([])

    enc = options.get('encoding')
    if not enc:
        enc = options['encoding'] = chardet.detect(data)['encoding'].lower()
        # Older chardet releases (2.3.0, unlike 3.x) may guess
        # utf-(16|32)(le|be); for Python those names mean "keep the BOM".
        # If the payload really starts with a BOM, drop the le/be suffix
        # so decoding consumes the BOM instead of leaking it into the data.
        marker = BOM_MAP.get(enc)
        if marker and data.startswith(marker):
            enc = options['encoding'] = enc[:-2]

    if enc != 'utf-8':
        data = data.decode(enc).encode('utf-8')

    sep = options.get('separator')
    if not sep:
        # fall back to ',' so the user gets asked to pick a separator
        sep = ','
        candidates = (',', ';', '\t', ' ', '|',
                      unicodedata.lookup('unit separator'))
        for guess in candidates:
            # a candidate wins when every row parses to one constant
            # width of at least two columns
            probe = pycompat.csv_reader(io.BytesIO(data),
                                        quotechar=options['quoting'],
                                        delimiter=guess)
            expected = None
            matched = True
            for record in probe:
                n = len(record)
                if expected is None:
                    expected = n
                if n == 1 or n != expected:
                    matched = False
                    break
            if matched:
                sep = options['separator'] = guess
                break

    reader = pycompat.csv_reader(io.BytesIO(data),
                                 quotechar=options['quoting'],
                                 delimiter=sep)
    return (record for record in reader
            if any(cell for cell in record if cell.strip()))
def Import_Bank_AC(self):
    """Import bank statement lines from the uploaded CSV or XLS file.

    Expected columns: date (dd-mm-YYYY), ref, partner name, label, amount.
    One statement line is appended per row to the bank statement(s)
    selected via the context's active_ids.

    :raises exceptions.Warning: when file or file type is missing/unsupported
    :raises ValueError: when row[0] is not a dd-mm-YYYY date
    """
    statement = self.env['account.bank.statement'].browse(
        self._context.get('active_ids'))
    file_data = False
    if self.select_file and self.data_file:
        if self.select_file == 'csv':
            # base64.decodestring() was removed in Python 3.9 -> b64decode.
            # quotechar was previously "," (same as the delimiter), which
            # breaks parsing of quoted fields; the standard '"' is used.
            csv_reader_data = pycompat.csv_reader(
                io.BytesIO(base64.b64decode(self.data_file)),
                quotechar='"', delimiter=",")
            csv_reader_data = iter(csv_reader_data)
            next(csv_reader_data)  # skip the header row
            file_data = csv_reader_data
        elif self.select_file == 'xls':
            file_datas = base64.b64decode(self.data_file)
            workbook = xlrd.open_workbook(file_contents=file_datas)
            sheet = workbook.sheet_by_index(0)
            file_data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                         for r in range(sheet.nrows)]
            file_data.pop(0)  # drop the header row
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or sequence'))
    if file_data is False:
        # unknown select_file value: fail loudly instead of iterating False
        raise exceptions.Warning(
            _('Please select file and type of file or sequence'))
    my_list = []
    for row in file_data:
        # limit=1 keeps a duplicate partner name from raising a
        # singleton error on .id
        partner = self.env['res.partner'].search(
            [('name', '=', row[2])], limit=1)
        dt = datetime.datetime.strptime(row[0], "%d-%m-%Y")
        my_list.append((0, 0, {
            'name': row[3] or '/',
            'partner_id': partner.id,
            'amount': row[4],
            'ref': row[1],
            'date': dt,
        }))
    statement.write({'line_ids': my_list})
def Import_BOM(self):
    """Import bills of material (and their lines) from a CSV/XLS file.

    Expected columns: template name, reference/code, BoM quantity,
    component product name, component quantity. Rows sharing the same
    template name (column 0) are merged into one BoM with several lines.

    :raises exceptions.Warning: when file, file type or bom_type is missing
    :raises ValidationError: when a template or component is not found
    :return: True
    """
    product_tem_obj = self.env['product.template']
    product_obj = self.env['product.product']
    mrp_result = {}
    mrp_obj = self.env['mrp.bom']
    mrp_default_value = mrp_obj.default_get(mrp_obj.fields_get())
    mrp_line_obj = self.env['mrp.bom.line']
    line_fields = mrp_line_obj.fields_get()
    file_data = False
    if self.select_file and self.data_file and self.bom_type:
        if self.select_file == 'csv':
            # base64.decodestring() was removed in Python 3.9 -> b64decode.
            # quotechar was "," (the delimiter), which corrupts quoted fields.
            csv_reader_data = pycompat.csv_reader(
                io.BytesIO(base64.b64decode(self.data_file)),
                quotechar='"', delimiter=",")
            csv_reader_data = iter(csv_reader_data)
            next(csv_reader_data)  # skip the header row
            file_data = csv_reader_data
        elif self.select_file == 'xls':
            file_datas = base64.b64decode(self.data_file)
            workbook = xlrd.open_workbook(file_contents=file_datas)
            sheet = workbook.sheet_by_index(0)
            file_data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                         for r in range(sheet.nrows)]
            file_data.pop(0)  # drop the header row
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or bom type'))
    if file_data is False:
        raise exceptions.Warning(
            _('Please select file and type of file or bom type'))
    for row in file_data:
        product_tem = product_tem_obj.search(
            [('name', '=', row[0]), ('default_code', '=', row[1])])
        product_rows = product_obj.search([('name', '=', row[3])])
        if not product_tem:
            raise ValidationError("Product '%s' not found" % row[0])
        if not product_rows:
            raise ValidationError("Product '%s' not found" % row[3])
        mrp_obj_update = mrp_default_value.copy()
        mrp_obj_update.update({
            'product_tmpl_id': product_tem.id,
            'product_qty': row[2],
            'product_uom_id': product_tem.uom_id.id,
            # 'mp' -> manufacture ('normal'), anything else -> kit ('phantom')
            'type': self.bom_type == 'mp' and 'normal' or 'phantom',
            'code': row[1],
        })
        line_vals = mrp_line_obj.default_get(line_fields).copy()
        line_vals.update({
            'product_id': product_rows.id,
            'product_qty': row[4] and int(row[4]) or 1,
            'product_uom_id': product_rows.uom_id.id,
        })
        l2 = [(0, 0, line_vals)]
        if mrp_result.get(row[0]):
            # template already seen: append the component line
            mrp_result[row[0]]['bom_line_ids'] = \
                mrp_result[row[0]]['bom_line_ids'] + l2
        else:
            mrp_obj_update.update({'bom_line_ids': l2})
            mrp_result[row[0]] = mrp_obj_update
    for mrp_data in mrp_result.values():
        # debug print of the created record removed
        mrp_obj.create(mrp_data)
    return True
def validate(self):
    """Check a file is selected, try to parse it as CSV and reopen the wizard.

    Decodes the base64 payload with the configured encoding, caches the
    parsed rows in ``reader_info`` and flips state to 'validated' on
    success; parse errors are only printed (best-effort, preserved as-is).

    :raises exceptions.Warning: when no file was selected
    :return: an ir.actions.act_window dict reopening the tarif.import form
    """
    if not self.data:
        raise exceptions.Warning(_("You need to select a file!"))
    decoded = base64.b64decode(self.data)
    stream = BytesIO(decoded.decode(self.encoding).encode('utf-8'))
    rows = pycompat.csv_reader(stream, quotechar="'",
                               delimiter=self.delimeter)
    try:
        self.reader_info = []
        self.reader_info.extend(rows)
        stream.close()
        self.state = 'validated'
    except Exception as e:
        print("Not a valid file!", e)
    return {
        'name': ('Tarifs'),
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'tarif.import',
        'view_id': False,
        'context': {
            'data': self.data,
            'state': self.state,
            'supplier_id': self.supplier_id.id,
            'tarifs_ids': self._get_tarif_from_csv()
        },
        'type': 'ir.actions.act_window',
        'target': 'new'
    }
def setUpClass(cls):
    """Set up the partner fixture for the VAT-subjected update tests."""
    super(TestPartnerUpdateVatSubjectedBase, cls).setUpClass()
    cls.partner_model = cls.env["res.partner"]
    romania = cls.env.ref("base.ro")
    # detach any partner already assigned to Romania
    cls.partner_model.search(
        [("country_id", "=", romania.id)]
    ).write({"country_id": False})
    examples_dir = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "examples/"
    )
    fixture = tools.file_open(examples_dir + "res.partner.csv")
    reader = pycompat.csv_reader(
        io.BytesIO(bytes(fixture.read(), "utf-8")), quotechar='"', delimiter=","
    )
    rows = [row for row in reader if any(row)]
    # keep the tests offline: no live VIES validation
    cls.env.user.company_id.write({"vat_check_vies": False})
    ctx = {"tracking_disable": True}
    for row in rows:
        cls.partner_model.with_context(ctx).create(
            {
                "id": row[0],
                "name": row[1],
                "vat": row[2],
                "is_company": row[3],
                "country_id": romania.id,
            }
        )
def setUpClass(cls):
    """Load the example partners used by the VAT-subjected tests.

    Clears Romania from any pre-existing partner so the fixture is
    isolated, disables VIES checking to keep tests offline, then creates
    one Romanian partner per non-empty row of examples/res.partner.csv
    (columns: id, name, vat, is_company).
    """
    super(TestPartnerUpdateVatSubjectedBase, cls).setUpClass()
    cls.partner_model = cls.env['res.partner']
    # detach existing Romanian partners so the fixture data is isolated
    parts = cls.partner_model.search(
        [('country_id', '=', cls.env.ref('base.ro').id)])
    parts.write({'country_id': False})
    data_dir = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        'examples/')
    context = {'tracking_disable': True}
    fdata = tools.file_open(data_dir + 'res.partner.csv')
    csvdata = pycompat.csv_reader(
        io.BytesIO(bytes(fdata.read(), 'utf-8')),
        quotechar='"', delimiter=',')
    # keep only rows that have at least one truthy cell
    lines = [line for line in csvdata if any(line)]
    # avoid live VIES lookups during tests
    cls.env.user.company_id.write({'vat_check_vies': False})
    for line in lines:
        cls.partner_model.with_context(context).create({
            'id': line[0],
            'name': line[1],
            'vat': line[2],
            'is_company': line[3],
            'country_id': cls.env.ref('base.ro').id
        })
def _read_csv(self, options):
    """Parse the uploaded file as CSV.

    Returns a generator over every parsed row that contains at least one
    non-blank cell. Detected encoding and separator are stored back into
    ``options``.

    :throws csv.Error: if an error is detected during CSV parsing
    """
    data = self.file or b''
    if not data:
        return iter([])

    enc = options.get('encoding')
    if not enc:
        enc = options['encoding'] = chardet.detect(data)['encoding'].lower()
    if enc != 'utf-8':
        data = data.decode(enc).encode('utf-8')

    sep = options.get('separator')
    if not sep:
        # default to ',' so the user is prompted to choose a separator
        sep = ','
        for guess in (',', ';', '\t', ' ', '|',
                      unicodedata.lookup('unit separator')):
            # accept the first candidate for which every row parses to
            # the same width and that width is at least 2
            probe = pycompat.csv_reader(io.BytesIO(data),
                                        quotechar=options['quoting'],
                                        delimiter=guess)
            expected = None
            matched = True
            for record in probe:
                n = len(record)
                if expected is None:
                    expected = n
                if n == 1 or n != expected:
                    matched = False
                    break
            if matched:
                sep = options['separator'] = guess
                break

    reader = pycompat.csv_reader(io.BytesIO(data),
                                 quotechar=options['quoting'],
                                 delimiter=sep)
    return (record for record in reader
            if any(cell for cell in record if cell.strip()))
def readCsvFile(f, head):
    """Read a CSV file and return its rows as a list.

    :param f: binary file-like object to parse
    :param head: if truthy, skip the first (header) row
    :return: list of rows, each a list of strings
    """
    reader = pycompat.csv_reader(f)
    if head:
        next(reader)  # idiomatic replacement for reader.__next__()
    # materialize the remaining rows in one pass instead of an append loop
    return list(reader)
def _read_csv(self, options):
    """ Returns a CSV-parsed iterator of all non-empty lines in the file

    :throws csv.Error: if an error is detected during CSV parsing
    """
    csv_data = self.file or b''
    if not csv_data:
        # nothing uploaded: empty iterator, same contract as below
        return iter([])

    encoding = options.get('encoding')
    if not encoding:
        # sniff the encoding and cache it in options for later calls
        encoding = options['encoding'] = chardet.detect(csv_data)['encoding'].lower()
    if encoding != 'utf-8':
        # the csv reader below expects utf-8 bytes
        csv_data = csv_data.decode(encoding).encode('utf-8')

    separator = options.get('separator')
    if not separator:
        # default for unspecified separator so user gets a message about
        # having to specify it
        separator = ','
        for candidate in (',', ';', '\t', ' ', '|',
                          unicodedata.lookup('unit separator')):
            # pass through the CSV and check if all rows are the same
            # length & at least 2-wide assume it's the correct one
            it = pycompat.csv_reader(io.BytesIO(csv_data),
                                     quotechar=options['quoting'],
                                     delimiter=candidate)
            w = None
            for row in it:
                width = len(row)
                if w is None:
                    w = width
                if width == 1 or width != w:
                    break  # next candidate
            else:  # nobreak: every row had the same width >= 2
                separator = options['separator'] = candidate
                break

    csv_iterator = pycompat.csv_reader(
        io.BytesIO(csv_data),
        quotechar=options['quoting'],
        delimiter=separator)
    # rows whose cells are all blank/whitespace are dropped
    return (
        row
        for row in csv_iterator
        if any(x for x in row if x.strip())
    )
def process_attendance_file(self):
    """Parse every attached attendance CSV and create attendance.sorting rows.

    Each data row must provide 'Biometric', 'Attendance Time' and 'Action'
    ('sign_in'/'sign_out') columns; rows are matched to employees via the
    biometric number and de-duplicated per (employee, attendance time).
    Finally the process record moves to the 'sorting' state.

    :raises ValidationError: on unparseable files, missing/invalid columns
        or unknown biometric numbers
    """
    # BUG FIX: the outer loop variable was `i`, which the inner
    # `for i in range(...)` loop silently shadowed.
    for attendance_file in self.attendance_file_ids:
        reader_info = []
        csv_data = base64.b64decode(attendance_file.file)
        csv_data = csv_data.decode(encoding='ASCII').encode('utf-8')
        csv_iterator = pycompat.csv_reader(
            BytesIO(csv_data), delimiter=',', lineterminator='\r\n')
        try:
            reader_info.extend(csv_iterator)
        except Exception:
            raise ValidationError(
                _("File must be a CSV Format and use 'comma' as a delimeter"))
        keys = reader_info[0]  # header row
        for field in reader_info[1:]:
            values = dict(zip(keys, field))
            # `not in [...]` also covers a missing 'Action' key (None)
            if not values.get('Biometric') \
                    or not values.get('Attendance Time') \
                    or values.get('Action') not in ['sign_in', 'sign_out']:
                raise ValidationError(
                    _('File must have the following Columns and Data:\n\t1. Biometric\n\t2.Attendance Time\n\t3. Action\n\nAction, must be either "sign_in" or "sign_out".'))
            employee = self.env['hr.employee'].search(
                [('biometric', '=', values.get('Biometric'))], limit=1)
            if employee[:1]:
                attendance_time = create_datefrom_parse(
                    values.get('Attendance Time'))
                dup_count = self.env['attendance.sorting'].search_count([
                    ('attendance_process_id', '=', self.id),
                    ('employee_id', '=', employee.id),
                    ('attendance_time', '=', attendance_time)
                ])
                if dup_count == 0:
                    self.env['attendance.sorting'].create({
                        'employee_id': employee[:1].id,
                        'attendance_time': attendance_time,
                        'attendance_type': values.get('Action'),
                        'attendance_process_id': self.id
                    })
            else:
                raise ValidationError(
                    _("No matching employee record found owning the biometric no. %s"
                      % (values.get('Biometric'))))
    self.write({'state': 'sorting'})
def action_create_compute_inputsline(self):
    """Collect payslip input lines from attached CSV files and start a
    background computation thread for the active payslip run.

    The CSV layout is positional: row index 3 is treated as the header
    row (columns 0-4 ignored), rows 4+ are data lines. Header cells are
    matched against hr.rule.input.code by name to build ``dataHeaders``.

    NOTE(review): assumes every attached CSV follows this 4-row preamble
    layout — confirm against the files actually uploaded.
    """
    def _get_attachment_filename(attachment):
        # ir.attachment exposes `name`; transient upload wrappers use `fname`
        return hasattr(attachment, 'fname') and getattr(attachment, 'fname') or attachment.name

    def _get_attachment_content(attachment):
        # prefer an in-memory `content`, else decode the stored base64 datas
        return hasattr(attachment, 'content') and getattr(attachment, 'content') or base64.b64decode(attachment.datas)

    dataLines = []
    dataHeaders = {}
    inputCode = self.env['hr.rule.input.code']
    for attachment in self.attachment_ids:
        filename = _get_attachment_filename(attachment)
        content = _get_attachment_content(attachment)
        buffer = io.BytesIO(content)
        if filename.lower().strip().endswith('.csv'):
            indx = 0
            lines = pycompat.csv_reader(buffer)
            for line in lines:
                if indx == 3:
                    # header row: map column index -> input code metadata
                    for i, headerLine in enumerate(line):
                        if i < 5:
                            continue  # first five columns are not input codes
                        code = headerLine.strip().split(' ')
                        for code_id in inputCode.search(['|', ('name', 'ilike', headerLine.strip()), ('name', '=', code[0])]):
                            dataHeaders[i] = {
                                'name': headerLine.strip(),
                                'id': code_id.id,
                                'input_id': code_id.input_id.id,
                                'input_name': code_id.input_id.name,
                                'input_code': code_id.input_id.code,
                            }
                    _logger.info('------ dataHeaders %s ' % dataHeaders)
                if indx >= 4:
                    # data rows follow the header
                    dataLines.append(line)
                indx += 1
        buffer.close()
    # Create employee payslips (original comment: "Crea Nominas Empleados")
    active_id = self.env.context.get('active_id')
    if active_id:
        [run_data] = self.env['hr.payslip.run'].browse(active_id).read(['date_start', 'date_end', 'credit_note'])
        from_date = run_data.get('date_start')
        to_date = run_data.get('date_end')
        credit_note = run_data.get('credit_note')
        # heavy computation is pushed to a worker thread; the cursor/env
        # handling happens inside _action_create_compute_inputsline
        threaded_calculation = threading.Thread(
            target=self._action_create_compute_inputsline,
            args=(active_id, from_date, to_date, credit_note, dataLines, dataHeaders),
            name='crearcalcularrunid_%s' % active_id)
        threaded_calculation.start()
    return True
def _read_csv(self, options):
    """Parse ``self.file`` as CSV and return an iterator of its rows.

    The payload's encoding is sniffed with chardet and normalised to
    utf-8 before parsing; rows whose cells are all blank are skipped.

    :throws csv.Error: if an error is detected during CSV parsing
    """
    raw = self.file
    detected = chardet.detect(raw)['encoding']
    normalised = raw.decode(detected).encode('utf-8')
    reader = pycompat.csv_reader(
        io.BytesIO(normalised),
        quotechar=str(options['quoting']),
        delimiter=str(options['separator']))
    return (record for record in reader
            if any(cell.strip() for cell in record))
def action_create_inputsline(self):
    """Read payslip input lines from attached CSV files and push each data
    row to ``action_create_inputsline_payslip``.

    The CSV layout is positional: row index 3 is the header row (columns
    0-4 ignored), rows 4+ are data lines. Header cells are matched against
    hr.rule.input.code by name to build ``dataHeaders``.

    NOTE(review): any exception while reading a file is logged and
    swallowed (best-effort import) — already-processed rows are kept.
    """
    def _get_attachment_filename(attachment):
        # ir.attachment exposes `name`; transient upload wrappers use `fname`
        return hasattr(attachment, 'fname') and getattr(
            attachment, 'fname') or attachment.name

    def _get_attachment_content(attachment):
        # prefer an in-memory `content`, else decode the stored base64 datas
        return hasattr(attachment, 'content') and getattr(
            attachment, 'content') or base64.b64decode(attachment.datas)

    inputCode = self.env['hr.rule.input.code']
    for attachment in self.attachment_ids:
        filename = _get_attachment_filename(attachment)
        content = _get_attachment_content(attachment)
        buffer = io.BytesIO(content)
        headers = []
        dataHeaders = {}
        if filename.lower().strip().endswith('.csv'):
            try:
                indx = 0
                lines = pycompat.csv_reader(buffer)
                for line in lines:
                    if indx == 3:
                        # header row: map column index -> input code metadata
                        for i, headerLine in enumerate(line):
                            if i < 5:
                                continue  # first five columns are not codes
                            code = headerLine.strip().split(' ')
                            for code_id in inputCode.search([
                                    '|',
                                    ('name', 'ilike', headerLine.strip()),
                                    ('name', '=', code[0])
                            ]):
                                dataHeaders[i] = {
                                    'name': headerLine.strip(),
                                    'id': code_id.id,
                                    'input_id': code_id.input_id.id,
                                    'input_name': code_id.input_id.name,
                                    'input_code': code_id.input_id.code,
                                }
                        _logger.info('------ dataHeaders %s ' % dataHeaders)
                    if indx >= 4:
                        # data rows follow the header
                        self.action_create_inputsline_payslip(
                            dataLine=line, dataHeaders=dataHeaders)
                    indx += 1
            except Exception as e:
                # best-effort: log and continue with the next attachment
                _logger.info('---------error %s ' % e)
                pass
        buffer.close()
    return True
def get_data(self):
    """Decode the uploaded file, parse it as CSV and cache the rows.

    Stores the parsed rows in ``reader_info`` and, on success, moves the
    wizard to the 'validated' state.

    :raises exceptions.Warning: when no file is selected
    :raises ValidationError: when CSV parsing fails
    """
    if not self.data:
        raise exceptions.Warning(_("You need to select a file!"))
    decoded = base64.b64decode(self.data)
    stream = BytesIO(decoded.decode(self.encoding).encode('utf-8'))
    rows = pycompat.csv_reader(stream, quotechar=self.quotechar,
                               delimiter=self.delimeter)
    try:
        self.reader_info = []
        self.reader_info.extend(rows)
        stream.close()
        self.state = 'validated'
    except Exception as e:
        raise ValidationError(_("CSV file error : %s") % e)
def _read_csv(self, options):
    """ Returns a CSV-parsed iterator of all non-empty lines in the file

    :throws csv.Error: if an error is detected during CSV parsing
    """
    csv_data = self.file
    # always sniff the encoding and normalise to utf-8 for the csv reader
    encoding = chardet.detect(csv_data)['encoding']
    csv_data = csv_data.decode(encoding).encode('utf-8')
    csv_iterator = pycompat.csv_reader(
        io.BytesIO(csv_data),
        quotechar=str(options['quoting']),
        delimiter=str(options['separator']))
    # drop rows whose cells are all empty/whitespace
    return (
        row
        for row in csv_iterator
        if any(x for x in row if x.strip())
    )
def _read_csv(self, options):
    """ Returns a CSV-parsed iterator of all non-empty lines in the file
    (the previous docstring said "empty"; the filter below keeps rows
    with at least one non-blank cell)

    :throws csv.Error: if an error is detected during CSV parsing
    :throws UnicodeDecodeError: if ``options.encoding`` is incorrect
    """
    csv_data = self.file
    # TODO: guess encoding with chardet? Or https://github.com/aadsm/jschardet
    encoding = options.get('encoding', 'utf-8')
    if encoding != 'utf-8':
        # csv module expect utf-8, see http://docs.python.org/2/library/csv.html
        csv_data = csv_data.decode(encoding).encode('utf-8')
    csv_iterator = pycompat.csv_reader(io.BytesIO(csv_data),
                                       quotechar=str(options['quoting']),
                                       delimiter=str(options['separator']))
    return (row for row in csv_iterator if any(x for x in row if x.strip()))
def import_picking_order(self):
    """Confirm sale orders listed in the uploaded CSV/XLS file.

    Column 0 of each row must hold an existing sale order name; each
    matching order gets ``action_confirm()`` called on it.

    :raises exceptions.Warning: when file or file type is missing/unsupported
    :raises ValidationError: when a row is malformed or the order is missing
    """
    file_data = False
    if self.select_file and self.data_file:
        if self.select_file == 'csv':
            # base64.decodestring() was removed in Python 3.9 -> b64decode.
            # quotechar was "," (same as the delimiter), which corrupts
            # quoted-field parsing; the standard '"' is used instead.
            csv_reader_data = pycompat.csv_reader(
                io.BytesIO(base64.b64decode(self.data_file)),
                quotechar='"', delimiter=",")
            csv_reader_data = iter(csv_reader_data)
            next(csv_reader_data)  # skip the header row
            file_data = csv_reader_data
        elif self.select_file == 'xls':
            file_datas = base64.b64decode(self.data_file)
            workbook = xlrd.open_workbook(file_contents=file_datas)
            sheet = workbook.sheet_by_index(0)
            file_data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                         for r in range(sheet.nrows)]
            file_data.pop(0)  # drop the header row
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or picking type'))
    if file_data is False:
        # unknown select_file value: fail loudly instead of iterating False
        raise exceptions.Warning(
            _('Please select file and type of file or picking type'))
    for row in file_data:
        # CSV rows must be exactly one column wide (the order name)
        if self.select_file == 'csv' and len(row) != 1:
            raise ValidationError(
                "You can let empty cell in csv file or please use xls file."
            )
        if row[0] != "":
            sale = self.env['sale.order'].search(
                [('name', '=', row[0] or "_____________")])
            if not sale:
                raise ValidationError("Sale Order '%s' is not founded"
                                      % row[0])
        else:
            raise ValidationError("Please Assign Sale Order Name.")
        sale.action_confirm()
def Import_payment(self, vals):
    """Create draft inbound account.payment records from the uploaded file.

    Each row provides (partner name, amount, journal name, _, memo);
    partner and journal are matched with 'like' searches on their names.

    NOTE(review): assumes this wizard exposes a ``select_file`` selection
    field like the sibling import methods — confirm the field name.
    """
    # BUG FIX: the original tested `self.data_file == 'csv'`, comparing the
    # base64 *payload* to 'csv' (never true, so the CSV branch was dead).
    # The file-type selector used everywhere else is `select_file`.
    if self.select_file == 'csv':
        # base64.decodestring() was removed in Python 3.9 -> b64decode.
        # quotechar was "," (the delimiter); use the standard '"'.
        csv_reader_data = pycompat.csv_reader(
            io.BytesIO(base64.b64decode(self.data_file)),
            quotechar='"', delimiter=",")
        csv_reader_data = iter(csv_reader_data)
        next(csv_reader_data)  # skip the header row
        file_data = csv_reader_data
    else:
        file_datas = base64.b64decode(self.data_file)
        workbook = xlrd.open_workbook(file_contents=file_datas)
        sheet = workbook.sheet_by_index(0)
        file_data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                     for r in range(sheet.nrows)]
        file_data.pop(0)  # drop the header row
    partner_obj = self.env['res.partner']
    account_obj = self.env['account.journal']
    for row in file_data:
        partner = partner_obj.search([('name', 'like', row[0])])
        account = account_obj.search([('name', 'like', row[2])])
        payment_vals = {
            'partner_type': self.payment_type == 'customer_py' and 'customer' or 'supplier',
            'partner_id': partner.id,
            'payment_date': datetime.datetime.now(),
            'journal_id': account.id,
            'amount': row[1],
            # hard-coded payment method id — presumably "manual"; TODO confirm
            'payment_method_id': 2,
            'communication': row[4],
            'state': 'draft',
            'payment_type': 'inbound',
        }
        # debug print removed; create() already returns the record
        self.env['account.payment'].create(payment_vals)
def _read_csv(self, options):
    """Parse ``self.file`` as CSV and return an iterator of its rows.

    Rows whose cells are all blank are filtered out.

    :throws csv.Error: if an error is detected during CSV parsing
    :throws UnicodeDecodeError: if ``options.encoding`` is incorrect
    """
    data = self.file
    # TODO: guess encoding with chardet? Or https://github.com/aadsm/jschardet
    enc = options.get('encoding', 'utf-8')
    if enc != 'utf-8':
        # the csv module expects utf-8 input,
        # see http://docs.python.org/2/library/csv.html
        data = data.decode(enc).encode('utf-8')
    reader = pycompat.csv_reader(
        io.BytesIO(data),
        quotechar=str(options['quoting']),
        delimiter=str(options['separator']))
    return (record for record in reader
            if any(cell.strip() for cell in record))
def get_file_csv_data(self, file):
    """Decode *file* (base64) and parse it as CSV.

    The first row is used as the header; every following row is returned
    as a dict mapping header names to cell values. Parsed rows are also
    cached in ``reader_info``.

    :param file: base64-encoded CSV payload
    :raises exceptions.Warning: when parsing fails
    :return: list of dicts, one per data row
    """
    self.ensure_one()
    decoded = base64.b64decode(file)
    stream = BytesIO(decoded.decode(self.encoding).encode('utf-8'))
    rows = pycompat.csv_reader(stream, quotechar="'",
                               delimiter=self.delimiter)
    try:
        self.reader_info = []
        self.reader_info.extend(rows)
        stream.close()
    except Exception as e:
        raise exceptions.Warning(e)
    records = []
    header = None
    for idx, row in enumerate(self.reader_info):
        if not idx:
            header = row
        else:
            records.append(dict(zip(header, row)))
    return records
def import_bank_ac(self):
    """Append statement lines from the uploaded CSV/XLS file to the bank
    statement(s) selected via the context's active_ids.

    Expected columns: date (dd-mm-YYYY), ref, partner name, label, amount.

    :raises exceptions.Warning: when file or file type is missing/unsupported
    :raises ValidationError: on malformed rows
    """
    statement = self.env['account.bank.statement'].browse(
        self._context.get('active_ids'))
    file_data = False
    if self.select_file and self.data_file:
        if self.select_file == 'csv':
            # base64.decodestring() was removed in Python 3.9 -> b64decode.
            # quotechar was "," (the delimiter), breaking quoted fields.
            csv_reader_data = pycompat.csv_reader(
                io.BytesIO(base64.b64decode(self.data_file)),
                quotechar='"', delimiter=",")
            csv_reader_data = iter(csv_reader_data)
            next(csv_reader_data)  # skip the header row
            file_data = csv_reader_data
        elif self.select_file == 'xls':
            file_datas = base64.b64decode(self.data_file)
            workbook = xlrd.open_workbook(file_contents=file_datas)
            sheet = workbook.sheet_by_index(0)
            file_data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                         for r in range(sheet.nrows)]
            file_data.pop(0)  # drop the header row
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or sequence'))
    if file_data is False:
        raise exceptions.Warning(
            _('Please select file and type of file or sequence'))
    my_list = []
    for row in file_data:
        if self.select_file == 'csv' and len(row) != 5:
            raise ValidationError(
                "You can let empty cell in csv file or please use xls file.")
        if row[0] == "" or row[3] == "":
            raise ValidationError("Please Assign The Label And Date.")
        # limit=1 keeps a duplicate partner name from raising on .id
        partner_id_search = self.env['res.partner'].search(
            [('name', '=', row[2] or "_____________")], limit=1)
        dt = datetime.strptime(row[0], "%d-%m-%Y")
        my_list.append((0, 0, {
            'name': row[3] or '/',
            'partner_id': partner_id_search and partner_id_search.id or False,
            'amount': row[4] or "",
            'ref': row[1] or "",
            'date': dt,
        }))
    statement.write({'line_ids': my_list})
def validate(self):
    """Validate the uploaded CSV file and reopen the import wizard.

    Decodes the base64 payload, checks it is valid UTF-8, parses it as a
    ';'-separated CSV and caches the rows in ``reader_info``; on success
    the wizard state becomes 'validated'.

    :raises exceptions.Warning: when no file is selected or parsing fails
    :return: an ir.actions.act_window dict reopening stock.inventory.import
    """
    if not self.data:
        raise exceptions.Warning(_("You need to select a file!"))
    csv_data = base64.b64decode(self.data)
    # the decode/encode round-trip rejects non-UTF-8 payloads early
    csv_data = BytesIO(csv_data.decode('utf-8').encode('utf-8'))
    csv_iterator = pycompat.csv_reader(csv_data, delimiter=";")
    # lazy %-style logging args instead of eager string concatenation
    logging.info("csv_iterator %s", csv_iterator)
    try:
        self.reader_info = []
        self.reader_info.extend(csv_iterator)
        csv_data.close()
        self.state = 'validated'
    except Exception:
        raise exceptions.Warning(_("Not a valid file!"))
    return {
        'name': ('Assignment Sub'),
        'view_type': 'form',
        'view_mode': 'form',
        'res_model': 'stock.inventory.import',
        'view_id': False,
        'context': {
            'data': self.data,
            'state': self.state,
            'default_dest_categ': self.dest_categ.id,
            'default_new_prd_categ': self.new_prd_categ.id,
            'stock_inventory_ids': self._get_stock_inventory_from_csv()
        },
        'type': 'ir.actions.act_window',
        'target': 'new'
    }
def import_sale_order(self):
    """Create (and optionally confirm) sale orders from a CSV/XLS file.

    Expected columns per row:
        0 customer name, 1 currency, 2 product ids (';'-separated),
        3 quantities (';'-separated string or single value), 4 UoM name,
        5 unit prices (';'-separated string or single value),
        7 order date (dd-mm-YYYY), 8 tax name / client order ref,
        9 validity date (dd-mm-YYYY), 10 payment term, 11 fiscal position,
        12 pricelist, 13 salesperson.

    NOTE(review): ``pricelist`` and ``user`` are only bound when
    row[12]/row[13] are non-empty — a row leaving them empty raises
    NameError when building sale_vals; confirm the input always fills them.
    NOTE(review): quotechar="," equals the delimiter (breaks quoted CSV
    fields) and base64.decodestring was removed in Python 3.9.

    :return: True
    """
    line_vals = {}
    date_planned = datetime.now()
    payment_term = False
    fiscal_position = False
    incoterm = False  # unused in this method
    salesperson_obj = self.env['res.users']  # unused in this method
    sale_obj = self.env['sale.order']
    sale_line_obj = self.env['sale.order.line']
    file_data = False
    if self.select_file and self.data_file:
        if self.select_file == 'csv':
            csv_reader_data = pycompat.csv_reader(io.BytesIO(base64.decodestring(self.data_file)), quotechar=",", delimiter=",")
            csv_reader_data = iter(csv_reader_data)
            # skip the header row
            next(csv_reader_data)
            file_data = csv_reader_data
        elif self.select_file == 'xls':
            file_datas = base64.decodestring(self.data_file)
            workbook = xlrd.open_workbook(file_contents=file_datas)
            sheet = workbook.sheet_by_index(0)
            result = []
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)] for r in range(sheet.nrows)]
            # drop the header row
            data.pop(0)
            file_data = data
    else:
        raise exceptions.Warning(_('Please select file and type of file or sequence'))
    for row in file_data:
        ids = []
        if self.select_file == 'csv' and len(row) != 14:
            raise ValidationError("You can let empty cell in csv file or please use xls file.")
        # row[8] doubles as the sale-tax name here and as client_order_ref below
        tax_search = self.env['account.tax'].search([('name', '=', row[8]), ('type_tax_use', '=', 'sale')])
        if row[0] != "":
            partner = self.env['res.partner'].search([('name', '=', row[0] or "_____________")])
            if not partner:
                raise ValidationError("Customer '%s' is not founded" % row[0])
        else:
            raise ValidationError("Please Assign Customer Name.")
        if row[1] != "":
            currency = self.env['res.currency'].search([('name', '=', row[1] or "_____________")])
            if not currency:
                raise ValidationError("Currency '%s' is not founded" % row[1])
        else:
            raise ValidationError("Please Assign Currency.")
        if row[4] != "":
            uom = self.env['product.uom'].search([('name', '=', row[4] or "_____________")])
            if not uom:
                raise ValidationError("UOM '%s' is not founded" % row[4])
        else:
            raise ValidationError("Please Assign UOM.")
        if row[10] != "":
            payment_term = self.env['account.payment.term'].search([('name', '=', row[10] or "_____________")])
            if not payment_term:
                raise ValidationError("Payment Terms '%s' is not founded" % row[10])
        if row[11] != "":
            fiscal_position = self.env['account.fiscal.position'].search([('name', '=', row[11] or "_____________")])
            if not fiscal_position:
                raise ValidationError("Fiscal Position '%s' is not founded" % row[11])
        if row[12] != "":
            pricelist = self.env['product.pricelist'].search([('name', '=', row[12] or "_____________")])
            if not pricelist:
                raise ValidationError("Pricelist '%s' is not founded" % row[12])
        if row[13] != "":
            user = self.env['res.users'].search([('name', '=', row[13] or "_____________")])
            if not user:
                raise ValidationError("User '%s' is not founded" % row[13])
        dt = datetime.strptime(row[7], "%d-%m-%Y")
        if row[9] != "":
            date_planned = datetime.strptime(row[9], "%d-%m-%Y")
        sale_vals = {
            'partner_id': partner.id or False,
            'currency_id': currency and currency.id or False,
            'date_order': dt,
            'client_order_ref': row[8] or "",
            'validity_date': date_planned or datetime.now(),
            'payment_term_id': payment_term and payment_term.id or False,
            'fiscal_position_id': fiscal_position and fiscal_position.id or False,
            'pricelist_id': pricelist and pricelist.id or False,
            # NOTE(review): the fallback passes a res.users record, not an
            # id — verify the ORM accepts this here
            'user_id': user and user.id or self.env.user
        }
        sale = sale_obj.create(sale_vals)
        if row[2] != "":
            # row[2] holds ';'-separated product ids
            for pro in row[2].split(";"):
                product = self.env['product.product'].search([('id', '=', pro or "_____________")])
                if not product:
                    raise ValidationError("Product '%s' is not founded" % pro)
                line_vals = {
                    "order_id": sale.id,
                    'name': product.name,
                    'product_id': product.id,
                    'product_uom': product.uom_id.id,
                    'tax_id': [(6, 0, tax_search.ids)]
                }
                sale_line_rec = sale_line_obj.create(line_vals)
                ids.append(sale_line_rec.id)
        else:
            raise ValidationError("Please Assign Product.")
        # quantities: ';'-separated string (csv) or a single value (xls)
        if row[3] != "" and type(row[3]) in [str]:
            i = 0
            for id in ids:
                list = row[3].split(";")
                order_line = self.env["sale.order.line"].browse(id)
                order_line.product_uom_qty = list[i]
                i = i + 1
        else:
            for id in ids:
                order_line = self.env["sale.order.line"].browse(id)
                order_line.product_uom_qty = row[3]
        # unit prices: same pattern as quantities
        if row[5] != "" and type(row[5]) in [str]:
            j = 0
            for id in ids:
                list = row[5].split(";")
                order_line = self.env["sale.order.line"].browse(id)
                order_line.price_unit = list[j]
                j = j + 1
        else:
            for id in ids:
                order_line = self.env["sale.order.line"].browse(id)
                order_line.price_unit = row[5]
        if self.import_state == "confirm":
            sale.action_confirm()
    return True
def import_inventory(self):
    """Create a confirmed stock.inventory from a CSV/XLS file and validate it.

    Expected columns: 0 product (code/barcode/name, per ``imp_product_by``),
    1 quantity, 2 lot/serial number, 3 expiry date (dd-mm-YYYY).
    When ``ser_no_lot_expi`` is set and the lot does not exist yet, a new
    stock.production.lot is created per row.

    NOTE(review): quotechar="," equals the delimiter (breaks quoted CSV
    fields) and base64.decodestring was removed in Python 3.9.
    NOTE(review): ``file_data`` stays unbound if file_type/data_file are
    set but file_type is neither 'csv' nor 'xls' — confirm the selection
    field only allows those two values.

    :raises exceptions.Warning: when file, type or product-match mode is missing
    :raises ValidationError: on malformed rows or unknown products
    """
    if self.file_type and self.data_file:
        if self.file_type == 'csv':
            csv_reader_data = pycompat.csv_reader(io.BytesIO(base64.decodestring(self.data_file)), quotechar=",", delimiter=",")
            csv_reader_data = iter(csv_reader_data)
            # skip the header row
            next(csv_reader_data)
            file_data = csv_reader_data
        elif self.file_type == 'xls':
            file_datas = base64.decodestring(self.data_file)
            workbook = xlrd.open_workbook(file_contents=file_datas)
            sheet = workbook.sheet_by_index(0)
            result = []
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)] for r in range(sheet.nrows)]
            # drop the header row
            data.pop(0)
            file_data = data
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or picking type'))
    product_obj = self.env['product.product']
    inventory_obj = self.env['stock.inventory']
    inventory_fields = inventory_obj.fields_get()
    inventory_def_val = inventory_obj.default_get(inventory_fields)
    new_inventory_val = inventory_def_val.copy()
    new_inventory_val.update({
        'name': self.inv_name,
        'state': 'confirm',
        'location_id': self.loc_name.id,
    })
    final_created_id = inventory_obj.create(new_inventory_val)
    for row in file_data:
        if self.file_type == 'csv' and len(row) != 4:
            raise ValidationError(
                "You can let empty cell in csv file or please use xls file."
            )
        prod_lot_obj = self.env['stock.production.lot']
        # empty recordset used as a fallback when no new lot is created
        new_lot_serial = self.env['stock.production.lot']
        prod_lot_fields = prod_lot_obj.fields_get()
        prod_lot_obj_def_val = prod_lot_obj.default_get(prod_lot_fields)
        new_inventory_line_val_ids = prod_lot_obj_def_val.copy()
        inventory_line_obj = self.env['stock.inventory.line']
        inventory_line_fields = inventory_line_obj.fields_get()
        inventory_line_def_val = inventory_line_obj.default_get(
            inventory_line_fields)
        new_inventory_line_val = inventory_line_def_val.copy()
        date = datetime.strptime(row[3], "%d-%m-%Y")
        # resolve the product according to the configured match mode
        if self.imp_product_by == "code":
            product_id = product_obj.search([('default_code', '=', row[0] or "_____________")])
            if not product_id:
                raise ValidationError("Product '%s' is not founded" % row[0])
        elif self.imp_product_by == "barcode":
            product_id = product_obj.search([('barcode', '=', int(row[0]) or "_____________")])
            if not product_id:
                raise ValidationError("Product '%s' is not founded" % row[0])
        elif self.imp_product_by == "name":
            product_id = product_obj.search([('name', '=', row[0] or "_____________")])
            if not product_id:
                raise ValidationError("Product '%s' is not founded" % row[0])
        else:
            raise exceptions.Warning(_('Please select product by'))
        stock_prod_lot_obj = self.env['stock.production.lot'].search([
            ('name', '=', row[2] or "_____________")
        ])
        # create a new lot only when tracking is enabled and none exists
        if self.ser_no_lot_expi == True and stock_prod_lot_obj.id == False:
            new_inventory_line_val_ids.update({
                'name': int(row[2]) or '',
                'product_id': product_id.id,
                'life_date': date or '',
            })
            new_lot_serial = prod_lot_obj.create(
                new_inventory_line_val_ids)
        new_inventory_line_val.update({
            'inventory_id': final_created_id.id,
            'product_id': product_id.id,
            'product_qty': row[1] or '',
            'location_id': self.loc_name.id,
            'prod_lot_id': stock_prod_lot_obj.id or new_lot_serial.id or False,
        })
        final_line = inventory_line_obj.create(new_inventory_line_val)
    final_created_id.action_done()
def Import_inventory(self):
    """Legacy inventory importer: build a confirmed stock.inventory with
    one line per file row, creating a lot/serial per row.

    Row layout: product key, quantity, lot/serial number, expiry date
    (dd-mm-YYYY). Unlike :meth:`import_inventory`, a lot record is
    created for every row regardless of whether one already exists.

    :raises exceptions.Warning: when file/type or matching mode is unset
    """
    if self.file_type and self.data_file:
        if self.file_type == 'csv':
            reader = pycompat.csv_reader(
                io.BytesIO(base64.decodestring(self.data_file)),
                quotechar=",", delimiter=",")
            reader = iter(reader)
            next(reader)  # skip the header line
            file_data = reader
        elif self.file_type == 'xls':
            workbook = xlrd.open_workbook(
                file_contents=base64.decodestring(self.data_file))
            sheet = workbook.sheet_by_index(0)
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                    for r in range(sheet.nrows)]
            data.pop(0)  # drop the header row
            file_data = data
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or picking type'))

    product_obj = self.env['product.product']
    inventory_obj = self.env['stock.inventory']
    new_inventory_val = inventory_obj.default_get(
        inventory_obj.fields_get()).copy()
    new_inventory_val.update({
        'name': self.inv_name,
        'state': 'confirm',
        'location_id': self.loc_name.id,
    })
    final_created_id = inventory_obj.create(new_inventory_val)

    prod_lot_obj = self.env['stock.production.lot']
    inventory_line_obj = self.env['stock.inventory.line']
    for row in file_data:
        lot_vals = prod_lot_obj.default_get(
            prod_lot_obj.fields_get()).copy()
        line_vals = inventory_line_obj.default_get(
            inventory_line_obj.fields_get()).copy()
        date = datetime.datetime.strptime(row[3], "%d-%m-%Y")
        stock_prod_lot_obj = prod_lot_obj.search(
            [('name', '=', int(row[2]))])
        # The three original branches only differed in the search field;
        # debug prints were removed.
        if self.imp_product_by == "code":
            product = product_obj.search([('default_code', '=', row[0])])
        elif self.imp_product_by == "barcode":
            product = product_obj.search([('barcode', '=', int(row[0]))])
        elif self.imp_product_by == "name":
            product = product_obj.search([('name', 'like', row[0])])
        else:
            raise exceptions.Warning(_('Please select product by'))
        lot_vals.update({
            'name': self.ser_no_lot_expi and int(row[2]) or '',
            'product_id': product.id,
            'life_date': self.ser_no_lot_expi and date or '',
        })
        # NOTE(review): a lot is created for every row, even when an
        # existing lot is found and used below — confirm this is intended.
        new_lot_serial = prod_lot_obj.create(lot_vals)
        line_vals.update({
            'inventory_id': final_created_id.id,
            'product_id': product.id,
            'product_qty': row[1],
            'location_id': self.loc_name.id,
            'prod_lot_id': stock_prod_lot_obj
                and stock_prod_lot_obj.id or new_lot_serial.id,
        })
        inventory_line_obj.create(line_vals)
def Import_sale_order(self):
    """Import sale orders from CSV/XLS, grouping lines by column 0.

    Row layout: 0 order ref/group, 1 customer, 2 currency, 3 product,
    4 qty, 5 uom (unused), 6 description, 7 unit price, 8 salesperson,
    9 tax name, 10 validity date (dd-mm-YYYY). One sale.order is created
    per distinct value of column 0; debug prints were removed.

    :raises exceptions.Warning: when file/type/sequence option is unset
    :raises ValidationError: when a referenced record cannot be found
    """
    partner_obj = self.env['res.partner']
    currency_obj = self.env['res.currency']
    product_obj = self.env['product.product']
    salesperson_obj = self.env['res.users']
    sale_result = {}
    sale_obj = self.env['sale.order']
    sale_default_value = sale_obj.default_get(sale_obj.fields_get())
    sale_line_obj = self.env['sale.order.line']
    line_fields = sale_line_obj.fields_get()
    file_data = False
    if self.select_file and self.data_file and self.seq_opt:
        if self.select_file == 'csv':
            reader = pycompat.csv_reader(
                io.BytesIO(base64.decodestring(self.data_file)),
                quotechar=",", delimiter=",")
            reader = iter(reader)
            next(reader)  # skip the header line
            file_data = reader
        elif self.select_file == 'xls':
            workbook = xlrd.open_workbook(
                file_contents=base64.decodestring(self.data_file))
            sheet = workbook.sheet_by_index(0)
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                    for r in range(sheet.nrows)]
            data.pop(0)  # drop the header row
            file_data = data
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or seqeuance'))
    for row in file_data:
        dt = datetime.datetime.strptime(row[10], "%d-%m-%Y")
        tax_search = self.env['account.tax'].search(
            [('name', '=', row[9]), ('type_tax_use', '=', 'sale')])
        partner = partner_obj.search([('name', 'like', row[1])])
        currency = currency_obj.search([('name', 'like', row[2])])
        product = product_obj.search([('name', 'like', row[3])])
        salesperson = salesperson_obj.search([('name', 'like', row[8])])
        if not partner:
            raise ValidationError("Customer '%s' not found" % row[1])
        if not product:
            raise ValidationError("Product '%s' not found" % row[3])
        if not salesperson:
            raise ValidationError("Sales User '%s' not found" % row[8])
        if not currency:
            raise ValidationError("Currency '%s' not found" % row[2])
        sale_obj_update = sale_default_value.copy()
        sale_obj_update.update({
            'partner_id': partner[0].id or row[0],
            'name': self.seq_opt == 'f_sequence' and row[0] or 'New',
            'currency_id': currency and currency.id or False,
            'user_id': salesperson and salesperson[0].id or False,
            'validity_date': dt,
        })
        # Run the partner onchange on a virtual record to pick up the
        # partner-dependent defaults (currency in particular).
        sale_new = sale_obj.new(sale_obj_update)
        sale_new.onchange_partner_id()
        sale_obj_update.update({'currency_id': sale_new.currency_id.id})
        line_vals = sale_line_obj.default_get(line_fields).copy()
        line_vals.update({
            'name': row[6],
            'product_id': product.id,
            # NOTE(review): 'quantity'/'uom_id' are not the standard
            # sale.order.line field names (product_uom_qty / product_uom)
            # — confirm they are intended.
            'quantity': row[4] and int(row[4]) or 1,
            'uom_id': product.uom_id.id,
            'price_unit': row[7] and int(row[7]) or 1,
            'tax_id': [(6, 0, tax_search.ids)],
        })
        line_new = sale_line_obj.new(line_vals)
        line_vals.update({
            'name': line_new.name,
            'tax_id': [(6, 0, line_new.tax_id
                        and line_new.tax_id.ids or tax_search.ids)] or "",
        })
        l2 = [(0, 0, line_vals)]
        if sale_result.get(row[0]):
            # Append the line to the already-accumulated order.
            l1 = sale_result[row[0]]['order_line']
            sale_result[row[0]].update({'order_line': l1 + l2})
        if not sale_result.get(row[0]):
            sale_obj_update.update({'order_line': l2})
            sale_result[row[0]] = sale_obj_update
    for sale_data in sale_result.values():
        sale_var = sale_obj.create(sale_data)
        if self.import_state == "confirm":
            sale_var.action_confirm()
    return True
def import_payment(self, vals): if self.data_file == 'csv': csv_reader_data = pycompat.csv_reader(io.BytesIO( base64.decodestring(self.data_file)), quotechar=",", delimiter=",") csv_reader_data = iter(csv_reader_data) next(csv_reader_data) file_data = csv_reader_data else: file_datas = base64.decodestring(self.data_file) workbook = xlrd.open_workbook(file_contents=file_datas) sheet = workbook.sheet_by_index(0) result = [] data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)] for r in range(sheet.nrows)] data.pop(0) file_data = data for row in file_data: if row[0] != "": partner = self.env['res.partner'].search([ ('name', '=', row[0] or "_____________") ]) if not partner: raise ValidationError( "Customer/Vendor '%s' is not founded" % row[0]) else: raise ValidationError("Please Assign Customer/Vendor Name.") if row[2] != "": account = self.env['account.journal'].search([ ('name', '=', row[2] or "_____________") ]) if not account: raise ValidationError( "PAYMENT JOURNAL '%s' is not founded" % row[2]) else: raise ValidationError("Please Assign PAYMENT JOURNAL.") payment_vals = { 'partner_type': self.payment_type == 'customer_py' and 'customer' or 'supplier', 'partner_id': partner.id, 'payment_date': datetime.now(), 'journal_id': account.id, 'amount': row[1], 'communication': row[4], 'payment_method_id': 2, 'state': 'draft', 'payment_type': self.payment_type == 'customer_py' and 'inbound' or 'outbound', } payment = self.env['account.payment'].create(payment_vals) payment.post()
def import_sale_data(self):
    """Import sale orders from the attached CSV, confirm them, and push
    deliveries/invoices through; log row errors into an error.log record.

    Rows are grouped by the 'Group' column; per-row resolution and
    accumulation is delegated to the ``_get_order_*`` helpers. Returns
    the error-log window action filtered to this run's log.
    """
    self.ensure_one()
    ctx = self._context.copy()
    model = self.env['ir.model'].search([('model', '=', 'sale.order')])
    # Per-run caches keyed by the values seen in the file, shared with
    # the _get_order_* helpers to avoid repeated lookups.
    product_dict = {}
    partner_dict = {}
    pricelist_dict = {}
    order_item_dict = {}
    order_dict = {}
    picking_dict = {}
    warehouse_dict = {}
    team_dict = {}
    carrier_dict = {}
    error_log_id = False
    ir_attachment_obj = self.env['ir.attachment']
    ir_attachment = ir_attachment_obj.create({
        'name': self.datas_fname,
        'datas': self.input_file,
        'datas_fname': self.datas_fname
    })
    csv_data = base64.decodebytes(self.input_file)
    csv_iterator = pycompat.csv_reader(
        io.BytesIO(csv_data),
        quotechar='"',
        delimiter=','
    )
    try:
        sheet_fields = next(csv_iterator)
    # FIX: was a bare ``except:`` — narrow it so SystemExit/KeyboardInterrupt
    # are not swallowed; StopIteration (empty file) and decode errors still
    # produce the user-facing message.
    except Exception:
        raise Warning(_("Please import a CSV file with UTF-8 encoding."))
    # Column validation.
    missing_columns = list(set(FIELDS_TO_IMPORT) - set(sheet_fields))
    if missing_columns:
        raise Warning(
            _('Following columns are missing: \n %s' %
              ('\n'.join(missing_columns))
              )
        )
    order_group = sheet_fields.index('Group')
    partner_name = sheet_fields.index('Customer')
    partner_tel = sheet_fields.index('Customer Phone/Mobile')
    product_id = sheet_fields.index('Line Product')
    line_name = sheet_fields.index('Line Description')
    price_unit = sheet_fields.index('Line Unit Price')
    product_qty = sheet_fields.index('Line Qty')
    taxes_id = sheet_fields.index('Line Tax')
    notes = sheet_fields.index('Notes')
    pricelist_id = sheet_fields.index('Pricelist')
    warehouse_id = sheet_fields.index('Warehouse')
    team_id = sheet_fields.index('Team')
    carrier_id = sheet_fields.index('Carrier')
    for row in csv_iterator:
        # Skip rows that are empty in every column.
        check_list = []
        if not bool(row[order_group].strip()):
            for r in row:
                if bool(r.strip()):
                    check_list.append(r)
        if not bool(row[order_group].strip()) and not check_list:
            continue
        error_line_vals = {'error_name': '', 'error': False}
        ctx.update({'partner_name': partner_name})
        partner_value, product_id_value, pricelist_value,\
            warehouse_value, team_value, carrier_value = \
            self.with_context(ctx)._get_order_value_dict(
                row, error_line_vals, partner_tel, product_id,
                pricelist_id, warehouse_id, team_id, carrier_id,
                partner_dict, product_dict, pricelist_dict, picking_dict,
                warehouse_dict, team_dict, carrier_dict)
        taxes = []
        qty, price_unit_value = self._get_order_value(row,
                                                      error_line_vals,
                                                      taxes, price_unit,
                                                      taxes_id,
                                                      product_qty)
        picking_policy = self.picking_policy
        order = row[order_group].strip()
        error_log_id = self._update_error_log(error_log_id,
                                              error_line_vals,
                                              ir_attachment, model,
                                              csv_iterator.line_num,
                                              order)
        self._get_order_item_dict(error_log_id, row, order, taxes,
                                  line_name, product_dict,
                                  product_id_value, order_item_dict, qty,
                                  price_unit_value)
        self._get_order_dict(error_log_id, order_dict, order,
                             partner_dict, partner_value, pricelist_dict,
                             pricelist_value, picking_dict,
                             picking_policy, team_dict, team_value,
                             carrier_dict, carrier_value, warehouse_dict,
                             warehouse_value, row, notes)
    if not error_log_id:
        # No row produced an error: record a clean 'done' log for this run.
        error_log_id = self.env['error.log'].create({
            'input_file': ir_attachment.id,
            'import_user_id': self.env.user.id,
            'import_date': datetime.now(),
            'state': 'done',
            'model_id': model.id}).id
    for item in order_item_dict:
        order_id = self._get_order_id(order_dict[item], item,
                                      error_log_id)
        for so_line in order_item_dict[item]:
            if not so_line['invoiceable']:
                order_id.invoiceable = False
            self._get_orderline_id(so_line, order_id)
        order_id.action_confirm()
        if order_id.picking_ids:
            for picking in order_id.picking_ids:
                picking.action_assign()
        if order_id.invoiceable:
            order_id.action_invoice_create()
        if order_id.invoice_ids:
            for invoice in order_id.invoice_ids:
                invoice.journal_id = \
                    self.customer_invoice_journal_id.id
                if invoice.state == 'draft':
                    invoice.action_invoice_open()
                    invoice.pay_and_reconcile(
                        self.customer_payment_journal_id.id
                    )
    res = self.env.ref('base_import_log.error_log_action')
    res = res.read()[0]
    res['domain'] = str([('id', 'in', [error_log_id])])
    return res
def Import_partner(self):
    """Create or update res.partner records from the uploaded CSV/XLS.

    Row layout: 0 name, 1 company type, 2 parent, 3 street, 4 street2,
    5 city, 6 state, 7 zip, 8 country, 9 website, 10 phone, 11 mobile,
    12 email, 13 customer flag, 14 supplier flag, 15 salesperson,
    16 internal reference, 17 customer payment term, 18 vendor payment
    term. An existing partner matched on (name, ref) is updated,
    otherwise a new one is created. Debug prints were removed.

    :raises exceptions.Warning: when file or file type is not set
    :raises ValidationError: when a referenced record cannot be found
    """
    partner_obj = self.env['res.partner']
    if self.select_file and self.data_file:
        if self.select_file == 'csv':
            reader = pycompat.csv_reader(
                io.BytesIO(base64.decodestring(self.data_file)),
                quotechar=",", delimiter=",")
            reader = iter(reader)
            next(reader)  # skip the header line
            file_data = reader
        elif self.select_file == 'xls':
            workbook = xlrd.open_workbook(
                file_contents=base64.decodestring(self.data_file))
            sheet = workbook.sheet_by_index(0)
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                    for r in range(sheet.nrows)]
            data.pop(0)  # drop the header row
            file_data = data
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or picking type'))
    for row in file_data:
        search_partner = self.env['res.partner'].search(
            [('name', '=', row[0]), ('ref', '=', row[16])])
        search_parent_partner = self.env['res.partner'].search(
            [('name', '=', row[2])])
        search_salesperson = self.env['res.users'].search(
            [('name', '=', row[15])])
        search_cust_payment_term = self.env['account.payment.term'].search(
            [('name', '=', row[17])])
        search_vendar_payment_term = self.env['account.payment.term'].search(
            [('name', '=', row[18])])
        search_country = self.env['res.country'].search(
            [('name', '=', row[8])])
        search_state = self.env['res.country.state'].search(
            [('name', '=', row[6])])
        if not search_salesperson:
            raise ValidationError(
                "Salesperson ids '%s' not found" % row[15])
        if not search_cust_payment_term:
            raise ValidationError(
                "customer payment '%s' not found" % row[17])
        if not search_vendar_payment_term:
            raise ValidationError(
                "Vendar payment '%s' not found" % row[18])
        if not search_country:
            # FIX: the original formatted row[18] (vendor payment term)
            # into the country message; the country comes from row[8].
            raise ValidationError("Country '%s' not found" % row[8])
        if not search_state:
            raise ValidationError("State '%s' not found" % row[6])
        new_partner_val = partner_obj.default_get(
            partner_obj.fields_get()).copy()
        new_partner_val.update({
            'name': row[0],
            'company_type': row[1],
            'parent_id': search_parent_partner
                and search_parent_partner.id or '',
            'street': row[3],
            'street2': row[4],
            'city': row[5],
            # NOTE(review): res.partner's m2o field is ``state_id``, not
            # ``state`` — confirm which field this key is meant to write.
            'state': search_state.id and search_state.id or row[6],
            'zip': row[7],
            'country_id': search_country.id
                and search_country.id or row[8],
            'website': row[9],
            'phone': row[10],
            'mobile': row[11],
            'email': row[12],
            'customer': row[13],
            'supplier': row[14],
            'user_id': search_salesperson.id
                and search_salesperson.id or row[15],
            'ref': row[16],
            'property_payment_term_id': search_cust_payment_term.id
                and search_cust_payment_term.id or row[17],
            'property_supplier_payment_term_id':
                search_vendar_payment_term.id
                and search_vendar_payment_term.id or row[18],
        })
        if search_partner:
            search_partner.write(new_partner_val)
        else:
            partner_obj.create(new_partner_val)
def Import_purchase_order(self):
    """Import purchase orders from CSV/XLS, grouping lines by column 0.

    Row layout: 0 order ref/group, 1 vendor, 2 currency, 3 product,
    4 qty, 5 uom (unused), 6 description, 7 unit price, 8 tax name,
    9 order date (dd-mm-YYYY). One purchase.order is created per
    distinct value of column 0. Debug prints were removed.

    :raises exceptions.Warning: when file/type/sequence/state is unset
    :raises ValidationError: when a referenced record cannot be found
    """
    partner_obj = self.env['res.partner']
    currency_obj = self.env['res.currency']
    product_obj = self.env['product.product']
    purchase_result = {}
    purchase_obj = self.env['purchase.order']
    purchase_default_value = purchase_obj.default_get(
        purchase_obj.fields_get())
    purchase_line_obj = self.env['purchase.order.line']
    line_fields = purchase_line_obj.fields_get()
    file_data = False
    if self.select_file and self.data_file and self.seq_opt and self.state_stage:
        if self.select_file == 'csv':
            reader = pycompat.csv_reader(
                io.BytesIO(base64.decodestring(self.data_file)),
                quotechar=",", delimiter=",")
            reader = iter(reader)
            next(reader)  # skip the header line
            file_data = reader
        elif self.select_file == 'xls':
            workbook = xlrd.open_workbook(
                file_contents=base64.decodestring(self.data_file))
            sheet = workbook.sheet_by_index(0)
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                    for r in range(sheet.nrows)]
            data.pop(0)  # drop the header row
            file_data = data
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or seqeuance'))
    for row in file_data:
        # FIX: taxes were searched with type_tax_use='sale' (copy-paste
        # from the sale importer); purchase taxes use 'purchase'.
        tax_search = self.env['account.tax'].search(
            [('name', '=', row[8]), ('type_tax_use', '=', 'purchase')])
        dt = datetime.datetime.strptime(row[9], "%d-%m-%Y")
        partner = partner_obj.search([('name', 'like', row[1])])
        currency = currency_obj.search([('name', 'like', row[2])])
        product = product_obj.search([('name', 'like', row[3])])
        if not partner:
            raise ValidationError("Customer '%s' not found" % row[1])
        if not product:
            raise ValidationError("Product '%s' not found" % row[3])
        if not currency:
            raise ValidationError("Currency '%s' not found" % row[2])
        purchase_obj_update = purchase_default_value.copy()
        purchase_obj_update.update({
            'partner_id': partner[0].id or row[0],
            'name': self.seq_opt == 'f_sequence' and row[0] or 'New',
            'currency_id': currency and currency.id or False,
            'state': self.state_stage == 'draft' and 'draft' or 'purchase',
            'date_order': dt,
        })
        # Run the partner onchange on a virtual record to pick up the
        # partner-dependent currency. (FIX: the original rebound
        # ``purchase_obj`` to this virtual record, clobbering the model
        # used by create() below.)
        purchase_new = purchase_obj.new(purchase_obj_update)
        purchase_new.onchange_partner_id()
        purchase_obj_update.update(
            {'currency_id': purchase_new.currency_id.id})
        line_vals = purchase_line_obj.default_get(line_fields).copy()
        line_vals.update({
            'name': row[6],
            'date_planned': datetime.datetime.now(),
            'product_id': product.id,
            'product_qty': row[4] and int(row[4]) or 1,
            'product_uom': product.uom_id.id,
            'price_unit': row[7] and int(row[7]) or 1,
            'taxes_id': [(6, 0, tax_search.ids)],
        })
        line_new = purchase_line_obj.new(line_vals)
        line_vals.update({
            'name': line_new.name,
            'taxes_id': [(6, 0, line_new.taxes_id
                          and line_new.taxes_id.ids
                          or tax_search.ids)] or False,
        })
        l2 = [(0, 0, line_vals)]
        if purchase_result.get(row[0]):
            # Append the line to the already-accumulated order.
            l1 = purchase_result[row[0]]['order_line']
            purchase_result[row[0]].update({'order_line': l1 + l2})
        if not purchase_result.get(row[0]):
            purchase_obj_update.update({'order_line': l2})
            purchase_result[row[0]] = purchase_obj_update
    for purchase_data in purchase_result.values():
        purchase_obj.create(purchase_data)
    return True
def Import_picking_order(self):
    """Create one stock.picking (with a single move) per file row.

    Row layout: 1 partner, 2 origin, 3 scheduled date (dd-mm-YYYY),
    4 product, 5 quantity. Source/destination locations and picking
    type come from the wizard. Debug prints were removed.

    :raises exceptions.Warning: when file/type/picking type is unset
    :raises ValidationError: when partner or product cannot be found
    """
    partner_obj = self.env['res.partner']
    product_obj = self.env['product.product']
    picking_obj = self.env['stock.picking']
    picking_default_value = picking_obj.default_get(
        picking_obj.fields_get())
    file_data = False
    if self.select_file and self.data_file and self.picking_type:
        if self.select_file == 'csv':
            reader = pycompat.csv_reader(
                io.BytesIO(base64.decodestring(self.data_file)),
                quotechar=",", delimiter=",")
            reader = iter(reader)
            next(reader)  # skip the header line
            file_data = reader
        elif self.select_file == 'xls':
            workbook = xlrd.open_workbook(
                file_contents=base64.decodestring(self.data_file))
            sheet = workbook.sheet_by_index(0)
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                    for r in range(sheet.nrows)]
            data.pop(0)  # drop the header row
            file_data = data
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or picking type'))
    for each in file_data:
        partner = partner_obj.search([('name', 'like', each[1])])
        product = product_obj.search([('name', 'like', each[4])])
        stock_picking_vals = picking_default_value.copy()
        if not partner:
            raise ValidationError("Partner '%s' not found" % each[1])
        if not product:
            raise ValidationError("Product '%s' not found" % each[4])
        dt = datetime.datetime.strptime(each[3], "%d-%m-%Y")
        lines = [(0, 0, {
            'product_id': product.id,
            # NOTE(review): hard-coded uom id 2 alongside the product's
            # real uom below — confirm on the target database.
            'product_uom_id': 2,
            'product_uom_qty': each[5],
            'name': product.name,
            'product_uom': product.uom_id.id,
        })]
        stock_picking_vals.update({
            'partner_id': partner.id,
            'location_id': self.source_loc.id,
            'location_dest_id': self.destination_loc.id,
            'picking_type_id': self.picking_type.id,
            'move_type': 'direct',
            'origin': each[2],
            'scheduled_date': dt,
            'move_lines': lines,
        })
        self.env['stock.picking'].create(stock_picking_vals)
def Import_customer_invoice(self):
    """Import customer/vendor invoices from CSV/XLS, grouping lines by
    column 0.

    Row layout: 0 invoice ref/group, 1 partner, 2 currency, 3 product,
    4 qty, 5 uom (unused), 6 description, 7 unit price, 8 salesperson,
    9 tax name, 10 invoice date (dd-mm-YYYY). One account.invoice is
    created per distinct value of column 0. Debug prints were removed.

    :raises exceptions.Warning: when file/type/sequence/type is unset
    :raises ValidationError: when a referenced record cannot be found
    """
    partner_obj = self.env['res.partner']
    currency_obj = self.env['res.currency']
    product_obj = self.env['product.product']
    salesperson_obj = self.env['res.users']
    inv_result = {}
    invoice_obj = self.env['account.invoice']
    inv_default_value = invoice_obj.default_get(invoice_obj.fields_get())
    invoice_line_obj = self.env['account.invoice.line']
    line_fields = invoice_line_obj.fields_get()
    file_data = False
    if self.select_file and self.data_file and self.seq_opt and self.type:
        if self.select_file == 'csv':
            reader = pycompat.csv_reader(
                io.BytesIO(base64.decodestring(self.data_file)),
                quotechar=",", delimiter=",")
            reader = iter(reader)
            next(reader)  # skip the header line
            file_data = reader
        elif self.select_file == 'xls':
            workbook = xlrd.open_workbook(
                file_contents=base64.decodestring(self.data_file))
            sheet = workbook.sheet_by_index(0)
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                    for r in range(sheet.nrows)]
            data.pop(0)  # drop the header row
            file_data = data
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or seqeuance properly'))
    for row in file_data:
        # NOTE(review): taxes are always looked up with
        # type_tax_use='sale', even when importing vendor bills
        # (self.type == 'in_invoice') — confirm this is intended.
        taxes_ids = self.env['account.tax'].search(
            [('name', '=', row[9]), ('type_tax_use', '=', 'sale')])
        dt = datetime.datetime.strptime(row[10], "%d-%m-%Y")
        partner = partner_obj.search([('name', 'like', row[1])])
        currency = currency_obj.search([('name', 'like', row[2])])
        product = product_obj.search([('name', 'like', row[3])])
        salesperson = salesperson_obj.search([('name', 'like', row[8])])
        if not partner:
            raise ValidationError("Customer '%s' not found" % row[1])
        if not product:
            raise ValidationError("Product '%s' not found" % row[3])
        if not currency:
            raise ValidationError("currency '%s' not found" % row[2])
        if not salesperson:
            raise ValidationError("Sales User '%s' not found" % row[8])
        inv_obj_update = inv_default_value.copy()
        inv_obj_update.update({
            'partner_id': partner[0].id or row[0],
            'move_name': self.seq_opt == 'f_sequence' and row[0] or '',
            'name': self.seq_opt == 'f_sequence' and row[0] or '',
            'date_invoice': dt,
            'currency_id': currency and currency.id or False,
            'user_id': salesperson and salesperson[0].id or False,
            'type': self.type == 'out_invoice'
                and 'out_invoice' or 'in_invoice',
        })
        # Run the partner onchange on a virtual record to pick up the
        # partner-dependent account, journal and currency.
        inv_new = invoice_obj.new(inv_obj_update)
        inv_new._onchange_partner_id()
        inv_obj_update.update({
            'account_id': inv_new.account_id.id,
            'journal_id': inv_new.journal_id.id,
            'currency_id': inv_new.currency_id.id,
        })
        line_vals = invoice_line_obj.with_context(
            {'journal_id': inv_new.journal_id.id}).default_get(
            line_fields).copy()
        line_vals.update({
            'name': row[6],
            'product_id': product.id,
            'quantity': row[4] and int(row[4]) or 1,
            'uom_id': product.uom_id.id,
            'price_unit': row[7] and int(row[7]) or 1,
            'invoice_line_tax_ids': [(6, 0, taxes_ids.ids)] or False,
        })
        l2 = [(0, 0, line_vals)]
        line_new = invoice_line_obj.new(line_vals)
        line_new._onchange_product_id()
        line_vals.update({
            'name': line_new.name,
            'account_id': line_new.account_id.id,
            'invoice_line_tax_ids': [(6, 0, line_new.invoice_line_tax_ids
                                      and line_new.invoice_line_tax_ids.ids
                                      or taxes_ids.ids)] or False,
        })
        if inv_result.get(row[0]):
            # Append the line to the already-accumulated invoice.
            l1 = inv_result[row[0]]['invoice_line_ids']
            inv_result[row[0]].update({'invoice_line_ids': l1 + l2})
        if not inv_result.get(row[0]):
            inv_obj_update.update({'invoice_line_ids': l2})
            inv_result[row[0]] = inv_obj_update
    for invoice_data in inv_result.values():
        invoice_var = invoice_obj.create(invoice_data)
        if self.state == "validate":
            invoice_var.action_invoice_open()
    return True
def Import_product_order(self):
    """Create or update product.product records from the uploaded CSV/XLS.

    Row layout: 0 name, 1 default_code, 2 category, 3 type, 4 barcode
    (optional), 5 uom, 6 purchase uom, 7 list price, 9 weight,
    10 volume, 11 stock location (optional). An existing product matched
    on name is updated, otherwise created. Debug prints were removed and
    the two create-vals branches (differing only by 'barcode') merged.

    :raises exceptions.Warning: when file or file type is not set
    :raises ValidationError: when uom/category cannot be found
    """
    product_main_obj = self.env['product.product']
    file_data = False
    if self.select_file and self.data_file:
        if self.select_file == 'csv':
            reader = pycompat.csv_reader(
                io.BytesIO(base64.decodestring(self.data_file)),
                quotechar=",", delimiter=",")
            reader = iter(reader)
            next(reader)  # skip the header line
            file_data = reader
        elif self.select_file == 'xls':
            workbook = xlrd.open_workbook(
                file_contents=base64.decodestring(self.data_file))
            sheet = workbook.sheet_by_index(0)
            data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)]
                    for r in range(sheet.nrows)]
            data.pop(0)  # drop the header row
            file_data = data
    else:
        raise exceptions.Warning(
            _('Please select file and type of file or picking type'))
    for row in file_data:
        uom_ids = self.env['product.uom'].search(
            [('name', 'like', row[5])])
        uom_po_ids = self.env['product.uom'].search(
            [('name', 'like', row[6])])
        if not uom_ids:
            raise ValidationError("Uom ids '%s' not found" % row[5])
        if not uom_po_ids:
            raise ValidationError("uom_po_ids '%s' not found" % row[6])
        categ_id_ids = self.env['product.category'].search(
            [('name', 'like', row[2])])
        if not categ_id_ids:
            raise ValidationError("categ_ids '%s' not found" % row[2])
        product_obj = self.env['product.product']
        new_pro_up = product_obj.default_get(
            product_obj.fields_get()).copy()
        vals = {
            'name': row[0],
            'default_code': row[1],
            'type': row[3],
            'list_price': row[7],
            'categ_id': categ_id_ids.id and categ_id_ids.id or "",
            'uom_id': uom_ids.id,
            'uom_po_id': uom_po_ids.id,
            'weight': row[9],
            'volume': row[10],
        }
        # Barcode is only written when the cell is non-empty.
        if row[4] != '':
            vals['barcode'] = row[4]
        new_pro_up.update(vals)
        search_product = product_main_obj.search(
            [('name', '=', row[0])])
        if search_product:
            search_product.write(new_pro_up)
        else:
            product_main_obj.create(new_pro_up)
        if row[11] != '':
            product = self.env['product.product'].search(
                [('name', 'like', row[0])])
            self.stock_location = self.env.ref(
                'stock.stock_location_stock')
            location_id = self.env['stock.location'].search(
                [('name', 'like', row[11])])
            # NOTE(review): row[11] (a location *name*) is passed as the
            # quantity argument and the searched ``location_id`` is
            # unused — confirm which column holds the quantity and
            # whether ``location_id`` should be passed instead.
            self.env['stock.quant']._update_available_quantity(
                product, self.stock_location, row[11])