def test_import_only_children(self):
	"""Import a child-table-only CSV (Has Role) and then overwrite it.

	Verifies that uploading child rows attaches a role to the parent User,
	and that a second upload with overwrite=True replaces (not appends)
	the existing child rows.
	"""
	user_email = "*****@*****.**"
	# Clean slate so the inserted fixture is deterministic.
	if frappe.db.exists("User", user_email):
		frappe.delete_doc("User", user_email, force=True)
	frappe.get_doc({
		"doctype": "User",
		"email": user_email,
		"first_name": "Test Import UserRole",
	}).insert()

	exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
	content = read_csv_content(frappe.response.result)
	content.append(["", "*****@*****.**", "Blogger"])
	importer.upload(content)

	user = frappe.get_doc("User", user_email)
	self.assertTrue(frappe.db.get_value("Has Role",
		filters={"role": "Blogger", "parent": user_email, "parenttype": "User"}))
	# Fixed: was assertTrue(value, expected) which passes unconditionally
	# because the second argument is the assertion *message*.
	self.assertEqual(user.get("roles")[0].role, "Blogger")

	# Overwrite: re-import with a different role and overwrite=True.
	exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
	content = read_csv_content(frappe.response.result)
	content.append(["", "*****@*****.**", "Website Manager"])
	importer.upload(content, overwrite=True)

	user = frappe.get_doc("User", user_email)
	self.assertEqual(len(user.get("roles")), 1)
	self.assertEqual(user.get("roles")[0].role, "Website Manager")
def test_import(self):
	"""Import a Blog Category from a template CSV, then overwrite its title."""
	if frappe.db.exists("Blog Category", "test-category"):
		frappe.delete_doc("Blog Category", "test-category")

	exporter.get_template("Blog Category", all_doctypes="No", with_data="No")
	content = read_csv_content(frappe.response.result)
	# Fixed fixture typo ("Test Cateogry") so the equality assertion below holds.
	content.append(["", "", "test-category", "Test Category"])
	importer.upload(content)

	# Fixed: was assertTrue(value, expected) which passes unconditionally
	# because the second argument is the assertion *message*.
	self.assertEqual(
		frappe.db.get_value("Blog Category", "test-category", "title"),
		"Test Category")

	# export with data
	exporter.get_template("Blog Category", all_doctypes="No", with_data="Yes")
	content = read_csv_content(frappe.response.result)

	# overwrite
	content[-1][3] = "New Title"
	importer.upload(content, overwrite=True)
	self.assertEqual(
		frappe.db.get_value("Blog Category", "test-category", "title"),
		"New Title")
def test_import_only_children(self):
	"""Import UserRole child rows only, then overwrite them (legacy API)."""
	user_email = "*****@*****.**"
	if frappe.db.exists("User", user_email):
		frappe.delete_doc("User", user_email)
	frappe.get_doc({
		"doctype": "User",
		"email": user_email,
		"first_name": "Test Import UserRole",
	}).insert()

	exporter.get_template("UserRole", "User", all_doctypes="No", with_data="No")
	content = read_csv_content(frappe.response.result)
	content.append(["", "*****@*****.**", "Blogger"])
	importer.upload(content)

	user = frappe.get_doc("User", user_email)
	# assertEqual: assertEquals is a deprecated alias, and the role check
	# previously used assertTrue(value, expected) which always passes.
	self.assertEqual(len(user.get("user_roles")), 1)
	self.assertEqual(user.get("user_roles")[0].role, "Blogger")

	# overwrite
	exporter.get_template("UserRole", "User", all_doctypes="No", with_data="No")
	content = read_csv_content(frappe.response.result)
	content.append(["", "*****@*****.**", "Website Manager"])
	importer.upload(content, overwrite=True)

	user = frappe.get_doc("User", user_email)
	self.assertEqual(len(user.get("user_roles")), 1)
	self.assertEqual(user.get("user_roles")[0].role, "Website Manager")
def test_import_only_children(self):
	"""Import UserRole child rows via the queued upload, then overwrite them."""
	user_email = "*****@*****.**"
	if frappe.db.exists("User", user_email):
		frappe.delete_doc("User", user_email)
	frappe.get_doc({
		"doctype": "User",
		"email": user_email,
		"first_name": "Test Import UserRole",
	}).insert()

	exporter.get_template("UserRole", "User", all_doctypes="No", with_data="No")
	content = read_csv_content(frappe.response.result)
	content.append(["", "*****@*****.**", "Blogger"])
	importer.upload.queue(content)

	user = frappe.get_doc("User", user_email)
	# assertEqual: assertEquals is a deprecated alias, and the role check
	# previously used assertTrue(value, expected) which always passes.
	self.assertEqual(len(user.get("user_roles")), 1)
	self.assertEqual(user.get("user_roles")[0].role, "Blogger")

	# overwrite
	exporter.get_template("UserRole", "User", all_doctypes="No", with_data="No")
	content = read_csv_content(frappe.response.result)
	content.append(["", "*****@*****.**", "Website Manager"])
	importer.upload.queue(content, overwrite=True)

	user = frappe.get_doc("User", user_email)
	self.assertEqual(len(user.get("user_roles")), 1)
	self.assertEqual(user.get("user_roles")[0].role, "Website Manager")
def add_primacasa_item_group():
	"""One-off migration: create Item Group records from the primacasa CSV.

	Column 3 of each row holds the item group name; rows without one are
	skipped, as are groups that already exist.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused `upload` import

	with open(
			"/home/frappe/frappe-bench/apps/erpnext/erpnext/primacasa_items.csv",
			"r") as infile:
		rows = read_csv_content(infile.read())

	created = 0
	for row in rows:
		if row[3]:
			if not frappe.db.exists("Item Group", {"item_group_name": row[3]}):
				print(row[3])
				frappe.get_doc({
					"doctype": "Item Group",
					"company": 'Primacasa',
					"is_group": 1,
					"item_group_name": row[3],
					"parent_item_group": 'All Item Groups'
				}).insert(ignore_permissions=True)
				created += 1
	print('*************')
	print(created)
	print('*************')
def add_show_expert_accounts():
	"""One-off migration: create Account records from the account-tree CSV.

	Group accounts (row[7] == 1) additionally carry a root_type (row[6]);
	the two previous duplicated insert branches are merged.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused `upload` import

	with open(
			"/home/frappe/frappe-bench/apps/erpnext/erpnext/show_expert_account_tree.csv",
			"r") as infile:
		rows = read_csv_content(infile.read())

	count = 0
	for row in rows:
		# NOTE(review): `parent` is computed and printed but never used to
		# link the account — presumably informational; confirm against caller.
		parent = str(row[1]) + ' - ' + str(row[0]) + ' - ' + str(
			row[2]) + ' - S'
		account_name = str(row[3]) + ' - ' + str(row[5])
		print(parent)

		account = {
			"doctype": "Account",
			"account_name": account_name,
			"account_number": row[4],
			"is_group": row[7],
		}
		if row[7] == 1:
			account["root_type"] = row[6]
		frappe.get_doc(account).insert(ignore_permissions=True)
		count += 1
	print('*************')
	print(count)
	print('*************')
def add_primacasa_supplier():
	"""One-off migration: create Supplier records from the primacasa items CSV.

	Column 10 holds the supplier string; its first 5 chars are used as the
	tax id and the remainder as the Arabic name.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused `upload` import

	with open(
			"/home/frappe/frappe-bench/apps/erpnext/erpnext/primacasa_items.csv",
			"r") as infile:
		rows = read_csv_content(infile.read())

	created = 0
	for row in rows:
		if row[10]:
			if not frappe.db.exists("Supplier", {"supplier_name": row[10]}):
				print(str(row[10][0:5]) + ' ' + str(row[10][6:]))
				frappe.get_doc({
					"doctype": "Supplier",
					"company": 'Primacasa',
					"supplier_name": row[10],
					"supplier_name_in_arabic": row[10][6:],
					"tax_id": row[10][0:5],
					"supplier_group": 'Raw Material',
					"add_account": 1
				}).insert(ignore_permissions=True)
				created += 1
	print('*************')
	print(created)
	print('*************')
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True):
	"Import CSV using data import"
	from frappe.core.doctype.data_import import importer
	from frappe.utils.csvutils import read_csv_content

	site = get_site(context)

	# Fall back to a path relative to the bench root before giving up.
	if not os.path.exists(path):
		path = os.path.join('..', path)
	if not os.path.exists(path):
		print('Invalid path {0}'.format(path))
		sys.exit(1)

	with open(path, 'r') as csvfile:
		rows = read_csv_content(csvfile.read())

	frappe.init(site=site)
	frappe.connect()

	try:
		importer.upload(
			rows,
			submit_after_import=submit_after_import,
			no_email=no_email,
			ignore_encoding_errors=ignore_encoding_errors,
			overwrite=not only_insert,
			via_console=True,
		)
		frappe.db.commit()
	except Exception:
		print(frappe.get_traceback())

	frappe.destroy()
def get_transaction_entries(filename, headers):
	"""Read a bank-statement file (.csv/.xls/.xlsx) and return transaction dicts.

	Rows before the header row are skipped; once the header row is located,
	every subsequent non-empty row is converted via get_transaction_info.
	"""
	header_index = {}
	rows, transactions = [], []

	if (filename.lower().endswith("xlsx")):
		from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
		rows = read_xlsx_file_from_attached_file(file_id=filename)
	elif (filename.lower().endswith("csv")):
		from frappe.utils.file_manager import get_file_path
		from frappe.utils.csvutils import read_csv_content
		filepath = get_file_path(filename)
		with open(filepath, 'rb') as csvfile:
			rows = read_csv_content(csvfile.read())
	elif (filename.lower().endswith("xls")):
		rows = get_rows_from_xls_file(filename)
	else:
		# Fixed: message previously omitted .xls, which IS handled above.
		frappe.throw("Only .csv, .xls and .xlsx files are supported currently")

	for row in rows:
		# Skip empty rows and rows without a first cell.
		if len(row) == 0 or row[0] is None or not row[0]:
			continue
		if header_index:
			transaction = get_transaction_info(headers, header_index, row)
			transactions.append(transaction)
		elif is_headers_present(headers, row):
			header_index = get_header_index(headers, row)
	return transactions
def add_translation(ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
	"""One-off migration: load Arabic translations from Translation.csv.

	Inserts a Translation doc for unseen source strings; updates the target
	for existing ones. Ported to Python 3 print(); the raw string-formatted
	UPDATE (which broke on quotes and allowed SQL injection) is now
	parameterized.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused sys/upload imports

	with open('/home/frappe/frappe-bench/apps/client/Translation.csv',
			"r") as infile:
		rows = read_csv_content(infile.read())

	for index, row in enumerate(rows):
		if not frappe.db.exists("Translation", {"source_name": row[0]}) and (row[0] is not None):
			print(index, '---', row[0])
			frappe.get_doc({
				"doctype": "Translation",
				"language": 'ar',
				"source_name": row[0],
				"target_name": row[1]
			}).insert(ignore_permissions=True)
		else:
			try:
				frappe.db.sql(
					"""update `tabTranslation` set target_name=%s
						where source_name=%s""",
					(row[1], row[0]))
				print(row[0])
			except Exception:
				# best-effort: skip rows that fail to update
				pass
def add_usres_email():
	"""One-off migration: create System Users from emp.csv and set passwords.

	Username is built as "<first>.<last>" from the split full name (row[1]);
	rows without an email (row[11]) are skipped. Ported py2 prints to py3.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused sys/upload imports

	with open('/home/frappe/frappe-bench/apps/client/client/emp.csv',
			"r") as infile:
		rows = read_csv_content(infile.read())

	count = 0
	for index, row in enumerate(rows):
		count += 1
		name_parts = str(row[1]).split()
		if row[11]:
			print(name_parts[0] + '.' + name_parts[-1])
			frappe.get_doc({
				"doctype": "User",
				"user_type": 'System User',
				"email": row[11],
				"first_name": name_parts[0],
				"last_name": name_parts[-1],
				"language": "ar",
				"civil_id_no": row[9],
				"username": name_parts[0] + '.' + name_parts[-1],
				"new_password": 123,
				"send_welcome_email": 0
			}).insert(ignore_permissions=True)
			# row[5] presumably holds the real password — TODO confirm.
			_update_password(row[11], row[5])
	print(count)
def add_reports_to():
	"""One-off migration: set Employee.reports_to by matching designations.

	For each CSV row, find the manager whose designation equals row[3] and
	point the employee named in row[1] at them. Ported py2 prints to py3;
	the .format()-built query is now parameterized.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused sys/upload imports

	with open('/home/frappe/frappe-bench/apps/client/emps.csv',
			"r") as infile:
		rows = read_csv_content(infile.read())

	result_manager = frappe.db.sql(
		"select name,employee_name,Designation from tabEmployee")
	print(result_manager[1][0])

	for index, row in enumerate(rows):
		print(row[3])
		for manager in result_manager:
			if manager[2] == row[3]:
				print("adding......")
				emp_name = frappe.db.sql(
					"select name from tabEmployee where employee_name=%s",
					(row[1],))
				if emp_name:
					print(emp_name[0][0])
					doc_emp = frappe.get_doc('Employee', emp_name[0][0])
					doc_emp.reports_to = manager[0]
					doc_emp.save(ignore_permissions=True)
def add_emp():
	"""One-off migration: create Employee records from emp.csv.

	Ported Python 2 print statements to print(). NOTE(review): `cc` is
	never incremented, so the final print always shows 0 — kept to preserve
	the original output.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused sys/upload imports

	with open('/home/frappe/frappe-bench/apps/client/client/emp.csv',
			"r") as infile:
		rows = read_csv_content(infile.read())

	cc = 0
	for index, row in enumerate(rows):
		print(index)
		frappe.get_doc({
			"doctype": "Employee",
			"employee_name_english": row[1],
			"employee_name": row[2],
			"designation": row[3],
			"civil_id_no": row[5],
			"emp_nationality": row[6],
			"date_of_joining": row[7],
			"date_of_birth": row[8],
			"department": row[9],
			"branch": row[10],
			"user_id": row[11],
			"work_days": row[12],
			"naming_series": "EMP",
			"gender": "ذكر"
		}).insert(ignore_permissions=True)
	print(cc)
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True):
	"Import CSV using data import tool"
	from frappe.core.page.data_import_tool import importer
	from frappe.utils.csvutils import read_csv_content

	site = get_site(context)

	# Retry relative to the bench root before bailing out.
	if not os.path.exists(path):
		path = os.path.join('..', path)
	if not os.path.exists(path):
		print('Invalid path {0}'.format(path))
		sys.exit(1)

	with open(path, 'r') as csvfile:
		rows = read_csv_content(csvfile.read())

	frappe.init(site=site)
	frappe.connect()

	try:
		importer.upload(
			rows,
			submit_after_import=submit_after_import,
			no_email=no_email,
			ignore_encoding_errors=ignore_encoding_errors,
			overwrite=not only_insert,
			via_console=True,
		)
		frappe.db.commit()
	except Exception:
		print(frappe.get_traceback())

	frappe.destroy()
def get_transaction_entries(file_url, headers):
	"""Read a bank-statement file (.csv/.xls/.xlsx) and return transaction dicts.

	`headers` maps logical names to statement column labels; rows before the
	detected header row are skipped.
	"""
	header_index = {}
	rows, transactions = [], []

	if (file_url.lower().endswith("xlsx")):
		from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
		rows = read_xlsx_file_from_attached_file(file_url=file_url)
	elif (file_url.lower().endswith("csv")):
		from frappe.utils.csvutils import read_csv_content
		_file = frappe.get_doc("File", {"file_url": file_url})
		filepath = _file.get_full_path()
		with open(filepath, 'rb') as csvfile:
			rows = read_csv_content(csvfile.read())
	elif (file_url.lower().endswith("xls")):
		filename = file_url.split("/")[-1]
		rows = get_rows_from_xls_file(filename)
	else:
		# Fixed: message previously omitted .xls, which IS handled above.
		frappe.throw(_("Only .csv, .xls and .xlsx files are supported currently"))

	stmt_headers = headers.values()
	for row in rows:
		# Skip empty rows and rows without a first cell.
		if len(row) == 0 or row[0] is None or not row[0]:
			continue
		if header_index:
			transaction = get_transaction_info(stmt_headers, header_index, row)
			transactions.append(transaction)
		elif is_headers_present(stmt_headers, row):
			header_index = get_header_index(stmt_headers, row)
	return transactions
def setup_warehouses():
	"""Create warehouses per the System Setup single doc.

	Either creates one "General Warehouse" (no_warehouses flag) or imports
	warehouse names from the attached CSV/XLS/XLSX file. Removed the
	redundant recomputation of company/abbr inside the attachment branch and
	renamed the builtin-shadowing `file` local.
	"""
	system_doc = frappe.get_doc("System Setup")
	company = frappe.db.get_single_value('Global Defaults', 'default_company')
	abbr = frappe.get_value("Company", filters={'name': company}, fieldname='abbr')

	if system_doc.no_warehouses:
		delete_nongroup_warehouse_groups()
		if not frappe.db.exists("Warehouse", 'General Warehouse - ' + abbr):
			doc = frappe.new_doc("Warehouse")
			doc.warehouse_name = 'General Warehouse'
			# Attach under the oldest group warehouse (assumed root).
			doc.parent_warehouse = frappe.db.sql(
				"select name from `tabWarehouse` where is_group = 1 order by creation asc limit 1"
			)[0][0]
			doc.insert()
	elif system_doc.warehouses_attachment:
		file_doc = frappe.get_doc("File",
			{"file_url": system_doc.warehouses_attachment})
		filename = file_doc.get_full_path()
		with open(filename, "r", encoding="utf8") as infile:
			if frappe.safe_encode(filename).lower().endswith(
					"csv".encode('utf-8')):
				rows = read_csv_content(infile.read())
			elif frappe.safe_encode(filename).lower().endswith(
					"xls".encode('utf-8')):
				content = file_doc.get_content()
				rows = read_xls_file_from_attached_file(fcontent=content)
			elif frappe.safe_encode(filename).lower().endswith(
					"xlsx".encode('utf-8')):
				content = file_doc.get_content()
				rows = read_xlsx_file_from_attached_file(fcontent=content)
			else:
				# Fixed message typo ("used to for" -> "used for").
				frappe.throw(
					_("Only CSV and Excel files can be used for importing data. Please check the file format you are trying to upload"
					))
		delete_nongroup_warehouse_groups()
		for index, row in enumerate(rows):
			if index != 0:  # skip the header row
				if not frappe.db.exists("Warehouse", row[0] + ' - ' + abbr):
					doc = frappe.new_doc("Warehouse")
					doc.warehouse_name = row[0]
					doc.parent_warehouse = frappe.db.sql(
						"select name from `tabWarehouse` where is_group = 1 order by creation asc limit 1"
					)[0][0]
					doc.insert()
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False):
	"Import CSV using data import tool"
	from frappe.core.page.data_import_tool import importer
	from frappe.utils.csvutils import read_csv_content

	site = get_single_site(context)

	with open(path, 'r') as csvfile:
		content = read_csv_content(csvfile.read())

	frappe.init(site=site)
	frappe.connect()

	try:
		importer.upload(content, submit_after_import=submit_after_import,
			ignore_encoding_errors=ignore_encoding_errors,
			overwrite=not only_insert, via_console=True)
		frappe.db.commit()
	except Exception:
		# Fixed: was a Python 2 `print` statement (SyntaxError on py3).
		print(frappe.get_traceback())

	frappe.destroy()
def get_transaction_entries(filename, headers):
	"""Read a bank-statement file (.csv/.xls/.xlsx) and return transaction dicts.

	`headers` maps logical names to statement column labels; rows before the
	detected header row are skipped.
	"""
	header_index = {}
	rows, transactions = [], []

	if (filename.lower().endswith("xlsx")):
		from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
		rows = read_xlsx_file_from_attached_file(file_id=filename)
	elif (filename.lower().endswith("csv")):
		from frappe.utils.file_manager import get_file_path
		from frappe.utils.csvutils import read_csv_content
		filepath = get_file_path(filename)
		with open(filepath, 'rb') as csvfile:
			rows = read_csv_content(csvfile.read())
	elif (filename.lower().endswith("xls")):
		rows = get_rows_from_xls_file(filename)
	else:
		# Fixed: message previously omitted .xls, which IS handled above.
		frappe.throw("Only .csv, .xls and .xlsx files are supported currently")

	stmt_headers = headers.values()
	for row in rows:
		# Skip empty rows and rows without a first cell.
		if len(row) == 0 or row[0] is None or not row[0]:
			continue
		if header_index:
			transaction = get_transaction_info(stmt_headers, header_index, row)
			transactions.append(transaction)
		elif is_headers_present(stmt_headers, row):
			header_index = get_header_index(stmt_headers, row)
	return transactions
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None):
	"""Import a CSV file at `path` via the data import tool's upload()."""
	from frappe.utils.csvutils import read_csv_content
	from frappe.core.page.data_import_tool.importer import upload

	# Fixed: was a Python 2 `print` statement (SyntaxError on py3).
	print("Importing " + path)
	with open(path, "r") as infile:
		upload(rows=read_csv_content(infile.read()), ignore_links=ignore_links,
			overwrite=overwrite, submit_after_import=submit,
			pre_process=pre_process)
def upload(file):
	"""Import a consumption-report CSV into Zecons Food Entry records.

	First pass validates that every referenced Food Item exists (aborting
	with a combined error); second pass creates the entries. The report date
	is taken from the "Consumption Report for the" row.
	"""
	from frappe.utils.file_manager import get_file
	filepath = get_file(file)
	pps = read_csv_content(filepath[1])
	date = ''
	err_list = ''

	# Validation pass: collect all missing Food Items before importing.
	for pp in pps:
		if pp[0] not in ('TamilNadu PetroProducts',
				'Consumption Report for the', 'Slno', None, ''):
			if pp[2] is not None:
				if not frappe.db.exists("Food Item", pp[2]):
					# Fixed user-facing typo: "Not Fount" -> "Not Found".
					err_list += '<ul>Item - <b>%s</b> Not Found</ul><br>' % pp[2]
	if err_list:
		frappe.throw(err_list)
	else:
		for pp in pps:
			if pp[0] not in ('TamilNadu PetroProducts', 'Slno', '', None):
				if pp[0] == "Consumption Report for the":
					if pp[1]:
						# Dates appear in either d/m/Y or d-m-Y form.
						try:
							date = datetime.strptime(pp[1], '%d/%m/%Y')
						except ValueError:  # fixed: was a bare except
							date = datetime.strptime(pp[1], '%d-%m-%Y')
				if pp[2]:
					if frappe.db.exists("Food Item", pp[2]):
						zfe = frappe.new_doc("Zecons Food Entry")
						zfe.date = date
						zfe.code = pp[1]
						zfe.item = pp[2]
						zfe.rate = pp[3]
						zfe.qty = pp[4]
						zfe.cntr_value = pp[6]
						zfe.save(ignore_permissions=True)
	return 'ok'
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False):
	"Import CSV using data import tool"
	from frappe.core.page.data_import_tool import importer
	from frappe.utils.csvutils import read_csv_content

	site = get_site(context)

	with open(path, "r") as csvfile:
		content = read_csv_content(csvfile.read())

	frappe.init(site=site)
	frappe.connect()

	try:
		importer.upload(
			content,
			submit_after_import=submit_after_import,
			ignore_encoding_errors=ignore_encoding_errors,
			overwrite=not only_insert,
			via_console=True,
		)
		frappe.db.commit()
	except Exception:
		# Fixed: was a Python 2 `print` statement (SyntaxError on py3).
		print(frappe.get_traceback())

	frappe.destroy()
def add_items():
	"""One-off migration: create disabled stock Items from item1.csv.

	Ported Python 2 prints to print(). NOTE(review): `c` is never
	incremented, so the final print always shows 0 — kept to preserve
	the original output.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused sys/upload imports

	with open("/home/frappe/frappe-bench/apps/client/client/item1.csv",
			"r") as infile:
		rows = read_csv_content(infile.read())

	c = 0
	for index, row in enumerate(rows):
		print(index, row[0])
		frappe.get_doc({
			"doctype": "Item",
			"item_group": row[0],
			"subgroup_1": row[1],
			"subgroup_2": row[2],
			"item_name": row[3] + '-item',
			"item_code": row[3] + '-item',
			"stock_uom": 'kg',
			"disabled": 1,
			"is_stock_item": 1,
			"standard_rate": row[4],
			"is_fixed_asset": 0,
			"is_purchase_item": 1,
			"is_sales_item": 1
		}).insert(ignore_permissions=True)
	print(c)
def upload_csv_bank_statement():
	"""Parse the uploaded bank statement (csv/xls/xlsx) into columns + rows.

	Returns {"columns": [...], "data": [...]} where each data item is a
	dict keyed by the header row.
	"""
	encoded_name = frappe.safe_encode(frappe.local.uploaded_filename).lower()

	if encoded_name.endswith("csv".encode("utf-8")):
		from frappe.utils.csvutils import read_csv_content
		rows = read_csv_content(frappe.local.uploaded_file)
	elif encoded_name.endswith("xlsx".encode("utf-8")):
		from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
		rows = read_xlsx_file_from_attached_file(
			fcontent=frappe.local.uploaded_file)
	elif encoded_name.endswith("xls".encode("utf-8")):
		from frappe.utils.xlsxutils import read_xls_file_from_attached_file
		rows = read_xls_file_from_attached_file(frappe.local.uploaded_file)
	else:
		frappe.throw(_("Please upload a csv, xls or xlsx file"))

	header = rows.pop(0)
	columns = [{"field": label, "label": label} for label in header]
	data = [dict(zip(header, row)) for row in rows]
	return {"columns": columns, "data": data}
def read_csv(csv_path):
	"""Read the csv file from path and return list of dict"""
	from frappe.utils.csvutils import read_csv_content

	with open(csv_path, "r") as csv_file:
		parsed = read_csv_content(csv_file.read())
	# Drop the header row; return data rows only.
	return parsed[1:]
def test_export_with_all_doctypes(self):
	"""Export User with all child doctypes and data; check template layout."""
	exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True)
	content = read_csv_content(frappe.response.result)
	# Fixed: was assertTrue(value, expected) which passes unconditionally
	# because the second argument is the assertion *message*.
	self.assertEqual(content[1][1], "User")
	self.assertTrue('"Administrator"' in [c[1] for c in content if len(c) > 1])
	self.assertEqual(content[13][0], "DocType:")
	self.assertEqual(content[13][1], "User")
	self.assertTrue("Has Role" in content[13])
def test_export_with_all_doctypes(self):
	"""Export User with all child doctypes and data; check template layout."""
	exporter.get_template("User", all_doctypes="Yes", with_data="Yes")
	content = read_csv_content(frappe.response.result)
	# Fixed: assertTrue(value, expected) passes unconditionally (the second
	# argument is the message), and assertEquals is a deprecated alias.
	self.assertEqual(content[1][1], "User")
	self.assertTrue('"Administrator"' in [c[1] for c in content if len(c) > 1])
	self.assertEqual(content[13][0], "DocType:")
	self.assertEqual(content[13][1], "User")
	self.assertTrue("UserRole" in content[13])
def setup_suppliers():
	"""Create suppliers per the System Setup single doc.

	Either creates one "General Supplier" (no_suppliers flag) or imports
	suppliers from the attached CSV/XLS/XLSX file (columns: name, group,
	type). Fixed the `row[20]` typo (the type column is row[2], as in the
	"individual" branch) and renamed the builtin-shadowing `file` local.
	"""
	system_doc = frappe.get_doc("System Setup")
	company = frappe.db.get_single_value('Global Defaults', 'default_company')

	if system_doc.no_suppliers:
		delete_nongroup_supplier_groups()
		insert_suppliers_group(company, "General Group")
		if not frappe.db.exists("Supplier", "General Supplier"):
			doc = frappe.new_doc("Supplier")
			doc.supplier_name = "General Supplier"
			doc.supplier_group = "General Group"
			doc.supplier_type = "Individual"
			doc.insert()
	elif system_doc.suppliers_attachment:
		file_doc = frappe.get_doc("File",
			{"file_url": system_doc.suppliers_attachment})
		filename = file_doc.get_full_path()
		with open(filename, "r", encoding="utf8") as infile:
			if frappe.safe_encode(filename).lower().endswith(
					"csv".encode('utf-8')):
				rows = read_csv_content(infile.read())
			elif frappe.safe_encode(filename).lower().endswith(
					"xls".encode('utf-8')):
				content = file_doc.get_content()
				rows = read_xls_file_from_attached_file(fcontent=content)
			elif frappe.safe_encode(filename).lower().endswith(
					"xlsx".encode('utf-8')):
				content = file_doc.get_content()
				rows = read_xlsx_file_from_attached_file(fcontent=content)
			else:
				# Fixed message typo ("used to for" -> "used for").
				frappe.throw(
					_("Only CSV and Excel files can be used for importing data. Please check the file format you are trying to upload"
					))
		delete_nongroup_supplier_groups()
		for index, row in enumerate(rows):
			if index != 0:  # skip the header row
				insert_suppliers_group(company, row[1])
				if not frappe.db.exists("Supplier", row[0]):
					doc = frappe.new_doc("Supplier")
					doc.supplier_name = row[0]
					doc.supplier_group = row[1]
					if row[2].lower() == "individual":
						doc.supplier_type = "Individual"
					elif row[2].lower() == "company":  # fixed: was row[20]
						doc.supplier_type = "Company"
					else:
						frappe.throw(
							_("Supplier Type column values must be Company or Individual"
							))
					doc.insert()
def test_export_with_data(self):
	"""Export User with data and check the template header and data rows."""
	exporter.export_data("User", all_doctypes=True, template=True, with_data=True)
	content = read_csv_content(frappe.response.result)
	# Fixed: was assertTrue(value, expected) which passes unconditionally
	# because the second argument is the assertion *message*.
	self.assertEqual(content[1][1], "User")
	self.assertTrue(
		'"Administrator"' in [c[1] for c in content if len(c) > 1])
def upload_file(path, transactiontype, pos_transaction_date, filename, client=None):
	"""Import a POS transaction CSV, log the result and notify by email.

	Returns the name of the created sync-log document. On content errors a
	"Failed" log is written with the CSV attached; on success a "Successful"
	log is written linking the created Journal Entry. Any exception (e.g.
	missing file) produces a "File not found failure" log.
	"""
	try:
		# When called from a client context, resolve the stored file path.
		if client == 'yes':
			path = get_file_path(path)
		with open(encode(path), 'r') as f:
			content = f.read()
		rows = read_csv_content(content)
		result = importer.upload(rows, submit_after_import=True, update_only=False, ignore_encoding_errors=True, no_email=True)
		# generate JV name
		# Extract the Journal Entry name from the first message's HTML title
		# (text between the last 'J' and the last '<').
		title = result['messages'][0]['title']
		st = title.rfind('J')
		en = title.rfind('<')
		JV_name = title[st:en]
		transaction_link = JV_name
		error_status = result['error']
		# failed due to content error
		if error_status == True:
			log_name = make_sync_log("Failed", transactiontype, result, '#fff168', pos_transaction_date, None)
			# Attach the original CSV so the failure can be inspected.
			attachments = [{'fname': filename, 'fcontent': content}]
			send_email('Failed', transactiontype, result, pos_transaction_date, log_name, attachments)
			os.remove(path)
			return log_name
		#import is successful
		elif error_status == False:
			log_name = make_sync_log("Successful", transactiontype, result, '#9deca2', pos_transaction_date, transaction_link)
			os.remove(path)
			send_email('Successful', transactiontype, result, pos_transaction_date)
			return log_name
	except Exception as e:
		error = True
		log_name = make_sync_log("File not found failure", transactiontype, frappe.get_traceback(), '#ff4d4d', None, None)
		send_email('File not found failure', transactiontype, frappe.get_traceback(), pos_transaction_date, log_name)
		return log_name
	# NOTE(review): the code below appears unreachable — every branch above
	# returns, and `error` is only assigned inside the except block.
	if error:
		frappe.db.rollback()
	else:
		frappe.db.commit()
	return {"error": error}
def test_import(self):
	"""Import a Blog Category from a template CSV, then overwrite its title."""
	if frappe.db.exists("Blog Category", "test-category"):
		frappe.delete_doc("Blog Category", "test-category")

	exporter.get_template("Blog Category", all_doctypes="No", with_data="No")
	content = read_csv_content(frappe.response.result)
	# Fixed fixture typo ("Test Cateogry") so the equality assertion below holds.
	content.append(["", "", "test-category", "Test Category"])
	importer.upload(content)

	# Fixed: was assertTrue(value, expected) which passes unconditionally
	# because the second argument is the assertion *message*.
	self.assertEqual(frappe.db.get_value("Blog Category", "test-category",
		"title"), "Test Category")

	# export with data
	exporter.get_template("Blog Category", all_doctypes="No", with_data="Yes")
	content = read_csv_content(frappe.response.result)

	# overwrite
	content[-1][3] = "New Title"
	importer.upload(content, overwrite=True)
	self.assertEqual(frappe.db.get_value("Blog Category", "test-category",
		"title"), "New Title")
def upload():
	"""Import attendance records from the uploaded CSV.

	Requires create permission on Attendance. Small files (< 200 rows) are
	processed inline; larger ones are queued in the background.
	"""
	if not frappe.has_permission("Attendance", "create"):
		raise frappe.PermissionError

	from frappe.utils.csvutils import read_csv_content
	rows = read_csv_content(frappe.local.uploaded_file)
	if not rows:
		frappe.throw(_("Please select a csv file"))
	# Simplified from `True if len(rows) < 200 else False`.
	frappe.enqueue(import_attendances, rows=rows, now=len(rows) < 200)
def add_primacasa_items():
	"""One-off migration: create Items from the primacasa CSV.

	Skips the header row (index 0). Adds VAT tax when row[12] == 1 and
	supplier defaults/links when row[10] is set.
	"""
	from frappe.utils.csvutils import read_csv_content  # removed unused `upload` import

	with open(
			"/home/frappe/frappe-bench/apps/erpnext/erpnext/primacasa_items.csv",
			"r") as infile:
		rows = read_csv_content(infile.read())

	created = 0
	for index, row in enumerate(rows):
		if index:  # index 0 is the header row
			print(index)
			item = frappe.new_doc('Item')
			item.company = 'Primacasa'
			item.item_name = row[1]
			item.description = row[2]
			item.item_group = row[3]
			item.sub_item_group = row[4]
			item.item_company = row[5]
			item.standard_rate = row[6]
			item.valuation_rate = row[6]
			item.stock_uom = row[7]
			item.is_stock_item = 1
			item.item_code = row[11]
			# Opening stock only makes sense with a positive valuation rate.
			if flt(row[8]) > 0 and flt(row[6]) > 0:
				item.opening_stock = row[8]
			if cint(row[12]) == 1:
				item.append('taxes', {
					'tax_type': 'VAT 5% - P',
					'tax_rate': 5
				})
			if row[10]:
				item.append(
					'item_defaults', {
						'company': 'Primacasa',
						'default_warehouse': 'Stores - P',
						'default_price_list': 'Standard Selling',
						'default_supplier': row[10]
					})
				item.append('supplier_items', {
					'supplier': row[10],
					'supplier_part_no': str(row[10][0:5])
				})
			item.save()
			created += 1
	print('*************')
	print(created)
	print('*************')
def read_content(self, content, extension):
	"""Parse `content` into rows using the reader matching `extension`.

	Sets self.header_row to the first parsed row and self.data to the rest.
	Raises ValueError for unsupported extensions (previously this fell
	through and crashed later with UnboundLocalError).
	"""
	if extension == "csv":
		data = read_csv_content(content)
	elif extension == "xlsx":
		data = read_xlsx_file_from_attached_file(fcontent=content)
	elif extension == "xls":
		data = read_xls_file_from_attached_file(content)
	else:
		raise ValueError("Unsupported file extension: {0}".format(extension))

	self.header_row = data[0]
	self.data = data[1:]
def mark_attendance(file_url, start_date, end_date):
	"""Mark absences and overtime per employee from an uploaded CSV.

	Each CSV row is (employee, absent_day_count, overtime_hours). For every
	non-holiday date in [start_date, end_date], an Absent record is created
	while the row's day count has not been exhausted; overtime creates one
	Timesheet. `import pandas` was hoisted out of the per-employee loop.
	"""
	import pandas as pd
	from frappe.utils.file_manager import get_file

	# Fetch the File doc (validates the attachment exists) and its content.
	_file = frappe.get_doc("File", {"file_url": file_url})
	filepath = get_file(file_url)
	pps = read_csv_content(filepath[1])

	for pp in pps:
		frappe.errprint(pp)
		holiday_list = frappe.db.get_value("Employee", {"name": pp[0]},
			["holiday_list"])
		holiday_map = frappe._dict()
		# Renamed from the reused `holiday_list` name: these are the dates.
		holiday_dates = frappe.db.sql(
			'''select holiday_date from `tabHoliday` where parent=%s
			and holiday_date between %s and %s''',
			(holiday_list, start_date, end_date))
		holidays = []
		for holiday in holiday_dates:
			holidays.append(holiday[0])

		total_days = pd.date_range(start_date, end_date).tolist()
		day = 1
		for days in total_days:
			date = days.date()
			if date not in holidays:
				frappe.errprint(date)
				# Mark Absent until the row's absent-day count is used up.
				if int(pp[1]) >= day:
					attendance = frappe.new_doc("Attendance")
					attendance.update({
						"employee": pp[0],
						"attendance_date": date,
						"status": "Absent"
					}).save(ignore_permissions=True)
					attendance.submit()
					frappe.db.commit()
				day = day + 1

		if float(pp[2]) >= 1:
			ts = frappe.new_doc("Timesheet")
			ts.employee = pp[0]
			ts.append(
				"time_logs", {
					"activity_type": "Overtime",
					"from_time": start_date,
					"hours": float(pp[2])
				})
			ts.save(ignore_permissions=True)
			ts.submit()
			frappe.db.commit()
def update_from_csv(filename):
	"""Look up Salary Slips for each employee listed in the CSV and print them.

	NOTE(review): the posting date is hard-coded to 2019-05-02 — confirm
	whether it should come from the CSV (pp[1] looks like a date column).
	"""
	from frappe.utils.csvutils import read_csv_content
	from frappe.utils.file_manager import get_file

	# Fetch the File doc (validates the attachment exists) and its content.
	_file = frappe.get_doc("File", {"file_name": filename})
	filepath = get_file(filename)
	pps = read_csv_content(filepath[1])

	for pp in pps:
		ss = frappe.get_value("Salary Slip",
			{"employee": pp[0], "posting_date": '2019-05-02'})
		# Fixed: was a Python 2 `print` statement (SyntaxError on py3).
		print(ss)
def pivot_to_report(pivot):
	"""Convert a pandas pivot table into (columns, rows) for a frappe report.

	Index levels become Varchar columns; pivot value columns become Int
	columns. Rows are obtained by round-tripping through CSV.
	"""
	from frappe.utils.csvutils import read_csv_content

	index_columns = [
		dict(label=name, fieldname=name, fieldtype="Varchar", width=120)
		for name in pivot.index.names
	]
	value_columns = [
		dict(label=col, fieldname=col, fieldtype="Int", width=90)
		for col in pivot.columns
	]
	parsed = read_csv_content(pivot.to_csv())
	# Skip the header row of the serialized pivot.
	return index_columns + value_columns, parsed[1:]
def test_import_with_children(self):
	"""Append an Event row to an all-doctypes template and import it."""
	exporter.get_template("Event", all_doctypes="Yes", with_data="No")
	rows = read_csv_content(frappe.response.result)

	# Build a blank row the same width as the column row, then fill it in.
	blank_row = [None] * len(rows[-2])
	rows.append(blank_row)
	blank_row[2] = "__Test Event with children"
	blank_row[3] = "Private"
	blank_row[4] = "2014-01-01 10:00:00.000000"

	importer.upload(rows)
	ev = frappe.get_doc("Event", {"subject": "__Test Event with children"})
def get_csv_contents(files_path):
	"""Group parsed chart-of-accounts CSVs by their file-type prefix.

	Returns {file_type: [parsed_rows, ...]} for files whose basename starts
	with one of the known prefixes and ends with .csv. Fixed the Python 2
	`except Exception, e` syntax (a SyntaxError on py3) and added the
	missing return of the accumulated dict.
	"""
	csv_content = {}
	for filepath in files_path:
		fname = os.path.basename(filepath)
		for file_type in ["account.account.template", "account.account.type",
				"account.chart.template"]:
			if fname.startswith(file_type) and fname.endswith(".csv"):
				with open(filepath, "r") as csvfile:
					try:
						csv_content.setdefault(file_type, []).append(
							read_csv_content(csvfile.read()))
					except Exception:
						# Best-effort: skip files that fail to parse.
						continue
	return csv_content
def readfile(file_url, data_format, fcontent=None, filepath=None):
	"""Read an attached file as a list of rows (header row first).

	`data_format` selects the parser: XLSX (delegated), CSV, JSON or XML.
	Returns a list of lists of strings, or None if nothing was parsed.
	NOTE(review): if data_format matches none of the four branches,
	`ext_rows` is never assigned and the final `if` raises NameError.
	"""
	if data_format == "XLSX":
		ext_rows = read_xlsx_file_from_attached_file(file_url, fcontent, filepath)
	else:
		file_att = frappe.get_doc("File", {"file_url": file_url})
		filename = file_att.get_full_path()
		if data_format == "CSV":
			with open(filename, "r") as infile:
				ext_rows = read_csv_content(infile.read())
		if data_format == "JSON":
			with open(filename, 'r') as infile:
				try:
					# Ad-hoc parsing: strip brackets and split the array of
					# objects on ",{" boundaries, then literal_eval each one.
					# Assumes a flat JSON array of flat objects — TODO confirm.
					aa = str(infile.read())
					aa = aa.replace("[", "").replace("]", "").replace(",{", "#{").split("#")
					ext_rows = []
					ext_rows.append([])
					j = 1
					# Row j holds the values of object j-1, stringified.
					while j <= len(aa):
						bb = ast.literal_eval(aa[j - 1])
						ext_rows.append([])
						for x in bb.values():
							ext_rows[j].append(str(x))
						j += 1
					# Row 0 is the header: the keys of the first object.
					bb = ast.literal_eval(aa[0])
					for x, y in bb.items():
						ext_rows[0].append(str(x))
				except ValueError:
					print("bad json: {0}".format(file_url))
					raise
		if data_format == "XML":
			with open(filename, 'r') as infile:
				try:
					tree = ET.parse(infile)
					root = tree.getroot()
					ext_rows = []
					ext_rows.append([])
					# Header: tag names of the first record's children.
					for child in root[0]:
						ext_rows[0].append(str(child.tag))
					i = 0
					# One row per record, values in document order.
					for child in root:
						ext_rows.append([])
						i += 1
						for subchild in child:
							ext_rows[i].append(str(subchild.text))
				except ValueError:
					print("bad xml: {0}".format(file_url))
					raise
	if ext_rows:
		# Normalize a bare (non-list) result into a single-element list.
		if not isinstance(ext_rows, list):
			ext_rows = [ext_rows]
		return ext_rows
def test_import_with_children(self):
	"""Queue-import an Event with a child role row and verify the role."""
	exporter.get_template("Event", all_doctypes="Yes", with_data="No")
	rows = read_csv_content(frappe.response.result)

	rows.append([None] * len(rows[-2]))
	new_row = rows[-1]
	new_row[2] = "__Test Event"
	new_row[3] = "Private"
	new_row[4] = "2014-01-01 10:00:00.000000"
	# Row 15 is the column-name row; locate the child "role" column in it.
	new_row[rows[15].index("role")] = "System Manager"

	importer.upload.queue(rows)

	ev = frappe.get_doc("Event", {"subject": "__Test Event"})
	self.assertTrue("System Manager" in [d.role for d in ev.roles])
def test_import_with_children(self):
    """Fill the child-table 'person' column of a new Event row and verify
    the imported Event lists that participant."""
    exporter.get_template("Event", all_doctypes="Yes", with_data="No")
    template_rows = read_csv_content(frappe.response.result)

    # build one data row, sized to match the template's column row
    data_row = [None] * len(template_rows[-2])
    data_row[2] = "__Test Event"
    data_row[3] = "Private"
    data_row[4] = "2014-01-01 10:00:00.000000"
    data_row[template_rows[15].index("person")] = "Administrator"
    template_rows.append(data_row)

    importer.upload(template_rows)

    event = frappe.get_doc("Event", {"subject": "__Test Event"})
    self.assertTrue(any(child.person == "Administrator" for child in event.event_individuals))
def test_import_with_children(self):  #pylint: disable=R0201
    """Export the Event template with child tables, append one parent
    row, import it, and confirm the Event exists afterwards."""
    if frappe.db.exists("Event", "EV00001"):
        frappe.delete_doc("Event", "EV00001")

    exporter.export_data("Event", all_doctypes="Yes", template=True)
    rows = read_csv_content(frappe.response.result)

    event_row = [None] * len(rows[-2])
    event_row[1] = "__Test Event with children"
    event_row[2] = "Private"
    event_row[3] = "2014-01-01 10:00:00.000000"
    rows.append(event_row)

    importer.upload(rows)

    # raises DoesNotExistError if the import failed
    frappe.get_doc("Event", {"subject": "__Test Event with children"})
def _bulk_rename(context, doctype, path):
    "Rename multiple records via CSV file"
    from frappe.model.rename_doc import bulk_rename
    from frappe.utils.csvutils import read_csv_content

    site = get_single_site(context)

    # parse the rename map (old name -> new name) before touching the site
    with open(path, 'r') as rename_file:
        rename_rows = read_csv_content(rename_file.read())

    frappe.init(site=site)
    frappe.connect()
    bulk_rename(doctype, rename_rows, via_console=True)
    frappe.destroy()
def upload_bank_statement():
    """Read the uploaded bank statement (CSV or XLSX) and return it as
    ``{"columns": <header row>, "data": <remaining rows>}``.

    Raises a frappe exception for unsupported file extensions.
    """
    if getattr(frappe, "uploaded_file", None):
        # test hook: a path was stashed on the frappe local object
        with open(frappe.uploaded_file, "rb") as upfile:
            fcontent = upfile.read()
        # BUGFIX: fname was never assigned in this branch, so the
        # extension checks below raised NameError
        fname = frappe.uploaded_file
    else:
        from frappe.utils.file_manager import get_uploaded_content
        fname, fcontent = get_uploaded_content()

    if frappe.safe_encode(fname).lower().endswith("csv"):
        from frappe.utils.csvutils import read_csv_content
        rows = read_csv_content(fcontent, False)
    elif frappe.safe_encode(fname).lower().endswith("xlsx"):
        from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
        rows = read_xlsx_file_from_attached_file(fcontent=fcontent)
    else:
        # BUGFIX: previously `rows` was left unbound here, crashing with
        # UnboundLocalError instead of a user-facing message
        frappe.throw("Unsupported File Format. Please upload a .csv or .xlsx file.")

    columns = rows[0]
    rows.pop(0)
    data = rows
    return {"columns": columns, "data": data}
def execute():
    """One-off import patch: load the iSPIRT contact CSV and rebuild the
    Category / Person / Company tables from it.

    Uses the module-level ``columns`` mapping (column name -> CSV index).
    Fixed: Python 2 print statements replaced with print() calls.
    """
    with open("/Users/rmehta/Downloads/ispirt.csv", "r") as f:
        source = read_csv_content(f.read())

    # wipe existing records before re-importing
    frappe.db.sql("delete from tabCategory")
    frappe.db.sql("delete from tabPerson")
    frappe.db.sql("delete from tabCompany")
    frappe.db.sql("delete from `tabCompany Person`")

    for s in source[1:]:
        row = frappe._dict({})
        for key in columns:
            row[key] = s[columns[key]]
            # normalize placeholder values to None
            if row[key] and row[key].lower() in ("-na-", "-not available-", "category"):
                row[key] = None

        if row.category and not frappe.db.exists("Category", row.category):
            print("adding Category " + row.category)
            frappe.get_doc({"doctype": "Category", "name": row.category}).insert()

        if row.company_name:
            if not frappe.db.exists("Company", row.company_name):
                print("adding Company " + row.company_name)
                frappe.get_doc({"doctype": "Company", "title": row.company_name,
                    "website": row.website, "email_id": row.email,
                    "category": row.category, "source": row.source,
                    "city": row.city, "phone": row.phone}).insert()

        if row.email:
            if not frappe.db.exists("Person", row.email):
                print("adding Person " + row.email)
                frappe.get_doc({"doctype": "Person", "first_name": row.first_name,
                    "last_name": row.last_name, "email_id": row.email,
                    "phone": row.phone, "current_company": row.company_name}).insert()
            # link the person to their company as a Founder
            company = frappe.get_doc("Company", row.company_name)
            company.append("people", {"person": row.email, "person_role": "Founder"})
            company.save()
def test_export_with_data(self):
    """Exporting the User template with data should include the doctype
    name row and the quoted Administrator record."""
    exporter.export_data("User", all_doctypes=True, template=True, with_data=True)
    rows = read_csv_content(frappe.response.result)
    # NOTE(review): assertTrue's second argument is a failure message,
    # not an expected value — this only checks truthiness (kept as-is)
    self.assertTrue(rows[1][1], "User")
    second_cells = [r[1] for r in rows if len(r) > 1]
    self.assertTrue('"Administrator"' in second_cells)
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None, update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No", skip_errors = True):
    """upload data

    Legacy data-import entry point: parse CSV/XLSX rows in the data-import
    template format and insert/update documents.

    rows                -- pre-parsed rows; when None the uploaded file is read
    submit_after_import -- submit each doc after insert (submittable doctypes only)
    overwrite           -- update existing docs matched by "name"
    update_only         -- never insert; only update existing docs
    pre_process         -- optional callable applied to each parsed doc dict
    via_console         -- print log messages instead of collecting them

    Returns {"messages": [...], "error": bool}. Rolls back the whole
    transaction if any row errored (and skip_errors is off).
    """
    frappe.flags.in_import = True
    # extra input params
    params = json.loads(frappe.form_dict.get("params") or '{}')
    if params.get("submit_after_import"):
        submit_after_import = True
    if params.get("ignore_encoding_errors"):
        ignore_encoding_errors = True
    if not params.get("no_email"):
        no_email = False
    if params.get('update_only'):
        update_only = True
    if params.get('from_data_import'):
        from_data_import = params.get('from_data_import')
    if not params.get('skip_errors'):
        skip_errors = params.get('skip_errors')

    frappe.flags.mute_emails = no_email

    def get_data_keys_definition():
        # template marker strings ("Data Import Template", separators, etc.)
        return get_data_keys()

    def bad_template():
        frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))

    def check_data_length():
        max_rows = 5000
        if not data:
            frappe.throw(_("No data found"))
        elif not via_console and len(data) > max_rows:
            frappe.throw(_("Only allowed {0} rows in one import").format(max_rows))

    def get_start_row():
        # first row after the data separator marker
        for i, row in enumerate(rows):
            if row and row[0]==get_data_keys_definition().data_separator:
                return i+1
        bad_template()

    def get_header_row(key):
        return get_header_row_and_idx(key)[0]

    def get_header_row_and_idx(key):
        # locate a header row by its first-cell key; ([], -1) if absent
        for i, row in enumerate(header):
            if row and row[0]==key:
                return row, i
        return [], -1

    def filter_empty_columns(columns):
        empty_cols = list(filter(lambda x: x in ("", None), columns))
        if empty_cols:
            if columns[-1*len(empty_cols):] == empty_cols:
                # filter empty columns if they exist at the end
                columns = columns[:-1*len(empty_cols)]
            else:
                frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
                    raise_exception=1)
        return columns

    def make_column_map():
        # build (doctype, parentfield) -> {column index: fieldname/fieldtype}
        # from the "DocType:" header row plus the two rows below it
        doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
        if row_idx == -1:
            # old style
            return

        dt = None
        for i, d in enumerate(doctype_row[1:]):
            if d not in ("~", "-"):
                # a new doctype section starts where the previous cell is a
                # separator/blank ("DocType:" marks the very first section)
                if d and doctype_row[i] in (None, '' ,'~', '-', 'DocType:'):
                    dt, parentfield = d, None
                    # xls format truncates the row, so it may not have more columns
                    if len(doctype_row) > i+2:
                        parentfield = doctype_row[i+2]
                    doctypes.append((dt, parentfield))
                    column_idx_to_fieldname[(dt, parentfield)] = {}
                    column_idx_to_fieldtype[(dt, parentfield)] = {}
                if dt:
                    # fieldname row is 2 below, fieldtype row is 4 below
                    column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
                    column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]

    def get_doc(start_idx):
        # assemble one parent doc dict (plus child rows from the following
        # child-only rows) starting at start_idx
        if doctypes:
            doc = {}
            for idx in range(start_idx, len(rows)):
                if (not doc) or main_doc_empty(rows[idx]):
                    for dt, parentfield in doctypes:
                        d = {}
                        for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
                            try:
                                fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
                                fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]

                                d[fieldname] = rows[idx][column_idx]
                                if fieldtype in ("Int", "Check"):
                                    d[fieldname] = cint(d[fieldname])
                                elif fieldtype in ("Float", "Currency", "Percent"):
                                    d[fieldname] = flt(d[fieldname])
                                elif fieldtype == "Date":
                                    if d[fieldname] and isinstance(d[fieldname], string_types):
                                        d[fieldname] = getdate(parse_date(d[fieldname]))
                                elif fieldtype == "Datetime":
                                    if d[fieldname]:
                                        if " " in d[fieldname]:
                                            _date, _time = d[fieldname].split()
                                        else:
                                            _date, _time = d[fieldname], '00:00:00'
                                        _date = parse_date(d[fieldname])
                                        d[fieldname] = get_datetime(_date + " " + _time)
                                    else:
                                        d[fieldname] = None
                                elif fieldtype in ("Image", "Attach Image", "Attach"):
                                    # added file to attachments list
                                    attachments.append(d[fieldname])
                                elif fieldtype in ("Link", "Dynamic Link") and d[fieldname]:
                                    # as fields can be saved in the number format(long type) in data import template
                                    d[fieldname] = cstr(d[fieldname])
                            except IndexError:
                                pass

                        # scrub quotes from name and modified
                        if d.get("name") and d["name"].startswith('"'):
                            d["name"] = d["name"][1:-1]

                        if sum([0 if not val else 1 for val in d.values()]):
                            d['doctype'] = dt
                            if dt == doctype:
                                doc.update(d)
                            else:
                                if not overwrite:
                                    d['parent'] = doc["name"]
                                d['parenttype'] = doctype
                                d['parentfield'] = parentfield
                                doc.setdefault(d['parentfield'], []).append(d)
                else:
                    break

            return doc
        else:
            doc = frappe._dict(zip(columns, rows[start_idx][1:]))
            doc['doctype'] = doctype
            return doc

    def main_doc_empty(row):
        # a row with nothing in columns 1 and 2 is a child-only/blank row
        return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2])))

    users = frappe.db.sql_list("select name from tabUser")
    def prepare_for_insert(doc):
        # don't block data import if user is not set
        # migrating from another system
        if not doc.owner in users:
            doc.owner = frappe.session.user
        if not doc.modified_by in users:
            doc.modified_by = frappe.session.user

    def is_valid_url(url):
        # relative site URLs are resolved before the HEAD-style check
        is_valid = False
        if url.startswith("/files") or url.startswith("/private/files"):
            url = get_url(url)
        try:
            r = requests.get(url)
            is_valid = True if r.status_code == 200 else False
        except Exception:
            pass
        return is_valid

    def attach_file_to_doc(doctype, docname, file_url):
        # check if attachment is already available
        # check if the attachement link is relative or not
        if not file_url:
            return
        if not is_valid_url(file_url):
            return

        files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
            doctype=doctype,
            docname=docname,
            file_url=file_url
        ))

        if files:
            # file is already attached
            return

        save_url(file_url, None, doctype, docname, "Home/Attachments", 0)

    # header
    if not rows:
        # no pre-parsed rows: read the uploaded Data Import file itself
        from frappe.utils.file_manager import get_file_doc
        file_doc = get_file_doc(dt='', dn="Data Import", folder='Home', is_private=1)
        filename, file_extension = os.path.splitext(file_doc.file_name)

        if file_extension == '.xlsx' and from_data_import == 'Yes':
            from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
            rows = read_xlsx_file_from_attached_file(file_id=file_doc.name)
        elif file_extension == '.csv':
            from frappe.utils.file_manager import get_file
            from frappe.utils.csvutils import read_csv_content
            fname, fcontent = get_file(file_doc.name)
            rows = read_csv_content(fcontent, ignore_encoding_errors)
        else:
            frappe.throw(_("Unsupported File Format"))

    start_row = get_start_row()
    header = rows[:start_row]
    data = rows[start_row:]
    doctype = get_header_row(get_data_keys_definition().main_table)[1]
    columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
    doctypes = []
    column_idx_to_fieldname = {}
    column_idx_to_fieldtype = {}
    attachments = []

    if submit_after_import and not cint(frappe.db.get_value("DocType", doctype, "is_submittable")):
        submit_after_import = False

    parenttype = get_header_row(get_data_keys_definition().parent_table)

    if len(parenttype) > 1:
        parenttype = parenttype[1]

    # check permissions
    if not frappe.permissions.can_import(parenttype or doctype):
        frappe.flags.mute_emails = False
        return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

    # allow limit rows to be uploaded
    check_data_length()
    make_column_map()

    if overwrite==None:
        overwrite = params.get('overwrite')

    # delete child rows (if parenttype)
    parentfield = None
    if parenttype:
        parentfield = get_parent_field(doctype, parenttype)

        if overwrite:
            delete_child_rows(data, doctype)

    ret = []

    def log(msg):
        if via_console:
            print(msg.encode('utf-8'))
        else:
            ret.append(msg)

    def as_link(doctype, name):
        if via_console:
            return "{0}: {1}".format(doctype, name)
        else:
            return getlink(doctype, name)

    error = False
    total = len(data)
    for i, row in enumerate(data):
        # bypass empty rows
        if main_doc_empty(row):
            continue

        row_idx = i + start_row
        doc = None

        # publish task_update
        frappe.publish_realtime("data_import_progress", {"progress": [i, total]},
            user=frappe.session.user)

        try:
            doc = get_doc(row_idx)
            if pre_process:
                pre_process(doc)

            if parentfield:
                # importing only child rows: append to the existing parent
                parent = frappe.get_doc(parenttype, doc["parent"])
                doc = parent.append(parentfield, doc)
                parent.save()
                log('Inserted row for %s at #%s' % (as_link(parenttype, doc.parent),text_type(doc.idx)))
            else:
                if overwrite and doc["name"] and frappe.db.exists(doctype, doc["name"]):
                    original = frappe.get_doc(doctype, doc["name"])
                    original_name = original.name
                    original.update(doc)
                    # preserve original name for case sensitivity
                    original.name = original_name
                    original.flags.ignore_links = ignore_links
                    original.save()
                    log('Updated row (#%d) %s' % (row_idx + 1, as_link(original.doctype, original.name)))
                    doc = original
                else:
                    if not update_only:
                        doc = frappe.get_doc(doc)
                        prepare_for_insert(doc)
                        doc.flags.ignore_links = ignore_links
                        doc.insert()
                        log('Inserted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name)))
                    else:
                        log('Ignored row (#%d) %s' % (row_idx + 1, row[1]))
                if attachments:
                    # check file url and create a File document
                    for file_url in attachments:
                        attach_file_to_doc(doc.doctype, doc.name, file_url)

            if submit_after_import:
                doc.submit()
                log('Submitted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name)))
        except Exception as e:
            if not skip_errors:
                error = True
                if doc:
                    frappe.errprint(doc if isinstance(doc, dict) else doc.as_dict())
                err_msg = frappe.local.message_log and "\n\n".join(frappe.local.message_log) or cstr(e)
                log('Error for row (#%d) %s : %s' % (row_idx + 1,
                    len(row)>1 and row[1] or "", err_msg))
                frappe.errprint(frappe.get_traceback())
        finally:
            frappe.local.message_log = []

    # all-or-nothing: any error rolls back every row imported in this call
    if error:
        frappe.db.rollback()
    else:
        frappe.db.commit()

    frappe.flags.mute_emails = False
    frappe.flags.in_import = False

    return {"messages": ret, "error": error}
def test_export(self):
    """The exported User template should carry the doctype name in its
    second row."""
    exporter.export_data("User", all_doctypes=True, template=True)
    rows = read_csv_content(frappe.response.result)
    # assertTrue's second argument is a failure message; truthiness only
    self.assertTrue(rows[1][1], "User")
def test_export_with_data(self):
    """Template export with data should contain the Administrator user."""
    exporter.get_template("User", all_doctypes="No", with_data="Yes")
    rows = read_csv_content(frappe.response.result)
    # assertTrue's second argument is a failure message; truthiness only
    self.assertTrue(rows[1][1], "User")
    second_cells = [r[1] for r in rows if len(r) > 1]
    self.assertTrue("Administrator" in second_cells)
def test_export(self):
    """The exported User template should carry the doctype name in its
    second row."""
    exporter.get_template("User", all_doctypes="No", with_data="No")
    rows = read_csv_content(frappe.response.result)
    # assertTrue's second argument is a failure message; truthiness only
    self.assertTrue(rows[1][1], "User")
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
    """Read the CSV file at *path* and hand its parsed rows to upload()."""
    from frappe.utils.csvutils import read_csv_content

    print("Importing " + path)
    with open(path, "r") as infile:
        csv_rows = read_csv_content(infile.read())

    upload(rows=csv_rows, ignore_links=ignore_links, no_email=no_email,
        overwrite=overwrite, submit_after_import=submit, pre_process=pre_process)
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None, update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No", skip_errors = True, data_import_doc=None, validate_template=False, user=None):
    """upload data

    Data Import entry point: parse template rows (CSV/XLSX) and
    insert/update documents, in batches, while logging per-row results on
    the optional Data Import document.

    rows              -- pre-parsed rows; when None they are read from
                         data_import_doc.import_file
    data_import_doc   -- "Data Import" doc (or its name) driving settings,
                         progress updates and the final status/log
    validate_template -- only validate headers / count rows and return True
    skip_errors       -- keep going on row errors and collect the failed
                         rows into a downloadable error file
    user              -- if given, log messages use this user's language

    Returns {"messages": [...], "error": bool} when no data_import_doc is
    given; otherwise results are written onto the Data Import document.
    """
    # for translations
    if user:
        frappe.cache().hdel("lang", user)
        frappe.set_user_lang(user)

    if data_import_doc and isinstance(data_import_doc, string_types):
        data_import_doc = frappe.get_doc("Data Import", data_import_doc)
    if data_import_doc and from_data_import == "Yes":
        # settings come from the Data Import document itself
        no_email = data_import_doc.no_email
        ignore_encoding_errors = data_import_doc.ignore_encoding_errors
        update_only = data_import_doc.only_update
        submit_after_import = data_import_doc.submit_after_import
        overwrite = data_import_doc.overwrite
        skip_errors = data_import_doc.skip_errors
    else:
        # extra input params
        params = json.loads(frappe.form_dict.get("params") or '{}')
        if params.get("submit_after_import"):
            submit_after_import = True
        if params.get("ignore_encoding_errors"):
            ignore_encoding_errors = True
        if not params.get("no_email"):
            no_email = False
        if params.get('update_only'):
            update_only = True
        if params.get('from_data_import'):
            from_data_import = params.get('from_data_import')
        if not params.get('skip_errors'):
            skip_errors = params.get('skip_errors')

    frappe.flags.in_import = True
    frappe.flags.mute_emails = no_email

    def get_data_keys_definition():
        # template marker strings (separators, header keys, etc.)
        return get_data_keys()

    def bad_template():
        frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))

    def check_data_length():
        if not data:
            frappe.throw(_("No data found in the file. Please reattach the new file with data."))

    def get_start_row():
        # first row after the data separator marker
        for i, row in enumerate(rows):
            if row and row[0]==get_data_keys_definition().data_separator:
                return i+1
        bad_template()

    def get_header_row(key):
        return get_header_row_and_idx(key)[0]

    def get_header_row_and_idx(key):
        # locate a header row by its first-cell key; ([], -1) if absent
        for i, row in enumerate(header):
            if row and row[0]==key:
                return row, i
        return [], -1

    def filter_empty_columns(columns):
        empty_cols = list(filter(lambda x: x in ("", None), columns))

        if empty_cols:
            if columns[-1*len(empty_cols):] == empty_cols:
                # filter empty columns if they exist at the end
                columns = columns[:-1*len(empty_cols)]
            else:
                frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
                    raise_exception=1)

        return columns

    def make_column_map():
        # build (doctype, parentfield) -> {column index: fieldname/fieldtype}
        # from the translated "DocType:" header row plus the rows below it
        doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
        if row_idx == -1:
            # old style
            return

        dt = None
        for i, d in enumerate(doctype_row[1:]):
            if d not in ("~", "-"):
                # a new doctype section starts where the previous cell is a
                # separator/blank (or the translated "DocType:" marker)
                if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
                    dt, parentfield = d, None
                    # xls format truncates the row, so it may not have more columns
                    if len(doctype_row) > i+2:
                        parentfield = doctype_row[i+2]
                    doctypes.append((dt, parentfield))
                    column_idx_to_fieldname[(dt, parentfield)] = {}
                    column_idx_to_fieldtype[(dt, parentfield)] = {}
                if dt:
                    # fieldname row is 2 below, fieldtype row is 4 below
                    column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
                    column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]

    def get_doc(start_idx):
        # assemble one parent doc dict (plus child rows from the following
        # child-only rows); returns (doc, attachments, last consumed index)
        if doctypes:
            doc = {}

            attachments = []
            last_error_row_idx = None
            for idx in range(start_idx, len(rows)):
                last_error_row_idx = idx	# pylint: disable=W0612
                if (not doc) or main_doc_empty(rows[idx]):
                    for dt, parentfield in doctypes:
                        d = {}
                        for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
                            try:
                                fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
                                fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]

                                if not fieldname or not rows[idx][column_idx]:
                                    continue

                                d[fieldname] = rows[idx][column_idx]
                                if fieldtype in ("Int", "Check"):
                                    d[fieldname] = cint(d[fieldname])
                                elif fieldtype in ("Float", "Currency", "Percent"):
                                    d[fieldname] = flt(d[fieldname])
                                elif fieldtype == "Date":
                                    if d[fieldname] and isinstance(d[fieldname], string_types):
                                        d[fieldname] = getdate(parse_date(d[fieldname]))
                                elif fieldtype == "Datetime":
                                    if d[fieldname]:
                                        if " " in d[fieldname]:
                                            _date, _time = d[fieldname].split()
                                        else:
                                            _date, _time = d[fieldname], '00:00:00'
                                        _date = parse_date(d[fieldname])
                                        d[fieldname] = get_datetime(_date + " " + _time)
                                    else:
                                        d[fieldname] = None
                                elif fieldtype in ("Image", "Attach Image", "Attach"):
                                    # added file to attachments list
                                    attachments.append(d[fieldname])
                                elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
                                    # as fields can be saved in the number format(long type) in data import template
                                    d[fieldname] = cstr(d[fieldname])
                            except IndexError:
                                pass

                        # scrub quotes from name and modified
                        if d.get("name") and d["name"].startswith('"'):
                            d["name"] = d["name"][1:-1]

                        if sum([0 if not val else 1 for val in d.values()]):
                            d['doctype'] = dt
                            if dt == doctype:
                                doc.update(d)
                            else:
                                if not overwrite and doc.get("name"):
                                    d['parent'] = doc["name"]
                                d['parenttype'] = doctype
                                d['parentfield'] = parentfield
                                doc.setdefault(d['parentfield'], []).append(d)
                else:
                    break

            return doc, attachments, last_error_row_idx
        else:
            doc = frappe._dict(zip(columns, rows[start_idx][1:]))
            doc['doctype'] = doctype
            return doc, [], None

    # used in testing whether a row is empty or parent row or child row
    # checked only 3 first columns since first two columns can be blank for example the case of
    # importing the item variant where item code and item name will be blank.
    def main_doc_empty(row):
        if row:
            for i in range(3,0,-1):
                if len(row) > i and row[i]:
                    return False
        return True

    def validate_naming(doc):
        # when the doctype autonames from a field or a naming series, that
        # field must be present in the imported row
        autoname = frappe.get_meta(doctype).autoname
        if autoname:
            if autoname[0:5] == 'field':
                autoname = autoname[6:]
            elif autoname == 'naming_series:':
                autoname = 'naming_series'
            else:
                return True

            if (autoname not in doc) or (not doc[autoname]):
                from frappe.model.base_document import get_controller
                if not hasattr(get_controller(doctype), "autoname"):
                    frappe.throw(_("{0} is a mandatory field".format(autoname)))
        return True

    users = frappe.db.sql_list("select name from tabUser")
    def prepare_for_insert(doc):
        # don't block data import if user is not set
        # migrating from another system
        if not doc.owner in users:
            doc.owner = frappe.session.user
        if not doc.modified_by in users:
            doc.modified_by = frappe.session.user

    def is_valid_url(url):
        # relative site URLs are resolved before the reachability check
        is_valid = False
        if url.startswith("/files") or url.startswith("/private/files"):
            url = get_url(url)

        try:
            r = requests.get(url)
            is_valid = True if r.status_code == 200 else False
        except Exception:
            pass

        return is_valid

    def attach_file_to_doc(doctype, docname, file_url):
        # check if attachment is already available
        # check if the attachement link is relative or not
        if not file_url:
            return
        if not is_valid_url(file_url):
            return

        files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
            doctype=doctype,
            docname=docname,
            file_url=file_url
        ))

        if files:
            # file is already attached
            return

        save_url(file_url, None, doctype, docname, "Home/Attachments", 0)

    # header
    filename, file_extension = ['','']
    if not rows:
        # no pre-parsed rows: read the Data Import document's attached file
        from frappe.utils.file_manager import get_file # get_file_doc
        fname, fcontent = get_file(data_import_doc.import_file)
        filename, file_extension = os.path.splitext(fname)

        if file_extension == '.xlsx' and from_data_import == 'Yes':
            from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
            rows = read_xlsx_file_from_attached_file(file_id=data_import_doc.import_file)
        elif file_extension == '.csv':
            from frappe.utils.csvutils import read_csv_content
            rows = read_csv_content(fcontent, ignore_encoding_errors)
        else:
            frappe.throw(_("Unsupported File Format"))

    start_row = get_start_row()
    header = rows[:start_row]
    data = rows[start_row:]
    try:
        doctype = get_header_row(get_data_keys_definition().main_table)[1]
        columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
    except:
        frappe.throw(_("Cannot change header content"))
    doctypes = []
    column_idx_to_fieldname = {}
    column_idx_to_fieldtype = {}

    if skip_errors:
        # error file starts with the template header; failed rows appended
        data_rows_with_error = header

    if submit_after_import and not cint(frappe.db.get_value("DocType", doctype, "is_submittable")):
        submit_after_import = False

    parenttype = get_header_row(get_data_keys_definition().parent_table)

    if len(parenttype) > 1:
        parenttype = parenttype[1]

    # check permissions
    if not frappe.permissions.can_import(parenttype or doctype):
        frappe.flags.mute_emails = False
        return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

    # Throw exception in case of the empty data file
    check_data_length()
    make_column_map()
    total = len(data)

    if validate_template:
        # template-validation-only mode: record the row count and stop
        if total:
            data_import_doc.total_rows = total
        return True

    if overwrite==None:
        overwrite = params.get('overwrite')

    # delete child rows (if parenttype)
    parentfield = None
    if parenttype:
        parentfield = get_parent_field(doctype, parenttype)

        if overwrite:
            delete_child_rows(data, doctype)

    import_log = []
    def log(**kwargs):
        if via_console:
            print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
        else:
            import_log.append(kwargs)

    def as_link(doctype, name):
        if via_console:
            return "{0}: {1}".format(doctype, name)
        else:
            return getlink(doctype, name)

    # publish realtime task update
    def publish_progress(achieved, reload=False):
        if data_import_doc:
            frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
                "data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)

    error_flag = rollback_flag = False
    batch_size = frappe.conf.data_import_batch_size or 1000

    for batch_start in range(0, total, batch_size):
        batch = data[batch_start:batch_start + batch_size]

        for i, row in enumerate(batch):
            # bypass empty rows
            if main_doc_empty(row):
                continue

            row_idx = i + start_row
            doc = None

            publish_progress(i)

            try:
                doc, attachments, last_error_row_idx = get_doc(row_idx)
                validate_naming(doc)
                if pre_process:
                    pre_process(doc)

                original = None
                if parentfield:
                    # importing only child rows: append to the existing parent
                    parent = frappe.get_doc(parenttype, doc["parent"])
                    doc = parent.append(parentfield, doc)
                    parent.save()
                else:
                    if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
                        original = frappe.get_doc(doctype, doc["name"])
                        original_name = original.name
                        original.update(doc)
                        # preserve original name for case sensitivity
                        original.name = original_name
                        original.flags.ignore_links = ignore_links
                        original.save()
                        doc = original
                    else:
                        if not update_only:
                            doc = frappe.get_doc(doc)
                            prepare_for_insert(doc)
                            doc.flags.ignore_links = ignore_links
                            doc.insert()
                    if attachments:
                        # check file url and create a File document
                        for file_url in attachments:
                            attach_file_to_doc(doc.doctype, doc.name, file_url)
                if submit_after_import:
                    doc.submit()

                # log errors
                if parentfield:
                    log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
                        "link": get_url_to_form(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
                elif submit_after_import:
                    log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
                        "message": "Document successfully submitted", "link": get_url_to_form(doc.doctype, doc.name), "indicator": "blue"})
                elif original:
                    log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
                        "message": "Document successfully updated", "link": get_url_to_form(doc.doctype, doc.name), "indicator": "green"})
                elif not update_only:
                    log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
                        "message": "Document successfully saved", "link": get_url_to_form(doc.doctype, doc.name), "indicator": "green"})
                else:
                    log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
                        "message": "Document updation ignored", "indicator": "orange"})

            except Exception as e:
                error_flag = True

                # build error message
                if frappe.local.message_log:
                    err_msg = "\n".join(['<p class="border-bottom small">{}</p>'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
                else:
                    err_msg = '<p class="border-bottom small">{}</p>'.format(cstr(e))

                error_trace = frappe.get_traceback()
                if error_trace:
                    error_log_doc = frappe.log_error(error_trace)
                    error_link = get_url_to_form("Error Log", error_log_doc.name)
                else:
                    error_link = None

                log(**{
                    "row": row_idx + 1,
                    "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
                    "message": err_msg,
                    "indicator": "red",
                    "link":error_link
                })

                # data with error to create a new file
                # include the errored data in the last row as last_error_row_idx will not be updated for the last row
                if skip_errors:
                    if last_error_row_idx == len(rows)-1:
                        last_error_row_idx = len(rows)
                    data_rows_with_error += rows[row_idx:last_error_row_idx]
                else:
                    rollback_flag = True
            finally:
                frappe.local.message_log = []

        start_row += batch_size

    # rollback only when skip_errors was off and a row failed
    if rollback_flag:
        frappe.db.rollback()
    else:
        frappe.db.commit()

    frappe.flags.mute_emails = False
    frappe.flags.in_import = False

    log_message = {"messages": import_log, "error": error_flag}
    if data_import_doc:
        data_import_doc.log_details = json.dumps(log_message)

        import_status = None
        if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
            import_status = "Partially Successful"
            # write the file with the faulty row
            from frappe.utils.file_manager import save_file
            file_name = 'error_' + filename + file_extension
            if file_extension == '.xlsx':
                from frappe.utils.xlsxutils import make_xlsx
                xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
                file_data = xlsx_file.getvalue()
            else:
                from frappe.utils.csvutils import to_csv
                file_data = to_csv(data_rows_with_error)
            error_data_file = save_file(file_name, file_data, "Data Import", data_import_doc.name, "Home/Attachments")
            data_import_doc.error_file = error_data_file.file_url
        elif error_flag:
            import_status = "Failed"
        else:
            import_status = "Successful"

        data_import_doc.import_status = import_status
        data_import_doc.save()
        if data_import_doc.import_status in ["Successful", "Partially Successful"]:
            data_import_doc.submit()
            publish_progress(100, True)
        else:
            publish_progress(0, True)
        frappe.db.commit()
    else:
        return log_message