def upload():
    """Import "BRS Entries" records from an uploaded CSV file.

    Headers are expected on the 3rd row (index 2); data starts on the
    4th row. Each data row is validated and imported as a submitted doc.

    Returns:
        dict: {"messages": [...], "error": bool-or-messages} for the client.

    Raises:
        frappe.PermissionError: if the user lacks "create" on BRS Entries.
    """
    if not frappe.has_permission("BRS Entries", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file, check_record, import_doc
    from frappe.modules import scrub

    rows = read_csv_content_from_uploaded_file()
    # Materialize as a list: under Python 3, filter() is a lazy iterator,
    # which would break the rows[2] / rows[3:] indexing below.
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    # Column headers live on the 3rd row (index 2).
    columns = [scrub(f) for f in rows[2]]

    ret = []
    error = False
    for i, row in enumerate(rows[3:]):
        if not row:
            continue
        row_idx = i + 3
        d = frappe._dict(zip(columns, row))
        d["doctype"] = "BRS Entries"
        try:
            check_record(d)
            ret.append(import_doc(d, "BRS Entries", 1, row_idx, submit=True))
        except Exception as e:  # was py2-only `except Exception, e`
            error = True
            ret.append('Error for row (#%d) %s : %s' % (row_idx, len(row) > 1 and row[1] or "", cstr(e)))
            frappe.errprint(frappe.get_traceback())
    # BUG FIX: results were built but never returned to the caller.
    return {"messages": ret, "error": error}
def upload():
    """Resolve a CSV of item codes/quantities into Quotation item dicts.

    The first row supplies headers (column 0 forced to "item_code").
    Valid item codes become {"item_code", "qty"} dicts; invalid codes
    are reported in the error list.

    Returns:
        dict: {"messages": [item dicts], "error": [error strings]}.

    Raises:
        frappe.PermissionError: if the user lacks "create" on Quotation.
    """
    if not frappe.has_permission("Quotation", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file
    from frappe.modules import scrub

    rows = read_csv_content_from_uploaded_file()
    # list() needed: py3 filter() is lazy and rows[0] below would fail.
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    columns = [scrub(f) for f in rows[0]]
    columns[0] = "item_code"

    ret = []
    error = []
    start_row = 1
    # (removed unused `total` local)
    for i, row in enumerate(rows[start_row:]):
        row_idx = i + start_row
        d = frappe._dict(zip(columns, row))
        # parameterized query — item_code is user data
        item = frappe.db.sql("""select name from `tabItem` where name = %s and docstatus < 2""", d.item_code, as_dict=1)
        if item:
            newitem = {}
            newitem["item_code"] = item[0].name
            newitem["qty"] = d.quantity
            ret.append(newitem)
        else:
            error.append('Error for row (#%d) %s : Invalid Item Code' % (row_idx, row[0]))
    return {"messages": ret, "error": error}
def upload(select_doctype=None, rows=None):
    """Bulk-rename documents of `select_doctype` from a two-column CSV.

    Each row is (old_name, new_name). Successful renames are committed
    immediately; failures are recorded with the exception repr.

    Args:
        select_doctype: DocType to rename; defaults to the form value.
        rows: pre-parsed CSV rows; read from the upload if None.

    Returns:
        list: HTML-ish log lines describing each rename attempt.

    Raises:
        frappe.PermissionError: if the user lacks "write" on the doctype.
    """
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file
    from frappe.model.rename_doc import rename_doc

    if not select_doctype:
        select_doctype = frappe.form_dict.select_doctype
    if not frappe.has_permission(select_doctype, "write"):
        raise frappe.PermissionError

    if not rows:
        rows = read_csv_content_from_uploaded_file()
    if not rows:
        frappe.throw(_("Please select a valid csv file with data"))

    max_rows = 500
    if len(rows) > max_rows:
        frappe.throw(_("Maximum {0} rows allowed").format(max_rows))

    rename_log = []
    for row in rows:
        # if row has some content
        if len(row) > 1 and row[0] and row[1]:
            try:
                if rename_doc(select_doctype, row[0], row[1]):
                    rename_log.append(_("Successful: ") + row[0] + " -> " + row[1])
                    # commit per-row so earlier renames survive a later failure
                    frappe.db.commit()
                else:
                    rename_log.append(_("Ignored: ") + row[0] + " -> " + row[1])
            except Exception as e:  # was py2-only `except Exception, e`
                rename_log.append("<span style='color: RED'>" +
                    _("Failed: ") + row[0] + " -> " + row[1] + "</span>")
                rename_log.append("<span style='margin-left: 20px;'>" + repr(e) + "</span>")
    # BUG FIX: the log was built but never returned.
    return rename_log
def upload():
    """Import Attendance records from an uploaded CSV.

    Headers are on the 5th row (index 4); column 0 is remapped to "name"
    and column 3 to "att_date". Rows referencing an existing record
    carry its docstatus forward. Rows import as submitted docs.

    Returns:
        dict: {"messages": [...], "error": bool-or-messages}.

    Raises:
        frappe.PermissionError: if the user lacks "create" on Attendance.
    """
    if not frappe.has_permission("Attendance", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file, check_record, import_doc
    from frappe.modules import scrub

    rows = read_csv_content_from_uploaded_file()
    # list() needed: py3 filter() is lazy; rows[4]/rows[5:] would fail.
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    columns = [scrub(f) for f in rows[4]]
    columns[0] = "name"
    columns[3] = "att_date"

    ret = []
    error = False
    for i, row in enumerate(rows[5:]):
        if not row:
            continue
        row_idx = i + 5
        d = frappe._dict(zip(columns, row))
        d["doctype"] = "Attendance"
        if d.name:
            # updating an existing record: keep its current docstatus
            d["docstatus"] = frappe.db.get_value("Attendance", d.name, "docstatus")
        try:
            check_record(d)
            ret.append(import_doc(d, "Attendance", 1, row_idx, submit=True))
        except Exception as e:  # was py2-only `except Exception, e`
            error = True
            ret.append('Error for row (#%d) %s : %s' % (row_idx, len(row) > 1 and row[1] or "", cstr(e)))
    # BUG FIX: results were built but never returned (cf. the sibling
    # Attendance importer in this file, which does return them).
    return {"messages": ret, "error": error}
def import_user_permissions():
    """Import User Permission rows from an uploaded CSV template.

    The file must be the exported template: cell (0,0) reads
    "User Permissions" and cell (1,0) reads "User". Data rows start at
    index 2 as (allow-doctype, user, for-value).
    """
    frappe.only_for("System Manager")
    rows = read_csv_content_from_uploaded_file(ignore_encoding=True)

    # BUG FIX (x2): validate BEFORE clear_default() wipes the existing
    # permissions, and reject when EITHER header cell is wrong — the old
    # `and` only rejected when both were wrong.
    if rows[0][0] != "User Permissions" or rows[1][0] != "User":
        frappe.throw(frappe._("Please upload using the same template as download."))

    clear_default(parenttype="User Permission")

    for row in rows[2:]:
        # columns: (doctype, user, value)
        frappe.permissions.add_user_permission(row[1], row[2], row[0])
def import_user_permissions():
    """Import User Permission rows from an uploaded CSV template.

    The file must be the exported template: cell (0,0) reads
    "User Permissions" and cell (1,0) reads "User". Data rows start at
    index 2 as (allow-doctype, user, for-value).
    """
    frappe.only_for("System Manager")
    rows = read_csv_content_from_uploaded_file(ignore_encoding=True)

    # BUG FIX (x2): validate BEFORE clear_default() wipes the existing
    # permissions, and reject when EITHER header cell is wrong — the old
    # `and` only rejected when both were wrong.
    if rows[0][0] != "User Permissions" or rows[1][0] != "User":
        frappe.throw(
            frappe._("Please upload using the same template as download."))

    clear_default(parenttype="User Permission")

    for row in rows[2:]:
        # columns: (doctype, user, value)
        frappe.permissions.add_user_permission(row[1], row[2], row[0])
def upload():
    """Import Attendance records from an uploaded CSV.

    Headers are on the 5th row (index 4); column 0 is remapped to "name"
    and column 3 to "attendance_date". For existing records, several
    fields (docstatus, in/out times, overtime, OT rate) are carried over
    from the database so the import does not clobber them. Commits on
    full success, rolls back if any row failed.

    Returns:
        dict: {"messages": [...], "error": bool-or-messages}.

    Raises:
        frappe.PermissionError: if the user lacks "create" on Attendance.
    """
    if not frappe.has_permission("Attendance", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file, check_record, import_doc
    from frappe.modules import scrub

    rows = read_csv_content_from_uploaded_file()
    # BUG FIX: this function already targets Python 3 (`except ... as e`),
    # but `filter()` returns a lazy iterator there, so `if not rows` was
    # always False and `rows[4]` raised TypeError. Build a real list.
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    columns = [scrub(f) for f in rows[4]]
    columns[0] = "name"
    columns[3] = "attendance_date"

    ret = []
    error = False
    for i, row in enumerate(rows[5:]):
        if not row:
            continue
        row_idx = i + 5
        d = frappe._dict(zip(columns, row))
        d["doctype"] = "Attendance"
        if d.name:
            # existing record: preserve fields the CSV must not overwrite
            d["docstatus"] = frappe.db.get_value("Attendance", d.name, "docstatus")
            d["docin_time"] = frappe.db.get_value("Attendance", d.name, "docin_time")
            d["docout_time"] = frappe.db.get_value("Attendance", d.name, "docout_time")
            d["docovertime_in_minutes"] = frappe.db.get_value("Attendance", d.name, "docovertime_in_minutes")
            d["docot_rate"] = frappe.db.get_value("Attendance", d.name, "docot_rate")
        try:
            check_record(d)
            ret.append(import_doc(d, "Attendance", 1, row_idx, submit=True))
        except Exception as e:
            error = True
            ret.append('Error for row (#%d) %s : %s' % (row_idx, len(row) > 1 and row[1] or "", cstr(e)))
            frappe.errprint(frappe.get_traceback())
    if error:
        frappe.db.rollback()
    else:
        frappe.db.commit()
    return {"messages": ret, "error": error}
def upload():
    """Resolve a CSV of item codes/quantities into Quotation item dicts.

    Same contract as the compact variant in this file: first row is
    headers (column 0 forced to "item_code"); valid items yield
    {"item_code", "qty"}; invalid codes go to the error list.

    Returns:
        dict: {"messages": [item dicts], "error": [error strings]}.

    Raises:
        frappe.PermissionError: if the user lacks "create" on Quotation.
    """
    if not frappe.has_permission("Quotation", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file
    from frappe.modules import scrub

    rows = read_csv_content_from_uploaded_file()
    # list() needed: py3 filter() is lazy and rows[0] below would fail.
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    columns = [scrub(f) for f in rows[0]]
    columns[0] = "item_code"

    ret = []
    error = []
    start_row = 1
    # (removed unused `total` local)
    for i, row in enumerate(rows[start_row:]):
        row_idx = i + start_row
        d = frappe._dict(zip(columns, row))
        item = frappe.db.sql(
            """select name from `tabItem` where name = %s and docstatus < 2""",
            d.item_code, as_dict=1)
        if item:
            newitem = {}
            newitem["item_code"] = item[0].name
            newitem["qty"] = d.quantity
            ret.append(newitem)
        else:
            error.append('Error for row (#%d) %s : Invalid Item Code' % (row_idx, row[0]))
    return {"messages": ret, "error": error}
def upload():
    """Import draft Attendance records from a CSV with a fixed layout.

    Column names are hard-coded (employee, att_date, arrival_time,
    departure_time) — scrubbing certain header text reportedly failed
    (see fixme). Row 0 is the header; data starts at row 1. Records are
    inserted as drafts (submit=False).

    Returns:
        dict: {"messages": [...], "error": bool-or-messages}.

    Raises:
        frappe.PermissionError: if the user lacks "create" on Attendance.
    """
    if not frappe.has_permission("Attendance", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file, check_record, import_doc

    rows = read_csv_content_from_uploaded_file()
    # list() needed: py3 filter() is lazy and rows[1:] below would fail.
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    # fixme: error when importing certain headers, hence hard-coded columns
    columns = ["employee", "att_date", "arrival_time", "departure_time"]

    ret = []
    error = False
    for i, row in enumerate(rows[1:]):
        if not row:
            continue
        row_idx = i + 1
        d = frappe._dict(zip(columns, row))
        d["doctype"] = "Attendance"
        try:
            check_record(d)
            ret.append(import_doc(d, "Attendance", 1, row_idx, submit=False))
        except Exception as e:  # was py2-only `except Exception, e`
            error = True
            # BUG FIX: report the actual exception instead of the
            # uninformative fixed string "Check data" (e was discarded).
            ret.append('Error for row (#%d) %s : %s' % (row_idx + 1, len(row) > 1 and row[1] or "", cstr(e)))
            frappe.errprint(row_idx)
            frappe.errprint(frappe.get_traceback())
    # (removed unused `started` flag; return results to the caller)
    return {"messages": ret, "error": error}
def upload(update_due_date=None):
    """Import migrated agreement payments from an uploaded CSV.

    The first CSV row is the header; each subsequent row is mapped into
    a dict keyed by a fixed set of labels and fed to ``made_payments``.

    Args:
        update_due_date: accepted for backward compatibility; unused here.

    Returns:
        dict: {"messages": [made_payments results], "error": False}.
        NOTE(review): per-row failures are not tracked — "error" is
        always False; confirm callers rely on that before changing it.
    """
    params = json.loads(frappe.form_dict.get("params") or '{}')
    csv_rows = read_csv_content_from_uploaded_file()
    ret = []
    header = csv_rows[0] if csv_rows else []
    # iterate data rows directly instead of enumerating and skipping index 0
    for line in csv_rows[1:]:
        # blank template keyed by the header labels
        d = {key: '' for key in header}
        d['Migrated agreement ID'] = line[0]
        d['Agreement No'] = line[8]
        d["Payoff"] = line[9]
        # payoff rows get no payment id
        d['Payment ID'] = d['Agreement No'] + "-" + line[1] if not d["Payoff"] else ""
        d['Payment date'] = line[2]
        d['Payment due date'] = line[3]
        d['Cash'] = line[4]
        # d['Credit card'] = line[5]
        d['Bank Transfer'] = line[5]
        d['Discount'] = line[6]
        d['Late Fees'] = line[7]
        ret.append(made_payments(d, params))
    return {"messages": ret, "error": False}
def upload():
    """Group uploaded Attendance CSV rows by the value in column 1.

    Builds two maps keyed by column 1 (the record key): one keeping the
    first raw row seen, one accumulating [col8, col9] (in/out time)
    pairs. Headers are on the 5th row (index 4); data starts at row 5.

    Returns:
        dict: {"messages": [...], "error": bool-or-messages}.

    Raises:
        frappe.PermissionError: if the user lacks "create" on Attendance.
    """
    if not frappe.has_permission("Attendance", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file
    from frappe.modules import scrub

    rows = read_csv_content_from_uploaded_file()
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    columns = [scrub(f) for f in rows[4]]
    columns[0] = "name"
    columns[3] = "att_date"

    ret = []
    error = False

    # BUG FIX: `attendance_dict = attendance_rowdata = {}` bound BOTH
    # names to the SAME dict, so the raw row stored via setdefault() was
    # also the grouping entry and got [in, out] pairs appended onto it,
    # corrupting the CSV row. Use two independent dicts.
    attendance_dict = {}
    attendance_rowdata = {}
    for i, row in enumerate(rows[5:]):
        if not row:
            continue
        if row[1]:
            data = row[1]
            attendance_rowdata.setdefault(data, row)
            # equivalent to the original present/absent branches
            attendance_dict.setdefault(data, []).append([row[8], row[9]])

    if attendance_dict and attendance_rowdata:
        for r in attendance_rowdata:
            # TODO(review): per-key processing was left unimplemented
            # upstream (loop body was `pass`); kept as-is.
            pass

    if error:
        frappe.db.rollback()
    return {"messages": ret, "error": error}
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, overwrite=None, ignore_links=False):
    """upload data

    Core CSV data-import entry point. Parses a template CSV (header
    section above a data-separator marker, data below), maps columns to
    doctypes/fieldnames via the template's header rows, and inserts or
    overwrites documents row by row, collecting per-row result messages.

    NOTE(review): as visible here, the collected `ret`/`error` results
    are not returned at the end of the loop — confirm against the full
    file whether the tail of this function was truncated.
    """
    frappe.flags.mute_emails = True
    # extra input params
    params = json.loads(frappe.form_dict.get("params") or '{}')
    if params.get("_submit"):
        submit_after_import = True
    if params.get("ignore_encoding_errors"):
        ignore_encoding_errors = True
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file

    def bad_template():
        # template rows above the data separator were altered
        frappe.throw(_("Please do not change the rows above {0}").format(data_keys.data_separator))

    def check_data_length():
        # hard cap on rows per import
        max_rows = 5000
        if not data:
            frappe.throw(_("No data found"))
        elif len(data) > max_rows:
            frappe.throw(_("Only allowed {0} rows in one import").format(max_rows))

    def get_start_row():
        # first row after the data-separator marker
        for i, row in enumerate(rows):
            if row and row[0]==data_keys.data_separator:
                return i+1
        bad_template()

    def get_header_row(key):
        return get_header_row_and_idx(key)[0]

    def get_header_row_and_idx(key):
        # locate a header-section row by its first-cell key
        for i, row in enumerate(header):
            if row and row[0]==key:
                return row, i
        return [], -1

    def filter_empty_columns(columns):
        # NOTE(review): under py2, filter() returns a list here; this
        # truthiness/slicing logic assumes that.
        empty_cols = filter(lambda x: x in ("", None), columns)
        if empty_cols:
            if columns[-1*len(empty_cols):] == empty_cols:
                # filter empty columns if they exist at the end
                columns = columns[:-1*len(empty_cols)]
            else:
                frappe.msgprint(_("Please make sure that there are no empty columns in the file."), raise_exception=1)
        return columns

    def make_column_map():
        # build column-index -> fieldname/fieldtype maps per doctype from
        # the template's doctype row plus the two header rows below it
        doctype_row, row_idx = get_header_row_and_idx(data_keys.doctype)
        if row_idx == -1:
            # old style
            return
        dt = None
        for i, d in enumerate(doctype_row[1:]):
            if d not in ("~", "-"):
                if d: # value in doctype_row
                    if doctype_row[i]==dt:
                        # prev column is doctype (in case of parentfield)
                        doctype_parentfield[dt] = doctype_row[i+1]
                    else:
                        dt = d
                        doctypes.append(d)
                        column_idx_to_fieldname[dt] = {}
                        column_idx_to_fieldtype[dt] = {}
                if dt:
                    # fieldnames are 2 rows below, fieldtypes 4 rows below
                    column_idx_to_fieldname[dt][i+1] = rows[row_idx + 2][i+1]
                    column_idx_to_fieldtype[dt][i+1] = rows[row_idx + 4][i+1]

    def get_doc(start_idx):
        # assemble the main doc plus any child rows that follow it
        if doctypes:
            doc = {}
            for idx in xrange(start_idx, len(rows)):
                # continue consuming rows while they belong to this doc
                if (not doc) or main_doc_empty(rows[idx]):
                    for dt in doctypes:
                        d = {}
                        for column_idx in column_idx_to_fieldname[dt]:
                            try:
                                fieldname = column_idx_to_fieldname[dt][column_idx]
                                fieldtype = column_idx_to_fieldtype[dt][column_idx]
                                d[fieldname] = rows[idx][column_idx]
                                # coerce by declared fieldtype
                                if fieldtype in ("Int", "Check"):
                                    d[fieldname] = cint(d[fieldname])
                                elif fieldtype in ("Float", "Currency", "Percent"):
                                    d[fieldname] = flt(d[fieldname])
                                elif fieldtype == "Date":
                                    d[fieldname] = parse_date(d[fieldname]) if d[fieldname] else None
                            except IndexError:
                                # short row: leave missing columns unset
                                pass
                        # scrub quotes from name and modified
                        if d.get("name") and d["name"].startswith('"'):
                            d["name"] = d["name"][1:-1]
                        if sum([0 if not val else 1 for val in d.values()]):
                            d['doctype'] = dt
                            if dt == doctype:
                                doc.update(d)
                            else:
                                # child table row
                                if not overwrite:
                                    d['parent'] = doc["name"]
                                d['parenttype'] = doctype
                                d['parentfield'] = doctype_parentfield[dt]
                                doc.setdefault(d['parentfield'], []).append(d)
                else:
                    break
            return doc
        else:
            # old-style template: single flat row
            doc = frappe._dict(zip(columns, rows[start_idx][1:]))
            doc['doctype'] = doctype
            return doc

    def main_doc_empty(row):
        # a row with nothing in columns 1-2 continues the previous doc
        return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2])))

    # header
    if not rows:
        rows = read_csv_content_from_uploaded_file(ignore_encoding_errors)
    start_row = get_start_row()
    header = rows[:start_row]
    data = rows[start_row:]
    doctype = get_header_row(data_keys.main_table)[1]
    columns = filter_empty_columns(get_header_row(data_keys.columns)[1:])
    doctypes = []
    doctype_parentfield = {}
    column_idx_to_fieldname = {}
    column_idx_to_fieldtype = {}

    # cannot submit a non-submittable doctype
    if submit_after_import and not cint(frappe.db.get_value("DocType", doctype, "is_submittable")):
        submit_after_import = False

    parenttype = get_header_row(data_keys.parent_table)
    if len(parenttype) > 1:
        parenttype = parenttype[1]

    # check permissions
    if not frappe.permissions.can_import(parenttype or doctype):
        frappe.flags.mute_emails = False
        return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

    # allow limit rows to be uploaded
    check_data_length()
    make_column_map()

    frappe.db.begin()
    if overwrite==None:
        overwrite = params.get('overwrite')

    # delete child rows (if parenttype)
    parentfield = None
    if parenttype:
        parentfield = get_parent_field(doctype, parenttype)
        if overwrite:
            delete_child_rows(data, doctype)

    ret = []
    error = False
    for i, row in enumerate(data):
        # bypass empty rows
        if main_doc_empty(row):
            continue
        row_idx = i + start_row
        doc = None
        doc = get_doc(row_idx)
        try:
            # reset the message log so err_msg below is per-row
            frappe.local.message_log = []
            if parentfield:
                # importing child rows into an existing parent
                parent = frappe.get_doc(parenttype, doc["parent"])
                doc = parent.append(parentfield, doc)
                parent.save()
                ret.append('Inserted row for %s at #%s' % (getlink(parenttype, doc.parent), unicode(doc.idx)))
            else:
                if overwrite and frappe.db.exists(doctype, doc["name"]):
                    original = frappe.get_doc(doctype, doc["name"])
                    original.update(doc)
                    original.ignore_links = ignore_links
                    original.save()
                    ret.append('Updated row (#%d) %s' % (row_idx + 1, getlink(original.doctype, original.name)))
                else:
                    doc = frappe.get_doc(doc)
                    doc.ignore_links = ignore_links
                    doc.insert()
                    ret.append('Inserted row (#%d) %s' % (row_idx + 1, getlink(doc.doctype, doc.name)))
                if submit_after_import:
                    doc.submit()
                    ret.append('Submitted row (#%d) %s' % (row_idx + 1, getlink(doc.doctype, doc.name)))
        except Exception, e:
            error = True
            if doc:
                frappe.errprint(doc if isinstance(doc, dict) else doc.as_dict())
            err_msg = frappe.local.message_log and "\n\n".join(frappe.local.message_log) or cstr(e)
            ret.append('Error for row (#%d) %s : %s' % (row_idx + 1, len(row)>1 and row[1] or "", err_msg))
            frappe.errprint(frappe.get_traceback())
def upload(rows=None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None, update_only=None, ignore_links=False, pre_process=None, via_console=False): """upload data""" frappe.flags.in_import = True # extra input params params = json.loads(frappe.form_dict.get("params") or '{}') if params.get("submit_after_import"): submit_after_import = True if params.get("ignore_encoding_errors"): ignore_encoding_errors = True if not params.get("no_email"): no_email = False if params.get('update_only'): update_only = True frappe.flags.mute_emails = no_email from frappe.utils.csvutils import read_csv_content_from_uploaded_file def get_data_keys_definition(): return get_data_keys() def bad_template(): frappe.throw( _("Please do not change the rows above {0}").format( get_data_keys_definition().data_separator)) def check_data_length(): max_rows = 5000 if not data: frappe.throw(_("No data found")) elif not via_console and len(data) > max_rows: frappe.throw( _("Only allowed {0} rows in one import").format(max_rows)) def get_start_row(): for i, row in enumerate(rows): if row and row[0] == get_data_keys_definition().data_separator: return i + 1 bad_template() def get_header_row(key): return get_header_row_and_idx(key)[0] def get_header_row_and_idx(key): for i, row in enumerate(header): if row and row[0] == key: return row, i return [], -1 def filter_empty_columns(columns): empty_cols = filter(lambda x: x in ("", None), columns) if empty_cols: if columns[-1 * len(empty_cols):] == empty_cols: # filter empty columns if they exist at the end columns = columns[:-1 * len(empty_cols)] else: frappe.msgprint(_( "Please make sure that there are no empty columns in the file." 
), raise_exception=1) return columns def make_column_map(): doctype_row, row_idx = get_header_row_and_idx( get_data_keys_definition().doctype) if row_idx == -1: # old style return dt = None for i, d in enumerate(doctype_row[1:]): if d not in ("~", "-"): if d and doctype_row[i] in (None, '', '~', '-', 'DocType:'): dt, parentfield = d, doctype_row[i + 2] or None doctypes.append((dt, parentfield)) column_idx_to_fieldname[(dt, parentfield)] = {} column_idx_to_fieldtype[(dt, parentfield)] = {} if dt: column_idx_to_fieldname[(dt, parentfield)][i + 1] = rows[row_idx + 2][i + 1] column_idx_to_fieldtype[(dt, parentfield)][i + 1] = rows[row_idx + 4][i + 1] def get_doc(start_idx): if doctypes: doc = {} for idx in xrange(start_idx, len(rows)): if (not doc) or main_doc_empty(rows[idx]): for dt, parentfield in doctypes: d = {} for column_idx in column_idx_to_fieldname[( dt, parentfield)]: try: fieldname = column_idx_to_fieldname[( dt, parentfield)][column_idx] fieldtype = column_idx_to_fieldtype[( dt, parentfield)][column_idx] d[fieldname] = rows[idx][column_idx] if fieldtype in ("Int", "Check"): d[fieldname] = cint(d[fieldname]) elif fieldtype in ("Float", "Currency", "Percent"): d[fieldname] = flt(d[fieldname]) elif fieldtype == "Date": d[fieldname] = getdate( parse_date(d[fieldname]) ) if d[fieldname] else None elif fieldtype == "Datetime": if d[fieldname]: if " " in d[fieldname]: _date, _time = d[fieldname].split() else: _date, _time = d[ fieldname], '00:00:00' _date = parse_date(d[fieldname]) d[fieldname] = get_datetime(_date + " " + _time) else: d[fieldname] = None except IndexError: pass # scrub quotes from name and modified if d.get("name") and d["name"].startswith('"'): d["name"] = d["name"][1:-1] if sum([0 if not val else 1 for val in d.values()]): d['doctype'] = dt if dt == doctype: doc.update(d) else: if not overwrite: d['parent'] = doc["name"] d['parenttype'] = doctype d['parentfield'] = parentfield doc.setdefault(d['parentfield'], []).append(d) else: break return 
doc else: doc = frappe._dict(zip(columns, rows[start_idx][1:])) doc['doctype'] = doctype return doc def main_doc_empty(row): return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2]))) users = frappe.db.sql_list("select name from tabUser") def prepare_for_insert(doc): # don't block data import if user is not set # migrating from another system if not doc.owner in users: doc.owner = frappe.session.user if not doc.modified_by in users: doc.modified_by = frappe.session.user # header if not rows: rows = read_csv_content_from_uploaded_file(ignore_encoding_errors) start_row = get_start_row() header = rows[:start_row] data = rows[start_row:] doctype = get_header_row(get_data_keys_definition().main_table)[1] columns = filter_empty_columns( get_header_row(get_data_keys_definition().columns)[1:]) doctypes = [] column_idx_to_fieldname = {} column_idx_to_fieldtype = {} if submit_after_import and not cint( frappe.db.get_value("DocType", doctype, "is_submittable")): submit_after_import = False parenttype = get_header_row(get_data_keys_definition().parent_table) if len(parenttype) > 1: parenttype = parenttype[1] # check permissions if not frappe.permissions.can_import(parenttype or doctype): frappe.flags.mute_emails = False return { "messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True } # allow limit rows to be uploaded check_data_length() make_column_map() if overwrite == None: overwrite = params.get('overwrite') # delete child rows (if parenttype) parentfield = None if parenttype: parentfield = get_parent_field(doctype, parenttype) if overwrite: delete_child_rows(data, doctype) ret = [] def log(msg): if via_console: print(msg.encode('utf-8')) else: ret.append(msg) def as_link(doctype, name): if via_console: return "{0}: {1}".format(doctype, name) else: return getlink(doctype, name) error = False total = len(data) for i, row in enumerate(data): # bypass empty rows if main_doc_empty(row): continue row_idx = i + start_row doc = None # 
publish task_update frappe.publish_realtime("data_import_progress", {"progress": [i, total]}, user=frappe.session.user) try: doc = get_doc(row_idx) if pre_process: pre_process(doc) if parentfield: parent = frappe.get_doc(parenttype, doc["parent"]) doc = parent.append(parentfield, doc) parent.save() log('Inserted row for %s at #%s' % (as_link(parenttype, doc.parent), unicode(doc.idx))) else: if overwrite and doc["name"] and frappe.db.exists( doctype, doc["name"]): original = frappe.get_doc(doctype, doc["name"]) original_name = original.name original.update(doc) # preserve original name for case sensitivity original.name = original_name original.flags.ignore_links = ignore_links original.save() log('Updated row (#%d) %s' % (row_idx + 1, as_link(original.doctype, original.name))) doc = original else: if not update_only: doc = frappe.get_doc(doc) prepare_for_insert(doc) doc.flags.ignore_links = ignore_links doc.insert() log('Inserted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name))) else: log('Ignored row (#%d) %s' % (row_idx + 1, row[1])) if submit_after_import: doc.submit() log('Submitted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name))) except Exception, e: error = True if doc: frappe.errprint( doc if isinstance(doc, dict) else doc.as_dict()) err_msg = frappe.local.message_log and "\n\n".join( frappe.local.message_log) or cstr(e) log('Error for row (#%d) %s : %s' % (row_idx + 1, len(row) > 1 and row[1] or "", err_msg)) frappe.errprint(frappe.get_traceback()) finally:
def upload(rows=None, submit_after_import=None, ignore_encoding_errors=False, overwrite=None, ignore_links=False):
    """upload data

    Reformatted variant of the core CSV data-import entry point in this
    file: parses the template header section, maps columns to doctypes
    and fieldnames, then inserts/updates documents row by row. Differs
    from the older variant by reading keys via get_data_keys() and by
    requiring doc["name"] to be truthy before attempting an overwrite.

    NOTE(review): as visible here, `ret`/`error` are not returned after
    the loop — confirm against the full file whether the tail was
    truncated.
    """
    frappe.flags.mute_emails = True
    # extra input params
    params = json.loads(frappe.form_dict.get("params") or '{}')
    if params.get("submit_after_import"):
        submit_after_import = True
    if params.get("ignore_encoding_errors"):
        ignore_encoding_errors = True
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file

    def get_data_keys_definition():
        return get_data_keys()

    def bad_template():
        frappe.throw(
            _("Please do not change the rows above {0}").format(
                get_data_keys_definition().data_separator))

    def check_data_length():
        # hard cap on rows per import
        max_rows = 5000
        if not data:
            frappe.throw(_("No data found"))
        elif len(data) > max_rows:
            frappe.throw(
                _("Only allowed {0} rows in one import").format(max_rows))

    def get_start_row():
        # first row after the data-separator marker
        for i, row in enumerate(rows):
            if row and row[0] == get_data_keys_definition().data_separator:
                return i + 1
        bad_template()

    def get_header_row(key):
        return get_header_row_and_idx(key)[0]

    def get_header_row_and_idx(key):
        # locate a header-section row by its first-cell key
        for i, row in enumerate(header):
            if row and row[0] == key:
                return row, i
        return [], -1

    def filter_empty_columns(columns):
        # NOTE(review): relies on py2 filter() returning a list
        empty_cols = filter(lambda x: x in ("", None), columns)
        if empty_cols:
            if columns[-1 * len(empty_cols):] == empty_cols:
                # filter empty columns if they exist at the end
                columns = columns[:-1 * len(empty_cols)]
            else:
                frappe.msgprint(_(
                    "Please make sure that there are no empty columns in the file."
                ), raise_exception=1)
        return columns

    def make_column_map():
        # build column-index -> fieldname/fieldtype maps per doctype
        doctype_row, row_idx = get_header_row_and_idx(
            get_data_keys_definition().doctype)
        if row_idx == -1:
            # old style
            return
        dt = None
        for i, d in enumerate(doctype_row[1:]):
            if d not in ("~", "-"):
                if d:
                    # value in doctype_row
                    if doctype_row[i] == dt:
                        # prev column is doctype (in case of parentfield)
                        doctype_parentfield[dt] = doctype_row[i + 1]
                    else:
                        dt = d
                        doctypes.append(d)
                        column_idx_to_fieldname[dt] = {}
                        column_idx_to_fieldtype[dt] = {}
                if dt:
                    # fieldnames are 2 rows below, fieldtypes 4 rows below
                    column_idx_to_fieldname[dt][i + 1] = rows[row_idx + 2][i + 1]
                    column_idx_to_fieldtype[dt][i + 1] = rows[row_idx + 4][i + 1]

    def get_doc(start_idx):
        # assemble the main doc plus any child rows that follow it
        if doctypes:
            doc = {}
            for idx in xrange(start_idx, len(rows)):
                if (not doc) or main_doc_empty(rows[idx]):
                    for dt in doctypes:
                        d = {}
                        for column_idx in column_idx_to_fieldname[dt]:
                            try:
                                fieldname = column_idx_to_fieldname[dt][
                                    column_idx]
                                fieldtype = column_idx_to_fieldtype[dt][
                                    column_idx]
                                d[fieldname] = rows[idx][column_idx]
                                # coerce by declared fieldtype
                                if fieldtype in ("Int", "Check"):
                                    d[fieldname] = cint(d[fieldname])
                                elif fieldtype in ("Float", "Currency", "Percent"):
                                    d[fieldname] = flt(d[fieldname])
                                elif fieldtype == "Date":
                                    d[fieldname] = parse_date(
                                        d[fieldname]) if d[fieldname] else None
                            except IndexError:
                                # short row: leave missing columns unset
                                pass
                        # scrub quotes from name and modified
                        if d.get("name") and d["name"].startswith('"'):
                            d["name"] = d["name"][1:-1]
                        if sum([0 if not val else 1 for val in d.values()]):
                            d['doctype'] = dt
                            if dt == doctype:
                                doc.update(d)
                            else:
                                # child table row
                                if not overwrite:
                                    d['parent'] = doc["name"]
                                d['parenttype'] = doctype
                                d['parentfield'] = doctype_parentfield[dt]
                                doc.setdefault(d['parentfield'], []).append(d)
                else:
                    break
            return doc
        else:
            # old-style template: single flat row
            doc = frappe._dict(zip(columns, rows[start_idx][1:]))
            doc['doctype'] = doctype
            return doc

    def main_doc_empty(row):
        # a row with nothing in columns 1-2 continues the previous doc
        return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2])))

    # header
    if not rows:
        rows = read_csv_content_from_uploaded_file(ignore_encoding_errors)
    start_row = get_start_row()
    header = rows[:start_row]
    data = rows[start_row:]
    doctype = get_header_row(get_data_keys_definition().main_table)[1]
    columns = filter_empty_columns(
        get_header_row(get_data_keys_definition().columns)[1:])
    doctypes = []
    doctype_parentfield = {}
    column_idx_to_fieldname = {}
    column_idx_to_fieldtype = {}

    # cannot submit a non-submittable doctype
    if submit_after_import and not cint(
            frappe.db.get_value("DocType", doctype, "is_submittable")):
        submit_after_import = False

    parenttype = get_header_row(get_data_keys_definition().parent_table)
    if len(parenttype) > 1:
        parenttype = parenttype[1]

    # check permissions
    if not frappe.permissions.can_import(parenttype or doctype):
        frappe.flags.mute_emails = False
        return {
            "messages": [_("Not allowed to Import") + ": " + _(doctype)],
            "error": True
        }

    # allow limit rows to be uploaded
    check_data_length()
    make_column_map()

    frappe.db.begin()
    if overwrite == None:
        overwrite = params.get('overwrite')

    # delete child rows (if parenttype)
    parentfield = None
    if parenttype:
        parentfield = get_parent_field(doctype, parenttype)
        if overwrite:
            delete_child_rows(data, doctype)

    ret = []
    error = False
    for i, row in enumerate(data):
        # bypass empty rows
        if main_doc_empty(row):
            continue
        row_idx = i + start_row
        doc = None
        doc = get_doc(row_idx)
        try:
            # reset the message log so err_msg below is per-row
            frappe.local.message_log = []
            if parentfield:
                # importing child rows into an existing parent
                parent = frappe.get_doc(parenttype, doc["parent"])
                doc = parent.append(parentfield, doc)
                parent.save()
                ret.append('Inserted row for %s at #%s' %
                    (getlink(parenttype, doc.parent), unicode(doc.idx)))
            else:
                if overwrite and doc["name"] and frappe.db.exists(
                        doctype, doc["name"]):
                    original = frappe.get_doc(doctype, doc["name"])
                    original.update(doc)
                    original.ignore_links = ignore_links
                    original.save()
                    ret.append('Updated row (#%d) %s' %
                        (row_idx + 1, getlink(original.doctype, original.name)))
                else:
                    doc = frappe.get_doc(doc)
                    doc.ignore_links = ignore_links
                    doc.insert()
                    ret.append('Inserted row (#%d) %s' %
                        (row_idx + 1, getlink(doc.doctype, doc.name)))
                if submit_after_import:
                    doc.submit()
                    ret.append('Submitted row (#%d) %s'
                        % (row_idx + 1, getlink(doc.doctype, doc.name)))
        except Exception, e:
            error = True
            if doc:
                frappe.errprint(
                    doc if isinstance(doc, dict) else doc.as_dict())
            err_msg = frappe.local.message_log and "\n\n".join(
                frappe.local.message_log) or cstr(e)
            ret.append('Error for row (#%d) %s : %s' %
                (row_idx + 1, len(row) > 1 and row[1] or "", err_msg))
            frappe.errprint(frappe.get_traceback())
def upload():
    """Import Shift Schedule entries from an uploaded CSV.

    Row 4 (index 3) carries the schedule dates from column 4 onward;
    data rows start at index 4, with employee in column 1, store in
    column 3 and per-date shift times from column 4 onward. One record
    per (row, date) pair is imported as a submitted doc.

    Returns:
        dict: {"messages": [...], "error": bool-or-messages}.

    Raises:
        frappe.PermissionError: if the user lacks "create" on
        Shift Schedule.
    """
    if not frappe.has_permission("Shift Schedule", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file, check_record, import_doc

    rows = read_csv_content_from_uploaded_file()
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    # only the record name comes from positional zip; the rest is explicit
    columns = ["name"]
    # schedule dates start at the 5th column of the 4th row
    date_headers = rows[3][4:]

    ret = []
    error = False
    for i, row in enumerate(rows[4:]):
        # BUG FIX: the empty-row guard must run before row[1] is touched
        # (debug prints used to index row[1] first and could IndexError)
        if not row:
            continue
        # BUG FIX: data starts at rows[4:], so the file row is i + 4
        # (was i + 3, off by one in every error message)
        row_idx = i + 4
        d = frappe._dict(zip(columns, row))
        d["doctype"] = "Shift Schedule"
        d["employee"] = row[1]
        d["store"] = row[3]
        shift_times = row[4:]
        # NOTE(review): upstream hard-coded 8 columns instead of using
        # len(shift_times) — TODO confirm the template is fixed-width
        length_of_dates = 8
        # BUG FIX: inner loop no longer shadows the outer index `i`
        for col in range(length_of_dates - 1):
            d["attendance_date"] = date_headers[col]
            d["shift_time"] = shift_times[col]
            if d.name:
                d["docstatus"] = frappe.db.get_value("Shift Schedule", d.name, "docstatus")
            try:
                check_record(d)
                ret.append(import_doc(d, "Shift Schedule", 1, row_idx, submit=True))
            except Exception as e:  # was py2-only `except Exception, e`
                error = True
                ret.append('Error for row (#%d) %s : %s' % (row_idx, len(row) > 1 and row[1] or "", cstr(e)))
                frappe.errprint(frappe.get_traceback())
    # (removed debug print() calls and dead commented-out code;
    # return results to the caller)
    return {"messages": ret, "error": error}
def upload():
    """Build Quotation items from a CSV, honoring header/ignore groups.

    First row is headers (column 0 forced to "item_code"). Each item is
    looked up in `tabItem`; sales items in the header1-4 groups are added
    with qty "0", raw materials and assembly parts are skipped, other
    sales items are added with the CSV quantity. Page breaks propagate.

    Returns:
        dict: {"items": [...], "messages": [...], "error": bool}.

    Raises:
        frappe.PermissionError: if the user lacks "create" on Quotation.
    """
    if not frappe.has_permission("Quotation", "create"):
        raise frappe.PermissionError
    from frappe.utils.csvutils import read_csv_content_from_uploaded_file
    from frappe.modules import scrub

    rows = read_csv_content_from_uploaded_file()
    # list() needed: py3 filter() is lazy and rows[0] below would fail.
    rows = [row for row in rows if row and any(row)]
    if not rows:
        msg = [_("Please select a csv file")]
        return {"messages": msg, "error": msg}

    columns = [scrub(f) for f in rows[0]]
    columns[0] = "item_code"

    ret = []
    error = False
    messages = []
    start_row = 1
    header_groups = {"header1", "header2", "header3", "header4"}
    for i, row in enumerate(rows[start_row:]):
        row_idx = i + start_row
        d = frappe._dict(zip(columns, row))
        itemdict = frappe.db.sql("""select name,item_group, is_sales_item from `tabItem` where name = %s and docstatus < 2""", d.item_code, as_dict=1)
        if not itemdict:
            error = True
            messages.append('Error for row (#%d) %s : Invalid Item Code' % (row_idx, row[0]))
            continue
        item = itemdict[0]
        # hoisted: was recomputed for every branch
        item_group = str(item.item_group).lower()
        newitem = {
            "item_code": item.name,
            "qty": d.quantity,
            "item_group": item.item_group,
            "page_break": True if d.page_break else False,
        }
        if item.is_sales_item:
            if item_group in header_groups:
                # header pseudo-items carry no quantity
                newitem["qty"] = "0"
                ret.append(newitem)
                if d.page_break:
                    messages.append('Header Row (#%d) %s with Page Break' % (row_idx, row[0]))
                else:
                    messages.append('Header Row (#%d) %s' % (row_idx, row[0]))
            elif item_group == "raw material":
                messages.append('Ignored Row (#%d) %s : Item is a raw material' % (row_idx, row[0]))
            elif item_group == "assemblypart":
                messages.append('Ignored Row (#%d) %s : Item is an assembly part' % (row_idx, row[0]))
            else:
                ret.append(newitem)
                if d.page_break:
                    messages.append('Row (#%d) %s : Item added with Page Break' % (row_idx, row[0]))
                else:
                    messages.append('Row (#%d) %s : Item added' % (row_idx, row[0]))
        else:
            error = True
            messages.append('Error for row (#%d) %s : Item is not a sales item' % (row_idx, row[0]))
    return {"items": ret, "messages": messages, "error": error}
def upload():
	"""Read the uploaded csv and return only its non-empty rows."""
	from frappe.utils.csvutils import read_csv_content_from_uploaded_file

	def has_content(row):
		# A row counts only if it exists and has at least one truthy cell.
		return bool(row) and any(row)

	return filter(has_content, read_csv_content_from_uploaded_file())
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
	ignore_links=False, pre_process=None, via_console=False):
	"""Generic template-driven csv data importer.

	Parses a csv produced by the export template (header rows carry the
	doctype, column fieldnames and fieldtypes, separated from the data
	by a marker row), builds one document per non-empty data row —
	including child-table rows for multi-doctype templates — and
	inserts, updates (``overwrite``) or appends-to-parent as required.

	NOTE(review): this block is truncated in the source — it ends with a
	bare ``finally:`` whose body is missing.
	"""
	frappe.flags.in_import = True

	# extra input params
	params = json.loads(frappe.form_dict.get("params") or '{}')
	if params.get("submit_after_import"):
		submit_after_import = True
	if params.get("ignore_encoding_errors"):
		ignore_encoding_errors = True
	if not params.get("no_email"):
		no_email = False
	frappe.flags.mute_emails = no_email

	from frappe.utils.csvutils import read_csv_content_from_uploaded_file

	def get_data_keys_definition():
		# Template marker strings (data separator, doctype row key, ...).
		return get_data_keys()

	def bad_template():
		frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))

	def check_data_length():
		# Console imports are unbounded; web imports are capped.
		max_rows = 5000
		if not data:
			frappe.throw(_("No data found"))
		elif not via_console and len(data) > max_rows:
			frappe.throw(_("Only allowed {0} rows in one import").format(max_rows))

	def get_start_row():
		# First row after the data-separator marker is the first data row.
		for i, row in enumerate(rows):
			if row and row[0]==get_data_keys_definition().data_separator:
				return i+1
		bad_template()

	def get_header_row(key):
		return get_header_row_and_idx(key)[0]

	def get_header_row_and_idx(key):
		# Locate a header row by its first-column key; ([], -1) if absent.
		for i, row in enumerate(header):
			if row and row[0]==key:
				return row, i
		return [], -1

	def filter_empty_columns(columns):
		empty_cols = filter(lambda x: x in ("", None), columns)
		if empty_cols:
			if columns[-1*len(empty_cols):] == empty_cols:
				# filter empty columns if they exist at the end
				columns = columns[:-1*len(empty_cols)]
			else:
				frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
					raise_exception=1)
		return columns

	def make_column_map():
		# Build (doctype, parentfield) -> {column index: fieldname/fieldtype}
		# from the template's doctype / fieldname / fieldtype header rows.
		doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
		if row_idx == -1:
			# old style
			return
		dt = None
		for i, d in enumerate(doctype_row[1:]):
			if d not in ("~", "-"):
				if d and doctype_row[i] in (None, '' ,'~', '-', 'DocType:'):
					# New doctype section begins at this column.
					dt, parentfield = d, doctype_row[i+2] or None
					doctypes.append((dt, parentfield))
					column_idx_to_fieldname[(dt, parentfield)] = {}
					column_idx_to_fieldtype[(dt, parentfield)] = {}
				if dt:
					# fieldname row is 2 below, fieldtype row is 4 below.
					column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
					column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]

	def get_doc(start_idx):
		# Assemble one document (with child rows) starting at start_idx;
		# consumes following rows whose main-doc columns are empty.
		if doctypes:
			doc = {}
			for idx in xrange(start_idx, len(rows)):
				if (not doc) or main_doc_empty(rows[idx]):
					for dt, parentfield in doctypes:
						d = {}
						for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
							try:
								fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
								fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]
								# Cast the raw csv string per the declared fieldtype.
								d[fieldname] = rows[idx][column_idx]
								if fieldtype in ("Int", "Check"):
									d[fieldname] = cint(d[fieldname])
								elif fieldtype in ("Float", "Currency", "Percent"):
									d[fieldname] = flt(d[fieldname])
								elif fieldtype == "Date":
									d[fieldname] = getdate(parse_date(d[fieldname])) if d[fieldname] else None
								elif fieldtype == "Datetime":
									if d[fieldname]:
										if " " in d[fieldname]:
											_date, _time = d[fieldname].split()
										else:
											_date, _time = d[fieldname], '00:00:00'
										_date = parse_date(d[fieldname])
										d[fieldname] = get_datetime(_date + " " + _time)
									else:
										d[fieldname] = None
							except IndexError:
								pass
						# scrub quotes from name and modified
						if d.get("name") and d["name"].startswith('"'):
							d["name"] = d["name"][1:-1]
						if sum([0 if not val else 1 for val in d.values()]):
							d['doctype'] = dt
							if dt == doctype:
								doc.update(d)
							else:
								if not overwrite:
									d['parent'] = doc["name"]
								d['parenttype'] = doctype
								d['parentfield'] = parentfield
								doc.setdefault(d['parentfield'], []).append(d)
				else:
					break
			return doc
		else:
			# Old-style template: plain column -> value mapping.
			doc = frappe._dict(zip(columns, rows[start_idx][1:]))
			doc['doctype'] = doctype
			return doc

	def main_doc_empty(row):
		# A row belongs to the previous doc when columns 1 and 2 are empty.
		return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2])))

	users = frappe.db.sql_list("select name from tabUser")
	def prepare_for_insert(doc):
		# don't block data import if user is not set
		# migrating from another system
		if not doc.owner in users:
			doc.owner = frappe.session.user
		if not doc.modified_by in users:
			doc.modified_by = frappe.session.user

	# header
	if not rows:
		rows = read_csv_content_from_uploaded_file(ignore_encoding_errors)
	start_row = get_start_row()
	header = rows[:start_row]
	data = rows[start_row:]
	doctype = get_header_row(get_data_keys_definition().main_table)[1]
	columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
	doctypes = []
	column_idx_to_fieldname = {}
	column_idx_to_fieldtype = {}

	if submit_after_import and not cint(frappe.db.get_value("DocType", doctype, "is_submittable")):
		submit_after_import = False

	parenttype = get_header_row(get_data_keys_definition().parent_table)

	if len(parenttype) > 1:
		parenttype = parenttype[1]

	# check permissions
	if not frappe.permissions.can_import(parenttype or doctype):
		frappe.flags.mute_emails = False
		return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

	# allow limit rows to be uploaded
	check_data_length()
	make_column_map()

	if overwrite==None:
		overwrite = params.get('overwrite')

	# delete child rows (if parenttype)
	parentfield = None
	if parenttype:
		parentfield = get_parent_field(doctype, parenttype)

		if overwrite:
			delete_child_rows(data, doctype)

	ret = []

	def log(msg):
		# Console runs print directly; web runs accumulate into `ret`.
		if via_console:
			print msg.encode('utf-8')
		else:
			ret.append(msg)

	def as_link(doctype, name):
		if via_console:
			return "{0}: {1}".format(doctype, name)
		else:
			return getlink(doctype, name)

	error = False
	total = len(data)
	for i, row in enumerate(data):
		# bypass empty rows
		if main_doc_empty(row):
			continue

		row_idx = i + start_row
		doc = None

		# publish task_update
		frappe.publish_realtime("data_import_progress", {"progress": [i, total]},
			user=frappe.session.user)

		try:
			doc = get_doc(row_idx)
			if pre_process:
				pre_process(doc)

			if parentfield:
				# Importing child rows: append to the existing parent doc.
				parent = frappe.get_doc(parenttype, doc["parent"])
				doc = parent.append(parentfield, doc)
				parent.save()
				log('Inserted row for %s at #%s' % (as_link(parenttype, doc.parent),
					unicode(doc.idx)))
			else:
				if overwrite and doc["name"] and frappe.db.exists(doctype, doc["name"]):
					original = frappe.get_doc(doctype, doc["name"])
					original_name = original.name
					original.update(doc)
					# preserve original name for case sensitivity
					original.name = original_name
					original.flags.ignore_links = ignore_links
					original.save()
					log('Updated row (#%d) %s' % (row_idx + 1, as_link(original.doctype, original.name)))
					doc = original
				else:
					doc = frappe.get_doc(doc)
					prepare_for_insert(doc)
					doc.flags.ignore_links = ignore_links
					doc.insert()
					log('Inserted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name)))
				if submit_after_import:
					doc.submit()
					log('Submitted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name)))
		except Exception, e:
			error = True
			if doc:
				frappe.errprint(doc if isinstance(doc, dict) else doc.as_dict())
			err_msg = frappe.local.message_log and "\n\n".join(frappe.local.message_log) or cstr(e)
			log('Error for row (#%d) %s : %s' % (row_idx + 1,
				len(row)>1 and row[1] or "", err_msg))
			frappe.errprint(frappe.get_traceback())
		finally:
def upload(import_settings=None):
	"""Import Attendance records from an uploaded csv file.

	The csv has a fixed layout: employee, attendance_date,
	arrival_time, departure_time (row 0 is the header, skipped).

	`import_settings` is taken from the request params and controls
	duplicate handling:
	- "ignore": rows whose (employee, date) already exist are skipped;
	- "update": an existing record's name/docstatus is reused so the
	  import overwrites it;
	- "default": rows are imported as new records.

	Per-row results/errors accumulate in `ret`.
	"""
	if not frappe.has_permission("Attendance", "create"):
		raise frappe.PermissionError
	from frappe.utils.csvutils import read_csv_content_from_uploaded_file
	from frappe.modules import scrub
	rows = read_csv_content_from_uploaded_file()
	rows = list(filter(lambda x: x and any(x), rows))
	if not rows:
		msg = [_("Please select a csv file")]
		return {"messages": msg, "error": msg}
	#fixme error when importing certain header
	#columns = [scrub(f) for f in rows[0]]
	columns = ["employee", "attendance_date", "arrival_time", "departure_time"]
	ret = []
	error = False
	started = False
	import json
	params = json.loads(frappe.form_dict.get("params") or '{}')
	# The function argument is overwritten by the request param (or "default").
	if not params.get("import_settings"):
		import_settings = "default"
	else:
		import_settings = params.get("import_settings")
	from frappe.utils.csvutils import check_record, import_doc
	for i, row in enumerate(rows[1:]):
		if not row:
			continue
		started = True
		row_idx = i + 1
		d = frappe._dict(zip(columns, row))
		d["doctype"] = "Attendance"
		date_error = False
		# Validate the date before doing anything else with the row.
		# NOTE(review): `except Exception` precedes `except ValueError`,
		# so the ValueError clause can never run — confirm intent.
		try:
			parse_date(d.attendance_date)
		except Exception as e:
			date_error = True
			ret.append('Error for row (#%d) %s : %s' % (row_idx + 1,
				len(row) > 1 and row[1] or "", cstr(e)))
		except ValueError as e:
			date_error = True
			ret.append('Error for row (#%d) %s : %s' % (row_idx + 1,
				len(row) > 1 and row[1] or "", cstr(e)))
		except:
			date_error = True
			ret.append('Error for row (#%d) %s' % (row_idx + 1,
				len(row) > 1 and row[1] or ""))
		if date_error == True:
			# In "ignore" mode a bad date is skipped without flagging error.
			if import_settings != "ignore":
				error = True
			continue
		formatted_attendance_date = getdate(parse_date(d.attendance_date))
		if import_settings == "ignore":
			# Skip rows that already have an Attendance for this employee/date.
			attendance = frappe.db.sql(
				"""select name,docstatus,attendance_date from `tabAttendance`
				where employee = %s and attendance_date = %s""",
				(d.employee, formatted_attendance_date), as_dict=True)
			if attendance:
				link = [
					'<a href="#Form/Attendance/{0}">{0}</a>'.format(
						str(attendance[0].name))
				]
				# ret.append('Ignored row (#%d) %s : %s - %s' % (row_idx+1,
				# len(row)>1 and row[1] or "", cstr(d.employee),link))
			else:
				try:
					check_record(d)
					ret.append(
						import_doc(d, "Attendance", 1, row_idx, submit=False))
				except Exception, e:
					# error = True
					ret.append(
						'Error for row (#%d) %s : %s' % (row_idx + 1,
						len(row) > 1 and row[1] or "", cstr(e)))
					# frappe.errprint(frappe.get_traceback())
		elif import_settings == "update":
			# Reuse the existing record's identity so import_doc overwrites it.
			attendance = frappe.db.sql(
				"""select name,docstatus,attendance_date from `tabAttendance`
				where employee = %s and attendance_date = %s""",
				(d.employee, formatted_attendance_date), as_dict=True)
			if attendance:
				d["docstatus"] = attendance[0].docstatus
				d["name"] = attendance[0].name
			try:
				check_record(d)
				ret.append(
					import_doc(d, "Attendance", 1, row_idx, submit=False))
			except Exception, e:
				error = True
				ret.append(
					'Error for row (#%d) %s : %s' % (row_idx + 1,
					len(row) > 1 and row[1] or "", cstr(e)))
def csv_opertions():
	"""Import Attendance records from an uploaded csv file.

	Fixed column layout: attendance date (0), employee code (1),
	employee name (2), check-in (10) and check-out (11).  Rows missing
	either punch are skipped and logged.  The whole file is committed
	as one transaction: any row error rolls everything back.

	Returns ``{"messages": [...], "error": bool}``.
	"""
	if not frappe.has_permission("Attendance", "create"):
		raise frappe.PermissionError
	from frappe.utils.csvutils import read_csv_content_from_uploaded_file
	from frappe.modules import scrub
	from frappe.utils.csvutils import check_record, import_doc

	rows = list(filter(lambda x: x and any(x), read_csv_content_from_uploaded_file()))
	if not rows:
		msg = [_("Please select a csv file")]
		return {"messages": msg, "error": msg}

	# Map the fixed template positions onto Attendance fieldnames.
	columns = [f for f in rows[0]]
	columns[0] = "attendance_date"
	columns[1] = "employee_code"
	columns[2] = "employee_name"
	columns[10] = "check_in"
	columns[11] = "check_out"

	ret = []
	error = False
	for i, row in enumerate(rows[1:]):
		if not row:
			continue
		row_idx = i + 1
		d = frappe._dict(zip(columns, row))
		d["doctype"] = "Attendance"

		# Both punches are required; the two duplicated branches of the
		# original are merged — message and log text are unchanged.
		if not d["check_in"] or not d["check_out"]:
			frappe.msgprint(
				"Some records didn't submit because of incomplete information. Please check error log for details."
			)
			frappe.log_error(
				frappe.get_traceback(),
				"""Attendance for date '{0}' for employee '{1}' did not mark due to missing check_in or check_out"""
				.format(d["attendance_date"], d["employee_name"]))
			continue

		if d.name:
			d["docstatus"] = frappe.db.get_value("Attendance", d.name, "docstatus")
		try:
			check_record(d)
			ret.append(import_doc(d, "Attendance", 1, row_idx, submit=True))
		except AttributeError:
			# NOTE(review): swallowed deliberately in the original —
			# confirm which attribute access this papers over.
			pass
		except Exception as e:
			error = True
			ret.append('Error for row (#%d) %s : %s' %
				(row_idx, len(row) > 1 and row[1] or "", cstr(e)))
			frappe.errprint(frappe.get_traceback())

	# All-or-nothing: any failed row rolls back the whole file.
	if error:
		frappe.db.rollback()
	else:
		frappe.db.commit()
	return {"messages": ret, "error": error}
def upload():
	"""Validate an uploaded Employee csv against the standard template.

	Row index 4 must be the template header (compared against the
	module-level ``columns``); data rows follow.  Every field is
	validated via the module-level helpers (val_date_format, val_link,
	unique_value, ...) — invalid values are blanked and an html error
	snippet is appended to ``error_list``, which is shown via msgprint
	at the end.

	Returns the list of row dicts ready for Employee Detail creation.
	"""
	file_name = frappe.form_dict.filename
	file_type = file_name.split('.')[-1]
	if not file_type == 'csv':
		frappe.msgprint(_("Document with extension CSV can only be uploaded"))
		return
	rows = read_csv_content_from_uploaded_file()
	error_list = ''
	new_list = []
	# Bug fix: the original bound temp_1..temp_4 to ONE shared list
	# (temp_1 = temp_2 = temp_3 = temp_4 = []), so the iqama (temp_2)
	# and enrollment (temp_4) uniqueness trackers contaminated each
	# other.  Each tracker now has its own list.
	temp_1, temp_2, temp_3, temp_4 = [], [], [], []
	for i in range(4, len(rows)):
		if i == 4:
			# Header row: must match the standard template exactly.
			if not len(rows[i]) == len(columns):
				frappe.msgprint(_("Please Upload Standard Template"))
				return
			# The template has 29 columns (indices 0..28); the original
			# spelled this out as a 29-term `or` chain.
			if any(rows[i][col] != columns[col] for col in range(29)):
				frappe.msgprint("Please Do not Change Column Name in Template")
				return
		else:
			# Only rows with an employee name (column 1) are processed.
			if rows[i][1]:
				validate_result = join_date_result = birth_date_result = iqama_expiry_date_result = department_result = gender_result = division_result = contractor_result = holi_list_result = salutation_result = company_result = ''

				# --- date fields ---
				birth_date = rows[i][7]
				birth_date_result = val_date_format(i, birth_date, 'in Excel For Date of Birth')
				if birth_date_result:
					birth_date = None
					error_list += birth_date_result
				if birth_date:
					birth_result = birth_validate(birth_date, i, 'in Excel')
					if birth_result:
						error_list += birth_result
				joining_date = rows[i][8]
				join_date_result = val_date_format(i, joining_date, 'in Excel For Joining Date')
				if join_date_result:
					joining_date = None
					error_list += join_date_result
				if birth_date and joining_date:
					birth_date = frappe.utils.data.getdate(birth_date)
					joining_date = frappe.utils.data.getdate(joining_date)
					validate_result = date_validate(birth_date, joining_date, i, "in Excel")
					if validate_result:
						error_list += validate_result
				iqama_expiry_date = rows[i][27]
				if iqama_expiry_date is not None:
					iqama_expiry_date_result = val_date_format(i, iqama_expiry_date, "in Excel For Iqama Expiry Date")
					if iqama_expiry_date_result:
						iqama_expiry_date = None
						error_list += iqama_expiry_date_result

				# --- linked masters ---
				department = rows[i][9]
				if department is not None:
					department_result = val_link(i, 'Department', str(department), "in Excel")
					if department_result:
						department = ''
						error_list += department_result
				# Department must belong to the company in column 28.
				if rows[i][28] and department:
					com_validate = val_dep_com(i, 'Department', str(department), rows[i][28])
					if com_validate:
						department = ''
						error_list += com_validate
				division = rows[i][23]
				if division is not None:
					division_result = val_link(i, 'Division', str(division), "in Excel")
					if division_result:
						division = ''
						error_list += division_result
				contractor = rows[i][21]
				if contractor is not None:
					contractor_result = val_link(i, 'Contractor', str(contractor), "in Excel")
					if contractor_result:
						contractor = ''
						error_list += contractor_result
				gender = rows[i][6]
				if gender is not None:
					gender_result = val_link(i, 'Gender', str(gender), "in Excel")
					if gender_result:
						gender = ''
						error_list += gender_result
				shift_type = rows[i][10]
				if shift_type is not None:
					shift_result = val_link(i, 'Shift Type', str(shift_type), "in Excel")
					if shift_result:
						shift_type = ''
						error_list += shift_result
				employee_category = rows[i][20]
				if employee_category is not None:
					employee_category_result = val_link(i, 'Employee Category Rule', str(employee_category), "in Excel")
					if employee_category_result:
						employee_category = ''
						error_list += employee_category_result
				employee_type = rows[i][19]
				if employee_type is not None:
					employee_type_result = val_link(i, 'Employment Type', str(employee_type), "in Excel")
					if employee_type_result:
						employee_type = ''
						error_list += employee_type_result
				holi_list = rows[i][25]
				if holi_list is not None:
					holi_list_result = val_link(i, 'Holiday List', str(holi_list), "in Excel")
					if holi_list_result:
						holi_list = ''
						error_list += holi_list_result

				# --- select-field options ---
				# NOTE(review): `options` is the raw newline-separated
				# options string, so `in` is a substring test — confirm.
				eligible_weekoff = rows[i][11]
				if eligible_weekoff is not None:
					options = frappe.get_meta("Employee Detail").get_field("eligible_week_off_days").options
					if eligible_weekoff not in options:
						eligible_weekoff = ""
				employee_sal_type = rows[i][22]
				if employee_sal_type is not None:
					options = frappe.get_meta("Employee Detail").get_field("salary_type").options
					if employee_sal_type not in options:
						employee_sal_type = ""
				week_off_day = rows[i][12]
				week_off = [
					"Sunday", "Monday", "Tuesday", "Wednesday", "Thursday",
					"Friday", "Saturday"
				]
				if week_off_day not in week_off:
					week_off_day = ""

				nationality = rows[i][13]
				if nationality is not None:
					nationality_result = val_link(i, 'Nationality', str(nationality), "in Excel")
					if nationality_result:
						nationality = ""
						error_list += nationality_result
				country = rows[i][14]
				if country is not None:
					country_result = val_link(i, 'Country', str(country), "in Excel")
					if country_result:
						country = ""
						error_list += country_result
				grade = rows[i][17]
				if grade is not None:
					grade_result = val_link(i, 'Employee Grade', str(grade), "in Excel")
					if grade_result:
						grade = ""
						error_list += grade_result
				salutation = rows[i][0]
				if salutation is not None:
					salutation_result = val_link(i, 'Salutation', str(salutation), "in Excel")
					if salutation_result:
						salutation = ''
						error_list += salutation_result

				# Saudi or Non-Saudi flag (column 5) -> 1 / 0.
				s_or_ns = rows[i][5]
				if s_or_ns is not None:
					if s_or_ns == "Saudi" or s_or_ns == "saudi":
						s_or_ns = 1
					else:
						s_or_ns = 0

				# --- uniqueness checks (per-db via unique_value, per-file
				# via the temp_* trackers) ---
				iqama_no = rows[i][26]
				if iqama_no:
					iqama_no_result = unique_value('Employee', {'iqama_no': str(iqama_no)}, "In Excel")
					if iqama_no_result == 'pass' or iqama_no in temp_2:
						iqama_no = None
						error_list += '<span class="indicator red list-group-item">Please Enter Unique Iqama No at row {0}</span>'.format(
							str(i + 1))
					if iqama_no:
						temp_2.append(iqama_no)
				enrollment_no = rows[i][18]
				if enrollment_no:
					enrollment_no_result = unique_value('Employee', {'enroll_number': str(enrollment_no)}, "In Excel")
					if enrollment_no_result == 'pass' or enrollment_no in temp_4:
						enrollment_no = None
						error_list += '<span class="indicator red list-group-item">Please Enter Unique Enrollment No at row {0}</span>'.format(
							str(i + 1))
					if enrollment_no:
						temp_4.append(enrollment_no)

				company = rows[i][28]
				if company is not None:
					company_result = val_link(i, 'Company', str(company), "in Excel")
					if company_result:
						company = ''
						error_list += company_result
				# Department without a valid company (or vice versa) is dropped.
				if not department or not company:
					department = ''
					company = ''

				# NOTE(review): division/contractor/holiday_list/salutation
				# below use the raw cell values, not the validated copies —
				# kept as in the original; confirm whether intentional.
				new_list.append({
					"salutation": rows[i][0],
					"employee_name": rows[i][1],
					"full_name_in_arabic": rows[i][4],
					"saudi_or_nonsaudi": s_or_ns,
					"shift_type": shift_type,
					"eligible_week_off_days": eligible_weekoff,
					"week_off_day1": week_off_day,
					"nationality": nationality,
					"country": country,
					"grade": grade,
					"passport_no": rows[i][15],
					"valid_upto_hijri": rows[i][16],
					"middle_name": rows[i][2],
					"last_name": rows[i][3],
					"gender": gender,
					"date_of_birth": birth_date,
					"date_of_joining": joining_date,
					"department": department,
					"enrollment_no": enrollment_no,
					"employment_type": employee_type,
					"employee_category": employee_category,
					"iqama_no": iqama_no,
					"iqama_expiry": iqama_expiry_date,
					"salary_type": employee_sal_type,
					"division": rows[i][23],
					"contractor": rows[i][21],
					"holiday_list": rows[i][25],
					"previous_employee_id": rows[i][24],
					'company': company
				})
	if error_list:
		frappe.msgprint(_(list_str_format(error_list)))
	return new_list
def upload():
	"""Import Attendance records (with in/out time child entries) from csv.

	Layout: row index 5 is the header; data starts at index 6.  A row
	with employee (col 1) and date (col 3) starts a new Attendance doc;
	a row without them only adds another time entry to the previously
	imported doc (``dict1``/``att_id`` carry state across iterations).

	NOTE(review): `import_doc`, `make_child_entry` and `getlink` are
	expected from module scope — only `check_record` is imported here.
	"""
	if not frappe.has_permission("Attendance", "create"):
		raise frappe.PermissionError
	from frappe.utils.csvutils import read_csv_content_from_uploaded_file
	from frappe.modules import scrub
	rows = read_csv_content_from_uploaded_file()
	rows = filter(lambda x: x and any(x), rows)
	#frappe.errprint(rows)
	if not rows:
		msg = [_("Please select a csv file")]
		return {"messages": msg, "error": msg}
	columns = [scrub(f) for f in rows[5]]
	#frappe.errprint(columns)
	columns[0] = "name"
	columns[3] = "att_date"
	#frappe.errprint(columns)
	ret = []
	error = False
	from frappe.utils.csvutils import check_record
	# State carried across rows: the current doc dict and its imported id.
	dict1 = {}
	att_id = ''
	worked_hours = ''
	for i, row in enumerate(rows[6:]):
		#frappe.errprint(i)
		#frappe.errprint(row)
		if row[1] and row[3]:
			# New attendance record begins on this row.
			dict1 = {
				'employee': row[1],
				'att_date': row[3],
				'status': row[4],
				'fiscal_year': row[5],
				'company': row[6],
				'naming_series': row[7],
				'employee_name': row[2]
			}
			# dict1['attendance_time_sheet'] = {'in_time':row[10],'out_time':row[11]}
			dict1['in_time'] = row[10]
			dict1['out_time'] = row[11]
			#frappe.errprint(dict1)
		if not row:
			continue
		row_idx = i + 6
		#d = frappe._dict(zip(columns, row))
		dict1["doctype"] = "Attendance"
		if dict1.get('name'):
			dict1["docstatus"] = frappe.db.get_value("Attendance", dict1.get('name'), "docstatus")
		try:
			check_record(dict1)
			if row[1] and row[3]:
				#frappe.errprint(dict1)
				att_id = import_doc(dict1, "Attendance", 1, row_idx, submit=True)
				make_child_entry(att_id, dict1, worked_hours)
				ret.append('Inserted row (#%d) %s' % (row_idx + 1,
					getlink('Attendance', att_id)))
			else:
				# Continuation row: attach times to the last imported doc.
				#frappe.errprint([worked_hours])
				make_child_entry(att_id, dict1, worked_hours)
				#frappe.errprint(d.name)
		except Exception, e:
			error = True
			ret.append('Error for row (#%d) %s : %s' % (row_idx,
				len(row) > 1 and row[1] or "", cstr(e)))
			frappe.errprint(frappe.get_traceback())
def upload():
	"""Validate an uploaded csv of in/out punches for one attendance date.

	``frappe.form_dict.args`` must carry json with 'date' (YYYY-MM-DD,
	not in the future) and 'project'.  Row index 3 is the header
	(checked against the module-level ``columns``); data rows follow
	with employee id (col 1), IN timestamp (col 2) and OUT timestamp
	(col 3).  Each valid punch pair is run through the module-level
	``result_aft_val`` and collected into the returned row dicts.
	"""
	file_name = frappe.form_dict.filename
	file_type = file_name.split('.')[-1]
	if not file_type == 'csv':
		frappe.msgprint(_("Document with extension CSV can only be uploaded"))
		return
	args = frappe.form_dict.args
	args = json.loads(args)
	if not args['date'] or not args['project']:
		frappe.msgprint(_("Please Fill the Mandatory Fields"))
		return
	if frappe.utils.data.getdate(args['date']) > frappe.utils.data.getdate():
		frappe.msgprint(_("Attendance can not be marked for future dates"))
		return
	# Round-trip through strptime both validates and normalizes the date.
	attendance_date = datetime.strptime(args['date'], "%Y-%m-%d")
	attendance_date = datetime.strftime(attendance_date, '%Y-%m-%d')
	data = read_csv_content_from_uploaded_file()
	error_list = ''
	date_mis_match = ''
	rows = []
	if data:
		for i in range(3, len(data)):
			if i == 3:
				# Header row: must match the standard template.
				if not len(data[i]) == len(columns):
					frappe.msgprint(_("Please Upload Standard Template"))
					return
				if data[i][1] != columns[1] or data[i][2] != columns[
						2] or data[i][3] != columns[3]:
					frappe.msgprint(
						"Please Do not Change Column Name in Template")
					return
			else:
				in_date = in_time = out_date = out_time = ''
				if data[i][1]:
					if data[i][2]:
						try:
							in_time_og = datetime.strptime(
								data[i][2], "%Y-%m-%d %H:%M:%S")
							in_date = datetime.strftime(in_time_og, '%Y-%m-%d')
							in_time = datetime.strftime(in_time_og, '%H:%M:%S')
						except ValueError:
							error_list += '<span class="indicator red list-group-item">Incorrect IN data format, should be YYYY-MM-DD HH:MM:SS at Row Index ' + str(
								i) + '</span>'
					# Only process rows punched in on the selected date
					# (or with no IN punch at all).
					if not in_date or in_date == attendance_date:
						if data[i][3]:
							try:
								out_time_og = datetime.strptime(
									data[i][3], "%Y-%m-%d %H:%M:%S")
								out_date = datetime.strftime(
									out_time_og, '%Y-%m-%d')
								out_time = datetime.strftime(
									out_time_og, '%H:%M:%S')
							except ValueError:
								error_list += '<span class="indicator red list-group-item">Incorrect OUT data format, should be YYYY-MM-DD HH:MM:SS at Row Index ' + str(
									i) + '</span>'
						date_diff_result = date_diff_val(
							in_date, in_time, out_date, out_time)
						if date_diff_result:
							error_list += date_diff_result
						result = result_aft_val(({
							'employee_id': data[i][1],
							'in_date': in_date,
							'in_time': in_time,
							'out_date': out_date,
							'out_time': out_time
						}), attendance_date, args['project'])
						rows.append({
							'employee_id': data[i][1],
							'employee_name': result['employee_name'],
							'division': result['task'][0]['division'] if result['task'] else None,
							'task': result['task'][0]['task'] if result['task'] else None,
							'bill_rate': result['task'][0]['bill_rate'] if result['task'] else None,
							'in_date': in_date,
							'in_time': in_time,
							'out_date': out_date,
							'out_time': out_time,
							'shift_type': result['task'][0]['shift_name'] if result['task'] else None,
							'special_shift': result['special_shift'],
							'status': result['status'],
							'violation_status': violation_str(result['violation_status']),
							'leave_type': result['leave_type'],
							'ot_hr': result['ot_hr'],
							'total_working': result['total_working'],
							'violation_class': result['violation_class']
						})
	if error_list:
		frappe.msgprint(str(violation_str(error_list)))
		return
	# NOTE(review): sorting dicts with key=None only works on Python 2;
	# on Python 3 this raises TypeError — confirm intended ordering.
	rows.sort(key=None, reverse=True)
	return rows
def upload():
	"""Validate a Mobilization csv upload (Employee or Asset category).

	``frappe.form_dict.args`` carries json whose 'mobilize_category'
	selects the template: "Employee" (9 columns) or anything else for
	Asset (7 columns).  Row index 3 must be the unchanged template
	header; data rows follow.  Validation problems are collected into
	``error_list`` and msgprint-ed (returning None); otherwise the list
	of parsed row dicts is returned.
	"""
	file_name = frappe.form_dict.filename
	file_type = file_name.split('.')[-1]
	if not file_type == 'csv':
		frappe.msgprint(_("Document with extension CSV can only be uploaded"))
		return
	args = frappe.form_dict.args
	args = json.loads(args)
	error_list = []
	data_list = []
	rows = read_csv_content_from_uploaded_file()
	columns = [
		"Employee ID", "Employee Name", "Employment Type", "Division",
		"Task ID", "Start Date", "End Date", "Shift Type", "Bill Rate"
	]
	asset_columns = [
		"Project", "Asset Id", "Asset Name", "Mobilization Start Date",
		"Mobilization End Date", "Daily Cost", "Daily Rate"
	]
	if args['mobilize_category'] == "Employee":
		for i in range(3, len(rows)):
			if i == 3:
				# Header row must match the template (9 columns).
				if any(rows[i][col] != columns[col] for col in range(9)):
					frappe.msgprint(
						"Please Do Not Change Column Name In Template")
					return
			else:
				if rows[i][5] is None:
					error_list.append(
						"<html> Start Date is Required in Row <b> " +
						str(i + 1) + "")
				if rows[i][6] is None:
					error_list.append(
						"<html> End Date is Required in Row <b> " +
						str(i + 1) + "")
				if rows[i][5] is not None:
					try:
						datetime.datetime.strptime(rows[i][5], "%Y-%m-%d")
					except ValueError:
						# Fix: message previously said "End Date" for a
						# Start Date parse failure.
						error_list.append(
							"<html>Incorrect Mobilization Start Date Format.. It Shoud Be in <b>YYYY-MM-DD</b> in row "
							+ str(i + 1) + "<html> ")
				if rows[i][6] is not None:
					try:
						datetime.datetime.strptime(rows[i][6], "%Y-%m-%d")
					except ValueError:
						error_list.append(
							"<html>Incorrect Mobilization End Date Format.. It Shoud Be in <b>YYYY-MM-DD</b> in row "
							+ str(i + 1) + "<html> ")
				# Guarded against None so a missing date (already reported
				# above) cannot make the comparison misfire.
				if rows[i][5] is not None and rows[i][6] is not None and rows[i][5] > rows[i][6]:
					error_list.append(
						"<html> <html><b>Start Date</b> should be less than <b>End Date</b> in Row <b> "
						+ str(i + 1) + "")
				if rows[i][0] is None:
					error_list.append(
						"<html> Employee ID is Required in Row <b> " +
						str(i + 1) + "")
				task = rows[i][4]
				# NOTE(review): `task is None and rows[i][4]` can never be
				# true, so Task is effectively optional — kept as-is.
				if task is None and rows[i][4]:
					error_list.append("<html> Task is Required in Row <b> " +
						str(i + 1) + "")
				shift_name = rows[i][7]
				if shift_name is None:
					error_list.append(
						"<html> Shift Type is Required in Row <b> " +
						str(i + 1) + "")
				if shift_name is not None:
					# Parameterized query (was built via str.format —
					# sql-injection prone on untrusted csv content).
					data = frappe.db.sql(
						"SELECT name FROM `tabShift Type` where name = %s",
						(rows[i][7],), as_dict=1)
					if not data:
						error_list.append(
							"<html> Existing Shift Type Does Not Matched With System Shift Type</b> For Employee <b> "
							+ str(rows[i][0]) + "</b> In <b> Row " +
							str(i + 1) + "</b>")
				employment_type = rows[i][2]
				bill_rate = rows[i][8]
				if employment_type in ('PO Rate', 'Hourly') and bill_rate is None:
					error_list.append(
						"<html> <b>Bill Rate</b> is Required For <b>Employment Type</b> PO Rate </b> And <b> Hourly</b> in Row <b> "
						+ str(i + 1) + "</b>")
				data_list.append({
					'employee': rows[i][0],
					'employee_name': rows[i][1],
					'start_date': rows[i][5],
					'end_date': rows[i][6],
					'task': task,
					'shift_name': shift_name,
					'employment_type': employment_type,
					'bill_rate': bill_rate,
					'division': rows[i][3]
				})
		if error_list:
			for n in error_list:
				frappe.msgprint(_(n))
			return
	else:
		for i in range(3, len(rows)):
			if i == 3:
				# Header row must match the asset template (7 columns).
				if any(rows[i][col] != asset_columns[col] for col in range(7)):
					frappe.msgprint(
						"Please Do Not Change Column Name In Template")
					return
			else:
				if rows[i][1] is None:
					error_list.append(
						"<html> Asset ID is Required in Row <b> " +
						str(i + 1) + "</b></html>")
				if rows[i][1] is not None:
					# Parameterized (was string-formatted) asset lookup.
					asset_equiptment_data = frappe.db.sql(
						"select is_equipment FROM tabAsset where name=%s",
						(rows[i][1],), as_dict=1)
					if asset_equiptment_data:
						is_equipment = asset_equiptment_data[0]
						if is_equipment['is_equipment'] == 0:
							error_list.append(
								"<html> Exitsing Asset Name Is Not Equiptment in Row <b> "
								+ str(i + 1) + "</b> </html>")
				asset_name = rows[i][2]
				if asset_name is None:
					error_list.append(
						"<html> Asset Name is Required in Row <b> " +
						str(i + 1) + "</b></html>")
				start_date = rows[i][3]
				if start_date is None:
					error_list.append(
						"<html> Mobilization Start Date is Required in Row <b> "
						+ str(i + 1) + "</b></html>")
				if start_date is not None:
					try:
						datetime.datetime.strptime(start_date, "%Y-%m-%d")
					except ValueError:
						error_list.append(
							"<html>Incorrect Mobilization Start Date Format.. It Shoud Be in <b>YYYY-MM-DD</b> in row "
							+ str(i + 1) + "<html> ")
				end_date = rows[i][4]
				# Fix: the original tested `start_date is None` here (a
				# copy-paste slip), so a missing End Date was never flagged.
				if end_date is None:
					error_list.append(
						"<html> Mobilization End Date is Required in Row <b> "
						+ str(i + 1) + "")
				if end_date is not None:
					try:
						datetime.datetime.strptime(end_date, "%Y-%m-%d")
					except ValueError:
						error_list.append(
							"<html>Incorrect Mobilization End Date Format.. It Shoud Be in <b>YYYY-MM-DD</b> in row "
							+ str(i + 1) + "<html> ")
				data_list.append({
					"asset_id": rows[i][1],
					"asset_name": asset_name,
					"mobilization_start_date": start_date,
					"mobilization_end_date": end_date,
					"daily_cost": rows[i][5],
					"daily_rate": rows[i][6]
				})
		if error_list:
			for n in error_list:
				frappe.msgprint(_(n))
			return
	return data_list