예제 #1
0
def export_query():
	"""Export a query report as an .xlsx download.

	Reads the report name, filters and file format from the incoming
	request (``frappe.local.form_dict``) and, for the "Excel" format,
	runs the report and stages the workbook on ``frappe.response`` as a
	binary file response.
	"""
	data = frappe._dict(frappe.local.form_dict)

	# request bookkeeping keys are not report parameters; pop() avoids
	# the KeyError the previous `del data["cmd"]` raised when absent
	data.pop("cmd", None)
	data.pop("csrf_token", None)

	# previously these names stayed unbound (UnboundLocalError below)
	# when the client omitted them or sent non-string values
	filters = {}
	if isinstance(data.get("filters"), string_types):
		filters = json.loads(data["filters"])

	report_name = data.get("report_name")
	file_format_type = data.get("file_format_type")

	# visible_idx: JSON list of row indexes currently visible in the UI
	if isinstance(data.get("visible_idx"), string_types):
		visible_idx = json.loads(data.get("visible_idx"))
	else:
		visible_idx = None

	if file_format_type == "Excel":
		data = run(report_name, filters)
		data = frappe._dict(data)
		columns = get_columns_dict(data.columns)

		from frappe.utils.xlsxutils import make_xlsx
		xlsx_data = build_xlsx_data(columns, data, visible_idx)
		xlsx_file = make_xlsx(xlsx_data, "Query Report")

		# hand the workbook back as a binary file download
		frappe.response['filename'] = report_name + '.xlsx'
		frappe.response['filecontent'] = xlsx_file.getvalue()
		frappe.response['type'] = 'binary'
예제 #2
0
File: query_report.py — Project: vrms/frappe
def export_query():
    """export from query reports"""

    data = frappe._dict(frappe.local.form_dict)

    # strip request bookkeeping keys; they are not report parameters
    del data["cmd"]
    if "csrf_token" in data:
        del data["csrf_token"]

    raw_filters = data.get("filters")
    if isinstance(raw_filters, string_types):
        filters = json.loads(raw_filters)

    raw_report_name = data.get("report_name")
    if isinstance(raw_report_name, string_types):
        report_name = raw_report_name

    raw_format = data.get("file_format_type")
    if isinstance(raw_format, string_types):
        file_format_type = raw_format

    raw_visible = data.get("visible_idx")
    visible_idx = json.loads(raw_visible) if isinstance(raw_visible, string_types) else None

    if file_format_type == "Excel":
        report = frappe._dict(run(report_name, filters))
        columns = get_columns_dict(report.columns)

        from frappe.utils.xlsxutils import make_xlsx
        workbook = make_xlsx(build_xlsx_data(columns, report, visible_idx), "Query Report")

        # stage the workbook as a binary download
        frappe.response['filename'] = report_name + '.xlsx'
        frappe.response['filecontent'] = workbook.getvalue()
        frappe.response['type'] = 'binary'
예제 #3
0
    def build_xlsx(filename, xlsx_data, header):
        """Prepend the header rows to the data, build the workbook and
        stage it on ``frappe.response`` as a binary .xlsx download."""
        from frappe.utils.xlsxutils import make_xlsx

        workbook = make_xlsx(header + xlsx_data, 'Query Report')

        frappe.response['filename'] = filename + '.xlsx'
        frappe.response['filecontent'] = workbook.getvalue()
        frappe.response['type'] = 'binary'
예제 #4
0
def export_query():
    """Export a query report as an .xlsx download.

    Expects ``frappe.local.form_dict`` to carry ``report_name``,
    ``file_format_type`` and pre-rendered ``columns``/``data`` (either as
    JSON strings or already-parsed lists).  Only the "Excel" format is
    handled by this variant.
    """

    data = frappe._dict(frappe.local.form_dict)

    # request bookkeeping keys are not report parameters
    del data["cmd"]
    if "csrf_token" in data:
        del data["csrf_token"]

    if isinstance(data.get("report_name"), string_types):
        report_name = data["report_name"]
        # raises when the user may not export the report's reference doctype
        frappe.permissions.can_export(frappe.get_cached_value(
            'Report', report_name, 'ref_doctype'),
                                      raise_exception=True)
    if isinstance(data.get("file_format_type"), string_types):
        file_format_type = data["file_format_type"]

    if file_format_type == "Excel":
        # columns/data may arrive JSON-encoded or already deserialized
        columns = json.loads(data.columns) if isinstance(
            data.columns, string_types) else data.columns
        report_data = json.loads(data.data) if isinstance(
            data.data, string_types) else data.data

        from frappe.utils.xlsxutils import make_xlsx
        # first worksheet row is the column header, then the data rows
        xlsx_data = [columns] + report_data
        xlsx_file = make_xlsx(xlsx_data, "Query Report")

        # stage the workbook as a binary file response
        frappe.response['filename'] = report_name + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'
예제 #5
0
	def get_report_content(self):
		"""Build and return the report in the configured output format.

		Returns an HTML table, raw .xlsx bytes or CSV text depending on
		``self.format``; returns ``None`` when there are no rows and
		``self.send_if_data`` is set.  Throws on an unknown format.
		"""
		report = frappe.get_doc('Report', self.report)

		# restrict Report Builder reports to recently-modified records
		if self.report_type=='Report Builder' and self.data_modified_till:
			self.filters = json.loads(self.filters) if self.filters else {}
			self.filters['modified'] = ('>', frappe.utils.now_datetime() - timedelta(hours=self.data_modified_till))

		columns, data = report.get_data(limit=self.no_of_rows or 100, user = self.user,
			filters = self.filters, as_dict=True)

		# add serial numbers
		columns.insert(0, frappe._dict(fieldname='idx', label='', width='30px'))
		for i in range(len(data)):
			data[i]['idx'] = i+1

		# no rows and send_if_data set: skip sending
		# (presumably "send only when there is data" — confirm flag semantics)
		if len(data)==0 and self.send_if_data:
			return None

		if self.format == 'HTML':
			return self.get_html_table(columns, data)

		elif self.format == 'XLSX':
			spreadsheet_data = self.get_spreadsheet_data(columns, data)
			xlsx_file = make_xlsx(spreadsheet_data, "Auto Email Report")
			return xlsx_file.getvalue()

		elif self.format == 'CSV':
			spreadsheet_data = self.get_spreadsheet_data(columns, data)
			return to_csv(spreadsheet_data)

		else:
			frappe.throw(_('Invalid Output Format'))
예제 #6
0
def send_daily_linewise_report():
    """Email yesterday's "Linewise Count" report as an .xlsx attachment
    to a fixed recipient list (addresses redacted in this snippet)."""
    # filter the report to yesterday's date
    custom_filter = {'date': add_days(today(), -1)}
    report = frappe.get_doc('Report', "Linewise Count")
    columns, data = report.get_data(limit=100,
                                    filters=custom_filter,
                                    as_dict=True)
    spreadsheet_data = get_spreadsheet_data(columns, data)
    xlsx_file = make_xlsx(spreadsheet_data, "Attendance")
    # raw workbook bytes for the email attachment
    data = xlsx_file.getvalue()
    # attachment is named after the report date
    attachments = [{
        'fname': add_days(today(), -1) + '.xlsx',
        'fcontent': data
    }]
    frappe.sendmail(
        recipients=[
            '*****@*****.**', '*****@*****.**',
            '*****@*****.**', '*****@*****.**',
            '*****@*****.**', '*****@*****.**'
        ],
        subject='Employee Attendance Report - ' +
        formatdate(add_days(today(), -1)),
        message=
        'Kindly find the attached Excel Sheet of Linewise Count Report of' +
        formatdate(add_days(today(), -1)),
        attachments=attachments)
예제 #7
0
	def get_report_content(self):
		'''Render the configured report and return it in the chosen format.'''
		report = frappe.get_doc('Report', self.report)

		if self.report_type == 'Report Builder' and self.data_modified_till:
			self.filters = json.loads(self.filters) if self.filters else {}
			cutoff = frappe.utils.now_datetime() - timedelta(hours=self.data_modified_till)
			self.filters['modified'] = ('>', cutoff)

		columns, data = report.get_data(limit=self.no_of_rows or 100, user=self.user,
			filters=self.filters, as_dict=True)

		# prepend a serial-number column and stamp each row
		columns.insert(0, frappe._dict(fieldname='idx', label='', width='30px'))
		for serial, row in enumerate(data, start=1):
			row['idx'] = serial

		if not data and self.send_if_data:
			return None

		if self.format == 'HTML':
			return self.get_html_table(columns, data)
		elif self.format == 'XLSX':
			return make_xlsx(self.get_spreadsheet_data(columns, data), "Auto Email Report").getvalue()
		elif self.format == 'CSV':
			return to_csv(self.get_spreadsheet_data(columns, data))
		else:
			frappe.throw(_('Invalid Output Format'))
예제 #8
0
def export_query():
	"""Export the current report-builder view as CSV or Excel.

	Pulls the query parameters from the request, checks export
	permission on the doctype, runs the database query and stages the
	result on ``frappe.response`` in the requested file format.
	"""
	form_params = get_form_params()
	form_params["limit_page_length"] = None
	form_params["as_list"] = True
	doctype = form_params.doctype
	add_totals_row = None
	file_format_type = form_params["file_format_type"]

	# these keys are not valid DatabaseQuery.execute() kwargs
	del form_params["doctype"]
	del form_params["file_format_type"]

	if 'add_totals_row' in form_params and form_params['add_totals_row']=='1':
		add_totals_row = 1
		del form_params["add_totals_row"]

	# raises when the user lacks export permission on this doctype
	frappe.permissions.can_export(doctype, raise_exception=True)

	# restrict to explicitly selected rows, if any were sent
	if 'selected_items' in form_params:
		si = json.loads(frappe.form_dict.get('selected_items'))
		form_params["filters"] = {"name": ("in", si)}
		del form_params["selected_items"]

	db_query = DatabaseQuery(doctype)
	ret = db_query.execute(**form_params)

	if add_totals_row:
		ret = append_totals_row(ret)

	# first row: serial-number heading plus the column labels
	data = [['Sr'] + get_labels(db_query.fields, doctype)]
	for i, row in enumerate(ret):
		data.append([i+1] + list(row))

	if file_format_type == "CSV":

		# convert to csv
		import csv
		from frappe.utils.xlsxutils import handle_html

		f = StringIO()
		writer = csv.writer(f)
		for r in data:
			# encode only unicode type strings and not int, floats etc.
			writer.writerow([handle_html(frappe.as_unicode(v)).encode('utf-8') \
				if isinstance(v, string_types) else v for v in r])

		f.seek(0)
		# NOTE(review): text_type(bytes, 'utf-8') is a Python 2 pattern;
		# on Python 3 this call would raise — confirm target interpreter
		frappe.response['result'] = text_type(f.read(), 'utf-8')
		frappe.response['type'] = 'csv'
		frappe.response['doctype'] = doctype

	elif file_format_type == "Excel":

		from frappe.utils.xlsxutils import make_xlsx
		xlsx_file = make_xlsx(data, doctype)

		# stage the workbook as a binary download
		frappe.response['filename'] = doctype + '.xlsx'
		frappe.response['filecontent'] = xlsx_file.getvalue()
		frappe.response['type'] = 'binary'
예제 #9
0
def export_query():
    """Export the current report-builder view as CSV or Excel.

    Same flow as the tab-indented variant: read query params, check
    export permission, run the DatabaseQuery and stage the result on
    ``frappe.response`` in the requested format.
    """
    form_params = get_form_params()
    form_params["limit_page_length"] = None
    form_params["as_list"] = True
    doctype = form_params.doctype
    add_totals_row = None
    file_format_type = form_params["file_format_type"]

    # these keys are not valid DatabaseQuery.execute() kwargs
    del form_params["doctype"]
    del form_params["file_format_type"]

    if 'add_totals_row' in form_params and form_params['add_totals_row'] == '1':
        add_totals_row = 1
        del form_params["add_totals_row"]

    # raises when the user lacks export permission on this doctype
    frappe.permissions.can_export(doctype, raise_exception=True)

    # restrict to explicitly selected rows, if any were sent
    if 'selected_items' in form_params:
        si = json.loads(frappe.form_dict.get('selected_items'))
        form_params["filters"] = {"name": ("in", si)}
        del form_params["selected_items"]

    db_query = DatabaseQuery(doctype)
    ret = db_query.execute(**form_params)

    if add_totals_row:
        ret = append_totals_row(ret)

    # first row: serial-number heading plus the column labels
    data = [['Sr'] + get_labels(db_query.fields, doctype)]
    for i, row in enumerate(ret):
        data.append([i + 1] + list(row))

    if file_format_type == "CSV":

        # convert to csv
        import csv
        from frappe.utils.xlsxutils import handle_html

        f = StringIO()
        writer = csv.writer(f)
        for r in data:
            # encode only unicode type strings and not int, floats etc.
            writer.writerow([handle_html(frappe.as_unicode(v)).encode('utf-8') \
             if isinstance(v, string_types) else v for v in r])

        f.seek(0)
        # NOTE(review): text_type(bytes, 'utf-8') is a Python 2 pattern;
        # on Python 3 this call would raise — confirm target interpreter
        frappe.response['result'] = text_type(f.read(), 'utf-8')
        frappe.response['type'] = 'csv'
        frappe.response['doctype'] = doctype

    elif file_format_type == "Excel":

        from frappe.utils.xlsxutils import make_xlsx
        xlsx_file = make_xlsx(data, doctype)

        # stage the workbook as a binary download
        frappe.response['filename'] = doctype + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'
예제 #10
0
def export_query():
    """Export a query report as an .xlsx download.

    Builds the worksheet from the report output, keeping only rows whose
    0-based index appears in ``visible_idx`` (all rows when the client
    did not send it), and stages the workbook on ``frappe.response``.
    """

    data = frappe._dict(frappe.local.form_dict)

    del data["cmd"]
    if "csrf_token" in data:
        del data["csrf_token"]

    if isinstance(data.get("filters"), string_types):
        filters = json.loads(data["filters"])
    if isinstance(data.get("report_name"), string_types):
        report_name = data["report_name"]
    if isinstance(data.get("file_format_type"), string_types):
        file_format_type = data["file_format_type"]
    if isinstance(data.get("visible_idx"), string_types):
        visible_idx = json.loads(data.get("visible_idx"))
    else:
        visible_idx = None

    if file_format_type == "Excel":

        data = run(report_name, filters)
        data = frappe._dict(data)
        columns = get_columns_dict(data.columns)

        # header row with the column labels
        result = [[columns[idx]["label"] for idx in range(len(data.columns))]]

        def _is_visible(i):
            # no visible_idx from the client means "export everything";
            # previously `i in None` raised TypeError
            return visible_idx is None or i in visible_idx

        # build table from dict rows (guard against an empty result set,
        # which previously raised IndexError on data.result[0])
        if data.result and isinstance(data.result[0], dict):
            for i, row in enumerate(data.result):
                # only rows which are visible in the report
                if row and _is_visible(i):
                    row_list = []
                    for idx in range(len(data.columns)):
                        row_list.append(
                            row.get(columns[idx]["fieldname"],
                                    row.get(columns[idx]["label"], "")))
                    result.append(row_list)
                elif not row:
                    result.append([])
        else:
            result += [d for i, d in enumerate(data.result) if _is_visible(i)]

        from frappe.utils.xlsxutils import make_xlsx
        xlsx_file = make_xlsx(result, "Query Report")

        frappe.response['filename'] = report_name + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'
예제 #11
0
File: api.py — Project: neel2292/erpx_hrm
def download_expense_claim_report():
    """Return an .xlsx workbook listing all approved Expense Claim names."""
    rows = frappe.desk.reportview.execute(
        "Expense Claim",
        filters=[["approval_status", "=", "Approved"]],
        fields=["name"],
        limit_start=0,
        limit_page_length=0,
        order_by="name",
        as_list=True,
    )
    return make_xlsx(rows, "demo_sheet")
예제 #12
0
def export_query():
    """Export a query report as an .xlsx download (Python 2 variant;
    note the ``basestring`` checks).

    Rows are optionally filtered afterwards by the slickgrid inline
    filter indexes in ``visible_idx``.
    """

    data = frappe._dict(frappe.local.form_dict)

    # request bookkeeping key; not a report parameter
    del data["cmd"]

    if isinstance(data.get("filters"), basestring):
        filters = json.loads(data["filters"])
    if isinstance(data.get("report_name"), basestring):
        report_name = data["report_name"]
    if isinstance(data.get("file_format_type"), basestring):
        file_format_type = data["file_format_type"]
    if isinstance(data.get("visible_idx"), basestring):
        visible_idx = json.loads(data.get("visible_idx"))
    else:
        visible_idx = None

    if file_format_type == "Excel":

        data = run(report_name, filters)
        data = frappe._dict(data)
        columns = get_columns_dict(data.columns)

        result = [[]]

        # add column headings
        for idx in range(len(data.columns)):
            result[0].append(columns[idx]["label"])

        # build table from dict
        if isinstance(data.result[0], dict):
            for row in data.result:
                if row:
                    row_list = []
                    for idx in range(len(data.columns)):
                        row_list.append(row.get(columns[idx]["fieldname"], ""))
                    result.append(row_list)
        else:
            result = result + data.result

        # filter rows by slickgrid's inline filter
        # (idx == 0 keeps the header row; indexes here are post-header)
        if visible_idx:
            result = [
                x for idx, x in enumerate(result)
                if idx == 0 or idx in visible_idx
            ]

        from frappe.utils.xlsxutils import make_xlsx
        xlsx_file = make_xlsx(result, "Query Report")

        # stage the workbook as a binary download
        frappe.response['filename'] = report_name + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'
예제 #13
0
    def get_report_content(self):
        """Return an empty "Project Report" workbook as raw .xlsx bytes."""
        from frappe.utils.xlsxutils import make_xlsx

        # no columns / no rows: produce the sheet skeleton only
        spreadsheet_data = self.get_spreadsheet_data([], [])
        return make_xlsx(spreadsheet_data, "Project Report").getvalue()
예제 #14
0
def export_query():
	"""Export a query report as an .xlsx download.

	Only rows whose 1-based index appears in ``visible_idx`` are kept;
	when the client does not send ``visible_idx`` every row is included
	(previously ``i+1 in None`` raised TypeError).
	"""

	data = frappe._dict(frappe.local.form_dict)

	del data["cmd"]
	if "csrf_token" in data:
		del data["csrf_token"]

	if isinstance(data.get("filters"), string_types):
		filters = json.loads(data["filters"])
	if isinstance(data.get("report_name"), string_types):
		report_name = data["report_name"]
	if isinstance(data.get("file_format_type"), string_types):
		file_format_type = data["file_format_type"]
	if isinstance(data.get("visible_idx"), string_types):
		visible_idx = json.loads(data.get("visible_idx"))
	else:
		visible_idx = None

	if file_format_type == "Excel":

		data = run(report_name, filters)
		data = frappe._dict(data)
		columns = get_columns_dict(data.columns)

		result = [[]]

		# add column headings
		for idx in range(len(data.columns)):
			result[0].append(columns[idx]["label"])

		def _visible(i):
			# None means the client sent no row filter: export everything
			return visible_idx is None or i + 1 in visible_idx

		# build table from dict rows (guard empty result set: previously
		# data.result[0] raised IndexError)
		if data.result and isinstance(data.result[0], dict):
			for i, row in enumerate(data.result):
				# only rows which are visible in the report
				if row and _visible(i):
					row_list = []
					for idx in range(len(data.columns)):
						row_list.append(row.get(columns[idx]["fieldname"], ""))
					result.append(row_list)
				elif not row:
					result.append([])
		else:
			result = result + [d for i, d in enumerate(data.result) if _visible(i)]

		from frappe.utils.xlsxutils import make_xlsx
		xlsx_file = make_xlsx(result, "Query Report")

		frappe.response['filename'] = report_name + '.xlsx'
		frappe.response['filecontent'] = xlsx_file.getvalue()
		frappe.response['type'] = 'binary'
예제 #15
0
def export_query():
	"""Export a query report as an .xlsx download.

	Checks export permission on the report's reference doctype, runs the
	report (optionally with custom columns), normalises Duration values
	and stages the workbook on ``frappe.response`` as a binary file.
	"""

	data = frappe._dict(frappe.local.form_dict)

	del data["cmd"]
	if "csrf_token" in data:
		del data["csrf_token"]

	# previously `filters` could stay unbound when the client sent none
	filters = {}
	if isinstance(data.get("filters"), string_types):
		filters = json.loads(data["filters"])
	if isinstance(data.get("report_name"), string_types):
		report_name = data["report_name"]
		# raises when the user may not export the underlying doctype
		frappe.permissions.can_export(
			frappe.get_cached_value("Report", report_name, "ref_doctype"),
			raise_exception=True,
		)
	if isinstance(data.get("file_format_type"), string_types):
		file_format_type = data["file_format_type"]

	# optional request keys: previously data["custom_columns"] and
	# data["include_indentation"] raised KeyError when omitted
	custom_columns = frappe.parse_json(data.get("custom_columns", "[]"))
	include_indentation = data.get("include_indentation")

	if isinstance(data.get("visible_idx"), string_types):
		visible_idx = json.loads(data.get("visible_idx"))
	else:
		visible_idx = None

	if file_format_type == "Excel":
		data = run(report_name, filters, custom_columns=custom_columns)
		data = frappe._dict(data)
		if not data.columns:
			frappe.respond_as_web_page(
				_("No data to export"),
				_("You can try changing the filters of your report."),
			)
			return

		columns = get_columns_dict(data.columns)

		from frappe.utils.xlsxutils import make_xlsx

		# render Duration-fieldtype values into display strings
		data["result"] = handle_duration_fieldtype_values(
			data.get("result"), data.get("columns")
		)
		xlsx_data = build_xlsx_data(columns, data, visible_idx, include_indentation)
		xlsx_file = make_xlsx(xlsx_data, "Query Report")

		frappe.response["filename"] = report_name + ".xlsx"
		frappe.response["filecontent"] = xlsx_file.getvalue()
		frappe.response["type"] = "binary"
예제 #16
0
	def build_response_as_excel(self):
		"""Convert the accumulated CSV buffer to .xlsx and stage it as a
		binary response.

		The CSV text is round-tripped through a temporary file so that
		``csv.reader`` can re-parse it row by row for ``make_xlsx``.
		"""
		filename = frappe.generate_hash("", 10)
		with open(filename, 'wb') as f:
			f.write(cstr(self.writer.getvalue()).encode('utf-8'))

		from frappe.utils.xlsxutils import make_xlsx

		# re-open for reading inside a context manager and always remove
		# the temp file — previously the handle and file leaked when
		# make_xlsx raised
		try:
			with open(filename) as f:
				reader = csv.reader(f)
				xlsx_file = make_xlsx(reader, "Data Import Template" if self.template else 'Data Export')
		finally:
			os.remove(filename)

		# write out response as a xlsx type
		frappe.response['filename'] = self.doctype + '.xlsx'
		frappe.response['filecontent'] = xlsx_file.getvalue()
		frappe.response['type'] = 'binary'
예제 #17
0
File: exporter.py — Project: ESS-LLP/frappe
	def build_response_as_excel(self):
		"""Convert the accumulated CSV buffer to .xlsx and stage it as a
		binary response, round-tripping through a temp file so
		``csv.reader`` can re-parse the rows for ``make_xlsx``."""
		# random scratch filename in the working directory
		filename = frappe.generate_hash("", 10)
		with open(filename, 'wb') as f:
			f.write(cstr(self.writer.getvalue()).encode('utf-8'))
		# NOTE(review): re-opened without a context manager; if
		# make_xlsx raises, the handle and temp file leak — consider
		# try/finally
		f = open(filename)
		reader = csv.reader(f)

		from frappe.utils.xlsxutils import make_xlsx
		xlsx_file = make_xlsx(reader, "Data Import Template" if self.template else 'Data Export')

		f.close()
		os.remove(filename)

		# write out response as a xlsx type
		frappe.response['filename'] = self.doctype + '.xlsx'
		frappe.response['filecontent'] = xlsx_file.getvalue()
		frappe.response['type'] = 'binary'
def build_response_as_excel(writer):
    """Convert *writer*'s CSV buffer into the Chart of Accounts importer
    template (.xlsx) and stage it on ``frappe.response``.

    writer -- an object whose getvalue() returns the CSV text
    """
    filename = frappe.generate_hash("", 10)
    with open(filename, 'wb') as f:
        f.write(cstr(writer.getvalue()).encode('utf-8'))

    from frappe.utils.xlsxutils import make_xlsx

    # re-open for reading inside a context manager and always remove the
    # temp file — previously the handle and file leaked if make_xlsx raised
    try:
        with open(filename) as f:
            reader = csv.reader(f)
            xlsx_file = make_xlsx(reader, "Chart of Accounts Importer Template")
    finally:
        os.remove(filename)

    # write out response as a xlsx type
    frappe.response['filename'] = 'coa_importer_template.xlsx'
    frappe.response['filecontent'] = xlsx_file.getvalue()
    frappe.response['type'] = 'binary'
def download(warehouse, posting_date, posting_time):
	"""Export the batch-wise item stock of *warehouse* as of the given
	posting date/time as an .xlsx download named after the warehouse."""
	# fixed header row for the export (French column titles)
	columns = [
		"Code item",
		"Lot",
		"Quantite",
		"Taux",
		"Essence",
		"Construction",
		"Grade",
		"Largeur",
		"Epaisseur",
		"Qte/bte",
		"Ref client",
		"Moul.",
		"Source",
		"Info"
	]

	data = get_items_with_batch_no(warehouse, posting_date, posting_time, as_list = 1)
	# header becomes the first worksheet row
	data.insert(0, columns)

	if print_debug: frappe.logger().debug("data : " + cstr(data))

	xlsx_file = make_xlsx(data, warehouse)

	if not xlsx_file:
		frappe.msgprint(_('No Data'))
		return

	frappe.local.response.filecontent = xlsx_file.getvalue()
	frappe.local.response.type = 'binary'
	frappe.local.response.filename = get_file_name(warehouse)
예제 #20
0
def export_query():
    """Export a query report as an .xlsx download.

    Robust variant: optional request keys are read with ``.get`` /
    ``.pop`` defaults, export permission is checked against the report's
    reference doctype, and column widths are carried into the workbook.
    """
    data = frappe._dict(frappe.local.form_dict)
    # pop() with a default: missing bookkeeping keys are not an error
    data.pop("cmd", None)
    data.pop("csrf_token", None)

    if isinstance(data.get("filters"), string_types):
        filters = json.loads(data["filters"])

    if data.get("report_name"):
        report_name = data["report_name"]
        # raises when the user may not export the underlying doctype
        frappe.permissions.can_export(frappe.get_cached_value(
            'Report', report_name, 'ref_doctype'),
                                      raise_exception=True)

    file_format_type = data.get("file_format_type")
    # optional keys default sensibly instead of raising KeyError
    custom_columns = frappe.parse_json(data.get("custom_columns", "[]"))
    include_indentation = data.get("include_indentation")
    visible_idx = data.get("visible_idx")

    if isinstance(visible_idx, string_types):
        visible_idx = json.loads(visible_idx)

    if file_format_type == "Excel":
        data = run(report_name, filters, custom_columns=custom_columns)
        data = frappe._dict(data)
        if not data.columns:
            # nothing to write: show a friendly page instead of a file
            frappe.respond_as_web_page(
                _("No data to export"),
                _("You can try changing the filters of your report."))
            return

        columns = get_columns_dict(data.columns)

        from frappe.utils.xlsxutils import make_xlsx
        xlsx_data, column_widths = build_xlsx_data(columns, data, visible_idx,
                                                   include_indentation)
        xlsx_file = make_xlsx(xlsx_data,
                              "Query Report",
                              column_widths=column_widths)

        # stage the workbook as a binary download
        frappe.response['filename'] = report_name + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'
예제 #21
0
    def build_response_as_excel(self):
        """Convert the buffered CSV into an .xlsx binary response."""
        filename = frappe.generate_hash("", 10)
        with open(filename, "wb") as out:
            out.write(cstr(self.writer.getvalue()).encode("utf-8"))
        f = open(filename)
        reader = csv.reader(f)

        from frappe.utils.xlsxutils import make_xlsx

        sheet_name = "Data Import Template" if self.template else "Data Export"
        xlsx_file = make_xlsx(reader, sheet_name)

        f.close()
        os.remove(filename)

        # stage the workbook as a binary download
        frappe.response["filename"] = self.doctype + ".xlsx"
        frappe.response["filecontent"] = xlsx_file.getvalue()
        frappe.response["type"] = "binary"
def export_my_query(filters, ws=None, wb=None):
    """Run the "Employee Yearly Summary" report with *filters* and stage
    the result on ``frappe.response`` as an .xlsx download.

    ws -- worksheet title (defaults to "Query Report")
    wb -- optional workbook object passed through to make_xlsx
    """
    data = frappe._dict(frappe.local.form_dict)
    # bookkeeping keys; pop() avoids KeyError when "cmd" is absent
    data.pop("cmd", None)
    data.pop("csrf_token", None)

    # previously report_name could stay unbound and crash when building
    # the download filename below
    report_name = data.get("report_name") or "Employee Yearly Summary"

    data = run("Employee Yearly Summary", filters)
    data = frappe._dict(data)
    columns = get_columns_dict(data.columns)

    # first worksheet row: the column labels
    result = [[columns[idx]["label"] for idx in range(len(data.columns))]]

    # dict rows are mapped through the column order; list rows pass through
    if data.result and isinstance(data.result[0], dict):
        for row in data.result:
            if row:
                result.append([
                    row.get(columns[idx]["fieldname"], "")
                    for idx in range(len(data.columns))
                ])
            else:
                result.append([])
    else:
        result = result + list(data.result)

    from frappe.utils.xlsxutils import make_xlsx
    if ws is None:
        ws = "Query Report"
    xlsx_file = make_xlsx(result, ws, wb)

    frappe.response['filename'] = report_name + '.xlsx'
    frappe.response['filecontent'] = xlsx_file.getvalue()
    frappe.response['type'] = 'binary'
예제 #23
0
def export_query():
    """Export a query report as .xlsx, scoped to the logged-in customer.

    The current portal party is injected into the report filters as
    ``customer`` so users can only export their own records.
    """
    data = frappe._dict(frappe.local.form_dict)

    del data["cmd"]
    if "csrf_token" in data:
        del data["csrf_token"]

    # previously `filters` stayed unbound (NameError at the customer
    # injection below) when the client sent no filters
    filters = {}
    if isinstance(data.get("filters"), string_types):
        filters = json.loads(data["filters"])
    if isinstance(data.get("report_name"), string_types):
        report_name = data["report_name"]
        # raises when the user may not export the underlying doctype
        frappe.permissions.can_export(frappe.get_cached_value(
            'Report', report_name, 'ref_doctype'),
                                      raise_exception=True)
    if isinstance(data.get("file_format_type"), string_types):
        file_format_type = data["file_format_type"]

    if isinstance(data.get("visible_idx"), string_types):
        visible_idx = json.loads(data.get("visible_idx"))
    else:
        visible_idx = None

    # add filter this customer
    party = get_party()
    filters["customer"] = party.name or ""

    if file_format_type == "Excel":
        data = run(report_name, filters)
        data = frappe._dict(data)
        columns = get_columns_dict(data.columns)

        from frappe.utils.xlsxutils import make_xlsx
        xlsx_data = build_xlsx_data(columns, data)

        xlsx_file = make_xlsx(xlsx_data, "Query Report")

        frappe.response['filename'] = report_name + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'
예제 #24
0
def send_wage_report():
    """Email yesterday's "Wage Monitor Report" as an HTML table in the
    body plus an attached .xlsx sheet (recipients redacted)."""
    yesterday = add_days(today(), -1)
    report = frappe.get_doc('Report', "Wage Monitor Report")
    columns, data = report.get_data(
        limit=100, filters={'date': yesterday}, as_dict=True)
    html = frappe.render_template(
        'frappe/templates/include/print_table.html', {'columns': columns, 'data': data})
    spreadsheet_data = get_spreadsheet_data(columns, data)
    workbook = make_xlsx(spreadsheet_data, "Minda Custom")
    attachments = [{
        'fname': yesterday + '.xlsx',
        'fcontent': workbook.getvalue()
    }]
    frappe.sendmail(
        recipients=['*****@*****.**', '*****@*****.**'],
        subject='Wage Monitor Report - ' + formatdate(yesterday),
        message='Kindly find the attached Excel Sheet of Wage Monitor Report of ' + formatdate(yesterday) + html,
        attachments=attachments
    )
예제 #25
0
def get_template(doctype=None,
                 parent_doctype=None,
                 all_doctypes="No",
                 with_data="No",
                 select_columns=None,
                 from_data_import="No",
                 excel_format="No"):
    """Build a Data Import template for *doctype*.

    Writes instructional header rows, one column block per doctype (the
    parent plus every child table when ``all_doctypes == "Yes"``) and,
    when ``with_data == "Yes"``, the existing permitted records.  The
    result is placed on ``frappe.response`` as CSV, or as binary XLSX
    when both ``from_data_import`` and ``excel_format`` are "Yes".

    ``doctype`` may be a plain name or a list ``[name, docs_to_export]``
    where ``docs_to_export`` filters which records are exported.
    """
    all_doctypes = all_doctypes == "Yes"
    if select_columns:
        select_columns = json.loads(select_columns)
    docs_to_export = {}
    if doctype:
        # Py3 fix: `basestring` does not exist on Python 3; use the
        # `string_types` alias used everywhere else in this module.
        if isinstance(doctype, string_types):
            doctype = [doctype]
        if len(doctype) > 1:
            docs_to_export = doctype[1]
        doctype = doctype[0]

    if not parent_doctype:
        parent_doctype = doctype

    # (doctype, parentfield) -> _dict(start, end): column span of each table
    column_start_end = {}

    if all_doctypes:
        child_doctypes = []
        for df in frappe.get_meta(doctype).get_table_fields():
            child_doctypes.append(
                dict(doctype=df.options, parentfield=df.fieldname))

    def get_data_keys_definition():
        return get_data_keys()

    def add_main_header():
        # Fixed instructional rows at the very top of the template.
        w.writerow([_('Data Import Template')])
        w.writerow([get_data_keys_definition().main_table, doctype])

        if parent_doctype != doctype:
            w.writerow(
                [get_data_keys_definition().parent_table, parent_doctype])
        else:
            w.writerow([''])

        w.writerow([''])
        w.writerow([_('Notes:')])
        w.writerow([_('Please do not change the template headings.')])
        w.writerow([_('First data column must be blank.')])
        w.writerow([
            _('If you are uploading new records, leave the "name" (ID) column blank.'
              )
        ])
        w.writerow([
            _('If you are uploading new records, "Naming Series" becomes mandatory, if present.'
              )
        ])
        w.writerow([
            _('Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.'
              )
        ])
        w.writerow([_('For updating, you can update only selective columns.')])
        w.writerow([
            _('You can only upload upto 5000 records in one go. (may be less in some cases)'
              )
        ])
        if key == "parent":
            w.writerow([
                _('"Parent" signifies the parent table in which this row must be added'
                  )
            ])
            w.writerow([
                _('If you are updating, please select "Overwrite" else existing rows will not be deleted.'
                  )
            ])

    def build_field_columns(dt, parentfield=None):
        meta = frappe.get_meta(dt)

        # build list of valid docfields
        tablecolumns = []
        for f in frappe.db.sql('desc `tab%s`' % dt):
            field = meta.get_field(f[0])
            if field and ((select_columns and f[0] in select_columns[dt])
                          or not select_columns):
                tablecolumns.append(field)

        # Py3 fix: list.sort() no longer accepts a cmp function; sort by
        # the docfield's idx with a key function instead.
        tablecolumns.sort(key=lambda docfield: docfield.idx)

        if dt == doctype:
            _column_start_end = frappe._dict(start=0)
        else:
            _column_start_end = frappe._dict(start=len(columns))

            # child tables get an explicit ID column first
            append_field_column(
                frappe._dict({
                    "fieldname": "name",
                    "parent": dt,
                    "label": "ID",
                    "fieldtype": "Data",
                    "reqd": 1,
                    "idx": 0,
                    "info": _("Leave blank for new records")
                }), True)

        # mandatory fields first ...
        for docfield in tablecolumns:
            append_field_column(docfield, True)

        # ... then all non mandatory fields
        for docfield in tablecolumns:
            append_field_column(docfield, False)

        # if there is one column, add a blank column (?)
        if len(columns) - _column_start_end.start == 1:
            append_empty_field_column()

        # append DocType name
        tablerow[_column_start_end.start + 1] = dt

        if parentfield:
            tablerow[_column_start_end.start + 2] = parentfield

        _column_start_end.end = len(columns) + 1

        column_start_end[(dt, parentfield)] = _column_start_end

    def append_field_column(docfield, for_mandatory):
        # Append one docfield to every header row.  Mandatory and
        # non-mandatory fields are added in two separate passes.
        if not docfield:
            return
        if for_mandatory and not docfield.reqd:
            return
        if not for_mandatory and docfield.reqd:
            return
        if docfield.fieldname in ('parenttype', 'trash_reason'):
            return
        if docfield.hidden:
            return
        if select_columns and docfield.fieldname not in select_columns.get(
                docfield.parent, []):
            return

        tablerow.append("")
        fieldrow.append(docfield.fieldname)
        labelrow.append(_(docfield.label))
        mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
        typerow.append(docfield.fieldtype)
        inforow.append(getinforow(docfield))
        columns.append(docfield.fieldname)

    def append_empty_field_column():
        # "~" marks a separator column between doctype blocks
        tablerow.append("~")
        fieldrow.append("~")
        labelrow.append("")
        mandatoryrow.append("")
        typerow.append("")
        inforow.append("")
        columns.append("")

    def getinforow(docfield):
        """make info comment for options, links etc."""
        if docfield.fieldtype == 'Select':
            if not docfield.options:
                return ''
            else:
                return _("One of") + ': %s' % ', '.join(
                    filter(None, docfield.options.split('\n')))
        elif docfield.fieldtype == 'Link':
            return 'Valid %s' % docfield.options
        elif docfield.fieldtype == 'Int':
            return 'Integer'
        elif docfield.fieldtype == "Check":
            return "0 or 1"
        elif hasattr(docfield, "info"):
            return docfield.info
        else:
            return ''

    def add_field_headings():
        w.writerow(tablerow)
        w.writerow(labelrow)
        w.writerow(fieldrow)
        w.writerow(mandatoryrow)
        w.writerow(typerow)
        w.writerow(inforow)
        w.writerow([get_data_keys_definition().data_separator])

    def add_data():
        def add_data_row(row_group, dt, parentfield, doc, rowidx):
            # Place one document's values into its column span on the
            # (possibly multi-row) row group.
            d = doc.copy()
            meta = frappe.get_meta(dt)
            if all_doctypes:
                d.name = '"' + d.name + '"'

            if len(row_group) < rowidx + 1:
                row_group.append([""] * (len(columns) + 1))
            row = row_group[rowidx]

            _column_start_end = column_start_end.get((dt, parentfield))

            if _column_start_end:
                for i, c in enumerate(columns[_column_start_end.
                                              start:_column_start_end.end]):
                    df = meta.get_field(c)
                    fieldtype = df.fieldtype if df else "Data"
                    value = d.get(c, "")
                    if value:
                        if fieldtype == "Date":
                            value = formatdate(value)
                        elif fieldtype == "Datetime":
                            value = format_datetime(value)

                    row[_column_start_end.start + i + 1] = value

        if with_data == 'Yes':
            frappe.permissions.can_export(parent_doctype, raise_exception=True)

            # sort nested set doctypes by `lft asc`
            order_by = None
            table_columns = frappe.db.get_table_columns(parent_doctype)
            if 'lft' in table_columns and 'rgt' in table_columns:
                order_by = '`tab{doctype}`.`lft` asc'.format(
                    doctype=parent_doctype)

            # get permitted data only
            data = frappe.get_list(doctype,
                                   fields=["*"],
                                   limit_page_length=None,
                                   order_by=order_by)

            for doc in data:
                op = docs_to_export.get("op")
                names = docs_to_export.get("name")

                if names and op:
                    if op == '=' and doc.name not in names:
                        continue
                    elif op == '!=' and doc.name in names:
                        continue
                elif names:
                    try:
                        # `names` may be a regex; combine the requested
                        # flag letters via the module-level `reflags` map.
                        sflags = docs_to_export.get("flags", "I,U").upper()
                        flags = 0
                        # raw string: '\W' is an invalid escape otherwise
                        for a in re.split(r'\W+', sflags):
                            flags = flags | reflags.get(a, 0)

                        c = re.compile(names, flags)
                        m = c.match(doc.name)
                        if not m:
                            continue
                    except:
                        if doc.name not in names:
                            continue
                # add main table
                row_group = []

                add_data_row(row_group, doctype, None, doc, 0)

                if all_doctypes:
                    # add child tables
                    for c in child_doctypes:
                        for ci, child in enumerate(
                                frappe.db.sql("""select * from `tab{0}`
							where parent=%s and parentfield=%s order by idx""".format(c['doctype']),
                                              (doc.name, c['parentfield']),
                                              as_dict=1)):
                            add_data_row(row_group, c['doctype'],
                                         c['parentfield'], child, ci)

                for row in row_group:
                    w.writerow(row)

    w = UnicodeWriter()
    key = 'parent' if parent_doctype != doctype else 'name'

    add_main_header()

    w.writerow([''])
    tablerow = [get_data_keys_definition().doctype, ""]
    labelrow = [_("Column Labels:"), "ID"]
    fieldrow = [get_data_keys_definition().columns, key]
    mandatoryrow = [_("Mandatory:"), _("Yes")]
    typerow = [_('Type:'), 'Data (text)']
    inforow = [_('Info:'), '']
    columns = [key]

    build_field_columns(doctype)

    if all_doctypes:
        for d in child_doctypes:
            append_empty_field_column()
            if (select_columns and select_columns.get(
                    d['doctype'], None)) or not select_columns:
                # if atleast one column is selected for this doctype
                build_field_columns(d['doctype'], d['parentfield'])

    add_field_headings()
    add_data()

    if from_data_import == "Yes" and excel_format == "Yes":
        # round-trip through a temp CSV file so make_xlsx can consume a
        # csv.reader; context managers + finally ensure the handle is
        # closed and the file removed even if make_xlsx raises
        filename = frappe.generate_hash("", 10)
        with open(filename, 'wb') as f:
            f.write(cstr(w.getvalue()).encode("utf-8"))
        try:
            with open(filename) as f:
                reader = csv.reader(f)

                from frappe.utils.xlsxutils import make_xlsx
                xlsx_file = make_xlsx(reader, "Data Import Template")
        finally:
            os.remove(filename)

        # write out response as a xlsx type
        frappe.response['filename'] = doctype + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'

    else:
        # write out response as a type csv
        frappe.response['result'] = cstr(w.getvalue())
        frappe.response['type'] = 'csv'
        frappe.response['doctype'] = doctype
예제 #26
0
    def get_report_content(self):
        '''Render the configured report in the requested output format.

        Returns an HTML table, XLSX bytes or CSV text depending on
        ``self.format``, or None when there is no data and the
        "send only if data" flag is set.
        '''
        report_doc = frappe.get_doc('Report', self.report)

        self.filters = frappe.parse_json(self.filters) if self.filters else {}

        is_report_builder = self.report_type == 'Report Builder'
        if is_report_builder and self.data_modified_till:
            # restrict to rows modified within the configured window
            self.filters['modified'] = (
                '>', now_datetime() - timedelta(hours=self.data_modified_till))
        if not is_report_builder and self.dynamic_date_filters_set():
            self.prepare_dynamic_filters()

        columns, data = report_doc.get_data(
            limit=self.no_of_rows or 100,
            user=self.user,
            filters=self.filters,
            as_dict=True,
            ignore_prepared_report=True)

        # prepend a serial-number column and number every row
        columns.insert(0,
                       frappe._dict(fieldname='idx', label='', width='30px'))
        for serial, row in enumerate(data, start=1):
            row['idx'] = serial

        if not data and self.send_if_data:
            return None

        if self.format == 'HTML':
            columns, data = make_links(columns, data)
            columns = update_field_types(columns)
            return self.get_html_table(columns, data)

        if self.format in ('XLSX', 'CSV'):
            # both spreadsheet formats share the same flattening step
            report_data = frappe._dict(columns=columns, result=data)
            xlsx_data, column_widths = build_xlsx_data(
                columns, report_data, [], 1, ignore_visible_idx=True)

            if self.format == 'CSV':
                return to_csv(xlsx_data)

            workbook = make_xlsx(xlsx_data,
                                 "Auto Email Report",
                                 column_widths=column_widths)
            return workbook.getvalue()

        frappe.throw(_('Invalid Output Format'))
예제 #27
0
    def get_report_content(self):
        """Produce the report output in ``self.format`` (HTML/XLSX/CSV).

        Returns None instead of content when the report yields no rows
        and the "send only if data" option is enabled.
        """

        def tabular_payload():
            # Shared XLSX/CSV preprocessing: wrap rows + columns and let
            # build_xlsx_data flatten them into spreadsheet rows.
            payload = frappe._dict()
            payload["columns"] = columns
            payload["result"] = data
            return build_xlsx_data(columns,
                                   payload, [],
                                   1,
                                   ignore_visible_idx=True)

        report = frappe.get_doc("Report", self.report)

        self.filters = frappe.parse_json(self.filters) if self.filters else {}

        if self.report_type == "Report Builder" and self.data_modified_till:
            # only rows touched within the configured number of hours
            self.filters["modified"] = (
                ">", now_datetime() - timedelta(hours=self.data_modified_till))

        if self.report_type != "Report Builder" and self.dynamic_date_filters_set():
            self.prepare_dynamic_filters()

        columns, data = report.get_data(
            limit=self.no_of_rows or 100,
            user=self.user,
            filters=self.filters,
            as_dict=True,
            ignore_prepared_report=True,
        )

        # serial-number column goes first
        columns.insert(0, frappe._dict(fieldname="idx", label="",
                                       width="30px"))
        serial = 1
        for row in data:
            row["idx"] = serial
            serial += 1

        if len(data) == 0 and self.send_if_data:
            return None

        if self.format == "HTML":
            columns, data = make_links(columns, data)
            columns = update_field_types(columns)
            return self.get_html_table(columns, data)

        if self.format == "XLSX":
            rows, widths = tabular_payload()
            workbook = make_xlsx(rows,
                                 "Auto Email Report",
                                 column_widths=widths)
            return workbook.getvalue()

        if self.format == "CSV":
            rows, _widths = tabular_payload()
            return to_csv(rows)

        frappe.throw(_("Invalid Output Format"))
예제 #28
0
def export_query():
    """export from report builder"""
    # take the optional custom title off the request before building params
    title = frappe.form_dict.title
    frappe.form_dict.pop("title", None)

    form_params = get_form_params()
    form_params["limit_page_length"] = None
    form_params["as_list"] = True
    doctype = form_params.doctype
    file_format_type = form_params["file_format_type"]
    title = title or doctype

    del form_params["doctype"]
    del form_params["file_format_type"]

    add_totals_row = None
    if form_params.get("add_totals_row") == "1":
        add_totals_row = 1
        del form_params["add_totals_row"]

    frappe.permissions.can_export(doctype, raise_exception=True)

    if "selected_items" in form_params:
        # restrict the export to the explicitly selected document names
        selected_names = json.loads(frappe.form_dict.get("selected_items"))
        form_params["filters"] = {"name": ("in", selected_names)}
        del form_params["selected_items"]

    make_access_log(
        doctype=doctype,
        file_type=file_format_type,
        report_name=form_params.report_name,
        filters=form_params.filters,
    )

    db_query = DatabaseQuery(doctype)
    result = db_query.execute(**form_params)

    if add_totals_row:
        result = append_totals_row(result)

    # header row, then the records each prefixed with a serial number
    rows = [["Sr"] + get_labels(db_query.fields, doctype)]
    for serial, record in enumerate(result, start=1):
        rows.append([serial] + list(record))

    rows = handle_duration_fieldtype_values(doctype, rows, db_query.fields)

    if file_format_type == "CSV":

        # convert to csv
        import csv

        from frappe.utils.xlsxutils import handle_html

        out = StringIO()
        writer = csv.writer(out)
        for record in rows:
            # encode only unicode type strings and not int, floats etc.
            writer.writerow([
                handle_html(frappe.as_unicode(cell))
                if isinstance(cell, string_types) else cell
                for cell in record
            ])

        out.seek(0)
        frappe.response["result"] = cstr(out.read())
        frappe.response["type"] = "csv"
        frappe.response["doctype"] = title

    elif file_format_type == "Excel":

        from frappe.utils.xlsxutils import make_xlsx

        xlsx_file = make_xlsx(rows, doctype)

        frappe.response["filename"] = title + ".xlsx"
        frappe.response["filecontent"] = xlsx_file.getvalue()
        frappe.response["type"] = "binary"
예제 #29
0
def upload(rows=None,
           submit_after_import=None,
           ignore_encoding_errors=False,
           no_email=True,
           overwrite=None,
           update_only=None,
           ignore_links=False,
           pre_process=None,
           via_console=False,
           from_data_import="No",
           skip_errors=True,
           data_import_doc=None,
           validate_template=False,
           user=None):
    """upload data"""

    # for translations
    if user:
        frappe.cache().hdel("lang", user)
        frappe.set_user_lang(user)
    #frappe.msgprint(data_import_doc.overwrite)
    if data_import_doc and isinstance(data_import_doc, string_types):
        data_import_doc = frappe.get_doc("Data Import", data_import_doc)
        #frappe.msgprint("found in database")
        #frappe.msgprint(data_import_doc.overwrite)
    if data_import_doc and from_data_import == "Yes":
        no_email = data_import_doc.no_email
        ignore_encoding_errors = data_import_doc.ignore_encoding_errors
        update_only = data_import_doc.only_update
        submit_after_import = data_import_doc.submit_after_import
        overwrite = data_import_doc.overwrite
        #frappe.msgprint("overwrite %s" % overwrite)
        skip_errors = data_import_doc.skip_errors
    else:
        # extra input params
        params = json.loads(frappe.form_dict.get("params") or '{}')
        if params.get("submit_after_import"):
            submit_after_import = True
        if params.get("ignore_encoding_errors"):
            ignore_encoding_errors = True
        if not params.get("no_email"):
            no_email = False
        if params.get('update_only'):
            update_only = True
        if params.get('from_data_import'):
            from_data_import = params.get('from_data_import')
        if not params.get('skip_errors'):
            skip_errors = params.get('skip_errors')

    frappe.flags.in_import = True
    frappe.flags.mute_emails = no_email

    def get_data_keys_definition():
        return get_data_keys()

    def bad_template():
        frappe.throw(
            _("Please do not change the rows above {0}").format(
                get_data_keys_definition().data_separator))

    def check_data_length():
        if not data:
            frappe.throw(
                _("No data found in the file. Please reattach the new file with data."
                  ))

    def get_start_row():
        for i, row in enumerate(rows):
            if row and row[0] == get_data_keys_definition().data_separator:
                return i + 1
        bad_template()

    def get_header_row(key):
        return get_header_row_and_idx(key)[0]

    def get_header_row_and_idx(key):
        for i, row in enumerate(header):
            if row and row[0] == key:
                return row, i
        return [], -1

    def filter_empty_columns(columns):
        empty_cols = list(filter(lambda x: x in ("", None), columns))

        if empty_cols:
            if columns[-1 * len(empty_cols):] == empty_cols:
                # filter empty columns if they exist at the end
                columns = columns[:-1 * len(empty_cols)]
            else:
                frappe.msgprint(_(
                    "Please make sure that there are no empty columns in the file."
                ),
                                raise_exception=1)

        return columns

    def make_column_map():
        doctype_row, row_idx = get_header_row_and_idx(
            get_data_keys_definition().doctype)
        if row_idx == -1:  # old style
            return

        dt = None
        for i, d in enumerate(doctype_row[1:]):
            if d not in ("~", "-"):
                if d and doctype_row[i] in (None, '', '~', '-',
                                            _("DocType") + ":"):
                    dt, parentfield = d, None
                    # xls format truncates the row, so it may not have more columns
                    if len(doctype_row) > i + 2:
                        parentfield = doctype_row[i + 2]
                    doctypes.append((dt, parentfield))
                    column_idx_to_fieldname[(dt, parentfield)] = {}
                    column_idx_to_fieldtype[(dt, parentfield)] = {}
                if dt:
                    column_idx_to_fieldname[(dt,
                                             parentfield)][i +
                                                           1] = rows[row_idx +
                                                                     2][i + 1]
                    column_idx_to_fieldtype[(dt,
                                             parentfield)][i +
                                                           1] = rows[row_idx +
                                                                     4][i + 1]

    def get_doc(start_idx):
        if doctypes:
            doc = {}
            attachments = []
            last_error_row_idx = None
            for idx in range(start_idx, len(rows)):
                last_error_row_idx = idx  # pylint: disable=W0612
                if (not doc) or main_doc_empty(rows[idx]):
                    for dt, parentfield in doctypes:
                        d = {}
                        for column_idx in column_idx_to_fieldname[(
                                dt, parentfield)]:
                            try:
                                fieldname = column_idx_to_fieldname[(
                                    dt, parentfield)][column_idx]
                                fieldtype = column_idx_to_fieldtype[(
                                    dt, parentfield)][column_idx]

                                if not fieldname or not rows[idx][column_idx]:
                                    continue

                                d[fieldname] = rows[idx][column_idx]
                                if fieldtype in ("Int", "Check"):
                                    d[fieldname] = cint(d[fieldname])
                                elif fieldtype in ("Float", "Currency",
                                                   "Percent"):
                                    d[fieldname] = flt(d[fieldname])
                                elif fieldtype == "Date":
                                    if d[fieldname] and isinstance(
                                            d[fieldname], string_types):
                                        d[fieldname] = getdate(
                                            parse_date(d[fieldname]))
                                elif fieldtype == "Datetime":
                                    if d[fieldname]:
                                        if " " in d[fieldname]:
                                            _date, _time = d[fieldname].split()
                                        else:
                                            _date, _time = d[
                                                fieldname], '00:00:00'
                                        _date = parse_date(d[fieldname])
                                        d[fieldname] = get_datetime(_date +
                                                                    " " +
                                                                    _time)
                                    else:
                                        d[fieldname] = None

                                elif fieldtype in ("Image", "Attach Image",
                                                   "Attach"):
                                    # added file to attachments list
                                    attachments.append(d[fieldname])

                                elif fieldtype in ("Link", "Dynamic Link",
                                                   "Data") and d[fieldname]:
                                    # as fields can be saved in the number format(long type) in data import template
                                    d[fieldname] = cstr(d[fieldname])

                            except IndexError:
                                pass

                        # scrub quotes from name and modified
                        if d.get("name") and d["name"].startswith('"'):
                            d["name"] = d["name"][1:-1]

                        if sum([0 if not val else 1 for val in d.values()]):
                            d['doctype'] = dt
                            if dt == doctype:
                                doc.update(d)
                            else:
                                if not overwrite and doc.get("name"):
                                    d['parent'] = doc["name"]
                                d['parenttype'] = doctype
                                d['parentfield'] = parentfield
                                doc.setdefault(d['parentfield'], []).append(d)
                else:
                    break
            #frappe.msgprint(doc)
            return doc, attachments, last_error_row_idx
        else:
            doc = frappe._dict(zip(columns, rows[start_idx][1:]))
            doc['doctype'] = doctype
            return doc, [], None

    # used in testing whether a row is empty or parent row or child row
    # checked only 3 first columns since first two columns can be blank for example the case of
    # importing the item variant where item code and item name will be blank.
    def main_doc_empty(row):
        if row:
            for i in range(3, 0, -1):
                if len(row) > i and row[i]:
                    return False
        return True

    def validate_naming(doc):
        autoname = frappe.get_meta(doctype).autoname
        if autoname:
            if autoname[0:5] == 'field':
                autoname = autoname[6:]
            elif autoname == 'naming_series:':
                autoname = 'naming_series'
            else:
                return True

            if (autoname not in doc) or (not doc[autoname]):
                from frappe.model.base_document import get_controller
                if not hasattr(get_controller(doctype), "autoname"):
                    frappe.throw(_(
                        "{0} is a mandatory field".format(autoname)))
        return True

    users = frappe.db.sql_list("select name from tabUser")

    def prepare_for_insert(doc):
        # don't block data import if user is not set
        # migrating from another system
        if not doc.owner in users:
            doc.owner = frappe.session.user
        if not doc.modified_by in users:
            doc.modified_by = frappe.session.user

    def is_valid_url(url):
        is_valid = False
        if url.startswith("/files") or url.startswith("/private/files"):
            url = get_url(url)

        try:
            r = requests.get(url)
            is_valid = True if r.status_code == 200 else False
        except Exception:
            pass

        return is_valid

    def attach_file_to_doc(doctype, docname, file_url):
        # check if attachment is already available
        # check if the attachement link is relative or not
        if not file_url:
            return
        if not is_valid_url(file_url):
            return

        files = frappe.db.sql(
            """Select name from `tabFile` where attached_to_doctype='{doctype}' and
			attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')"""
            .format(doctype=doctype, docname=docname, file_url=file_url))

        if files:
            # file is already attached
            return

        save_url(file_url, None, doctype, docname, "Home/Attachments", 0)

    # header
    filename, file_extension = ['', '']
    if not rows:
        from frappe.utils.file_manager import get_file  # get_file_doc
        fname, fcontent = get_file(data_import_doc.import_file)
        filename, file_extension = os.path.splitext(fname)

        if file_extension == '.xlsx' and from_data_import == 'Yes':
            from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
            rows = read_xlsx_file_from_attached_file(
                file_id=data_import_doc.import_file)
            #frappe.msgprint("%d" % len(rows))
            #frappe.msgprint(rows)
        elif file_extension == '.csv':
            from frappe.utils.csvutils import read_csv_content
            rows = read_csv_content(fcontent, ignore_encoding_errors)

        else:
            frappe.throw(_("Unsupported File Format"))

    start_row = get_start_row()
    header = rows[:start_row]
    data = rows[start_row:]
    try:
        doctype = get_header_row(get_data_keys_definition().main_table)[1]
        columns = filter_empty_columns(
            get_header_row(get_data_keys_definition().columns)[1:])
    except:
        frappe.throw(_("Cannot change header content"))
    doctypes = []
    column_idx_to_fieldname = {}
    column_idx_to_fieldtype = {}

    if skip_errors:
        data_rows_with_error = header

    if submit_after_import and not cint(
            frappe.db.get_value("DocType", doctype, "is_submittable")):
        submit_after_import = False

    parenttype = get_header_row(get_data_keys_definition().parent_table)

    if len(parenttype) > 1:
        parenttype = parenttype[1]

    # check permissions
    if not frappe.permissions.can_import(parenttype or doctype):
        frappe.flags.mute_emails = False
        return {
            "messages": [_("Not allowed to Import") + ": " + _(doctype)],
            "error": True
        }

    # Throw expception in case of the empty data file
    check_data_length()
    make_column_map()
    total = len(data)

    if validate_template:
        if total:
            data_import_doc.total_rows = total
        return True

    if overwrite == None:
        overwrite = params.get('overwrite')

    # delete child rows (if parenttype)
    parentfield = None
    if parenttype:
        parentfield = get_parent_field(doctype, parenttype)

        if overwrite:
            frappe.msgprint("still overwrtie")
            delete_child_rows(data, doctype)

    import_log = []

    def log(**kwargs):
        if via_console:
            print(
                (kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
        else:
            import_log.append(kwargs)

    def as_link(doctype, name):
        if via_console:
            return "{0}: {1}".format(doctype, name)
        else:
            return getlink(doctype, name)

    # publish realtime task update
    def publish_progress(achieved, reload=False):
        if data_import_doc:
            frappe.publish_realtime(
                "data_import_progress", {
                    "progress": str(int(100.0 * achieved / total)),
                    "data_import": data_import_doc.name,
                    "reload": reload
                },
                user=frappe.session.user)

    error_flag = rollback_flag = False

    batch_size = frappe.conf.data_import_batch_size or 1000

    for batch_start in range(0, total, batch_size):
        batch = data[batch_start:batch_start + batch_size]

        for i, row in enumerate(batch):
            # bypass empty rows
            if main_doc_empty(row):
                continue
            #frappe.msgprint("in batch %s" % overwrite)
            #frappe.msgprint("docName %s" % doc.get("name"))
            #frappe.msgprint("exist db %s" % frappe.db.exists(doctype,doc["name"]))
            row_idx = i + start_row
            doc = None

            publish_progress(i)

            #frappe.msgprint("in batch %s" % doc)

            try:
                doc, attachments, last_error_row_idx = get_doc(row_idx)
                validate_naming(doc)
                if pre_process:
                    pre_process(doc)

                original = None
                if parentfield:
                    parent = frappe.get_doc(parenttype, doc["parent"])
                    doc = parent.append(parentfield, doc)
                    parent.save()
                else:
                    if overwrite and doc.get("name") and frappe.db.exists(
                            doctype, doc["name"]):
                        original = frappe.get_doc(doctype, doc["name"])
                        #frappe.msgprint("overwite %s" % original.name)
                        original_name = original.name
                        #frappe.msgprint(original)
                        #frappe.msgprint(doc)
                        original.update(doc)
                        #frappe.msgprint(original)
                        # preserve original name for case sensitivity
                        original.name = original_name
                        original.flags.ignore_links = ignore_links
                        original.save()
                        doc = original
                    else:
                        frappe.msgprint("not overwrite")
                        if not update_only:
                            doc = frappe.get_doc(doc)
                            prepare_for_insert(doc)
                            doc.flags.ignore_links = ignore_links
                            doc.insert()
                    if attachments:
                        # check file url and create a File document
                        for file_url in attachments:
                            attach_file_to_doc(doc.doctype, doc.name, file_url)
                    if submit_after_import:
                        doc.submit()

                # log errors
                if parentfield:
                    log(
                        **{
                            "row":
                            doc.idx,
                            "title":
                            'Inserted row for "%s"' %
                            (as_link(parenttype, doc.parent)),
                            "link":
                            get_absolute_url(parenttype, doc.parent),
                            "message":
                            'Document successfully saved',
                            "indicator":
                            "green"
                        })
                elif submit_after_import:
                    log(
                        **{
                            "row":
                            row_idx + 1,
                            "title":
                            'Submitted row for "%s"' %
                            (as_link(doc.doctype, doc.name)),
                            "message":
                            "Document successfully submitted",
                            "link":
                            get_absolute_url(doc.doctype, doc.name),
                            "indicator":
                            "blue"
                        })
                elif original:
                    log(
                        **{
                            "row":
                            row_idx + 1,
                            "title":
                            'Updated row for "%s"' %
                            (as_link(doc.doctype, doc.name)),
                            "message":
                            "Document successfully updated",
                            "link":
                            get_absolute_url(doc.doctype, doc.name),
                            "indicator":
                            "green"
                        })
                elif not update_only:
                    log(
                        **{
                            "row":
                            row_idx + 1,
                            "title":
                            'Inserted row for "%s"' %
                            (as_link(doc.doctype, doc.name)),
                            "message":
                            "Document successfully saved",
                            "link":
                            get_absolute_url(doc.doctype, doc.name),
                            "indicator":
                            "green"
                        })
                else:
                    log(
                        **{
                            "row": row_idx + 1,
                            "title": 'Ignored row for %s' % (row[1]),
                            "link": None,
                            "message": "Document updation ignored",
                            "indicator": "orange"
                        })

            except Exception as e:
                error_flag = True

                # build error message
                if frappe.local.message_log:
                    err_msg = "\n".join([
                        '<p class="border-bottom small">{}</p>'.format(
                            json.loads(msg).get('message'))
                        for msg in frappe.local.message_log
                    ])
                else:
                    err_msg = '<p class="border-bottom small">{}</p>'.format(
                        cstr(e))

                error_trace = frappe.get_traceback()
                if error_trace:
                    error_log_doc = frappe.log_error(error_trace)
                    error_link = get_absolute_url("Error Log",
                                                  error_log_doc.name)
                else:
                    error_link = None

                log(
                    **{
                        "row":
                        row_idx + 1,
                        "title":
                        'Error for row %s' %
                        (len(row) > 1 and frappe.safe_decode(row[1]) or ""),
                        "message":
                        err_msg,
                        "indicator":
                        "red",
                        "link":
                        error_link
                    })

                # data with error to create a new file
                # include the errored data in the last row as last_error_row_idx will not be updated for the last row
                if skip_errors:
                    if last_error_row_idx == len(rows) - 1:
                        last_error_row_idx = len(rows)
                    data_rows_with_error += rows[row_idx:last_error_row_idx]
                else:
                    rollback_flag = True
            finally:
                frappe.local.message_log = []

        start_row += batch_size
        if rollback_flag:
            frappe.db.rollback()
        else:
            frappe.db.commit()

    frappe.flags.mute_emails = False
    frappe.flags.in_import = False

    log_message = {"messages": import_log, "error": error_flag}
    if data_import_doc:
        data_import_doc.log_details = json.dumps(log_message)

        import_status = None
        if error_flag and data_import_doc.skip_errors and len(data) != len(
                data_rows_with_error):
            import_status = "Partially Successful"
            # write the file with the faulty row
            from frappe.utils.file_manager import save_file
            file_name = 'error_' + filename + file_extension
            if file_extension == '.xlsx':
                from frappe.utils.xlsxutils import make_xlsx
                xlsx_file = make_xlsx(data_rows_with_error,
                                      "Data Import Template")
                file_data = xlsx_file.getvalue()
            else:
                from frappe.utils.csvutils import to_csv
                file_data = to_csv(data_rows_with_error)
            error_data_file = save_file(file_name, file_data, "Data Import",
                                        data_import_doc.name,
                                        "Home/Attachments")
            data_import_doc.error_file = error_data_file.file_url

        elif error_flag:
            import_status = "Failed"
        else:
            import_status = "Successful"

        data_import_doc.import_status = import_status
        data_import_doc.save()
        if data_import_doc.import_status in [
                "Successful", "Partially Successful"
        ]:
            data_import_doc.submit()
            publish_progress(100, True)
        else:
            publish_progress(0, True)
        frappe.db.commit()
    else:
        return log_message
Example #30
0
def get_template(doctype=None, parent_doctype=None, all_doctypes="No", with_data="No", select_columns=None,
	from_data_import="No", excel_format="No"):
	"""Build a Data Import template for *doctype* and put it on ``frappe.response``.

	The template is a spreadsheet whose header rows describe the importable
	columns of *doctype* (and, when ``all_doctypes == "Yes"``, of each of its
	child tables).  When ``with_data == "Yes"`` the existing, permitted
	records are appended below the header.  Output is XLSX when both
	``from_data_import`` and ``excel_format`` are ``"Yes"``, otherwise CSV.

	Args:
		doctype: DocType name, or a list ``[doctype, docs_to_export]`` where
			the second element is a dict filtering which documents to export.
		parent_doctype: parent DocType when exporting a child-table template;
			defaults to *doctype*.
		all_doctypes: ``"Yes"`` to also include all child tables of *doctype*.
		with_data: ``"Yes"`` to append existing records to the template.
		select_columns: JSON string mapping doctype -> list of fieldnames,
			restricting which columns are emitted.
		from_data_import: ``"Yes"`` when invoked from the Data Import tool.
		excel_format: ``"Yes"`` to emit XLSX instead of CSV.

	Returns:
		None.  The file name/content/type are written to ``frappe.response``.
	"""
	# "Yes"/"No" request flags arrive as strings; normalise to bool once.
	all_doctypes = all_doctypes=="Yes"
	if select_columns:
		select_columns = json.loads(select_columns);
	docs_to_export = {}
	if doctype:
		if isinstance(doctype, string_types):
			doctype = [doctype];
		# A two-element list carries an export filter dict in the second slot.
		if len(doctype) > 1:
			docs_to_export = doctype[1]
		doctype = doctype[0]

	if not parent_doctype:
		parent_doctype = doctype

	# (doctype, parentfield) -> frappe._dict(start=..., end=...): the column
	# span each table occupies in the combined header row.
	column_start_end = {}

	if all_doctypes:
		child_doctypes = []
		for df in frappe.get_meta(doctype).get_table_fields():
			child_doctypes.append(dict(doctype=df.options, parentfield=df.fieldname))

	def get_data_keys_definition():
		# Thin wrapper so every nested helper reads one definition source.
		return get_data_keys()

	def add_main_header():
		# Write the fixed banner, table identifiers and usage notes that the
		# importer's get_start_row()/get_header_row() later parse back.
		w.writerow([_('Data Import Template')])
		w.writerow([get_data_keys_definition().main_table, doctype])

		if parent_doctype != doctype:
			w.writerow([get_data_keys_definition().parent_table, parent_doctype])
		else:
			w.writerow([''])

		w.writerow([''])
		w.writerow([_('Notes:')])
		w.writerow([_('Please do not change the template headings.')])
		w.writerow([_('First data column must be blank.')])
		w.writerow([_('If you are uploading new records, leave the "name" (ID) column blank.')])
		w.writerow([_('If you are uploading new records, "Naming Series" becomes mandatory, if present.')])
		w.writerow([_('Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.')])
		w.writerow([_('For updating, you can update only selective columns.')])
		w.writerow([_('You can only upload upto 5000 records in one go. (may be less in some cases)')])
		# `key` is assigned in the enclosing body before this helper is called.
		if key == "parent":
			w.writerow([_('"Parent" signifies the parent table in which this row must be added')])
			w.writerow([_('If you are updating, please select "Overwrite" else existing rows will not be deleted.')])

	def build_field_columns(dt, parentfield=None):
		# Append the header columns for table `dt` to the shared header rows
		# (tablerow/fieldrow/labelrow/...), recording its span in
		# column_start_end.  Child tables get a leading "ID" column.
		meta = frappe.get_meta(dt)

		# build list of valid docfields
		tablecolumns = []
		for f in frappe.db.sql('desc `tab%s`' % dt):
			field = meta.get_field(f[0])
			if field and ((select_columns and f[0] in select_columns[dt]) or not select_columns):
				tablecolumns.append(field)

		tablecolumns.sort(key = lambda a: int(a.idx))

		# NOTE(review): this initial assignment is immediately overwritten by
		# both branches below; kept for byte-identity.
		_column_start_end = frappe._dict(start=0)

		if dt==doctype:
			_column_start_end = frappe._dict(start=0)
		else:
			_column_start_end = frappe._dict(start=len(columns))

			# Child tables need an explicit ID column so existing child rows
			# can be matched on re-import.
			append_field_column(frappe._dict({
				"fieldname": "name",
				"parent": dt,
				"label": "ID",
				"fieldtype": "Data",
				"reqd": 1,
				"idx": 0,
				"info": _("Leave blank for new records")
			}), True)

		# Mandatory fields first ...
		for docfield in tablecolumns:
			append_field_column(docfield, True)

		# all non mandatory fields
		for docfield in tablecolumns:
			append_field_column(docfield, False)

		# if there is one column, add a blank column (?)
		if len(columns)-_column_start_end.start == 1:
			append_empty_field_column()

		# append DocType name
		tablerow[_column_start_end.start + 1] = dt

		if parentfield:
			tablerow[_column_start_end.start + 2] = parentfield

		# +1 because the header rows carry one extra leading label cell.
		_column_start_end.end = len(columns) + 1

		column_start_end[(dt, parentfield)] = _column_start_end

	def append_field_column(docfield, for_mandatory):
		# Append one docfield to every header row; the for_mandatory flag lets
		# build_field_columns emit mandatory fields before optional ones.
		if not docfield:
			return
		if for_mandatory and not docfield.reqd:
			return
		if not for_mandatory and docfield.reqd:
			return
		if docfield.fieldname in ('parenttype', 'trash_reason'):
			return
		if docfield.hidden:
			return
		if select_columns and docfield.fieldname not in select_columns.get(docfield.parent, []):
			return

		tablerow.append("")
		fieldrow.append(docfield.fieldname)
		labelrow.append(_(docfield.label))
		mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
		typerow.append(docfield.fieldtype)
		inforow.append(getinforow(docfield))
		columns.append(docfield.fieldname)

	def append_empty_field_column():
		# "~" is the separator marker the importer's make_column_map() skips.
		tablerow.append("~")
		fieldrow.append("~")
		labelrow.append("")
		mandatoryrow.append("")
		typerow.append("")
		inforow.append("")
		columns.append("")

	def getinforow(docfield):
		"""make info comment for options, links etc."""
		if docfield.fieldtype == 'Select':
			if not docfield.options:
				return ''
			else:
				return _("One of") + ': %s' % ', '.join(filter(None, docfield.options.split('\n')))
		elif docfield.fieldtype == 'Link':
			return 'Valid %s' % docfield.options
		elif docfield.fieldtype == 'Int':
			return 'Integer'
		elif docfield.fieldtype == "Check":
			return "0 or 1"
		elif hasattr(docfield, "info"):
			return docfield.info
		else:
			return ''

	def add_field_headings():
		# Flush the accumulated header rows, ending with the data separator
		# row that the importer's get_start_row() looks for.
		w.writerow(tablerow)
		w.writerow(labelrow)
		w.writerow(fieldrow)
		w.writerow(mandatoryrow)
		w.writerow(typerow)
		w.writerow(inforow)
		w.writerow([get_data_keys_definition().data_separator])

	def add_data():
		# Export existing records (and optionally their child rows) under the
		# header, honouring the docs_to_export filter.
		def add_data_row(row_group, dt, parentfield, doc, rowidx):
			# Place one document's values into row `rowidx` of row_group, in
			# the column span recorded for (dt, parentfield).
			d = doc.copy()
			meta = frappe.get_meta(dt)
			if all_doctypes:
				# Quote the name so spreadsheet tools keep it as text; the
				# importer strips the quotes on upload.
				d.name = '"'+ d.name+'"'

			if len(row_group) < rowidx + 1:
				row_group.append([""] * (len(columns) + 1))
			row = row_group[rowidx]

			_column_start_end = column_start_end.get((dt, parentfield))

			if _column_start_end:
				for i, c in enumerate(columns[_column_start_end.start:_column_start_end.end]):
					df = meta.get_field(c)
					fieldtype = df.fieldtype if df else "Data"
					value = d.get(c, "")
					if value:
						if fieldtype == "Date":
							value = formatdate(value)
						elif fieldtype == "Datetime":
							value = format_datetime(value)

					row[_column_start_end.start + i + 1] = value

		if with_data=='Yes':
			frappe.permissions.can_export(parent_doctype, raise_exception=True)

			# sort nested set doctypes by `lft asc`
			order_by = None
			table_columns = frappe.db.get_table_columns(parent_doctype)
			if 'lft' in table_columns and 'rgt' in table_columns:
				order_by = '`tab{doctype}`.`lft` asc'.format(doctype=parent_doctype)

			# get permitted data only
			data = frappe.get_list(doctype, fields=["*"], limit_page_length=None, order_by=order_by)

			for doc in data:
				# docs_to_export may name documents explicitly (op '=' / '!=')
				# or give a regex pattern with optional flag letters.
				op = docs_to_export.get("op")
				names = docs_to_export.get("name")

				if names and op:
					if op == '=' and doc.name not in names:
						continue
					elif op == '!=' and doc.name in names:
						continue
				elif names:
					try:
						# "flags" is a comma-separated list of letters mapped
						# through `reflags` (defined elsewhere in this file)
						# to `re` flag constants; default is I,U.
						sflags = docs_to_export.get("flags", "I,U").upper()
						flags = 0
						for a in re.split('\W+',sflags):
							flags = flags | reflags.get(a,0)

						c = re.compile(names, flags)
						m = c.match(doc.name)
						if not m:
							continue
					except:
						# Not a valid regex: fall back to literal membership.
						if doc.name not in names:
							continue
				# add main table
				row_group = []

				add_data_row(row_group, doctype, None, doc, 0)

				if all_doctypes:
					# add child tables
					for c in child_doctypes:
						for ci, child in enumerate(frappe.db.sql("""select * from `tab{0}`
							where parent=%s and parentfield=%s order by idx""".format(c['doctype']),
							(doc.name, c['parentfield']), as_dict=1)):
							add_data_row(row_group, c['doctype'], c['parentfield'], child, ci)

				for row in row_group:
					w.writerow(row)

	w = UnicodeWriter()
	# First data column is 'parent' for child-table templates, else the ID.
	key = 'parent' if parent_doctype != doctype else 'name'

	add_main_header()

	w.writerow([''])
	# Shared header rows; the nested helpers above close over and extend them.
	tablerow = [get_data_keys_definition().doctype, ""]
	labelrow = [_("Column Labels:"), "ID"]
	fieldrow = [get_data_keys_definition().columns, key]
	mandatoryrow = [_("Mandatory:"), _("Yes")]
	typerow = [_('Type:'), 'Data (text)']
	inforow = [_('Info:'), '']
	columns = [key]

	build_field_columns(doctype)

	if all_doctypes:
		for d in child_doctypes:
			append_empty_field_column()
			if (select_columns and select_columns.get(d['doctype'], None)) or not select_columns:
				# if atleast one column is selected for this doctype
				build_field_columns(d['doctype'], d['parentfield'])

	add_field_headings()
	add_data()

	if from_data_import == "Yes" and excel_format == "Yes":
		# Round-trip through a temporary CSV file on disk so csv.reader can
		# feed make_xlsx; the temp file is removed afterwards.
		filename = frappe.generate_hash("", 10)
		with open(filename, 'wb') as f:
			f.write(cstr(w.getvalue()).encode("utf-8"))
		# NOTE(review): reopened in text mode with the platform default
		# encoding — assumes a UTF-8 locale; verify on non-UTF-8 systems.
		f = open(filename)
		reader = csv.reader(f)

		from frappe.utils.xlsxutils import make_xlsx
		xlsx_file = make_xlsx(reader, "Data Import Template")

		f.close()
		os.remove(filename)

		# write out response as a xlsx type
		frappe.response['filename'] = doctype + '.xlsx'
		frappe.response['filecontent'] = xlsx_file.getvalue()
		frappe.response['type'] = 'binary'

	else:
		# write out response as a type csv
		frappe.response['result'] = cstr(w.getvalue())
		frappe.response['type'] = 'csv'
		frappe.response['doctype'] = doctype
Example #31
0
File: importer.py  Project: robulik/frappe
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
	update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No",
	skip_errors = True, data_import_doc=None, validate_template=False, user=None):
	"""upload data"""

	# for translations
	if user:
		frappe.cache().hdel("lang", user)
		frappe.set_user_lang(user)

	if data_import_doc and isinstance(data_import_doc, string_types):
		data_import_doc = frappe.get_doc("Data Import", data_import_doc)
	if data_import_doc and from_data_import == "Yes":
		no_email = data_import_doc.no_email
		ignore_encoding_errors = data_import_doc.ignore_encoding_errors
		update_only = data_import_doc.only_update
		submit_after_import = data_import_doc.submit_after_import
		overwrite = data_import_doc.overwrite
		skip_errors = data_import_doc.skip_errors
	else:
		# extra input params
		params = json.loads(frappe.form_dict.get("params") or '{}')
		if params.get("submit_after_import"):
			submit_after_import = True
		if params.get("ignore_encoding_errors"):
			ignore_encoding_errors = True
		if not params.get("no_email"):
			no_email = False
		if params.get('update_only'):
			update_only = True
		if params.get('from_data_import'):
			from_data_import = params.get('from_data_import')
		if not params.get('skip_errors'):
			skip_errors = params.get('skip_errors')

	frappe.flags.in_import = True
	frappe.flags.mute_emails = no_email

	def get_data_keys_definition():
		return get_data_keys()

	def bad_template():
		frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))

	def check_data_length():
		if not data:
			frappe.throw(_("No data found in the file. Please reattach the new file with data."))

	def get_start_row():
		for i, row in enumerate(rows):
			if row and row[0]==get_data_keys_definition().data_separator:
				return i+1
		bad_template()

	def get_header_row(key):
		return get_header_row_and_idx(key)[0]

	def get_header_row_and_idx(key):
		for i, row in enumerate(header):
			if row and row[0]==key:
				return row, i
		return [], -1

	def filter_empty_columns(columns):
		empty_cols = list(filter(lambda x: x in ("", None), columns))

		if empty_cols:
			if columns[-1*len(empty_cols):] == empty_cols:
				# filter empty columns if they exist at the end
				columns = columns[:-1*len(empty_cols)]
			else:
				frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
					raise_exception=1)

		return columns

	def make_column_map():
		doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
		if row_idx == -1: # old style
			return

		dt = None
		for i, d in enumerate(doctype_row[1:]):
			if d not in ("~", "-"):
				if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
					dt, parentfield = d, None
					# xls format truncates the row, so it may not have more columns
					if len(doctype_row) > i+2:
						parentfield = doctype_row[i+2]
					doctypes.append((dt, parentfield))
					column_idx_to_fieldname[(dt, parentfield)] = {}
					column_idx_to_fieldtype[(dt, parentfield)] = {}
				if dt:
					column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
					column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]

	def get_doc(start_idx):
		if doctypes:
			doc = {}
			attachments = []
			last_error_row_idx = None
			for idx in range(start_idx, len(rows)):
				last_error_row_idx = idx	# pylint: disable=W0612
				if (not doc) or main_doc_empty(rows[idx]):
					for dt, parentfield in doctypes:
						d = {}
						for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
							try:
								fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
								fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]

								if not fieldname or not rows[idx][column_idx]:
									continue

								d[fieldname] = rows[idx][column_idx]
								if fieldtype in ("Int", "Check"):
									d[fieldname] = cint(d[fieldname])
								elif fieldtype in ("Float", "Currency", "Percent"):
									d[fieldname] = flt(d[fieldname])
								elif fieldtype == "Date":
									if d[fieldname] and isinstance(d[fieldname], string_types):
										d[fieldname] = getdate(parse_date(d[fieldname]))
								elif fieldtype == "Datetime":
									if d[fieldname]:
										if " " in d[fieldname]:
											_date, _time = d[fieldname].split()
										else:
											_date, _time = d[fieldname], '00:00:00'
										_date = parse_date(d[fieldname])
										d[fieldname] = get_datetime(_date + " " + _time)
									else:
										d[fieldname] = None

								elif fieldtype in ("Image", "Attach Image", "Attach"):
									# added file to attachments list
									attachments.append(d[fieldname])

								elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
									# as fields can be saved in the number format(long type) in data import template
									d[fieldname] = cstr(d[fieldname])

							except IndexError:
								pass

						# scrub quotes from name and modified
						if d.get("name") and d["name"].startswith('"'):
							d["name"] = d["name"][1:-1]

						if sum([0 if not val else 1 for val in d.values()]):
							d['doctype'] = dt
							if dt == doctype:
								doc.update(d)
							else:
								if not overwrite and doc.get("name"):
									d['parent'] = doc["name"]
								d['parenttype'] = doctype
								d['parentfield'] = parentfield
								doc.setdefault(d['parentfield'], []).append(d)
				else:
					break

			return doc, attachments, last_error_row_idx
		else:
			doc = frappe._dict(zip(columns, rows[start_idx][1:]))
			doc['doctype'] = doctype
			return doc, [], None

	# used in testing whether a row is empty or parent row or child row
	# checked only 3 first columns since first two columns can be blank for example the case of
	# importing the item variant where item code and item name will be blank.
	def main_doc_empty(row):
		if row:
			for i in range(3,0,-1):
				if len(row) > i and row[i]:
					return False
		return True

	def validate_naming(doc):
		autoname = frappe.get_meta(doctype).autoname
		if autoname:
			if autoname[0:5] == 'field':
				autoname = autoname[6:]
			elif autoname == 'naming_series:':
				autoname = 'naming_series'
			else:
				return True

			if (autoname not in doc) or (not doc[autoname]):
				from frappe.model.base_document import get_controller
				if not hasattr(get_controller(doctype), "autoname"):
					frappe.throw(_("{0} is a mandatory field".format(autoname)))
		return True

	users = frappe.db.sql_list("select name from tabUser")
	def prepare_for_insert(doc):
		# don't block data import if user is not set
		# migrating from another system
		if not doc.owner in users:
			doc.owner = frappe.session.user
		if not doc.modified_by in users:
			doc.modified_by = frappe.session.user

	def is_valid_url(url):
		is_valid = False
		if url.startswith("/files") or url.startswith("/private/files"):
			url = get_url(url)

		try:
			r = requests.get(url)
			is_valid = True if r.status_code == 200 else False
		except Exception:
			pass

		return is_valid

	def attach_file_to_doc(doctype, docname, file_url):
		# check if attachment is already available
		# check if the attachement link is relative or not
		if not file_url:
			return
		if not is_valid_url(file_url):
			return

		files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and
			attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
				doctype=doctype,
				docname=docname,
				file_url=file_url
			))

		if files:
			# file is already attached
			return

		save_url(file_url, None, doctype, docname, "Home/Attachments", 0)

	# header
	filename, file_extension = ['','']
	if not rows:
		from frappe.utils.file_manager import get_file # get_file_doc
		fname, fcontent = get_file(data_import_doc.import_file)
		filename, file_extension = os.path.splitext(fname)

		if file_extension == '.xlsx' and from_data_import == 'Yes':
			from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
			rows = read_xlsx_file_from_attached_file(file_id=data_import_doc.import_file)

		elif file_extension == '.csv':
			from frappe.utils.csvutils import read_csv_content
			rows = read_csv_content(fcontent, ignore_encoding_errors)

		else:
			frappe.throw(_("Unsupported File Format"))

	start_row = get_start_row()
	header = rows[:start_row]
	data = rows[start_row:]
	try:
		doctype = get_header_row(get_data_keys_definition().main_table)[1]
		columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
	except:
		frappe.throw(_("Cannot change header content"))
	doctypes = []
	column_idx_to_fieldname = {}
	column_idx_to_fieldtype = {}

	if skip_errors:
		data_rows_with_error = header

	if submit_after_import and not cint(frappe.db.get_value("DocType",
			doctype, "is_submittable")):
		submit_after_import = False

	parenttype = get_header_row(get_data_keys_definition().parent_table)

	if len(parenttype) > 1:
		parenttype = parenttype[1]

	# check permissions
	if not frappe.permissions.can_import(parenttype or doctype):
		frappe.flags.mute_emails = False
		return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

	# Throw expception in case of the empty data file
	check_data_length()
	make_column_map()
	total = len(data)

	if validate_template:
		if total:
			data_import_doc.total_rows = total
		return True

	if overwrite==None:
		overwrite = params.get('overwrite')

	# delete child rows (if parenttype)
	parentfield = None
	if parenttype:
		parentfield = get_parent_field(doctype, parenttype)

		if overwrite:
			delete_child_rows(data, doctype)

	import_log = []
	def log(**kwargs):
		if via_console:
			print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
		else:
			import_log.append(kwargs)

	def as_link(doctype, name):
		if via_console:
			return "{0}: {1}".format(doctype, name)
		else:
			return getlink(doctype, name)

	# publish realtime task update
	def publish_progress(achieved, reload=False):
		if data_import_doc:
			frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
				"data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)


	error_flag = rollback_flag = False

	batch_size = frappe.conf.data_import_batch_size or 1000

	for batch_start in range(0, total, batch_size):
		batch = data[batch_start:batch_start + batch_size]

		for i, row in enumerate(batch):
			# bypass empty rows
			if main_doc_empty(row):
				continue

			# Map the batch-local index `i` to the absolute row index in the
			# source file (the enclosing loop processes `rows` in batches of
			# `batch_size`, advancing `start_row` per batch — see below).
			row_idx = i + start_row
			doc = None

			# NOTE(review): publish_progress presumably emits a realtime
			# progress event keyed on `i` — confirm against its definition
			# earlier in this function.
			publish_progress(i)

			try:
				# Parse the raw row(s) into a doc dict, any attachment file
				# URLs, and the index past this doc's last source row (used
				# below to slice out all rows belonging to a failed doc).
				doc, attachments, last_error_row_idx = get_doc(row_idx)
				validate_naming(doc)
				if pre_process:
					# Caller-supplied hook, applied before any DB write.
					pre_process(doc)

				original = None
				if parentfield:
					# Child-table import: append the row to its parent doc
					# and save the parent (the child has no standalone save).
					parent = frappe.get_doc(parenttype, doc["parent"])
					doc = parent.append(parentfield, doc)
					parent.save()
				else:
					if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
						# Update path: merge imported values into the existing
						# document rather than inserting a duplicate.
						original = frappe.get_doc(doctype, doc["name"])
						original_name = original.name
						original.update(doc)
						# preserve original name for case sensitivity
						original.name = original_name
						original.flags.ignore_links = ignore_links
						original.save()
						doc = original
					else:
						# Insert path — skipped entirely when update_only is
						# set and the document does not already exist.
						if not update_only:
							doc = frappe.get_doc(doc)
							prepare_for_insert(doc)
							doc.flags.ignore_links = ignore_links
							doc.insert()
					# NOTE(review): if update_only is true and the doc was not
					# found, `doc` is still a plain dict here, so doc.doctype /
					# doc.name below would fail — confirm whether that
					# combination (update_only + attachments/submit) can occur.
					if attachments:
						# check file url and create a File document
						for file_url in attachments:
							attach_file_to_doc(doc.doctype, doc.name, file_url)
					if submit_after_import:
						doc.submit()

				# log errors
				# Success logging: one entry per row, branch chosen by which
				# write path ran above (child-row / submitted / updated /
				# inserted / ignored-by-update_only).
				if parentfield:
					log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
						"link": get_url_to_form(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
				elif submit_after_import:
					log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully submitted", "link": get_url_to_form(doc.doctype, doc.name), "indicator": "blue"})
				elif original:
					log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully updated", "link": get_url_to_form(doc.doctype, doc.name), "indicator": "green"})
				elif not update_only:
					log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully saved", "link": get_url_to_form(doc.doctype, doc.name), "indicator": "green"})
				else:
					log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
						"message": "Document updation ignored", "indicator": "orange"})

			except Exception as e:
				error_flag = True

				# build error message
				# Prefer the framework's queued user-facing messages (each a
				# JSON string with a "message" key); fall back to str(e).
				if frappe.local.message_log:
					err_msg = "\n".join(['<p class="border-bottom small">{}</p>'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
				else:
					err_msg = '<p class="border-bottom small">{}</p>'.format(cstr(e))

				# Persist the traceback as an Error Log doc so the import log
				# can link to it.
				error_trace = frappe.get_traceback()
				if error_trace:
					error_log_doc = frappe.log_error(error_trace)
					error_link = get_url_to_form("Error Log", error_log_doc.name)
				else:
					error_link = None

				log(**{
					"row": row_idx + 1,
					"title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
					"message": err_msg,
					"indicator": "red",
					"link":error_link
				})

				# data with error to create a new file
				# include the errored data in the last row as last_error_row_idx will not be updated for the last row
				if skip_errors:
					if last_error_row_idx == len(rows)-1:
						last_error_row_idx = len(rows)
					# Collect every source row belonging to the failed doc so
					# they can be re-exported as an "error_" file below.
					data_rows_with_error += rows[row_idx:last_error_row_idx]
				else:
					# Without skip_errors, any failure rolls back the whole
					# batch (handled after the loop).
					rollback_flag = True
			finally:
				# Clear queued messages so they don't leak into the next row's
				# error report.
				frappe.local.message_log = []

		# Advance to the next batch; commit the batch atomically, or roll it
		# back if a row failed and skip_errors was off.
		start_row += batch_size
		if rollback_flag:
			frappe.db.rollback()
		else:
			frappe.db.commit()

	# Restore global flags suppressed for the duration of the import.
	frappe.flags.mute_emails = False
	frappe.flags.in_import = False

	log_message = {"messages": import_log, "error": error_flag}
	if data_import_doc:
		# Persist the per-row log onto the Data Import document.
		data_import_doc.log_details = json.dumps(log_message)

		import_status = None
		if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
			# Some rows failed but not all: export the failed rows in the
			# same format as the uploaded file so the user can fix and retry.
			import_status = "Partially Successful"
			# write the file with the faulty row
			from frappe.utils.file_manager import save_file
			file_name = 'error_' + filename + file_extension
			if file_extension == '.xlsx':
				from frappe.utils.xlsxutils import make_xlsx
				xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
				file_data = xlsx_file.getvalue()
			else:
				from frappe.utils.csvutils import to_csv
				file_data = to_csv(data_rows_with_error)
			error_data_file = save_file(file_name, file_data, "Data Import",
				data_import_doc.name,  "Home/Attachments")
			data_import_doc.error_file = error_data_file.file_url

		elif error_flag:
			import_status = "Failed"
		else:
			import_status = "Successful"

		data_import_doc.import_status = import_status
		data_import_doc.save()
		# Submit the Data Import doc on (partial) success; final progress
		# event signals completion either way.
		if data_import_doc.import_status in ["Successful", "Partially Successful"]:
			data_import_doc.submit()
			publish_progress(100, True)
		else:
			publish_progress(0, True)
		frappe.db.commit()
	else:
		# No Data Import document (direct API call): return the log instead
		# of persisting it.
		return log_message