def get_csv():
    """Build the Sub Machining RFQ CSV from the request args and attach it
    to the HTTP response as a downloadable file."""
    params = frappe.local.form_dict
    writer = UnicodeWriter()
    writer = add_header(writer, params)
    writer = add_data(writer, params)
    # hand the generated CSV back through the framework response
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Sub Machining RFQ"
def export_suppliers(self):
    """Export contact name, company and email for every supplier contact in
    the selected supplier group as a CSV download.

    Contacts without an email address are skipped.
    """
    self.writer = UnicodeWriter()
    group = self.doc.supplier_group or "All Supplier Groups"
    # Parameterized query: the previous str.format() interpolation of the
    # group name into the SQL string allowed SQL injection and broke on
    # quote characters in group names.
    supplier_list = frappe.db.sql(
        """
        select supplier.supplier_name, contact.first_name,
            contact.last_name, contact.email_id
        from `tabSupplier` supplier, `tabContact` contact,
            `tabDynamic Link` contact_dyn_link
        where supplier.supplier_group = %s
            and contact_dyn_link.parent = contact.name
            and contact_dyn_link.link_doctype = 'Supplier'
            and contact_dyn_link.link_name = supplier.name
        """,
        (group,),
        as_dict=True,
    )
    self.writer.writerow(["name", "company_name", "email"])
    for e in supplier_list:
        if e.email_id:
            # full name first (last name may be absent), then company, email
            self.writer.writerow([
                "{0} {1}".format(e.first_name, e.last_name) if e.last_name else e.first_name,
                e.supplier_name,
                e.email_id,
            ])
    self.build_response_as_csv()
def get_template(template_type):
    """Return a UnicodeWriter containing a Chart of Accounts import template.

    For 'Blank Template', writes header + skeleton rows (root types,
    mandatory group accounts, mandatory account types); otherwise delegates
    to get_sample_template for a pre-filled sample.
    """
    fields = [
        "Account Name", "Parent Account", "Account Number",
        "Parent Account Number", "Is Group", "Account Type", "Root Type"
    ]
    writer = UnicodeWriter()
    writer.writerow(fields)
    if template_type == 'Blank Template':
        # Each row must carry one cell per header column (7). The original
        # rows were one cell short, so "Is Group" landed under
        # "Parent Account Number" and every later value shifted left.
        for root_type in get_root_types():
            writer.writerow(['', '', '', '', 1, '', root_type])
        for account in get_mandatory_group_accounts():
            writer.writerow(['', '', '', '', 1, account, "Asset"])
        for account_type in get_mandatory_account_types():
            writer.writerow([
                '', '', '', '', 0,
                account_type.get('account_type'),
                account_type.get('root_type')
            ])
    else:
        writer = get_sample_template(writer)
    return writer
def create_csv(data):
    """Serve the Payment Received Report for the given rows as a CSV download."""
    writer = UnicodeWriter()
    writer = add_header(writer)
    writer = add_data(writer, data)
    # write out response as a type csv
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Payment Received Report"
def create_csv(emp_data, mode_of_pay, filters):
    """Serve the Daily Sales Report built from employee data, payment mode
    and filters as a CSV download."""
    writer = UnicodeWriter()
    writer = add_header(writer)
    writer = add_data(writer, emp_data, mode_of_pay, filters)
    # attach the CSV payload to the framework response
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Daily Sales Report"
def create_csv(emp_data, mode_of_pay, filters):
    """Build the Daily Sales Report CSV and hand it to the response."""
    out = UnicodeWriter()
    out = add_header(out)
    out = add_data(out, emp_data, mode_of_pay, filters)
    # write out response as a type csv
    frappe.response['result'] = cstr(out.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Daily Sales Report"
def export_leads(self):
    """Export all Leads (name, company, email) as CSV.

    Leads without an email address are not written out.
    """
    self.writer = UnicodeWriter()
    leads = frappe.get_list(
        "Lead",
        fields=["name", "lead_name", "company_name", "email_id"],
    )
    self.writer.writerow(["name", "company_name", "email"])
    for lead in leads:
        if not lead.email_id:
            continue  # only contactable leads are exported
        self.writer.writerow([lead.lead_name, lead.company_name, lead.email_id])
    self.build_response_as_csv()
def get_template():
    """Serve the Employee Upload header-only CSV template."""
    params = frappe.local.form_dict
    writer = UnicodeWriter()
    writer = add_header(writer, params)
    # write out response as a type csv
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Employee Upload"
def get_template():
    """Serve the Upload Timesheet Process header template.

    Raises:
        frappe.PermissionError: if the user may not create the doctype.
    """
    if not frappe.has_permission("Upload Timesheet Process", "create"):
        raise frappe.PermissionError
    writer = UnicodeWriter()
    writer = add_header(writer)
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Upload Timesheet Process"
def get_template(mobilization):
    """Build the Upload Mobilization CSV template (Employee or asset variant).

    Request args holding a bare ',' mean "no filter" and are replaced with
    JSON null; any other value is wrapped in single quotes for direct use
    inside the SQL assembled by the add_data helpers.
    """
    args = frappe.local.form_dict

    def _quote_or_null(key):
        # One place for the quote-or-null normalization that was previously
        # copy-pasted once per filter key.
        if args[key] == ',':
            args[key] = json.dumps(None)
        else:
            args[key] = "'" + args[key] + "'"

    if mobilization == "Employee":
        for key in ("company", "branch", "division", "department", "designation"):
            _quote_or_null(key)
        w = UnicodeWriter()
        w = add_header(w, args)
        w = add_data(w, args)
    else:
        for key in ("company", "department"):
            _quote_or_null(key)
        w = UnicodeWriter()
        w = add_header_for_asset(w, args)
        w = add_data_asset(w, args)
    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Upload Mobilization"
def get_user_permissions_csv():
    """Download every user permission as a three-column CSV
    (User, Document Type, Value)."""
    rows = [["User Permissions"], ["User", "Document Type", "Value"]]
    rows.extend([p.parent, p.defkey, p.defvalue] for p in get_permissions())
    writer = UnicodeWriter()
    for row in rows:
        writer.writerow(row)
    frappe.response['result'] = str(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "User Permissions"
def create_csv(customer, from_date, to_date, agreement, data_limit):
    """Serve the Payment Received Report for a customer/date range as CSV,
    resolving each row's payment ids first."""
    rows = get_payments_details(customer, from_date, to_date, agreement, data_limit)['data']
    for row in rows:
        # expand the raw payment-id reference into display form when present
        row["payments_ids"] = update_dict_by_payment_ids(row) if row.get("payments_ids") else ""
    writer = UnicodeWriter()
    writer = add_header(writer)
    writer = add_data(writer, rows)
    # write out response as a type csv
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Payment Received Report"
def get_template_with_data():
    """Build the Shift Schedule CSV (headers plus current data) for download."""
    args = frappe.local.form_dict
    # leftover debug print of the raw request args removed -- it wrote to
    # stdout on every call and leaked request parameters into server logs
    w = UnicodeWriter()
    w = add_header_get_data(w, args)
    w = add_data(w, args)
    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Shift Schedule"
def get_template():
    """Serve a blank BRS Entries import template.

    Raises:
        frappe.PermissionError: if the user may not create BRS Entries.
    """
    if not frappe.has_permission("BRS Entries", "create"):
        raise frappe.PermissionError
    # NOTE: the request form_dict was previously read into a local that was
    # never used (add_header takes no args here), so the lookup was dropped.
    w = UnicodeWriter()
    w = add_header(w)
    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "BRS Entries"
def build_response(self):
    """Assemble the export data and attach it to the HTTP response.

    QUOTE_NONE is used because the CIEL XIMPORT.TXT format must not
    contain quoted fields.
    """
    self.writer = UnicodeWriter(quoting=csv.QUOTE_NONE)
    self.add_data()
    if not self.data:
        # NOTE(review): execution continues after this call, so when the
        # file_format is CIEL the binary response below may still be set on
        # top of the "No Data" page -- confirm whether an early return was
        # intended here.
        frappe.respond_as_web_page(_('No Data'),
            _('There is no data to be exported'),
            indicator_color='orange')
    # write out response
    if self.file_format == "CIEL":
        frappe.response['filename'] = 'XIMPORT.TXT'
        frappe.response['filecontent'] = self.writer.getvalue()
        frappe.response['type'] = 'binary'
def get_departure_template():
    """Serve the Departure import template CSV.

    Raises:
        frappe.PermissionError: if the user may not create Departure records.
    """
    if not frappe.has_permission("Departure", "create"):
        raise frappe.PermissionError
    params = frappe.local.form_dict
    writer = UnicodeWriter()
    writer = add_header(writer, "Departure")
    writer = add_data(writer, params, "Departure")
    # write out response as a type csv
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Departure"
def get_template():
    """Serve the Shift Schedule template CSV built from the request args.

    Raises:
        frappe.PermissionError: if the user may not create Shift Schedules.
    """
    if not frappe.has_permission("Shift Schedule", "create"):
        raise frappe.PermissionError
    args = frappe.local.form_dict
    # leftover debug print of the raw request args removed -- it polluted
    # server logs on every template download
    w = UnicodeWriter()
    w = add_header(w, args)
    w = add_data(w, args)
    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Shift Schedule"
def download_template():
    """Stream a minimal Chart of Accounts CSV template (title, column
    labels, data-start marker) to the client."""
    form_data = frappe._dict(frappe.local.form_dict)
    headers = ["Account Name", "Parent Account", "Account Number",
               "Is Group", "Account Type", "Root Type"]
    writer = UnicodeWriter()
    writer.writerow([_('Chart of Accounts Template')])
    writer.writerow([_("Column Labels : ")] + headers)
    writer.writerow([_("Start entering data from here : ")])
    # download csv file
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = form_data.get('doctype')
def get_template():
    """Serve the Attendance upload template for the requested date range.

    Raises:
        frappe.PermissionError: if the user may not create Attendance.
    """
    if not frappe.has_permission("Attendance", "create"):
        raise frappe.PermissionError
    params = frappe.local.form_dict
    # reject inverted ranges before doing any work
    if getdate(params.from_date) > getdate(params.to_date):
        frappe.throw(_("To Date should be greater than From Date"))
    writer = UnicodeWriter()
    writer = add_header(writer)
    writer = add_data(writer, params)
    # write out response as a type csv
    frappe.response['result'] = cstr(writer.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Attendance"
def build_response(self):
    """Build the data-import template (and optionally the data) for
    self.doctype and attach it to the response as CSV or Excel.

    The header/label/field/mandatory/type/info rows are accumulated on
    self by build_field_columns before being written out.
    """
    self.writer = UnicodeWriter()
    # column that links a row back to its record: child exports key on parent
    self.name_field = "parent" if self.parent_doctype != self.doctype else "name"
    if self.template:
        self.add_main_header()
    self.writer.writerow([""])
    self.tablerow = [self.data_keys.doctype]
    self.labelrow = [_("Column Labels:")]
    self.fieldrow = [self.data_keys.columns]
    self.mandatoryrow = [_("Mandatory:")]
    self.typerow = [_("Type:")]
    self.inforow = [_("Info:")]
    self.columns = []
    self.build_field_columns(self.doctype)
    if self.all_doctypes:
        for d in self.child_doctypes:
            # blank separator column between parent and each child table
            self.append_empty_field_column()
            if (self.select_columns and self.select_columns.get(
                    d["doctype"], None)) or not self.select_columns:
                # if atleast one column is selected for this doctype
                self.build_field_columns(d["doctype"], d["parentfield"])
    self.add_field_headings()
    self.add_data()
    if self.with_data and not self.data:
        # NOTE(review): no return here -- the file response below may still
        # overwrite this page; kept as-is
        frappe.respond_as_web_page(_("No Data"),
            _("There is no data to be exported"),
            indicator_color="orange")
    if self.file_type == "Excel":
        self.build_response_as_excel()
    else:
        # write out response as a type csv
        frappe.response["result"] = cstr(self.writer.getvalue())
        frappe.response["type"] = "csv"
        frappe.response["doctype"] = self.doctype
def build_response(self):
    """Build the data-import template (and optionally the data) for
    self.doctype and attach it to the response as CSV or Excel.

    Single-quote variant of the exporter; logically identical to the
    double-quote version elsewhere in this file.
    """
    self.writer = UnicodeWriter()
    # column that links a row back to its record: child exports key on parent
    self.name_field = 'parent' if self.parent_doctype != self.doctype else 'name'
    if self.template:
        self.add_main_header()
    self.writer.writerow([''])
    self.tablerow = [self.data_keys.doctype]
    self.labelrow = [_("Column Labels:")]
    self.fieldrow = [self.data_keys.columns]
    self.mandatoryrow = [_("Mandatory:")]
    self.typerow = [_('Type:')]
    self.inforow = [_('Info:')]
    self.columns = []
    self.build_field_columns(self.doctype)
    if self.all_doctypes:
        for d in self.child_doctypes:
            # blank separator column between parent and each child table
            self.append_empty_field_column()
            if (self.select_columns and self.select_columns.get(
                    d['doctype'], None)) or not self.select_columns:
                # if atleast one column is selected for this doctype
                self.build_field_columns(d['doctype'], d['parentfield'])
    self.add_field_headings()
    self.add_data()
    if self.with_data and not self.data:
        # NOTE(review): no return here -- the file response below may still
        # overwrite this page; kept as-is
        frappe.respond_as_web_page(_('No Data'),
            _('There is no data to be exported'),
            indicator_color='orange')
    if self.file_type == 'Excel':
        self.build_response_as_excel()
    else:
        # write out response as a type csv
        frappe.response['result'] = cstr(self.writer.getvalue())
        frappe.response['type'] = 'csv'
        frappe.response['doctype'] = self.doctype
def get_template():
    """Serve the Attendance upload template for the requested date range,
    showing a friendly page instead of a traceback when data generation
    fails.

    Raises:
        frappe.PermissionError: if the user may not create Attendance.
    """
    if not frappe.has_permission("Attendance", "create"):
        raise frappe.PermissionError
    args = frappe.local.form_dict
    # reject inverted ranges before doing any work
    if getdate(args.from_date) > getdate(args.to_date):
        frappe.throw(_("To Date should be greater than From Date"))
    w = UnicodeWriter()
    w = add_header(w)
    try:
        w = add_data(w, args)
    except Exception as e:
        # presumably add_data raises when an employee's holiday list is
        # missing (page title below); show the error as a web page
        frappe.clear_messages()
        frappe.respond_as_web_page("Holiday List Missing", html=e)
        return
    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Attendance"
def build_response(self):
    """Write a headers-only import template for a child table and attach
    it to the response as CSV or Excel."""
    self.writer = UnicodeWriter()
    self.add_main_header()
    self.tablerow = [self.doctype]
    self.labelrow = [_("Column Labels:")]
    self.fieldrow = ['Column Name:']
    self.mandatoryrow = [_("Mandatory:")]
    self.typerow = [_('Type:')]
    self.inforow = [_('Info:')]
    self.columns = []
    # first column identifies the parent record; the remaining columns come
    # from the child doctype's own fields
    self._append_parent_column()
    self.build_field_columns(self.child_doc)
    self.add_field_headings()
    self.writer.writerow(['Start entering data below this line'])
    if self.file_type == 'Excel':
        self.build_response_as_excel()
    else:
        # write out response as a type csv
        frappe.response['result'] = cstr(self.writer.getvalue())
        frappe.response['type'] = 'csv'
        frappe.response['doctype'] = self.doctype
def get_file_address(from_date, to_date):
    """Build the DGII "607" sales report CSV for the given posting-date
    range and attach it to the response.

    Only submitted invoices with a real NCF (not the SINV- fallback) and a
    customer tax id are included.
    """
    # Parameterized query: the previous %-interpolation spliced the raw
    # from_date/to_date values straight into the SQL string (injection-prone
    # and brittle around quoting).
    result = frappe.db.sql(
        """SELECT cust.tax_id, sinv.ncf, sinv.posting_date,
            sinv.total_taxes_and_charges, sinv.base_total
        FROM `tabSales Invoice` AS sinv
        JOIN tabCustomer AS cust ON sinv.customer = cust.name
        WHERE sinv.ncf NOT LIKE %s
            AND cust.tax_id > 0
            AND sinv.docstatus = 1
            AND sinv.posting_date BETWEEN %s AND %s""",
        ("SINV-%", from_date, to_date),
        as_dict=True)
    w = UnicodeWriter()
    w.writerow(['RNC', 'Tipo de RNC', 'NCF', 'NCF modificado',
        'Fecha de impresion', 'ITBIS facturado', 'Monto Total'])
    for row in result:
        tipo_rnc = frappe.get_value("Customer", {"tax_id": row.tax_id}, ["tipo_rnc"])
        # "NCF modificado" is always blank in this report
        w.writerow([row.tax_id, tipo_rnc, row.ncf, "",
            row.posting_date.strftime("%Y%m%d"),
            row.total_taxes_and_charges, row.base_total])
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Reporte_607_" + str(int(time.time()))
def get_file_address(from_date, to_date):
    """Build the DGII "608" cancelled-invoice report CSV (Windows-1252
    encoded) for the given posting-date range and attach it to the response.
    """
    # Parameterized query: the old version %-interpolated the dates into the
    # SQL string, which is injection-prone and breaks on quote characters.
    result = frappe.db.sql(
        """SELECT ncf, posting_date, tipo_de_anulacion
        FROM `tabSales Invoice`
        WHERE docstatus = %(cancelled)s
            AND posting_date >= %(from_date)s
            AND posting_date <= %(to_date)s""",
        {"cancelled": 2, "from_date": from_date, "to_date": to_date},
        as_dict=True, as_utf8=1)
    w = UnicodeWriter(encoding='Windows-1252')
    w.writerow(["Numero de Comprobante Fiscal", "", "Fecha de Comprobante",
        "Tipo de Anulacion", "Estatus"])
    for row in result:
        # posting_date arrives as an ISO 'YYYY-MM-DD' string (as_utf8);
        # repack it as YYYYMMDD for the report
        year, month, day = str(row.posting_date).split("-")
        w.writerow([row.ncf, "", year + month + day, row.tipo_de_anulacion, ""])
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Reporte_608_" + str(int(time.time()))
def build_response(self):
    """Build the data-import template (and optionally the data) for
    self.doctype and attach it to the response as CSV or Excel.

    Third copy of the exporter in this file; logically identical to the
    others.
    """
    self.writer = UnicodeWriter()
    # column that links a row back to its record: child exports key on parent
    self.name_field = 'parent' if self.parent_doctype != self.doctype else 'name'
    if self.template:
        self.add_main_header()
    self.writer.writerow([''])
    self.tablerow = [self.data_keys.doctype]
    self.labelrow = [_("Column Labels:")]
    self.fieldrow = [self.data_keys.columns]
    self.mandatoryrow = [_("Mandatory:")]
    self.typerow = [_('Type:')]
    self.inforow = [_('Info:')]
    self.columns = []
    self.build_field_columns(self.doctype)
    if self.all_doctypes:
        for d in self.child_doctypes:
            # blank separator column between parent and each child table
            self.append_empty_field_column()
            if (self.select_columns and self.select_columns.get(d['doctype'], None)) or not self.select_columns:
                # if atleast one column is selected for this doctype
                self.build_field_columns(d['doctype'], d['parentfield'])
    self.add_field_headings()
    self.add_data()
    if self.with_data and not self.data:
        # NOTE(review): no return here -- the file response below may still
        # overwrite this page; kept as-is
        frappe.respond_as_web_page(_('No Data'),
            _('There is no data to be exported'),
            indicator_color='orange')
    if self.file_type == 'Excel':
        self.build_response_as_excel()
    else:
        # write out response as a type csv
        frappe.response['result'] = cstr(self.writer.getvalue())
        frappe.response['type'] = 'csv'
        frappe.response['doctype'] = self.doctype
def get_template(doctype=None, parent_doctype=None, all_doctypes="No", with_data="No",
    select_columns=None, from_data_import="No", excel_format="No"):
    """Generate the legacy data-import template for a doctype (optionally
    with its current data and child tables) and attach it to the response
    as CSV or, when requested from data import, as an xlsx file.

    All flag parameters arrive as "Yes"/"No" strings from the request.
    """
    all_doctypes = all_doctypes=="Yes"
    if select_columns:
        select_columns = json.loads(select_columns);
    docs_to_export = {}
    if doctype:
        # doctype may be [name, export-filter-dict]
        if isinstance(doctype, string_types):
            doctype = [doctype];
        if len(doctype) > 1:
            docs_to_export = doctype[1]
        doctype = doctype[0]
    if not parent_doctype:
        parent_doctype = doctype
    # maps (doctype, parentfield) -> start/end indexes into `columns`
    column_start_end = {}
    if all_doctypes:
        child_doctypes = []
        for df in frappe.get_meta(doctype).get_table_fields():
            child_doctypes.append(dict(doctype=df.options, parentfield=df.fieldname))

    def get_data_keys_definition():
        return get_data_keys()

    def add_main_header():
        # instructional preamble written at the top of the template
        w.writerow([_('Data Import Template')])
        w.writerow([get_data_keys_definition().main_table, doctype])
        if parent_doctype != doctype:
            w.writerow([get_data_keys_definition().parent_table, parent_doctype])
        else:
            w.writerow([''])
        w.writerow([''])
        w.writerow([_('Notes:')])
        w.writerow([_('Please do not change the template headings.')])
        w.writerow([_('First data column must be blank.')])
        w.writerow([_('If you are uploading new records, leave the "name" (ID) column blank.')])
        w.writerow([_('If you are uploading new records, "Naming Series" becomes mandatory, if present.')])
        w.writerow([_('Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.')])
        w.writerow([_('For updating, you can update only selective columns.')])
        w.writerow([_('You can only upload upto 5000 records in one go. (may be less in some cases)')])
        if key == "parent":
            w.writerow([_('"Parent" signifies the parent table in which this row must be added')])
            w.writerow([_('If you are updating, please select "Overwrite" else existing rows will not be deleted.')])

    def build_field_columns(dt, parentfield=None):
        # append one column group (ID + mandatory + optional fields) for dt
        meta = frappe.get_meta(dt)
        # build list of valid docfields
        tablecolumns = []
        for f in frappe.db.sql('desc `tab%s`' % dt):
            field = meta.get_field(f[0])
            if field and ((select_columns and f[0] in select_columns[dt]) or not select_columns):
                tablecolumns.append(field)
        tablecolumns.sort(key = lambda a: int(a.idx))
        _column_start_end = frappe._dict(start=0)
        if dt==doctype:
            _column_start_end = frappe._dict(start=0)
        else:
            _column_start_end = frappe._dict(start=len(columns))
        append_field_column(frappe._dict({
            "fieldname": "name",
            "parent": dt,
            "label": "ID",
            "fieldtype": "Data",
            "reqd": 1,
            "idx": 0,
            "info": _("Leave blank for new records")
        }), True)
        for docfield in tablecolumns:
            append_field_column(docfield, True)
        # all non mandatory fields
        for docfield in tablecolumns:
            append_field_column(docfield, False)
        # if there is one column, add a blank column (?)
        if len(columns)-_column_start_end.start == 1:
            append_empty_field_column()
        # append DocType name
        tablerow[_column_start_end.start + 1] = dt
        if parentfield:
            tablerow[_column_start_end.start + 2] = parentfield
        _column_start_end.end = len(columns) + 1
        column_start_end[(dt, parentfield)] = _column_start_end

    def append_field_column(docfield, for_mandatory):
        # guard clauses decide whether this field belongs in this pass
        if not docfield:
            return
        if for_mandatory and not docfield.reqd:
            return
        if not for_mandatory and docfield.reqd:
            return
        if docfield.fieldname in ('parenttype', 'trash_reason'):
            return
        if docfield.hidden:
            return
        if select_columns and docfield.fieldname not in select_columns.get(docfield.parent, []):
            return
        tablerow.append("")
        fieldrow.append(docfield.fieldname)
        labelrow.append(_(docfield.label))
        mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
        typerow.append(docfield.fieldtype)
        inforow.append(getinforow(docfield))
        columns.append(docfield.fieldname)

    def append_empty_field_column():
        # visual separator column between doctype column groups
        tablerow.append("~")
        fieldrow.append("~")
        labelrow.append("")
        mandatoryrow.append("")
        typerow.append("")
        inforow.append("")
        columns.append("")

    def getinforow(docfield):
        """make info comment for options, links etc."""
        if docfield.fieldtype == 'Select':
            if not docfield.options:
                return ''
            else:
                return _("One of") + ': %s' % ', '.join(filter(None, docfield.options.split('\n')))
        elif docfield.fieldtype == 'Link':
            return 'Valid %s' % docfield.options
        elif docfield.fieldtype == 'Int':
            return 'Integer'
        elif docfield.fieldtype == "Check":
            return "0 or 1"
        elif hasattr(docfield, "info"):
            return docfield.info
        else:
            return ''

    def add_field_headings():
        w.writerow(tablerow)
        w.writerow(labelrow)
        w.writerow(fieldrow)
        w.writerow(mandatoryrow)
        w.writerow(typerow)
        w.writerow(inforow)
        w.writerow([get_data_keys_definition().data_separator])

    def add_data():
        def add_data_row(row_group, dt, parentfield, doc, rowidx):
            # place doc's values into its doctype's column slice of the row
            d = doc.copy()
            meta = frappe.get_meta(dt)
            if all_doctypes:
                d.name = '"'+ d.name+'"'
            if len(row_group) < rowidx + 1:
                row_group.append([""] * (len(columns) + 1))
            row = row_group[rowidx]
            _column_start_end = column_start_end.get((dt, parentfield))
            if _column_start_end:
                for i, c in enumerate(columns[_column_start_end.start:_column_start_end.end]):
                    df = meta.get_field(c)
                    fieldtype = df.fieldtype if df else "Data"
                    value = d.get(c, "")
                    if value:
                        if fieldtype == "Date":
                            value = formatdate(value)
                        elif fieldtype == "Datetime":
                            value = format_datetime(value)
                    row[_column_start_end.start + i + 1] = value
        if with_data=='Yes':
            frappe.permissions.can_export(parent_doctype, raise_exception=True)
            # sort nested set doctypes by `lft asc`
            order_by = None
            table_columns = frappe.db.get_table_columns(parent_doctype)
            if 'lft' in table_columns and 'rgt' in table_columns:
                order_by = '`tab{doctype}`.`lft` asc'.format(doctype=parent_doctype)
            # get permitted data only
            data = frappe.get_list(doctype, fields=["*"], limit_page_length=None, order_by=order_by)
            for doc in data:
                # docs_to_export may restrict the export by exact names,
                # an =/!= operator, or a regex over doc names
                op = docs_to_export.get("op")
                names = docs_to_export.get("name")
                if names and op:
                    if op == '=' and doc.name not in names:
                        continue
                    elif op == '!=' and doc.name in names:
                        continue
                elif names:
                    try:
                        sflags = docs_to_export.get("flags", "I,U").upper()
                        flags = 0
                        for a in re.split('\W+',sflags):
                            flags = flags | reflags.get(a,0)
                        c = re.compile(names, flags)
                        m = c.match(doc.name)
                        if not m:
                            continue
                    except:
                        # invalid regex: fall back to exact-name membership
                        if doc.name not in names:
                            continue
                # add main table
                row_group = []
                add_data_row(row_group, doctype, None, doc, 0)
                if all_doctypes:
                    # add child tables
                    for c in child_doctypes:
                        for ci, child in enumerate(frappe.db.sql("""select * from `tab{0}`
                            where parent=%s and parentfield=%s order by idx""".format(c['doctype']),
                            (doc.name, c['parentfield']), as_dict=1)):
                            add_data_row(row_group, c['doctype'], c['parentfield'], child, ci)
                for row in row_group:
                    w.writerow(row)

    w = UnicodeWriter()
    key = 'parent' if parent_doctype != doctype else 'name'
    add_main_header()
    w.writerow([''])
    tablerow = [get_data_keys_definition().doctype, ""]
    labelrow = [_("Column Labels:"), "ID"]
    fieldrow = [get_data_keys_definition().columns, key]
    mandatoryrow = [_("Mandatory:"), _("Yes")]
    typerow = [_('Type:'), 'Data (text)']
    inforow = [_('Info:'), '']
    columns = [key]
    build_field_columns(doctype)
    if all_doctypes:
        for d in child_doctypes:
            append_empty_field_column()
            if (select_columns and select_columns.get(d['doctype'], None)) or not select_columns:
                # if atleast one column is selected for this doctype
                build_field_columns(d['doctype'], d['parentfield'])
    add_field_headings()
    add_data()
    if from_data_import == "Yes" and excel_format == "Yes":
        # round-trip the CSV through a temp file to feed the xlsx builder
        filename = frappe.generate_hash("", 10)
        with open(filename, 'wb') as f:
            f.write(cstr(w.getvalue()).encode("utf-8"))
        f = open(filename)
        reader = csv.reader(f)
        from frappe.utils.xlsxutils import make_xlsx
        xlsx_file = make_xlsx(reader, "Data Import Template")
        f.close()
        os.remove(filename)
        # write out response as a xlsx type
        frappe.response['filename'] = doctype + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'
    else:
        # write out response as a type csv
        frappe.response['result'] = cstr(w.getvalue())
        frappe.response['type'] = 'csv'
        frappe.response['doctype'] = doctype
def get_template(doctype=None, parent_doctype=None, all_doctypes="No", with_data="No"):
    """Generate the (older, Python-2 era) data-import template for a doctype,
    optionally with current data and child tables, and attach it to the
    response as CSV.

    Flag parameters arrive as "Yes"/"No" strings. Uses `basestring` and
    list.sort(cmp) -- this variant only runs on Python 2.
    """
    all_doctypes = all_doctypes=="Yes"
    docs_to_export = {}
    if doctype:
        # doctype may be [name, export-filter-dict]
        if isinstance(doctype, basestring):
            doctype = [doctype]
        if len(doctype) > 1:
            docs_to_export = doctype[1]
        doctype = doctype[0]
    if not parent_doctype:
        parent_doctype = doctype
    # maps doctype -> start/end indexes into `columns`
    column_start_end = {}
    if all_doctypes:
        doctype_parentfield = {}
        child_doctypes = []
        for df in frappe.get_meta(doctype).get_table_fields():
            child_doctypes.append(df.options)
            doctype_parentfield[df.options] = df.fieldname

    def get_data_keys_definition():
        return get_data_keys()

    def add_main_header():
        # instructional preamble written at the top of the template
        w.writerow([_('Data Import Template')])
        w.writerow([get_data_keys_definition().main_table, doctype])
        if parent_doctype != doctype:
            w.writerow(
                [get_data_keys_definition().parent_table, parent_doctype])
        else:
            w.writerow([''])
        w.writerow([''])
        w.writerow([_('Notes:')])
        w.writerow([_('Please do not change the template headings.')])
        w.writerow([_('First data column must be blank.')])
        w.writerow([
            _('If you are uploading new records, leave the "name" (ID) column blank.'
              )
        ])
        w.writerow([
            _('If you are uploading new records, "Naming Series" becomes mandatory, if present.'
              )
        ])
        w.writerow([
            _('Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.'
              )
        ])
        w.writerow([_('For updating, you can update only selective columns.')])
        w.writerow([
            _('You can only upload upto 5000 records in one go. (may be less in some cases)'
              )
        ])
        if key == "parent":
            w.writerow([
                _('"Parent" signifies the parent table in which this row must be added'
                  )
            ])
            w.writerow([
                _('If you are updating, please select "Overwrite" else existing rows will not be deleted.'
                  )
            ])

    def build_field_columns(dt):
        # append one column group (ID + mandatory + optional fields) for dt
        meta = frappe.get_meta(dt)
        tablecolumns = filter(None,
            [(meta.get_field(f[0]) or None) for f in frappe.db.sql('desc `tab%s`' % dt)])
        tablecolumns.sort(lambda a, b: a.idx - b.idx)
        if dt==doctype:
            column_start_end[dt] = frappe._dict({"start": 0})
        else:
            column_start_end[dt] = frappe._dict({"start": len(columns)})
        append_field_column(frappe._dict({
            "fieldname": "name",
            "label": "ID",
            "fieldtype": "Data",
            "reqd": 1,
            "idx": 0,
            "info": _("Leave blank for new records")
        }), True)
        for docfield in tablecolumns:
            append_field_column(docfield, True)
        # all non mandatory fields
        for docfield in tablecolumns:
            append_field_column(docfield, False)
        # append DocType name
        tablerow[column_start_end[dt].start + 1] = dt
        if dt!=doctype:
            tablerow[column_start_end[dt].start + 2] = doctype_parentfield[dt]
        column_start_end[dt].end = len(columns) + 1

    def append_field_column(docfield, mandatory):
        # include the field only in the matching mandatory/optional pass,
        # skipping internal and hidden fields
        if docfield and ((mandatory and docfield.reqd) or not (mandatory or docfield.reqd)) \
            and (docfield.fieldname not in ('parenttype', 'trash_reason')) and not docfield.hidden:
            tablerow.append("")
            fieldrow.append(docfield.fieldname)
            labelrow.append(_(docfield.label))
            mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
            typerow.append(docfield.fieldtype)
            inforow.append(getinforow(docfield))
            columns.append(docfield.fieldname)

    def append_empty_field_column():
        # visual separator column between doctype column groups
        tablerow.append("~")
        fieldrow.append("~")
        labelrow.append("")
        mandatoryrow.append("")
        typerow.append("")
        inforow.append("")
        columns.append("")

    def getinforow(docfield):
        """make info comment for options, links etc."""
        if docfield.fieldtype == 'Select':
            if not docfield.options:
                return ''
            else:
                return _("One of") + ': %s' % ', '.join(filter(None, docfield.options.split('\n')))
        elif docfield.fieldtype == 'Link':
            return 'Valid %s' % docfield.options
        elif docfield.fieldtype == 'Int':
            return 'Integer'
        elif docfield.fieldtype == "Check":
            return "0 or 1"
        elif hasattr(docfield, "info"):
            return docfield.info
        else:
            return ''

    def add_field_headings():
        w.writerow(tablerow)
        w.writerow(labelrow)
        w.writerow(fieldrow)
        w.writerow(mandatoryrow)
        w.writerow(typerow)
        w.writerow(inforow)
        w.writerow([get_data_keys_definition().data_separator])

    def add_data():
        def add_data_row(row_group, dt, doc, rowidx):
            # place doc's values into its doctype's column slice of the row
            d = doc.copy()
            if all_doctypes:
                d.name = '"'+ d.name+'"'
            if len(row_group) < rowidx + 1:
                row_group.append([""] * (len(columns) + 1))
            row = row_group[rowidx]
            for i, c in enumerate(columns[column_start_end[dt].start:column_start_end[dt].end]):
                row[column_start_end[dt].start + i + 1] = d.get(c, "")
        if with_data=='Yes':
            frappe.permissions.can_export(parent_doctype, raise_exception=True)
            # get permitted data only
            data = frappe.get_list(doctype, fields=["*"], limit_page_length=None)
            for doc in data:
                # docs_to_export may restrict the export by exact names,
                # an =/!= operator, or a regex over doc names
                op = docs_to_export.get("op")
                names = docs_to_export.get("name")
                if names and op:
                    if op == '=' and doc.name not in names:
                        continue
                    elif op == '!=' and doc.name in names:
                        continue
                elif names:
                    try:
                        sflags = docs_to_export.get("flags", "I,U").upper()
                        flags = 0
                        for a in re.split('\W+',sflags):
                            flags = flags | reflags.get(a,0)
                        c = re.compile(names, flags)
                        m = c.match(doc.name)
                        if not m:
                            continue
                    except:
                        # invalid regex: fall back to exact-name membership
                        if doc.name not in names:
                            continue
                # add main table
                row_group = []
                add_data_row(row_group, doctype, doc, 0)
                if all_doctypes:
                    # add child tables
                    for child_doctype in child_doctypes:
                        for ci, child in enumerate(frappe.db.sql("""select * from `tab%s`
                            where parent=%s order by idx""" % (child_doctype, "%s"),
                            doc.name, as_dict=1)):
                            add_data_row(row_group, child_doctype, child, ci)
                for row in row_group:
                    w.writerow(row)

    w = UnicodeWriter()
    key = 'parent' if parent_doctype != doctype else 'name'
    add_main_header()
    w.writerow([''])
    tablerow = [get_data_keys_definition().doctype, ""]
    labelrow = [_("Column Labels:"), "ID"]
    fieldrow = [get_data_keys_definition().columns, key]
    mandatoryrow = [_("Mandatory:"), _("Yes")]
    typerow = [_('Type:'), 'Data (text)']
    inforow = [_('Info:'), '']
    columns = [key]
    build_field_columns(doctype)
    if all_doctypes:
        for d in child_doctypes:
            append_empty_field_column()
            build_field_columns(d)
    add_field_headings()
    add_data()
    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = doctype
def get_template(doctype=None, parent_doctype=None, all_doctypes="No", with_data="No"):
    """Build a CSV data-import template for `doctype` and place it on
    ``frappe.response``.

    :param doctype: doctype name, or a list ``[doctype, docs_to_export]`` where
        the second element restricts which document names are exported.
    :param parent_doctype: parent doctype when exporting a child table.
    :param all_doctypes: "Yes" to also include all child tables of `doctype`.
    :param with_data: "Yes" to append existing records below the headings.

    Side effect: sets ``frappe.response['result'/'type'/'doctype']`` (CSV).
    """
    all_doctypes = all_doctypes == "Yes"
    docs_to_export = {}
    if doctype:
        # `doctype` may arrive as [name, docs_to_export-dict]
        # NOTE(review): `basestring` is Python 2 only — confirm a compat alias is in scope
        if isinstance(doctype, basestring):
            doctype = [doctype]
        if len(doctype) > 1:
            docs_to_export = doctype[1]
        doctype = doctype[0]
    if not parent_doctype:
        parent_doctype = doctype
    # maps doctype -> dict(start, end) column span within the CSV row
    column_start_end = {}
    if all_doctypes:
        doctype_parentfield = {}
        child_doctypes = []
        for df in frappe.get_meta(doctype).get_table_fields():
            child_doctypes.append(df.options)
            doctype_parentfield[df.options] = df.fieldname

    def get_data_keys_definition():
        # thin wrapper so the closures below share one source of data keys
        return get_data_keys()

    def add_main_header():
        # template banner plus usage notes at the top of the CSV
        w.writerow([_('Data Import Template')])
        w.writerow([get_data_keys_definition().main_table, doctype])
        if parent_doctype != doctype:
            w.writerow([get_data_keys_definition().parent_table, parent_doctype])
        else:
            w.writerow([''])
        w.writerow([''])
        w.writerow([_('Notes:')])
        w.writerow([_('Please do not change the template headings.')])
        w.writerow([_('First data column must be blank.')])
        w.writerow([_('If you are uploading new records, leave the "name" (ID) column blank.')])
        w.writerow([_('If you are uploading new records, "Naming Series" becomes mandatory, if present.')])
        w.writerow([_('Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.')])
        w.writerow([_('For updating, you can update only selective columns.')])
        w.writerow([_('You can only upload upto 5000 records in one go. (may be less in some cases)')])
        if key == "parent":
            w.writerow([_('"Parent" signifies the parent table in which this row must be added')])
            w.writerow([_('If you are updating, please select "Overwrite" else existing rows will not be deleted.')])

    def build_field_columns(dt):
        # append one column per visible docfield of `dt`, mandatory fields first
        meta = frappe.get_meta(dt)
        # NOTE(review): filter() returning a list and cmp-style sort() are
        # Python 2 only — Python 3 would need list(...) and sort(key=...)
        tablecolumns = filter(None, [(meta.get_field(f[0]) or None) for f in frappe.db.sql('desc `tab%s`' % dt)])
        tablecolumns.sort(lambda a, b: a.idx - b.idx)
        if dt == doctype:
            column_start_end[dt] = frappe._dict({"start": 0})
        else:
            column_start_end[dt] = frappe._dict({"start": len(columns)})
            # each child table gets its own leading ID column
            append_field_column(frappe._dict({
                "fieldname": "name",
                "label": "ID",
                "fieldtype": "Data",
                "reqd": 1,
                "idx": 0,
                "info": _("Leave blank for new records")
            }), True)
        for docfield in tablecolumns:
            append_field_column(docfield, True)
        # all non mandatory fields
        for docfield in tablecolumns:
            append_field_column(docfield, False)
        # append DocType name
        tablerow[column_start_end[dt].start + 1] = dt
        if dt != doctype:
            tablerow[column_start_end[dt].start + 2] = doctype_parentfield[dt]
        column_start_end[dt].end = len(columns) + 1

    def append_field_column(docfield, mandatory):
        # add the docfield to every heading row; `mandatory` selects which of
        # the two passes (required vs optional fields) may emit it
        if docfield and ((mandatory and docfield.reqd) or not (mandatory or docfield.reqd)) \
            and (docfield.fieldname not in ('parenttype', 'trash_reason')) and not docfield.hidden:
            tablerow.append("")
            fieldrow.append(docfield.fieldname)
            labelrow.append(_(docfield.label))
            mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
            typerow.append(docfield.fieldtype)
            inforow.append(getinforow(docfield))
            columns.append(docfield.fieldname)

    def append_empty_field_column():
        # "~" marks a spacer column between doctype sections
        tablerow.append("~")
        fieldrow.append("~")
        labelrow.append("")
        mandatoryrow.append("")
        typerow.append("")
        inforow.append("")
        columns.append("")

    def getinforow(docfield):
        """make info comment for options, links etc."""
        if docfield.fieldtype == 'Select':
            if not docfield.options:
                return ''
            else:
                return _("One of") + ': %s' % ', '.join(filter(None, docfield.options.split('\n')))
        elif docfield.fieldtype == 'Link':
            return 'Valid %s' % docfield.options
        elif docfield.fieldtype == 'Int':
            return 'Integer'
        elif docfield.fieldtype == "Check":
            return "0 or 1"
        elif hasattr(docfield, "info"):
            return docfield.info
        else:
            return ''

    def add_field_headings():
        # emit the six heading rows followed by the data separator marker
        w.writerow(tablerow)
        w.writerow(labelrow)
        w.writerow(fieldrow)
        w.writerow(mandatoryrow)
        w.writerow(typerow)
        w.writerow(inforow)
        w.writerow([get_data_keys_definition().data_separator])

    def add_data():
        def add_data_row(row_group, dt, doc, rowidx):
            # place doc's values into row `rowidx`, at dt's column span
            d = doc.copy()
            if all_doctypes:
                # quote the name so spreadsheet apps keep it as text
                d.name = '"'+ d.name+'"'
            if len(row_group) < rowidx + 1:
                row_group.append([""] * (len(columns) + 1))
            row = row_group[rowidx]
            for i, c in enumerate(columns[column_start_end[dt].start:column_start_end[dt].end]):
                row[column_start_end[dt].start + i + 1] = d.get(c, "")

        if with_data == 'Yes':
            frappe.permissions.can_export(parent_doctype, raise_exception=True)
            # get permitted data only
            data = frappe.get_list(doctype, fields=["*"], limit_page_length=None)
            for doc in data:
                # docs_to_export may restrict by exact names (op '='/'!=')
                # or by a regex over names (with optional re flags)
                op = docs_to_export.get("op")
                names = docs_to_export.get("name")
                if names and op:
                    if op == '=' and doc.name not in names:
                        continue
                    elif op == '!=' and doc.name in names:
                        continue
                elif names:
                    try:
                        sflags = docs_to_export.get("flags", "I,U").upper()
                        flags = 0
                        for a in re.split('\W+', sflags):
                            flags = flags | reflags.get(a, 0)
                        c = re.compile(names, flags)
                        m = c.match(doc.name)
                        if not m:
                            continue
                    except:
                        # fall back to exact-name matching if the regex is invalid
                        if doc.name not in names:
                            continue
                # add main table
                row_group = []
                add_data_row(row_group, doctype, doc, 0)
                if all_doctypes:
                    # add child tables
                    for child_doctype in child_doctypes:
                        for ci, child in enumerate(frappe.db.sql("""select * from `tab%s`
                            where parent=%s order by idx""" % (child_doctype, "%s"), doc.name, as_dict=1)):
                            add_data_row(row_group, child_doctype, child, ci)
                for row in row_group:
                    w.writerow(row)

    # -- main flow: banner, field columns, headings, then (optionally) data --
    w = UnicodeWriter()
    key = 'parent' if parent_doctype != doctype else 'name'
    add_main_header()
    w.writerow([''])
    tablerow = [get_data_keys_definition().doctype, ""]
    labelrow = [_("Column Labels:"), "ID"]
    fieldrow = [get_data_keys_definition().columns, key]
    mandatoryrow = [_("Mandatory:"), _("Yes")]
    typerow = [_('Type:'), 'Data (text)']
    inforow = [_('Info:'), '']
    columns = [key]
    build_field_columns(doctype)
    if all_doctypes:
        for d in child_doctypes:
            append_empty_field_column()
            build_field_columns(d)
    add_field_headings()
    add_data()
    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = doctype
class DataExporter:
    """Export GL Entries for one voucher type as a fixed-width CIEL
    (XIMPORT.TXT) accounting-software import file.

    Fix: ``build_response`` now returns immediately after rendering the
    "No Data" page; previously it fell through and overwrote that page
    with an empty binary response.
    """

    def __init__(self, company=None, accounting_document=None, from_date=None, to_date=None):
        self.company = company
        self.accounting_document = accounting_document
        self.from_date = from_date
        self.to_date = to_date
        # export format configured on the Company record (only "CIEL" is handled)
        self.file_format = frappe.db.get_value("Company", self.company, "export_file_format")

    def build_response(self):
        # QUOTE_NONE: CIEL is fixed-width text, no CSV quoting wanted
        self.writer = UnicodeWriter(quoting=csv.QUOTE_NONE)
        self.add_data()
        if not self.data:
            frappe.respond_as_web_page(_('No Data'),
                _('There is no data to be exported'),
                indicator_color='orange')
            # stop here so the web page response is not clobbered below
            return
        # write out response
        if self.file_format == "CIEL":
            frappe.response['filename'] = 'XIMPORT.TXT'
            frappe.response['filecontent'] = self.writer.getvalue()
            frappe.response['type'] = 'binary'

    def add_data(self):
        """Fetch GL entries (excluding bank/cash legs) for the configured
        voucher type and date range, then format one line per entry."""
        # get permitted data only
        self.data = frappe.db.sql("""
            select
                gl.name, gl.posting_date, gl.debit, gl.credit, gl.voucher_no,
                gl.party_type, gl.party, gl.against_voucher_type,
                acc.account_number, acc.account_name,
                supp.subledger_account as supp_subl_acc,
                cust.subledger_account as cust_subl_acc,
                pinv.due_date as pinv_due_date,
                sinv.due_date as sinv_due_date
            from `tabGL Entry` gl
            inner join `tabAccount` acc on gl.account = acc.name
            left join `tabAccount` against_acc on gl.against = against_acc.name
            left join `tabSupplier` supp on gl.party = supp.name
            left join `tabCustomer` cust on gl.party = cust.name
            left join `tabPurchase Invoice` pinv on gl.against_voucher = pinv.name
            left join `tabSales Invoice` sinv on gl.against_voucher = sinv.name
            where gl.voucher_type = %(voucher_type)s
            and gl.posting_date between %(from_date)s and %(to_date)s
            and acc.account_type not in ("Bank", "Cash")
            and ifnull(against_acc.account_type, "") not in ("Bank", "Cash")
            order by gl.name""", {
                "voucher_type": self.accounting_document,
                "from_date": self.from_date,
                "to_date": self.to_date
            }, as_dict=True)

        # get journal code for the voucher type being exported
        if self.accounting_document == "Purchase Invoice":
            self.journal_code = frappe.db.get_value("Company", self.company, "buying_journal_code")
        elif self.accounting_document == "Sales Invoice":
            self.journal_code = frappe.db.get_value("Company", self.company, "selling_journal_code")
        else:
            self.journal_code = ""

        # format row (one fixed-width string per GL entry)
        for doc in self.data:
            row = []
            if self.file_format == "CIEL":
                row = self.add_row_ciel(doc)
            self.writer.writerow([row])

    def add_row_ciel(self, doc):
        """Return one CIEL fixed-width line for a GL entry dict.

        Field widths follow the CIEL XIMPORT layout: entry number (5),
        journal code (2), dates (8, yyyyMMdd), piece number (12),
        account (11), label (25), amount (13), etc.
        """
        ecriture_num = '{:>5s}'.format(doc.get("name")[-5:])
        journal_code = '{:<2s}'.format(self.journal_code)
        ecriture_date = format_datetime(doc.get("posting_date"), "yyyyMMdd")

        # due date comes from the invoice the entry settles, when any
        if doc.get("against_voucher_type") == "Purchase Invoice":
            echeance_date = format_datetime(doc.get("pinv_due_date"), "yyyyMMdd")
        elif doc.get("against_voucher_type") == "Sales Invoice":
            echeance_date = format_datetime(doc.get("sinv_due_date"), "yyyyMMdd")
        else:
            echeance_date = '{:<8s}'.format("")

        piece_num = '{:<12s}'.format(doc.get("voucher_no"))

        # French chart of accounts: 401 = suppliers, 411 = customers
        if doc.get("party_type") == "Supplier":
            compte_num = '{}{:<8s}'.format("401", doc.get("supp_subl_acc") or '')
        elif doc.get("party_type") == "Customer":
            compte_num = '{}{:<8s}'.format("411", doc.get("cust_subl_acc") or '')
        else:
            compte_num = '{:<11s}'.format(doc.get("account_number"))

        libelle = '{}{:<17s}'.format("FACTURE ", doc.get("voucher_no")[:17])
        montant = '{:>13.2f}'.format(doc.get("debit")) if doc.get("debit") != 0 \
            else '{:>13.2f}'.format(doc.get("credit"))
        credit_debit = "D" if doc.get("debit") > 0 else "C"
        numero_pointage = piece_num
        code_analytic = '{:<6s}'.format("")
        if doc.get("party_type") in ("Supplier", "Customer"):
            libelle_compte = '{:<34s}'.format(doc.get("party"))[:34]
        else:
            libelle_compte = '{:<34s}'.format(doc.get("account_name"))[:34]
        euro = "O"

        row = [
            ecriture_num, journal_code, ecriture_date, echeance_date,
            piece_num, compte_num, libelle, montant, credit_debit,
            numero_pointage, code_analytic, libelle_compte, euro
        ]
        return ''.join(row)
class DataExporter:
    """Export a doctype (optionally with its child tables) as a CSV or
    Excel data-import template, with or without existing data.

    Fixes: ``build_response`` now returns after rendering the "No Data"
    page (previously it fell through and overwrote that response), and
    the regex split pattern is a raw string (``'\\W+'`` is an invalid
    escape sequence warning on Python 3.6+).
    """

    def __init__(self, doctype=None, parent_doctype=None, all_doctypes=True, with_data=False,
            select_columns=None, file_type='CSV', template=False, filters=None):
        self.doctype = doctype
        self.parent_doctype = parent_doctype
        self.all_doctypes = all_doctypes
        self.with_data = cint(with_data)
        self.select_columns = select_columns
        self.file_type = file_type
        self.template = template
        self.filters = filters
        self.data_keys = get_data_keys()
        self.prepare_args()

    def prepare_args(self):
        """Normalize constructor arguments and collect child doctypes."""
        if self.select_columns:
            self.select_columns = parse_json(self.select_columns)
        if self.filters:
            self.filters = parse_json(self.filters)

        self.docs_to_export = {}
        if self.doctype:
            # doctype may arrive as [name, docs_to_export-dict]
            if isinstance(self.doctype, string_types):
                self.doctype = [self.doctype]
            if len(self.doctype) > 1:
                self.docs_to_export = self.doctype[1]
            self.doctype = self.doctype[0]

        if not self.parent_doctype:
            self.parent_doctype = self.doctype

        # maps (doctype, parentfield) -> dict(start, end) column span
        self.column_start_end = {}

        if self.all_doctypes:
            self.child_doctypes = []
            for df in frappe.get_meta(self.doctype).get_table_fields():
                self.child_doctypes.append(dict(doctype=df.options, parentfield=df.fieldname))

    def build_response(self):
        """Assemble the CSV/Excel output and set ``frappe.response``."""
        self.writer = UnicodeWriter()
        self.name_field = 'parent' if self.parent_doctype != self.doctype else 'name'

        if self.template:
            self.add_main_header()

        self.writer.writerow([''])
        self.tablerow = [self.data_keys.doctype]
        self.labelrow = [_("Column Labels:")]
        self.fieldrow = [self.data_keys.columns]
        self.mandatoryrow = [_("Mandatory:")]
        self.typerow = [_('Type:')]
        self.inforow = [_('Info:')]
        self.columns = []

        self.build_field_columns(self.doctype)

        if self.all_doctypes:
            for d in self.child_doctypes:
                self.append_empty_field_column()
                if (self.select_columns and self.select_columns.get(d['doctype'], None)) or not self.select_columns:
                    # if atleast one column is selected for this doctype
                    self.build_field_columns(d['doctype'], d['parentfield'])

        self.add_field_headings()
        self.add_data()

        if self.with_data and not self.data:
            frappe.respond_as_web_page(_('No Data'),
                _('There is no data to be exported'),
                indicator_color='orange')
            # stop here so the web page response is not clobbered below
            return

        if self.file_type == 'Excel':
            self.build_response_as_excel()
        else:
            # write out response as a type csv
            frappe.response['result'] = cstr(self.writer.getvalue())
            frappe.response['type'] = 'csv'
            frappe.response['doctype'] = self.doctype

    def add_main_header(self):
        """Write the template banner and usage notes at the top of the file."""
        self.writer.writerow([_('Data Import Template')])
        self.writer.writerow([self.data_keys.main_table, self.doctype])
        if self.parent_doctype != self.doctype:
            self.writer.writerow([self.data_keys.parent_table, self.parent_doctype])
        else:
            self.writer.writerow([''])
        self.writer.writerow([''])
        self.writer.writerow([_('Notes:')])
        self.writer.writerow([_('Please do not change the template headings.')])
        self.writer.writerow([_('First data column must be blank.')])
        self.writer.writerow([_('If you are uploading new records, leave the "name" (ID) column blank.')])
        self.writer.writerow([_('If you are uploading new records, "Naming Series" becomes mandatory, if present.')])
        self.writer.writerow([_('Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.')])
        self.writer.writerow([_('For updating, you can update only selective columns.')])
        self.writer.writerow([_('You can only upload upto 5000 records in one go. (may be less in some cases)')])
        if self.name_field == "parent":
            self.writer.writerow([_('"Parent" signifies the parent table in which this row must be added')])
            self.writer.writerow([_('If you are updating, please select "Overwrite" else existing rows will not be deleted.')])

    def build_field_columns(self, dt, parentfield=None):
        """Append one column per visible docfield of `dt` (mandatory first)
        and record the column span in ``self.column_start_end``."""
        meta = frappe.get_meta(dt)

        # build list of valid docfields, restricted to select_columns if given
        tablecolumns = []
        for f in frappe.db.sql('desc `tab%s`' % dt):
            field = meta.get_field(f[0])
            if field and ((self.select_columns and f[0] in self.select_columns[dt]) or not self.select_columns):
                tablecolumns.append(field)

        tablecolumns.sort(key=lambda a: int(a.idx))

        _column_start_end = frappe._dict(start=0)
        if dt == self.doctype:
            # ID column needed for prompt-named doctypes and for data export
            if (meta.get('autoname') and meta.get('autoname').lower() == 'prompt') or (self.with_data):
                self._append_name_column()
            # if importing only child table for new record, add parent field
            if meta.get('istable') and not self.with_data:
                self.append_field_column(frappe._dict({
                    "fieldname": "parent",
                    "parent": "",
                    "label": "Parent",
                    "fieldtype": "Data",
                    "reqd": 1,
                    "info": _("Parent is the name of the document to which the data will get added to.")
                }), True)
            _column_start_end = frappe._dict(start=0)
        else:
            _column_start_end = frappe._dict(start=len(self.columns))
            if self.with_data:
                self._append_name_column(dt)

        for docfield in tablecolumns:
            self.append_field_column(docfield, True)
        # all non mandatory fields
        for docfield in tablecolumns:
            self.append_field_column(docfield, False)

        # if there is one column, add a blank column (?)
        if len(self.columns) - _column_start_end.start == 1:
            self.append_empty_field_column()

        # append DocType name
        self.tablerow[_column_start_end.start + 1] = dt
        if parentfield:
            self.tablerow[_column_start_end.start + 2] = parentfield

        _column_start_end.end = len(self.columns) + 1
        self.column_start_end[(dt, parentfield)] = _column_start_end

    def append_field_column(self, docfield, for_mandatory):
        """Add `docfield` to every heading row; `for_mandatory` selects
        which of the two passes (required vs optional) may emit it."""
        if not docfield:
            return
        if for_mandatory and not docfield.reqd:
            return
        if not for_mandatory and docfield.reqd:
            return
        if docfield.fieldname in ('parenttype', 'trash_reason'):
            return
        if docfield.hidden:
            return
        if self.select_columns and docfield.fieldname not in self.select_columns.get(docfield.parent, []) \
            and docfield.fieldname != "name":
            return

        self.tablerow.append("")
        self.fieldrow.append(docfield.fieldname)
        self.labelrow.append(_(docfield.label))
        self.mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
        self.typerow.append(docfield.fieldtype)
        self.inforow.append(self.getinforow(docfield))
        self.columns.append(docfield.fieldname)

    def append_empty_field_column(self):
        # "~" marks a spacer column between doctype sections
        self.tablerow.append("~")
        self.fieldrow.append("~")
        self.labelrow.append("")
        self.mandatoryrow.append("")
        self.typerow.append("")
        self.inforow.append("")
        self.columns.append("")

    @staticmethod
    def getinforow(docfield):
        """make info comment for options, links etc."""
        if docfield.fieldtype == 'Select':
            if not docfield.options:
                return ''
            else:
                return _("One of") + ': %s' % ', '.join(filter(None, docfield.options.split('\n')))
        elif docfield.fieldtype == 'Link':
            return 'Valid %s' % docfield.options
        elif docfield.fieldtype == 'Int':
            return 'Integer'
        elif docfield.fieldtype == "Check":
            return "0 or 1"
        elif docfield.fieldtype in ["Date", "Datetime"]:
            return cstr(frappe.defaults.get_defaults().date_format)
        elif hasattr(docfield, "info"):
            return docfield.info
        else:
            return ''

    def add_field_headings(self):
        """Emit the heading rows; the data separator only for templates."""
        self.writer.writerow(self.tablerow)
        self.writer.writerow(self.labelrow)
        self.writer.writerow(self.fieldrow)
        self.writer.writerow(self.mandatoryrow)
        self.writer.writerow(self.typerow)
        self.writer.writerow(self.inforow)
        if self.template:
            self.writer.writerow([self.data_keys.data_separator])

    def add_data(self):
        """Append permitted records (and their child rows) to the writer."""
        if self.template and not self.with_data:
            return

        frappe.permissions.can_export(self.parent_doctype, raise_exception=True)

        # sort nested set doctypes by `lft asc`
        order_by = None
        table_columns = frappe.db.get_table_columns(self.parent_doctype)
        if 'lft' in table_columns and 'rgt' in table_columns:
            order_by = '`tab{doctype}`.`lft` asc'.format(doctype=self.parent_doctype)

        # get permitted data only
        self.data = frappe.get_list(self.doctype, fields=["*"], filters=self.filters,
            limit_page_length=None, order_by=order_by)

        for doc in self.data:
            # docs_to_export may restrict by exact names (op '='/'!=') or
            # by a regex over names (with optional re flags)
            op = self.docs_to_export.get("op")
            names = self.docs_to_export.get("name")

            if names and op:
                if op == '=' and doc.name not in names:
                    continue
                elif op == '!=' and doc.name in names:
                    continue
            elif names:
                try:
                    sflags = self.docs_to_export.get("flags", "I,U").upper()
                    flags = 0
                    for a in re.split(r'\W+', sflags):
                        flags = flags | reflags.get(a, 0)
                    c = re.compile(names, flags)
                    m = c.match(doc.name)
                    if not m:
                        continue
                except Exception:
                    # fall back to exact-name matching if the regex is invalid
                    if doc.name not in names:
                        continue

            # add main table
            rows = []
            self.add_data_row(rows, self.doctype, None, doc, 0)

            if self.all_doctypes:
                # add child tables
                for c in self.child_doctypes:
                    for ci, child in enumerate(frappe.db.sql("""select * from `tab{0}`
                        where parent=%s and parentfield=%s order by idx""".format(c['doctype']),
                        (doc.name, c['parentfield']), as_dict=1)):
                        self.add_data_row(rows, c['doctype'], c['parentfield'], child, ci)

            for row in rows:
                self.writer.writerow(row)

    def add_data_row(self, rows, dt, parentfield, doc, rowidx):
        """Place `doc`'s values into row `rowidx`, at dt's column span."""
        d = doc.copy()
        meta = frappe.get_meta(dt)
        if self.all_doctypes:
            # quote the name so spreadsheet apps keep it as text
            d.name = '"' + d.name + '"'

        if len(rows) < rowidx + 1:
            rows.append([""] * (len(self.columns) + 1))
        row = rows[rowidx]

        _column_start_end = self.column_start_end.get((dt, parentfield))
        if _column_start_end:
            for i, c in enumerate(self.columns[_column_start_end.start:_column_start_end.end]):
                df = meta.get_field(c)
                fieldtype = df.fieldtype if df else "Data"
                value = d.get(c, "")
                if value:
                    # render dates in the user's display format
                    if fieldtype == "Date":
                        value = formatdate(value)
                    elif fieldtype == "Datetime":
                        value = format_datetime(value)
                row[_column_start_end.start + i + 1] = value

    def build_response_as_excel(self):
        """Round-trip the CSV through a temp file into an xlsx response."""
        filename = frappe.generate_hash("", 10)
        with open(filename, 'wb') as f:
            f.write(cstr(self.writer.getvalue()).encode('utf-8'))
        # NOTE(review): reopened in text mode with the locale's default
        # encoding although written as utf-8 — confirm on non-UTF8 systems
        f = open(filename)
        reader = csv.reader(f)

        from frappe.utils.xlsxutils import make_xlsx
        xlsx_file = make_xlsx(reader, "Data Import Template" if self.template else 'Data Export')

        f.close()
        os.remove(filename)

        # write out response as a xlsx type
        frappe.response['filename'] = self.doctype + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'

    def _append_name_column(self, dt=None):
        # leading ID column: `name` for child rows, else the computed key
        self.append_field_column(frappe._dict({
            "fieldname": "name" if dt else self.name_field,
            "parent": dt or "",
            "label": "ID",
            "fieldtype": "Data",
            "reqd": 1,
        }), True)
def get_template(doctype=None, parent_doctype=None, all_doctypes="No", with_data="No", select_columns=None, from_data_import="No", excel_format="No"):
    """Build a data-import template for `doctype` (optionally with child
    tables, selected columns and existing data) and place it on
    ``frappe.response`` as CSV or, for data-import requests, as xlsx.

    :param doctype: doctype name, or a list ``[doctype, docs_to_export]`` where
        the second element restricts which document names are exported.
    :param parent_doctype: parent doctype when exporting a child table.
    :param all_doctypes: "Yes" to also include all child tables of `doctype`.
    :param with_data: "Yes" to append existing records below the headings.
    :param select_columns: JSON string mapping doctype -> list of fieldnames.
    :param from_data_import: "Yes" when invoked by the Data Import tool.
    :param excel_format: "Yes" to return xlsx instead of CSV (with the above).
    """
    all_doctypes = all_doctypes == "Yes"
    if select_columns:
        select_columns = json.loads(select_columns)
    docs_to_export = {}
    if doctype:
        # `doctype` may arrive as [name, docs_to_export-dict]
        # NOTE(review): `basestring` is Python 2 only — confirm a compat alias is in scope
        if isinstance(doctype, basestring):
            doctype = [doctype]
        if len(doctype) > 1:
            docs_to_export = doctype[1]
        doctype = doctype[0]
    if not parent_doctype:
        parent_doctype = doctype
    # maps (doctype, parentfield) -> dict(start, end) column span
    column_start_end = {}
    if all_doctypes:
        child_doctypes = []
        for df in frappe.get_meta(doctype).get_table_fields():
            child_doctypes.append(dict(doctype=df.options, parentfield=df.fieldname))

    def get_data_keys_definition():
        # thin wrapper so the closures below share one source of data keys
        return get_data_keys()

    def add_main_header():
        # template banner plus usage notes at the top of the file
        w.writerow([_('Data Import Template')])
        w.writerow([get_data_keys_definition().main_table, doctype])
        if parent_doctype != doctype:
            w.writerow([get_data_keys_definition().parent_table, parent_doctype])
        else:
            w.writerow([''])
        w.writerow([''])
        w.writerow([_('Notes:')])
        w.writerow([_('Please do not change the template headings.')])
        w.writerow([_('First data column must be blank.')])
        w.writerow([_('If you are uploading new records, leave the "name" (ID) column blank.')])
        w.writerow([_('If you are uploading new records, "Naming Series" becomes mandatory, if present.')])
        w.writerow([_('Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.')])
        w.writerow([_('For updating, you can update only selective columns.')])
        w.writerow([_('You can only upload upto 5000 records in one go. (may be less in some cases)')])
        if key == "parent":
            w.writerow([_('"Parent" signifies the parent table in which this row must be added')])
            w.writerow([_('If you are updating, please select "Overwrite" else existing rows will not be deleted.')])

    def build_field_columns(dt, parentfield=None):
        # append one column per visible docfield of `dt` (mandatory first)
        # and record the column span in `column_start_end`
        meta = frappe.get_meta(dt)
        # build list of valid docfields
        tablecolumns = []
        for f in frappe.db.sql('desc `tab%s`' % dt):
            field = meta.get_field(f[0])
            if field and ((select_columns and f[0] in select_columns[dt]) or not select_columns):
                tablecolumns.append(field)
        # NOTE(review): cmp-style sort() is Python 2 only — py3 needs sort(key=...)
        tablecolumns.sort(lambda a, b: int(a.idx - b.idx))
        _column_start_end = frappe._dict(start=0)
        if dt == doctype:
            _column_start_end = frappe._dict(start=0)
        else:
            _column_start_end = frappe._dict(start=len(columns))
            # each child table gets its own leading ID column
            append_field_column(frappe._dict({
                "fieldname": "name",
                "parent": dt,
                "label": "ID",
                "fieldtype": "Data",
                "reqd": 1,
                "idx": 0,
                "info": _("Leave blank for new records")
            }), True)
        for docfield in tablecolumns:
            append_field_column(docfield, True)
        # all non mandatory fields
        for docfield in tablecolumns:
            append_field_column(docfield, False)
        # if there is one column, add a blank column (?)
        if len(columns) - _column_start_end.start == 1:
            append_empty_field_column()
        # append DocType name
        tablerow[_column_start_end.start + 1] = dt
        if parentfield:
            tablerow[_column_start_end.start + 2] = parentfield
        _column_start_end.end = len(columns) + 1
        column_start_end[(dt, parentfield)] = _column_start_end

    def append_field_column(docfield, for_mandatory):
        # add the docfield to every heading row; `for_mandatory` selects
        # which of the two passes (required vs optional fields) may emit it
        if not docfield:
            return
        if for_mandatory and not docfield.reqd:
            return
        if not for_mandatory and docfield.reqd:
            return
        if docfield.fieldname in ('parenttype', 'trash_reason'):
            return
        if docfield.hidden:
            return
        if select_columns and docfield.fieldname not in select_columns.get(docfield.parent, []):
            return
        tablerow.append("")
        fieldrow.append(docfield.fieldname)
        labelrow.append(_(docfield.label))
        mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
        typerow.append(docfield.fieldtype)
        inforow.append(getinforow(docfield))
        columns.append(docfield.fieldname)

    def append_empty_field_column():
        # "~" marks a spacer column between doctype sections
        tablerow.append("~")
        fieldrow.append("~")
        labelrow.append("")
        mandatoryrow.append("")
        typerow.append("")
        inforow.append("")
        columns.append("")

    def getinforow(docfield):
        """make info comment for options, links etc."""
        if docfield.fieldtype == 'Select':
            if not docfield.options:
                return ''
            else:
                return _("One of") + ': %s' % ', '.join(filter(None, docfield.options.split('\n')))
        elif docfield.fieldtype == 'Link':
            return 'Valid %s' % docfield.options
        elif docfield.fieldtype == 'Int':
            return 'Integer'
        elif docfield.fieldtype == "Check":
            return "0 or 1"
        elif hasattr(docfield, "info"):
            return docfield.info
        else:
            return ''

    def add_field_headings():
        # emit the six heading rows followed by the data separator marker
        w.writerow(tablerow)
        w.writerow(labelrow)
        w.writerow(fieldrow)
        w.writerow(mandatoryrow)
        w.writerow(typerow)
        w.writerow(inforow)
        w.writerow([get_data_keys_definition().data_separator])

    def add_data():
        def add_data_row(row_group, dt, parentfield, doc, rowidx):
            # place doc's values into row `rowidx`, at dt's column span
            d = doc.copy()
            meta = frappe.get_meta(dt)
            if all_doctypes:
                # quote the name so spreadsheet apps keep it as text
                d.name = '"' + d.name + '"'
            if len(row_group) < rowidx + 1:
                row_group.append([""] * (len(columns) + 1))
            row = row_group[rowidx]
            _column_start_end = column_start_end.get((dt, parentfield))
            if _column_start_end:
                for i, c in enumerate(columns[_column_start_end.start:_column_start_end.end]):
                    df = meta.get_field(c)
                    fieldtype = df.fieldtype if df else "Data"
                    value = d.get(c, "")
                    if value:
                        # render dates in the user's display format
                        if fieldtype == "Date":
                            value = formatdate(value)
                        elif fieldtype == "Datetime":
                            value = format_datetime(value)
                    row[_column_start_end.start + i + 1] = value

        if with_data == 'Yes':
            frappe.permissions.can_export(parent_doctype, raise_exception=True)
            # sort nested set doctypes by `lft asc`
            order_by = None
            table_columns = frappe.db.get_table_columns(parent_doctype)
            if 'lft' in table_columns and 'rgt' in table_columns:
                order_by = '`tab{doctype}`.`lft` asc'.format(doctype=parent_doctype)
            # get permitted data only
            data = frappe.get_list(doctype, fields=["*"], limit_page_length=None, order_by=order_by)
            for doc in data:
                # docs_to_export may restrict by exact names (op '='/'!=')
                # or by a regex over names (with optional re flags)
                op = docs_to_export.get("op")
                names = docs_to_export.get("name")
                if names and op:
                    if op == '=' and doc.name not in names:
                        continue
                    elif op == '!=' and doc.name in names:
                        continue
                elif names:
                    try:
                        sflags = docs_to_export.get("flags", "I,U").upper()
                        flags = 0
                        for a in re.split('\W+', sflags):
                            flags = flags | reflags.get(a, 0)
                        c = re.compile(names, flags)
                        m = c.match(doc.name)
                        if not m:
                            continue
                    except:
                        # fall back to exact-name matching if the regex is invalid
                        if doc.name not in names:
                            continue
                # add main table
                row_group = []
                add_data_row(row_group, doctype, None, doc, 0)
                if all_doctypes:
                    # add child tables
                    for c in child_doctypes:
                        for ci, child in enumerate(frappe.db.sql("""select * from `tab{0}`
                            where parent=%s and parentfield=%s order by idx""".format(c['doctype']),
                            (doc.name, c['parentfield']), as_dict=1)):
                            add_data_row(row_group, c['doctype'], c['parentfield'], child, ci)
                for row in row_group:
                    w.writerow(row)

    # -- main flow: banner, field columns, headings, then (optionally) data --
    w = UnicodeWriter()
    key = 'parent' if parent_doctype != doctype else 'name'
    add_main_header()
    w.writerow([''])
    tablerow = [get_data_keys_definition().doctype, ""]
    labelrow = [_("Column Labels:"), "ID"]
    fieldrow = [get_data_keys_definition().columns, key]
    mandatoryrow = [_("Mandatory:"), _("Yes")]
    typerow = [_('Type:'), 'Data (text)']
    inforow = [_('Info:'), '']
    columns = [key]
    build_field_columns(doctype)
    if all_doctypes:
        for d in child_doctypes:
            append_empty_field_column()
            if (select_columns and select_columns.get(d['doctype'], None)) or not select_columns:
                # if atleast one column is selected for this doctype
                build_field_columns(d['doctype'], d['parentfield'])
    add_field_headings()
    add_data()
    if from_data_import == "Yes" and excel_format == "Yes":
        # round-trip the CSV through a temp file into an xlsx response
        filename = frappe.generate_hash("", 10)
        with open(filename, 'wb') as f:
            f.write(cstr(w.getvalue()).encode("utf-8"))
        # NOTE(review): reopened in text mode with the locale's default
        # encoding although written as utf-8 — confirm on non-UTF8 systems
        f = open(filename)
        reader = csv.reader(f)
        from frappe.utils.xlsxutils import make_xlsx
        xlsx_file = make_xlsx(reader, "Data Import Template")
        f.close()
        os.remove(filename)
        # write out response as a xlsx type
        frappe.response['filename'] = doctype + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'
    else:
        # write out response as a type csv
        frappe.response['result'] = cstr(w.getvalue())
        frappe.response['type'] = 'csv'
        frappe.response['doctype'] = doctype
def get_file_address(from_date, to_date):
    """Build the DGII "606" purchases report (Dominican Republic) as CSV
    from submitted Purchase Invoices billed between `from_date` and
    `to_date`, and place it on ``frappe.response``.

    Fixes: the date range is now passed as query parameters instead of
    being interpolated into the SQL string (injection-safe and robust
    against quotes), and the leftover ``debug=True`` has been removed so
    the query is no longer echoed in production.
    """
    result = frappe.db.sql("""
        SELECT
            pinv.tax_id, supl.tipo_rnc, pinv.tipo_bienes_y_servicios_comprados,
            pinv.bill_no, pinv.bill_date, pinv.excise_tax,
            pinv.base_taxes_and_charges_added, pinv.retention_amount,
            pinv.isr_amount, pinv.total_taxes_and_charges, pinv.other_taxes,
            pinv.legal_tip, pinv.base_total, pinv.monto_facturado_servicios,
            pinv.monto_facturado_bienes
        FROM `tabPurchase Invoice` AS pinv
        LEFT JOIN `tabSupplier` AS supl ON supl.name = pinv.supplier
        WHERE pinv.docstatus = 1
        AND pinv.bill_date BETWEEN %(from_date)s AND %(to_date)s
    """, {"from_date": from_date, "to_date": to_date}, as_dict=True)

    w = UnicodeWriter()
    # fixed 606 column headings (Spanish, per the DGII layout)
    w.writerow([
        'RNC o Cedula', 'Tipo Id', 'Tipo Bienes y Servicios Comprados', 'NCF',
        'NCF o Documento Modificado', 'Fecha Comprobante', '', 'Fecha Pago', '',
        'Monto Facturado en Servicios', 'Monto Facturado en Bienes',
        'Total Monto Facturado', 'ITBIS Facturado', 'ITBIS Retenido',
        'ITBIS sujeto a Proporcionalidad (Art. 349)', 'ITBIS llevado al Costo',
        'ITBIS por Adelantar', 'ITBIS percibido en compras',
        'Tipo de Retencion en ISR', 'Monto Retención Renta',
        'ISR Percibido en compras', 'Impuesto Selectivo al Consumo',
        'Otros Impuesto/Tasas', 'Monto Propina Legal',
    ])

    for row in result:
        # bill_no may be "NCF-A1##%" — keep only the part after the dash
        bill_no = row.bill_no.split("-")[1] if (len(row.bill_no.split("-")) > 1) else row.bill_no  # NCF-A1##% || A1##%
        w.writerow([
            row.tax_id.replace("-", ""),  # RNC
            row.tipo_rnc,  # Tipo de RNC
            row.tipo_bienes_y_servicios_comprados,
            bill_no,  # NCF
            '',  # NCF modificado
            row.bill_date.strftime("%Y%m"),  # FC AAAAMM
            row.bill_date.strftime("%d"),  # FP DD
            row.bill_date.strftime("%Y%m"),  # FP AAAAMM
            row.bill_date.strftime("%d"),  # FP DD
            row.monto_facturado_servicios,  # Monto Facturado en Servicios
            row.monto_facturado_bienes,  # Monto Facturado en Bienes
            row.base_total,  # Monto Facturado
            row.total_taxes_and_charges,  # ITBIS Facturado
            # row.total_itbis,  # ITBIS Facturado
            row.retention_amount or 0,  # ITBIS Retenido
            '0',  # ITBIS sujeto a Proporcionalidad (Art. 349)
            # NOTE(review): total_itbis is not in the SELECT list; frappe's
            # dict rows return None for it, so this is always 0 — confirm
            row.total_itbis or 0,  # ITBIS llevado al Costo
            '0',  # ITBIS por Adelantar
            '0',  # ITBIS percibido en compras
            '',  # Tipo de Retención en ISR
            row.isr_amount or 0,  # Monto Retención Renta
            '0',  # ISR Percibido en compras
            row.excise_tax or 0,  # Impuesto Selectivo al Consumo
            row.other_taxes or 0,  # Otros Impuesto/Tasas
            row.legal_tip,  # Monto Propina Legal
        ])

    # write out response as a type csv
    frappe.response['result'] = cstr(w.getvalue())
    frappe.response['type'] = 'csv'
    frappe.response['doctype'] = "Reporte_606_" + str(int(time.time()))
class ExportEmailsCls(Document):
    """Export contact email addresses for Leads, Customers or Suppliers.

    ``doc`` (parsed JSON) chooses the source via ``export_from`` plus an
    optional ``customer_group`` / ``supplier_group`` filter; the result is
    written into ``frappe.response`` as a CSV (or .xlsx) download.
    """

    def __init__(self, doc):
        self.doc = parse_json(doc)

    def export_emails_to_csv(self):
        """Dispatch to the exporter matching ``self.doc.export_from``."""
        switcher = {
            "Lead": self.export_leads,
            "Customer": self.export_customers,
            "Supplier": self.export_suppliers,
        }
        func = switcher.get(self.doc.export_from, self.invalid_export)
        return func()

    def invalid_export(self):
        # Fixed "Adminstrator" typo in the user-facing message.
        frappe.throw("Invalid Document! Please contact Administrator")

    def export_leads(self):
        """Write name/company/email rows for every Lead with an email id."""
        self.writer = UnicodeWriter()
        lead_list = frappe.get_list(
            "Lead", fields=["name", "lead_name", "company_name", "email_id"])
        self.writer.writerow(["name", "company_name", "email"])
        for e in lead_list:
            if e.email_id:
                self.writer.writerow([e.lead_name, e.company_name, e.email_id])
        self.build_response_as_csv()

    def export_customers(self):
        """Write contact rows for Customers in the selected customer group."""
        self.writer = UnicodeWriter()
        group = self.doc.customer_group or "All Customer Groups"
        # Parameterized query: the previous version interpolated ``group``
        # with str.format(), an SQL-injection vector.
        customer_list = frappe.db.sql("""
            select customer.customer_name, contact.first_name,
                contact.last_name, contact.email_id
            from `tabCustomer` customer, `tabContact` contact,
                `tabDynamic Link` contact_dyn_link
            where customer.customer_group = %s
                and contact_dyn_link.parent = contact.name
                and contact_dyn_link.link_doctype = 'Customer'
                and contact_dyn_link.link_name = customer.name
            """, (group,), as_dict=True)
        self.writer.writerow(["name", "company_name", "email"])
        for e in customer_list:
            if e.email_id:
                self.writer.writerow([
                    "{0} {1}".format(e.first_name, e.last_name)
                    if e.last_name else e.first_name,
                    e.customer_name,
                    e.email_id,
                ])
        self.build_response_as_csv()

    def export_suppliers(self):
        """Write contact rows for Suppliers in the selected supplier group."""
        self.writer = UnicodeWriter()
        group = self.doc.supplier_group or "All Supplier Groups"
        # Parameterized query, same injection fix as export_customers().
        supplier_list = frappe.db.sql("""
            select supplier.supplier_name, contact.first_name,
                contact.last_name, contact.email_id
            from `tabSupplier` supplier, `tabContact` contact,
                `tabDynamic Link` contact_dyn_link
            where supplier.supplier_group = %s
                and contact_dyn_link.parent = contact.name
                and contact_dyn_link.link_doctype = 'Supplier'
                and contact_dyn_link.link_name = supplier.name
            """, (group,), as_dict=True)
        self.writer.writerow(["name", "company_name", "email"])
        for e in supplier_list:
            if e.email_id:
                self.writer.writerow([
                    "{0} {1}".format(e.first_name, e.last_name)
                    if e.last_name else e.first_name,
                    e.supplier_name,
                    e.email_id,
                ])
        self.build_response_as_csv()

    def build_response_as_excel(self):
        """Convert the accumulated CSV to .xlsx and set the binary response."""
        from frappe.utils.xlsxutils import make_xlsx

        # Round-trip through a temp file so csv.reader re-parses the rows.
        filename = frappe.generate_hash("", 10)
        with open(filename, 'wb') as f:
            f.write(cstr(self.writer.getvalue()).encode('utf-8'))
        try:
            # Context manager + finally: the previous version leaked the open
            # handle and the temp file if make_xlsx raised.
            with open(filename) as f:
                xlsx_file = make_xlsx(csv.reader(f), 'Data Export')
        finally:
            os.remove(filename)

        # write out response as a xlsx type
        frappe.response['filename'] = self.doc.export_from + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'

    def build_response_as_csv(self):
        """Place the accumulated CSV text into ``frappe.response``."""
        frappe.response['result'] = cstr(self.writer.getvalue())
        frappe.response['type'] = 'csv'
        frappe.response['doctype'] = self.doc.export_from
class DataExporter:
    """Build a Data Import template (optionally pre-filled with data) for a
    doctype and its child tables, emitted via ``frappe.response`` as CSV or
    Excel.

    Column bookkeeping: parallel row lists (``tablerow``/``labelrow``/
    ``fieldrow``/``mandatoryrow``/``typerow``/``inforow``) are appended in
    lockstep, and ``column_start_end`` records which column span belongs to
    which (doctype, parentfield) pair so data rows can be filled in place.
    """

    def __init__(
        self,
        doctype=None,
        parent_doctype=None,
        all_doctypes=True,
        with_data=False,
        select_columns=None,
        file_type="CSV",
        template=False,
        filters=None,
    ):
        self.doctype = doctype
        self.parent_doctype = parent_doctype
        self.all_doctypes = all_doctypes
        self.with_data = cint(with_data)
        self.select_columns = select_columns
        self.file_type = file_type
        self.template = template
        self.filters = filters
        self.data_keys = get_data_keys()
        self.prepare_args()

    def prepare_args(self):
        """Normalize constructor arguments (JSON strings, doctype list) and
        collect the child doctypes to export."""
        if self.select_columns:
            self.select_columns = parse_json(self.select_columns)
        if self.filters:
            self.filters = parse_json(self.filters)
        self.docs_to_export = {}
        if self.doctype:
            # ``doctype`` may arrive as [name, docs_to_export_spec].
            if isinstance(self.doctype, string_types):
                self.doctype = [self.doctype]
            if len(self.doctype) > 1:
                self.docs_to_export = self.doctype[1]
            self.doctype = self.doctype[0]
        if not self.parent_doctype:
            self.parent_doctype = self.doctype
        self.column_start_end = {}
        if self.all_doctypes:
            self.child_doctypes = []
            for df in frappe.get_meta(self.doctype).get_table_fields():
                self.child_doctypes.append(
                    dict(doctype=df.options, parentfield=df.fieldname))

    def build_response(self):
        """Assemble headers (and data, if requested) and set the response."""
        self.writer = UnicodeWriter()
        # When exporting a child table, rows are keyed by "parent".
        self.name_field = "parent" if self.parent_doctype != self.doctype else "name"
        if self.template:
            self.add_main_header()

        self.writer.writerow([""])
        self.tablerow = [self.data_keys.doctype]
        self.labelrow = [_("Column Labels:")]
        self.fieldrow = [self.data_keys.columns]
        self.mandatoryrow = [_("Mandatory:")]
        self.typerow = [_("Type:")]
        self.inforow = [_("Info:")]
        self.columns = []
        self.build_field_columns(self.doctype)
        if self.all_doctypes:
            for d in self.child_doctypes:
                # Blank separator column between each table's column span.
                self.append_empty_field_column()
                if (self.select_columns and self.select_columns.get(
                        d["doctype"], None)) or not self.select_columns:
                    # if atleast one column is selected for this doctype
                    self.build_field_columns(d["doctype"], d["parentfield"])
        self.add_field_headings()
        self.add_data()
        if self.with_data and not self.data:
            # NOTE(review): execution continues past this call, so the
            # CSV/Excel response below is still built — confirm intended.
            frappe.respond_as_web_page(_("No Data"),
                                       _("There is no data to be exported"),
                                       indicator_color="orange")
        if self.file_type == "Excel":
            self.build_response_as_excel()
        else:
            # write out response as a type csv
            frappe.response["result"] = cstr(self.writer.getvalue())
            frappe.response["type"] = "csv"
            frappe.response["doctype"] = self.doctype

    def add_main_header(self):
        """Write the template banner and the usage notes block."""
        self.writer.writerow([_("Data Import Template")])
        self.writer.writerow([self.data_keys.main_table, self.doctype])
        if self.parent_doctype != self.doctype:
            self.writer.writerow(
                [self.data_keys.parent_table, self.parent_doctype])
        else:
            self.writer.writerow([""])
        self.writer.writerow([""])
        self.writer.writerow([_("Notes:")])
        self.writer.writerow(
            [_("Please do not change the template headings.")])
        self.writer.writerow([_("First data column must be blank.")])
        self.writer.writerow([
            _('If you are uploading new records, leave the "name" (ID) column blank.'
              )
        ])
        self.writer.writerow([
            _('If you are uploading new records, "Naming Series" becomes mandatory, if present.'
              )
        ])
        self.writer.writerow([
            _("Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish."
              )
        ])
        self.writer.writerow(
            [_("For updating, you can update only selective columns.")])
        self.writer.writerow([
            _("You can only upload upto 5000 records in one go. (may be less in some cases)"
              )
        ])
        if self.name_field == "parent":
            self.writer.writerow([
                _('"Parent" signifies the parent table in which this row must be added'
                  )
            ])
            self.writer.writerow([
                _('If you are updating, please select "Overwrite" else existing rows will not be deleted.'
                  )
            ])

    def build_field_columns(self, dt, parentfield=None):
        """Append the column span for doctype ``dt`` (mandatory fields first)
        and record its start/end offsets in ``column_start_end``."""
        meta = frappe.get_meta(dt)

        # build list of valid docfields: only fields that exist as actual
        # DB columns and survive the select_columns filter.
        tablecolumns = []
        table_name = "tab" + dt
        for f in frappe.db.get_table_columns_description(table_name):
            field = meta.get_field(f.name)
            if field and (
                (self.select_columns and f.name in self.select_columns[dt])
                    or not self.select_columns):
                tablecolumns.append(field)

        tablecolumns.sort(key=lambda a: int(a.idx))

        _column_start_end = frappe._dict(start=0)

        if dt == self.doctype:
            # "name" column is needed when the doctype is prompt-named or
            # when actual data is being exported (rows must be identifiable).
            if (meta.get("autoname")
                    and meta.get("autoname").lower() == "prompt") or (
                        self.with_data):
                self._append_name_column()

            # if importing only child table for new record, add parent field
            if meta.get("istable") and not self.with_data:
                self.append_field_column(
                    frappe._dict({
                        "fieldname": "parent",
                        "parent": "",
                        "label": "Parent",
                        "fieldtype": "Data",
                        "reqd": 1,
                        "info":
                        _("Parent is the name of the document to which the data will get added to."
                          ),
                    }),
                    True,
                )

            _column_start_end = frappe._dict(start=0)
        else:
            _column_start_end = frappe._dict(start=len(self.columns))

            if self.with_data:
                self._append_name_column(dt)

        # Mandatory fields first...
        for docfield in tablecolumns:
            self.append_field_column(docfield, True)

        # all non mandatory fields
        for docfield in tablecolumns:
            self.append_field_column(docfield, False)

        # if there is one column, add a blank column (?)
        if len(self.columns) - _column_start_end.start == 1:
            self.append_empty_field_column()

        # append DocType name
        self.tablerow[_column_start_end.start + 1] = dt

        if parentfield:
            self.tablerow[_column_start_end.start + 2] = parentfield

        _column_start_end.end = len(self.columns) + 1
        self.column_start_end[(dt, parentfield)] = _column_start_end

    def append_field_column(self, docfield, for_mandatory):
        """Append one column for ``docfield``; ``for_mandatory`` selects the
        mandatory-fields pass vs the optional-fields pass (each field is
        appended in exactly one of the two passes)."""
        if not docfield:
            return
        if for_mandatory and not docfield.reqd:
            return
        if not for_mandatory and docfield.reqd:
            return
        if docfield.fieldname in ("parenttype", "trash_reason"):
            return
        if docfield.hidden:
            return
        if (self.select_columns
                and docfield.fieldname not in self.select_columns.get(
                    docfield.parent, []) and docfield.fieldname != "name"):
            return

        self.tablerow.append("")
        self.fieldrow.append(docfield.fieldname)
        self.labelrow.append(_(docfield.label))
        self.mandatoryrow.append(docfield.reqd and "Yes" or "No")
        self.typerow.append(docfield.fieldtype)
        self.inforow.append(self.getinforow(docfield))
        self.columns.append(docfield.fieldname)

    def append_empty_field_column(self):
        """Append a "~" separator column to every heading row."""
        self.tablerow.append("~")
        self.fieldrow.append("~")
        self.labelrow.append("")
        self.mandatoryrow.append("")
        self.typerow.append("")
        self.inforow.append("")
        self.columns.append("")

    @staticmethod
    def getinforow(docfield):
        """make info comment for options, links etc."""
        if docfield.fieldtype == "Select":
            if not docfield.options:
                return ""
            else:
                return _("One of") + ": %s" % ", ".join(
                    filter(None, docfield.options.split("\n")))
        elif docfield.fieldtype == "Link":
            return "Valid %s" % docfield.options
        elif docfield.fieldtype == "Int":
            return "Integer"
        elif docfield.fieldtype == "Check":
            return "0 or 1"
        elif docfield.fieldtype in ["Date", "Datetime"]:
            return cstr(frappe.defaults.get_defaults().date_format)
        elif hasattr(docfield, "info"):
            return docfield.info
        else:
            return ""

    def add_field_headings(self):
        """Flush the accumulated heading rows to the writer."""
        self.writer.writerow(self.tablerow)
        self.writer.writerow(self.labelrow)
        self.writer.writerow(self.fieldrow)
        self.writer.writerow(self.mandatoryrow)
        self.writer.writerow(self.typerow)
        self.writer.writerow(self.inforow)
        if self.template:
            self.writer.writerow([self.data_keys.data_separator])

    def add_data(self):
        """Fetch permitted documents (plus child rows) and write data rows,
        filtered by the optional ``docs_to_export`` name spec."""
        from frappe.query_builder import DocType

        if self.template and not self.with_data:
            return

        frappe.permissions.can_export(self.parent_doctype,
                                      raise_exception=True)

        # sort nested set doctypes by `lft asc` so tree order is preserved
        order_by = None
        table_columns = frappe.db.get_table_columns(self.parent_doctype)
        if "lft" in table_columns and "rgt" in table_columns:
            order_by = "`tab{doctype}`.`lft` asc".format(
                doctype=self.parent_doctype)

        # get permitted data only
        self.data = frappe.get_list(self.doctype,
                                    fields=["*"],
                                    filters=self.filters,
                                    limit_page_length=None,
                                    order_by=order_by)

        for doc in self.data:
            op = self.docs_to_export.get("op")
            names = self.docs_to_export.get("name")

            if names and op:
                # Explicit include/exclude by document name.
                if op == "=" and doc.name not in names:
                    continue
                elif op == "!=" and doc.name in names:
                    continue
            elif names:
                # ``names`` treated as a regex; on a bad pattern fall back
                # to plain membership. ``reflags`` maps flag letters to
                # re flags — presumably defined elsewhere in this module.
                try:
                    sflags = self.docs_to_export.get("flags", "I,U").upper()
                    flags = 0
                    for a in re.split(r"\W+", sflags):
                        flags = flags | reflags.get(a, 0)

                    c = re.compile(names, flags)
                    m = c.match(doc.name)
                    if not m:
                        continue
                except Exception:
                    if doc.name not in names:
                        continue

            # add main table
            rows = []
            self.add_data_row(rows, self.doctype, None, doc, 0)

            if self.all_doctypes:
                # add child tables
                for c in self.child_doctypes:
                    child_doctype_table = DocType(c["doctype"])
                    data_row = (
                        frappe.qb.from_(child_doctype_table).select("*").where(
                            child_doctype_table.parent == doc.name).where(
                                child_doctype_table.parentfield ==
                                c["parentfield"]).orderby(
                                    child_doctype_table.idx))
                    for ci, child in enumerate(data_row.run(as_dict=True)):
                        self.add_data_row(rows, c["doctype"], c["parentfield"],
                                          child, ci)

            for row in rows:
                self.writer.writerow(row)

    def add_data_row(self, rows, dt, parentfield, doc, rowidx):
        """Write one document's values into row ``rowidx`` of ``rows``, in
        the column span recorded for (dt, parentfield)."""
        d = doc.copy()
        meta = frappe.get_meta(dt)
        if self.all_doctypes:
            # Quote the name so spreadsheet tools keep it as text.
            d.name = '"' + d.name + '"'

        if len(rows) < rowidx + 1:
            rows.append([""] * (len(self.columns) + 1))
        row = rows[rowidx]

        _column_start_end = self.column_start_end.get((dt, parentfield))

        if _column_start_end:
            for i, c in enumerate(self.columns[_column_start_end.
                                               start:_column_start_end.end]):
                df = meta.get_field(c)
                fieldtype = df.fieldtype if df else "Data"
                value = d.get(c, "")
                if value:
                    # Render dates/durations in the user's display format.
                    if fieldtype == "Date":
                        value = formatdate(value)
                    elif fieldtype == "Datetime":
                        value = format_datetime(value)
                    elif fieldtype == "Duration":
                        value = format_duration(value, df.hide_days)

                row[_column_start_end.start + i + 1] = value

    def build_response_as_excel(self):
        """Convert the accumulated CSV to .xlsx and set the binary response."""
        filename = frappe.generate_hash("", 10)
        with open(filename, "wb") as f:
            f.write(cstr(self.writer.getvalue()).encode("utf-8"))
        f = open(filename)
        reader = csv.reader(f)

        from frappe.utils.xlsxutils import make_xlsx

        xlsx_file = make_xlsx(
            reader, "Data Import Template" if self.template else "Data Export")

        f.close()
        os.remove(filename)

        # write out response as a xlsx type
        frappe.response["filename"] = self.doctype + ".xlsx"
        frappe.response["filecontent"] = xlsx_file.getvalue()
        frappe.response["type"] = "binary"

    def _append_name_column(self, dt=None):
        """Append the ID column (or "parent"/"name" for the main doctype)."""
        self.append_field_column(
            frappe._dict({
                "fieldname": "name" if dt else self.name_field,
                "parent": dt or "",
                "label": "ID",
                "fieldtype": "Data",
                "reqd": 1,
            }),
            True,
        )
class ChildDataExporter:
    """Build a blank Data Import template for one child table of ``doctype``.

    Writes banner rows (parenttype / parentfield / child DocType) followed by
    column name, label, mandatory and type heading rows, then returns the
    template via ``frappe.response`` as an .xlsx (or CSV) download.
    """

    def __init__(self, doctype=None, child_doc=None, file_type='Excel'):
        self.doctype = doctype
        self.child_doc = child_doc
        # Bug fix: ``file_type`` was previously ignored (hard-coded to
        # 'Excel'), which made the CSV branch in build_response() unreachable.
        self.file_type = file_type
        self.prepare_args()

    def prepare_args(self):
        # Nothing to prepare for a child-table template; kept for parity
        # with DataExporter's constructor flow.
        return

    def add_field_headings(self):
        """Flush the accumulated heading rows to the writer."""
        self.writer.writerow(self.tablerow)
        self.writer.writerow(self.labelrow)
        self.writer.writerow(self.fieldrow)
        self.writer.writerow(self.mandatoryrow)
        self.writer.writerow(self.typerow)

    def build_response(self):
        """Assemble the template rows and emit them as Excel or CSV."""
        self.writer = UnicodeWriter()
        self.add_main_header()

        self.tablerow = [self.doctype]
        self.labelrow = [_("Column Labels:")]
        self.fieldrow = ['Column Name:']
        self.mandatoryrow = [_("Mandatory:")]
        self.typerow = [_('Type:')]
        self.inforow = [_('Info:')]
        self.columns = []

        # "parent" first so every imported row links back to its parent doc.
        self._append_parent_column()
        self.build_field_columns(self.child_doc)
        self.add_field_headings()
        self.writer.writerow(['Start entering data below this line'])

        if self.file_type == 'Excel':
            self.build_response_as_excel()
        else:
            # write out response as a type csv
            frappe.response['result'] = cstr(self.writer.getvalue())
            frappe.response['type'] = 'csv'
            frappe.response['doctype'] = self.doctype

    def add_main_header(self):
        """Write the banner rows; ``parentfield`` is the Table field on the
        parent doctype that points at ``child_doc`` (None if no match)."""
        parent_doc = frappe.get_meta(self.doctype).as_dict()
        parentfield = None
        for df in parent_doc['fields']:
            if df['fieldtype'] == 'Table' and df['options'] == self.child_doc:
                parentfield = df['fieldname']

        self.writer.writerow([_('Data Import Template')])
        self.writer.writerow(['parenttype', self.doctype])
        self.writer.writerow(['parentfield', parentfield])
        self.writer.writerow(['Child Table', self.child_doc])
        self.writer.writerow([''])
        self.writer.writerow(['DocType', self.child_doc])

    def build_response_as_excel(self):
        """Convert the accumulated CSV to .xlsx and set the binary response."""
        from frappe.utils.xlsxutils import make_xlsx

        # Round-trip through a temp file so csv.reader re-parses the rows.
        filename = frappe.generate_hash("", 10)
        with open(filename, 'wb') as f:
            f.write(cstr(self.writer.getvalue()).encode('utf-8'))
        try:
            # Context manager + finally: the previous version leaked the open
            # handle and the temp file if make_xlsx raised.
            with open(filename) as f:
                xlsx_file = make_xlsx(csv.reader(f), "Data Import Template")
        finally:
            os.remove(filename)

        # write out response as a xlsx type
        frappe.response['filename'] = self.doctype + '.xlsx'
        frappe.response['filecontent'] = xlsx_file.getvalue()
        frappe.response['type'] = 'binary'

    def build_field_columns(self, dt, parentfield=None):
        """Append one column per database-backed docfield of ``dt``.

        Removed the no-op attribute expressions and the duplicate
        meta/tablecolumns initialization from the previous version.
        """
        meta = frappe.get_meta(dt)
        # DESC the table so only real DB columns (not virtual fields) make
        # it into the template. ``dt`` is a DocType name chosen server-side,
        # but note the identifier is still interpolated into SQL — frappe's
        # placeholders cannot parameterize table names.
        tablecolumns = []
        for col in frappe.db.sql('desc `tab%s`' % dt):
            field = meta.get_field(col[0])
            if field:
                tablecolumns.append(field)

        for docfield in tablecolumns:
            self.append_field_column(docfield=docfield)

    def append_field_column(self, docfield):
        """Append one column for ``docfield`` to every heading row."""
        self.fieldrow.append(docfield.fieldname)
        self.labelrow.append(_(docfield.label))
        self.mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
        self.typerow.append(docfield.fieldtype)

    def _append_parent_column(self, dt=None):
        """Append the mandatory "parent" column that names the document the
        imported child rows will be attached to."""
        self.append_field_column(
            frappe._dict({
                "fieldname": "parent",
                "label": "Parent",
                "fieldtype": "Data",
                "reqd": 1,
            }))