def test_confirm_payment(self):
	razorpay_payment_id = "test_pay_{0}".format(dataent.generate_hash(length=14))
	razorpay_payment = make_payment(
		razorpay_payment_id=razorpay_payment_id,
		options=json.dumps(data["options"]))
	self.assertRaises(InvalidRequest, razorpay_payment.insert)

	razorpay_settings = dataent.get_doc("Razorpay Settings")
	razorpay_settings.update(data["razorpay_settings"])

	razorpay_payment_id = "test_pay_{0}".format(dataent.generate_hash(length=14))
	razorpay_payment = make_payment(
		razorpay_payment_id=razorpay_payment_id,
		options=json.dumps(data["options"]))

	razorpay_payment.flags.is_sandbox = True
	razorpay_payment.sanbox_response = data["sanbox_response"]
	razorpay_payment.sanbox_response.update({
		"id": razorpay_payment_id,
		"status": "authorized"
	})
	razorpay_payment.insert(ignore_permissions=True)

	razorpay_payment_status = dataent.db.get_value("Razorpay Payment",
		razorpay_payment_id, "status")
	self.assertEquals(razorpay_payment_status, "Authorized")

def start(self):
	"""start a new session"""
	# generate sid
	if self.user == 'Guest':
		sid = 'Guest'
	else:
		sid = dataent.generate_hash()

	self.data.user = self.user
	self.data.sid = sid
	self.data.data.user = self.user
	self.data.data.session_ip = dataent.local.request_ip
	if self.user != "Guest":
		self.data.data.update({
			"last_updated": dataent.utils.now(),
			"session_expiry": get_expiry_period(self.device),
			"full_name": self.full_name,
			"user_type": self.user_type,
			"device": self.device,
			"session_country": get_geo_ip_country(dataent.local.request_ip) if dataent.local.request_ip else None,
		})

	# insert session
	if self.user != "Guest":
		self.insert_session_record()

		# update user
		dataent.db.sql("""UPDATE tabUser SET last_login = %(now)s, last_ip = %(ip)s, last_active = %(now)s
			where name=%(name)s""", {
				"now": dataent.utils.now(),
				"ip": dataent.local.request_ip,
				"name": self.data['user']
			})
		dataent.db.commit()

def make_autoname(key='', doctype='', doc=''):
	"""
	Creates an autoname from the given key:

	**Autoname rules:**

		* The key is separated by '.'
		* '####' represents a series. The string before this part becomes the prefix:
			Example: ABC.#### creates a series ABC0001, ABC0002 etc
		* 'MM' represents the current month
		* 'YY' and 'YYYY' represent the current year

	*Example:*

		* DE/./.YY./.MM./.##### will create a series like
			DE/09/01/00001 where 09 is the year, 01 is the month and 00001 is the series
	"""
	if key == "hash":
		return dataent.generate_hash(doctype, 10)

	if "#" not in key:
		key = key + ".#####"
	elif "." not in key:
		dataent.throw(_("Invalid naming series (. missing)")
			+ (_(" for {0}").format(doctype) if doctype else ""))

	parts = key.split('.')
	n = parse_naming_series(parts, doctype, doc)
	return n

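# --- Hedged usage sketch (not part of the original module) ---
# Illustrates the two key shapes make_autoname() accepts. The doctype name
# "ToDo" and the "TEST-" prefix are placeholder assumptions; a connected site
# context is required for the series counter to advance.
def _example_make_autoname():
	# "hash" short-circuits to a random 10-character name
	random_name = make_autoname("hash", doctype="ToDo")
	# a series key yields prefix + zero-padded counter, e.g. "TEST-00001"
	series_name = make_autoname("TEST-.#####", doctype="ToDo")
	return random_name, series_name
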
def execute():
	dataent.reload_doc('manufacturing', 'doctype', 'job_card_time_log')

	if (dataent.db.table_exists("Job Card")
		and dataent.get_meta("Job Card").has_field("actual_start_date")):
		time_logs = []
		for d in dataent.get_all('Job Card',
			fields=["actual_start_date", "actual_end_date", "time_in_mins", "name", "for_quantity"],
			filters={'docstatus': ("<", 2)}):
			if d.actual_start_date:
				time_logs.append([d.actual_start_date, d.actual_end_date, d.time_in_mins,
					d.for_quantity, d.name, 'Job Card', 'time_logs', dataent.generate_hash("", 10)])

		if time_logs:
			dataent.db.sql(""" INSERT INTO
				`tabJob Card Time Log`
				(from_time, to_time, time_in_mins, completed_qty, parent, parenttype, parentfield, name)
				values {values}
			""".format(values=','.join(['%s'] * len(time_logs))), tuple(time_logs))

		dataent.reload_doc('manufacturing', 'doctype', 'job_card')
		dataent.db.sql(""" update `tabJob Card` set total_completed_qty = for_quantity,
			total_time_in_mins = time_in_mins where docstatus < 2 """)

def insert_user_social_login(user, modified_by, provider, idx, userid=None, username=None):
	source_cols = get_standard_cols()
	creation_time = dataent.utils.get_datetime_str(dataent.utils.get_datetime())

	values = [
		dataent.generate_hash(length=10),
		creation_time,
		creation_time,
		user,
		modified_by,
		user,
		"User",
		"social_logins",
		cstr(idx),
		provider
	]

	if userid:
		source_cols.append("userid")
		values.append(userid)

	if username:
		source_cols.append("username")
		values.append(username)

	query = """INSERT INTO `tabUser Social Login`
		({source_cols})
		VALUES ({values})
	""".format(
		source_cols="`" + "`, `".join(source_cols) + "`",
		values="'" + "', '".join([dataent.db.escape(d) for d in values]) + "'")

	dataent.db.sql(query)

def make_error_snapshot(exception):
	if dataent.conf.disable_error_snapshot:
		return

	logger = dataent.logger(__name__, with_more_info=False)

	try:
		error_id = '{timestamp:s}-{ip:s}-{hash:s}'.format(
			timestamp=cstr(datetime.datetime.now()),
			ip=dataent.local.request_ip or '127.0.0.1',
			hash=dataent.generate_hash(length=3))
		snapshot_folder = get_error_snapshot_path()
		dataent.create_folder(snapshot_folder)

		snapshot_file_path = os.path.join(snapshot_folder, "{0}.json".format(error_id))
		snapshot = get_snapshot(exception)

		with open(encode(snapshot_file_path), 'wb') as error_file:
			error_file.write(encode(dataent.as_json(snapshot)))

		logger.error('New Exception collected with id: {}'.format(error_id))

	except Exception as e:
		logger.error('Could not take error snapshot: {0}'.format(e), exc_info=True)

def validate(self):
	self.check_demo()

	# clear new password
	self.__new_password = self.new_password
	self.new_password = ""

	if not dataent.flags.in_test:
		self.password_strength_test()

	if self.name not in STANDARD_USERS:
		self.validate_email_type(self.email)
		self.validate_email_type(self.name)

	self.add_system_manager_role()
	self.set_system_user()
	self.set_full_name()
	self.check_enable_disable()
	self.ensure_unique_roles()
	self.remove_all_roles_for_guest()
	self.validate_username()
	self.remove_disabled_roles()
	self.validate_user_email_inbox()
	ask_pass_update()
	self.validate_roles()
	self.validate_user_image()

	if self.language == "Loading...":
		self.language = None

	if (self.name not in ["Administrator", "Guest"]) and (not self.get_social_login_userid("dataent")):
		self.set_social_login_userid("dataent", dataent.generate_hash(length=39))

def generate_keys(user):
	"""
	generate api key and api secret

	:param user: str
	"""
	if "System Manager" in dataent.get_roles():
		user_details = dataent.get_doc("User", user)
		api_secret = dataent.generate_hash(length=15)

		# if api key is not set generate api key
		if not user_details.api_key:
			api_key = dataent.generate_hash(length=15)
			user_details.api_key = api_key

		user_details.api_secret = api_secret
		user_details.save()

		return {"api_secret": api_secret}

	dataent.throw(dataent._("Not Permitted"), dataent.PermissionError)

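# --- Hedged usage sketch (not part of the original module) ---
# Shows how a System Manager session might call generate_keys() and combine the
# returned secret with the stored api_key for token-based auth. The account
# "user@example.com" and the header format shown are placeholder assumptions.
def _example_generate_keys():
	secret = generate_keys("user@example.com")["api_secret"]
	api_key = dataent.db.get_value("User", "user@example.com", "api_key")
	# a typical Authorization header value would look like "token <key>:<secret>"
	return "token {0}:{1}".format(api_key, secret)
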
def autoname(self):
	"""Set name for folder"""
	if self.is_folder:
		if self.folder:
			self.name = self.get_name_based_on_parent_folder()
		else:
			# home
			self.name = self.file_name
	else:
		self.name = dataent.generate_hash("", 10)

def read_multi_pdf(output):
	# Get the content of the merged pdf files
	fname = os.path.join("/tmp", "dataent-pdf-{0}.pdf".format(dataent.generate_hash()))
	output.write(open(fname, "wb"))

	with open(fname, "rb") as fileobj:
		filedata = fileobj.read()

	return filedata

def generate_csrf_token():
	dataent.local.session.data.csrf_token = dataent.generate_hash()
	dataent.local.session_obj.update(force=True)

	# send sid and csrf token to the user
	# handles the case when a user logs in again from another tab
	# and it leads to invalid request in the current tab
	dataent.publish_realtime(event="csrf_generated",
		message={"sid": dataent.local.session.sid, "csrf_token": dataent.local.session.data.csrf_token},
		user=dataent.session.user, after_commit=True)

def token():
	dtoken = dataent.new_doc('Chat Token')
	dtoken.token = dataent.generate_hash()
	dtoken.ip_address = dataent.local.request_ip

	country = get_geo_ip_country(dtoken.ip_address)
	if country:
		dtoken.country = country['iso_code']

	dtoken.save(ignore_permissions=True)

	return dtoken.token

def get_name_from_hash():
	"""
	Get a name for a Batch by generating a unique hash.

	:return: The hash that was generated.
	"""
	temp = None
	while not temp:
		temp = dataent.generate_hash()[:7].upper()
		if dataent.db.exists('Batch', temp):
			temp = None

	return temp

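# --- Hedged usage sketch (not part of the original module) ---
# get_name_from_hash() retries until the 7-character candidate is free, so its
# result can be assigned directly as a Batch id. "Test Item" is a placeholder
# item code used only for illustration.
def _example_new_batch_name():
	batch = dataent.new_doc("Batch")
	batch.batch_id = get_name_from_hash()
	batch.item = "Test Item"
	return batch
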
def authenticate_for_2factor(user):
	'''Authenticate two factor for enabled user before login.'''
	if dataent.form_dict.get('otp'):
		return
	otp_secret = get_otpsecret_for_(user)
	token = int(pyotp.TOTP(otp_secret).now())
	tmp_id = dataent.generate_hash(length=8)
	cache_2fa_data(user, token, otp_secret, tmp_id)
	verification_obj = get_verification_obj(user, token, otp_secret)

	# Save data in local
	dataent.local.response['verification'] = verification_obj
	dataent.local.response['tmp_id'] = tmp_id

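# --- Hedged verification sketch (not part of the original module) ---
# The token cached above is a TOTP value, so a later confirmation step could
# check a user-supplied OTP against the same secret. Only pyotp.TOTP().verify()
# is a known library call here; the helper name itself is hypothetical.
def _example_verify_otp(otp_secret, user_supplied_otp):
	return pyotp.TOTP(otp_secret).verify(str(user_supplied_otp))
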
def get_link_for_qrcode(user, totp_uri):
	'''Get link to temporary page showing QRCode.'''
	key = dataent.generate_hash(length=20)
	key_user = "{}_user".format(key)
	key_uri = "{}_uri".format(key)
	lifespan = int(dataent.db.get_value('System Settings', 'System Settings', 'lifespan_qrcode_image'))
	if lifespan <= 0:
		lifespan = 240
	dataent.cache().set_value(key_uri, totp_uri, expires_in_sec=lifespan)
	dataent.cache().set_value(key_user, user, expires_in_sec=lifespan)
	return get_url('/qrcode?k={}'.format(key))

def validate_settings(self):
	if self.enable_sync:
		if not self.secret:
			self.set("secret", dataent.generate_hash())

		if not self.woocommerce_server_url:
			dataent.throw(_("Please enter Woocommerce Server URL"))

		if not self.api_consumer_key:
			dataent.throw(_("Please enter API Consumer Key"))

		if not self.api_consumer_secret:
			dataent.throw(_("Please enter API Consumer Secret"))

def prepare_header_footer(soup):
	options = {}

	head = soup.find("head").contents
	styles = soup.find_all("style")

	bootstrap = dataent.read_file(os.path.join(dataent.local.sites_path, "assets/dataent/css/bootstrap.css"))
	fontawesome = dataent.read_file(os.path.join(dataent.local.sites_path, "assets/dataent/css/font-awesome.css"))

	# extract header and footer
	for html_id in ("header-html", "footer-html"):
		content = soup.find(id=html_id)
		if content:
			# there could be multiple instances of header-html/footer-html
			for tag in soup.find_all(id=html_id):
				tag.extract()

			toggle_visible_pdf(content)
			html = dataent.render_template("templates/print_formats/pdf_header_footer.html", {
				"head": head,
				"styles": styles,
				"content": content,
				"html_id": html_id,
				"bootstrap": bootstrap,
				"fontawesome": fontawesome
			})

			# create temp file
			fname = os.path.join("/tmp", "dataent-pdf-{0}.html".format(dataent.generate_hash()))
			with open(fname, "wb") as f:
				f.write(html.encode("utf-8"))

			# {"header-html": "/tmp/dataent-pdf-random.html"}
			options[html_id] = fname
		else:
			if html_id == "header-html":
				options["margin-top"] = "15mm"
			elif html_id == "footer-html":
				options["margin-bottom"] = "15mm"

	return options

def get_pdf(html, options=None, output=None):
	html = scrub_urls(html)
	html, options = prepare_options(html, options)

	fname = os.path.join("/tmp", "dataent-pdf-{0}.pdf".format(dataent.generate_hash()))

	options.update({
		"disable-javascript": "",
		"disable-local-file-access": "",
	})

	try:
		pdfkit.from_string(html, fname, options=options or {})

		if output:
			append_pdf(PdfFileReader(fname), output)
		else:
			with open(fname, "rb") as fileobj:
				filedata = fileobj.read()

	except IOError as e:
		if ("ContentNotFoundError" in e.message
			or "ContentOperationNotPermittedError" in e.message
			or "UnknownContentError" in e.message
			or "RemoteHostClosedError" in e.message):

			# allow pdfs with missing images if file got created
			if os.path.exists(fname):
				if output:
					append_pdf(PdfFileReader(file(fname, "rb")), output)
				else:
					with open(fname, "rb") as fileobj:
						filedata = fileobj.read()
			else:
				dataent.throw(_("PDF generation failed because of broken image links"))
		else:
			raise

	finally:
		cleanup(fname, options)

	if output:
		return output

	return filedata

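# --- Hedged usage sketch (not part of the original module) ---
# A typical caller hands the generated PDF back through the response object,
# mirroring the dataent.response pattern used elsewhere in this file set. The
# "download" response type and the file name are assumptions for illustration.
def _example_pdf_download(html):
	dataent.response['filename'] = "example.pdf"
	dataent.response['filecontent'] = get_pdf(html)
	dataent.response['type'] = 'download'
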
def build_response_as_excel(self):
	filename = dataent.generate_hash("", 10)
	with open(filename, 'wb') as f:
		f.write(cstr(self.writer.getvalue()).encode('utf-8'))
	f = open(filename)
	reader = csv.reader(f)

	from dataent.utils.xlsxutils import make_xlsx
	xlsx_file = make_xlsx(reader, "Data Import Template" if self.template else 'Data Export')

	f.close()
	os.remove(filename)

	# write out response as a xlsx type
	dataent.response['filename'] = self.doctype + '.xlsx'
	dataent.response['filecontent'] = xlsx_file.getvalue()
	dataent.response['type'] = 'binary'

def set_backup_file_name(self):
	todays_date = now_datetime().strftime('%Y%m%d_%H%M%S')
	site = dataent.local.site or dataent.generate_hash(length=8)
	site = site.replace('.', '_')

	# Generate a random name using today's date and an 8 digit random number
	for_db = todays_date + "-" + site + "-database.sql"
	for_public_files = todays_date + "-" + site + "-files.tar"
	for_private_files = todays_date + "-" + site + "-private-files.tar"
	backup_path = get_backup_path()

	if not self.backup_path_db:
		self.backup_path_db = os.path.join(backup_path, for_db)
	if not self.backup_path_files:
		self.backup_path_files = os.path.join(backup_path, for_public_files)
	if not self.backup_path_private_files:
		self.backup_path_private_files = os.path.join(backup_path, for_private_files)

def get_random_group():
	doc = dataent.get_doc({
		"doctype": "Student Group",
		"student_group_name": "_Test Student Group-" + dataent.generate_hash(length=5),
		"group_based_on": "Activity"
	}).insert()

	student_list = dataent.get_all('Student', limit=5)

	doc.extend("students", [{"student": d.name, "active": 1} for d in student_list])
	doc.save()

	return doc

def qrcode_as_png(user, totp_uri):
	'''Save temporary Qrcode to server.'''
	from dataent.utils.file_manager import save_file
	folder = create_barcode_folder()
	png_file_name = '{}.png'.format(dataent.generate_hash(length=20))
	file_obj = save_file(png_file_name, png_file_name, 'User', user, folder=folder)
	dataent.db.commit()
	file_url = get_url(file_obj.file_url)
	file_path = os.path.join(dataent.get_site_path('public', 'files'), file_obj.file_name)
	url = qrcreate(totp_uri)
	with open(file_path, 'w') as png_file:
		url.png(png_file, scale=8, module_color=[0, 0, 0, 180], background=[0xff, 0xff, 0xcc])
	return file_url

def print_by_server(doctype, name, print_format=None, doc=None, no_letterhead=0):
	print_settings = dataent.get_doc("Print Settings")
	try:
		import cups
	except ImportError:
		dataent.throw(_("You need to install pycups to use this feature!"))
		return
	try:
		cups.setServer(print_settings.server_ip)
		cups.setPort(print_settings.port)
		conn = cups.Connection()
		output = PdfFileWriter()
		output = dataent.get_print(doctype, name, print_format, doc=doc,
			no_letterhead=no_letterhead, as_pdf=True, output=output)
		file = os.path.join("/", "tmp", "dataent-pdf-{0}.pdf".format(dataent.generate_hash()))
		output.write(open(file, "wb"))
		conn.printFile(print_settings.printer_name, file, name, {})
	except IOError as e:
		if ("ContentNotFoundError" in e.message
			or "ContentOperationNotPermittedError" in e.message
			or "UnknownContentError" in e.message
			or "RemoteHostClosedError" in e.message):
			dataent.throw(_("PDF generation failed"))
	except cups.IPPError:
		dataent.throw(_("Printing failed"))
	finally:
		cleanup(file, {})

def set_temp(self, value):
	"""Set a temporary value and return a key."""
	key = dataent.generate_hash()
	dataent.cache().hset("temp", key, value)
	return key

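# --- Hedged usage sketch (not part of the original module) ---
# A value stored by set_temp() would typically be read back from the same Redis
# hash; dataent.cache().hget is assumed to mirror the hset call above.
def _example_roundtrip_temp(self, value):
	key = self.set_temp(value)
	return dataent.cache().hget("temp", key)
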
def autoname(self):
	self.name = dataent.generate_hash()

def execute():
	dataent.reload_doctype('User Permission')

	# to check if we need to migrate from skip_for_doctype
	has_skip_for_doctype = dataent.db.has_column("User Permission", "skip_for_doctype")
	skip_for_doctype_map = {}

	new_user_permissions_list = []
	user_permissions_to_delete = []

	for user_permission in dataent.get_all('User Permission', fields=['*']):
		skip_for_doctype = []

		# while migrating from v11 -> v11
		if has_skip_for_doctype:
			if not user_permission.skip_for_doctype:
				continue
			skip_for_doctype = user_permission.skip_for_doctype.split('\n')
		else:
			# while migrating from v10 -> v11
			if skip_for_doctype_map.get((user_permission.allow, user_permission.user)) == None:
				skip_for_doctype = get_doctypes_to_skip(user_permission.allow, user_permission.user)
				# cache skip for doctype for same user and doctype
				skip_for_doctype_map[(user_permission.allow, user_permission.user)] = skip_for_doctype
			else:
				skip_for_doctype = skip_for_doctype_map[(user_permission.allow, user_permission.user)]

		if skip_for_doctype:
			# only specific doctypes are selected
			# split this into multiple records and delete
			linked_doctypes = get_linked_doctypes(user_permission.allow, True).keys()
			linked_doctypes = list(linked_doctypes)

			# append the doctype for which we have build the user permission
			linked_doctypes += [user_permission.allow]

			applicable_for_doctypes = list(set(linked_doctypes) - set(skip_for_doctype))

			user_permissions_to_delete.append(user_permission.name)
			user_permission.name = None
			user_permission.skip_for_doctype = None
			for doctype in applicable_for_doctypes:
				if doctype:
					# Maintain sequence (name, user, allow, for_value, applicable_for, apply_to_all_doctypes, creation, modified)
					new_user_permissions_list.append((
						dataent.generate_hash("", 10),
						user_permission.user,
						user_permission.allow,
						user_permission.for_value,
						doctype,
						0,
						user_permission.creation,
						user_permission.modified
					))
		else:
			# No skip_for_doctype found! Just update apply_to_all_doctypes.
			dataent.db.set_value('User Permission', user_permission.name, 'apply_to_all_doctypes', 1)

	if new_user_permissions_list:
		dataent.db.sql('''
			INSERT INTO `tabUser Permission`
			(`name`, `user`, `allow`, `for_value`, `applicable_for`, `apply_to_all_doctypes`, `creation`, `modified`)
			VALUES {}'''.format( # nosec
			', '.join(['%s'] * len(new_user_permissions_list))),
			tuple(new_user_permissions_list))

	if user_permissions_to_delete:
		dataent.db.sql('DELETE FROM `tabUser Permission` WHERE `name` in ({})'.format( # nosec
			','.join(['%s'] * len(user_permissions_to_delete))),
			tuple(user_permissions_to_delete))

def generate_secret():
	woocommerce_settings = dataent.get_doc("Woocommerce Settings")
	woocommerce_settings.secret = dataent.generate_hash()
	woocommerce_settings.save()

def validate(self):
	self.client_id = self.name
	if not self.client_secret:
		self.client_secret = dataent.generate_hash(length=10)
	self.validate_grant_and_response()

def execute():
	'''
	Fields to move from the item to item defaults child table
	[ default_warehouse, buying_cost_center, expense_account, selling_cost_center, income_account ]
	'''
	if not dataent.db.has_column('Item', 'default_warehouse'):
		return

	dataent.reload_doc('stock', 'doctype', 'item_default')
	dataent.reload_doc('stock', 'doctype', 'item')

	companies = dataent.get_all("Company")
	if len(companies) == 1 and not dataent.get_all("Item Default", limit=1):
		try:
			dataent.db.sql('''
				INSERT INTO `tabItem Default`
					(name, parent, parenttype, parentfield, idx, company, default_warehouse,
					buying_cost_center, selling_cost_center, expense_account, income_account, default_supplier)
				SELECT
					SUBSTRING(SHA2(name,224), 1, 10) as name, name as parent, 'Item' as parenttype,
					'item_defaults' as parentfield, 1 as idx, %s as company, default_warehouse,
					buying_cost_center, selling_cost_center, expense_account, income_account, default_supplier
				FROM `tabItem`;
			''', companies[0].name)
		except:
			pass
	else:
		item_details = dataent.db.sql(""" SELECT name, default_warehouse,
				buying_cost_center, expense_account, selling_cost_center, income_account
			FROM tabItem
			WHERE
				name not in (select distinct parent from `tabItem Default`) and ifnull(disabled, 0) = 0""", as_dict=1)

		items_default_data = {}
		for item_data in item_details:
			for d in [
				["default_warehouse", "Warehouse"],
				["expense_account", "Account"],
				["income_account", "Account"],
				["buying_cost_center", "Cost Center"],
				["selling_cost_center", "Cost Center"]
			]:
				if item_data.get(d[0]):
					company = dataent.get_value(d[1], item_data.get(d[0]), "company", cache=True)

					if item_data.name not in items_default_data:
						items_default_data[item_data.name] = {}

					company_wise_data = items_default_data[item_data.name]

					if company not in company_wise_data:
						company_wise_data[company] = {}

					default_data = company_wise_data[company]
					default_data[d[0]] = item_data.get(d[0])

		to_insert_data = []

		# items_default_data data structure will be as follow
		# {
		# 	'item_code 1': {'company 1': {'default_warehouse': 'Test Warehouse 1'}},
		# 	'item_code 2': {
		# 		'company 1': {'default_warehouse': 'Test Warehouse 1'},
		# 		'company 2': {'default_warehouse': 'Test Warehouse 1'}
		# 	}
		# }
		for item_code, companywise_item_data in items_default_data.items():
			for company, item_default_data in companywise_item_data.items():
				to_insert_data.append((
					dataent.generate_hash("", 10),
					item_code,
					'Item',
					'item_defaults',
					company,
					item_default_data.get('default_warehouse'),
					item_default_data.get('expense_account'),
					item_default_data.get('income_account'),
					item_default_data.get('buying_cost_center'),
					item_default_data.get('selling_cost_center'),
				))

		if to_insert_data:
			dataent.db.sql('''
				INSERT INTO `tabItem Default`
				(
					`name`, `parent`, `parenttype`, `parentfield`, `company`, `default_warehouse`,
					`expense_account`, `income_account`, `buying_cost_center`, `selling_cost_center`
				)
				VALUES {}
			'''.format(', '.join(['%s'] * len(to_insert_data))), tuple(to_insert_data))

def get_template(doctype=None, parent_doctype=None, all_doctypes="No", with_data="No",
	select_columns=None, from_data_import="No", excel_format="No"):
	all_doctypes = all_doctypes == "Yes"
	if select_columns:
		select_columns = json.loads(select_columns)
	docs_to_export = {}
	if doctype:
		if isinstance(doctype, string_types):
			doctype = [doctype]
		if len(doctype) > 1:
			docs_to_export = doctype[1]
		doctype = doctype[0]

	if not parent_doctype:
		parent_doctype = doctype

	column_start_end = {}

	if all_doctypes:
		child_doctypes = []
		for df in dataent.get_meta(doctype).get_table_fields():
			child_doctypes.append(dict(doctype=df.options, parentfield=df.fieldname))

	def get_data_keys_definition():
		return get_data_keys()

	def add_main_header():
		w.writerow([_('Data Import Template')])
		w.writerow([get_data_keys_definition().main_table, doctype])

		if parent_doctype != doctype:
			w.writerow([get_data_keys_definition().parent_table, parent_doctype])
		else:
			w.writerow([''])

		w.writerow([''])
		w.writerow([_('Notes:')])
		w.writerow([_('Please do not change the template headings.')])
		w.writerow([_('First data column must be blank.')])
		w.writerow([_('If you are uploading new records, leave the "name" (ID) column blank.')])
		w.writerow([_('If you are uploading new records, "Naming Series" becomes mandatory, if present.')])
		w.writerow([_('Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.')])
		w.writerow([_('For updating, you can update only selective columns.')])
		w.writerow([_('You can only upload upto 5000 records in one go. (may be less in some cases)')])

		if key == "parent":
			w.writerow([_('"Parent" signifies the parent table in which this row must be added')])
			w.writerow([_('If you are updating, please select "Overwrite" else existing rows will not be deleted.')])

	def build_field_columns(dt, parentfield=None):
		meta = dataent.get_meta(dt)

		# build list of valid docfields
		tablecolumns = []
		for f in dataent.db.sql('desc `tab%s`' % dt):
			field = meta.get_field(f[0])
			if field and ((select_columns and f[0] in select_columns[dt]) or not select_columns):
				tablecolumns.append(field)

		tablecolumns.sort(key=lambda a: int(a.idx))

		_column_start_end = dataent._dict(start=0)

		if dt == doctype:
			_column_start_end = dataent._dict(start=0)
		else:
			_column_start_end = dataent._dict(start=len(columns))

		append_field_column(dataent._dict({
			"fieldname": "name",
			"parent": dt,
			"label": "ID",
			"fieldtype": "Data",
			"reqd": 1,
			"idx": 0,
			"info": _("Leave blank for new records")
		}), True)

		for docfield in tablecolumns:
			append_field_column(docfield, True)

		# all non mandatory fields
		for docfield in tablecolumns:
			append_field_column(docfield, False)

		# if there is one column, add a blank column (?)
		if len(columns) - _column_start_end.start == 1:
			append_empty_field_column()

		# append DocType name
		tablerow[_column_start_end.start + 1] = dt

		if parentfield:
			tablerow[_column_start_end.start + 2] = parentfield

		_column_start_end.end = len(columns) + 1

		column_start_end[(dt, parentfield)] = _column_start_end

	def append_field_column(docfield, for_mandatory):
		if not docfield:
			return
		if for_mandatory and not docfield.reqd:
			return
		if not for_mandatory and docfield.reqd:
			return
		if docfield.fieldname in ('parenttype', 'trash_reason'):
			return
		if docfield.hidden:
			return
		if select_columns and docfield.fieldname not in select_columns.get(docfield.parent, []):
			return

		tablerow.append("")
		fieldrow.append(docfield.fieldname)
		labelrow.append(_(docfield.label))
		mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
		typerow.append(docfield.fieldtype)
		inforow.append(getinforow(docfield))
		columns.append(docfield.fieldname)

	def append_empty_field_column():
		tablerow.append("~")
		fieldrow.append("~")
		labelrow.append("")
		mandatoryrow.append("")
		typerow.append("")
		inforow.append("")
		columns.append("")

	def getinforow(docfield):
		"""make info comment for options, links etc."""
		if docfield.fieldtype == 'Select':
			if not docfield.options:
				return ''
			else:
				return _("One of") + ': %s' % ', '.join(filter(None, docfield.options.split('\n')))
		elif docfield.fieldtype == 'Link':
			return 'Valid %s' % docfield.options
		elif docfield.fieldtype == 'Int':
			return 'Integer'
		elif docfield.fieldtype == "Check":
			return "0 or 1"
		elif docfield.fieldtype in ["Date", "Datetime"]:
			return cstr(dataent.defaults.get_defaults().date_format)
		elif hasattr(docfield, "info"):
			return docfield.info
		else:
			return ''

	def add_field_headings():
		w.writerow(tablerow)
		w.writerow(labelrow)
		w.writerow(fieldrow)
		w.writerow(mandatoryrow)
		w.writerow(typerow)
		w.writerow(inforow)
		w.writerow([get_data_keys_definition().data_separator])

	def add_data():
		def add_data_row(row_group, dt, parentfield, doc, rowidx):
			d = doc.copy()
			meta = dataent.get_meta(dt)
			if all_doctypes:
				d.name = '"' + d.name + '"'

			if len(row_group) < rowidx + 1:
				row_group.append([""] * (len(columns) + 1))
			row = row_group[rowidx]

			_column_start_end = column_start_end.get((dt, parentfield))

			if _column_start_end:
				for i, c in enumerate(columns[_column_start_end.start:_column_start_end.end]):
					df = meta.get_field(c)
					fieldtype = df.fieldtype if df else "Data"
					value = d.get(c, "")
					if value:
						if fieldtype == "Date":
							value = formatdate(value)
						elif fieldtype == "Datetime":
							value = format_datetime(value)

					row[_column_start_end.start + i + 1] = value

		if with_data == 'Yes':
			dataent.permissions.can_export(parent_doctype, raise_exception=True)

			# sort nested set doctypes by `lft asc`
			order_by = None
			table_columns = dataent.db.get_table_columns(parent_doctype)
			if 'lft' in table_columns and 'rgt' in table_columns:
				order_by = '`tab{doctype}`.`lft` asc'.format(doctype=parent_doctype)

			# get permitted data only
			data = dataent.get_list(doctype, fields=["*"], limit_page_length=None, order_by=order_by)

			for doc in data:
				op = docs_to_export.get("op")
				names = docs_to_export.get("name")

				if names and op:
					if op == '=' and doc.name not in names:
						continue
					elif op == '!=' and doc.name in names:
						continue
				elif names:
					try:
						sflags = docs_to_export.get("flags", "I,U").upper()
						flags = 0
						for a in re.split('\W+', sflags):
							flags = flags | reflags.get(a, 0)

						c = re.compile(names, flags)
						m = c.match(doc.name)
						if not m:
							continue
					except:
						if doc.name not in names:
							continue

				# add main table
				row_group = []

				add_data_row(row_group, doctype, None, doc, 0)

				if all_doctypes:
					# add child tables
					for c in child_doctypes:
						for ci, child in enumerate(dataent.db.sql("""select * from `tab{0}`
							where parent=%s and parentfield=%s order by idx""".format(c['doctype']),
							(doc.name, c['parentfield']), as_dict=1)):
							add_data_row(row_group, c['doctype'], c['parentfield'], child, ci)

				for row in row_group:
					w.writerow(row)

	w = UnicodeWriter()
	key = 'parent' if parent_doctype != doctype else 'name'

	add_main_header()

	w.writerow([''])

	tablerow = [get_data_keys_definition().doctype, ""]
	labelrow = [_("Column Labels:"), "ID"]
	fieldrow = [get_data_keys_definition().columns, key]
	mandatoryrow = [_("Mandatory:"), _("Yes")]
	typerow = [_('Type:'), 'Data (text)']
	inforow = [_('Info:'), '']
	columns = [key]

	build_field_columns(doctype)

	if all_doctypes:
		for d in child_doctypes:
			append_empty_field_column()
			if (select_columns and select_columns.get(d['doctype'], None)) or not select_columns:
				# if atleast one column is selected for this doctype
				build_field_columns(d['doctype'], d['parentfield'])

	add_field_headings()
	add_data()

	if from_data_import == "Yes" and excel_format == "Yes":
		filename = dataent.generate_hash("", 10)
		with open(filename, 'wb') as f:
			f.write(cstr(w.getvalue()).encode("utf-8"))
		f = open(filename)

		# increase the field limit in case of larger fields
		# works for Python 2.x and 3.x
		csv.field_size_limit(sys.maxsize)

		reader = csv.reader(f)

		from dataent.utils.xlsxutils import make_xlsx
		xlsx_file = make_xlsx(reader, "Data Import Template")

		f.close()
		os.remove(filename)

		# write out response as a xlsx type
		dataent.response['filename'] = doctype + '.xlsx'
		dataent.response['filecontent'] = xlsx_file.getvalue()
		dataent.response['type'] = 'binary'
	else:
		# write out response as a type csv
		dataent.response['result'] = cstr(w.getvalue())
		dataent.response['type'] = 'csv'
		dataent.response['doctype'] = doctype