def setUp(self):
    self.attached_to_doctype, self.attached_to_docname = make_test_doc()
    self.test_content1 = test_content1
    self.test_content2 = test_content2
    self.saved_file1 = save_file('hello.txt', self.test_content1,
        self.attached_to_doctype, self.attached_to_docname)
    self.saved_file2 = save_file('hello.txt', self.test_content2,
        self.attached_to_doctype, self.attached_to_docname)
    self.saved_filename1 = get_files_path(self.saved_file1.file_name)
    self.saved_filename2 = get_files_path(self.saved_file2.file_name)
def create_json_gz_file(data, dt, dn):
    # Storing data in a CSV file causes information loss
    # Reports like P&L Statement were completely unusable because of this
    json_filename = '{0}.json.gz'.format(
        dataent.utils.data.format_datetime(dataent.utils.now(), "Y-m-d-H:M"))
    encoded_content = dataent.safe_encode(dataent.as_json(data))

    # GZip compression seems to reduce storage requirements by 80-90%
    compressed_content = gzip_compress(encoded_content)
    save_file(
        fname=json_filename,
        content=compressed_content,
        dt=dt,
        dn=dn,
        folder=None,
        is_private=False)
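# Illustrative sketch (not part of the original module): reading back a payload
# written by create_json_gz_file. This assumes gzip_compress above is the plain
# gzip wrapper from dataent.utils, so the standard library can decompress it.
def read_json_gz_content(compressed_content):
    import gzip
    import io
    import json

    # invert gzip_compress + dataent.as_json: decompress, then parse the JSON payload
    with gzip.GzipFile(fileobj=io.BytesIO(compressed_content)) as f:
        return json.loads(f.read().decode('utf-8'))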
def setUp(self):
    self.attached_to_doctype1, self.attached_to_docname1 = make_test_doc()
    self.attached_to_doctype2, self.attached_to_docname2 = make_test_doc()
    self.test_content1 = test_content1
    self.test_content2 = test_content1
    self.orig_filename = 'hello.txt'
    self.dup_filename = 'hello2.txt'
    self.saved_file1 = save_file(self.orig_filename, self.test_content1,
        self.attached_to_doctype1, self.attached_to_docname1)
    self.saved_file2 = save_file(self.dup_filename, self.test_content2,
        self.attached_to_doctype2, self.attached_to_docname2)
    self.saved_filename1 = get_files_path(self.saved_file1.file_name)
    self.saved_filename2 = get_files_path(self.saved_file2.file_name)
def setUp(self):
    self.attached_to_doctype, self.attached_to_docname = make_test_doc()
    self.test_content = test_content1
    self.saved_file = save_file('hello.txt', self.test_content,
        self.attached_to_doctype, self.attached_to_docname)
    self.saved_filename = get_files_path(self.saved_file.file_name)
def save_attachments_in_doc(self, doc):
    """Save email attachments in given document."""
    saved_attachments = []

    for attachment in self.attachments:
        try:
            file_data = save_file(attachment['fname'], attachment['fcontent'],
                doc.doctype, doc.name, is_private=1)
            saved_attachments.append(file_data)

            if attachment['fname'] in self.cid_map:
                self.cid_map[file_data.name] = self.cid_map[attachment['fname']]

        except MaxFileSizeReachedError:
            # WARNING: bypass max file size exception
            pass
        except dataent.DuplicateEntryError:
            # same file attached twice??
            pass

    return saved_attachments
def test_on_delete(self):
    file = dataent.get_doc("File", {"file_name": "file_copy.txt"})
    file.delete()

    self.assertEqual(
        dataent.db.get_value("File", _("Home/Test Folder 1"), "file_size"), 0)

    folder = self.get_folder("Test Folder 3", "Home/Test Folder 1")
    self.saved_file = save_file('folder_copy.txt', "Testing folder copy example.",
        "", "", folder.name)

    folder = dataent.get_doc("File", "Home/Test Folder 1/Test Folder 3")
    self.assertRaises(dataent.ValidationError, folder.delete)
def prepare_and_attach_invoice(doc, replace=False):
    progressive_name, progressive_number = get_progressive_name_and_number(doc, replace)

    invoice = prepare_invoice(doc, progressive_number)
    invoice_xml = dataent.render_template('epaas/regional/italy/e-invoice.xml',
        context={"doc": invoice}, is_path=True)
    # escape ampersands so the rendered document stays valid XML
    invoice_xml = invoice_xml.replace("&", "&amp;")

    xml_filename = progressive_name + ".xml"
    return save_file(xml_filename, invoice_xml, dt=doc.doctype, dn=doc.name, is_private=True)
def update_user_name(args):
    first_name, last_name = args.get('full_name', ''), ''
    if ' ' in first_name:
        first_name, last_name = first_name.split(' ', 1)

    if args.get("email"):
        if dataent.db.exists('User', args.get('email')):
            # running again
            return

        args['name'] = args.get("email")

        _mute_emails, dataent.flags.mute_emails = dataent.flags.mute_emails, True
        doc = dataent.get_doc({
            "doctype": "User",
            "email": args.get("email"),
            "first_name": first_name,
            "last_name": last_name
        })
        doc.flags.no_welcome_mail = True
        doc.insert()
        dataent.flags.mute_emails = _mute_emails
        update_password(args.get("email"), args.get("password"))

    elif first_name:
        args.update({
            "name": dataent.session.user,
            "first_name": first_name,
            "last_name": last_name
        })

        dataent.db.sql("""update `tabUser` SET first_name=%(first_name)s,
            last_name=%(last_name)s WHERE name=%(name)s""", args)

    if args.get("attach_user"):
        attach_user = args.get("attach_user").split(",")
        if len(attach_user) == 3:
            filename, filetype, content = attach_user
            fileurl = save_file(filename, content, "User", args.get("name"), decode=True).file_url
            dataent.db.set_value("User", args.get("name"), "user_image", fileurl)

    if args.get('name'):
        add_all_roles_to(args.get("name"))
def qrcode_as_png(user, totp_uri):
    '''Save temporary Qrcode to server.'''
    from dataent.utils.file_manager import save_file

    folder = create_barcode_folder()
    png_file_name = '{}.png'.format(dataent.generate_hash(length=20))

    # create the File record first (with placeholder content); the actual PNG
    # is written to the same path on disk below
    file_obj = save_file(png_file_name, png_file_name, 'User', user, folder=folder)
    dataent.db.commit()

    file_url = get_url(file_obj.file_url)
    file_path = os.path.join(dataent.get_site_path('public', 'files'), file_obj.file_name)

    url = qrcreate(totp_uri)
    # pyqrcode writes binary PNG data, so the file must be opened in binary mode
    with open(file_path, 'wb') as png_file:
        url.png(png_file, scale=8, module_color=[0, 0, 0, 180],
            background=[0xff, 0xff, 0xcc])

    return file_url
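# Illustrative usage sketch (not part of the original module): qrcode_as_png expects
# a standard otpauth:// provisioning URI. The issuer and secret below are placeholders;
# in the real flow they would come from the user's OTP settings.
def example_qrcode_for_user(user):
    totp_uri = "otpauth://totp/Example:{0}?secret=JBSWY3DPEHPK3PXP&issuer=Example".format(user)
    return qrcode_as_png(user, totp_uri)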
def test_folder_copy(self):
    folder = self.get_folder("Test Folder 2", "Home")
    folder = self.get_folder("Test Folder 3", "Home/Test Folder 2")

    self.saved_file = save_file('folder_copy.txt', "Testing folder copy example.",
        "", "", folder.name)

    move_file([{"name": folder.name}], 'Home/Test Folder 1', folder.folder)

    file = dataent.get_doc("File", {"file_name": "folder_copy.txt"})

    file_copy_txt = dataent.get_value("File", {"file_name": "file_copy.txt"})
    if file_copy_txt:
        dataent.get_doc("File", file_copy_txt).delete()

    self.assertEqual(_("Home/Test Folder 1/Test Folder 3"), file.folder)

    self.assertEqual(
        dataent.db.get_value("File", _("Home/Test Folder 1"), "file_size"), file.file_size)
    self.assertEqual(
        dataent.db.get_value("File", _("Home/Test Folder 2"), "file_size"), 0)
def attach_file(filename=None, filedata=None, doctype=None, docname=None, folder=None,
        decode_base64=False, is_private=None, docfield=None):
    '''Attach a file to Document (POST)

    :param filename: filename e.g. test-file.txt
    :param filedata: base64 encode filedata which must be urlencoded
    :param doctype: Reference DocType to attach file to
    :param docname: Reference DocName to attach file to
    :param folder: Folder to add File into
    :param decode_base64: decode filedata from base64 encode, default is False
    :param is_private: Attach file as private file (1 or 0)
    :param docfield: file to attach to (optional)'''

    request_method = dataent.local.request.environ.get("REQUEST_METHOD")

    if request_method.upper() != "POST":
        dataent.throw(_("Invalid Request"))

    doc = dataent.get_doc(doctype, docname)

    if not doc.has_permission():
        dataent.throw(_("Not permitted"), dataent.PermissionError)

    f = save_file(filename, filedata, doctype, docname, folder, decode_base64, is_private, docfield)

    if docfield and doctype:
        doc.set(docfield, f.file_url)
        doc.save()

    return f.as_dict()
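# Illustrative sketch (not part of the original module): building the keyword
# arguments a caller of attach_file would POST. Per the docstring above, filedata
# is base64 encoded and decode_base64 must be True so save_file decodes it again.
# The doctype and docname values are placeholders.
def example_attach_file_args(path):
    import base64
    import os

    with open(path, 'rb') as f:
        filedata = base64.b64encode(f.read())

    return {
        "filename": os.path.basename(path),
        "filedata": filedata,
        "doctype": "ToDo",        # placeholder reference DocType
        "docname": "TD-0001",     # placeholder reference DocName
        "decode_base64": True,
        "is_private": 1,
    }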
def upload(rows=None, submit_after_import=None, ignore_encoding_errors=False, no_email=True,
        overwrite=None, update_only=None, ignore_links=False, pre_process=None, via_console=False,
        from_data_import="No", skip_errors=True, data_import_doc=None, validate_template=False,
        user=None):
    """upload data"""

    # for translations
    if user:
        dataent.cache().hdel("lang", user)
        dataent.set_user_lang(user)

    if data_import_doc and isinstance(data_import_doc, string_types):
        data_import_doc = dataent.get_doc("Data Import", data_import_doc)
    if data_import_doc and from_data_import == "Yes":
        no_email = data_import_doc.no_email
        ignore_encoding_errors = data_import_doc.ignore_encoding_errors
        update_only = data_import_doc.only_update
        submit_after_import = data_import_doc.submit_after_import
        overwrite = data_import_doc.overwrite
        skip_errors = data_import_doc.skip_errors
    else:
        # extra input params
        params = json.loads(dataent.form_dict.get("params") or '{}')
        if params.get("submit_after_import"):
            submit_after_import = True
        if params.get("ignore_encoding_errors"):
            ignore_encoding_errors = True
        if not params.get("no_email"):
            no_email = False
        if params.get('update_only'):
            update_only = True
        if params.get('from_data_import'):
            from_data_import = params.get('from_data_import')
        if not params.get('skip_errors'):
            skip_errors = params.get('skip_errors')

    dataent.flags.in_import = True
    dataent.flags.mute_emails = no_email

    def get_data_keys_definition():
        return get_data_keys()

    def bad_template():
        dataent.throw(_("Please do not change the rows above {0}").format(
            get_data_keys_definition().data_separator))

    def check_data_length():
        if not data:
            dataent.throw(_("No data found in the file. Please reattach the new file with data."))

    def get_start_row():
        for i, row in enumerate(rows):
            if row and row[0] == get_data_keys_definition().data_separator:
                return i + 1
        bad_template()

    def get_header_row(key):
        return get_header_row_and_idx(key)[0]

    def get_header_row_and_idx(key):
        for i, row in enumerate(header):
            if row and row[0] == key:
                return row, i
        return [], -1

    def filter_empty_columns(columns):
        empty_cols = list(filter(lambda x: x in ("", None), columns))

        if empty_cols:
            if columns[-1 * len(empty_cols):] == empty_cols:
                # filter empty columns if they exist at the end
                columns = columns[:-1 * len(empty_cols)]
            else:
                dataent.msgprint(_("Please make sure that there are no empty columns in the file."),
                    raise_exception=1)

        return columns

    def make_column_map():
        doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
        if row_idx == -1:
            # old style
            return

        dt = None
        for i, d in enumerate(doctype_row[1:]):
            if d not in ("~", "-"):
                if d and doctype_row[i] in (None, '', '~', '-', _("DocType") + ":"):
                    dt, parentfield = d, None
                    # xls format truncates the row, so it may not have more columns
                    if len(doctype_row) > i + 2:
                        parentfield = doctype_row[i + 2]
                    doctypes.append((dt, parentfield))
                    column_idx_to_fieldname[(dt, parentfield)] = {}
                    column_idx_to_fieldtype[(dt, parentfield)] = {}
                if dt:
                    column_idx_to_fieldname[(dt, parentfield)][i + 1] = rows[row_idx + 2][i + 1]
                    column_idx_to_fieldtype[(dt, parentfield)][i + 1] = rows[row_idx + 4][i + 1]

    def get_doc(start_idx):
        if doctypes:
            doc = {}
            attachments = []
            last_error_row_idx = None
            for idx in range(start_idx, len(rows)):
                last_error_row_idx = idx  # pylint: disable=W0612
                if (not doc) or main_doc_empty(rows[idx]):
                    for dt, parentfield in doctypes:
                        d = {}
                        for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
                            try:
                                fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
                                fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]

                                if not fieldname or not rows[idx][column_idx]:
                                    continue

                                d[fieldname] = rows[idx][column_idx]
                                if fieldtype in ("Int", "Check"):
                                    d[fieldname] = cint(d[fieldname])
                                elif fieldtype in ("Float", "Currency", "Percent"):
                                    d[fieldname] = flt(d[fieldname])
                                elif fieldtype == "Date":
                                    if d[fieldname] and isinstance(d[fieldname], string_types):
                                        d[fieldname] = getdate(parse_date(d[fieldname]))
                                elif fieldtype == "Datetime":
                                    if d[fieldname]:
                                        if " " in d[fieldname]:
                                            _date, _time = d[fieldname].split()
                                        else:
                                            _date, _time = d[fieldname], '00:00:00'
                                        _date = parse_date(d[fieldname])
                                        d[fieldname] = get_datetime(_date + " " + _time)
                                    else:
                                        d[fieldname] = None
                                elif fieldtype in ("Image", "Attach Image", "Attach"):
                                    # added file to attachments list
                                    attachments.append(d[fieldname])
                                elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
                                    # as fields can be saved in the number format (long type) in data import template
                                    d[fieldname] = cstr(d[fieldname])

                            except IndexError:
                                pass

                        # scrub quotes from name and modified
                        if d.get("name") and d["name"].startswith('"'):
                            d["name"] = d["name"][1:-1]

                        if sum([0 if not val else 1 for val in d.values()]):
                            d['doctype'] = dt
                            if dt == doctype:
                                doc.update(d)
                            else:
                                if not overwrite and doc.get("name"):
                                    d['parent'] = doc["name"]
                                d['parenttype'] = doctype
                                d['parentfield'] = parentfield
                                doc.setdefault(d['parentfield'], []).append(d)
                else:
                    break

            return doc, attachments, last_error_row_idx
        else:
            doc = dataent._dict(zip(columns, rows[start_idx][1:]))
            doc['doctype'] = doctype
            return doc, [], None

    # used in testing whether a row is empty, a parent row or a child row
    # only the first 3 columns are checked since the first two columns can be blank,
    # for example when importing an item variant where item code and item name will be blank.
    def main_doc_empty(row):
        if row:
            for i in range(3, 0, -1):
                if len(row) > i and row[i]:
                    return False
        return True

    def validate_naming(doc):
        autoname = dataent.get_meta(doctype).autoname
        if autoname:
            if autoname[0:5] == 'field':
                autoname = autoname[6:]
            elif autoname == 'naming_series:':
                autoname = 'naming_series'
            else:
                return True

            if (autoname not in doc) or (not doc[autoname]):
                from dataent.model.base_document import get_controller
                if not hasattr(get_controller(doctype), "autoname"):
                    dataent.throw(_("{0} is a mandatory field".format(autoname)))
        return True

    users = dataent.db.sql_list("select name from tabUser")

    def prepare_for_insert(doc):
        # don't block data import if user is not set
        # migrating from another system
        if not doc.owner in users:
            doc.owner = dataent.session.user
        if not doc.modified_by in users:
            doc.modified_by = dataent.session.user

    def is_valid_url(url):
        is_valid = False
        if url.startswith("/files") or url.startswith("/private/files"):
            url = get_url(url)

        try:
            r = requests.get(url)
            is_valid = True if r.status_code == 200 else False
        except Exception:
            pass

        return is_valid

    def attach_file_to_doc(doctype, docname, file_url):
        # check if attachment is already available
        # check if the attachment link is relative or not
        if not file_url:
            return

        if not is_valid_url(file_url):
            return

        files = dataent.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and
            attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
                doctype=doctype,
                docname=docname,
                file_url=file_url))

        if files:
            # file is already attached
            return

        save_url(file_url, None, doctype, docname, "Home/Attachments", 0)

    # header
    filename, file_extension = ['', '']
    if not rows:
        from dataent.utils.file_manager import get_file  # get_file_doc
        fname, fcontent = get_file(data_import_doc.import_file)
        filename, file_extension = os.path.splitext(fname)

        if file_extension == '.xlsx' and from_data_import == 'Yes':
            from dataent.utils.xlsxutils import read_xlsx_file_from_attached_file
            rows = read_xlsx_file_from_attached_file(file_id=data_import_doc.import_file)

        elif file_extension == '.csv':
            from dataent.utils.csvutils import read_csv_content
            rows = read_csv_content(fcontent, ignore_encoding_errors)

        else:
            dataent.throw(_("Unsupported File Format"))

    start_row = get_start_row()
    header = rows[:start_row]
    data = rows[start_row:]
    try:
        doctype = get_header_row(get_data_keys_definition().main_table)[1]
        columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
    except:
        dataent.throw(_("Cannot change header content"))

    doctypes = []
    column_idx_to_fieldname = {}
    column_idx_to_fieldtype = {}

    if skip_errors:
        data_rows_with_error = header

    if submit_after_import and not cint(dataent.db.get_value("DocType", doctype, "is_submittable")):
        submit_after_import = False

    parenttype = get_header_row(get_data_keys_definition().parent_table)

    if len(parenttype) > 1:
        parenttype = parenttype[1]

    # check permissions
    if not dataent.permissions.can_import(parenttype or doctype):
        dataent.flags.mute_emails = False
        return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

    # Throw exception in case of an empty data file
    check_data_length()
    make_column_map()
    total = len(data)

    if validate_template:
        if total:
            data_import_doc.total_rows = total
        return True

    if overwrite == None:
        overwrite = params.get('overwrite')

    # delete child rows (if parenttype)
    parentfield = None
    if parenttype:
        parentfield = get_parent_field(doctype, parenttype)

        if overwrite:
            delete_child_rows(data, doctype)

    import_log = []

    def log(**kwargs):
        if via_console:
            print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
        else:
            import_log.append(kwargs)

    def as_link(doctype, name):
        if via_console:
            return "{0}: {1}".format(doctype, name)
        else:
            return getlink(doctype, name)

    # publish realtime task update
    def publish_progress(achieved, reload=False):
        if data_import_doc:
            dataent.publish_realtime("data_import_progress", {
                "progress": str(int(100.0 * achieved / total)),
                "data_import": data_import_doc.name,
                "reload": reload
            }, user=dataent.session.user)

    error_flag = rollback_flag = False

    batch_size = dataent.conf.data_import_batch_size or 1000

    for batch_start in range(0, total, batch_size):
        batch = data[batch_start:batch_start + batch_size]

        for i, row in enumerate(batch):
            # bypass empty rows
            if main_doc_empty(row):
                continue

            row_idx = i + start_row
            doc = None

            publish_progress(i)

            try:
                doc, attachments, last_error_row_idx = get_doc(row_idx)
                validate_naming(doc)
                if pre_process:
                    pre_process(doc)

                original = None
                if parentfield:
                    parent = dataent.get_doc(parenttype, doc["parent"])
                    doc = parent.append(parentfield, doc)
                    parent.save()
                else:
                    if overwrite and doc.get("name") and dataent.db.exists(doctype, doc["name"]):
                        original = dataent.get_doc(doctype, doc["name"])
                        original_name = original.name
                        original.update(doc)
                        # preserve original name for case sensitivity
                        original.name = original_name
                        original.flags.ignore_links = ignore_links
                        original.save()
                        doc = original
                    else:
                        if not update_only:
                            doc = dataent.get_doc(doc)
                            prepare_for_insert(doc)
                            doc.flags.ignore_links = ignore_links
                            doc.insert()
                    if attachments:
                        # check file url and create a File document
                        for file_url in attachments:
                            attach_file_to_doc(doc.doctype, doc.name, file_url)
                    if submit_after_import:
                        doc.submit()

                # log errors
                if parentfield:
                    log(**{
                        "row": doc.idx,
                        "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
                        "link": get_absolute_url(parenttype, doc.parent),
                        "message": 'Document successfully saved',
                        "indicator": "green"
                    })
                elif submit_after_import:
                    log(**{
                        "row": row_idx + 1,
                        "title": 'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
                        "message": "Document successfully submitted",
                        "link": get_absolute_url(doc.doctype, doc.name),
                        "indicator": "blue"
                    })
                elif original:
                    log(**{
                        "row": row_idx + 1,
                        "title": 'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
                        "message": "Document successfully updated",
                        "link": get_absolute_url(doc.doctype, doc.name),
                        "indicator": "green"
                    })
                elif not update_only:
                    log(**{
                        "row": row_idx + 1,
                        "title": 'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
                        "message": "Document successfully saved",
                        "link": get_absolute_url(doc.doctype, doc.name),
                        "indicator": "green"
                    })
                else:
                    log(**{
                        "row": row_idx + 1,
                        "title": 'Ignored row for %s' % (row[1]),
                        "link": None,
                        "message": "Document updation ignored",
                        "indicator": "orange"
                    })

            except Exception as e:
                error_flag = True

                # build error message
                if dataent.local.message_log:
                    err_msg = "\n".join(['<p class="border-bottom small">{}</p>'.format(
                        json.loads(msg).get('message')) for msg in dataent.local.message_log])
                else:
                    err_msg = '<p class="border-bottom small">{}</p>'.format(cstr(e))

                error_trace = dataent.get_traceback()
                if error_trace:
                    error_log_doc = dataent.log_error(error_trace)
                    error_link = get_absolute_url("Error Log", error_log_doc.name)
                else:
                    error_link = None

                log(**{
                    "row": row_idx + 1,
                    "title": 'Error for row %s' % (len(row) > 1 and dataent.safe_decode(row[1]) or ""),
                    "message": err_msg,
                    "indicator": "red",
                    "link": error_link
                })

                # data with error to create a new file
                # include the errored data in the last row as last_error_row_idx will not be updated for the last row
                if skip_errors:
                    if last_error_row_idx == len(rows) - 1:
                        last_error_row_idx = len(rows)
                    data_rows_with_error += rows[row_idx:last_error_row_idx]
                else:
                    rollback_flag = True
            finally:
                dataent.local.message_log = []

        start_row += batch_size

    if rollback_flag:
        dataent.db.rollback()
    else:
        dataent.db.commit()

    dataent.flags.mute_emails = False
    dataent.flags.in_import = False

    log_message = {"messages": import_log, "error": error_flag}
    if data_import_doc:
        data_import_doc.log_details = json.dumps(log_message)

        import_status = None
        if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
            import_status = "Partially Successful"
            # write the file with the faulty rows
            from dataent.utils.file_manager import save_file
            file_name = 'error_' + filename + file_extension
            if file_extension == '.xlsx':
                from dataent.utils.xlsxutils import make_xlsx
                xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
                file_data = xlsx_file.getvalue()
            else:
                from dataent.utils.csvutils import to_csv
                file_data = to_csv(data_rows_with_error)
            error_data_file = save_file(file_name, file_data, "Data Import",
                data_import_doc.name, "Home/Attachments")
            data_import_doc.error_file = error_data_file.file_url

        elif error_flag:
            import_status = "Failed"
        else:
            import_status = "Successful"

        data_import_doc.import_status = import_status
        data_import_doc.save()
        if data_import_doc.import_status in ["Successful", "Partially Successful"]:
            data_import_doc.submit()
            publish_progress(100, True)
        else:
            publish_progress(0, True)
        dataent.db.commit()
    else:
        return log_message
def upload_file(self):
    self.saved_file = save_file('file_copy.txt', "Testing file copy example.",
        "", "", self.get_folder("Test Folder 1", "Home").name)
    self.saved_filename = get_files_path(self.saved_file.file_name)
def accept(web_form, data, for_payment=False):
    '''Save the web form'''
    data = dataent._dict(json.loads(data))
    files = []
    files_to_delete = []

    web_form = dataent.get_doc("Web Form", web_form)
    if data.doctype != web_form.doc_type:
        dataent.throw(_("Invalid Request"))

    elif data.name and not web_form.allow_edit:
        dataent.throw(_("You are not allowed to update this Web Form Document"))

    dataent.flags.in_web_form = True
    meta = dataent.get_meta(data.doctype)

    if data.name:
        # update
        doc = dataent.get_doc(data.doctype, data.name)
    else:
        # insert
        doc = dataent.new_doc(data.doctype)

    # set values
    for field in web_form.web_form_fields:
        fieldname = field.fieldname
        df = meta.get_field(fieldname)
        value = data.get(fieldname, None)

        if df and df.fieldtype in ('Attach', 'Attach Image'):
            # treat only base64 data URLs as new uploads
            if value and 'data:' in value and 'base64' in value:
                files.append((fieldname, value))
                if not doc.name:
                    doc.set(fieldname, '')
                continue

            elif not value and doc.get(fieldname):
                files_to_delete.append(doc.get(fieldname))

        doc.set(fieldname, value)

    if for_payment:
        web_form.validate_mandatory(doc)
        doc.run_method('validate_payment')

    if doc.name:
        if has_web_form_permission(doc.doctype, doc.name, "write"):
            doc.save(ignore_permissions=True)
        else:
            # only if permissions are present
            doc.save()

    else:
        # insert
        if web_form.login_required and dataent.session.user == "Guest":
            dataent.throw(_("You must login to submit this form"))

        ignore_mandatory = True if files else False

        doc.insert(ignore_permissions=True, ignore_mandatory=ignore_mandatory)

    # add files
    if files:
        for f in files:
            fieldname, filedata = f

            # remove earlier attached file (if exists)
            if doc.get(fieldname):
                remove_file_by_url(doc.get(fieldname), doc.doctype, doc.name)

            # save new file
            filename, dataurl = filedata.split(',', 1)
            filedoc = save_file(filename, dataurl, doc.doctype, doc.name, decode=True)

            # update values
            doc.set(fieldname, filedoc.file_url)

        doc.save(ignore_permissions=True)

    if files_to_delete:
        for f in files_to_delete:
            if f:
                remove_file_by_url(f, doc.doctype, doc.name)

    dataent.flags.web_form_doc = doc

    if for_payment:
        return web_form.get_payment_gateway_url(doc)
    else:
        return doc.as_dict()
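# Illustrative sketch (not part of the original module): the shape of the `data`
# argument accept() expects -- a JSON string whose doctype matches the Web Form's
# doc_type, with Attach fields supplied as base64 data: URLs (split on the first
# comma above). The web form name and field names below are placeholders.
def example_accept_call():
    import json

    payload = {
        "doctype": "ToDo",                      # must equal web_form.doc_type
        "description": "Raised from a web form",
        # an Attach / Attach Image field as a base64 data URL
        "attachment": "data:text/plain;base64,aGVsbG8gd29ybGQ=",
    }
    return accept("my-web-form", json.dumps(payload))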