def get_filecontent_from_path(path):
	"""Return the binary contents of the file at *path*, or None.

	Resolves `assets/`, `files/` and `private/files/` prefixes against the
	appropriate site folders; any other path is used as-is. Returns None for
	an empty path or when the resolved file does not exist.
	"""
	if not path:
		return

	# a single leading slash is dropped before the prefix checks
	relative = path[1:] if path.startswith('/') else path

	if relative.startswith('assets/'):
		# served from the shared public assets folder
		resolved = os.path.abspath(relative)
	elif relative.startswith('files/'):
		# public site file
		resolved = dataent.get_site_path('public', relative)
	elif relative.startswith('private/files/'):
		# private site file
		resolved = dataent.get_site_path(relative)
	else:
		resolved = relative

	if not os.path.exists(resolved):
		return None

	with open(resolved, 'rb') as handle:
		return handle.read()
def get_test_record_log():
	'''Return the list of doctypes for which test records have been created'''
	if 'test_record_log' not in dataent.flags:
		# lazily populate the cache from site/.test_log on first access
		log_path = dataent.get_site_path('.test_log')
		if not os.path.exists(log_path):
			dataent.flags.test_record_log = []
		else:
			with open(log_path, 'r') as f:
				dataent.flags.test_record_log = f.read().splitlines()

	return dataent.flags.test_record_log
def unzip(self):
	'''Unzip current file and replace it by its children'''
	if ".zip" not in self.file_name:
		dataent.msgprint(_("Not a zip file"))
		return

	zip_path = dataent.get_site_path(self.file_url.strip('/'))
	base_url = os.path.dirname(self.file_url)

	with zipfile.ZipFile(zip_path) as zf:
		zf.extractall(os.path.dirname(zip_path))
		for info in zf.infolist():
			# skip macOS resource-fork entries added by Finder's "Compress"
			if info.filename.startswith('__MACOSX'):
				continue

			# was a duplicated assignment: `file_url = file_url = ...`
			file_url = base_url + '/' + info.filename
			file_name = dataent.db.get_value('File', dict(file_url=file_url))
			if file_name:
				# reuse an existing File record for the same url
				file_doc = dataent.get_doc('File', file_name)
			else:
				file_doc = dataent.new_doc("File")
			file_doc.file_name = info.filename
			file_doc.file_size = info.file_size
			file_doc.folder = self.folder
			file_doc.is_private = self.is_private
			file_doc.file_url = file_url
			file_doc.attached_to_doctype = self.attached_to_doctype
			file_doc.attached_to_name = self.attached_to_name
			file_doc.save()

	# the zip's File record is replaced by its children
	dataent.delete_doc('File', self.name)
def update_site_usage():
	"""Snapshot current site info into `site_data.json` in the site folder.

	The `with` block already closes the file; the original's explicit
	`outfile.close()` (a redundant double close) and a dead commented-out
	existence check were removed.
	"""
	data = get_site_info()
	with open(os.path.join(dataent.get_site_path(), 'site_data.json'), 'w') as outfile:
		json.dump(data, outfile)
def after_install():
	"""Post-installation setup: seed core records, defaults and the admin password."""
	# reset installed apps for re-install
	dataent.db.set_global("installed_apps", '["dataent"]')
	install_basic_docs()

	from dataent.core.doctype.file.file import make_home_folder
	make_home_folder()

	import_country_and_currency()

	from dataent.core.doctype.language.language import sync_languages
	sync_languages()

	# save default print setting
	print_settings = dataent.get_doc("Print Settings")
	print_settings.save()

	# all roles to admin
	dataent.get_doc("User", "Administrator").add_roles(
		*dataent.db.sql_list("""select name from tabRole"""))

	# update admin password
	update_password("Administrator", get_admin_password())

	# unless configured otherwise, send the first login to the setup wizard
	if not dataent.conf.skip_setup_wizard:
		dataent.db.set_default('desktop:home_page', 'setup-wizard')

	# clear test log
	with open(dataent.get_site_path('.test_log'), 'w') as f:
		f.write('')

	dataent.db.commit()
def get_local_image(file_url):
	"""Open a site-local image and return ``(image, filename, extn)``.

	Raises the original IOError (after a user message) when the image
	cannot be opened. When the url carries no extension, the raw file
	content is read so `get_extension` can sniff the format.
	"""
	file_path = dataent.get_site_path("public", file_url.lstrip("/"))

	try:
		image = Image.open(file_path)
	except IOError:
		dataent.msgprint(
			_("Unable to read file format for {0}").format(file_url))
		raise

	content = None
	try:
		filename, extn = file_url.rsplit(".", 1)
	except ValueError:
		# no extn — sniff from content instead.
		# fixed: image bytes must be read in binary mode; text mode ("r")
		# can raise UnicodeDecodeError on arbitrary binary data
		with open(file_path, "rb") as f:
			content = f.read()
		filename = file_url
		extn = None

	extn = get_extension(filename, extn, content)
	return image, filename, extn
def add_to_test_record_log(doctype):
	'''Add `doctype` to site/.test_log

	`.test_log` is a cache of all doctypes for which test records are created'''
	logged = get_test_record_log()
	if doctype in logged:
		return

	dataent.flags.test_record_log.append(doctype)
	# drop empty entries before persisting the cache to disk
	entries = filter(None, dataent.flags.test_record_log)
	with open(dataent.get_site_path('.test_log'), 'w') as f:
		f.write('\n'.join(entries))
def validate(self):
	# Runs the standard File checks, then — for an existing non-folder File
	# whose `is_private` flag flipped — moves the physical file between the
	# public and private folders and patches the referencing document.
	if self.is_new():
		self.validate_duplicate_entry()
	self.validate_folder()

	if not self.flags.ignore_file_validate:
		self.validate_file()
		self.generate_content_hash()

	self.set_folder_size()

	if dataent.db.exists('File', {'name': self.name, 'is_folder': 0}):
		old_file_url = self.file_url
		# privacy changed on an existing file: relocate it on disk and
		# rewrite file_url to the matching public/private url scheme
		if not self.is_folder and (self.is_private != self.db_get('is_private')):
			private_files = dataent.get_site_path('private', 'files')
			public_files = dataent.get_site_path('public', 'files')

			if not self.is_private:
				shutil.move(os.path.join(private_files, self.file_name),
					os.path.join(public_files, self.file_name))
				self.file_url = "/files/{0}".format(self.file_name)
			else:
				shutil.move(os.path.join(public_files, self.file_name),
					os.path.join(private_files, self.file_name))
				self.file_url = "/private/files/{0}".format(self.file_name)

			# update documents image url with new file url
			if self.attached_to_doctype and self.attached_to_name:
				if not self.attached_to_field:
					# field name unknown: scan the referencing document for
					# whichever field held the old url (first match wins)
					field_name = None
					reference_dict = dataent.get_doc(
						self.attached_to_doctype, self.attached_to_name).as_dict()
					for key, value in reference_dict.items():
						if value == old_file_url:
							field_name = key
							break
					self.attached_to_field = field_name
				if self.attached_to_field:
					dataent.db.set_value(self.attached_to_doctype, self.attached_to_name,
						self.attached_to_field, self.file_url)
def download_e_invoice_file(file_name):
	"""Serve a private e-invoice file back to the client as a download."""
	file_path = dataent.get_site_path('private', 'files', file_name)
	with open(file_path, "r") as f:
		content = f.read()

	response = dataent.local.response
	response.filename = file_name
	response.filecontent = content
	response.type = "download"
def zip_files(self):
	"""Archive the site's public and private `files` folders with tar.

	Fixed: the tar command was built by bare `%s` interpolation, which
	breaks (or worse, mis-executes) when a site path contains spaces or
	shell metacharacters — both paths are now shell-quoted.
	"""
	from shlex import quote  # stdlib; local import keeps the file header untouched

	for folder in ("public", "private"):
		files_path = dataent.get_site_path(folder, "files")
		backup_path = self.backup_path_files if folder == "public" else self.backup_path_private_files
		cmd_string = "tar -cf %s %s" % (quote(backup_path), quote(files_path))
		err, out = dataent.utils.execute_in_shell(cmd_string)
		print('Backed up files', os.path.abspath(backup_path))
def download_zip(files, output_filename):
	"""Bundle the given private files into a single zip and serve it as a download."""
	from zipfile import ZipFile

	output_path = dataent.get_site_path('private', 'files', output_filename)

	with ZipFile(output_path, 'w') as archive:
		for filename in files:
			source = dataent.get_site_path('private', 'files', filename)
			# store only the basename so the zip has a flat layout
			archive.write(source, arcname=os.path.basename(source))

	with open(output_path, 'rb') as fileobj:
		filedata = fileobj.read()

	dataent.local.response.filename = output_filename
	dataent.local.response.filecontent = filedata
	dataent.local.response.type = "download"
def update_space_usage():
	"""Recompute disk usage (files, backups, database) and store it in limits."""
	# public and private files
	files_size = sum(
		get_folder_size(dataent.get_site_path(folder, "files"))
		for folder in ("public", "private"))
	backup_size = get_folder_size(dataent.get_site_path("private", "backups"))
	database_size = get_database_size()

	usage = {
		'files_size': flt(files_size, 2),
		'backup_size': flt(backup_size, 2),
		'database_size': flt(database_size, 2),
		'total': flt(flt(files_size) + flt(backup_size) + flt(database_size), 2)
	}

	update_limits({'space_usage': usage})
	return usage
def make_site_dirs():
	"""Create the standard site directory tree if it does not exist yet."""
	public = os.path.join(dataent.local.site_path, 'public')
	private = os.path.join(dataent.local.site_path, 'private')

	required_dirs = (
		os.path.join(private, 'backups'),
		os.path.join(public, 'files'),
		os.path.join(private, 'files'),
		os.path.join(dataent.local.site_path, 'task-logs'),
		dataent.get_site_path('locks'),
	)
	for dir_path in required_dirs:
		if not os.path.exists(dir_path):
			os.makedirs(dir_path)
def jupyter(context):
	"""Launch a Jupyter notebook server rooted at this site's notebooks folder.

	Installs jupyter via pip when missing, creates `jupyter_notebooks` in the
	site folder on first use, then replaces the current process (`os.execv`)
	with the notebook server. Fixed typo in the banner: "Stating" -> "Starting".
	"""
	try:
		from pip import main
	except ImportError:
		# pip >= 10 moved main() into the internal package
		from pip._internal import main

	reqs = subprocess.check_output([sys.executable, '-m', 'pip', 'freeze'])
	installed_packages = [r.decode().split('==')[0] for r in reqs.split()]

	if 'jupyter' not in installed_packages:
		main(['install', 'jupyter'])

	site = get_site(context)
	dataent.init(site=site)

	jupyter_notebooks_path = os.path.abspath(
		dataent.get_site_path('jupyter_notebooks'))
	sites_path = os.path.abspath(dataent.get_site_path('..'))

	try:
		os.stat(jupyter_notebooks_path)
	except OSError:
		print('Creating folder to keep jupyter notebooks at {}'.format(
			jupyter_notebooks_path))
		os.mkdir(jupyter_notebooks_path)

	bin_path = os.path.abspath('../env/bin')

	print('''
Starting Jupyter notebook
Run the following in your first cell to connect notebook to dataent
```
import dataent
dataent.init(site='{site}', sites_path='{sites_path}')
dataent.connect()
dataent.local.lang = dataent.db.get_default('lang')
dataent.db.connect()
```
	'''.format(site=site, sites_path=sites_path))

	# replaces this process — nothing after execv runs
	os.execv('{0}/jupyter'.format(bin_path), [
		'{0}/jupyter'.format(bin_path),
		'notebook',
		jupyter_notebooks_path,
	])
def load_messages(language):
	"""Load translation messages for given language from all `setup_wizard_requires` javascript files"""
	dataent.clear_cache()
	set_default_language(get_language_code(language))
	dataent.db.commit()

	messages = get_dict("page", "setup-wizard")

	for path in dataent.get_hooks("setup_wizard_requires"):
		# common folder `assets` served from `sites/`
		parts = path.strip("/").split("/")
		js_file_path = os.path.abspath(dataent.get_site_path("..", *parts))
		messages.update(get_dict("jsfile", js_file_path))

	messages.update(get_dict("boot"))
	send_translations(messages)
	return dataent.local.lang
def make_thumbnail(self, set_as_thumbnail=True, width=300, height=300, suffix="small", crop=False):
	"""Create a resized copy of this File's image under /public and return its url.

	Returns None when there is no file_url, when the source image cannot be
	loaded, or when the thumbnail cannot be written. Fixed: the original
	called `self.db_set("thumbnail_url", ...)` twice in a row.
	"""
	if not self.file_url:
		return

	if self.file_url.startswith("/files"):
		try:
			image, filename, extn = get_local_image(self.file_url)
		except IOError:
			return
	else:
		try:
			image, filename, extn = get_web_image(self.file_url)
		except (requests.exceptions.HTTPError,
				requests.exceptions.SSLError, IOError):
			return

	size = width, height
	if crop:
		# crop to exactly width x height
		image = ImageOps.fit(image, size, Image.ANTIALIAS)
	else:
		# shrink in place, preserving aspect ratio
		image.thumbnail(size, Image.ANTIALIAS)

	thumbnail_url = filename + "_" + suffix + "." + extn
	path = os.path.abspath(
		dataent.get_site_path("public", thumbnail_url.lstrip("/")))

	try:
		image.save(path)
		if set_as_thumbnail:
			self.db_set("thumbnail_url", thumbnail_url)
	except IOError:
		dataent.msgprint(
			_("Unable to write file format for {0}").format(path))
		return

	return thumbnail_url
def qrcode_as_png(user, totp_uri):
	'''Save temporary Qrcode to server.'''
	from dataent.utils.file_manager import save_file
	folder = create_barcode_folder()
	png_file_name = '{}.png'.format(dataent.generate_hash(length=20))

	file_obj = save_file(png_file_name, png_file_name, 'User', user, folder=folder)
	dataent.db.commit()
	file_url = get_url(file_obj.file_url)

	file_path = os.path.join(dataent.get_site_path('public', 'files'),
		file_obj.file_name)
	url = qrcreate(totp_uri)
	# fixed: PNG output is binary — opening in text mode ('w') makes the
	# bytes write fail on Python 3; use 'wb'
	with open(file_path, 'wb') as png_file:
		url.png(png_file, scale=8, module_color=[0, 0, 0, 180],
			background=[0xff, 0xff, 0xcc])
	return file_url
def extract_tar_files(site_name, file_path, folder_name):
	"""Copy a tar archive into the site folder and extract it in place.

	Returns the path of the copied tar file. The site connection is always
	torn down via `finally`; the original's `except: raise` clause added
	nothing and was removed. NOTE(review): `folder_name` is unused here —
	kept for interface compatibility with callers.
	"""
	# Need to do dataent.init to maintain the site locals
	dataent.init(site=site_name)
	abs_site_path = os.path.abspath(dataent.get_site_path())

	# Copy the files to the parent directory and extract
	shutil.copy2(os.path.abspath(file_path), abs_site_path)

	# Get the file name splitting the file path on
	tar_name = os.path.split(file_path)[1]
	tar_path = os.path.join(abs_site_path, tar_name)

	try:
		subprocess.check_output(['tar', 'xvf', tar_path, '--strip', '2'],
			cwd=abs_site_path)
	finally:
		dataent.destroy()

	return tar_path
def get_error_snapshot_path():
	"""Return the path of the site's `error-snapshots` folder."""
	return dataent.get_site_path('error-snapshots')
def migrate(verbose=True, rebuild_website=False):
	'''Migrate all apps to the latest version, will:
	- run before migrate hooks
	- run patches
	- sync doctypes (schema)
	- sync fixtures
	- sync desktop icons
	- sync web pages (from /www)
	- run after migrate hooks
	'''
	# NOTE(review): `rebuild_website` is not used in this body — confirm with callers
	touched_tables_file = dataent.get_site_path('touched_tables.json')
	if os.path.exists(touched_tables_file):
		os.remove(touched_tables_file)

	try:
		dataent.flags.touched_tables = set()
		dataent.flags.in_migrate = True
		clear_global_cache()

		#run before_migrate hooks
		for app in dataent.get_installed_apps():
			for fn in dataent.get_hooks('before_migrate', app_name=app):
				dataent.get_attr(fn)()

		# run patches
		dataent.modules.patch_handler.run_all()

		# sync
		dataent.model.sync.sync_all(verbose=verbose)
		dataent.translate.clear_cache()
		sync_fixtures()
		sync_customizations()
		sync_desktop_icons()
		sync_languages()

		dataent.get_doc('Portal Settings', 'Portal Settings').sync_menu()

		# syncs statics
		render.clear_cache()

		# add static pages to global search
		router.sync_global_search()

		#run after_migrate hooks
		for app in dataent.get_installed_apps():
			for fn in dataent.get_hooks('after_migrate', app_name=app):
				dataent.get_attr(fn)()

		dataent.db.commit()

		clear_notifications()

		dataent.publish_realtime("version-update")
		dataent.flags.in_migrate = False
	finally:
		# always persist which tables this migration touched, even on failure
		with open(touched_tables_file, 'w') as f:
			json.dump(list(dataent.flags.touched_tables), f, sort_keys=True, indent=4)
		dataent.flags.touched_tables.clear()