def rename_replacing_files():
    """Rename replaced files on disk to their content-hash based names and
    repoint the corresponding File documents.

    The list of file names being replaced is recorded in
    ``missing_files.txt`` in the site directory. Each physical file is
    then renamed to the name produced by ``get_file_name(file_name,
    content_hash)``, and every File doc that referenced the old name has
    its ``file_name``/``file_url`` updated to the new one.
    """
    replaced_files = get_replaced_files()
    if replaced_files:
        missing_files = [v[0] for v in replaced_files]
        # BUG FIX: the file was opened in text mode but handed encoded
        # bytes (`.encode('utf-8')`), which raises TypeError on Python 3.
        # Open with an explicit encoding and write str instead.
        with open(get_site_path('missing_files.txt'), 'w', encoding='utf-8') as f:
            f.write('\n'.join(missing_files) + '\n')

    for file_name, file_datas in replaced_files:
        print('processing ' + file_name)
        content_hash = dataent.db.get_value('File', file_datas[0], 'content_hash')
        if not content_hash:
            # no hash recorded for this File doc; nothing to rename to
            continue
        new_file_name = get_file_name(file_name, content_hash)
        if os.path.exists(get_files_path(new_file_name)):
            # BUG FIX: the skip message was placed after `continue` and was
            # therefore unreachable; log it before skipping.
            print('skipping ' + file_name)
            continue
        try:
            os.rename(get_files_path(file_name), get_files_path(new_file_name))
        except OSError:
            # best-effort: the source file may be missing; keep going
            print('Error renaming ', file_name)
        for name in file_datas:
            f = dataent.get_doc('File', name)
            f.file_name = new_file_name
            f.file_url = '/files/' + new_file_name
            f.save()
def get_context(context):
    """Build the template context for the backups page.

    Returns ``{"files": [...]}`` where each entry is a tuple of
    (download URL, modification time in the user's timezone, human size)
    for every ``sql.gz`` archive in the site's private backups directory,
    newest first.
    """

    def modified_at(file_path):
        # mtime (UTC) rendered in the user's timezone for display
        mtime = os.path.getmtime(file_path)
        local_dt = convert_utc_to_user_timezone(
            datetime.datetime.utcfromtimestamp(mtime))
        return local_dt.strftime('%Y-%m-%d %H:%M')

    def human_size(file_path):
        # size formatted as megabytes above 1 MiB, kilobytes otherwise
        n_bytes = float(os.path.getsize(file_path))
        if n_bytes > 1048576:
            return "{0:.1f}M".format(n_bytes / 1048576)
        return "{0:.1f}K".format(n_bytes / 1024)

    path = get_site_path('private', 'backups')
    files = [
        entry for entry in os.listdir(path)
        if os.path.isfile(os.path.join(path, entry))
    ]

    backup_limit = get_scheduled_backup_limit()
    if len(files) > backup_limit:
        # NOTE(review): `files` is not re-listed after cleanup — this
        # presumably relies on cleanup_old_backups leaving the listed
        # names usable; confirm against its implementation.
        cleanup_old_backups(path, files, backup_limit)

    files = [
        ('/backups/' + name,
         modified_at(os.path.join(path, name)),
         human_size(os.path.join(path, name)))
        for name in files if name.endswith('sql.gz')
    ]
    files.sort(key=lambda entry: entry[1], reverse=True)

    return {"files": files}
def delete_downloadable_backups():
    """Prune the site's private backups directory down to the scheduled
    backup retention limit."""
    backups_dir = get_site_path('private', 'backups')
    available = [
        name for name in os.listdir(backups_dir)
        if os.path.isfile(os.path.join(backups_dir, name))
    ]
    limit = get_scheduled_backup_limit()
    if len(available) > limit:
        cleanup_old_backups(backups_dir, available, limit)
def remove_old_task_logs():
    """Delete stale files from the site's task-logs directory.

    A file is considered stale when ``is_file_old`` says so; the extra
    ``os.path.isfile`` check skips directories and anything that vanished
    between listing and filtering.
    """
    logs_path = get_site_path('task-logs')

    stale = [
        os.path.join(logs_path, entry)
        for entry in os.listdir(logs_path)
        if is_file_old(os.path.join(logs_path, entry))
        and os.path.isfile(os.path.join(logs_path, entry))
    ]
    for stale_file in stale:
        os.remove(stale_file)
def get_lock_path(name):
    """Return the path of the ``.lock`` file for *name* inside the site's
    ``locks`` directory. Lock names are normalized to lower case."""
    return get_site_path('locks', '{0}.lock'.format(name.lower()))
def _new_site(db_name, site, mariadb_root_username=None,
              mariadb_root_password=None, admin_password=None,
              verbose=False, install_apps=None, source_sql=None,
              force=False, reinstall=False):
    """Install a new Dataent site.

    Creates the site directories, installs the database (optionally from
    ``source_sql``), installs ``dataent`` plus any apps from site config
    and ``install_apps``, and restores the scheduler enabled/disabled
    state. The ``installing.lock`` file is held for the duration and
    removed on exit.

    :param db_name: database name; derived from a sha1 of the site name
        when falsy
    :param site: site name to initialize
    :param mariadb_root_username: root login for DB creation
    :param mariadb_root_password: root password for DB creation
    :param admin_password: password for the Administrator user
    :param verbose: pass-through verbosity flag
    :param install_apps: extra apps to install (iterable or None)
    :param source_sql: path to an SQL dump to restore from
    :param force: force database (re)creation
    :param reinstall: reinstall over an existing database
    """
    if not db_name:
        db_name = hashlib.sha1(site.encode()).hexdigest()[:16]

    from dataent.installer import install_db, make_site_dirs
    from dataent.installer import install_app as _install_app
    import dataent.utils.scheduler

    dataent.init(site=site)

    try:
        # capture current scheduler state so we can restore it post install
        enable_scheduler = _is_scheduler_enabled()
    except Exception:
        enable_scheduler = False

    make_site_dirs()

    installing = None
    try:
        installing = touch_file(get_site_path('locks', 'installing.lock'))

        install_db(root_login=mariadb_root_username,
                   root_password=mariadb_root_password, db_name=db_name,
                   admin_password=admin_password, verbose=verbose,
                   source_sql=source_sql, force=force, reinstall=reinstall)

        # BUG FIX: the original `(list(install_apps) or [])` raised
        # TypeError when install_apps was left at its default of None;
        # guard before converting.
        apps_to_install = (['dataent']
                           + (dataent.conf.get("install_apps") or [])
                           + list(install_apps or []))
        for app in apps_to_install:
            # when restoring from SQL, patches are not marked as applied
            _install_app(app, verbose=verbose, set_as_patched=not source_sql)

        dataent.utils.scheduler.toggle_scheduler(enable_scheduler)
        dataent.db.commit()

        scheduler_status = ("disabled"
                            if dataent.utils.scheduler.is_scheduler_disabled()
                            else "enabled")
        print("*** Scheduler is", scheduler_status, "***")

    except dataent.exceptions.ImproperDBConfigurationError:
        # DB config is unusable — tear the half-created site down
        _drop_site(site, mariadb_root_username, mariadb_root_password, force=True)

    finally:
        if installing and os.path.exists(installing):
            os.remove(installing)
        dataent.destroy()