def backup_to_service():
	"""Back up the site to the configured cloud service via rclone.

	Reads the 'Backup Manager' single doctype for which artefacts to back
	up (database, public files, private files), creates the backups, and
	hands each staging folder to ``sync_folder`` for upload.

	Returns:
		tuple: (did_not_upload, error_log) -- NOTE(review): neither list is
		ever populated in this function, so callers always receive empty
		lists; confirm whether sync_folder was meant to fill them.
	"""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path

	# upload files to files folder
	did_not_upload = []
	error_log = []
	if not frappe.db:
		frappe.connect()
	# age threshold read from settings -- presumably hours; TODO confirm unit
	older_than = cint(frappe.db.get_value('Backup Manager', None, 'older_than'))
	if cint(frappe.db.get_value("Backup Manager", None, "enable_database")):
		# upload database
		backup = new_backup(ignore_files=True)
		# filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		# NOTE(review): other variants of sync_folder in this file take six
		# arguments -- confirm this three-argument call matches its signature
		sync_folder(older_than, get_backups_path(), "database")
	# compressed file archives are staged next to (outside) the backups folder
	BASE_DIR = os.path.join(get_backups_path(), '../file_backups')
	if cint(frappe.db.get_value("Backup Manager", None, "enable_files")):
		Backup_DIR = os.path.join(BASE_DIR, "files")
		compress_files(get_files_path(), Backup_DIR)
		sync_folder(older_than, Backup_DIR, "files")
	if cint(frappe.db.get_value("Backup Manager", None, "enable_private_files")):
		Backup_DIR = os.path.join(BASE_DIR, "private/files")
		compress_files(get_files_path(is_private=1), Backup_DIR)
		sync_folder(older_than, Backup_DIR, "private/files")
	frappe.db.close()
	return did_not_upload, list(set(error_log))
def backup_to_s3():
	"""Take a full backup (database + public/private files) and upload all
	three archives to S3.

	Credentials, company name and backup plan are read from the
	'Amazon S3 Settings' single doctype; uploads are keyed by the
	lower-cased, underscore-joined company name.
	"""
	from boto.s3.connection import S3Connection
	from frappe.utils.backups import new_backup
	from frappe.utils import get_backups_path

	if not frappe.db:
		frappe.connect()
	conn = S3Connection(
		frappe.db.get_value("Amazon S3 Settings", None, "aws_access_key_id"),
		frappe.db.get_value("Amazon S3 Settings", None, "secret_access_key"),
	)
	# upload database
	# raw string: "\s" is an invalid escape sequence on Python 3.6+
	company = re.sub(r"\s", "_", str(frappe.db.get_value("Amazon S3 Settings", None, "company_name")).lower())
	backup = new_backup(
		ignore_files=False,
		backup_path_db=None,
		backup_path_files=None,
		backup_path_private_files=None,
		force=True,
	)
	db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
	private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files))
	# look up the plan once instead of once per upload
	backup_plan = frappe.db.get_value("Amazon S3 Settings", None, "backup_plan")
	upload_file_to_s3(db_filename, company, conn, backup_plan)
	upload_file_to_s3(private_files, company, conn, backup_plan)
	upload_file_to_s3(files_filename, company, conn, backup_plan)
	frappe.db.close()
	frappe.connect()
def sync_folder(site, older_than, sourcepath, destfolder, did_not_upload, error_log):
	"""Sync the site's backups folder to the configured rclone remote.

	Args:
		site: site/company identifier used as the top-level remote folder.
		older_than: age threshold -- currently unused (the
			delete_temp_backups call was commented out).
		sourcepath: local folder to sync. NOTE(review): it is overwritten
			with get_backups_path() below, so the argument is effectively
			ignored -- confirm against callers.
		destfolder: logical destination name -- currently unused.
		did_not_upload: list mutated in place; the source path is appended
			on failure.
		error_log: list mutated in place; the failure traceback is
			appended.
	"""
	from frappe.utils import get_bench_path

	rclone_remote_directory = frappe.db.get_value('Backup Settings', None, 'rclone_remote_directory_path')
	sourcepath = get_bench_path() + "/sites" + sourcepath.replace("./", "/")
	# remote folders must not contain spaces
	final_dest = (rclone_remote_directory + "/" + str(site)).replace(" ", "_")
	rclone_remote_name = frappe.db.get_value('Backup Settings', None, 'rclone_remote_name')
	destpath = rclone_remote_name + ":" + final_dest
	# the whole backups folder is synced, regardless of the sourcepath argument
	sourcepath = get_bench_path() + "/sites" + get_backups_path().replace("./", "/")
	cmd_string = "rclone sync " + sourcepath + " " + destpath
	try:
		err, out = frappe.utils.execute_in_shell(cmd_string)
		if err:
			# surface rclone's stderr instead of raising a bare Exception
			raise Exception(err)
	except Exception:
		# record the failure so callers can see it: the original rebound the
		# parameter (did_not_upload = True), which was invisible outside,
		# and appended the Exception class instead of an error message
		did_not_upload.append(sourcepath)
		error_log.append(frappe.get_traceback())
def backup_to_s3():
	"""Create a full backup and push the database, private-files and
	public-files archives to the configured S3 bucket, then prune old
	backups past the configured limit."""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_backups_path

	settings = frappe.get_single("S3 Backup Settings")
	bucket = settings.bucket
	conn = boto3.client(
		's3',
		aws_access_key_id=settings.access_key_id,
		aws_secret_access_key=settings.get_password('secret_access_key'),
		endpoint_url=settings.endpoint_url or 'https://s3.amazonaws.com'
	)

	backup = new_backup(ignore_files=False, backup_path_db=None,
		backup_path_files=None, backup_path_private_files=None, force=True)
	backups_dir = get_backups_path()
	db_path = os.path.join(backups_dir, os.path.basename(backup.backup_path_db))
	public_path = os.path.join(backups_dir, os.path.basename(backup.backup_path_files))
	private_path = os.path.join(backups_dir, os.path.basename(backup.backup_path_private_files))

	# folder name carries the backup's datetime prefix (first 15 chars)
	folder = os.path.basename(db_path)[:15] + '/'
	for artefact in (db_path, private_path, public_path):
		upload_file_to_s3(artefact, folder, conn, bucket)

	delete_old_backups(settings.backup_limit, bucket)
def backup_to_s3():
	"""Upload database, site config and (optionally) file backups to S3.

	When ``frappe.flags.create_new_backup`` is set a fresh backup is taken;
	otherwise the most recent existing backup files are reused, generating
	a files backup on demand if the reused set is missing one.
	"""
	from frappe.utils import get_backups_path
	from frappe.utils.backups import new_backup

	doc = frappe.get_single("S3 Backup Settings")
	bucket = doc.bucket
	backup_files = cint(doc.backup_files)

	conn = boto3.client(
		"s3",
		aws_access_key_id=doc.access_key_id,
		aws_secret_access_key=doc.get_password("secret_access_key"),
		endpoint_url=doc.endpoint_url or "https://s3.amazonaws.com",
	)

	if frappe.flags.create_new_backup:
		backup = new_backup(
			ignore_files=False,
			backup_path_db=None,
			backup_path_files=None,
			backup_path_private_files=None,
			force=True,
		)
		db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		site_config = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_conf))
		if backup_files:
			files_filename = os.path.join(
				get_backups_path(), os.path.basename(backup.backup_path_files))
			private_files = os.path.join(
				get_backups_path(), os.path.basename(backup.backup_path_private_files))
	else:
		if backup_files:
			db_filename, site_config, files_filename, private_files = get_latest_backup_file(
				with_files=backup_files)
			# the latest backup may have been database-only; regenerate the
			# file archives once and re-read the latest set
			if not files_filename or not private_files:
				generate_files_backup()
				db_filename, site_config, files_filename, private_files = get_latest_backup_file(
					with_files=backup_files)
		else:
			db_filename, site_config = get_latest_backup_file()

	folder = os.path.basename(db_filename)[:15] + "/"  # for adding datetime to folder name

	upload_file_to_s3(db_filename, folder, conn, bucket)
	upload_file_to_s3(site_config, folder, conn, bucket)

	if backup_files:
		# either archive may legitimately be absent; upload what exists
		if private_files:
			upload_file_to_s3(private_files, folder, conn, bucket)
		if files_filename:
			upload_file_to_s3(files_filename, folder, conn, bucket)
def backup_to_dropbox(upload_db_backup=True):
	"""Upload the database backup and site config (optionally) plus site
	files to Dropbox.

	Returns a (did_not_upload, error_log) pair from the file uploads, or
	an error-message pair when no access token could be obtained.
	"""
	if not frappe.db:
		frappe.connect()

	# upload database
	settings = get_dropbox_settings()
	if not settings["access_token"]:
		token = generate_oauth2_access_token_from_oauth1_token(settings)
		if not token.get("oauth2_token"):
			return (
				"Failed backup upload",
				"No Access Token exists! Please generate the access token for Dropbox.",
			)
		settings["access_token"] = token["oauth2_token"]
		set_dropbox_access_token(token["oauth2_token"])

	client = dropbox.Dropbox(oauth2_access_token=settings["access_token"], timeout=None)

	if upload_db_backup:
		if frappe.flags.create_new_backup:
			backup = new_backup(ignore_files=True)
			filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
			site_config = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_conf))
		else:
			filename, site_config = get_latest_backup_file()
		upload_file_to_dropbox(filename, "/database", client)
		upload_file_to_dropbox(site_config, "/database", client)

		# delete older databases
		if settings["no_of_backups"]:
			delete_older_backups(client, "/database", settings["no_of_backups"])

	# upload files to files folder
	did_not_upload = []
	error_log = []
	if settings["file_backup"]:
		upload_from_folder(get_files_path(), 0, "/files", client, did_not_upload, error_log)
		upload_from_folder(get_files_path(is_private=1), 1, "/private/files", client, did_not_upload, error_log)

	return did_not_upload, list(set(error_log))
def upload_file_to_service(older_than, filename, folder, compress):
	"""Push *filename* (zipped first when *compress* is set) to the gdrive
	remote folder via rclone; returns silently if the path is missing."""
	if not os.path.exists(filename):
		return

	if compress:
		from shutil import make_archive

		staging_root = os.path.join(get_backups_path(), '../file_backups')
		staging_dir = os.path.join(staging_root, folder)
		archive_name = datetime.today().strftime("%d%m%Y_%H%M%S") + '_files'
		archive_path = os.path.join(staging_dir, archive_name)
		# purge stale archives before producing the new one
		delete_temp_backups(older_than, staging_dir)
		filename = make_archive(archive_path, 'zip', filename)

	if os.path.exists(filename):
		# rclone treats a trailing slash as "contents of directory"
		sourcepath = filename + "/" if os.path.isdir(filename) else filename
		destpath = "gdrive:" + folder + " --drive-use-trash"
		# cmd_string = "rclone --min-age 2d delete " + destpath
		# err, out = frappe.utils.execute_in_shell(cmd_string)
		err, out = frappe.utils.execute_in_shell("rclone copy " + sourcepath + " " + destpath)
def backup_to_dropbox():
	"""Upload a fresh database backup and all site files (public and
	private) to Dropbox using the legacy OAuth1 session API."""
	from dropbox import client, session
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path, get_backups_path

	if not frappe.db:
		frappe.connect()

	sess = session.DropboxSession(
		frappe.conf.dropbox_access_key, frappe.conf.dropbox_secret_key, "app_folder")
	sess.set_token(
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_key"),
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_secret"))
	dropbox_client = client.DropboxClient(sess)

	# upload database
	db_backup = new_backup(ignore_files=True)
	db_file = os.path.join(get_backups_path(), os.path.basename(db_backup.backup_path_db))
	upload_file_to_dropbox(db_file, "/database", dropbox_client)
	frappe.db.close()

	# upload files to files folder
	did_not_upload = []
	error_log = []
	upload_from_folder(get_files_path(), "/files", dropbox_client, did_not_upload, error_log)
	upload_from_folder(get_files_path(is_private=1), "/private/files", dropbox_client, did_not_upload, error_log)

	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox():
	"""Create a database-only backup, upload it to Dropbox, then sync the
	public and private files folders (legacy OAuth1 session API)."""
	from dropbox import client, session
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path, get_backups_path

	if not frappe.db:
		frappe.connect()

	dropbox_session = session.DropboxSession(
		frappe.conf.dropbox_access_key, frappe.conf.dropbox_secret_key, "app_folder")
	dropbox_session.set_token(
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_key"),
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_secret"))
	dropbox_client = client.DropboxClient(dropbox_session)

	# upload database
	backup = new_backup(ignore_files=True)
	database_dump = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(database_dump, "/database", dropbox_client)
	frappe.db.close()

	# upload files to files folder
	did_not_upload = []
	error_log = []
	for source, dest in ((get_files_path(), "/files"),
			(get_files_path(is_private=1), "/private/files")):
		upload_from_folder(source, dest, dropbox_client, did_not_upload, error_log)

	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox():
	"""Upload a fresh database backup plus the site's public and private
	files to Dropbox, generating an OAuth2 token from the stored OAuth1
	settings when none exists."""
	if not frappe.db:
		frappe.connect()

	# upload database
	settings = get_dropbox_settings()
	if not settings['access_token']:
		token = generate_oauth2_access_token_from_oauth1_token(settings)
		if not token.get('oauth2_token'):
			return 'Failed backup upload', 'No Access Token exists! Please generate the access token for Dropbox.'
		settings['access_token'] = token['oauth2_token']
		set_dropbox_access_token(token['oauth2_token'])

	client = dropbox.Dropbox(settings['access_token'])

	backup = new_backup(ignore_files=True)
	db_path = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(db_path, "/database", client)
	frappe.db.close()

	# upload files to files folder
	did_not_upload, error_log = [], []
	upload_from_folder(get_files_path(), "/files", client, did_not_upload, error_log)
	upload_from_folder(get_files_path(is_private=1), "/private/files", client, did_not_upload, error_log)

	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox():
	"""Upload the database backup, then the public and private files, to
	Dropbox; the client object returned by each upload helper is carried
	forward to the next call."""
	if not frappe.db:
		frappe.connect()

	client = get_dropbox_client()

	# upload database
	backup = new_backup(ignore_files=True)
	db_path = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	client = upload_file_to_dropbox(db_path, "/database", client)
	frappe.db.close()

	# upload files to files folder
	did_not_upload = []
	error_log = []
	client = upload_from_folder(get_files_path(), "/files", client, did_not_upload, error_log)
	client = upload_from_folder(get_files_path(is_private=1), "/private/files", client, did_not_upload, error_log)

	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_service():
	"""Create the enabled backups (database / public files / private
	files) and, when cloud sync is on, push each staging folder to the
	configured remote."""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path

	did_not_upload = []
	error_log = []
	if not frappe.db:
		frappe.connect()

	def manager_setting(fieldname):
		# all switches live on the 'Backup Manager' single doctype
		return cint(frappe.db.get_value('Backup Manager', None, fieldname))

	older_than_hrs = manager_setting('older_than')
	cloud_sync = manager_setting('cloud_sync')
	# site = cstr(frappe.local.site)
	site = get_site_base_path()[2:]

	if manager_setting("enable_database"):
		# upload database
		new_backup(older_than_hrs, ignore_files=True)
		# filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		if cloud_sync:
			sync_folder(site, older_than_hrs, get_backups_path(), "database", did_not_upload, error_log)

	staging_root = os.path.join(get_backups_path(), '../file_backups')

	if manager_setting("enable_files"):
		staging_dir = os.path.join(staging_root, "files")
		compress_files(get_files_path(), staging_dir)
		if cloud_sync:
			sync_folder(site, older_than_hrs, staging_dir, "public-files", did_not_upload, error_log)

	if manager_setting("enable_private_files"):
		staging_dir = os.path.join(staging_root, "private/files")
		compress_files(get_files_path(is_private=1), staging_dir, "private")
		if cloud_sync:
			sync_folder(site, older_than_hrs, staging_dir, "private-files", did_not_upload, error_log)

	frappe.db.close()
	# frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_s3():
	"""Take a full backup and upload the public and private file archives
	to S3 in ap-south-1.

	The database dump upload is intentionally commented out; credentials,
	company name and plan come from the 'Amazon S3 Settings' doctype.
	"""
	import boto
	from frappe.utils.backups import new_backup
	from frappe.utils import get_backups_path

	if not frappe.db:
		frappe.connect()
	conn = boto.s3.connect_to_region(
		'ap-south-1',
		aws_access_key_id=frappe.db.get_value("Amazon S3 Settings", None, "aws_access_key_id"),
		aws_secret_access_key=frappe.db.get_value("Amazon S3 Settings", None, "secret_access_key"),
		is_secure=True,
		calling_format=boto.s3.connection.OrdinaryCallingFormat(),
	)
	# upload database
	# raw string: '\s' is an invalid escape sequence on Python 3.6+
	company = re.sub(
		r'\s', '_',
		str(frappe.db.get_value("Amazon S3 Settings", None, "company_name")).lower())
	backup = new_backup(
		ignore_files=False,
		backup_path_db=None,
		backup_path_files=None,
		backup_path_private_files=None,
		force=True)
	# db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
	private_files = os.path.join(
		get_backups_path(), os.path.basename(backup.backup_path_private_files))
	# look up the plan once for both uploads
	backup_plan = frappe.db.get_value("Amazon S3 Settings", None, "backup_plan")
	# upload_file_to_s3(db_filename, company, conn, backup_plan)
	upload_file_to_s3(private_files, company, conn, backup_plan)
	upload_file_to_s3(files_filename, company, conn, backup_plan)
	frappe.db.close()
	frappe.connect()
def backup_to_service():
	"""Run the configured backups (database, public files, private files)
	and sync each result to the cloud remote when cloud sync is enabled."""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path

	# upload files to files folder
	failed, errors = [], []
	if not frappe.db:
		frappe.connect()

	older_than_hrs = cint(frappe.db.get_value('Backup Manager', None, 'older_than'))
	cloud_sync = cint(frappe.db.get_value('Backup Manager', None, 'cloud_sync'))
	# site = cstr(frappe.local.site)
	site = get_site_base_path()[2:]

	if cint(frappe.db.get_value("Backup Manager", None, "enable_database")):
		# upload database
		new_backup(older_than_hrs, ignore_files=True)
		# filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		if cloud_sync:
			sync_folder(site, older_than_hrs, get_backups_path(), "database", failed, errors)

	base_dir = os.path.join(get_backups_path(), '../file_backups')

	if cint(frappe.db.get_value("Backup Manager", None, "enable_files")):
		backup_dir = os.path.join(base_dir, "files")
		compress_files(get_files_path(), backup_dir)
		if cloud_sync:
			sync_folder(site, older_than_hrs, backup_dir, "public-files", failed, errors)

	if cint(frappe.db.get_value("Backup Manager", None, "enable_private_files")):
		backup_dir = os.path.join(base_dir, "private/files")
		compress_files(get_files_path(is_private=1), backup_dir, "private")
		if cloud_sync:
			sync_folder(site, older_than_hrs, backup_dir, "private-files", failed, errors)

	frappe.db.close()
	# frappe.connect()
	return failed, list(set(errors))
def backup_to_dropbox():
	"""Upload a full database backup to Dropbox, then upload any public
	files that are missing (or differ in size) from the remote /files
	folder."""
	from dropbox import client, session
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path, get_backups_path

	if not frappe.db:
		frappe.connect()

	sess = session.DropboxSession(
		frappe.conf.dropbox_access_key, frappe.conf.dropbox_secret_key, "app_folder")
	sess.set_token(
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_key"),
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_secret"))
	dropbox_client = client.DropboxClient(sess)

	# upload database
	backup = new_backup()
	db_file = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(db_file, "/database", dropbox_client)
	frappe.db.close()

	remote_listing = dropbox_client.metadata("/files")

	# upload files to files folder
	did_not_upload = []
	error_log = []
	files_dir = get_files_path()
	for name in os.listdir(files_dir):
		name = cstr(name)
		if name in ignore_list:
			continue
		local_path = os.path.join(files_dir, name)
		# skip files already present remotely with the same name and size
		already_there = any(
			os.path.basename(local_path) == os.path.basename(entry["path"])
			and os.stat(local_path).st_size == int(entry["bytes"])
			for entry in remote_listing["contents"])
		if not already_there:
			try:
				upload_file_to_dropbox(local_path, "/files", dropbox_client)
			except Exception:
				did_not_upload.append(name)
				error_log.append(frappe.get_traceback())

	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_nextcloud(upload_db_backup=True, ignore_files=True):
	"""Upload the site's backup to Nextcloud.

	Args:
		upload_db_backup: create and upload a new database backup.
		ignore_files: when False, also upload the public/private file
			archives (only attempted if the database upload succeeded).

	Returns:
		tuple: (did_not_upload, error_log) where error_log carries any
		upload responses containing "Error".
	"""
	if not frappe.db:
		frappe.connect()

	error_log = []
	respnc = ""
	if upload_db_backup:
		backup = new_backup(ignore_files=ignore_files)
		filebackup = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		respnc = filebackup_upload(filebackup)

		# file backup -- only when the database upload reported no error
		if not ignore_files and "Error" not in respnc:
			for attr in ("backup_path_files", "backup_path_private_files"):
				filebackup = os.path.join(get_backups_path(), os.path.basename(getattr(backup, attr)))
				respncf = filebackup_upload(filebackup)
				# previously these responses were silently discarded;
				# record file-upload failures too
				if "Error" in respncf:
					error_log.append(respncf)

	did_not_upload = []
	if "Error" in respnc:
		error_log.append(respnc)
	return did_not_upload, list(set(error_log))
def take(self):
	"""Return the existing snapshot if one exists; otherwise create a new
	database backup, move it into the snapshots folder and record its
	path."""
	if self.exists():
		return self.get()

	backup = new_backup(ignore_files=True)
	snapshots_dir = get_snapshots_path()
	basename = os.path.basename(backup.backup_path_db)
	source = os.path.join(get_backups_path(), basename)
	target = os.path.join(snapshots_dir, basename)

	if not os.path.exists(snapshots_dir):
		os.mkdir(snapshots_dir)

	os.rename(source, target)
	self.update_path(target)
def backup_to_dropbox():
	"""Upload a full database backup to Dropbox and then sync any public
	files not already present remotely with the same name and size."""
	from dropbox import client, session
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path, get_backups_path

	if not frappe.db:
		frappe.connect()

	sess = session.DropboxSession(
		frappe.conf.dropbox_access_key, frappe.conf.dropbox_secret_key, "app_folder")
	sess.set_token(
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_key"),
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_secret"))
	dropbox_client = client.DropboxClient(sess)

	# upload database
	backup = new_backup()
	db_path = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(db_path, "/database", dropbox_client)
	frappe.db.close()

	response = dropbox_client.metadata("/files")

	def on_dropbox(filepath):
		# matched by basename and exact byte size
		for entry in response["contents"]:
			if (os.path.basename(filepath) == os.path.basename(entry["path"])
					and os.stat(filepath).st_size == int(entry["bytes"])):
				return True
		return False

	# upload files to files folder
	did_not_upload = []
	error_log = []
	path = get_files_path()
	for fname in os.listdir(path):
		fname = cstr(fname)
		if fname in ignore_list:
			continue
		filepath = os.path.join(path, fname)
		if not on_dropbox(filepath):
			try:
				upload_file_to_dropbox(filepath, "/files", dropbox_client)
			except Exception:
				did_not_upload.append(fname)
				error_log.append(frappe.get_traceback())

	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox(upload_db_backup=True):
	"""Optionally upload a database backup to Dropbox, prune older remote
	backups, and sync public/private files when file backup is enabled."""
	if not frappe.db:
		frappe.connect()

	# upload database
	settings = get_dropbox_settings()
	if not settings['access_token']:
		token = generate_oauth2_access_token_from_oauth1_token(settings)
		if not token.get('oauth2_token'):
			return 'Failed backup upload', 'No Access Token exists! Please generate the access token for Dropbox.'
		settings['access_token'] = token['oauth2_token']
		set_dropbox_access_token(token['oauth2_token'])

	client = dropbox.Dropbox(settings['access_token'])

	if upload_db_backup:
		if frappe.flags.create_new_backup:
			backup = new_backup(ignore_files=True)
			filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		else:
			filename = get_latest_backup_file()
		upload_file_to_dropbox(filename, "/database", client)

		# delete older databases
		if settings['no_of_backups']:
			delete_older_backups(client, "/database", settings['no_of_backups'])

	# upload files to files folder
	did_not_upload = []
	error_log = []
	if settings['file_backup']:
		upload_from_folder(get_files_path(), 0, "/files", client, did_not_upload, error_log)
		upload_from_folder(get_files_path(is_private=1), 1, "/private/files", client, did_not_upload, error_log)

	return did_not_upload, list(set(error_log))
def backup_to_ftp(upload_db_backup=True):
	"""Upload backups to the configured FTP(S) server.

	Returns an error-message pair when host or username is missing,
	otherwise the (did_not_upload, error_log) pair from the file uploads.
	The control connection is always closed on exit.
	"""
	if not frappe.db:
		frappe.connect()

	# upload database
	settings, use_tls = get_ftp_settings()
	if not settings['host']:
		return 'Failed backup upload', 'No FTP host! Please enter valid host for FTP.'
	if not settings['username']:
		return 'Failed backup upload', 'No FTP username! Please enter valid username for FTP.'

	ftp_client = FTP_TLS(**settings) if use_tls else FTP(**settings)
	try:
		if upload_db_backup:
			backup = new_backup(ignore_files=True)
			filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
			upload_file_to_ftp(filename, "/database", ftp_client)

			# delete older databases
			if settings['no_of_backups']:
				delete_older_backups(ftp_client, "/database", settings['no_of_backups'])

		# upload files to files folder
		did_not_upload = []
		error_log = []
		if settings['file_backup']:
			upload_from_folder(get_files_path(), 0, "/files", ftp_client, did_not_upload, error_log)
			upload_from_folder(get_files_path(is_private=1), 1, "/private/files", ftp_client, did_not_upload, error_log)
		return did_not_upload, list(set(error_log))
	finally:
		ftp_client.quit()
def backup_to_dropbox(upload_db_backup=True):
	"""Upload the database backup (optional) and site files to Dropbox,
	generating an OAuth2 token from stored OAuth1 settings when needed."""
	if not frappe.db:
		frappe.connect()

	# upload database
	dropbox_settings = get_dropbox_settings()
	if not dropbox_settings['access_token']:
		access_token = generate_oauth2_access_token_from_oauth1_token(dropbox_settings)
		if not access_token.get('oauth2_token'):
			return 'Failed backup upload', 'No Access Token exists! Please generate the access token for Dropbox.'
		dropbox_settings['access_token'] = access_token['oauth2_token']
		set_dropbox_access_token(access_token['oauth2_token'])

	# Wait for 60 seconds before throwing ReadTimeout, in case server doesn't respond
	client = dropbox.Dropbox(dropbox_settings['access_token'], timeout=60)

	if upload_db_backup:
		db_backup = new_backup(ignore_files=True)
		db_path = os.path.join(get_backups_path(), os.path.basename(db_backup.backup_path_db))
		upload_file_to_dropbox(db_path, "/database", client)

	# upload files to files folder
	did_not_upload, error_log = [], []
	if dropbox_settings['file_backup']:
		upload_from_folder(get_files_path(), 0, "/files", client, did_not_upload, error_log)
		upload_from_folder(get_files_path(is_private=1), 1, "/private/files", client, did_not_upload, error_log)

	return did_not_upload, list(set(error_log))
def backup_to_service():
	"""Prune old local backups past the scheduled limit, take a full
	backup, and sync database/public/private artefacts to the cloud
	remote when cloud sync is enabled in 'Backup Settings'."""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path

	# delete old
	path = get_site_path('private', 'backups')
	files = [x for x in os.listdir(path) if os.path.isfile(os.path.join(path, x))]
	backup_limit = get_scheduled_backup_limit()
	# consolidated from three copy-pasted passes.
	# NOTE(review): `files` is not refreshed between passes, so every pass
	# compares the same (stale) count -- confirm this is intended
	for endswith in ('sql.gz', 'files.tar', 'private-files.tar'):
		if len(files) > backup_limit:
			cleanup_old_backups(path, files, backup_limit, endswith)
	# delete old

	# upload files to files folder
	did_not_upload = []
	error_log = []
	if not frappe.db:
		frappe.connect()
	older_than = cint(frappe.db.get_value('Backup Settings', None, 'older_than'))
	cloud_sync = cint(frappe.db.get_value('Backup Settings', None, 'cloud_sync'))
	site = frappe.db.get_value('Global Defaults', None, 'default_company')

	if cint(frappe.db.get_value("Backup Settings", None, "enable_database")):
		# upload database
		# backup = new_backup(older_than,ignore_files=True)
		backup = new_backup(ignore_files=False, backup_path_db=None,
			backup_path_files=None, backup_path_private_files=None, force=True)
		db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
		private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files))
		folder = os.path.basename(db_filename)[:15] + '/'
		# Python 2 print statements converted to the print() function
		print(db_filename)
		print(files_filename)
		print(private_files)
		print(folder)
		# filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		if cloud_sync:
			print('inside uploading db to cloud')
			sync_folder(site, older_than, db_filename, "database", did_not_upload, error_log)

	BASE_DIR = os.path.join(get_backups_path(), '../file_backups')

	# NOTE(review): files_filename/private_files are only defined when
	# enable_database is on -- the branches below raise NameError otherwise;
	# confirm the settings are always enabled together
	if cint(frappe.db.get_value("Backup Settings", None, "enable_public_files")):
		# Backup_DIR = os.path.join(BASE_DIR, "files")
		# compress_files(get_files_path(), Backup_DIR)
		if cloud_sync:
			print('inside uploading public to cloud')
			sync_folder(site, older_than, files_filename, "public-files", did_not_upload, error_log)

	if cint(frappe.db.get_value("Backup Settings", None, "enable_private_files")):
		# Backup_DIR = os.path.join(BASE_DIR, "private/files")
		# compress_files(get_files_path(is_private=1), Backup_DIR)
		if cloud_sync:
			print('inside uploading private to cloud')
			sync_folder(site, older_than, private_files, "private-files", did_not_upload, error_log)

	frappe.db.close()
	# frappe.connect()
	return did_not_upload, list(set(error_log))
def get_latest(file_ext):
	"""Return the most recently created backup file matching *file_ext*
	(a glob pattern relative to the backups folder), or None when nothing
	matches."""
	matches = glob.glob(os.path.join(get_backups_path(), file_ext))
	if not matches:
		return None
	return max(matches, key=os.path.getctime)
def get_absolute_path(filename):
	"""Build the absolute bench path for *filename* inside the site's
	backups folder (the leading './' of get_backups_path() is stripped)."""
	relative = os.path.join(get_backups_path()[2:], filename)
	return "{0}/sites/{1}".format(get_bench_path(), relative)