def backup_to_dropbox():
	"""Take a database-only backup and sync it plus public/private files to Dropbox.

	Uses the legacy OAuth1 Dropbox SDK (session + DropboxClient).
	Returns a tuple ``(did_not_upload, error_log)`` of files that failed to sync.
	"""
	from dropbox import client, session
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path, get_backups_path
	if not frappe.db:
		frappe.connect()
	# OAuth1 session built from site-level app keys plus per-site tokens
	# stored on the "Dropbox Backup" single doctype
	sess = session.DropboxSession(frappe.conf.dropbox_access_key, frappe.conf.dropbox_secret_key, "app_folder")
	sess.set_token(
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_key"),
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_secret"))
	dropbox_client = client.DropboxClient(sess)
	# upload database
	backup = new_backup(ignore_files=True)
	filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(filename, "/database", dropbox_client)
	# release the db connection during the (potentially long) file sync;
	# reconnected just before returning
	frappe.db.close()
	# upload files to files folder
	did_not_upload = []
	error_log = []
	upload_from_folder(get_files_path(), "/files", dropbox_client, did_not_upload, error_log)
	upload_from_folder(get_files_path(is_private=1), "/private/files", dropbox_client, did_not_upload, error_log)
	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox():
	"""Back up the database and sync public/private files to Dropbox.

	Returns a tuple ``(did_not_upload, error_log)``.
	"""
	if not frappe.db:
		frappe.connect()
	dropbox_client = get_dropbox_client()
	# upload database
	backup = new_backup(ignore_files=True)
	filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	# the upload helpers return a client, so it is rebound after each call —
	# presumably to propagate a refreshed token; confirm against the helpers
	dropbox_client = upload_file_to_dropbox(filename, "/database", dropbox_client)
	# release the db connection during the long-running file sync
	frappe.db.close()
	# upload files to files folder
	did_not_upload = []
	error_log = []
	dropbox_client = upload_from_folder(get_files_path(), "/files", dropbox_client, did_not_upload, error_log)
	dropbox_client = upload_from_folder(get_files_path(is_private=1), "/private/files", dropbox_client, did_not_upload, error_log)
	frappe.connect()
	return did_not_upload, list(set(error_log))
def upload_system_backup_to_google_drive(): """ Upload system backup to Google Drive """ # Get Google Drive Object google_drive, account = get_google_drive_object() # Check if folder exists in Google Drive check_for_folder_in_google_drive() account.load_from_db() validate_file_size() if frappe.flags.create_new_backup: set_progress(1, "Backing up Data.") backup = new_backup() file_urls = [] file_urls.append(backup.backup_path_db) file_urls.append(backup.site_config_backup_path) if account.file_backup: file_urls.append(backup.backup_path_files) file_urls.append(backup.backup_path_private_files) else: if account.file_backup: backup_files() file_urls = get_latest_backup_file(with_files=account.file_backup) for fileurl in file_urls: if not fileurl: continue file_metadata = { "name": fileurl, "parents": [account.backup_folder_id] } try: media = MediaFileUpload(get_absolute_path(filename=fileurl), mimetype="application/gzip", resumable=True) except IOError as e: frappe.throw(_("Google Drive - Could not locate - {0}").format(e)) try: set_progress(2, "Uploading backup to Google Drive.") google_drive.files().create(body=file_metadata, media_body=media, fields="id").execute() except HttpError as e: send_email(False, "Google Drive", "Google Drive", "email", error_status=e) set_progress(3, "Uploading successful.") frappe.db.set_value("Google Drive", None, "last_backup_on", frappe.utils.now_datetime()) send_email(True, "Google Drive", "Google Drive", "email") return _("Google Drive Backup Successful.")
def backup_to_dropbox():
	"""Back up the database and sync public/private files to Dropbox (OAuth2).

	Migrates a stored OAuth1 token to OAuth2 on first use. Returns
	``(did_not_upload, error_log)`` on success, or a ``(message, reason)``
	string pair when no access token could be obtained.
	"""
	if not frappe.db:
		frappe.connect()
	# upload database
	dropbox_settings = get_dropbox_settings()
	if not dropbox_settings['access_token']:
		# one-time migration: exchange the legacy OAuth1 token for OAuth2
		access_token = generate_oauth2_access_token_from_oauth1_token(dropbox_settings)
		if not access_token.get('oauth2_token'):
			return 'Failed backup upload', 'No Access Token exists! Please generate the access token for Dropbox.'
		dropbox_settings['access_token'] = access_token['oauth2_token']
		set_dropbox_access_token(access_token['oauth2_token'])
	dropbox_client = dropbox.Dropbox(dropbox_settings['access_token'])
	backup = new_backup(ignore_files=True)
	filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(filename, "/database", dropbox_client)
	# release the db connection during the long-running file sync
	frappe.db.close()
	# upload files to files folder
	did_not_upload = []
	error_log = []
	upload_from_folder(get_files_path(), "/files", dropbox_client, did_not_upload, error_log)
	upload_from_folder(get_files_path(is_private=1), "/private/files", dropbox_client, did_not_upload, error_log)
	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox():
	"""Back up the database and sync public/private files to Dropbox (OAuth2).

	Migrates a stored OAuth1 token to OAuth2 on first use. Returns
	``(did_not_upload, error_log)`` on success, or a ``(message, reason)``
	string pair when no access token could be obtained.
	"""
	if not frappe.db:
		frappe.connect()
	# upload database
	dropbox_settings = get_dropbox_settings()
	if not dropbox_settings['access_token']:
		# one-time migration: exchange the legacy OAuth1 token for OAuth2
		access_token = generate_oauth2_access_token_from_oauth1_token(dropbox_settings)
		if not access_token.get('oauth2_token'):
			return 'Failed backup upload', 'No Access Token exists! Please generate the access token for Dropbox.'
		dropbox_settings['access_token'] = access_token['oauth2_token']
		set_dropbox_access_token(access_token['oauth2_token'])
	dropbox_client = dropbox.Dropbox(dropbox_settings['access_token'])
	backup = new_backup(ignore_files=True)
	filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(filename, "/database", dropbox_client)
	# release the db connection during the long-running file sync
	frappe.db.close()
	# upload files to files folder
	did_not_upload = []
	error_log = []
	upload_from_folder(get_files_path(), "/files", dropbox_client, did_not_upload, error_log)
	upload_from_folder(get_files_path(is_private=1), "/private/files", dropbox_client, did_not_upload, error_log)
	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_s3():
	"""Create a fresh full backup and upload its three artefacts to S3.

	Uploads the database dump plus the private and public file archives
	into a datetime-named folder inside the configured bucket, then prunes
	old backups beyond the configured limit.
	"""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_backups_path

	settings = frappe.get_single("S3 Backup Settings")
	bucket = settings.bucket

	s3 = boto3.client(
		's3',
		aws_access_key_id=settings.access_key_id,
		aws_secret_access_key=settings.get_password('secret_access_key'),
		endpoint_url=settings.endpoint_url or 'https://s3.amazonaws.com'
	)

	odb = new_backup(ignore_files=False, backup_path_db=None,
		backup_path_files=None, backup_path_private_files=None, force=True)

	backups_dir = get_backups_path()
	db_file = os.path.join(backups_dir, os.path.basename(odb.backup_path_db))
	public_file = os.path.join(backups_dir, os.path.basename(odb.backup_path_files))
	private_file = os.path.join(backups_dir, os.path.basename(odb.backup_path_private_files))

	# the datetime prefix of the dump name becomes the destination folder
	folder = os.path.basename(db_file)[:15] + '/'

	for artefact in (db_file, private_file, public_file):
		upload_file_to_s3(artefact, folder, s3, bucket)

	delete_old_backups(settings.backup_limit, bucket)
def backup_to_service():
	"""Run the configured Backup Manager syncs (database, public and private files).

	Returns ``(did_not_upload, error_log)``; both are left empty here —
	presumably sync_folder reports failures through another channel.
	"""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path
	# upload files to files folder
	did_not_upload = []
	error_log = []
	if not frappe.db:
		frappe.connect()
	older_than = cint(frappe.db.get_value('Backup Manager', None, 'older_than'))
	if cint(frappe.db.get_value("Backup Manager", None, "enable_database")):
		# upload database
		backup = new_backup(ignore_files=True)
		# filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		# NOTE(review): get_backups_path is not imported in this function —
		# assumed to be imported at module level; confirm
		sync_folder(older_than, get_backups_path(), "database")
	BASE_DIR = os.path.join(get_backups_path(), '../file_backups')
	if cint(frappe.db.get_value("Backup Manager", None, "enable_files")):
		Backup_DIR = os.path.join(BASE_DIR, "files")
		compress_files(get_files_path(), Backup_DIR)
		sync_folder(older_than, Backup_DIR, "files")
	if cint(frappe.db.get_value("Backup Manager", None, "enable_private_files")):
		Backup_DIR = os.path.join(BASE_DIR, "private/files")
		compress_files(get_files_path(is_private=1), Backup_DIR)
		sync_folder(older_than, Backup_DIR, "private/files")
	frappe.db.close()
	return did_not_upload, list(set(error_log))
def backup_to_s3():
	"""Create a fresh full backup and upload it to Amazon S3 (legacy boto).

	Uploads the database dump plus the private and public file archives
	into a folder named after the company configured on
	"Amazon S3 Settings". Closes and reopens the db connection around
	the uploads.
	"""
	from boto.s3.connection import S3Connection
	from frappe.utils.backups import new_backup
	from frappe.utils import get_backups_path

	if not frappe.db:
		frappe.connect()

	conn = S3Connection(
		frappe.db.get_value("Amazon S3 Settings", None, "aws_access_key_id"),
		frappe.db.get_value("Amazon S3 Settings", None, "secret_access_key"),
	)

	# upload database
	# folder on S3: company name lower-cased, whitespace -> underscores;
	# raw string fixes the invalid "\s" escape sequence
	company = re.sub(r"\s", "_", str(frappe.db.get_value("Amazon S3 Settings", None, "company_name")).lower())
	# read the plan once instead of once per upload
	backup_plan = frappe.db.get_value("Amazon S3 Settings", None, "backup_plan")

	backup = new_backup(
		ignore_files=False,
		backup_path_db=None,
		backup_path_files=None,
		backup_path_private_files=None,
		force=True
	)
	db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
	private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files))

	upload_file_to_s3(db_filename, company, conn, backup_plan)
	upload_file_to_s3(private_files, company, conn, backup_plan)
	upload_file_to_s3(files_filename, company, conn, backup_plan)
	frappe.db.close()
	frappe.connect()
def backup_to_dropbox():
	"""Take a database-only backup and sync it plus public/private files to Dropbox.

	Uses the legacy OAuth1 Dropbox SDK (session + DropboxClient).
	Returns a tuple ``(did_not_upload, error_log)`` of files that failed to sync.
	"""
	from dropbox import client, session
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path, get_backups_path
	if not frappe.db:
		frappe.connect()
	# OAuth1 session from site-level app keys plus per-site tokens
	sess = session.DropboxSession(frappe.conf.dropbox_access_key, frappe.conf.dropbox_secret_key, "app_folder")
	sess.set_token(frappe.db.get_value("Dropbox Backup", None, "dropbox_access_key"),
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_secret"))
	dropbox_client = client.DropboxClient(sess)
	# upload database
	backup = new_backup(ignore_files=True)
	filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(filename, "/database", dropbox_client)
	# release the db connection during the long-running file sync
	frappe.db.close()
	# upload files to files folder
	did_not_upload = []
	error_log = []
	upload_from_folder(get_files_path(), "/files", dropbox_client, did_not_upload, error_log)
	upload_from_folder(get_files_path(is_private=1), "/private/files", dropbox_client, did_not_upload, error_log)
	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_s3():
	"""Create a fresh full backup and upload its three artefacts to S3.

	Uploads the database dump plus the private and public file archives
	into a datetime-named folder inside the configured bucket, then prunes
	old backups beyond the configured limit.
	"""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_backups_path

	settings = frappe.get_single("S3 Backup Settings")
	bucket = settings.bucket

	s3 = boto3.client(
		's3',
		aws_access_key_id=settings.access_key_id,
		aws_secret_access_key=settings.get_password('secret_access_key'),
		endpoint_url=settings.endpoint_url or 'https://s3.amazonaws.com'
	)

	odb = new_backup(ignore_files=False, backup_path_db=None,
		backup_path_files=None, backup_path_private_files=None, force=True)

	backups_dir = get_backups_path()
	db_file = os.path.join(backups_dir, os.path.basename(odb.backup_path_db))
	public_file = os.path.join(backups_dir, os.path.basename(odb.backup_path_files))
	private_file = os.path.join(backups_dir, os.path.basename(odb.backup_path_private_files))

	# the datetime prefix of the dump name becomes the destination folder
	folder = os.path.basename(db_file)[:15] + '/'

	for artefact in (db_file, private_file, public_file):
		upload_file_to_s3(artefact, folder, s3, bucket)

	delete_old_backups(settings.backup_limit, bucket)
def backup_to_gdrive():
	"""Back up the database and incrementally sync public files to Google Drive.

	A local file is uploaded only when no remote file with the same title
	and byte size exists. Returns nothing; upload failures are collected
	in local lists.
	"""
	from frappe.utils.backups import new_backup
	if not frappe.db:
		frappe.connect()
	get_gdrive_flow()
	credentials_json = frappe.db.get_value("Backup Manager", None, "gdrive_credentials")
	credentials = oauth2client.client.Credentials.new_from_json(credentials_json)
	http = httplib2.Http()
	http = credentials.authorize(http)
	drive_service = build('drive', 'v2', http=http)
	# upload database
	backup = new_backup()
	path = os.path.join(frappe.local.site_path, "public", "backups")
	filename = os.path.join(path, os.path.basename(backup.backup_path_db))
	# upload files to database folder
	upload_files(filename, 'application/x-gzip', drive_service,
		frappe.db.get_value("Backup Manager", None, "database_folder_id"))
	# upload files to files folder
	did_not_upload = []
	error_log = []
	files_folder_id = frappe.db.get_value("Backup Manager", None, "files_folder_id")
	frappe.db.close()
	path = os.path.join(frappe.local.site_path, "public", "files")
	# the remote listing is loop-invariant: fetch it once, not once per local file
	children = drive_service.children().list(folderId=files_folder_id).execute()
	for filename in os.listdir(path):
		filename = cstr(filename)
		found = False
		filepath = os.path.join(path, filename)
		ext = filename.split('.')[-1]
		size = os.path.getsize(filepath)
		if ext == 'gz' or ext == 'gzip':
			mimetype = 'application/x-gzip'
		else:
			mimetype = mimetypes.types_map.get("." + ext) or "application/octet-stream"
		# Compare Local File with Server File
		for child in children.get('items', []):
			file = drive_service.files().get(fileId=child['id']).execute()
			if filename == file['title'] and size == int(file['fileSize']):
				found = True
				break
		if not found:
			try:
				upload_files(filepath, mimetype, drive_service, files_folder_id)
			except Exception as e:  # was `except Exception, e` — Py2-only syntax
				did_not_upload.append(filename)
				error_log.append(cstr(e))
def backup_to_s3():
	"""Upload the latest (or a freshly created) backup set to S3.

	Honours the ``backup_files`` flag on "S3 Backup Settings" and
	``frappe.flags.create_new_backup``; regenerates file archives when the
	latest backup is missing them.
	"""
	from frappe.utils import get_backups_path
	from frappe.utils.backups import new_backup

	doc = frappe.get_single("S3 Backup Settings")
	bucket = doc.bucket
	backup_files = cint(doc.backup_files)

	conn = boto3.client(
		"s3",
		aws_access_key_id=doc.access_key_id,
		aws_secret_access_key=doc.get_password("secret_access_key"),
		endpoint_url=doc.endpoint_url or "https://s3.amazonaws.com",
	)

	if frappe.flags.create_new_backup:
		backup = new_backup(
			ignore_files=False,
			backup_path_db=None,
			backup_path_files=None,
			backup_path_private_files=None,
			force=True,
		)
		db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		site_config = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_conf))
		if backup_files:
			files_filename = os.path.join(
				get_backups_path(), os.path.basename(backup.backup_path_files))
			private_files = os.path.join(
				get_backups_path(), os.path.basename(backup.backup_path_private_files))
	else:
		if backup_files:
			db_filename, site_config, files_filename, private_files = get_latest_backup_file(
				with_files=backup_files)
			# regenerate the file archives when the latest backup lacks them
			if not files_filename or not private_files:
				generate_files_backup()
				db_filename, site_config, files_filename, private_files = get_latest_backup_file(
					with_files=backup_files)
		else:
			db_filename, site_config = get_latest_backup_file()

	folder = os.path.basename(db_filename)[:15] + "/"  # for adding datetime to folder name
	upload_file_to_s3(db_filename, folder, conn, bucket)
	upload_file_to_s3(site_config, folder, conn, bucket)

	if backup_files:
		if private_files:
			upload_file_to_s3(private_files, folder, conn, bucket)
		if files_filename:
			upload_file_to_s3(files_filename, folder, conn, bucket)
def backup_to_gdrive():
	"""Back up the database and incrementally sync public files to Google Drive.

	A local file is uploaded only when no remote file with the same title
	and byte size exists. Returns nothing; upload failures are collected
	in local lists.
	"""
	from frappe.utils.backups import new_backup
	if not frappe.db:
		frappe.connect()
	get_gdrive_flow()
	credentials_json = frappe.db.get_value("Backup Manager", None, "gdrive_credentials")
	credentials = oauth2client.client.Credentials.new_from_json(credentials_json)
	http = httplib2.Http()
	http = credentials.authorize(http)
	drive_service = build("drive", "v2", http=http)
	# upload database
	backup = new_backup()
	path = os.path.join(frappe.local.site_path, "public", "backups")
	filename = os.path.join(path, os.path.basename(backup.backup_path_db))
	# upload files to database folder
	upload_files(
		filename, "application/x-gzip", drive_service,
		frappe.db.get_value("Backup Manager", None, "database_folder_id")
	)
	# upload files to files folder
	did_not_upload = []
	error_log = []
	files_folder_id = frappe.db.get_value("Backup Manager", None, "files_folder_id")
	frappe.db.close()
	path = os.path.join(frappe.local.site_path, "public", "files")
	# the remote listing is loop-invariant: fetch it once, not once per local file
	children = drive_service.children().list(folderId=files_folder_id).execute()
	for filename in os.listdir(path):
		filename = cstr(filename)
		found = False
		filepath = os.path.join(path, filename)
		ext = filename.split(".")[-1]
		size = os.path.getsize(filepath)
		if ext == "gz" or ext == "gzip":
			mimetype = "application/x-gzip"
		else:
			mimetype = mimetypes.types_map.get("." + ext) or "application/octet-stream"
		# Compare Local File with Server File
		for child in children.get("items", []):
			file = drive_service.files().get(fileId=child["id"]).execute()
			if filename == file["title"] and size == int(file["fileSize"]):
				found = True
				break
		if not found:
			try:
				upload_files(filepath, mimetype, drive_service, files_folder_id)
			except Exception as e:  # was `except Exception, e` — Py2-only syntax
				did_not_upload.append(filename)
				error_log.append(cstr(e))
def backup_to_dropbox(upload_db_backup=True):
	"""Back up the database (plus site config) and sync files to Dropbox.

	When ``upload_db_backup`` is False only the file sync runs. Returns
	``(did_not_upload, error_log)``, or a ``(message, reason)`` string
	pair when no access token could be obtained.
	"""
	if not frappe.db:
		frappe.connect()

	# upload database
	dropbox_settings = get_dropbox_settings()

	if not dropbox_settings["access_token"]:
		# one-time migration: exchange the legacy OAuth1 token for OAuth2
		access_token = generate_oauth2_access_token_from_oauth1_token(dropbox_settings)

		if not access_token.get("oauth2_token"):
			return (
				"Failed backup upload",
				"No Access Token exists! Please generate the access token for Dropbox.",
			)

		dropbox_settings["access_token"] = access_token["oauth2_token"]
		set_dropbox_access_token(access_token["oauth2_token"])

	# timeout=None: wait indefinitely for large uploads rather than time out
	dropbox_client = dropbox.Dropbox(
		oauth2_access_token=dropbox_settings["access_token"], timeout=None)

	if upload_db_backup:
		if frappe.flags.create_new_backup:
			backup = new_backup(ignore_files=True)
			filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
			site_config = os.path.join(
				get_backups_path(), os.path.basename(backup.backup_path_conf))
		else:
			filename, site_config = get_latest_backup_file()

		upload_file_to_dropbox(filename, "/database", dropbox_client)
		upload_file_to_dropbox(site_config, "/database", dropbox_client)

		# delete older databases
		if dropbox_settings["no_of_backups"]:
			delete_older_backups(dropbox_client, "/database", dropbox_settings["no_of_backups"])

	# upload files to files folder
	did_not_upload = []
	error_log = []

	if dropbox_settings["file_backup"]:
		upload_from_folder(get_files_path(), 0, "/files", dropbox_client, did_not_upload, error_log)
		upload_from_folder(get_files_path(is_private=1), 1, "/private/files", dropbox_client, did_not_upload, error_log)

	return did_not_upload, list(set(error_log))
def backup_to_dropbox():
	"""Back up the database and incrementally sync public files to Dropbox.

	A file is re-uploaded only when no remote entry with the same basename
	and byte size exists. Returns ``(did_not_upload, error_log)``.
	"""
	from dropbox import client, session
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path, get_backups_path
	if not frappe.db:
		frappe.connect()
	sess = session.DropboxSession(frappe.conf.dropbox_access_key, frappe.conf.dropbox_secret_key, "app_folder")
	sess.set_token(
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_key"),
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_secret"))
	dropbox_client = client.DropboxClient(sess)
	# upload database
	backup = new_backup()
	filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(filename, "/database", dropbox_client)
	frappe.db.close()
	# remote listing used to skip files that are already up to date
	response = dropbox_client.metadata("/files")
	# upload files to files folder
	did_not_upload = []
	error_log = []
	path = get_files_path()
	for filename in os.listdir(path):
		filename = cstr(filename)
		if filename in ignore_list:
			continue
		found = False
		filepath = os.path.join(path, filename)
		for file_metadata in response["contents"]:
			# same basename and same size -> treat as already uploaded
			if os.path.basename(filepath) == os.path.basename(
					file_metadata["path"]) and os.stat(
					filepath).st_size == int(file_metadata["bytes"]):
				found = True
				break
		if not found:
			try:
				upload_file_to_dropbox(filepath, "/files", dropbox_client)
			except Exception:
				did_not_upload.append(filename)
				error_log.append(frappe.get_traceback())
	frappe.connect()
	return did_not_upload, list(set(error_log))
def upload_system_backup_to_google_drive():
	"""
	Upload system backup to Google Drive.

	Takes a fresh full backup and uploads the database dump plus the
	public and private file archives into the configured Drive folder.
	Failures are reported by mail and message; returns a success string.
	"""
	# Get Google Drive Object
	google_drive, account = get_google_drive_object()

	# Check if folder exists in Google Drive
	check_for_folder_in_google_drive()
	account.load_from_db()

	progress(1, "Backing up Data.")
	backup = new_backup()

	# only basenames are needed; get_absolute_path resolves them later
	fileurl_backup = os.path.basename(backup.backup_path_db)
	fileurl_public_files = os.path.basename(backup.backup_path_files)
	fileurl_private_files = os.path.basename(backup.backup_path_private_files)

	for fileurl in [fileurl_backup, fileurl_public_files, fileurl_private_files]:
		file_metadata = {
			"name": fileurl,
			"parents": [account.backup_folder_id]
		}
		try:
			media = MediaFileUpload(get_absolute_path(filename=fileurl), mimetype="application/gzip", resumable=True)
		except IOError as e:
			# fixed duplicated word in the user-facing message ("locate locate")
			frappe.throw(_("Google Drive - Could not locate - {0}").format(e))
		try:
			progress(2, "Uploading backup to Google Drive.")
			google_drive.files().create(body=file_metadata, media_body=media, fields="id").execute()
		except HttpError as e:
			send_email(success=False, error=e)
			frappe.msgprint(_("Google Drive - Could not upload backup - Error {0}").format(e))

	progress(3, "Uploading successful.")
	frappe.db.set_value("Google Drive", None, "last_backup_on", frappe.utils.now_datetime())
	send_email(success=True)
	return _("Google Drive Backup Successful.")
def take(self):
	"""Create a database snapshot, reusing an existing one when present.

	Takes a database-only backup, moves the dump from the backups
	directory into the snapshots directory (created on demand) and
	records the new path.
	"""
	# Nothing to do when a snapshot has already been taken.
	if self.exists():
		return self.get()

	dump = new_backup(ignore_files=True)
	dump_name = os.path.basename(dump.backup_path_db)

	snapshots_dir = get_snapshots_path()
	source = os.path.join(get_backups_path(), dump_name)
	target = os.path.join(snapshots_dir, dump_name)

	if not os.path.exists(snapshots_dir):
		os.mkdir(snapshots_dir)

	# Relocate the dump out of the regular backups directory.
	os.rename(source, target)
	self.update_path(target)
def backup_to_service():
	"""Create local backups per Backup Manager settings and optionally sync to the cloud.

	Returns ``(did_not_upload, error_log)`` accumulated by ``sync_folder``.
	"""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path
	# upload files to files folder
	did_not_upload = []
	error_log = []
	if not frappe.db:
		frappe.connect()
	older_than_hrs = cint(
		frappe.db.get_value('Backup Manager', None, 'older_than'))
	cloud_sync = cint(frappe.db.get_value('Backup Manager', None, 'cloud_sync'))
	# site = cstr(frappe.local.site)
	site = get_site_base_path()[2:]  # drop the leading "./" from the site path
	if cint(frappe.db.get_value("Backup Manager", None, "enable_database")):
		# upload database
		backup = new_backup(older_than_hrs, ignore_files=True)
		# filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		if cloud_sync:
			sync_folder(site, older_than_hrs, get_backups_path(), "database", did_not_upload, error_log)
	BASE_DIR = os.path.join(get_backups_path(), '../file_backups')
	if cint(frappe.db.get_value("Backup Manager", None, "enable_files")):
		Backup_DIR = os.path.join(BASE_DIR, "files")
		compress_files(get_files_path(), Backup_DIR)
		if cloud_sync:
			sync_folder(site, older_than_hrs, Backup_DIR, "public-files", did_not_upload, error_log)
	if cint(frappe.db.get_value("Backup Manager", None, "enable_private_files")):
		Backup_DIR = os.path.join(BASE_DIR, "private/files")
		compress_files(get_files_path(is_private=1), Backup_DIR, "private")
		if cloud_sync:
			sync_folder(site, older_than_hrs, Backup_DIR, "private-files", did_not_upload, error_log)
	frappe.db.close()
	# frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox():
	"""Back up the database and incrementally sync public files to Dropbox.

	A file is re-uploaded only when no remote entry with the same basename
	and byte size exists. Returns ``(did_not_upload, error_log)``.
	"""
	from dropbox import client, session
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path, get_backups_path
	if not frappe.db:
		frappe.connect()
	sess = session.DropboxSession(frappe.conf.dropbox_access_key, frappe.conf.dropbox_secret_key, "app_folder")
	sess.set_token(frappe.db.get_value("Dropbox Backup", None, "dropbox_access_key"),
		frappe.db.get_value("Dropbox Backup", None, "dropbox_access_secret"))
	dropbox_client = client.DropboxClient(sess)
	# upload database
	backup = new_backup()
	filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	upload_file_to_dropbox(filename, "/database", dropbox_client)
	frappe.db.close()
	# remote listing used to skip files that are already up to date
	response = dropbox_client.metadata("/files")
	# upload files to files folder
	did_not_upload = []
	error_log = []
	path = get_files_path()
	for filename in os.listdir(path):
		filename = cstr(filename)
		if filename in ignore_list:
			continue
		found = False
		filepath = os.path.join(path, filename)
		for file_metadata in response["contents"]:
			# same basename and same size -> treat as already uploaded
			if os.path.basename(filepath) == os.path.basename(file_metadata["path"]) and os.stat(filepath).st_size == int(file_metadata["bytes"]):
				found = True
				break
		if not found:
			try:
				upload_file_to_dropbox(filepath, "/files", dropbox_client)
			except Exception:
				did_not_upload.append(filename)
				error_log.append(frappe.get_traceback())
	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox(upload_db_backup=True):
	"""Back up the database and sync files to Dropbox (OAuth2), pruning old backups.

	When ``upload_db_backup`` is False only the file sync runs. Returns
	``(did_not_upload, error_log)``, or a ``(message, reason)`` string
	pair when no access token could be obtained.
	"""
	if not frappe.db:
		frappe.connect()
	# upload database
	dropbox_settings = get_dropbox_settings()
	if not dropbox_settings['access_token']:
		# one-time migration: exchange the legacy OAuth1 token for OAuth2
		access_token = generate_oauth2_access_token_from_oauth1_token(
			dropbox_settings)
		if not access_token.get('oauth2_token'):
			return 'Failed backup upload', 'No Access Token exists! Please generate the access token for Dropbox.'
		dropbox_settings['access_token'] = access_token['oauth2_token']
		set_dropbox_access_token(access_token['oauth2_token'])
	dropbox_client = dropbox.Dropbox(dropbox_settings['access_token'])
	if upload_db_backup:
		if frappe.flags.create_new_backup:
			backup = new_backup(ignore_files=True)
			filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		else:
			filename = get_latest_backup_file()
		upload_file_to_dropbox(filename, "/database", dropbox_client)
		# delete older databases
		if dropbox_settings['no_of_backups']:
			delete_older_backups(dropbox_client, "/database", dropbox_settings['no_of_backups'])
	# upload files to files folder
	did_not_upload = []
	error_log = []
	if dropbox_settings['file_backup']:
		upload_from_folder(get_files_path(), 0, "/files", dropbox_client, did_not_upload, error_log)
		upload_from_folder(get_files_path(is_private=1), 1, "/private/files", dropbox_client, did_not_upload, error_log)
	return did_not_upload, list(set(error_log))
def backup_to_s3():
	"""Create a fresh full backup and upload the file archives to S3 (ap-south-1).

	Uploads only the public and private file archives into a folder named
	after the company configured on "Amazon S3 Settings"; the database
	dump upload was deliberately disabled upstream.
	"""
	import boto
	from frappe.utils.backups import new_backup
	from frappe.utils import get_backups_path
	if not frappe.db:
		frappe.connect()
	conn = boto.s3.connect_to_region(
		'ap-south-1',
		aws_access_key_id=frappe.db.get_value("Amazon S3 Settings", None, "aws_access_key_id"),
		aws_secret_access_key=frappe.db.get_value("Amazon S3 Settings", None, "secret_access_key"),
		is_secure=True,
		calling_format=boto.s3.connection.OrdinaryCallingFormat(),
	)
	# upload database
	# folder on S3: company name lower-cased, whitespace -> underscores;
	# raw string fixes the invalid '\s' escape sequence
	company = re.sub(
		r'\s', '_',
		str(frappe.db.get_value("Amazon S3 Settings", None, "company_name")).lower())
	# read the plan once instead of once per upload
	backup_plan = frappe.db.get_value("Amazon S3 Settings", None, "backup_plan")
	backup = new_backup(ignore_files=False,
		backup_path_db=None,
		backup_path_files=None,
		backup_path_private_files=None,
		force=True)
	# NOTE: the database dump is intentionally NOT uploaded here (the
	# upload was commented out upstream); only file archives go to S3.
	files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
	private_files = os.path.join(
		get_backups_path(), os.path.basename(backup.backup_path_private_files))
	upload_file_to_s3(private_files, company, conn, backup_plan)
	upload_file_to_s3(files_filename, company, conn, backup_plan)
	frappe.db.close()
	frappe.connect()
def backup_to_service():
	"""Create local backups per Backup Manager settings and optionally sync to the cloud.

	Returns ``(did_not_upload, error_log)`` accumulated by ``sync_folder``.
	"""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path
	# upload files to files folder
	did_not_upload = []
	error_log = []
	if not frappe.db:
		frappe.connect()
	older_than_hrs = cint(frappe.db.get_value('Backup Manager', None, 'older_than'))
	cloud_sync = cint(frappe.db.get_value('Backup Manager', None, 'cloud_sync'))
	# site = cstr(frappe.local.site)
	site = get_site_base_path()[2:]  # drop the leading "./" from the site path
	if cint(frappe.db.get_value("Backup Manager", None, "enable_database")):
		# upload database
		backup = new_backup(older_than_hrs,ignore_files=True)
		# filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		if cloud_sync:
			sync_folder(site,older_than_hrs,get_backups_path(), "database",did_not_upload,error_log)
	BASE_DIR = os.path.join( get_backups_path(), '../file_backups' )
	if cint(frappe.db.get_value("Backup Manager", None, "enable_files")):
		Backup_DIR = os.path.join(BASE_DIR, "files")
		compress_files(get_files_path(), Backup_DIR)
		if cloud_sync:
			sync_folder(site,older_than_hrs,Backup_DIR, "public-files",did_not_upload,error_log)
	if cint(frappe.db.get_value("Backup Manager", None, "enable_private_files")):
		Backup_DIR = os.path.join(BASE_DIR, "private/files")
		compress_files(get_files_path(is_private=1), Backup_DIR,"private")
		if cloud_sync:
			sync_folder(site,older_than_hrs,Backup_DIR, "private-files",did_not_upload,error_log)
	frappe.db.close()
	# frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_ftp(upload_db_backup=True):
	"""Back up the database to FTP and sync public/private files.

	Returns ``(did_not_upload, error_log)`` on success, or a
	``(message, reason)`` string pair when host or username is missing.
	"""
	if not frappe.db:
		frappe.connect()
	# upload database
	ftp_settings, use_tls = get_ftp_settings()
	if not ftp_settings['host']:
		return 'Failed backup upload', 'No FTP host! Please enter valid host for FTP.'
	if not ftp_settings['username']:
		return 'Failed backup upload', 'No FTP username! Please enter valid username for FTP.'
	# NOTE(review): ftp_settings is splatted into the FTP constructor but is
	# also indexed for 'no_of_backups'/'file_backup' below — confirm that
	# get_ftp_settings returns only constructor-compatible keys (or that
	# FTP/FTP_TLS here are project wrappers), otherwise this raises TypeError.
	ftp_client = FTP_TLS(**ftp_settings) if use_tls else FTP(**ftp_settings)
	try:
		if upload_db_backup:
			backup = new_backup(ignore_files=True)
			filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
			upload_file_to_ftp(filename, "/database", ftp_client)
			# delete older databases
			if ftp_settings['no_of_backups']:
				delete_older_backups(ftp_client, "/database", ftp_settings['no_of_backups'])
		# upload files to files folder
		did_not_upload = []
		error_log = []
		if ftp_settings['file_backup']:
			upload_from_folder(get_files_path(), 0, "/files", ftp_client, did_not_upload, error_log)
			upload_from_folder(get_files_path(is_private=1), 1, "/private/files", ftp_client, did_not_upload, error_log)
		return did_not_upload, list(set(error_log))
	finally:
		# always close the control connection, even on failure
		ftp_client.quit()
def backup_to_nextcloud(upload_db_backup=True, ignore_files=True):
	"""Take a new backup and upload it to Nextcloud.

	Uploads the database dump and, when ``ignore_files`` is False and the
	database upload succeeded, the public and private file archives too.
	Returns ``(did_not_upload, error_log)``; ``error_log`` carries the
	upload response when it contains "Error".
	"""
	if not frappe.db:
		frappe.connect()
	respnc = ""
	backup = None
	if upload_db_backup:
		backup = new_backup(ignore_files=ignore_files)
		filebackup = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		respnc = filebackup_upload(filebackup)
	# file backup — guarded on `backup` because the original referenced it
	# unconditionally and raised NameError when upload_db_backup was False
	if backup is not None and not ignore_files and "Error" not in respnc:
		filebackup = os.path.join(
			get_backups_path(), os.path.basename(backup.backup_path_files))
		filebackup_upload(filebackup)
		filebackup = os.path.join(
			get_backups_path(), os.path.basename(backup.backup_path_private_files))
		filebackup_upload(filebackup)
	did_not_upload = []
	# only the database upload response is surfaced, matching the original
	error_log = [respnc] if "Error" in respnc else []
	return did_not_upload, list(set(error_log))
def backup_to_dropbox():
	"""Back up the database and sync public/private files to Dropbox.

	Returns a tuple ``(did_not_upload, error_log)``.
	"""
	if not frappe.db:
		frappe.connect()
	dropbox_client = get_dropbox_client()
	# upload database
	backup = new_backup(ignore_files=True)
	filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
	# the upload helpers return a client, so it is rebound after each call —
	# presumably to propagate a refreshed token; confirm against the helpers
	dropbox_client = upload_file_to_dropbox(filename, "/database", dropbox_client)
	# release the db connection during the long-running file sync
	frappe.db.close()
	# upload files to files folder
	did_not_upload = []
	error_log = []
	dropbox_client = upload_from_folder(get_files_path(), "/files", dropbox_client, did_not_upload, error_log)
	dropbox_client = upload_from_folder(get_files_path(is_private=1), "/private/files", dropbox_client, did_not_upload, error_log)
	frappe.connect()
	return did_not_upload, list(set(error_log))
def backup_to_dropbox(upload_db_backup=True):
	"""Back up the database and sync files to Dropbox (OAuth2, 60s timeout).

	When ``upload_db_backup`` is False only the file sync runs. Returns
	``(did_not_upload, error_log)``, or a ``(message, reason)`` string
	pair when no access token could be obtained.
	"""
	if not frappe.db:
		frappe.connect()
	# upload database
	dropbox_settings = get_dropbox_settings()
	if not dropbox_settings['access_token']:
		# one-time migration: exchange the legacy OAuth1 token for OAuth2
		access_token = generate_oauth2_access_token_from_oauth1_token(
			dropbox_settings)
		if not access_token.get('oauth2_token'):
			return 'Failed backup upload', 'No Access Token exists! Please generate the access token for Dropbox.'
		dropbox_settings['access_token'] = access_token['oauth2_token']
		set_dropbox_access_token(access_token['oauth2_token'])
	# Wait for 60 seconds before throwing ReadTimeout, in case server doesn't respond
	dropbox_client = dropbox.Dropbox(dropbox_settings['access_token'], timeout=60)
	if upload_db_backup:
		backup = new_backup(ignore_files=True)
		filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		upload_file_to_dropbox(filename, "/database", dropbox_client)
	# upload files to files folder
	did_not_upload = []
	error_log = []
	if dropbox_settings['file_backup']:
		upload_from_folder(get_files_path(), 0, "/files", dropbox_client, did_not_upload, error_log)
		upload_from_folder(get_files_path(is_private=1), 1, "/private/files", dropbox_client, did_not_upload, error_log)
	return did_not_upload, list(set(error_log))
def backup_to_service():
	"""Prune old local backups, take a fresh full backup and sync it per Backup Settings.

	Python 2 code (print statements). Returns ``(did_not_upload, error_log)``
	accumulated by ``sync_folder``.
	"""
	from frappe.utils.backups import new_backup
	from frappe.utils import get_files_path
	#delete old
	path = get_site_path('private', 'backups')
	files = [x for x in os.listdir(path) if os.path.isfile(os.path.join(path, x))]
	backup_limit = get_scheduled_backup_limit()
	# prune each artefact family independently down to the backup limit
	endswith='sql.gz'
	if len(files) > backup_limit:
		cleanup_old_backups(path, files, backup_limit,endswith)
	endswith='files.tar'
	if len(files) > backup_limit:
		cleanup_old_backups(path, files, backup_limit,endswith)
	endswith='private-files.tar'
	if len(files) > backup_limit:
		cleanup_old_backups(path, files, backup_limit,endswith)
	#delete old
	# upload files to files folder
	did_not_upload = []
	error_log = []
	if not frappe.db:
		frappe.connect()
	older_than = cint(frappe.db.get_value('Backup Settings', None, 'older_than'))
	cloud_sync = cint(frappe.db.get_value('Backup Settings', None, 'cloud_sync'))
	site = frappe.db.get_value('Global Defaults', None, 'default_company')
	if cint(frappe.db.get_value("Backup Settings", None, "enable_database")):
		# upload database
		# backup = new_backup(older_than,ignore_files=True)
		backup = new_backup(ignore_files=False, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, force=True)
		db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
		private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files))
		folder = os.path.basename(db_filename)[:15] + '/'
		print db_filename
		print files_filename
		print private_files
		print folder
		# filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		if cloud_sync:
			print 'inside uploading db to cloud'
			sync_folder(site,older_than,db_filename, "database",did_not_upload,error_log)
	BASE_DIR = os.path.join( get_backups_path(), '../file_backups' )
	# print(get_backups_path())
	# print BASE_DIR
	# NOTE(review): files_filename/private_files are assigned only inside the
	# enable_database branch above — the two branches below appear to raise
	# NameError when enable_database is disabled; confirm intended behaviour.
	if cint(frappe.db.get_value("Backup Settings", None, "enable_public_files")):
		# Backup_DIR = os.path.join(BASE_DIR, "files")
		# compress_files(get_files_path(), Backup_DIR)
		if cloud_sync:
			print 'inside uploading public to cloud'
			sync_folder(site,older_than,files_filename, "public-files",did_not_upload,error_log)
	if cint(frappe.db.get_value("Backup Settings", None, "enable_private_files")):
		# Backup_DIR = os.path.join(BASE_DIR, "private/files")
		# compress_files(get_files_path(is_private=1), Backup_DIR)
		if cloud_sync:
			print 'inside uploading private to cloud'
			sync_folder(site,older_than,private_files, "private-files",did_not_upload,error_log)
	frappe.db.close()
	# frappe.connect()
	return did_not_upload, list(set(error_log))
def prepare_backup(self):
	"""Create (or reuse) a site backup and return its component paths.

	Returns a tuple ``(database, config, public, private)`` taken from the
	most recent backup not older than 30 days.
	"""
	# Skip the file archives entirely when file backups are disabled.
	odb = new_backup(
		ignore_files=not self.backup_files,
		force=frappe.flags.create_new_backup,
	)
	database, public, private, config = odb.get_recent_backup(older_than=24 * 30)
	return database, config, public, private