Example no. 1
0
def backup_to_s3():
    """Upload the site's backup (database, site config, optional file
    archives) to the S3 bucket configured in "S3 Backup Settings"."""
    from frappe.utils import get_backups_path
    from frappe.utils.backups import new_backup

    settings = frappe.get_single("S3 Backup Settings")
    bucket = settings.bucket
    backup_files = cint(settings.backup_files)

    conn = boto3.client(
        "s3",
        aws_access_key_id=settings.access_key_id,
        aws_secret_access_key=settings.get_password("secret_access_key"),
        endpoint_url=settings.endpoint_url or "https://s3.amazonaws.com",
    )

    def _in_backups_dir(original_path):
        # Backups live in the site's backup directory; re-anchor the basename there.
        return os.path.join(get_backups_path(), os.path.basename(original_path))

    if frappe.flags.create_new_backup:
        # Take a fresh backup now rather than reusing what is on disk.
        backup = new_backup(
            ignore_files=False,
            backup_path_db=None,
            backup_path_files=None,
            backup_path_private_files=None,
            force=True,
        )
        db_filename = _in_backups_dir(backup.backup_path_db)
        site_config = _in_backups_dir(backup.backup_path_conf)
        if backup_files:
            files_filename = _in_backups_dir(backup.backup_path_files)
            private_files = _in_backups_dir(backup.backup_path_private_files)
    elif backup_files:
        db_filename, site_config, files_filename, private_files = get_latest_backup_file(
            with_files=backup_files)

        if not files_filename or not private_files:
            # No file archives on disk yet — create them and re-read the paths.
            generate_files_backup()
            db_filename, site_config, files_filename, private_files = get_latest_backup_file(
                with_files=backup_files)
    else:
        db_filename, site_config = get_latest_backup_file()

    # The first 15 characters of the db dump name carry its timestamp,
    # so each run uploads into a datetime-named folder.
    folder = os.path.basename(db_filename)[:15] + "/"

    upload_file_to_s3(db_filename, folder, conn, bucket)
    upload_file_to_s3(site_config, folder, conn, bucket)

    if backup_files:
        if private_files:
            upload_file_to_s3(private_files, folder, conn, bucket)
        if files_filename:
            upload_file_to_s3(files_filename, folder, conn, bucket)
Example no. 2
0
def upload_system_backup_to_google_drive():
    """Upload a system backup to the configured Google Drive folder."""
    # Authenticated Drive client plus the "Google Drive" settings document.
    google_drive, account = get_google_drive_object()

    # Ensure the destination folder exists, then reload the settings
    # (the folder check may have written the folder id back to the db).
    check_for_folder_in_google_drive()
    account.load_from_db()

    validate_file_size()

    if frappe.flags.create_new_backup:
        set_progress(1, "Backing up Data.")
        backup = new_backup()
        file_urls = [backup.backup_path_db, backup.site_config_backup_path]
        if account.file_backup:
            file_urls += [
                backup.backup_path_files,
                backup.backup_path_private_files,
            ]
    else:
        if account.file_backup:
            backup_files()
        file_urls = get_latest_backup_file(with_files=account.file_backup)

    for fileurl in file_urls:
        # Some entries can be empty (e.g. no file backup); skip them.
        if not fileurl:
            continue

        file_metadata = {
            "name": fileurl,
            "parents": [account.backup_folder_id],
        }

        try:
            media = MediaFileUpload(
                get_absolute_path(filename=fileurl),
                mimetype="application/gzip",
                resumable=True,
            )
        except IOError as e:
            frappe.throw(_("Google Drive - Could not locate - {0}").format(e))

        try:
            set_progress(2, "Uploading backup to Google Drive.")
            google_drive.files().create(
                body=file_metadata, media_body=media, fields="id"
            ).execute()
        except HttpError as e:
            # Report the failed upload by email but keep going with the rest.
            send_email(False,
                       "Google Drive",
                       "Google Drive",
                       "email",
                       error_status=e)

    set_progress(3, "Uploading successful.")
    frappe.db.set_value("Google Drive", None, "last_backup_on",
                        frappe.utils.now_datetime())
    send_email(True, "Google Drive", "Google Drive", "email")
    return _("Google Drive Backup Successful.")
Example no. 3
0
def backup_to_s3():
    """Back up the site to the S3 bucket from "S3 Backup Settings".

    Takes a fresh backup when ``frappe.flags.create_new_backup`` is set,
    otherwise reuses the latest backup on disk; uploads the database dump,
    site config and (optionally) the public/private file archives, then
    prunes backups beyond ``doc.backup_limit``.
    """
    from frappe.utils.backups import new_backup
    from frappe.utils import get_backups_path

    doc = frappe.get_single("S3 Backup Settings")
    bucket = doc.bucket
    backup_files = cint(doc.backup_files)

    conn = boto3.client(
        's3',
        aws_access_key_id=doc.access_key_id,
        aws_secret_access_key=doc.get_password('secret_access_key'),
        endpoint_url=doc.endpoint_url or 'https://s3.amazonaws.com')

    # Default the file-archive paths so the upload guards below are safe
    # regardless of which branch runs.
    files_filename = private_files = None

    if frappe.flags.create_new_backup:
        backup = new_backup(ignore_files=False,
                            backup_path_db=None,
                            backup_path_files=None,
                            backup_path_private_files=None,
                            force=True)
        db_filename = os.path.join(get_backups_path(),
                                   os.path.basename(backup.backup_path_db))
        site_config = os.path.join(
            get_backups_path(),
            os.path.basename(backup.site_config_backup_path))
        if backup_files:
            files_filename = os.path.join(
                get_backups_path(), os.path.basename(backup.backup_path_files))
            private_files = os.path.join(
                get_backups_path(),
                os.path.basename(backup.backup_path_private_files))
    else:
        if backup_files:
            db_filename, site_config, files_filename, private_files = get_latest_backup_file(
                with_files=backup_files)
        else:
            db_filename, site_config = get_latest_backup_file()

    # First 15 characters of the dump name are its timestamp — one folder per run.
    folder = os.path.basename(db_filename)[:15] + '/'

    upload_file_to_s3(db_filename, folder, conn, bucket)
    upload_file_to_s3(site_config, folder, conn, bucket)
    if backup_files:
        # Fix: get_latest_backup_file() can return None for the file archives
        # when no file backup exists on disk; skip instead of passing None
        # to upload_file_to_s3.
        if private_files:
            upload_file_to_s3(private_files, folder, conn, bucket)
        if files_filename:
            upload_file_to_s3(files_filename, folder, conn, bucket)
    delete_old_backups(doc.backup_limit, bucket)
Example no. 4
0
def backup_to_dropbox(upload_db_backup=True):
    """Mirror the site's database backup and files to Dropbox.

    Returns ``(did_not_upload, error_log)`` from the file sync, or an
    error tuple when no Dropbox access token can be obtained.
    """
    if not frappe.db:
        frappe.connect()

    settings = get_dropbox_settings()

    if not settings["access_token"]:
        # Legacy installs may still hold an OAuth1 token; trade it for OAuth2.
        token = generate_oauth2_access_token_from_oauth1_token(settings)

        if not token.get("oauth2_token"):
            return (
                "Failed backup upload",
                "No Access Token exists! Please generate the access token for Dropbox.",
            )

        settings["access_token"] = token["oauth2_token"]
        set_dropbox_access_token(token["oauth2_token"])

    dropbox_client = dropbox.Dropbox(
        oauth2_access_token=settings["access_token"], timeout=None)

    if upload_db_backup:
        if frappe.flags.create_new_backup:
            backup = new_backup(ignore_files=True)
            backups_dir = get_backups_path()
            filename = os.path.join(
                backups_dir, os.path.basename(backup.backup_path_db))
            site_config = os.path.join(
                backups_dir, os.path.basename(backup.backup_path_conf))
        else:
            filename, site_config = get_latest_backup_file()

        upload_file_to_dropbox(filename, "/database", dropbox_client)
        upload_file_to_dropbox(site_config, "/database", dropbox_client)

        # Trim database backups beyond the configured retention count.
        if settings["no_of_backups"]:
            delete_older_backups(dropbox_client, "/database",
                                 settings["no_of_backups"])

    did_not_upload = []
    error_log = []

    # Sync the public and private site files when file backup is enabled.
    if settings["file_backup"]:
        upload_from_folder(get_files_path(), 0, "/files", dropbox_client,
                           did_not_upload, error_log)
        upload_from_folder(get_files_path(is_private=1), 1, "/private/files",
                           dropbox_client, did_not_upload, error_log)

    return did_not_upload, list(set(error_log))
Example no. 5
0
def backup_to_dropbox(upload_db_backup=True):
    """Upload the latest database backup and the site's files to Dropbox.

    Returns ``(did_not_upload, error_log)`` for the file sync, or an
    error tuple when no access token can be obtained.
    """
    if not frappe.db:
        frappe.connect()

    settings = get_dropbox_settings()

    if not settings['access_token']:
        # Old-style OAuth1 token: exchange it for an OAuth2 token first.
        token = generate_oauth2_access_token_from_oauth1_token(settings)

        if not token.get('oauth2_token'):
            return 'Failed backup upload', 'No Access Token exists! Please generate the access token for Dropbox.'

        settings['access_token'] = token['oauth2_token']
        set_dropbox_access_token(token['oauth2_token'])

    dropbox_client = dropbox.Dropbox(settings['access_token'])

    if upload_db_backup:
        if frappe.flags.create_new_backup:
            backup = new_backup(ignore_files=True)
            filename = os.path.join(
                get_backups_path(), os.path.basename(backup.backup_path_db))
        else:
            filename = get_latest_backup_file()

        upload_file_to_dropbox(filename, "/database", dropbox_client)

        # Prune database backups beyond the configured retention count.
        if settings['no_of_backups']:
            delete_older_backups(dropbox_client, "/database",
                                 settings['no_of_backups'])

    did_not_upload, error_log = [], []

    # Sync public and private site files when file backup is enabled.
    if settings['file_backup']:
        upload_from_folder(get_files_path(), 0, "/files", dropbox_client,
                           did_not_upload, error_log)
        upload_from_folder(get_files_path(is_private=1), 1, "/private/files",
                           dropbox_client, did_not_upload, error_log)

    return did_not_upload, list(set(error_log))