Code Example #1
File: __init__.py Project: hfunai/bakthat
# Stdlib imports used by this snippet; GlacierBackend, S3Backend and
# dump_truck_insert_backup come from elsewhere in bakthat (this is Python 2 code).
import hashlib
import re
from datetime import datetime


def upgrade_to_dump_truck():
    # Migrate the Glacier inventory first, then rebuild the backup metadata
    # from the keys actually stored on each backend.
    glacier_backend = GlacierBackend()
    glacier_backend.upgrade_to_dump_truck()

    s3_backend = S3Backend()

    regex_key = re.compile(r"(?P<backup_name>.+)\.(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")

    # old regex for backward compatibility (for files without dot before the date component).
    old_regex_key = re.compile(r"(?P<backup_name>.+)(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")

    for storage, backend in [(s3_backend, "s3"), (glacier_backend, "glacier")]:
        for key in storage.ls():
            match = regex_key.match(key)
            # Backward compatibility
            if not match:
                match = old_regex_key.match(key)
            if match:
                filename = match.group("backup_name")
                is_enc = bool(match.group("is_enc"))
                # Note: strftime("%s") (epoch seconds) is a platform-specific
                # extension; it works on Linux/BSD but is not guaranteed everywhere.
                backup_date = int(datetime.strptime(match.group("date_component"), "%Y%m%d%H%M%S").strftime("%s"))
            else:
                filename = key
                is_enc = False
                backup_date = 0
            if backend == "s3":
                backend_hash = hashlib.sha512(
                    s3_backend.conf.get("access_key") + s3_backend.conf.get(s3_backend.container_key)
                ).hexdigest()
            elif backend == "glacier":
                backend_hash = hashlib.sha512(
                    glacier_backend.conf.get("access_key") + glacier_backend.conf.get(glacier_backend.container_key)
                ).hexdigest()
            new_backup = dict(
                backend=backend,
                is_deleted=0,
                backup_date=backup_date,
                tags=[],
                stored_filename=key,
                filename=filename,
                last_updated=int(datetime.utcnow().strftime("%s")),
                metadata=dict(is_enc=is_enc),
                size=0,
                backend_hash=backend_hash,
            )
            try:
                dump_truck_insert_backup(new_backup)
            except Exception:
                # Don't let a single failed insert abort the whole upgrade
                pass
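
The upgrade above hinges on parsing each stored filename back into a (name, date, encrypted) triple, falling back to the legacy pattern for names without a dot before the date component. A minimal standalone sketch of how the two patterns behave (the filenames below are made up for illustration):

import re
from datetime import datetime

regex_key = re.compile(r"(?P<backup_name>.+)\.(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")
old_regex_key = re.compile(r"(?P<backup_name>.+)(?P<date_component>\d{14})\.tgz(?P<is_enc>\.enc)?")

for key in ("mydocs.20130215120000.tgz.enc",  # current naming scheme, encrypted
            "mydocs20130215120000.tgz"):      # legacy scheme: no dot before the date
    match = regex_key.match(key) or old_regex_key.match(key)
    print("%s | %s | enc=%s" % (match.group("backup_name"),
                                datetime.strptime(match.group("date_component"), "%Y%m%d%H%M%S"),
                                bool(match.group("is_enc"))))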
Code Example #2
File: __init__.py Project: hfunai/bakthat
# Stdlib imports used by this snippet; _get_store_backend, encrypt_file,
# dump_truck_insert_backup and log come from elsewhere in bakthat (Python 2 code).
import hashlib
import mimetypes
import os
import re
import tarfile
import tempfile
from contextlib import closing
from datetime import datetime
from getpass import getpass


def backup(filename=os.getcwd(), destination=None, prompt="yes", tags=None, profile="default", **kwargs):
    """Perform backup.

    :type filename: str
    :param filename: File/directory to backup.

    :type destination: str
    :param destination: s3|glacier

    :type prompt: str
    :param prompt: Set to "no" to disable the password prompt (and thus
        encryption); only useful when using bakthat in command line mode.

    :type tags: str or list
    :param tags: Tags, either as a space-separated string or directly as
        a list of strings (when calling from Python).

    :type password: str
    :keyword password: Password, empty string to disable encryption.

    :type conf: dict
    :keyword conf: Override/set AWS configuration.

    :rtype: dict
    :return: A dict containing the following keys: stored_filename, size, metadata, backend and filename.

    """
    conf = kwargs.get("conf", None)
    storage_backend = _get_store_backend(conf, destination, profile)
    backup_file_fmt = "{0}.{1}.tgz"

    log.info("Backing up " + filename)
    arcname = filename.strip("/").split("/")[-1]
    now = datetime.utcnow()
    date_component = now.strftime("%Y%m%d%H%M%S")
    stored_filename = backup_file_fmt.format(arcname, date_component)

    backup_data = dict(filename=arcname, backup_date=int(now.strftime("%s")), backend=destination, is_deleted=False)

    password = kwargs.get("password")
    if password is None and prompt.lower() != "no":
        password = getpass("Password (blank to disable encryption): ")
        if password:
            password2 = getpass("Password confirmation: ")
            if password != password2:
                log.error("Password confirmation doesn't match")
                return

    # Check whether the file is already a gzipped tarball; guess_type returns a
    # (type, encoding) tuple, so .tar.gz/.tgz yields ("application/x-tar", "gzip")
    if mimetypes.guess_type(arcname) == ("application/x-tar", "gzip"):
        log.info("File already compressed")
        outname = filename

        # removing extension to reformat filename
        new_arcname = re.sub(r"(\.t(ar\.)?gz)", "", arcname)
        stored_filename = backup_file_fmt.format(new_arcname, date_component)

        # Open the existing archive just to fstat its size
        with open(outname, "rb") as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size

        bakthat_compression = False
    else:
        # Otherwise, compress it ourselves
        log.info("Compressing...")
        with tempfile.NamedTemporaryFile(delete=False) as out:
            with closing(tarfile.open(fileobj=out, mode="w:gz")) as tar:
                tar.add(filename, arcname=arcname)
            outname = out.name
            out.seek(0)
            backup_data["size"] = os.fstat(out.fileno()).st_size
        bakthat_compression = True

    bakthat_encryption = False
    if password:
        bakthat_encryption = True
        log.info("Encrypting...")
        encrypted_out = tempfile.NamedTemporaryFile(delete=False)
        encrypt_file(outname, encrypted_out.name, password)
        stored_filename += ".enc"

        # We only remove the file if the archive is created by bakthat
        if bakthat_compression:
            os.remove(outname)  # remove non-encrypted tmp file

        outname = encrypted_out.name

        encrypted_out.seek(0)
        backup_data["size"] = os.fstat(encrypted_out.fileno()).st_size

    # Handle tags metadata: tags may be None, a space-separated string
    # (str or unicode under Python 2), or already a list of strings
    if tags is None:
        tags = []
    elif isinstance(tags, (str, unicode)):
        tags = tags.split()

    backup_data["tags"] = tags

    backup_data["metadata"] = dict(is_enc=bakthat_encryption)
    backup_data["stored_filename"] = stored_filename
    backup_data["backend_hash"] = hashlib.sha512(
        storage_backend.conf.get("access_key") + storage_backend.conf.get(storage_backend.container_key)
    ).hexdigest()

    log.info("Uploading...")
    storage_backend.upload(stored_filename, outname)

    # Remove the temporary archive if bakthat created it (compressed and/or
    # encrypted); the user's original file is never removed
    if bakthat_compression or bakthat_encryption:
        os.remove(outname)

    log.debug(backup_data)

    # Insert backup metadata in SQLite
    dump_truck_insert_backup(backup_data)

    return backup_data
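
For context, a minimal usage sketch of the function above, assuming bakthat is importable with a configured "default" profile and valid AWS credentials (the path and tags are illustrative):

from bakthat import backup

# Back up a directory to S3 without prompting for a password
# (prompt="no" disables encryption)
result = backup("/home/me/documents", destination="s3", prompt="no",
                tags="projects 2013")

# The returned dict matches the docstring: stored_filename, size,
# metadata, backend and filename
print(result["stored_filename"])  # e.g. "documents.20130215120000.tgz"
print(result["size"])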