Example #1
    def __init__(self, conf=None, extra_conf=None, section="aws"):
        extra_conf = extra_conf or []  # avoid sharing a mutable default list
        self.custom_conf = None
        self.conf = {}
        if not conf:
            try:
                if section == "aws":
                    self.conf["access_key"] = config.get(section, "access_key")
                    self.conf["secret_key"] = config.get(section, "secret_key")
                    self.conf["region_name"] = config.get(section, "region_name")

                for key in extra_conf:
                    try:
                        self.conf[key] = config.get(section, key)
                    except Exception as exc:
                        log.exception(exc)
                        log.error("Missing configuration variable.")
                        self.conf[key] = None

            except ConfigParser.NoOptionError:
                if section == "aws":
                    log.error("Configuration file not available.")
                    log.info("Use 'bakthat configure' to create one.")
                else:
                    log.error("No {0} section available in configuration file.".format(section))

        else:
            if section == "aws":
                self.conf["access_key"] = conf.get("access_key")
                self.conf["secret_key"] = conf.get("secret_key")
                self.conf["region_name"] = conf.get("region_name", DEFAULT_LOCATION)

            for key in extra_conf:
                self.conf[key] = conf.get(key)
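
A minimal usage sketch with an explicit conf dict; the class name BakthatBackend and all values are assumptions, since the snippet above only shows __init__:

# Hypothetical; BakthatBackend is an assumed name for the class above.
conf = {"access_key": "<AWS access key>",
        "secret_key": "<AWS secret key>",
        "region_name": "us-east-1"}
backend = BakthatBackend(conf=conf, extra_conf=["s3_bucket"])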
Example #2
    def __init__(self, conf=None):
        conf = {} if conf is None else conf
        sync_conf = dict(url=config.get("sync", {}).get("url"),
                         username=config.get("sync", {}).get("username"),
                         password=config.get("sync", {}).get("password"))
        sync_conf.update(conf)

        self.sync_auth = (sync_conf["username"], sync_conf["password"])
        self.api_url = sync_conf["url"]

        self.request_kwargs = dict(auth=self.sync_auth)

        self.request_kwargs["headers"] = {'content-type': 'application/json', 'bakthat-client': socket.gethostname()}

        self.get_resource = lambda x: self.api_url + "/{0}".format(x)
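
The get_resource helper simply appends a resource path to the API root; a sketch of what it produces (URL and instance name hypothetical):

# Assuming sync_conf["url"] was "https://sync.example.com/api":
#   syncer.get_resource("backups")  -> "https://sync.example.com/api/backups"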
Example #3
def configure(profile="default"):
    new_conf = config.copy()
    new_conf[profile] = config.get(profile, {})

    new_conf[profile]["access_key"] = raw_input("AWS Access Key: ")
    new_conf[profile]["secret_key"] = raw_input("AWS Secret Key: ")
    new_conf[profile]["s3_bucket"] = raw_input("S3 Bucket Name: ")
    new_conf[profile]["glacier_vault"] = raw_input("Glacier Vault Name: ")

    while True:
        default_destination = raw_input("Default destination ({0}): ".format(DEFAULT_DESTINATION))
        if default_destination:
            default_destination = default_destination.lower()
            if default_destination in ("s3", "glacier"):
                break
            else:
                log.error("Invalid default_destination, should be s3 or glacier, try again.")
        else:
            default_destination = DEFAULT_DESTINATION
            break

    new_conf[profile]["default_destination"] = default_destination
    region_name = raw_input("Region Name ({0}): ".format(DEFAULT_LOCATION))
    if not region_name:
        region_name = DEFAULT_LOCATION
    new_conf[profile]["region_name"] = region_name

    # Open the file only once all prompts have completed, and close it properly.
    with open(CONFIG_FILE, "w") as new_conf_file:
        yaml.dump(new_conf, new_conf_file, default_flow_style=False)

    log.info("Config written in %s" % CONFIG_FILE)
    log.info("Run bakthat configure_backups_rotation if needed.")
Example #4
def _get_database():
    """Determine database in use and credentials if required."""
    database = None
    conf = config.get("default") or {}  # guard against a missing "default" section

    database_type = conf.get("database_type")
    if database_type:
        if database_type == "mysql":
            required = ("database_host", "database_name", "database_user",
                        "database_pass", "database_port")
            if all(conf.get(key) for key in required):
                database = peewee.MySQLDatabase(conf.get("database_name"),
                                                host=conf.get("database_host"),
                                                port=conf.get("database_port"),
                                                user=conf.get("database_user"),
                                                passwd=conf.get("database_pass"))
            else:
                log.error("You must specify all config options if using mysql database.")
        elif database_type == "sqlite":
            database = peewee.SqliteDatabase(DATABASE)
    else:
        log.info("Defaulting to SQLite.")
        database = peewee.SqliteDatabase(DATABASE)

    return database
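
The keys read above live in a "default" profile of the YAML config; a sketch for a MySQL-backed setup (values hypothetical):

# Hypothetical "default" profile entries for MySQL:
default:
  database_type: mysql
  database_host: localhost
  database_name: bakthat
  database_user: bakthat
  database_pass: secret
  database_port: 3306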
Example #5
    def __init__(self, conf=None, profile="default"):
        self.conf = conf or {}  # avoid sharing a mutable default dict
        if not conf:
            # Fall back to {} so the membership tests below cannot raise TypeError.
            self.conf = config.get(profile) or {}
            if not self.conf:
                log.error("No {0} profile defined in {1}.".format(profile, CONFIG_FILE))
            if "access_key" not in self.conf or "secret_key" not in self.conf:
                log.error("Missing access_key/secret_key in {0} profile ({1}).".format(profile, CONFIG_FILE))
Example #6
def show_glacier_inventory(**kwargs):
    loaded_archives = None  # stays None when no S3 bucket is configured
    if config.get("aws", "s3_bucket"):
        conf = kwargs.get("conf", None)
        glacier_backend = GlacierBackend(conf)
        loaded_archives = glacier_backend.load_archives_from_s3()
        log.info(json.dumps(loaded_archives, sort_keys=True, indent=4, separators=(",", ": ")))
    else:
        log.error("No S3 bucket defined.")
    return loaded_archives
Example #7
def configure(profile="default"):
    try:
        new_conf = config.copy()
        new_conf[profile] = config.get(profile, {})

        while True:
            database_type = raw_input("Database type: ")
            if database_type:
                database_type = database_type.lower()
                if database_type in ("mysql", "sqlite"):
                    break
                else:
                    log.error("Invalid database_type, should be mysql or sqlite, try again.")

        new_conf[profile]["database_type"] = database_type

        if database_type == "mysql":
            new_conf[profile]["database_host"] = raw_input("Database host: ")
            new_conf[profile]["database_name"] = raw_input("Database name: ")
            new_conf[profile]["database_user"] = raw_input("Database user: "******"database_pass"] = raw_input("Database pass: "******"database_port"] = raw_input("Database port: ")

        new_conf[profile]["access_key"] = raw_input("AWS Access Key: ")
        new_conf[profile]["secret_key"] = raw_input("AWS Secret Key: ")
        new_conf[profile]["s3_bucket"] = raw_input("S3 Bucket Name: ")
        new_conf[profile]["glacier_vault"] = raw_input("Glacier Vault Name: ")

        while True:
            default_destination = raw_input("Default destination ({0}): ".format(DEFAULT_DESTINATION))
            if default_destination:
                default_destination = default_destination.lower()
                if default_destination in ("s3", "glacier", "swift"):
                    break
                else:
                    log.error("Invalid default_destination, should be s3 or glacier, swift, try again.")
            else:
                default_destination = DEFAULT_DESTINATION
                break

        new_conf[profile]["default_destination"] = default_destination
        region_name = raw_input("Region Name ({0}): ".format(DEFAULT_LOCATION))
        if not region_name:
            region_name = DEFAULT_LOCATION
        new_conf[profile]["region_name"] = region_name

        if default_destination in ("swift"):
            new_conf[profile]["auth_version"] = raw_input("Swift Auth Version: ")
            new_conf[profile]["auth_url"] = raw_input("Swift Auth URL: ")

        with open(CONFIG_FILE, "w") as conf_file:
            yaml.dump(new_conf, conf_file, default_flow_style=False)

        log.info("Config written in %s" % CONFIG_FILE)
        log.info("Run bakthat configure_backups_rotation if needed.")
    except KeyboardInterrupt:
        log.error("Cancelled by user")
Example #8
    def backup_inventory(self):
        """Backup the local inventory from shelve as a json string to S3."""
        if config.get("aws", "s3_bucket"):
            archives = self.load_archives()

            s3_bucket = S3Backend(self.conf).bucket
            k = Key(s3_bucket)
            k.key = self.backup_key

            k.set_contents_from_string(json.dumps(archives))

            k.set_acl("private")
Example #9
    def restore_inventory(self):
        """Restore inventory from S3 to local shelve."""
        if config.get("aws", "s3_bucket"):
            loaded_archives = self.load_archives_from_s3()

            with glacier_shelve() as d:
                archives = {}
                for a in loaded_archives:
                    print a
                    archives[a["filename"]] = a["archive_id"]
                d["archives"] = archives
        else:
            raise Exception("You must set s3_bucket in order to backup/restore inventory to/from S3.")
Example #10
    def restore_inventory(self):
        """Restore inventory from S3 to local shelve."""
        if config.get("aws", "s3_bucket"):
            loaded_archives = self.load_archives_from_s3()

            with glacier_shelve() as d:
                if not d.has_key("archives"):
                    d["archives"] = dict()

                archives = loaded_archives
                d["archives"] = archives
        else:
            raise Exception("You must set s3_bucket in order to backup/restore inventory to/from S3.")
Example #11
def _get_profile_query(profile):
    if profile:
        profile_conf = config.get(profile)
        if profile_conf:
            s3_hash_key = hashlib.sha512(profile_conf.get("access_key") +
                                         profile_conf.get("s3_bucket")).hexdigest()
            # The original chained assignment (glacier_hash_key = s3_hash_key = ...)
            # clobbered s3_hash_key with the Glacier hash, so the s3 branch of the
            # query could never match.
            glacier_hash_key = hashlib.sha512(profile_conf.get("access_key") +
                                              profile_conf.get("glacier_vault")).hexdigest()
            return " ((backend == 's3' AND backend_hash == '{0}') OR \
                    (backend == 'glacier' AND backend_hash == '{1}'))".format(s3_hash_key,
                                                                              glacier_hash_key)
        else:
            raise Exception("Profile {0} not found.".format(profile))
    return ""
Example #12
    def match_filename(cls, filename, destination, **kwargs):
        profile = config.get(kwargs.get("profile", "default"))

        s3_key = hashlib.sha512(profile.get("access_key") +
                                profile.get("s3_bucket")).hexdigest()
        glacier_key = hashlib.sha512(profile.get("access_key") +
                                     profile.get("glacier_vault")).hexdigest()

        try:
            fquery = "{0}*".format(filename)
            query = Backups.select().where(Backups.filename % fquery |
                                           Backups.stored_filename % fquery,
                                           Backups.backend == destination,
                                           Backups.backend_hash << [s3_key, glacier_key])
            query = query.order_by(Backups.backup_date.desc())
            return query.get()
        except Backups.DoesNotExist:
            return
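
A usage sketch, assuming the method above is a classmethod of Backups (the arguments are hypothetical); it returns the most recent matching row, or None:

# Hypothetical call:
latest = Backups.match_filename("documents", "s3", profile="default")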
Example #13
    def search(cls, query="", destination="", **kwargs):
        if not destination:
            destination = ["s3", "glacier"]
        if isinstance(destination, (str, unicode)):
            destination = [destination]

        profile = config.get(kwargs.get("profile", "default"))

        s3_key = hashlib.sha512(profile.get("access_key") +
                                profile.get("s3_bucket")).hexdigest()
        glacier_key = hashlib.sha512(profile.get("access_key") +
                                     profile.get("glacier_vault")).hexdigest()

        query = "*{0}*".format(query)
        wheres = []
        wheres.append(Backups.filename % query |
                      Backups.stored_filename % query)
        wheres.append(Backups.backend << destination)
        wheres.append(Backups.backend_hash << [s3_key, glacier_key])
        wheres.append(Backups.is_deleted == False)

        older_than = kwargs.get("older_than")
        if older_than:
            wheres.append(Backups.backup_date < older_than)

        backup_date = kwargs.get("backup_date")
        if backup_date:
            wheres.append(Backups.backup_date == backup_date)

        last_updated_gt = kwargs.get("last_updated_gt")
        if last_updated_gt:
            wheres.append(Backups.last_updated >= last_updated_gt)

        tags = kwargs.get("tags", [])
        if tags:
            if isinstance(tags, (str, unicode)):
                tags = tags.split()
            tags_query = ["Backups.tags % '*{0}*'".format(tag) for tag in tags]
            tags_query = eval("({0})".format(" and ".join(tags_query)))
            wheres.append(tags_query)

        return Backups.select().where(*wheres).order_by(Backups.last_updated.desc())
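
A usage sketch with hypothetical filters, assuming search is exposed as a classmethod of Backups:

# Hypothetical query; yields the most recently updated matches first:
for b in Backups.search(query="documents", destination="s3", tags="home"):
    print b.stored_filename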
Example #14
def configure(profile="default"):
    try:
        new_conf = config.copy()
        new_conf[profile] = config.get(profile, {})

        new_conf[profile]["access_key"] = raw_input("AWS Access Key: ")
        new_conf[profile]["secret_key"] = raw_input("AWS Secret Key: ")
        new_conf[profile]["s3_bucket"] = raw_input("S3 Bucket Name: ")
        new_conf[profile]["glacier_vault"] = raw_input("Glacier Vault Name: ")

        while True:
            default_destination = raw_input(
                "Default destination ({0}): ".format(DEFAULT_DESTINATION))
            if default_destination:
                default_destination = default_destination.lower()
                if default_destination in ("s3", "glacier", "swift"):
                    break
                else:
                    log.error(
                        "Invalid default_destination, should be s3, glacier or swift, try again."
                    )
            else:
                default_destination = DEFAULT_DESTINATION
                break

        new_conf[profile]["default_destination"] = default_destination
        region_name = raw_input("Region Name ({0}): ".format(DEFAULT_LOCATION))
        if not region_name:
            region_name = DEFAULT_LOCATION
        new_conf[profile]["region_name"] = region_name

        if default_destination in ("swift"):
            new_conf[profile]["auth_version"] = raw_input(
                "Swift Auth Version: ")
            new_conf[profile]["auth_url"] = raw_input("Swift Auth URL: ")

        with open(CONFIG_FILE, "w") as conf_file:
            yaml.dump(new_conf, conf_file, default_flow_style=False)

        log.info("Config written in %s" % CONFIG_FILE)
        log.info("Run bakthat configure_backups_rotation if needed.")
    except KeyboardInterrupt:
        log.error("Cancelled by user")
Example #15
def backup(filename=os.getcwd(),
           destination=None,
           prompt="yes",
           tags=None,
           profile="default",
           config=CONFIG_FILE,
           key=None,
           **kwargs):
    """Perform backup.

    :type filename: str
    :param filename: File/directory to backup.

    :type destination: str
    :param destination: s3|glacier|swift

    :type prompt: str
    :param prompt: Set to "no" to disable the password prompt (and thus
        encryption); only useful when using bakthat in command line mode.

    :type tags: str or list
    :param tags: Tags, either as a space-separated str or a list of str
        (if calling from Python).

    :type password: str
    :keyword password: Password, empty string to disable encryption.

    :type conf: dict
    :keyword conf: Override/set AWS configuration.

    :type custom_filename: str
    :keyword custom_filename: Override the original filename (only in metadata)

    :rtype: dict
    :return: A dict containing the following keys: stored_filename, size, metadata, backend and filename.

    """
    tags = tags or []  # guard against reusing a shared mutable default list
    storage_backend, destination, conf = _get_store_backend(
        config, destination, profile)
    backup_file_fmt = "{0}.{1}.tgz"

    # Check if compression is disabled on the configuration.
    if conf:
        compress = conf.get("compress", True)
    else:
        compress = config.get(profile).get("compress", True)

    if not compress:
        backup_file_fmt = "{0}.{1}"

    log.info("Backing up " + filename)
    arcname = filename.strip('/').split('/')[-1]
    now = datetime.utcnow()
    date_component = now.strftime("%Y%m%d%H%M%S")
    stored_filename = backup_file_fmt.format(arcname, date_component)

    backup_date = int(now.strftime("%s"))
    backup_data = dict(filename=kwargs.get("custom_filename", arcname),
                       backup_date=backup_date,
                       last_updated=backup_date,
                       backend=destination,
                       is_deleted=False)

    # Useful only when using bakmanager.io hook
    backup_key = key

    password = kwargs.get("password", os.environ.get("BAKTHAT_PASSWORD"))
    if password is None and prompt.lower() != "no":
        password = getpass("Password (blank to disable encryption): ")
        if password:
            password2 = getpass("Password confirmation: ")
            if password != password2:
                log.error("Password confirmation doesn't match")
                return

    if not compress:
        log.info("Compression disabled")
        outname = filename
        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size
        bakthat_compression = False

    # Check if the file is not already compressed
    elif mimetypes.guess_type(arcname) == ('application/x-tar', 'gzip'):
        log.info("File already compressed")
        outname = filename

        # removing extension to reformat filename
        new_arcname = re.sub(r'(\.t(ar\.)?gz)', '', arcname)
        stored_filename = backup_file_fmt.format(new_arcname, date_component)

        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size

        bakthat_compression = False
    else:
        # If not we compress it
        log.info("Compressing...")
        with tempfile.NamedTemporaryFile(delete=False) as out:
            with closing(tarfile.open(fileobj=out, mode="w:gz")) as tar:
                tar.add(filename, arcname=arcname)
            outname = out.name
            out.seek(0)
            backup_data["size"] = os.fstat(out.fileno()).st_size
        bakthat_compression = True

    bakthat_encryption = False
    if password:
        bakthat_encryption = True
        log.info("Encrypting...")
        encrypted_out = tempfile.NamedTemporaryFile(delete=False)
        encrypt_file(outname, encrypted_out.name, password)
        stored_filename += ".enc"

        # We only remove the file if the archive is created by bakthat
        if bakthat_compression:
            os.remove(outname)  # remove non-encrypted tmp file

        outname = encrypted_out.name

        encrypted_out.seek(0)
        backup_data["size"] = os.fstat(encrypted_out.fileno()).st_size

    # Handling tags metadata
    if isinstance(tags, list):
        tags = " ".join(tags)

    backup_data["tags"] = tags

    backup_data["metadata"] = dict(is_enc=bakthat_encryption,
                                   client=socket.gethostname())
    backup_data["stored_filename"] = stored_filename

    access_key = storage_backend.conf.get("access_key")
    container_key = storage_backend.conf.get(storage_backend.container_key)
    backup_data["backend_hash"] = hashlib.sha512(access_key +
                                                 container_key).hexdigest()

    log.info("Uploading...")
    storage_backend.upload(stored_filename, outname)

    # We only remove the file if the archive is created by bakthat
    if bakthat_compression or bakthat_encryption:
        os.remove(outname)

    log.debug(backup_data)

    # Insert backup metadata in SQLite
    Backups.create(**backup_data)

    BakSyncer(conf).sync_auto()

    # bakmanager.io hook, enable with -k/--key parameter
    if backup_key:
        bakmanager_hook(conf, backup_data, backup_key)

    return backup_data
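
A minimal call sketch (path and tags hypothetical); an empty BAKTHAT_PASSWORD skips the prompt and disables encryption, as the password handling above shows:

# Hypothetical invocation, relying on the env-var fallback shown above:
os.environ["BAKTHAT_PASSWORD"] = ""
backup("/home/user/documents", destination="s3", tags=["documents", "home"])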
Example #16
def backup(filename=os.getcwd(), destination=None, profile="default", config=CONFIG_FILE, prompt="yes", tags=None, key=None, exclude_file=None, s3_reduced_redundancy=False, **kwargs):
    """Perform backup.

    :type filename: str
    :param filename: File/directory to backup.

    :type destination: str
    :param destination: s3|glacier|swift

    :type prompt: str
    :param prompt: Set to "no" to disable the password prompt (and thus
        encryption); only useful when using bakthat in command line mode.

    :type tags: str or list
    :param tags: Tags, either as a space-separated str or a list of str
        (if calling from Python).

    :type password: str
    :keyword password: Password, empty string to disable encryption.

    :type conf: dict
    :keyword conf: Override/set AWS configuration.

    :type custom_filename: str
    :keyword custom_filename: Override the original filename (only in metadata)

    :rtype: dict
    :return: A dict containing the following keys: stored_filename, size, metadata, backend and filename.

    """
    tags = tags or []  # guard against reusing a shared mutable default list
    storage_backend, destination, conf = _get_store_backend(config, destination, profile)
    backup_file_fmt = "{0}.{1}.tgz"

    session_id = str(uuid.uuid4())
    events.before_backup(session_id)

    # Check if compression is disabled on the configuration.
    if conf:
        compress = conf.get("compress", True)
    else:
        compress = config.get(profile).get("compress", True)

    if not compress:
        backup_file_fmt = "{0}.{1}"

    log.info("Backing up " + filename)

    if exclude_file and os.path.isfile(exclude_file):
        EXCLUDE_FILES.insert(0, exclude_file)

    _exclude = lambda filename: False
    if os.path.isdir(filename):
        join = functools.partial(os.path.join, filename)
        for efile in EXCLUDE_FILES:
            efile = join(efile)
            if os.path.isfile(efile):
                _exclude = _get_exclude(efile)
                log.info("Using {0} to exclude files.".format(efile))

    arcname = filename.strip('/').split('/')[-1]
    now = datetime.utcnow()
    date_component = now.strftime("%Y%m%d%H%M%S")
    stored_filename = backup_file_fmt.format(arcname, date_component)

    backup_date = int(now.strftime("%s"))
    backup_data = dict(filename=kwargs.get("custom_filename", arcname),
                       backup_date=backup_date,
                       last_updated=backup_date,
                       backend=destination,
                       is_deleted=False)

    # Useful only when using bakmanager.io hook
    backup_key = key

    password = kwargs.get("password", os.environ.get("BAKTHAT_PASSWORD"))
    if password is None and prompt.lower() != "no":
        password = getpass("Password (blank to disable encryption): ")
        if password:
            password2 = getpass("Password confirmation: ")
            if password != password2:
                log.error("Password confirmation doesn't match")
                return

    if not compress:
        log.info("Compression disabled")
        outname = filename
        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size
        bakthat_compression = False

    # Check if the file is not already compressed
    elif mimetypes.guess_type(arcname) == ('application/x-tar', 'gzip'):
        log.info("File already compressed")
        outname = filename

        # removing extension to reformat filename
        new_arcname = re.sub(r'(\.t(ar\.)?gz)', '', arcname)
        stored_filename = backup_file_fmt.format(new_arcname, date_component)

        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size

        bakthat_compression = False
    else:
        # If not we compress it
        log.info("Compressing...")

        with tempfile.NamedTemporaryFile(delete=False) as out:
            with closing(tarfile.open(fileobj=out, mode="w:gz")) as tar:
                tar.add(filename, arcname=arcname, exclude=_exclude)
            outname = out.name
            out.seek(0)
            backup_data["size"] = os.fstat(out.fileno()).st_size
        bakthat_compression = True

    bakthat_encryption = False
    if password:
        bakthat_encryption = True
        log.info("Encrypting...")
        encrypted_out = tempfile.NamedTemporaryFile(delete=False)
        encrypt_file(outname, encrypted_out.name, password)
        stored_filename += ".enc"

        # We only remove the file if the archive is created by bakthat
        if bakthat_compression:
            os.remove(outname)  # remove non-encrypted tmp file

        outname = encrypted_out.name

        encrypted_out.seek(0)
        backup_data["size"] = os.fstat(encrypted_out.fileno()).st_size

    # Handling tags metadata
    if isinstance(tags, list):
        tags = " ".join(tags)

    backup_data["tags"] = tags

    backup_data["metadata"] = dict(is_enc=bakthat_encryption,
                                   client=socket.gethostname())
    stored_filename = os.path.join(os.path.dirname(kwargs.get("custom_filename", "")), stored_filename)
    backup_data["stored_filename"] = stored_filename

    access_key = storage_backend.conf.get("access_key")
    container_key = storage_backend.conf.get(storage_backend.container_key)
    backup_data["backend_hash"] = hashlib.sha512(access_key + container_key).hexdigest()

    log.info("Uploading...")
    storage_backend.upload(stored_filename, outname, s3_reduced_redundancy=s3_reduced_redundancy)

    # We only remove the file if the archive is created by bakthat
    if bakthat_compression or bakthat_encryption:
        os.remove(outname)

    log.debug(backup_data)

    # Insert backup metadata in SQLite
    backup = Backups.create(**backup_data)

    BakSyncer(conf).sync_auto()

    # bakmanager.io hook, enable with -k/--key parameter
    if backup_key:
        bakmanager_hook(conf, backup_data, backup_key)

    events.on_backup(session_id, backup)

    return backup
Example #17
    def sync_auto(self):
        """Trigger sync if autosync is enabled."""
        if config.get("sync", {}).get("auto", False):
            self.sync()
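
sync_auto only fires when the "sync" section of the config enables it; together with the BakSyncer constructor in Example #2, that section plausibly looks like this (values hypothetical):

# Hypothetical "sync" section of the YAML config:
sync:
  url: https://sync.example.com/api
  username: user
  password: secret
  auto: true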
Example #18
def _get_store_backend(conf, destination=DEFAULT_DESTINATION, profile="default"):
    if not destination:
        destination = config.get("aws", "default_destination")
    return STORAGE_BACKEND[destination](conf, profile)
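
STORAGE_BACKEND maps a destination name to a backend class. Its definition is not shown in these examples, but given the destinations used above it plausibly looks like:

# Assumed shape of the registry: S3Backend and GlacierBackend appear in the
# examples above; SwiftBackend is inferred from the "swift" destination.
STORAGE_BACKEND = dict(s3=S3Backend, glacier=GlacierBackend, swift=SwiftBackend)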