Example #1
    def compress(file_path: Path) -> tuple[typing.IO, bool]:
        """
        Compress the file using gzip into a tempfile.NamedTemporaryFile.

        Returns a two-element tuple.
        The first element is a file-like object; when the file was recompressed,
        it is a temporary file that is removed as soon as it is closed.
        The second element is True if the file has been recompressed, False otherwise.

        This should always be called before syncing the
        file to a storage provider.
        """
        # If the file is already an archive, don't recompress it.
        if file_path.suffix in ARCHIVE_SUFFIXES:
            log.debug(f"File {file_path.name} is already compressed.")
            return open(file_path, "rb"), False

        temp_file = tempfile.NamedTemporaryFile(suffix=f"-{file_path.name}")

        log.debug(f"Compressing to temporary file: {temp_file.name}")
        with file_path.open(mode="rb") as f_in:
            with gzip.open(temp_file, "wb") as f_out:
                shutil.copyfileobj(f_in, f_out)

        temp_file.seek(0)
        return temp_file, True
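
A minimal usage sketch (the storage object and its upload method below are hypothetical, shown only to illustrate the intended call pattern): consuming the returned file object inside a with block guarantees that, when the file was recompressed, the temporary file is deleted as soon as the upload finishes.

    temp_file, recompressed = compress(Path("dump.sql"))
    suffix = ".gz" if recompressed else ""
    with temp_file as f:
        storage.upload(f, f"dump.sql{suffix}")  # hypothetical storage call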
Example #2
 def backup(self, backup_path) -> None:
     """Dump all the data to a file and then return the filepath."""
     # Run the backup, and store the outcome.
     self.success, self.output = run_command(
         "redis-cli "
         f"-h {self.config.get('host')} "
         f"-p {str(self.config.get('port', '6379'))} "
         f"--rdb {backup_path}",
         REDISCLI_AUTH=self.config.get("password"))
     log.debug(self.output)
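
The run_command helper used here and in the following backup examples is not shown. A minimal sketch of what such a wrapper could look like (the project's actual implementation may differ): it returns a (success, output) tuple and exports any extra keyword arguments as environment variables, which is how the handlers pass credentials such as REDISCLI_AUTH or PGPASSWORD.

    import os
    import subprocess


    def run_command(command: str, **env_vars) -> tuple[bool, str]:
        """Run a shell command and return (success, combined stdout/stderr)."""
        # Extra keyword arguments become environment variables for the child process.
        env = {**os.environ, **{k: v for k, v in env_vars.items() if v is not None}}
        result = subprocess.run(
            command,
            shell=True,
            env=env,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
        return result.returncode == 0, result.stdout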
Example #3
 def backup(self, backup_path) -> None:
     """Dump all the data to a file and then return the filepath."""
     # Run the backup, and store the outcome.
     self.success, self.output = run_command(
         f"pg_dumpall --file={backup_path}",
         PGUSER=self.config["username"],
         PGPASSWORD=self.config["password"],
         PGHOST=self.config["host"],
         PGPORT=str(self.config.get("port", "5432")),
     )
     log.debug(self.output)
Example #4
 def backup(self, backup_path) -> None:
     """Dump all the data to a file and then return the filepath."""
     # Run the backup, and store the outcome in this object.
     self.success, self.output = run_command(
         f"mongodump "
         f"--uri={self.config['connection_string']} "
         "--gzip "
         "--forceTableScan "
         f"--archive={backup_path}"
     )
     log.debug(self.output)
Example #5
 def notify(self):
     """Convert report dict to string and send via Telegram."""
     bot = telebot.TeleBot(self.config["token"])
     try:
         bot.send_message(
             chat_id=self.config["chat_id"],
             text=self._parse_report(),
         )
     except ApiTelegramException:
         log.debug(
             "Telegram API key or user_id is wrong "
             "or you forgot to press /start in your bot."
         )
Example #6
def is_on_cooldown(cooldown) -> bool:
    """Check if we can send notification, main function."""
    log.debug(f'Cooldown period set to {cooldown}')
    delta = parse_config_cooldown(cooldown)
    if os.path.exists(get_project_root() / 'notify.json'):
        data = read_config()
        last_send = datetime.strptime(data['last_send'], "%m/%d/%Y, %H:%M:%S")
        if should_notify(last_send, delta):
            write_config()
            return False
        log.debug('On cooldown')
        return True
    else:
        write_config()
        return False
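
The helpers parse_config_cooldown and should_notify are referenced above but not shown. A possible sketch, assuming the cooldown is configured as a number followed by a unit letter (e.g. "30m", "2h", "7d") and that a notification is allowed once the cooldown has elapsed:

    from datetime import datetime, timedelta


    def parse_config_cooldown(cooldown: str) -> timedelta:
        """Convert a string like '30m', '2h' or '7d' into a timedelta (assumed format)."""
        units = {"m": "minutes", "h": "hours", "d": "days"}
        return timedelta(**{units[cooldown[-1].lower()]: int(cooldown[:-1])})


    def should_notify(last_send: datetime, delta: timedelta) -> bool:
        """Return True when the cooldown period has already elapsed."""
        return datetime.now() - last_send >= delta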
Example #7
    def backup(self) -> Path:
        """Dump all the data to a file and then return the filepath."""
        date = datetime.date.today().strftime("%d_%m_%Y")
        archive_file = Path.home() / f"mongodb_blackbox_{date}.archive"

        # Run the backup, and store the outcome in this object.
        self.success, self.output = run_command(f"mongodump "
                                                f"--uri={self.connstring} "
                                                "--gzip "
                                                "--forceTableScan "
                                                f"--archive={archive_file}")
        log.debug(self.output)

        # Return the path to the backup file
        return archive_file
Example #8
    def sync(self, file_path: Path) -> None:
        """Sync a file to Dropbox."""
        # Check if Dropbox token is valid.
        if self.valid is False:
            error = "Dropbox token is invalid!"
            self.success = False
            self.output = error
            log.error(error)
            return None

        # This is the maximum size that can be uploaded as a single chunk.
        # When the file is bigger than that, it is uploaded in multiple parts.
        chunk_size = 4 * 1024 * 1024

        temp_file, recompressed = self.compress(file_path)
        upload_path = f"{self.upload_base}{file_path.name}{'.gz' if recompressed else ''}"

        try:
            with temp_file as f:
                file_size = os.stat(f.name).st_size
                log.debug(file_size)
                if file_size <= chunk_size:
                    self.client.files_upload(f.read(), upload_path,
                                             WriteMode.overwrite)
                else:
                    session_start = self.client.files_upload_session_start(
                        f.read(chunk_size))
                    cursor = UploadSessionCursor(session_start.session_id,
                                                 offset=f.tell())
                    # The commit holds the destination path in Dropbox and the write mode.
                    commit = CommitInfo(upload_path, WriteMode.overwrite)

                    while f.tell() < file_size:
                        if (file_size - f.tell()) <= chunk_size:
                            self.client.files_upload_session_finish(
                                f.read(chunk_size), cursor, commit)
                        else:
                            self.client.files_upload_session_append(
                                f.read(chunk_size), cursor.session_id,
                                cursor.offset)
                            cursor.offset = f.tell()
            self.success = True
        except (ApiError, HttpError) as e:
            log.error(e)
            self.success = False
            self.output = str(e)
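
The snippet assumes that self.client, self.valid and self.upload_base were prepared elsewhere in the handler. A minimal setup sketch using the official dropbox SDK (the config keys used here are assumptions, not necessarily the project's actual names):

    import dropbox
    from dropbox.exceptions import AuthError


    def _setup(self) -> None:
        """Create the Dropbox client and verify that the access token works."""
        self.client = dropbox.Dropbox(self.config["access_token"])  # assumed config key
        self.upload_base = self.config.get("upload_directory", "/")  # assumed config key
        try:
            # A cheap API call that raises AuthError if the token is invalid.
            self.client.users_get_current_account()
            self.valid = True
        except AuthError:
            self.valid = False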
Example #9
    def backup(self, backup_path) -> None:
        """Dump all the data to a file and then return the filepath."""
        user = self.config["username"]
        password = self.config["password"]
        host = self.config["host"]
        port = str(self.config.get("port", "3306"))

        # Run the backup, and store the outcome.
        self.success, self.output = run_command(
            f"mysqldump -h {host} -u {user} --password='******' "
            f"--port={port} --all-databases > {backup_path}")
        log.debug(self.output)
        # Explicitly check whether the output contains an error message:
        # for some reason mysqldump is always reported as successful.
        if "error" in self.output.lower():
            self.success = False
            log.debug("mysqldump has error(s) in log")
Example #10
    def backup(self) -> Path:
        """Dump all the data to a file and then return the filepath."""
        date = datetime.date.today().strftime("%d_%m_%Y")
        backup_path = Path.home() / f"postgres_blackbox_{date}.sql"

        # Run the backup, and store the outcome.
        self.success, self.output = run_command(
            f"pg_dumpall --file={backup_path}",
            PGUSER=self.config.get("user"),
            PGPASSWORD=self.config.get("password"),
            PGHOST=self.config.get("host"),
            PGPORT=self.config.get("port"),
        )
        log.debug(self.output)

        # Return the path to the backup file
        return backup_path
Example #11
def read_config() -> dict:
    """Read last notification time."""
    log.debug('Found json, reading it...')
    with open(get_project_root() / 'notify.json') as infile:
        data = json.load(infile)
    return data
Example #12
def write_config():
    """Write down successful notification."""
    data = {'last_send': datetime.now().strftime("%m/%d/%Y, %H:%M:%S")}
    log.debug(f"Sending notification at {data}")
    with open(get_project_root() / 'notify.json', 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=4)
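
Both functions rely on get_project_root, which is not shown. One plausible implementation, assuming the helper lives in a module one level below the project root:

    from pathlib import Path


    def get_project_root() -> Path:
        """Return the project root directory (assumed layout: <root>/<package>/<this module>)."""
        return Path(__file__).resolve().parent.parent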
Example #13
def run() -> bool:
    """
    Implement the main blackbox process.

    Returns whether or not the process is a success.
    """
    # Overall program success
    success = True

    # Parse configuration and instantiate handlers
    if not CONFIG.databases or not CONFIG.storage:
        raise exceptions.ImproperlyConfigured(
            "You have to define least one database and storage")

    database_handlers = workflows.get_configured_handlers(CONFIG.databases)
    storage_handlers = workflows.get_configured_handlers(CONFIG.storage)
    notifier_handlers = workflows.get_configured_handlers(CONFIG.notifiers)

    all_workflows = workflows.get_workflows(database_handlers,
                                            storage_handlers,
                                            notifier_handlers)

    with TemporaryDirectory() as backup_dir:
        log.info(f"Backing up to folder: {backup_dir}")
        backup_dir = Path(backup_dir)
        date = datetime.date.today().strftime("%d_%m_%Y")
        backup_files = []

        for workflow in all_workflows:
            database = workflow.database

            # Do a backup, then return the path to the backup file
            backup_filename = f"{database.config['id']}_blackbox_{date}{database.backup_extension}"
            backup_path = backup_dir / backup_filename
            database.backup(backup_path)
            backup_files.append(backup_path)
            database_id = database.get_id_for_retention()
            database.teardown()

            # Add report to notifiers
            report = DatabaseReport(database.config["id"], database.success,
                                    database.output)
            for notifier in workflow.notifiers:
                notifier.add_database(report)

            # If backup failed, continue to next database. No need to sync.
            if not database.success:
                continue

            for storage in workflow.storage_providers:
                # Sync the provider, then rotate and cleanup
                storage.sync(backup_path)
                storage.rotate(database_id)
                storage.teardown()

                # Store the outcome to the database report
                report.report_storage(storage.config["id"], storage.success,
                                      storage.output)

            # Set overall program success to False if workflow is unsuccessful
            if report.success is False:
                success = False

        cooldown = CONFIG['cooldown']
        log.debug(f"Cooldown setting is {cooldown}")
        # Compute the cooldown state up front so the flag is always defined below.
        is_on_cooldown_ = is_on_cooldown(cooldown) if cooldown else False

        # Send a report for each notifier configured
        for notifier in notifier_handlers["all"]:
            # Don't send a notification if no database uses the notifier
            if notifier.report.is_empty:
                continue

            # If cooldown is not configured or the backup failed, always notify.
            if cooldown is None or not notifier.report.success:
                log.debug(
                    'Cooldown not configured or backup failed, sending notification.')
                notifier.notify()

            # Otherwise only notify when we are not on cooldown.
            elif not is_on_cooldown_:
                notifier.notify()

            notifier.teardown()
        return success
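
Since run() returns the overall success flag, a thin entry point can translate it into a process exit code. A minimal sketch (where and how the project actually wires this up is an assumption):

    import sys


    if __name__ == "__main__":
        # Exit non-zero when any backup workflow failed so schedulers such as cron notice it.
        sys.exit(0 if run() else 1)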