    def test_autoclean(self, mock_path):
        mock_path.return_value = self.dir + os.sep

        with app.app_context():

            # create fake backup dir
            backup = Backup()
            date_ids = [
                '20110824045557', '20100106120931', '20090728192328',
                '20070611074712', '20130729044443', '20070611090332',
                '20090927181422', '20060505063150', '20090608052756',
                '20050413201344', '20111015194547', '20090711221957',
                '20140425202739', '20130808133229', '20120111210958',
                '20120419224811', '20060519170013', '20090111042034',
                '20100112115416'
            ]
            class_names = ['Post', 'User', 'SomeControl', 'Comments']
            for date_id in date_ids:
                for class_name in class_names:
                    name = backup.get_name(class_name, date_id)
                    backup.target.create_file(name, ''.encode())

            # assert files were created
            history()
            backup = Backup()
            backup.files = backup.target.get_files()
            expected_count = len(class_names) * len(date_ids)
            self.assertEqual(len(list(backup.files)), expected_count)

            # run auto clean
            autoclean(True)

            # assert some files were deleted
            backup = Backup()
            backup.files = backup.target.get_files()
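            # every sample timestamp is years in the past, so the retention
            # rules boil down to "keep the most recent backup of each year",
            # which leaves the nine timestamps below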
            white_list = [
                '20140425202739', '20130808133229', '20120419224811',
                '20111015194547', '20100112115416', '20090927181422',
                '20070611090332', '20060519170013', '20050413201344'
            ]
            expected_count = len(class_names) * len(white_list)
            self.assertEqual(len(list(backup.files)), expected_count)

            # assert that only white-listed files exist
            # and that only black-listed files were deleted
            backup = Backup()
            backup.files = tuple(backup.target.get_files())
            self.assertEqual(sorted(white_list),
                             sorted(backup.get_timestamps()))

            # clean up to avoid messing up other tests
            backup = Backup()
            backup.files = backup.target.get_files()
            for name in backup.files:
                backup.target.delete_file(name)
            backup = Backup()
            backup.files = backup.target.get_files()
            self.assertEqual(len(list(backup.files)), 0)
Example #2
class TestBackup(TestCase):
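    # fixture names follow the <prefix>-<timestamp>-<class>.gz pattern
    # produced by Backup.get_name (see test_get_name below)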

    FILES = (
        "BRA-19940704123000-USA.gz",
        "BRA-19940709163000-NED.gz",
        "BRA-19940713123000-SWE.gz",
        "BRA-19940717123000-ITA.gz",
    )

    @patch("flask_alchemydumps.backup.decouple.config")
    def setUp(self, mock_config):
        self.tmp = TemporaryDirectory()

        # Respectively: FTP server, FTP user, FTP password, FTP path, local
        # directory for backups and file prefix
        mock_config.side_effect = (None, None, None, None, self.tmp.name,
                                   "BRA")

        # main objects
        self.backup = Backup()
        self.backup.files = tuple(self.files)

    def tearDown(self):
        self.tmp.cleanup()

    @property
    def files(self):
        for name in self.FILES:
            yield Path(self.tmp.name) / name

    def test_get_timestamps(self):
        self.assertEqual(
            sorted(("19940704123000", "19940709163000", "19940713123000",
                    "19940717123000")),
            sorted(self.backup.get_timestamps()),
        )

    def test_by_timestamp(self):
        self.assertEqual(
            (Path(self.tmp.name) / "BRA-19940717123000-ITA.gz", ),
            tuple(self.backup.by_timestamp("19940717123000")),
        )

    def test_valid(self):
        self.assertTrue(self.backup.valid("19940704123000"))
        self.assertFalse(self.backup.valid("19980712210000"))

    def test_get_name(self):
        self.assertEqual(f"BRA-{self.backup.target.TIMESTAMP}-GER.gz",
                         self.backup.get_name("GER"))
Example #3
def autoclean(assume_yes=False):
    """
    Remove a series of backup files based on the following rules:
    * Keeps all the backups from the last 7 days
    * Keeps the most recent backup from each week of the last month
    * Keeps the most recent backup from each month of the last year
    * Keeps the most recent backup from each year of the remaining years
    """

    # check if there are backups
    backup = Backup()
    backup.files = tuple(backup.target.get_files())
    if not backup.files:
        print('==> No backups found.')
        return None

    # get black and white list
    cleaning = BackupAutoClean(backup.get_timestamps())
    white_list = cleaning.white_list
    black_list = cleaning.black_list
    if not black_list:
        print('==> No backup to be deleted.')
        return None

    # print the list of files to be kept
    print('\n==> {} backups will be kept:'.format(len(white_list)))
    for date_id in white_list:
        date_formated = backup.target.parse_timestamp(date_id)
        print('\n    ID: {} (from {})'.format(date_id, date_formated))
        for f in backup.by_timestamp(date_id):
            print('    {}{}'.format(backup.target.path, f))

    # print the list of files to be deleted
    delete_list = list()
    print('\n==> {} backups will be deleted:'.format(len(black_list)))
    for date_id in black_list:
        date_formated = backup.target.parse_timestamp(date_id)
        print('\n    ID: {} (from {})'.format(date_id, date_formated))
        for f in backup.by_timestamp(date_id):
            print('    {}{}'.format(backup.target.path, f))
            delete_list.append(f)

    # delete
    confirm = Confirm(assume_yes)
    if confirm.ask():
        for name in delete_list:
            backup.target.delete_file(name)
            print('    {} deleted.'.format(name))
    backup.close_ftp()
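
The white/black split itself comes from BackupAutoClean; a minimal sketch of exercising it directly, assuming only what the call above shows (it accepts an iterable of timestamp strings and exposes white_list and black_list):

# hypothetical timestamps, all years in the past: per the docstring rules,
# only the most recent backup of each year should be kept
cleaning = BackupAutoClean(('20140425202739', '20130729044443', '20130808133229'))
print(cleaning.white_list)  # expected to hold 20140425202739 and 20130808133229
print(cleaning.black_list)  # expected to hold only 20130729044443
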
Example #4
def autoclean(assume_yes=False):
    """
    Remove a series of backup files based on the following rules:
    * Keeps all the backups from the last 7 days
    * Keeps the most recent backup from each week of the last month
    * Keeps the most recent backup from each month of the last year
    * Keeps the most recent backup from each year of the remaining years
    """

    # check if there are backups
    backup = Backup()
    backup.files = tuple(backup.target.get_files())
    if not backup.files:
        click.echo("==> No backups found.")
        return None

    # get black and white list
    cleaning = BackupAutoClean(backup.get_timestamps())
    white_list = cleaning.white_list
    black_list = cleaning.black_list
    if not black_list:
        click.echo("==> No backup to be deleted.")
        return None

    # print the list of files to be kept
    click.echo(f"\n==> {len(white_list)} backups will be kept:")
    for date_id in white_list:
        date_formated = backup.target.parse_timestamp(date_id)
        click.echo(f"\n    ID: {date_id} (from {date_formated})")
        for f in backup.by_timestamp(date_id):
            click.echo(f"    {backup.target.path}{f}")

    # print the list of files to be deleted
    delete_list = list()
    click.echo(f"\n==> {len(black_list)} backups will be deleted:")
    for date_id in black_list:
        date_formated = backup.target.parse_timestamp(date_id)
        click.echo(f"\n    ID: {date_id} (from {date_formated})")
        for f in backup.by_timestamp(date_id):
            click.echo(f"    {backup.target.path}{f}")
            delete_list.append(f)

    # delete
    confirm = Confirm(assume_yes)
    if confirm.ask():
        for name in delete_list:
            backup.target.delete_file(name)
            click.echo(f"    {name} deleted.")
    backup.close_ftp()
Example #5
class TestBackup(TestCase):

    FILES = (
        'BRA-19940704123000-USA.gz',
        'BRA-19940709163000-NED.gz',
        'BRA-19940713123000-SWE.gz',
        'BRA-19940717123000-ITA.gz',
    )

    @patch.object(LocalTools, 'normalize_path')
    @patch('flask_alchemydumps.backup.decouple.config')
    def setUp(self, mock_config, mock_path):
        # (respectively: FTP server, FTP user, FTP password, FTP path, local
        # directory for backups and file prefix)
        mock_config.side_effect = (None, None, None, None, 'foobar', 'BRA')
        self.backup = Backup()
        self.backup.files = self.FILES

    @patch.object(LocalTools, 'normalize_path')
    def test_get_timestamps(self, mock_path):
        expected = [
            '19940704123000',
            '19940709163000',
            '19940713123000',
            '19940717123000'
        ]
        self.assertEqual(expected, self.backup.get_timestamps())

    @patch.object(LocalTools, 'normalize_path')
    def test_by_timestamp(self, mock_path):
        expected = ['BRA-19940717123000-ITA.gz']
        self.assertEqual(expected,
                         list(self.backup.by_timestamp('19940717123000')))

    @patch.object(LocalTools, 'normalize_path')
    def test_valid(self, mock_path):
        self.assertTrue(self.backup.valid('19940704123000'))
        self.assertFalse(self.backup.valid('19980712210000'))

    @patch.object(LocalTools, 'normalize_path')
    def test_get_name(self, mock_path):
        expected = 'BRA-{}-GER.gz'.format(self.backup.target.TIMESTAMP)
        self.assertEqual(expected, self.backup.get_name('GER'))
Example #7
def history():
    """List existing backups"""

    backup = Backup()
    backup.files = tuple(backup.target.get_files())

    # if no files
    if not backup.files:
        print('==> No backups found at {}.'.format(backup.target.path))
        return None

    # create output
    timestamps = backup.get_timestamps()
    groups = [{'id': i, 'files': backup.by_timestamp(i)} for i in timestamps]
    for output in groups:
        if output['files']:
            date_formated = backup.target.parse_timestamp(output['id'])
            print('\n==> ID: {} (from {})'.format(output['id'], date_formated))
            for file_name in output['files']:
                print('    {}{}'.format(backup.target.path, file_name))
    print('')
    backup.close_ftp()
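
history() is a thin wrapper around three Backup calls; the TestBackup examples elsewhere in this collection pin down what each of them returns. A rough sketch of the same flow, assuming the string-based behaviour from those tests (newer versions yield pathlib.Path objects instead of plain names):

backup = Backup()
backup.files = ('BRA-19940704123000-USA.gz', 'BRA-19940717123000-ITA.gz')
backup.get_timestamps()                      # ['19940704123000', '19940717123000']
list(backup.by_timestamp('19940717123000'))  # ['BRA-19940717123000-ITA.gz']
backup.valid('19940704123000')               # True
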
Example #8
def history():
    """List existing backups"""

    backup = Backup()
    backup.files = tuple(backup.target.get_files())

    # if no files
    if not backup.files:
        click.echo(f"==> No backups found at {backup.target.path}.")
        return None

    # create output
    timestamps = backup.get_timestamps()
    groups = [{"id": i, "files": backup.by_timestamp(i)} for i in timestamps]
    for output in groups:
        if output["files"]:
            date_formated = backup.target.parse_timestamp(output["id"])
            click.echo(f"\n==> ID: {output['id']} (from {date_formated})")
            for file_name in output["files"]:
                click.echo(f"    {backup.target.path}{file_name}")
    click.echo("")
    backup.close_ftp()
Example #9
    def test_create_restore_remove(self, mock_path):
        mock_path.return_value = self.dir + os.sep

        with app.app_context():

            # assert data was inserted
            posts = Post.query.count()
            authors = User.query.count()
            controls = SomeControl.query.count()
            comments = Comments.query.count()
            self.assertEqual(posts, 2)
            self.assertEqual(authors, 1)
            self.assertEqual(controls, 1)
            self.assertEqual(comments, 0)

            # create and assert backup files
            # self.subprocess_run('create')
            create()
            backup = Backup()
            backup.files = backup.target.get_files()
            self.assertEqual(len(list(backup.files)), 4)

            # clean up database
            self.db.drop_all()
            self.db.create_all()

            # assert database is empty
            posts = Post.query.count()
            authors = User.query.count()
            controls = SomeControl.query.count()
            comments = Comments.query.count()
            self.assertEqual(posts, 0)
            self.assertEqual(authors, 0)
            self.assertEqual(controls, 0)
            self.assertEqual(comments, 0)

            # restore backup
            backup.files = backup.target.get_files()
            date_id = backup.get_timestamps()
            restore(date_id[0])

            # assert data was restored
            posts = Post.query.count()
            authors = User.query.count()
            controls = SomeControl.query.count()
            comments = Comments.query.count()
            self.assertEqual(posts, 2)
            self.assertEqual(authors, 1)
            self.assertEqual(controls, 1)
            self.assertEqual(comments, 0)

            # assert data is accurate
            posts = Post.query.all()
            for num in range(1):
                self.assertEqual(posts[num].author.email, '*****@*****.**')
                self.assertEqual(posts[num].title, u'Post {}'.format(num + 1))
                self.assertEqual(posts[num].content, u'Lorem ipsum...')

            # remove backup
            remove(date_id[0], True)

            # assert there is no backup left
            backup = Backup()
            backup.files = backup.target.get_files()
            self.assertEqual(len(tuple(backup.files)), 0)
Example #10
class TestCommands(TestCase):
    def setUp(self):

        # create database
        self.db = db
        self.db.create_all()

        # feed user table
        db.session.add(User(email="*****@*****.**"))
        db.session.commit()

        # feed post table
        db.session.add(
            Post(title="Post 1", content="Lorem ipsum...", author_id=1))
        db.session.add(
            Post(title="Post 2", content="Ipsum lorem...", author_id=1))

        # feed some control table
        db.session.add(SomeControl(uuid="1"))

        # commit
        db.session.commit()

        # temp directory & envvar
        self.tmp = TemporaryDirectory()
        self.backup_alchemydumps_dir = environ.get("ALCHEMYDUMPS_DIR")
        environ["ALCHEMYDUMPS_DIR"] = self.tmp.name

        # main object
        self.backup = Backup()

    def tearDown(self):
        self.db.drop_all()
        self.tmp.cleanup()
        del environ["ALCHEMYDUMPS_DIR"]
        if self.backup_alchemydumps_dir:
            environ["ALCHEMYDUMPS_DIR"] = self.backup_alchemydumps_dir

    @staticmethod
    def runner(command, args=""):
        obj = ScriptInfo(app_import_path="tests/integration/app.py")
        return CliRunner().invoke(command, args=args, obj=obj)

    def test_create_restore_remove(self):

        # assert data was inserted
        posts = Post.query.count()
        authors = User.query.count()
        controls = SomeControl.query.count()
        comments = Comments.query.count()
        self.assertEqual(posts, 2)
        self.assertEqual(authors, 1)
        self.assertEqual(controls, 1)
        self.assertEqual(comments, 0)

        # create and assert backup files
        self.runner(create)
        self.backup.files = tuple(self.backup.target.get_files())
        self.assertEqual(len(self.backup.files), 4)

        # clean up database
        self.db.drop_all()
        self.db.create_all()

        # assert database is empty
        posts = Post.query.count()
        authors = User.query.count()
        controls = SomeControl.query.count()
        comments = Comments.query.count()
        self.assertEqual(posts, 0)
        self.assertEqual(authors, 0)
        self.assertEqual(controls, 0)
        self.assertEqual(comments, 0)

        # restore backup
        self.backup.files = tuple(self.backup.target.get_files())
        date_id, *_ = self.backup.get_timestamps()
        self.runner(restore, f"-d {date_id}")

        # assert data was restored
        posts = Post.query.count()
        authors = User.query.count()
        controls = SomeControl.query.count()
        comments = Comments.query.count()
        self.assertEqual(posts, 2)
        self.assertEqual(authors, 1)
        self.assertEqual(controls, 1)
        self.assertEqual(comments, 0)

        # assert data is accurate
        post, *_ = Post.query.all()
        self.assertEqual(post.author.email, "*****@*****.**")
        self.assertEqual(post.title, "Post 1")
        self.assertEqual(post.content, "Lorem ipsum...")

        # remove backup
        self.runner(remove, f"-d {date_id} -y")

        # assert there is no backup left
        self.backup.files = tuple(self.backup.target.get_files())
        self.assertEqual(len(self.backup.files), 0)

    def test_autoclean(self):

        # create fake backup dir
        date_ids = (
            "20110824045557",
            "20100106120931",
            "20090728192328",
            "20070611074712",
            "20130729044443",
            "20070611090332",
            "20090927181422",
            "20060505063150",
            "20090608052756",
            "20050413201344",
            "20111015194547",
            "20090711221957",
            "20140425202739",
            "20130808133229",
            "20120111210958",
            "20120419224811",
            "20060519170013",
            "20090111042034",
            "20100112115416",
        )
        classes = ("Post", "User", "SomeControl", "Comments")
        for date_id in date_ids:
            for class_name in classes:
                name = self.backup.get_name(class_name, date_id)
                self.backup.target.create_file(name, b"")

        # assert files were created
        self.backup.files = tuple(self.backup.target.get_files())
        self.assertEqual(len(self.backup.files), len(classes) * len(date_ids))

        # run auto clean
        self.runner(autoclean, "-y")

        # assert some files were deleted
        self.backup.files = tuple(self.backup.target.get_files())
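        # all sample timestamps are years old, so only the most recent backup
        # of each year is expected to survive the auto clean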
        white_list = (
            "20140425202739",
            "20130808133229",
            "20120419224811",
            "20111015194547",
            "20100112115416",
            "20090927181422",
            "20070611090332",
            "20060519170013",
            "20050413201344",
        )
        self.assertEqual(len(self.backup.files),
                         len(classes) * len(white_list))
        self.assertEqual(sorted(white_list),
                         sorted(self.backup.get_timestamps()))