Example #1
def restore(date_id):
    """Restore a backup based on the date part of the backup files"""

    alchemy = AlchemyDumpsDatabase()
    backup = Backup()

    # loop through mapped classes
    for mapped_class in alchemy.get_mapped_classes():
        class_name = mapped_class.__name__
        name = backup.get_name(class_name, date_id)
        if op.exists(op.join(backup.target.path, name)):

            # read file contents
            contents = backup.target.read_file(name)
            fails = list()

            # restore to the db
            db = alchemy.db()
            for row in alchemy.parse_data(contents):
                try:
                    db.session.merge(row)
                    db.session.commit()
                except (IntegrityError, InvalidRequestError):
                    db.session.rollback()
                    fails.append(row)

            # print summary
            status = "partially" if len(fails) else "totally"
            print("==> {} {} restored.".format(name, status))
            for f in fails:
                print("    Restore of {} failed.".format(f))
        else:
            system("ls alchemydumps-backups")
            msg = "==> No file found for {} ({}{} does not exist)."
            print(msg.format(class_name, backup.target.path, name))
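A minimal usage sketch for restore(), following the pattern of Example #12 below; 'app' stands for a Flask application already configured with Flask-SQLAlchemy and Flask-AlchemyDumps, and the timestamp is a placeholder:

# Hypothetical invocation; the date id would normally come from
# Backup.get_timestamps() on an existing backup set.
with app.app_context():
    restore('19940704123000')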
Example #2
    def test_close_connection(self, mock_config, mock_ftp, mock_path):
        mock_config.side_effect = self.CONFIG
        mock_ftp.return_value = MagicMock()
        mock_ftp.return_value.cwd.return_value = '250 foobar'

        backup = Backup()
        backup.close_ftp()

        self.assertEqual(6, mock_config.call_count)
        mock_ftp.assert_called_once_with('server', 'user', None)
        mock_ftp.return_value.quit.assert_called_once_with()
Example #3
def create():
    """Create a backup based on SQLAlchemy mapped classes"""

    # create backup files
    alchemy = AlchemyDumpsDatabase()
    data = alchemy.get_data()
    backup = Backup()
    for class_name in data.keys():
        name = backup.get_name(class_name)
        full_path = backup.target.create_file(name, data[class_name])
        rows = len(alchemy.parse_data(data[class_name]))
        if full_path:
            print("==> {} rows from {} saved as {}".format(
                rows, class_name, full_path))
        else:
            print("==> Error creating {} at {}".format(name,
                                                       backup.target.path))
    backup.close_ftp()
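A minimal sketch of calling create() and checking the generated files, mirroring the usage in Example #12 below ('app' is a placeholder for a configured Flask application):

# Hypothetical invocation: create a backup, then count the files written.
with app.app_context():
    create()
    backup = Backup()
    backup.files = backup.target.get_files()
    print(len(list(backup.files)))  # expect one file per mapped class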
Example #4
    def test_unsuccessful_connection(self, mock_config, mock_ftp, mock_path):
        mock_config.side_effect = self.CONFIG
        mock_ftp.side_effect = error_perm

        backup = Backup()

        self.assertEqual(6, mock_config.call_count)
        mock_ftp.assert_called_once_with('server', 'user', None)
        self.assertFalse(mock_ftp.return_value.cwd.called)
        self.assertFalse(backup.ftp)
Example #5
    def test_ftp_with_wrong_path(self, mock_config, mock_ftp, mock_path):
        mock_config.side_effect = self.CONFIG
        mock_ftp.return_value = MagicMock()
        mock_ftp.return_value.cwd.return_value = '404 foobar'

        backup = Backup()

        self.assertEqual(6, mock_config.call_count)
        mock_ftp.assert_called_once_with('server', 'user', None)
        mock_ftp.return_value.cwd.assert_called_once_with('foobar')
        self.assertFalse(backup.ftp)
Example #6
    def test_successful_connection(self, mock_config, mock_ftp):
        mock_config.side_effect = self.CONFIG
        mock_ftp.return_value = MagicMock()
        mock_ftp.return_value.cwd.return_value = '250 foobar'

        backup = Backup()

        self.assertEqual(6, mock_config.call_count)
        mock_ftp.assert_called_once_with('server', 'user', None)
        mock_ftp.return_value.cwd.assert_called_once_with('foobar')
        self.assertTrue(backup.ftp)
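The connection tests above are fragments and omit their patch decorators. Judging from Example #7 below, the stack probably resembles the sketch here; the decouple.config and LocalTools.normalize_path targets appear in the source, but the FTP patch target is an assumption:

# Hypothetical decorator stack; the FTP target below is a guess, not
# taken from the source. Decorators apply bottom-up, so the mocks are
# passed as (mock_config, mock_ftp, mock_path).
@patch.object(LocalTools, 'normalize_path')
@patch('flask_alchemydumps.backup.FTP')  # assumed import path
@patch('flask_alchemydumps.backup.decouple.config')
def test_close_connection(self, mock_config, mock_ftp, mock_path):
    ...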
Example #7
class TestBackup(TestCase):

    FILES = (
        'BRA-19940704123000-USA.gz',
        'BRA-19940709163000-NED.gz',
        'BRA-19940713123000-SWE.gz',
        'BRA-19940717123000-ITA.gz',
    )

    @patch.object(LocalTools, 'normalize_path')
    @patch('flask_alchemydumps.backup.decouple.config')
    def setUp(self, mock_config, mock_path):
        # (respectively: FTP server, FTP user, FTP password, FTP path, local
        # directory for backups and file prefix)
        mock_config.side_effect = (None, None, None, None, 'foobar', 'BRA')
        self.backup = Backup()
        self.backup.files = self.FILES

    @patch.object(LocalTools, 'normalize_path')
    def test_get_timestamps(self, mock_path):
        expected = [
            '19940704123000', '19940709163000', '19940713123000',
            '19940717123000'
        ]
        self.assertEqual(expected, self.backup.get_timestamps())

    @patch.object(LocalTools, 'normalize_path')
    def test_by_timestamp(self, mock_path):
        expected = ['BRA-19940717123000-ITA.gz']
        self.assertEqual(expected,
                         list(self.backup.by_timestamp('19940717123000')))

    @patch.object(LocalTools, 'normalize_path')
    def test_valid(self, mock_path):
        self.assertTrue(self.backup.valid('19940704123000'))
        self.assertFalse(self.backup.valid('19980712210000'))

    @patch.object(LocalTools, 'normalize_path')
    def test_get_name(self, mock_path):
        expected = 'BRA-{}-GER.gz'.format(self.backup.target.TIMESTAMP)
        self.assertEqual(expected, self.backup.get_name('GER'))
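From test_get_name above, backup file names follow the pattern <prefix>-<timestamp>-<class name>.gz. A small illustrative check using one of the sample names from FILES:

# Illustrative only: decompose one of the sample file names.
name = 'BRA-19940704123000-USA.gz'
prefix, timestamp, rest = name.split('-')
assert prefix == 'BRA'        # file prefix from configuration
assert len(timestamp) == 14   # YYYYMMDDHHMMSS
assert rest == 'USA.gz'       # mapped class name plus extension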
Example #8
def autoclean(assume_yes=False):
    """
    Remove a series of backup files based on the following rules:
    * Keeps all the backups from the last 7 days
    * Keeps the most recent backup from each week of the last month
    * Keeps the most recent backup from each month of the last year
    * Keeps the most recent backup from each year of the remaining years
    """

    # check if there are backups
    backup = Backup()
    backup.files = tuple(backup.target.get_files())
    if not backup.files:
        print("==> No backups found.")
        return None

    # get black and white list
    cleaning = BackupAutoClean(backup.get_timestamps())
    white_list = cleaning.white_list
    black_list = cleaning.black_list
    if not black_list:
        print("==> No backup to be deleted.")
        return None

    # print the list of files to be kept
    print("\n==> {} backups will be kept:".format(len(white_list)))
    for date_id in white_list:
        date_formatted = backup.target.parse_timestamp(date_id)
        print("\n    ID: {} (from {})".format(date_id, date_formatted))
        for f in backup.by_timestamp(date_id):
            print("    {}{}".format(backup.target.path, f))

    # print the list of files to be deleted
    delete_list = list()
    print("\n==> {} backups will be deleted:".format(len(black_list)))
    for date_id in black_list:
        date_formatted = backup.target.parse_timestamp(date_id)
        print("\n    ID: {} (from {})".format(date_id, date_formatted))
        for f in backup.by_timestamp(date_id):
            print("    {}{}".format(backup.target.path, f))
            delete_list.append(f)

    # delete
    confirm = Confirm(assume_yes)
    if confirm.ask():
        for name in delete_list:
            backup.target.delete_file(name)
            print("    {} deleted.".format(name))
    backup.close_ftp()
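A minimal sketch of running autoclean non-interactively, as Example #13 below does; passing True skips the confirmation prompt ('app' is a placeholder):

# Hypothetical invocation: apply the retention rules without prompting.
with app.app_context():
    autoclean(True)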
Example #9
def history():
    """List existing backups"""

    backup = Backup()
    backup.files = tuple(backup.target.get_files())

    # if no files
    if not backup.files:
        print("==> No backups found at {}.".format(backup.target.path))
        return None

    # create output
    timestamps = backup.get_timestamps()
    groups = [{"id": i, "files": backup.by_timestamp(i)} for i in timestamps]
    for output in groups:
        if output["files"]:
            date_formatted = backup.target.parse_timestamp(output["id"])
            print("\n==> ID: {} (from {})".format(output["id"], date_formatted))
            for file_name in output["files"]:
                print("    {}{}".format(backup.target.path, file_name))
    print("")
    backup.close_ftp()
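history() can be exercised the same way to list the existing backups (sketch; 'app' is a placeholder):

# Prints one group of files per timestamp found in the backup target.
with app.app_context():
    history()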
Example #10
def remove(date_id, assume_yes=False):
    """Remove a series of backup files based on the date part of the files"""

    # check if date/id is valid
    backup = Backup()
    if backup.valid(date_id):

        # List files to be deleted
        delete_list = tuple(backup.by_timestamp(date_id))
        print("==> Do you want to delete the following files?")
        for name in delete_list:
            print("    {}{}".format(backup.target.path, name))

        # delete
        confirm = Confirm(assume_yes)
        if confirm.ask():
            for name in delete_list:
                backup.target.delete_file(name)
                print("    {} deleted.".format(name))
    backup.close_ftp()
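A minimal sketch combining Backup.get_timestamps() with remove(), mirroring Example #12 below ('app' is a placeholder; the second argument skips the confirmation prompt):

# Hypothetical invocation: delete the files for the first listed date id.
with app.app_context():
    backup = Backup()
    backup.files = tuple(backup.target.get_files())
    date_ids = backup.get_timestamps()
    if date_ids:
        remove(date_ids[0], True)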
Example #11
    def setUp(self, mock_config, mock_path):
        # (respectively: FTP server, FTP user, FTP password, FTP path, local
        # directory for backups and file prefix)
        mock_config.side_effect = (None, None, None, None, 'foobar', 'BRA')
        self.backup = Backup()
        self.backup.files = self.FILES
Example #12
    def test_create_restore_remove(self, mock_path):
        mock_path.return_value = self.dir + os.sep

        with app.app_context():

            # assert data was inserted
            posts = Post.query.count()
            authors = User.query.count()
            controls = SomeControl.query.count()
            comments = Comments.query.count()
            self.assertEqual(posts, 2)
            self.assertEqual(authors, 1)
            self.assertEqual(controls, 1)
            self.assertEqual(comments, 0)

            # create and assert backup files
            # self.subprocess_run('create')
            create()
            backup = Backup()
            backup.files = backup.target.get_files()
            self.assertEqual(len(list(backup.files)), 4)

            # clean up database
            self.db.drop_all()
            self.db.create_all()

            # assert database is empty
            posts = Post.query.count()
            authors = User.query.count()
            controls = SomeControl.query.count()
            comments = Comments.query.count()
            self.assertEqual(posts, 0)
            self.assertEqual(authors, 0)
            self.assertEqual(controls, 0)
            self.assertEqual(comments, 0)

            # restore backup
            backup.files = backup.target.get_files()
            date_id = backup.get_timestamps()
            restore(date_id[0])

            # assert data was restored
            posts = Post.query.count()
            authors = User.query.count()
            controls = SomeControl.query.count()
            comments = Comments.query.count()
            self.assertEqual(posts, 2)
            self.assertEqual(authors, 1)
            self.assertEqual(controls, 1)
            self.assertEqual(comments, 0)

            # assert data is accurate
            posts = Post.query.all()
            for num in range(1):
                self.assertEqual(posts[num].author.email, '*****@*****.**')
                self.assertEqual(posts[num].title, u'Post {}'.format(num + 1))
                self.assertEqual(posts[num].content, u'Lorem ipsum...')

            # remove backup
            remove(date_id[0], True)

            # assert there is no backup left
            backup = Backup()
            backup.files = backup.target.get_files()
            self.assertEqual(len(tuple(backup.files)), 0)
Example #13
    def test_autoclean(self, mock_path):
        mock_path.return_value = self.dir + os.sep

        with app.app_context():

            # create fake backup dir
            backup = Backup()
            date_ids = [
                '20110824045557', '20100106120931', '20090728192328',
                '20070611074712', '20130729044443', '20070611090332',
                '20090927181422', '20060505063150', '20090608052756',
                '20050413201344', '20111015194547', '20090711221957',
                '20140425202739', '20130808133229', '20120111210958',
                '20120419224811', '20060519170013', '20090111042034',
                '20100112115416'
            ]
            class_names = ['Post', 'User', 'SomeControl', 'Comments']
            for date_id in date_ids:
                for class_name in class_names:
                    name = backup.get_name(class_name, date_id)
                    backup.target.create_file(name, ''.encode())

            # assert files were created
            history()
            backup = Backup()
            backup.files = backup.target.get_files()
            expected_count = len(class_names) * len(date_ids)
            self.assertEqual(len(list(backup.files)), expected_count)

            # run auto clean
            autoclean(True)

            # assert some files were deleted
            backup = Backup()
            backup.files = backup.target.get_files()
            white_list = [
                '20140425202739', '20130808133229', '20120419224811',
                '20111015194547', '20100112115416', '20090927181422',
                '20070611090332', '20060519170013', '20050413201344'
            ]
            expected_count = len(class_names) * len(white_list)
            self.assertEqual(len(list(backup.files)), expected_count)

            # assert only white listed files exists,
            # and only black listed were deleted
            backup = Backup()
            backup.files = tuple(backup.target.get_files())
            self.assertEqual(sorted(white_list),
                             sorted(backup.get_timestamps()))

            # clean up to avoid messing up other tests
            backup = Backup()
            backup.files = backup.target.get_files()
            for name in backup.files:
                backup.target.delete_file(name)
            backup = Backup()
            backup.files = backup.target.get_files()
            self.assertEqual(len(list(backup.files)), 0)