def test_reporting_reverse(self):
        """Progress reporting works when the queryset is reversed."""
        Author.objects.create(id=10000)

        with captured_stdout() as output:
            qs = Author.objects.reverse()
            chunk_iter = qs.iter_smart_chunks(report_progress=True,
                                              chunk_min=100)
            for authors in chunk_iter:
                list(authors)  # fetch them

        captured_lines = output.getvalue().split('\n')

        # \r-separated in-place progress updates all live on line one.
        reports = captured_lines[0].split('\r')
        assert len(reports) > 0
        for report in reports:
            matched = re.match(
                r"^Author SmartChunkedIterator processed \d+/11 objects "
                r"\(\d+\.\d+%\) in \d+ chunks"
                r"(; lowest pk so far \d+(, [\dhms]+ remaining)?)?( )*$",
                report,
            )
            assert matched

        assert re.match(
            r'Finished! Iterated over \d+ objects? in [\dhms]+.',
            captured_lines[1],
        )
# Example #2
    def test_reporting_reverse(self):
        """Progress output is well-formed for a reversed queryset.

        The pattern optionally allows a "lowest pk so far" clause, which is
        what a reverse iteration is expected to report.
        """
        Author.objects.create(id=10000)

        with captured_stdout() as output:
            reversed_qs = Author.objects.reverse()
            for chunk in reversed_qs.iter_smart_chunks(
                    report_progress=True, chunk_min=100):
                list(chunk)  # fetch them

        lines = output.getvalue().split('\n')

        # The first output line holds all \r-separated progress updates.
        reports = lines[0].split('\r')
        assert len(reports) > 0
        progress_re = re.compile(
            r"^Author SmartChunkedIterator processed \d+/11 objects "
            r"\(\d+\.\d+%\) in \d+ chunks"
            r"(; lowest pk so far \d+(, [\dhms]+ remaining)?)?( )*$"
        )
        for report in reports:
            assert progress_re.match(report)

        assert re.match(
            r'Finished! Iterated over \d+ objects? in [\dhms]+.',
            lines[1],
        )
# Example #3
 def test_basic(self):
     """pt_visual_explain produces plausible output for a simple query."""
     with captured_stdout() as capture:
         Author.objects.all().pt_visual_explain()
     explain_output = capture.getvalue()
     # Output differs between database and pt-visual-explain versions, so
     # only check that a few expected substrings are present.
     for expected in ("testapp_author", "rows", "Table"):
         assert expected in explain_output
 def test_basic(self):
     """Smoke test: pt_visual_explain mentions the table and plan fields."""
     with captured_stdout() as capture:
         Author.objects.all().pt_visual_explain()
     result = capture.getvalue()
     # We cannot pin the exact text — different database and
     # pt-visual-explain versions format their output differently.
     assert "testapp_author" in result
     assert "rows" in result
     assert "Table" in result
# Example #5
    def test_filter_and_delete(self):
        """SmartIterator can drive per-object deletion of a filtered set."""
        for name in ("Alpha", "pants", "Beta", "pants"):
            VanillaAuthor.objects.create(name=name)

        bad_authors = VanillaAuthor.objects.filter(name="pants")
        assert bad_authors.count() == 2

        # Progress output goes to stdout; we only care about the deletions.
        with captured_stdout():
            for author in SmartIterator(bad_authors, report_progress=True):
                author.delete()

        assert bad_authors.count() == 0
    def test_filter_and_delete(self):
        """Deleting every object yielded by SmartIterator empties the qs."""
        VanillaAuthor.objects.create(name="Alpha")
        VanillaAuthor.objects.create(name="pants")
        VanillaAuthor.objects.create(name="Beta")
        VanillaAuthor.objects.create(name="pants")

        to_delete = VanillaAuthor.objects.filter(name="pants")
        assert to_delete.count() == 2

        # Swallow the progress report; only the side effect matters here.
        with captured_stdout():
            for victim in SmartIterator(to_delete, report_progress=True):
                victim.delete()

        assert to_delete.count() == 0
# Example #7
    def test_reporting_with_total(self):
        """Progress reports honour an explicitly supplied ``total`` of 4.

        Each carriage-return-separated progress report on the first output
        line must match the "<n>/4 objects" format, and the second line must
        be the plain 'Finished!' message.
        """
        with captured_stdout() as output:
            qs = Author.objects.all()
            for authors in qs.iter_smart_chunks(report_progress=True, total=4):
                list(authors)  # fetch them

        lines = output.getvalue().split('\n')

        reports = lines[0].split('\r')
        for report in reports:
            # BUG FIX: the pattern previously read "AuthorSmartChunkedIterator"
            # (no space), which can never match — every other copy of this
            # assertion in this file uses "Author SmartChunkedIterator".
            assert re.match(
                r"^Author SmartChunkedIterator processed \d+/4 objects "
                r"\(\d+\.\d+%\) in \d+ chunks(; highest pk so far \d+)?$",
                report
            )

        assert lines[1] == 'Finished!'
# Example #8
    def test_reporting_with_total(self):
        """Progress reporting uses a caller-supplied ``total`` of 4."""
        with captured_stdout() as output:
            for chunk in Author.objects.all().iter_smart_chunks(
                    report_progress=True, total=4):
                list(chunk)  # fetch them

        lines = output.getvalue().split('\n')

        # Line one holds the \r-separated in-place progress updates.
        pattern = (
            r"^Author SmartChunkedIterator processed \d+/4 objects "
            r"\(\d+\.\d+%\) in \d+ chunks"
            r"(; highest pk so far \d+(, [\dhms]+ remaining)?)?$"
        )
        for report in lines[0].split('\r'):
            assert re.match(pattern, report)

        assert re.match(
            r'Finished! Iterated over \d+ objects? in [\dhms]+.',
            lines[1],
        )
    def test_mysql_cache_migration(self):
        """The generated cache migration lints cleanly and round-trips."""
        stdout = StringIO()
        call_command('mysql_cache_migration', stdout=stdout)
        migration_source = stdout.getvalue()

        # Lint the generated source; check_code prints its findings to
        # stdout, so capture them for the failure message.
        with captured_stdout() as lint_capture:
            errors = check_code(migration_source)
        assert errors == 0, (
            "Encountered {} errors whilst trying to lint the mysql cache "
            "migration.\nMigration:\n\n{}\n\nLint errors:\n\n{}"
            .format(errors, migration_source, lint_capture.getvalue())
        )

        # Dynamically import the generated source and sanity-check its shape.
        module = imp.new_module('0001_add_cache_tables')
        six.exec_(migration_source, module.__dict__)
        assert hasattr(module, 'Migration')
        migration = module.Migration
        assert hasattr(migration, 'dependencies')
        assert hasattr(migration, 'operations')

        # All caches share the same table name, so there should be exactly
        # one operation.
        assert len(migration.operations) == 1

        op = migration.operations[0]
        assert not self.table_exists(self.table_name)

        # Forwards should create the cache table...
        state = ProjectState()
        new_state = state.clone()
        with connection.schema_editor() as editor:
            op.database_forwards("testapp", editor, state, new_state)
        assert self.table_exists(self.table_name)

        # ...and backwards should drop it again.
        new_state = state.clone()
        with connection.schema_editor() as editor:
            op.database_backwards("testapp", editor, new_state, state)
        assert not self.table_exists(self.table_name)
    def test_reporting(self):
        """Default progress reporting emits well-formed reports."""
        with captured_stdout() as output:
            for chunk in Author.objects.all().iter_smart_chunks(
                    report_progress=True):
                list(chunk)  # fetch them

        lines = output.getvalue().split('\n')

        # Progress updates are \r-separated on the first output line.
        expected = (
            r"^Author SmartChunkedIterator processed \d+/10 objects "
            r"\(\d+\.\d+%\) in \d+ chunks"
            r"(; highest pk so far \d+(, [\dhms]+ remaining)?)?$"
        )
        for report in lines[0].split('\r'):
            assert re.match(expected, report)

        assert re.match(
            r'Finished! Iterated over \d+ objects? in [\dhms]+.',
            lines[1],
        )
# Example #11
    def test_reporting_on_uncounted_qs(self):
        """Progress reads '???' (or 0) when objects are deleted, not fetched.

        Chunk-wise ``delete()`` means the objects never reach python, so the
        iterator cannot know how many it processed.
        """
        Author.objects.create(name="pants")

        with captured_stdout() as output:
            qs = Author.objects.filter(name="pants")
            for authors in qs.iter_smart_chunks(report_progress=True):
                authors.delete()

        lines = output.getvalue().split('\n')

        reports = lines[0].split('\r')
        for report in reports:
            # We should have ??? since the deletion means the objects
            # aren't fetched into python.
            # BUG FIX: the pattern previously read "AuthorSmartChunkedIterator"
            # (no space), which can never match — the other copy of this test
            # in this file uses the spaced "Author SmartChunkedIterator" form.
            assert re.match(
                r"Author SmartChunkedIterator processed (0|\?\?\?)/1 objects "
                r"\(\d+\.\d+%\) in \d+ chunks(; highest pk so far \d+)?",
                report
            )

        assert lines[1] == 'Finished!'
    def test_mysql_cache_migration(self):
        """Generated cache migration is lint-clean and migrates both ways."""
        captured = StringIO()
        call_command('mysql_cache_migration', stdout=captured)
        source = captured.getvalue()

        # check_code writes its messages to stdout; keep them for the
        # assertion message below.
        with captured_stdout() as lint_out:
            error_count = check_code(source)
        assert error_count == 0, (
            "Encountered {} errors whilst trying to lint the mysql cache "
            "migration.\nMigration:\n\n{}\n\nLint errors:\n\n{}".format(
                error_count, source, lint_out.getvalue()))

        # Execute the generated module source and verify its structure.
        mod = imp.new_module('0001_add_cache_tables')
        six.exec_(source, mod.__dict__)
        assert hasattr(mod, 'Migration')
        migration_class = mod.Migration
        assert hasattr(migration_class, 'dependencies')
        assert hasattr(migration_class, 'operations')

        # Every cache shares one table name, hence a single operation.
        assert len(migration_class.operations) == 1

        create_op = migration_class.operations[0]
        assert not self.table_exists(self.table_name)

        before = ProjectState()
        after = before.clone()
        with connection.schema_editor() as editor:
            # Applying forwards must create the table.
            create_op.database_forwards("testapp", editor, before, after)
        assert self.table_exists(self.table_name)

        after = before.clone()
        with connection.schema_editor() as editor:
            # Applying backwards must drop it again.
            create_op.database_backwards("testapp", editor, after, before)
        assert not self.table_exists(self.table_name)
# Example #13
    def test_reporting_on_uncounted_qs(self):
        """Reports show '???' when chunks are deleted rather than fetched."""
        Author.objects.create(name="pants")

        with captured_stdout() as output:
            pants_authors = Author.objects.filter(name="pants")
            for chunk in pants_authors.iter_smart_chunks(report_progress=True):
                chunk.delete()

        lines = output.getvalue().split('\n')

        # We should have ??? since the deletion means the objects aren't
        # fetched into python.
        report_pattern = (
            r"Author SmartChunkedIterator processed (0|\?\?\?)/1 objects "
            r"\(\d+\.\d+%\) in \d+ chunks(; highest pk so far \d+)?"
        )
        for report in lines[0].split('\r'):
            assert re.match(report_pattern, report)

        assert re.match(
            r'Finished! Iterated over \?\?\? objects? in [\dhms]+.',
            lines[1],
        )