Example #1
def create_product(app, created_models, verbosity, interactive, **kwargs):
    if Product in created_models:
        call_command("loaddata", "cartridge_required.json")
        if interactive:
            confirm = raw_input("\nWould you like to install an initial "
                                "demo product and sale? (yes/no): ")
            while True:
                if confirm == "yes":
                    break
                elif confirm == "no":
                    return
                confirm = raw_input("Please enter either 'yes' or 'no': ")
        # This is a hack. Ideally, to split fixtures between optional
        # and required, we'd use the same approach Mezzanine does,
        # within a ``createdb`` management command. To do that, we'd
        # subclass Mezzanine's createdb command and shadow it, but then
        # the cartridge.shop app would need to appear *after*
        # mezzanine.core in the INSTALLED_APPS setting, while the
        # reverse order is needed for template overriding (and probably
        # other bits) to work correctly.
        # SO........... we just cheat, and check sys.argv here. Namaste.
        elif "--nodata" in sys.argv:
            return
        if verbosity >= 1:
            print
            print "Creating demo product and sale ..."
            print
        call_command("loaddata", "cartridge_optional.json")
        copy_test_to_media("cartridge.shop", "product")
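
The handler above matches the signature of Django's pre-1.7 post_syncdb signal. A minimal sketch of how such a handler is wired up, with the sender module path being an assumption:

# Hedged sketch: connect create_product to post_syncdb (Django < 1.7 only).
from django.db.models.signals import post_syncdb
from cartridge.shop import models as shop_models  # assumed sender module

post_syncdb.connect(create_product, sender=shop_models)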
Example #2
 def test_specific_serializer(self):
     data = (
         '[{"event[date]": "2019-03-14",'
         '  "event[place][name]": "Minneapolis",'
         '  "note": "Test"}]'
     )
     source = CustomSource.objects.create(json_data=data)
     Place.objects.create(name="Minneapolis")
     Identifier.objects.create(
         serializer="tests.naturalkey_app.wizard.NoteSerializer",
         field="event[place][name]",
         name="Minneapolis",
         value="Minneapolis",
         resolved=True,
     )
     call_command(
         'runwizard', 'source_app.customsource', str(source.pk),
         serializer="tests.naturalkey_app.wizard.NoteSerializer",
         username='******',
     )
     instance = Note.objects.filter(
         event__date="2019-03-14",
         event__place__name="Minneapolis",
         note="Test",
     ).first()
     self.assertTrue(instance)
Example #3
    def test_go_live_page_will_be_published(self):
        # Connect a mock signal handler to page_published signal
        signal_fired = [False]
        signal_page = [None]
        def page_published_handler(sender, instance, **kwargs):
            signal_fired[0] = True
            signal_page[0] = instance
        page_published.connect(page_published_handler)

        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            live=False,
            go_live_at=timezone.now() - timedelta(days=1),
        )
        self.root_page.add_child(instance=page)

        page.save_revision(approved_go_live_at=timezone.now() - timedelta(days=1))

        p = Page.objects.get(slug='hello-world')
        self.assertFalse(p.live)
        self.assertTrue(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

        management.call_command('publish_scheduled_pages')

        p = Page.objects.get(slug='hello-world')
        self.assertTrue(p.live)
        self.assertFalse(PageRevision.objects.filter(page=p).exclude(approved_go_live_at__isnull=True).exists())

        # Check that the page_published signal was fired
        self.assertTrue(signal_fired[0])
        self.assertEqual(signal_page[0], page)
        self.assertEqual(signal_page[0], signal_page[0].specific)
Example #4
 def setUpClass(cls):
     # Keep a record of the original lazy storage instance so we can
     # restore it afterwards. We overwrite this in the setUp method so
     # that any new settings get picked up.
     if not hasattr(cls, '_originals'):
         cls._originals = {'staticfiles_storage': storage.staticfiles_storage}
     # Make a temporary directory and copy in test files
     cls.tmp = tempfile.mkdtemp()
     settings.STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.CachedStaticFilesStorage'
     settings.STATICFILES_DIRS = [os.path.join(cls.tmp, 'static')]
     settings.STATIC_ROOT = os.path.join(cls.tmp, 'static_root')
     settings.WHITENOISE_ROOT = os.path.join(cls.tmp, 'root')
     for path, contents in TEST_FILES.items():
         path = os.path.join(cls.tmp, path.lstrip('/'))
         try:
             os.makedirs(os.path.dirname(path))
         except OSError as e:
             if e.errno != errno.EEXIST:
                 raise
         with open(path, 'wb') as f:
             f.write(contents)
     # Collect static files into STATIC_ROOT
     call_command('collectstatic', verbosity=0, interactive=False)
     # Initialize test application
     django_app = get_wsgi_application()
     cls.application = DjangoWhiteNoise(django_app)
     cls.server = TestServer(cls.application)
     super(DjangoWhiteNoiseTest, cls).setUpClass()
Example #5
    def test_expired_page_will_be_unpublished(self):
        # Connect a mock signal handler to page_unpublished signal
        signal_fired = [False]
        signal_page = [None]
        def page_unpublished_handler(sender, instance, **kwargs):
            signal_fired[0] = True
            signal_page[0] = instance
        page_unpublished.connect(page_unpublished_handler)

        page = SimplePage(
            title="Hello world!",
            slug="hello-world",
            live=True,
            expire_at=timezone.now() - timedelta(days=1),
        )
        self.root_page.add_child(instance=page)

        p = Page.objects.get(slug='hello-world')
        self.assertTrue(p.live)

        management.call_command('publish_scheduled_pages')

        p = Page.objects.get(slug='hello-world')
        self.assertFalse(p.live)
        self.assertTrue(p.expired)

        # Check that the page_unpublished signal was fired
        self.assertTrue(signal_fired[0])
        self.assertEqual(signal_page[0], page)
        self.assertEqual(signal_page[0], signal_page[0].specific)
Example #6
    def test_update_topology_command(self):
        t = Topology.objects.first()
        t.parser = 'netdiff.NetJsonParser'
        t.save()
        responses.add(responses.GET,
                      'http://127.0.0.1:9090',
                      body=self._load('static/netjson-1-link.json'),
                      content_type='application/json')
        Node.objects.all().delete()
        update_topology()
        self.assertEqual(Node.objects.count(), 2)
        self.assertEqual(Link.objects.count(), 1)
        # test exception
        t.url = t.url.replace('9090', '9091')
        t.save()
        Node.objects.all().delete()
        Link.objects.all().delete()
        responses.add(responses.GET,
                      'http://127.0.0.1:9091',
                      body=self._load('static/netjson-invalid.json'),
                      content_type='application/json')
        # capture output
        output = StringIO()
        with redirect_stdout(output):
            call_command('update_topology')

        self.assertEqual(Node.objects.count(), 1)
        self.assertEqual(Link.objects.count(), 0)
        self.assertIn('Failed to', output.getvalue())
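
The redirect_stdout wrapper above is needed when a command prints with bare print() calls; a command that writes through self.stdout can be captured directly by call_command. A minimal sketch of that variant, reusing the command name from the example:

from io import StringIO
from django.core.management import call_command

output = StringIO()
call_command('update_topology', stdout=output)  # captured only if the command writes via self.stdout
print(output.getvalue())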
Example #7
    def test_table_name_patching(self):
        """
        Tests that plugin model table names are patched when publishing from the command line.
        """
        User = get_user_model()
        User.objects.create_superuser('djangocms', '*****@*****.**', '123456')
        create_page("The page!", "nav_playground.html", "en", published=True)
        draft = Page.objects.drafts()[0]
        draft.reverse_id = 'a_test'  # we have to change *something*
        draft.save()
        add_plugin(draft.placeholders.get(slot=u"body"),
                   u"TextPlugin", u"en", body="Test content")
        draft.publish('en')
        add_plugin(draft.placeholders.get(slot=u"body"),
                   u"TextPlugin", u"en", body="Test content")

        # Manually undoing table name patching
        Text._meta.db_table = 'djangocms_text_ckeditor_text'
        plugin_pool.patched = False

        with disable_logger(log):
            call_command('cms', 'moderator', 'on')
        # Sanity check the database (we should have one draft and one public)
        not_drafts = len(Page.objects.filter(publisher_is_draft=False))
        drafts = len(Page.objects.filter(publisher_is_draft=True))
        self.assertEqual(not_drafts, 1)
        self.assertEqual(drafts, 1)
Example #8
	def handle(self, *labels, **options):
		if settings.PRODUCTION:
			print 'I will not install the demo on a PRODUCTION machine.  Sorry.'
			return

		call_command('syncdb', interactive=False)
		call_command('migrate', interactive=False)

		for log in Log.objects.all(): log.delete()
		for publication in Publication.objects.all(): publication.delete()
		for idea in Idea.objects.all(): idea.delete()
		for project in Project.objects.all(): project.delete()
		for comment in Comment.objects.all(): comment.delete()
		for link in Link.objects.all(): link.delete()

		site = Site.objects.get_current()
		site.domain = '127.0.0.1:8000'
		site.name = 'Dinker McDink'
		site.save()

		user1 = self.create_user('alice', '1234', 'Alice', 'Smith', is_staff=True, is_superuser=True)
		user2 = self.create_user('bob', '1234', 'Bob', 'Jones', is_staff=False, is_superuser=False)

		project1 = Project.objects.create(title='Carbon Fiber Shoe Tree', slug='carbon-fiber-foot-tree', description='A foot tree which is both light enough for flight and strong enough for my many shoes.', public=True, portfolio=True, started=datetime.now() - timedelta(days=10))
		project2 = Project.objects.create(title='Airplanes for babies', slug='airplanes-for-babies', description='Babies do not like to fly.  Adults do not like flying near babies.  Put babies in their own airplane.', public=False, portfolio=True, started=datetime.now() - timedelta(days=101))

		idea1 = Idea.objects.create(title='Where to keep my keys', description='Put it in *my bag*. Genius!', public=True, created=datetime.now() - timedelta(days=2))
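
A side note on the syncdb/migrate pair above: syncdb was deprecated in Django 1.7 and removed in 1.9, where a single migrate call both creates tables and applies migrations. A sketch of the modern equivalent:

from django.core.management import call_command

# Django >= 1.9: migrate subsumes the old syncdb step.
call_command('migrate', interactive=False)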
Example #9
 def _dumpdata_assert(self, args, output, format='json', filename=None,
                      natural_foreign_keys=False, natural_primary_keys=False,
                      use_base_manager=False, exclude_list=[], primary_keys=''):
     new_io = six.StringIO()
     if filename:
         filename = os.path.join(tempfile.gettempdir(), filename)
     management.call_command('dumpdata', *args, **{'format': format,
                                                   'stdout': new_io,
                                                   'stderr': new_io,
                                                   'output': filename,
                                                   'use_natural_foreign_keys': natural_foreign_keys,
                                                   'use_natural_primary_keys': natural_primary_keys,
                                                   'use_base_manager': use_base_manager,
                                                   'exclude': exclude_list,
                                                   'primary_keys': primary_keys})
     if filename:
         with open(filename, "r") as f:
             command_output = f.read()
         os.remove(filename)
     else:
         command_output = new_io.getvalue().strip()
     if format == "json":
         self.assertJSONEqual(command_output, output)
     elif format == "xml":
         self.assertXMLEqual(command_output, output)
     else:
         self.assertEqual(command_output, output)
Example #10
    def test_expiry_date_range(self):
        """
        Test that verifications are filtered by the given date range: no email
        is sent for a verification whose expiry date falls outside the range.
        """
        user = UserFactory.create()
        verification_in_range = self.create_and_submit(user)
        verification_in_range.status = 'approved'
        verification_in_range.expiry_date = now() - timedelta(days=1)
        verification_in_range.save()

        user = UserFactory.create()
        verification = self.create_and_submit(user)
        verification.status = 'approved'
        verification.expiry_date = now() - timedelta(days=5)
        verification.save()

        call_command('send_verification_expiry_email', '--days-range=2')

        # Check that only one email is sent
        self.assertEqual(len(mail.outbox), 1)

        # Verify that the email is not sent to the out of range verification
        expiry_email_date = SoftwareSecurePhotoVerification.objects.get(pk=verification.pk).expiry_email_date
        self.assertIsNone(expiry_email_date)
Example #11
    def setUp(self):
        args = []
        opts = {'dumpfile': 'test_db_dump.xml', 'verbosity': 0}
        cmd = 'migrate_db'
        call_command(cmd, *args, **opts)

        g1 = Genes.objects.get(gene_code='COI')
        g2 = Genes.objects.get(gene_code='EF1a')
        self.cleaned_data = {
            'gene_codes': [g1, g2],
            'taxonset': None,
            'voucher_codes': 'CP100-10\r\nCP100-11',
            'geneset': None,
            'taxon_names': ['CODE', 'GENUS', 'SPECIES'],
            'number_genes': None,
            'positions': ['ALL'],
            'partition_by_positions': 'ONE',
            'file_format': 'PHY',
            'aminoacids': True,
            'outgroup': '',
        }

        self.user = User.objects.get(username='******')
        self.user.set_password('pass')
        self.user.save()

        self.c = Client()
        self.dataset_creator = CreateDataset(self.cleaned_data)
        self.maxDiff = None
Example #12
 def test_update_index_using(self, m):
     """update_index only applies to indexes specified with --using"""
     call_command('update_index', verbosity=0, using=["eng", "fra"])
     m.assert_any_call("core", "eng")
     m.assert_any_call("core", "fra")
     self.assertTrue(call("core", "default") not in m.call_args_list,
                      "update_index should have been restricted to the index specified with --using")
    def test_dry_run_flag(self):
        """
        Test that the dry run flag sends no email and only logs the number of
        emails sent in each batch
        """
        user = UserFactory.create()
        verification = self.create_and_submit(user)
        verification.status = 'approved'
        verification.expiry_date = now() - timedelta(days=1)
        verification.save()

        start_date = now() - timedelta(days=1)  # using default days
        count = 1

        with LogCapture(LOGGER_NAME) as logger:
            call_command('send_verification_expiry_email', '--dry-run')
            logger.check(
                (LOGGER_NAME,
                 'INFO',
                 u"For the date range {} - {}, total Software Secure Photo verification filtered are {}"
                 .format(start_date.date(), now().date(), count)
                 ),
                (LOGGER_NAME,
                 'INFO',
                 u"This was a dry run, no email was sent. For the actual run email would have been sent "
                 u"to {} learner(s)".format(count)
                 ))
        self.assertEqual(len(mail.outbox), 0)
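
Both tests above pass options in argv style ('--days-range=2', '--dry-run'); call_command also accepts the same options as keyword arguments named after each argument's dest. A sketch of the equivalent calls, assuming the default dests days_range and dry_run:

call_command('send_verification_expiry_email', days_range=2)  # same as '--days-range=2'
call_command('send_verification_expiry_email', dry_run=True)  # same as '--dry-run'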
Example #14
    def handle(self, *args, **options):
        if settings.RELEASE_ENV != "dev" and not settings.TUTORIAL_MODE:
            self.stdout.write("Command can only be run on dev instances and instances "\
                              "with tutorial mode enabled")
            return

        if not options.get("commit"):
            self.stdout.write("This will sync data from {url} to this instance, and will take "\
                              "roughly 20 minutes to complete on a fresh db. "\
                              "Run the command with `--commit` if you are sure you want "\
                              "to do this.".format(**options))
            return

        djpdb_settings.SYNC_URL = options.get("url")
        pre_save.disconnect(signals.addressmodel_save,
                            sender=pdb_models.Facility)

        djpdb_models.all_models = [
            pdb_models.Organization, pdb_models.Facility, pdb_models.Network,
            pdb_models.InternetExchange, pdb_models.InternetExchangeFacility,
            pdb_models.IXLan, pdb_models.IXLanPrefix,
            pdb_models.NetworkContact, pdb_models.NetworkFacility,
            pdb_models.NetworkIXLan
        ]

        call_command("pdb_sync")
Example #15
    def test_create_default_keyword_to_all_users(self):
        """
        Tests that adding a default keyword adds it to every user's default
        keywords list.
        """
        existing_keyword = Keyword.objects.create(name='existing-keyword')
        # A user who added an existing keyword to its default keywords
        u = UserEmail.objects.create(email='*****@*****.**')
        s = EmailSettings.objects.create(user_email=u)
        s.default_keywords.add(existing_keyword)
        s.save()
        # A user who does not have any keywords apart from the defaults
        u = UserEmail.objects.create(email='*****@*****.**')
        s = EmailSettings.objects.create(user_email=u)
        # Make sure that it is so!
        self.assertNotIn(existing_keyword, s.default_keywords.all())
        # Sanity check - the keyword we want to add does not already exist
        self.assertEqual(Keyword.objects.filter(name='new-keyword').count(), 0)

        call_command('tracker_add_keyword', 'new-keyword', **{
            'is_default_keyword': True
        })

        keyword = Keyword.objects.get(name='new-keyword')
        # This keyword is given to all users
        self.assertEqual(
            EmailSettings.objects.filter(default_keywords=keyword).count(),
            EmailSettings.objects.count()
        )
Example #16
def start_cms_project():
    argv = list(sys.argv)
    if len(argv) != 2:
        raise management.CommandError("start_cms_project accepts one argument - the name of the project to create.")
    management.call_command(
        "startproject", argv[1], template="~/Workspace/cms/src/cms/project_template/", n="py,css,html"
    )
Example #17
    def test_export_writes_valid_excel_workbook(self):
        with tempfile.NamedTemporaryFile(suffix='.xlsx') as f:
            call_command('conference_export', 'TEST-GOVDELIVERY-CODE', f.name)

            workbook = load_workbook(f.name)
            self.assertEqual(workbook.active['C2'].value, 'My Name')
            self.assertEqual(workbook.active['C3'].value, 'Name with Unicodë')
Example #18
    def test_files_content(self):
        self.assertTableNotExists("migrations_unicodemodel")
        cache.register_models('migrations', UnicodeModel)
        call_command("makemigrations", "migrations", verbosity=0)

        init_file = os.path.join(self.migration_dir, "__init__.py")

        # Check for existing __init__.py file in migrations folder
        self.assertTrue(os.path.exists(init_file))

        with open(init_file, 'r') as fp:
            content = force_text(fp.read())
            self.assertEqual(content, '')

        initial_file = os.path.join(self.migration_dir, "0001_initial.py")

        # Check for existing 0001_initial.py file in migration folder
        self.assertTrue(os.path.exists(initial_file))

        with open(initial_file, 'r') as fp:
            content = force_text(fp.read())
            self.assertTrue('# encoding: utf8' in content)
            self.assertTrue('migrations.CreateModel' in content)

            if six.PY3:
                self.assertTrue('úñí©óðé µóðéø' in content)  # Meta.verbose_name
                self.assertTrue('úñí©óðé µóðéøß' in content)  # Meta.verbose_name_plural
                self.assertTrue('ÚÑÍ¢ÓÐÉ' in content)  # title.verbose_name
                self.assertTrue('“Ðjáñgó”' in content)  # title.default
            else:
                self.assertTrue('\\xfa\\xf1\\xed\\xa9\\xf3\\xf0\\xe9 \\xb5\\xf3\\xf0\\xe9\\xf8' in content)  # Meta.verbose_name
                self.assertTrue('\\xfa\\xf1\\xed\\xa9\\xf3\\xf0\\xe9 \\xb5\\xf3\\xf0\\xe9\\xf8\\xdf' in content)  # Meta.verbose_name_plural
                self.assertTrue('\\xda\\xd1\\xcd\\xa2\\xd3\\xd0\\xc9' in content)  # title.verbose_name
                self.assertTrue('\\u201c\\xd0j\\xe1\\xf1g\\xf3\\u201d' in content)  # title.default
Example #19
    def setup_databases(self):
        config = super().setup_databases()

        # Load fixture data for tests
        call_command('loaddata', 'foundation.json')

        return config
Example #20
    def test_course_clash(self):
        """
        Test for course_id_clash.
        """
        expected = []
        # clashing courses
        course = CourseFactory.create(org="test", course="courseid", display_name="run1")
        expected.append(course.id)
        course = CourseFactory.create(org="TEST", course="courseid", display_name="RUN12")
        expected.append(course.id)
        course = CourseFactory.create(org="test", course="CourseId", display_name="aRUN123")
        expected.append(course.id)
        # not clashing courses
        not_expected = []
        course = CourseFactory.create(org="test", course="course2", display_name="run1")
        not_expected.append(course.id)
        course = CourseFactory.create(org="test1", course="courseid", display_name="run1")
        not_expected.append(course.id)
        course = CourseFactory.create(org="test", course="courseid0", display_name="run1")
        not_expected.append(course.id)

        old_stdout = sys.stdout
        sys.stdout = mystdout = StringIO()
        call_command('course_id_clash', stdout=mystdout)
        sys.stdout = old_stdout
        result = mystdout.getvalue()
        for courseid in expected:
            self.assertIn(courseid.to_deprecated_string(), result)
        for courseid in not_expected:
            self.assertNotIn(courseid.to_deprecated_string(), result)
Example #21
def dump_fixtures(request):
    output = StringIO()

    fixture = request.GET.get('fixture', None)

    try:
        if fixture:
            call_command('dumpdata', fixture, '--indent=2', stdout=output)
        else:
            call_command('dumpdata', '--indent=2', stdout=output)

        data = output.getvalue()
        output.close()

        if fixture:
            file_label = 'fixtures_%s_%s' % (fixture, datetime.datetime.now().strftime('%d-%b-%Y_%H-%M'))
        else:
            file_label = 'fixtures_all_%s' % datetime.datetime.now().strftime('%d-%b-%Y_%H-%M')
        response = HttpResponse(data, content_type="application/json")
        response['Content-Disposition'] = 'attachment; filename=%s' % file_label
        return response
    except Exception:
        dest = request.META.get('HTTP_REFERER', '/')
        messages.info(request, 'Fixture name not recognized: %s' % fixture)
        return HttpResponseRedirect(dest)
Example #22
    def test_clear_badge(self, issue_badges, xqueue):
        """
        Given that I have a user with a badge
        If I run regeneration for a user
        Then certificate generation will be requested
        And the badge will be deleted if badge issuing is enabled
        """
        key = self.course.location.course_key
        self._create_cert(key, self.user, CertificateStatuses.downloadable)
        badge_class = get_completion_badge(key, self.user)
        BadgeAssertionFactory(badge_class=badge_class, user=self.user)
        self.assertTrue(BadgeAssertion.objects.filter(user=self.user, badge_class=badge_class))
        self.course.issue_badges = issue_badges
        self.store.update_item(self.course, None)

        args = '-u {} -c {}'.format(self.user.email, text_type(key))
        call_command(self.command, *args.split(' '))

        xqueue.return_value.regen_cert.assert_called_with(
            self.user,
            key,
            course=self.course,
            forced_grade=None,
            template_file=None,
            generate_pdf=True
        )
        self.assertEquals(
            bool(BadgeAssertion.objects.filter(user=self.user, badge_class=badge_class)), not issue_badges
        )
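
Formatting a single string and splitting on spaces, as above, breaks as soon as a value (such as an email address) contains whitespace; passing the tokens directly is equivalent and safer:

# Equivalent to the split-based call above, without the whitespace hazard
# (text_type comes from six, as in the surrounding test module):
call_command(self.command, '-u', self.user.email, '-c', text_type(key))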
Example #23
    def handle_noargs(self, **options):
        db = options.get('database')
        connection = connections[db]
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to a fresh state.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                with transaction.commit_on_success_unless_managed():
                    cursor = connection.cursor()
                    for sql in sql_list:
                        cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

            if not inhibit_post_migrate:
                self.emit_post_migrate(verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture.
                call_command('loaddata', 'initial_data', **options)

        else:
            self.stdout.write("Flush cancelled.\n")
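
The handle_noargs above is essentially Django's old flush command. From tests or setup code the same behaviour is normally reached through call_command; a minimal sketch:

from django.core.management import call_command

# Non-interactive flush of the default database (--noinput maps to interactive=False).
call_command('flush', interactive=False, verbosity=0)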
Example #24
 def setUp(self):
     self.dbs = settings.DATABASES.keys()
     
     for db in self.dbs:
         if db and db != 'default':
             call_command('createschema', db)
         call_command('syncdb', interactive=False, database=db, cursor=connections['default'].cursor(), verbosity=0)
Example #25
 def _pre_setup(self):
     try:
         import whoosh # pylint: disable=unused-variable
     except ImportError:
         raise SkipTest("The Whoosh library is not available")
     super(SearchEnabledTestCase, self)._pre_setup()
     call_command('rebuild_index', interactive=False, verbosity=0)
Example #26
    def test_nk_deserialize(self):
        """
        Test for ticket #13030 - Python based parser version
        natural keys deserialize with fk to inheriting model
        """
        management.call_command(
            'loaddata',
            'model-inheritance.json',
            verbosity=0,
            commit=False
        )
        management.call_command(
            'loaddata',
            'nk-inheritance.json',
            verbosity=0,
            commit=False
        )
        self.assertEqual(
            NKChild.objects.get(pk=1).data,
            'apple'
        )

        self.assertEqual(
            RefToNKChild.objects.get(pk=1).nk_fk.data,
            'apple'
        )
Example #27
    def test_nk_on_serialize(self):
        """
        Check that natural key requirements are taken into account
        when serializing models
        """
        management.call_command(
            'loaddata',
            'forward_ref_lookup.json',
            verbosity=0,
            commit=False
            )

        stdout = StringIO()
        management.call_command(
            'dumpdata',
            'fixtures_regress.book',
            'fixtures_regress.person',
            'fixtures_regress.store',
            verbosity=0,
            format='json',
            use_natural_keys=True,
            stdout=stdout,
        )
        self.assertEqual(
            stdout.getvalue(),
            """[{"pk": 2, "model": "fixtures_regress.store", "fields": {"main": null, "name": "Amazon"}}, {"pk": 3, "model": "fixtures_regress.store", "fields": {"main": null, "name": "Borders"}}, {"pk": 4, "model": "fixtures_regress.person", "fields": {"name": "Neal Stephenson"}}, {"pk": 1, "model": "fixtures_regress.book", "fields": {"stores": [["Amazon"], ["Borders"]], "name": "Cryptonomicon", "author": ["Neal Stephenson"]}}]"""
        )
Example #28
 def test_adding_arrayfield_with_index(self):
     """
     ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes.
     """
     table_name = 'postgres_tests_chartextarrayindexmodel'
     call_command('migrate', 'postgres_tests', verbosity=0)
     with connection.cursor() as cursor:
         like_constraint_columns_list = [
             v['columns']
             for k, v in list(connection.introspection.get_constraints(cursor, table_name).items())
             if k.endswith('_like')
         ]
     # Only the CharField should have a LIKE index.
     self.assertEqual(like_constraint_columns_list, [['char2']])
     # All fields should have regular indexes.
     with connection.cursor() as cursor:
         indexes = [
             c['columns'][0]
             for c in connection.introspection.get_constraints(cursor, table_name).values()
             if c['index'] and len(c['columns']) == 1
         ]
     self.assertIn('char', indexes)
     self.assertIn('char2', indexes)
     self.assertIn('text', indexes)
     call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
     with connection.cursor() as cursor:
         self.assertNotIn(table_name, connection.introspection.table_names(cursor))
Example #29
 def test_close_connection_after_loaddata(self):
     """
     Test for ticket #7572 -- MySQL has a problem if the same connection is
     used to create tables, load data, and then query over that data.
     To compensate, we close the connection after running loaddata.
     This ensures that a new connection is opened when test queries are
     issued.
     """
     management.call_command(
         'loaddata',
         'big-fixture.json',
         verbosity=0,
         commit=False
     )
     articles = Article.objects.exclude(id=9)
     self.assertEqual(
         list(articles.values_list('id', flat=True)),
         [1, 2, 3, 4, 5, 6, 7, 8]
     )
     # Just for good measure, run the same query again.
     # Under the influence of ticket #7572, this will
     # give a different result to the previous call.
     self.assertEqual(
         list(articles.values_list('id', flat=True)),
         [1, 2, 3, 4, 5, 6, 7, 8]
     )
Example #30
    def test_generate_diffexpr_deseq(self):
        call_command('generate_diffexpr_deseq', '-n=1', '-g=2', '--rseed')
        diffexpr = Data.objects.last()
        if diffexpr:
            self.assertEqual(diffexpr.process.type, 'data:differentialexpression:deseq2:')
            self.assertEqual(len(diffexpr.input['case']), 2)
            self.assertEqual(len(diffexpr.input['control']), 2)
            # NOTE: Python 2 and 3 produce different results even when setting random.seed() to the
            # same number due to https://docs.python.org/3/whatsnew/3.2.html#random
            if six.PY2:
                self.assertJSON(diffexpr, diffexpr.output['de_json'], '',
                                join('large', 'DE-deseq-py2.json.gz'))
                self.assertFile(diffexpr, 'de_file',
                                join('large', 'DE-deseq-py2.tab.gz'), compression='gzip')
            else:
                self.assertJSON(diffexpr, diffexpr.output['de_json'], '',
                                join('large', 'DE-deseq-py3.json.gz'))
                self.assertFile(diffexpr, 'de_file',
                                join('large', 'DE-deseq-py3.tab.gz'), compression='gzip')

            expressions = diffexpr.input['case'] + diffexpr.input['control']
            for expr_id in expressions:
                expression = Data.objects.get(id=expr_id)
                self.assertEqual(expression.process.type, 'data:expression:')
        else:
            self.fail("Differential expression not created")
Example #31
 def call_command(self, *args):
     call_command('backpopulate_course_type', *args)
Example #32
    def test_list_plugins(self):
        out = StringIO()
        placeholder = Placeholder.objects.create(slot="test")
        add_plugin(placeholder, TextPlugin, "en", body="en body")
        add_plugin(placeholder, TextPlugin, "en", body="en body")
        link_plugin = add_plugin(placeholder, "LinkPlugin", "en",
                                 name="A Link", external_link="https://www.django-cms.org")
        self.assertEqual(
            CMSPlugin.objects.filter(plugin_type=PLUGIN).count(),
            2)
        self.assertEqual(
            CMSPlugin.objects.filter(plugin_type="LinkPlugin").count(),
            1)

        # create a CMSPlugin with an unsaved instance
        instanceless_plugin = CMSPlugin(language="en", plugin_type="TextPlugin")
        instanceless_plugin.save()

        # create a bogus CMSPlugin to simulate one which used to exist but
        # is no longer installed
        bogus_plugin = CMSPlugin(language="en", plugin_type="BogusPlugin")
        bogus_plugin.save()

        management.call_command('cms', 'list', 'plugins', interactive=False, stdout=out)
        report = plugin_report()

        # there should be reports for three plugin types
        self.assertEqual(
            len(report),
            3)

        # check the bogus plugin
        bogus_plugins_report = report[0]
        self.assertEqual(
            bogus_plugins_report["model"],
            None)

        self.assertEqual(
            bogus_plugins_report["type"],
            u'BogusPlugin')

        self.assertEqual(
            bogus_plugins_report["instances"][0],
            bogus_plugin)

        # check the link plugin
        link_plugins_report = report[1]
        self.assertEqual(
            link_plugins_report["model"],
            link_plugin.__class__)

        self.assertEqual(
            link_plugins_report["type"],
            u'LinkPlugin')

        self.assertEqual(
            link_plugins_report["instances"][0].get_plugin_instance()[0],
            link_plugin)

        # check the text plugins
        text_plugins_report = report[2]
        self.assertEqual(
            text_plugins_report["model"],
            TextPlugin.model)

        self.assertEqual(
            text_plugins_report["type"],
            u'TextPlugin')

        self.assertEqual(
            len(text_plugins_report["instances"]),
            3)

        self.assertEqual(
            text_plugins_report["instances"][2],
            instanceless_plugin)

        self.assertEqual(
            text_plugins_report["unsaved_instances"],
            [instanceless_plugin])
Example #33
    def test_loading_fixture_for_moderated_model(self):
        management.call_command('loaddata',
                                'test_moderation.json',
                                verbosity=0)

        self.assertEqual(UserProfile.objects.all().count(), 1)
Example #34
 def setUp(self):
     self.client = Client()
     for fixture in self.fixtures:
         call_command('loaddata', fixture)
Example #35
 def test_migrate_command(self):
     management.call_command("migrate",
                             "migration_test_app",
                             fake=True,
                             stdout=StringIO())
Example #36
 def test_makemigration_command(self):
     management.call_command("makemigrations",
                             "migration_test_app",
                             stdout=StringIO())
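
The two migration tests above silence command output by handing call_command a throwaway stream; passing verbosity=0 suppresses it at the source instead. Both are standard options:

from io import StringIO
from django.core import management

management.call_command("makemigrations", "migration_test_app", stdout=StringIO())  # discard output
management.call_command("migrate", "migration_test_app", fake=True, verbosity=0)    # emit nothing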
Example #37
def test_localhost(db, monkeypatch, sites):
    monkeypatch.delitem(os.environ, 'BOK_CHOY_HOSTNAME', raising=False)
    call_command('update_fixtures')
    assert Site.objects.get(name='cms').domain == 'localhost:8031'
    assert Site.objects.get(name='lms').domain == 'localhost:8003'
Example #38
 def setUp(self):
     call_command('loaddata',
                  'rebu/fixtures/rebu/rebu_testdata.json',
                  verbosity=0)
Example #39
 def test_pipchecker_when_requirements_file_does_not_exist(self):
     with self.assertRaises(InstallationError):
         call_command('pipchecker', '-r', 'not_exist.txt')
Example #40
    def handle(self, **options: Any) -> None:
        if options["percent_huddles"] + options["percent_personals"] > 100:
            self.stderr.write(
                "Error!  More than 100% of messages allocated.\n")
            return

        # Get consistent data for backend tests.
        if options["test_suite"]:
            random.seed(0)

        if options["delete"]:
            # Start by clearing all the data in our database
            clear_database()

            # Create our three default realms
            # Could in theory be done via zerver.lib.actions.do_create_realm, but
            # welcome-bot (needed for do_create_realm) hasn't been created yet
            create_internal_realm()
            zulip_realm = Realm.objects.create(
                string_id="zulip",
                name="Zulip Dev",
                emails_restricted_to_domains=True,
                description=
                "The Zulip development environment default organization."
                "  It's great for testing!",
                invite_required=False,
                org_type=Realm.CORPORATE)
            RealmDomain.objects.create(realm=zulip_realm, domain="zulip.com")
            if options["test_suite"]:
                mit_realm = Realm.objects.create(
                    string_id="zephyr",
                    name="MIT",
                    emails_restricted_to_domains=True,
                    invite_required=False,
                    org_type=Realm.CORPORATE)
                RealmDomain.objects.create(realm=mit_realm, domain="mit.edu")

                lear_realm = Realm.objects.create(
                    string_id="lear",
                    name="Lear & Co.",
                    emails_restricted_to_domains=False,
                    invite_required=False,
                    org_type=Realm.CORPORATE)

            # Create test Users (UserProfiles are automatically created,
            # as are subscriptions to the ability to receive personals).
            names = [
                ("Zoe", "*****@*****.**"),
                ("Othello, the Moor of Venice", "*****@*****.**"),
                ("Iago", "*****@*****.**"),
                ("Prospero from The Tempest", "*****@*****.**"),
                ("Cordelia Lear", "*****@*****.**"),
                ("King Hamlet", "*****@*****.**"),
                ("aaron", "*****@*****.**"),
                ("Polonius", "*****@*****.**"),
            ]

            # For testing really large batches:
            # Create extra users with semi realistic names to make search
            # functions somewhat realistic.  We'll still create 1000 users
            # like Extra222 User for some predictability.
            num_names = options['extra_users']
            num_boring_names = 1000

            for i in range(min(num_names, num_boring_names)):
                full_name = 'Extra%03d User' % (i, )
                names.append((full_name, '*****@*****.**' % (i, )))

            if num_names > num_boring_names:
                fnames = [
                    'Amber', 'Arpita', 'Bob', 'Cindy', 'Daniela', 'Dan',
                    'Dinesh', 'Faye', 'François', 'George', 'Hank', 'Irene',
                    'James', 'Janice', 'Jenny', 'Jill', 'John', 'Kate',
                    'Katelyn', 'Kobe', 'Lexi', 'Manish', 'Mark', 'Matt',
                    'Mayna', 'Michael', 'Pete', 'Peter', 'Phil', 'Phillipa',
                    'Preston', 'Sally', 'Scott', 'Sandra', 'Steve',
                    'Stephanie', 'Vera'
                ]
                mnames = ['de', 'van', 'von', 'Shaw', 'T.']
                lnames = [
                    'Adams', 'Agarwal', 'Beal', 'Benson', 'Bonita', 'Davis',
                    'George', 'Harden', 'James', 'Jones', 'Johnson', 'Jordan',
                    'Lee', 'Leonard', 'Singh', 'Smith', 'Patel', 'Towns',
                    'Wall'
                ]

            for i in range(num_boring_names, num_names):
                fname = random.choice(fnames) + str(i)
                full_name = fname
                if random.random() < 0.7:
                    if random.random() < 0.5:
                        full_name += ' ' + random.choice(mnames)
                    full_name += ' ' + random.choice(lnames)
                email = fname.lower() + '@zulip.com'
                names.append((full_name, email))

            create_users(zulip_realm, names, tos_version=settings.TOS_VERSION)

            iago = get_user("*****@*****.**", zulip_realm)
            do_change_is_admin(iago, True)
            iago.is_staff = True
            iago.save(update_fields=['is_staff'])

            guest_user = get_user("*****@*****.**", zulip_realm)
            guest_user.role = UserProfile.ROLE_GUEST
            guest_user.save(update_fields=['role'])

            # These bots are directly referenced from code and thus
            # are needed for the test suite.
            zulip_realm_bots = [
                ("Zulip Error Bot", "*****@*****.**"),
                ("Zulip Default Bot", "*****@*****.**"),
            ]
            for i in range(options["extra_bots"]):
                zulip_realm_bots.append(
                    ('Extra Bot %d' % (i, ), '*****@*****.**' % (i, )))

            create_users(zulip_realm,
                         zulip_realm_bots,
                         bot_type=UserProfile.DEFAULT_BOT)

            zoe = get_user("*****@*****.**", zulip_realm)
            zulip_webhook_bots = [
                ("Zulip Webhook Bot", "*****@*****.**"),
            ]
            # If a stream is not supplied in the webhook URL, the webhook
            # will (in some cases) send the notification as a PM to the
            # owner of the webhook bot, so bot_owner can't be None
            create_users(zulip_realm,
                         zulip_webhook_bots,
                         bot_type=UserProfile.INCOMING_WEBHOOK_BOT,
                         bot_owner=zoe)
            aaron = get_user("*****@*****.**", zulip_realm)

            zulip_outgoing_bots = [("Outgoing Webhook",
                                    "*****@*****.**")]
            create_users(zulip_realm,
                         zulip_outgoing_bots,
                         bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
                         bot_owner=aaron)
            outgoing_webhook = get_user("*****@*****.**",
                                        zulip_realm)
            add_service("outgoing-webhook",
                        user_profile=outgoing_webhook,
                        interface=Service.GENERIC,
                        base_url="http://127.0.0.1:5002",
                        token=generate_api_key())

            # Add the realm internal bots to each realm.
            create_if_missing_realm_internal_bots()

            # Create public streams.
            stream_list = ["Verona", "Denmark", "Scotland", "Venice", "Rome"]
            stream_dict = {
                "Verona": {
                    "description": "A city in Italy"
                },
                "Denmark": {
                    "description": "A Scandinavian country"
                },
                "Scotland": {
                    "description": "Located in the United Kingdom"
                },
                "Venice": {
                    "description": "A northeastern Italian city"
                },
                "Rome": {
                    "description": "Yet another Italian city",
                    "is_web_public": True
                }
            }  # type: Dict[str, Dict[str, Any]]

            bulk_create_streams(zulip_realm, stream_dict)
            recipient_streams = [
                Stream.objects.get(name=name, realm=zulip_realm).id
                for name in stream_list
            ]  # type: List[int]

            # Create subscriptions to streams.  The following
            # algorithm will give each of the users a different but
            # deterministic subset of the streams (given a fixed list
            # of users). For the test suite, we have a fixed list of
            # subscriptions to make sure test data is consistent
            # across platforms.

            subscriptions_list = [
            ]  # type: List[Tuple[UserProfile, Recipient]]
            profiles = UserProfile.objects.select_related().filter(
                is_bot=False).order_by("email")  # type: Sequence[UserProfile]

            if options["test_suite"]:
                subscriptions_map = {
                    '*****@*****.**': ['Verona'],
                    '*****@*****.**': ['Verona'],
                    '*****@*****.**': ['Verona', 'Denmark'],
                    '*****@*****.**': ['Verona', 'Denmark', 'Scotland'],
                    '*****@*****.**': ['Verona', 'Denmark', 'Scotland'],
                    '*****@*****.**':
                    ['Verona', 'Denmark', 'Scotland', 'Venice'],
                    '*****@*****.**':
                    ['Verona', 'Denmark', 'Scotland', 'Venice', 'Rome'],
                    '*****@*****.**': ['Verona'],
                }

                for profile in profiles:
                    if profile.email not in subscriptions_map:
                        raise Exception(
                            'Subscriptions not listed for user %s' %
                            (profile.email, ))

                    for stream_name in subscriptions_map[profile.email]:
                        stream = Stream.objects.get(name=stream_name)
                        r = Recipient.objects.get(type=Recipient.STREAM,
                                                  type_id=stream.id)
                        subscriptions_list.append((profile, r))
            else:
                num_streams = len(recipient_streams)
                num_users = len(profiles)
                for i, profile in enumerate(profiles):
                    # Subscribe to some streams.
                    fraction = float(i) / num_users
                    num_recips = int(num_streams * fraction) + 1

                    for type_id in recipient_streams[:num_recips]:
                        r = Recipient.objects.get(type=Recipient.STREAM,
                                                  type_id=type_id)
                        subscriptions_list.append((profile, r))

            subscriptions_to_add = []  # type: List[Subscription]
            event_time = timezone_now()
            all_subscription_logs = []  # type: (List[RealmAuditLog])

            i = 0
            for profile, recipient in subscriptions_list:
                i += 1
                color = STREAM_ASSIGNMENT_COLORS[i %
                                                 len(STREAM_ASSIGNMENT_COLORS)]
                s = Subscription(recipient=recipient,
                                 user_profile=profile,
                                 color=color)

                subscriptions_to_add.append(s)

                log = RealmAuditLog(
                    realm=profile.realm,
                    modified_user=profile,
                    modified_stream_id=recipient.type_id,
                    event_last_message_id=0,
                    event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
                    event_time=event_time)
                all_subscription_logs.append(log)

            Subscription.objects.bulk_create(subscriptions_to_add)
            RealmAuditLog.objects.bulk_create(all_subscription_logs)

            # Create custom profile field data
            phone_number = try_add_realm_custom_profile_field(
                zulip_realm,
                "Phone number",
                CustomProfileField.SHORT_TEXT,
                hint='')
            biography = try_add_realm_custom_profile_field(
                zulip_realm,
                "Biography",
                CustomProfileField.LONG_TEXT,
                hint='What are you known for?')
            favorite_food = try_add_realm_custom_profile_field(
                zulip_realm,
                "Favorite food",
                CustomProfileField.SHORT_TEXT,
                hint="Or drink, if you'd prefer")
            field_data = {
                'vim': {
                    'text': 'Vim',
                    'order': '1'
                },
                'emacs': {
                    'text': 'Emacs',
                    'order': '2'
                },
            }  # type: ProfileFieldData
            favorite_editor = try_add_realm_custom_profile_field(
                zulip_realm,
                "Favorite editor",
                CustomProfileField.CHOICE,
                field_data=field_data)
            birthday = try_add_realm_custom_profile_field(
                zulip_realm, "Birthday", CustomProfileField.DATE)
            favorite_website = try_add_realm_custom_profile_field(
                zulip_realm,
                "Favorite website",
                CustomProfileField.URL,
                hint="Or your personal blog's URL")
            mentor = try_add_realm_custom_profile_field(
                zulip_realm, "Mentor", CustomProfileField.USER)
            github_profile = try_add_realm_default_custom_profile_field(
                zulip_realm, "github")

            # Fill in values for Iago and Hamlet
            hamlet = get_user("*****@*****.**", zulip_realm)
            do_update_user_custom_profile_data_if_changed(
                iago, [
                    {
                        "id": phone_number.id,
                        "value": "+1-234-567-8901"
                    },
                    {
                        "id": biography.id,
                        "value": "Betrayer of Othello."
                    },
                    {
                        "id": favorite_food.id,
                        "value": "Apples"
                    },
                    {
                        "id": favorite_editor.id,
                        "value": "emacs"
                    },
                    {
                        "id": birthday.id,
                        "value": "2000-1-1"
                    },
                    {
                        "id": favorite_website.id,
                        "value": "https://zulip.readthedocs.io/en/latest/"
                    },
                    {
                        "id": mentor.id,
                        "value": [hamlet.id]
                    },
                    {
                        "id": github_profile.id,
                        "value": 'zulip'
                    },
                ])
            do_update_user_custom_profile_data_if_changed(
                hamlet, [
                    {
                        "id": phone_number.id,
                        "value": "+0-11-23-456-7890"
                    },
                    {
                        "id":
                        biography.id,
                        "value":
                        "I am:\n* The prince of Denmark\n* Nephew to the usurping Claudius",
                    },
                    {
                        "id": favorite_food.id,
                        "value": "Dark chocolate"
                    },
                    {
                        "id": favorite_editor.id,
                        "value": "vim"
                    },
                    {
                        "id": birthday.id,
                        "value": "1900-1-1"
                    },
                    {
                        "id": favorite_website.id,
                        "value": "https://blog.zulig.org"
                    },
                    {
                        "id": mentor.id,
                        "value": [iago.id]
                    },
                    {
                        "id": github_profile.id,
                        "value": 'zulipbot'
                    },
                ])
        else:
            zulip_realm = get_realm("zulip")
            recipient_streams = [
                klass.type_id
                for klass in Recipient.objects.filter(type=Recipient.STREAM)
            ]

        # Extract a list of all users
        user_profiles = list(UserProfile.objects.filter(
            is_bot=False))  # type: List[UserProfile]

        # Create a test realm emoji.
        IMAGE_FILE_PATH = static_path('images/test-images/checkbox.png')
        with open(IMAGE_FILE_PATH, 'rb') as fp:
            check_add_realm_emoji(zulip_realm, 'green_tick', iago, fp)

        if not options["test_suite"]:
            # Populate users with some bar data
            for user in user_profiles:
                status = UserPresence.ACTIVE  # type: int
                date = timezone_now()
                client = get_client("website")
                if user.full_name[0] <= 'H':
                    client = get_client("ZulipAndroid")
                UserPresence.objects.get_or_create(user_profile=user,
                                                   realm_id=user.realm_id,
                                                   client=client,
                                                   timestamp=date,
                                                   status=status)

        user_profiles_ids = [user_profile.id for user_profile in user_profiles]

        # Create several initial huddles
        for i in range(options["num_huddles"]):
            get_huddle(random.sample(user_profiles_ids, random.randint(3, 4)))

        # Create several initial pairs for personals
        personals_pairs = [
            random.sample(user_profiles_ids, 2)
            for i in range(options["num_personals"])
        ]

        # Generate a new set of test data.
        create_test_data()

        # prepopulate the URL preview/embed data for the links present
        # in the config.generate_data.json data set.  This makes it
        # possible for populate_db to run happily without Internet
        # access.
        with open("zerver/tests/fixtures/docs_url_preview_data.json",
                  "r") as f:
            urls_with_preview_data = ujson.load(f)
            for url in urls_with_preview_data:
                cache_set(url, urls_with_preview_data[url], PREVIEW_CACHE_NAME)

        threads = options["threads"]
        jobs = [
        ]  # type: List[Tuple[int, List[List[int]], Dict[str, Any], Callable[[str], int], int]]
        for i in range(threads):
            count = options["num_messages"] // threads
            if i < options["num_messages"] % threads:
                count += 1
            jobs.append((count, personals_pairs, options, self.stdout.write,
                         random.randint(0, 10**10)))

        for job in jobs:
            generate_and_send_messages(job)

        if options["delete"]:
            if options["test_suite"]:
                # Create test users; the MIT ones are needed to test
                # the Zephyr mirroring codepaths.
                testsuite_mit_users = [
                    ("Fred Sipb (MIT)", "*****@*****.**"),
                    ("Athena Consulting Exchange User (MIT)",
                     "*****@*****.**"),
                    ("Esp Classroom (MIT)", "*****@*****.**"),
                ]
                create_users(mit_realm,
                             testsuite_mit_users,
                             tos_version=settings.TOS_VERSION)

                testsuite_lear_users = [
                    ("King Lear", "*****@*****.**"),
                    ("Cordelia Lear", "*****@*****.**"),
                ]
                create_users(lear_realm,
                             testsuite_lear_users,
                             tos_version=settings.TOS_VERSION)

            if not options["test_suite"]:
                # To keep the test suite's messages.json fixture small
                # and fast to load, don't add these users and
                # subscriptions when running populate_db for the test
                # suite.

                zulip_stream_dict = {
                    "devel": {
                        "description": "For developing"
                    },
                    "all": {
                        "description": "For **everything**"
                    },
                    "announce": {
                        "description": "For announcements",
                        'stream_post_policy': Stream.STREAM_POST_POLICY_ADMINS
                    },
                    "design": {
                        "description": "For design"
                    },
                    "support": {
                        "description": "For support"
                    },
                    "social": {
                        "description": "For socializing"
                    },
                    "test": {
                        "description": "For testing `code`"
                    },
                    "errors": {
                        "description": "For errors"
                    },
                    "sales": {
                        "description": "For sales discussion"
                    }
                }  # type: Dict[str, Dict[str, Any]]

                # Calculate the maximum number of digits in any extra stream's
                # number, since a stream with name "Extra Stream 3" could show
                # up after "Extra Stream 29". (Used later to pad numbers with
                # 0s).
                maximum_digits = len(str(options['extra_streams'] - 1))

                for i in range(options['extra_streams']):
                    # Pad the number with 0s based on `maximum_digits`.
                    number_str = str(i).zfill(maximum_digits)

                    extra_stream_name = 'Extra Stream ' + number_str

                    zulip_stream_dict[extra_stream_name] = {
                        "description": "Auto-generated extra stream.",
                    }

                bulk_create_streams(zulip_realm, zulip_stream_dict)
                # Now that we've created the notifications stream, configure it properly.
                zulip_realm.notifications_stream = get_stream(
                    "announce", zulip_realm)
                zulip_realm.save(update_fields=['notifications_stream'])

                # Add a few default streams
                for default_stream_name in [
                        "design", "devel", "social", "support"
                ]:
                    DefaultStream.objects.create(realm=zulip_realm,
                                                 stream=get_stream(
                                                     default_stream_name,
                                                     zulip_realm))

                # Now subscribe everyone to these streams
                subscribe_users_to_streams(zulip_realm, zulip_stream_dict)

                # These bots are not needed by the test suite
                internal_zulip_users_nosubs = [
                    ("Zulip Commit Bot", "*****@*****.**"),
                    ("Zulip Trac Bot", "*****@*****.**"),
                    ("Zulip Nagios Bot", "*****@*****.**"),
                ]
                create_users(zulip_realm,
                             internal_zulip_users_nosubs,
                             bot_type=UserProfile.DEFAULT_BOT)

            # Mark all messages as read
            UserMessage.objects.all().update(flags=UserMessage.flags.read)

            if not options["test_suite"]:
                # Update each user's pointer to the most recent message
                # they sent (their UserMessage rows where the sender is
                # the user themselves).
                users = list(
                    UserMessage.objects.filter(message__sender_id=F(
                        'user_profile_id')).values('user_profile_id').annotate(
                            pointer=Max('message_id')))
                for user in users:
                    UserProfile.objects.filter(
                        id=user['user_profile_id']).update(
                            pointer=user['pointer'])

            create_user_groups()

            if not options["test_suite"]:
                # We populate the analytics database here for
                # development purposes only.
                call_command('populate_analytics_db')
            self.stdout.write("Successfully populated test database.\n")
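The job construction above splits options["num_messages"] as evenly as possible across the worker threads: each job gets the integer quotient, and the first num_messages % threads jobs take one extra message so the per-job counts sum exactly to the total. A standalone sketch of the same arithmetic (names are illustrative):

def split_counts(total, workers):
    # The first (total % workers) workers each take one extra item,
    # so the counts always sum back to `total`.
    base, remainder = divmod(total, workers)
    return [base + 1 if i < remainder else base for i in range(workers)]

assert split_counts(10, 4) == [3, 3, 2, 2]
assert sum(split_counts(10, 4)) == 10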
Example #41
0
 def test_command(self):
     self.assertEqual(ResultType.objects.count(), 0)
     call_command('init-result-type')
     self.assertEqual(ResultType.objects.count(), 3)
Example #42
0
def test_process_addons_invalid_task():
    with pytest.raises(CommandError):
        call_command('process_addons', task='foo')
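Example #42 only passes if the command itself validates its --task option. A minimal sketch of the pattern such tests exercise (a hypothetical command with illustrative task names, not the real process_addons implementation):

from django.core.management.base import BaseCommand, CommandError

ALLOWED_TASKS = {'recalculate_ratings', 'resign_addons'}  # illustrative names

class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument('--task', required=True)

    def handle(self, *args, **options):
        # Unknown task names surface as CommandError, which is exactly
        # what pytest.raises(CommandError) asserts above.
        if options['task'] not in ALLOWED_TASKS:
            raise CommandError('Unknown task: %s' % options['task'])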
Example #43
0
 def test_user_delete_does_not_exist(self):
     with self.assertRaisesRegexp(CommandError, "User with username"):
         call_command("deleteuser", "kolibri")
Example #44
0
 def syncdb(self):
     loading.cache.loaded = False
     call_command('syncdb', verbosity=0)
Example #45
0
def send_queued_mail():
    management.call_command("send_queued_mail")
Example #46
0
 def test_user_delete_multiple_users(self):
     with self.assertRaisesRegexp(CommandError, "There is more than one user"):
         FacilityUser.objects.create(username="******", facility=self.facility_2)
         call_command("deleteuser", "user")
Example #47
0
 def test_custom_test_runner(self):
     call_command('test', 'sites',
                  testrunner='regressiontests.test_runner.tests.MockTestRunner')
     self.assertTrue(MockTestRunner.invoked,
                     "The custom test runner has not been invoked")
Example #48
0
 def test_user_delete_with_facility(self):
     utils.input = mock.MagicMock(name="input", return_value="yes")
     call_command("deleteuser", "user", facility=self.facility.id)
     self.assertFalse(FacilityUser.objects.exists())
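Example #48 fakes the confirmation prompt by assigning a MagicMock to the module's input reference. When a command reads from the built-in input() directly, mock.patch does the same job and restores the original automatically once the block exits (a sketch, assuming the command prompts via input()):

from unittest import mock
from django.core.management import call_command

with mock.patch('builtins.input', return_value='yes'):
    call_command('deleteuser', 'user')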
Example #49
0
# http://flask.pocoo.org/docs/0.10/patterns/appdispatch/
from werkzeug.serving import run_simple
from werkzeug.wsgi import DispatcherMiddleware, SharedDataMiddleware
from mpcontribs.webui.webui import app as flask_app
from test_site.wsgi import application as django_app
from test_site.settings import STATIC_ROOT
from django.core.management import call_command

flask_app.debug = True
call_command('collectstatic', '--clear', '--noinput', '-v 0')

application = DispatcherMiddleware(flask_app, {'/test_site': django_app})
application = SharedDataMiddleware(application, {'/static': STATIC_ROOT})

if __name__ == '__main__':
    run_simple('localhost',
               5000,
               application,
               use_reloader=True,
               use_debugger=True,
               use_evalex=True,
               threaded=True)
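The collectstatic call above passes argv-style strings; call_command also accepts the same options as keyword arguments, which avoids string parsing entirely. A sketch of the equivalent call:

from django.core.management import call_command

# clear=True replaces --clear, interactive=False replaces --noinput,
# and verbosity=0 replaces -v 0.
call_command('collectstatic', clear=True, interactive=False, verbosity=0)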
Example #50
0
 def django_command_proxy(self, command):
     '''Call a Django command.'''
     if command == 'sqlall':
         management.call_command(command, 'Reporting')
     else:
         management.call_command(command)
Example #51
0
def _create_db(migrate_cmd=False):
    call_command('migrate')
Example #52
0
    def test_import_data_from_csv(self):

        # CommandError raises when a non-existing file is provided
        with pytest.raises(CommandError) as error_info:
            management.call_command('import_works', 'not-an-existing.csv')

        assert error_info.value.args[0] == 'File "not-an-existing.csv" does not exist'

        # Success: at first no works exist; after running the command, 4 works exist.
        assert Work.objects.count() == 0

        management.call_command('import_works', here + '/works_metadata.csv')

        # From introspecting the works_metadata.csv file, 4 different works should
        # result from import
        assert Work.objects.count() == 4

        # Now let's see if the data in the database is what we expected
        expected = [
            {
                'title': 'Adventure of a Lifetime',
                'contributors': [
                    'O Brien Edward John',
                    'Yorke Thomas Edward',
                    'Greenwood Colin Charles',
                    'Selway Philip James',
                ],
                'iswc': 'T0101974597',
                'source': 'warner',
                'source_id': 2,
                'alternate': {
                    'iswc': [],
                    'title': [],
                    'source': [],
                    'source_id': ['3'],
                },
            },
            {
                'title': 'Me Enamoré',
                'contributors': [
                    'Rayo Gibo Antonio',
                    'Ripoll Shakira Isabel Mebarak',
                ],
                'iswc': 'T9214745718',
                'source': 'universal',
                'source_id': 1,
                'alternate': {
                    'iswc': [],
                    'title': ['Me Enamore'],
                    'source': ['warner'],
                    'source_id': ['4'],
                },
            },
            {
                'title': 'Je ne sais pas',
                'contributors': [
                    'Obispo Pascal Michel',
                    'Florence Lionel Jacques',
                ],
                'iswc': 'T0046951705',
                'source': 'sony',
                'source_id': 2,
                'alternate': {
                    'iswc': [],
                    'title': [],
                    'source': [],
                    'source_id': ['3'],
                },
            },
            {
                'title': 'Shape of You',
                'contributors': [
                    'Edward Sheeran',
                    'Edward Christopher Sheeran',
                ],
                'iswc': 'T9204649558',
                'source': 'warner',
                'source_id': 1,
                'alternate': {
                    'iswc': [],
                    'title': [],
                    'source': ['sony'],
                    'source_id': ['1'],
                },
            },
        ]

        for work in expected:
            imported_as_dict = Work.objects.get(title=work['title']).__dict__

            imported_as_dict.pop('_state')
            imported_as_dict.pop('uid')

            assert imported_as_dict == work
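The first assertion in Example #52 pins the exact error text, so the command is expected to check the path before it parses anything. A hypothetical skeleton of that guard (the real import_works command is not shown in this listing):

import os

from django.core.management.base import BaseCommand, CommandError

class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument('csv_path')

    def handle(self, *args, **options):
        path = options['csv_path']
        # Produces the exact message the test asserts on.
        if not os.path.exists(path):
            raise CommandError('File "%s" does not exist' % path)
        # ... parse rows and create Work objects ...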
Example #53
0
    def test_job_runner(self):
        "Testing Job runner using management command"

        management.call_command('job', id=self.job.id, verbosity=2)
        management.call_command('job', list=True)
Example #54
0
 def setUp(self):
     self.client = Client()
     self.user = create_user_with_group('admin')
     management.call_command('create_water_polls_and_scripts')
Example #55
0
def django_db_setup(django_db_setup, django_db_blocker):
    with django_db_blocker.unblock():
        call_command('loaddata', 'test_data.json')
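Example #55 is the pytest-django recipe for loading fixture data once per test session; upstream, the function carries a session-scoped fixture decorator, which this snippet omits. A sketch with the decorator restored (assumes pytest-django is installed):

import pytest
from django.core.management import call_command

@pytest.fixture(scope='session')
def django_db_setup(django_db_setup, django_db_blocker):
    # Extends pytest-django's django_db_setup fixture: once the test
    # database exists, load extra fixture data into it.
    with django_db_blocker.unblock():
        call_command('loaddata', 'test_data.json')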
Example #56
0
    def test_fetch_multiple_providers_data(self):
        """
        Test that management command completes with proper message for error or success
        and logs correct information when there are multiple providers with their data.
        """
        # Create enabled configurations
        self.__create_saml_configurations__()

        # Add another set of configurations
        self.__create_saml_configurations__(
            saml_config={
                "site__domain": "second.testserver.fake",
                "site__name": "testserver.fake",
            },
            saml_provider_config={
                "site__domain": "second.testserver.fake",
                "site__name": "testserver.fake",
                "slug": "second-test-shib",
                "entity_id": "https://idp.testshib.org/idp/another-shibboleth",
                "metadata_source": "https://www.testshib.org/metadata/another-testshib-providers.xml",
            }
        )

        # Add another set of configurations
        self.__create_saml_configurations__(
            saml_config={
                "site__domain": "third.testserver.fake",
                "site__name": "testserver.fake",
            },
            saml_provider_config={
                "site__domain": "third.testserver.fake",
                "site__name": "testserver.fake",
                "slug": "third-test-shib",
                # Note: This entity id will not be present in the returned response and will cause a failed update.
                "entity_id": "https://idp.testshib.org/idp/non-existent-shibboleth",
                "metadata_source": "https://www.testshib.org/metadata/third/testshib-providers.xml",
            }
        )

        expected = '\n3 provider(s) found in database.\n0 skipped and 3 attempted.\n2 updated and 1 failed.\n'
        with self.assertRaisesRegex(CommandError, r"MetadataParseError: Can't find EntityDescriptor for entityID"):
            call_command("saml", pull=True, stdout=self.stdout)
        self.assertIn(expected, self.stdout.getvalue())

        # Now add a fourth configuration, and indicate that it should not be included in the update
        self.__create_saml_configurations__(
            saml_config={
                "site__domain": "fourth.testserver.fake",
                "site__name": "testserver.fake",
            },
            saml_provider_config={
                "site__domain": "fourth.testserver.fake",
                "site__name": "testserver.fake",
                "slug": "fourth-test-shib",
                "automatic_refresh_enabled": False,
                # Note: This invalid entity id will not be present in the refresh set
                "entity_id": "https://idp.testshib.org/idp/fourth-shibboleth",
                "metadata_source": "https://www.testshib.org/metadata/fourth/testshib-providers.xml",
            }
        )

        # Four configurations -- one will be skipped and three attempted, with similar results.
        expected = '\nDone.\n4 provider(s) found in database.\n1 skipped and 3 attempted.\n0 updated and 1 failed.\n'
        with self.assertRaisesRegex(CommandError, r"MetadataParseError: Can't find EntityDescriptor for entityID"):
            call_command("saml", pull=True, stdout=self.stdout)
        self.assertIn(expected, self.stdout.getvalue())
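Example #56 captures command output by handing call_command a stream as stdout. The same technique works with a plain io.StringIO whenever a test needs to assert on what a command printed (a minimal sketch using Django's built-in check command):

from io import StringIO
from django.core.management import call_command

out = StringIO()
call_command('check', stdout=out)
# The captured text is available via getvalue().
assert 'System check identified no issues' in out.getvalue()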
Example #57
0
 def test_command_add_error(self):
     with self.assertRaises(CommandError):
         call_command('auto_translate', 'test', 'test', 'ia', add=True)
Example #58
0
 def test__user_base_amount_increased(self):
     load_money_amount = 10.0
     call_command('load_money', self.user_account.card_id,
                  load_money_amount, 'EUR')
     self.check_account_result_amount(self.user_account.base_account.id,
                                      load_money_amount)
Example #59
0
    def setUp(self):
        apps.clear_cache()
        management.call_command('migrate', verbosity=0, interactive=False, load_initial_data=False)

        super(QueryObserversTestCase, self).setUp()
Example #60
0
def install_database():
    from django.core.management import call_command

    call_command('migrate')
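Like several wrappers above, this relies on migrate accepting its command-line flags as keyword arguments; a deploy script would typically also suppress prompts and output (a sketch):

from django.core.management import call_command

# interactive=False replaces --noinput; verbosity=0 silences output.
call_command('migrate', interactive=False, verbosity=0)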