Example #1
    def test_partial_router(self):
        "A router can choose to implement a subset of methods"
        dive = Book.objects.using('other').create(title="Dive into Python",
                                                  published=datetime.date(2009, 5, 4))

        # First check the baseline behaviour

        self.assertEqual(router.db_for_read(User), 'other')
        self.assertEqual(router.db_for_read(Book), 'other')

        self.assertEqual(router.db_for_write(User), 'default')
        self.assertEqual(router.db_for_write(Book), 'default')

        self.assertTrue(router.allow_relation(dive, dive))

        self.assertTrue(router.allow_syncdb('default', User))
        self.assertTrue(router.allow_syncdb('default', Book))

        router.routers = [WriteRouter(), AuthRouter(), TestRouter()]

        self.assertEqual(router.db_for_read(User), 'default')
        self.assertEqual(router.db_for_read(Book), 'other')

        self.assertEqual(router.db_for_write(User), 'writer')
        self.assertEqual(router.db_for_write(Book), 'writer')

        self.assertTrue(router.allow_relation(dive, dive))

        self.assertFalse(router.allow_syncdb('default', User))
        self.assertTrue(router.allow_syncdb('default', Book))
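The master router tries each router listed in settings.DATABASE_ROUTERS in order and falls through whenever a router does not define a method or returns None. As a hedged illustration only (these are not the actual WriteRouter/AuthRouter/TestRouter classes exercised by the test above), a partial router might look like this:

# Hypothetical sketch, not the routers from the test suite above.
# A router only needs to define the methods it cares about; for any
# method it omits (or returns None from), the master router falls
# through to the next router in settings.DATABASE_ROUTERS.

class WriteRouter(object):
    "Routes every write to a dedicated alias and defines nothing else."
    def db_for_write(self, model, **hints):
        return 'writer'

class AuthRouter(object):
    "Only has an opinion about models in the auth app."
    def db_for_read(self, model, **hints):
        if model._meta.app_label == 'auth':
            return 'default'
        return None  # defer to the next router

    def allow_syncdb(self, db, model):
        if model._meta.app_label == 'auth':
            return db == 'other'
        return None  # no opinion; keep asking the remaining routers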
Example #2
    def django_table_names(self, only_existing=False):
        """
        Returns a list of all table names that have associated Django models and
        are in INSTALLED_APPS.

        If only_existing is True, the resulting list will only include the tables
        that actually exist in the database.
        """
        from django.db import models, router
        tables = set()
        for app in models.get_apps():
            for model in models.get_models(app):
                if not model._meta.managed:
                    continue
                if not router.allow_syncdb(self.connection.alias, model):
                    continue
                tables.add(model._meta.db_table)
                tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many])
        tables = list(tables)
        if only_existing:
            existing_tables = self.table_names()
            tables = [
                t
                for t in tables
                if self.table_name_converter(t) in existing_tables
            ]
        return tables
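For reference, on the Django versions these snippets target this helper is exposed on the connection's introspection object; a typical call (shown here for the default connection) looks roughly like:

from django.db import connection

# Every table name Django expects for managed models in INSTALLED_APPS
expected_tables = connection.introspection.django_table_names()

# Restrict the result to tables that actually exist in the database
existing_tables = connection.introspection.django_table_names(only_existing=True)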
Example #3
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        if verbosity >= 1:
            print "Creating test database '%s'..." % self.connection.alias

        test_database_name = self._create_test_db(verbosity, autoclobber)

        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name

        # Confirm the feature set of the test database
        self.connection.features.confirm()

        # Report syncdb messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded)
        call_command("syncdb", verbosity=max(verbosity - 1, 0), interactive=False, database=self.connection.alias)

        if settings.CACHE_BACKEND.startswith("db://"):
            from django.core.cache import parse_backend_uri, cache
            from django.db import router

            if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
                _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
                call_command("createcachetable", cache_name, database=self.connection.alias)

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Example #4
def load_fixture(name, using=DEFAULT_DB_ALIAS):
    """Progammatic way to load a fixture given some path. This does not
    assume the path is a fixture within some app and assumes a full path.
    """
    if os.path.isabs(name):
        fixture_path = name
    else:
        fixture_path = full_fixture_path(name)

    with open(fixture_path) as fixture:
        objects = serializers.deserialize(FIXTURE_FORMAT, fixture, using=using)

        with transaction.commit_manually(using):
            for obj in objects:
                if router.allow_syncdb(using, obj.object.__class__):
                    try:
                        obj.save(using=using)
                    except (DatabaseError, IntegrityError), e:
                        transaction.rollback(using)
                        msg = u"Could not load {app_label}.{object_name}(pk={pk}): {error_msg}".format(
                            app_label=obj.object._meta.app_label,
                            object_name=obj.object._meta.object_name,
                            pk=obj.object.pk,
                            error_msg=e,
                        )
                        raise e.__class__, e.__class__(msg), sys.exc_info()[2]
            transaction.commit(using)
Example #5
	def create_test_db(self, verbosity=1, autoclobber=False):
		"""
		Creates a test database, prompting the user for confirmation if the
		database already exists. Returns the name of the test database created.

		This method is overloaded to load up the SpatiaLite initialization
		SQL prior to calling the `syncdb` command.
		"""
		if verbosity >= 1:
			print "Creating test database '%s'..." % self.connection.alias

		test_database_name = self._create_test_db(verbosity, autoclobber)

		self.connection.close()

		self.connection.settings_dict["NAME"] = test_database_name
		# Confirm the feature set of the test database
		self.connection.features.confirm()
		# Need to load the SpatiaLite initialization SQL before running `syncdb`.
		self.load_spatialite_sql()
		call_command('syncdb', verbosity=verbosity, interactive=False, database=self.connection.alias)

		for cache_alias in settings.CACHES:
			cache = get_cache(cache_alias)
			if isinstance(cache, BaseDatabaseCache):
				from django.db import router
				if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
					call_command('createcachetable', cache._table, database=self.connection.alias)
		# Get a cursor (even though we don't need one yet). This has
		# the side effect of initializing the test database.
		cursor = self.connection.cursor()

		return test_database_name
Example #6
    def sequence_list(self):
        "Returns a list of information about all DB sequences for all models in all apps."
        from django.db import models, router

        apps = models.get_apps()
        sequence_list = []

        for app in apps:
            for model in models.get_models(app):
                if not model._meta.managed:
                    continue
                if not router.allow_syncdb(self.connection.alias, model):
                    continue
                for f in model._meta.local_fields:
                    if isinstance(f, models.AutoField):
                        sequence_list.append({'table': model._meta.db_table, 'column': f.column})
                        break # Only one AutoField is allowed per model, so don't bother continuing.

                for f in model._meta.local_many_to_many:
                    # If this is an m2m using an intermediate table,
                    # we don't need to reset the sequence.
                    if f.rel.through is None:
                        sequence_list.append({'table': f.m2m_db_table(), 'column': None})

        return sequence_list
Example #7
def create_permissions(app, created_models, verbosity, db=DEFAULT_DB_ALIAS, **kwargs):
    """
    Create view permission for all models.
    See django.contrib.auth.management.__init__.py for original
    """
    if not router.allow_syncdb(db, auth_app.Permission):
        return

    from django.contrib.contenttypes.models import ContentType

    app_models = get_models(app)
    searched_perms = list()
    ctypes = set()
    for klass in app_models:
        ctype = ContentType.objects.db_manager(db).get_for_model(klass)
        ctypes.add(ctype)
        for perm in _get_all_permissions(klass._meta, ctype):
            searched_perms.append((ctype, perm))

    all_perms = set(auth_app.Permission.objects.using(db).filter(
        content_type__in=ctypes).values_list("content_type", "codename")
    )

    perms = [
        auth_app.Permission(codename=codename, name=name, content_type=ctype)
        for ctype, (codename, name) in searched_perms if (ctype.pk, codename) not in all_perms
    ]
    auth_app.Permission.objects.using(db).bulk_create(perms)
    if verbosity >= 2:
        for perm in perms:
            print "Adding permission '%s'" % perm
Example #8
def dump_data(request,appname):
    app_list = SortedDict()
    
    try:
        if request.POST:
            for appname in request.POST.getlist('apps'):
                app = get_app(appname)
                app_list[app] = None
            appname = 'choices'
        else:
            app = get_app(appname)
            app_list[app] = None
    except ImproperlyConfigured:
        if appname == 'all':
            for app in get_apps():
                app_list[app] = None

    if len(app_list) > 0:
        objects = []
        for model in sort_dependencies(app_list.items()):
            if not model._meta.proxy and router.allow_syncdb(DEFAULT_DB_ALIAS, model):
                objects.extend(model._default_manager.using(DEFAULT_DB_ALIAS).all())
        serializers.get_serializer('json')
        json = serializers.serialize('json', objects, indent=2, use_natural_keys=True)
        response = HttpResponse(json, mimetype='application/json')
        response['Content-Disposition'] = 'attachment; filename=%s_%s_fixture.json' % (date.today().__str__(), appname)
        return response

    return render_to_response('diagnostic/dumpdata.html', context_instance=RequestContext(request))
Example #9
def load_fixture(name, using=DEFAULT_DB_ALIAS):
    """Progammatic way to load a fixture given some path. This does not
    assume the path is a fixture within some app and assumes a full path.
    """
    if os.path.isabs(name):
        fixture_path = name
    else:
        fixture_path = full_fixture_path(name)

    with open(fixture_path) as fixture:
        objects = serializers.deserialize(FIXTURE_FORMAT, fixture, using=using)

        try:
            with transaction.atomic(using):
                for obj in objects:
                    if (
                        hasattr(router, "allow_migrate") and
                        router.allow_migrate(using, obj.object.__class__)
                    ) or (
                        hasattr(router, "allow_syncdb") and
                        router.allow_syncdb(using, obj.object.__class__)
                    ):
                        obj.save(using=using)
        except (DatabaseError, IntegrityError), e:
            msg = u'Could not load {0}.{1}(pk={2}): {3}'.format(
                obj.object._meta.app_label,
                obj.object._meta.object_name, obj.object.pk, e)
            raise e.__class__, e.__class__(msg), sys.exc_info()[2]
Example #10
def create_default_anonymous_user(app, created_models, verbosity, db, **kwargs):
    # Only create the default anonymous user in databases where Django created the table
    if User in created_models and router.allow_syncdb(db, User):
        if verbosity >= 2:
            print("Creating anonymous User object")

        user = User(
            pk=settings.ANONYMOUS_USER_ID,
            username='******',
            first_name='Anonymous',
            last_name='User',
        )

        user.is_setup = True  # prevent default profiles from being created, because the sequence hasn't been updated
        user.save(using=db)

        # We set an explicit pk instead of relying on auto-incrementation,
        # so we need to reset the database sequence. See #17415.
        sequence_sql = connections[db].ops.sequence_reset_sql(no_style(), [User])
        if not sequence_sql:
            sequence_sql = connections[db].ops.sequence_reset_sql(no_style(), [Entity])
        if sequence_sql:
            if verbosity >= 2:
                print("Resetting sequence")
            cursor = connections[db].cursor()
            for command in sequence_sql:
                cursor.execute(command)

        user.is_setup = False  # allow default profiles to be created now
        user.save(using=db)
Example #11
def create_first_user(created_models, verbosity, db, app=None, **kwargs):
    # this is super confusing
    if app and app.__name__ != 'sentry.models':
        return

    if User not in created_models:
        return

    if hasattr(router, 'allow_migrate'):
        if not router.allow_migrate(db, User):
            return
    else:
        if not router.allow_syncdb(db, User):
            return
    if not kwargs.get('interactive', True):
        return

    import click
    if not click.confirm('\nWould you like to create a user account now?', default=True):
        # Not using `abort=1` because we don't want to exit out from further execution
        click.echo('\nRun `sentry createuser` to do this later.\n')
        return

    from sentry.runner import call_command
    call_command('sentry.runner.commands.createuser.createuser')
Example #12
    def _precompile_fixture(self, fixture, db):
        """Precompiles a fixture.

        The fixture is loaded and deserialized, and the resulting objects
        are stored for future use.
        """
        assert db in TestCase._precompiled_fixtures
        assert fixture not in TestCase._precompiled_fixtures[db]

        fixture_path = None

        for fixture_dir in self._get_fixture_dirs():
            fixture_path = os.path.join(fixture_dir, fixture + '.json')

            if os.path.exists(fixture_path):
                break

        try:
            if not fixture_path:
                raise IOError('Fixture path not found')

            with open(fixture_path, 'r') as fp:
                TestCase._precompiled_fixtures[db][fixture] = [
                    obj
                    for obj in serializers.deserialize('json', fp, using=db)
                    if router.allow_syncdb(db, obj.object.__class__)
                ]
        except IOError as e:
            sys.stderr.write('Unable to load fixture %s: %s\n' % (fixture, e))
Example #13
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        if verbosity >= 1:
            print "Creating test database '%s'..." % self.connection.alias

        test_database_name = self._create_test_db(verbosity, autoclobber)

        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name
        can_rollback = self._rollback_works()
        self.connection.settings_dict["SUPPORTS_TRANSACTIONS"] = can_rollback

        call_command('syncdb', verbosity=verbosity, interactive=False, database=self.connection.alias)

        if settings.CACHE_BACKEND.startswith('db://'):
            from django.core.cache import parse_backend_uri, cache
            from django.db import router
            if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
                _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
                call_command('createcachetable', cache_name, database=self.connection.alias)

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Example #14
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        # Don't import django.core.management if it isn't needed.
        from django.core.management import call_command

        test_database_name = self._get_test_db_name()

        if verbosity >= 1:
            test_db_repr = ''
            if verbosity >= 2:
                test_db_repr = " ('%s')" % test_database_name
            print "Creating test database for alias '%s'%s..." % (self.connection.alias, test_db_repr)

        self._create_test_db(verbosity, autoclobber)

        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name

        # Confirm the feature set of the test database
        self.connection.features.confirm()

        # Report syncdb messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded)
        if self.connection.alias == "default":
            call_command('syncdb',
            verbosity=max(verbosity - 1, 0),
            interactive=False,
            database=self.connection.alias,
            load_initial_data=False)

        # We need to then do a flush to ensure that any data installed by
        # custom SQL has been removed. The only test data should come from
        # test fixtures, or autogenerated from post_syncdb triggers.
        # This has the side effect of loading initial data (which was
        # intentionally skipped in the syncdb).
        if self.connection.alias == "default":
            call_command('flush',
                verbosity=max(verbosity - 1, 0),
                interactive=False,
                database=self.connection.alias)

            from django.core.cache import get_cache
            from django.core.cache.backends.db import BaseDatabaseCache
            for cache_alias in settings.CACHES:
                cache = get_cache(cache_alias)
                if isinstance(cache, BaseDatabaseCache):
                    from django.db import router
                    if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
                        call_command('createcachetable', cache._table, database=self.connection.alias)

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Example #15
def get_capable_databases():
    """
    Returns a list of databases which are capable of supporting
    Nashvegas (based on their routing configuration).
    """
    for database in connections:
        if router.allow_syncdb(database, Migration):
            yield database
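Because this is a generator over database aliases, callers either iterate it directly or materialize it; a minimal usage sketch (migrate_database is a hypothetical helper, not part of the snippet above):

# Hypothetical usage; migrate_database() is an assumed helper used
# purely for illustration and is not defined in the snippet above.
for alias in get_capable_databases():
    migrate_database(alias)

# or collect the aliases when they are needed more than once
capable = list(get_capable_databases())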
Example #16
    def handle_noargs(self, **options):
        db = options.get('database', DEFAULT_DB_ALIAS)
        connection = connections[db]
        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive')

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_name in settings.INSTALLED_APPS:
            try:
                import_module('.management', app_name)
            except ImportError:
                pass

        sql_list = sql_flush(self.style, connection, only_django=True)

        if interactive:
            confirm = raw_input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                cursor = connection.cursor()
                for sql in sql_list:
                    cursor.execute(sql)
            except Exception, e:
                transaction.rollback_unless_managed(using=db)
                raise CommandError("""Database %s couldn't be flushed. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the expected database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
The full error: %s""" % (connection.settings_dict['NAME'], e))
            transaction.commit_unless_managed(using=db)

            # Emit the post sync signal. This allows individual
            # applications to respond as if the database had been
            # sync'd from scratch.
            all_models = []
            for app in models.get_apps():
                all_models.extend([
                    m for m in models.get_models(app, include_auto_created=True)
                    if router.allow_syncdb(db, m)
                ])
            emit_post_sync_signal(set(all_models), verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            kwargs = options.copy()
            kwargs['database'] = db
            call_command('loaddata', 'initial_data', **kwargs)
Example #17
 def emit_post_syncdb(verbosity, interactive, database):
     # Emit the post sync signal. This allows individual applications to
     # respond as if the database had been sync'd from scratch.
     all_models = []
     for app in models.get_apps():
         all_models.extend([
             m for m in models.get_models(app, include_auto_created=True)
             if router.allow_syncdb(database, m)
         ])
     emit_post_sync_signal(set(all_models), verbosity, interactive, database)
Example #18
 def emit_post_syncdb(verbosity, interactive, database):
     # Emit the post sync signal. This allows individual applications to
     # respond as if the database had been sync'd from scratch.
     all_models = []
     for app in models.get_apps():
         all_models.extend([
             m for m in models.get_models(app, include_auto_created=True)
             if router.allow_syncdb(database, m)
         ])
     emit_post_sync_signal(set(all_models), verbosity, interactive,
                           database)
Example #19
 def get_all_related_objects(self, local_only=False, include_hidden=False,
                             include_proxy_eq=False):
     using = router.db_for_write(self.model)
     objs = models.base.Options.get_all_related_objects_with_model(
             self,
             local_only=local_only,
             include_hidden=include_hidden,
             include_proxy_eq=include_proxy_eq
     )
     objs = [k for k, v in objs if router.allow_syncdb(using, k.model)]
     return objs
Example #20
 def installed_models(self, tables):
     "Returns a set of all models represented by the provided list of table names."
     from django.db import models, router
     all_models = []
     for app in models.get_apps():
         for model in models.get_models(app):
             if router.allow_syncdb(self.connection.alias, model):
                 all_models.append(model)
     return set([m for m in all_models
         if self.table_name_converter(m._meta.db_table) in map(self.table_name_converter, tables)
     ])
Example #21
    def installed_models(self, tables):
        "Returns a set of all models represented by the provided list of table names."
        from django.db import models, router

        all_models = []
        for app in models.get_apps():
            for model in models.get_models(app):
                if router.allow_syncdb(self.connection.alias, model):
                    all_models.append(model)
        tables = list(map(self.table_name_converter, tables))
        return set([m for m in all_models if self.table_name_converter(m._meta.db_table) in tables])
Example #22
 def get_objects():
     # Collate the objects to be serialized.
     for model in sort_dependencies(app_list.items()):
         if model in excluded_models:
             continue
         if not model._meta.proxy and router.allow_syncdb(using, model):
             if use_base_manager:
                 objects = model._base_manager
             else:
                 objects = model._default_manager
             for obj in objects.using(using).order_by(model._meta.pk.name).iterator():
                 yield obj
Example #23
 def get_objects():
     # Collate the objects to be serialized.
     for model in sort_dependencies(app_list.items()):
         if model in excluded_models:
             continue
         if not model._meta.proxy and router.allow_syncdb(using, model):
             if use_base_manager:
                 objects = model._base_manager
             else:
                 objects = model._default_manager
             for obj in objects.using(using).\
                     order_by(model._meta.pk.name).iterator():
                 yield obj
Example #24
def create_default_site(app, created_models, verbosity, db, **kwargs):
    # Only create the default sites in databases where Django created the table
    if Site in created_models and router.allow_syncdb(db, Site):
        if verbosity >= 2:
            print "Creating example.com Site object"
        # The default settings set SITE_ID = 1, and some tests in Django's test
        # suite rely on this value. However, if database sequences are reused
        # (e.g. in the test suite after flush/syncdb), it isn't guaranteed that
        # the next id will be 1, so we coerce it. See #15573 and #16353. This
        # can also crop up outside of tests - see #15346.
        s = Site(pk=1, domain="example.com", name="example.com")
        s.save(using=db)
    Site.objects.clear_cache()
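Handlers like this are normally wired to the post_syncdb signal so they run right after syncdb creates the app's tables; a hedged sketch of that registration (module aliases assumed for illustration):

# Sketch of the signal hookup, with the sites models module passed as
# sender so the handler only fires for that app's syncdb run.
from django.db.models import signals
from django.contrib.sites import models as site_app

signals.post_syncdb.connect(create_default_site, sender=site_app)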
Example #25
    def load_label(self, fixture_label):
        """
        Loads fixtures files for a given label.
        """
        for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label):
            _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
            open_method = self.compression_formats[cmp_fmt]
            fixture = open_method(fixture_file, 'r')
            try:
                self.fixture_count += 1
                objects_in_fixture = 0
                loaded_objects_in_fixture = 0
                if self.verbosity >= 2:
                    self.stdout.write("Installing %s fixture '%s' from %s." %
                        (ser_fmt, fixture_name, humanize(fixture_dir)))

                objects = serializers.deserialize(ser_fmt, fixture,
                    using=self.using, ignorenonexistent=self.ignore)

                for obj in objects:
                    objects_in_fixture += 1
                    if router.allow_syncdb(self.using, obj.object.__class__):
                        loaded_objects_in_fixture += 1
                        self.models.add(obj.object.__class__)
                        try:
                            obj.save(using=self.using)
                        except (DatabaseError, IntegrityError) as e:
                            e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                    'app_label': obj.object._meta.app_label,
                                    'object_name': obj.object._meta.object_name,
                                    'pk': obj.object.pk,
                                    'error_msg': force_text(e)
                                },)
                            raise

                self.loaded_object_count += loaded_objects_in_fixture
                self.fixture_object_count += objects_in_fixture
            except Exception as e:
                if not isinstance(e, CommandError):
                    e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),)
                raise
            finally:
                fixture.close()

            # Warn if the fixture we loaded contains 0 objects.
            if objects_in_fixture == 0:
                warnings.warn(
                    "No fixture data found for '%s'. (File format may be "
                    "invalid.)" % fixture_name,
                    RuntimeWarning
                )
Example #26
def create_app_sig(app, database):
    """
    Creates a dictionary representation of the models in a given app.
    Only those attributes that are interesting from a schema-evolution
    perspective are included.
    """
    app_sig = SortedDict()

    for model in get_models(app):
        # only include models that the router allows to be synced
        if not is_multi_db() or router.allow_syncdb(database, model):
            app_sig[model._meta.object_name] = create_model_sig(model)

    return app_sig
Example #27
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        # Don't import django.core.management if it isn't needed.
        from django.core.management import call_command

        test_database_name = self._get_test_db_name()

        if verbosity >= 1:
            test_db_repr = ''
            if verbosity >= 2:
                test_db_repr = " ('%s')" % test_database_name
            print "Creating test database for alias '%s'%s..." % (
                self.connection.alias, test_db_repr)

        self._create_test_db(verbosity, autoclobber)

        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name

        # Confirm the feature set of the test database
        self.connection.features.confirm()

        # Report syncdb messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded)
        call_command('syncdb',
                     verbosity=max(verbosity - 1, 0),
                     interactive=False,
                     database=self.connection.alias)

        from django.core.cache import get_cache
        from django.core.cache.backends.db import BaseDatabaseCache
        for cache_alias in settings.CACHES:
            cache = get_cache(cache_alias)
            if isinstance(cache, BaseDatabaseCache):
                from django.db import router
                if router.allow_syncdb(self.connection.alias,
                                       cache.cache_model_class):
                    call_command('createcachetable',
                                 cache._table,
                                 database=self.connection.alias)

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Example #28
def create_app_sig(app, database):
    """
    Creates a dictionary representation of the models in a given app.
    Only those attributes that are interesting from a schema-evolution
    perspective are included.
    """
    app_sig = SortedDict()

    for model in get_models(app):
        # only include models that the router allows to be synced
        if not is_multi_db() or router.allow_syncdb(database, model):
            app_sig[model._meta.object_name] = create_model_sig(model)

    return app_sig
Example #29
def load_app(app_path):
    testapp = django_load_app(app_path)
    app_name = testapp.__name__.split('.')[-2]
    connection = connections[DEFAULT_DB_ALIAS]
    cursor = connection.cursor()
    test_models = [m for m in models.get_models(testapp, include_auto_created=True)
            if router.allow_syncdb(DEFAULT_DB_ALIAS, m)]
    loaded_models[app_path] = test_models
    # We assume the models haven't been installed, otherwise there's more to do here
    
    # Get a list of already installed *models* so that references work right.
    tables = connection.introspection.table_names()
    seen_models = connection.introspection.installed_models(tables)
    pending_references = {}
    
    verbosity = 0
    
    # Create the tables for each model
    for model in test_models:
        # Create the model's database table, if it doesn't already exist.
        if verbosity >= 2:
            print "Processing %s.%s model" % (app_name, model._meta.object_name)
        sql, references = connection.creation.sql_create_model(model, no_style(), seen_models)
        seen_models.add(model)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in seen_models:
                sql.extend(connection.creation.sql_for_pending_references(refto, no_style(), pending_references))
        sql.extend(connection.creation.sql_for_pending_references(model, no_style(), pending_references))
        if verbosity >= 1 and sql:
            print "Creating table %s" % model._meta.db_table
        for statement in sql:
            cursor.execute(statement)
        tables.append(connection.introspection.table_name_converter(model._meta.db_table))
    transaction.commit_unless_managed(using=DEFAULT_DB_ALIAS)
    
    for model in test_models:
        index_sql = connection.creation.sql_indexes_for_model(model, no_style())
        if index_sql:
            if verbosity >= 1:
                print "Installing index for %s.%s model" % (app_name, model._meta.object_name)
            try:
                for sql in index_sql:
                    cursor.execute(sql)
            except Exception, e:
                sys.stderr.write("Failed to install index for %s.%s model: %s\n" % \
                                    (app_name, model._meta.object_name, e))
                transaction.rollback_unless_managed(using=DEFAULT_DB_ALIAS)
            else:
                transaction.commit_unless_managed(using=DEFAULT_DB_ALIAS)
Example #30
def create_permissions(app,
                       created_models,
                       verbosity,
                       db=DEFAULT_DB_ALIAS,
                       **kwargs):
    try:
        get_model('auth', 'Permission')
    except UnavailableApp:
        return

    if not router.allow_syncdb(db, auth_app.Permission):
        return

    from django.contrib.contenttypes.models import ContentType

    app_models = get_models(app)

    # This will hold the permissions we're looking for as
    # (content_type, (codename, name))
    searched_perms = list()
    # The codenames and ctypes that should exist.
    ctypes = set()
    for klass in app_models:
        # Force looking up the content types in the current database
        # before creating foreign keys to them.
        ctype = ContentType.objects.db_manager(db).get_for_model(klass)
        ctypes.add(ctype)
        for perm in _get_all_permissions(klass._meta, ctype):
            searched_perms.append((ctype, perm))

    # Find all the Permissions that have a content_type for a model we're
    # looking for.  We don't need to check for codenames since we already have
    # a list of the ones we're going to create.
    all_perms = set()
    ctypes_pks = set(ct.pk for ct in ctypes)
    for ctype, codename in auth_app.Permission.objects.all().values_list(
            'content_type', 'codename')[:1000000]:
        if ctype in ctypes_pks:
            all_perms.add((ctype, codename))

    perms = [
        auth_app.Permission(codename=codename, name=name, content_type=ctype)
        for ctype, (codename, name) in searched_perms
        if (ctype.pk, codename) not in all_perms
    ]
    auth_app.Permission.objects.using(db).bulk_create(perms)
    if verbosity >= 2:
        for perm in perms:
            print("Adding permission '%s'" % perm)
Example #31
        def get_objects():
            # Collate the objects to be serialized.
            for model in sort_dependencies(app_list.items()):
                if model in excluded_models:
                    continue
                if not model._meta.proxy and router.allow_syncdb(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = model._default_manager

                    queryset = objects.using(using).order_by(model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    for obj in queryset.iterator():
                        yield obj
Example #32
def create_first_user(app, created_models, verbosity, db, **kwargs):
    if User not in created_models:
        return
    if not router.allow_syncdb(db, User):
        return
    if not kwargs.get('interactive', True):
        return

    import click
    if not click.confirm('\nWould you like to create a user account now?', default=True):
        # Not using `abort=1` because we don't want to exit out from further execution
        click.echo('\nRun `sentry createuser` to do this later.\n')
        return

    from sentry.runner import call_command
    call_command('sentry.runner.commands.createuser.createuser')
Example #33
        def get_objects():
            # Collate the objects to be serialized.
            for model in sort_dependencies(app_list.items()):
                if model in excluded_models:
                    continue
                if not model._meta.proxy and router.allow_syncdb(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = QuerySet(model).all()

                    queryset = objects.using(using).order_by(model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    for obj in queryset.iterator():
                        yield obj
Example #34
 def handle_label(self, tablename, **options):
     db = options.get('database')
     cache = BaseDatabaseCache(tablename, {})
     if not router.allow_syncdb(db, cache.cache_model_class):
         return
     connection = connections[db]
     fields = (
         # "key" is a reserved word in MySQL, so use "cache_key" instead.
         models.CharField(name='cache_key',
                          max_length=255,
                          unique=True,
                          primary_key=True),
         models.TextField(name='value'),
         models.DateTimeField(name='expires', db_index=True),
     )
     table_output = []
     index_output = []
     qn = connection.ops.quote_name
     for f in fields:
         field_output = [qn(f.name), f.db_type(connection=connection)]
         field_output.append("%sNULL" % (not f.null and "NOT " or ""))
         if f.primary_key:
             field_output.append("PRIMARY KEY")
         elif f.unique:
             field_output.append("UNIQUE")
         if f.db_index:
             unique = f.unique and "UNIQUE " or ""
             index_output.append("CREATE %sINDEX %s ON %s (%s);" % \
                 (unique, qn('%s_%s' % (tablename, f.name)), qn(tablename),
                 qn(f.name)))
         table_output.append(" ".join(field_output))
     full_statement = ["CREATE TABLE %s (" % qn(tablename)]
     for i, line in enumerate(table_output):
         full_statement.append(
             '    %s%s' % (line, i < len(table_output) - 1 and ',' or ''))
     full_statement.append(');')
     curs = connection.cursor()
     try:
         curs.execute("\n".join(full_statement))
     except DatabaseError as e:
         transaction.rollback_unless_managed(using=db)
         raise CommandError(
             "Cache table '%s' could not be created.\nThe error was: %s." %
             (tablename, e))
     for statement in index_output:
         curs.execute(statement)
     transaction.commit_unless_managed(using=db)
Example #35
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        if verbosity >= 1:
            print "Creating test database '%s'..." % self.connection.alias

        test_database_name = self._create_test_db(verbosity, autoclobber)

        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name
        can_rollback = self._rollback_works()
        self.connection.settings_dict["SUPPORTS_TRANSACTIONS"] = can_rollback

        call_command('syncdb',
                     verbosity=verbosity,
                     interactive=False,
                     database=self.connection.alias,
                     load_initial_data=False)

        # We need to then do a flush to ensure that any data installed by
        # custom SQL has been removed. The only test data should come from
        # test fixtures, or autogenerated from post_syncdb triggers.
        # This has the side effect of loading initial data (which was
        # intentionally skipped in the syncdb).
        call_command('flush',
                     verbosity=verbosity,
                     interactive=False,
                     database=self.connection.alias)

        if settings.CACHE_BACKEND.startswith('db://'):
            from django.core.cache import parse_backend_uri, cache
            from django.db import router
            if router.allow_syncdb(self.connection.alias,
                                   cache.cache_model_class):
                _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
                call_command('createcachetable',
                             cache_name,
                             database=self.connection.alias)

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Example #36
def create_first_user(app, created_models, verbosity, db, **kwargs):
    if User not in created_models:
        return
    if not router.allow_syncdb(db, User):
        return
    if not kwargs.get('interactive', True):
        return

    import click
    if not click.confirm('\nWould you like to create a user account now?',
                         default=True):
        # Not using `abort=1` because we don't want to exit out from further execution
        click.echo('\nRun `sentry createuser` to do this later.\n')
        return

    from sentry.runner import call_command
    call_command('sentry.runner.commands.createuser.createuser')
Example #37
def create_permissions(app, created_models, verbosity, db=DEFAULT_DB_ALIAS, **kwargs):
    try:
        get_model('auth', 'Permission')
    except UnavailableApp:
        return

    if not router.allow_syncdb(db, auth_app.Permission):
        return

    from django.contrib.contenttypes.models import ContentType

    app_models = get_models(app)

    # This will hold the permissions we're looking for as
    # (content_type, (codename, name))
    searched_perms = list()
    # The codenames and ctypes that should exist.
    ctypes = set()
    for klass in app_models:
        # Force looking up the content types in the current database
        # before creating foreign keys to them.
        ctype = ContentType.objects.db_manager(db).get_for_model(klass)
        ctypes.add(ctype)
        for perm in _get_all_permissions(klass._meta, ctype):
            searched_perms.append((ctype, perm))

    # Find all the Permissions that have a content_type for a model we're
    # looking for.  We don't need to check for codenames since we already have
    # a list of the ones we're going to create.
    all_perms = set()
    ctypes_pks = set(ct.pk for ct in ctypes)
    for ctype, codename in auth_app.Permission.objects.all().values_list(
            'content_type', 'codename')[:1000000]:
        if ctype in ctypes_pks:
            all_perms.add((ctype, codename))

    perms = [
        auth_app.Permission(codename=codename, name=name, content_type=ctype)
        for ctype, (codename, name) in searched_perms
        if (ctype.pk, codename) not in all_perms
    ]
    auth_app.Permission.objects.using(db).bulk_create(perms)
    if verbosity >= 2:
        for perm in perms:
            print("Adding permission '%s'" % perm)
Example #38
    def handle_noargs(self, **options):
        db = options.get("database", DEFAULT_DB_ALIAS)
        connection = connections[db]
        verbosity = int(options.get("verbosity", 1))
        interactive = options.get("interactive")

        self.style = no_style()

        if interactive:
            confirm = raw_input(
                """You have requested a erase all the data in the current nucleos EDMS installation.
This will IRREVERSIBLY ERASE all user data currently in the database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """
            )
        else:
            confirm = "yes"

        if confirm == "yes":
            try:
                Cleanup.execute_all()
            except Exception as exception:
                raise CommandError(
                    """Unable to erase data.  Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the expected database tables doesn't exist."""
                )
            # Emit the post sync signal. This allows individual
            # applications to respond as if the database had been
            # sync'd from scratch.
            all_models = []
            for app in models.get_apps():
                all_models.extend(
                    [m for m in models.get_models(app, include_auto_created=True) if router.allow_syncdb(db, m)]
                )
            emit_post_sync_signal(set(all_models), verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            kwargs = options.copy()
            kwargs["database"] = db
        else:
            print "Erase data cancelled."
Example #39
 def handle_label(self, tablename, **options):
     db = options.get('database')
     cache = BaseDatabaseCache(tablename, {})
     if not router.allow_syncdb(db, cache.cache_model_class):
         return
     connection = connections[db]
     fields = (
         # "key" is a reserved word in MySQL, so use "cache_key" instead.
         models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
         models.TextField(name='value'),
         models.DateTimeField(name='expires', db_index=True),
     )
     table_output = []
     index_output = []
     qn = connection.ops.quote_name
     for f in fields:
         field_output = [qn(f.name), f.db_type(connection=connection)]
         field_output.append("%sNULL" % (not f.null and "NOT " or ""))
         if f.primary_key:
             field_output.append("PRIMARY KEY")
         elif f.unique:
             field_output.append("UNIQUE")
         if f.db_index:
             unique = f.unique and "UNIQUE " or ""
             index_output.append("CREATE %sINDEX %s ON %s (%s);" % \
                 (unique, qn('%s_%s' % (tablename, f.name)), qn(tablename),
                 qn(f.name)))
         table_output.append(" ".join(field_output))
     full_statement = ["CREATE TABLE %s (" % qn(tablename)]
     for i, line in enumerate(table_output):
         full_statement.append('    %s%s' % (line, i < len(table_output)-1 and ',' or ''))
     full_statement.append(');')
     curs = connection.cursor()
     try:
         curs.execute("\n".join(full_statement))
     except DatabaseError as e:
         self.stderr.write(
             "Cache table '%s' could not be created.\nThe error was: %s." %
                 (tablename, e))
         transaction.rollback_unless_managed(using=db)
     else:
         for statement in index_output:
             curs.execute(statement)
         transaction.commit_unless_managed(using=db)
Example #40
    def handle_noargs(self, **options):
        db = options.get('database', DEFAULT_DB_ALIAS)
        connection = connections[db]
        verbosity = int(options.get('verbosity', 1))
        interactive = options.get('interactive')

        self.style = no_style()

        if interactive:
            confirm = raw_input(
                """You have requested a erase all the data in the current Mayan EDMS installation.
This will IRREVERSIBLY ERASE all user data currently in the database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?

    Type 'yes' to continue, or 'no' to cancel: """)
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                Cleanup.execute_all()
            except Exception as exception:
                raise CommandError("""Unable to erase data.  Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the expected database tables doesn't exist.""")
            # Emit the post sync signal. This allows individual
            # applications to respond as if the database had been
            # sync'd from scratch.
            all_models = []
            for app in models.get_apps():
                all_models.extend([
                    m
                    for m in models.get_models(app, include_auto_created=True)
                    if router.allow_syncdb(db, m)
                ])
            emit_post_sync_signal(set(all_models), verbosity, interactive, db)

            # Reinstall the initial_data fixture.
            kwargs = options.copy()
            kwargs['database'] = db
        else:
            print 'Erase data cancelled.'
Example #41
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        # Don't import django.core.management if it isn't needed.
        from django.core.management import call_command

        test_database_name = self._get_test_db_name()

        if verbosity >= 1:
            test_db_repr = ''
            if verbosity >= 2:
                test_db_repr = " ('%s')" % test_database_name
            print "Creating test database for alias '%s'%s..." % (self.connection.alias, test_db_repr)

        self._create_test_db(verbosity, autoclobber)

        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name

        # Confirm the feature set of the test database
        self.connection.features.confirm()

        # Report syncdb messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded)
        call_command('syncdb', verbosity=max(verbosity - 1, 0), interactive=False, database=self.connection.alias)

        from django.core.cache import get_cache
        from django.core.cache.backends.db import BaseDatabaseCache
        for cache_alias in settings.CACHES:
            cache = get_cache(cache_alias)
            if isinstance(cache, BaseDatabaseCache):
                from django.db import router
                if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
                    call_command('createcachetable', cache._table, database=self.connection.alias)

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Example #42
def db_router_allows_syncdb(database, model_cls):
    """Return whether a database router allows syncdb operations for a model.

    This will only return ``True`` for Django 1.6 and older and if the
    router allows syncdb operations.

    Args:
        database (unicode):
            The name of the database.

        model_cls (type):
            The model class.

    Returns:
        bool:
        ``True`` if routers allow syncdb for this model.
    """
    return (django.VERSION[:2] <= (1, 6)
            and router.allow_syncdb(database, model_cls))
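A hedged example of guarding legacy syncdb-era work with this compatibility helper (MyModel and install_legacy_table are hypothetical names used only for illustration):

# Hypothetical caller; MyModel and install_legacy_table() are assumed
# names for illustration only.
if db_router_allows_syncdb('default', MyModel):
    # Only reached on Django <= 1.6 when the routers permit syncdb
    # for MyModel on the 'default' database.
    install_legacy_table(MyModel)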
Example #43
def create_first_user(app_config, using, interactive, **kwargs):
    if app_config and app_config.name != "sentry":
        return

    try:
        User = app_config.get_model("User")
    except LookupError:
        return

    if User.objects.filter(is_superuser=True).exists():
        return

    if hasattr(router, "allow_migrate"):
        if not router.allow_migrate(using, User):
            return
    else:
        if not router.allow_syncdb(using, User):
            return
    if not interactive:
        return
Example #44
        def create_db(self, load_initial):
            from django.core.management import call_command

            # Clear the database name because, if the database doesn't
            # exist, the Django ORM isn't able to connect to it.
            self.connection.settings_dict["NAME"] = None
            self._create_test_db(0, True)
            self.connection.settings_dict["NAME"] = self.database_name

            self.connection.close()
            # Confirm the feature set of the database
            self.connection.features.confirm()

            # Report syncdb messages at one level lower than that requested.
            # This ensures we don't get flooded with messages during testing
            # (unless you really ask to be flooded)
            call_command('syncdb',
                         verbosity=0,
                         interactive=False,
                         database=domain,
                         load_initial_data=load_initial,
                         migrate_all=True)

            from django.core.cache import get_cache
            from django.core.cache.backends.db import BaseDatabaseCache

            for cache_alias in settings.CACHES:
                cache = get_cache(cache_alias)
                if isinstance(cache, BaseDatabaseCache):
                    from django.db import router

                    if router.allow_syncdb(self.connection.alias,
                                           cache.cache_model_class):
                        call_command('createcachetable',
                                     cache._table,
                                     database=self.connection.alias)

            # Get a cursor (even though we don't need one yet). This has
            # the side effect of initializing the test database.
            cursor = self.connection.cursor()
            return self.database_name
Example #45
    def django_table_names(self, only_existing=False):
        """
        Returns a list of all table names that have associated Django models and
        are in INSTALLED_APPS.

        If only_existing is True, the resulting list will only include the tables
        that actually exist in the database.
        """
        from django.db import models, router
        tables = set()
        for app in models.get_apps():
            for model in models.get_models(app):
                if not model._meta.managed:
                    continue
                if not router.allow_syncdb(self.connection.alias, model):
                    continue
                tables.add(model._meta.db_table)
                tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many])
        if only_existing:
            tables = [t for t in tables if self.table_name_converter(t) in self.table_names()]
        return tables
Example #46
    def _precompile_fixture(self, fixture, db):
        """Precompile a fixture.

        The fixture is loaded and deserialized, and the resulting objects
        are stored for future use.

        Args:
            fixture (unicode):
                The name of the fixture.

            db (unicode):
                The database name to load fixture data on.
        """
        assert db in self._precompiled_fixtures
        assert fixture not in self._precompiled_fixtures[db]

        fixture_path = None

        for fixture_dir in self._get_fixture_dirs():
            fixture_path = os.path.join(fixture_dir, fixture + '.json')

            if os.path.exists(fixture_path):
                break

        try:
            if not fixture_path:
                raise IOError('Fixture path not found')

            with open(fixture_path, 'r') as fp:
                self._precompiled_fixtures[db][fixture] = [
                    obj
                    for obj in serializers.deserialize('json', fp, using=db)
                    if ((hasattr(router, 'allow_syncdb') and
                         router.allow_syncdb(db, obj.object.__class__)) or
                        (hasattr(router, 'allow_migrate_model') and
                         router.allow_migrate_model(db, obj.object)))
                ]
        except IOError as e:
            sys.stderr.write('Unable to load fixture %s: %s\n' % (fixture, e))
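
The allow_syncdb / allow_migrate_model branching above is a compatibility shim spanning the old and new router APIs; the same check could be factored into a helper, as in this hedged sketch (the helper name is an assumption):

from django.db import router

def model_allowed_on_db(db, model):
    # allow_syncdb existed up to Django 1.6; allow_migrate_model replaced it later.
    if hasattr(router, 'allow_syncdb'):
        return router.allow_syncdb(db, model)
    return router.allow_migrate_model(db, model)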
Exemple #47
0
def create_default_school(app, created_models, verbosity, db, **kwargs):
    # Only create the default schools in databases where Django created the table
    if School in created_models and router.allow_syncdb(db, School):
        if verbosity >= 2:
            print("Creating the default School object")
        School(pk=1,
               name="Default School",
               short_name="Default",
               hostname="localhost").save(using=db)

        # We set an explicit pk instead of relying on auto-incrementation,
        # so we need to reset the database sequence. See #17415.
        sequence_sql = connections[db].ops.sequence_reset_sql(
            no_style(), [School])
        if sequence_sql:
            if verbosity >= 2:
                print("Resetting sequence")
            cursor = connections[db].cursor()
            for command in sequence_sql:
                cursor.execute(command)

    School.objects.clear_cache()
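
Handlers of this shape normally run from the post_syncdb signal so the School table already exists; a hedged sketch of that wiring (the sender module is an assumption):

from django.db.models import signals
from myschool import models as school_models  # assumed app models module

signals.post_syncdb.connect(create_default_school, sender=school_models)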
Exemple #48
0
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.
        """
        if verbosity >= 1:
            print "Creating test database '%s'..." % self.connection.alias

        test_database_name = self._create_test_db(verbosity, autoclobber)

        self.connection.close()
        self.connection.settings_dict["NAME"] = test_database_name

        # Confirm the feature set of the test database
        self.connection.features.confirm()

        # Report syncdb messages at one level lower than that requested.
        # This ensures we don't get flooded with messages during testing
        # (unless you really ask to be flooded)
        call_command('syncdb',
                     verbosity=max(verbosity - 1, 0),
                     interactive=False,
                     database=self.connection.alias)

        if settings.CACHE_BACKEND.startswith('db://'):
            from django.core.cache import parse_backend_uri, cache
            from django.db import router
            if router.allow_syncdb(self.connection.alias,
                                   cache.cache_model_class):
                _, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
                call_command('createcachetable',
                             cache_name,
                             database=self.connection.alias)

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Exemple #49
0
    def create_test_db(self, verbosity=1, autoclobber=False):
        """
        Creates a test database, prompting the user for confirmation if the
        database already exists. Returns the name of the test database created.

        This method is overloaded to load up the SpatiaLite initialization
        SQL prior to calling the `syncdb` command.
        """
        if verbosity >= 1:
            print "Creating test database '%s'..." % self.connection.alias

        test_database_name = self._create_test_db(verbosity, autoclobber)

        self.connection.close()

        self.connection.settings_dict["NAME"] = test_database_name
        # Confirm the feature set of the test database
        self.connection.features.confirm()
        # Need to load the SpatiaLite initialization SQL before running `syncdb`.
        self.load_spatialite_sql()
        call_command('syncdb',
                     verbosity=verbosity,
                     interactive=False,
                     database=self.connection.alias)

        for cache_alias in settings.CACHES:
            cache = get_cache(cache_alias)
            if isinstance(cache, BaseDatabaseCache):
                from django.db import router
                if router.allow_syncdb(self.connection.alias,
                                       cache.cache_model_class):
                    call_command('createcachetable',
                                 cache._table,
                                 database=self.connection.alias)
        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database.
        cursor = self.connection.cursor()

        return test_database_name
Exemple #50
0
    def _update_models(self):
        if (self.process_before() == False):
            # Pre-processing failed; abort.
            return

        # Collect every model class from INSTALLED_APPS.
        self._using = DEFAULT_DB_ALIAS  # TODO: support multiple databases, not just 'default'
        models = sort_dependencies(self._app_list.items())
        ofn = "/tmp/django_s3_backup_%s_" % (
            datetime.now().strftime("%Y%m%d%H%M%S"))
        # print "Writing JSON to %s..." % ofn
        print "Collected all models."
        # Dump each app's models to JSON and upload the result to S3.
        stream_fp = None
        for model in models:
            # Debug helper: uncomment to restrict the dump to a single model.
            #if ( model.__name__ != "Customer" ):
            #    continue
            if ((not model._meta.proxy)
                    and (router.allow_syncdb(self._using, model))):
                try:
                    # Write every record of this model to the file ofn.
                    fsize = self.create_json_file(ofn, model, self._using)
                    if (fsize is None):
                        # No records for this model; skip it.
                        continue
                    # Upload the file contents to S3.
                    self.update_S3(model, ofn, fsize)
                except:
                    if (stream_fp):
                        stream_fp.close()
                        os.remove(ofn)
                    raise

        if (self.process_after() == False):
            # Post-processing failed; abort.
            return
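
create_json_file and update_S3 are helpers of the same class that are not shown; a hedged sketch of what the serialization step might look like using Django's JSON serializer (this body is an assumption, not the original implementation):

import os
from django.core import serializers

# Sketch of a create_json_file method (intended to sit on the same class).
def create_json_file(self, ofn, model, using):
    # Serialize every row of the model on the given alias to ofn and return
    # the file size, or None when the table has no records.
    queryset = model._default_manager.using(using).all()
    if not queryset.exists():
        return None
    with open(ofn, 'w') as fp:
        serializers.serialize('json', queryset, stream=fp)
    return os.path.getsize(ofn)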
Exemple #51
0
def create_default_site(app, created_models, verbosity, db, **kwargs):
    # Only create the default sites in databases where Django created the table
    if Site in created_models and router.allow_syncdb(db, Site):
        # The default settings set SITE_ID = 1, and some tests in Django's test
        # suite rely on this value. However, if database sequences are reused
        # (e.g. in the test suite after flush/syncdb), it isn't guaranteed that
        # the next id will be 1, so we coerce it. See #15573 and #16353. This
        # can also crop up outside of tests - see #15346.
        if verbosity >= 2:
            print "Creating example.com Site object"
        Site(pk=1, domain="example.com", name="example.com").save(using=db)

        # We set an explicit pk instead of relying on auto-incrementation,
        # so we need to reset the database sequence. See #17415.
        sequence_sql = connections[db].ops.sequence_reset_sql(no_style(), [Site])
        if sequence_sql:
            if verbosity >= 2:
                print "Resetting sequence"
            cursor = connections[db].cursor()
            for command in sequence_sql:
                cursor.execute(command)

    Site.objects.clear_cache()
Exemple #52
0
def create_app_sig(app, database):
    """
    Creates a dictionary representation of the models in a given app.
    Only those attributes that are interesting from a schema-evolution
    perspective are included.
    """
    app_sig = OrderedDict()

    for model in get_models(app):
        # Only include those models that can be synced.
        #
        # On Django 1.7 and up, we need to check if the model allows for
        # migrations (using allow_migrate_model).
        #
        # On older versions of Django, we check if the model allows for
        # synchronization to the database (allow_syncdb).
        if ((hasattr(router, 'allow_syncdb')
             and router.allow_syncdb(database, model))
                or (hasattr(router, 'allow_migrate_model')
                    and router.allow_migrate_model(database, model))):
            app_sig[model._meta.object_name] = create_model_sig(model)

    return app_sig
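
A hedged usage sketch: building a whole-project signature by mapping every installed app through create_app_sig (the alias and key naming are assumptions).

from collections import OrderedDict
from django.db.models import get_apps

project_sig = OrderedDict(
    # One schema signature per installed app, for the 'default' alias.
    (app.__name__, create_app_sig(app, 'default'))
    for app in get_apps()
)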
Exemple #53
0
def load_requested_data(data):
    """
    Load the given data dumps and return the number of imported objects.

    Wraps the entire action in a big transaction.

    """
    style = no_style()

    using = DEFAULT_DB_ALIAS
    connection = connections[using]
    cursor = connection.cursor()

    transaction.commit_unless_managed(using=using)
    transaction.enter_transaction_management(using=using)
    transaction.managed(True, using=using)
    
    models = set()
    counter = 0
    try:
        for format, stream in data:
            objects = serializers.deserialize(format, stream)
            for obj in objects:
                model = obj.object.__class__
                if router.allow_syncdb(using, model):
                    models.add(model)
                    counter += 1
                    obj.save(using=using)
        if counter > 0:
            sequence_sql = connection.ops.sequence_reset_sql(style, models)
            if sequence_sql:
                for line in sequence_sql:
                    cursor.execute(line)
    except Exception, e:
        transaction.rollback(using=using)
        transaction.leave_transaction_management(using=using)
        raise e
    else:
        transaction.commit(using=using)
        transaction.leave_transaction_management(using=using)

    return counter
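
A hedged usage sketch for load_requested_data (the file name is an assumption): each entry pairs a serialization format with an open stream.

with open('backup.json') as fp:
    imported = load_requested_data([('json', fp)])
print 'Imported %d objects' % imported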
Exemple #54
0
def dump_data(request, appname):
    app_list = SortedDict()

    try:
        if request.method == 'POST':
            for appname in request.POST.getlist('apps'):
                app = get_app(appname)
                app_list[app] = None
            appname = 'choices'
        else:
            app = get_app(appname)
            app_list[app] = None
    except ImproperlyConfigured:
        if appname == 'all':
            for app in get_apps():
                app_list[app] = None

    if (len(app_list) > 0):
        objects = []
        for model in sort_dependencies(app_list.items()):
            if not model._meta.proxy and router.allow_syncdb(
                    DEFAULT_DB_ALIAS, model):
                objects.extend(
                    model._default_manager.using(DEFAULT_DB_ALIAS).all())
        serializers.get_serializer('json')
        json = serializers.serialize('json',
                                     objects,
                                     indent=2,
                                     use_natural_keys=True)
        response = HttpResponse(json, mimetype='application/json')
        response[
            'Content-Disposition'] = 'attachment; filename=%s_%s_fixture.json' % (
                date.today().__str__(), appname)
        return response

    return render_to_response('diagnostic/dumpdata.html',
                              context_instance=RequestContext(request))
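
A hedged sketch of how the view above might be wired into an old-style urlconf (the URL pattern and name are assumptions):

from django.conf.urls import patterns, url

urlpatterns = patterns('',
    url(r'^dumpdata/(?P<appname>[\w.]+)/$', dump_data, name='dump-data'),
)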
Exemple #55
0
def load_fixture(name, using=DEFAULT_DB_ALIAS):
    """Progammatic way to load a fixture given some path. This does not
    assume the path is a fixture within some app and assumes a full path.
    """
    if os.path.isabs(name):
        fixture_path = name
    else:
        fixture_path = full_fixture_path(name)

    with open(fixture_path) as fixture:
        objects = serializers.deserialize(FIXTURE_FORMAT, fixture, using=using)

        with transaction.commit_manually(using):
            for obj in objects:
                if router.allow_syncdb(using, obj.object.__class__):
                    try:
                        obj.save(using=using)
                    except (DatabaseError, IntegrityError), e:
                        transaction.rollback(using)
                        msg = u'Could not load {0}.{1}(pk={2}): {3}'.format(
                            obj.object._meta.app_label,
                            obj.object._meta.object_name, obj.object.pk, e)
                        raise e.__class__, e.__class__(msg), sys.exc_info()[2]
            transaction.commit(using)
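
A hedged usage sketch: the fixture name can be app-relative or an absolute path, and an alternate alias can be targeted (names, paths, and aliases are assumptions).

# Relative name resolved via full_fixture_path(); alias defaults to 'default'.
load_fixture('initial_data.json')
# Absolute path, loaded into a secondary alias.
load_fixture('/srv/backups/users.json', using='archive')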
Exemple #56
0
def load_app(app_path):
    testapp = django_load_app(app_path)
    app_name = testapp.__name__.split('.')[-2]
    connection = connections[DEFAULT_DB_ALIAS]
    cursor = connection.cursor()
    test_models = [
        m for m in models.get_models(testapp, include_auto_created=True)
        if router.allow_syncdb(DEFAULT_DB_ALIAS, m)
    ]
    loaded_models[app_path] = test_models
    # We assume the models haven't been installed, otherwise there's more to do here

    # Get a list of already installed *models* so that references work right.
    tables = connection.introspection.table_names()
    seen_models = connection.introspection.installed_models(tables)
    pending_references = {}

    verbosity = 0

    # Create the tables for each model
    for model in test_models:
        # Create the model's database table, if it doesn't already exist.
        if verbosity >= 2:
            print "Processing %s.%s model" % (app_name,
                                              model._meta.object_name)
        sql, references = connection.creation.sql_create_model(
            model, no_style(), seen_models)
        seen_models.add(model)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in seen_models:
                sql.extend(
                    connection.creation.sql_for_pending_references(
                        refto, no_style(), pending_references))
        sql.extend(
            connection.creation.sql_for_pending_references(
                model, no_style(), pending_references))
        if verbosity >= 1 and sql:
            print "Creating table %s" % model._meta.db_table
        for statement in sql:
            cursor.execute(statement)
        tables.append(
            connection.introspection.table_name_converter(
                model._meta.db_table))
    transaction.commit_unless_managed(using=DEFAULT_DB_ALIAS)

    for model in test_models:
        index_sql = connection.creation.sql_indexes_for_model(
            model, no_style())
        if index_sql:
            if verbosity >= 1:
                print "Installing index for %s.%s model" % (
                    app_name, model._meta.object_name)
            try:
                for sql in index_sql:
                    cursor.execute(sql)
            except Exception, e:
                sys.stderr.write("Failed to install index for %s.%s model: %s\n" % \
                                    (app_name, model._meta.object_name, e))
                transaction.rollback_unless_managed(using=DEFAULT_DB_ALIAS)
            else:
                transaction.commit_unless_managed(using=DEFAULT_DB_ALIAS)
Exemple #57
0
def tables_used_by_fixtures(fixture_labels, using=DEFAULT_DB_ALIAS):
    """Act like Django's stock loaddata command, but, instead of loading data,
    return an iterable of the names of the tables into which data would be
    loaded."""
    # Keep a count of the installed objects and fixtures
    fixture_count = 0
    loaded_object_count = 0
    fixture_object_count = 0
    tables = set()

    class SingleZipReader(zipfile.ZipFile):
        def __init__(self, *args, **kwargs):
            zipfile.ZipFile.__init__(self, *args, **kwargs)
            if settings.DEBUG:
                assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
        def read(self):
            return zipfile.ZipFile.read(self, self.namelist()[0])

    compression_types = {
        None:   file,
        'gz':   gzip.GzipFile,
        'zip':  SingleZipReader
    }
    if has_bz2:
        compression_types['bz2'] = bz2.BZ2File

    app_module_paths = []
    for app in get_apps():
        if hasattr(app, '__path__'):
            # It's a 'models/' subpackage
            for path in app.__path__:
                app_module_paths.append(path)
        else:
            # It's a models.py module
            app_module_paths.append(app.__file__)

    app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]
    for fixture_label in fixture_labels:
        parts = fixture_label.split('.')

        if len(parts) > 1 and parts[-1] in compression_types:
            compression_formats = [parts[-1]]
            parts = parts[:-1]
        else:
            compression_formats = list(compression_types.keys())

        if len(parts) == 1:
            fixture_name = parts[0]
            formats = serializers.get_public_serializer_formats()
        else:
            fixture_name, format = '.'.join(parts[:-1]), parts[-1]
            if format in serializers.get_public_serializer_formats():
                formats = [format]
            else:
                formats = []

        if not formats:
            # stderr.write(style.ERROR("Problem installing fixture '%s': %s is
            # not a known serialization format.\n" % (fixture_name, format)))
            return set()

        if os.path.isabs(fixture_name):
            fixture_dirs = [fixture_name]
        else:
            fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

        for fixture_dir in fixture_dirs:
            # stdout.write("Checking %s for fixtures...\n" %
            # humanize(fixture_dir))

            label_found = False
            for combo in product([using, None], formats, compression_formats):
                database, format, compression_format = combo
                file_name = '.'.join(
                    p for p in [
                        fixture_name, database, format, compression_format
                    ]
                    if p
                )

                # stdout.write("Trying %s for %s fixture '%s'...\n" % \
                # (humanize(fixture_dir), file_name, fixture_name))
                full_path = os.path.join(fixture_dir, file_name)
                open_method = compression_types[compression_format]
                try:
                    fixture = open_method(full_path, 'r')
                    if label_found:
                        fixture.close()
                        # stderr.write(style.ERROR("Multiple fixtures named
                        # '%s' in %s. Aborting.\n" % (fixture_name,
                        # humanize(fixture_dir))))
                        return set()
                    else:
                        fixture_count += 1
                        objects_in_fixture = 0
                        loaded_objects_in_fixture = 0
                        # stdout.write("Installing %s fixture '%s' from %s.\n"
                        # % (format, fixture_name, humanize(fixture_dir)))
                        try:
                            objects = serializers.deserialize(format, fixture, using=using)
                            for obj in objects:
                                objects_in_fixture += 1
                                if router.allow_syncdb(using, obj.object.__class__):
                                    loaded_objects_in_fixture += 1
                                    tables.add(
                                        obj.object.__class__._meta.db_table)
                            loaded_object_count += loaded_objects_in_fixture
                            fixture_object_count += objects_in_fixture
                            label_found = True
                        except (SystemExit, KeyboardInterrupt):
                            raise
                        except Exception:
                            fixture.close()
                            # stderr.write( style.ERROR("Problem installing
                            # fixture '%s': %s\n" % (full_path, ''.join(tra
                            # ceback.format_exception(sys.exc_type,
                            # sys.exc_value, sys.exc_traceback)))))
                            return set()
                        fixture.close()

                        # If the fixture we loaded contains 0 objects, assume that an
                        # error was encountered during fixture loading.
                        if objects_in_fixture == 0:
                            # stderr.write( style.ERROR("No fixture data found
                            # for '%s'. (File format may be invalid.)\n" %
                            # (fixture_name)))
                            return set()

                except Exception:
                    # stdout.write("No %s fixture '%s' in %s.\n" % \ (format,
                    # fixture_name, humanize(fixture_dir)))
                    pass

    return tables
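
A hedged usage sketch: the returned table names can drive targeted cleanup between tests instead of flushing the whole database (the fixture label and DELETE approach are assumptions).

from django.db import connections, DEFAULT_DB_ALIAS

tables = tables_used_by_fixtures(['initial_data'], using=DEFAULT_DB_ALIAS)
connection = connections[DEFAULT_DB_ALIAS]
cursor = connection.cursor()
for table in tables:
    # Remove only the rows the fixture would have populated.
    cursor.execute('DELETE FROM %s' % connection.ops.quote_name(table))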
Exemple #58
0
def update_contenttypes(app,
                        created_models,
                        verbosity=2,
                        db=DEFAULT_DB_ALIAS,
                        **kwargs):
    """
    Creates content types for models in the given app, removing any model
    entries that no longer have a matching model class.
    """
    if not router.allow_syncdb(db, ContentType):
        return

    ContentType.objects.clear_cache()
    app_models = get_models(app)
    if not app_models:
        return
    # They all have the same app_label, get the first one.
    app_label = app_models[0]._meta.app_label
    app_models = dict(
        (model._meta.object_name.lower(), model) for model in app_models)

    # Get all the content types
    content_types = dict(
        (ct.model, ct)
        for ct in ContentType.objects.using(db).filter(app_label=app_label))
    to_remove = [
        ct for (model_name, ct) in six.iteritems(content_types)
        if model_name not in app_models
    ]

    cts = [
        ContentType(
            name=smart_text(model._meta.verbose_name_raw),
            app_label=app_label,
            model=model_name,
        ) for (model_name, model) in six.iteritems(app_models)
        if model_name not in content_types
    ]
    ContentType.objects.using(db).bulk_create(cts)
    if verbosity >= 2:
        for ct in cts:
            print("Adding content type '%s | %s'" % (ct.app_label, ct.model))

    # Confirm that the content type is stale before deletion.
    if to_remove:
        if kwargs.get('interactive', False):
            content_type_display = '\n'.join(
                ['    %s | %s' % (ct.app_label, ct.model) for ct in to_remove])
            ok_to_delete = input(
                """The following content types are stale and need to be deleted:

%s

Any objects related to these content types by a foreign key will also
be deleted. Are you sure you want to delete these content types?
If you're unsure, answer 'no'.

    Type 'yes' to continue, or 'no' to cancel: """ % content_type_display)
        else:
            ok_to_delete = False

        if ok_to_delete == 'yes':
            for ct in to_remove:
                if verbosity >= 2:
                    print("Deleting stale content type '%s | %s'" %
                          (ct.app_label, ct.model))
                ct.delete()
        else:
            if verbosity >= 2:
                print("Stale content types remain.")