def db_ready_handler(sender, **kwargs):
    """Pre-load macro-synteny caches on the first database connection.

    Populates the module-level caches CHROMOSOME_FAMILY_COUNTS, CHROMOSOME_MAP,
    CHROMOSOMES_AS_FAMILIES, and CHROMOSOMES_AS_GENES from the gene/family/
    chromosome tables, then disconnects itself so the (expensive) load runs
    only once per process.
    """
    global CHROMOSOME_FAMILY_COUNTS, CHROMOSOME_MAP, CHROMOSOMES_AS_FAMILIES, \
        CHROMOSOMES_AS_GENES
    # print() call form works identically under Python 2 and 3 for one arg
    # (the original used the Python 2-only print statement).
    print('Pre-loading macro-synteny data...')
    # get family assignment and number on chromosome for each gene
    families = GeneFamilyAssignment.objects.all().iterator()
    gene_family_map = dict((f.gene_id, f.family_label) for f in families)
    gene_orders = list(GeneOrder.objects.all()
                       .order_by('chromosome_id', 'number'))
    # fetch all chromosomes
    chromosome_cvs = list(Cvterm.objects.filter(name='chromosome'))
    chromosomes = list(Feature.objects
                       .only('feature_id', 'name', 'organism_id')
                       .filter(type__in=chromosome_cvs))
    CHROMOSOME_MAP = dict((c.feature_id, c) for c in chromosomes)
    # construct various representations of chromosomes
    CHROMOSOMES_AS_GENES = defaultdict(list)
    CHROMOSOMES_AS_FAMILIES = defaultdict(list)
    CHROMOSOME_FAMILY_COUNTS = defaultdict(lambda: defaultdict(int))
    for o in gene_orders:
        if o.chromosome_id in CHROMOSOME_MAP:
            CHROMOSOMES_AS_GENES[o.chromosome_id].append(o.gene_id)
            # genes with no family assignment get the empty-string label
            f = gene_family_map.get(o.gene_id, '')
            CHROMOSOMES_AS_FAMILIES[o.chromosome_id].append(f)
            CHROMOSOME_FAMILY_COUNTS[o.chromosome_id][f] += 1
    # only want to run on the initial database connection
    connection_created.disconnect(db_ready_handler)
def monkeypatch_user(sender, **kwargs):
    # Copy profile-model fields onto Django's User class as lazy properties
    # the first time a database connection is made, then disconnect itself.
    # NOTE(review): `include` and `exclude` are free variables here — they are
    # neither parameters nor visible in this chunk; presumably captured from an
    # enclosing factory/closure. Confirm where they are defined.
    profile_model = get_profile_model()
    # Never shadow fields User already defines, nor the `user` relation itself.
    exclude_names = set(field.name for field in User._meta.fields)
    exclude_names.add('user')
    if include is not None:
        if exclude is not None:
            raise ValueError('Cannot pass both "include" and "exclude"')
        include_names = set(include) - exclude_names
        iterfields = (field for field in profile_model._meta.fields
                      if field.name in include_names)
    else:
        if exclude:
            exclude_names.update(exclude)
        iterfields = (field for field in profile_model._meta.fields
                      if field.name not in exclude_names)
    for field in iterfields:
        name = field.name
        # Only attach attributes User does not already have.
        setattr_if_unset(User, name, _make_profile_property(name))
        if field.choices:
            # Mirror Django's get_FOO_display() helper for choice fields.
            setattr_if_unset(User, 'get_%s_display' % name,
                             curry(User._get_FIELD_display, field=field))
        if isinstance(field, (DateField, DateTimeField)) and not field.null:
            # Mirror get_next_by_FOO() / get_previous_by_FOO() for
            # non-nullable date/datetime fields.
            setattr_if_unset(User, 'get_next_by_%s' % name,
                             curry(_get_next_or_previous_by_profile_FIELD,
                                   field=field, is_next=True))
            setattr_if_unset(User, 'get_previous_by_%s' % name,
                             curry(_get_next_or_previous_by_profile_FIELD,
                                   field=field, is_next=False))
    post_save.connect(_save_profile_listener, sender=User)
    # Run once: drop this receiver after the first connection.
    connection_created.disconnect(monkeypatch_user)
def test_signal(self):
    """connection_created fires with the connection on reconnect and stops
    firing after the receiver is disconnected."""
    data = {}

    def receiver(sender, connection, **kwargs):
        data["connection"] = connection

    connection_created.connect(receiver)
    connection.close()
    cursor = connection.cursor()
    # assertIs/assertEqual report both operands on failure, unlike the
    # original assertTrue(x is y) / assertTrue(d == {}) which only say
    # "False is not true" (matches the modernized sibling test).
    self.assertIs(data["connection"], connection)

    connection_created.disconnect(receiver)
    data.clear()
    cursor = connection.cursor()
    self.assertEqual(data, {})
def load_items(connection: BaseDatabaseWrapper, **kwargs) -> None:
    """
    This function provides a way to load items into memory on server startup
    from the target database. It unregisters itself to ensure that it is only
    run once per startup.

    :param connection: a Django BaseDatabaseWrapper object
    :param kwargs: additional keyword arguments
    :return: None
    """
    # Always work against the default alias, regardless of which connection
    # triggered the signal.
    connection = connections[DEFAULT_DB_ALIAS]
    connection.prepare_database()
    executor = MigrationExecutor(connection)
    targets = executor.loader.graph.leaf_nodes()
    autodetector = MigrationAutodetector(
        executor.loader.project_state(),
        ProjectState.from_apps(apps),
    )
    unmade_migrations = autodetector.changes(graph=executor.loader.graph)
    unrun_migrations = executor.migration_plan(targets)

    if any(APP_NAME in str(app) for app in unmade_migrations):
        # Unmade migrations mean the models and the schema may disagree.
        print("Can't load items. Detected unmade migrations.")
    elif any(APP_NAME in str(migration) for migration, _ in unrun_migrations):
        # Pending migrations mean the schema is not up to date yet.
        print("Can't load items. Migrations need applied.")
    else:
        # Schema is current: cache every Item in the module-level dict.
        for item in Item.objects.all():
            items[item.id] = item

    # We do this to ensure this runs ONLY once per startup
    connection_created.disconnect(load_items)
def uninstall_if_needed(setting, value, enter, **kwargs):
    """
    Undo the effects of PostgresConfig.ready() when django.contrib.postgres
    is "uninstalled" by override_settings().
    """
    still_installed = (enter or setting != 'INSTALLED_APPS' or
                       'django.contrib.postgres' in set(value))
    if still_installed:
        return
    connection_created.disconnect(register_type_handlers)
    # Remove each postgres lookup from both text-ish field types.
    for lookup in (Unaccent, SearchLookup, TrigramSimilar):
        CharField._unregister_lookup(lookup)
        TextField._unregister_lookup(lookup)
    # Disconnect this receiver until the next time this app is installed
    # and ready() connects it again to prevent unnecessary processing on
    # each setting change.
    setting_changed.disconnect(uninstall_if_needed)
def test_signal(self):
    """connection_created delivers the wrapper while connected; once the
    receiver is disconnected, nothing is captured anymore."""
    captured = {}

    def receiver(sender, connection, **kwargs):
        captured["connection"] = connection

    connection_created.connect(receiver)
    connection.close()
    # Opening a cursor forces a fresh connection, firing the signal.
    with connection.cursor():
        pass
    self.assertIs(captured["connection"].connection, connection.connection)

    connection_created.disconnect(receiver)
    captured.clear()
    with connection.cursor():
        pass
    self.assertEqual(captured, {})
def uninstall_if_needed(setting, value, enter, **kwargs):
    """
    Undo the effects of PostgresConfig.ready() when django.contrib.postgres
    is "uninstalled" by override_settings().
    """
    postgres_removed = (not enter and setting == 'INSTALLED_APPS' and
                        'django.contrib.postgres' not in set(value))
    if not postgres_removed:
        return
    connection_created.disconnect(register_type_handlers)
    # Strip each postgres lookup from both text-ish field types.
    for lookup in (Unaccent, SearchLookup, TrigramSimilar):
        CharField._unregister_lookup(lookup)
        TextField._unregister_lookup(lookup)
    # Disconnect this receiver until the next time this app is installed
    # and ready() connects it again to prevent unnecessary processing on
    # each setting change.
    setting_changed.disconnect(uninstall_if_needed)
    MigrationWriter.unregister_serializer(RANGE_TYPES)
def add_spatial_version_related_fields(sender, **kwargs):
    """
    Adds fields after establishing a database connection to prevent database
    operations at compile time.
    """
    # disconnect() is truthy only for the first call, so the fields are added
    # exactly once; later connections fall through the early return.
    if not connection_created.disconnect(add_spatial_version_related_fields,
                                         sender=DatabaseWrapper):
        return
    if connection.ops.spatial_version[0] >= 4:
        # SpatiaLite 4+: srtext column exists and geometry type is an integer.
        SpatialiteSpatialRefSys.add_to_class('srtext',
                                             models.CharField(max_length=2048))
        SpatialiteGeometryColumns.add_to_class(
            'type', models.IntegerField(db_column='geometry_type'))
    else:
        # Older SpatiaLite stores the geometry type as a string.
        SpatialiteGeometryColumns.add_to_class(
            'type', models.CharField(max_length=30))
def database_connected(cls, signal, sender, connection, **kwargs):
    """
    Register this type with the database the first time a connection is
    made.
    """
    if isinstance(connection, PostgresDatabaseWrapper):
        # Try to register the type. If the type has not been created in a
        # migration, the registration will fail. The type will be
        # registered as part of the migration, so hopefully the migration
        # will run soon.
        try:
            cls.register_composite(connection)
        except ProgrammingError:
            # BUG FIX: the message contains a %s placeholder but the original
            # passed no argument, so logging could never fill in the type name
            # (it reports a formatting error instead). Supply the db_type,
            # which is already used as the dispatch_uid below.
            LOGGER.warning(
                "Failed to register composite %s. This might be because "
                "the migration to register it has not run yet",
                cls._meta.db_type)
    # Disconnect the signal now - only need to register types on the
    # initial connection
    connection_created.disconnect(cls.database_connected,
                                  dispatch_uid=cls._meta.db_type)
def startup_db(sender, connection, signal=None, **kwargs):
    """Load all Database rows from the 'default' alias once at startup.

    Disconnects itself (via the 'db_autoload' dispatch_uid) so it fires only
    for the first connection.
    """
    from models import Database
    Database.objects.using(
        'default').all().load()  # Problem with initial syncdb
    connection_created.disconnect(dispatch_uid='db_autoload')
    # print() call form works on both Python 2 and 3 (was a py2-only
    # print statement).
    print('LOADED')
def tearDownClass(cls):
    """Detach the hstore signal receiver before the regular class teardown."""
    # Keeping the handler connected adds per-connection overhead that only
    # PostgreSQL-related tests benefit from.
    from django.contrib.postgres.signals import \
        register_hstore_handler as hstore_handler
    connection_created.disconnect(hstore_handler)
    super(PostgreSQLTestCase, cls).tearDownClass()
def setup_databases(self):
    """Create the test databases with the hstore hook attached, then detach it."""
    connection_created.connect(create_hstore)
    # The hook only needs to fire while the test databases are being built.
    databases = super(PostgresRunner, self).setup_databases()
    connection_created.disconnect(create_hstore)
    return databases
def tearDownClass(cls):
    """Drop the postgres type-handler receiver before normal teardown."""
    # Non-PostgreSQL tests gain nothing from the handler, so remove the
    # per-connection signal overhead here.
    from django.contrib.postgres.signals import \
        register_type_handlers as type_handlers
    connection_created.disconnect(type_handlers)
    super().tearDownClass()
def on_db_connection_ready(sender, **kwargs):
    """Broadcast django_ready on the first DB connection, except while migrating."""
    from .signals import django_ready

    running_migrate = 'migrate' in sys.argv
    if not running_migrate:
        django_ready.send(CommonConfig)
    # One-shot receiver: detach after the first connection.
    connection_created.disconnect(on_db_connection_ready)
def startup_db(sender, connection, signal=None, **kwargs):
    """Load all Database rows from the 'default' alias once at startup.

    Disconnects itself (via the 'db_autoload' dispatch_uid) so it fires only
    for the first connection.
    """
    from models import Database
    Database.objects.using('default').all().load()  # Problem with initial syncdb
    connection_created.disconnect(dispatch_uid='db_autoload')
    # print() call form works on both Python 2 and 3 (was a py2-only
    # print statement).
    print('LOADED')
"""