def ready(self):
    # AppConfig hook: ensure the database is created once syncdb has run.
    def create_database_signal(sender, **kwargs):
        create_database()
    post_syncdb.connect(create_database_signal, sender=models)


# NOTE(review): original indentation was lost; the receivers below are
# assumed to live at module level (the usual pattern for @receiver
# registration) — confirm against the original source file.
User = get_user_model()


@receiver(user_logged_in, dispatch_uid='metrics_user_loggedin')
def user_loggedin(sender, **kwargs):
    """ collect metrics about user logins """
    tags = {
        'user_id': str(kwargs['user'].pk),
        'username': kwargs['user'].username,
    }
    values = {'value': 1, 'path': kwargs['request'].path}
    write('user_logins', values=values, tags=tags)


# NOTE(review): the names and dispatch_uids of the two receivers below look
# swapped: `user_created` handles post_delete (records a deletion) and
# `user_deleted` handles post_save (records a creation). Behavior matches
# the docstrings; only the identifiers are misleading. Renaming would
# change the registered dispatch_uids, so this is flagged rather than fixed.
@receiver(post_delete, sender=User, dispatch_uid='metrics_user_created')
def user_created(sender, **kwargs):
    """ collect metrics about users unsubscribing """
    write('user_variations', {'variation': -1}, tags={'action': 'deleted'})
    write('user_count', {'total': User.objects.count()})


@receiver(post_save, sender=User, dispatch_uid='metrics_user_deleted')
def user_deleted(sender, **kwargs):
    """ collect metrics about new users signing up """
    if kwargs.get('created'):
        write('user_variations', {'variation': 1}, tags={'action': 'created'})
        write('user_count', {'total': User.objects.count()})
def schedule_post_syncdb(self, period, callable_func):
    """Schedule *callable_func* to run every *period* once syncdb completes.

    Use this instead of L{JobManager.schedule} when scheduling fails with
    errors about the time_job table not existing yet — the job is deferred
    until the post_syncdb signal fires and the table is guaranteed present.
    """
    from expedient.common.timer import models as timer_app

    uid = "schedule_post_syncdb_job_%s_%s" % (period, callable_func.__name__)

    def _schedule_on_syncdb(sender, **kwargs):
        # A previous run may already have registered this job; that's fine.
        try:
            self.schedule(period, callable_func)
        except JobAlreadyScheduled:
            pass

    # weak=False: the closure has no other strong reference, so a weak
    # connection would be garbage-collected before the signal fires.
    post_syncdb.connect(
        _schedule_on_syncdb,
        sender=timer_app,
        weak=False,
        dispatch_uid=uid,
    )
def contribute_to_class(self, cls, name):
    """Hook sequence management into the model this field is attached to."""
    super(AutoSequenceField, self).contribute_to_class(cls, name)
    # Parent models still call this method, but don't need sequences.
    sequence_uid = 'create_sequence_%s_%s' % (cls._meta, name)
    post_syncdb.connect(self.create_sequence,
                        dispatch_uid=sequence_uid,
                        weak=False)
    class_prepared.connect(self.set_sequence_name, sender=cls, weak=False)
def set_signals():
    """
    Add load_data to amcat.models::post_syncdb to make sure data is loaded
    before auth, and unhook the django.auth create_superuser hook.
    """
    post_syncdb.connect(initialize, sender=amcat.models)
    # Unhooking trick from http://stackoverflow.com/questions/1466827/
    post_syncdb.disconnect(
        create_superuser,
        sender=auth_models,
        dispatch_uid='django.contrib.auth.management.create_superuser',
    )
def register_messagetype(type): from django.db.models.signals import post_syncdb from mountain.core import models def install_type(sender, app, created_models, verbosity=0, **kwargs): if verbosity >= 1: obj, created = AcceptedTypes.objects.get_or_create(identifier=type) if created: print "Installed message type %s" % type post_syncdb.connect(install_type, sender=models, weak=False)
def set_signals():
    """
    Add load_data to amcat.models::post_syncdb to make sure data is loaded
    before auth, and unhook the django.auth create_superuser hook.
    """
    post_syncdb.connect(initialize, sender=amcat.models)
    # Disconnect by the exact uid django.contrib.auth registered under.
    # (Trick from http://stackoverflow.com/questions/1466827/)
    post_syncdb.disconnect(
        auth_models.User.objects.create_superuser,
        sender=auth_models,
        dispatch_uid='django.contrib.auth.management.create_superuser',
    )
def setup_test_environment_common():
    """Common setup procedures for the test environment.

    Clears stale .pyc files (e.g. leftovers from switching git branches)
    and wires the startup hook that installs a custom Postgres function
    after syncdb.
    """
    remove_orphaned_pyc_files('.')

    from django.db.models.signals import post_syncdb
    import main

    post_syncdb.connect(handle_post_syncdb_startup, sender=main.models)
def fix_auth_post_syncdb():
    """Disconnect django.contrib.auth's post_syncdb handlers.

    Prevents syncdb from calling create_permissions / create_superuser
    before South migrations have had a chance to run, then installs a
    replacement hook that creates permissions during tests.
    """
    # Importing the module guarantees its signal receivers are registered
    # (and therefore disconnectable).
    __import__('django.contrib.auth.management')

    for uid in ("django.contrib.auth.management.create_permissions",
                "django.contrib.auth.management.create_superuser"):
        post_syncdb.disconnect(dispatch_uid=uid)

    post_syncdb.connect(
        create_permissions_for_tests,
        dispatch_uid="go.base.models.create_permissions_for_tests")
def is_tenant_user_model(sender):
    """Toggle the create_superuser hook based on the swapped user model.

    When the active user model is tenant-specific, django's default
    create_superuser receiver must not run; otherwise it is (re)attached,
    since this module might be reloaded during testing.
    """
    from .models import TenantModelBase

    uid = "django.contrib.auth.management.create_superuser"
    if not isinstance(sender, TenantModelBase):
        # Make sure the `create_superuser` signal is correctly attached.
        post_syncdb.connect(create_superuser, sender=auth_app,
                            dispatch_uid=uid)
        return

    global TENANT_AUTH_USER_MODEL
    TENANT_AUTH_USER_MODEL = True
    # The swapped-for user model is tenant specific: drop the receiver.
    post_syncdb.disconnect(create_superuser, sender=auth_app,
                           dispatch_uid=uid)
def is_tenant_user_model(sender):
    """Attach or detach django's create_superuser hook for tenant models.

    Sets the module-level TENANT_AUTH_USER_MODEL flag and disconnects the
    default receiver when *sender* is a tenant model; otherwise re-attaches
    it (this module might be reloaded during testing).
    """
    from .models import TenantModelBase

    dispatch_uid = 'django.contrib.auth.management.create_superuser'
    if isinstance(sender, TenantModelBase):
        global TENANT_AUTH_USER_MODEL
        TENANT_AUTH_USER_MODEL = True
        # The swapped-for user model is tenant specific.
        post_syncdb.disconnect(create_superuser,
                               sender=auth_app,
                               dispatch_uid=dispatch_uid)
    else:
        post_syncdb.connect(create_superuser,
                            sender=auth_app,
                            dispatch_uid=dispatch_uid)
def update_sites_module(sender, **kwargs): """ Create a new row in the django_sites table that holds SITE_ID, SITE_NAME and SITE_DOMAIN defined in setting.py If SITE_NAME or SITE_DOMAIN are not defined they will default to 'example.com' """ id, name, domain = (1, "example.com", "example.com") try: id = settings.SITE_ID except AttributeError as e: print e print "Using: '%s' for site id." % id try: name = settings.SITE_NAME except AttributeError as e: print e print "Using: '%s' for site name." % name try: domain = settings.SITE_DOMAIN except AttributeError as e: print e print "Using: '%s' for site domain." % domain try: site = Site.objects.get(id=id) except Site.DoesNotExist: print "New site: [%s] %s (%s) created in django_site table." % (id, name, domain) site = Site(id=id, name=name, domain=domain) site.save() # Reconnect create_default_site request for other apps post_syncdb.connect(create_default_site, sender=sites_app) else: if site.name != name: print "A site with the id of %s is already taken. " "Please change SITE_ID to a different number in your " "settings.py file." % id
confirm = raw_input("Please enter either 'yes' or 'no': ") if verbosity >= 1: print print( "Creating initial content " "(About page, Blog, Contact form, Gallery) ...") print call_command("loaddata", "mezzanine.json") zip_name = "gallery.zip" copy_test_to_media("mezzanine.core", zip_name) gallery = Gallery.objects.get() gallery.zip_import = zip_name gallery.save() def create_site(app, created_models, verbosity, interactive, **kwargs): if settings.DEBUG and Site in created_models: domain = "127.0.0.1:8000" if verbosity >= 1: print print "Creating default Site %s ... " % domain print Site.objects.create(name="Local development", domain=domain) if not settings.TESTING: post_syncdb.connect(create_user, sender=auth_app) post_syncdb.connect(create_pages, sender=pages_app) post_syncdb.connect(create_site, sender=sites_app) post_syncdb.disconnect(create_default_site, sender=sites_app)
'user_blocks__isnull': True } }[name] user_ids = User.objects.filter(**kwargs).values_list('pk', flat=True)\ .order_by('pk') # Note: In Django 1.5, a batch_size parameter can be passed directly to # bulk_create(). Better to use that parameter instead of the below # code once version 1.4 is no longer supported by this app. total = 0 batch_size = FRIENDS_SYNCDB_BATCH_SIZE while True: objs = [model(user_id=id) for id in user_ids[:batch_size]] if objs: from django.db import transaction with transaction.commit_on_success(): for item in objs: model.objects.create(user_id=item.user_id) #model.objects.bulk_create(objs) total += len(objs) else: break if verbosity >= 2 and total: print "Created {0} new {1} record(s).".format(total, name) post_syncdb.connect( post_syncdb_handler, sender=models, dispatch_uid='friends.signals.post_syncdb', )
for content_type in ContentType.objects.all(): # build our permission slug label = "menu_%s" % content_type.model title = "Can see on menu %s" % content_type.name addEntityPermission(label, title) label = "list_%s" % content_type.model title = "Can list %s" % content_type.name addEntityPermission(label, title) label = "config_%s" % content_type.model title = "Can config %s" % content_type.name addEntityPermission(label, title) label = "custom_%s" % content_type.model title = "Can customize %s" % content_type.name addEntityPermission(label, title) # label = "wfadmin_%s" % content_type.model # title = "Workflow admin for %s" % content_type.name # addEntityPermission(label, title) # label = "refallow_%s" % content_type.model # title = "Can reference %s" % content_type.name # addEntityPermission(label, title) # check for all proto permissions after a syncdb post_syncdb.connect(addProtoPermissions)
from django.db.models.signals import post_syncdb import pg_fuzzysearch.models from django.db import connection, transaction def my_callback(sender, **kwargs): cursor = connection.cursor() print "Setting up pg_fuzzysearch ..." with open("pg_fuzzysearch/setup.sql", "r") as f: query = f.read() with transaction.commit_on_success(): cursor.execute(query) post_syncdb.connect(my_callback, sender=pg_fuzzysearch.models)
class CategoryRelation(models.Model):
    """Related category item.

    A generic FK link from a Category to any other content object, with an
    optional free-text tag (`relation_type`, e.g. 'leadphoto') describing
    the relationship.
    """
    category = models.ForeignKey(Category, verbose_name=_('category'))
    content_type = models.ForeignKey(
        ContentType,
        limit_choices_to=CATEGORY_RELATION_LIMITS,
        verbose_name=_('content type'))
    object_id = models.PositiveIntegerField(verbose_name=_('object id'))
    content_object = generic.GenericForeignKey('content_type', 'object_id')
    relation_type = models.CharField(
        verbose_name=_('relation type'),
        # BUG FIX: max_length was the string "200"; Django expects an int.
        max_length=200,
        blank=True, null=True,
        help_text=_(
            "A generic text field to tag a relation, like 'leadphoto'."))

    objects = CategoryRelationManager()

    def __unicode__(self):
        return u"CategoryRelation"


try:
    # South is required for migrating. Need to check for it.
    from south.db import db
    from django.db.models.signals import post_syncdb
    from categories.migration import migrate_app
    post_syncdb.connect(migrate_app)
except ImportError:
    pass
try: user = User.objects.get(username=settings.SYSTEM_USERNAME) except User.DoesNotExist: user = User.objects.create_user(settings.SYSTEM_USERNAME, settings.SYSTEM_EMAIL_ADDRESS, settings.SYSTEM_PASSWORD) user.is_staff = True user.is_superuser = True user.save() # Create system account try: account = Account.objects.get(user=user) except Account.DoesNotExist: print "create account" account = Account.objects.create(user=user) # Create Built-in tables installed_models = dict() for table_class in REGISTERED_BUILT_IN_TABLES: try: UserTable.objects.get(account=account, table_class_name=table_class.CLASS_NAME) except UserTable.DoesNotExist: installed_models[table_class.CLASS_NAME] = table_class().initialize(account, installed_models) # Signal after syncdb from django.db.models.signals import post_syncdb post_syncdb.connect(after_syncdb)
from django.db.models.signals import post_syncdb
from friends import models, signals

# After syncdb creates the friends tables, backfill Friendship records for
# existing users. The dispatch_uid keeps the handler from being connected
# twice if this module is imported more than once.
post_syncdb.connect(
    signals.create_friendship_instance_post_syncdb,
    sender=models,
    dispatch_uid='friends.signals.create_friendship_instance_post_syncdb',
)
from django.db.models.signals import post_syncdb
from notification import models as notification
import badges.api
import badges.models
from local import update_badges
from django.contrib.auth.models import User


def create_notice_types(app, created_models, verbosity, **kwargs):
    # Register the notification type sent when an invitation is accepted.
    notification.create_notice_type("invite_accepted", "Invite Accepted",
                                    "An invitation you sent has been accepted.")

post_syncdb.connect(create_notice_types, sender=notification)


def create_invite_badges(app, created_models, verbosity, **kwargs):
    # Register the invite-count badges, then recompute badge state for
    # every existing user so historical invites are credited.
    badges.api.create_badge("three_invites", "Three Invites", "You invited three people")
    badges.api.create_badge("five_invites", "Five Invites", "You invited five people")
    badges.api.create_badge("ten_invites", "Ten Invites", "You invited ten people")
    for u in User.objects.all():
        update_badges(u)

post_syncdb.connect(create_invite_badges, sender=badges.models)
indexname = 'source' ) def create_target_index(lang): return create_in( settings.WHOOSH_INDEX, schema = TARGET_SCHEMA, indexname = 'target-%s' % lang ) def create_index(sender=None, **kwargs): if not os.path.exists(settings.WHOOSH_INDEX): os.mkdir(settings.WHOOSH_INDEX) create_source_index() post_syncdb.connect(create_index) class Index(object): ''' Class to manage index readers and writers. ''' _source = None _target = {} _source_writer = None _target_writer = {} def source(self): ''' Returns source index. '''
"'domain:port'. For example 'localhost:8000' " "or 'www.example.com'. Hit enter to use the " "default (%s): " % domain) if entered: domain = entered.strip("': ") if verbosity >= 1: print print "Creating default Site %s ... " % domain print Site.objects.create(name="Default", domain=domain) def install_optional_data(verbosity): call_command("loaddata", "mezzanine_optional.json") zip_name = "gallery.zip" copy_test_to_media("mezzanine.core", zip_name) gallery = Gallery.objects.get() gallery.zip_import = zip_name gallery.save() if verbosity >= 1: print print ("Creating demo content " "(About page, Blog, Contact form, Gallery) ...") print if not settings.TESTING: post_syncdb.connect(create_user, sender=auth_app) post_syncdb.connect(create_pages, sender=pages_app) post_syncdb.connect(create_site, sender=sites_app) post_syncdb.disconnect(create_default_site, sender=sites_app)
type=Activity.SET_REGRESSION, ) def on_alert_creation(instance, **kwargs): from sentry.plugins import plugins for plugin in plugins.for_project(instance.project): safe_execute(plugin.on_alert, alert=instance) # Anything that relies on default objects that may not exist with default # fields should be wrapped in handle_db_failure post_syncdb.connect( handle_db_failure(create_default_projects), dispatch_uid="create_default_project", weak=False, ) post_save.connect( handle_db_failure(create_keys_for_project), sender=Project, dispatch_uid="create_keys_for_project", weak=False, ) post_save.connect( handle_db_failure(create_org_member_for_owner), sender=Organization, dispatch_uid="create_org_member_for_owner", weak=False, ) user_logged_in.connect(
if interactive: confirm = raw_input("\nWould you like to install an initial " "demo product and sale? (yes/no): ") while True: if confirm == "yes": break elif confirm == "no": return confirm = raw_input("Please enter either 'yes' or 'no': ") # This is a hack. Ideally to split fixtures between optional # and required, we'd use the same approach Mezzanine does, # within a ``createdb`` management command. Ideally to do this, # we'd subclass Mezzanine's createdb command and shadow it, # but to do that, the cartridge.shop app would need to appear # *after* mezzanine.core in the INSTALLED_APPS setting, but the # reverse is needed for template overriding (and probably other # bits) to work correctly. # SO........... we just cheat, and check sys.argv here. Namaste. elif "--nodata" in sys.argv: return if verbosity >= 1: print print "Creating demo product and sale ..." print call_command("loaddata", "cartridge_optional.json") copy_test_to_media("cartridge.shop", "product") if not settings.TESTING: post_syncdb.connect(create_product, sender=shop_app)
def freeze_option_epoch_for_project(instance, created, app=None, **kwargs):
    """Pin the option epoch for a freshly created project.

    Only acts on genuine creations dispatched from sentry.models — raw
    fixture loads and other apps' post_save signals are ignored.
    """
    if app and app.__name__ != "sentry.models":
        return
    if not created:
        return
    if kwargs.get("raw"):
        return
    from sentry import projectoptions
    projectoptions.default_manager.freeze_option_epoch(project=instance, force=False)


# Anything that relies on default objects that may not exist with default
# fields should be wrapped in handle_db_failure
post_syncdb.connect(
    handle_db_failure(create_default_projects),
    dispatch_uid="create_default_project",
    weak=False,
)
post_save.connect(
    handle_db_failure(create_keys_for_project),
    sender=Project,
    dispatch_uid="create_keys_for_project",
    weak=False,
)
post_save.connect(
    handle_db_failure(freeze_option_epoch_for_project),
    sender=Project,
    dispatch_uid="freeze_option_epoch_for_project",
    weak=False,
)
('custom_edit_order_budget', u"Modifier l'imputation"), ('custom_goto_status_3', u"Transmettre pour saisie SIFAC/SILAB"), ('custom_goto_status_4', u"Effectuer une saisie SIFAC/SILAB"), ('custom_order_any_team', u"Commander pour toutes les équipes"), ('custom_view_local_provider', u"Gestionnaire magasin"))) def create_custom_budget_permissions(sender, **kwargs): make_permissions( u"budget", (('custom_view_budget', u"Voir un budget"), ('custom_add_budget', u"Ajouter un budget"), ('custom_edit_budget', u"Editer un budget"), ('custom_can_transfer', u"Effectuer un virement"), ('custom_history_budget', u"Voir l'historique des budgets"))) def create_custom_team_permissions(sender, **kwargs): make_permissions( u"team", (('custom_is_admin', u"Administrateur"), ('custom_view_teams', u"Voir toutes les équipes"), ('custom_edit_member', u"Editer un membre d'équipe"), ('custom_activate_account', u"Activer un nouveau compte"), ('custom_add_group', u"Créer un groupe utilisateur"), ('custom_add_team', u"Créer une équipe"))) post_syncdb.connect(create_custom_order_permission, sender=order.models) post_syncdb.connect(create_custom_budget_permissions, sender=budget.models) post_syncdb.connect(create_custom_team_permissions, sender=team.models)
key = models.CharField(choices=FILTER_KEYS, max_length=32) value = models.CharField(max_length=200) class Meta: unique_together = (('key', 'value'), ) ### Helper methods def register_indexes(): """ Grabs all required indexes from filters and registers them. """ logger = logging.getLogger('sentry.setup') for filter_ in get_filters(): if filter_.column.startswith('data__'): Index.objects.register_model(Message, filter_.column, index_to='group') logger.debug('Registered index for for %s' % filter_.column) register_indexes() # XXX: Django sucks and we can't listen to our specific app # post_syncdb.connect(GroupedMessage.create_sort_index, sender=__name__) post_syncdb.connect(GroupedMessage.create_sort_index, sender=sys.modules[__name__])
declare new_val int; begin loop update sentry_projectcounter set value = value + delta where project_id = project returning value into new_val; if found then return new_val; end if; begin insert into sentry_projectcounter(project_id, value) values (project, delta) returning value into new_val; return new_val; exception when unique_violation then end; end loop; end $$ language plpgsql; """) finally: cursor.close() # TODO(dcramer): Remove when Django 1.6 is no longer supported, as this does # nothing with Django migrations post_syncdb.connect(create_counter_function, dispatch_uid="create_counter_function", weak=False)
# Note: the manager needs to know the subclass if issubclass(sender, EsIndexable): sender.es = ElasticsearchManager(sender) class_prepared.connect(add_es_manager) def es_save_callback(sender, instance, **kwargs): # TODO: batch ?! @task ?! if not issubclass(sender, EsIndexable): return instance.es.do_index() def es_delete_callback(sender, instance, **kwargs): if not issubclass(sender, EsIndexable): return instance.es.delete() def es_syncdb_callback(sender, app, created_models, **kwargs): for model in created_models: if issubclass(model, EsIndexable): model.es.create_index() if getattr(settings, 'ELASTICSEARCH_AUTO_INDEX', False): # Note: can't specify the sender class because EsIndexable is Abstract, # see: https://code.djangoproject.com/ticket/9318 post_save.connect(es_save_callback) post_delete.connect(es_delete_callback) post_syncdb.connect(es_syncdb_callback)
instance.member_set.create( user=instance.owner, type=MEMBER_OWNER ) def update_document(instance, created, **kwargs): if created: return SearchDocument.objects.filter( project=instance.project, group=instance, ).update(status=instance.status) # Signal registration post_syncdb.connect( create_default_project, dispatch_uid="create_default_project" ) post_save.connect( create_project_member_for_owner, sender=Project, dispatch_uid="create_project_member_for_owner" ) post_save.connect( update_document, sender=Group, dispatch_uid="update_document" )
import sys
import logging

from django.db.models.signals import post_syncdb
from server.management import defaults

logger = logging.getLogger('ecocontrol')


def initialize_defaults(**kwargs):
    # Seed the database with default data after syncdb.
    # Keep in mind that this function can be called multiple times, so
    # each initialize_* helper must tolerate existing data.
    defaults.initialize_default_user()
    defaults.initialize_default_scenario()
    defaults.initialize_views()
    defaults.initialize_weathervalues()

post_syncdb.connect(initialize_defaults)
for codename, name in _get_all_permissions(klass._meta): p, created = Permission.objects.get_or_create( codename=codename, content_type__pk=ctype.id, defaults={ 'name': name, 'content_type': ctype }) if created and verbosity >= 2: print("Adding permission '%s'" % p) # Replace the original handling with our modified one if # CONTENTTYPE_NO_TRAVERSE_PROXY is set. # This is needed if you want to use proper permissions for proxy models # that are tied to the proxy application. # See also: http://code.djangoproject.com/ticket/11154 try: settings.CONTENTTYPE_NO_TRAVERSE_PROXY except AttributeError: pass else: if settings.CONTENTTYPE_NO_TRAVERSE_PROXY: from django.db.models.signals import post_syncdb post_syncdb.disconnect( create_permissions, dispatch_uid='django.contrib.auth.management.create_permissions') post_syncdb.connect( create_permissions_respecting_proxy, dispatch_uid='django.contrib.auth.management.create_permissions')
def on_scenario_pre_delete(sender, **kwargs): pass def check_basic_increments(scenario): from footprint.main.sql_unformatted import rawSQL if not db_table_exists("basic_increments_{0}".format(scenario.id)): from footprint.uf_tools import executeSQL_now executeSQL_now(scenario.projects.inputs_outputs_db, [ rawSQL.make_increment_headers.format(scenario.working_schema, scenario.id) ]) # Register Django Signals to respond to synd_db and Scenario persistence post_syncdb.connect(on_post_syncdb, sender=main.models) post_save.connect(on_scenario_post_save, sender=Scenario) pre_delete.connect(on_scenario_pre_delete, sender=Scenario) # TODO not sure what this was for def get_project_options(project): return { 'base_year_grid': project.resolve_db_entity(DbEntityKey.BASE), 'base_year': project.base_year, 'vmt_geography_type': 'taz', 'vmt_geographies': project.resolve_db_entity(Keys.DB_ENTITY_VMT_GEOGRAPHIES),
super(SharedMemoryModel, cls).save(*args, **kwargs) #blockingCallFromThread(reactor, _save_callback, cls, *args, **kwargs) callFromThread(_save_callback, cls, *args, **kwargs) # Use a signal so we make sure to catch cascades. def flush_cache(**kwargs): def class_hierarchy(root): """Recursively yield a class hierarchy.""" yield root for subcls in root.__subclasses__(): for cls in class_hierarchy(subcls): yield cls for model in class_hierarchy(SharedMemoryModel): model.flush_instance_cache() #request_finished.connect(flush_cache) post_syncdb.connect(flush_cache) def flush_cached_instance(sender, instance, **kwargs): # XXX: Is this the best way to make sure we can flush? if not hasattr(instance, 'flush_cached_instance'): return sender.flush_cached_instance(instance) pre_delete.connect(flush_cached_instance) def update_cached_instance(sender, instance, **kwargs): if not hasattr(instance, 'cache_instance'): return sender.cache_instance(instance) post_save.connect(update_cached_instance)
from django.db.models.signals import post_syncdb
from django.conf import settings
from django.utils.translation import ugettext_noop as _

# NoticeType is optional: it is only importable when the "notification"
# app is installed; otherwise the signal hookup below is skipped entirely.
if "notification" in settings.INSTALLED_APPS:
    from notification.models import NoticeType
else:
    NoticeType = None

if NoticeType:
    def create_notice_types(app, created_models, verbosity, **kwargs):
        # Register the site-wide "system message" notice type after syncdb.
        NoticeType.create(
            "system_message",
            _("System Message"),
            _("Important information about %s") % settings.STORYBASE_SITE_NAME)

    post_syncdb.connect(create_notice_types, sender=NoticeType)
# Set user language if set def set_language_on_logon(request, user, **kwargs): language = UserOption.objects.get_value( user=user, project=None, key='language', default=None, ) if language and hasattr(request, 'session'): request.session['django_language'] = language # Signal registration post_syncdb.connect( create_default_project, dispatch_uid="create_default_project", weak=False, ) post_save.connect( create_team_and_keys_for_project, sender=Project, dispatch_uid="create_team_and_keys_for_project", weak=False, ) post_save.connect( create_team_member_for_owner, sender=Team, dispatch_uid="create_team_member_for_owner", weak=False, ) post_save.connect(
msg = "\nYou are using django.contrib.auth. Do you want cardstories " \ "to alter the username column\nto allow 75 characters? (yes/no): " answer = raw_input(msg) while not answer.lower() in ('y', 'n', 'yes', 'no'): answer = raw_input("Please enter either \"yes\" or \"no\": ") if answer.lower() in ('y', 'yes'): cursor = connection.cursor() cursor.execute("ALTER TABLE auth_user MODIFY COLUMN username varchar(75) NOT NULL") def update_domain_name(sender, app, created_models, verbosity, interactive, **kwargs): """ Updates default domain name. If non-interactive, uses a sane default that is compatible with local Facebook development. """ from django.contrib.sites.models import Site if Site in created_models and interactive: msg = "\nYou just installed Django's sites system. What domain name " \ "would you like to use?\nEnter a domain such as \"cardstories.org\": " domain = raw_input(msg) s = Site.objects.get(id=1) s.domain = domain s.name = domain s.save() post_syncdb.connect(update_username_column) post_syncdb.connect(update_domain_name, sender=sites_app)
import os

from whoosh import index, store, fields
from whoosh.index import create_in
from whoosh.qparser import QueryParser

from django.db.models.signals import post_syncdb
from django.conf import settings

# Whoosh schema for the pages search index: title/content are searchable
# and stored; url uniquely identifies a document so it can be re-indexed.
PAGES_WHOOSH_SCHEMA = fields.Schema(title=fields.TEXT(stored=True),
                                    content=fields.TEXT(stored=True),
                                    url=fields.ID(stored=True, unique=True))


def create_index(sender=None, **kwargs):
    """(Re)create the Whoosh pages index after syncdb.

    Ensures the index directory exists first; `create_in` is called for its
    side effect of writing a fresh index to disk (the previously unused
    `ix = ...` binding was removed).
    """
    if not os.path.exists(settings.HAYSTACK_WHOOSH_PATH):
        os.mkdir(settings.HAYSTACK_WHOOSH_PATH)
    create_in(settings.HAYSTACK_WHOOSH_PATH, PAGES_WHOOSH_SCHEMA, "ZORNA_PAGES")

post_syncdb.connect(create_index)
# -*- coding: utf-8 -*- # # Copyright (C) 2009 Ignacio Vazquez-Abrams # This file is part of python-fedora # # python-fedora is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # python-fedora is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with python-fedora; if not, see <http://www.gnu.org/licenses/> # ''' .. moduleauthor:: Ignacio Vazquez-Abrams <*****@*****.**> ''' from fedora.django.auth import models from django.db.models.signals import post_syncdb post_syncdb.connect(models._syncdb_handler, sender=models)
# ------------
# All-auth
# -----------
def setup_dummy_social_apps(sender, **kwargs):
    """
    `allauth` needs tokens for OAuth based providers. So let's setup
    some dummy tokens
    """
    site = Site.objects.get_current()
    oauth_like = (OAuth2Provider, OAuthProvider)
    for provider in registry.get_list():
        if not isinstance(provider, oauth_like):
            continue
        try:
            SocialApp.objects.get(provider=provider.id, sites=site)
        except SocialApp.DoesNotExist:
            print(
                "Installing dummy application credentials for %s."
                " Authentication via this provider will not work"
                " until you configure proper credentials via the"
                " Django admin (`SocialApp` models)" % provider.id
            )
            app = SocialApp.objects.create(
                provider=provider.id,
                secret="secret",
                client_id="client-id",
                name="%s application" % provider.id,
            )
            app.sites.add(site)


# We don't want to interfere with unittests et al
if "syncdb" in sys.argv:
    post_syncdb.connect(setup_dummy_social_apps, sender=sys.modules[__name__])
# if issubclass(sender, EsIndexable):
#     sender.es = ElasticsearchManager(sender)
# class_prepared.connect(add_es_manager)


def es_save_callback(sender, instance, **kwargs):
    """Index the instance after save (EsIndexable models only)."""
    # TODO: batch ?! @task ?!
    if issubclass(sender, EsIndexable):
        instance.es.do_index()


def es_delete_callback(sender, instance, **kwargs):
    """Remove the instance from the search index after delete."""
    if issubclass(sender, EsIndexable):
        instance.es.delete()


def es_syncdb_callback(sender, app, created_models, **kwargs):
    """Create an Elasticsearch index for each new EsIndexable model."""
    for model in created_models:
        if issubclass(model, EsIndexable):
            model.es.create_index()


if getattr(settings, 'ELASTICSEARCH_AUTO_INDEX', False):
    # Note: can't specify the sender class because EsIndexable is Abstract,
    # see: https://code.djangoproject.com/ticket/9318
    post_save.connect(es_save_callback)
    post_delete.connect(es_delete_callback)
    post_syncdb.connect(es_syncdb_callback)
cursor.execute(DROP_SQL % args) where = "WHERE %(version_model)s.%(state)s = %%s" % args base_sql = VIEW_SQL % args sql = " ".join([base_sql, where]) cursor.execute(sql, (schema, )) cursor.execute(TRIGGER % args) if DJANGO_VERSION < (1, 6): transaction.commit_unless_managed() def update_schema(sender=None, **kwargs): if sender: for m in sender.get_models(): if getattr(m._meta, '_view_model', None): do_updates(m._meta._view_model) def update_syncdb_schema(app, created_models, verbosity, **kwargs): for m in created_models: do_updates(m) if DJANGO_VERSION < (1, 7): post_syncdb.connect(update_syncdb_schema, dispatch_uid='update_syncdb_schema') else: post_migrate.connect(update_schema, dispatch_uid='update_schema')
from django.db.models.signals import post_syncdb import pg_fuzzysearch.models from django.db import connection, transaction def my_callback(sender, **kwargs): cursor = connection.cursor() print "Setting up pg_fuzzysearch ..." with open("pg_fuzzysearch/setup.sql","r") as f: query = f.read() with transaction.commit_on_success(): cursor.execute(query) post_syncdb.connect(my_callback, sender=pg_fuzzysearch.models)
from django.conf import settings from django.utils.translation import ugettext_noop as _ from django.db.models.signals import post_syncdb if "notification" in settings.INSTALLED_APPS: from notification import models as notification def create_notice_types(app, created_models, verbosity, **kwargs): notification.create_notice_type("payment_added", _("Payment Generated"), _("A payment has been generated.")) post_syncdb.connect(create_notice_types, sender=notification) else: print "Skipping creation of NoticeTypes as notification app not found"
cursor = connection.cursor() cursor.execute(DROP_SQL % args) cursor.execute(base_sql) trigger_sql = trigger_function(m._meta._base_model, m._meta._version_model, args) cursor.execute(trigger_sql) cursor.execute(TRIGGER % args) for schema in m._meta._version_model.UNIQUE_STATES: qn_schema = qn(schema) # Make sure schema exists cursor.execute("SELECT exists(select schema_name FROM information_schema.schemata WHERE schema_name = %s)", (schema,)) if not cursor.fetchone()[0]: cursor.execute("CREATE SCHEMA %s" % qn_schema) args['schema'] = qn_schema cursor.execute(DROP_SQL % args) where = "WHERE %(version_model)s.%(state)s = %%s" % args base_sql = VIEW_SQL % args sql = " ".join([base_sql, where]) cursor.execute(sql, (schema,)) cursor.execute(TRIGGER % args) transaction.commit_unless_managed() post_syncdb.connect(update_schema, dispatch_uid='update_schema')
""" # for each of our content types for content_type in ContentType.objects.all(): # build our permission slug codename = "view_%s" % content_type.model # if it doesn't exist.. if not Permission.objects.filter(content_type=content_type, codename=codename): # add it Permission.objects.create(content_type=content_type, codename=codename, name="Can view %s" % content_type.name) #print "Added view permission for %s" % content_type.name # check for all our view permissions after a syncdb post_syncdb.connect(add_view_permissions) class Bookmark(models.Model): title = models.CharField(_(u'Title'), max_length=128) user = models.ForeignKey(AUTH_USER_MODEL, verbose_name=_(u"user"), blank=True, null=True) url_name = models.CharField(_(u'Url Name'), max_length=64) content_type = models.ForeignKey(ContentType) query = models.CharField(_(u'Query String'), max_length=1000, blank=True) is_share = models.BooleanField(_(u'Is Shared'), default=False) @property def url(self): base_url = reverse(self.url_name) if self.query: base_url = base_url + '?' + self.query
# -*- encoding: utf-8 -*-
from django.conf import settings
from django.contrib.sites.models import Site
from django.db.models.signals import post_syncdb


def after_syncdb(sender, **kwargs):
    """Point every Site record at the domain/name from settings."""
    Site.objects.all().update(
        domain=settings.WEBSITE_DOMAIN,
        name=settings.WEBSITE_NAME,
    )


post_syncdb.connect(after_syncdb, dispatch_uid="common.management")
if not is_last_model(kwargs): return config = getattr(settings, 'PERMISSIONS', dict()) # for each of our items for natural_key, permissions in config.items(): # if the natural key '*' then that means add to all objects if natural_key == '*': # for each of our content types for content_type in ContentType.objects.all(): for permission in permissions: add_permission(content_type, permission) # otherwise, this is on a specific content type, add for each of those else: app, model = natural_key.split('.') try: content_type = ContentType.objects.get_by_natural_key(app, model) except ContentType.DoesNotExist: continue # add each permission for permission in permissions: add_permission(content_type, permission) post_syncdb.connect(check_all_permissions) post_syncdb.connect(check_all_group_permissions) post_syncdb.connect(check_all_anon_permissions)
category=top_menu['category'], role=top_menu['role'], tenant_id=top_menu['tenant_id'], prepare_id=top_menu['prepare_id'], ) try: need_to_do.save() top_menu_name_with_id[need_to_do.need_uuid] = need_to_do.need_uuid except Exception, e: print "init control_manage data failed.please check error (%s)" % e return None return top_menu_name_with_id def init_control_manage_data(sender, **kwargs): if sender == control_manage_models: try: NeedToDo.objects.all().delete() except Exception, e: print "flush control_manage data failed.please check error (%s)" % e return None top_menu_name_with_id = init_top_menu(NEED_TO_DO) if top_menu_name_with_id: print "create need_to_do ......... ok" else: print "create need_to_do ......... failed" return post_syncdb.connect(init_control_manage_data, sender=control_manage_models)
for cls in clslist: subclass_list = cls.__subclasses__() if subclass_list: for subcls in class_hierarchy(subclass_list): yield subcls else: yield cls #print "start flush ..." for cls in class_hierarchy([SharedMemoryModel]): #print cls cls.flush_instance_cache() # run the python garbage collector return gc.collect() #request_finished.connect(flush_cache) post_syncdb.connect(flush_cache) def flush_cached_instance(sender, instance, **kwargs): """ Flush the idmapper cache only for a given instance. """ # XXX: Is this the best way to make sure we can flush? if not hasattr(instance, 'flush_cached_instance'): return sender.flush_cached_instance(instance, force=True) pre_delete.connect(flush_cached_instance) def update_cached_instance(sender, instance, **kwargs): """
from django.db.models.signals import post_syncdb
import models
#from django.contrib.auth.models import User
import signals

# Bootstrap this app's Connection data once syncdb has created its tables.
# dispatch_uid keeps the handler from being registered more than once if
# this module is imported repeatedly.
post_syncdb.connect(
    signals.create_connection_instance_post_syncdb,
    sender=models,
    dispatch_uid='connections.signals.create_connection_instance_post_syncdb',
)
group=instance, type=Activity.SET_REGRESSION, ) def on_alert_creation(instance, **kwargs): from sentry.plugins import plugins for plugin in plugins.for_project(instance.project): safe_execute(plugin.on_alert, alert=instance) # Signal registration post_syncdb.connect( create_default_projects, dispatch_uid="create_default_project", weak=False, ) post_save.connect( create_keys_for_project, sender=Project, dispatch_uid="create_keys_for_project", weak=False, ) post_save.connect( create_org_member_for_owner, sender=Organization, dispatch_uid="create_org_member_for_owner", weak=False, ) user_logged_in.connect(set_language_on_logon,
from django.db.models.signals import post_syncdb

message = """
'django-email-accounts' has detected that you just installed Django's
authentication system (django.auth).

For your convenience, django-email-accounts can alter the user table's username
field to allow 75 characters instead of the default 35 chars. Unless you do
this, emails that are longer than 30 characters will be cut off, and this app
will probably not work!

NOTE: this will only work if the SQL user account you have created for django
has the privileges to run ALTER statements.

Do you want django-email-accounts to try to alter the auth_user.username column
to allow 75 characters? (y/N): """


def query_fix_usertable(sender, app, created_models, verbosity, interactive, **kwargs):
    """Offer to widen auth_user.username to 75 chars right after django.auth
    is installed.

    Only runs on an interactive syncdb that has just created the User model;
    the ALTER is executed only if the user explicitly answers yes.
    """
    model_names = [m.__name__ for m in created_models]
    if not interactive or app.__name__ != 'django.contrib.auth.models' or "User" not in model_names:
        return

    answer = raw_input(message)
    # Bug fix: the original loop discarded the re-prompt's result, looping
    # forever on any invalid answer (including plain Enter for the default).
    # Capture the new answer each time; empty input means the default "N".
    while answer.lower() not in ('', 'y', 'n', 'yes', 'no'):
        answer = raw_input("You need to either decide yes ('y') or no ('n'). Default is no. (y/N): ")

    # Bug fix: the original ran the ALTER unconditionally, ignoring the
    # answer entirely. Only proceed on an explicit yes.
    if answer.lower() not in ('y', 'yes'):
        return

    from django.db import connection
    cursor = connection.cursor()
    cursor.execute("ALTER TABLE auth_user MODIFY COLUMN username varchar(75) NOT NULL")


post_syncdb.connect(query_fix_usertable)
from django.db.models.signals import post_syncdb
import bsMaterialsApp.models
from bsMaterialsApp.models import NoFraccionable, ESTRATEGIA_NOFRACCIONABLE


def crearSingleton(sender, **kwargs):
    """Ensure the NoFraccionable singleton row exists after syncdb.

    Creates the row with pk=ESTRATEGIA_NOFRACCIONABLE and zeroed defaults
    if it is missing; the prints are debug output (get_or_create returns
    an (instance, created) tuple).
    """
    print NoFraccionable.objects.get_or_create(pk = ESTRATEGIA_NOFRACCIONABLE, defaults = {"medida":0, "minimo":0})
    print NoFraccionable, sender, kwargs


# Run only when this app's models have just been synced.
post_syncdb.connect(crearSingleton, sender=bsMaterialsApp.models)