Example 1
    def test_sealed(self):
        class_prepared.connect(self.resolver.add_model)
        rt_field, rt_model = generate_computedmodel(self.resolver, 'RuntimeGeneratedB', lambda self: self.name.upper())

        self.resolver.seal()
        self.assertEqual(self.resolver._sealed, True)
        self.assertEqual(self.resolver._initialized, False)
        self.assertEqual(self.resolver._map_loaded, False)

        # should raise on new fields or models
        with self.assertRaises(ResolverException):
            self.resolver.add_field(rt_field)
        with self.assertRaises(ResolverException):
            self.resolver.add_model(rt_model)
        with self.assertRaises(ResolverException):
            generate_computedmodel(self.resolver, 'RuntimeGeneratedC', lambda self: self.name.upper())
        class_prepared.disconnect(self.resolver.add_model)

        # should allow access to models_with_computedfields, computedfields_with_models
        self.assertEqual(list(self.resolver.models_with_computedfields), [(rt_model, {rt_field})])
        self.assertEqual(list(self.resolver.computedfields_with_models), [(rt_field, {rt_model})])

        # should raise on computed_models
        with self.assertRaises(ResolverException):
            self.resolver.computed_models
Example 2
 def contribute_to_class(self, model, name, **kwargs):
     self.model = model
     self.apps = model._meta.apps
     self.lock = threading.RLock()
     setattr(model, name, self)
     # Ideally we would connect to the model.apps.clear_cache()
     class_prepared.connect(self.class_prepared_receiver, weak=False)
Example 3
    def contribute_to_class(self, cls, name):
        # If the source field name isn't defined, figure it out.

        def register_source_group(source):
            setattr(cls, name, ImageSpecFileDescriptor(self, name, source))
            self._set_spec_id(cls, name)

            # Add the model and field as a source for this spec id
            register.source_group(self.spec_id, ImageFieldSourceGroup(cls, source))

        if self.source:
            register_source_group(self.source)
        else:
            # The source argument was not given, so check whether the model
            # defines exactly one ImageField. That check has to wait until the
            # model is fully prepared.
            def handle_model_preparation(sender, **kwargs):

                image_fields = [f.attname for f in cls._meta.fields if
                                isinstance(f, models.ImageField)]
                if len(image_fields) == 0:
                    raise Exception(
                        '%s does not define any ImageFields, so your %s'
                        ' ImageSpecField has no image to act on.' %
                        (cls.__name__, name))
                elif len(image_fields) > 1:
                    raise Exception(
                        '%s defines multiple ImageFields, but you have not'
                        ' specified a source for your %s ImageSpecField.' %
                        (cls.__name__, name))
                register_source_group(image_fields[0])

            class_prepared.connect(handle_model_preparation, sender=cls, weak=False)
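
A minimal usage sketch of the deferred-source behaviour above, assuming django-imagekit's public ImageSpecField/ResizeToFill API; the model and field names are illustrative:

from django.db import models
from imagekit.models import ImageSpecField
from imagekit.processors import ResizeToFill

class Profile(models.Model):
    # The only ImageField on the model...
    avatar = models.ImageField(upload_to='avatars')
    # ...so source= can be omitted: the class_prepared receiver above resolves
    # it to 'avatar' once the model class is fully prepared.
    avatar_thumbnail = ImageSpecField(processors=[ResizeToFill(100, 100)],
                                      format='JPEG')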
Example 4
 def contribute_to_class(self, model, name, **kwargs):
     self.model = model
     self.apps = model._meta.apps
     self.lock = threading.RLock()
     setattr(model, name, self)
     # Ideally we would connect to the model.apps.clear_cache()
     class_prepared.connect(self.class_prepared_receiver, weak=False)
        def __new__(meta, name, bases, attrs):
            # Register any additional model fields specified in fields
            def add_field(sender, **kwargs):
                if sender.__name__ == computed_name:
                    for field_name, field in fields.items():
                        field.contribute_to_class(sender, field_name)
            class_prepared.connect(add_field)

            def create_class_property(class_attr):
                related_attr = class_attr.split('__')[0]
                related_class_name = related_class_lookup.get(related_attr, None)
                if not related_class_name:
                    raise Exception("Expected related_class_lookup to contain %s, since class_attrs contain %s" % (related_attr, class_attr) )
                related_class = resolve_module_attr(related_class_name)
                # Create the getter property that uses the class manager to lookup up the related model by id
                def getter(cls):
                    return related_class.objects.get(id=getattr(cls, class_attr))
                return ClassProperty(classmethod(getter))

            # Create class-level getter properties to resolve things like the config_entity since we only store the id
            class_properties = map_to_dict(
                lambda class_attr: [class_attr.split('__')[0], create_class_property(class_attr)],
                filter(lambda class_attr: class_attr.endswith('__id'), class_attrs))

            return models.base.ModelBase.__new__(
                meta,
                computed_name,
                (base_class,),
                # Merge the manager objects (if the abstract_class has one) with attrs and class_attrs
                merge(dict(objects=base_class.objects.__class__()) if hasattr(base_class, 'objects') else {},
                      attrs,
                      class_attrs,
                      class_properties))
Example 6
    def test_initialized_full_wrong_modelbase(self):
        class_prepared.connect(self.resolver.add_model)
        rt_field, rt_model = generate_computedmodel(self.resolver, 'RuntimeGeneratedF', lambda self: self.name.upper(), True)
        class_prepared.disconnect(self.resolver.add_model)

        with self.assertRaises(ResolverException):
            self.resolver.initialize()
Example 7
    def _class_prepared_handler(sender, **kwargs):
        """ Signal handler for class_prepared. 
            This will be run for every model, looking for the moment when all
            dependent models are prepared for the first time. It will then run
            the given function, only once.
        """

        sender_app = sender._meta.app_label.lower() + '.' + sender._meta.object_name
        already_prepared = set([sender_app])
        for app, models in app_cache.app_models.items():
            for model_name, model in models.items():
                already_prepared.add(app.lower() + '.' + model_name)

        if all([x in already_prepared for x in dependencies]):
            db.start_transaction()
            try:
                # We need to disconnect, otherwise each new dynamo model generation
                # will trigger it and cause a "maximum recursion error"
                class_prepared.disconnect(_class_prepared_handler, weak=False)
                fn()
            except DatabaseError as message:
                # If tables are missing altogether, not much we can do
                # until syncdb/migrate is run. "The code must go on" in this
                # case, without running our function completely. At least
                # database operations will be rolled back.
                db.rollback_transaction()
                # Better to connect again
                if str(message) != 'no such table: dynamo_metamodel':
                    class_prepared.connect(_class_prepared_handler, weak=False)
                else:
                    raise
            else:
                db.commit_transaction()
Example 8
 def contribute_to_class(self, cls, name):
     super(AutoSequenceField, self).contribute_to_class(cls, name)
     # parent models still call this method, but don't need sequences
     post_syncdb.connect(self.create_sequence,
                         dispatch_uid='create_sequence_%s_%s' %
                         (cls._meta, name),
                         weak=False)
     class_prepared.connect(self.set_sequence_name, sender=cls, weak=False)
Example 9
 def __init__(self, *args, **kwargs):
     """
     Install pgtriggers for Change detection models whenever
     the change detection model class is ready. We have to do this
     in __init__ instead of ready() here since the class_prepared
     signal is emitted before models are ready
     """
     class_prepared.connect(pgh_setup)
     super().__init__(*args, **kwargs)
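
For context, a self-contained sketch of how such an AppConfig fits together; the app label and the receiver body are assumptions, not taken from the snippet above:

from django.apps import AppConfig
from django.db.models.signals import class_prepared


def pgh_setup(sender, **kwargs):
    # Hypothetical receiver body: inspect each prepared model class here.
    pass


class ChangeDetectionConfig(AppConfig):
    name = 'change_detection'  # assumed app label

    def __init__(self, *args, **kwargs):
        # Connect in __init__ rather than ready(): class_prepared fires while
        # model modules are still being imported, i.e. before ready() runs.
        class_prepared.connect(pgh_setup)
        super().__init__(*args, **kwargs)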
Example 10
def when_classes_prepared(app_name, dependencies, fn):
    """ Runs the given function as soon as the model dependencies are available.
        You can use this to build dyanmic model classes on startup instead of
        runtime. 

        app_name       the name of the relevant app
        dependencies   a list of model names that need to have already been 
                       prepared before the dynamic classes can be built.
        fn             this will be called as soon as the all required models 
                       have been prepared

        NB: The fn will be called as soon as the last required
            model has been prepared. This can happen in the middle of reading
            your models.py file, before potentially referenced functions have
            been loaded. Becaue this function must be called before any 
            relevant model is defined, the only workaround is currently to 
            move the required functions before the dependencies are declared.

        TODO: Allow dependencies from other apps?
    """
    dependencies = [x.lower() for x in dependencies]

    def _class_prepared_handler(sender, **kwargs):
        """ Signal handler for class_prepared. 
            This will be run for every model, looking for the moment when all
            dependent models are prepared for the first time. It will then run
            the given function, only once.
        """
        sender_name = sender._meta.object_name.lower()
        already_prepared = set(
            app_cache.app_models.get(app_name, {}).keys() + [sender_name])

        if (sender._meta.app_label == app_name and sender_name in dependencies
                and all([x in already_prepared for x in dependencies])):
            db.start_transaction()
            try:
                fn()
            except DatabaseError:
                # If tables are  missing altogether, not much we can do
                # until syncdb/migrate is run. "The code must go on" in this
                # case, without running our function completely. At least
                # database operations will be rolled back.
                db.rollback_transaction()
            else:
                db.commit_transaction()
                # TODO Now that the function has been run, should/can we
                # disconnect this signal handler?

    # Connect the above handler to the class_prepared signal
    # NB: Although this signal is officially documented, the documentation
    # notes the following:
    #     "Django uses this signal internally; it's not generally used in
    #      third-party applications."
    class_prepared.connect(_class_prepared_handler, weak=False)
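
A usage sketch for when_classes_prepared; the app label, models and callback are made up for illustration and would sit at the top of the app's models.py, before the listed models are declared:

from django.db import models

def build_dynamic_models():
    # construct and register runtime model classes here
    pass

when_classes_prepared('inventory', ['Category', 'Product'], build_dynamic_models)

class Category(models.Model):
    name = models.CharField(max_length=50)

class Product(models.Model):
    name = models.CharField(max_length=50)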
Example 11
    def contribute_to_class(self, cls, name, virtual_only=False):
        self.base_cls = cls

        super(FSMFieldMixin, self).contribute_to_class(cls, name, virtual_only=virtual_only)
        setattr(cls, self.name, self.descriptor_class(self))
        setattr(cls, 'get_available_{}_transitions'.format(self.name),
                curry(get_available_FIELD_transitions, field=self))
        setattr(cls, 'get_all_{}_transitions'.format(self.name),
                curry(get_all_FIELD_transitions, field=self))

        class_prepared.connect(self._collect_transitions)
Example 12
    def contribute_to_class(self, cls, name, virtual_only=False):
        self.base_cls = cls

        super(FSMFieldMixin,
              self).contribute_to_class(cls, name, virtual_only=virtual_only)
        setattr(cls, self.name, self.descriptor_class(self))
        setattr(cls, 'get_available_{}_transitions'.format(self.name),
                curry(get_available_FIELD_transitions, field=self))
        setattr(cls, 'get_all_{}_transitions'.format(self.name),
                curry(get_all_FIELD_transitions, field=self))

        class_prepared.connect(self._collect_transitions)
Example 13
    def test_initialized_models_only(self):
        class_prepared.connect(self.resolver.add_model)
        rt_field, rt_model = generate_computedmodel(self.resolver, 'RuntimeGeneratedD', lambda self: self.name.upper())
        class_prepared.disconnect(self.resolver.add_model)

        self.resolver.initialize(models_only=True)
        self.assertEqual(self.resolver._sealed, True)
        self.assertEqual(self.resolver._initialized, True)
        self.assertEqual(self.resolver._map_loaded, False)

        # should allow access to computed_models
        self.assertEqual(self.resolver.computed_models, {rt_model: {'comp': rt_field}})
Example 14
def when_classes_prepared(app_name, dependencies, fn):
    """ Runs the given function as soon as the model dependencies are available.
        You can use this to build dyanmic model classes on startup instead of
        runtime. 

        app_name       the name of the relevant app
        dependencies   a list of model names that need to have already been 
                       prepared before the dynamic classes can be built.
        fn             this will be called as soon as the all required models 
                       have been prepared

        NB: The fn will be called as soon as the last required
            model has been prepared. This can happen in the middle of reading
            your models.py file, before potentially referenced functions have
            been loaded. Becaue this function must be called before any 
            relevant model is defined, the only workaround is currently to 
            move the required functions before the dependencies are declared.

        TODO: Allow dependencies from other apps?
    """
    dependencies = [x.lower() for x in dependencies]

    def _class_prepared_handler(sender, **kwargs):
        """ Signal handler for class_prepared. 
            This will be run for every model, looking for the moment when all
            dependent models are prepared for the first time. It will then run
            the given function, only once.
        """
        sender_name = sender._meta.object_name.lower()
        already_prepared = set(app_cache.app_models.get(app_name,{}).keys() + [sender_name])

        if (sender._meta.app_label == app_name and sender_name in dependencies
          and all([x in already_prepared for x in dependencies])):
            db.start_transaction()
            try:
                fn()
            except DatabaseError:
                # If tables are  missing altogether, not much we can do
                # until syncdb/migrate is run. "The code must go on" in this 
                # case, without running our function completely. At least
                # database operations will be rolled back.
                db.rollback_transaction()
            else:
                db.commit_transaction()
                # TODO Now that the function has been run, should/can we 
                # disconnect this signal handler?
    
    # Connect the above handler to the class_prepared signal
    # NB: Although this signal is officially documented, the documentation
    # notes the following:
    #     "Django uses this signal internally; it's not generally used in 
    #      third-party applications."
    class_prepared.connect(_class_prepared_handler, weak=False)
Example 15
def when_classes_prepared(app_name, dependencies, fn):
    """ Executa a função administrada logo que as dependências do modelo estão disponíveis.
        Você pode usar isso para construir classes de modelo dyanmic na inicialização, em vez de
        tempo de execução.
        
        app_name o nome do app relevante
        dependências uma lista de nomes de modelos que precisam já ter sido
                       preparado antes das aulas dinâmicas podem ser construídas.
        fn este será chamado logo que os modelos de todos os necessários
                       Foram preparados
        
        NB: A fn será chamado logo que a requerida última
            modelo foi preparado. Isto pode acontecer no meio da leitura
            o arquivo models.py, antes de funções potencialmente referenciados têm
            foi carregado. Becaue esta função deve ser chamada antes de qualquer
            modelo relevante é definido, a única solução é atualmente a
            mover as funções necessárias antes que as dependências são declarados.
        
        TODO: Permitir dependências de outros aplicativos?
    """
    dependencies = [x.lower() for x in dependencies]

    def _class_prepared_handler(sender, **kwargs):
        """ Manipulador de sinal para class_prepared.
            Isto será executado para cada modelo, procurando o momento em que toda
            modelos dependentes são preparados pela primeira vez. Ela irá então executar
            a função dada, apenas uma vez.
        """
        sender_name = sender._meta.object_name.lower()
        already_prepared = set(app_cache.app_models.get(app_name,{}).keys() + [sender_name])

        if (sender._meta.app_label == app_name and sender_name in dependencies
          and all([x in already_prepared for x in dependencies])):
            db.start_transaction()
            try:
                fn()
            except DatabaseError:
                # If tables are  missing altogether, not much we can do
                # until syncdb/migrate is run. "The code must go on" in this 
                # case, without running our function completely. At least
                # database operations will be rolled back.
                db.rollback_transaction()
            else:
                db.commit_transaction()
                # TODO Now that the function has been run, should/can we 
                # disconnect this signal handler?
    
    # Connect the above handler to the class_prepared signal
    # NB: Although this signal is officially documented, the documentation
    # notes the following:
    #     "Django uses this signal internally; it's not generally used in
    #      third-party applications."
    class_prepared.connect(_class_prepared_handler, weak=False)
Example 16
    def contribute_to_class(self, cls, name, **kwargs):
        self.base_cls = cls

        super(FSMFieldMixin, self).contribute_to_class(cls, name, **kwargs)
        setattr(cls, self.name + "_description", self.descriptor_class(self))
        setattr(cls, 'get_all_{0}_transitions'.format(self.name),
                curry(get_all_FIELD_transitions, field=self))
        setattr(cls, 'get_available_{0}_transitions'.format(self.name),
                curry(get_available_FIELD_transitions, field=self))
        setattr(cls, 'get_available_user_{0}_transitions'.format(self.name),
                curry(get_available_user_FIELD_transitions, field=self))

        class_prepared.connect(self._collect_transitions)
Example 17
    def test_initialized_full(self):
        class_prepared.connect(self.resolver.add_model)
        rt_field, rt_model = generate_computedmodel(self.resolver, 'RuntimeGeneratedE', lambda self: self.name.upper())
        class_prepared.disconnect(self.resolver.add_model)

        self.resolver.initialize()
        self.assertEqual(self.resolver._sealed, True)
        self.assertEqual(self.resolver._initialized, True)
        self.assertEqual(self.resolver._map_loaded, True)

        # should have all maps loaded
        self.assertEqual(self.resolver._map, {})
        self.assertEqual(self.resolver._fk_map, {})
        self.assertEqual(self.resolver._local_mro, {rt_model: {'base': ['comp'], 'fields': {'comp': 1, 'name': 1}}})
Example 18
def lazy_class_prepared(app_label, object_name, callback):
    """
    Lazily execute a callback upon model class preparation.
    """
    model = get_model(app_label, object_name.lower())
    if model:
        callback(model)
    else:
        def receiver(sender, **kwargs):
            opts = sender._meta
            if (opts.app_label == app_label and
                    opts.object_name == object_name):
                class_prepared.disconnect(receiver)
                callback(sender)
        class_prepared.connect(receiver, weak=False)
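
A usage sketch for lazy_class_prepared; the callback and the field tweak are illustrative only:

def patch_user(model):
    # Runs immediately if auth.User is already registered, otherwise once
    # its class_prepared signal fires.
    model._meta.get_field('username').max_length = 150

lazy_class_prepared('auth', 'User', patch_user)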
Example 19
def lazy_class_prepared(app_label, object_name, callback):
    """
    Lazily execute a callback upon model class preparation.
    """
    model = get_model(app_label, object_name.lower())
    if model:
        callback(model)
    else:
        def receiver(sender, **kwargs):
            opts = sender._meta
            if (opts.app_label == app_label and
                    opts.object_name == object_name):
                class_prepared.disconnect(receiver)
                callback(sender)
        class_prepared.connect(receiver, weak=False)
Example 20
def register_model_lookup(field, related_model):
    if isinstance(related_model, str):
        app_label, model_name = related_model.split('.')
        try:
            field.related_model = apps.get_registered_model(app_label, model_name)
        except LookupError:
            def resolve(**kwargs):
                clz = kwargs['sender']
                # noinspection PyProtectedMember
                if clz._meta.app_label == app_label and clz._meta.object_name == model_name:
                    field.related_model = clz
                    class_prepared.disconnect(resolve, weak=False)

            class_prepared.connect(resolve, weak=False)
    else:
        field.related_model = related_model
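
A usage sketch for register_model_lookup; my_field and the model references are placeholders:

# Lazy "app_label.ModelName" string: resolved now if the model is already
# registered, otherwise when its class_prepared signal fires.
register_model_lookup(my_field, 'catalog.Product')

# Passing the class directly assigns it immediately.
register_model_lookup(my_field, Product)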
Example 21
        def __new__(cls, name, bases, attrs):

            # Register any additional model fields specified in fields
            def add_field(sender, **kwargs):
                if sender.__name__ == computed_name:
                    for field_name, field in fields.items():
                        field.contribute_to_class(sender, field_name)
            class_prepared.connect(add_field)
            return models.base.ModelBase.__new__(
                cls,
                computed_name,
                (abstract_class,),
                # Merge the manager objects (if the abstract_class has one) with attrs and class_attrs
                merge(dict(objects=abstract_class.objects.__class__()) if hasattr(abstract_class, 'objects') else {},
                      attrs,
                      class_attrs))
Example 22
    def test_runtime_coverage(self):
        class_prepared.connect(self.resolver.add_model)
        rt_field, rt_model = generate_computedmodel(self.resolver, 'RuntimeGeneratedH', lambda self: self.name.upper())
        class_prepared.disconnect(self.resolver.add_model)
        self.resolver.initialize()

        # MRO expansion
        self.assertEqual(self.resolver.get_local_mro(rt_model), ['comp'])
        self.assertEqual(self.resolver.get_local_mro(models.Concrete), [])

        # update_computedfields with update_fields expansion
        self.assertEqual(self.resolver.update_computedfields(rt_model(), {'name'}), {'name', 'comp'})
        self.assertEqual(self.resolver.update_computedfields(models.Concrete(), {'name'}), {'name'})

        # is_computedfield test
        self.assertEqual(self.resolver.is_computedfield(rt_model, 'name'), False)
        self.assertEqual(self.resolver.is_computedfield(rt_model, 'comp'), True)
        self.assertEqual(self.resolver.is_computedfield(models.Concrete, 'name'), False)
Example 23
def register_model_lookup(field, related_model):
    if isinstance(related_model, str):
        app_label, model_name = related_model.split('.')
        try:
            field.related_model = apps.get_registered_model(
                app_label, model_name)
        except LookupError:

            def resolve(**kwargs):
                clz = kwargs['sender']
                # noinspection PyProtectedMember
                if clz._meta.app_label == app_label and clz._meta.object_name == model_name:
                    field.related_model = clz
                    class_prepared.disconnect(resolve, weak=False)

            class_prepared.connect(resolve, weak=False)
    else:
        field.related_model = related_model
        def __new__(meta, name, bases, attrs):
            # Register any additional model fields specified in fields
            def add_field(sender, **kwargs):
                if sender.__name__ == computed_name:
                    for field_name, field in fields.items():
                        field.contribute_to_class(sender, field_name)

            class_prepared.connect(add_field)

            def create_class_property(class_attr):
                related_attr = class_attr.split('__')[0]
                related_class_name = related_class_lookup.get(
                    related_attr, None)
                if not related_class_name:
                    raise Exception(
                        "Expected related_class_lookup to contain %s, since class_attrs contain %s"
                        % (related_attr, class_attr))
                related_class = resolve_module_attr(related_class_name)

                # Create the getter property that uses the class manager to look up the related model by id
                def getter(cls):
                    return related_class.objects.get(
                        id=getattr(cls, class_attr))

                return ClassProperty(classmethod(getter))

            # Create class-level getter properties to resolve things like the config_entity since we only store the id
            class_properties = map_to_dict(
                lambda class_attr:
                [class_attr.split('__')[0],
                 create_class_property(class_attr)],
                filter(lambda class_attr: class_attr.endswith('__id'),
                       class_attrs))

            return models.base.ModelBase.__new__(
                meta,
                computed_name,
                (base_class, ),
                # Merge the manager objects (if the abstract_class has one) with attrs and class_attrs
                merge(
                    dict(objects=base_class.objects.__class__()) if hasattr(
                        base_class, 'objects') else {}, attrs, class_attrs,
                    class_properties))
def receiver(signal, **kwargs):  # noqa
    if django.VERSION < (1, 7, 0):
        unresolved_references = {}

        def _resolve_references(sender, **kwargs):
            opts = sender._meta
            reference = (opts.app_label, opts.object_name)
            try:
                receivers = unresolved_references.pop(reference)
            except KeyError:
                pass
            else:
                for signal, func, kwargs in receivers:
                    kwargs["sender"] = sender
                    signal.connect(func, **kwargs)

        class_prepared.connect(_resolve_references, weak=False)

    def _decorator(func):  # noqa
        if django.VERSION < (1, 7, 0):
            from django.db.models.loading import cache as app_cache

            sender = kwargs.get("sender")
            if isinstance(sender, six.string_types):
                try:
                    app_label, model_name = sender.split(".")
                except ValueError:
                    raise ValueError(
                        "Specified sender must either be a model or a " "model name of the 'app_label.ModelName' form."
                    )
                sender = app_cache.app_models.get(app_label, {}).get(model_name.lower())
                if sender is None:
                    ref = (app_label, model_name)
                    refs = unresolved_references.setdefault(ref, [])
                    refs.append((signal, func, kwargs))
                    return func
                else:
                    kwargs["sender"] = sender
        signal.connect(func, **kwargs)
        return func

    return _decorator
Example 26
def receiver(signal, **kwargs):  # noqa
    if django.VERSION < (1, 7, 0):
        unresolved_references = {}

        def _resolve_references(sender, **kwargs):
            opts = sender._meta
            reference = (opts.app_label, opts.object_name)
            try:
                receivers = unresolved_references.pop(reference)
            except KeyError:
                pass
            else:
                for signal, func, kwargs in receivers:
                    kwargs["sender"] = sender
                    signal.connect(func, **kwargs)

        class_prepared.connect(_resolve_references, weak=False)

    def _decorator(func):  # noqa
        if django.VERSION < (1, 7, 0):
            from django.db.models.loading import cache as app_cache
            sender = kwargs.get("sender")
            if isinstance(sender, six.string_types):
                try:
                    app_label, model_name = sender.split(".")
                except ValueError:
                    raise ValueError(
                        "Specified sender must either be a model or a "
                        "model name of the 'app_label.ModelName' form.")
                sender = app_cache.app_models.get(app_label,
                                                  {}).get(model_name.lower())
                if sender is None:
                    ref = (app_label, model_name)
                    refs = unresolved_references.setdefault(ref, [])
                    refs.append((signal, func, kwargs))
                    return func
                else:
                    kwargs["sender"] = sender
        signal.connect(func, **kwargs)
        return func

    return _decorator
Example 27
    def test_initialstate(self):
        # all states should be false
        self.assertEqual(self.resolver._sealed, False)
        self.assertEqual(self.resolver._initialized, False)
        self.assertEqual(self.resolver._map_loaded, False)

        # should allow to add fields and models
        class_prepared.connect(self.resolver.add_model)
        rt_field, rt_model = generate_computedmodel(self.resolver, 'RuntimeGeneratedA', lambda self: self.name.upper())
        class_prepared.disconnect(self.resolver.add_model)
        self.assertEqual(self.resolver.computedfields, {rt_field})
        self.assertEqual(self.resolver.models, {rt_model})
        
        # should raise on computed_models, models_with_computedfields, computedfields_with_models
        with self.assertRaises(ResolverException):
            self.resolver.computed_models
        with self.assertRaises(ResolverException):
            list(self.resolver.models_with_computedfields)
        with self.assertRaises(ResolverException):
            list(self.resolver.computedfields_with_models)
Example 28
    def contribute_to_class(self, cls, name, **kwargs):
        self.base_cls = cls

        super(FSMFieldMixin, self).contribute_to_class(cls, name, **kwargs)
        setattr(cls, self.name, self.descriptor_class(self))
        setattr(cls, 'get_all_{0}_transitions'.format(self.name),
                partialmethod(get_all_FIELD_transitions, field=self))
        setattr(cls, 'get_available_{0}_transitions'.format(self.name),
                partialmethod(get_available_FIELD_transitions, field=self))
        setattr(
            cls, 'get_available_user_{0}_transitions'.format(self.name),
            partialmethod(get_available_user_FIELD_transitions, field=self))

        if self.protected:
            if hasattr(self.base_cls,
                       'refresh_from_db'):  # check for Django prior to v1.8
                self.base_cls.refresh_from_db = self.override_protection_decorator(
                    self.base_cls.refresh_from_db)
            self.base_cls.clean_fields = self.override_protection_decorator(
                self.base_cls.clean_fields)

        class_prepared.connect(self._collect_transitions)
Example 29
    def contribute_to_class(self, cls, name):
        # If the source field name isn't defined, figure it out.

        def register_source_group(source):
            setattr(cls, name, ImageSpecFileDescriptor(self, name, source))
            self._set_spec_id(cls, name)

            # Add the model and field as a source for this spec id
            register.source_group(self.spec_id,
                                  ImageFieldSourceGroup(cls, source))

        if self.source:
            register_source_group(self.source)
        else:
            # The source argument was not given, so check whether the model
            # defines exactly one ImageField. That check has to wait until the
            # model is fully prepared.
            def handle_model_preparation(sender, **kwargs):

                image_fields = [
                    f.attname for f in cls._meta.fields
                    if isinstance(f, models.ImageField)
                ]
                if len(image_fields) == 0:
                    raise Exception(
                        '%s does not define any ImageFields, so your %s'
                        ' ImageSpecField has no image to act on.' %
                        (cls.__name__, name))
                elif len(image_fields) > 1:
                    raise Exception(
                        '%s defines multiple ImageFields, but you have not'
                        ' specified a source for your %s ImageSpecField.' %
                        (cls.__name__, name))
                register_source_group(image_fields[0])

            class_prepared.connect(handle_model_preparation,
                                   sender=cls,
                                   weak=False)
Example 30
    def test_pickled_load(self):
        # write pickled map file
        class_prepared.connect(self.resolver.add_model)
        rt_field, rt_model = generate_computedmodel(self.resolver, 'RuntimeGeneratedG', lambda self: self.name.upper())
        class_prepared.disconnect(self.resolver.add_model)
        self.resolver.initialize()

        # patch test_full.models (otherwise pickle doesn't work)
        models.RuntimeGeneratedG = rt_model

        settings.COMPUTEDFIELDS_MAP = 'mapfile.test_generated'
        self.resolver._write_pickled_data()

        # load back pickled file
        data = self.resolver._load_pickled_data()
        settings.COMPUTEDFIELDS_MAP = None
        os.remove('mapfile.test_generated')
        
        # compare pickle data
        self.assertEqual(data['hash'], self.resolver._calc_modelhash())
        self.assertEqual(data['lookup_map'], self.resolver._map)
        self.assertEqual(data['fk_map'], self.resolver._fk_map)
        self.assertEqual(data['local_mro'], self.resolver._local_mro)
Example 31
# who may need more assistance from their IDE, I want to try this approach out.
# this module is also a form of documentation of the public API.
subsession_module = import_module('otree.models.subsession')
group_module = import_module('otree.models.group')
player_module = import_module('otree.models.player')

# so that oTree users don't see internal details
session_module = import_module('otree.models.session')
participant_module = import_module('otree.models.participant')


def ensure_required_fields(sender, **kwargs):
    """
    Some models need to hook up some dynamically created fields. They can be
    created on the fly or might be defined by the user in the app directly.

    We use this signal handler to ensure that these fields exist and are
    created on demand.
    """
    if hasattr(sender, '_ensure_required_fields'):
        sender._ensure_required_fields()

class_prepared.connect(ensure_required_fields)


Session = session_module.Session
Participant = participant_module.Participant
BaseSubsession = subsession_module.BaseSubsession
BaseGroup = group_module.BaseGroup
BasePlayer = player_module.BasePlayer
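
A sketch of the hook the ensure_required_fields receiver looks for; the model, field and check are assumptions for illustration, not oTree's actual implementation:

from django.db import models

class Player(models.Model):
    name = models.CharField(max_length=50)

    @classmethod
    def _ensure_required_fields(cls):
        # Called from the class_prepared receiver above; add a dynamically
        # required field if it was not defined on the model.
        if not any(f.name == 'payoff' for f in cls._meta.fields):
            models.DecimalField(max_digits=12, decimal_places=2,
                                null=True).contribute_to_class(cls, 'payoff')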
Example 32
def register_pre_signals():
    """
    Called from tagulous/models/__init__.py
    """
    if settings.ENHANCE_MODELS:
        class_prepared.connect(class_prepared_listener, weak=False)
Example 33
 def contribute_to_class(self, model, name, **kwargs):
     self.model = model
     self.name = name
     setattr(model, name, self)
     # Ideally we would connect to the model.apps.clear_cache()
     class_prepared.connect(self.class_prepared_receiver, weak=False)
Example 34
from django.utils.translation import ugettext as _
from django.db.models.signals import class_prepared
from longerusername import MAX_USERNAME_LENGTH

def longer_username(sender, *args, **kwargs):
    if sender.__name__ == "User" and sender.__module__ == "django.contrib.auth.models":
        sender._meta.get_field("username").max_length = MAX_USERNAME_LENGTH()
        sender._meta.get_field("username").help_text = _("Required, %s characters or fewer. Only letters, numbers, and @, ., +, -, or _ characters." % MAX_USERNAME_LENGTH())

class_prepared.connect(longer_username)
Example 35
            sender._concurrencymeta.enabled = getattr(sender.ConcurrencyMeta, 'enabled')

        if not (sender._concurrencymeta.manually):
            sender._concurrencymeta.field.wrap_model(sender)

        setattr(sender, 'get_concurrency_version', get_revision_of_object)


def post_syncdb_concurrency_handler(sender, **kwargs):
    from concurrency.triggers import create_triggers
    from django.db import connections
    databases = [alias for alias in connections]
    create_triggers(databases)


class_prepared.connect(class_prepared_concurrency_handler, dispatch_uid='class_prepared_concurrency_handler')


class TriggerRegistry(object):
    # FIXME: this is very bad. it seems required only by tests
    # see
    # https://github.com/pytest-dev/pytest-django/issues/75
    # https://code.djangoproject.com/ticket/22280#comment:20

    _fields = []

    def append(self, field):
        self._fields.append([field.model._meta.app_label, field.model.__name__])

    def __iter__(self):
        return iter([get_model(*i)._concurrencymeta.field for i in self._fields])
    def setup_fix_parent_and_child_relation():
        from django.db.models.signals import class_prepared

        class_prepared.connect(fix_parent_and_child_relation_signal_handler)
Example 37
                # database operations will be rolled back.
                db.rollback_transaction()
                # Better to connect again
                if str(message) != 'no such table: dynamo_metamodel':
                    class_prepared.connect(_class_prepared_handler, weak=False)
                else:
                    raise
            else:
                db.commit_transaction()
    
    # Connect the above handler to the class_prepared signal
    # NB: Although this signal is officially documented, the documentation
    # notes the following:
    #     "Django uses this signal internally; it's not generally used in 
    #      third-party applications."
    class_prepared.connect(_class_prepared_handler, weak=False)


def field_pre_save(sender, **kwargs):
    '''
    A signal handler to run any pre_save activities and trigger the built-in
    pre_save signals
    1. Detect renamed fields and store the old field name for migration
    2. Detect if the field is just created and store this information.
    3. Detect if the field is just updated and store this information
    4. Trigger the pre creation signal
    5. Trigger the pre update signal
    '''
    MetaField = sender
    meta_field = kwargs['instance']
    
Example 38
 def __init__(self):
     self.pending_operations = {}
     class_prepared.connect(self.signal_receiver)
Example 39
    authmodels = 'django.contrib.auth.models'
    if sender.__name__ == 'User' and sender.__module__ == authmodels:

        # patch the length
        sender._meta.get_field('username').max_length = 80

        # patch the help text
        help_text = "Required. 80 characters or fewer."
        sender._meta.get_field('username').help_text = help_text

        # remove the unique constraint
        sender._meta.get_field('username').unique = False


class_prepared.connect(patch_user)

# Monkey patch the default admin login form with our custom form
def patch_admin_login():
    from django import forms
    from django.contrib import admin
    from django.contrib import auth

    ERROR_MESSAGE = "Please enter a correct organization, username, and password."

    def patched_clean(self):
        organization = self.cleaned_data.get('organization')
        username = self.cleaned_data.get('username')
        password = self.cleaned_data.get('password')
        message = ERROR_MESSAGE
Example 40
    complete_apps = ['django_monkeypatches']

########NEW FILE########
__FILENAME__ = models
import django
from django.core.validators import MaxLengthValidator
from django.utils.translation import ugettext as _
from django.db.models.signals import class_prepared
from django.conf import settings
from longerusername import MAX_USERNAME_LENGTH

def longer_username_signal(sender, *args, **kwargs):
    if (sender.__name__ == "User" and
        sender.__module__ == "django.contrib.auth.models"):
        patch_user_model(sender)
class_prepared.connect(longer_username_signal)

def patch_user_model(model):
    field = model._meta.get_field("username")

    field.max_length = MAX_USERNAME_LENGTH()
    field.help_text = _("Required, %s characters or fewer. Only letters, "
                        "numbers, and @, ., +, -, or _ "
                        "characters." % MAX_USERNAME_LENGTH())

    # patch model field validator because validator doesn't change if we change
    # max_length
    for v in field.validators:
        if isinstance(v, MaxLengthValidator):
            v.limit_value = MAX_USERNAME_LENGTH()
Example 41
        if self.currency_choices != CURRENCY_CHOICES:
            kwargs['currency_choices'] = self.currency_choices
        if self.currency_field_name:
            kwargs['currency_field_name'] = self.currency_field_name
        return name, path, args, kwargs


def patch_managers(sender, **kwargs):
    """
    Patches models managers.
    """
    if sender._meta.proxy_for_model:
        has_money_field = hasattr(sender._meta.proxy_for_model._meta,
                                  'has_money_field')
    else:
        has_money_field = hasattr(sender._meta, 'has_money_field')

    if has_money_field:
        setup_managers(sender)


class_prepared.connect(patch_managers)


class MoneyPatched(Money):
    def __init__(self, *args, **kwargs):
        warn(
            "'djmoney.models.fields.MoneyPatched' is deprecated. Use 'djmoney.money.Money' instead",
            PendingDeprecationWarning)
        super(MoneyPatched, self).__init__(*args, **kwargs)
Example 42
    def setup_fix_parent_and_child_relation():
        from django.db.models.signals import class_prepared

        class_prepared.connect(fix_parent_and_child_relation_signal_handler)
Example 43
class MessageIndex(BaseIndex):
    model = Event


## Register Signals

def register_indexes(**kwargs):
    """
    Grabs all required indexes from filters and registers them.
    """
    from sentry.filters import get_filters
    logger = logging.getLogger('sentry.setup')
    for cls in (f for f in get_filters() if f.column.startswith('data__')):
        MessageIndex.objects.register_index(cls.column, index_to='group')
        logger.debug('Registered index for %r', cls.column)
class_prepared.connect(register_indexes, sender=MessageIndex)


def create_default_project(created_models, verbosity=2, **kwargs):
    if Project in created_models:
        if Project.objects.filter(pk=settings.PROJECT).exists():
            return

        project = Project.objects.create(
            public=settings.ALLOW_PUBLIC_PROJECTS and settings.PUBLIC,
            name='Sentry (Internal)',
            slug='sentry',
        )
        # default key (used by sentry-js client, etc)
        ProjectKey.objects.create(
            project=project,
Example 44
def add_image_fields(sender, **kwargs):
    """
    class_prepared signal handler that checks for the model massmedia.Image
    and adds sized image fields
    """
    if sender.__name__ == "Image" and sender._meta.app_label == 'massmedia':
        large = models.ImageField(upload_to=".", blank=True, verbose_name=_('large image file'))
        medium = models.ImageField(upload_to=".", blank=True, verbose_name=_('medium image file'))
        small = models.ImageField(upload_to=".", blank=True, verbose_name=_('small image file'))
        
        large.contribute_to_class(sender, "large")
        medium.contribute_to_class(sender, "medium")
        small.contribute_to_class(sender, "small")

class_prepared.connect(add_image_fields) 

from massmedia.models import Image
from PIL import Image as PILImage, ImageFile as PILImageFile
from cStringIO import StringIO
from django.core.files.uploadedfile import SimpleUploadedFile
import os
from massmedia.settings import IMAGE_UPLOAD_TO
from time import strftime

try:
    Image._meta.get_field_by_name('large')[0]
except:
    add_image_fields(Image)

Image.image_fields = [
Example 45
        results = cursor.fetchall()
        tables = [r[1] for r in results]
        mapping = cache.get_many(tables)
        for r in results:
            key = version_key(".".join(("cachesignals", r[1])))
            accessor_set = mapping.get(key)
            if accessor_set is None:
                accessor_set = set()
            accessor_set.add(r[2:5])
            mapping[key] = accessor_set
        cache.set_many(mapping, CACHE_SECONDS)
        cache_signals.ready = True


class_prepared.connect(load_cache_signals)


### INVALIDATION FUNCTIONS ###


def post_update_cachebot(sender, instance, **kwargs):
    ## TODO auto add select reverse and related ##
    invalidate_cache(sender, instance)


def post_save_cachebot(sender, instance, **kwargs):
    invalidate_cache(sender, (instance,))


def pre_delete_cachebot(sender, instance, **kwargs):
Example 46
try:
    from south.modelsinspector import add_introspection_rules
    rules = [
        # MoneyField has its own method.
        ((CurrencyField,),
         [],  # No positional args
         {'default': ('default', {'default': DEFAULT_CURRENCY.code}),
          'max_length': ('max_length', {'default': 3})}),
    ]

    # MoneyField implement the serialization in south_field_triple method
    add_introspection_rules(rules, ['^djmoney\.models\.fields\.CurrencyField'])
except ImportError:
    pass


def patch_managers(sender, **kwargs):
    """
    Patches models managers.
    """
    if sender._meta.proxy_for_model:
        has_money_field = hasattr(sender._meta.proxy_for_model._meta, 'has_money_field')
    else:
        has_money_field = hasattr(sender._meta, 'has_money_field')

    if has_money_field:
        setup_managers(sender)


class_prepared.connect(patch_managers)
Example 47
        raise ImproperlyConfigured(
            "No TranslatedFields found on %r, subclasses of "
            "TranslatableModel must define TranslatedFields." % model)

    #### Now we have to work ####

    contribute_translations(model, found.related)

    # Ensure _base_manager cannot be TranslationManager despite use_for_related_fields
    # 1- it is useless unless default_class is overridden
    # 2- in that case, _base_manager is used for saving objects and must not be
    #    translation aware.
    base_mgr = getattr(model, '_base_manager', None)
    if base_mgr is None or isinstance(base_mgr, TranslationManager):
        model.add_to_class('_base_manager', Manager())

    # Replace get_field_by_name with one that warns for common mistakes
    if django.VERSION < (1, 9) and not isinstance(
            model._meta.get_field_by_name, SmartGetFieldByName):
        model._meta.get_field_by_name = MethodType(
            SmartGetFieldByName(model._meta.get_field_by_name), model._meta)
    if not isinstance(model._meta.get_field, SmartGetField):
        model._meta.get_field = MethodType(
            SmartGetField(model._meta.get_field), model._meta)

    # Attach save_translations
    post_save.connect(model.save_translations, sender=model, weak=False)


class_prepared.connect(prepare_translatable_model)
Example 48
                    initially_hidden=hidden or initially_hidden,
                    editable=editable,
                ))
        else:
            raise Exception("Invalid attribute type '%s'." % fieldtype)
    return result


_first = True
if _first:
    _first = False
    # Scan attribute definitions from the settings
    for model, attrlist in settings.ATTRIBUTES:
        registerAttribute(model, attrlist)

    # Scan attribute definitions from the installed apps
    for app in reversed(settings.INSTALLED_APPS):
        try:
            mod = import_module("%s.attributes" % app)
        except ImportError as e:
            # Silently ignore if it's the attributes module which isn't found
            if str(e) not in (
                    "No module named %s.attributes" % app,
                    "No module named '%s.attributes'" % app,
            ):
                raise e

    if _register:
        class_prepared.connect(add_extra_model_fields,
                               dispatch_uid="frepple_attribute_injection")
Example 49
 def contribute_to_class(self, model, name):
     super(BaseManager, self).contribute_to_class(model, name)
     class_prepared.connect(self._class_prepared, sender=model)
Example 50
    field_path, field_args, field_kwargs = entry[1:]
    if "." not in field_path:
        field_path = "django.db.models.%s" % field_path
    try:
        field_class = import_dotted_path(field_path)
    except ImportError:
        raise ImproperlyConfigured("The EXTRA_MODEL_FIELDS setting contains "
                                   "the field '%s' which could not be "
                                   "imported." % entry[1])
    try:
        field = field_class(*field_args, **field_kwargs)
    except TypeError as e:
        raise ImproperlyConfigured("The EXTRA_MODEL_FIELDS setting contains "
                                   "arguments for the field '%s' which could "
                                   "not be applied: %s" % (entry[1], e))
    fields[model_path][field_name] = field


def add_extra_model_fields(sender, **kwargs):
    """
    Injects custom fields onto the given sender model as defined
    by the ``EXTRA_MODEL_FIELDS`` setting.
    """
    model_path = "%s.%s" % (sender.__module__, sender.__name__)
    for field_name, field in fields.get(model_path, {}).items():
        field.contribute_to_class(sender, field_name)


if fields:
    class_prepared.connect(add_extra_model_fields, dispatch_uid="FQFEQ#rfq3r")
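
For context, a sketch of the kind of EXTRA_MODEL_FIELDS entry this machinery consumes, following the (dotted field path, field class, args, kwargs) layout implied by the parsing code; the concrete model and field are illustrative:

EXTRA_MODEL_FIELDS = (
    (
        "mezzanine.blog.models.BlogPost.image",  # target model + new field name
        "ImageField",                            # resolved against django.db.models
        ("Image",),                              # positional args for the field
        {"blank": True, "upload_to": "blog"},    # keyword args for the field
    ),
)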
Example 51
    Resolve relation and call the operation with the specified kwargs.

    The operation will be called when the relation is ready to resolved.
    The original idea was copied from Django 1.2.2 source code thus the
    license belongs to the Django's license (BSD License)

    Args:
        relation (str or class): A relation which you want to resolve
        operation (fn): A callback function which will called with resolved
            relation (class) and the specified kwargs.
    """
    app_label, model_name = get_relation(relation)
    try:
        model = _get_model(app_label, model_name)
        operation(model, **kwargs)
    except AppRegistryNotReady:
        key = (app_label, model_name)
        value = (operation, kwargs)
        _pending_lookups.setdefault(key, []).append(value)


def _do_pending_lookups(sender, **kwargs):
    key = (sender._meta.app_label, sender.__name__)
    for operation, kwargs in _pending_lookups.pop(key, []):
        operation(sender, **kwargs)


from django.db.models.signals import class_prepared

class_prepared.connect(_do_pending_lookups)
Example 52
            # only set the format if the block was just created, or it's blank, and if a format is defined
            if (not block.format or created) and label_tuple[1]:
                block.format = label_tuple[1]
                block.save()

        for label in kwargs['instance'].IMAGE_LABELS:
            Image.objects.get_or_create(
                label=label,
                content_type=ContentType.objects.get_for_model(
                    kwargs['instance']),
                object_id=kwargs['instance'].id)


post_save.connect(add_blocks)
"""
Possible todo: If the base model has a field named _block_LABEL,
save the block's value there as well via post_save?
"""


# Set up the accessor method for block content
def add_methods(sender, **kwargs):
    if issubclass(sender, CMSBaseModel):
        for label_tuple in sender.BLOCK_LABELS:
            setattr(sender, 'block_%s' % label_tuple[0],
                    curry(sender._block_LABEL, label=label_tuple[0]))


# connect the add_methods function to the class_prepared signal
class_prepared.connect(add_methods)
Example 53
        )

    #### Now we have to work ####

    contribute_translations(model, found.related)

    # Ensure _base_manager cannot be TranslationManager despite use_for_related_fields
    # 1- it is useless unless default_class is overridden
    # 2- in that case, _base_manager is used for saving objects and must not be
    #    translation aware.
    base_mgr = getattr(model, '_base_manager', None)
    if base_mgr is None or isinstance(base_mgr, TranslationManager):
        model.add_to_class('_base_manager', Manager())

    # Replace get_field_by_name with one that warns for common mistakes
    if django.VERSION < (1, 9) and not isinstance(model._meta.get_field_by_name, SmartGetFieldByName):
        model._meta.get_field_by_name = MethodType(
            SmartGetFieldByName(model._meta.get_field_by_name),
            model._meta
        )
    if not isinstance(model._meta.get_field, SmartGetField):
        model._meta.get_field = MethodType(
            SmartGetField(model._meta.get_field),
            model._meta
        )

    # Attach save_translations
    post_save.connect(model.save_translations, sender=model, weak=False)

class_prepared.connect(prepare_translatable_model)
Example 54
def activate_branch(branch_obj):
    if not branch_obj.state == u'open':
        raise ValueError('Only open branches can be activated.')
    from django.db import transaction, connections
    cursor = connections['default'].cursor()
    with transaction.atomic():
        cursor.execute("DELETE FROM hydra_activebranch WHERE "
                       "session_id = currval('_hydra_session_id_seq')")
        cursor.execute("INSERT INTO hydra_activebranch (session_id, branch_name) "
                       "VALUES (currval('_hydra_session_id_seq'), %s)",
                       (branch_obj.branch_name,))

def deactivate_branch():
    from django.db import connections
    cursor = connections['default'].cursor()
    cursor.execute("DELETE FROM hydra_activebranch WHERE "
                   "session_id = currval('_hydra_session_id_seq')")

_registered = set()
def hydrize_model(sender=None, **kwargs):
    logger.debug('Model %s is ready.', sender)
    settings.HYDRA_MODELS = set(getattr(settings, 'HYDRA_MODELS', set()) - forbidden_models())
    if (sender not in _registered and sender._meta.app_label != 'hydra' and
                ('%s.%s' % (sender._meta.app_label, sender._meta.model_name)).lower()
                in [s.lower() for s in settings.HYDRA_MODELS]):
        logger.info('Generating Hydra models for %s', sender)
        from .models import generate_hydra_models
        generate_hydra_models(sender)
    _registered.add(sender)
class_prepared.connect(hydrize_model)
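
For context, a sketch of the settings entry the receiver above checks against; the model label is illustrative:

HYDRA_MODELS = {'inventory.Product'}  # compared case-insensitively as "app_label.model_name"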
Example 55
 def contribute_to_class(self, model, name):
     super(BaseManager, self).contribute_to_class(model, name)
     class_prepared.connect(self.__class_prepared, sender=model)
Example 56
 def __init__(self, *args, **kwargs):
     super(CategoriesConfig, self).__init__(*args, **kwargs)
     from django.db.models.signals import class_prepared
     class_prepared.connect(handle_class_prepared)
Example 57
                                   "not be applied: %s" % (entry[1], e))
    fields[model_path][field_name] = field


def add_extra_model_fields(sender, **kwargs):
    """
    Injects custom fields onto the given sender model as defined
    by the ``EXTRA_MODEL_FIELDS`` setting.
    """
    model_path = "%s.%s" % (sender.__module__, sender.__name__)
    for field_name, field in fields.get(model_path, {}).items():
        field.contribute_to_class(sender, field_name)


if fields:
    class_prepared.connect(add_extra_model_fields, dispatch_uid="FQFEQ#rfq3r")

# Override django.contrib.admin.site with LazyAdminSite. It must
# be bound to a separate name (admin_site) for access in autodiscover
# below.

admin_site = LazyAdminSite()
admin.site = admin_site
django_autodiscover = admin.autodiscover


def autodiscover(*args, **kwargs):
    """
    Replaces django's original autodiscover to add a call to
    LazyAdminSite's lazy_registration.
    """
            it's not safe to save it.""".format(self, self.__class__))

    def save(self, *args, **kwargs):
        self._raise_no_db_operation()
        super(EsIndexable, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        self._raise_no_db_operation()
        super(EsIndexable, self).delete(*args, **kwargs)


def add_es_manager(sender, **kwargs):
    # Note: the manager needs to know the subclass
    if issubclass(sender, EsIndexable):
        sender.es = ElasticsearchManager(sender)
class_prepared.connect(add_es_manager)


def es_save_callback(sender, instance, **kwargs):
    # TODO: batch ?! @task ?!
    if not issubclass(sender, EsIndexable):
        return
    instance.es.do_index()


def es_delete_callback(sender, instance, **kwargs):
    if not issubclass(sender, EsIndexable):
        return
    instance.es.delete()