class ModelForm(six.with_metaclass(ModelFormMetaclass, BaseModelForm)):
    # Concrete ModelForm class: BaseModelForm supplies the behaviour, while
    # ModelFormMetaclass collects declared fields/Meta options.  The
    # six.with_metaclass shim keeps the metaclass syntax working on both
    # Python 2 and Python 3.
    pass
# # You should have received a copy of the GNU General Public License # along with Patchwork; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA from __future__ import absolute_import import hashlib import django from django.db import models from django.utils import six if django.VERSION < (1, 8): HashFieldBase = six.with_metaclass(models.SubfieldBase, models.CharField) # noqa else: HashFieldBase = models.CharField class HashField(HashFieldBase): def __init__(self, *args, **kwargs): self.n_bytes = len(hashlib.sha1().hexdigest()) kwargs['max_length'] = self.n_bytes super(HashField, self).__init__(*args, **kwargs) def construct(self, value): if isinstance(value, six.text_type): value = value.encode('utf-8')
class FilterCollection(
        six.with_metaclass(DeclarativeFilterCollectionMetaclass,
                           BaseFilterCollection)):
    # Concrete filter collection: BaseFilterCollection supplies behaviour and
    # DeclarativeFilterCollectionMetaclass gathers declaratively defined
    # filters.  six.with_metaclass keeps this Py2/Py3 compatible.
    pass
class Widget(six.with_metaclass(RenameWidgetMethods)):
    # Base class for form widgets: holds HTML attributes and knows how to
    # render itself through a template (see render()/get_context()) and how
    # to pull its value back out of submitted form data
    # (value_from_datadict()).
    needs_multipart_form = False  # whether this widget requires a multipart form
    is_localized = False
    is_required = False
    supports_microseconds = True

    def __init__(self, attrs=None):
        # Copy the attrs dict so callers can't mutate our state later.
        if attrs is not None:
            self.attrs = attrs.copy()
        else:
            self.attrs = {}

    def __deepcopy__(self, memo):
        # Shallow-copy self but give the copy its own attrs dict.
        obj = copy.copy(self)
        obj.attrs = self.attrs.copy()
        memo[id(self)] = obj
        return obj

    @property
    def is_hidden(self):
        # Only input-type widgets can be hidden; others default to visible.
        return self.input_type == 'hidden' if hasattr(self, 'input_type') else False

    def subwidgets(self, name, value, attrs=None):
        # A plain widget yields only itself; composite widgets override this.
        context = self.get_context(name, value, attrs)
        yield context['widget']

    def format_value(self, value):
        """
        Return a value as it should appear when rendered in a template.
        """
        # NOTE: '' and None both render as "no value" (None).
        if value == '' or value is None:
            return None
        if self.is_localized:
            return formats.localize_input(value)
        return force_text(value)

    def get_context(self, name, value, attrs):
        # Build the template context used by render(); subclasses extend it.
        context = {}
        context['widget'] = {
            'name': name,
            'is_hidden': self.is_hidden,
            'required': self.is_required,
            'value': self.format_value(value),
            'attrs': self.build_attrs(self.attrs, attrs),
            'template_name': self.template_name,
        }
        return context

    def render(self, name, value, attrs=None, renderer=None):
        """
        Returns this Widget rendered as HTML, as a Unicode string.
        """
        context = self.get_context(name, value, attrs)
        return self._render(self.template_name, context, renderer)

    def _render(self, template_name, context, renderer=None):
        # Fall back to the project-wide default renderer when none is given.
        if renderer is None:
            renderer = get_default_renderer()
        return mark_safe(renderer.render(template_name, context))

    def build_attrs(self, base_attrs, extra_attrs=None):
        "Helper function for building an attribute dictionary."
        attrs = base_attrs.copy()
        if extra_attrs is not None:
            attrs.update(extra_attrs)
        return attrs

    def value_from_datadict(self, data, files, name):
        """
        Given a dictionary of data and this widget's name, returns the value
        of this widget. Returns None if it's not provided.
        """
        return data.get(name)

    def value_omitted_from_data(self, data, files, name):
        # Distinguishes "field absent from the POST" from "field posted empty".
        return name not in data

    def id_for_label(self, id_):
        """
        Returns the HTML ID attribute of this Widget for use by a <label>,
        given the ID of the field. Returns None if no ID is available.

        This hook is necessary because some widgets have multiple HTML
        elements and, thus, multiple IDs. In that case, this method should
        return an ID value that corresponds to the first ID in the widget's
        tags.
        """
        return id_

    def use_required_attribute(self, initial):
        # Hidden widgets never get the HTML `required` attribute.
        return not self.is_hidden
class TranslationOptions(with_metaclass(FieldsAggregationMetaClass, object)):
    """
    Declarative options container used when registering a model for
    translation.

    The class-level ``fields`` attribute (aggregated by the metaclass) names
    the translatable fields.  On an instance, ``local_fields`` maps each
    field handled in the model's own database table (including those coming
    from abstract superclasses) to the set of its translation fields, while
    ``fields`` additionally covers fields inherited from concrete
    supermodels — i.e. every translated field available on the model.

    ``related`` flags models that merely participate in a relation with a
    translated model without being translated themselves;
    ``related_fields`` lists the reverse-lookup field names.  Fallback
    values/languages may be supplied as class attributes, and
    ``required_languages`` can demand that certain languages be filled in.
    """
    required_languages = ()

    def __init__(self, model):
        """Initialize empty translation-field mappings for ``model``."""
        self.model = model
        self.registered = False
        self.related = False
        # At this point ``self.fields`` still resolves to the class-level
        # tuple of field names; snapshot it before shadowing it with the
        # instance-level dict below.
        field_names = self.fields
        self.local_fields = {name: set() for name in field_names}
        self.fields = {name: set() for name in field_names}
        self.related_fields = []

    def validate(self):
        """Perform options validation."""
        # TODO: at the moment only required_languages is validated.
        # Maybe check other options as well?
        if not self.required_languages:
            return
        if isinstance(self.required_languages, (tuple, list)):
            self._check_languages(self.required_languages)
            return
        # Dict form: keys are languages (plus the special 'default' key),
        # values are the field names required for that language.
        self._check_languages(self.required_languages.keys(),
                              extra=('default', ))
        for fieldnames in self.required_languages.values():
            if any(f not in self.fields for f in fieldnames):
                raise ImproperlyConfigured(
                    'Fieldname in required_languages which is not in fields option.'
                )

    def _check_languages(self, languages, extra=()):
        """Raise unless every language is available (or listed in ``extra``)."""
        allowed = list(mt_settings.AVAILABLE_LANGUAGES) + list(extra)
        for lang in languages:
            if lang not in allowed:
                raise ImproperlyConfigured(
                    'Language in required_languages which is not in AVAILABLE_LANGUAGES.'
                )

    def update(self, other):
        """Update with options from a superclass."""
        # Only abstract superclasses contribute local fields; concrete ones
        # keep their translation fields in their own table.
        if not other.model._meta.abstract:
            return
        self.local_fields.update(other.local_fields)
        self.fields.update(other.fields)

    def add_translation_field(self, field, translation_field):
        """Record ``translation_field`` as a translation of ``field``."""
        for mapping in (self.local_fields, self.fields):
            mapping[field].add(translation_field)

    def get_field_names(self):
        """Return names of all fields that can be used in filtering."""
        return list(self.fields) + self.related_fields

    def __str__(self):
        own = tuple(self.local_fields)
        inherited = tuple(set(self.fields) - set(own))
        return '%s: %s + %s' % (self.__class__.__name__, own, inherited)
from django.core import exceptions, validators from django.db import models from django.utils import six from django.utils.encoding import force_text, python_2_unicode_compatible from django.utils.text import capfirst from django.utils.translation import ugettext_lazy as _ from .codecs import decode_csv_to_list, encode_list_to_csv from .validators import MaxChoicesValidator, MaxLengthValidator import select_multiple_field.forms as forms DEFAULT_DELIMITER = ',' if django.VERSION < (1, 8): SelectMultipleFieldBase = six.with_metaclass(models.SubfieldBase, models.Field) else: SelectMultipleFieldBase = models.Field @python_2_unicode_compatible class SelectMultipleField(SelectMultipleFieldBase): """Stores multiple selection choices as serialized list""" default_error_messages = { 'blank': _("This field cannot be blank."), 'invalid_type': _( "Types passed as value must be string, list, tuple or None, " "not '%(value)s'."), 'invalid_choice': _( "Select a valid choice. %(value)s is not one of the available "
import django
from django.db import models
from django.utils import six

# Prefer dill's loads/dumps (handles more object types); otherwise fall back
# to the fastest pickle implementation available on this interpreter.
try:
    from dill import loads, dumps
except ImportError:
    # python 3.x does not have cPickle module
    try:
        # cpython 2.x
        from cPickle import loads, dumps  # noqa
    except ImportError:
        from pickle import loads, dumps  # noqa

# Django >= 1.8 removed SubfieldBase; older versions need it so that
# to_python() runs on attribute assignment for the custom field.
if django.VERSION >= (1, 8):
    _PickledObjectField = models.Field
else:
    _PickledObjectField = six.with_metaclass(models.SubfieldBase, models.Field)
class Form(six.with_metaclass(DeclarativeFieldsMetaclass, BaseForm)):
    # Concrete Form class: DeclarativeFieldsMetaclass gathers Field instances
    # declared on the class body; BaseForm provides the validation and
    # rendering machinery.  six.with_metaclass keeps this Py2/Py3 compatible.
    "A collection of Fields, plus their associated data."
class HexIntegerField(
        six.with_metaclass(models.SubfieldBase, models.BigIntegerField)):
    """
    This field stores a hexadecimal *string* of up to 64 bits as an
    unsigned integer on *all* backends including postgres.

    Reasoning: Postgres only supports signed bigints. Since we don't care
    about signedness, we store it as signed, and cast it to unsigned when we
    deal with the actual value (with struct)

    On sqlite and mysql, native unsigned bigint types are used. In all cases,
    the value we deal with in python is always in hex.
    """

    # Enforce the unsigned 64-bit range on the integer value (see
    # run_validators, which converts from hex before validating).
    validators = [
        MinValueValidator(UNSIGNED_64BIT_INT_MIN_VALUE),
        MaxValueValidator(UNSIGNED_64BIT_INT_MAX_VALUE)
    ]

    def db_type(self, connection):
        # mysql/sqlite have native unsigned 64-bit column types; everything
        # else (notably postgres) falls back to a signed bigint.
        engine = connection.settings_dict["ENGINE"]
        if "mysql" in engine:
            return "bigint unsigned"
        elif "sqlite" in engine:
            return "UNSIGNED BIG INT"
        else:
            return super(HexIntegerField, self).db_type(connection=connection)

    def get_prep_value(self, value):
        """ Return the integer value to be stored from the hex string """
        if value is None or value == "":
            return None
        if isinstance(value, six.string_types):
            value = _hex_string_to_unsigned_integer(value)
        # On signed-only backends, reinterpret the unsigned value as signed
        # for storage; from_db_value() reverses this on the way out.
        if _using_signed_storage():
            value = _unsigned_to_signed_integer(value)
        return value

    def from_db_value(self, value, expression, connection, context):
        """ Return an unsigned int representation from all db backends """
        if value is None:
            return value
        if _using_signed_storage():
            value = _signed_to_unsigned_integer(value)
        return value

    def to_python(self, value):
        """ Return a str representation of the hexadecimal """
        if isinstance(value, six.string_types):
            return value
        if value is None:
            return ""
        return _unsigned_integer_to_hex_string(value)

    def formfield(self, **kwargs):
        defaults = {"form_class": HexadecimalField}
        defaults.update(kwargs)
        # yes, that super call is right: skipping BigIntegerField.formfield
        # avoids its min/max bounds, which assume a *signed* 64-bit range.
        return super(models.IntegerField, self).formfield(**defaults)

    def run_validators(self, value):
        # make sure validation is performed on integer value not string value
        value = _hex_string_to_unsigned_integer(value)
        # skip BigIntegerField.run_validators — it would re-add signed
        # 64-bit bounds; our own validators enforce the unsigned range.
        return super(models.BigIntegerField, self).run_validators(value)
class RegexSearchMixin(six.with_metaclass(abc.ABCMeta)):
    """Mixin to adding regex search to a search backend."""

    supports_prefix_matching = True

    def do_search(self, engine_slug, queryset, search_text):
        """Filters the given queryset according the the search logic for this backend."""
        # AND together one regex test per word; each word may match in the
        # title, description or content.
        word_query = Q()
        for word in search_text.split():
            regex = regex_from_word(word)
            word_query &= (Q(title__iregex=regex)
                           | Q(description__iregex=regex)
                           | Q(content__iregex=regex))
        return queryset.filter(word_query)

    def do_filter(self, engine_slug, queryset, search_text):
        """Filters the given queryset according the the search logic for this backend."""
        model = queryset.model
        connection = connections[queryset.db]
        db_table = connection.ops.quote_name(SearchEntry._meta.db_table)
        model_db_table = connection.ops.quote_name(model._meta.db_table)
        pk = model._meta.pk
        # NOTE: `id` shadows the builtin; it is the quoted pk column name.
        id = connection.ops.quote_name(pk.db_column or pk.attname)
        # Add in basic filters.
        # The SQL fragments below are .format() templates filled from
        # word_kwargs; the %s placeholders are bound from word_args.
        word_query = [
            """
            ({db_table}.{engine_slug} = %s)
            """,
            """
            ({db_table}.{content_type_id} = %s)
            """
        ]
        word_kwargs = {
            "db_table": db_table,
            "model_db_table": model_db_table,
            "engine_slug": connection.ops.quote_name("engine_slug"),
            "title": connection.ops.quote_name("title"),
            "description": connection.ops.quote_name("description"),
            "content": connection.ops.quote_name("content"),
            "content_type_id": connection.ops.quote_name("content_type_id"),
            "object_id": connection.ops.quote_name("object_id"),
            "object_id_int": connection.ops.quote_name("object_id_int"),
            "id": id,
            "iregex_operator": connection.operators["iregex"],
        }
        word_args = [
            engine_slug,
            ContentType.objects.get_for_model(model).id,
        ]
        # Add in join.
        # Integer pks are stored in a dedicated column for a faster join.
        if has_int_pk(model):
            word_query.append("""
                ({db_table}.{object_id_int} = {model_db_table}.{id})
            """)
        else:
            word_query.append("""
                ({db_table}.{object_id} = {model_db_table}.{id})
            """)
        # Add in all words.
        for word in search_text.split():
            regex = regex_from_word(word)
            word_query.append("""
                ({db_table}.{title} {iregex_operator}
                 OR {db_table}.{description} {iregex_operator}
                 OR {db_table}.{content} {iregex_operator})
            """)
            # One regex binding per column tested above.
            word_args.extend((regex, regex, regex))
        # Compile the query.
        full_word_query = " AND ".join(word_query).format(**word_kwargs)
        return queryset.extra(
            tables=(db_table, ),
            where=(full_word_query, ),
            params=word_args,
        )
class MyModel(six.with_metaclass(CustomBaseModel, models.Model)):
    """Model subclass with a custom base using six.with_metaclass."""
    # CustomBaseModel is applied as the metaclass; six.with_metaclass keeps
    # the declaration compatible with both Python 2 and Python 3.
class CMSPlugin(six.with_metaclass(PluginModelBase, MP_Node)):
    '''
    The base class for a CMS plugin model. When defining a new custom
    plugin, you should store plugin-instance specific information on a
    subclass of this class.

    An example for this would be to store the number of pictures to
    display in a galery.

    Two restrictions apply when subclassing this to use in your own
    models:
    1. Subclasses of CMSPlugin *cannot be further subclassed*
    2. Subclasses of CMSPlugin cannot define a "text" field.
    '''
    placeholder = models.ForeignKey(Placeholder, editable=False, null=True)
    parent = models.ForeignKey('self', blank=True, null=True, editable=False)
    position = models.PositiveSmallIntegerField(_("position"), blank=True,
                                                null=True, editable=False)
    language = models.CharField(_("language"), max_length=15, blank=False,
                                db_index=True, editable=False)
    plugin_type = models.CharField(_("plugin_name"), max_length=50,
                                   db_index=True, editable=False)
    creation_date = models.DateTimeField(_("creation date"), editable=False,
                                         default=timezone.now)
    changed_date = models.DateTimeField(auto_now=True)
    child_plugin_instances = None
    translatable_content_excluded_fields = []

    class Meta:
        app_label = 'cms'

    class RenderMeta:
        index = 0
        total = 1
        text_enabled = False

    def __reduce__(self):
        """
        Provide pickling support. Normally, this just dispatches to Python's
        standard handling. However, for models with deferred field loading, we
        need to do things manually, as they're dynamically created classes and
        only module-level classes can be pickled by the default path.
        """
        data = self.__dict__
        # The obvious thing to do here is to invoke super().__reduce__()
        # for the non-deferred case. Don't do that.
        # On Python 2.4, there is something wierd with __reduce__,
        # and as a result, the super call will cause an infinite recursion.
        # See #10547 and #12121.
        deferred_fields = [
            f for f in self._meta.fields
            if isinstance(self.__class__.__dict__.get(f.attname),
                          DeferredAttribute)
        ]
        model = self._meta.proxy_for_model
        return (model_unpickle, (model, deferred_fields), data)

    def __str__(self):
        return force_text(self.pk)

    def get_plugin_name(self):
        # Human-readable name registered for this plugin type.
        from cms.plugin_pool import plugin_pool
        return plugin_pool.get_plugin(self.plugin_type).name

    def get_short_description(self):
        instance = self.get_plugin_instance()[0]
        if instance is not None:
            return force_text(instance)
        return _("<Empty>")

    def get_plugin_class(self):
        from cms.plugin_pool import plugin_pool
        return plugin_pool.get_plugin(self.plugin_type)

    def get_plugin_class_instance(self, admin=None):
        plugin_class = self.get_plugin_class()
        # needed so we have the same signature as the original ModelAdmin
        return plugin_class(plugin_class.model, admin)

    def get_plugin_instance(self, admin=None):
        '''
        Given a plugin instance (usually as a CMSPluginBase), this method
        returns a tuple containing:

        instance - The instance AS THE APPROPRIATE SUBCLASS OF
                   CMSPluginBase and not necessarily just 'self', which is
                   often just a CMSPluginBase,
        plugin - the associated plugin class instance (subclass
                 of CMSPlugin)
        '''
        plugin = self.get_plugin_class_instance(admin)
        # Memoized on the instance to avoid repeated downcast queries.
        if hasattr(self, "_inst"):
            return self._inst, plugin
        if plugin.model != self.__class__:  # and self.__class__ == CMSPlugin:
            # (if self is actually a subclass, getattr below would break)
            try:
                instance = plugin.model.objects.get(cmsplugin_ptr=self)
                instance._render_meta = self._render_meta
            except (AttributeError, ObjectDoesNotExist):
                instance = None
        else:
            instance = self
        self._inst = instance
        return self._inst, plugin

    def render_plugin(self, context=None, placeholder=None, admin=False,
                      processors=None):
        # Render this plugin to HTML.  Falls back to the toolbar processor
        # path when the concrete instance is missing or admin preview is off.
        instance, plugin = self.get_plugin_instance()
        if instance and not (admin and not plugin.admin_preview):
            if not placeholder or not isinstance(placeholder, Placeholder):
                placeholder = instance.placeholder
            placeholder_slot = placeholder.slot
            current_app = context.current_app if context else None
            context = PluginContext(context, instance, placeholder,
                                    current_app=current_app)
            context = plugin.render(context, instance, placeholder_slot)
            request = context.get('request', None)
            page = None
            if request:
                page = request.current_page
            context['allowed_child_classes'] = plugin.get_child_classes(
                placeholder_slot, page)
            if plugin.render_plugin:
                template = plugin._get_render_template(context, instance,
                                                       placeholder)
                if not template:
                    raise ValidationError("plugin has no render_template: %s"
                                          % plugin.__class__)
            else:
                template = None
            return render_plugin(context, instance, placeholder, template,
                                 processors, context.current_app)
        else:
            from cms.middleware.toolbar import toolbar_plugin_processor
            if processors and toolbar_plugin_processor in processors:
                if not placeholder:
                    placeholder = self.placeholder
                current_app = context.current_app if context else None
                context = PluginContext(context, self, placeholder,
                                        current_app=current_app)
                template = None
                return render_plugin(context, self, placeholder, template,
                                     processors, context.current_app)
        return ""

    def get_media_path(self, filename):
        pages = self.placeholder.page_set.all()
        if pages.count():
            return pages[0].get_media_path(filename)
        else:  # django 1.0.2 compatibility
            today = date.today()
            return os.path.join(get_cms_setting('PAGE_MEDIA_PATH'),
                                str(today.year), str(today.month),
                                str(today.day), filename)

    @property
    def page(self):
        warnings.warn(
            "Don't use the page attribute on CMSPlugins! CMSPlugins are not "
            "guaranteed to have a page associated with them!",
            DontUsePageAttributeWarning)
        return self.placeholder.page if self.placeholder_id else None

    def get_instance_icon_src(self):
        """
        Get src URL for instance's icon
        """
        instance, plugin = self.get_plugin_instance()
        return plugin.icon_src(instance) if instance else u''

    def get_instance_icon_alt(self):
        """
        Get alt text for instance's icon
        """
        instance, plugin = self.get_plugin_instance()
        return force_text(plugin.icon_alt(instance)) if instance else u''

    def save(self, no_signals=False, *args, **kwargs):
        if not self.depth:
            if self.parent_id or self.parent:
                self.parent.add_child(instance=self)
            else:
                if not self.position and not self.position == 0:
                    # BUGFIX: this previously read `self.position == ...`
                    # (a no-op comparison), so the computed position was
                    # silently discarded and new root plugins were saved
                    # with position=None.
                    self.position = CMSPlugin.objects.filter(
                        parent__isnull=True,
                        placeholder_id=self.placeholder_id).count()
                self.add_root(instance=self)
            # add_child()/add_root() already persist the instance.
            return
        super(CMSPlugin, self).save()

    def reload(self):
        # Fresh copy from the database (treebeard mutates tree columns).
        return CMSPlugin.objects.get(pk=self.pk)

    def move(self, target, pos=None):
        super(CMSPlugin, self).move(target, pos)
        return self.reload()

    def set_base_attr(self, plugin):
        # Copy the CMSPlugin-level (tree + identity) attributes onto another
        # plugin instance.
        for attr in ['parent_id', 'placeholder', 'language', 'plugin_type',
                     'creation_date', 'depth', 'path', 'numchild', 'pk',
                     'position']:
            setattr(plugin, attr, getattr(self, attr))

    def copy_plugin(self, target_placeholder, target_language, parent_cache,
                    no_signals=False):
        """
        Copy this plugin and return the new plugin.

        The logic of this method is the following:

         # get a new generic plugin instance
         # assign the position in the plugin tree
         # save it to let mptt/treebeard calculate the tree attributes
         # then get a copy of the current plugin instance
         # assign to it the id of the generic plugin instance above;
           this will effectively change the generic plugin created above
           into a concrete one
         # copy the tree related attributes from the generic plugin to
           the concrete one
         # save the concrete plugin
         # trigger the copy relations
         # return the generic plugin instance

        This copy logic is required because we don't know what the fields of
        the real plugin are. By getting another instance of it at step 4 and
        then overwriting its ID at step 5, the ORM will copy the custom
        fields for us.
        """
        try:
            plugin_instance, cls = self.get_plugin_instance()
        except KeyError:  # plugin type not found anymore
            return
        # set up some basic attributes on the new_plugin
        new_plugin = CMSPlugin()
        new_plugin.placeholder = target_placeholder
        # we assign a parent to our new plugin
        parent_cache[self.pk] = new_plugin
        if self.parent:
            parent = parent_cache[self.parent_id]
            parent = CMSPlugin.objects.get(pk=parent.pk)
            new_plugin.parent_id = parent.pk
            new_plugin.parent = parent
        new_plugin.language = target_language
        new_plugin.plugin_type = self.plugin_type
        if no_signals:
            from cms.signals import pre_save_plugins
            signals.pre_save.disconnect(pre_save_plugins, sender=CMSPlugin,
                                        dispatch_uid='cms_pre_save_plugin')
            signals.pre_save.disconnect(pre_save_plugins, sender=CMSPlugin)
            new_plugin._no_reorder = True
        new_plugin.save()
        if plugin_instance:
            # get a new instance so references do not get mixed up
            plugin_instance = plugin_instance.__class__.objects.get(
                pk=plugin_instance.pk)
            plugin_instance.pk = new_plugin.pk
            plugin_instance.id = new_plugin.pk
            plugin_instance.placeholder = target_placeholder
            plugin_instance.cmsplugin_ptr = new_plugin
            plugin_instance.language = target_language
            plugin_instance.parent = new_plugin.parent
            plugin_instance.depth = new_plugin.depth
            plugin_instance.path = new_plugin.path
            plugin_instance.numchild = new_plugin.numchild
            plugin_instance._no_reorder = True
            plugin_instance.save()
            old_instance = plugin_instance.__class__.objects.get(pk=self.pk)
            plugin_instance.copy_relations(old_instance)
        if no_signals:
            # Re-attach the signal handler disconnected above.
            signals.pre_save.connect(pre_save_plugins, sender=CMSPlugin,
                                     dispatch_uid='cms_pre_save_plugin')
        return new_plugin

    @classmethod
    def fix_tree(cls, destructive=False):
        """
        Fixes the plugin tree by first calling treebeard fix_tree and the
        recalculating the correct position property for each plugin.
        """
        from cms.utils.plugins import reorder_plugins
        super(CMSPlugin, cls).fix_tree(destructive)
        for placeholder in Placeholder.objects.all():
            for language, __ in settings.LANGUAGES:
                order = CMSPlugin.objects.filter(
                    placeholder_id=placeholder.pk, language=language,
                    parent_id__isnull=True).values_list('pk', flat=True)
                reorder_plugins(placeholder, None, language, order)
                for plugin in CMSPlugin.objects.filter(
                        placeholder_id=placeholder.pk,
                        language=language).order_by('depth', 'path'):
                    order = CMSPlugin.objects.filter(
                        parent_id=plugin.pk).values_list('pk', flat=True)
                    reorder_plugins(placeholder, plugin.pk, language, order)

    def post_copy(self, old_instance, new_old_ziplist):
        """
        Handle more advanced cases (eg Text Plugins) after the original is
        copied
        """
        pass

    def copy_relations(self, old_instance):
        """
        Handle copying of any relations attached to this plugin. Custom
        plugins have to do this themselves!
        """
        pass

    def has_change_permission(self, request):
        page = self.placeholder.page if self.placeholder else None
        if page:
            return page.has_change_permission(request)
        elif self.placeholder:
            return self.placeholder.has_change_permission(request)
        return False

    def get_position_in_placeholder(self):
        """
        1 based position!
        """
        return self.position + 1

    def get_breadcrumb(self):
        from cms.models import Page
        # Build admin edit-URLs for this plugin (or its ancestors); fall back
        # to the Page admin when the attached model has no edit_plugin view.
        model = self.placeholder._get_attached_model() or Page
        breadcrumb = []
        if not self.parent_id:
            try:
                url = force_text(
                    admin_reverse(
                        "%s_%s_edit_plugin" % (model._meta.app_label,
                                               model._meta.model_name),
                        args=[self.pk]))
            except NoReverseMatch:
                url = force_text(
                    admin_reverse(
                        "%s_%s_edit_plugin" % (Page._meta.app_label,
                                               Page._meta.model_name),
                        args=[self.pk]))
            breadcrumb.append({
                'title': force_text(self.get_plugin_name()),
                'url': url
            })
            return breadcrumb
        for parent in self.get_ancestors().reverse():
            try:
                url = force_text(
                    admin_reverse(
                        "%s_%s_edit_plugin" % (model._meta.app_label,
                                               model._meta.model_name),
                        args=[parent.pk]))
            except NoReverseMatch:
                url = force_text(
                    admin_reverse(
                        "%s_%s_edit_plugin" % (Page._meta.app_label,
                                               Page._meta.model_name),
                        args=[parent.pk]))
            breadcrumb.append({
                'title': force_text(parent.get_plugin_name()),
                'url': url
            })
        return breadcrumb

    def get_breadcrumb_json(self):
        result = json.dumps(self.get_breadcrumb())
        result = mark_safe(result)
        return result

    def num_children(self):
        return self.numchild

    def notify_on_autoadd(self, request, conf):
        """
        Method called when we auto add this plugin via default_plugins in
        CMS_PLACEHOLDER_CONF.
        Some specific plugins may have some special stuff to do when they are
        auto added.
        """
        pass

    def notify_on_autoadd_children(self, request, conf, children):
        """
        Method called when we auto add children to this plugin via
        default_plugins/<plugin>/children in CMS_PLACEHOLDER_CONF.
        Some specific plugins may have some special stuff to do when we add
        children to them. ie : TextPlugin must update its content to add HTML
        tags to be able to see his children in WYSIWYG.
        """
        pass

    def get_translatable_content(self):
        """
        Returns {field_name: field_contents} for translatable fields, where
        field_contents > ''
        """
        fields = (f for f in self._meta.fields
                  if isinstance(f, (models.CharField, models.TextField)) and
                  f.editable and not f.choices and
                  f.name not in self.translatable_content_excluded_fields)
        # filter(itemgetter(1), ...) drops pairs whose value is falsy ('').
        return dict(filter(itemgetter(1),
                           ((f.name, getattr(self, f.name)) for f in fields)))

    def set_translatable_content(self, fields):
        for field, value in fields.items():
            setattr(self, field, value)
        self.save()
        # Report whether every value survived the save round-trip.
        return all(getattr(self, field) == value
                   for field, value in fields.items())

    def delete(self, no_mp=False, *args, **kwargs):
        # no_mp=True bypasses the treebeard (materialized-path) delete and
        # removes only this row.
        if no_mp:
            Model.delete(self, *args, **kwargs)
        else:
            super(CMSPlugin, self).delete(*args, **kwargs)

    @property
    def add_url(self):
        """
        Returns a custom url to add plugin instances
        """
        return None

    @property
    def edit_url(self):
        """
        Returns a custom url to edit plugin instances
        """
        return None

    @property
    def move_url(self):
        """
        Returns a custom url to move plugin instances
        """
        return None

    @property
    def delete_url(self):
        """
        Returns a custom url to delete plugin instances
        """
        return None

    @property
    def copy_url(self):
        """
        Returns a custom url to copy plugin instances
        """
        return None
class GenericForeignKey(six.with_metaclass(RenameGenericForeignKeyMethods)):
    """
    Provides a generic relation to any object through content-type/object-id
    fields.
    """

    def __init__(self, ct_field="content_type", fk_field="object_id",
                 for_concrete_model=True):
        # Names of the two concrete fields on the model that back this
        # virtual field.
        self.ct_field = ct_field
        self.fk_field = fk_field
        self.for_concrete_model = for_concrete_model
        self.editable = False

    def contribute_to_class(self, cls, name):
        # Register this as a virtual field and install the descriptor on the
        # model class.
        self.name = name
        self.model = cls
        self.cache_attr = "_%s_cache" % name
        cls._meta.add_virtual_field(self)

        # Only run pre-initialization field assignment on non-abstract models
        if not cls._meta.abstract:
            signals.pre_init.connect(self.instance_pre_init, sender=cls)

        setattr(cls, name, self)

    def __str__(self):
        model = self.model
        app = model._meta.app_label
        return '%s.%s.%s' % (app, model._meta.object_name, self.name)

    def check(self, **kwargs):
        # System-check hook: aggregate all field-level validation errors.
        errors = []
        errors.extend(self._check_field_name())
        errors.extend(self._check_object_id_field())
        errors.extend(self._check_content_type_field())
        return errors

    def _check_field_name(self):
        if self.name.endswith("_"):
            return [
                checks.Error(
                    'Field names must not end with an underscore.',
                    hint=None,
                    obj=self,
                    id='fields.E001',
                )
            ]
        else:
            return []

    def _check_object_id_field(self):
        # The object-id field must actually exist on the model.
        try:
            self.model._meta.get_field(self.fk_field)
        except FieldDoesNotExist:
            return [
                checks.Error(
                    "The GenericForeignKey object ID references the non-existent field '%s'."
                    % self.fk_field,
                    hint=None,
                    obj=self,
                    id='contenttypes.E001',
                )
            ]
        else:
            return []

    def _check_content_type_field(self):
        """
        Check if field named `field_name` in model `model` exists and is
        valid content_type field (is a ForeignKey to ContentType).
        """
        try:
            field = self.model._meta.get_field(self.ct_field)
        except FieldDoesNotExist:
            return [
                checks.Error(
                    "The GenericForeignKey content type references the non-existent field '%s.%s'."
                    % (self.model._meta.object_name, self.ct_field),
                    hint=None,
                    obj=self,
                    id='contenttypes.E002',
                )
            ]
        else:
            if not isinstance(field, models.ForeignKey):
                return [
                    checks.Error(
                        "'%s.%s' is not a ForeignKey."
                        % (self.model._meta.object_name, self.ct_field),
                        hint=
                        "GenericForeignKeys must use a ForeignKey to 'contenttypes.ContentType' as the 'content_type' field.",
                        obj=self,
                        id='contenttypes.E003',
                    )
                ]
            elif field.rel.to != ContentType:
                return [
                    checks.Error(
                        "'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'."
                        % (self.model._meta.object_name, self.ct_field),
                        hint=
                        "GenericForeignKeys must use a ForeignKey to 'contenttypes.ContentType' as the 'content_type' field.",
                        obj=self,
                        id='contenttypes.E004',
                    )
                ]
            else:
                return []

    def instance_pre_init(self, signal, sender, args, kwargs, **_kwargs):
        """
        Handles initializing an object with the generic FK instead of
        content-type/object-id fields.
        """
        if self.name in kwargs:
            value = kwargs.pop(self.name)
            if value is not None:
                kwargs[self.ct_field] = self.get_content_type(obj=value)
                kwargs[self.fk_field] = value._get_pk_val()
            else:
                kwargs[self.ct_field] = None
                kwargs[self.fk_field] = None

    def get_content_type(self, obj=None, id=None, using=None):
        # Resolve a ContentType either from a model instance (using its db)
        # or from a raw content-type id.
        if obj is not None:
            return ContentType.objects.db_manager(obj._state.db).get_for_model(
                obj, for_concrete_model=self.for_concrete_model)
        elif id is not None:
            return ContentType.objects.db_manager(using).get_for_id(id)
        else:
            # This should never happen. I love comments like this, don't you?
            raise Exception("Impossible arguments to GFK.get_content_type!")

    def get_prefetch_queryset(self, instances, queryset=None):
        if queryset is not None:
            raise ValueError("Custom queryset can't be used for this lookup.")

        # For efficiency, group the instances by content type and then do one
        # query per model
        fk_dict = defaultdict(set)
        # We need one instance for each group in order to get the right db:
        instance_dict = {}
        ct_attname = self.model._meta.get_field(self.ct_field).get_attname()
        for instance in instances:
            # We avoid looking for values if either ct_id or fkey value is None
            ct_id = getattr(instance, ct_attname)
            if ct_id is not None:
                fk_val = getattr(instance, self.fk_field)
                if fk_val is not None:
                    fk_dict[ct_id].add(fk_val)
                    instance_dict[ct_id] = instance

        ret_val = []
        for ct_id, fkeys in fk_dict.items():
            instance = instance_dict[ct_id]
            ct = self.get_content_type(id=ct_id, using=instance._state.db)
            ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys))

        # For doing the join in Python, we have to match both the FK val and the
        # content type, so we use a callable that returns a (fk, class) pair.
        def gfk_key(obj):
            ct_id = getattr(obj, ct_attname)
            if ct_id is None:
                return None
            else:
                model = self.get_content_type(
                    id=ct_id, using=obj._state.db).model_class()
                return (model._meta.pk.get_prep_value(
                    getattr(obj, self.fk_field)), model)

        return (ret_val,
                lambda obj: (obj._get_pk_val(), obj.__class__),
                gfk_key,
                True,
                self.cache_attr)

    def is_cached(self, instance):
        return hasattr(instance, self.cache_attr)

    def __get__(self, instance, instance_type=None):
        # Descriptor access: class access returns the field itself.
        if instance is None:
            return self
        try:
            return getattr(instance, self.cache_attr)
        except AttributeError:
            rel_obj = None

            # Make sure to use ContentType.objects.get_for_id() to ensure that
            # lookups are cached (see ticket #5570). This takes more code than
            # the naive ``getattr(instance, self.ct_field)``, but has better
            # performance when dealing with GFKs in loops and such.
            f = self.model._meta.get_field(self.ct_field)
            ct_id = getattr(instance, f.get_attname(), None)
            if ct_id is not None:
                ct = self.get_content_type(id=ct_id, using=instance._state.db)
                try:
                    rel_obj = ct.get_object_for_this_type(
                        pk=getattr(instance, self.fk_field))
                except ObjectDoesNotExist:
                    # A dangling generic reference resolves to None.
                    pass
            setattr(instance, self.cache_attr, rel_obj)
            return rel_obj

    def __set__(self, instance, value):
        # Assigning an object sets both backing fields and primes the cache.
        ct = None
        fk = None
        if value is not None:
            ct = self.get_content_type(obj=value)
            fk = value._get_pk_val()

        setattr(instance, self.ct_field, ct)
        setattr(instance, self.fk_field, fk)
        setattr(instance, self.cache_attr, value)
DJ_VERSION = StrictVersion(get_version()) # # Get the base class for custom model fields # if DJ_VERSION >= StrictVersion('1.8'): # Django 1.8 and later - Custom Fields just override Field FieldBase = Field else: # Django 1.7 and lower require SubfieldBase from django.db.models import SubfieldBase from django.utils.six import with_metaclass FieldBase = with_metaclass(SubfieldBase, Field) # # Get the 'blank' value for a field # def _get_blank_value_18(field): """Get the value for blank fields in Django 1.8 and earlier.""" if field.null: return None else: return field.value_to_string(None) def _get_blank_value_19(field):
class Event(with_metaclass(ModelBase, *get_model_bases('Event'))): ''' This model stores meta data for a date. You can relate this data to many other models. ''' start = models.DateTimeField(_("start"), db_index=True) end = models.DateTimeField( _("end"), null=True, blank=True, help_text=_("The end time must be later than the start time.")) title = models.CharField(_("title"), max_length=255) description = models.TextField(_("description"), blank=True) creator = models.ForeignKey(DjangoUser, on_delete=models.CASCADE, verbose_name=_("user"), related_name='user') created_on = models.DateTimeField(_("created on"), auto_now_add=True) updated_on = models.DateTimeField(_("updated on"), auto_now=True) rule = models.ForeignKey( Rule, on_delete=models.CASCADE, null=True, blank=True, verbose_name=_("rule"), help_text=_("Select '----' for a one time only event.")) end_recurring_period = models.DateTimeField( _("end recurring period"), null=True, blank=True, db_index=True, help_text=_("This date is ignored for one time only events.")) calendar = models.ForeignKey(Calendar, on_delete=models.CASCADE, null=True, blank=True, verbose_name=_("calendar")) color_event = models.CharField(_("Color event"), blank=True, max_length=10) by_week_day = models.CharField(_("By week day"), blank=True, max_length=26, default="") objects = EventManager() all_day = models.BooleanField(default=True) class Meta(object): verbose_name = _('event') verbose_name_plural = _('events') app_label = 'schedule' index_together = (('start', 'end'), ) def __str__(self): return ugettext('%(title)s: %(start)s - %(end)s') % { 'title': self.title, 'start': date(self.start, django_settings.DATE_FORMAT), 'end': date(self.end, django_settings.DATE_FORMAT), } @property def seconds(self): return (self.end - self.start).total_seconds() @property def minutes(self): return float(self.seconds) / 60 @property def hours(self): return float(self.seconds) / 3600 def get_absolute_url(self): return reverse('event', args=[self.id]) 
def get_occurrences(self, start, end, clear_prefetch=True): if clear_prefetch: persisted_occurrences = self.occurrence_set.select_related( None).all() else: persisted_occurrences = self.occurrence_set.all() occ_replacer = OccurrenceReplacer(persisted_occurrences) occurrences = self._get_occurrence_list(start, end) final_occurrences = [] for occ in occurrences: # replace occurrences with their persisted counterparts if occ_replacer.has_occurrence(occ): p_occ = occ_replacer.get_occurrence(occ) # ...but only if they are within this period if p_occ.start < end and p_occ.end >= start: final_occurrences.append(p_occ) else: final_occurrences.append(occ) # then add persisted occurrences which originated outside of this period but now # fall within it final_occurrences += occ_replacer.get_additional_occurrences( start, end) return final_occurrences def get_rrule_object(self, tzinfo): from dateutil.rrule import WEEKLY, MO, TU, WE, TH, FR, SA, SU if self.rule is None: return params = self._event_params() frequency = self.rule.rrule_frequency() if timezone.is_naive(self.start): dtstart = self.start else: dtstart = tzinfo.normalize(self.start).replace(tzinfo=None) if self.end_recurring_period is None: until = None elif timezone.is_naive(self.end_recurring_period): until = self.end_recurring_period else: until = tzinfo.normalize( self.end_recurring_period.astimezone(tzinfo)).replace( tzinfo=None) if frequency == WEEKLY: list_week_day = [] for day in self.by_week_day.split(','): if day in ["MO", "TU", "WE", "TH", "FR", "SA", "SU"]: list_week_day.append(eval(day)) if list_week_day: params["byweekday"] = tuple(list_week_day) return rrule.rrule( frequency, dtstart=dtstart, until=until, **params, ) return rrule.rrule(frequency, dtstart=dtstart, until=until, **params) def _create_occurrence(self, start, end=None): if end is None: end = start + (self.end - self.start) return Occurrence(event=self, start=start, end=end, original_start=start, original_end=end, all_day=self.all_day) def 
get_occurrence(self, date): use_naive = timezone.is_naive(date) tzinfo = timezone.utc if timezone.is_naive(date): date = timezone.make_aware(date, timezone.utc) if date.tzinfo: tzinfo = date.tzinfo rule = self.get_rrule_object(tzinfo) if rule: next_occurrence = rule.after( tzinfo.normalize(date).replace(tzinfo=None), inc=True) next_occurrence = tzinfo.localize(next_occurrence) else: next_occurrence = self.start if next_occurrence == date: try: return Occurrence.objects.get(event=self, original_start=date) except Occurrence.DoesNotExist: if use_naive: next_occurrence = timezone.make_naive( next_occurrence, tzinfo) return self._create_occurrence(next_occurrence) def _get_occurrence_list(self, start, end): """ Returns a list of occurrences that fall completely or partially inside the timespan defined by start (inclusive) and end (exclusive) """ if self.rule is not None: duration = self.end - self.start use_naive = timezone.is_naive(start) # Use the timezone from the start date tzinfo = timezone.utc if start.tzinfo: tzinfo = start.tzinfo # Limit timespan to recurring period occurrences = [] if self.end_recurring_period and self.end_recurring_period < end: end = self.end_recurring_period start_rule = self.get_rrule_object(tzinfo) start = start.replace(tzinfo=None) if timezone.is_aware(end): end = tzinfo.normalize(end).replace(tzinfo=None) o_starts = [] # Occurrences that start before the timespan but ends inside or after timespan closest_start = start_rule.before(start, inc=False) if closest_start is not None and closest_start + duration > start: o_starts.append(closest_start) # Occurrences starts that happen inside timespan (end-inclusive) occs = start_rule.between(start, end, inc=True) # The occurrence that start on the end of the timespan is potentially # included above, lets remove if thats the case. 
if len(occs) > 0: if occs[-1] == end: occs.pop() # Add the occurrences found inside timespan o_starts.extend(occs) # Create the Occurrence objects for the found start dates for o_start in o_starts: o_start = tzinfo.localize(o_start) if use_naive: o_start = timezone.make_naive(o_start, tzinfo) o_end = o_start + duration occurrence = self._create_occurrence(o_start, o_end) if occurrence not in occurrences: occurrences.append(occurrence) return occurrences else: # check if event is in the period if self.start < end and self.end > start: return [self._create_occurrence(self.start)] else: return [] def _occurrences_after_generator(self, after=None): """ returns a generator that produces unpresisted occurrences after the datetime ``after``. (Optionally) This generator will return up to ``max_occurrences`` occurrences or has reached ``self.end_recurring_period``, whichever is smallest. """ tzinfo = timezone.utc if after is None: after = timezone.now() elif not timezone.is_naive(after): tzinfo = after.tzinfo rule = self.get_rrule_object(tzinfo) if rule is None: if self.end > after: yield self._create_occurrence(self.start, self.end) return date_iter = iter(rule) difference = self.end - self.start loop_counter = 0 for o_start in date_iter: o_start = tzinfo.localize(o_start) o_end = o_start + difference if o_end > after: yield self._create_occurrence(o_start, o_end) loop_counter += 1 def occurrences_after(self, after=None, max_occurrences=None): """ returns a generator that produces occurrences after the datetime ``after``. Includes all of the persisted Occurrences. (Optionally) This generator will return up to ``max_occurrences`` occurrences or has reached ``self.end_recurring_period``, whichever is smallest. 
""" if after is None: after = timezone.now() occ_replacer = OccurrenceReplacer(self.occurrence_set.all()) generator = self._occurrences_after_generator(after) trickies = list( self.occurrence_set.filter(original_start__lte=after, start__gte=after).order_by('start')) for index, nxt in enumerate(generator): if max_occurrences and index > max_occurrences - 1: break if (len(trickies) > 0 and (nxt is None or nxt.start > trickies[0].start)): yield trickies.pop(0) yield occ_replacer.get_occurrence(nxt) @property def event_start_params(self): start = self.start params = { 'byyearday': start.timetuple().tm_yday, 'bymonth': start.month, 'bymonthday': start.day, 'byweekno': start.isocalendar()[1], 'byweekday': start.weekday(), 'byhour': start.hour, 'byminute': start.minute, 'bysecond': start.second } return params @property def event_rule_params(self): return self.rule.get_params() def _event_params(self): freq_order = freq_dict_order[self.rule.frequency] rule_params = self.event_rule_params start_params = self.event_start_params event_params = {} if len(rule_params) == 0: return event_params for param in rule_params: # start date influences rule params if (param in param_dict_order and param_dict_order[param] > freq_order and param in start_params): sp = start_params[param] if sp == rule_params[param] or (hasattr( rule_params[param], '__iter__') and sp in rule_params[param]): event_params[param] = [sp] else: event_params[param] = rule_params[param] else: event_params[param] = rule_params[param] return event_params @property def event_params(self): event_params = self._event_params() start = self.effective_start if not start: empty = True elif self.end_recurring_period and start > self.end_recurring_period: empty = True return event_params, empty @property def effective_start(self): if self.pk and self.end_recurring_period: occ_generator = self._occurrences_after_generator(self.start) try: return next(occ_generator).start except StopIteration: pass elif self.pk: return 
self.start return None @property def effective_end(self): if self.pk and self.end_recurring_period: params, empty = self.event_params if empty or not self.effective_start: return None elif self.end_recurring_period: occ = None occ_generator = self._occurrences_after_generator(self.start) for occ in occ_generator: pass return occ.end elif self.pk: return datetime.max return None
class BaseManager(six.with_metaclass(RenameManagerMethods)):
    # Tracks each time a Manager instance is created. Used to retain order.
    creation_counter = 0

    def __init__(self):
        super(BaseManager, self).__init__()
        self._set_creation_counter()
        self.model = None          # set by contribute_to_class()
        self._inherited = False    # True on copies made for subclasses
        self._db = None            # explicit database alias, if any
        self._hints = {}           # routing hints forwarded to the router

    def __str__(self):
        """ Return "app_label.model_label.manager_name". """
        model = self.model
        opts = model._meta
        app = model._meta.app_label
        # Look the name up in the registries filled by contribute_to_class().
        manager_name = next(name for (_, name, manager)
                            in opts.concrete_managers + opts.abstract_managers
                            if manager == self)
        return '%s.%s.%s' % (app, model._meta.object_name, manager_name)

    def check(self, **kwargs):
        # Hook for the system check framework; no checks by default.
        return []

    @classmethod
    def _get_queryset_methods(cls, queryset_class):
        """Build proxy methods for every public QuerySet method the manager
        class does not already define."""
        def create_method(name, method):
            def manager_method(self, *args, **kwargs):
                return getattr(self.get_queryset(), name)(*args, **kwargs)
            manager_method.__name__ = method.__name__
            manager_method.__doc__ = method.__doc__
            return manager_method

        new_methods = {}
        # Refs http://bugs.python.org/issue1785.
        predicate = inspect.isfunction if six.PY3 else inspect.ismethod
        for name, method in inspect.getmembers(queryset_class,
                                               predicate=predicate):
            # Only copy missing methods.
            if hasattr(cls, name):
                continue
            # Only copy public methods or methods with the attribute
            # `queryset_only=False`.
            queryset_only = getattr(method, 'queryset_only', None)
            if queryset_only or (queryset_only is None and
                                 name.startswith('_')):
                continue
            # Copy the method onto the manager.
            new_methods[name] = create_method(name, method)
        return new_methods

    @classmethod
    def from_queryset(cls, queryset_class, class_name=None):
        """Create a Manager subclass that proxies ``queryset_class``."""
        if class_name is None:
            class_name = '%sFrom%s' % (cls.__name__, queryset_class.__name__)
        class_dict = {
            '_queryset_class': queryset_class,
        }
        class_dict.update(cls._get_queryset_methods(queryset_class))
        return type(class_name, (cls,), class_dict)

    def contribute_to_class(self, model, name):
        """Attach this manager to ``model`` under ``name`` and register it
        in the model's meta bookkeeping."""
        # TODO: Use weakref because of possible memory leak / circular
        # reference.
        self.model = model
        # Only contribute the manager if the model is concrete
        if model._meta.abstract:
            setattr(model, name, AbstractManagerDescriptor(model))
        elif model._meta.swapped:
            setattr(model, name, SwappedManagerDescriptor(model))
        else:
            # if not model._meta.abstract and not model._meta.swapped:
            setattr(model, name, ManagerDescriptor(self))
        # The earliest-created manager wins the _default_manager slot.
        if not getattr(model, '_default_manager', None) or \
                self.creation_counter < model._default_manager.creation_counter:
            model._default_manager = self
        if model._meta.abstract or (self._inherited and
                                    not self.model._meta.proxy):
            model._meta.abstract_managers.append(
                (self.creation_counter, name, self))
        else:
            model._meta.concrete_managers.append(
                (self.creation_counter, name, self))

    def _set_creation_counter(self):
        """
        Sets the creation counter value for this instance and increments the
        class-level copy.
        """
        self.creation_counter = BaseManager.creation_counter
        BaseManager.creation_counter += 1

    def _copy_to_model(self, model):
        """
        Makes a copy of the manager and assigns it to 'model', which should be
        a child of the existing model (used when inheriting a manager from an
        abstract base class).
        """
        assert issubclass(model, self.model)
        mgr = copy.copy(self)
        mgr._set_creation_counter()
        mgr.model = model
        mgr._inherited = True
        return mgr

    def db_manager(self, using=None, hints=None):
        """Return a copy of this manager bound to a database alias/hints."""
        obj = copy.copy(self)
        obj._db = using or self._db
        obj._hints = hints or self._hints
        return obj

    @property
    def db(self):
        # Explicit alias wins; otherwise ask the router.
        return self._db or router.db_for_read(self.model, **self._hints)

    #######################
    # PROXIES TO QUERYSET #
    #######################

    def get_queryset(self):
        """
        Returns a new QuerySet object. Subclasses can override this method to
        easily customize the behavior of the Manager.
        """
        return self._queryset_class(self.model, using=self._db,
                                    hints=self._hints)

    def all(self):
        # We can't proxy this method through the `QuerySet` like we do for the
        # rest of the `QuerySet` methods. This is because `QuerySet.all()`
        # works by creating a "copy" of the current queryset and in making
        # said copy, all the cached `prefetch_related` lookups are lost. See
        # the implementation of `RelatedManager.get_queryset()` for a better
        # understanding of how this comes into play.
        return self.get_queryset()
# Courtesy of django-social-auth import json import django from django.core.exceptions import ValidationError from django.db import models from django.utils import six try: from django.utils.encoding import smart_unicode as smart_text except ImportError: from django.utils.encoding import smart_text if django.VERSION < (1, 8): JSONFieldBase = six.with_metaclass(models.SubfieldBase, models.TextField) else: JSONFieldBase = models.TextField class JSONField(JSONFieldBase): """Simple JSON field that stores python structures as JSON strings on database. """ def from_db_value(self, value, expression, connection, context): return self.to_python(value) def to_python(self, value): """ Convert the input JSON value into python structures, raises django.core.exceptions.ValidationError if the data can't be converted.
class Field(six.with_metaclass(RenameFieldMethods, object)):
    """Base class for all form fields: validates and cleans submitted data
    and configures the widget used to render it."""
    widget = TextInput  # Default widget to use when rendering this type of Field.
    hidden_widget = HiddenInput  # Default widget to use when rendering this as "hidden".
    default_validators = []  # Default set of validators
    # Add an 'invalid' entry to default_error_message if you want a specific
    # field error message not raised by the field validators.
    default_error_messages = {
        'required': _('This field is required.'),
    }
    empty_values = list(validators.EMPTY_VALUES)

    # Tracks each time a Field instance is created. Used to retain order.
    creation_counter = 0

    # FIX: ``validators`` default was a shared mutable list (``[]``); an
    # immutable empty tuple avoids any cross-instance aliasing. Callers that
    # pass their own list are unaffected.
    def __init__(self, required=True, widget=None, label=None, initial=None,
                 help_text='', error_messages=None, show_hidden_initial=False,
                 validators=(), localize=False, disabled=False,
                 label_suffix=None):
        # required -- Boolean that specifies whether the field is required.
        #             True by default.
        # widget -- A Widget class, or instance of a Widget class, that should
        #           be used for this Field when displaying it. Each Field has a
        #           default Widget that it'll use if you don't specify this. In
        #           most cases, the default widget is TextInput.
        # label -- A verbose name for this field, for use in displaying this
        #          field in a form. By default, Django will use a "pretty"
        #          version of the form field name, if the Field is part of a
        #          Form.
        # initial -- A value to use in this Field's initial display. This value
        #            is *not* used as a fallback if data isn't given.
        # help_text -- An optional string to use as "help text" for this Field.
        # error_messages -- An optional dictionary to override the default
        #                   messages that the field will raise.
        # show_hidden_initial -- Boolean that specifies if it is needed to render a
        #                        hidden widget with initial value after widget.
        # validators -- List of additional validators to use
        # localize -- Boolean that specifies if the field should be localized.
        # disabled -- Boolean that specifies whether the field is disabled, that
        #             is its widget is shown in the form but not editable.
        # label_suffix -- Suffix to be added to the label. Overrides
        #                 form's label_suffix.
        self.required, self.label, self.initial = required, label, initial
        self.show_hidden_initial = show_hidden_initial
        self.help_text = help_text
        self.disabled = disabled
        self.label_suffix = label_suffix
        widget = widget or self.widget
        if isinstance(widget, type):
            widget = widget()

        # Trigger the localization machinery if needed.
        self.localize = localize
        if self.localize:
            widget.is_localized = True

        # Let the widget know whether it should display as required.
        widget.is_required = self.required

        # Hook into self.widget_attrs() for any Field-specific HTML attributes.
        extra_attrs = self.widget_attrs(widget)
        if extra_attrs:
            widget.attrs.update(extra_attrs)

        self.widget = widget

        # Increase the creation counter, and save our local copy.
        self.creation_counter = Field.creation_counter
        Field.creation_counter += 1

        # Collect default_error_messages along the MRO so subclasses inherit
        # and override message keys, then apply instance overrides.
        messages = {}
        for c in reversed(self.__class__.__mro__):
            messages.update(getattr(c, 'default_error_messages', {}))
        messages.update(error_messages or {})
        self.error_messages = messages

        # list() accepts both the tuple default and caller-supplied lists,
        # and guarantees each instance owns a fresh list.
        self.validators = self.default_validators + list(validators)

        super(Field, self).__init__()

    def prepare_value(self, value):
        """Hook for transforming a value before it is rendered."""
        return value

    def to_python(self, value):
        """Hook for coercing submitted data to a Python object."""
        return value

    def validate(self, value):
        """Raise ValidationError if a required field received no value."""
        if value in self.empty_values and self.required:
            raise ValidationError(self.error_messages['required'],
                                  code='required')

    def run_validators(self, value):
        """Run every validator, collecting all errors into one
        ValidationError; empty values are skipped."""
        if value in self.empty_values:
            return
        errors = []
        for v in self.validators:
            try:
                v(value)
            except ValidationError as e:
                # Allow field-level error_messages to override a validator's
                # message by code.
                if hasattr(e, 'code') and e.code in self.error_messages:
                    e.message = self.error_messages[e.code]
                errors.extend(e.error_list)
        if errors:
            raise ValidationError(errors)

    def clean(self, value):
        """
        Validates the given value and returns its "cleaned" value as an
        appropriate Python object.

        Raises ValidationError for any errors.
        """
        value = self.to_python(value)
        self.validate(value)
        self.run_validators(value)
        return value

    def bound_data(self, data, initial):
        """
        Return the value that should be shown for this field on render of a
        bound form, given the submitted POST data for the field and the initial
        data, if any.

        For most fields, this will simply be data; FileFields need to handle
        it a bit differently.
        """
        return data

    def widget_attrs(self, widget):
        """
        Given a Widget instance (*not* a Widget class), returns a dictionary of
        any HTML attributes that should be added to the Widget, based on this
        Field.
        """
        return {}

    def has_changed(self, initial, data):
        """
        Return True if data differs from initial.
        """
        # For purposes of seeing whether something has changed, None is
        # the same as an empty string, if the data or initial value we get
        # is None, replace it w/ ''.
        initial_value = initial if initial is not None else ''
        try:
            data = self.to_python(data)
            if hasattr(self, '_coerce'):
                data = self._coerce(data)
                initial_value = self._coerce(initial_value)
        except ValidationError:
            # Uncoercible data is always considered changed.
            return True
        data_value = data if data is not None else ''
        return initial_value != data_value

    def get_bound_field(self, form, field_name):
        """
        Return a BoundField instance that will be used when accessing the form
        field in a template.
        """
        return BoundField(form, self, field_name)

    def __deepcopy__(self, memo):
        # Copy the widget deeply (it carries per-field attrs) and give the
        # copy its own validators list.
        result = copy.copy(self)
        memo[id(self)] = result
        result.widget = copy.deepcopy(self.widget, memo)
        result.validators = self.validators[:]
        return result
from django.db.models.fields import CharField from django.forms.widgets import Input from django.template.loader import render_to_string from django.utils.six import with_metaclass from django.utils.translation import ugettext_lazy as _ from django.contrib.admin.templatetags.admin_static import static from filebrowser.base import FileObject from filebrowser.settings import ADMIN_THUMBNAIL, EXTENSIONS, UPLOAD_TEMPDIR from filebrowser.sites import site if django.VERSION >= (1, 8): _charfield_base_class = CharField else: _charfield_base_class = with_metaclass(models.SubfieldBase, CharField) class FileBrowseWidget(Input): input_type = 'text' class Media: js = ('filebrowser/js/AddFileBrowser.js',) def __init__(self, attrs={}): super(FileBrowseWidget, self).__init__(attrs) self.site = attrs.get('filebrowser_site', None) self.directory = attrs.get('directory', '') self.extensions = attrs.get('extensions', '') self.format = attrs.get('format', '') if attrs is not None:
class TypedModel(with_metaclass(TypedModelMetaclass, models.Model)):
    '''
    This class contains the functionality required to auto-downcast a model based
    on its ``type`` attribute.

    To use, simply subclass TypedModel for your base type, and then subclass
    that for your concrete types.

    Example usage::

        from django.db import models
        from typedmodels.models import TypedModel

        class Animal(TypedModel):
            """
            Abstract model
            """
            name = models.CharField(max_length=255)

            def say_something(self):
                raise NotImplemented

            def __repr__(self):
                return u'<%s: %s>' % (self.__class__.__name__, self.name)

        class Canine(Animal):
            def say_something(self):
                return "woof"

        class Feline(Animal):
            def say_something(self):
                return "meoww"
    '''

    # Dotted "app_label.model_name" identifier of the concrete subclass;
    # choices are filled in per-hierarchy by the metaclass.
    type = models.CharField(choices=(), max_length=255, null=False,
                            blank=False, db_index=True)

    # Class variable indicating if model should be automatically recasted
    # after initialization
    _auto_recast = True

    class Meta:
        abstract = True

    def __init__(self, *args, **kwargs):
        # Calling __init__ on base class because some functions (e.g. save())
        # need access to field values from base class.
        #
        # Move args to kwargs since base_class may have more fields defined
        # with different ordering
        args = list(args)
        if len(args) > len(self._meta.fields):
            # Daft, but matches old exception sans the err msg.
            raise IndexError("Number of args exceeds number of fields")
        for field_value, field in zip(args, self._meta.fields):
            kwargs[field.attname] = field_value

        args = []  # args were all converted to kwargs
        if self.base_class:
            # Temporarily masquerade as the base class so Model.__init__ sees
            # the full field set, then restore.
            before_class = self.__class__
            self.__class__ = self.base_class
        else:
            before_class = None
        super(TypedModel, self).__init__(*args, **kwargs)
        if before_class:
            self.__class__ = before_class
        if self._auto_recast:
            self.recast()

    def recast(self, typ=None):
        """Swap ``self.__class__`` to the concrete subclass named by
        ``typ`` (or by ``self.type``), validating against the registry."""
        if not self.type:
            if not hasattr(self, '_typedmodels_type'):
                # Ideally we'd raise an error here, but the django admin
                # likes to call model() and doesn't expect an error.
                # Instead, we raise an error when the object is saved.
                return
            self.type = self._typedmodels_type

        # Find the hierarchy root that owns the type registry.
        for base in self.__class__.mro():
            if issubclass(base, TypedModel) and \
                    hasattr(base, '_typedmodels_registry'):
                break
        else:
            raise ValueError("No suitable base class found to recast!")

        if typ is None:
            typ = self.type
        else:
            if isinstance(typ, type) and issubclass(typ, base):
                # Accept a class object and normalise it to its identifier.
                if django.VERSION < (1, 7):
                    model_name = typ._meta.module_name
                else:
                    model_name = typ._meta.model_name
                typ = '%s.%s' % (typ._meta.app_label, model_name)

        try:
            correct_cls = base._typedmodels_registry[typ]
        except KeyError:
            raise ValueError("Invalid %s identifier: %r" % (base.__name__, typ))

        self.type = typ

        current_cls = self.__class__

        if current_cls != correct_cls:
            if self._deferred:
                # create a new deferred class based on correct_cls instead
                # of current_cls
                correct_cls = get_deferred_class_for_instance(self, correct_cls)
            self.__class__ = correct_cls

    def save(self, *args, **kwargs):
        """Refuse to persist an instance that was never given a type."""
        if not getattr(self, '_typedmodels_type', None):
            raise RuntimeError("Untyped %s cannot be saved."
                               % self.__class__.__name__)
        return super(TypedModel, self).save(*args, **kwargs)
return not self.future if self.past or other.past: return not other.past return (self.year, self.month, self.day) < (other.year, other.month, other.day) def __len__(self): return len(self.__repr__()) ansi_date_re = re.compile(r'^\d{4}-\d{1,2}-\d{1,2}$') prefix_date_re = re.compile(r'^([a-zA-Z]+[.,]?) (\d{4})$') prefix_date_reverse_re = re.compile(r'^(\d{4}) ([a-zA-Z]+[,.]?)$') if django.VERSION < (1, 8,): FIELD_BASE = with_metaclass(models.SubfieldBase, models.CharField) else: FIELD_BASE = models.CharField class ApproximateDateField(FIELD_BASE): """A model field to store ApproximateDate objects in the database (as a CharField because MySQLdb intercepts dates from the database and forces them to be datetime.date()s.""" def __init__(self, *args, **kwargs): kwargs['max_length'] = 10 super(ApproximateDateField, self).__init__(*args, **kwargs) def to_python(self, value): if value in (None, ''): return None
class DocumentRelationship(six.with_metaclass(RelationshipMetaclass, RelationshipMixin)): name = _('Documents') slug = 'documents' def get_queryset(self, obj): return obj.djangobmf_document
class I18nModelForm(six.with_metaclass(ModelFormMetaclass, BaseI18nModelForm)): pass
class FilterSet(six.with_metaclass(FilterSetMetaclass, BaseFilterSet)): pass
class MPTTModel(six.with_metaclass(MPTTModelBase, models.Model)):
    """
    Base class for tree models.

    Stores each node's position with Modified Preorder Tree Traversal
    fields (left, right, tree_id, level); the concrete field names are
    resolved through ``self._mptt_meta``.
    """

    _default_manager = TreeManager()

    class Meta:
        abstract = True

    def __init__(self, *args, **kwargs):
        super(MPTTModel, self).__init__(*args, **kwargs)
        # Snapshot the tracked field values so save() can detect moves.
        self._mptt_meta.update_mptt_cached_fields(self)

    def _mpttfield(self, fieldname):
        # Map a logical mptt field name ('left', 'right', ...) to the
        # model's actual attribute via the *_attr options.
        translated_fieldname = getattr(self._mptt_meta, fieldname + '_attr')
        return getattr(self, translated_fieldname)

    @_classproperty
    def _mptt_updates_enabled(cls):
        # True unless updates were disabled via the tracking base class
        # (manager.disable_mptt_updates()).
        if not cls._mptt_tracking_base:
            return True
        return getattr(cls._mptt_tracking_base._threadlocal, 'mptt_updates_enabled', True)

    # ideally this'd be part of the _mptt_updates_enabled classproperty, but it seems
    # that settable classproperties are very, very hard to do! suggestions please :)
    @classmethod
    def _set_mptt_updates_enabled(cls, value):
        assert cls is cls._mptt_tracking_base,\
            "Can't enable or disable mptt updates on a non-tracking class."
        cls._threadlocal.mptt_updates_enabled = value

    @_classproperty
    def _mptt_is_tracking(cls):
        # Tracking is "on" when the thread-local change set exists.
        if not cls._mptt_tracking_base:
            return False
        if not hasattr(cls._threadlocal, 'mptt_delayed_tree_changes'):
            # happens the first time this is called from each thread
            cls._threadlocal.mptt_delayed_tree_changes = None
        return cls._threadlocal.mptt_delayed_tree_changes is not None

    @classmethod
    def _mptt_start_tracking(cls):
        # Begin collecting tree_ids modified during delay_mptt_updates().
        assert cls is cls._mptt_tracking_base,\
            "Can't start or stop mptt tracking on a non-tracking class."
        assert not cls._mptt_is_tracking, "mptt tracking is already started."
        cls._threadlocal.mptt_delayed_tree_changes = set()

    @classmethod
    def _mptt_stop_tracking(cls):
        # Stop tracking and return the set of modified tree_ids.
        assert cls is cls._mptt_tracking_base,\
            "Can't start or stop mptt tracking on a non-tracking class."
        assert cls._mptt_is_tracking, "mptt tracking isn't started."
        results = cls._threadlocal.mptt_delayed_tree_changes
        cls._threadlocal.mptt_delayed_tree_changes = None
        return results

    @classmethod
    def _mptt_track_tree_modified(cls, tree_id):
        if not cls._mptt_is_tracking:
            return
        cls._threadlocal.mptt_delayed_tree_changes.add(tree_id)

    @classmethod
    def _mptt_track_tree_insertions(cls, tree_id, num_inserted):
        # Shift tracked tree_ids after num_inserted trees were inserted
        # (or removed, when num_inserted is negative) at position tree_id.
        if not cls._mptt_is_tracking:
            return
        changes = cls._threadlocal.mptt_delayed_tree_changes
        if not num_inserted or not changes:
            return

        if num_inserted < 0:
            deleted = range(tree_id + num_inserted, -num_inserted)
            changes.difference_update(deleted)
        new_changes = set(
            (t + num_inserted if t >= tree_id else t) for t in changes)
        cls._threadlocal.mptt_delayed_tree_changes = new_changes

    @raise_if_unsaved
    def get_ancestors(self, ascending=False, include_self=False):
        """
        Creates a ``QuerySet`` containing the ancestors of this model
        instance.

        This defaults to being in descending order (root ancestor first,
        immediate parent last); passing ``True`` for the ``ascending``
        argument will reverse the ordering (immediate parent first, root
        ancestor last).

        If ``include_self`` is ``True``, the ``QuerySet`` will also
        include this model instance.
        """
        if self.is_root_node():
            if not include_self:
                return self._tree_manager.none()
            else:
                # Filter on pk for efficiency.
                return self._tree_manager.filter(pk=self.pk)

        opts = self._mptt_meta

        order_by = opts.left_attr
        if ascending:
            order_by = '-' + order_by

        left = getattr(self, opts.left_attr)
        right = getattr(self, opts.right_attr)

        if not include_self:
            # Narrow the interval so this node itself is excluded.
            left -= 1
            right += 1

        qs = self._tree_manager._mptt_filter(
            left__lte=left,
            right__gte=right,
            tree_id=self._mpttfield('tree_id'),
        )

        return qs.order_by(order_by)

    @raise_if_unsaved
    def get_family(self):
        """
        Returns a ``QuerySet`` containing the ancestors, the model itself
        and the descendants, in tree order.
        """
        opts = self._mptt_meta

        left = getattr(self, opts.left_attr)
        right = getattr(self, opts.right_attr)

        # Ancestors enclose our interval; descendants fall inside it.
        ancestors = Q(**{
            "%s__lte" % opts.left_attr: left,
            "%s__gte" % opts.right_attr: right,
            opts.tree_id_attr: self._mpttfield('tree_id'),
        })

        descendants = Q(**{
            "%s__gte" % opts.left_attr: left,
            "%s__lte" % opts.left_attr: right,
            opts.tree_id_attr: self._mpttfield('tree_id'),
        })

        return self._tree_manager.filter(ancestors | descendants)

    @raise_if_unsaved
    def get_children(self):
        """
        Returns a ``QuerySet`` containing the immediate children of this
        model instance, in tree order.

        The benefit of using this method over the reverse relation
        provided by the ORM to the instance's children is that a
        database query can be avoided in the case where the instance is
        a leaf node (it has no children).

        If called from a template where the tree has been walked by the
        ``cache_tree_children`` filter, no database query is required.
        """
        if hasattr(self, '_cached_children'):
            qs = self._tree_manager.filter(pk__in=[n.pk for n in self._cached_children])
            qs._result_cache = self._cached_children
            return qs
        else:
            if self.is_leaf_node():
                return self._tree_manager.none()

            return self._tree_manager._mptt_filter(parent=self)

    @raise_if_unsaved
    def get_descendants(self, include_self=False):
        """
        Creates a ``QuerySet`` containing descendants of this model
        instance, in tree order.

        If ``include_self`` is ``True``, the ``QuerySet`` will also
        include this model instance.
        """
        if self.is_leaf_node():
            if not include_self:
                return self._tree_manager.none()
            else:
                return self._tree_manager.filter(pk=self.pk)

        opts = self._mptt_meta
        left = getattr(self, opts.left_attr)
        right = getattr(self, opts.right_attr)

        if not include_self:
            left += 1
            right -= 1

        return self._tree_manager._mptt_filter(
            tree_id=self._mpttfield('tree_id'),
            left__gte=left,
            left__lte=right
        )

    def get_descendant_count(self):
        """
        Returns the number of descendants this model instance has.
        """
        if self._mpttfield('right') is None:
            # node not saved yet
            return 0
        else:
            # (right - left - 1) / 2 is the standard MPTT subtree size.
            return (self._mpttfield('right') - self._mpttfield('left') - 1) // 2

    @raise_if_unsaved
    def get_leafnodes(self, include_self=False):
        """
        Creates a ``QuerySet`` containing leafnodes of this model
        instance, in tree order.

        If ``include_self`` is ``True``, the ``QuerySet`` will also
        include this model instance (if it is a leaf node)
        """
        descendants = self.get_descendants(include_self=include_self)

        # A leaf has right == left + 1.
        return self._tree_manager._mptt_filter(
            descendants,
            left=(models.F(self._mptt_meta.right_attr) - 1)
        )

    @raise_if_unsaved
    def get_next_sibling(self, *filter_args, **filter_kwargs):
        """
        Returns this model instance's next sibling in the tree, or
        ``None`` if it doesn't have a next sibling.
        """
        qs = self._tree_manager.filter(*filter_args, **filter_kwargs)
        if self.is_root_node():
            qs = self._tree_manager._mptt_filter(
                qs,
                parent=None,
                tree_id__gt=self._mpttfield('tree_id'),
            )
        else:
            qs = self._tree_manager._mptt_filter(
                qs,
                parent__pk=getattr(self, self._mptt_meta.parent_attr + '_id'),
                left__gt=self._mpttfield('right'),
            )

        siblings = qs[:1]
        return siblings and siblings[0] or None

    @raise_if_unsaved
    def get_previous_sibling(self, *filter_args, **filter_kwargs):
        """
        Returns this model instance's previous sibling in the tree, or
        ``None`` if it doesn't have a previous sibling.
        """
        opts = self._mptt_meta
        qs = self._tree_manager.filter(*filter_args, **filter_kwargs)
        if self.is_root_node():
            qs = self._tree_manager._mptt_filter(
                qs,
                parent=None,
                tree_id__lt=self._mpttfield('tree_id'),
            )
            qs = qs.order_by('-' + opts.tree_id_attr)
        else:
            qs = self._tree_manager._mptt_filter(
                qs,
                parent__pk=getattr(self, opts.parent_attr + '_id'),
                right__lt=self._mpttfield('left'),
            )
            qs = qs.order_by('-' + opts.right_attr)

        siblings = qs[:1]
        return siblings and siblings[0] or None

    @raise_if_unsaved
    def get_root(self):
        """
        Returns the root node of this model instance's tree.
        """
        if self.is_root_node() and type(self) == self._tree_manager.tree_model:
            return self

        return self._tree_manager._mptt_filter(
            tree_id=self._mpttfield('tree_id'),
            parent=None,
        ).get()

    @raise_if_unsaved
    def get_siblings(self, include_self=False):
        """
        Creates a ``QuerySet`` containing siblings of this model
        instance. Root nodes are considered to be siblings of other root
        nodes.

        If ``include_self`` is ``True``, the ``QuerySet`` will also
        include this model instance.
        """
        if self.is_root_node():
            queryset = self._tree_manager._mptt_filter(parent=None)
        else:
            parent_id = getattr(self, self._mptt_meta.parent_attr + '_id')
            queryset = self._tree_manager._mptt_filter(parent__pk=parent_id)
        if not include_self:
            queryset = queryset.exclude(pk=self.pk)
        return queryset

    def get_level(self):
        """
        Returns the level of this node (distance from root)
        """
        return getattr(self, self._mptt_meta.level_attr)

    def insert_at(self, target, position='first-child', save=False,
                  allow_existing_pk=False):
        """
        Convenience method for calling ``TreeManager.insert_node`` with this
        model instance.
        """
        self._tree_manager.insert_node(
            self, target, position, save, allow_existing_pk=allow_existing_pk)

    def is_child_node(self):
        """
        Returns ``True`` if this model instance is a child node, ``False``
        otherwise.
        """
        return not self.is_root_node()

    def is_leaf_node(self):
        """
        Returns ``True`` if this model instance is a leaf node (it has no
        children), ``False`` otherwise.
        """
        return not self.get_descendant_count()

    def is_root_node(self):
        """
        Returns ``True`` if this model instance is a root node,
        ``False`` otherwise.
        """
        return getattr(self, self._mptt_meta.parent_attr + '_id') is None

    @raise_if_unsaved
    def is_descendant_of(self, other, include_self=False):
        """
        Returns ``True`` if this model is a descendant of the given node,
        ``False`` otherwise.
        If include_self is True, also returns True if the two nodes are the same node.
        """
        opts = self._mptt_meta

        if include_self and other.pk == self.pk:
            return True

        if getattr(self, opts.tree_id_attr) != getattr(other, opts.tree_id_attr):
            return False
        else:
            left = getattr(self, opts.left_attr)
            right = getattr(self, opts.right_attr)

            return (
                left > getattr(other, opts.left_attr) and
                right < getattr(other, opts.right_attr))

    @raise_if_unsaved
    def is_ancestor_of(self, other, include_self=False):
        """
        Returns ``True`` if this model is an ancestor of the given node,
        ``False`` otherwise.
        If include_self is True, also returns True if the two nodes are the same node.
        """
        if include_self and other.pk == self.pk:
            return True
        return other.is_descendant_of(self)

    def move_to(self, target, position='first-child'):
        """
        Convenience method for calling ``TreeManager.move_node`` with this
        model instance.

        NOTE: This is a low-level method; it does NOT respect
        ``MPTTMeta.order_insertion_by``.  In most cases you should just
        move the node yourself by setting node.parent.
        """
        self._tree_manager.move_node(self, target, position)

    def _is_saved(self, using=None):
        # Heuristic: saved means we have a pk and tree fields populated.
        # For proxy/inherited pks, double-check with an exists() query
        # (cached in _mptt_saved).
        if not self.pk or self._mpttfield('tree_id') is None:
            return False
        opts = self._meta
        if opts.pk.rel is None:
            return True
        else:
            if not hasattr(self, '_mptt_saved'):
                manager = self.__class__._base_manager
                manager = manager.using(using)
                self._mptt_saved = manager.filter(pk=self.pk).exists()
            return self._mptt_saved

    def _get_user_field_names(self):
        """ Returns the list of user defined (i.e. non-mptt internal) field names. """
        from django.db.models.fields import AutoField

        field_names = []
        internal_fields = (self._mptt_meta.left_attr, self._mptt_meta.right_attr,
                           self._mptt_meta.tree_id_attr, self._mptt_meta.level_attr,
                           self._mptt_meta.parent_attr)
        for field in self._meta.fields:
            if (field.name not in internal_fields) and (not isinstance(field, AutoField)):
                field_names.append(field.name)
        return field_names

    def save(self, *args, **kwargs):
        """
        If this is a new node, sets tree fields up before it is inserted
        into the database, making room in the tree structure as neccessary,
        defaulting to making the new node the last child of its parent.

        It the node's left and right edge indicators already been set, we
        take this as indication that the node has already been set up for
        insertion, so its tree fields are left untouched.

        If this is an existing node and its parent has been changed,
        performs reparenting in the tree structure, defaulting to making the
        node the last child of its new parent.

        In either case, if the node's class has its ``order_insertion_by``
        tree option set, the node will be inserted or moved to the
        appropriate position to maintain ordering by the specified field.
        """
        do_updates = self.__class__._mptt_updates_enabled
        track_updates = self.__class__._mptt_is_tracking

        opts = self._mptt_meta

        if not (do_updates or track_updates):
            # inside manager.disable_mptt_updates(), don't do any updates.
            # unless we're also inside TreeManager.delay_mptt_updates()
            if self._mpttfield('left') is None:
                # we need to set *some* values, though don't care too much what.
                parent = getattr(self, '_%s_cache' % opts.parent_attr, None)
                # if we have a cached parent, have a stab at getting
                # possibly-correct values.  otherwise, meh.
                if parent:
                    left = parent._mpttfield('left') + 1
                    setattr(self, opts.left_attr, left)
                    setattr(self, opts.right_attr, left + 1)
                    setattr(self, opts.level_attr, parent._mpttfield('level') + 1)
                    setattr(self, opts.tree_id_attr, parent._mpttfield('tree_id'))
                    self._tree_manager._post_insert_update_cached_parent_right(parent, 2)
                else:
                    setattr(self, opts.left_attr, 1)
                    setattr(self, opts.right_attr, 2)
                    setattr(self, opts.level_attr, 0)
                    setattr(self, opts.tree_id_attr, 0)
            return super(MPTTModel, self).save(*args, **kwargs)

        parent_id = opts.get_raw_field_value(self, opts.parent_attr)

        # determine whether this instance is already in the db
        force_update = kwargs.get('force_update', False)
        force_insert = kwargs.get('force_insert', False)
        collapse_old_tree = None

        if force_update or (not force_insert and self._is_saved(using=kwargs.get('using'))):
            # it already exists, so do a move
            old_parent_id = self._mptt_cached_fields[opts.parent_attr]
            same_order = old_parent_id == parent_id
            if same_order and len(self._mptt_cached_fields) > 1:
                get_raw_field_value = opts.get_raw_field_value
                for field_name, old_value in self._mptt_cached_fields.items():
                    if old_value != get_raw_field_value(self, field_name):
                        same_order = False
                        break
                if not do_updates and not same_order:
                    same_order = True
                    self.__class__._mptt_track_tree_modified(self._mpttfield('tree_id'))
            elif (not do_updates) and not same_order and old_parent_id is None:
                # the old tree no longer exists, so we need to collapse it.
                collapse_old_tree = self._mpttfield('tree_id')
                parent = getattr(self, opts.parent_attr)
                tree_id = parent._mpttfield('tree_id')
                left = parent._mpttfield('left') + 1
                self.__class__._mptt_track_tree_modified(tree_id)
                setattr(self, opts.tree_id_attr, tree_id)
                setattr(self, opts.left_attr, left)
                setattr(self, opts.right_attr, left + 1)
                setattr(self, opts.level_attr, parent._mpttfield('level') + 1)
                same_order = True

            if not same_order:
                opts.set_raw_field_value(self, opts.parent_attr, old_parent_id)
                try:
                    right_sibling = None
                    if opts.order_insertion_by:
                        right_sibling = opts.get_ordered_insertion_target(
                            self, getattr(self, opts.parent_attr))

                    if parent_id is not None:
                        parent = getattr(self, opts.parent_attr)
                        # If we aren't already a descendant of the new parent,
                        # we need to update the parent.rght so things like
                        # get_children and get_descendant_count work correctly.
                        update_cached_parent = (
                            getattr(self, opts.tree_id_attr) != getattr(parent, opts.tree_id_attr) or
                            getattr(self, opts.left_attr) < getattr(parent, opts.left_attr) or
                            getattr(self, opts.right_attr) > getattr(parent, opts.right_attr))

                    if right_sibling:
                        self._tree_manager._move_node(self, right_sibling, 'left', save=False)
                    else:
                        # Default movement
                        if parent_id is None:
                            root_nodes = self._tree_manager.root_nodes()
                            try:
                                rightmost_sibling = root_nodes.exclude(
                                    pk=self.pk).order_by('-' + opts.tree_id_attr)[0]
                                self._tree_manager._move_node(
                                    self, rightmost_sibling, 'right', save=False)
                            except IndexError:
                                pass
                        else:
                            self._tree_manager._move_node(
                                self, parent, 'last-child', save=False)

                    if parent_id is not None and update_cached_parent:
                        # Update rght of cached parent
                        right_shift = 2 * (self.get_descendant_count() + 1)
                        self._tree_manager._post_insert_update_cached_parent_right(
                            parent, right_shift)
                finally:
                    # Make sure the new parent is always
                    # restored on the way out in case of errors.
                    opts.set_raw_field_value(self, opts.parent_attr, parent_id)
            else:
                opts.set_raw_field_value(self, opts.parent_attr, parent_id)
            # FIX: compare the version tuple, not the version string.
            # django.get_version() >= '1.5' compares lexicographically and
            # wrongly returns False for Django >= 1.10 ('1.10...' < '1.5'),
            # which silently skipped populating update_fields.
            if (not track_updates) and (django.VERSION >= (1, 5)):
                # When not using delayed/disabled updates,
                # populate update_fields (Django 1.5 and later) with user defined model fields.
                # This helps preserve tree integrity when saving model on top of a modified tree.
                if len(args) > 3:
                    if not args[3]:
                        # FIX: args is a tuple and does not support item
                        # assignment; rebuild it instead of `args[3] = ...`
                        # (which raised TypeError whenever save() was called
                        # with 4+ positional arguments).
                        args = args[:3] + (self._get_user_field_names(),) + args[4:]
                else:
                    if not kwargs.get("update_fields", None):
                        kwargs["update_fields"] = self._get_user_field_names()

        else:
            # new node, do an insert
            if (getattr(self, opts.left_attr) and getattr(self, opts.right_attr)):
                # This node has already been set up for insertion.
                pass
            else:
                parent = getattr(self, opts.parent_attr)

                right_sibling = None
                # if we're inside delay_mptt_updates, don't do queries to find
                # sibling position.  instead, do default insertion. correct
                # positions will be found during partial rebuild later.
                # *unless* this is a root node. (as update tracking doesn't
                # handle re-ordering of trees.)
                if do_updates or parent is None:
                    if opts.order_insertion_by:
                        right_sibling = opts.get_ordered_insertion_target(self, parent)

                if right_sibling:
                    self.insert_at(right_sibling, 'left', allow_existing_pk=True)

                    if parent:
                        # since we didn't insert into parent, we have to update parent.rght
                        # here instead of in TreeManager.insert_node()
                        right_shift = 2 * (self.get_descendant_count() + 1)
                        self._tree_manager._post_insert_update_cached_parent_right(
                            parent, right_shift)
                else:
                    # Default insertion
                    self.insert_at(parent, position='last-child', allow_existing_pk=True)
        try:
            super(MPTTModel, self).save(*args, **kwargs)
        finally:
            if collapse_old_tree is not None:
                self._tree_manager._create_tree_space(collapse_old_tree, -1)

        self._mptt_saved = True
        opts.update_mptt_cached_fields(self)
    save.alters_data = True

    def delete(self, *args, **kwargs):
        """Calling ``delete`` on a node will delete it as well as its full
        subtree, as opposed to reattaching all the subnodes to its parent node.

        There are no argument specific to a MPTT model, all the arguments will
        be passed directly to the django's ``Model.delete``.

        ``delete`` will not return anything. """
        # Close the interval this subtree occupied before removing the rows.
        tree_width = (self._mpttfield('right') -
                      self._mpttfield('left') + 1)
        target_right = self._mpttfield('right')
        tree_id = self._mpttfield('tree_id')
        self._tree_manager._close_gap(tree_width, target_right, tree_id)
        parent = getattr(self, '_%s_cache' % self._mptt_meta.parent_attr, None)
        if parent:
            right_shift = -self.get_descendant_count() - 2
            self._tree_manager._post_insert_update_cached_parent_right(parent, right_shift)

        super(MPTTModel, self).delete(*args, **kwargs)
    delete.alters_data = True
class BaseCurrency(six.with_metaclass(CurrencyMetaclass, object)):
    # Base class for currency value objects. The metaclass is expected to
    # provide per-currency attributes (e.g. ``name``/``symbol`` used below)
    # — TODO confirm against CurrencyMetaclass.
    formatstr = pgettext_lazy("currency formatting", '%(val)s %(sym)s')
    base_precision = 2  # number of decimal digits always kept

    def __init__(self, value=None, precision=0):
        # ``precision`` extends the base precision; the stored amount may
        # carry up to base_precision + precision decimal places.
        self.precision = self.base_precision + precision
        if value is None:
            self.value = None
        else:
            self.set(value)

    def __str__(self):
        # Valueless instances render as the currency name only.
        if self.value is None:
            value = self.name
        else:
            value = self.formatstr % {
                'val': number_format(self.value, force_grouping=True),
                'sym': self.symbol
            }
        return force_text(value)

    def __repr__(self):
        return "<%s object at 0x%x>" % (self.__class__.__name__, id(self))

    # logic .....

    def __bool__(self):
        # False for both "no value" (None) and a zero amount.
        return bool(self.value)

    def __nonzero__(self):
        # Python 2 alias for __bool__.
        return self.__bool__()

    # Ordering comparisons are only defined between the *same* currency
    # class; anything else raises TypeError.
    def __lt__(self, other):
        if self.__class__ == other.__class__:
            return self.value < other.value
        raise TypeError("cannot compare '%s' with '%s'" % (self.__class__.__name__, other.__class__.__name__))

    def __le__(self, other):
        if self.__class__ == other.__class__:
            return self.value <= other.value
        raise TypeError("cannot compare '%s' with '%s'" % (self.__class__.__name__, other.__class__.__name__))

    def __gt__(self, other):
        if self.__class__ == other.__class__:
            return self.value > other.value
        raise TypeError("cannot compare '%s' with '%s'" % (self.__class__.__name__, other.__class__.__name__))

    def __ge__(self, other):
        if self.__class__ == other.__class__:
            return self.value >= other.value
        raise TypeError("cannot compare '%s' with '%s'" % (self.__class__.__name__, other.__class__.__name__))

    def __eq__(self, other):
        # Unlike ordering, equality against a different type is simply False.
        if self.__class__ == other.__class__:
            return self.value == other.value
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    # math .....

    def __add__(self, other):
        """
        Addition of currencies ... should only work with currencies!
        """
        if self.__class__ == other.__class__:
            return self.__class__(self.value + other.value)
        raise TypeError("You can not add '%s' to '%s'" % (self.__class__.__name__, other.__class__.__name__))

    def __sub__(self, other):
        """
        should only work with currencies!
        """
        if self.__class__ == other.__class__:
            return self.__class__(self.value - other.value)
        raise TypeError("You can not substract '%s' from '%s'" % (self.__class__.__name__, other.__class__.__name__))

    def __mul__(self, other):
        """
        Multiplication should work with int, float, decimal, but NOT with currency (it makes no sense)
        """
        # floats go through str() to avoid binary-float representation noise.
        if isinstance(other, float):
            return self.__class__(Decimal(str(other)) * self.value)
        elif isinstance(other, (six.integer_types, Decimal)):
            return self.__class__(other * self.value)
        raise TypeError("You can not multiply '%s' and '%s'" % (self.__class__.__name__, other.__class__.__name__))

    def __rmul__(self, other):
        return self.__mul__(other)

    def __floordiv__(self, other):
        """
        Division should work with int, float, decimal, returning a currency and with Currency returning a decimal
        """
        if isinstance(other, float):
            return self.__class__(self.value // Decimal(str(other)))
        elif isinstance(other, (six.integer_types, Decimal)):
            return self.__class__(self.value // other)
        elif self.__class__ == other.__class__:
            # currency // currency yields a plain Decimal ratio.
            return self.value // other.value
        raise TypeError("You can not divide '%s' by '%s'" % (self.__class__.__name__, other.__class__.__name__))

    # functions .....

    def set(self, value):
        # Store ``value`` as a Decimal, normalised to at least
        # ``base_precision`` and at most ``precision`` decimal places.
        if isinstance(value, Decimal):
            self.value = value
        else:
            self.value = Decimal(value)

        # Fewer decimal places than base precision: pad up (exponent is
        # negative for fractional digits).
        if self.value.as_tuple().exponent > -self.base_precision:
            self.value = self.value.quantize(
                Decimal('1E-%s' % self.base_precision))

        # TODO: move this to validation
        # More decimal places than allowed: round down to ``precision``.
        if self.value.as_tuple().exponent < -self.precision:
            self.value = self.value.quantize(Decimal('1E-%s' % self.precision))
class ModelFilterCollection(
        six.with_metaclass(ModelFilterCollectionMetaclass, BaseModelFilterCollection)):
    # Concrete model-bound filter collection for users to subclass. All
    # behaviour lives in BaseModelFilterCollection; the metaclass collects
    # the declared filters, so the body stays empty.
    pass
class BaseCustomer(with_metaclass(deferred.ForeignKeyBuilder, models.Model)):
    """
    Base class for shop customers.

    Customer is a profile model that extends
    the django User model if a customer is authenticated. On checkout, a User
    object is created for anonymous customers also (with unusable password).
    """
    # One-to-one link to the auth user; also serves as this model's pk.
    user = models.OneToOneField(
        settings.AUTH_USER_MODEL,
        primary_key=True,
    )

    # Recognition state (unrecognized / guest / registered), see CustomerState.
    recognized = ChoiceEnumField(
        _("Recognized as"),
        enum_type=CustomerState,
        help_text=_("Designates the state the customer is recognized as."),
    )

    last_access = models.DateTimeField(
        _("Last accessed"),
        default=timezone.now,
    )

    # Free-form JSON payload attached to the customer; not editable in admin.
    extra = JSONField(
        editable=False,
        verbose_name=_("Extra information about this customer"),
    )

    objects = CustomerManager()

    class Meta:
        abstract = True

    def __str__(self):
        return self.get_username()

    def get_username(self):
        # Delegate to the wrapped User object.
        return self.user.get_username()

    def get_full_name(self):
        return self.user.get_full_name()

    @property
    def first_name(self):
        # pending deprecation: warnings.warn("Property first_name is deprecated and will be removed")
        return self.user.first_name

    @first_name.setter
    def first_name(self, value):
        # pending deprecation: warnings.warn("Property first_name is deprecated and will be removed")
        self.user.first_name = value

    @property
    def last_name(self):
        # pending deprecation: warnings.warn("Property last_name is deprecated and will be removed")
        return self.user.last_name

    @last_name.setter
    def last_name(self, value):
        # pending deprecation: warnings.warn("Property last_name is deprecated and will be removed")
        self.user.last_name = value

    @property
    def email(self):
        return self.user.email

    @email.setter
    def email(self, value):
        self.user.email = value

    @property
    def date_joined(self):
        return self.user.date_joined

    @property
    def last_login(self):
        return self.user.last_login

    @property
    def groups(self):
        return self.user.groups

    def is_anonymous(self):
        # Anonymous covers both not-yet-recognized visitors and guests.
        return self.recognized in (CustomerState.UNRECOGNIZED, CustomerState.GUEST)

    def is_authenticated(self):
        return self.recognized is CustomerState.REGISTERED

    def is_recognized(self):
        """
        Return True if the customer is associated with a User account.
        Unrecognized customers have accessed the shop, but did not register
        an account nor declared themselves as guests.
        """
        return self.recognized is not CustomerState.UNRECOGNIZED

    def is_guest(self):
        """
        Return true if the customer isn't associated with valid User account, but declared
        himself as a guest, leaving their email address.
        """
        return self.recognized is CustomerState.GUEST

    def recognize_as_guest(self, request=None, commit=True):
        """
        Recognize the current customer as guest customer.
        """
        # Only transition (and emit the signal) when the state actually changes.
        if self.recognized != CustomerState.GUEST:
            self.recognized = CustomerState.GUEST
            if commit:
                self.save(update_fields=['recognized'])
            customer_recognized.send(sender=self.__class__, customer=self, request=request)

    def is_registered(self):
        """
        Return true if the customer has registered himself.
        """
        return self.recognized is CustomerState.REGISTERED

    def recognize_as_registered(self, request=None, commit=True):
        """
        Recognize the current customer as registered customer.
        """
        if self.recognized != CustomerState.REGISTERED:
            self.recognized = CustomerState.REGISTERED
            if commit:
                self.save(update_fields=['recognized'])
            customer_recognized.send(sender=self.__class__, customer=self, request=request)

    def is_visitor(self):
        """
        Always False for instantiated Customer objects.
        """
        return False

    def is_expired(self):
        """
        Return True if the session of an unrecognized customer expired or is not decodable.
        Registered customers never expire.
        Guest customers only expire, if they failed fulfilling the purchase.
        """
        # Unrecognized customers store their session key encoded in the
        # username; an undecodable username counts as expired.
        if self.recognized is CustomerState.UNRECOGNIZED:
            try:
                session_key = CustomerManager.decode_session_key(
                    self.user.username)
                return not SessionStore.exists(session_key)
            except KeyError:
                msg = "Unable to decode username '{}' as session key"
                warnings.warn(msg.format(self.user.username))
                return True
        return False

    def get_or_assign_number(self):
        """
        Hook to get or to assign the customers number. It is invoked, every time an Order object
        is created. Using a customer number, which is different from the primary key is useful for
        merchants, wishing to assign sequential numbers only to customers which actually bought
        something. Otherwise the customer number (primary key) is increased whenever a site visitor
        puts something into the cart. If he never proceeds to checkout, that entity expires and may
        be deleted at any time in the future.
        """
        return self.get_number()

    def get_number(self):
        """
        Hook to get the customer's number. Customers haven't purchased anything
        may return None.
        """
        return str(self.user_id)

    def save(self, **kwargs):
        # Persist the wrapped User first, unless this is a partial
        # update_fields save (then the User is assumed unchanged).
        if 'update_fields' not in kwargs:
            self.user.save(using=kwargs.get('using', DEFAULT_DB_ALIAS))
        super(BaseCustomer, self).save(**kwargs)

    def delete(self, *args, **kwargs):
        if self.user.is_active and self.recognized is CustomerState.UNRECOGNIZED:
            # invalid state of customer, keep the referred User
            super(BaseCustomer, self).delete(*args, **kwargs)
        else:
            # also delete self through cascading
            self.user.delete(*args, **kwargs)
class Widget(six.with_metaclass(MediaDefiningClass)):
    # Base class for form widgets. MediaDefiningClass collects any declared
    # ``Media`` inner class into a ``media`` property.
    is_hidden = False  # Determines whether this corresponds to an <input type="hidden">.
    needs_multipart_form = False  # Determines whether this widget needs a multipart form.
    is_localized = False
    is_required = False

    def __init__(self, attrs=None):
        # Copy the attrs dict so instances never share the caller's mapping.
        if attrs is not None:
            self.attrs = attrs.copy()
        else:
            self.attrs = {}

    def __deepcopy__(self, memo):
        # Shallow-copy the widget but give the copy its own attrs dict;
        # nothing else on a widget needs deep copying.
        obj = copy.copy(self)
        obj.attrs = self.attrs.copy()
        memo[id(self)] = obj
        return obj

    def subwidgets(self, name, value, attrs=None, choices=()):
        """
        Yields all "subwidgets" of this widget. Used only by RadioSelect to
        allow template access to individual <input type="radio"> buttons.

        Arguments are the same as for render().
        """
        yield SubWidget(self, name, value, attrs, choices)

    def render(self, name, value, attrs=None):
        """
        Returns this Widget rendered as HTML, as a Unicode string.

        The 'value' given is not guaranteed to be valid input, so subclass
        implementations should program defensively.
        """
        raise NotImplementedError

    def build_attrs(self, extra_attrs=None, **kwargs):
        "Helper function for building an attribute dictionary."
        # Precedence: widget attrs < **kwargs < extra_attrs.
        attrs = dict(self.attrs, **kwargs)
        if extra_attrs:
            attrs.update(extra_attrs)
        return attrs

    def value_from_datadict(self, data, files, name):
        """
        Given a dictionary of data and this widget's name, returns the value
        of this widget. Returns None if it's not provided.
        """
        return data.get(name, None)

    def _has_changed(self, initial, data):
        """
        Return True if data differs from initial.
        """
        # For purposes of seeing whether something has changed, None is
        # the same as an empty string, if the data or initial value we get
        # is None, replace it w/ ''.
        if data is None:
            data_value = ''
        else:
            data_value = data
        if initial is None:
            initial_value = ''
        else:
            initial_value = initial
        # Compare as text so e.g. 1 and '1' count as unchanged.
        if force_text(initial_value) != force_text(data_value):
            return True
        return False

    def id_for_label(self, id_):
        """
        Returns the HTML ID attribute of this Widget for use by a <label>,
        given the ID of the field.

        Returns None if no ID is available.

        This hook is necessary because some widgets have multiple HTML
        elements and, thus, multiple IDs. In that case, this method should
        return an ID value that corresponds to the first ID in the widget's
        tags.
        """
        return id_
class Block(six.with_metaclass(BaseBlock, object)):
    """Abstract base class for all stream-field block types."""

    name = ''
    creation_counter = 0

    # Template context variable name under which render() exposes the value.
    TEMPLATE_VAR = 'value'

    class Meta:
        label = None
        icon = "placeholder"
        classname = None

    # Setting a 'dependencies' list serves as a shortcut for the common case
    # where a complex block type (such as struct, list or stream) relies on
    # one or more inner block objects, and needs to ensure that the responses
    # from the 'media' and 'html_declarations' include the relevant
    # declarations for those inner blocks, as well as its own. Specifying
    # these inner block objects in a 'dependencies' list means that the base
    # 'media' and 'html_declarations' methods will return those declarations;
    # the outer block type can then add its own declarations to the list by
    # overriding those methods and using super().
    dependencies = []

    def __new__(cls, *args, **kwargs):
        # adapted from django.utils.deconstruct.deconstructible; capture the
        # arguments so that we can return them in the 'deconstruct' method
        obj = super(Block, cls).__new__(cls)
        obj._constructor_args = (args, kwargs)
        return obj

    def all_blocks(self):
        """
        Return a list consisting of self and all block objects that are
        direct or indirect dependencies of this block
        """
        result = [self]
        for dep in self.dependencies:
            result.extend(dep.all_blocks())
        return result

    def all_media(self):
        """Combine the form media of self and every dependency block."""
        media = forms.Media()
        for block in self.all_blocks():
            media += block.media
        return media

    def all_html_declarations(self):
        """Concatenate the non-empty html_declarations of all blocks."""
        declarations = filter(bool, [
            block.html_declarations() for block in self.all_blocks()])
        return mark_safe('\n'.join(declarations))

    def __init__(self, **kwargs):
        # kwargs override the declared Meta options on a per-instance basis.
        self.meta = self._meta_class()
        for attr, value in kwargs.items():
            setattr(self.meta, attr, value)

        # Increase the creation counter, and save our local copy. The counter
        # gives every block definition a unique, stable ordering/prefix.
        self.creation_counter = Block.creation_counter
        Block.creation_counter += 1
        self.definition_prefix = 'blockdef-%d' % self.creation_counter

        self.label = self.meta.label or ''

    def set_name(self, name):
        """Assign this block's name; derive a label from it if none was set."""
        self.name = name
        if not self.meta.label:
            self.label = capfirst(force_text(name).replace('_', ' '))

    @property
    def media(self):
        return forms.Media()

    def html_declarations(self):
        """
        Return an HTML fragment to be rendered on the form page once per
        block definition - as opposed to once per occurrence of the block.
        For example, the block definition
            ListBlock(label="Shopping list", CharBlock(label="Product"))
        needs to output a <script type="text/template"></script> block
        containing the HTML for a 'product' text input, so that these can be
        dynamically added to the list. This template block must only occur
        once in the page, even if there are multiple 'shopping list' blocks
        on the page.

        Any element IDs used in this HTML fragment must begin with
        definition_prefix. (More precisely, they must either be
        definition_prefix itself, or begin with definition_prefix followed
        by a '-' character)
        """
        return ''

    def js_initializer(self):
        """
        Returns a Javascript expression string, or None if this block does not
        require any Javascript behaviour. This expression evaluates to an
        initializer function, a function that takes the ID prefix and applies
        JS behaviour to the block instance with that value and prefix.

        The parent block of this block (or the top-level page code) must
        ensure that this expression is not evaluated more than once. (The
        resulting initializer function can and will be called as many times
        as there are instances of this block, though.)
        """
        return None

    def render_form(self, value, prefix='', errors=None):
        """
        Render the HTML for this block with 'value' as its content.
        """
        raise NotImplementedError('%s.render_form' % self.__class__)

    def value_from_datadict(self, data, files, prefix):
        raise NotImplementedError('%s.value_from_datadict' % self.__class__)

    def bind(self, value, prefix=None, errors=None):
        """
        Return a BoundBlock which represents the association of this block
        definition with a value and a prefix (and optionally, a
        ValidationError to be rendered). BoundBlock primarily exists as a
        convenience to allow rendering within templates:
        bound_block.render() rather than blockdef.render(value, prefix)
        which can't be called from within a template.
        """
        return BoundBlock(self, value, prefix=prefix, errors=errors)

    def get_default(self):
        """
        Return this block's default value (conventionally found in
        self.meta.default), converted to the value type expected by this
        block. This caters for the case where that value type is not
        something that can be expressed statically at model definition time
        (e.g. something like StructValue which incorporates a pointer back
        to the block definition object).
        """
        return self.meta.default

    def prototype_block(self):
        """
        Return a BoundBlock that can be used as a basis for new empty block
        instances to be added on the fly (new list items, for example).
        This will have a prefix of '__PREFIX__' (to be dynamically replaced
        with a real prefix when it's inserted into the page) and a value
        equal to the block's default value.
        """
        return self.bind(self.get_default(), '__PREFIX__')

    def clean(self, value):
        """
        Validate value and return a cleaned version of it, or throw a
        ValidationError if validation fails. The thrown ValidationError
        instance will subsequently be passed to render() to display the
        error message; the ValidationError must therefore include all detail
        necessary to perform that rendering, such as identifying the
        specific child block(s) with errors, in the case of nested blocks.
        (It is suggested that you use the 'params' attribute for this;
        using error_list / error_dict is unreliable because Django tends to
        hack around with these when nested.)
        """
        return value

    def to_python(self, value):
        """
        Convert 'value' from a simple (JSON-serialisable) value to a
        (possibly complex) Python value to be used in the rest of the block
        API and within front-end templates. In simple cases this might be
        the value itself; alternatively, it might be a 'smart' version of
        the value which behaves mostly like the original value but provides
        a native HTML rendering when inserted into a template; or it might
        be something totally different (e.g. an image chooser will use the
        image ID as the clean value, and turn this back into an actual
        image object here).
        """
        return value

    def get_prep_value(self, value):
        """
        The reverse of to_python; convert the python value into
        JSON-serialisable form.
        """
        return value

    def render(self, value):
        """
        Return a text rendering of 'value', suitable for display on
        templates. By default, this will use a template if a 'template'
        property is specified on the block, and fall back on render_basic
        otherwise.
        """
        template = getattr(self.meta, 'template', None)
        if template:
            # Expose the value both as 'self' (historical) and TEMPLATE_VAR.
            return render_to_string(template, {
                'self': value,
                self.TEMPLATE_VAR: value,
            })
        else:
            return self.render_basic(value)

    def render_basic(self, value):
        """
        Return a text rendering of 'value', suitable for display on
        templates. render() will fall back on this if the block does not
        define a 'template' property.
        """
        return force_text(value)

    def get_searchable_content(self, value):
        """
        Returns a list of strings containing text content within this block
        to be used in a search engine.
        """
        return []

    def check(self, **kwargs):
        """
        Hook for the Django system checks framework - returns a list of
        django.core.checks.Error objects indicating validity errors in the
        block
        """
        return []

    def _check_name(self, **kwargs):
        """
        Helper method called by container blocks as part of the system
        checks framework, to validate that this block's name is a valid
        identifier. (Not called universally, because not all blocks need
        names)
        """
        errors = []
        # All four checks now pass the hint by keyword; two of them
        # previously passed it positionally (same behaviour, consistent
        # style with the first two calls).
        if not self.name:
            errors.append(checks.Error(
                "Block name %r is invalid" % self.name,
                hint="Block name cannot be empty",
                obj=kwargs.get('field', self),
                id='wagtailcore.E001',
            ))

        if ' ' in self.name:
            errors.append(checks.Error(
                "Block name %r is invalid" % self.name,
                hint="Block names cannot contain spaces",
                obj=kwargs.get('field', self),
                id='wagtailcore.E001',
            ))

        if '-' in self.name:
            errors.append(checks.Error(
                "Block name %r is invalid" % self.name,
                hint="Block names cannot contain dashes",
                obj=kwargs.get('field', self),
                id='wagtailcore.E001',
            ))

        if self.name and self.name[0].isdigit():
            errors.append(checks.Error(
                "Block name %r is invalid" % self.name,
                hint="Block names cannot begin with a digit",
                obj=kwargs.get('field', self),
                id='wagtailcore.E001',
            ))

        return errors

    def id_for_label(self, prefix):
        """
        Return the ID to be used as the 'for' attribute of <label> elements
        that refer to this block, when the given field prefix is in use.
        Return None if no 'for' attribute should be used.
        """
        return None

    def deconstruct(self):
        # adapted from django.utils.deconstruct.deconstructible
        module_name = self.__module__
        name = self.__class__.__name__

        # Make sure it's actually there and not an inner class
        module = import_module(module_name)
        if not hasattr(module, name):
            raise ValueError(
                "Could not find object %s in %s.\n"
                "Please note that you cannot serialize things like inner "
                "classes. Please move the object into the main module "
                "body to use migrations.\n" % (name, module_name))

        # if the module defines a DECONSTRUCT_ALIASES dictionary, see if the
        # class has an entry in there; if so, use that instead of the real path
        try:
            path = module.DECONSTRUCT_ALIASES[self.__class__]
        except (AttributeError, KeyError):
            path = '%s.%s' % (module_name, name)

        return (
            path,
            self._constructor_args[0],
            self._constructor_args[1],
        )

    def __eq__(self, other):
        """
        The deep_deconstruct method in
        django.db.migrations.autodetector.MigrationAutodetector does not
        recurse into arbitrary lists and dicts. As a result, when it is
        passed a field such as:
            StreamField([
                ('heading', CharBlock()),
            ])
        the CharBlock object will be left in its constructed form. This
        causes problems when MigrationAutodetector compares two separate
        instances of the StreamField from different project states: since
        the CharBlocks are different objects, it will report a change where
        there isn't one.

        To prevent this, we implement the equality operator on Block
        instances such that the two CharBlocks are reported as equal. Since
        block objects are intended to be immutable with the exception of
        set_name(), it is sufficient to compare the 'name' property and the
        constructor args/kwargs of the two block objects. The 'deconstruct'
        method provides a convenient way to access the latter.
        """
        if not isinstance(other, Block):
            # if the other object isn't a block at all, it clearly isn't equal.
            return False

        # Note that we do not require the two blocks to be of the exact same
        # class. This is because we may wish the following blocks to be
        # considered equal:
        #
        #     class FooBlock(StructBlock):
        #         first_name = CharBlock()
        #         surname = CharBlock()
        #
        #     class BarBlock(StructBlock):
        #         first_name = CharBlock()
        #         surname = CharBlock()
        #
        #     FooBlock() == BarBlock() == StructBlock([
        #         ('first_name', CharBlock()), ('surname', CharBlock())])
        #
        # For this to work, StructBlock will need to ensure that
        # 'deconstruct' returns the same signature in all of these cases,
        # including reporting StructBlock as the path:
        #
        #     FooBlock().deconstruct() == (
        #         'wagtail.wagtailcore.blocks.StructBlock',
        #         [('first_name', CharBlock()), ('surname', CharBlock())],
        #         {}
        #     )
        #
        # This has the bonus side effect that the StructBlock field
        # definition gets frozen into the migration, rather than leaving the
        # migration vulnerable to future changes to FooBlock / BarBlock in
        # models.py.
        return (self.name == other.name) and (
            self.deconstruct() == other.deconstruct())

    def __ne__(self, other):
        return not self.__eq__(other)

    # Making block instances hashable in a way that's consistent with __eq__
    # is non-trivial, because self.deconstruct() is liable to contain
    # unhashable data (e.g. lists and dicts). So let's set Block to be
    # explicitly unhashable - Python 3 will do this automatically when
    # defining __eq__, but Python 2 won't, and we'd like the behaviour to be
    # consistent on both.
    __hash__ = None
class Occurrence(with_metaclass(ModelBase, *get_model_bases('Occurrence'))):
    """One concrete occurrence of an Event at a specific start/end time."""

    event = models.ForeignKey(Event, on_delete=models.CASCADE, verbose_name=_("event"))
    all_day = models.BooleanField(default=True)
    title = models.CharField(_("title"), max_length=255, blank=True)
    description = models.TextField(_("description"), blank=True)
    start = models.DateTimeField(_("start"), db_index=True)
    end = models.DateTimeField(_("end"), db_index=True, null=True)
    cancelled = models.BooleanField(_("cancelled"), default=False)
    # The start/end this occurrence had when generated from its event; used
    # to detect manual moves (see `moved`) and to identify the occurrence.
    original_start = models.DateTimeField(_("original start"))
    original_end = models.DateTimeField(_("original end"))
    created_on = models.DateTimeField(_("created on"), auto_now_add=True)
    updated_on = models.DateTimeField(_("updated on"), auto_now=True)
    color_event = models.CharField(_("Color event"), blank=True, max_length=10)

    class Meta(object):
        verbose_name = _("occurrence")
        verbose_name_plural = _("occurrences")
        app_label = 'schedule'
        index_together = (('start', 'end'), )

    def __init__(self, *args, **kwargs):
        super(Occurrence, self).__init__(*args, **kwargs)
        # Default title/description from the parent event when unset; only
        # touch self.event when event_id is present, to avoid a DB lookup on
        # an unbound instance.
        if not self.title and self.event_id:
            self.title = self.event.title
        if not self.description and self.event_id:
            self.description = self.event.description

    @property
    def moved(self):
        """True when this occurrence was moved from its generated times."""
        return self.original_start != self.start or self.original_end != self.end

    def move(self, new_start, new_end):
        """Reschedule this occurrence to the given times and persist it."""
        self.start = new_start
        self.end = new_end
        self.save()

    def cancel(self):
        self.cancelled = True
        self.save()

    def uncancel(self):
        self.cancelled = False
        self.save()

    @property
    def seconds(self):
        """Duration of the occurrence in seconds."""
        return (self.end - self.start).total_seconds()

    @property
    def minutes(self):
        return float(self.seconds) / 60

    @property
    def hours(self):
        return float(self.seconds) / 3600

    def _date_url_kwargs(self):
        # Shared kwargs for the *_by_date URL patterns used when the
        # occurrence has not been persisted (no pk yet).
        return {
            'event_id': self.event.id,
            'year': self.start.year,
            'month': self.start.month,
            'day': self.start.day,
            'hour': self.start.hour,
            'minute': self.start.minute,
            'second': self.start.second,
        }

    def get_absolute_url(self):
        if self.pk is not None:
            return reverse('occurrence', kwargs={
                'occurrence_id': self.pk,
                'event_id': self.event.id
            })
        return reverse('occurrence_by_date', kwargs=self._date_url_kwargs())

    def get_cancel_url(self):
        if self.pk is not None:
            return reverse('cancel_occurrence', kwargs={
                'occurrence_id': self.pk,
                'event_id': self.event.id
            })
        return reverse('cancel_occurrence_by_date',
                       kwargs=self._date_url_kwargs())

    def get_edit_url(self):
        if self.pk is not None:
            return reverse('edit_occurrence', kwargs={
                'occurrence_id': self.pk,
                'event_id': self.event.id
            })
        return reverse('edit_occurrence_by_date',
                       kwargs=self._date_url_kwargs())

    def __str__(self):
        return ugettext("%(start)s to %(end)s") % {
            'start': date(self.start, django_settings.DATE_FORMAT),
            'end': date(self.end, django_settings.DATE_FORMAT)
        }

    def __lt__(self, other):
        return self.end < other.end

    def __eq__(self, other):
        return (isinstance(other, Occurrence) and
                self.original_start == other.original_start and
                self.original_end == other.original_end)

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; define it explicitly
        # so inequality is consistent on both interpreter versions.
        return not self.__eq__(other)

    def __hash__(self):
        # Overriding __eq__ sets __hash__ to None on Python 3, which would
        # make occurrences unusable in sets/dicts. Hash on the same fields
        # that __eq__ compares, keeping hash/eq consistent.
        return hash((self.original_start, self.original_end))
class Workflow(six.with_metaclass(WorkflowMetaclass, object)):
    """
    Base class for a state machine: subclasses declare inner ``States`` and
    ``Transitions`` classes; the ``_states`` / ``_transitions`` mappings and
    the default-state attributes used below are presumably assembled by
    ``WorkflowMetaclass`` (not visible in this file chunk).

    Example:

    .. code-block:: python

        class ExampleWorkflow(Workflow):
            class States:
                start = State(_('Start'), default=True, delete=False)
                deleted = State(_('Deleted'), update=False, delete=True)
                accepted = State(_('Accepted'), update=False, delete=True)

            class Transitions:
                accept = State(_('Accept'), 'start', 'accepted')
                delete = State(_('Delete'), ['start', 'accepted'], 'deleted', validate=False)

    NOTE(review): the ``Transitions`` example above presumably should use a
    ``Transition`` class rather than ``State`` -- confirm against the
    project's workflow documentation.
    """

    def __init__(self, state=None):
        # ``instance`` is attached later via ``_call``; a workflow object can
        # exist without a bound model instance.
        self.instance = None
        if state:
            self._set_state(state)
        else:
            self._current_state = self._default_state
            self._current_state_key = self._default_state_key
        # Remember the state we started in so callers can compare against it.
        self._initial_state = self._current_state
        self._initial_state_key = self._current_state_key

    def __str__(self):
        return force_text(self._current_state)

    def _from_here(self, object=None, user=None):
        # Collect the (key, transition) pairs reachable from the current
        # state; when both object and user are given, transitions whose
        # condition evaluates false for them are filtered out.
        out = []
        for key, transition in self._transitions.items():
            if self._current_state_key in transition.sources:
                if object and user and not transition.eval_condition(
                        object, user):
                    continue
                out.append((key, transition))
        return out

    def _set_state(self, key):
        # Switch to the state named ``key``, validating it first.
        if key not in self._states:
            raise ValidationError(_("The state %s is not valid") % key)
        self._current_state = self._states[key]
        self._current_state_key = key

    def _call(self, key, instance, user):
        # Execute the transition named ``key`` on ``instance`` as ``user``
        # and return whatever the user-defined transition method returns.

        # check if key is valid
        if self._current_state_key not in self._transitions[key].sources:
            raise ValidationError(_("This transition is not valid"))

        user.djangobmf = Employee(user)

        # update object with instance and user (they come in handy in
        # user-defined functions)
        self.instance = instance
        self.user = user

        # normally the instance attribute should only be unset during the tests
        if not self.instance:
            self._set_state(self._transitions[key].target)
            return getattr(self, key)()

        # validate the instance
        if self._transitions[key].validate and self._current_state.update:
            self.instance.full_clean()

        # check the conditions of the transition
        if self._transitions[key].conditioned:
            self._transitions[key].eval_condition(instance, user)

        # everything is valid, we can set the new state
        self._set_state(self._transitions[key].target)

        # call function
        url = getattr(self, key)()
        return url
class Resource(six.with_metaclass(DeclarativeMetaclass)):
    """
    Resource defines how objects are mapped to their import and export
    representations and handle importing and exporting data.
    """

    @classmethod
    def get_result_class(self):
        """
        Returns the class used to store the result of an import.
        """
        return Result

    @classmethod
    def get_row_result_class(self):
        """
        Returns the class used to store the result of a row import.
        """
        return RowResult

    @classmethod
    def get_error_result_class(self):
        """
        Returns the class used to store an error resulting from an import.
        """
        return Error

    def get_use_transactions(self):
        # Fall back to the global USE_TRANSACTIONS setting when the Meta
        # option is left unset.
        if self._meta.use_transactions is None:
            return USE_TRANSACTIONS
        else:
            return self._meta.use_transactions

    def get_fields(self, **kwargs):
        """
        Returns fields sorted according to
        :attr:`~import_export.resources.ResourceOptions.export_order`.
        """
        return [self.fields[f] for f in self.get_export_order()]

    @classmethod
    def get_field_name(cls, field):
        """
        Returns the field name for a given field.
        """
        for field_name, f in cls.fields.items():
            if f == field:
                return field_name
        raise AttributeError("Field %s does not exists in %s resource" % (
            field, cls))

    def init_instance(self, row=None):
        # Subclass hook: build a brand-new (unsaved) instance for ``row``.
        raise NotImplementedError()

    def get_instance(self, instance_loader, row):
        """
        Calls the :doc:`InstanceLoader <api_instance_loaders>`.
        """
        return instance_loader.get_instance(row)

    def get_or_init_instance(self, instance_loader, row, **kwargs):
        """
        Either fetches an already existing instance or initializes a new one.
        Returns a (instance, created) pair.
        """
        instance = self.get_instance(instance_loader, row)
        if instance:
            return (instance, False)
        else:
            return (self.init_instance(row), True)

    def save_instance(self, instance, using_transactions=True, dry_run=False):
        """
        Takes care of saving the object to the database.

        Keep in mind that this is done by calling ``instance.save()``, so
        objects are not created in bulk!
        """
        self.before_save_instance(instance, using_transactions, dry_run)
        if not using_transactions and dry_run:
            # we don't have transactions and we want to do a dry_run
            pass
        else:
            instance.save()
        self.after_save_instance(instance, using_transactions, dry_run)

    def before_save_instance(self, instance, using_transactions, dry_run):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def after_save_instance(self, instance, using_transactions, dry_run):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def delete_instance(self, instance, using_transactions=True,
                        dry_run=False):
        """
        Calls :meth:`instance.delete` as long as ``dry_run`` is not set.
        """
        self.before_delete_instance(instance, dry_run)
        if not using_transactions and dry_run:
            # we don't have transactions and we want to do a dry_run
            pass
        else:
            instance.delete()
        self.after_delete_instance(instance, dry_run)

    def before_delete_instance(self, instance, dry_run):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def after_delete_instance(self, instance, dry_run):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def import_field(self, field, obj, data):
        """
        Calls :meth:`import_export.fields.Field.save` if ``Field.attribute``
        and ``Field.column_name`` are found in ``data``.
        """
        if field.attribute and field.column_name in data:
            field.save(obj, data)

    def get_import_fields(self):
        return self.get_fields()

    def import_obj(self, obj, data, dry_run):
        """
        Traverses every field in this Resource and calls
        :meth:`~import_export.resources.Resource.import_field`.
        M2M fields are deferred to save_m2m(), which runs after the
        instance has a primary key.
        """
        for field in self.get_import_fields():
            if isinstance(field.widget, widgets.ManyToManyWidget):
                continue
            self.import_field(field, obj, data)

    def save_m2m(self, obj, data, using_transactions, dry_run):
        """
        Saves m2m fields.

        Model instance need to have a primary key value before
        a many-to-many relationship can be used.
        """
        if not using_transactions and dry_run:
            # we don't have transactions and we want to do a dry_run
            pass
        else:
            for field in self.get_import_fields():
                if not isinstance(field.widget, widgets.ManyToManyWidget):
                    continue
                self.import_field(field, obj, data)

    def for_delete(self, row, instance):
        """
        Returns ``True`` if ``row`` importing should delete instance.

        Default implementation returns ``False``.
        Override this method to handle deletion.
        """
        return False

    def skip_row(self, instance, original):
        """
        Returns ``True`` if ``row`` importing should be skipped.

        Default implementation returns ``False`` unless skip_unchanged ==
        True. Override this method to handle skipping rows meeting certain
        conditions.
        """
        if not self._meta.skip_unchanged:
            return False
        for field in self.get_import_fields():
            try:
                # For fields that are models.fields.related.ManyRelatedManager
                # we need to compare the results
                if list(field.get_value(instance).all()) != list(
                        field.get_value(original).all()):
                    return False
            except AttributeError:
                if field.get_value(instance) != field.get_value(original):
                    return False
        return True

    def get_diff_headers(self):
        """
        Diff representation headers.
        """
        return self.get_export_headers()

    def before_import(self, dataset, using_transactions, dry_run, **kwargs):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def after_import(self, dataset, result, using_transactions, dry_run,
                     **kwargs):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def before_import_row(self, row, **kwargs):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def after_import_row(self, row, row_result, **kwargs):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def after_import_instance(self, instance, new, **kwargs):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def import_row(self, row, instance_loader, using_transactions=True,
                   dry_run=False, **kwargs):
        """
        Imports data from ``tablib.Dataset``. Refer to :doc:`import_workflow`
        for a more complete description of the whole import process.

        :param row: A ``dict`` of the row to import

        :param instance_loader: The instance loader to be used to load the row

        :param using_transactions: If ``using_transactions`` is set, a
            transaction is being used to wrap the import

        :param dry_run: If ``dry_run`` is set, or error occurs, transaction
            will be rolled back.
        """
        row_result = self.get_row_result_class()()
        try:
            self.before_import_row(row, **kwargs)
            instance, new = self.get_or_init_instance(instance_loader, row,
                                                      **kwargs)
            self.after_import_instance(instance, new, **kwargs)
            if new:
                row_result.import_type = RowResult.IMPORT_TYPE_NEW
            else:
                row_result.import_type = RowResult.IMPORT_TYPE_UPDATE
            row_result.new_record = new
            # Keep a pre-import copy so the diff and skip logic can compare
            # the row's effect against the original state.
            original = deepcopy(instance)
            diff = Diff(self, original, new)
            if self.for_delete(row, instance):
                if new:
                    # Deleting a row that never existed is a no-op: skip it.
                    row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                    diff.compare_with(self, None, dry_run)
                else:
                    row_result.import_type = RowResult.IMPORT_TYPE_DELETE
                    self.delete_instance(instance, using_transactions,
                                         dry_run)
                    diff.compare_with(self, None, dry_run)
            else:
                self.import_obj(instance, row, dry_run)
                if self.skip_row(instance, original):
                    row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                else:
                    with transaction.atomic():
                        self.save_instance(instance, using_transactions,
                                           dry_run)
                    self.save_m2m(instance, row, using_transactions, dry_run)
                diff.compare_with(self, instance, dry_run)
            row_result.diff = diff.as_html()
            # Add object info to RowResult for LogEntry
            if row_result.import_type != RowResult.IMPORT_TYPE_SKIP:
                row_result.object_id = instance.pk
                row_result.object_repr = force_text(instance)
            self.after_import_row(row, row_result, **kwargs)
        except Exception as e:
            row_result.import_type = RowResult.IMPORT_TYPE_ERROR
            # There is no point logging a transaction error for each row
            # when only the original error is likely to be relevant
            if not isinstance(e, TransactionManagementError):
                logging.exception(e)
            tb_info = traceback.format_exc()
            row_result.errors.append(
                self.get_error_result_class()(e, tb_info, row))
        return row_result

    def import_data(self, dataset, dry_run=False, raise_errors=False,
                    use_transactions=None, collect_failed_rows=False,
                    **kwargs):
        """
        Imports data from ``tablib.Dataset``. Refer to :doc:`import_workflow`
        for a more complete description of the whole import process.

        :param dataset: A ``tablib.Dataset``

        :param raise_errors: Whether errors should be printed to the end user
            or raised regularly.

        :param use_transactions: If ``True`` the import process will be
            processed inside a transaction.

        :param collect_failed_rows: If ``True`` the import process will
            collect failed rows.

        :param dry_run: If ``dry_run`` is set, or an error occurs, if a
            transaction is being used, it will be rolled back.
        """
        if use_transactions is None:
            use_transactions = self.get_use_transactions()

        connection = connections[DEFAULT_DB_ALIAS]
        supports_transactions = getattr(
            connection.features, "supports_transactions", False)

        # A transactional import was requested but the backend can't do it.
        if use_transactions and not supports_transactions:
            raise ImproperlyConfigured

        # dry_run also forces a transaction (so changes can be rolled back),
        # provided the backend supports one.
        using_transactions = (
            use_transactions or dry_run) and supports_transactions

        if using_transactions:
            with transaction.atomic():
                return self.import_data_inner(
                    dataset, dry_run, raise_errors, using_transactions,
                    collect_failed_rows, **kwargs)
        return self.import_data_inner(
            dataset, dry_run, raise_errors, using_transactions,
            collect_failed_rows, **kwargs)

    def import_data_inner(self, dataset, dry_run, raise_errors,
                          using_transactions, collect_failed_rows, **kwargs):
        # Core import loop; callers wrap it in transaction.atomic() when
        # using_transactions is set (see import_data).
        result = self.get_result_class()()
        result.diff_headers = self.get_diff_headers()
        result.total_rows = len(dataset)

        if using_transactions:
            # when transactions are used we want to create/update/delete
            # object as transaction will be rolled back if dry_run is set
            sp1 = savepoint()

        try:
            self.before_import(dataset, using_transactions, dry_run, **kwargs)
        except Exception as e:
            logging.exception(e)
            tb_info = traceback.format_exc()
            result.append_base_error(
                self.get_error_result_class()(e, tb_info))
            if raise_errors:
                if using_transactions:
                    savepoint_rollback(sp1)
                raise

        instance_loader = self._meta.instance_loader_class(self, dataset)

        # Update the total in case the dataset was altered by before_import()
        result.total_rows = len(dataset)

        if collect_failed_rows:
            result.add_dataset_headers(dataset.headers)

        for row in dataset.dict:
            row_result = self.import_row(
                row, instance_loader,
                using_transactions=using_transactions,
                dry_run=dry_run, **kwargs)
            result.increment_row_result_total(row_result)
            if row_result.errors:
                if collect_failed_rows:
                    result.append_failed_row(row, row_result.errors[0])
                if raise_errors:
                    if using_transactions:
                        savepoint_rollback(sp1)
                    raise row_result.errors[-1].error
            if (row_result.import_type != RowResult.IMPORT_TYPE_SKIP or
                    self._meta.report_skipped):
                result.append_row_result(row_result)

        try:
            self.after_import(dataset, result, using_transactions, dry_run,
                              **kwargs)
        except Exception as e:
            logging.exception(e)
            tb_info = traceback.format_exc()
            result.append_base_error(
                self.get_error_result_class()(e, tb_info))
            if raise_errors:
                if using_transactions:
                    savepoint_rollback(sp1)
                raise

        if using_transactions:
            # Roll everything back for dry runs or failed imports; otherwise
            # release the savepoint and keep the changes.
            if dry_run or result.has_errors():
                savepoint_rollback(sp1)
            else:
                savepoint_commit(sp1)

        return result

    def get_export_order(self):
        # Declared export_order first, then any remaining fields in
        # declaration order.
        order = tuple(self._meta.export_order or ())
        return order + tuple(k for k in self.fields.keys() if k not in order)

    def before_export(self, queryset, *args, **kwargs):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def after_export(self, queryset, data, *args, **kwargs):
        """
        Override to add additional logic. Does nothing by default.
        """
        pass

    def export_field(self, field, obj):
        # A ``dehydrate_<field_name>`` method on the resource, if present,
        # takes precedence over the field's own export logic.
        field_name = self.get_field_name(field)
        method = getattr(self, 'dehydrate_%s' % field_name, None)
        if method is not None:
            return method(obj)
        return field.export(obj)

    def get_export_fields(self):
        return self.get_fields()

    def export_resource(self, obj):
        return [self.export_field(field, obj)
                for field in self.get_export_fields()]

    def get_export_headers(self):
        headers = [
            force_text(field.column_name)
            for field in self.get_export_fields()]
        return headers

    def get_user_visible_fields(self):
        return self.get_fields()

    def export(self, queryset=None, *args, **kwargs):
        """
        Exports a resource.
        """
        self.before_export(queryset, *args, **kwargs)

        if queryset is None:
            queryset = self.get_queryset()
        headers = self.get_export_headers()
        data = tablib.Dataset(headers=headers)

        if isinstance(queryset, QuerySet):
            # Iterate without the queryset cache, to avoid wasting memory when
            # exporting large datasets.
            iterable = queryset.iterator()
        else:
            iterable = queryset
        for obj in iterable:
            data.append(self.export_resource(obj))

        self.after_export(queryset, data, *args, **kwargs)

        return data
class ClosureModel(with_metaclass(ClosureModelBase, models.Model)):
    """Provides methods to assist in a tree based structure."""
    # pylint: disable=W5101

    class Meta:
        """We make this an abstract class, it needs to be inherited from."""
        # pylint: disable=W0232
        # pylint: disable=R0903
        abstract = True

    def __setattr__(self, name, value):
        # Intercept assignments to the sentinel (parent) attribute so the
        # previous parent pk can be remembered before it is overwritten;
        # the closure table is later repaired from that remembered value.
        if name.endswith('_id'):
            id_field_name = name
        else:
            id_field_name = "%s_id" % name
        if (
            name.startswith(self._closure_sentinel_attr) and
            # It's the right attribute
            (
                # It's already been set
                (hasattr(self, 'get_deferred_fields') and
                 id_field_name not in self.get_deferred_fields() and
                 hasattr(self, id_field_name)) or  # Django>=1.8
                (not hasattr(self, 'get_deferred_fields') and
                 hasattr(self, id_field_name))  # Django<1.8
            ) and
            not self._closure_change_check()
            # The old value isn't stored
        ):
            if name.endswith('_id'):
                obj_id = value
            elif value:
                obj_id = value.pk
            else:
                obj_id = None
            # If this is just setting the same value again, we don't need to
            # do anything
            if getattr(self, id_field_name) != obj_id:
                # Already set once, and not already stored the old
                # value, need to take a copy before it changes
                self._closure_change_init()
        super(ClosureModel, self).__setattr__(name, value)

    @classmethod
    def _toplevel(cls):
        """Find the top level of the chain we're in.

        For example, if we have:
        C inheriting from B inheriting from A inheriting from ClosureModel
        C._toplevel() will return A.
        """
        # Direct subclasses of ClosureModel that are also parents of cls are
        # the root of the inheritance chain; fall back to cls itself.
        superclasses = (
            list(set(ClosureModel.__subclasses__()) &
                 set(cls._meta.get_parent_list()))
        )
        return next(iter(superclasses)) if superclasses else cls

    @classmethod
    def rebuildtable(cls):
        """Regenerate the entire closuretree."""
        cls._closure_model.objects.all().delete()
        # Every node is its own ancestor at depth 0.
        cls._closure_model.objects.bulk_create([cls._closure_model(
            parent_id=x['pk'],
            child_id=x['pk'],
            depth=0
        ) for x in cls.objects.values("pk")])
        for node in cls.objects.all():
            node._closure_createlink()

    @classmethod
    def closure_parentref(cls):
        """How to refer to parents in the closure tree"""
        return "%sclosure_children" % cls._toplevel().__name__.lower()
    # Backwards compatibility:
    _closure_parentref = closure_parentref

    @classmethod
    def closure_childref(cls):
        """How to refer to children in the closure tree"""
        return "%sclosure_parents" % cls._toplevel().__name__.lower()
    # Backwards compatibility:
    _closure_childref = closure_childref

    @property
    def _closure_sentinel_attr(self):
        """The attribute we need to watch to tell if the
        parent/child relationships have changed
        """
        meta = getattr(self, 'ClosureMeta', None)
        return getattr(meta, 'sentinel_attr', self._closure_parent_attr)

    @property
    def _closure_parent_attr(self):
        '''The attribute or property that holds the parent object.'''
        meta = getattr(self, 'ClosureMeta', None)
        return getattr(meta, 'parent_attr', 'parent')

    @property
    def _closure_parent_pk(self):
        """What our parent pk is in the closure tree."""
        # Prefer the raw FK id attribute to avoid a database fetch.
        if hasattr(self, "%s_id" % self._closure_parent_attr):
            return getattr(self, "%s_id" % self._closure_parent_attr)
        else:
            parent = getattr(self, self._closure_parent_attr)
            return parent.pk if parent else None

    def _closure_deletelink(self, oldparentpk):
        """Remove incorrect links from the closure tree."""
        self._closure_model.objects.filter(
            **{
                "parent__%s__child" % self._closure_parentref(): oldparentpk,
                "child__%s__parent" % self._closure_childref(): self.pk
            }
        ).delete()

    def _closure_createlink(self):
        """Create a link in the closure tree."""
        # Cross product of (ancestors of our parent, incl. itself) and
        # (descendants of us, incl. ourself) gives every new closure row.
        linkparents = self._closure_model.objects.filter(
            child__pk=self._closure_parent_pk
        ).values("parent", "depth")
        linkchildren = self._closure_model.objects.filter(
            parent__pk=self.pk
        ).values("child", "depth")
        newlinks = [self._closure_model(
            parent_id=p['parent'],
            child_id=c['child'],
            depth=p['depth'] + c['depth'] + 1
        ) for p in linkparents for c in linkchildren]
        self._closure_model.objects.bulk_create(newlinks)

    def get_ancestors(self, include_self=False, depth=None):
        """Return all the ancestors of this object."""
        if self.is_root_node():
            if not include_self:
                return self._toplevel().objects.none()
            else:
                # Filter on pk for efficiency.
                return self._toplevel().objects.filter(pk=self.pk)

        params = {"%s__child" % self._closure_parentref(): self.pk}
        if depth is not None:
            params["%s__depth__lte" % self._closure_parentref()] = depth
        ancestors = self._toplevel().objects.filter(**params)
        if not include_self:
            ancestors = ancestors.exclude(pk=self.pk)
        return ancestors.order_by("%s__depth" % self._closure_parentref())

    def get_descendants(self, include_self=False, depth=None):
        """Return all the descendants of this object."""
        params = {"%s__parent" % self._closure_childref(): self.pk}
        if depth is not None:
            params["%s__depth__lte" % self._closure_childref()] = depth
        descendants = self._toplevel().objects.filter(**params)
        if not include_self:
            descendants = descendants.exclude(pk=self.pk)
        return descendants.order_by("%s__depth" % self._closure_childref())

    def prepopulate(self, queryset):
        """Perpopulate a descendants query's children efficiently.

        Call like: blah.prepopulate(blah.get_descendants().select_related(stuff))
        """
        objs = list(queryset)
        hashobjs = dict([(x.pk, x) for x in objs] + [(self.pk, self)])
        for descendant in hashobjs.values():
            descendant._cached_children = []
        for descendant in objs:
            assert descendant._closure_parent_pk in hashobjs
            parent = hashobjs[descendant._closure_parent_pk]
            parent._cached_children.append(descendant)

    def get_children(self):
        """Return all the children of this object."""
        if hasattr(self, '_cached_children'):
            children = self._toplevel().objects.filter(
                pk__in=[n.pk for n in self._cached_children]
            )
            # Pre-seed the queryset's result cache with the objects gathered
            # by prepopulate() so no query is issued when it is iterated.
            children._result_cache = self._cached_children
            return children
        else:
            return self.get_descendants(include_self=False, depth=1)

    def get_root(self):
        """Return the furthest ancestor of this node."""
        if self.is_root_node():
            return self
        return self.get_ancestors().order_by(
            "-%s__depth" % self._closure_parentref()
        )[0]

    def is_child_node(self):
        """Is this node a child, i.e. has a parent?"""
        return not self.is_root_node()

    def is_root_node(self):
        """Is this node a root, i.e. has no parent?"""
        return self._closure_parent_pk is None

    def is_descendant_of(self, other, include_self=False):
        """Is this node a descendant of `other`?"""
        if other.pk == self.pk:
            return include_self
        return self._closure_model.objects.filter(
            parent=other,
            child=self
        ).exclude(pk=self.pk).exists()

    def is_ancestor_of(self, other, include_self=False):
        """Is this node an ancestor of `other`?"""
        return other.is_descendant_of(self, include_self=include_self)

    def _closure_change_init(self):
        """Part of the change detection. Setting up"""
        # More magic. We're setting this inside setattr...
        # pylint: disable=W0201
        self._closure_old_parent_pk = self._closure_parent_pk

    def _closure_change_check(self):
        """Part of the change detection. Have we changed since we began?"""
        return hasattr(self, "_closure_old_parent_pk")

    def _closure_change_oldparent(self):
        """Part of the change detection.

        What we used to be"""
        return self._closure_old_parent_pk
raise forms.ValidationError("Invalid JSON data!") try: # Run the value through JSON so we can normalize formatting # and at least learn about malformed data: value = json.dumps(value, cls=DjangoJSONEncoder) except ValueError: raise forms.ValidationError("Invalid JSON data!") return super(JSONFormField, self).clean(value, *args, **kwargs) if LooseVersion(get_version()) > LooseVersion('1.8'): workaround_class = models.TextField else: workaround_class = six.with_metaclass(models.SubfieldBase, models.TextField) class JSONField(workaround_class): """ TextField which transparently serializes/unserializes JSON objects See: http://www.djangosnippets.org/snippets/1478/ """ formfield = JSONFormField def to_python(self, value): """Convert our string value to JSON after we load it from the DB""" if isinstance(value, dict):
class Widget(six.with_metaclass(MediaDefiningClass)): needs_multipart_form = False # Determines does this widget need multipart form is_localized = False is_required = False def __init__(self, attrs=None): if attrs is not None: self.attrs = attrs.copy() else: self.attrs = {} def __deepcopy__(self, memo): obj = copy.copy(self) obj.attrs = self.attrs.copy() memo[id(self)] = obj return obj @property def is_hidden(self): return self.input_type == 'hidden' if hasattr(self, 'input_type') else False def subwidgets(self, name, value, attrs=None, choices=()): """ Yields all "subwidgets" of this widget. Used only by RadioSelect to allow template access to individual <input type="radio"> buttons. Arguments are the same as for render(). """ yield SubWidget(self, name, value, attrs, choices) def render(self, name, value, attrs=None): """ Returns this Widget rendered as HTML, as a Unicode string. The 'value' given is not guaranteed to be valid input, so subclass implementations should program defensively. """ raise NotImplementedError('subclasses of Widget must provide a render() method') def build_attrs(self, extra_attrs=None, **kwargs): "Helper function for building an attribute dictionary." attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs def value_from_datadict(self, data, files, name): """ Given a dictionary of data and this widget's name, returns the value of this widget. Returns None if it's not provided. """ return data.get(name, None) def id_for_label(self, id_): """ Returns the HTML ID attribute of this Widget for use by a <label>, given the ID of the field. Returns None if no ID is available. This hook is necessary because some widgets have multiple HTML elements and, thus, multiple IDs. In that case, this method should return an ID value that corresponds to the first ID in the widget's tags. """ return id_
""" Custom model fields to link to CMS content. """ import django from django.utils import six from django.core.exceptions import ValidationError from django.core.validators import URLValidator from django.db import models from any_urlfield.models.values import AnyUrlValue from any_urlfield.registry import UrlTypeRegistry if django.VERSION < (1, 8): base_class = six.with_metaclass(models.SubfieldBase, models.CharField) else: base_class = models.CharField class AnyUrlField(base_class): """ A CharField that can either refer to a CMS page ID, or external URL. .. figure:: /images/anyurlfield1.* :width: 363px :height: 74px :alt: AnyUrlField, with external URL input. .. figure:: /images/anyurlfield2.* :width: 290px :height: 76px :alt: AnyUrlField, with internal page input.
class Report(six.with_metaclass(ReportBase, object)):
    """
    Base class for declarative tabular reports.

    Subclasses declare fields (collected onto ``self._meta`` by the
    ``ReportBase`` metaclass) and must implement :meth:`queryset`.
    """

    def __init__(self, request=None):
        if request is not None:
            self.set_request(request)

    def _raiseNotImplementedError(self, what, name):
        raise NotImplementedError(
            "The %s '%s' was not defined by '%s.%s'" %
            (what, name, self.__module__, self.__class__.__name__))

    def queryset(self):
        """ Return the queryset for this report. """
        self._raiseNotImplementedError('method', 'queryset')

    def fields(self, attr=None, value=None):
        """
        Return a list of fields optionally filtering the list using attr=value
        @param attr: The attribute to filter
        @param value: The value of the attribute
        @return: List of fields
        """
        fields = self._meta.fields
        if attr:
            return [field for field in fields if getattr(field, attr) == value]
        return fields

    def field(self, name):
        """ Retrieve a field by its name; returns None if it does not exist. """
        for field in self.fields():
            if field.name == name:
                return field

    def field_exists(self, name):
        """ Return True if a field named ``name`` is declared. """
        return self.field(name) is not None

    def field_index(self, name):
        """ Retrieve the field index by field name. """
        return self.fields().index(self.field(name))

    def titles(self):
        """ Return a list of field titles. """
        return [field.title for field in self.fields()]

    def writers(self):
        """ Return writers supported by this report. """
        return self._meta.writers

    def supports_writer(self, writer):
        """
        Return True if writer is supported by this report.

        ``writer`` may be a writer instance or a writer class name.
        """
        writers = self.writers()
        # Fix: ``basestring`` only exists on Python 2 and raises NameError on
        # Python 3; six.string_types covers both interpreters.
        if isinstance(writer, six.string_types):
            writers = [_writer.__class__.__name__ for _writer in writers]
        return writer in writers

    def cache_key(self):
        """ Return a cache key for this report. """
        return 'report-%s' % self.__class__.__name__

    def delete_cache(self):
        """ Drop any cached data for this report. """
        cache.delete(self.cache_key())

    def data(self):
        """
        Retrieve the data for a report and possibly cache it.
        """
        if hasattr(self, '_data'):
            return self._data

        if self._meta.cache:
            # Check if the data exists in cache.  cache.get() returns None on
            # a miss, so compare against None explicitly: a legitimately
            # empty (falsy) cached result must not be re-gathered and
            # re-cached on every call.
            self._data = cache.get(self.cache_key())
            if self._data is None:
                self._data = self._gather_data()
                cache.set(self.cache_key(), self._data,
                          self._meta.cache.seconds)
        else:
            self._data = self._gather_data()
        return self._data

    def set_request(self, request):
        """ Set the request object that is being used to display this report. """
        self.request = request

    def _gather_data(self):
        """
        Gather the data into a list of dicts

        @return list: A list of dicts with field names forming the dict keys.
        """
        rows = []
        for item in self.queryset():
            row = {}
            for field in self.fields():
                at = getattr(item, field.name)
                # Callables (model methods) are invoked; plain attributes are
                # used as-is.
                row[field.name] = at() if callable(at) else at
            rows.append(row)
        return rows

    def has_aggregates(self):
        """ Return True if any declared field requests aggregation. """
        return any(field.aggregate for field in self.fields())

    def get_aggregates(self):
        """
        Return a dict mapping field name to the value accumulated (with +=)
        across all rows, or None if no field requests aggregation.  The
        result is memoized on the instance.
        """
        if not self.has_aggregates():
            return None

        if hasattr(self, '_aggregates'):
            return self._aggregates

        self._aggregates = {}
        aggregate_fields = [
            field for field in self.fields() if field.aggregate
        ]
        for item in self.data():
            for field in aggregate_fields:
                if field.name not in self._aggregates:
                    self._aggregates[field.name] = item[field.name]
                else:
                    self._aggregates[field.name] += item[field.name]
        return self._aggregates

    def get_qs_for_term(self, term):
        """
        Get a queryset filter (Q object) that uses term to filter the entire
        report, OR-ing together the per-field filters.  Returns None for an
        empty term or when no filterable field matches.
        """
        if not term:
            return None
        qs_params = None
        for field in self.fields('filter', True):
            q = field.get_qs_for_term(term)
            if q:
                qs_params = qs_params | q if qs_params else q
        return qs_params
import django from django.db import models from django import forms from django.utils import six from django_enumfield import validators if django.VERSION < (1, 8): base_class = six.with_metaclass(models.SubfieldBase, models.IntegerField) else: base_class = models.IntegerField class EnumField(base_class): """ EnumField is a convenience field to automatically handle validation of transitions between Enum values and set field choices from the enum. EnumField(MyEnum, default=MyEnum.INITIAL) """ def __init__(self, enum, *args, **kwargs): kwargs['choices'] = enum.choices() if 'default' not in kwargs: kwargs['default'] = enum.default() self.enum = enum models.IntegerField.__init__(self, *args, **kwargs) def get_db_prep_value(self, value, connection, prepared=False): """Returns field's value prepared for interacting with the database backend.
class Model(six.with_metaclass(ModelBase)):
    """Base class for all database-backed models: field initialization,
    save/delete machinery, and validation (full_clean and friends)."""

    # True on the dynamically-generated classes used for deferred loading.
    _deferred = False

    def __init__(self, *args, **kwargs):
        signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs)

        # Set up the storage for instance state
        self._state = ModelState()

        # There is a rather weird disparity here; if kwargs, it's set, then args
        # overrides it. It should be one or the other; don't duplicate the work
        # The reason for the kwargs check is that standard iterator passes in by
        # args, and instantiation for iteration is 33% faster.
        args_len = len(args)
        if args_len > len(self._meta.fields):
            # Daft, but matches old exception sans the err msg.
            raise IndexError("Number of args exceeds number of fields")

        fields_iter = iter(self._meta.fields)
        if not kwargs:
            # The ordering of the zip calls matter - zip throws StopIteration
            # when an iter throws it. So if the first iter throws it, the second
            # is *not* consumed. We rely on this, so don't change the order
            # without changing the logic.
            for val, field in zip(args, fields_iter):
                setattr(self, field.attname, val)
        else:
            # Slower, kwargs-ready version.
            for val, field in zip(args, fields_iter):
                setattr(self, field.attname, val)
                kwargs.pop(field.name, None)
                # Maintain compatibility with existing calls.
                if isinstance(field.rel, ManyToOneRel):
                    kwargs.pop(field.attname, None)

        # Now we're left with the unprocessed fields that *must* come from
        # keywords, or default.

        for field in fields_iter:
            is_related_object = False
            # This slightly odd construct is so that we can access any
            # data-descriptor object (DeferredAttribute) without triggering its
            # __get__ method.
            if (field.attname not in kwargs and
                    isinstance(self.__class__.__dict__.get(field.attname),
                               DeferredAttribute)):
                # This field will be populated on request.
                continue
            if kwargs:
                if isinstance(field.rel, ManyToOneRel):
                    try:
                        # Assume object instance was passed in.
                        rel_obj = kwargs.pop(field.name)
                        is_related_object = True
                    except KeyError:
                        try:
                            # Object instance wasn't passed in -- must be an ID.
                            val = kwargs.pop(field.attname)
                        except KeyError:
                            val = field.get_default()
                    else:
                        # Object instance was passed in. Special case: You can
                        # pass in "None" for related objects if it's allowed.
                        if rel_obj is None and field.null:
                            val = None
                else:
                    try:
                        val = kwargs.pop(field.attname)
                    except KeyError:
                        # This is done with an exception rather than the
                        # default argument on pop because we don't want
                        # get_default() to be evaluated, and then not used.
                        # Refs #12057.
                        val = field.get_default()
            else:
                val = field.get_default()
            if is_related_object:
                # If we are passed a related instance, set it using the
                # field.name instead of field.attname (e.g. "user" instead of
                # "user_id") so that the object gets properly cached (and type
                # checked) by the RelatedObjectDescriptor.
                setattr(self, field.name, rel_obj)
            else:
                setattr(self, field.attname, val)

        if kwargs:
            # Leftover keywords may still be settable properties; anything
            # else is an error.
            for prop in list(kwargs):
                try:
                    if isinstance(getattr(self.__class__, prop), property):
                        setattr(self, prop, kwargs.pop(prop))
                except AttributeError:
                    pass
            if kwargs:
                raise TypeError(
                    "'%s' is an invalid keyword argument for this function" %
                    list(kwargs)[0])
        super(Model, self).__init__()
        signals.post_init.send(sender=self.__class__, instance=self)

    def __repr__(self):
        # Never let a broken __unicode__/__str__ make repr() itself raise.
        try:
            u = six.text_type(self)
        except (UnicodeEncodeError, UnicodeDecodeError):
            u = '[Bad Unicode data]'
        return force_str('<%s: %s>' % (self.__class__.__name__, u))

    def __str__(self):
        if not six.PY3 and hasattr(self, '__unicode__'):
            if type(self).__unicode__ == Model.__str__:
                klass_name = type(self).__name__
                raise RuntimeError("%s.__unicode__ is aliased to __str__. Did"
                                   " you apply @python_2_unicode_compatible"
                                   " without defining __str__?"
                                   % klass_name)
            return force_text(self).encode('utf-8')
        return '%s object' % self.__class__.__name__

    def __eq__(self, other):
        # Two instances are equal when they are the same concrete class and
        # share a primary key value.
        return isinstance(other, self.__class__) and \
            self._get_pk_val() == other._get_pk_val()

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Consistent with __eq__: hash on the primary key value.
        return hash(self._get_pk_val())

    def __reduce__(self):
        """
        Provides pickling support. Normally, this just dispatches to Python's
        standard handling. However, for models with deferred field loading, we
        need to do things manually, as they're dynamically created classes and
        only module-level classes can be pickled by the default path.
        """
        if not self._deferred:
            return super(Model, self).__reduce__()
        data = self.__dict__
        defers = []
        for field in self._meta.fields:
            if isinstance(self.__class__.__dict__.get(field.attname),
                          DeferredAttribute):
                defers.append(field.attname)
        model = self._meta.proxy_for_model
        return (model_unpickle, (model, defers), data)

    def _get_pk_val(self, meta=None):
        # Primary key value as seen through ``meta`` (defaults to this
        # model's own _meta; differs for multi-table inheritance parents).
        if not meta:
            meta = self._meta
        return getattr(self, meta.pk.attname)

    def _set_pk_val(self, value):
        return setattr(self, self._meta.pk.attname, value)

    pk = property(_get_pk_val, _set_pk_val)

    def serializable_value(self, field_name):
        """
        Returns the value of the field name for this instance. If the field is
        a foreign key, returns the id value, instead of the object. If there's
        no Field object with this name on the model, the model attribute's
        value is returned directly.

        Used to serialize a field's value (in the serializer, or form output,
        for example). Normally, you would just access the attribute directly
        and not use this method.
        """
        try:
            field = self._meta.get_field_by_name(field_name)[0]
        except FieldDoesNotExist:
            return getattr(self, field_name)
        return getattr(self, field.attname)

    def save(self, force_insert=False, force_update=False, using=None,
             update_fields=None):
        """
        Saves the current instance. Override this in a subclass if you want to
        control the saving process.

        The 'force_insert' and 'force_update' parameters can be used to insist
        that the "save" must be an SQL insert or update (or equivalent for
        non-SQL backends), respectively. Normally, they should not be set.
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        if force_insert and (force_update or update_fields):
            raise ValueError(
                "Cannot force both insert and updating in model saving.")

        if update_fields is not None:
            # If update_fields is empty, skip the save. We do also check for
            # no-op saves later on for inheritance cases. This bailout is
            # still needed for skipping signal sending.
            if len(update_fields) == 0:
                return

            update_fields = frozenset(update_fields)
            field_names = set()

            # Collect every valid (non-pk) field name/attname so unknown
            # entries in update_fields can be rejected below.
            for field in self._meta.fields:
                if not field.primary_key:
                    field_names.add(field.name)

                    if field.name != field.attname:
                        field_names.add(field.attname)

            non_model_fields = update_fields.difference(field_names)

            if non_model_fields:
                raise ValueError("The following fields do not exist in this "
                                 "model or are m2m fields: %s"
                                 % ', '.join(non_model_fields))

        # If saving to the same database, and this model is deferred, then
        # automatically do a "update_fields" save on the loaded fields.
        elif not force_insert and self._deferred and using == self._state.db:
            field_names = set()
            for field in self._meta.fields:
                if not field.primary_key and not hasattr(field, 'through'):
                    field_names.add(field.attname)
            deferred_fields = [
                f.attname for f in self._meta.fields
                if f.attname not in self.__dict__ and isinstance(
                    self.__class__.__dict__[f.attname], DeferredAttribute)
            ]

            loaded_fields = field_names.difference(deferred_fields)
            if loaded_fields:
                update_fields = frozenset(loaded_fields)

        self.save_base(using=using, force_insert=force_insert,
                       force_update=force_update, update_fields=update_fields)
    save.alters_data = True

    def save_base(self, raw=False, cls=None, origin=None, force_insert=False,
                  force_update=False, using=None, update_fields=None):
        """
        Does the heavy-lifting involved in saving. Subclasses shouldn't need to
        override this method. It's separate from save() in order to hide the
        need for overrides of save() to pass around internal-only parameters
        ('raw', 'cls', and 'origin').
        """
        using = using or router.db_for_write(self.__class__, instance=self)
        assert not (force_insert and (force_update or update_fields))
        assert update_fields is None or len(update_fields) > 0
        if cls is None:
            cls = self.__class__
            meta = cls._meta
            if not meta.proxy:
                origin = cls
        else:
            meta = cls._meta

        if origin and not meta.auto_created:
            signals.pre_save.send(sender=origin, instance=self, raw=raw,
                                  using=using, update_fields=update_fields)

        # If we are in a raw save, save the object exactly as presented.
        # That means that we don't try to be smart about saving attributes
        # that might have come from the parent class - we just save the
        # attributes we have been given to the class we have been given.
        # We also go through this process to defer the save of proxy objects
        # to their actual underlying model.
        if not raw or meta.proxy:
            if meta.proxy:
                org = cls
            else:
                org = None
            for parent, field in meta.parents.items():
                # At this point, parent's primary key field may be unknown
                # (for example, from administration form which doesn't fill
                # this field). If so, fill it.
                if field and getattr(
                        self, parent._meta.pk.attname) is None and getattr(
                        self, field.attname) is not None:
                    setattr(self, parent._meta.pk.attname,
                            getattr(self, field.attname))

                self.save_base(cls=parent, origin=org, using=using,
                               update_fields=update_fields)

                if field:
                    setattr(self, field.attname, self._get_pk_val(parent._meta))
                    # Since we didn't have an instance of the parent handy, we
                    # set attname directly, bypassing the descriptor.
                    # Invalidate the related object cache, in case it's been
                    # accidentally populated. A fresh instance will be
                    # re-built from the database if necessary.
                    cache_name = field.get_cache_name()
                    if hasattr(self, cache_name):
                        delattr(self, cache_name)

            if meta.proxy:
                return

        if not meta.proxy:
            non_pks = [f for f in meta.local_fields if not f.primary_key]

            if update_fields:
                non_pks = [
                    f for f in non_pks
                    if f.name in update_fields or f.attname in update_fields
                ]

            # First, try an UPDATE. If that doesn't update anything, do an INSERT.
            pk_val = self._get_pk_val(meta)
            pk_set = pk_val is not None
            record_exists = True
            manager = cls._base_manager
            if pk_set:
                # Determine if we should do an update (pk already exists, forced update,
                # no force_insert)
                if ((force_update or update_fields)
                        or (not force_insert and
                            manager.using(using).filter(pk=pk_val).exists())):
                    if force_update or non_pks:
                        values = [(f, None, (raw and getattr(self, f.attname)
                                             or f.pre_save(self, False)))
                                  for f in non_pks]
                        if values:
                            rows = manager.using(using).filter(
                                pk=pk_val)._update(values)
                            if force_update and not rows:
                                raise DatabaseError(
                                    "Forced update did not affect any rows.")
                            if update_fields and not rows:
                                raise DatabaseError(
                                    "Save with update_fields did not affect any rows."
                                )
                else:
                    record_exists = False
            if not pk_set or not record_exists:
                if meta.order_with_respect_to:
                    # If this is a model with an order_with_respect_to
                    # autopopulate the _order field
                    field = meta.order_with_respect_to
                    order_value = manager.using(using).filter(
                        **{
                            field.name: getattr(self, field.attname)
                        }).count()
                    self._order = order_value

                fields = meta.local_fields
                if not pk_set:
                    if force_update or update_fields:
                        raise ValueError(
                            "Cannot force an update in save() with no primary key."
                        )
                    # Let the database assign the auto pk.
                    fields = [
                        f for f in fields if not isinstance(f, AutoField)
                    ]

                record_exists = False

                update_pk = bool(meta.has_auto_field and not pk_set)
                result = manager._insert([self], fields=fields,
                                         return_id=update_pk, using=using,
                                         raw=raw)

                if update_pk:
                    setattr(self, meta.pk.attname, result)
            transaction.commit_unless_managed(using=using)

        # Store the database on which the object was saved
        self._state.db = using
        # Once saved, this is no longer a to-be-added instance.
        self._state.adding = False

        # Signal that the save is complete
        if origin and not meta.auto_created:
            signals.post_save.send(sender=origin, instance=self,
                                   created=(not record_exists),
                                   update_fields=update_fields, raw=raw,
                                   using=using)

    save_base.alters_data = True

    def delete(self, using=None):
        # Collector cascades the delete through related objects per each
        # field's on_delete behaviour.
        using = using or router.db_for_write(self.__class__, instance=self)
        assert self._get_pk_val(
        ) is not None, "%s object can't be deleted because its %s attribute is set to None." % (
            self._meta.object_name, self._meta.pk.attname)

        collector = Collector(using=using)
        collector.collect([self])
        collector.delete()

    delete.alters_data = True

    def _get_FIELD_display(self, field):
        # Backing implementation for the generated get_<field>_display()
        # methods; falls back to the raw value for unknown choices.
        value = getattr(self, field.attname)
        return force_text(dict(field.flatchoices).get(value, value),
                          strings_only=True)

    def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
        # Backing implementation for get_next_by_<field>() /
        # get_previous_by_<field>(); pk is used as a tie-breaker for rows
        # with an equal field value.
        if not self.pk:
            raise ValueError(
                "get_next/get_previous cannot be used on unsaved objects.")
        op = is_next and 'gt' or 'lt'
        order = not is_next and '-' or ''
        param = force_text(getattr(self, field.attname))
        q = Q(**{'%s__%s' % (field.name, op): param})
        q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk})
        qs = self.__class__._default_manager.using(self._state.db).filter(
            **kwargs).filter(q).order_by('%s%s' % (order, field.name),
                                         '%spk' % order)
        try:
            return qs[0]
        except IndexError:
            raise self.DoesNotExist("%s matching query does not exist."
                                    % self.__class__._meta.object_name)

    def _get_next_or_previous_in_order(self, is_next):
        # Sibling navigation for models with order_with_respect_to; the
        # result is memoized per direction on the instance.
        cachename = "__%s_order_cache" % is_next
        if not hasattr(self, cachename):
            op = is_next and 'gt' or 'lt'
            order = not is_next and '-_order' or '_order'
            order_field = self._meta.order_with_respect_to
            obj = self._default_manager.filter(
                **{
                    order_field.name: getattr(self, order_field.attname)
                }).filter(
                **{
                    '_order__%s' % op:
                        self._default_manager.values('_order').filter(
                            **{self._meta.pk.name: self.pk})
                }).order_by(order)[:1].get()
            setattr(self, cachename, obj)
        return getattr(self, cachename)

    def prepare_database_save(self, unused):
        # When this instance is used as a query value, substitute its pk.
        return self.pk

    def clean(self):
        """
        Hook for doing any extra model-wide validation after clean() has been
        called on every field by self.clean_fields. Any ValidationError raised
        by this method will not be associated with a particular field; it will
        have a special-case association with the field defined by
        NON_FIELD_ERRORS.
        """
        pass

    def validate_unique(self, exclude=None):
        """
        Checks unique constraints on the model and raises ``ValidationError``
        if any failed.
        """
        unique_checks, date_checks = self._get_unique_checks(exclude=exclude)

        errors = self._perform_unique_checks(unique_checks)
        date_errors = self._perform_date_checks(date_checks)

        for k, v in date_errors.items():
            errors.setdefault(k, []).extend(v)

        if errors:
            raise ValidationError(errors)

    def _get_unique_checks(self, exclude=None):
        """
        Gather a list of checks to perform. Since validate_unique could be
        called from a ModelForm, some fields may have been excluded; we can't
        perform a unique check on a model that is missing fields involved
        in that check.
        Fields that did not validate should also be excluded, but they need
        to be passed in via the exclude argument.
        """
        if exclude is None:
            exclude = []
        unique_checks = []

        unique_togethers = [(self.__class__, self._meta.unique_together)]
        for parent_class in self._meta.parents.keys():
            if parent_class._meta.unique_together:
                unique_togethers.append(
                    (parent_class, parent_class._meta.unique_together))

        for model_class, unique_together in unique_togethers:
            for check in unique_together:
                for name in check:
                    # If this is an excluded field, don't add this check.
                    if name in exclude:
                        break
                else:
                    unique_checks.append((model_class, tuple(check)))

        # These are checks for the unique_for_<date/year/month>.
        date_checks = []

        # Gather a list of checks for fields declared as unique and add them to
        # the list of checks.

        fields_with_class = [(self.__class__, self._meta.local_fields)]
        for parent_class in self._meta.parents.keys():
            fields_with_class.append(
                (parent_class, parent_class._meta.local_fields))

        for model_class, fields in fields_with_class:
            for f in fields:
                name = f.name
                if name in exclude:
                    continue
                if f.unique:
                    unique_checks.append((model_class, (name, )))
                if f.unique_for_date and f.unique_for_date not in exclude:
                    date_checks.append(
                        (model_class, 'date', name, f.unique_for_date))
                if f.unique_for_year and f.unique_for_year not in exclude:
                    date_checks.append(
                        (model_class, 'year', name, f.unique_for_year))
                if f.unique_for_month and f.unique_for_month not in exclude:
                    date_checks.append(
                        (model_class, 'month', name, f.unique_for_month))
        return unique_checks, date_checks

    def _perform_unique_checks(self, unique_checks):
        # Run each (model_class, field-tuple) uniqueness check against the
        # database; returns a field-name -> [messages] error dict.
        errors = {}

        for model_class, unique_check in unique_checks:
            # Try to look up an existing object with the same values as this
            # object's values for all the unique field.

            lookup_kwargs = {}
            for field_name in unique_check:
                f = self._meta.get_field(field_name)
                lookup_value = getattr(self, f.attname)
                if lookup_value is None:
                    # no value, skip the lookup
                    continue
                if f.primary_key and not self._state.adding:
                    # no need to check for unique primary key when editing
                    continue
                lookup_kwargs[str(field_name)] = lookup_value

            # some fields were skipped, no reason to do the check
            if len(unique_check) != len(lookup_kwargs):
                continue

            qs = model_class._default_manager.filter(**lookup_kwargs)

            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            # Note that we need to use the pk as defined by model_class, not
            # self.pk. These can be different fields because model inheritance
            # allows single model to have effectively multiple primary keys.
            # Refs #17615.
            model_class_pk = self._get_pk_val(model_class._meta)
            if not self._state.adding and model_class_pk is not None:
                qs = qs.exclude(pk=model_class_pk)
            if qs.exists():
                if len(unique_check) == 1:
                    key = unique_check[0]
                else:
                    key = NON_FIELD_ERRORS
                errors.setdefault(key, []).append(
                    self.unique_error_message(model_class, unique_check))

        return errors

    def _perform_date_checks(self, date_checks):
        # Check unique_for_date/year/month constraints against the database;
        # returns a field-name -> [messages] error dict.
        errors = {}
        for model_class, lookup_type, field, unique_for in date_checks:
            lookup_kwargs = {}
            # there's a ticket to add a date lookup, we can remove this special
            # case if that makes it's way in
            date = getattr(self, unique_for)
            if date is None:
                continue
            if lookup_type == 'date':
                lookup_kwargs['%s__day' % unique_for] = date.day
                lookup_kwargs['%s__month' % unique_for] = date.month
                lookup_kwargs['%s__year' % unique_for] = date.year
            else:
                lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(
                    date, lookup_type)
            lookup_kwargs[field] = getattr(self, field)

            qs = model_class._default_manager.filter(**lookup_kwargs)
            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            if not self._state.adding and self.pk is not None:
                qs = qs.exclude(pk=self.pk)

            if qs.exists():
                errors.setdefault(field, []).append(
                    self.date_error_message(lookup_type, field, unique_for))

        return errors

    def date_error_message(self, lookup_type, field, unique_for):
        # Human-readable message for a failed unique_for_<date> check.
        opts = self._meta
        return _(
            "%(field_name)s must be unique for %(date_field)s %(lookup)s.") % {
            'field_name':
                six.text_type(capfirst(opts.get_field(field).verbose_name)),
            'date_field':
                six.text_type(capfirst(
                    opts.get_field(unique_for).verbose_name)),
            'lookup': lookup_type,
        }

    def unique_error_message(self, model_class, unique_check):
        # Human-readable message for a failed unique / unique_together check.
        opts = model_class._meta
        model_name = capfirst(opts.verbose_name)

        # A unique field
        if len(unique_check) == 1:
            field_name = unique_check[0]
            field = opts.get_field(field_name)
            field_label = capfirst(field.verbose_name)
            # Insert the error into the error dict, very sneaky
            return field.error_messages['unique'] % {
                'model_name': six.text_type(model_name),
                'field_label': six.text_type(field_label)
            }
        # unique_together
        else:
            field_labels = [
                capfirst(opts.get_field(f).verbose_name) for f in unique_check
            ]
            field_labels = get_text_list(field_labels, _('and'))
            return _(
                "%(model_name)s with this %(field_label)s already exists.") % {
                'model_name': six.text_type(model_name),
                'field_label': six.text_type(field_labels)
            }

    def full_clean(self, exclude=None):
        """
        Calls clean_fields, clean, and validate_unique, on the model,
        and raises a ``ValidationError`` for any errors that occured.
        """
        errors = {}
        if exclude is None:
            exclude = []

        try:
            self.clean_fields(exclude=exclude)
        except ValidationError as e:
            errors = e.update_error_dict(errors)

        # Form.clean() is run even if other validation fails, so do the
        # same with Model.clean() for consistency.
        try:
            self.clean()
        except ValidationError as e:
            errors = e.update_error_dict(errors)

        # Run unique checks, but only for fields that passed validation.
        for name in errors.keys():
            if name != NON_FIELD_ERRORS and name not in exclude:
                exclude.append(name)
        try:
            self.validate_unique(exclude=exclude)
        except ValidationError as e:
            errors = e.update_error_dict(errors)

        if errors:
            raise ValidationError(errors)

    def clean_fields(self, exclude=None):
        """
        Cleans all fields and raises a ValidationError containing message_dict
        of all validation errors if any occur.
        """
        if exclude is None:
            exclude = []

        errors = {}
        for f in self._meta.fields:
            if f.name in exclude:
                continue
            # Skip validation for empty fields with blank=True. The developer
            # is responsible for making sure they have a valid value.
            raw_value = getattr(self, f.attname)
            if f.blank and raw_value in validators.EMPTY_VALUES:
                continue
            try:
                setattr(self, f.attname, f.clean(raw_value, self))
            except ValidationError as e:
                errors[f.name] = e.messages

        if errors:
            raise ValidationError(errors)
class JsonAdapter(psycopg2.extras.Json): def dumps(self, obj): return json.dumps(obj, cls=get_encoder_class()) psycopg2.extensions.register_adapter(dict, JsonAdapter) psycopg2.extras.register_default_json(loads=json.loads) # so that psycopg2 knows also to convert jsonb fields correctly # http://schinckel.net/2014/05/24/python,-postgres-and-jsonb/ psycopg2.extras.register_json(loads=json.loads, oid=3802, array_oid=3807) if django.VERSION < (1, 8): base_field_class = six.with_metaclass(models.SubfieldBase, models.Field) else: base_field_class = models.Field class JsonField(base_field_class): empty_strings_allowed = False def __init__(self, *args, **kwargs): self._options = kwargs.pop("options", {}) super(JsonField, self).__init__(*args, **kwargs) def db_type(self, connection): if get_version(connection) < 90200: raise RuntimeError("django_pgjson does not supports postgresql version < 9.2") return "json"
# -*- coding: utf-8 -*- from __future__ import unicode_literals from decimal import Decimal from django import VERSION as DJANGO_VERSION from django.core.exceptions import ValidationError from django.db import models from django.utils.six import with_metaclass from django.utils.translation import ugettext_lazy as _ from .forms import LatLongField as FormLatLongField if DJANGO_VERSION < (1, 8): DjangoModelFieldBase = with_metaclass(models.SubfieldBase, models.Field) else: DjangoModelFieldBase = models.Field class LatLong(object): def __init__(self, latitude=0.0, longitude=0.0): self.latitude = Decimal(latitude) self.longitude = Decimal(longitude) @staticmethod def _equals_to_the_cent(a, b): return round(a, 6) == round(b, 6) @staticmethod def _no_equals_to_the_cent(a, b):