class AppDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each app as
    specified in the source_dir attribute of the given storage class.
    """
    storage_class = AppStaticStorage

    def __init__(self, apps=None, *args, **kwargs):
        # Apps (in registration order) whose storage location exists on disk.
        self.apps = []
        # Maps each handled app module path to its storage instance.
        self.storages = SortedDict()
        if apps is None:
            apps = settings.INSTALLED_APPS
        for app in apps:
            storage = self.storage_class(app)
            if not os.path.isdir(storage.location):
                continue
            self.storages[app] = storage
            if app not in self.apps:
                self.apps.append(app)
        super(AppDirectoriesFinder, self).__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all app storages.
        """
        for storage in self.storages.itervalues():
            # Skip storages whose location does not exist.
            if not storage.exists(''):
                continue
            for path in utils.get_files(storage, ignore_patterns):
                yield path, storage

    def find(self, path, all=False):
        """
        Looks for files in the app directories.
        """
        matches = []
        for app in self.apps:
            found = self.find_in_app(app, path)
            if not found:
                continue
            if not all:
                return found
            matches.append(found)
        return matches

    def find_in_app(self, app, path):
        """
        Find a requested static file in an app's static locations.
        """
        storage = self.storages.get(app, None)
        if not storage:
            return
        if storage.prefix:
            prefix = '%s%s' % (storage.prefix, os.sep)
            if not path.startswith(prefix):
                return None
            path = path[len(prefix):]
        # Only try to find a file if the source dir actually exists.
        if storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path
def build_tree_structure(self, qs=None):
    """
    Semi-efficiently builds a python structure representing this tree.

    Returns a list containing the root nodes. Every returned node has the
    following attributes prefetched:
        children_list -- a python list of prefetched children
        parent
    """
    if qs is None:
        qs = self.live()

    by_path = SortedDict()
    roots = []
    # First pass: index every node by its path and pick out the roots.
    for node in qs:
        node.children_list = []
        by_path[node.path] = node
        if not node.parent_id:
            roots.append(node)

    # Second pass: wire each non-root node up to its parent.
    for node in by_path.itervalues():
        parts = node.path.split(node.DELIMETER)
        if len(parts) <= 1:
            continue
        parent_key = node.DELIMETER.join(parts[:-1])
        try:
            parent = by_path[parent_key]
        except KeyError:
            # Parent is inactive, therefore this node is inactive too.
            pass
        else:
            node.parent = parent
            parent.children_list.append(node)

    return roots
class AppDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each app as
    specified in the source_dir attribute of the given storage class.
    """
    storage_class = AppStaticStorage

    def __init__(self, apps=None, *args, **kwargs):
        # Ordered list of apps whose static directory actually exists.
        self.apps = []
        # Mapping of app module paths to storage instances.
        self.storages = SortedDict()
        if apps is None:
            apps = settings.INSTALLED_APPS
        for app in apps:
            storage = self.storage_class(app)
            if not os.path.isdir(storage.location):
                continue
            self.storages[app] = storage
            if app not in self.apps:
                self.apps.append(app)
        super(AppDirectoriesFinder, self).__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all app storages.
        """
        for storage in self.storages.itervalues():
            # Check that the storage location exists before walking it.
            if not storage.exists(''):
                continue
            for path in utils.get_files(storage, ignore_patterns):
                yield path, storage

    def find(self, path, all=False):
        """
        Looks for files in the app directories.
        """
        matches = []
        for app in self.apps:
            found = self.find_in_app(app, path)
            if not found:
                continue
            if not all:
                return found
            matches.append(found)
        return matches

    def find_in_app(self, app, path):
        """
        Find a requested static file in an app's static locations.
        """
        storage = self.storages.get(app, None)
        if not storage:
            return
        if storage.prefix:
            prefix = '%s%s' % (storage.prefix, os.sep)
            if not path.startswith(prefix):
                return None
            path = path[len(prefix):]
        # Only try to find a file if the source dir actually exists.
        if storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path
def show_navigation(context):
    """Build the navigation category/link structure for a nexus site."""
    site = context.get('nexus_site', NexusModule.get_global('site'))
    request = NexusModule.get_request()

    # Seed the mapping with the site's predeclared categories.
    categories = SortedDict()
    for key, label in site.get_categories():
        categories[key] = {'label': label, 'links': []}

    for namespace, entry in site._registry.iteritems():
        module, category = entry
        if not module.home_url:
            continue
        home_url = reverse(module.get_home_url(), current_app=module.name)
        active = request.path.startswith(home_url)
        if category not in categories:
            label = site.get_category_label(category) if category else None
            categories[category] = {'label': label, 'links': []}
        bucket = categories[category]
        bucket['links'].append((module.get_title(), home_url, active))
        bucket['active'] = active

    return {
        'nexus_site': site,
        'category_link_set': categories.itervalues(),
    }
def get_initkwargs(cls, form_list, initial_dict=None, instance_dict=None, condition_dict=None, *args, **kwargs):
    """
    Creates a dict with all needed parameters for the form wizard instances.

    * `form_list` - is a list of forms. The list entries can be single form
      classes or tuples of (`step_name`, `form_class`). If you pass a list
      of forms, the formwizard will convert the class list to
      (`zero_based_counter`, `form_class`). This is needed to access the
      form for a specific step.
    * `initial_dict` - contains a dictionary of initial data dictionaries.
      The key should be equal to the `step_name` in the `form_list` (or
      the str of the zero based counter - if no step_names were added in
      the `form_list`).
    * `instance_dict` - contains a dictionary of instance objects. This
      is only used when `ModelForm`s are used. The key should be equal to
      the `step_name` in the `form_list`. Same rules as for `initial_dict`
      apply.
    * `condition_dict` - contains a dictionary of boolean values or
      callables. If the value for a specific `step_name` is callable it
      will be called with the formwizard instance as the only argument.
      If the return value is true, the step's form will be used.
    """
    kwargs.update({
        'initial_dict': initial_dict or {},
        'instance_dict': instance_dict or {},
        'condition_dict': condition_dict or {},
    })
    init_form_list = SortedDict()

    assert len(form_list) > 0, 'at least one form is needed'

    # Normalize the passed form list into a (step name -> form) mapping.
    for i, form in enumerate(form_list):
        if isinstance(form, (list, tuple)):
            # (step_name, form_class) tuples keep their explicit name.
            init_form_list[unicode(form[0])] = form[1]
        else:
            # Bare form classes are keyed by a zero based counter.
            init_form_list[unicode(i)] = form

    # If any form contains a FileField, the formwizard needs a file_storage;
    # fall back to the default storage when the subclass did not set one.
    for form in init_form_list.itervalues():
        if issubclass(form, formsets.BaseFormSet):
            # FormSet/ModelFormSet wrap the real form class; unwrap it.
            form = form.form
        for field in form.base_fields.itervalues():
            if (isinstance(field, forms.FileField) and
                    not hasattr(cls, 'file_storage')):
                cls.file_storage = default_storage

    # Build the kwargs for the formwizard instances.
    kwargs['form_list'] = init_form_list
    return kwargs
def get_for_user(self, user, access=None, access_groups=True, with_projects=False):
    """
    Returns a SortedDict of all teams a user has some level of access to.

    Each <Team> returned has an ``access_type`` attribute which holds the
    MEMBER_TYPE value.
    """
    from sentry.models import TeamMember, TeamStatus, AccessGroup, Project

    results = SortedDict()
    if not user.is_authenticated():
        return results

    all_teams = set()

    # Direct team memberships.
    member_qs = TeamMember.objects.filter(user=user).select_related('team')
    if access is not None:
        member_qs = member_qs.filter(type__lte=access)
    for membership in member_qs:
        team = membership.team
        team.access_type = membership.type
        all_teams.add(team)

    # Memberships granted through access groups.
    if access_groups:
        group_qs = AccessGroup.objects.filter(members=user).select_related('team')
        if access is not None:
            group_qs = group_qs.filter(type__lte=access)
        for group in group_qs:
            team = group.team
            team.access_type = group.type
            all_teams.add(team)

    # In public mode every team is reachable with plain member access.
    if settings.SENTRY_PUBLIC and access is None:
        for team in self.iterator():
            all_teams.add(team)
            team.access_type = MEMBER_USER

    for team in sorted(all_teams, key=lambda t: t.name.lower()):
        if team.status == TeamStatus.VISIBLE:
            results[team.slug] = team

    if with_projects:
        # These kinds of queries make people sad :(
        expanded = SortedDict()
        for team in results.itervalues():
            project_list = list(Project.objects.get_for_user(user, team=team))
            expanded[team.slug] = (team, project_list)
        results = expanded

    return results
def get_initkwargs(cls, form_list, initial_dict=None, instance_dict=None, condition_dict=None, *args, **kwargs):
    """
    Creates a dict with all needed parameters for the form wizard instances.

    * `form_list` - is a list of forms. The list entries can be single form
      classes or tuples of (`step_name`, `form_class`). If you pass a list
      of forms, the formwizard will convert the class list to
      (`zero_based_counter`, `form_class`). This is needed to access the
      form for a specific step.
    * `initial_dict` - contains a dictionary of initial data dictionaries.
      The key should be equal to the `step_name` in the `form_list` (or
      the str of the zero based counter - if no step_names were added in
      the `form_list`).
    * `instance_dict` - contains a dictionary of instance objects. This
      is only used when `ModelForm`s are used. The key should be equal to
      the `step_name` in the `form_list`. Same rules as for `initial_dict`
      apply.
    * `condition_dict` - contains a dictionary of boolean values or
      callables. If the value for a specific `step_name` is callable it
      will be called with the formwizard instance as the only argument.
      If the return value is true, the step's form will be used.

    Raises NoFileStorageConfigured when a FileField is used but the
    wizard subclass does not define ``file_storage``.
    """
    kwargs.update({
        'initial_dict': initial_dict or {},
        'instance_dict': instance_dict or {},
        'condition_dict': condition_dict or {},
    })
    init_form_list = SortedDict()

    assert len(form_list) > 0, 'at least one form is needed'

    # Normalize the passed form list into a (step name -> form) mapping.
    for i, form in enumerate(form_list):
        if isinstance(form, (list, tuple)):
            # (step_name, form_class) tuples keep their explicit name.
            init_form_list[unicode(form[0])] = form[1]
        else:
            # Bare form classes are keyed by a zero based counter.
            init_form_list[unicode(i)] = form

    # If any form contains a FileField, a file_storage must have been
    # configured on the wizard (by subclassing).
    for form in init_form_list.itervalues():
        if issubclass(form, formsets.BaseFormSet):
            # FormSet/ModelFormSet wrap the real form class; unwrap it.
            form = form.form
        for field in form.base_fields.itervalues():
            if (isinstance(field, forms.FileField) and
                    not hasattr(cls, 'file_storage')):
                raise NoFileStorageConfigured

    # Build the kwargs for the formwizard instances.
    kwargs['form_list'] = init_form_list
    return kwargs
def get_for_user(self, user, access=None, access_groups=True, with_projects=False):
    """
    Returns a SortedDict of all teams a user has some level of access to.

    Each <Team> returned has an ``access_type`` attribute which holds the
    MEMBER_TYPE value.
    """
    from sentry.models import TeamMember, TeamStatus, AccessGroup, Project

    results = SortedDict()
    if not user.is_authenticated():
        return results

    all_teams = set()

    # Direct team memberships.
    member_qs = TeamMember.objects.filter(user=user).select_related('team')
    if access is not None:
        member_qs = member_qs.filter(type__lte=access)
    for membership in member_qs:
        team = membership.team
        team.access_type = membership.type
        all_teams.add(team)

    # Memberships granted through access groups.
    if access_groups:
        group_qs = AccessGroup.objects.filter(members=user).select_related('team')
        if access is not None:
            group_qs = group_qs.filter(type__lte=access)
        for group in group_qs:
            team = group.team
            team.access_type = group.type
            all_teams.add(team)

    # In public mode every team is reachable with plain member access.
    if settings.SENTRY_PUBLIC and access is None:
        for team in self.iterator():
            all_teams.add(team)
            team.access_type = MEMBER_USER

    for team in sorted(all_teams, key=lambda t: t.name.lower()):
        if team.status == TeamStatus.VISIBLE:
            results[team.slug] = team

    if with_projects:
        # These kinds of queries make people sad :(
        expanded = SortedDict()
        for team in results.itervalues():
            project_list = list(Project.objects.get_for_user(user, team=team))
            expanded[team.slug] = (team, project_list)
        results = expanded

    return results
class AppDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each app as
    specified in the source_dir attribute of the given storage class.
    """
    storage_class = AppStaticStorage

    def __init__(self, apps=None, *args, **kwargs):
        # Maps app modules to appropriate storage instances.
        self.storages = SortedDict()
        self.apps = apps if apps is not None else models.get_apps()
        for app in self.apps:
            self.storages[app] = self.storage_class(app)
        super(AppDirectoriesFinder, self).__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all app storages.
        """
        for storage in self.storages.itervalues():
            # Skip storages whose location does not exist.
            if not storage.exists(''):
                continue
            prefix = storage.get_prefix()
            for path in utils.get_files(storage, ignore_patterns):
                yield path, prefix, storage

    def find(self, path, all=False):
        """
        Looks for files in the app directories.
        """
        matches = []
        for app in self.apps:
            found = self.find_in_app(app, path)
            if not found:
                continue
            if not all:
                return found
            matches.append(found)
        return matches

    def find_in_app(self, app, path):
        """
        Find a requested static file in an app's static locations.
        """
        storage = self.storages[app]
        prefix = storage.get_prefix()
        if prefix:
            prefix = '%s%s' % (prefix, os.sep)
            if not path.startswith(prefix):
                return None
            path = path[len(prefix):]
        # Only try to find a file if the source dir actually exists.
        if storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path
class AppLayerFinder(BaseFinder):
    """Finds app static files that are split into layers (settings.LAYERS)."""
    storage_class = LayerStaticStorage

    def __init__(self, apps=None, *args, **kwargs):
        layers = getattr(settings, "LAYERS", {})
        self.apps = []
        # Nested mapping: app -> {layer name -> storage instance}.
        self.storages = SortedDict()
        if apps is None:
            apps = settings.INSTALLED_APPS
        for app in apps:
            for layer in layers.keys():
                storage = self.storage_class(app, layer)
                if not os.path.isdir(storage.location):
                    continue
                if app not in self.apps:
                    self.apps.append(app)
                self.storages.setdefault(app, {})[layer] = storage
        super(AppLayerFinder, self).__init__(*args, **kwargs)

    def find(self, path, all=False, layer=None):
        """
        Looks for files in the app directories.
        """
        matches = []
        for app in self.apps:
            found = self.find_in_app(app, path, layer)
            if not found:
                continue
            if not all:
                return found
            matches.append(found)
        return matches

    def find_in_app(self, app, path, layer=None):
        """Find a requested static file in one app's storage for ``layer``."""
        layer = layer or get_active_layer(get_current_request())
        storage = self.storages.get(app, {}).get(layer, None)
        if storage and layer and storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path

    def list(self, ignore_patterns, layer=None):
        """
        List all files in all app storages for the given layer.
        """
        if not layer:
            return
        for per_layer in self.storages.itervalues():
            storage = per_layer.get(layer, None)
            if storage and storage.exists(''):
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage
class AppDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the ``media`` directory of each app.
    """
    storage_class = AppStaticStorage

    def __init__(self, apps=None, *args, **kwargs):
        # Maps app modules to appropriate storage instances.
        self.storages = SortedDict()
        self.apps = apps if apps is not None else models.get_apps()
        for app in self.apps:
            self.storages[app] = self.storage_class(app)
        super(AppDirectoriesFinder, self).__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all app storages.
        """
        for storage in self.storages.itervalues():
            # Skip storages whose location does not exist.
            if not storage.exists(''):
                continue
            prefix = storage.get_prefix()
            for path in utils.get_files(storage, ignore_patterns):
                yield path, prefix, storage

    def find(self, path, all=False):
        """
        Looks for files in the app directories.
        """
        matches = []
        for app in self.apps:
            found = self.find_in_app(app, path)
            if not found:
                continue
            if not all:
                return found
            matches.append(found)
        return matches

    def find_in_app(self, app, path):
        """
        Find a requested static file in an app's media locations.
        """
        storage = self.storages[app]
        prefix = storage.get_prefix()
        if prefix:
            prefix = '%s/' % prefix
            if not path.startswith(prefix):
                return None
            path = path[len(prefix):]
        # Only try to find a file if the source dir actually exists.
        if storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path
class AppLayerFinder(BaseFinder):
    """Finds app static files that are split into layers (settings.LAYERS)."""
    storage_class = LayerStaticStorage

    def __init__(self, apps=None, *args, **kwargs):
        layers = getattr(settings, "LAYERS", {})
        self.apps = []
        # Nested mapping: app -> {layer name -> storage instance}.
        self.storages = SortedDict()
        if apps is None:
            apps = settings.INSTALLED_APPS
        for app in apps:
            for layer in layers.keys():
                storage = self.storage_class(app, layer)
                if not os.path.isdir(storage.location):
                    continue
                if app not in self.apps:
                    self.apps.append(app)
                self.storages.setdefault(app, {})[layer] = storage
        super(AppLayerFinder, self).__init__(*args, **kwargs)

    def find(self, path, all=False, layer=None):
        """
        Looks for files in the app directories.
        """
        matches = []
        for app in self.apps:
            found = self.find_in_app(app, path, layer)
            if not found:
                continue
            if not all:
                return found
            matches.append(found)
        return matches

    def find_in_app(self, app, path, layer=None):
        """Find a requested static file in one app's storage for ``layer``."""
        layer = layer or get_active_layer(get_current_request())
        storage = self.storages.get(app, {}).get(layer, None)
        if storage and layer and storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path

    def list(self, ignore_patterns, layer=None):
        """
        List all files in all app storages for the given layer.
        """
        if not layer:
            return
        for per_layer in self.storages.itervalues():
            storage = per_layer.get(layer, None)
            if storage and storage.exists(''):
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage
def get_for_user(self, user, access=None, access_groups=True, with_projects=False):
    """
    Returns a SortedDict of all teams a user has some level of access to.

    Each <Team> returned has a ``membership`` attribute which holds the
    <TeamMember> instance.
    """
    from sentry.models import TeamMember, AccessGroup, Project

    results = SortedDict()
    if not user.is_authenticated():
        return results

    if settings.PUBLIC and access is None:
        # Public installs expose every team, ordered by name.
        for team in self.order_by('name').iterator():
            results[team.slug] = team
    else:
        all_teams = set()

        # Direct team memberships.
        member_qs = TeamMember.objects.filter(user=user).select_related('team')
        if access is not None:
            member_qs = member_qs.filter(type__lte=access)
        for membership in member_qs:
            all_teams.add(membership.team)

        # Memberships granted through access groups.
        if access_groups:
            group_qs = AccessGroup.objects.filter(members=user).select_related('team')
            if access is not None:
                group_qs = group_qs.filter(type__lte=access)
            for group in group_qs:
                all_teams.add(group.team)

        for team in sorted(all_teams, key=lambda t: t.name):
            results[team.slug] = team

    if with_projects:
        # These kinds of queries make people sad :(
        expanded = SortedDict()
        for team in results.itervalues():
            project_list = Project.objects.get_for_user(user, team=team)[:20]
            expanded[team.slug] = (team, project_list)
        results = expanded

    return results
def get_for_user(self, user, access=None, access_groups=True, with_projects=False):
    """
    Returns a SortedDict of all teams a user has some level of access to.

    Each <Team> returned has a ``membership`` attribute which holds the
    <TeamMember> instance.
    """
    from sentry.models import TeamMember, AccessGroup, Project

    results = SortedDict()
    if not user.is_authenticated():
        return results

    if settings.SENTRY_PUBLIC and access is None:
        # Public installs expose every team, ordered by name.
        for team in self.order_by('name').iterator():
            results[team.slug] = team
    else:
        all_teams = set()

        # Direct team memberships.
        member_qs = TeamMember.objects.filter(user=user).select_related('team')
        if access is not None:
            member_qs = member_qs.filter(type__lte=access)
        for membership in member_qs:
            all_teams.add(membership.team)

        # Memberships granted through access groups.
        if access_groups:
            group_qs = AccessGroup.objects.filter(members=user).select_related('team')
            if access is not None:
                group_qs = group_qs.filter(type__lte=access)
            for group in group_qs:
                all_teams.add(group.team)

        for team in sorted(all_teams, key=lambda t: t.name):
            results[team.slug] = team

    if with_projects:
        # These kinds of queries make people sad :(
        expanded = SortedDict()
        for team in results.itervalues():
            project_list = Project.objects.get_for_user(user, team=team)[:20]
            expanded[team.slug] = (team, project_list)
        results = expanded

    return results
def list_files_in_app(app_names, source_dir, ignore_patterns):
    """
    Yield (path, storage) pairs for every static file found under
    ``source_dir`` of each named app.

    ``app_names`` may contain duplicates; each app is handled once.
    Apps whose static source directory does not exist are skipped, and
    files matching ``ignore_patterns`` are filtered out by
    ``utils.get_files``.
    """
    storages = SortedDict()
    for app in app_names:
        app_storage = CustomAppStaticStorage(source_dir, app)
        # Only keep apps whose static directory exists on disk. Keying by
        # app also deduplicates repeated names, which makes the separate
        # `apps` bookkeeping list of the original implementation dead code.
        if os.path.isdir(app_storage.location):
            storages[app] = app_storage
    for storage in storages.itervalues():
        # Check that the storage location still exists before walking it.
        if storage.exists(''):
            for path in utils.get_files(storage, ignore_patterns):
                yield path, storage
def show_navigation(context):
    """Build the navigation category/link structure for a nexus site."""
    site = context.get('nexus_site', NexusModule.get_global('site'))
    request = NexusModule.get_request()

    # Seed the mapping with the site's predeclared categories.
    categories = SortedDict()
    for key, label in site.get_categories():
        categories[key] = {'label': label, 'links': []}

    for namespace, entry in site._registry.iteritems():
        module, category = entry
        # Skip modules the current user is not allowed to see.
        if module.permission and not request.user.has_perm(module.permission):
            continue
        home_url = None
        if 'request' in context:
            home_url = module.get_home_url(context['request'])
        if not home_url:
            continue
        active = request.path.startswith(home_url)
        if category not in categories:
            label = site.get_category_label(category) if category else None
            categories[category] = {'label': label, 'links': []}
        bucket = categories[category]
        bucket['links'].append((module.get_title(), home_url, active))
        bucket['active'] = active

    return {
        'nexus_site': site,
        'category_link_set': categories.itervalues(),
    }
def show_navigation(context):
    """Build the navigation category/link structure for the nexus site."""
    site = context['nexus_site']
    request = context['request']

    # Seed the mapping with the site's predeclared categories.
    categories = SortedDict()
    for key, label in site.get_categories():
        categories[key] = {'label': label, 'links': []}

    for namespace, entry in site._registry.iteritems():
        module, category = entry
        # Hidden modules and modules the module itself declines to show.
        if not module.home_url or not module.show(request):
            continue
        # Skip modules the current user is not allowed to see.
        if module.permission and not request.user.has_perm(module.permission):
            continue
        home_url = reverse(module.get_home_url(), current_app=module.name)
        active = request.path.startswith(home_url)
        if category not in categories:
            label = site.get_category_label(category) if category else None
            categories[category] = {'label': label, 'links': []}
        bucket = categories[category]
        bucket['links'].append((module.get_title(), home_url, active))
        bucket['active'] = active

    return {
        'nexus_site': site,
        'category_link_set': categories.itervalues(),
    }
def as_view(cls, steps=None, *args, **kwargs):  # pylint: ignore=E0213
    """
    Validate ``steps`` and build the kwargs for the formwizard instances.

    Each step is a (name, form-or-forms) pair; a single form is normalized
    to a one-element tuple. Raises ImproperlyConfigured for an empty or
    malformed ``steps``, and NoFileStorageConfigured when any form uses a
    FileField but ``cls.file_storage`` is not set.
    """
    steps = steps or cls.steps

    # validation
    view = '%s.%s' % (cls.__module__, cls.__name__)  # used in errors
    if len(steps) == 0:
        raise ImproperlyConfigured("`%s` requires at least one step." % view)
    if not all(
            (isinstance(i, (tuple, list)) and len(i) == 2 for i in steps)):
        raise ImproperlyConfigured("`%s.steps` poorly formed." % view)

    forms_dict = SortedDict()

    # populate forms, normalizing each step's forms to a tuple
    for name, forms in steps:
        if not isinstance(forms, (tuple, list)):
            forms = (forms, )
        forms_dict[unicode(name)] = forms

    # If any forms are using FileField, ensure file storage is configured.
    # (The original wrapped itervalues() in a no-op generator and
    # recomputed `view` inside the loop; both removed.)
    if not cls.file_storage:
        for forms in forms_dict.itervalues():
            for form in forms:
                if hasattr(form, "form"):  # formset
                    form = form.form
                for field in form.base_fields.itervalues():
                    if isinstance(field, FileField):
                        raise NoFileStorageConfigured(
                            "%s contains a FileField, but "
                            "`%s.file_storage` was not specified."
                            % (form, view))

    # build the kwargs for the formwizard instances
    kwargs.setdefault('wizard_step_templates', cls.wizard_step_templates or {})
    kwargs['forms'] = forms_dict
    return super(WizardMixin, cls).as_view(*args, **kwargs)
def as_view(cls, steps=None, *args, **kwargs):  # pylint: ignore=E0213
    """
    Validate ``steps`` and build the kwargs for the formwizard instances.

    Each step is a (name, form-or-forms) pair; a single form is normalized
    to a one-element tuple. Raises ImproperlyConfigured for an empty or
    malformed ``steps``, and NoFileStorageConfigured when any form uses a
    FileField but ``cls.file_storage`` is not set.
    """
    steps = steps or cls.steps

    # validation
    view = '%s.%s' % (cls.__module__, cls.__name__)  # used in errors
    if len(steps) == 0:
        raise ImproperlyConfigured("`%s` requires at least one step." % view)
    if not all((isinstance(i, (tuple, list)) and len(i) == 2
                for i in steps)):
        raise ImproperlyConfigured("`%s.steps` poorly formed." % view)

    forms_dict = SortedDict()

    # populate forms, normalizing each step's forms to a tuple
    for name, forms in steps:
        if not isinstance(forms, (tuple, list)):
            forms = (forms, )
        forms_dict[unicode(name)] = forms

    # If any forms are using FileField, ensure file storage is configured.
    # (The original wrapped itervalues() in a no-op generator and
    # recomputed `view` inside the loop; both removed.)
    if not cls.file_storage:
        for forms in forms_dict.itervalues():
            for form in forms:
                if hasattr(form, "form"):  # formset
                    form = form.form
                for field in form.base_fields.itervalues():
                    if isinstance(field, FileField):
                        raise NoFileStorageConfigured(
                            "%s contains a FileField, but "
                            "`%s.file_storage` was not specified."
                            % (form, view))

    # build the kwargs for the formwizard instances
    kwargs.setdefault('wizard_step_templates', cls.wizard_step_templates or {})
    kwargs['forms'] = forms_dict
    return super(WizardMixin, cls).as_view(*args, **kwargs)
class Collector(object):
    """
    Collects model instances that are about to be deleted, together with
    the dependent objects, batch deletes, and field updates implied by
    their relations, then performs the deletion in dependency order.
    """

    def __init__(self, using):
        # Database alias all queries in this collector run against.
        self.using = using
        # Initially, {model: set([instances])}, later values become lists.
        self.data = {}
        self.batches = {}  # {model: {field: set([instances])}}
        self.field_updates = {}  # {model: {(field, value): set([instances])}}
        self.dependencies = {}  # {model: set([models])}

    def add(self, objs, source=None, nullable=False, reverse_dependency=False):
        """
        Adds 'objs' to the collection of objects to be deleted.
        If the call is the result of a cascade, 'source' should be the
        model that caused it, and 'nullable' should be set to True if the
        relation can be null.

        Returns a list of all objects that were not already collected.
        """
        if not objs:
            return []
        new_objs = []
        model = objs[0].__class__
        instances = self.data.setdefault(model, set())
        for obj in objs:
            if obj not in instances:
                new_objs.append(obj)
        instances.update(new_objs)
        # Nullable relationships can be ignored -- they are nulled out before
        # deleting, and therefore do not affect the order in which objects have
        # to be deleted.
        if source is not None and not nullable:
            if reverse_dependency:
                # Cascade runs in the forwards FK direction: the source must
                # be deleted before this model, so swap the edge.
                source, model = model, source
            self.dependencies.setdefault(source, set()).add(model)
        return new_objs

    def add_batch(self, model, field, objs):
        """
        Schedules a batch delete. Every instance of 'model' that is related
        to an instance of 'obj' through 'field' will be deleted.
        """
        self.batches.setdefault(model, {}).setdefault(field, set()).update(objs)

    def add_field_update(self, field, value, objs):
        """
        Schedules a field update. 'objs' must be a homogenous iterable
        collection of model instances (e.g. a QuerySet).
        """
        if not objs:
            return
        model = objs[0].__class__
        self.field_updates.setdefault(
            model, {}).setdefault(
            (field, value), set()).update(objs)

    def collect(self, objs, source=None, nullable=False, collect_related=True,
                source_attr=None, reverse_dependency=False):
        """
        Adds 'objs' to the collection of objects to be deleted as well as
        all parent instances.

        'objs' must be a homogenous iterable collection of model instances
        (e.g. a QuerySet). If 'collect_related' is True, related objects
        will be handled by their respective on_delete handler.

        If the call is the result of a cascade, 'source' should be the
        model that caused it and 'nullable' should be set to True, if the
        relation can be null.

        If 'reverse_dependency' is True, 'source' will be deleted before
        the current model, rather than after. (Needed for cascading to
        parent models, the one case in which the cascade follows the
        forwards direction of an FK rather than the reverse direction.)
        """
        # Backends that cannot delete related objects force a shallow
        # collection regardless of what the caller asked for.
        if not connections[self.using].features.supports_deleting_related_objects:
            collect_related = False
        new_objs = self.add(objs, source, nullable,
                            reverse_dependency=reverse_dependency)
        if not new_objs:
            return
        model = new_objs[0].__class__
        # Recursively collect parent models, but not their related objects.
        # These will be found by meta.get_all_related_objects()
        for parent_model, ptr in model._meta.parents.iteritems():
            if ptr:
                parent_objs = [getattr(obj, ptr.name) for obj in new_objs]
                self.collect(parent_objs, source=model,
                             source_attr=ptr.rel.related_name,
                             collect_related=False,
                             reverse_dependency=True)
        if collect_related:
            for related in model._meta.get_all_related_objects(include_hidden=True):
                field = related.field
                if related.model._meta.auto_created:
                    # Auto-created (e.g. m2m through) models can be deleted
                    # in one batched query without per-object handling.
                    self.add_batch(related.model, field, new_objs)
                else:
                    sub_objs = self.related_objects(related, new_objs)
                    if not sub_objs:
                        continue
                    # Delegate to the FK's on_delete handler (CASCADE,
                    # SET_NULL, PROTECT, ...).
                    field.rel.on_delete(self, field, sub_objs, self.using)
            # TODO This entire block is only needed as a special case to
            # support cascade-deletes for GenericRelation. It should be
            # removed/fixed when the ORM gains a proper abstraction for virtual
            # or composite fields, and GFKs are reworked to fit into that.
            for relation in model._meta.many_to_many:
                if not relation.rel.through:
                    sub_objs = relation.bulk_related_objects(new_objs, self.using)
                    self.collect(sub_objs, source=model,
                                 source_attr=relation.rel.related_name,
                                 nullable=True)

    def related_objects(self, related, objs):
        """
        Gets a QuerySet of objects related to ``objs`` via the relation
        ``related``.
        """
        return related.model._base_manager.using(self.using).filter(
            **{"%s__in" % related.field.name: objs}
        )

    def instances_with_model(self):
        # Flattens self.data into (model, instance) pairs.
        for model, instances in self.data.iteritems():
            for obj in instances:
                yield model, obj

    def sort(self):
        """
        Reorders self.data so that models appear after the models they
        depend on (a simple iterative topological sort). If a dependency
        cycle is found the ordering is left unchanged.
        """
        sorted_models = []
        models = self.data.keys()
        while len(sorted_models) < len(models):
            found = False
            for model in models:
                if model in sorted_models:
                    continue
                dependencies = self.dependencies.get(model)
                # A model is ready once all of its dependencies are placed.
                if not (dependencies and dependencies.difference(sorted_models)):
                    sorted_models.append(model)
                    found = True
            if not found:
                # Cycle detected: bail out without reordering.
                return
        self.data = SortedDict([(model, self.data[model])
                                for model in sorted_models])

    @force_managed
    def delete(self):
        """
        Performs the collected deletions: signals, field updates, batch
        deletes, instance deletes, then post-delete bookkeeping.
        """
        # sort instance collections
        for model, instances in self.data.items():
            self.data[model] = sorted(instances, key=attrgetter("pk"))
        # if possible, bring the models in an order suitable for databases that
        # don't support transactions or cannot defer contraint checks until the
        # end of a transaction.
        self.sort()
        # send pre_delete signals
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.pre_delete.send(
                    sender=model, instance=obj, using=self.using
                )
        # update fields (e.g. SET_NULL) before any rows disappear
        for model, instances_for_fieldvalues in self.field_updates.iteritems():
            query = sql.UpdateQuery(model)
            for (field, value), instances in instances_for_fieldvalues.iteritems():
                query.update_batch([obj.pk for obj in instances],
                                   {field.name: value}, self.using)
        # reverse instance collections so dependents go first within a model
        for instances in self.data.itervalues():
            instances.reverse()
        # delete batches
        for model, batches in self.batches.iteritems():
            query = sql.DeleteQuery(model)
            for field, instances in batches.iteritems():
                query.delete_batch([obj.pk for obj in instances],
                                   self.using, field)
        # delete instances
        for model, instances in self.data.iteritems():
            query = sql.DeleteQuery(model)
            pk_list = [obj.pk for obj in instances]
            query.delete_batch(pk_list, self.using)
        # send post_delete signals
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.post_delete.send(
                    sender=model, instance=obj, using=self.using
                )
        # update collected instances in memory to mirror the DB updates
        for model, instances_for_fieldvalues in self.field_updates.iteritems():
            for (field, value), instances in instances_for_fieldvalues.iteritems():
                for obj in instances:
                    setattr(obj, field.attname, value)
        # clear the pk of every deleted instance
        for model, instances in self.data.iteritems():
            for instance in instances:
                setattr(instance, model._meta.pk.attname, None)
class CrossGridReport(object):
    """
    A two-dimensional (row x column) aggregation report.

    Objects are routed into cells by reducing each one to a row key and a
    column key; rows are ReportRow instances and columns carry header
    objects produced by ``header_map``.
    """

    def __init__(self, title, row_reduce, row_map, col_reduce,
                 agg_function, header_map, row_sort=None):
        """
        row_reduce:   callable(obj) -> row key.
        row_map:      callable(obj) -> value used when printing the row.
        col_reduce:   callable(obj) -> column key.
        agg_function: callable(obj, current_value) used when appending a
                      value to a column to extract data from the object.
        header_map:   callable(obj) -> header object for a new column key.
        row_sort:     optional row sorting hook.
        """
        self.title = title
        self.row_reduce = row_reduce
        self.row_map = row_map
        self.col_reduce = col_reduce
        self.agg_function = agg_function
        self.header_map = header_map
        self.row_sort = row_sort
        self.row = SortedDict()
        self.columns = SortedDict()

    def append(self, obj):
        """Route ``obj`` into its cell, creating row/column as needed."""
        row_key = self.row_reduce(obj)
        col_key = self.col_reduce(obj)
        if col_key not in self.columns:
            self.columns[col_key] = self.header_map(obj)
        row = self.append_row(self.row_map(obj), row_key)
        row.append(col_key, obj)

    def append_row(self, row_obj, row_key):
        """Return the row for ``row_key``, creating it on first use."""
        return self.row.setdefault(row_key, ReportRow(self, row_obj, row_key))

    def append_column(self, col_obj, col_key):
        """Register a column header unless one is already present."""
        self.columns.setdefault(col_key, col_obj)

    def iter_columns(self):
        """Iterate over column header objects in insertion order."""
        return self.columns.itervalues()

    def iter_columns_key(self):
        """Iterate over column keys in insertion order."""
        return self.columns.iterkeys()

    def iter_rows(self):
        """Iterate over ReportRow instances in insertion order."""
        return self.row.itervalues()
class Depender(object):
    """
    The base class for this application; loads all the scripts, compresses
    them, and retains them in memory awaiting requests to concatenate
    scripts for response.
    """

    def __init__(self, root, config_file, debug=False):
        """
        @param root: Root directory relative to which to resolve scripts.json
            references.
        @param config_file: Configuration file for Depender, which contains
            links to other config.json's.
        @param debug: debug flag passed through to each Script.
        """
        self.script_root = root
        self.debug = debug
        self.conf = self.parse_configuration(config_file)
        self.default_compression = self.conf['compression']
        self.initialize_compressors()
        self.load_everything()

    def initialize_compressors(self):
        """Create instances of supported compressors (defaults only to YUI)."""
        self.supported_compressors = {
            "yui": YUI()
        }
        self.compressors = {}
        for compression in self.conf['available_compressions']:
            compressor = self.supported_compressors.get(compression)
            if compressor:
                self.compressors[compression] = compressor

    def relative(self, path):
        """Convert a path to the full path relative to the depender root."""
        return os.path.join(self.script_root, path)

    def parse_json_relative(self, path):
        """Return the decoded representation of the JSON file at path.

        BUG FIX: the file handle was previously never closed (resource
        leak); close it even when decoding raises.
        """
        f = open(self.relative(path))
        try:
            return simplejson.load(f)
        finally:
            f.close()

    def parse_configuration(self, config):
        """Parse a configuration file (config.json) to be the configuration
        for the Depender instance."""
        conf = self.parse_json_relative(config)
        return conf

    def get_scripts(self, library):
        """Get all the scripts for a library, instantiate each as a Script,
        and return them in a list."""
        base = self.conf["libs"][library]["scripts"]
        scripts = self.parse_json_relative(os.path.join(base, "scripts.json"))
        ret = []
        for cat, cat_data in scripts.iteritems():
            for script, data in cat_data.iteritems():
                path = self.relative(os.path.join(base, cat, script + ".js"))
                s = Script(library=library, category=cat, name=script,
                           path=path, data=data, compressors=self.compressors,
                           debug=self.debug,
                           copyright=self.conf["libs"][library].get('copyright', ''))
                ret.append(s)
        return ret

    def load_everything(self):
        """Load all scripts defined in config.json's 'libs' section into
        memory, then resolve every script's raw (string) dependencies into
        Script objects.

        @raise Exception: if a script name is defined in two libraries, or
            a declared dependency cannot be found in any library.
        """
        self.all_scripts = SortedDict()
        self.conf["libs"]["depender-client"] = {
            "scripts": self.script_root + "/../client/Source"
        }
        for library in self.conf["libs"]:
            for s in self.get_scripts(library):
                if s.name in self.all_scripts:
                    raise Exception("%s defined in two libraries: %s and %s"
                        % (s.name, self.all_scripts[s.name].library, s.library))
                self.all_scripts[s.name] = s
        # Map the raw_deps (which are strings) into the objects.
        # BUG FIX: the original except-clause referenced the list-comprehension
        # variable `x` after it leaked out of scope — a Python-2-only accident
        # that breaks on Python 3 and obscures which dependency was missing.
        for script in self.all_scripts.itervalues():
            deps = []
            for dep_name in script.raw_deps:
                if dep_name not in self.all_scripts:
                    raise Exception("%s could not be found in any library" % dep_name)
                deps.append(self.all_scripts[dep_name])
            script.deps = deps

    def accumulate_dependencies(self, script, accumulated_list, accumulated_set):
        """Depth-first accumulation of a script's dependencies.

        Appends each dependency (then the script itself) to accumulated_list
        exactly once, using accumulated_set for O(1) membership checks.
        Entries pre-seeded in accumulated_set are treated as excluded.
        """
        for dep in script.deps:
            if dep is not script and dep not in accumulated_set:
                self.accumulate_dependencies(dep, accumulated_list, accumulated_set)
        if script not in accumulated_set:
            accumulated_list.append(script)
            accumulated_set.add(script)

    def get_dependencies(self, include_names, exclude_names,
                         include_lib_names, exclude_lib_names):
        """Recursively gather all dependencies for the requested scripts,
        ignoring anything named (directly or via library) in the excludes.

        NOTE: include_names and exclude_names are mutated in place (library
        members are appended to them) — kept as-is for caller compatibility.
        """
        for lib in include_lib_names:
            for script in self.get_scripts(lib):
                include_names.append(script.name)
        for lib in exclude_lib_names:
            for script in self.get_scripts(lib):
                exclude_names.append(script.name)
        scripts = [self.all_scripts[name] for name in include_names]
        excludes = [self.all_scripts[name] for name in exclude_names]
        acc_list = []
        acc_set = set(excludes)
        for s in scripts:
            self.accumulate_dependencies(s, acc_list, acc_set)
        # Sanity check: the accumulated list must contain no duplicates.
        assert len(acc_list) == len(set(acc_list))
        return acc_list

    def get_client_js(self, scripts, url):
        """Return the JavaScript necessary to integrate with
        Depender.Client.js."""
        out = "\n\n"
        out += "Depender.loaded.combine(['"
        out += "','".join([i.name for i in scripts]) + "']);\n\n"
        out += "Depender.setOptions({\n"
        out += "\tbuilder: '" + url + "'\n"
        out += "});"
        return out

    def get_output_filename(self):
        """Return the filename for the header if download=true."""
        return self.conf.get('output filename', 'built.js')
class FieldSet(object):
    """
    Data type fields definition.

    Allows defining a set of named fields to build complex pages. Each value
    is either a BaseType leaf or a nested FieldSet, so tree-like structures
    can be composed.
    """

    def __init__(self, force_order=None, **fields):
        """
        @param fields: name/value pairs for this set; values may be FieldSet
            instances (allowing tree-like structures) or BaseType leaves.
        @param force_order: list of names defining the order fields are
            presented on forms and other areas.
        @raise ValueError: if any value is not a BaseType or FieldSet.
        """
        # Check that each field value is a BaseType or FieldSet instance.
        for value in fields.values():
            if not isinstance(value, (BaseType, FieldSet)):
                raise ValueError('%s is not a valid type' % value)
        self.data = None      # loaded data is stored here
        self.cache = {}       # memoized values converted by BaseType.value()
        self.loaded = False   # flag marking that data was loaded
        self.fields = SortedDict()
        for name in _ordering(force_order, fields):
            # Store fields in the requested order.
            self.fields[name] = fields[name]

    def inc_form(self, *names):
        """Include-form hook: must return a dict of context values containing
        a 'tpl' key with the template path that renders the form.

        @raise ValueError: if the subclass data lacks the 'tpl' key.
        """
        data = self._inc_form(*names)
        if data and 'tpl' not in data:
            raise ValueError('Missing form template')
        return data

    def _inc_form(self, *names):
        """Return form data needed to render form. Implement in subclass."""
        raise NotImplementedError('Implement in subclass')

    def load(self, values):
        """Load data into fields via the subclass _load hook.

        Won't load if data was already loaded.
        """
        if not self.loaded:
            self._load(values)
            self.loaded = True

    def _load(self, values):
        """Load data into fields, implement in subclass."""
        raise NotImplementedError('Implement in subclass')

    def clone(self):
        """Clone this FieldSet; nested FieldSets are cloned too (BaseType
        leaves are shared, matching the previous behavior)."""
        fields = dict((k, v.clone() if isinstance(v, FieldSet) else v)
                      for k, v in self.fields.iteritems())
        return self.__class__(force_order=self.fields.keys(), **fields)

    def done_percent(self):
        """Return the fields' combined done percent (pair reduced via
        _reduce_pairs)."""
        vals = [item.done_percent() for item in self.fields.values()]
        return reduce(_reduce_pairs, vals) if vals else (len(self), 0)

    def __len__(self):
        """Return the loaded data length, 0 when nothing is loaded.

        BUG FIX: the previous `self.loaded and len(self.data) or 0` raised
        TypeError when loaded was True but data was still None.
        """
        if self.loaded and self.data is not None:
            return len(self.data)
        return 0

    def __nonzero__(self):
        """Truth value is the filled status: no fields or no data means
        not filled."""
        if not self.loaded:
            return False
        data = any(bool(v) for v in self.data.itervalues()) \
            if self.data else False
        fields = any(bool(v) for v in self.fields.itervalues()
                     if isinstance(v, FieldSet)) \
            if self.fields else False
        return data or fields

    def __contains__(self, name):
        """Membership protocol so `name in fieldset` works.

        BUG FIX: this was defined as `__hasitem__`, which is not a real
        special method — `in` never called it, falling back to the
        __getitem__ iteration protocol instead.
        """
        return name in self.fields

    # Kept as an alias for backward compatibility with any direct callers.
    __hasitem__ = __contains__

    def __getitem__(self, name):
        """Dict-like accessor to section fields.

        Sub-FieldSets are returned directly, while BaseType values are
        processed by the type's value() method first (and memoized in
        self.cache). Returns None when data has not been loaded yet.

        @raise KeyError: if name is not a known field.
        """
        if name not in self.fields:
            raise KeyError('Missing value "%s"' % name)
        if not self.loaded:
            return None
        item = self.fields[name]
        if isinstance(item, BaseType):
            if name not in self.cache:
                # NOTE: assumes self.data is a mapping once loaded (it is
                # read with .get here and .itervalues in __nonzero__).
                self.cache[name] = item.value(self.data.get(name))
            return self.cache[name]
        elif isinstance(item, FieldSet):
            return item

    def __getattr__(self, name):
        """Attribute access proxies to __getitem__; unknown names raise
        AttributeError as getattr/hasattr expect."""
        try:
            return self.__getitem__(name)
        except KeyError as e:
            raise AttributeError(str(e))
class BaseResource(BaseEndpoint):
    """
    Base endpoint that owns a collection of sub-endpoints and exposes them
    as URL patterns and link prototypes.

    NOTE(review): registration order matters — create_link_prototypes()
    calls register_endpoints() before iterating self.endpoints.
    """
    resource_class = ''  # hint to the client how this resource is used
    form_class = EmptyForm
    resource_adaptor = None

    def __init__(self, **kwargs):
        # A resource_adaptor is mandatory; BaseEndpoint handles the rest.
        assert 'resource_adaptor' in kwargs
        super(BaseResource, self).__init__(**kwargs)

    def post_register(self):
        """Record this resource on the active api_request (when present),
        then continue with the normal registration chain."""
        if self.api_request:
            self.api_request.record_resource(self)
        super(BaseResource, self).post_register()

    @property
    def resource(self):
        # Endpoints have a `resource` attribute; for a resource it is itself.
        return self

    def get_app_name(self):
        """Return the URL-namespace app name. Subclasses must implement."""
        raise NotImplementedError
    app_name = property(get_app_name)

    def get_base_url_name(self):
        """Prefix used to build URL names for this resource's endpoints."""
        return self.app_name + '_'

    def create_link_prototypes(self):
        """Collect link prototypes from the parent chain plus every
        registered sub-endpoint (registration happens here as a side
        effect)."""
        link_prototypes = super(BaseResource, self).create_link_prototypes()
        self.register_endpoints()
        for endpoint in self.endpoints.itervalues():
            link_prototypes.update(endpoint.link_prototypes)
        return link_prototypes

    def register_endpoints(self):
        """(Re)build self.endpoints from get_view_endpoints(), preserving
        declaration order."""
        self.endpoints = SortedDict()
        for endpoint_cls, kwargs in self.get_view_endpoints():
            self.register_endpoint(endpoint_cls, **kwargs)

    def register_endpoint(self, endpoint_cls, **kwargs):
        """Instantiate one endpoint (with defaulted kwargs) and index it by
        its name suffix."""
        kwargs = self.get_endpoint_kwargs(**kwargs)
        endpoint = endpoint_cls(**kwargs)
        self.endpoints[endpoint.get_name_suffix()] = endpoint

    def get_endpoint_kwargs(self, **kwargs):
        """Fill in the parent/site/api_request defaults for a child
        endpoint; explicit kwargs win."""
        kwargs.setdefault('parent', self)
        kwargs.setdefault('site', self._site)
        kwargs.setdefault('api_request', self.api_request)
        return kwargs

    def get_view_endpoints(self):
        """
        Returns a list of tuples containing
        (endpoint class, endpoint kwargs)
        """
        return []

    def get_urls(self):
        """URL patterns for this resource: extras first, then one pattern
        per registered endpoint."""
        urlpatterns = self.get_extra_urls()
        urls = [endpoint.get_url_object() for endpoint in self.endpoints.itervalues()]
        urlpatterns += patterns('', *urls)
        return urlpatterns

    def get_extra_urls(self):
        """Hook for additional URL patterns; empty by default."""
        return patterns('',)

    def urls(self):
        # (urlpatterns, app_namespace, instance_namespace) triple for include().
        return self.get_urls(), self.app_name, None
    urls = property(urls)

    def reverse(self, name, *args, **kwargs):
        """Delegate URL reversing to the site."""
        return self.site.reverse(name, *args, **kwargs)

    def api_permission_check(self, request):
        """Delegate the permission check to the site."""
        return self.site.api_permission_check(request)

    def get_state_data(self):
        """Extend the endpoint state with this resource's identity."""
        data = super(BaseResource, self).get_state_data()
        data.update({'resource_name': getattr(self, 'resource_name', None),
                     'app_name': self.app_name,})
        return data

    def get_indexes(self):
        """Mapping of index name -> index; none by default."""
        return {}

    def get_index(self, name):
        # Raises KeyError for unknown index names.
        return self.get_indexes()[name]

    def get_index_query(self, name):
        """Return the queryset/query backing an index. Subclasses implement."""
        raise NotImplementedError

    def get_item_url(self, item):
        """URL for an individual item; None means no item URLs."""
        return None

    def get_related_resource_from_field(self, field):
        """Delegate related-resource lookup to the site."""
        return self.site.get_related_resource_from_field(field)

    def get_html_type_from_field(self, field):
        """Delegate field -> HTML input-type mapping to the site."""
        return self.site.get_html_type_from_field(field)

    def get_absolute_url(self):
        return self.get_url()

    def get_url(self, **kwargs):
        """URL of this resource's main link prototype."""
        return self.get_main_link_prototype().get_url(**kwargs)

    def get_resource_link_item(self):
        return None

    def get_url_name(self):
        """Canonical URL name for the resource listing."""
        return self.get_base_url_name() + 'resource'

    def get_main_link_name(self):
        return 'list'

    def get_breadcrumb(self):
        """Single breadcrumb link pointing at this resource's list view."""
        bread = self.create_link_collection()
        bread.add_link('list', rel='breadcrumb', link_factor='LO', prompt=self.get_prompt())
        return bread

    def get_breadcrumbs(self):
        """Breadcrumb trail: the parent's trail (when any) plus this
        resource's own breadcrumb."""
        if self.parent:
            breadcrumbs = self.parent.get_breadcrumbs()
        else:
            breadcrumbs = self.create_link_collection()
        breadcrumbs.extend(self.get_breadcrumb())
        return breadcrumbs

    def get_paginator_kwargs(self):
        """Extra kwargs for the paginator; none by default."""
        return {}
class Riff(object):
    """
    A mountable admin-style component. Riffs nest: each instance keeps a
    reference to its parent, the root of the tree (base_riff), and the
    chain of riffs from root to itself (path). Child riffs declared in
    riff_classes are instantiated and registered at construction time.
    """

    widgets = []
    riff_classes = []
    display_name = None
    slug = None
    namespace = None
    app_name = None
    default_redirect_view = DefaultRedirectView
    widget_template = 'djam/_widget.html'

    def __init__(self, parent=None, namespace=None, app_name=None):
        self.parent = parent
        if self.display_name is None:
            raise ImproperlyConfigured('Please give me a display name')
        if self.slug is None:
            self.slug = slugify(self.display_name)
        # Explicit argument wins, then the class attribute, then the slug.
        self.namespace = namespace or self.namespace or self.slug
        if parent is not None:
            self.base_riff = parent.base_riff
            self.path = parent.path + (self,)
        else:
            self.base_riff = self
            self.path = (self,)
        self._riffs = SortedDict()
        for child_cls in self.riff_classes:
            self.register(child_cls)

    def __getitem__(self, key):
        return self._riffs[key]

    def sort_riffs(self, key=None, reverse=False):
        """Reorder registered riffs (by display_name unless a key callable
        is supplied)."""
        sort_key = key if key is not None else (lambda r: r.display_name)
        ordered = sorted(self._riffs.itervalues(), key=sort_key, reverse=reverse)
        self._riffs.keyOrder = [r.namespace for r in ordered]

    @property
    def riffs(self):
        """Registered child riffs, in order."""
        return self._riffs.values()

    def get_default_url(self):
        """
        Returns the default base url for this riff.

        Must be implemented by subclasses.
        """
        raise NotImplementedError('Subclasses must implement get_default_url.')

    def get_urls(self):
        """URL patterns: extras first, one include per child riff, then the
        optional default redirect at the riff root."""
        urlpatterns = self.get_extra_urls()
        for riff in self.riffs:
            if riff.slug:
                pattern = r'^{0}/'.format(riff.slug)
            else:
                pattern = r'^'
            urlpatterns += patterns('',
                url(pattern, include(riff.urls)),
            )
        if self.default_redirect_view is not None:
            redirect_view = self.default_redirect_view.as_view(riff=self)
            urlpatterns += patterns('',
                url(r'^$', redirect_view),
            )
        return urlpatterns

    def get_extra_urls(self):
        """Hook for additional URL patterns; empty by default."""
        return patterns('',)

    @property
    def urls(self):
        # (urlpatterns, app_namespace, instance_namespace) for include().
        return self.get_urls(), self.app_name, self.namespace

    def get_view_kwargs(self):
        return {'riff': self}

    def has_permission(self, request):
        """Permission defaults to the parent's answer; the root allows."""
        return self.parent.has_permission(request) if self.parent else True

    def is_hidden(self, request):
        return not self.has_permission(request)

    def get_unauthorized_response(self, request):
        """Delegate to the tree root; the root itself returns 403."""
        if self.base_riff is self:
            return HttpResponseForbidden()
        return self.base_riff.get_unauthorized_response(request)

    def wrap_view(self, view):
        return view

    def reverse(self, name, args=None, kwargs=None):
        """Reverse a view name inside this riff's full namespace."""
        viewname = '{namespace}:{viewname}'.format(
            namespace=self.full_namespace, viewname=name)
        return reverse(viewname, args=args, kwargs=kwargs)

    @property
    def full_namespace(self):
        """Colon-joined namespaces from the root down to this riff."""
        return ":".join([r.namespace for r in self.path])

    def register(self, riff_class):
        """Instantiate and register a child riff, rejecting duplicates."""
        riff = riff_class(parent=self)
        if riff.namespace in self._riffs:
            raise ValueError("Riff with namespace {0} already "
                             "registered.".format(riff.namespace))
        self._riffs[riff.namespace] = riff
class Riff(object):
    """
    A mountable admin-style component; riffs nest into a tree via parent,
    base_riff (the root) and path (root -> self chain).

    NOTE(review): this is a near-duplicate of another `Riff` class in this
    same file (that one has a `widget_template` attribute and a `urls`
    property instead of `get_urls_tuple()`); consider consolidating.
    """
    widgets = []
    riff_classes = []
    display_name = None
    slug = None
    namespace = None
    app_name = None
    default_redirect_view = DefaultRedirectView

    def __init__(self, parent=None, namespace=None, app_name=None):
        self.parent = parent
        if self.display_name is None:
            raise ImproperlyConfigured('Please give me a display name')
        if self.slug is None:
            self.slug = slugify(self.display_name)
        # Explicit argument wins, then the class attribute, then the slug.
        self.namespace = namespace or self.namespace or self.slug
        if parent is None:
            self.base_riff = self
            self.path = (self, )
        else:
            self.base_riff = parent.base_riff
            self.path = parent.path + (self, )
        self._riffs = SortedDict()
        for riff_class in self.riff_classes:
            self.register(riff_class)

    def __getitem__(self, key):
        return self._riffs[key]

    def sort_riffs(self, key=None, reverse=False):
        """Reorder registered riffs (by display_name unless a key callable
        is supplied); only the SortedDict key order is touched."""
        if key is None:
            key = lambda r: r.display_name
        riffs = sorted(self._riffs.itervalues(), key=key, reverse=reverse)
        self._riffs.keyOrder = [r.namespace for r in riffs]

    @property
    def riffs(self):
        """Registered child riffs, in order."""
        return self._riffs.values()

    def get_default_url(self):
        """
        Returns the default base url for this riff.

        Must be implemented by subclasses.
        """
        raise NotImplementedError('Subclasses must implement get_default_url.')

    def get_urls(self):
        """URL patterns: extras first, one include per child riff, then the
        optional default redirect at the riff root."""
        urlpatterns = self.get_extra_urls()
        for riff in self.riffs:
            pattern = r'^{0}/'.format(riff.slug) if riff.slug else r'^'
            urlpatterns += patterns(
                '',
                url(pattern, include(riff.get_urls_tuple())),
            )
        if self.default_redirect_view is not None:
            urlpatterns += patterns(
                '',
                url(r'^$', self.default_redirect_view.as_view(riff=self)),
            )
        return urlpatterns

    def get_extra_urls(self):
        """Hook for additional URL patterns; empty by default."""
        return patterns('', )

    def get_urls_tuple(self):
        # (urlpatterns, app_namespace, instance_namespace) for include().
        return self.get_urls(), self.app_name, self.namespace

    def get_view_kwargs(self):
        return {'riff': self}

    def has_permission(self, request):
        """Permission defaults to the parent's answer; the root allows."""
        if self.parent:
            return self.parent.has_permission(request)
        return True

    def is_hidden(self, request):
        return not self.has_permission(request)

    def get_unauthorized_response(self, request):
        """Delegate to the tree root; the root itself returns 403."""
        if self.base_riff is not self:
            return self.base_riff.get_unauthorized_response(request)
        return HttpResponseForbidden()

    def wrap_view(self, view):
        return view

    def reverse(self, name, *args, **kwargs):
        # NOTE(review): extra positional arguments become Django's `args`
        # tuple and extra keywords its `kwargs` dict. Calling this with
        # reverse('view', args=[...]) (as the sibling Riff class allows)
        # would wrongly nest under kwargs['args'] — confirm intended usage.
        return reverse('{namespace}:{viewname}'.format(
            namespace=self.full_namespace, viewname=name),
            args=args, kwargs=kwargs)

    @property
    def full_namespace(self):
        """Colon-joined namespaces from the root down to this riff."""
        return ":".join([r.namespace for r in self.path])

    def register(self, riff_class):
        """Instantiate and register a child riff, rejecting duplicates."""
        riff = riff_class(parent=self)
        if riff.namespace in self._riffs:
            raise ValueError("Riff with namespace {0} already "
                             "registered.".format(riff.namespace))
        self._riffs[riff.namespace] = riff
class AppLayerFinder(BaseFinder):
    """
    Static files finder that resolves per-app, per-layer static storages.

    The set of layers comes from a configurable provider (LAYERS_PROVIDER);
    storages are lazily (re)synchronized with the provider on every
    find()/list() call via update_storage().
    """
    storage_class = LayerStaticStorage

    @staticmethod
    def get_apps():
        """Return the apps participating in layering: LAYERED_APPS when set
        (each must also be in INSTALLED_APPS), otherwise all INSTALLED_APPS,
        minus EXCLUDE_FROM_LAYERS."""
        apps = getattr(settings, 'LAYERED_APPS', settings.INSTALLED_APPS)
        for app in apps:
            if app not in settings.INSTALLED_APPS:
                raise Exception("Application %s not listed in INSTALLED_APPS" % app)
        excluded_apps = getattr(settings, 'EXCLUDE_FROM_LAYERS', [])
        return [app for app in apps if app not in excluded_apps]

    @staticmethod
    def get_provider():
        """Import, validate and instantiate the configured layer provider.

        @raise Exception: if the setting is explicitly None or the class is
            not a BaseLayerProvider subclass.
        """
        provider_path = getattr(settings, "LAYERS_PROVIDER",
                                "layers.providers.DefaultLayerProvider")
        if provider_path is None:
            # BUG FIX: the message previously named "LAYER_PROVIDER" while
            # the setting actually read above is "LAYERS_PROVIDER".
            raise Exception("This finder requires the LAYERS_PROVIDER variable to be set in the "
                            "application's setting file")
        module_path, class_name = provider_path.rsplit('.', 1)
        module = import_module(module_path)
        provider_cls = getattr(module, class_name)
        if not issubclass(provider_cls, BaseLayerProvider):
            raise Exception("%s must be a descendant from layers.providers.BaseLayerProvider" % provider_cls)
        return provider_cls()

    def __init__(self, apps=None, *args, **kwargs):
        self.provider = AppLayerFinder.get_provider()
        self.layers = {}               # layers currently mirrored in storages
        self.storages = SortedDict()   # app -> {layer -> storage}
        if apps is None:
            self.apps = AppLayerFinder.get_apps()
        else:
            self.apps = apps
        self.update_storage()
        super(AppLayerFinder, self).__init__(*args, **kwargs)

    def update_storage(self):
        """Synchronize self.storages with the provider's current layers:
        build storages for newly seen layers, drop storages for layers the
        provider no longer reports."""
        layers = self.provider.get_layers()
        for app in self.apps:
            for layer in layers.keys():
                if layer not in self.layers:
                    app_storage = self.storage_class(app, layer)
                    # Only keep storages whose directory actually exists.
                    if os.path.isdir(app_storage.location):
                        if app not in self.storages:
                            self.storages[app] = {}
                        self.storages[app][layer] = app_storage
        # Remove storages for layers which are no longer present.
        for layer in self.layers.keys():
            if layer not in layers:
                for app in self.apps:
                    if app in self.storages and layer in self.storages[app]:
                        del self.storages[app][layer]
                del self.layers[layer]
        # Update the list of known layers.
        self.layers.update(layers)

    def find(self, path, all=False, layer=None):
        """
        Looks for files in the app directories.

        Returns the first match unless all=True, in which case every match
        is returned as a list.
        """
        self.update_storage()
        matches = []
        for app in self.apps:
            match = self.find_in_app(app, path, layer)
            if match:
                if not all:
                    return match
                matches.append(match)
        return matches

    def find_in_app(self, app, path, layer=None):
        """Find a requested static file in one app's storage for the given
        layer (defaulting to the active layer of the current request).
        Returns the filesystem path or None."""
        layer = layer or get_active_layer(get_current_request())
        storage = self.storages.get(app, {}).get(layer, None)
        # No storage can exist without a truthy layer key, so the previous
        # separate `if layer:` guard was redundant.
        if storage and storage.exists(path):
            matched_path = storage.path(path)
            if matched_path:
                return matched_path

    def list(self, ignore_patterns, layer=None):
        """
        List all files in all app storages for the given layer.

        Yields (path, storage) pairs; yields nothing when no layer is given.
        """
        if not layer:
            return
        self.update_storage()
        for storage in self.storages.itervalues():
            layer_storage = storage.get(layer, None)
            if layer_storage and layer_storage.exists(''):
                for path in utils.get_files(layer_storage, ignore_patterns):
                    yield path, layer_storage
class SiteDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each site as
    specified in the source_dir attribute of the given storage class.
    """
    storage_class = SiteStaticStorage

    def __init__(self, sites=None, *args, **kwargs):
        # Sites handled by this finder, in discovery order.
        self.sites = []
        # Maps each site's folder name to its storage instance.
        self.storages = SortedDict()
        # First, add the site dir to the path.
        sys.path.append(settings.SITES_DIR)
        # Look up sites from the database when none were given explicitly.
        if sites is None:
            sites = []
            for site in Site.objects.all():
                if site.folder_name is not None:
                    sites.append(site.folder_name)
        for site in sites:
            site_storage = self.storage_class(site)
            # Only track sites whose static directory actually exists.
            if not os.path.isdir(site_storage.location):
                continue
            self.storages[site] = site_storage
            if site not in self.sites:
                self.sites.append(site)
        super(SiteDirectoriesFinder, self).__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all site storages, yielding (path, storage) pairs.
        """
        for storage in self.storages.itervalues():
            # Skip storages whose location has vanished since __init__.
            if not storage.exists(''):
                continue
            for path in utils.get_files(storage, ignore_patterns):
                yield path, storage

    def find(self, path, all=False):
        """
        Looks for files in the site directories; first match unless all=True.
        """
        matches = []
        for site in self.sites:
            found = self.find_in_site(site, path)
            if not found:
                continue
            if not all:
                return found
            matches.append(found)
        return matches

    def find_in_site(self, site, path):
        """
        Find a requested static file in a site's static locations.
        Returns the filesystem path, or None when not found.
        """
        storage = self.storages.get(site, None)
        if not storage:
            return None
        if storage.prefix:
            prefix = '%s%s' % (storage.prefix, os.sep)
            # Prefixed storages only match paths under their prefix.
            if not path.startswith(prefix):
                return None
            path = path[len(prefix):]
        # Only try to resolve when the file actually exists in the storage.
        if storage.exists(path):
            matched = storage.path(path)
            if matched:
                return matched
def report_validation(request, report_receipt):  # entity, slug, year, month):
    """Validate (and on success, save) every sub-report form attached to
    the NutritionReport identified by report_receipt, then render the
    validation page.

    WARNING(review): this view ends with `ctx = locals()`, so the template
    context includes EVERY local variable by its exact name — renaming any
    local here is a behavior change. Keep that in mind before refactoring.
    """
    try:
        report = NutritionReport.objects.get(receipt=report_receipt)
    except ValueError:
        # NOTE(review): only a malformed receipt (ValueError) maps to 404;
        # a missing report raises DoesNotExist, which is NOT caught here —
        # confirm whether that is intended.
        raise Http404
    # Bind POST data only on submission; None keeps the forms unbound.
    data = request.POST or None
    # Sub-report instances, keyed by the prefix used for their form below.
    reports = SortedDict((
        ('pec_samp_report', report.pec_samp_report),
        ('pec_sam_report', report.pec_sam_report),
        ('pec_mam_report', report.pec_mam_report),
        ('pec_other_report', report.pec_other_report),
        ('cons_samp_report', report.cons_samp_report),
        ('cons_sam_report', report.cons_sam_report),
        ('cons_mam_report', report.cons_mam_report),
        ('order_samp_report', report.order_samp_report),
        ('order_sam_report', report.order_sam_report),
        ('order_mam_report', report.order_mam_report)))
    # No sub-report at all means there is nothing to validate.
    if not any(reports.itervalues()):
        raise Http404
    # Build one (name, form) pair per applicable sub-report; which forms
    # apply depends on the report flags is_samp / is_sam / is_mam.
    formslist = []
    if report.is_samp:
        formslist.append(('pec_samp_form',
                          PECSAMPReportForm(data,
                                            instance=reports['pec_samp_report'],
                                            prefix='pec_samp_report')))
    if report.is_sam:
        formslist.append(('pec_sam_form',
                          PECSAMReportForm(data,
                                           instance=reports['pec_sam_report'],
                                           prefix='pec_sam_report')))
    if report.is_mam:
        formslist.append(('pec_mam_form',
                          PECMAMReportReportForm(data,
                                                 instance=reports['pec_mam_report'],
                                                 prefix='pec_mam_report')))
    # The "others" PEC form is always present.
    formslist.append(('pec_other_form',
                      PECOthersReportForm(data,
                                          instance=reports['pec_other_report'],
                                          prefix="pec_other_report")))
    if report.is_samp:
        formslist.append(('cons_samp_form',
                          InputConsumptionReportFormSet(data,
                                                        prefix='cons_samp_report',
                                                        instance=reports['cons_samp_report'])))
    if report.is_sam:
        formslist.append(('cons_sam_form',
                          InputConsumptionReportFormSet(data,
                                                        prefix='cons_sam_report',
                                                        instance=reports['cons_sam_report'])))
    if report.is_mam:
        formslist.append(('cons_mam_form',
                          InputConsumptionReportFormSet(data,
                                                        prefix='cons_mam_report',
                                                        instance=reports['cons_mam_report'])))
    if report.is_samp:
        formslist.append(('order_samp_form',
                          InputOrderReportFormSet(data,
                                                  prefix='order_samp_report',
                                                  instance=reports['order_samp_report'])))
    if report.is_sam:
        formslist.append(('order_sam_form',
                          InputOrderReportFormSet(data,
                                                  prefix='order_sam_report',
                                                  instance=reports['order_sam_report'])))
    if report.is_mam:
        formslist.append(('order_mam_form',
                          InputOrderReportFormSet(data,
                                                  prefix='order_mam_report',
                                                  instance=reports['order_mam_report'])))
    forms = SortedDict(tuple(formslist))
    # NOTE(review): the enumerate index `i` is unused; this loop is just
    # list(forms.itervalues()). Left as-is because locals() feeds the
    # template context (see WARNING above).
    forms_with_instances = []
    for i, form in enumerate(forms.itervalues()):
        forms_with_instances.append(form)
    # Debug pass: log each form's validity and its errors.
    for form in forms_with_instances:
        valid = form.is_valid()
        print(u"%s: %s" % (form.__class__, valid))
        if not valid:
            print(form.errors)
            try:
                print(form.non_field_errors())
            except:
                # NOTE(review): bare except — presumably formsets lack
                # non_field_errors() and expose non_form_errors() instead;
                # TODO narrow this to AttributeError.
                print(form.non_form_errors())
    # NOTE(review): this re-validates every form (is_valid() is called a
    # second time) and all() short-circuits on the first failure; the
    # per-form results were already computed in the loop above.
    is_valid = all(form.is_valid() for form in forms_with_instances)
    if is_valid:
        print "VALID"
        # check others with PEC
        for form in forms_with_instances:
            form.save()
        messages.success(request, u'Le rapport %s pour la '
                                  u'période du %s a été validé '
                                  u'avec succès' % (report.receipt, report.period))
    else:
        print "NOT VALID"
    # Template context: every local above, plus the forms and report
    # instances flattened in by their prefix names.
    ctx = locals()
    ctx.update({'category': 'validation'})
    ctx.update(forms)
    ctx.update(reports)
    return render(request, 'validation.html', ctx)
class Options(object): def __init__(self, meta, app_label=None): self.local_fields, self.local_many_to_many = [], [] self.virtual_fields = [] self.module_name, self.verbose_name = None, None self.verbose_name_plural = None self.db_table = '' self.ordering = [] self.unique_together = [] self.permissions = [] self.object_name, self.app_label = None, app_label self.get_latest_by = None self.order_with_respect_to = None self.db_tablespace = settings.DEFAULT_TABLESPACE self.admin = None self.meta = meta self.pk = None self.has_auto_field, self.auto_field = False, None self.abstract = False self.managed = True self.proxy = False # For any class that is a proxy (including automatically created # classes for deferred object loading), proxy_for_model tells us # which class this model is proxying. Note that proxy_for_model # can create a chain of proxy models. For non-proxy models, the # variable is always None. self.proxy_for_model = None # For any non-abstract class, the concrete class is the model # in the end of the proxy_for_model chain. In particular, for # concrete models, the concrete_model is always the class itself. self.concrete_model = None self.parents = SortedDict() self.duplicate_targets = {} self.auto_created = False # To handle various inheritance situations, we need to track where # managers came from (concrete or abstract base classes). self.abstract_managers = [] self.concrete_managers = [] # List of all lookups defined in ForeignKey 'limit_choices_to' options # from *other* models. Needed for some admin checks. Internal use only. self.related_fkey_lookups = [] def contribute_to_class(self, cls, name): from django.db import connection from django.db.backends.util import truncate_name cls._meta = self self.installed = re.sub('\.models$', '', cls.__module__) in settings.INSTALLED_APPS # First, construct the default values for these options. 
self.object_name = cls.__name__ self.module_name = self.object_name.lower() self.verbose_name = get_verbose_name(self.object_name) # Next, apply any overridden values from 'class Meta'. if self.meta: meta_attrs = self.meta.__dict__.copy() for name in self.meta.__dict__: # Ignore any private attributes that Django doesn't care about. # NOTE: We can't modify a dictionary's contents while looping # over it, so we loop over the *original* dictionary instead. if name.startswith('_'): del meta_attrs[name] for attr_name in DEFAULT_NAMES: if attr_name in meta_attrs: setattr(self, attr_name, meta_attrs.pop(attr_name)) elif hasattr(self.meta, attr_name): setattr(self, attr_name, getattr(self.meta, attr_name)) # unique_together can be either a tuple of tuples, or a single # tuple of two strings. Normalize it to a tuple of tuples, so that # calling code can uniformly expect that. ut = meta_attrs.pop('unique_together', self.unique_together) if ut and not isinstance(ut[0], (tuple, list)): ut = (ut,) self.unique_together = ut # verbose_name_plural is a special case because it uses a 's' # by default. if self.verbose_name_plural is None: self.verbose_name_plural = string_concat(self.verbose_name, 's') # Any leftover attributes must be invalid. if meta_attrs != {}: raise TypeError("'class Meta' got invalid attribute(s): %s" % ','.join(meta_attrs.keys())) else: self.verbose_name_plural = string_concat(self.verbose_name, 's') del self.meta # If the db_table wasn't provided, use the app_label + module_name. 
if not self.db_table: self.db_table = "%s_%s" % (self.app_label, self.module_name) self.db_table = truncate_name(self.db_table, connection.ops.max_name_length()) def _prepare(self, model): if self.order_with_respect_to: self.order_with_respect_to = self.get_field(self.order_with_respect_to) self.ordering = ('_order',) model.add_to_class('_order', OrderWrt()) else: self.order_with_respect_to = None if self.pk is None: if self.parents: # Promote the first parent link in lieu of adding yet another # field. field = next(self.parents.itervalues()) # Look for a local field with the same name as the # first parent link. If a local field has already been # created, use it instead of promoting the parent already_created = [fld for fld in self.local_fields if fld.name == field.name] if already_created: field = already_created[0] field.primary_key = True self.setup_pk(field) else: auto = AutoField(verbose_name='ID', primary_key=True, auto_created=True) model.add_to_class('id', auto) # Determine any sets of fields that are pointing to the same targets # (e.g. two ForeignKeys to the same remote model). The query # construction code needs to know this. At the end of this, # self.duplicate_targets will map each duplicate field column to the # columns it duplicates. collections = {} for column, target in self.duplicate_targets.iteritems(): try: collections[target].add(column) except KeyError: collections[target] = set([column]) self.duplicate_targets = {} for elt in collections.itervalues(): if len(elt) == 1: continue for column in elt: self.duplicate_targets[column] = elt.difference(set([column])) def add_field(self, field): # Insert the given field in the order in which it was created, using # the "creation_counter" attribute of the field. # Move many-to-many related fields from self.fields into # self.many_to_many. 
        # Many-to-many fields are kept in their own ordered list; everything
        # else (including FKs) goes into local_fields. Insertion uses bisect
        # so the lists stay sorted by field creation order.
        if field.rel and isinstance(field.rel, ManyToManyRel):
            self.local_many_to_many.insert(bisect(self.local_many_to_many, field), field)
            if hasattr(self, '_m2m_cache'):
                del self._m2m_cache
        else:
            self.local_fields.insert(bisect(self.local_fields, field), field)
            self.setup_pk(field)
            # Invalidate both field caches together; they are filled together
            # by _fill_fields_cache().
            if hasattr(self, '_field_cache'):
                del self._field_cache
                del self._field_name_cache
        if hasattr(self, '_name_map'):
            del self._name_map

    def add_virtual_field(self, field):
        # Virtual fields (e.g. generic foreign keys) have no column of their own.
        self.virtual_fields.append(field)

    def setup_pk(self, field):
        # The first field flagged primary_key becomes the pk; it is excluded
        # from serialization.
        if not self.pk and field.primary_key:
            self.pk = field
            field.serialize = False

    def setup_proxy(self, target):
        """
        Does the internal setup so that the current model is a proxy for
        "target".
        """
        self.pk = target._meta.pk
        self.proxy_for_model = target
        self.db_table = target._meta.db_table

    def __repr__(self):
        return '<Options for %s>' % self.object_name

    def __str__(self):
        return "%s.%s" % (smart_str(self.app_label), smart_str(self.module_name))

    def verbose_name_raw(self):
        """
        There are a few places where the untranslated verbose name is needed
        (so that we get the same value regardless of currently active
        locale).
        """
        # Temporarily deactivate translation, force the unicode value, then
        # restore the previously active language.
        lang = get_language()
        deactivate_all()
        raw = force_unicode(self.verbose_name)
        activate(lang)
        return raw
    verbose_name_raw = property(verbose_name_raw)

    def _fields(self):
        """
        The getter for self.fields. This returns the list of field objects
        available to this model (including through parent models).

        Callers are not permitted to modify this list, since it's a reference
        to this instance (not a copy).
        """
        try:
            self._field_name_cache
        except AttributeError:
            self._fill_fields_cache()
        return self._field_name_cache
    fields = property(_fields)

    def get_fields_with_model(self):
        """
        Returns a sequence of (field, model) pairs for all fields. The "model"
        element is None for fields on the current model. Mostly of use when
        constructing queries so that we know which model a field belongs to.
        """
        try:
            self._field_cache
        except AttributeError:
            self._fill_fields_cache()
        return self._field_cache

    def _fill_fields_cache(self):
        # Parent fields come first (recursively), then local fields; a None
        # model marks a locally-defined field.
        cache = []
        for parent in self.parents:
            for field, model in parent._meta.get_fields_with_model():
                if model:
                    cache.append((field, model))
                else:
                    cache.append((field, parent))
        cache.extend([(f, None) for f in self.local_fields])
        self._field_cache = tuple(cache)
        self._field_name_cache = [x for x, _ in cache]

    def _many_to_many(self):
        try:
            self._m2m_cache
        except AttributeError:
            self._fill_m2m_cache()
        return self._m2m_cache.keys()
    many_to_many = property(_many_to_many)

    def get_m2m_with_model(self):
        """
        The many-to-many version of get_fields_with_model().
        """
        try:
            self._m2m_cache
        except AttributeError:
            self._fill_m2m_cache()
        return self._m2m_cache.items()

    def _fill_m2m_cache(self):
        # SortedDict preserves definition order: inherited m2m fields first,
        # then local ones (value None marks local).
        cache = SortedDict()
        for parent in self.parents:
            for field, model in parent._meta.get_m2m_with_model():
                if model:
                    cache[field] = model
                else:
                    cache[field] = parent
        for field in self.local_many_to_many:
            cache[field] = None
        self._m2m_cache = cache

    def get_field(self, name, many_to_many=True):
        """
        Returns the requested field by name. Raises FieldDoesNotExist on error.
        """
        to_search = many_to_many and (self.fields + self.many_to_many) or self.fields
        for f in to_search:
            if f.name == name:
                return f
        raise FieldDoesNotExist('%s has no field named %r' % (self.object_name, name))

    def get_field_by_name(self, name):
        """
        Returns the (field_object, model, direct, m2m), where field_object is
        the Field instance for the given name, model is the model containing
        this field (None for local fields), direct is True if the field exists
        on this model, and m2m is True for many-to-many relations. When
        'direct' is False, 'field_object' is the corresponding RelatedObject
        for this field (since the field doesn't have an instance associated
        with it).

        Uses a cache internally, so after the first access, this is very fast.
        """
        try:
            try:
                # _name_map only exists once init_name_map() has cached it.
                return self._name_map[name]
            except AttributeError:
                cache = self.init_name_map()
                return cache[name]
        except KeyError:
            raise FieldDoesNotExist('%s has no field named %r'
                    % (self.object_name, name))

    def get_all_field_names(self):
        """
        Returns a list of all field names that are possible for this model
        (including reverse relation names). This is used for pretty printing
        debugging output (a list of choices), so any internal-only field
        names are not included.
        """
        try:
            cache = self._name_map
        except AttributeError:
            cache = self.init_name_map()
        names = cache.keys()
        names.sort()
        # Internal-only names end with "+" (symmetrical m2m related names being
        # the main example). Trim them.
        return [val for val in names if not val.endswith('+')]

    def init_name_map(self):
        """
        Initialises the field name -> field object mapping.
        """
        cache = {}
        # We intentionally handle related m2m objects first so that symmetrical
        # m2m accessor names can be overridden, if necessary.
        for f, model in self.get_all_related_m2m_objects_with_model():
            cache[f.field.related_query_name()] = (f, model, False, True)
        for f, model in self.get_all_related_objects_with_model():
            cache[f.field.related_query_name()] = (f, model, False, False)
        for f, model in self.get_m2m_with_model():
            cache[f.name] = (f, model, True, True)
        for f, model in self.get_fields_with_model():
            cache[f.name] = (f, model, True, False)
        # Only cache once the app cache is fully populated, so a partial map
        # is never memoized.
        if app_cache_ready():
            self._name_map = cache
        return cache

    def get_add_permission(self):
        return 'add_%s' % self.object_name.lower()

    def get_change_permission(self):
        return 'change_%s' % self.object_name.lower()

    def get_delete_permission(self):
        return 'delete_%s' % self.object_name.lower()

    def get_all_related_objects(self, local_only=False, include_hidden=False,
                                include_proxy_eq=False):
        # Convenience wrapper that drops the model element of each pair.
        return [k for k, v in self.get_all_related_objects_with_model(
                local_only=local_only, include_hidden=include_hidden,
                include_proxy_eq=include_proxy_eq)]

    def get_all_related_objects_with_model(self, local_only=False,
                                           include_hidden=False,
                                           include_proxy_eq=False):
        """
        Returns a list of (related-object, model) pairs. Similar to
        get_fields_with_model().
        """
        try:
            self._related_objects_cache
        except AttributeError:
            self._fill_related_objects_cache()
        # Build the filter predicates requested by the flags, then apply them
        # all to the appropriate cache.
        predicates = []
        if local_only:
            predicates.append(lambda k, v: not v)
        if not include_hidden:
            predicates.append(lambda k, v: not k.field.rel.is_hidden())
        cache = (self._related_objects_proxy_cache if include_proxy_eq
                 else self._related_objects_cache)
        return filter(lambda t: all([p(*t) for p in predicates]), cache.items())

    def _fill_related_objects_cache(self):
        cache = SortedDict()
        parent_list = self.get_parent_list()
        for parent in self.parents:
            for obj, model in parent._meta.get_all_related_objects_with_model(include_hidden=True):
                # Skip parent links and auto-created relations that point at
                # one of our own ancestors.
                if (obj.field.creation_counter < 0 or obj.field.rel.parent_link) and obj.model not in parent_list:
                    continue
                if not model:
                    cache[obj] = parent
                else:
                    cache[obj] = model
        # Collect also objects which are in relation to some proxy child/parent of self.
        proxy_cache = cache.copy()
        for klass in get_models(include_auto_created=True, only_installed=False):
            for f in klass._meta.local_fields:
                if f.rel and not isinstance(f.rel.to, basestring):
                    if self == f.rel.to._meta:
                        cache[RelatedObject(f.rel.to, klass, f)] = None
                        proxy_cache[RelatedObject(f.rel.to, klass, f)] = None
                    elif self.concrete_model == f.rel.to._meta.concrete_model:
                        proxy_cache[RelatedObject(f.rel.to, klass, f)] = None
        self._related_objects_cache = cache
        self._related_objects_proxy_cache = proxy_cache

    def get_all_related_many_to_many_objects(self, local_only=False):
        try:
            cache = self._related_many_to_many_cache
        except AttributeError:
            cache = self._fill_related_many_to_many_cache()
        if local_only:
            return [k for k, v in cache.items() if not v]
        return cache.keys()

    def get_all_related_m2m_objects_with_model(self):
        """
        Returns a list of (related-m2m-object, model) pairs. Similar to
        get_fields_with_model().
        """
        try:
            cache = self._related_many_to_many_cache
        except AttributeError:
            cache = self._fill_related_many_to_many_cache()
        return cache.items()

    def _fill_related_many_to_many_cache(self):
        cache = SortedDict()
        parent_list = self.get_parent_list()
        for parent in self.parents:
            for obj, model in parent._meta.get_all_related_m2m_objects_with_model():
                if obj.field.creation_counter < 0 and obj.model not in parent_list:
                    continue
                if not model:
                    cache[obj] = parent
                else:
                    cache[obj] = model
        for klass in get_models(only_installed=False):
            for f in klass._meta.local_many_to_many:
                if f.rel and not isinstance(f.rel.to, basestring) and self == f.rel.to._meta:
                    cache[RelatedObject(f.rel.to, klass, f)] = None
        # Don't memoize until the app cache is fully populated.
        if app_cache_ready():
            self._related_many_to_many_cache = cache
        return cache

    def get_base_chain(self, model):
        """
        Returns a list of parent classes leading to 'model' (order from
        closest to most distant ancestor). This has to handle the case where
        'model' is a grandparent or even more distant relation.
        """
        if not self.parents:
            return
        if model in self.parents:
            return [model]
        for parent in self.parents:
            res = parent._meta.get_base_chain(model)
            if res:
                res.insert(0, parent)
                return res
        raise TypeError('%r is not an ancestor of this model'
                % model._meta.module_name)

    def get_parent_list(self):
        """
        Returns a set of all the ancestors of this model. Useful for
        determining if something is an ancestor, regardless of lineage.
        """
        result = set()
        for parent in self.parents:
            result.add(parent)
            result.update(parent._meta.get_parent_list())
        return result

    def get_ancestor_link(self, ancestor):
        """
        Returns the field on the current model which points to the given
        "ancestor". This is possibly an indirect link (a pointer to a parent
        model, which points, eventually, to the ancestor). Used when
        constructing table joins for model inheritance.

        Returns None if the model isn't an ancestor of this one.
        """
        if ancestor in self.parents:
            return self.parents[ancestor]
        for parent in self.parents:
            # Tries to get a link field from the immediate parent
            parent_link = parent._meta.get_ancestor_link(ancestor)
            if parent_link:
                # In case of a proxied model, the first link
                # of the chain to the ancestor is that parent
                # links
                return self.parents[parent] or parent_link

    def get_ordered_objects(self):
        "Returns a list of Options objects that are ordered with respect to this object."
        if not hasattr(self, '_ordered_objects'):
            objects = []
            # TODO
            #for klass in get_models(get_app(self.app_label)):
            #    opts = klass._meta
            #    if opts.order_with_respect_to and opts.order_with_respect_to.rel \
            #            and self == opts.order_with_respect_to.rel.to._meta:
            #        objects.append(opts)
            self._ordered_objects = objects
        return self._ordered_objects

    def pk_index(self):
        """
        Returns the index of the primary key field in the self.fields list.
        """
        return self.fields.index(self.pk)
class XmlObjectForm(BaseForm):
    """Django Form based on an :class:`~eulcore.xmlmap.XmlObject` model,
    analogous to Django's ModelForm.

    Note that not all :mod:`eulcore.xmlmap.fields` are currently supported; all
    released field types are supported in their single-node variety, but no list
    field types are currently supported.  Attempting to define an XmlObjectForm
    without excluding unsupported fields will result in an Exception.

    Unlike Django's ModelForm, which provides a save() method, XmlObjectForm
    provides analogous functionality via :meth:`update_instance`.  Since an
    XmlObject by itself does not have a save method, and can only be saved in
    particular contexts (e.g., :mod:`eulcore.existdb` or :mod:`eulcore.fedora`),
    there is no meaningful way for an XmlObjectForm to save an associated model
    instance to the appropriate datastore.

    If you wish to customize the html display for an XmlObjectForm, rather than
    using the built-in form display functions, be aware that if your XmlObject
    has any fields of type :class:`~eulcore.xmlmap.fields.NodeField`, you should
    make sure to display the subforms for those fields.

    NOTE: If your XmlObject includes NodeField elements and you do not want
    empty elements in your XML output when empty values are entered into the
    form, you may wish to extend :meth:`eulcore.xmlmap.XmlObject.is_empty` to
    correctly identify when your NodeField elements should be considered empty
    (if the default definition is not accurate or appropriate).  Empty elements
    will not be added to the :class:`eulcore.xmlmap.XmlObject` instance returned
    by :meth:`update_instance`.
    """
    # django has a basemodelform with all the logic
    # and then a modelform with the metaclass declaration; do we need that?
    __metaclass__ = XmlObjectFormType

    # formatting template for outputting the object with subforms; set by the
    # as_table/as_p/as_ul wrappers below
    _html_section = None

    subforms = {}
    """Sorted Dictionary of :class:`XmlObjectForm` instances for fields of type
    :class:`~eulcore.xmlmap.fields.NodeField` belonging to this Form's
    :class:`~eulcore.xmlmap.XmlObject` model, keyed on field name.  Ordered by
    field creation order or by specified fields."""

    form_label = None
    '''Label for this form or subform (set automatically for subforms &
    formsets, using the same logic that is used for field labels.'''

    def __init__(self, data=None, instance=None, prefix=None, initial=None,
                 **kwargs):
        """Initialize the form, optionally bound to ``data`` and/or an
        existing XmlObject ``instance``.

        :param data: bound form data (as for any Django form)
        :param instance: optional XmlObject instance; when omitted, a new
            instance of the configured model class is created
        :param prefix: form prefix, propagated to subforms and formsets
        :param initial: optional dict of initial data; merged with (and
            overridden by) data derived from ``instance``
        :raises ValueError: if no instance is given and no model class is
            configured on the form's Meta options
        """
        opts = self._meta
        # FIX: use None instead of a mutable default argument for `initial`
        # (matches django's own Form signature and avoids the shared-dict
        # anti-pattern).  Make a local copy since it may get updated with
        # instance data.
        local_initial = {} if initial is None else initial.copy()

        if instance is None:
            if opts.model is None:
                raise ValueError('XmlObjectForm has no XmlObject model class specified')
            # if we didn't get an instance, instantiate a new one
            # NOTE: if this is a subform, the data won't go anywhere useful
            # currently requires that instantiate_on_get param be set to True for NodeFields
            self.instance = opts.model()
            # track adding new instance instead of updating existing?
        else:
            self.instance = instance
            # generate dictionary of initial data based on current instance
            # allow initial data from instance to co-exist with other initial data
            local_initial.update(xmlobject_to_dict(self.instance))   #, prefix=prefix)) # fields, exclude?
            # FIXME: is this backwards? should initial data override data from instance?

        # initialize subforms for all nodefields that belong to the xmlobject model
        self._init_subforms(data, prefix)
        self._init_formsets(data, prefix)

        super_init = super(XmlObjectForm, self).__init__
        super_init(data=data, prefix=prefix, initial=local_initial, **kwargs)
        # other kwargs accepted by XmlObjectForm.__init__:
        #    files, auto_id, object_data,
        #    error_class, label_suffix, empty_permitted

    def _init_subforms(self, data=None, prefix=None):
        """Instantiate a subform for each NodeField declared on the class,
        keyed on field name and kept in declaration order."""
        # initialize each subform class with the appropriate model instance and data
        self.subforms = SortedDict()    # create as sorted dictionary to preserve order
        for name, subform in self.__class__.subforms.iteritems():
            # instantiate the new form with the current field as instance, if available
            if self.instance is not None:
                # get the relevant instance for the current NodeField variable
                # NOTE: calling create_foo will create the nodefield for element foo
                # creating here so subfields will be set correctly
                # if the resulting field is empty, it will be removed by update_instance
                getattr(self.instance, 'create_' + name)()
                subinstance = getattr(self.instance, name, None)
            else:
                subinstance = None

            if prefix:
                subprefix = '%s-%s' % (prefix, name)
            else:
                subprefix = name

            # instantiate the subform class with field data and model instance
            # - setting prefix based on field name, to distinguish similarly named fields
            newform = subform(data=data, instance=subinstance, prefix=subprefix)
            # depending on how the subform was declared, it may not have a label yet
            if newform.form_label is None:
                if name in self.subform_labels:
                    newform.form_label = self.subform_labels[name]
            self.subforms[name] = newform

    def _init_formsets(self, data=None, prefix=None):
        """Instantiate a formset for each declared list field, with prefixes
        derived from the field name."""
        self.formsets = {}
        for name, formset in self.__class__.formsets.iteritems():
            if self.instance is not None:
                subinstances = getattr(self.instance, name, None)
            else:
                subinstances = None

            if prefix is not None:
                subprefix = '%s-%s' % (prefix, name)
            else:
                subprefix = name

            self.formsets[name] = formset(data=data, instances=subinstances,
                                          prefix=subprefix)

    def update_instance(self):
        """Save bound form data into the XmlObject model instance and return
        the updated instance."""
        # NOTE: django model form has a save method - not applicable here,
        # since an XmlObject by itself is not expected to have a save method
        # (only likely to be saved in context of a fedora or exist object)

        if hasattr(self, 'cleaned_data'):   # possible to have an empty object/no data
            opts = self._meta
            for name in self.instance._fields.iterkeys():
                # respect fields/exclude restrictions from the Meta options
                if opts.fields and name not in opts.parsed_fields.fields:
                    continue
                if opts.exclude and name in opts.parsed_exclude.fields:
                    continue
                if name in self.cleaned_data:
                    # special case: we don't want empty attributes and elements
                    # for fields which returned no data from the form
                    # converting '' to None and letting XmlObject handle
                    if self.cleaned_data[name] == '':
                        self.cleaned_data[name] = None
                    setattr(self.instance, name, self.cleaned_data[name])

            # update sub-model portions via any subforms
            for name, subform in self.subforms.iteritems():
                self._update_subinstance(name, subform)
            for formset in self.formsets.itervalues():
                formset.update_instance()
        return self.instance

    def _update_subinstance(self, name, subform):
        """Save bound data for a single subform into the XmlObject model
        instance."""
        old_subinstance = getattr(self.instance, name)
        new_subinstance = subform.update_instance()

        # if our instance previously had no node for the subform AND the
        # updated one has data, then attach the new node.
        if old_subinstance is None and not new_subinstance.is_empty():
            setattr(self.instance, name, new_subinstance)

        # on the other hand, if the instance previously had a node for the
        # subform AND the updated one is empty, then remove the node.
        if old_subinstance is not None and new_subinstance.is_empty():
            delattr(self.instance, name)

    def is_valid(self):
        """Returns True if this form and all subforms (if any) are valid.

        If all standard form-validation tests pass, uses
        :class:`~eulcore.xmlmap.XmlObject` validation methods to check for
        schema-validity (if a schema is associated) and reporting errors.

        Additonal notes:
         * schema validation requires that the :class:`~eulcore.xmlmap.XmlObject`
           be initialized with the cleaned form data, so if normal validation
           checks pass, the associated :class:`~eulcore.xmlmap.XmlObject` instance
           will be updated with data via :meth:`update_instance`
         * schema validation errors SHOULD NOT happen in a production system

        :rtype: boolean
        """
        valid = super(XmlObjectForm, self).is_valid() and \
                all(s.is_valid() for s in self.subforms.itervalues()) and \
                all(s.is_valid() for s in self.formsets.itervalues())
        # schema validation can only be done after regular validation passes,
        # because xmlobject must be updated with cleaned_data
        if valid and self.instance is not None:
            # update instance required to check schema-validity
            instance = self.update_instance()
            if instance.is_valid():
                return True
            else:
                # if not schema-valid, add validation errors to error dictionary
                # NOTE: not overriding _get_errors because that is used by the built-in validation
                # append to any existing non-field errors
                if NON_FIELD_ERRORS not in self._errors:
                    self._errors[NON_FIELD_ERRORS] = self.error_class()
                self._errors[NON_FIELD_ERRORS].append("There was an unexpected schema validation error.  " +
                    "This should not happen!  Please report the following errors:")
                for err in instance.validation_errors():
                    self._errors[NON_FIELD_ERRORS].append('VALIDATION ERROR: %s' % err.message)
                return False
        return valid

    # NOTE: errors only returned for the *current* form, not for all subforms
    # - appears to be used only for form output, so this should be sensible

    def _html_output(self, normal_row, error_row, row_ender, help_text_html,
                     errors_on_separate_row):
        """Extend BaseForm's helper function for outputting HTML.  Used by
        as_table(), as_ul(), as_p().

        Combines the HTML version of the main form's fields with the HTML
        content for any subforms.
        """
        parts = []
        parts.append(super(XmlObjectForm, self)._html_output(normal_row,
            error_row, row_ender, help_text_html, errors_on_separate_row))

        def _subform_output(subform):
            return subform._html_output(normal_row, error_row, row_ender,
                                        help_text_html, errors_on_separate_row)

        for name, subform in self.subforms.iteritems():
            parts.append(self._html_subform_output(subform, name, _subform_output))

        for name, formset in self.formsets.iteritems():
            parts.append(unicode(formset.management_form))
            # use form label if one was set
            if hasattr(formset, 'form_label'):
                name = formset.form_label
            # collect the html output for all the forms in the formset
            subform_parts = list()
            for subform in formset.forms:
                subform_parts.append(self._html_subform_output(subform,
                    gen_html=_subform_output, suppress_section=True))
            # then wrap all forms in the section container, so formset label appears once
            parts.append(self._html_subform_output(name=name,
                content=u'\n'.join(subform_parts)))

        return mark_safe(u'\n'.join(parts))

    def _html_subform_output(self, subform=None, name=None, gen_html=None,
                             content=None, suppress_section=False):
        """Render one subform (or pre-rendered content) inside the configured
        section wrapper, unless ``suppress_section`` is True."""
        # pass the configured html section to subform in case of any sub-subforms
        if subform is not None:
            subform._html_section = self._html_section
            if gen_html is not None:
                content = gen_html(subform)

        # if an html section is configured, add section label and wrapper
        if self._html_section is not None and not suppress_section:
            return self._html_section % \
                {'label': fieldname_to_label(name), 'content': content}
        else:
            return content

    # intercept the three standard html output formats to set an appropriate
    # section format

    def as_table(self):
        """Behaves exactly the same as Django Form's as_table() method, except
        that it also includes the fields for any associated subforms in table
        format.

        Subforms, if any, will be grouped in a <tbody> labeled with a heading
        based on the label of the field.
        """
        self._html_section = u'<tbody><tr><th colspan="2" class="section">%(label)s</th></tr><tr><td colspan="2"><table class="subform">\n%(content)s</table></td></tr></tbody>'
        return super(XmlObjectForm, self).as_table()

    def as_p(self):
        """Behaves exactly the same as Django Form's as_p() method, except that
        it also includes the fields for any associated subforms in paragraph
        format.

        Subforms, if any, will be grouped in a <div> of class 'subform', with
        a heading based on the label of the field.
        """
        self._html_section = u'<div class="subform"><p class="label">%(label)s</p>%(content)s</div>'
        return super(XmlObjectForm, self).as_p()

    def as_ul(self):
        """Behaves exactly the same as Django Form's as_ul() method, except
        that it also includes the fields for any associated subforms in list
        format.

        Subforms, if any, will be grouped in a <ul> of class 'subform', with a
        heading based on the label of the field.
        """
        self._html_section = u'<li class="subform"><p class="label">%(label)s</p><ul>%(content)s</ul></li>'
        return super(XmlObjectForm, self).as_ul()
class Collector(object):
    """Collects the full set of objects that must be deleted (following
    cascades), then deletes them in dependency order on the given database
    alias."""

    def __init__(self, using):
        # database alias all queries run against
        self.using = using
        # Initially, {model: set([instances])}, later values become lists.
        self.data = {}
        self.batches = {}  # {model: {field: set([instances])}}
        self.field_updates = {}  # {model: {(field, value): set([instances])}}
        self.dependencies = {}  # {model: set([models])}

    def add(self, objs, source=None, nullable=False, reverse_dependency=False):
        """
        Adds 'objs' to the collection of objects to be deleted.  If the call is
        the result of a cascade, 'source' should be the model that caused it
        and 'nullable' should be set to True, if the relation can be null.

        Returns a list of all objects that were not already collected.
        """
        if not objs:
            return []
        new_objs = []
        model = objs[0].__class__
        instances = self.data.setdefault(model, set())
        for obj in objs:
            if obj not in instances:
                new_objs.append(obj)
        instances.update(new_objs)
        # Nullable relationships can be ignored -- they are nulled out before
        # deleting, and therefore do not affect the order in which objects have
        # to be deleted.
        if new_objs and source is not None and not nullable:
            # reverse_dependency flips the edge so 'source' is deleted first
            # (used when cascading to parent models).
            if reverse_dependency:
                source, model = model, source
            self.dependencies.setdefault(source, set()).add(model)
        return new_objs

    def add_batch(self, model, field, objs):
        """
        Schedules a batch delete. Every instance of 'model' that is related to
        an instance of 'obj' through 'field' will be deleted.
        """
        self.batches.setdefault(model, {}).setdefault(field, set()).update(objs)

    def add_field_update(self, field, value, objs):
        """
        Schedules a field update. 'objs' must be a homogenous iterable
        collection of model instances (e.g. a QuerySet).
        """
        if not objs:
            return
        model = objs[0].__class__
        self.field_updates.setdefault(model, {}).setdefault(
            (field, value), set()).update(objs)

    def collect(self, objs, source=None, nullable=False, collect_related=True,
                source_attr=None, reverse_dependency=False):
        """
        Adds 'objs' to the collection of objects to be deleted as well as all
        parent instances.  'objs' must be a homogenous iterable collection of
        model instances (e.g. a QuerySet).  If 'collect_related' is True,
        related objects will be handled by their respective on_delete handler.

        If the call is the result of a cascade, 'source' should be the model
        that caused it and 'nullable' should be set to True, if the relation
        can be null.

        If 'reverse_dependency' is True, 'source' will be deleted before the
        current model, rather than after. (Needed for cascading to parent
        models, the one case in which the cascade follows the forwards
        direction of an FK rather than the reverse direction.)
        """
        new_objs = self.add(objs, source, nullable,
                            reverse_dependency=reverse_dependency)
        if not new_objs:
            return
        model = new_objs[0].__class__

        # Recursively collect parent models, but not their related objects.
        # These will be found by meta.get_all_related_objects()
        for parent_model, ptr in model._meta.parents.iteritems():
            if ptr:
                parent_objs = [getattr(obj, ptr.name) for obj in new_objs]
                self.collect(parent_objs, source=model,
                             source_attr=ptr.rel.related_name,
                             collect_related=False,
                             reverse_dependency=True)

        if collect_related:
            for related in model._meta.get_all_related_objects(
                    include_hidden=True):
                field = related.field
                if related.model._meta.auto_created:
                    # auto-created (e.g. m2m through) rows can be deleted in
                    # bulk without firing per-object cascade handlers
                    self.add_batch(related.model, field, new_objs)
                else:
                    sub_objs = self.related_objects(related, new_objs)
                    if not sub_objs:
                        continue
                    # delegate to the FK's configured on_delete behavior
                    field.rel.on_delete(self, field, sub_objs, self.using)

            # TODO This entire block is only needed as a special case to
            # support cascade-deletes for GenericRelation. It should be
            # removed/fixed when the ORM gains a proper abstraction for virtual
            # or composite fields, and GFKs are reworked to fit into that.
            for relation in model._meta.many_to_many:
                if not relation.rel.through:
                    sub_objs = relation.bulk_related_objects(new_objs, self.using)
                    self.collect(sub_objs,
                                 source=model,
                                 source_attr=relation.rel.related_name,
                                 nullable=True)

    def related_objects(self, related, objs):
        """
        Gets a QuerySet of objects related to ``objs`` via the relation
        ``related``.
        """
        return related.model._base_manager.using(self.using).filter(
            **{"%s__in" % related.field.name: objs}
        )

    def instances_with_model(self):
        # Yields (model, instance) pairs for everything collected so far.
        for model, instances in self.data.iteritems():
            for obj in instances:
                yield model, obj

    def sort(self):
        """Reorder self.data so that models with no unresolved dependencies
        come first (a simple topological sort).  Leaves self.data untouched if
        a dependency cycle prevents a full ordering."""
        sorted_models = []
        models = self.data.keys()
        while len(sorted_models) < len(models):
            found = False
            for model in models:
                if model in sorted_models:
                    continue
                dependencies = self.dependencies.get(model)
                # a model is ready once all its dependencies are already sorted
                if not (dependencies and dependencies.difference(sorted_models)):
                    sorted_models.append(model)
                    found = True
            if not found:
                # cycle detected: bail out without reordering
                return
        self.data = SortedDict([(model, self.data[model])
                                for model in sorted_models])

    @force_managed
    def delete(self):
        """Perform the collected deletion: signals, field updates, batch and
        instance deletes, then post-delete bookkeeping.  The statement order
        below is load-bearing; do not rearrange."""
        # sort instance collections
        for model, instances in self.data.items():
            self.data[model] = sorted(instances, key=attrgetter("pk"))

        # if possible, bring the models in an order suitable for databases that
        # don't support transactions or cannot defer constraint checks until the
        # end of a transaction.
        self.sort()

        # send pre_delete signals
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.pre_delete.send(
                    sender=model, instance=obj, using=self.using
                )

        # update fields
        for model, instances_for_fieldvalues in self.field_updates.iteritems():
            query = sql.UpdateQuery(model)
            for (field, value), instances in instances_for_fieldvalues.iteritems():
                query.update_batch([obj.pk for obj in instances],
                                   {field.name: value}, self.using)

        # reverse instance collections
        for instances in self.data.itervalues():
            instances.reverse()

        # delete batches
        for model, batches in self.batches.iteritems():
            query = sql.DeleteQuery(model)
            for field, instances in batches.iteritems():
                query.delete_batch([obj.pk for obj in instances],
                                   self.using, field)

        # delete instances
        for model, instances in self.data.iteritems():
            query = sql.DeleteQuery(model)
            pk_list = [obj.pk for obj in instances]
            query.delete_batch(pk_list, self.using)

        # send post_delete signals
        for model, obj in self.instances_with_model():
            if not model._meta.auto_created:
                signals.post_delete.send(
                    sender=model, instance=obj, using=self.using
                )

        # update collected instances
        for model, instances_for_fieldvalues in self.field_updates.iteritems():
            for (field, value), instances in instances_for_fieldvalues.iteritems():
                for obj in instances:
                    setattr(obj, field.attname, value)
        # clear the pk on every deleted in-memory instance
        for model, instances in self.data.iteritems():
            for instance in instances:
                setattr(instance, model._meta.pk.attname, None)
class SiteDirectoriesFinder(BaseFinder):
    """
    A static files finder that looks in the directory of each site as
    specified in the source_dir attribute of the given storage class.
    """
    storage_class = SiteStaticStorage

    def __init__(self, sites=None, *args, **kwargs):
        """Build a storage per site directory.

        :param sites: optional iterable of site folder names; when omitted,
            folder names are looked up from the Site model (skipping sites
            without a folder name).
        """
        # The list of sites that are handled
        self.sites = []
        # Mapping of site module paths to storage instances
        self.storages = SortedDict()
        # First, make the site dir importable.
        # FIX: guard the append so repeated finder instantiations don't keep
        # growing sys.path with duplicate entries.
        if settings.SITES_DIR not in sys.path:
            sys.path.append(settings.SITES_DIR)
        # Look up sites from the database
        if sites is None:
            sites = [site.folder_name for site in Site.objects.all()
                     if site.folder_name not in (None, '')]
        for site in sites:
            site_storage = self.storage_class(site)
            # only register storages whose directory actually exists
            if os.path.isdir(site_storage.location):
                self.storages[site] = site_storage
                if site not in self.sites:
                    self.sites.append(site)
        super(SiteDirectoriesFinder, self).__init__(*args, **kwargs)

    def list(self, ignore_patterns):
        """
        List all files in all site storages, yielding (path, storage) pairs.
        """
        for storage in self.storages.itervalues():
            if storage.exists(''):  # check if storage location exists
                for path in utils.get_files(storage, ignore_patterns):
                    yield path, storage

    def find(self, path, all=False):
        """
        Looks for files in the site directories.  Returns the first match,
        or all matches as a list when ``all`` is True.
        """
        matches = []
        for site in self.sites:
            match = self.find_in_site(site, path)
            if match:
                if not all:
                    return match
                matches.append(match)
        return matches

    def find_in_site(self, site, path):
        """
        Find a requested static file in a site's static locations.  Returns
        the absolute path, or None when the file is absent or the path does
        not carry the storage's required prefix.
        """
        storage = self.storages.get(site, None)
        if storage:
            if storage.prefix:
                prefix = '%s%s' % (storage.prefix, os.sep)
                if not path.startswith(prefix):
                    return None
                path = path[len(prefix):]
            # only try to find a file if the source dir actually exists
            if storage.exists(path):
                matched_path = storage.path(path)
                if matched_path:
                    return matched_path