def get_udfc_search_fields(self, user):
    """Build a DotDict of collection-UDF search fields visible to `user`.

    Returns a DotDict with:
      - 'models': set of UDFModel leaf classes that contributed fields
      - 'udfc': nested dict keyed by object-name of udf name / model name,
        holding the UserDefinedFieldDefinition, the first datatype's
        choices, and the action/range keys from collection_udf_settings.

    A field is included only when `user` has at least read access to it.
    """
    from treemap.models import InstanceUser
    from treemap.udf import UDFModel
    from treemap.util import to_object_name, leaf_models_of_class
    from treemap.lib.perms import udf_write_level, READ, WRITE

    try:
        iu = self.instanceuser_set.get(user__pk=user.pk)
    except InstanceUser.DoesNotExist:
        iu = None

    data = DotDict({'models': set(), 'udfc': {}})
    for clz in leaf_models_of_class(UDFModel):
        model_name = clz.__name__
        # .items() instead of Python-2-only .iteritems(), matching the
        # other collection_udf_settings iterations in this file
        items = getattr(clz, 'collection_udf_settings', {}).items()
        for k, v in items:
            udfds = (u for u in udf_defs(self, model_name) if u.name == k)
            for udfd in udfds:
                if udf_write_level(iu, udfd) in (READ, WRITE):
                    nest_path = ('udfc.%s.models.%s'
                                 % (to_object_name(k),
                                    to_object_name(model_name)))
                    data[nest_path] = {
                        'udfd': udfd,
                        'fields': udfd.datatype_dict[0]['choices']
                    }
                    p = 'udfc.%s.' % to_object_name(k)
                    data[p + 'action_verb'] = v['action_verb']
                    data[p + 'range_field_key'] = v['range_field_key']
                    data[p + 'action_field_key'] = v['action_field_key']
                    data['models'] |= {clz}

    return data
def _get_udfc_search_fields(self):
    """Return the collection-UDF search structure keyed by udf name and
    model name; entries default to {'fields': [], 'udfd': None}."""
    from treemap.util import to_object_name

    # Skeleton: every (name, model) pair starts with no definition
    empty_udfc = {
        to_object_name(n_k): {
            to_object_name(m_k): {'fields': [], 'udfd': None}
            for m_k in UDFC_MODELS
        }
        for n_k in UDFC_NAMES
    }

    udfc = deepcopy(empty_udfc)

    # Fill in the slots for which a definition actually exists
    for model_name in UDFC_MODELS:
        for udfd in udf_defs(self, model_name):
            if udfd.name not in UDFC_NAMES:
                continue
            udfc[to_object_name(udfd.name)][to_object_name(udfd.model_type)] = {
                'udfd': udfd,
                'fields': udfd.datatype_dict[0]['choices'],
            }

    return udfc
def get_udfc_search_fields(self, user):
    """Collect collection-UDF search fields (ids, definitions, choices,
    and action/range keys) that `user` may at least read, as a DotDict."""
    from treemap.models import InstanceUser
    from treemap.udf import UDFModel
    from treemap.util import to_object_name, leaf_models_of_class
    from treemap.lib.perms import udf_write_level, READ, WRITE

    try:
        iu = self.instanceuser_set.get(user__pk=user.pk)
    except InstanceUser.DoesNotExist:
        iu = None

    data = DotDict({"models": set(), "udfc": {}})
    for clz in leaf_models_of_class(UDFModel):
        model_name = clz.__name__
        obj_model = to_object_name(model_name)
        for name, settings in clz.collection_udf_settings.items():
            matching = [u for u in udf_defs(self, model_name)
                        if u.name == name]
            for udfd in matching:
                if udf_write_level(iu, udfd) not in (READ, WRITE):
                    continue
                base = "udfc.%s." % to_object_name(name)
                data["%sids.%s" % (base, obj_model)] = udfd.pk
                data["%smodels.%s" % (base, obj_model)] = {
                    "udfd": udfd,
                    "fields": udfd.datatype_dict[0]["choices"],
                }
                data[base + "action_verb"] = settings["action_verb"]
                data[base + "range_field_key"] = settings["range_field_key"]
                data[base + "action_field_key"] = settings["action_field_key"]
                data["models"] |= {clz}

    return data
def _commit_tree_data(self, data, plot, tree, tree_edited):
    """Copy imported row values (standard fields and tree UDFs) onto
    `tree` — creating it lazily — and save it only if anything changed.

    `data` is the cleaned row dict; `plot` supplies the instance for a
    newly created Tree; `tree` may be None.
    """
    # .items() instead of Python-2-only .iteritems()
    for tree_attr, field_name in TreeImportRow.TREE_MAP.items():
        value = data.get(field_name, None)
        if value:
            tree_edited = True
            if tree is None:
                tree = Tree(instance=plot.instance)
            setattr(tree, tree_attr, value)

    ie = self.import_event
    tree_udf_defs = udf_defs(ie.instance, 'Tree')
    for udf_def in tree_udf_defs:
        udf_column_name = ie.get_udf_column_name(udf_def)
        value = data.get(udf_column_name, None)
        # Legitimate values could be falsey
        if value is not None:
            tree_edited = True
            if tree is None:
                tree = Tree(instance=plot.instance)
            tree.udfs[udf_def.name] = \
                self._import_value_to_udf_value(udf_def, value)

    if tree_edited:
        tree.plot = plot
        tree.save_with_system_user_bypass_auth()
        tree.plot.update_updated_fields(ie.owner)
def udf_list(request, instance):
    """Context for the UDF admin page: model entries (core first in
    reverse-alphabetical order, then gsi alphabetical) with their specs."""
    editable = instance.editable_udf_models()

    def model_entries(classes, reverse=False):
        entries = [{'name': clz.__name__,
                    'display_name': clz.display_name(instance)}
                   for clz in classes]
        return sorted(entries, key=lambda m: m['name'], reverse=reverse)

    udf_models = (model_entries(editable['core'], reverse=True) +
                  model_entries(editable['gsi']))

    editable_gsi_models = [clz.__name__ for clz in editable['gsi']]
    udf_model_names = sorted(m['name'] for m in udf_models)

    udfs = sorted(
        (udf for udf in udf_defs(instance)
         if udf.model_type in udf_model_names),
        key=lambda udf: (udf.model_type, udf.iscollection, udf.name))

    # Attach each model's matching UDF specs in place
    for model in udf_models:
        model['specs'] = [{'udf': udf, 'datatype': _get_type_display(udf)}
                          for udf in udfs
                          if udf.model_type == model['name']]

    return {
        "udf_models": udf_models,
        "editable_gsi_models": editable_gsi_models
    }
def alert_identifiers():
    """Search identifiers for the Status field of every 'Alerts'
    collection UDF on the instance."""
    return ['udf:%s:%s.Status' % (udf.model_type.lower(), udf.pk)
            for udf in udf_defs(instance)
            if udf.iscollection and udf.name == 'Alerts']
def _values_for_model(instance, job, table, model, select, select_params, prefix=None):
    """Collect exportable column names for `model`, restricted to fields
    the job's user may read.

    Mutates `select` / `select_params` in place with raw-SQL snippets for
    UDF columns (aggregated collection rows, scalar hstore lookups) and
    returns the list of prefixed concrete field names.
    """
    if prefix:
        prefix += "__"
    else:
        prefix = ""

    prefixed_names = []
    model_class = safe_get_model_class(model)
    dummy_instance = model_class()

    for field_name in (
        perm.field_name
        for perm in field_permissions(job.user, instance, model)
        if perm.permission_level >= FieldPermission.READ_ONLY
    ):
        prefixed_name = prefix + field_name

        if field_name.startswith("udf:"):
            name = field_name[4:]
            # Membership test directly on the dict — no .keys() needed
            if name in model_class.collection_udf_settings:
                # Collection UDF: aggregate its rows into one string column
                field_definition_id = None
                for udfd in udf_defs(instance, model):
                    if udfd.iscollection and udfd.name == name:
                        field_definition_id = udfd.id

                if field_definition_id is None:
                    continue

                select[
                    prefixed_name
                ] = """
                WITH formatted_data AS (
                    SELECT concat('(', data, ')') as fdata
                    FROM %s
                    WHERE field_definition_id = %s
                    and model_id = %s.id
                )

                SELECT array_to_string(array_agg(fdata), ', ', '*')
                FROM formatted_data
                """ % (
                    UserDefinedCollectionValue._meta.db_table,
                    field_definition_id,
                    table,
                )
            else:
                # Scalar UDF stored in the model's hstore `udfs` column
                select[prefixed_name] = "{0}.udfs->%s".format(table)
                select_params.append(name)
        else:
            if not model_hasattr(dummy_instance, field_name):
                # Exception will be raised downstream if you look for
                # a field on a model that no longer exists but still
                # has a stale permission record. Here we check for that
                # case and don't include the field if it does not exist.
                continue
            prefixed_names.append(prefixed_name)

    return prefixed_names
def extra_select_and_values_for_model(
        instance, job, table, model, prefix=None):
    """Build (extra_select, prefixed_names) for exporting `model`.

    extra_select maps prefixed column names to raw-SQL snippets for UDF
    columns; prefixed_names lists the readable concrete field names.
    Only fields the job's user can at least read are considered.
    """
    if prefix:
        prefix += '__'
    else:
        prefix = ''

    perms = permissions(job.user, instance, model)

    extra_select = {}
    prefixed_names = []

    dummy_instance = safe_get_model_class(model)()

    for perm in (perm for perm in perms
                 if perm.permission_level >= FieldPermission.READ_ONLY):
        field_name = perm.field_name
        prefixed_name = prefix + field_name

        if field_name in _UDFC_FIELDS:
            # Collection UDF: aggregate its rows into one string column.
            # Resolve the matching definition id; skip if none exists.
            field_definition_id = None

            for udfd in udf_defs(instance, model):
                if udfd.iscollection and udfd.name == field_name[4:]:
                    field_definition_id = udfd.id

            if field_definition_id is None:
                continue

            extra_select[prefixed_name] = (
                """
                WITH formatted_data AS (
                    SELECT concat('(', data, ')') as fdata
                    FROM %s
                    WHERE field_definition_id = %s
                    and model_id = %s.id
                )

                SELECT array_to_string(array_agg(fdata), ', ', '*')
                FROM formatted_data
                """
                % (UserDefinedCollectionValue._meta.db_table,
                   field_definition_id,
                   table))
        elif field_name.startswith('udf:'):
            # Scalar UDF stored in the model's hstore `udfs` column
            name = field_name[4:]
            extra_select[prefixed_name] = "%s.udfs->'%s'" % (table, name)
        else:
            if not model_hasattr(dummy_instance, field_name):
                # Exception will be raised downstream if you look for
                # a field on a model that no longer exists but still
                # has a stale permission record. Here we check for that
                # case and don't include the field if it does not exist.
                continue
            prefixed_names.append(prefixed_name)

    return (extra_select, prefixed_names)
def validate_user_defined_fields(self):
    """Validate each UDF column present in this row, recording cleaned
    values and appending an error for any value that fails validation."""
    ie = self.import_event
    for udf_def in udf_defs(ie.instance):
        column_name = ie.get_udf_column_name(udf_def)
        value = self.datadict.get(column_name, None)
        if not value:
            continue
        try:
            udf_def.clean_value(value)
        except ValidationError as ve:
            self.append_error(errors.INVALID_UDF_VALUE, column_name,
                              str(ve))
        else:
            self.cleaned[column_name] = value
def validate_user_defined_fields(self):
    """Check every UDF column in this row; stash cleaned values and
    record an INVALID_UDF_VALUE error for anything that fails."""
    ie = self.import_event
    for udf_def in udf_defs(ie.instance):
        column = ie.get_udf_column_name(udf_def)
        raw = self.datadict.get(column, None)
        if raw:
            try:
                udf_def.clean_value(raw)
                self.cleaned[column] = raw
            except ValidationError as ve:
                self.append_error(
                    errors.INVALID_UDF_VALUE, column, str(ve))
def _get_fields(instance, model_name):
    """Return dotted '<objectName>.<field>' names for the model's
    scalar (non-collection) UDFs plus visible concrete fields."""
    Model = safe_get_model_class(model_name)
    mobj = Model(instance=instance)

    scalar_udfs = {udf.canonical_name
                   for udf in udf_defs(instance, model_name)
                   if not udf.iscollection}

    concrete = set()
    for f in mobj._meta.get_fields(include_parents=False):
        if _should_show_field(Model, f.name) and not f.is_relation:
            concrete.add(f.name)

    obj_name = to_object_name(model_name)
    return {'%s.%s' % (obj_name, f) for f in (concrete | scalar_udfs)}
def info(group):
    """Decorate `group` with display tuples for its field keys and the
    resolved collection UDF definitions for its collection keys."""
    group['fields'] = [
        (key, labels.get(key), templates.get(key, "treemap/field/tr.html"))
        for key in group.get('field_keys', [])
    ]

    def lookup_cudf(full_name):
        # Raises StopIteration if the key has no matching definition
        return next(udf for udf in udf_defs(instance)
                    if udf.full_name == full_name)

    group['collection_udfs'] = [
        lookup_cudf(udf_name)
        for udf_name in group.get('collection_udf_keys', [])
    ]
    return group
def get_disabled_cudfs(group):
    """Return sorted collection-UDF full names eligible for `group` but
    not currently enabled in its collection_udf_keys."""
    if 'model' in group:
        models = (to_model_name(group['model']), )
    else:
        models = ('Tree', 'Plot')

    def eligible(udf):
        return (udf.iscollection and
                udf.model_type in models and
                (group['header'] not in collection_groups or
                 udf.name == group['header']))

    available = {udf.full_name for udf in udf_defs(instance)
                 if eligible(udf)}
    return sorted(available - set(group['collection_udf_keys']))
def get_alert_field_info(identifier, instance):
    """Describe an alert search field for `identifier`, or None when the
    identifier does not match the alert pattern."""
    from treemap.util import get_model_for_instance

    match = ALERT_IDENTIFIER_PATTERN.match(identifier)
    if match is None:
        return None

    model_name, pk = match.groups()
    Model = get_model_for_instance(model_name, instance)
    # Raises StopIteration if no UDF definition has this pk
    udf_def = next(udf for udf in udf_defs(instance)
                   if udf.pk == int(pk))
    display_name = force_text(Model.terminology(instance)['singular'])

    return {
        'identifier': identifier,
        'search_type': 'DEFAULT',
        'default_identifier': udf_def.full_name,
        'label': 'Open %(model)s Alerts' % {'model': display_name},
    }
def udf_bulk_update(request, instance):
    '''
    Apply per-choice changes to choice-type UserDefinedFieldDefinitions.

    `request.body` is JSON with key 'choice_changes': a list of
    directives, each {'id': <udfd id>, 'changes': [<change>, ...]}.
    Each change is a delete, rename, or add for one choice; at most one
    change per choice, ordered deletes, then renames, then adds.
    See `_udf_update_choice` for the change structure.
    '''
    body = json.loads(request.body)
    choice_changes = body.get('choice_changes', None)

    if choice_changes:
        changes_by_id = {}
        for directive in choice_changes:
            changes_by_id[int(directive['id'])] = directive['changes']

        targeted = [udf for udf in udf_defs(instance)
                    if udf.pk in changes_by_id]

        # Update one at a time rather than doing bulk_update.
        # There won't be that many of them, and we need to go through
        # all the UDF machinery to update models and audit records.
        # Also, assume that the frontend will not send more than one change
        # (rename or delete) for the same choice,
        # or changes (rename or delete) for any new choices.
        for udf in targeted:
            for change in changes_by_id[udf.pk]:
                _udf_update_choice(udf, instance, change)

    return HttpResponse(_('Updated Custom Fields'))
def validate_user_defined_fields(self):
    """Validate each UDF column in this row; dict-style validation
    errors are flattened into a newline-joined message."""
    ie = self.import_event
    for udf_def in udf_defs(ie.instance):
        column_name = ie.get_udf_column_name(udf_def)
        value = self.datadict.get(column_name, None)
        if not value:
            continue
        try:
            udf_def.clean_value(value)
            self.cleaned[column_name] = value
        except ValidationError as ve:
            if isinstance(ve.message_dict, dict):
                message = '\n'.join(
                    unicode(m) for m in ve.message_dict.values())
            else:
                message = str(ve)
            self.append_error(errors.INVALID_UDF_VALUE, column_name,
                              message)
def validate_user_defined_fields(self):
    """Validate UDF columns, recording cleaned values; per-field
    ValidationError dicts are joined into one message."""
    ie = self.import_event

    def error_message(ve):
        # message_dict carries per-field errors; otherwise use str(ve)
        if isinstance(ve.message_dict, dict):
            return '\n'.join(unicode(m)
                             for m in ve.message_dict.values())
        return str(ve)

    for udf_def in udf_defs(ie.instance):
        column = ie.get_udf_column_name(udf_def)
        value = self.datadict.get(column, None)
        if not value:
            continue
        try:
            udf_def.clean_value(value)
            self.cleaned[column] = value
        except ValidationError as ve:
            self.append_error(errors.INVALID_UDF_VALUE, column,
                              error_message(ve))
def get_display_model_name(audit_name, instance=None):
    """Translate an audit model name to a display name.

    'udf:<pk>' audit names resolve to the matching UDF definition's
    name; anything else (or any lookup failure) is returned unchanged.
    """
    if not audit_name.startswith('udf:'):
        return audit_name

    try:
        # UDF Collections store their model names in the audit table as
        # udf:<pk of UserDefinedFieldDefinition>
        pk = int(audit_name[4:])
        if instance:
            for udf_def in udf_defs(instance):
                if udf_def.pk == pk:
                    return udf_def.name
        else:
            return UserDefinedFieldDefinition.objects.get(pk=pk).name
    except (ValueError, UserDefinedFieldDefinition.DoesNotExist):
        pass

    # If something goes wrong, just use the defaults
    return audit_name
def get_disabled_cudfs(group):
    """Collection UDFs valid for this group's models but absent from its
    enabled keys, sorted by full name."""
    models = ((to_model_name(group['model']),)
              if 'model' in group
              else ('Tree', 'Plot'))

    header = group['header']
    enabled = set(group['collection_udf_keys'])

    available = set()
    for udf in udf_defs(instance):
        if not udf.iscollection or udf.model_type not in models:
            continue
        # A named group only accepts UDFs bearing its own header
        if header in collection_groups and udf.name != header:
            continue
        available.add(udf.full_name)

    return sorted(available - enabled)
def delete(self, *args, **kwargs):
    """Delete this UDF definition and scrub everything derived from it:
    stored values, audit records, mobile_api_fields references, and
    field permissions.
    """
    if self.iscollection:
        # Collection UDFs store rows in a separate table keyed by this
        # definition; their audits use the "udf:<pk>" pseudo-model name.
        UserDefinedCollectionValue.objects.filter(field_definition=self).delete()

        Audit.objects.filter(instance=self.instance).filter(model="udf:%s" % self.pk).delete()

        if "mobile_api_fields" in self.instance.config:
            for group in self.instance.mobile_api_fields:
                if self.full_name in group.get("collection_udf_keys", []):
                    # If this is the only collection UDF with this name,
                    # we remove the entire group, since there would be no
                    # eligible items to go *in* the group after deletion
                    if len([udf for udf in udf_defs(self.instance)
                            if udf.name == self.name]) == 1:
                        self.instance.mobile_api_fields.remove(group)
                    # Otherwise, just remove this UDF from the group
                    else:
                        group["collection_udf_keys"].remove(self.full_name)
            self.instance.save()
    else:
        # Scalar UDFs live inside each model row's `udfs` dict column
        Model = safe_get_udf_model_class(self.model_type)
        objects_with_udf_data = Model.objects.filter(instance=self.instance).filter(udfs__contains=[self.name])
        for obj in objects_with_udf_data:
            del obj.udfs[self.name]
            # save_base instead of save_with_user,
            # we delete the audits anyways
            obj.save_base()

        Audit.objects.filter(instance=self.instance).filter(model=self.model_type).filter(
            field=self.canonical_name
        ).delete()

        # If there is no mobile_api_field in the config, that means the
        # instance is using the default, which should not mutate
        if "mobile_api_fields" in self.instance.config:
            for group in self.instance.mobile_api_fields:
                if self.full_name in group.get("field_keys", []):
                    group["field_keys"].remove(self.full_name)
            self.instance.save()

    # remove field permissions for this udf
    FieldPermission.objects.filter(
        model_name=self.model_type, field_name=self.canonical_name, instance=self.instance
    ).delete()

    super(UserDefinedFieldDefinition, self).delete(*args, **kwargs)
def udf_bulk_update(request, instance):
    '''
    Bulk-apply choice edits to choice-type UserDefinedFieldDefinitions.

    The request body is JSON containing 'choice_changes': a list of
    {'id': <udfd id>, 'changes': [...]} directives. Each change is a
    delete, rename, or add for a single choice — at most one change per
    choice, ordered deletes first, then renames, then adds. See
    `_udf_update_choice` for the individual change structure.
    '''
    payload = json.loads(request.body)
    choice_changes = payload.get('choice_changes', None)

    if choice_changes:
        choice_map = dict(
            (int(directive['id']), directive['changes'])
            for directive in choice_changes)

        matching_udfds = [udf for udf in udf_defs(instance)
                          if udf.pk in choice_map]

        # Update one at a time rather than doing bulk_update.
        # There won't be that many of them, and we need to go through
        # all the UDF machinery to update models and audit records.
        # Also, assume that the frontend will not send more than one change
        # (rename or delete) for the same choice,
        # or changes (rename or delete) for any new choices.
        for udf in matching_udfds:
            for change_params in choice_map[udf.pk]:
                _udf_update_choice(udf, instance, change_params)

    return HttpResponse(_('Updated Custom Fields'))
def _get_fields(instance, model_name):
    """Dotted, object-name-prefixed field identifiers for `model_name`:
    its non-collection UDFs plus displayable concrete fields."""
    Model = safe_get_model_class(model_name)
    instance_obj = Model(instance=instance)

    udf_names = {
        udf.canonical_name
        for udf in udf_defs(instance, model_name)
        if not udf.iscollection
    }
    field_names = {
        field.name
        for field in instance_obj._meta.get_fields(include_parents=False)
        if not field.is_relation and _should_show_field(Model, field.name)
    }

    prefix = to_object_name(model_name)
    return {'%s.%s' % (prefix, name)
            for name in udf_names | field_names}
def _commit_plot_data(self, data, plot):
    """Copy imported row values (standard fields and plot UDFs) onto
    `plot` and save it only if anything changed.
    """
    plot_edited = False
    # .items() instead of Python-2-only .iteritems()
    for plot_attr, field_name in TreeImportRow.PLOT_MAP.items():
        value = data.get(field_name, None)
        if value:
            plot_edited = True
            setattr(plot, plot_attr, value)

    ie = self.import_event

    plot_udf_defs = udf_defs(ie.instance, 'Plot')
    for udf_def in plot_udf_defs:
        udf_column_name = ie.get_udf_column_name(udf_def)
        value = data.get(udf_column_name, None)
        # Legitimate UDF values could be falsey (0, '', False), so only
        # skip values that are actually absent — mirrors the tree commit
        if value is not None:
            plot_edited = True
            plot.udfs[udf_def.name] = value

    if plot_edited:
        plot.save_with_system_user_bypass_auth()
def udf_list(request, instance):
    """Assemble template context for the custom-fields admin page:
    sorted model entries (core reverse-alphabetical, then gsi) with
    their UDF specs attached."""
    editable_udf_models = instance.editable_udf_models()

    def as_entry(clz):
        return {'name': clz.__name__,
                'display_name': clz.display_name(instance)}

    def by_name(model):
        return model['name']

    udf_models = (
        sorted([as_entry(clz) for clz in editable_udf_models['core']],
               key=by_name, reverse=True) +
        sorted([as_entry(clz) for clz in editable_udf_models['gsi']],
               key=by_name))

    editable_gsi_models = [clz.__name__
                           for clz in editable_udf_models['gsi']]
    udf_model_names = sorted(model['name'] for model in udf_models)

    relevant_udfs = [udf for udf in udf_defs(instance)
                     if udf.model_type in udf_model_names]
    relevant_udfs.sort(
        key=lambda udf: (udf.model_type, udf.iscollection, udf.name))

    for model in udf_models:
        model['specs'] = [
            {'udf': udf, 'datatype': _get_type_display(udf)}
            for udf in relevant_udfs
            if udf.model_type == model['name']]

    return {
        "udf_models": udf_models,
        "editable_gsi_models": editable_gsi_models
    }
def get_udfc_search_fields(instance, user):
    """Return a DotDict of collection-UDF search fields on `instance`
    that `user` is permitted to at least read.

    Keys under 'udfc' are nested by object-name of the udf name and the
    model name, holding definition pks, the definitions themselves with
    their first datatype's choices, and the action/range keys declared
    in each model's collection_udf_settings.
    """
    from treemap.models import InstanceUser
    from treemap.udf import UDFModel
    from treemap.util import to_object_name, leaf_models_of_class
    from treemap.lib.perms import udf_write_level, READ, WRITE

    try:
        iu = instance.instanceuser_set.get(user__pk=user.pk)
    except InstanceUser.DoesNotExist:
        iu = None

    data = DotDict({'models': set(), 'udfc': {}})
    for clz in leaf_models_of_class(UDFModel):
        model_name = clz.__name__
        # Only Tree and the instance's enabled map feature types apply
        if model_name not in ['Tree'] + instance.map_feature_types:
            continue
        for k, v in clz.collection_udf_settings.items():
            udfds = (u for u in udf_defs(instance, model_name)
                     if u.name == k)
            for udfd in udfds:
                if udf_write_level(iu, udfd) in (READ, WRITE):
                    _base_nest_path = 'udfc.%s.' % (to_object_name(k))
                    ids_nest_path = ('%sids.%s'
                                     % (_base_nest_path,
                                        to_object_name(model_name)))
                    models_nest_path = ('%smodels.%s'
                                        % (_base_nest_path,
                                           to_object_name(model_name)))
                    data[ids_nest_path] = udfd.pk
                    data[models_nest_path] = {
                        'udfd': udfd,
                        'fields': udfd.datatype_dict[0]['choices']
                    }
                    p = 'udfc.%s.' % to_object_name(k)
                    data[p + 'action_verb'] = v['action_verb']
                    data[p + 'range_field_key'] = v['range_field_key']
                    data[p + 'action_field_key'] = v['action_field_key']
                    data['models'] |= {clz}

    return data
def get_udfc_search_fields(instance, user):
    """Collect collection-UDF search fields (ids, definitions, choices,
    action/range keys) on `instance` readable by `user`, as a DotDict."""
    from treemap.models import InstanceUser
    from treemap.udf import UDFModel
    from treemap.util import to_object_name, leaf_models_of_class
    from treemap.lib.perms import udf_write_level, READ, WRITE

    try:
        iu = instance.instanceuser_set.get(user__pk=user.pk)
    except InstanceUser.DoesNotExist:
        iu = None

    allowed_models = ['Tree'] + instance.map_feature_types
    data = DotDict({'models': set(), 'udfc': {}})

    for clz in leaf_models_of_class(UDFModel):
        model_name = clz.__name__
        if model_name not in allowed_models:
            continue

        obj_model = to_object_name(model_name)
        for name, settings in clz.collection_udf_settings.items():
            for udfd in udf_defs(instance, model_name):
                if udfd.name != name:
                    continue
                if udf_write_level(iu, udfd) not in (READ, WRITE):
                    continue

                base = 'udfc.%s.' % to_object_name(name)
                data['%sids.%s' % (base, obj_model)] = udfd.pk
                data['%smodels.%s' % (base, obj_model)] = {
                    'udfd': udfd,
                    'fields': udfd.datatype_dict[0]['choices']
                }
                data[base + 'action_verb'] = settings['action_verb']
                data[base + 'range_field_key'] = settings['range_field_key']
                data[base + 'action_field_key'] = settings['action_field_key']
                data['models'] |= {clz}

    return data
def _commit_tree_data(self, data, plot, tree, tree_edited):
    """Copy imported row values (standard fields and tree UDFs) onto
    `tree` — creating it lazily — and save it only if anything changed.
    """
    # .items() instead of Python-2-only .iteritems()
    for tree_attr, field_name in TreeImportRow.TREE_MAP.items():
        value = data.get(field_name, None)
        if value:
            tree_edited = True
            if tree is None:
                tree = Tree(instance=plot.instance)
            setattr(tree, tree_attr, value)

    ie = self.import_event
    tree_udf_defs = udf_defs(ie.instance, 'Tree')
    for udf_def in tree_udf_defs:
        udf_column_name = ie.get_udf_column_name(udf_def)
        value = data.get(udf_column_name, None)
        # Legitimate UDF values could be falsey (0, '', False), so only
        # skip values that are actually absent
        if value is not None:
            tree_edited = True
            if tree is None:
                tree = Tree(instance=plot.instance)
            tree.udfs[udf_def.name] = value

    if tree_edited:
        tree.plot = plot
        tree.save_with_system_user_bypass_auth()
def update_map_feature(request_dict, user, feature):
    """
    Update a map feature. Expects JSON in the request body to be:
    {'model.field', ...}

    Where model is either 'tree', 'plot', or another map feature type
    and field is any field on the model. UDF fields should be prefixed
    with 'udf:'.

    This method can be used to create a new map feature by passing in
    an empty MapFeature object (i.e. Plot(instance=instance))

    Returns (feature, tree); raises ValidationError if any save fails.
    """
    # Object names ('plot', 'rainBarrel', ...) accepted on the left of
    # each dotted identifier, besides 'tree'
    feature_object_names = [
        to_object_name(ft) for ft in feature.instance.map_feature_types
    ]

    if isinstance(feature, Convertible):
        # We're going to always work in display units here
        feature.convert_to_display_units()

    def set_attr_on_model(model, attr, val):
        # Set one field on `model`: geometry payloads are converted to
        # web mercator (3857) and 'udf:'-prefixed names go to model.udfs
        field_classname = \
            model._meta.get_field_by_name(attr)[0].__class__.__name__

        if field_classname.endswith('PointField'):
            srid = val.get('srid', 3857)
            val = Point(val['x'], val['y'], srid=srid)
            val.transform(3857)
        elif field_classname.endswith('MultiPolygonField'):
            srid = val.get('srid', 4326)
            val = MultiPolygon(Polygon(val['polygon'], srid=srid),
                               srid=srid)
            val.transform(3857)

        if attr == 'mapfeature_ptr':
            # NOTE(review): compares against `value` from the enclosing
            # loop, not this function's `val` — looks unintended; confirm
            if model.mapfeature_ptr_id != value:
                raise Exception('You may not change the mapfeature_ptr_id')
        elif attr == 'id':
            if val != model.pk:
                raise Exception("Can't update id attribute")
        elif attr.startswith('udf:'):
            udf_name = attr[4:]

            if udf_name in [field.name
                            for field
                            in model.get_user_defined_fields()]:
                model.udfs[udf_name] = val
            else:
                raise KeyError('Invalid UDF %s' % attr)
        elif attr in model.fields():
            model.apply_change(attr, val)
        else:
            raise Exception('Malformed request - invalid field %s' % attr)

    def save_and_return_errors(thing, user):
        # Save `thing` (back in database units); {} on success, or
        # field-keyed errors on validation failure
        try:
            if isinstance(thing, Convertible):
                thing.convert_to_database_units()

            thing.save_with_user(user)
            return {}
        except ValidationError as e:
            return package_field_errors(thing._model_name, e)

    tree = None
    # Revision counters to bump after a successful save
    rev_updates = ['universal_rev']
    old_geom = feature.geom
    for (identifier, value) in request_dict.iteritems():
        split_template = 'Malformed request - invalid field %s'
        object_name, field = dotted_split(
            identifier, 2, failure_format_string=split_template)
        if (object_name not in feature_object_names + ['tree']):
            raise Exception(split_template % identifier)

        tree_udfc_names = [fdef.canonical_name
                           for fdef in udf_defs(feature.instance, 'Tree')
                           if fdef.iscollection]

        if ((field in tree_udfc_names and
             feature.safe_get_current_tree() is None and
             value == [])):
            # An empty collection-UDF edit with no tree: nothing to do
            continue
        elif object_name in feature_object_names:
            model = feature
        elif object_name == 'tree' and feature.feature_type == 'Plot':
            # Get the tree or spawn a new one if needed
            tree = (tree or feature.safe_get_current_tree() or
                    Tree(instance=feature.instance))

            # We always edit in display units
            tree.convert_to_display_units()
            model = tree

            if field == 'species' and value:
                value = get_object_or_404(Species,
                                          instance=feature.instance,
                                          pk=value)
            elif field == 'plot' and value == unicode(feature.pk):
                value = feature
        else:
            raise Exception('Malformed request - invalid model %s' %
                            object_name)

        set_attr_on_model(model, field, value)

        # Geometry edits invalidate geo/eco caches; species/diameter
        # edits invalidate eco only
        field_class = model._meta.get_field_by_name(field)[0]
        if isinstance(field_class, GeometryField):
            rev_updates.append('geo_rev')
            rev_updates.append('eco_rev')
        elif identifier in ['tree.species', 'tree.diameter']:
            rev_updates.append('eco_rev')

    errors = {}
    if feature.fields_were_updated():
        errors.update(save_and_return_errors(feature, user))
    if tree and tree.fields_were_updated():
        tree.plot = feature
        errors.update(save_and_return_errors(tree, user))

    if errors:
        # It simplifies the templates and client-side logic if the geometry
        # field errors are returned under the generic name
        if feature.geom_field_name in errors:
            errors['mapFeature.geom'] = errors[feature.geom_field_name]
        raise ValidationError(errors)

    if old_geom is not None and feature.geom != old_geom:
        update_hide_at_zoom_after_move(feature, user, old_geom)

    feature.instance.update_revs(*rev_updates)

    return feature, tree
def update_map_feature(request_dict, user, feature):
    """
    Update a map feature. Expects JSON in the request body to be:
    {'model.field', ...}

    Where model is either 'tree', 'plot', or another map feature type
    and field is any field on the model. UDF fields should be prefixed
    with 'udf:'.

    This method can be used to create a new map feature by passing in
    an empty MapFeature object (i.e. Plot(instance=instance))

    Returns (feature, tree); raises ValidationError if any save fails.
    """
    # Object names accepted on the left of each dotted identifier,
    # besides 'tree'
    feature_object_names = [to_object_name(ft)
                            for ft in feature.instance.map_feature_types]

    if isinstance(feature, Convertible):
        # We're going to always work in display units here
        feature.convert_to_display_units()

    def set_attr_on_model(model, attr, val):
        # Set one field on `model`: geometry payloads are converted to
        # web mercator (3857), 'udf:' names are routed to model.udfs
        field_classname = \
            model._meta.get_field_by_name(attr)[0].__class__.__name__

        if field_classname.endswith('PointField'):
            srid = val.get('srid', 3857)
            val = Point(val['x'], val['y'], srid=srid)
            val.transform(3857)
        elif field_classname.endswith('MultiPolygonField'):
            srid = val.get('srid', 4326)
            val = MultiPolygon(Polygon(val['polygon'], srid=srid),
                               srid=srid)
            val.transform(3857)

        if attr == 'mapfeature_ptr':
            # NOTE(review): compares against the enclosing loop's
            # `value`, not the local `val` — looks unintended; confirm
            if model.mapfeature_ptr_id != value:
                raise Exception(
                    'You may not change the mapfeature_ptr_id')
        elif attr == 'id':
            if val != model.pk:
                raise Exception("Can't update id attribute")
        elif attr.startswith('udf:'):
            udf_name = attr[4:]

            if udf_name in [field.name
                            for field
                            in model.get_user_defined_fields()]:
                model.udfs[udf_name] = val
            else:
                raise KeyError('Invalid UDF %s' % attr)
        elif attr in model.fields():
            model.apply_change(attr, val)
        else:
            raise Exception('Malformed request - invalid field %s' % attr)

    def save_and_return_errors(thing, user):
        # Save `thing` (back in database units); {} on success, or
        # field-keyed validation errors
        try:
            if isinstance(thing, Convertible):
                thing.convert_to_database_units()

            thing.save_with_user(user)
            return {}
        except ValidationError as e:
            return package_field_errors(thing._model_name, e)

    tree = None
    # Revision counters to bump after a successful save
    rev_updates = ['universal_rev']
    old_geom = feature.geom
    for (identifier, value) in request_dict.iteritems():
        split_template = 'Malformed request - invalid field %s'
        object_name, field = dotted_split(
            identifier, 2, failure_format_string=split_template)
        if (object_name not in feature_object_names + ['tree']):
            raise Exception(split_template % identifier)

        tree_udfc_names = [fdef.canonical_name
                           for fdef in udf_defs(feature.instance, 'Tree')
                           if fdef.iscollection]

        if ((field in tree_udfc_names and
             feature.safe_get_current_tree() is None and
             value == [])):
            # An empty collection-UDF edit with no tree: nothing to do
            continue
        elif object_name in feature_object_names:
            model = feature
        elif object_name == 'tree' and feature.feature_type == 'Plot':
            # Get the tree or spawn a new one if needed
            tree = (tree or feature.safe_get_current_tree() or
                    Tree(instance=feature.instance))

            # We always edit in display units
            tree.convert_to_display_units()
            model = tree

            if field == 'species' and value:
                value = get_object_or_404(Species,
                                          instance=feature.instance,
                                          pk=value)
            elif field == 'plot' and value == unicode(feature.pk):
                value = feature
        else:
            raise Exception(
                'Malformed request - invalid model %s' % object_name)

        set_attr_on_model(model, field, value)

        # Geometry edits invalidate geo/eco caches; species/diameter
        # edits invalidate eco only
        field_class = model._meta.get_field_by_name(field)[0]
        if isinstance(field_class, GeometryField):
            rev_updates.append('geo_rev')
            rev_updates.append('eco_rev')
        elif identifier in ['tree.species', 'tree.diameter']:
            rev_updates.append('eco_rev')

    errors = {}
    if feature.fields_were_updated():
        errors.update(save_and_return_errors(feature, user))
    if tree and tree.fields_were_updated():
        tree.plot = feature
        errors.update(save_and_return_errors(tree, user))

    if errors:
        # It simplifies the templates and client-side logic if the geometry
        # field errors are returned under the generic name
        if feature.geom_field_name in errors:
            errors['mapFeature.geom'] = errors[feature.geom_field_name]
        raise ValidationError(errors)

    if old_geom is not None and feature.geom != old_geom:
        update_hide_at_zoom_after_move(feature, user, old_geom)

    feature.instance.update_revs(*rev_updates)

    return feature, tree
def assert_udf_def_count(self, model_name, count):
    """Assert `model_name` has exactly `count` UDF definitions."""
    self.assertEqual(len(udf_defs(self.instance, model_name)), count)
def get_user_defined_fields(self):
    """Return this model's UDF definitions, or [] when the object has
    no instance attached."""
    if not hasattr(self, 'instance'):
        return []
    return udf_defs(self.instance, self._model_name)
def update_map_feature(request_dict, user, feature):
    """
    Update a map feature. Expects JSON in the request body to be:
    {'model.field', ...}

    Where model is either 'tree', 'plot', or another map feature type
    and field is any field on the model.
    UDF fields should be prefixed with 'udf:'.

    This method can be used to create a new map feature by passing in
    an empty MapFeature object (i.e. Plot(instance=instance))

    Raises ValidationError (with per-field error keys) if any model
    fails to save, and Exception for malformed identifiers.
    Returns a (feature, tree) tuple; tree is None if no tree field was
    touched.
    """
    feature_object_names = [to_object_name(ft)
                            for ft in feature.instance.map_feature_types]

    if isinstance(feature, Convertible):
        # We're going to always work in display units here
        feature.convert_to_display_units()

    def set_attr_on_model(model, attr, val):
        # Apply a single field update to `model`, converting geometry
        # dicts to GEOS objects (in web mercator, 3857) first.
        field_classname = \
            model._meta.get_field_by_name(attr)[0].__class__.__name__

        if field_classname.endswith('PointField'):
            srid = val.get('srid', 3857)
            val = Point(val['x'], val['y'], srid=srid)
            val.transform(3857)
        elif field_classname.endswith('MultiPolygonField'):
            srid = val.get('srid', 4326)
            val = MultiPolygon(Polygon(val['polygon'], srid=srid), srid=srid)
            val.transform(3857)

        if attr == 'mapfeature_ptr':
            # BUGFIX: previously compared against the loop variable
            # `value` from the enclosing scope instead of the `val`
            # parameter passed to this helper.
            if model.mapfeature_ptr_id != val:
                raise Exception(
                    'You may not change the mapfeature_ptr_id')
        elif attr == 'id':
            if val != model.pk:
                raise Exception("Can't update id attribute")
        elif attr.startswith('udf:'):
            udf_name = attr[4:]

            if udf_name in [udf.name for udf
                            in model.get_user_defined_fields()]:
                model.udfs[udf_name] = val
            else:
                raise KeyError('Invalid UDF %s' % attr)
        elif attr in model.fields():
            model.apply_change(attr, val)
        else:
            raise Exception('Malformed request - invalid field %s' % attr)

    def save_and_return_errors(thing, user):
        # Save `thing` (converting back to database units first) and
        # return a dict of packaged field errors instead of raising.
        try:
            if isinstance(thing, Convertible):
                thing.convert_to_database_units()
            thing.save_with_user(user)
            return {}
        except ValidationError as e:
            return package_field_errors(thing._model_name, e)

    old_location = feature.geom
    tree = None

    for (identifier, value) in request_dict.iteritems():
        split_template = 'Malformed request - invalid field %s'
        object_name, field = dotted_split(
            identifier, 2, failure_format_string=split_template)
        if object_name not in feature_object_names + ['tree']:
            raise Exception(split_template % identifier)

        tree_udfc_names = [fdef.canonical_name
                           for fdef in udf_defs(feature.instance, 'Tree')
                           if fdef.iscollection]

        # An empty collection UDF on a non-existent tree is a no-op;
        # don't spawn a tree just to store nothing.
        if (field in tree_udfc_names and
                feature.safe_get_current_tree() is None and
                value == []):
            continue
        elif object_name in feature_object_names:
            model = feature
        elif object_name == 'tree' and feature.feature_type == 'Plot':
            # Get the tree or spawn a new one if needed
            tree = (tree
                    or feature.safe_get_current_tree()
                    or Tree(instance=feature.instance))

            # We always edit in display units
            tree.convert_to_display_units()
            model = tree

            if field == 'species' and value:
                value = get_object_or_404(Species,
                                          instance=feature.instance,
                                          pk=value)
            elif field == 'plot' and value == unicode(feature.pk):
                value = feature
        else:
            raise Exception(
                'Malformed request - invalid model %s' % object_name)

        set_attr_on_model(model, field, value)

    errors = {}

    if feature.fields_were_updated():
        errors.update(save_and_return_errors(feature, user))
    if tree and tree.fields_were_updated():
        tree.plot = feature
        errors.update(save_and_return_errors(tree, user))

    if errors:
        # It simplifies the templates and client-side logic if the geometry
        # field errors are returned under the generic name
        if feature.geom_field_name in errors:
            errors['mapFeature.geom'] = errors[feature.geom_field_name]
        raise ValidationError(errors)

    if old_location is None or not feature.geom.equals_exact(old_location):
        feature.instance.update_geo_rev()

    return feature, tree
def _validate_mobile_api_fields(self):
    """Validate the structure of self.mobile_api_fields.

    Raises ValidationError (keyed by 'mobile_api_fields') if invalid.
    Validation happens in two phases: group-level structure first,
    then field-level references; a raise after the first phase stops
    field-level checks from running on malformed groups.
    """
    # Validate that:
    # 1) overall structure is correct
    # 2) each individual group has a header and collection or normal fields
    # 3) Collection UDF groups only contain collection UDFs
    # 4) Collection UDF groups have a 'sort_key', which is present on all
    #    fields for that group
    # 5) no field is referenced more than once
    # 6) all fields referenced exist

    # delayed import to avoid circular references
    from treemap.models import Plot, Tree

    def _truthy_of_type(item, types):
        # True only if item is both non-falsy and of the given type(s)
        return item and isinstance(item, types)

    field_groups = self.mobile_api_fields
    errors = set()
    # Scalar vs collection UDF definitions, indexed by full name
    scalar_udfs = {udef.full_name: udef for udef in udf_defs(self)
                   if not udef.iscollection}
    collection_udfs = {udef.full_name: udef for udef in udf_defs(self)
                       if udef.iscollection}

    if not _truthy_of_type(field_groups, (list, tuple)):
        raise ValidationError(
            {'mobile_api_fields': [API_FIELD_ERRORS['no_field_groups']]})

    for group in field_groups:
        if not _truthy_of_type(group.get('header'), basestring):
            errors.add(API_FIELD_ERRORS['group_has_no_header'])

        # A group must have exactly one of the two key lists
        if ((not isinstance(group.get('collection_udf_keys'), list)
             and not isinstance(group.get('field_keys'), list))):
            errors.add(API_FIELD_ERRORS['group_has_no_keys'])
        elif 'collection_udf_keys' in group and 'field_keys' in group:
            errors.add(API_FIELD_ERRORS['group_has_both_keys'])

        if isinstance(group.get('collection_udf_keys'), list):
            # Collection UDF group: needs a sort_key present on every
            # referenced collection UDF's datatype
            sort_key = group.get('sort_key')
            if not sort_key:
                errors.add(API_FIELD_ERRORS['group_has_no_sort_key'])
            for key in group['collection_udf_keys']:
                udef = collection_udfs.get(key)
                if udef is None:
                    errors.add(API_FIELD_ERRORS['group_has_missing_cudf'])
                elif sort_key not in udef.datatype_by_field:
                    errors.add(
                        API_FIELD_ERRORS['group_has_invalid_sort_key'])
        elif isinstance(group.get('field_keys'), list):
            # Normal field group: model must be declared and every key
            # must be scoped to that model ('<model>.<field>')
            if group.get('model') not in {'tree', 'plot'}:
                errors.add(API_FIELD_ERRORS['group_missing_model'])
            else:
                for key in group['field_keys']:
                    if not key.startswith(group['model']):
                        errors.add(API_FIELD_ERRORS['group_invalid_model'])

    # Bail out before the field-level checks if group structure is bad
    if errors:
        raise ValidationError({'mobile_api_fields': list(errors)})

    scalar_fields = [key for group in field_groups
                     for key in group.get('field_keys', [])]
    collection_fields = [key for group in field_groups
                         for key in group.get('collection_udf_keys', [])]

    all_fields = scalar_fields + collection_fields
    if len(all_fields) != len(set(all_fields)):
        errors.add(API_FIELD_ERRORS['duplicate_fields'])

    for field in scalar_fields:
        model_name, name = field.split('.', 1)  # maxsplit of 1
        Model = Plot if model_name == 'plot' else Tree
        standard_fields = Model._meta.get_all_field_names()
        # A scalar key must be a concrete model field or a scalar UDF
        if ((name not in standard_fields and field not in scalar_udfs)):
            errors.add(API_FIELD_ERRORS['missing_field'])

    if errors:
        raise ValidationError({'mobile_api_fields': list(errors)})
def get_audits(logged_in_user, instance, query_vars, user, models,
               model_id, page=0, page_size=20, exclude_pending=True,
               should_count=False):
    """Return a page of Audit records visible to logged_in_user.

    Offset-based pagination: `page`/`page_size` slice the queryset and
    next/prev URLs are built by re-encoding query_vars with an updated
    'page' parameter.

    Returns a dict with 'audits', 'total_count' (0 unless should_count),
    'next_page' and 'prev_page' query strings (or None).
    """
    start_pos = page * page_size
    end_pos = start_pos + page_size

    if instance:
        if instance.is_accessible_by(logged_in_user):
            instances = Instance.objects.filter(pk=instance.pk)
        else:
            instances = Instance.objects.none()
    # If we didn't specify an instance we only want to
    # show audits where the user has permission
    else:
        instances = Instance.objects\
            .filter(pk__in=_instance_ids_edited_by(user))\
            .filter(user_accessible_instance_filter(logged_in_user))\
            .distinct()

    if not instances.exists():
        # Force no results
        return {'audits': Audit.objects.none(),
                'total_count': 0,
                'next_page': None,
                'prev_page': None}

    map_feature_models = set(MapFeature.subclass_dict().keys())
    model_filter = Q()
    # We only want to show the TreePhoto's image, not other fields
    # and we want to do it automatically if 'Tree' was specified as
    # a model. The same goes for MapFeature(s) <-> MapFeaturePhoto
    # There is no need to check permissions, because photos are always visible
    if 'Tree' in models:
        model_filter = model_filter | Q(model='TreePhoto', field='image')
    if map_feature_models.intersection(models):
        model_filter = model_filter | Q(model='MapFeaturePhoto',
                                        field='image')

    # Build a per-instance visibility filter
    for inst in instances:
        eligible_models = ({'Tree', 'TreePhoto', 'MapFeaturePhoto'} |
                           set(inst.map_feature_types)) & set(models)
        if logged_in_user == user:
            # Collection UDF audits are stored under 'udf:<id>' model names
            eligible_udfs = {'udf:%s' % udf.id for udf in udf_defs(inst)
                             if udf.model_type in eligible_models
                             and udf.iscollection}
            # The logged-in user can see all their own edits
            model_filter = model_filter | Q(
                instance=inst, model__in=(eligible_models | eligible_udfs))
        else:
            # Filter other users' edits by their visibility to the
            # logged-in user
            for model in eligible_models:
                ModelClass = get_auditable_class(model)
                fake_model = ModelClass(instance=inst)
                if issubclass(ModelClass, Authorizable):
                    visible_fields = fake_model.visible_fields(logged_in_user)
                    model_filter = model_filter |\
                        Q(model=model, field__in=visible_fields,
                          instance=inst)
                else:
                    model_filter = model_filter | Q(model=model,
                                                    instance=inst)
                if issubclass(ModelClass, UDFModel):
                    model_collection_udfs_audit_names = (
                        fake_model.visible_collection_udfs_audit_names(
                            logged_in_user))
                    model_filter = model_filter | (
                        Q(model__in=model_collection_udfs_audit_names))

    # Internal bookkeeping rows for collection UDF audits; never shown
    udf_bookkeeping_fields = Q(
        model__startswith='udf:',
        field__in=('id', 'model_id', 'field_definition'))

    audits = (Audit.objects
              .filter(model_filter)
              .filter(instance__in=instances)
              .select_related('instance')
              .exclude(udf_bookkeeping_fields)
              .exclude(user=User.system_user())
              .order_by('-created'))

    if user:
        audits = audits.filter(user=user)
    if model_id:
        audits = audits.filter(model_id=model_id)
    if exclude_pending:
        audits = audits.exclude(requires_auth=True, ref__isnull=True)

    total_count = audits.count() if should_count else 0
    audits = audits[start_pos:end_pos]

    # Drop 'page' so it can be re-added with the right value below
    query_vars = {k: v for (k, v) in query_vars.iteritems() if k != 'page'}
    next_page = None
    prev_page = None
    # We are using len(audits) instead of audits.count() because we
    # have already realized the queryset at this point
    if len(audits) == page_size:
        query_vars['page'] = page + 1
        next_page = "?" + urllib.urlencode(query_vars)
    if page > 0:
        query_vars['page'] = page - 1
        prev_page = "?" + urllib.urlencode(query_vars)

    return {'audits': audits,
            'total_count': total_count,
            'next_page': next_page,
            'prev_page': prev_page}
def get_audits(logged_in_user, instance, query_vars, user=None,
               models=ALLOWED_MODELS, model_id=None, start_id=None,
               prev_start_ids=None, page_size=PAGE_DEFAULT,
               exclude_pending=True, should_count=False):
    """Return a page of Audit records visible to logged_in_user.

    Keyset-based pagination: rows are ordered by descending pk, a page
    starts at `start_id`, and `prev_start_ids` is the stack of previous
    page-start pks used to build the 'prev' link.

    Returns a dict with 'audits' (a list), 'total_count' (0 unless
    should_count), 'next_page' and 'prev_page' query strings (or None).
    """
    # BUGFIX: the previous default `prev_start_ids=[]` was a shared
    # mutable default, and the pop() below also mutated a
    # caller-supplied list in place. Copy defensively instead.
    prev_start_ids = list(prev_start_ids) if prev_start_ids is not None \
        else []

    if instance:
        if instance.is_accessible_by(logged_in_user):
            instances = Instance.objects.filter(pk=instance.pk)
        else:
            instances = Instance.objects.none()
    # If we didn't specify an instance we only want to
    # show audits where the user has permission
    else:
        instances = Instance.objects\
            .filter(user_accessible_instance_filter(logged_in_user))
        if user:
            instances = instances.filter(
                pk__in=_instance_ids_edited_by(user))
        instances = instances.distinct()

    if not instances.exists():
        # Force no results
        return {'audits': Audit.objects.none(),
                'total_count': 0,
                'next_page': None,
                'prev_page': None}

    map_feature_models = set(MapFeature.subclass_dict().keys())
    model_filter = Q()
    # We only want to show the TreePhoto's image, not other fields
    # and we want to do it automatically if 'Tree' was specified as
    # a model. The same goes for MapFeature(s) <-> MapFeaturePhoto
    # There is no need to check permissions, because photos are always visible
    if 'Tree' in models:
        model_filter = model_filter | Q(model='TreePhoto', field='image')
    if map_feature_models.intersection(models):
        model_filter = model_filter | Q(model='MapFeaturePhoto',
                                        field='image')

    # Build a per-instance visibility filter
    for inst in instances:
        eligible_models = ({'Tree', 'TreePhoto', 'MapFeaturePhoto'} |
                           set(inst.map_feature_types)) & set(models)
        if logged_in_user == user:
            # Collection UDF audits are stored under 'udf:<id>' model names
            eligible_udfs = {'udf:%s' % udf.id for udf in udf_defs(inst)
                             if udf.model_type in eligible_models
                             and udf.iscollection}
            # The logged-in user can see all their own edits
            model_filter = model_filter | Q(
                instance=inst, model__in=(eligible_models | eligible_udfs))
        else:
            # Filter other users' edits by their visibility to the
            # logged-in user
            for model in eligible_models:
                ModelClass = get_auditable_class(model)
                fake_model = ModelClass(instance=inst)
                if issubclass(ModelClass, Authorizable):
                    visible_fields = fake_model.visible_fields(logged_in_user)
                    model_filter = model_filter |\
                        Q(model=model, field__in=visible_fields,
                          instance=inst)
                else:
                    model_filter = model_filter | Q(model=model,
                                                    instance=inst)
                if issubclass(ModelClass, UDFModel):
                    model_collection_udfs_audit_names = (
                        fake_model.visible_collection_udfs_audit_names(
                            logged_in_user))
                    model_filter = model_filter | (
                        Q(model__in=model_collection_udfs_audit_names))

    # Internal bookkeeping rows for collection UDF audits; never shown
    udf_bookkeeping_fields = Q(
        model__startswith='udf:',
        field__in=('id', 'model_id', 'field_definition'))

    audits = (Audit.objects
              .filter(model_filter)
              .filter(instance__in=instances)
              .select_related('instance')
              .exclude(udf_bookkeeping_fields)
              .exclude(user=User.system_user())
              .order_by('-pk'))

    if user:
        audits = audits.filter(user=user)
    if model_id:
        audits = audits.filter(model_id=model_id)
    if exclude_pending:
        audits = audits.exclude(requires_auth=True, ref__isnull=True)

    # Slicing the QuerySet uses a SQL Limit, which has proven to be quite
    # slow. By relying on the fact the our list is ordered by primary key
    # from newest to oldest, we can rely on the index on the primary key,
    # which is faster.
    if start_id is not None:
        audits = audits.filter(pk__lte=start_id)

    total_count = audits.count() if should_count else 0
    audits = audits[:page_size]

    # Coerce the queryset into a list so we can get the last audit row on
    # the current page
    audits = list(audits)

    # We are using len(audits) instead of audits.count() because we
    # have already realized the queryset at this point
    if len(audits) == page_size:
        query_vars.setlist('prev', prev_start_ids + [audits[0].pk])
        query_vars['start'] = audits[-1].pk - 1
        next_page = "?" + query_vars.urlencode()
    else:
        next_page = None

    if prev_start_ids:
        if len(prev_start_ids) == 1:
            # Back to the first page: no keyset parameters needed
            del query_vars['prev']
            del query_vars['start']
        else:
            prev_start_id = prev_start_ids.pop()
            query_vars.setlist('prev', prev_start_ids)
            query_vars['start'] = prev_start_id
        prev_page = "?" + query_vars.urlencode()
    else:
        prev_page = None

    return {'audits': audits,
            'total_count': total_count,
            'next_page': next_page,
            'prev_page': prev_page}
def assert_udf_name(self, model_name, name):
    """Assert exactly one UDF exists for *model_name*, named *name*."""
    matches = udf_defs(self.instance, model_name)
    self.assertEqual(len(matches), 1)
    self.assertEqual(matches[0].name, name)
def get_audits(logged_in_user, instance, query_vars, user=None,
               models=ALLOWED_MODELS, model_id=None, start_id=None,
               prev_start_ids=None, page_size=PAGE_DEFAULT,
               exclude_pending=True, should_count=False):
    """Return a page of Audit records visible to logged_in_user.

    Keyset-based pagination: rows are ordered by descending pk, a page
    starts at `start_id`, and `prev_start_ids` is the stack of previous
    page-start pks used to build the 'prev' link.

    Returns a dict with 'audits' (a list), 'total_count' (0 unless
    should_count), 'next_page' and 'prev_page' query strings (or None).
    """
    # BUGFIX: the previous default `prev_start_ids=[]` was a shared
    # mutable default, and the pop() below also mutated a
    # caller-supplied list in place. Copy defensively instead.
    prev_start_ids = list(prev_start_ids) if prev_start_ids is not None \
        else []

    if instance:
        if instance.is_accessible_by(logged_in_user):
            instances = Instance.objects.filter(pk=instance.pk)
        else:
            instances = Instance.objects.none()
    # If we didn't specify an instance we only want to
    # show audits where the user has permission
    else:
        instances = Instance.objects\
            .filter(user_accessible_instance_filter(logged_in_user))
        if user:
            instances = instances.filter(
                pk__in=_instance_ids_edited_by(user))
        instances = instances.distinct()

    if not instances.exists():
        # Force no results
        return {'audits': Audit.objects.none(),
                'total_count': 0,
                'next_page': None,
                'prev_page': None}

    map_feature_models = set(MapFeature.subclass_dict().keys())
    model_filter = Q()
    # We only want to show the TreePhoto's image, not other fields
    # and we want to do it automatically if 'Tree' was specified as
    # a model. The same goes for MapFeature(s) <-> MapFeaturePhoto
    # There is no need to check permissions, because photos are always visible
    if 'Tree' in models:
        model_filter = model_filter | Q(model='TreePhoto', field='image')
    if map_feature_models.intersection(models):
        model_filter = model_filter | Q(model='MapFeaturePhoto',
                                        field='image')

    # Build a per-instance visibility filter
    for inst in instances:
        eligible_models = ({'Tree', 'TreePhoto', 'MapFeaturePhoto'} |
                           set(inst.map_feature_types)) & set(models)
        if logged_in_user == user:
            # Collection UDF audits are stored under 'udf:<id>' model names
            eligible_udfs = {'udf:%s' % udf.id for udf in udf_defs(inst)
                             if udf.model_type in eligible_models
                             and udf.iscollection}
            # The logged-in user can see all their own edits
            model_filter = model_filter | Q(
                instance=inst, model__in=(eligible_models | eligible_udfs))
        else:
            # Filter other users' edits by their visibility to the
            # logged-in user
            for model in eligible_models:
                ModelClass = get_auditable_class(model)
                fake_model = ModelClass(instance=inst)
                if issubclass(ModelClass, Authorizable):
                    visible_fields = fake_model.visible_fields(logged_in_user)
                    model_filter = model_filter |\
                        Q(model=model, field__in=visible_fields,
                          instance=inst)
                else:
                    model_filter = model_filter | Q(model=model,
                                                    instance=inst)
                if issubclass(ModelClass, UDFModel):
                    model_collection_udfs_audit_names = (
                        fake_model.visible_collection_udfs_audit_names(
                            logged_in_user))
                    model_filter = model_filter | (
                        Q(model__in=model_collection_udfs_audit_names))

    # Internal bookkeeping rows for collection UDF audits; never shown
    udf_bookkeeping_fields = Q(
        model__startswith='udf:',
        field__in=('id', 'model_id', 'field_definition'))

    audits = (Audit.objects
              .filter(model_filter)
              .filter(instance__in=instances)
              .select_related('instance')
              .exclude(udf_bookkeeping_fields)
              .exclude(user=User.system_user())
              .order_by('-pk'))

    if user:
        audits = audits.filter(user=user)
    if model_id:
        audits = audits.filter(model_id=model_id)
    if exclude_pending:
        audits = audits.exclude(requires_auth=True, ref__isnull=True)

    # Slicing the QuerySet uses a SQL Limit, which has proven to be quite
    # slow. By relying on the fact the our list is ordered by primary key
    # from newest to oldest, we can rely on the index on the primary key,
    # which is faster.
    if start_id is not None:
        audits = audits.filter(pk__lte=start_id)

    total_count = audits.count() if should_count else 0
    audits = audits[:page_size]

    # Coerce the queryset into a list so we can get the last audit row on
    # the current page
    audits = list(audits)

    # We are using len(audits) instead of audits.count() because we
    # have already realized the queryset at this point
    if len(audits) == page_size:
        query_vars.setlist('prev', prev_start_ids + [audits[0].pk])
        query_vars['start'] = audits[-1].pk - 1
        next_page = "?" + query_vars.urlencode()
    else:
        next_page = None

    if prev_start_ids:
        if len(prev_start_ids) == 1:
            # Back to the first page: no keyset parameters needed
            del query_vars['prev']
            del query_vars['start']
        else:
            prev_start_id = prev_start_ids.pop()
            query_vars.setlist('prev', prev_start_ids)
            query_vars['start'] = prev_start_id
        prev_page = "?" + query_vars.urlencode()
    else:
        prev_page = None

    return {'audits': audits,
            'total_count': total_count,
            'next_page': next_page,
            'prev_page': prev_page}
def udf_column_names(model_name):
    # Column names for the scalar (non-collection) UDFs of model_name.
    scalar_defs = (d for d in udf_defs(self.instance, model_name)
                   if not d.iscollection)
    return tuple(self.get_udf_column_name(d) for d in scalar_defs)
def udf_names(model_name):
    # Names for the scalar (non-collection) UDFs of model_name.
    names = []
    for udf_def in udf_defs(self.instance, model_name):
        if udf_def.iscollection:
            continue
        names.append(self._get_udf_name(udf_def))
    return tuple(names)
def get_audits(logged_in_user, instance, query_vars, user, models,
               model_id, page=0, page_size=20, exclude_pending=True,
               should_count=False):
    """Return a page of Audit records visible to logged_in_user.

    Offset-based pagination: `page`/`page_size` slice the queryset and
    next/prev URLs are built by re-encoding query_vars with an updated
    'page' parameter.

    Returns a dict with 'audits', 'total_count' (0 unless should_count),
    'next_page' and 'prev_page' query strings (or None).
    """
    start_pos = page * page_size
    end_pos = start_pos + page_size

    if instance:
        if instance.is_accessible_by(logged_in_user):
            instances = Instance.objects.filter(pk=instance.pk)
        else:
            instances = Instance.objects.none()
    # If we didn't specify an instance we only want to
    # show audits where the user has permission
    else:
        instances = Instance.objects\
            .filter(pk__in=_instance_ids_edited_by(user))\
            .filter(user_accessible_instance_filter(logged_in_user))\
            .distinct()

    if not instances.exists():
        # Force no results
        return {'audits': Audit.objects.none(),
                'total_count': 0,
                'next_page': None,
                'prev_page': None}

    map_feature_models = set(MapFeature.subclass_dict().keys())
    model_filter = Q()
    # We only want to show the TreePhoto's image, not other fields
    # and we want to do it automatically if 'Tree' was specified as
    # a model. The same goes for MapFeature(s) <-> MapFeaturePhoto
    # There is no need to check permissions, because photos are always visible
    if 'Tree' in models:
        model_filter = model_filter | Q(model='TreePhoto', field='image')
    if map_feature_models.intersection(models):
        model_filter = model_filter | Q(model='MapFeaturePhoto',
                                        field='image')

    # Build a per-instance visibility filter
    for inst in instances:
        eligible_models = ({'Tree', 'TreePhoto', 'MapFeaturePhoto'} |
                           set(inst.map_feature_types)) & set(models)
        if logged_in_user == user:
            # Collection UDF audits are stored under 'udf:<id>' model names
            eligible_udfs = {'udf:%s' % udf.id for udf in udf_defs(inst)
                             if udf.model_type in eligible_models
                             and udf.iscollection}
            # The logged-in user can see all their own edits
            model_filter = model_filter | Q(
                instance=inst, model__in=(eligible_models | eligible_udfs))
        else:
            # Filter other users' edits by their visibility to the
            # logged-in user
            for model in eligible_models:
                ModelClass = get_auditable_class(model)
                fake_model = ModelClass(instance=inst)
                if issubclass(ModelClass, Authorizable):
                    visible_fields = fake_model.visible_fields(logged_in_user)
                    model_filter = model_filter |\
                        Q(model=model, field__in=visible_fields,
                          instance=inst)
                else:
                    model_filter = model_filter | Q(model=model,
                                                    instance=inst)
                if issubclass(ModelClass, UDFModel):
                    model_collection_udfs_audit_names = (
                        fake_model.visible_collection_udfs_audit_names(
                            logged_in_user))
                    model_filter = model_filter | (
                        Q(model__in=model_collection_udfs_audit_names))

    # Internal bookkeeping rows for collection UDF audits; never shown
    udf_bookkeeping_fields = Q(
        model__startswith='udf:',
        field__in=('id', 'model_id', 'field_definition'))

    audits = (Audit.objects
              .filter(model_filter)
              .filter(instance__in=instances)
              .select_related('instance')
              .exclude(udf_bookkeeping_fields)
              .exclude(user=User.system_user())
              .order_by('-created'))

    if user:
        audits = audits.filter(user=user)
    if model_id:
        audits = audits.filter(model_id=model_id)
    if exclude_pending:
        audits = audits.exclude(requires_auth=True, ref__isnull=True)

    total_count = audits.count() if should_count else 0
    audits = audits[start_pos:end_pos]

    # Drop 'page' so it can be re-added with the right value below
    query_vars = {k: v for (k, v) in query_vars.iteritems() if k != 'page'}
    next_page = None
    prev_page = None
    # We are using len(audits) instead of audits.count() because we
    # have already realized the queryset at this point
    if len(audits) == page_size:
        query_vars['page'] = page + 1
        next_page = "?" + urllib.urlencode(query_vars)
    if page > 0:
        query_vars['page'] = page - 1
        prev_page = "?" + urllib.urlencode(query_vars)

    return {'audits': audits,
            'total_count': total_count,
            'next_page': next_page,
            'prev_page': prev_page}
def _validate_mobile_api_fields(self):
    """Validate the structure of self.mobile_api_fields.

    Raises ValidationError (keyed by 'mobile_api_fields') if invalid.
    Validation happens in two phases: group-level structure first,
    then field-level references; a raise after the first phase stops
    field-level checks from running on malformed groups.
    """
    # Validate that:
    # 1) overall structure is correct
    # 2) each individual group has a header and collection or normal fields
    # 3) Collection UDF groups only contain collection UDFs
    # 4) Collection UDF groups have a 'sort_key', which is present on all
    #    fields for that group
    # 5) no field is referenced more than once
    # 6) all fields referenced exist

    # delayed import to avoid circular references
    from treemap.models import Plot, Tree

    def _truthy_of_type(item, types):
        # True only if item is both non-falsy and of the given type(s)
        return item and isinstance(item, types)

    field_groups = self.mobile_api_fields
    errors = set()
    # Scalar vs collection UDF definitions, indexed by full name
    scalar_udfs = {udef.full_name: udef for udef in udf_defs(self)
                   if not udef.iscollection}
    collection_udfs = {udef.full_name: udef for udef in udf_defs(self)
                       if udef.iscollection}

    if not _truthy_of_type(field_groups, (list, tuple)):
        raise ValidationError(
            {'mobile_api_fields': [API_FIELD_ERRORS['no_field_groups']]})

    for group in field_groups:
        if not _truthy_of_type(group.get('header'), basestring):
            errors.add(API_FIELD_ERRORS['group_has_no_header'])

        # A group must have exactly one of the two key lists
        if ((not isinstance(group.get('collection_udf_keys'), list)
             and not isinstance(group.get('field_keys'), list))):
            errors.add(API_FIELD_ERRORS['group_has_no_keys'])
        elif 'collection_udf_keys' in group and 'field_keys' in group:
            errors.add(API_FIELD_ERRORS['group_has_both_keys'])

        if isinstance(group.get('collection_udf_keys'), list):
            # Collection UDF group: needs a sort_key present on every
            # referenced collection UDF's datatype
            sort_key = group.get('sort_key')
            if not sort_key:
                errors.add(API_FIELD_ERRORS['group_has_no_sort_key'])
            for key in group['collection_udf_keys']:
                udef = collection_udfs.get(key)
                if udef is None:
                    errors.add(API_FIELD_ERRORS['group_has_missing_cudf'])
                elif sort_key not in udef.datatype_by_field:
                    errors.add(
                        API_FIELD_ERRORS['group_has_invalid_sort_key'])
        elif isinstance(group.get('field_keys'), list):
            # Normal field group: model must be declared and every key
            # must be scoped to that model ('<model>.<field>')
            if group.get('model') not in {'tree', 'plot'}:
                errors.add(API_FIELD_ERRORS['group_missing_model'])
            else:
                for key in group['field_keys']:
                    if not key.startswith(group['model']):
                        errors.add(API_FIELD_ERRORS['group_invalid_model'])

    # Bail out before the field-level checks if group structure is bad
    if errors:
        raise ValidationError({'mobile_api_fields': list(errors)})

    scalar_fields = [key for group in field_groups
                     for key in group.get('field_keys', [])]
    collection_fields = [key for group in field_groups
                         for key in group.get('collection_udf_keys', [])]

    all_fields = scalar_fields + collection_fields
    if len(all_fields) != len(set(all_fields)):
        errors.add(API_FIELD_ERRORS['duplicate_fields'])

    for field in scalar_fields:
        model_name, name = field.split('.', 1)  # maxsplit of 1
        Model = Plot if model_name == 'plot' else Tree
        standard_fields = Model._meta.get_all_field_names()
        # A scalar key must be a concrete model field or a scalar UDF
        if ((name not in standard_fields and field not in scalar_udfs)):
            errors.add(API_FIELD_ERRORS['missing_field'])

    if errors:
        raise ValidationError({'mobile_api_fields': list(errors)})