def do_update_attr(cls, obj, json_obj, attr):
  """Perform the update to ``obj`` required to make the attribute attr
  equivalent in ``obj`` and ``json_obj``.
  """
  class_attr = getattr(obj.__class__, attr)
  attr_reflection = AttributeInfo.get_attr(obj.__class__, "_api_attrs", attr)
  update_raw = attr in AttributeInfo.gather_update_raw(obj.__class__)
  if update_raw:
    # The attribute has a special setter that can handle raw json fields
    # properly. This is used for special mappings such as custom attribute
    # values
    attr_name = attr
    value = json_obj.get(attr_name)
  elif isinstance(attr_reflection, SerializableAttribute):
    # The API reflection for this attribute knows how to deserialize the raw
    # json value itself; falsy values are passed through unconverted.
    attr_name = attr
    value = json_obj.get(attr)
    if value:
      value = attr_reflection.deserialize(value)
  elif hasattr(attr, '__call__'):
    # The attribute has been decorated with a callable, grab the name and
    # invoke the callable to get the value
    # NOTE(review): a callable ``attr`` would already have raised in
    # ``getattr(obj.__class__, attr)`` above, which needs a string name --
    # confirm whether this branch is actually reachable.
    attr_name = attr.attr_name
    value = attr(cls, obj, json_obj)
  elif not hasattr(cls, class_attr.__class__.__name__):
    # The attribute is a function on the obj like custom_attributes in
    # CustomAttributable mixin
    attr_name = attr
    value = class_attr(obj, json_obj)
  else:
    # Lookup the method to use to perform the update. Use reflection to
    # key off of the type of the attribute and invoke the method of the
    # same name.
    attr_name = attr
    method = getattr(cls, class_attr.__class__.__name__)
    value = method(obj, json_obj, attr_name, class_attr)
  if (isinstance(value, (set, list)) and not update_raw and (
      not hasattr(class_attr, 'property') or not hasattr(
          class_attr.property, 'columns') or not isinstance(
          class_attr.property.columns[0].type, JsonType))):
    # Collections that are not persisted as a raw JSON column are updated
    # element by element via the collection helper.
    cls._do_update_collection(obj, value, attr_name)
  else:
    try:
      setattr(obj, attr_name, value)
    except AttributeError as error:
      logger.error('Unable to set attribute %s: %s', attr_name, error)
      raise
def _get_properties(self, obj):
  """Get indexable properties and values.

  Properties should be returned in the following format:
  {
    property1: {
      subproperty1: value1,
      subproperty2: value2,
      ...
    },
    ...
  }
  If there is no subproperty - empty string is used as a key
  """
  if obj.type == "Snapshot":
    # Snapshots do not have any indexable content. The object content for
    # snapshots is stored in the revision. Snapshots can also be made for
    # different models so we have to get fulltext attrs for the actual child
    # that was snapshotted and get data for those from the revision content.
    child_model = getattr(ggrc.models.all_models, obj.child_type, None)
    if not child_model:
      return {}
    revision_content = obj.revision.content
    snapshot_attrs = AttributeInfo.gather_attrs(child_model,
                                                '_fulltext_attrs')
    return dict(
        (name, {"": revision_content.get(name)}) for name in snapshot_attrs)
  return dict(
      (name, {"": getattr(obj, name)}) for name in self._fulltext_attrs)
def _get_model_properties():
  """Get indexable properties for all snapshottable objects

  Args:
    None
  Returns:
    tuple(class_properties dict, custom_attribute_definitions dict) - Tuple
      of dictionaries, first one representing a list of searchable
      attributes for every model and second one representing dictionary of
      custom attribute definition attributes.
  """
  # pylint: disable=protected-access
  from ggrc.models import all_models
  model_names = Types.all
  # CAD rows reference their owning model by its singular table name.
  singular_names = {
      getattr(all_models, name)._inflector.table_singular
      for name in model_names
  }
  cad_query = db.session.query(
      models.CustomAttributeDefinition.id,
      models.CustomAttributeDefinition.title,
  ).filter(
      models.CustomAttributeDefinition.definition_type.in_(singular_names)
  )
  class_properties = {
      name: AttributeInfo.gather_attrs(
          getattr(all_models, name), '_fulltext_attrs')
      for name in model_names
  }
  return class_properties, cad_query.all()
def _set_attr_name_map(self):
  """Build a map of attribute names and display names.

  Dict containing all display_name to attr_name mappings for all objects
  used in the current query.
  Example:
      { Program: {"Program URL": "url", "Code": "slug", ...} ...}
  """
  self.attr_name_map = {}
  for object_query in self.query:
    object_name = object_query["object_name"]
    object_class = self.object_map[object_name]
    tgt_class = object_class
    if object_name == "Snapshot":
      # Snapshot queries are filtered on the snapshotted child's aliases.
      child_type = self._get_snapshot_child_type(object_query)
      tgt_class = getattr(models.all_models, child_type, object_class)
    self.attr_name_map[tgt_class] = {}
    for attr_key, alias in AttributeInfo.gather_aliases(tgt_class).items():
      filter_by = None
      if isinstance(alias, dict):
        filter_name = alias.get("filter_by")
        if filter_name is not None:
          filter_by = getattr(tgt_class, filter_name, None)
        display_name = alias["display_name"]
      else:
        display_name = alias
      if display_name:
        self.attr_name_map[tgt_class][display_name.lower()] = (
            attr_key.lower(), filter_by)
def _get_reserved_names(cls, definition_type):
  """Get a list of all attribute names in all objects.

  On first call this function computes all possible names that can be used
  by any model and stores them in a static frozen set. All later calls just
  get this set.

  Returns:
    frozen set containing all reserved attribute names for the current
    object.
  """
  # pylint: disable=protected-access
  # The _inflector is a false positive in our app.
  with benchmark("Generate a list of all reserved attribute names"):
    cached = cls._reserved_names.get(definition_type)
    if cached:
      return cached
    # Table-singular names first, then model-singular names override on
    # collision (same precedence as the original two-step build).
    definition_map = {
        model._inflector.table_singular: model
        for model in ggrc.models.all_models.all_models
    }
    definition_map.update({
        model._inflector.model_singular: model
        for model in ggrc.models.all_models.all_models
    })
    definition_model = definition_map.get(definition_type)
    if not definition_model:
      raise ValueError("Invalid definition type")
    aliases = AttributeInfo.gather_aliases(definition_model)
    reserved = frozenset(
        (alias["display_name"] if isinstance(alias, dict) else alias).lower()
        for alias in aliases.values()
        if alias
    )
    cls._reserved_names[definition_type] = reserved
    return reserved
def log_json(self):
  """Get a dict with attributes and related objects of self.

  This method converts additionally person-mapping attributes and owners
  to person stubs.
  """
  from ggrc import models
  res = self.log_json_base()
  for attr in self._people_log_mappings:
    if not hasattr(self, attr):
      continue
    value = getattr(self, attr)
    # hardcoded [:-3] is used to strip "_id" suffix
    res[attr[:-3]] = self._person_stub(value) if value else None
  for attr_name in AttributeInfo.gather_publish_attrs(self.__class__):
    if not is_attr_of_type(self, attr_name, models.Option):
      continue
    option = getattr(self, attr_name)
    if option:
      stub = create_stub(option)
      stub["title"] = option.title
    else:
      stub = None
    res[attr_name] = stub
  return res
def _attribute_name_map(self):
  """Get property to name mapping for object attributes."""
  model = getattr(models.all_models, self.child_type, None)
  if not model:
    logger.warning("Exporting invalid snapshot model: %s", self.child_type)
    return {}
  aliases = AttributeInfo.gather_visible_aliases(model)
  aliases.update(AttributeInfo.get_acl_definitions(model))
  aliases.update(self.CUSTOM_SNAPSHOT_ALIASES)
  if self.MAPPINGS_KEY in self.fields:
    aliases.update(self.SNAPSHOT_MAPPING_ALIASES)
  name_map = {}
  for key, alias in aliases.iteritems():
    name_map[key] = (
        alias["display_name"] if isinstance(alias, dict) else alias)
  ordered_keys = AttributeInfo.get_column_order(name_map.keys())
  return OrderedDict((key, name_map[key]) for key in ordered_keys)
def get_fulltext_attrs(cls):
  # type: () -> Dict[unicode, fulltext.attributes.FullTextAttr]
  """Get all fulltext attributes represented as FullTextAttribute objects
  """
  raw_attrs = AttributeInfo.gather_attrs(cls, '_fulltext_attrs')
  # Convert attrs represented as string into FullTextAttr objects
  attrs = []
  for raw in raw_attrs:
    if isinstance(raw, fulltext.attributes.FullTextAttr):
      attrs.append(raw)
    else:
      attrs.append(fulltext.attributes.FullTextAttr(raw, raw))
  return attrs
def test_gather_aliases(self):
  """Test gather all aliases."""
  class Child(object):
    # pylint: disable=too-few-public-methods
    _aliases = {
        "child_normal": "normal",
        "child_extended": {
            "display_name": "Extended",
        },
        "child_filter_only": {
            "display_name": "Extended",
            "filter_only": True,
        },
    }

  class Parent(Child):
    # pylint: disable=too-few-public-methods
    _aliases = {
        "parent_normal": "normal",
        "parent_extended": {
            "display_name": "Extended",
        },
        "parent_filter_only": {
            "display_name": "Extended",
            "filter_only": True,
        },
    }

  # The gathered result must contain both the parent's and the child's
  # aliases; build the expectation by merging the two declarations.
  expected = {}
  expected.update(Child._aliases)
  expected.update(Parent._aliases)
  self.assertEqual(AttributeInfo.gather_aliases(Parent), expected)
def get_fulltext_parsed_value(klass, key):
  """Get fulltext parser if it's exists """
  attrs = AttributeInfo.gather_attrs(klass, '_fulltext_attrs')
  if not issubclass(klass, Indexed):
    return
  for attr in attrs:
    if isinstance(attr, FullTextAttr) and attr.with_template:
      attr_key = klass.PROPERTY_TEMPLATE.format(attr.alias)
    elif isinstance(attr, FullTextAttr):
      attr_key = attr.alias
    else:
      # Plain string attrs are wrapped into a FullTextAttr keyed by ``key``.
      attr_key = klass.PROPERTY_TEMPLATE.format(attr)
      attr = FullTextAttr(key, key)
    if attr_key == key:
      return attr
def insert_definition(self, definition):
  """Insert a new custom attribute definition into database

  Args:
    definition: dictionary with field_name: value
  """
  from ggrc.models.custom_attribute_definition \
      import CustomAttributeDefinition
  field_names = AttributeInfo.gather_create_attrs(CustomAttributeDefinition)
  data = {}
  for field_name in field_names:
    data[field_name] = definition.get(field_name)
  # The definition is always owned by this model's singular table name.
  data["definition_type"] = self._inflector.table_singular
  db.session.add(CustomAttributeDefinition(**data))
def _get_model_names(cls, model):
  """Get tuple of all attribute names for model.

  Args:
    model: Model class.
  Returns:
    Tuple of all lower-cased attribute display names for provided model.
  Raises:
    ValueError: if ``model`` is falsy.
  """
  if not model:
    raise ValueError("Invalid definition type")
  aliases = AttributeInfo.gather_aliases(model)
  # Materialize into a tuple so the result matches the documented return
  # type and can be iterated more than once (previously a one-shot
  # generator expression was returned).
  return tuple(
      (value["display_name"] if isinstance(value, dict) else value).lower()
      for value in aliases.values() if value)
def _get_properties(self, obj):
  """Get indexable properties and values.

  Properties should be returned in the following format:
  {
    property1: {
      subproperty1: value1,
      subproperty2: value2,
      ...
    },
    ...
  }
  If there is no subproperty - empty string is used as a key
  """
  if obj.type == "Snapshot":
    # Snapshots do not have any indexable content. The object content for
    # snapshots is stored in the revision. Snapshots can also be made for
    # different models so we have to get fulltext attrs for the actual child
    # that was snapshotted and get data for those from the revision content.
    child_model = getattr(ggrc.models.all_models, obj.child_type, None)
    if not child_model:
      return {}
    content = obj.revision.content
    snapshot_attrs = AttributeInfo.gather_attrs(child_model,
                                                '_fulltext_attrs')
    return {name: {"": content.get(name)} for name in snapshot_attrs}
  tmpl = obj.PROPERTY_TEMPLATE if isinstance(obj, Indexed) else u"{}"
  properties = {}
  for attr in self._fulltext_attrs:
    if isinstance(attr, basestring):
      properties[tmpl.format(attr)] = {"": getattr(obj, attr)}
    elif isinstance(attr, FullTextAttr):
      # Templated attrs index under the formatted alias, plain FullTextAttrs
      # under the alias itself.
      if attr.with_template:
        prop_name = tmpl.format(attr.alias)
      else:
        prop_name = attr.alias
      properties[prop_name] = attr.get_property_for(obj)
  return properties
def _get_model_names(cls, model):
  """Get tuple of all attribute names for model.

  Args:
    model: Model class.
  Returns:
    Tuple of all lower-cased attribute display names for provided model.
  Raises:
    ValueError: if ``model`` is falsy.
  """
  if not model:
    raise ValueError("Invalid definition type")
  aliases = AttributeInfo.gather_aliases(model)
  # Materialize into a tuple so the result matches the documented return
  # type and can be iterated more than once (previously a one-shot
  # generator expression was returned).
  return tuple(
      (value["display_name"] if isinstance(value, dict) else value).lower()
      for value in aliases.values() if value)
def test_gather_aliases(self):
  """Test gather all aliases."""
  class Child(object):
    # pylint: disable=too-few-public-methods
    _aliases = {
        "child_normal": "normal",
        "child_extended": {
            "display_name": "Extended",
        },
        "child_filter_only": {
            "display_name": "Extended",
            "filter_only": True,
        },
    }

  class Parent(Child):
    # pylint: disable=too-few-public-methods
    _aliases = {
        "parent_normal": "normal",
        "parent_extended": {
            "display_name": "Extended",
        },
        "parent_filter_only": {
            "display_name": "Extended",
            "filter_only": True,
        },
    }

  # Expect the union of child and parent alias declarations.
  expected = dict(Child._aliases)
  expected.update(Parent._aliases)
  self.assertEqual(AttributeInfo.gather_aliases(Parent), expected)
def attributes_map(cls):
  """Build and cache a display-name -> (attr_name, filter_by) mapping."""
  if cls.CACHED_ATTRIBUTE_MAP:
    return cls.CACHED_ATTRIBUTE_MAP
  attr_map = {}
  for key, value in AttributeInfo.gather_aliases(cls).items():
    filter_by = None
    if isinstance(value, dict):
      name = value["display_name"]
      if value.get("filter_by"):
        filter_by = getattr(cls, value["filter_by"], None)
    else:
      name = value
    if not name:
      continue
    attr_map[name.lower()] = (key.lower(), filter_by)
  cls.CACHED_ATTRIBUTE_MAP = attr_map
  return cls.CACHED_ATTRIBUTE_MAP
def _get_class_properties():
  """Get indexable properties for all models

  Args:
    None
  Returns:
    class_properties dict - representing a list of searchable attributes
      for every model
  """
  class_properties = defaultdict(list)
  for klass_name in Types.all:
    model = getattr(all_models, klass_name)
    for attr in AttributeInfo.gather_attrs(model, '_fulltext_attrs'):
      # Plain string attrs are normalized into FullTextAttr objects.
      if not isinstance(attr, FullTextAttr):
        attr = FullTextAttr(attr, attr)
      class_properties[klass_name].append(attr)
  return class_properties
def get_object_column_definitions(object_class, fields=None,
                                  include_hidden=False):
  """Attach additional info to attribute definitions.

  Fetches the attribute info (_aliases) for the given object class and adds
  additional data (handler class, validator function, default value) needed
  for imports.

  Args:
      object_class (db.Model): Model for which we want to get column
        definitions for imports.
      include_hidden (bool): Flag which specifies if we should include
        column handlers for hidden attributes (they marked as 'hidden'
        in _aliases dict).

  Returns:
      dict: Updated attribute definitions dict with additional data.
  """
  attributes = AttributeInfo.get_object_attr_definitions(
      object_class,
      fields=fields,
      include_hidden=include_hidden
  )
  column_handlers = model_column_handlers(object_class)
  for key, attr in attributes.iteritems():
    handler_key = attr.get("handler_key", key)
    # Resolution order: exact handler key, then its prefix (the part before
    # the first ":"), then the generic default handler.
    handler = column_handlers.get(handler_key)
    if handler is None:
      handler = column_handlers.get(handler_key.split(":")[0])
    if handler is None:
      handler = handlers.ColumnHandler
    validator = None
    default = None
    if attr["type"] == AttributeInfo.Type.PROPERTY:
      validator = getattr(object_class, "validate_{}".format(key), None)
      default = getattr(object_class, "default_{}".format(key), None)
    attr["handler"] = attr.get("handler", handler)
    attr["validator"] = attr.get("validator", validator)
    attr["default"] = attr.get("default", default)
  return attributes
def _get_assignable_dict(people, notif):
  """Get dict data for assignable object in notification.

  Args:
    people (List[Person]): List of people objects who should receive the
      notification.
    notif (Notification): Notification that should be sent.
  Returns:
    dict: dictionary containing notification data for all people in the
      given list.
  """
  obj = get_notification_object(notif)
  definitions = AttributeInfo.get_object_attr_definitions(obj.__class__)
  roles = _get_assignable_roles(obj)
  notif_type = notif.notification_type.name
  data = {}
  for person in people:
    # We should default to today() if no start date is found on the object.
    start_date = getattr(obj, "start_date", datetime.date.today())
    if notif_type == "assessment_updated":
      updated_fields = _get_updated_fields(
          obj, notif.created_at, definitions, roles)
    else:
      updated_fields = None
    data[person.email] = {
        "user": get_person_dict(person),
        notif_type: {
            obj.id: {
                "title": obj.title,
                "start_date_statement": utils.get_digest_date_statement(
                    start_date, "start", True),
                "url": get_object_url(obj),
                "notif_created_at": {
                    notif.id: as_user_time(notif.created_at)},
                "notif_updated_at": {
                    notif.id: as_user_time(notif.updated_at)},
                "updated_fields": updated_fields,
            }
        }
    }
  return data
def get_all_attributes_json(load_custom_attributes=False):
  """Get a list of all attribute definitions

  This exports all attributes related to a given model, including custom
  attributes and mapping attributes, that are used in csv import and export.
  """
  with benchmark('Loading all attributes JSON'):
    ca_cache = collections.defaultdict(list)
    if load_custom_attributes:
      # Group CADs by title/type so each model gets its own definitions.
      definitions = models.CustomAttributeDefinition.eager_query().group_by(
          models.CustomAttributeDefinition.title,
          models.CustomAttributeDefinition.definition_type)
      for cad in definitions:
        ca_cache[cad.definition_type].append(cad)
    published = {}
    for model in all_models.all_models:
      published[model.__name__] = AttributeInfo.get_attr_definitions_array(
          model, ca_cache=ca_cache)
    return as_json(published)
def get_all_attributes_json(load_custom_attributes=False):
  """Get a list of all attribute definitions

  This exports all attributes related to a given model, including custom
  attributes and mapping attributes, that are used in csv import and export.
  """
  with benchmark('Loading all attributes JSON'):
    ca_cache = collections.defaultdict(list)
    if load_custom_attributes:
      # Group CADs by title/type so each model gets its own definitions.
      cad_query = models.CustomAttributeDefinition.eager_query().group_by(
          models.CustomAttributeDefinition.title,
          models.CustomAttributeDefinition.definition_type)
      for cad in cad_query:
        ca_cache[cad.definition_type].append(cad)
    published = {
        model.__name__: AttributeInfo.get_attr_definitions_array(
            model, ca_cache=ca_cache)
        for model in all_models.all_models
    }
    return as_json(published)
def _get_class_properties():
  """Get indexable properties for all models

  Args:
    None
  Returns:
    class_properties dict - representing a list of searchable attributes
      for every model
  """
  class_properties = defaultdict(list)
  for klass_name in Types.all:
    gathered = AttributeInfo.gather_attrs(
        getattr(all_models, klass_name), '_fulltext_attrs')
    for attr in gathered:
      # Normalize plain string attrs into FullTextAttr objects.
      if isinstance(attr, FullTextAttr):
        class_properties[klass_name].append(attr)
      else:
        class_properties[klass_name].append(FullTextAttr(attr, attr))
  return class_properties
def do_update_attr(cls, obj, json_obj, attr):
  """Perform the update to ``obj`` required to make the attribute attr
  equivalent in ``obj`` and ``json_obj``.
  """
  class_attr = getattr(obj.__class__, attr)
  update_raw = attr in AttributeInfo.gather_update_raw(obj.__class__)
  if update_raw:
    # The attribute has a special setter that can handle raw json fields
    # properly. This is used for special mappings such as custom attribute
    # values
    attr_name = attr
    value = json_obj.get(attr_name)
  elif hasattr(attr, '__call__'):
    # The attribute has been decorated with a callable, grab the name and
    # invoke the callable to get the value
    # NOTE(review): a callable ``attr`` would already have raised in
    # ``getattr(obj.__class__, attr)`` above, which needs a string name --
    # confirm whether this branch is actually reachable.
    attr_name = attr.attr_name
    value = attr(cls, obj, json_obj)
  elif not hasattr(cls, class_attr.__class__.__name__):
    # The attribute is a function on the obj like custom_attributes in
    # CustomAttributable mixin
    attr_name = attr
    value = class_attr(obj, json_obj)
  else:
    # Lookup the method to use to perform the update. Use reflection to
    # key off of the type of the attribute and invoke the method of the
    # same name.
    attr_name = attr
    method = getattr(cls, class_attr.__class__.__name__)
    value = method(obj, json_obj, attr_name, class_attr)
  if (isinstance(value, (set, list)) and not update_raw and (
      not hasattr(class_attr, 'property') or not hasattr(
          class_attr.property, 'columns') or not isinstance(
          class_attr.property.columns[0].type, JsonType))):
    # Collections that are not persisted as a raw JSON column are updated
    # element by element via the collection helper.
    cls._do_update_collection(obj, value, attr_name)
  else:
    try:
      setattr(obj, attr_name, value)
    except AttributeError as error:
      logger.error('Unable to set attribute %s: %s', attr_name, error)
      raise
def __new__(cls, class_name, bases, attrs, extra_attrs=None):
  """Use model reflection to build up the list of factory attributes.

  The default attributes can be overridden by defining a subclass of
  `ModelFactory` and defining the attribute to be overriden.
  """
  model_class = attrs.pop('MODEL', None)
  if model_class:
    attrs['FACTORY_FOR'] = dict
    attribute_info = AttributeInfo(model_class)
    for attr in attribute_info._create_attrs:
      # Decorated attrs are callables carrying their name separately.
      attr_name = attr.attr_name if hasattr(attr, '__call__') else attr
      if not hasattr(cls, attr_name):
        FactoryAttributeGenerator.generate(attrs, model_class, attr)
  return super(ModelFactoryMetaClass, cls).__new__(
      cls, class_name, bases, attrs)
def get_object_column_definitions(object_class, fields=None,
                                  include_hidden=False):
  """Attach additional info to attribute definitions.

  Fetches the attribute info (_aliases) for the given object class and adds
  additional data (handler class, validator function, default value) needed
  for imports.

  Args:
      object_class (db.Model): Model for which we want to get column
        definitions for imports.
      include_hidden (bool): Flag which specifies if we should include
        column handlers for hidden attributes (they marked as 'hidden'
        in _aliases dict).

  Returns:
      dict: Updated attribute definitions dict with additional data.
  """
  attributes = AttributeInfo.get_object_attr_definitions(
      object_class, fields=fields, include_hidden=include_hidden)
  column_handlers = model_column_handlers(object_class)
  for key, attr in attributes.iteritems():
    handler_key = attr.get("handler_key", key)
    # Try the exact handler key first, then its ":"-prefix, then fall back
    # to the generic column handler.
    handler = column_handlers.get(handler_key)
    if handler is None:
      handler = column_handlers.get(handler_key.split(":")[0])
    if handler is None:
      handler = handlers.ColumnHandler
    validator = None
    default = None
    if attr["type"] == AttributeInfo.Type.PROPERTY:
      validator = getattr(object_class, "validate_{}".format(key), None)
      default = getattr(object_class, "default_{}".format(key), None)
    attr["handler"] = attr.get("handler", handler)
    attr["validator"] = attr.get("validator", validator)
    attr["default"] = attr.get("default", default)
  return attributes
def _get_assignable_dict(people, notif):
  """Get dict data for assignable object in notification.

  Args:
    people (List[Person]): List of people objects who should receive the
      notification.
    notif (Notification): Notification that should be sent.
  Returns:
    dict: dictionary containing notification data for all people in the
      given list.
  """
  obj = get_notification_object(notif)
  definitions = AttributeInfo.get_object_attr_definitions(obj.__class__)
  roles = _get_assignable_roles(obj)
  notif_type = notif.notification_type.name
  data = {}
  for person in people:
    # We should default to today() if no start date is found on the object.
    start_date = getattr(obj, "start_date", datetime.date.today())
    if notif_type == "assessment_updated":
      updated_fields = _get_updated_fields(
          obj, notif.created_at, definitions, roles)
    else:
      updated_fields = None
    data[person.email] = {
        "user": get_person_dict(person),
        notif_type: {
            obj.id: {
                "title": obj.title,
                "start_date_statement": utils.get_digest_date_statement(
                    start_date, "start", True),
                "url": get_object_url(obj),
                "notif_created_at": {
                    notif.id: as_user_time(notif.created_at)},
                "notif_updated_at": {
                    notif.id: as_user_time(notif.updated_at)},
                "updated_fields": updated_fields,
            }
        }
    }
  return data
def __table_args__(cls):  # pylint: disable=no-self-argument
  """Assemble SQLAlchemy ``__table_args__`` from gathered
  ``_extra_table_args``, merging all dict arguments into one trailing dict.
  """
  extra_table_args = AttributeInfo.gather_attrs(cls, '_extra_table_args')
  positional = []
  merged_kwargs = {}
  for table_arg in extra_table_args:
    # Args may be supplied lazily as callables.
    if callable(table_arg):
      table_arg = table_arg()
    if isinstance(table_arg, (list, tuple, set)):
      if isinstance(table_arg[-1], (dict,)):
        # A trailing dict inside a sequence is keyword-style table args.
        merged_kwargs.update(table_arg[-1])
        positional.extend(table_arg[:-1])
      else:
        positional.extend(table_arg)
    elif isinstance(table_arg, (dict,)):
      merged_kwargs.update(table_arg)
    else:
      positional.append(table_arg)
  if merged_kwargs:
    positional.append(merged_kwargs)
  return tuple(positional)
def attributes_map(cls):
  """Build and cache a display-name -> (attr_name, filter_by) mapping,
  formatting both sides through PROPERTY_TEMPLATE when the class has one."""
  if cls.CACHED_ATTRIBUTE_MAP:
    return cls.CACHED_ATTRIBUTE_MAP
  tmpl = getattr(cls, "PROPERTY_TEMPLATE", "{}")
  attr_map = {}
  for key, value in AttributeInfo.gather_aliases(cls).items():
    filter_by = None
    if isinstance(value, dict):
      name = value["display_name"]
      if value.get("filter_by"):
        filter_by = getattr(cls, value["filter_by"], None)
    else:
      name = value
    if not name:
      continue
    attr_map[tmpl.format(name).lower()] = (
        tmpl.format(key).lower(), filter_by)
  cls.CACHED_ATTRIBUTE_MAP = attr_map
  return cls.CACHED_ATTRIBUTE_MAP
def attributes_map(cls):
  """Get class attributes map"""
  if cls.CACHED_ATTRIBUTE_MAP:
    return cls.CACHED_ATTRIBUTE_MAP
  template = getattr(cls, "PROPERTY_TEMPLATE", "{}")
  result = {}
  for key, value in AttributeInfo.gather_aliases(cls).items():
    if isinstance(value, dict):
      name = value["display_name"]
      filter_name = value.get("filter_by")
      filter_by = getattr(cls, filter_name, None) if filter_name else None
    else:
      name = value
      filter_by = None
    if not name:
      continue
    # Both the display name and the attr key are run through the class's
    # property template before being lower-cased.
    result[template.format(name).lower()] = (
        template.format(key).lower(), filter_by)
  cls.CACHED_ATTRIBUTE_MAP = result
  return cls.CACHED_ATTRIBUTE_MAP
def _get_model_properties():
  """Get indexable properties for all snapshottable objects

  Args:
    None
  Returns:
    tuple(class_properties dict, custom_attribute_definitions dict) - Tuple
      of dictionaries, first one representing a list of searchable
      attributes for every model and second one representing dictionary of
      custom attribute definition attributes.
  """
  # pylint: disable=protected-access
  model_names = Types.all
  # CAD rows reference their owning model by its singular table name.
  singular_names = {
      getattr(all_models, name)._inflector.table_singular
      for name in model_names
  }
  cad_query = db.session.query(
      models.CustomAttributeDefinition.id,
      models.CustomAttributeDefinition.title,
  ).filter(
      models.CustomAttributeDefinition.definition_type.in_(singular_names)
  )
  class_properties = {}
  for name in model_names:
    gathered = AttributeInfo.gather_attrs(
        getattr(all_models, name), '_fulltext_attrs')
    # FullTextAttr instances are reduced to their alias string.
    class_properties[name] = [
        attr.alias if isinstance(attr, FullTextAttr) else attr
        for attr in gathered
    ]
  return class_properties, cad_query.all()
def _get_model_properties():
  """Get indexable properties for all snapshottable objects

  Args:
    None
  Returns:
    tuple(class_properties dict, custom_attribute_definitions dict) - Tuple
      of dictionaries, first one representing a list of searchable
      attributes for every model and second one representing dictionary of
      custom attribute definition attributes.
  """
  # pylint: disable=protected-access
  from ggrc.models import all_models
  model_names = Types.all
  # CAD rows reference their owning model by its singular table name.
  singular_names = {
      getattr(all_models, name)._inflector.table_singular
      for name in model_names
  }
  # Only text-like and date CADs are considered searchable here.
  cad_query = db.session.query(
      models.CustomAttributeDefinition.id,
      models.CustomAttributeDefinition.title,
  ).filter(
      models.CustomAttributeDefinition.definition_type.in_(singular_names),
      models.CustomAttributeDefinition.attribute_type.in_(
          ["Text", "Rich Text", "Date"])
  )
  custom_attribute_definitions = dict(cad_query)
  class_properties = {
      name: AttributeInfo.gather_attrs(
          getattr(all_models, name), '_fulltext_attrs')
      for name in model_names
  }
  return class_properties, custom_attribute_definitions
def test_query_all_aliases(self):
  """Tests query for all aliases"""
  def rhs(model, attr):
    # Pick a filter value that matches the attribute's column type:
    # "1" for Boolean columns, a date-ish string for everything else.
    attr = getattr(model, attr, None)
    if attr is not None and hasattr(attr, "_query_clause_element"):
      class_name = attr._query_clause_element().type.__class__.__name__
      if class_name == "Boolean":
        return "1"
    return "1/1/2015"

  def data(model, attr, field):
    # Build the minimal export request filtering on ``field``.
    return [{
        "object_name": model.__name__,
        "fields": "all",
        "filters": {
            "expression": {
                "left": field.lower(),
                "op": {"name": "="},
                "right": rhs(model, attr)
            },
        }
    }]

  failed = set()
  for model in set(get_importables().values()):
    for attr, field in AttributeInfo(model)._aliases.items():
      if field is None:
        continue
      try:
        field = field["display_name"] if isinstance(
            field, dict) else field
        res = self.export_csv(data(model, attr, field))
        self.assertEqual(res.status_code, 200)
      except Exception as err:
        # Collect every failure so all aliases are exercised before the
        # final assertion reports them together.
        failed.add((model, attr, field, err))
  self.assertEqual(sorted(failed), [])
def _get_properties(self, obj):
  """Get indexable properties and values.

  Properties should be returned in the following format:
  {
    property1: {
      subproperty1: value1,
      subproperty2: value2,
      ...
    },
    ...
  }
  If there is no subproperty - empty string is used as a key
  """
  if obj.type == "Snapshot":
    # Snapshots do not have any indexable content. The object content for
    # snapshots is stored in the revision. Snapshots can also be made for
    # different models so we have to get fulltext attrs for the actual child
    # that was snapshotted and get data for those from the revision content.
    child_model = getattr(all_models, obj.child_type, None)
    if not child_model:
      return {}
    content = obj.revision.content
    return {
        name: {"": content.get(name)}
        for name in AttributeInfo.gather_attrs(child_model,
                                               '_fulltext_attrs')
    }
  tmpl = obj.PROPERTY_TEMPLATE if isinstance(obj, Indexed) else u"{}"
  properties = {}
  for attr in self._fulltext_attrs:
    if isinstance(attr, basestring):
      properties[tmpl.format(attr)] = {"": getattr(obj, attr)}
    elif isinstance(attr, FullTextAttr):
      properties.update(attr.get_property_for(obj))
  return properties
def get_object_column_definitions(object_class):
  """Attach additional info to attribute definitions.

  Fetches the attribute info (_aliases) for the given object class and adds
  additional data (handler class, validator function, default value) needed
  for imports.

  Args:
      object_class (db.Model): Model for which we want to get column
        definitions for imports.

  Returns:
      dict: Updated attribute definitions dict with additional data.
  """
  attributes = AttributeInfo.get_object_attr_definitions(
      object_class, include_oca=True)
  column_handlers = model_column_handlers(object_class)
  for key, attr in attributes.iteritems():
    handler_key = attr.get("handler_key", key)
    handler = column_handlers.get(handler_key, handlers.ColumnHandler)
    validator = None
    default = None
    attr_type = attr["type"]
    if attr_type == AttributeInfo.Type.PROPERTY:
      validator = getattr(object_class, "validate_{}".format(key), None)
      default = getattr(object_class, "default_{}".format(key), None)
    elif attr_type == AttributeInfo.Type.MAPPING:
      handler = column_handlers.get(key, handlers.MappingColumnHandler)
    elif attr_type == AttributeInfo.Type.CUSTOM:
      handler = column_handlers.get(
          key, custom_attribute.CustomAttributeColumHandler)
    elif attr_type == AttributeInfo.Type.OBJECT_CUSTOM:
      handler = column_handlers.get(
          key, custom_attribute.ObjectCaColumnHandler)
    attr["handler"] = attr.get("handler", handler)
    attr["validator"] = attr.get("validator", validator)
    attr["default"] = attr.get("default", default)
  return attributes
def _get_properties(self, obj):
  """Get indexable properties and values.

  Properties should be returned in the following format:
  {
    property1: {
      subproperty1: value1,
      subproperty2: value2,
      ...
    },
    ...
  }
  If there is no subproperty - empty string is used as a key
  """
  if obj.type == "Snapshot":
    # Snapshots do not have any indexable content. The object content for
    # snapshots is stored in the revision. Snapshots can also be made for
    # different models so we have to get fulltext attrs for the actual child
    # that was snapshotted and get data for those from the revision content.
    child_model = getattr(ggrc.models.all_models, obj.child_type, None)
    if not child_model:
      return {}
    content = obj.revision.content
    result = {}
    for name in AttributeInfo.gather_attrs(child_model, '_fulltext_attrs'):
      result[name] = {"": content.get(name)}
    return result
  result = {}
  for name in self._fulltext_attrs:
    result[name] = {"": getattr(obj, name)}
  return result
def model_is_indexed(tgt_class):
  """Return True when tgt_class declares any fulltext attributes."""
  return bool(AttributeInfo.gather_attrs(tgt_class, '_fulltext_attrs'))
def __init__(self, tgt_class):
  """Collect and cache the '_fulltext_attrs' gathered for tgt_class."""
  self._fulltext_attrs = AttributeInfo.gather_attrs(tgt_class,
                                                    '_fulltext_attrs')
def __init__(self, tgt_class, indexer):
  """Collect the '_fulltext_attrs' for tgt_class and keep the indexer."""
  self.indexer = indexer
  self._fulltext_attrs = AttributeInfo.gather_attrs(tgt_class,
                                                    '_fulltext_attrs')
def get_column_order(columns):
  """Delegate column ordering to AttributeInfo.get_column_order."""
  return AttributeInfo.get_column_order(columns)
def attributes(self):
  """Endpoint attributes"""
  model = self.obj.model_class
  return AttributeInfo.gather_attr_dicts(model, '_api_attrs')