class GrandParent(Parent):
    """Model fixture extending ``Parent`` with role-based serialization rules."""

    age = IntType()
    family_secret = StringType()

    class Options:
        # Serialization roles: 'grandchildren' may only see 'age';
        # 'public' sees everything except 'id' and 'family_secret'.
        roles = {
            'grandchildren': whitelist("age"),
            'public': blacklist("id", "family_secret")
        }
class User(with_metaclass(ORMMeta, BaseModel, UserMixin)):
    """ORM user model with Flask-Security-style login bookkeeping fields."""

    # Fields that must be unique per stored document.
    _uniques = ['email']

    firstname = StringType()
    lastname = StringType()
    email = StringType(required=True)
    password = StringType()
    active = BooleanType(default=True)
    confirmed_at = DateTimeType()
    # Callable defaults are evaluated per instance, so each new user gets
    # the current time rather than a single import-time timestamp.
    last_login_at = DateTimeType(default=datetime.datetime.now)
    current_login_at = DateTimeType(default=datetime.datetime.now)
    registered_at = DateTimeType()
    last_login_ip = StringType()
    current_login_ip = StringType()
    login_count = IntType()
    sort_on = IntType()
    # BUG FIX: default=[] shared one mutable list across every instance;
    # a callable default yields a fresh list per instance (consistent with
    # the callable datetime.now defaults above).
    roles = ListType(ModelType(Role), default=list)
class StorageAdd(Model):
    """Storage add model structure
    """

    _backends = ['CEPH_RBD']
    backends = string_enum(*_backends)
    # Map each backend name to a 1-based code; generalized from
    # dict(zip(_backends, [1])) so the mapping stays correct if more
    # backends are appended to _backends later.
    backends_dict = {name: code for code, name in enumerate(_backends, start=1)}

    _types = ['CAPACITY', 'PERFORMANCE']
    types = string_enum(*_types)

    name = StringType(required=True, serialized_name='NAME')
    uuid = UUIDType(serialized_name='UUID')
    disk_total = IntType(required=True, serialized_name='DISK_TOTAL')
    disk_used = IntType(required=True, serialized_name='DISK_USED')
    backend = StringType(required=True, serialized_name='BACKEND',
                         choices=_backends)
    # BUG FIX: default=[] shared one mutable list across every instance;
    # a callable default yields a fresh list per instance.
    hosts = ListType(IPv4Type, serialized_name='HOSTS', default=list)
    type = StringType(required=True, serialized_name='TYPE', choices=_types)
    domain = UUIDType(serialized_name='DOMAIN', required=True)
class GeoIPModel(BaseModel):
    """Mongo-backed model for GeoIP lookups, expired via a TTL index."""

    collection_name = 'geoip'
    # TTL index: documents expire GEO_IP_TTL seconds after 'timestamp'.
    indexes = [{
        'index': [('timestamp', 1)],
        'args': {
            'expireAfterSeconds': GEO_IP_TTL
        }
    }]

    site = IntType(required=True)
    timestamp = DateTimeType(required=True)
    # strict=False: extra keys inside the stored geo sub-document are tolerated.
    geo = ModelType(GeoData, strict=False)
def test_extract_subclass_fields(self):
    """Fields inherited from a Model subclass merge with explicit attrs,
    while plain (non-field) class attributes are skipped."""
    class Foo(Model):
        x = IntType()
        y = IntType()
        z = 5  # should be ignored

    extra = {'i': IntType()}
    merged = _extract_fields([Foo], extra)
    self.assertEqual(
        merged,
        {'i': extra['i'], 'x': Foo.x, 'y': Foo.y},
    )
class BaseStatsModel(BaseModel):
    """Base class for Mongo-backed stats models.

    Stores per-site counters in a 'totals' sub-document and, when
    ``has_granularity`` is set, per-period counters in a 'values' array.
    Reporting methods build MongoDB aggregation pipelines over those
    documents.
    """

    __metaclass__ = StatsModelMetaClass

    # Subclasses are expected to override these class-level settings
    # (presumably via StatsModelMetaClass — confirm against subclasses).
    has_granularity = False
    granularity_name = None
    type_name = None
    stats_class = None
    data_fields = []

    site = IntType(required=True)

    indexes = [{'index': [('site', 1), ('type', 1)]}]

    @classmethod
    def get_events(cls):
        """Return the event names declared on the 'totals' sub-model."""
        return cls.totals.fields.keys()

    def find(self, query=None, *args, **kwargs):
        """Like the base ``find`` but scoped to this model's type."""
        query = query or {}
        if self.type:
            query['type'] = self.type
        return super(BaseStatsModel, self).find(query, *args, **kwargs)

    def find_one(self, query=None, *args, **kwargs):
        """Like the base ``find_one`` but scoped to this model's type."""
        query = query or {}
        if self.type:
            query['type'] = self.type
        return super(BaseStatsModel, self).find_one(query, *args, **kwargs)

    def get_totals(self, site_id, begin_date, end_date, events=None,
                   group_fields=None, aggregate_by_values=False, limit=None):
        """Aggregate event totals for a site over a date range.

        Groups by ``group_fields`` (defaults to ``self.data_fields``) and
        sorts descending by each event's total.
        """
        events = events or self.get_events()
        if group_fields is None:
            group_fields = self.data_fields
        match = {
            'site': site_id,
            'type': self.type,
            'date': {
                # When aggregating by granular values, widen the lower bound
                # to the start of the coarser period containing begin_date.
                '$gte': self.get_min_date(begin_date)
                if aggregate_by_values else begin_date,
                '$lte': end_date
            }
        }
        group = {'_id': {field: ('$%s' % field) for field in group_fields}}
        sort = SON([(event, -1) for event in events])
        result = self._aggregate(begin_date, end_date, events,
                                 aggregate_by_values, filter_values=True,
                                 match=match, group=group, sort=sort,
                                 limit=limit)
        return result

    def _replace_date(self, data, begin_date, end_date):
        """Fold each item's granularity component back into its 'date' and
        drop items that fall outside [begin_date, end_date]."""
        for item in data:
            item['date'] = item['date'].replace(
                **{self.granularity_name: item[self.granularity_name]})
            del item[self.granularity_name]
        data = [
            item for item in data if begin_date <= item['date'] <= end_date
        ]
        return data

    def _aggregate(self, begin_date, end_date, events, aggregate_by_values,
                   filter_values, match, group, sort=None, limit=None):
        """Build and run the aggregation pipeline shared by the reporting
        methods; returns the result list with '_id' fields flattened."""
        pipeline = [{'$match': match}]
        # Sum either per-period counters ('values') or lifetime counters
        # ('totals') for each requested event.
        sum_key = 'values' if aggregate_by_values else 'totals'
        for event in events:
            group[event] = {'$sum': '$%s.%s' % (sum_key, event)}
        if aggregate_by_values:
            pipeline.append({'$unwind': '$values'})
            if filter_values:
                granularity_range = self.granularity_range(begin_date)
                # NOTE(review): a falsy component (e.g. hour=0) falls back to
                # the range bound via 'or' — confirm that is intended.
                pipeline.append({
                    '$match': {
                        'values.%s' % self.granularity_name: {
                            '$gte': getattr(begin_date, self.granularity_name)
                            or granularity_range[0],
                            '$lte': getattr(end_date, self.granularity_name)
                            or granularity_range[1]
                        }
                    }
                })
        pipeline.append({'$group': group})
        # exclude zero values
        pipeline.append(
            {'$match': {
                '$or': [{
                    event: {
                        '$ne': 0
                    }
                } for event in events]
            }})
        if sort:
            pipeline.append({'$sort': sort})
        if limit:
            pipeline.append({'$limit': limit})
        result = list(self.collection.aggregate(pipeline, allowDiskUse=True))
        result = self._replace_id(result)
        return result

    def get_report(self, site_id, begin_date, end_date, events, dimensions,
                   summary=False, aggregate_by_values=False):
        """Aggregate events per date (and per dimension fields unless
        ``summary``) across the given report dimensions."""
        from addnow.apps.tracker.models.reports import ALL_DIMENSIONS
        match = {
            'site': site_id,
            'date': {
                '$gte': self.get_min_date(begin_date)
                if aggregate_by_values else begin_date,
                '$lte': end_date
            },
            'type': {
                '$in': dimensions
            }
        }
        group = {'_id': {'date': '$date', 'dimension': '$type'}}
        if not summary:
            # Break the grouping down by each dimension's own data fields.
            for dimension in dimensions:
                dimension_class = ALL_DIMENSIONS[dimension]
                for field in dimension_class.get_data_fields():
                    group['_id'][field] = '$%s' % field
        if aggregate_by_values:
            group['_id'][
                self.granularity_name] = '$values.%s' % self.granularity_name
            sort = SON([('_id.date', 1),
                        ('_id.%s' % self.granularity_name, 1)])
        else:
            sort = {'_id.date': 1}
        result = self._aggregate(begin_date, end_date, events,
                                 aggregate_by_values, filter_values=False,
                                 match=match, group=group, sort=sort)
        if aggregate_by_values:
            result = self._replace_date(result, begin_date, end_date)
        return result

    def get_events_summary(self, site_id, begin_date, end_date, events,
                           aggregate_by_values=False):
        """Aggregate the given events per date, mapping each event to its
        singular report dimension for the type match."""
        from addnow.apps.tracker.models.reports import REPORT_TYPES
        dimensions = [REPORT_TYPES[event].event_singular for event in events]
        match = {
            'site': site_id,
            'date': {
                '$gte': self.get_min_date(begin_date)
                if aggregate_by_values else begin_date,
                '$lte': end_date
            },
            'type': {
                '$in': dimensions
            }
        }
        group = {'_id': {'date': '$date'}}
        if aggregate_by_values:
            group['_id'][
                self.granularity_name] = '$values.%s' % self.granularity_name
            sort = SON([('_id.date', 1),
                        ('_id.%s' % self.granularity_name, 1)])
        else:
            sort = {'_id.date': 1}
        result = self._aggregate(begin_date, end_date, events,
                                 aggregate_by_values, filter_values=False,
                                 match=match, group=group, sort=sort)
        if aggregate_by_values:
            result = self._replace_date(result, begin_date, end_date)
        return result

    def _prepare_query(self, kwargs):
        """Validate kwargs against the model and turn them into a lookup
        query (date truncated to the period start; counters stripped)."""
        self.import_data(kwargs)
        self.validate()
        query = self.to_native()
        if 'date' in query:
            query['date'] = self.get_min_date(self.date)
        del query['totals']
        if 'values' in query:
            del query['values']
        return query

    def _prepare_update(self, event, value=1):
        """Build the $inc field map: lifetime total plus, when granular,
        the matched positional element inside 'values'."""
        fields = {'totals.%s' % event: value}
        if self.has_granularity:
            fields['values.$.%s' % event] = value
        return fields

    def _preallocate(self, query):
        """Insert a zeroed counter document for ``query``, pre-filling one
        zeroed entry per granularity period when applicable."""
        doc = {'totals': {}}
        doc.update(query)
        events = self.get_events()
        for event in events:
            doc['totals'][event] = 0
        if self.has_granularity:
            doc['values'] = []
            granularity_range = self.granularity_range(self.date)
            for period in range(*granularity_range):
                counters = {event: 0 for event in events}
                counters[self.granularity_name] = period
                doc['values'].append(counters)
        self.collection.insert_one(doc)

    def inc_counter(self, event, value=1, **kwargs):
        """Increment ``event`` by ``value``; preallocate the document on a
        first miss, then retry the increment."""
        query = self._prepare_query(kwargs)
        update_query = query.copy()
        fields = self._prepare_update(event, value)
        if self.has_granularity:
            # Target the array element for the current period so the
            # positional '$' in _prepare_update resolves.
            update_query[
                'values.%s' %
                self.granularity_name] = self.get_granularity_item_from_date(
                    self.date)
        update_result = self.inc(update_query, fields, validate=False)
        if update_result.matched_count == 0:
            self._preallocate(query)
            # TODO: remove many=True after migration (merging counters)
            self.inc(update_query, fields, validate=False, many=True)
class ClickStats(Model):
    """Counter sub-model holding a single 'clicks' total."""
    clicks = IntType(default=0)
def test_extract_class_fields(self):
    """With only the base Model, the extracted fields are exactly the
    explicitly passed attrs."""
    declared = {'i': IntType()}
    self.assertEqual(_extract_fields([Model], declared), declared)
class M(Model):
    """Fixture with four int fields; only ``d`` carries an explicit default."""
    a = IntType()
    b = IntType()
    c = IntType()
    d = IntType(default=0)
class GrandParent(Parent):
    """Fixture extending ``Parent`` with an additional 'age' field."""
    age = IntType()
class User(Model):
    """Model with required name/age plus an optional account level."""
    name = StringType(required=True)
    age = IntType(required=True)
    account_level = IntType()
class CopyStats(Model):
    """Counter sub-model holding a single 'copies' total."""
    copies = IntType(default=0)
class FollowStats(Model):
    """Counter sub-model holding a single 'follows' total."""
    follows = IntType(default=0)
class ShareStats(Model):
    """Counter sub-model holding a single 'shares' total."""
    shares = IntType(default=0)
class NameTest(with_metaclass(ORMMeta, BaseModel)):
    """ORM test model that declares a 'name' field."""
    name = StringType()
    something = IntType()
class IdTest(with_metaclass(ORMMeta, BaseModel)):
    """ORM test model with no 'name' field of its own."""
    something = IntType()
class Foo(Model):
    """Fixture: two typed fields plus a plain attribute that field
    extraction must skip."""
    x = IntType()
    y = IntType()
    z = 5  # should be ignored
class User(Model):
    """Model with required name and age fields."""
    name = StringType(required=True)
    age = IntType(required=True)
class TestModel(Model):
    """Minimal model with a single optional int field."""
    some_int = IntType()
class Player(Model):
    """Model with an integer 'id' field.

    NOTE(review): the field name shadows the ``id`` builtin inside the class
    body, but it is part of the serialized interface and kept as-is.
    """
    id = IntType()
class ViewStats(Model):
    """Counter sub-model holding a single 'views' total."""
    views = IntType(default=0)