-
Notifications
You must be signed in to change notification settings - Fork 1
/
ext.py
364 lines (289 loc) · 11.1 KB
/
ext.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
import functools
import hashlib
from datetime import datetime
from dogpile.cache import make_region
from dogpile.cache.api import NO_VALUE
from flask_sqlalchemy import BaseQuery, DefaultMeta, Model, SQLAlchemy, _QueryProperty
from sqlalchemy import Column, DateTime, Integer, event, inspect
from sqlalchemy.ext.declarative import DeclarativeMeta, declarative_base, declared_attr
from sqlalchemy.orm.attributes import get_history
from sqlalchemy.orm.interfaces import MapperOption
from flask_security import Security
from flask_mail import Mail
from flask_debugtoolbar import DebugToolbarExtension
from flask import abort
from config import REDIS_URL
from corelib.db import PropsMixin, PropsItem
def md5_key_mangler(key):
    """Mangle long SQL statements into short, stable MD5 cache keys.

    Only keys that look like SELECT statements are hashed; all other keys
    pass through unchanged.

    FIX: encode as UTF-8 rather than ASCII.  Queries containing non-ASCII
    literals previously raised ``UnicodeEncodeError``; UTF-8 is a strict
    superset of ASCII, so digests for ASCII-only statements are unchanged.
    """
    if key.startswith("SELECT "):
        key = hashlib.md5(key.encode("utf-8")).hexdigest()
    return key
# Dogpile cache regions, keyed by region name.
# FIX: ``arguments`` is the Redis backend's connection configuration and
# must be passed to ``configure()``.  Previously it was a sibling keyword
# of ``dict(...)``, so the backend never received the URL and ``regions``
# contained a bogus "arguments" entry that is not a region.
regions = dict(
    default=make_region(key_mangler=md5_key_mangler).configure(
        "dogpile.cache.redis",
        arguments={"url": REDIS_URL},
    )
)
def memoize(obj):
    """Cache *obj*'s results keyed on the string form of its arguments.

    The backing dict is exposed as ``obj.cache`` so callers can inspect
    or clear it.
    """
    store = obj.cache = {}

    @functools.wraps(obj)
    def wrapper(*args, **kwargs):
        signature = str(args) + str(kwargs)
        try:
            return store[signature]
        except KeyError:
            result = store[signature] = obj(*args, **kwargs)
            return result

    return wrapper
def _key_from_query(query, qualifier=None):
stmt = query.with_labels().statement
compiled = stmt.compile()
params = compiled.params
return " ".join([str(compiled)] + [str(params[k] for k in sorted(params))])
class CachingQuery(BaseQuery):
    """BaseQuery subclass that can transparently serve results from dogpile.

    A query opts in to caching when a ``FromCache`` option has been
    applied to it (which sets ``_cache_region`` on the instance);
    otherwise it behaves exactly like a plain ``BaseQuery``.
    """

    def __init__(self, regions, entities, *args, **kwargs):
        # The region registry is pre-bound by query_callable().
        self.cache_regions = regions
        super().__init__(entities=entities, *args, **kwargs)

    def __iter__(self):
        # Queries without a FromCache option fall straight through.
        if not hasattr(self, "_cache_region"):
            return BaseQuery.__iter__(self)
        return self.get_value(createfunc=lambda: list(BaseQuery.__iter__(self)))

    def _get_cache_plus_key(self):
        """Return the (dogpile region, cache key) pair for this query."""
        region = self.cache_regions[self._cache_region.region]
        # An explicit cache_key on the option wins; otherwise derive one
        # from the compiled statement and its parameters.
        key = self._cache_region.cache_key or _key_from_query(self)
        return region, key

    def invalidate(self):
        """Drop this query's cached value from its region."""
        region, key = self._get_cache_plus_key()
        region.delete(key)

    def get_value(
        self, merge=True, createfunc=None, expiration_time=None, ignore_expiration=False
    ):
        """Fetch the cached result, optionally creating it on a miss.

        Raises ``KeyError`` when the region holds no value and no
        ``createfunc`` was supplied.
        """
        region, key = self._get_cache_plus_key()
        assert (
            not ignore_expiration or not createfunc
        ), "can`t ignore expiration and also provide createfunc"
        if createfunc and not ignore_expiration:
            cached = region.get_or_create(key, createfunc, expiration_time=expiration_time)
        else:
            cached = region.get(
                key,
                expiration_time=expiration_time,
                ignore_expiration=ignore_expiration,
            )
        if cached is NO_VALUE:
            raise KeyError(key)
        if merge:
            # Re-attach cached entities to the current session.
            cached = self.merge_result(cached, load=False)
        return cached

    def set_value(self, value):
        """Store *value* as this query's cached result."""
        region, key = self._get_cache_plus_key()
        region.set(key, value)
def query_callable(regions, query_cls=CachingQuery):
    """Return a query factory with the region registry pre-bound.

    Flask-SQLAlchemy calls the factory as ``query_class(entities, ...)``;
    the factory forwards those arguments after *regions*.
    """
    def factory(*args, **kwargs):
        return query_cls(regions, *args, **kwargs)

    return factory
class FromCache(MapperOption):
    """Query option that marks a query as cacheable in a named region."""

    # Relationship lazy-loads triggered by cached entities should not
    # themselves inherit this caching option.
    propagate_to_loaders = False

    def __init__(self, region="default", cache_key=None):
        # region: name of a dogpile region in the registry.
        # cache_key: explicit key override; when None, CachingQuery derives
        # a key from the compiled statement.
        self.region = region
        self.cache_key = cache_key

    def process_query(self, query):
        # Tag the query instance; CachingQuery.__iter__ checks for this
        # attribute to decide whether to consult the cache.
        query._cache_region = self
class Query:
    """Minimal query-like wrapper around an iterator of entities."""

    def __init__(self, entities):
        self.entities = entities

    def __iter__(self):
        return self.entities

    def first(self):
        """Return the next entity, or None when the iterator is exhausted."""
        return next(self.entities, None)

    def all(self):
        """Materialize the remaining entities as a list."""
        return list(self.entities)
class Cache:
    """Per-model cache facade layered over a dogpile region.

    Caches single rows by primary key, plus lists of primary keys and
    counts for single-attribute ``filter_by`` lookups.  ``_flush_all`` is
    invoked from BaseModel's mapper events to invalidate on writes.
    """

    def __init__(self, model, regions, label):
        self.model = model
        self.regions = regions
        self.label = label
        # Primary-key attribute name; models may override via ``cache_pk``.
        self.pk = getattr(model, "cache_pk", "id")

    def get(self, pk):
        """Load a single row by primary key through the region cache."""
        return self.model.query.options(self.from_cache(pk=pk)).get(pk)

    def count(self, **kwargs):
        """Cached row count, filtered by at most one attribute.

        Raises TypeError for more than one filter attribute or an
        attribute the model does not map.
        """
        if kwargs:
            if len(kwargs) > 1:
                raise TypeError("filter accept only one attribute for filtering")
            key, value = list(kwargs.items())[0]
            if key not in self._attrs():
                raise TypeError(f"{self} does not have an attribute {key}")
        cache_key = self._count_cache_key(**kwargs)
        r = self.regions[self.label]
        count = r.get(cache_key)
        if count is NO_VALUE:
            # Miss: compute from the database and populate the region.
            count = self.model.query.filter_by(**kwargs).count()
            r.set(cache_key, count)
        return count

    def filter(self, order_by="asc", offset=None, limit=None, **kwargs):
        """Cached ``filter_by`` over at most one attribute.

        The ordered list of matching primary keys is cached; entities are
        then fetched through the per-row cache.  Returns a lazy ``Query``
        wrapper.
        """
        if kwargs:
            if len(kwargs) > 1:
                raise TypeError("filter accept only one attribute for filtering")
            key, value = list(kwargs.items())[0]
            if key not in self._attrs():
                raise TypeError(f"{self} does not have an attribute {key}")
        cache_key = self._cache_key(**kwargs)
        r = self.regions[self.label]
        pks = r.get(cache_key)
        if pks is NO_VALUE:
            pks = [
                o.id
                for o in self.model.query.filter_by(**kwargs).with_entities(
                    getattr(self.model, self.pk)
                )
            ]
            r.set(cache_key, pks)
        if order_by == "desc":
            pks.reverse()
        # Slicing happens after the full pk list is cached, so one cache
        # entry serves every offset/limit combination.
        if offset is not None:
            pks = pks[offset:]
        if limit is not None:
            pks = pks[:limit]
        # Bulk-fetch the per-row cache entries; misses fall back to get().
        keys = [self._cache_key(id) for id in pks]
        return Query(self.get_entities(pks, r.get_multi(keys)))

    def get_entities(self, pks, objs):
        """Yield entities for *pks*, filling region misses from the DB."""
        for pos, obj in enumerate(objs):
            if obj is NO_VALUE:
                yield self.get(pks[pos])
            else:
                # NOTE(review): assumes each cached entry is a 1-tuple
                # holding the entity — confirm against how rows are stored.
                yield obj[0]

    def flush(self, key):
        """Delete a single key from this model's region."""
        self.regions[self.label].delete(key)

    @memoize
    def _attrs(self):
        # All mapped attribute names except the primary key.
        return [a.key for a in inspect(self.model).attrs if a.key != self.pk]

    @memoize
    def from_cache(self, cache_key=None, pk=None):
        """Build a FromCache option; *pk* derives the per-row key."""
        if pk:
            cache_key = self._cache_key(pk)
        return FromCache(self.label, cache_key)

    @memoize
    def _count_cache_key(self, pk="all", **kwargs):
        # Count keys are the corresponding filter key with a "_count" suffix.
        return self._cache_key(pk, **kwargs) + "_count"

    @memoize
    def _cache_key(self, pk="all", **kwargs):
        # Key shapes: "<table>.<pk>[all]", "<table>.<pk>[42]",
        # or "<table>.<attr>=<value>[all]" for filtered lookups.
        q_filter = "".join(f"{k}={v}" for k, v in kwargs.items()) or self.pk
        return f"{self.model.__tablename__}.{q_filter}[{pk}]"

    def _flush_all(self, obj):
        """Invalidate every cache entry affected by a write to *obj*."""
        for attr in self._attrs():
            added, unchanged, deleted = get_history(obj, attr)
            for value in list(deleted) + list(added):
                # Drop filtered pk-list caches keyed on any old or new value.
                self.flush(self._cache_key(**{attr: value}))
        for key in (
            self._cache_key(),
            self._cache_key(getattr(obj, self.pk)),
            self._count_cache_key(),
            self._count_cache_key(getattr(obj, self.pk)),
        ):
            self.flush(key)
class BaseModel(PropsMixin, Model):
    """Base declarative model: integer pk, timestamps, dogpile-backed cache.

    Mapper events registered in ``__declare_last__`` flush the per-model
    cache on every insert/update/delete and give subclasses overridable
    ``__flush_*_event__`` hooks.
    """

    cache_label = "default"
    cache_regions = regions
    query_class = query_callable(regions)
    __table_args__ = {"mysql_charset": "utf8mb4"}

    id = Column(Integer, primary_key=True)
    # FIX: was ``default=datetime.utcnow()`` — the parentheses called it
    # once at import time, freezing every row's default to process
    # start-up time.  Passing the callable lets SQLAlchemy evaluate it
    # per INSERT.
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=None)

    def get_uuid(self):
        # NOTE(review): presumably consumed by PropsMixin as a storage
        # key prefix — confirm against corelib.db.
        return f"/bran/{self.__class__.__name__}/{self.id}"

    def __repr__(self):
        return f"<{self.__class__.__name__} id:{self.id}>"

    @declared_attr
    def cache(cls):
        # One Cache facade per concrete model class.
        return Cache(cls, cls.cache_regions, cls.cache_label)

    @classmethod
    def get(cls, id):
        """Fetch a row by primary key via the session/query."""
        return cls.query.get(id)

    @classmethod
    def get_or_404(cls, id):
        """Fetch by primary key, aborting the request with 404 on miss."""
        rv = cls.get(id)
        if rv is None:
            abort(404)
        return rv

    @classmethod
    def get_multi(cls, ids):
        """Fetch several rows by primary key, preserving input order."""
        return [cls.get(id) for id in ids]

    def url(self):
        """Conventional URL path for this row."""
        return f"/{self.__class__.__name__.lower()}/{self.id}"

    def to_dict(self):
        """Return column values plus the ``kind`` attribute as a dict."""
        # list(...) keeps this working when .keys() returns a view-like
        # collection rather than a plain list (newer SQLAlchemy versions).
        columns = list(self.__table__.columns.keys()) + ["kind"]
        return {key: getattr(self, key, None) for key in columns}

    @staticmethod
    def _flush_event(mapper, connection, target):
        # Invalidate dogpile entries, then fire the subclass hook.
        target.cache._flush_all(target)
        target.__flush_event__(target)

    @classmethod
    def __flush_event__(cls, target):
        pass

    @staticmethod
    def _flush_insert_event(mapper, connection, target):
        target._flush_event(mapper, connection, target)
        target.__flush_insert_event__(target)

    @staticmethod
    def _flush_before_update_event(mapper, connection, target):
        target._flush_event(mapper, connection, target)
        target.__flush_before_update_event__(target)

    @staticmethod
    def _flush_after_update_event(mapper, connection, target):
        target._flush_event(mapper, connection, target)
        target.__flush_after_update_event__(target)

    @staticmethod
    def _flush_delete_event(mapper, connection, target):
        target._flush_event(mapper, connection, target)
        target.__flush_delete_event__(target)

    # No-op hooks; subclasses override for custom invalidation logic.
    @classmethod
    def __flush_insert_event__(cls, target):
        pass

    @classmethod
    def __flush_before_update_event__(cls, target):
        pass

    @classmethod
    def __flush_after_update_event__(cls, target):
        pass

    @classmethod
    def __flush_delete_event__(cls, target):
        pass

    @classmethod
    def __declare_last__(cls):
        # Called by SQLAlchemy after mapper configuration completes.
        event.listen(cls, "after_insert", cls._flush_insert_event)
        event.listen(cls, "before_update", cls._flush_before_update_event)
        event.listen(cls, "after_update", cls._flush_after_update_event)
        event.listen(cls, "after_delete", cls._flush_delete_event)
class BindDBPropertyMixin:
    """Metaclass mixin recording PropsItem attributes declared on a model.

    Collected as ``_db_columns``: a list of (attribute name, default)
    pairs, one per PropsItem found in the class namespace.
    """

    def __init__(cls, name, bases, d):
        super().__init__(name, bases, d)
        cls._db_columns = [
            (attr, item.default)
            for attr, item in d.items()
            if isinstance(item, PropsItem)
        ]
class CombinedMeta(BindDBPropertyMixin, DefaultMeta):
    """Metaclass joining PropsItem binding with Flask-SQLAlchemy's DefaultMeta."""

    pass
class UnLockedAlchemy(SQLAlchemy):
    """SQLAlchemy subclass wiring in CombinedMeta and a laxer isolation level."""

    def make_declarative_base(self, model, metadata=None):
        """Build the declarative base, forcing the CombinedMeta metaclass.

        Mirrors Flask-SQLAlchemy's implementation, differing only in the
        ``metaclass=CombinedMeta`` argument.
        """
        if not isinstance(model, DeclarativeMeta):
            model = declarative_base(
                cls=model, name="Model", metadata=metadata, metaclass=CombinedMeta
            )
        # Prefer an explicitly supplied metadata over the base's own.
        if metadata is not None and model.metadata is not metadata:
            model.metadata = metadata
        if not getattr(model, "query_class", None):
            model.query_class = self.Query
        # Attach the ``Model.query`` property bound to this extension.
        model.query = _QueryProperty(self)
        return model

    def apply_driver_hacks(self, app, info, options):
        # Default to READ COMMITTED unless the caller configured a level.
        if "isolation_level" not in options:
            options["isolation_level"] = "READ COMMITTED"
        return super().apply_driver_hacks(app, info, options)
# Shared extension singletons.  NOTE(review): presumably bound to the app
# elsewhere via init_app() — confirm against the application factory.
db = UnLockedAlchemy(model_class=BaseModel)
security = Security()
mail = Mail()
debug_bar = DebugToolbarExtension()