def __new__(mcs, class_name, bases, attrs):
    """Metaclass hook that builds a model class.

    Enforces the "final attribute" contract (names listed in
    ``mcs._finals`` may only be overridden when marked with the
    ``@override`` decorator), merges ``Options`` inner classes from all
    bases plus the class body into a fresh ``Options`` type and a
    ``ModelOptions`` instance, and defaults ``__abstract__`` to False.
    """
    # Collect overridden final attrs that lack the @override marker.
    finals = []
    for k, v in iteritems(attrs):
        if (k in mcs._finals and not getattr(v, '_override', False)):
            finals.append(k)
    if finals:
        raise RuntimeError(
            'Class `{}` override some final attrs: {}. '
            'Please use the `@override` decorator to decorate them'
            ' if you understand what are you doing'.format(
                class_name, ', '.join(map('`{}`'.format, finals))))
    # Merge Options from bases first so the class body wins conflicts.
    options = {}
    for base in bases:
        meta = getattr(base, 'Options', None)
        if meta is not None:
            options.update(meta.__dict__)
    meta = attrs.get('Options', None)
    if meta:
        options.update(meta.__dict__)
    # Drop dunder/private noise inherited via __dict__.
    options = {
        k: v
        for k, v in iteritems(options)
        if not k.startswith('_')
    }
    attrs['Options'] = type('Options', (), options)
    attrs['_options'] = ModelOptions(**options)
    if '__abstract__' not in attrs:
        attrs['__abstract__'] = False
    return super(ModelMeta, mcs).__new__(mcs, class_name, bases, attrs)
def parse_attrs(cls, attrs, decrypt=True, output=True):
    """Parse raw attribute values into their declared field types.

    Skips ``Missing`` sentinels and ``Expression`` values, coerces each
    remaining value with ``field.parse`` when it fails the type check,
    and optionally applies the field's decrypt and output hooks.
    Raises ``ParseError`` when a parsed value still has the wrong type.
    """
    res = {}
    for k, v in iteritems(attrs):
        if isinstance(v, Missing):
            continue  # pragma: no cover
        # TODO
        if isinstance(v, Expression):
            continue
        field = getattr(cls, k)
        # TODO
        if not hasattr(field, 'type'):
            continue
        # None is passed through untouched for noneable fields.
        if v is not None or not field.noneable:
            if not type_checker(field.type, v):
                v = field.parse(v)
                if not type_checker(field.type, v):
                    raise ParseError(
                        'The parsed value of {}.{} is not a {} type: {}. '
                        'Please check the parser of this field'
                        ' or your input data.'.format(
                            cls.__name__, k, field.type, repr(v)
                        )
                    )
            if decrypt:
                v = field.decrypt_func(v) if field.encrypt else v
            if output:
                v = field.output(v) if field.output else v
        res[k] = v
    return res
def type_checker(type_, obj):  # pylint: disable=too-many-return-statements
    """Return True when ``obj`` structurally matches the template ``type_``.

    A template may be a plain class (isinstance check), a one-element
    list (homogeneous element template), a tuple (positional templates,
    lengths must match) or a dict (single key/value template pair via
    ``get_items``). Anything else fails.
    """
    if isinstance(type_, type) and isinstance(obj, type_):
        return True
    template_cls = type(type_)
    if template_cls != type(obj):
        return False
    if template_cls is list:
        # An empty template list means "any list".
        if not type_:
            return isinstance(obj, template_cls)
        elem_template = type_[0]
        return all(type_checker(elem_template, elem) for elem in obj)
    if template_cls is tuple:
        if len(type_) != len(obj):
            return False
        return all(type_checker(t, e) for t, e in zip(type_, obj))
    if template_cls is dict:
        items = get_items(type_)
        # An empty template dict means "any dict".
        if not items:
            return isinstance(obj, template_cls)
        key_template, value_template = items[0]
        return all(
            type_checker(key_template, k) and type_checker(value_template, v)
            for k, v in iteritems(obj)
        )
    return False
def __getstate__(self):
    """Pickle hook: snapshot ``__dict__`` with mutable containers copied.

    Lists, sets and dicts are shallow-copied so later mutation of the
    live object does not leak into the pickled state.
    """
    snapshot = self.__dict__.copy()
    for key, value in iteritems(snapshot):
        if isinstance(value, (list, set, dict)):
            snapshot[key] = type(value)(value)
    return snapshot
def _get_db_values(pairs):
    """Batch-fetch backing-store values for ``(obj, field, version)`` pairs.

    Groups the pairs by migration version, builds the per-version db key
    for each (v0: object uuid, v1: field-scoped key), fetches all keys in
    one ``db_get_multi`` call, and returns values aligned with ``pairs``
    (``missing`` where no key/value was found). Returns None when there
    is nothing to fetch.
    """
    version_groups = defaultdict(list)
    for obj, field, version in pairs:
        assert obj is not None
        assert field is not None
        if isinstance(version, MigrationVersion):
            continue  # pragma: no cover
        version_groups[version].append((obj, field))
    if not version_groups:
        return
    mapping = {}
    for version, _pairs in iteritems(version_groups):
        for obj, field in _pairs:
            if version == 0:
                key = obj.get_finally_uuid()
            elif version == 1:
                key = field._get_db_field_key(obj)
            else:
                continue  # pragma: no cover
            mapping[(obj._olo_get_signature(), field.name, version)] = key
    keys = list(set(mapping.values()))
    if not keys:
        return  # pragma: no cover
    # NOTE(review): ``obj`` here is the last object left over from the
    # loops above — this assumes all objects share the same db. Confirm.
    db = obj._get_db()
    values = db.db_get_multi(keys)
    res = []
    for obj, field, version in pairs:
        key = mapping.get((obj._olo_get_signature(), field.name, version))
        if not key:
            value = missing  # pragma: no cover
        else:
            value = values.get(key, missing)
        res.append(value)
    return res
def decrypt_attrs(cls, attrs):
    """Return a copy of ``attrs`` with every encrypted field decrypted.

    Only names present in both ``attrs`` and ``cls.__encrypted_fields__``
    are touched; the input mapping is never mutated.
    """
    decrypted = dict(attrs)
    for name, field in iteritems(cls.__encrypted_fields__):
        if name not in decrypted:
            continue  # pragma: no cover
        decrypted[name] = field.decrypt_func(decrypted[name])
    return decrypted
def transform_type(obj, type_):  # pylint: disable=too-many-return-statements
    """Coerce ``obj`` to match the target template ``type_``.

    Handles plain classes, ``Enum`` subclasses (by member name), JSON
    round-trips for list/dict, date parsing, tuple literals, callables,
    and — for container templates like ``[int]`` or ``{str: int}`` —
    recursive element-wise transformation.
    Raises TypeError for invalid enum names or untransformable values.
    """
    # JSONLike is a pass-through marker: accept the value as-is.
    if type_ is JSONLike:
        return obj
    if isinstance(type_, type) and isinstance(obj, type_):
        return obj
    if isinstance(type_, type) and issubclass(type_, Enum):
        # Enum coercion is by member *name*, not value.
        if obj not in type_.__members__:
            raise TypeError(f'{obj} is not a valid type of enum {type_}')
        return getattr(type_, obj)
    if type_ is str:
        # py2 leftovers; unreachable on py3 (str is unicode there).
        if isinstance(obj, unicode):
            return obj.encode('utf-8')  # pragma: no cover
        if isinstance(obj, (list, dict)):
            return json.dumps(obj)
        if isinstance(obj, Enum):
            return obj.name
        return type_(obj)
    if type_ is unicode:
        if isinstance(obj, str):  # pragma: no cover
            return obj.decode('utf-8')  # pragma: no cover
        return type_(obj)  # pragma: no cover
    if type_ in (list, dict):
        # Strings are assumed to hold JSON for container targets.
        if isinstance(obj, str_types):
            obj = json.loads(obj)
        if isinstance(obj, type_):
            return obj
        return type_(obj)
    if type_ in (datetime, date):
        obj = dateparser.parse(obj)
        if type_ is date:
            return obj.date()
        return obj
    if type_ is tuple:
        # Tuples serialize via repr, so parse back with literal_eval.
        if isinstance(obj, str_types):
            obj = literal_eval(obj)
        if isinstance(obj, type_):
            return obj
        return tuple(obj)
    if callable(type_):
        # Decimal must be built from str to avoid float artifacts.
        if type_ is Decimal:
            return type_(str(obj))
        return type_(obj)
    # From here on type_ is a container *template*, e.g. [int].
    t = type(type_)
    if t in (list, dict) and isinstance(obj, str_types):
        obj = json.loads(obj)
    if not isinstance(obj, t):
        raise TypeError('{} is not a {} type.'.format(repr(obj), t))
    if isinstance(obj, list):
        return [transform_type(e, type_[0]) for e in obj]
    if isinstance(obj, dict):
        d = {}
        items = get_items(type_)
        kt, vt = items[0]
        for k, v in iteritems(obj):
            k = transform_type(k, kt)
            v = transform_type(v, vt)
            d[k] = v
        return d
    return obj
def _build_report_miss_msg(self, method_name, *args, **kwargs):
    """Render a human-readable cache-miss report for a method call.

    Positional args use ``friendly_repr``; keyword args are sorted for a
    deterministic message. Empty segments are dropped from the join.
    """
    positional = ', '.join(map(friendly_repr, list(args))) if args else ''
    keyword = ', '.join('{}={}'.format(k, friendly_repr(v))
                        for k, v in sorted(iteritems(kwargs)))
    call_args = ', '.join(filter(None, (positional, keyword)))
    return 'Miss cache method invocation: `{}.{}({})`'.format(
        self._model_class.__name__, method_name, call_args)
def detect_table_alias(sql_ast: AST) -> Tuple[AST, Dict[str, str]]:
    """Assign table aliases in ``sql_ast`` and return the alias mapping.

    Delegates to ``_detect_table_alias`` which fills a reverse mapping
    (table -> alias); the returned mapping is inverted (alias -> table),
    built in sorted order for determinism.
    """
    rev_alias_mapping = {}
    rewritten = _detect_table_alias(
        sql_ast, rev_alias_mapping=rev_alias_mapping
    )
    alias_mapping = {
        alias: table
        for table, alias in sorted(iteritems(rev_alias_mapping))
    }
    return rewritten, alias_mapping
def clear_cursors(self):
    """Close every pooled cursor and empty the cursor registry.

    Runs under ``self.lock``; each per-key queue is drained with
    ``get_nowait`` and a racing ``Empty`` is simply ignored.
    """
    with self.lock:
        for _, cursor_queue in iteritems(self.cursors):
            while not cursor_queue.empty():
                try:
                    self.close_cursor(cursor_queue.get_nowait())
                except Empty:
                    pass
        self.cursors.clear()
def __getstate__(self):
    """Pickle hook: return serializable state for a model instance.

    Strips transient bookkeeping (``_dirty_fields``, ``_orig``,
    ``_parsed_data``) and removes pure-db field values from ``_data``,
    since those live outside the SQL row and are re-fetched on demand.
    """
    dct = dict(self.__dict__)
    dct.pop('_dirty_fields', None)
    dct.pop('_orig', None)
    dct.pop('_parsed_data', None)
    # (Removed a redundant `dct = dict(dct)` — dct is already a fresh
    # copy — and a stale comment claiming a tuple is returned.)
    _data = dct.get('_data', {})
    if _data:
        dct['_data'] = {
            k: v
            for k, v in iteritems(_data)
            if k not in self.__db_fields__
        }
    return dct
def __(*args, **kwargs):
    """Wrapper: invoke ``func`` and, when ``toggle`` allows, log a line
    built from ``fmt`` using the call's arguments plus the special
    ``%ret`` entry holding the return value. Only names listed in the
    closed-over ``keys`` are exposed to the format string.
    """
    ret = func(*args, **kwargs)
    if not toggle(*args, **kwargs):
        return ret  # pragma: no cover
    # NOTE(review): ``dict(argspec.defaults)`` assumes ``argspec.defaults``
    # is a mapping (or iterable of pairs); the stdlib ArgSpec stores a
    # plain tuple of values — confirm the project's argspec shape.
    vals = dict(argspec.defaults)
    vals.update(dict(izip(argspec.args, args)))
    vals.update(kwargs)
    vals['%ret'] = ret
    vals = {k: v for k, v in iteritems(vals) if k in keys}
    str_ = fmt.format(**vals)
    logger.log(level, str_)
    return ret
def _split_attrs(cls, attrs, collect_expression=True):
    """Partition ``attrs`` into SQL-field and db-field groups.

    Returns ``(expressions, sql_attrs, db_attrs)``. For every SQL field
    an ``field == deparsed_value`` BinaryExpression is collected (when
    ``collect_expression``), including Expression-valued attrs, which
    are deliberately kept out of ``sql_attrs``.
    """
    expressions = []
    sql_attrs = {}
    db_attrs = {}
    for name, value in iteritems(attrs):
        if name in cls.__db_fields__:
            db_attrs[name] = value
        elif name in cls.__fields__:
            if not isinstance(value, Expression):
                sql_attrs[name] = value
            if collect_expression:
                field = getattr(cls, name)
                deparsed = cls._deparse_attrs({name: value})[name]
                expressions.append(BinaryExpression(field, deparsed, '='))
    return expressions, sql_attrs, db_attrs
def _gen_cache_key(cls, _olo_suffix='_olo_data', **kwargs):
    """Build the deterministic cache key for a query on this model.

    The key embeds the cache prefix, table name, sorted repr'd kwargs
    (with parsed attribute values overlaid) and the cache key version;
    ``order_by`` is excluded from attribute parsing.
    """
    raw_kwargs = dict(kwargs)
    parseable = dict(kwargs)
    parseable.pop('order_by', None)
    raw_kwargs.update(cls._parse_attrs(parseable))
    pairs = ','.join(
        '{}={}'.format(k, repr(v))
        for k, v in sorted(iteritems(raw_kwargs))
    )
    key = '{}:db:{}:({}):{}'.format(
        cls._options.cache_key_prefix,
        cls._get_table_name(),
        pairs,
        cls._options.cache_key_version,
    )
    if _olo_suffix:
        key += ':suffix:%s' % _olo_suffix
    # avoid mc bug
    return key.replace(' ', ' ')
def _split_attrs(
        cls, attrs,
        collect_assignment=True) -> Tuple[List[Assignment], Dict, Dict]:
    """Partition ``attrs`` into SQL-field and db-field groups.

    Returns ``(assignments, sql_attrs, db_attrs)``. For every SQL field
    an ``Assignment(field, deparsed_value)`` is collected (when
    ``collect_assignment``), including Expression-valued attrs, which
    are deliberately kept out of ``sql_attrs``.
    """
    assignments = []
    sql_attrs = {}
    db_attrs = {}
    for name, value in iteritems(attrs):
        if name in cls.__db_fields__:
            db_attrs[name] = value
        elif name in cls.__fields__:
            if not isinstance(value, Expression):
                sql_attrs[name] = value
            if collect_assignment:
                field: Field = getattr(cls, name)
                deparsed = cls._deparse_attrs({name: value})[name]
                assignments.append(Assignment(field, deparsed))
    return assignments, sql_attrs, db_attrs
def _get_base_sql_ast(self, modifier=None, entities=None):
    """Build the base SELECT AST and the table-alias mapping.

    Chooses the FROM section according to the configured join type
    (JOIN / LEFT JOIN / RIGHT JOIN / plain table), runs alias detection
    over it, then renders the selected entities inside the alias
    context. Returns ``(sql_ast, alias_mapping)``.
    """
    entities = self._entities if entities is None else entities
    if self._join:
        table_section = [
            'JOIN',
            ['TABLE', self.table_name],
            ['TABLE', self._join._get_table_name()]
        ]
    elif self._left_join:
        table_section = [
            'LEFT JOIN',
            ['TABLE', self.table_name],
            ['TABLE', self._left_join._get_table_name()]
        ]
    elif self._right_join:
        table_section = [
            'RIGHT JOIN',
            ['TABLE', self.table_name],
            ['TABLE', self._right_join._get_table_name()]
        ]
    else:
        table_section = ['TABLE', self.table_name]
    rev_alias_mapping = {}
    table_section = _detect_table_alias(
        table_section, rev_alias_mapping=rev_alias_mapping)
    # Invert table->alias into alias->table, sorted for determinism.
    alias_mapping = {v: k for k, v in sorted(iteritems(rev_alias_mapping))}
    with table_alias_mapping_context(alias_mapping):
        select_ast = [
            'SERIES',
        ] + [
            e.get_sql_ast() if hasattr(e, 'get_sql_ast') else e
            for e in entities
        ]
        # Unwrap a single nested SERIES to avoid double wrapping.
        if len(select_ast) == 2 and select_ast[1][0] == 'SERIES':
            select_ast = select_ast[1]
        if modifier is not None:
            select_ast = ['MODIFIER', modifier, select_ast]
    sql_ast = ['SELECT']
    sql_ast.append(select_ast)
    sql_ast.append(['FROM', table_section])
    return sql_ast, alias_mapping
def _deparse_attrs(cls, attrs):
    """Convert attribute values to database-storable form.

    Non-Expression values are deparsed via the field when they are not
    already a VALID_TYPES instance, then passed through the field's
    encrypt and input hooks. Raises ``DeparseError`` when deparsing
    still yields an invalid type.
    """
    res = {}
    for k, v in iteritems(attrs):
        field = getattr(cls, k)
        is_field = isinstance(field, Field)
        if not isinstance(v, Expression):
            # None passes through untouched for noneable fields.
            if v is not None or not field.noneable:
                if is_field and not isinstance(v, VALID_TYPES):
                    v = field.deparse(v)
                if is_field and not isinstance(v, VALID_TYPES):
                    raise DeparseError(  # pragma: no cover
                        'The deparsed type of {}.{} is invalid. '
                        'Type: {}; Value: {}. '
                        'Please check the deparser of this field.'.format(
                            cls.__name__, k, type(v), repr(v)))
                v = field.encrypt_func(v) if field.encrypt else v
                v = field.input(v) if field.input else v
        res[k] = v
    return res
def type_checker(type_, obj):  # pylint: disable=too-many-return-statements
    """Return True when ``obj`` structurally matches the template ``type_``.

    Like the plain version but additionally accepts the ``JSONLike``
    wildcard and ``typing`` generic aliases (validated via
    ``check_type``). Container templates (list/tuple/dict) are checked
    element-wise and recursively.
    """
    if type_ is JSONLike:
        return True
    if isinstance(type_, type) and isinstance(obj, type_):
        return True
    try:
        from typing import List
        # NOTE(review): ``List.__class__`` matches typing generic-alias
        # internals; this is version-sensitive — confirm on upgrades.
        if isinstance(type_, List.__class__):
            check_type('', obj, type_)
            return True
    except (TypeError, ImportError):
        pass
    t = type(type_)
    if t != type(obj):
        return False
    if t is list:
        # An empty template list means "any list".
        if not type_:
            return isinstance(obj, t)
        _t = type_[0]
        for e in obj:
            r = type_checker(_t, e)
            if not r:
                return False
        return True
    if t is tuple:
        if len(type_) != len(obj):
            return False
        for i, e in enumerate(obj):
            r = type_checker(type_[i], e)
            if not r:
                return False
        return True
    if t is dict:
        items = get_items(type_)
        # An empty template dict means "any dict".
        if not items:
            return isinstance(obj, t)
        kt, vt = items[0]
        for k, v in iteritems(obj):
            if not type_checker(kt, k) or not type_checker(vt, v):
                return False
        return True
    return False
def _get(self, instance, owner):
    """Descriptor getter for a batch field.

    Resolves values for every entity in the instance's query session in
    one ``self._getter`` call, caches each entity's value via setattr,
    and returns this instance's value (falling back to the field
    default). ``res`` may be a dict keyed by primary key or a list
    aligned with the session's entities.

    Raises:
        AttributeError: when no getter was registered for the field.
    """
    if self._getter is None:
        # Bug fix: the message used to be passed as extra args to
        # AttributeError instead of being formatted into it.
        raise AttributeError('batch field `{}.{}` has no getter!'.format(
            owner.__name__, self.name))
    session = instance._olo_qs
    if session is None:
        entities = [instance]  # pragma: no cover
    else:
        entities = session.entities
    name = self.name
    default = self.get_default()
    res = self._getter(owner, entities)
    entity_mapping = {e._get_singleness_pk_value(): e for e in entities}
    if isinstance(res, dict):
        # Dict result: keyed by primary-key value.
        for pv, item in iteritems(entity_mapping):
            if hasattr(item, '_olo_qs'):
                setattr(item, name, res.get(pv, default))
        return res.get(instance._get_singleness_pk_value(), default)
    if isinstance(res, list):
        # List result: positionally aligned with entities.
        for idx, item in enumerate(entities):
            if hasattr(item, '_olo_qs'):
                try:
                    v = res[idx]
                except IndexError:
                    v = default
                setattr(item, name, v)
        try:
            return res[instance._olo_qs_idx]
        except IndexError:  # pragma: no cover
            return default  # pragma: no cover
    return default  # pragma: no cover
def transform_func(func):
    """Decompile ``func`` into an equivalent lambda source and eval it.

    The result is cached on the function under ``CK_TRANS_RES``.
    Closure variables are materialized into the eval globals so the
    lambda can reference them. Non-function inputs pass through.
    """
    if not isinstance(func, types.FunctionType):
        return func
    res = getattr(func, CK_TRANS_RES, None)
    if res is not None:
        return res
    ast, _, _ = decompile(func)
    FuncTranslator(ast)
    argspec = getargspec(func)
    args = argspec.args
    defaults = argspec.defaults
    varargs = argspec.varargs
    keywords = argspec.varkw
    arg_str = ', '.join(
        filter(None, [
            ', '.join(args),
            ', '.join('{}={}'.format(k, repr(v))
                      for k, v in iteritems(defaults)),
            '*%s' % varargs if varargs else '',
            '**%s' % keywords if keywords else '',
        ]))
    src = 'lambda {}: {}'.format(arg_str, ast.src)
    globals = func.__globals__
    if func.__closure__:
        # Bug fix: ``func.func_code`` / ``func.func_closure`` are the
        # removed Python 2 spellings; this file already uses Python 3
        # (``argspec.varkw``, ``func.__globals__``), so use __code__ and
        # __closure__ to expose free variables to the eval'd lambda.
        globals = dict(
            globals,
            **dict(
                izip(func.__code__.co_freevars,
                     (c.cell_contents for c in func.__closure__))))
    res = eval_src(src, globals=globals)
    setattr(func, CK_TRANS_RES, res)
    return res
def f(self):
    """Closure-based batch resolver (mirrors the batch-field getter).

    Calls the closed-over ``method`` on all entities in this instance's
    query session, caches each entity's value under the closed-over
    ``name`` (falling back to ``default``), and returns this instance's
    value. ``res`` may be a dict keyed by primary key or a list aligned
    with the session's entities.
    """
    session = self._olo_qs
    if session is None:
        entities = [self]  # pragma: no cover
    else:
        entities = session.entities
    res = method(entities)
    entity_mapping = {e._get_singleness_pk_value(): e for e in entities}
    if isinstance(res, dict):
        # Dict result: keyed by primary-key value.
        for pv, item in iteritems(entity_mapping):
            if hasattr(item, '_olo_qs'):
                setattr(item, name, res.get(pv, default))
        return res.get(self._get_singleness_pk_value(), default)
    if isinstance(res, list):
        # List result: positionally aligned with entities.
        for idx, item in enumerate(entities):
            if hasattr(item, '_olo_qs'):
                try:
                    v = res[idx]
                except IndexError:
                    v = default
                setattr(item, name, v)
        try:
            return res[self._olo_qs_idx]
        except IndexError:  # pragma: no cover
            return default  # pragma: no cover
    return default  # pragma: no cover
def _check_validates(self, attrs):
    """Run per-field validators over ``attrs``, then the model hook.

    Fields absent from ``attrs`` (value is the ``missing`` sentinel)
    are skipped; ``olo_validate`` always runs afterwards.
    """
    for field_name, validator in iteritems(self.__validates__):
        value = attrs.get(field_name, missing)
        if value is not missing:
            validator(value)
    self.olo_validate()
def __str__(self):
    """Debug string: ``ClassName(attr=value, ...)`` over ``__dict__``."""
    body = ', '.join(  # pragma: no cover
        '{}={}'.format(attr, friendly_repr(value))
        for attr, value in iteritems(self.__dict__)
    )
    return '{}({})'.format(self.__class__.__name__, body)  # pragma: no cover
    ('=', '!=', '>', '<', '>=', '<=', 'IN', 'IS', 'IS NOT', 'NOT IN'),
    ('BETWEEN', 'CASE'),
    ('AND', '&&'),
    ('OR', '||'),
)
# Flatten the grouped tuple above into operator -> precedence rank;
# groups are reversed so later (lower-binding) groups get lower ranks.
OPERATOR_PRECEDENCES = {
    item: idx
    for idx, items in enumerate(reversed(_OPERATOR_PRECEDENCES))
    for item in items
}
# Negation tables, made symmetric by merging the inverted mapping so a
# lookup works in either direction (e.g. '=' <-> '!=').
UNARY_NEG_OPERATOR = {'-': '+'}
UNARY_NEG_OPERATOR = dict({v: k for k, v in iteritems(UNARY_NEG_OPERATOR)},
                          **UNARY_NEG_OPERATOR)
BINARY_NEG_OPERATOR = {
    'IN': 'NOT IN',
    'IS': 'IS NOT',
    '=': '!=',
    '>': '<=',
    '<': '>='
}
BINARY_NEG_OPERATOR = dict({v: k for k, v in iteritems(BINARY_NEG_OPERATOR)},
                           **BINARY_NEG_OPERATOR)
def update(self, **attrs):  # pylint: disable=too-many-statements
    """Update this instance's fields in the database and in memory.

    Flow: wash/validate attrs, run setter hooks and ``__on_updates__``
    auto-update callables, split into SQL vs db-backed attrs, consult
    the ``_will_update`` hook, execute the UPDATE statement (re-reading
    any Expression-valued columns afterwards), then apply the new values
    locally and schedule commit/after-update work (immediately when the
    db autocommits, otherwise as lazy/rollback handlers).
    Returns True on success, False when nothing was updated or a hook
    vetoed the update.
    """
    self._check_attrs(attrs)
    attrs = self._wash_attrs(attrs)
    if not attrs:
        return False
    if self._orig is None:
        self._set_orig()
    if self.before_update(**attrs) is False:
        self._rollback()
        return False
    # Apply per-field setter hooks.
    for k in self.__setter_fields__:
        v = attrs.get(k, missing)
        if v is missing:
            continue
        f = getattr(self.__class__, k)
        v = f._setter(self, v)
        attrs[k] = v
    db = self._get_db()
    # Auto-update callables (e.g. updated_at); explicit attrs win.
    need_updates = {}
    for k, v in iteritems(self.__on_updates__):
        if k in attrs:
            continue
        try:
            res = v()
        except TypeError:
            res = v(self)
        need_updates[k] = res
    attrs = dict(need_updates, **attrs)
    assignments, sql_attrs, db_attrs = self._split_attrs(attrs)
    sql_attrs = self._validate_attrs(sql_attrs, decrypt=False)
    db_attrs = self._validate_attrs(db_attrs, decrypt=False)
    clean_attrs = dict(sql_attrs, **db_attrs)
    for k in db_attrs:
        # cache old db values
        getattr(self._orig, k, None)
    next_inst = self._clone()
    next_inst.__olo_setstate__(dict(self._data, **clean_attrs))
    can_update = self._orig._will_update(
        next_inst,
        fields=clean_attrs.keys(),
    )
    if can_update is False:
        self._rollback()
        return False
    if assignments:
        expression = self.unique_expression
        if expression is None:
            raise ExpressionError(
                'Cannot update this instance because of '  # noqa pragma: no cover
                'the model has no primary_key '
                'and unique_key')
        sql_ast = [
            'UPDATE',
            ['TABLE', self._get_table_name()],
            [
                'SET',
                ['SERIES'] + [asg.get_sql_ast() for asg in assignments]
            ],
            ['WHERE'] + [expression.get_sql_ast()]
        ]
        with db.transaction():
            db.ast_execute(sql_ast)
        # Expression-valued assignments (e.g. count = count + 1) must be
        # read back from the db to learn their final values.
        dynamic_exps = [
            asg for asg in assignments
            if isinstance(asg.right, Expression)
        ]
        if dynamic_exps:
            keys = list(map(lambda x: x.left.attr_name, dynamic_exps))
            q = self.__class__.query(*keys).filter(**{
                attr_name: getattr(self, attr_name)
                for attr_name in self.__primary_key__
            })
            values = q.first()
            if not isinstance(values, tuple):
                values = [values]
            _attrs = dict(izip(keys, values))
            sql_attrs.update(self._parse_attrs(_attrs))
    before_update.send(self)
    # Apply the final values to the in-memory instance.
    clean_attrs = dict(sql_attrs, **db_attrs)
    self._data.update(clean_attrs)
    for k in clean_attrs:
        self._parsed_data.pop(k, None)
    for k, v in iteritems(db_attrs):
        field = getattr(self.__class__, k)
        field.db_set(self, v)
    _orig = self._orig

    def func():
        # Deferred post-commit work: flush beansdb, fire signals/hooks.
        db.commit_beansdb()
        after_update.send(self)
        self.after_update()
        if _orig is not None:
            self._orig = None
            self._did_update(_orig, fields=chain.from_iterable([
                iterkeys(sql_attrs),
                iterkeys(db_attrs),
            ]))

    def rollback_handler():
        self._rollback()

    if db.autocommit:
        func()
    else:
        db.add_lazy_func(func)
        db.add_rollback_handler(rollback_handler)
    return True
def _dict_to_expressions(model_class, dct):
    """Turn ``{attr: value}`` into a list of ``field == value`` expressions."""
    expressions = []
    for attr_name, value in iteritems(dct):
        expressions.append(getattr(model_class, attr_name) == value)
    return expressions
def _wash_attrs(cls, attrs):
    """Drop entries whose value is the ``missing`` sentinel."""
    washed = {}
    for name, value in iteritems(attrs):
        if value is not missing:
            washed[name] = value
    return washed
def _map_attrs(cls, attrs):
    """Translate attribute names to their field (column) names."""
    mapped = {}  # pragma: no cover
    for attr_name, value in iteritems(attrs):  # pragma: no cover
        mapped[getattr(cls, attr_name).name] = value  # pragma: no cover
    return mapped  # pragma: no cover
def _olo_insert(self):
    """Insert this (new) instance into the database.

    Runs ``before_create`` (supporting both bound and class-level
    forms), validates attrs, builds and executes the INSERT AST with a
    RETURNING clause to capture the generated primary key, stores
    db-backed field values, and schedules commit/after-create work
    (immediately under autocommit, otherwise as lazy/rollback handlers).
    Returns True on success, False when vetoed or not new.
    """
    if not self._olo_is_new:
        return False  # pragma: no cover
    # before_create may be a normal instance method or a class-level
    # callable that takes the attrs explicitly.
    before_create_is_instance_method = getattr(self.before_create, '__self__', None) is self  # noqa pylint: disable=C
    bcr = True
    if before_create_is_instance_method:
        bcr = self.before_create()
    attrs = dict(self._data)
    _, sql_attrs, db_attrs = self._split_attrs(attrs)
    if not before_create_is_instance_method:
        bcr = self.before_create(**attrs)  # pragma: no cover
    # bcr will be none so must compare with False!!!
    if bcr is False:  # noqa
        return False
    self._validate_attrs(attrs, parse=True,
                         decrypt=self._olo_decrypt)
    db = self._get_db()
    assignments, _, _ = self._split_attrs(sql_attrs)
    if assignments:
        fields_ast = ['BRACKET']
        values_ast = ['VALUES']
        for asg in assignments:
            fields_ast.append(['QUOTE', asg.left.name])
            values_ast.append(['VALUE', asg.right])
        pk_name = self.get_singleness_pk_name()
        sql_ast = [
            'INSERT',
            ['TABLE', self._get_table_name()],
            fields_ast,
            values_ast,
            ['RETURNING', pk_name],
        ]
        with db.transaction():
            id_ = db.ast_execute(sql_ast)
        # Capture the generated primary key when the model declares one
        # and the caller did not supply it.
        if (hasattr(self.__class__, pk_name) and
                pk_name in self.__class__.__fields__ and
                pk_name not in self._data):
            self._data[pk_name] = id_
    # need thinking
    self._extend_missing_data()
    for k, v in iteritems(db_attrs):
        field = getattr(self.__class__, k)
        field.db_set(self, v)
    self._olo_is_new = False

    def rollback_handler():
        self._olo_is_new = True

    def func():
        # Deferred post-commit work: flush beansdb, fire signals/hooks.
        db.commit_beansdb()
        after_insert.send(self)
        if getattr(self.after_create, '__self__', None) is self:
            self.after_create()
        else:
            self.after_create(self)  # pragma: no cover pylint: disable=E

    if db.autocommit:
        func()
    else:
        db.add_lazy_func(func)
        db.add_rollback_handler(rollback_handler)
    return True
def _clone(self):
    """Shallow-copy this instance, giving db-field values their own copies.

    The instance itself is copied with ``copy``; values of pure-db
    fields in ``_data`` are additionally copied so the clone does not
    share mutable containers with the original.
    """
    cloned = copy(self)
    for key, value in iteritems(self._data):
        if key in self.__db_fields__:
            cloned._data[key] = copy(value)
    return cloned