Example #1
def _prefetch_db_data(obj):
    if not obj:
        return  # pragma: no cover
    session = getattr(obj, '_olo_qs', None)
    if session is None:
        return
    entities = session.entities
    if len(entities) < 2:
        return
    qs_idx = getattr(obj, '_olo_qs_idx', None)
    qs_idx = 0 if not isinstance(qs_idx, int_types) else qs_idx
    if qs_idx < 1:
        return
    pairs = []
    first = entities[0]
    for entity in entities[qs_idx:]:
        need_feed = (set(DbField._get_data(first)) -
                     set(DbField._get_data(entity)))
        for attr_name in need_feed:
            field = entity._olo_get_db_field(attr_name)
            version = _get_db_field_version(field, entity)
            pairs.append((entity, field, version))
    db_values = _get_db_values(pairs)
    if not db_values:
        return
    for (entity, field, version), db_value in izip(pairs, db_values):
        if db_value is missing:
            continue
        value = _get_value_from_db_value(db_value, version, field)
        data = DbField._get_data(entity)
        data[field.attr_name] = _process_value(value, field)
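Example #1 collects an (entity, field, version) triple for every attribute that still lacks data, fetches all the values in one batched call, and then zips the results back onto the triples in order, skipping the `missing` sentinel. A minimal standalone sketch of that collect / batch-fetch / zip-back shape, with plain dicts and made-up names (batch_fetch, prefetch, MISSING) standing in for olo's entities, fields and _get_db_values:

MISSING = object()  # sentinel for "no value came back"

def batch_fetch(keys):
    # pretend backend: one value per requested key, MISSING for unknown keys
    store = {'a': 1, 'b': 2}
    return [store.get(k, MISSING) for k in keys]

def prefetch(records):
    # collect (record, key) pairs whose data is still absent
    pairs = [(rec, k) for rec in records
             for k in rec['wanted'] if k not in rec['data']]
    values = batch_fetch([k for _, k in pairs])   # one batched call
    for (rec, key), value in zip(pairs, values):  # zip results back in order
        if value is MISSING:
            continue
        rec['data'][key] = value

records = [{'wanted': ['a', 'b', 'x'], 'data': {'a': 1}}]
prefetch(records)
print(records[0]['data'])   # {'a': 1, 'b': 2}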
Example #2
def BUILD_CONST_KEY_MAP(decompiler, length):
    keys = decompiler.stack.pop()
    assert isinstance(keys, ast.Const)
    keys = [ast.Const(key) for key in keys.value]
    values = decompiler.pop_items(length)
    pairs = list(izip(keys, values))
    return ast.Dict(pairs)
Example #3
def BUILD_MAP(decompiler, length):
    if sys.version_info < (3, 5):
        return ast.Dict(())
    data = decompiler.pop_items(2 * length)  # noqa [key1, value1, key2, value2, ...]
    it = iter(data)
    pairs = list(izip(it, it))  # [(key1, value1), (key2, value2), ...]
    return ast.Dict(pairs)
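The `it = iter(data); izip(it, it)` trick in Example #3 pairs adjacent items of a flat list because both zip arguments advance the same iterator. A quick standalone illustration with plain zip, outside olo:

data = ['k1', 'v1', 'k2', 'v2']
it = iter(data)
print(list(zip(it, it)))             # [('k1', 'v1'), ('k2', 'v2')]
print(dict(zip(*[iter(data)] * 2)))  # {'k1': 'v1', 'k2': 'v2'}, the same idiom in one line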
Example #4
def CALL_FUNCTION_KW(decompiler, argc):
    if sys.version_info < (3, 6):
        return decompiler.CALL_FUNCTION(argc, star2=decompiler.stack.pop())
    keys = decompiler.stack.pop()
    assert isinstance(keys, ast.Const)
    keys = keys.value
    values = decompiler.pop_items(argc)
    assert len(keys) <= len(values)
    args = values[:-len(keys)]
    for key, value in izip(keys, values[-len(keys):]):
        args.append(ast.Keyword(key, value))
    return decompiler._call_function(args)
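Example #4 splits the popped values into positional arguments and keyword arguments by zipping the keyword names against the last len(keys) values. The same splitting logic with plain tuples (the values are made up, not real decompiler stack items):

keys = ('b', 'c')
values = [1, 2, 3]                              # positional first, keyword values last
positional = values[:-len(keys)]                # [1]
keyword = dict(zip(keys, values[-len(keys):]))  # {'b': 2, 'c': 3}
print(positional, keyword)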
Example #5
def decompile(x):
    cells = {}
    if isinstance(x, types.CodeType):
        codeobject = x
    elif isinstance(x, types.GeneratorType):
        codeobject = x.gi_frame.f_code
    elif isinstance(x, types.FunctionType):
        codeobject = x.func_code if PY2 else x.__code__
        if PY2:
            if x.func_closure:
                cells = dict(izip(codeobject.co_freevars, x.func_closure))
        else:
            if x.__closure__:
                cells = dict(izip(codeobject.co_freevars, x.__closure__))
    else:
        throw(TypeError)
    key = get_codeobject_id(codeobject)
    result = ast_cache.get(key)
    if result is None:
        decompiler = Decompiler(codeobject)
        result = decompiler.ast, decompiler.external_names
        ast_cache.set(key, result)
    return result + (cells, )
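Example #5 recovers a function's closure by zipping the free-variable names in co_freevars with the cell objects in __closure__ (func_code/func_closure on Python 2). A small standalone illustration using the Python 3 attribute names:

def make_adder(n):
    def add(x):
        return x + n
    return add

add5 = make_adder(5)
cells = dict(zip(add5.__code__.co_freevars, add5.__closure__))
print({name: cell.cell_contents for name, cell in cells.items()})  # {'n': 5}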
Example #6
        def __(*args, **kwargs):
            ret = func(*args, **kwargs)

            if not toggle(*args, **kwargs):
                return ret  # pragma: no cover

            vals = dict(argspec.defaults)
            vals.update(dict(izip(argspec.args, args)))
            vals.update(kwargs)
            vals['%ret'] = ret
            vals = {k: v for k, v in iteritems(vals) if k in keys}
            str_ = fmt.format(**vals)
            logger.log(level, str_)
            return ret
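Example #6 reconstructs the call's name-to-value mapping by seeding defaults, zipping the declared parameter names with the positional arguments that were actually passed, and layering the keyword arguments on top. A standalone sketch of that layering using inspect.getfullargspec (the decorator, toggle, and logging pieces are left out; greet is a made-up function):

import inspect

def greet(name, greeting='hi', punctuation='!'):
    return '{} {}{}'.format(greeting, name, punctuation)

spec = inspect.getfullargspec(greet)
args, kwargs = ('world', 'hello'), {'punctuation': '?'}

vals = dict(zip(spec.args[-len(spec.defaults):], spec.defaults)) if spec.defaults else {}
vals.update(zip(spec.args, args))   # positional values override defaults, name by name
vals.update(kwargs)                 # explicit keywords override both
print(vals)   # {'greeting': 'hello', 'punctuation': '?', 'name': 'world'}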
Example #7
    def _iter_wrap_rv(self, rv):
        from olo.model import ModelMeta

        entity_count = len(self._entities)
        raw = self._raw

        producers = []
        idx = -1

        def make_field_producer(idx, v):
            def producer(item):
                if raw:
                    return item[idx]
                model = v.get_model()
                attrs = model._parse_attrs({v.attr_name: item[idx]})
                return attrs[v.attr_name]

            return producer

        for v in self._entities:
            idx += 1

            if isinstance(v, ModelMeta):
                fields_count = len(v.__fields__)
                producers.append((
                    lambda idx, v, fields_count: lambda item: v._olo_instantiate(**dict(
                        izip(v.__sorted_fields__, item[idx:idx + fields_count])
                    )))(idx, v, fields_count))
                idx += fields_count - 1
                continue

            if isinstance(v, Field):
                producers.append(make_field_producer(idx, v))
                continue

            producers.append((lambda idx, v: lambda item: item[idx])(idx, v))

        session = QuerySession()

        for idx, item in enumerate(rv):
            new_item = tuple(imap(lambda f: f(item), producers))  # noqa pylint: disable=W
            if entity_count == 1:
                new_item = new_item[0]

            session.add_entity(new_item)

        for entity in session.entities:
            yield entity
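In Example #7 each model entity consumes a contiguous slice of the flat result row, and that slice is zipped with the model's sorted field names to build keyword arguments for instantiation. The slicing-and-zipping step on its own, with made-up field names and row values:

user_fields = ('id', 'name')
post_fields = ('id', 'title', 'user_id')
row = (1, 'alice', 7, 'hello', 1)

user_kwargs = dict(zip(user_fields, row[:len(user_fields)]))
post_kwargs = dict(zip(post_fields, row[len(user_fields):]))
print(user_kwargs)  # {'id': 1, 'name': 'alice'}
print(post_kwargs)  # {'id': 7, 'title': 'hello', 'user_id': 1}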
Example #8
def transform_func(func):
    if not isinstance(func, types.FunctionType):
        return func

    res = getattr(func, CK_TRANS_RES, None)
    if res is not None:
        return res

    ast, _, _ = decompile(func)
    FuncTranslator(ast)

    argspec = inspect.getargspec(func)
    args = argspec.args
    defaults = argspec.defaults or []
    l = len(defaults)
    if l != 0:
        args = args[: -l]
        defaults = zip(argspec.args[-l:], defaults)
    varargs = argspec.varargs
    keywords = argspec.keywords
    arg_str = ', '.join(filter(None, [
        ', '.join(args),
        ', '.join(
            '{}={}'.format(k, repr(v))
            for k, v in defaults
        ),
        '*%s' % varargs if varargs else '',
        '**%s' % keywords if keywords else '',
    ]))

    src = 'lambda {}: {}'.format(
        arg_str,
        ast.src
    )

    globals = func.__globals__
    if func.__closure__:
        globals = dict(globals, **dict(
            izip(
                func.__code__.co_freevars,
                (c.cell_contents for c in func.__closure__)
            )
        ))
    res = eval_src(src, globals=globals)
    setattr(func, CK_TRANS_RES, res)
    return res
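Example #8 rebuilds a signature string for the generated lambda: parameters with defaults are zipped with their default values, and *varargs/**keywords are appended when present. A standalone sketch of just that string reconstruction, using inspect.getfullargspec rather than the deprecated getargspec (target is a made-up function):

import inspect

def target(a, b, c=3, *rest, **extra):
    return a

spec = inspect.getfullargspec(target)
n_defaults = len(spec.defaults or ())
plain = spec.args[:-n_defaults] if n_defaults else spec.args
defaulted = zip(spec.args[-n_defaults:], spec.defaults) if n_defaults else []

arg_str = ', '.join(filter(None, [
    ', '.join(plain),
    ', '.join('{}={!r}'.format(k, v) for k, v in defaulted),
    '*%s' % spec.varargs if spec.varargs else '',
    '**%s' % spec.varkw if spec.varkw else '',
]))
print(arg_str)   # a, b, c=3, *rest, **extra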
Example #9
def _extend_missing_data(self):
    missing_fields = [
        getattr(self.__class__, k) for k in self.__fields__
        if k not in self._data
    ]
    if not missing_fields:
        return  # pragma: no cover
    pk_dict = self._get_pk_dict()
    if not pk_dict:
        raise ORMError('No pk dict!!!')  # pragma: no cover
    values = self.__class__.query(*missing_fields).filter(
        **pk_dict).first()
    if len(missing_fields) == 1 and values is not None and not isinstance(
            values, list):  # noqa
        values = [values]  # pragma: no cover
    if values:
        self._data.update(
            dict(izip(map(lambda f: f.attr_name, missing_fields), values)))
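Example #9 zips the attribute names of the still-missing fields with the row the query returns; when only one field was selected, the query yields a bare value, so it is wrapped in a list first to keep the zip aligned. That normalization in isolation, with made-up names and values:

missing_names = ['age']
row = 42                       # single-column query returns a bare value, not a row
if len(missing_names) == 1 and not isinstance(row, (list, tuple)):
    row = [row]
print(dict(zip(missing_names, row)))   # {'age': 42}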
Example #10
    def get_multi(self, idents, filter_none=True):
        def fallback():
            self._report_miss('get_multi', idents, filter_none=filter_none)
            return self._model_class._get_multi(idents,
                                                filter_none=filter_none)

        if not self._cache_client:
            return fallback()

        pk_name = self._model_class.get_singleness_pk_name()

        unique_keys = get_unique_keys(self._model_class)

        # gen_keys
        keys = []
        for ident in idents:
            if not isinstance(ident, dict):
                kwargs = {pk_name: ident}
            else:
                if get_str_key(ident) not in unique_keys:
                    raise CacheError('{} is not a unique key. '
                                     'The unique key is {}'.format(
                                         repr(tuple(ident)),
                                         repr(tuple(unique_keys))))
                kwargs = ident
            # pylint: disable=E1102
            key = self._gen_cache_key(**kwargs)
            # pylint: enable=E1102
            keys.append(key)

        key_mapping = dict(izip(map(str, idents), keys))
        mapping = self._cache_client.get_multi(keys)

        new_idents = []
        for ident in idents:
            key = key_mapping.get(str(ident))
            value = mapping.get(key)
            if value is None:
                new_idents.append(ident)

        items = self._model_class._get_multi(new_idents, filter_none=False)
        new_mapping = {}
        for item, ident in izip(items, new_idents):
            key = key_mapping.get(str(ident))
            if item is None:
                new_mapping[key] = missing
            else:
                new_mapping[key] = mapping[key] = item._data

        if new_mapping:
            self._cache_client.set_multi(new_mapping)

        session = QuerySession()
        model_class = self._model_class

        for ident in idents:
            key = key_mapping.get(str(ident))
            item = mapping.get(key)

            if isinstance(item, dict):
                item = model_class._olo_instantiate(_olo_decrypt=False, **item)
            else:
                item = None

            if item is None and filter_none:
                continue

            session.add_entity(item)

        self.add_handler(session.entities)

        return session.entities
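Example #10 follows a classic multi-get cache flow: build one cache key per ident, read them all at once, fetch only the misses from the model layer, write the fresh rows (plus a `missing` marker for rows that do not exist) back to the cache, and assemble the results in the original ident order. A deliberately simplified sketch of that flow, with plain dicts standing in for the cache client and the database; every name here is illustrative, not olo's API:

MISSING = object()                     # negative-cache marker, like olo's `missing`
cache = {}                             # stands in for the cache client
db = {1: {'id': 1}, 2: {'id': 2}}      # stands in for the model's _get_multi source

def get_multi(idents):
    keys = ['user:%s' % i for i in idents]
    found = {k: cache[k] for k in keys if k in cache}   # multi-get from cache
    misses = [(i, k) for i, k in zip(idents, keys) if k not in found]
    fresh = {k: db.get(i, MISSING) for i, k in misses}  # fetch only the misses
    cache.update(fresh)                                 # write back, incl. negative hits
    found.update(fresh)
    return [found[k] for k in keys if found[k] is not MISSING]

print(get_multi([1, 2, 3]))          # [{'id': 1}, {'id': 2}]; ident 3 is now negatively cached
print(cache['user:3'] is MISSING)    # True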
Example #11
    def update(self, **attrs):
        # pylint: disable=too-many-statements
        self._check_attrs(attrs)

        attrs = self._wash_attrs(attrs)

        if not attrs:
            return False

        if self._orig is None:
            self._set_orig()

        if self.before_update(**attrs) is False:
            self._rollback()
            return False

        for k in self.__setter_fields__:
            v = attrs.get(k, missing)
            if v is missing:
                continue
            f = getattr(self.__class__, k)
            v = f._setter(self, v)
            attrs[k] = v

        db = self._get_db()

        need_updates = {}
        for k, v in iteritems(self.__on_updates__):
            if k in attrs:
                continue

            try:
                res = v()
            except TypeError:
                res = v(self)

            need_updates[k] = res

        attrs = dict(need_updates, **attrs)
        assignments, sql_attrs, db_attrs = self._split_attrs(attrs)

        sql_attrs = self._validate_attrs(sql_attrs, decrypt=False)
        db_attrs = self._validate_attrs(db_attrs, decrypt=False)
        clean_attrs = dict(sql_attrs, **db_attrs)

        for k in db_attrs:
            # cache old db values
            getattr(self._orig, k, None)

        next_inst = self._clone()
        next_inst.__olo_setstate__(dict(self._data, **clean_attrs))
        can_update = self._orig._will_update(
            next_inst,
            fields=clean_attrs.keys(),
        )
        if can_update is False:
            self._rollback()
            return False

        if assignments:
            expression = self.unique_expression
            if expression is None:
                raise ExpressionError(
                    'Cannot update this instance because '  # noqa pragma: no cover
                    'the model has no primary_key '
                    'or unique_key')

            sql_ast = [
                'UPDATE', ['TABLE', self._get_table_name()],
                [
                    'SET',
                    ['SERIES'] + [asg.get_sql_ast() for asg in assignments]
                ], ['WHERE'] + [expression.get_sql_ast()]
            ]

            with db.transaction():
                db.ast_execute(sql_ast)

            dynamic_exps = [
                asg for asg in assignments
                if isinstance(asg.right, Expression)
            ]
            if dynamic_exps:
                keys = list(map(lambda x: x.left.attr_name, dynamic_exps))
                q = self.__class__.query(*keys).filter(
                    **{
                        attr_name: getattr(self, attr_name)
                        for attr_name in self.__primary_key__
                    })
                values = q.first()
                if not isinstance(values, tuple):
                    values = [values]
                _attrs = dict(izip(keys, values))
                sql_attrs.update(self._parse_attrs(_attrs))

        before_update.send(self)

        clean_attrs = dict(sql_attrs, **db_attrs)
        self._data.update(clean_attrs)
        for k in clean_attrs:
            self._parsed_data.pop(k, None)

        for k, v in iteritems(db_attrs):
            field = getattr(self.__class__, k)
            field.db_set(self, v)

        _orig = self._orig

        def func():
            db.commit_beansdb()
            after_update.send(self)
            self.after_update()
            if _orig is not None:
                self._orig = None
                self._did_update(_orig,
                                 fields=chain.from_iterable([
                                     iterkeys(sql_attrs),
                                     iterkeys(db_attrs),
                                 ]))

        def rollback_handler():
            self._rollback()

        if db.autocommit:
            func()
        else:
            db.add_lazy_func(func)
            db.add_rollback_handler(rollback_handler)

        return True
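Example #11 leans on the dict(base, **overrides) merge idiom in several places (dict(need_updates, **attrs), dict(sql_attrs, **db_attrs)): keys supplied through the keyword expansion take precedence over the base mapping. The precedence in isolation, with made-up attribute names:

need_updates = {'updated_at': '2020-01-01', 'version': 2}
attrs = {'name': 'new name', 'version': 7}
print(dict(need_updates, **attrs))
# {'updated_at': '2020-01-01', 'version': 7, 'name': 'new name'}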