def _iter_wrap_rv(self, rv):
    """Lazily wrap raw result rows from *rv* into entities.

    For each entry in ``self._entities`` a *producer* callable is built
    that extracts that entity's columns from a raw row:

    * ``ModelMeta`` entity -> instantiates the model from its slice of
      consecutive columns;
    * ``Field`` entity     -> returns the parsed attribute value (or the
      raw column when ``self._raw`` is true);
    * anything else        -> returns the column as-is.

    Yields one entity per row (unwrapped from the tuple when the query
    selects a single entity), after passing them through a
    ``QuerySession`` so its entity handling applies.
    """
    from olo.model import ModelMeta

    entity_count = len(self._entities)
    raw = self._raw
    producers = []
    idx = -1

    def make_field_producer(idx, v):
        # Factory binds idx/v per entity, avoiding late-binding closures.
        def producer(item):
            if raw:
                return item[idx]
            model = v.get_model()
            attrs = model._parse_attrs({
                v.attr_name: item[idx]
            })
            return attrs[v.attr_name]
        return producer

    def make_model_producer(idx, v, fields_count):
        # BUG FIX: the previous inline lambda bound only ``idx`` and ``v``;
        # ``fields_count`` was captured late from the enclosing loop, so
        # with multiple model entities every producer sliced rows using
        # the LAST model's field count. Bind it explicitly here.
        def producer(item):
            return v._olo_instantiate(**dict(
                izip(v.__fields__, item[idx: idx + fields_count])  # pylint: disable=W
            ))
        return producer

    for v in self._entities:
        idx += 1
        if isinstance(v, ModelMeta):
            fields_count = len(v.__fields__)
            producers.append(make_model_producer(idx, v, fields_count))
            # A model consumes ``fields_count`` consecutive columns.
            idx += fields_count - 1
            continue
        if isinstance(v, Field):
            producers.append(make_field_producer(idx, v))
            continue
        producers.append((
            lambda idx, v: lambda item: item[idx]
        )(idx, v))

    session = QuerySession()
    for idx, item in enumerate(rv):
        new_item = tuple(imap(lambda f: f(item), producers))  # pylint: disable=W
        if entity_count == 1:
            # Single-entity queries yield the bare value, not a 1-tuple.
            new_item = new_item[0]
        session.add_entity(new_item)

    for entity in session.entities:
        yield entity
def get_by(self, *args, **kwargs):
    """Look up a single instance by keyword criteria through the cache.

    Falls back to a direct database query (recording a cache miss) when
    no cache client is configured, when positional args are given, or
    when the keyword set is neither a unique key nor an index key.
    Negative lookups are cached as the ``missing`` sentinel so repeated
    misses do not keep hitting the database.
    """

    def miss_and_query():
        # Record the cache miss, then query the database directly.
        self._report_miss('get_by', *args, **kwargs)
        return self._model_class._get_by(*args, **kwargs)

    if not self._cache_client or args:
        return miss_and_query()  # pragma: no cover

    model = self._model_class
    str_key = get_str_key(kwargs)
    index_keys = get_index_keys(model)
    unique_keys = get_unique_keys(model)

    if str_key not in unique_keys:
        if str_key in index_keys:
            # Indexed (non-unique) key: reuse the multi getter, capped at one.
            found = self.get_multi_by(limit=1, **kwargs)
            return found[0] if found else None
        return miss_and_query()  # pylint: disable=E1102

    key = self._gen_cache_key(**kwargs)  # pylint: disable=E1102
    data = self._cache_client.get(key)
    if data is None:
        # Cache miss: query the DB and cache either the row data or the
        # ``missing`` sentinel for a negative result.
        res = model._get_by(**kwargs)
        self._cache_client.set(key, missing if res is None else res._data)
    elif isinstance(data, dict):
        res = model._olo_instantiate(_olo_decrypt=False, **data)
    else:
        # Cached ``missing`` sentinel: known-absent row.
        res = None

    session = QuerySession()
    session.add_entity(res)
    self.add_handler(res)
    return res
def get_multi(self, idents, filter_none=True):
    """Fetch multiple instances by their idents through the cache.

    Each ident is either a primary-key value or a dict forming a unique
    key. Cache misses are fetched from the database in one batch and
    written back (negative results as the ``missing`` sentinel). Results
    are returned in the order of *idents*; when *filter_none* is true,
    absent rows are dropped instead of yielding ``None``.

    Raises:
        CacheError: when a dict ident is not a unique key of the model.
    """
    def fallback():
        self._report_miss('get_multi', idents, filter_none=filter_none)
        return self._model_class._get_multi(idents, filter_none=filter_none)

    if not self._cache_client:
        return fallback()

    pk_name = self._model_class.get_singleness_pk_name()
    unique_keys = get_unique_keys(self._model_class)

    # gen_keys: one cache key per ident, preserving order.
    keys = []
    for ident in idents:
        if not isinstance(ident, dict):
            kwargs = {pk_name: ident}
        else:
            if get_str_key(ident) not in unique_keys:
                raise CacheError('{} is not a unique key. '
                                 'The unique key is {}'.format(
                                     repr(tuple(ident)),
                                     repr(tuple(unique_keys))))
            kwargs = ident
        # pylint: disable=E1102
        key = self._gen_cache_key(**kwargs)
        # pylint: enable=E1102
        keys.append(key)

    # str(ident) is the lookup token joining idents to their cache keys.
    key_mapping = dict(izip(map(str, idents), keys))
    mapping = self._cache_client.get_multi(keys)

    new_idents = []
    for ident in idents:
        key = key_mapping.get(str(ident))
        if mapping.get(key) is None:
            new_idents.append(ident)

    # BUG FIX: only hit the database when the cache actually missed
    # something — previously a fully-cached call still issued an empty
    # `_get_multi` query.
    if new_idents:
        items = self._model_class._get_multi(new_idents, filter_none=False)
    else:
        items = []

    new_mapping = {}
    for item, ident in izip(items, new_idents):
        key = key_mapping.get(str(ident))
        if item is None:
            # Cache the negative result so the miss isn't retried.
            new_mapping[key] = missing
        else:
            new_mapping[key] = mapping[key] = item._data
    if new_mapping:
        self._cache_client.set_multi(new_mapping)

    session = QuerySession()
    model_class = self._model_class
    for ident in idents:
        key = key_mapping.get(str(ident))
        item = mapping.get(key)
        if isinstance(item, dict):
            item = model_class._olo_instantiate(_olo_decrypt=False, **item)
        else:
            # ``missing`` sentinel or absent entry -> no row.
            item = None
        if item is None and filter_none:
            continue
        session.add_entity(item)
    self.add_handler(session.entities)
    return session.entities