def loadBefore(self, oid, tid):
    """Load last state for a given oid before a given tid

    :param str oid: Object ID
    :param str tid: Transaction timestamp
    :return: Object and its serial number and following serial number
    :rtype: tuple
    """
    if self.debug:
        # Snapshot cache membership *before* the base call, so we only
        # log/count objects this call actually had to download.
        in_cache = oid in self._cache.current
    data, serial, tend = self.base.loadBefore(oid, tid)
    out_data = self._untransform(data)
    if self.debug and not in_cache:
        # Lazy %-args: the message is only formatted when DEBUG logging
        # is actually enabled.
        logging.debug(
            "id:%s, type:%s, transform: %s->%s",
            encode_hex(oid), debug_loads(out_data), len(data), len(out_data))
        self._debug_download_size += len(data)
        self._debug_download_count += 1
    return out_data, serial, tend
def loadBulk(self, oids, returns=True):
    """Load multiple objects at once

    :param list oids: Iterable of oids to load
    :param bool returns: When False, we don't return objects but store
        them in cache
    :return: List of (object, serial) tuples, or None when *returns* is
        False
    :rtype: list
    """
    if self.debug:
        logging.debug("Loading: " + ", ".join([encode_hex(oid) for oid in oids]))
        # Membership before the bulk call: objects already cached were
        # not downloaded at all.
        in_cache_before = {oid: oid in self._cache.current for oid in oids}
    base_result = self.base.loadBulk(oids)
    if self.debug:
        # Membership after the call: objects now cached arrived with this
        # single bulk request rather than as individual downloads.
        in_cache_after = {oid: oid in self._cache.current for oid in oids}
    if returns or self.debug:
        # Guard the empty case: `zip(*[])` cannot be unpacked into two names.
        if base_result:
            datas, serials = zip(*base_result)
        else:
            datas, serials = (), ()
        # Materialize the transformed payloads. A bare map() iterator
        # would be exhausted by the zip() below, leaving the debug loop
        # with nothing to iterate.
        datas_out = [self._untransform(data) for data in datas]
        out = list(zip(datas_out, serials))
        if self.debug:
            if datas:
                # The bulk request itself counts as one download.
                self._debug_download_count += 1
            for data, out_data, oid in zip(datas, datas_out, oids):
                logline_prefix = ""
                if not in_cache_before[oid]:
                    self._debug_download_size += len(data)
                    if not in_cache_after[oid]:
                        # Still not cached: fetched individually.
                        self._debug_download_count += 1
                    else:
                        logline_prefix = "(from bulk) "
                    logging.debug(
                        "%sid:%s, type:%s, transform: %s->%s",
                        logline_prefix, encode_hex(oid),
                        debug_loads(out_data), len(data), len(out_data))
        if returns:
            return out
def loadBefore(self, oid, tid):
    """Return the last state of *oid* committed strictly before *tid*.

    :param str oid: Object ID
    :param str tid: Transaction timestamp
    :return: Object data, its serial number and the following serial number
    :rtype: tuple
    """
    # Record cache membership up front; only a cache miss is worth
    # logging and accounting as a download.
    was_cached = True
    if self.debug:
        was_cached = oid in self._cache.current
    raw, serial, tend = self.base.loadBefore(oid, tid)
    payload = self._untransform(raw)
    if self.debug and not was_cached:
        logging.debug("id:%s, type:%s, transform: %s->%s" % (
            encode_hex(oid),
            debug_loads(payload),
            len(raw),
            len(payload),
        ))
        self._debug_download_size += len(raw)
        self._debug_download_count += 1
    return payload, serial, tend
def loadBulk(self, oids, returns=True):
    """Load multiple objects at once

    :param list oids: Iterable of oids to load
    :param bool returns: When False, we don't return objects but store
        them in cache
    :return: List of (object, serial) tuples, or None when *returns* is
        False
    :rtype: list
    """
    if self.debug:
        # Membership before the bulk call: objects already cached were
        # not downloaded at all.
        in_cache_before = {oid: oid in self._cache.current for oid in oids}
    base_result = self.base.loadBulk(oids)
    if self.debug:
        # Membership after the call: objects now cached arrived with this
        # single bulk request rather than as individual downloads.
        in_cache_after = {oid: oid in self._cache.current for oid in oids}
    if returns or self.debug:
        # Guard the empty case: `zip(*[])` cannot be unpacked into two names.
        if base_result:
            datas, serials = zip(*base_result)
        else:
            datas, serials = (), ()
        # Materialize the transformed payloads: a bare map()/zip()
        # iterator pair would be consumed by the debug loop below (or by
        # the caller), leaving the other empty. The docstring promises a
        # list, as the sibling loadBulk implementation returns.
        datas_out = [self._untransform(data) for data in datas]
        out = list(zip(datas_out, serials))
        if self.debug:
            if datas:
                # The bulk request itself counts as one download.
                self._debug_download_count += 1
            for data, out_data, oid in zip(datas, datas_out, oids):
                logline_prefix = ""
                if not in_cache_before[oid]:
                    self._debug_download_size += len(data)
                    if not in_cache_after[oid]:
                        # Still not cached: fetched individually.
                        self._debug_download_count += 1
                    else:
                        logline_prefix = "(from bulk) "
                    # encode_hex, not the Python-2-only oid.encode("hex"),
                    # matching the other debug log sites in this file.
                    logging.debug(
                        "%sid:%s, type:%s, transform: %s->%s",
                        logline_prefix, encode_hex(oid),
                        debug_loads(out_data), len(data), len(out_data))
        if returns:
            return out
def load(self, oid, version=''):
    """Load an object by oid.

    :param str oid: Object ID
    :param version: Version to load (when we have version control)
    :return: Object data and its serial number
    :rtype: tuple
    """
    # Only a cache miss is logged and accounted as a download, so note
    # membership before delegating to the base storage.
    cached_already = oid in self._cache.current if self.debug else True
    raw, serial = self.base.load(oid, version)
    payload = self._untransform(raw)
    if self.debug and not cached_already:
        logging.debug("id:%s, type:%s, transform: %s->%s" % (
            encode_hex(oid),
            debug_loads(payload),
            len(raw),
            len(payload),
        ))
        self._debug_download_size += len(raw)
        self._debug_download_count += 1
    return payload, serial