def zip2bytes(compressed):
    """
    Decompress gzip data.

    :param compressed: either a file-like object (anything with a
        ``read`` attribute) or a bytes blob of gzip-compressed data
    :return: a streaming GzipFile when given a file-like object;
        otherwise the decompressed content wrapped by ``safe_size``
    """
    # Already file-like: hand back a lazy, streaming gzip reader
    if hasattr(compressed, "read"):
        return gzip.GzipFile(fileobj=compressed, mode='r')

    from mo_http.big_data import safe_size

    # Raw bytes: buffer them so GzipFile has something to read from,
    # then let safe_size() decide how to hold the decompressed output
    buffered = BytesIO(compressed)
    decompressor = gzip.GzipFile(fileobj=buffered, mode='r')
    return safe_size(decompressor)
def read(self, key):
    """
    Fetch the object stored under `key` and return its content as a
    unicode string, transparently decompressing `.zip` and `.gz` keys.

    :param key: storage key; its filename suffix selects decompression
    :return: utf8-decoded content, or None when nothing was read
    :raises: via Log.error when the underlying read fails
    """
    source = self.get_meta(key)
    try:
        # safe_size presumably buffers the stream (possibly to disk for
        # large payloads) - TODO confirm against mo_http.big_data
        json = safe_size(source)
    except Exception as e:
        Log.error(READ_ERROR, e)
    # NOTE(review): `== None` (not `is None`) appears deliberate in this
    # codebase so Null-like objects also match - do not "fix" to `is`
    if json == None:
        return None
    # pick decompression by key suffix
    if source.key.endswith(".zip"):
        json = _unzip(json)
    elif source.key.endswith(".gz"):
        json = convert.zip2bytes(json)
    return json.decode("utf8")
def all_content(self):
    """
    Return the full response body, caching it on first access.

    When the body is not already materialized, it is streamed from the
    raw connection through safe_size() - presumably to avoid holding a
    huge payload fully in memory (TODO confirm). File-like cached
    results are rewound before being returned.
    """
    # response.content WILL LEAK MEMORY (?BECAUSE OF PYPY"S POOR HANDLING OF GENERATORS?)
    # THE TIGHT, SIMPLE, LOOP TO FILL blocks PREVENTS THAT LEAK
    if self._content is not False:
        # content was already materialized by someone else; adopt it
        # (False is the "not loaded" sentinel here, distinct from None)
        self._cached_content = self._content
    elif self._cached_content is None:
        def read(size):
            # stream from the raw connection while it is still open
            if self.raw._fp.fp is not None:
                return self.raw.read(amt=size, decode_content=True)
            else:
                # connection exhausted: release it and signal EOF
                self.close()
                return None

        self._cached_content = safe_size(Data(read=read))
    if hasattr(self._cached_content, "read"):
        # safe_size may hand back a file-like object; rewind for caller
        self._cached_content.seek(0)
    return self._cached_content
def read_bytes(self, key):
    """
    Return the raw stored content for `key`.

    :param key: storage key to fetch
    :return: the object's content, buffered via safe_size()
    """
    metadata = self.get_meta(key)
    return safe_size(metadata)