def read(self, size):
    """Return the next buffered piece of data.

    Pops entries from ``self.data`` in FIFO order.  A ``None`` entry is a
    sentinel that simulates a read timeout and raises
    ``green.ChunkReadTimeout``.  An exhausted buffer yields ``''``.
    The *size* argument is accepted for interface compatibility but
    ignored.
    """
    if not self.data:
        return ''
    item = self.data.pop(0)
    if item is None:
        raise green.ChunkReadTimeout()
    return item
def get_next_part(parts_iter):
    """Get the next part of the body.

    NOTE: for the moment only return one part (single range only).

    Pulls the next ``(start, end, length, headers, part)`` tuple from
    ``parts_iter[0]`` under a ``CHUNK_TIMEOUT`` guard.  On timeout the
    iteration is terminated by raising ``StopIteration`` (caught by the
    caller).
    """
    while True:
        try:
            with green.ChunkReadTimeout(CHUNK_TIMEOUT):
                next_part = next(parts_iter[0])
        except green.ChunkReadTimeout:
            # TODO recover
            raise StopIteration
        start, end, length, headers, part = next_part
        return (start, end, length, headers, part)
def iter_from_resp(part):
    """Yield the body of *part*, recovering from another source on timeout.

    Generator closure: relies on ``source``, ``parts_iter``, ``read_size``,
    ``make_iter_from_resp``, ``get_next_part``, ``close_source``, ``logger``
    and ``sleep`` from the enclosing scope — presumably a download method;
    confirm against the surrounding file.  ``bytes_consumed`` tracks how
    much has already been delivered so a recovery can resume at the right
    offset.
    """
    bytes_consumed = 0
    count = 0
    buf = ''
    while True:
        try:
            # Read one chunk under the configured timeout.
            with green.ChunkReadTimeout(self.read_timeout):
                data = part.read(READ_CHUNK_SIZE)
                count += 1
                buf += data
        except green.ChunkReadTimeout:
            try:
                # Tell upper layer how far we got, so it can re-range.
                self.recover(bytes_consumed)
            except (exc.UnsatisfiableRange, ValueError):
                raise
            except exc.EmptyByteRange:
                # we are done already
                break
            # Un-yielded bytes are stale after a failed read: drop them.
            buf = ''
            # find a new source to perform recovery
            new_source, new_chunk = self._get_source()
            if new_source:
                logger.warn("Retrying from another source")
                close_source(source[0])
                # switch source
                source[0] = new_source
                parts_iter[0] = make_iter_from_resp(source[0])
                try:
                    _j, _j, _j, _j, part = get_next_part()
                except StopIteration:
                    # failed to recover
                    # we did our best
                    return
            else:
                # no valid source found to recover
                raise
        else:
            # discard bytes
            # (self.discard_bytes: leading bytes the caller asked to skip)
            if buf and self.discard_bytes:
                if self.discard_bytes < len(buf):
                    buf = buf[self.discard_bytes:]
                    bytes_consumed += self.discard_bytes
                    self.discard_bytes = 0
                else:
                    self.discard_bytes -= len(buf)
                    bytes_consumed += len(buf)
                    buf = ''
            # no data returned
            # flush out buffer
            if not data:
                if buf:
                    bytes_consumed += len(buf)
                    yield buf
                    buf = ''
                break
            # buffer to read_size
            if read_size is not None:
                # Yield fixed-size slices; the remainder stays buffered.
                while len(buf) >= read_size:
                    read_d = buf[:read_size]
                    buf = buf[read_size:]
                    yield read_d
                    bytes_consumed += len(read_d)
            else:
                yield buf
                bytes_consumed += len(buf)
                buf = ''
            # avoid starvation by forcing sleep()
            # every once in a while
            if count % 10 == 0:
                sleep()
def iter_from_resp(self, source, parts_iter, part, chunk):
    """Yield the body of *part*, recovering from another chunk on error.

    Generator.  ``source`` and ``parts_iter`` are single-element lists so
    the current connection and its parts iterator can be swapped in place
    during recovery.  ``bytes_consumed`` tracks how much has already been
    delivered so ``self.recover()`` can resume at the right offset.
    When ``self.perfdata`` is set, per-chunk download timings are
    accumulated under the ``'rawx'`` key.
    """
    bytes_consumed = 0
    count = 0
    buf = b''
    if self.perfdata is not None:
        rawx_perfdata = self.perfdata.setdefault('rawx', dict())
        chunk_url = chunk['url']
    while True:
        try:
            # Read one chunk under the configured timeout.
            with green.ChunkReadTimeout(self.read_timeout):
                data = part.read(READ_CHUNK_SIZE)
                count += 1
                buf += data
        except (green.ChunkReadTimeout, IOError) as crto:
            try:
                # Tell upper layer how far we got, so it can re-range.
                self.recover(bytes_consumed)
            except (exc.UnsatisfiableRange, ValueError):
                raise
            except exc.EmptyByteRange:
                # we are done already
                break
            # Un-yielded bytes are stale after a failed read: drop them.
            buf = b''
            # find a new source to perform recovery
            new_source, new_chunk = self._get_source()
            if new_source:
                self.logger.warn(
                    "Failed to read from %s (%s), "
                    "retrying from %s (reqid=%s)",
                    chunk, crto, new_chunk, self.reqid)
                close_source(source[0], self.logger)
                # switch source
                source[0] = new_source
                chunk = new_chunk
                parts_iter[0] = make_iter_from_resp(source[0])
                try:
                    _j, _j, _j, _j, part = \
                        self.get_next_part(parts_iter)
                except StopIteration:
                    # failed to recover
                    # we did our best
                    return
            else:
                self.logger.warn("Failed to read from %s (%s, reqid=%s)",
                                 chunk, crto, self.reqid)
                # no valid source found to recover
                raise
        else:
            # discard bytes
            # (self.discard_bytes: leading bytes the caller asked to skip)
            if buf and self.discard_bytes:
                if self.discard_bytes < len(buf):
                    buf = buf[self.discard_bytes:]
                    bytes_consumed += self.discard_bytes
                    self.discard_bytes = 0
                else:
                    self.discard_bytes -= len(buf)
                    bytes_consumed += len(buf)
                    buf = b''
            # no data returned
            # flush out buffer
            if not data:
                if buf:
                    bytes_consumed += len(buf)
                    yield buf
                    buf = b''
                break
            # If buf_size is defined, yield bounded data buffers
            if self.buf_size is not None:
                # Yield fixed-size slices; the remainder stays buffered.
                while len(buf) >= self.buf_size:
                    read_d = buf[:self.buf_size]
                    buf = buf[self.buf_size:]
                    yield read_d
                    bytes_consumed += len(read_d)
            else:
                yield buf
                bytes_consumed += len(buf)
                buf = b''
            # avoid starvation by yielding
            # every once in a while
            if count % 10 == 0:
                eventlet_yield()
    # Account the wall-clock download time for this chunk.
    # NOTE(review): only reached via ``break`` — early ``return`` and
    # raised exceptions skip this accounting; confirm that is intended.
    if self.perfdata is not None:
        download_end = monotonic_time()
        key = 'download.' + chunk_url
        rawx_perfdata[key] = rawx_perfdata.get(key, 0.0) \
            + download_end - source[0].download_start