def __init__(self, *args, **kwargs):
    """Initialize the writer and build the FLV concatenator.

    When the stream carries metadata, its duration is forwarded to the
    concatenator and the metadata is re-emitted as a leading script tag.
    """
    super().__init__(*args, **kwargs)
    metadata = self.stream.metadata
    if metadata:
        duration = metadata.value.get("duration")
        tags = [Tag(TAG_TYPE_SCRIPT, timestamp=0, data=metadata)]
    else:
        duration = None
        tags = []
    self.concater = FLVTagConcat(tags=tags, duration=duration, flatten_timestamps=True)
def __init__(self, reader, *args, **kwargs):
    """Initialize the writer, mapping HDS session options onto the
    segmented-writer keyword arguments, then set up the FLV concatenator."""
    options = reader.stream.session.options
    for kwarg, option in (("retries", "hds-segment-attempts"),
                          ("threads", "hds-segment-threads"),
                          ("timeout", "hds-segment-timeout")):
        kwargs[kwarg] = options.get(option)
    SegmentedStreamWriter.__init__(self, reader, *args, **kwargs)

    metadata = self.stream.metadata
    duration = metadata.value.get("duration") if metadata else None
    # Re-emit the stream metadata as a leading script tag when present.
    tags = [Tag(TAG_TYPE_SCRIPT, timestamp=0, data=metadata)] if metadata else []
    self.concater = FLVTagConcat(tags=tags, duration=duration, flatten_timestamps=True)
class UHSStreamWriter(SegmentedStreamWriter):
    """Downloads UHS chunks and concatenates them into one FLV stream."""

    def __init__(self, *args, **kwargs):
        SegmentedStreamWriter.__init__(self, *args, **kwargs)
        # Sync FLV headers across chunks so the output is one continuous stream.
        self.concater = FLVTagConcat(tags=[], flatten_timestamps=True, sync_headers=True)

    def fetch(self, chunk, retries=None):
        """Download a chunk, sleeping until it is scheduled to be available.

        Retries recursively on StreamError until ``retries`` is exhausted.
        Returns the HTTP response, or None when closed / out of retries.
        """
        if not retries or self.closed:
            return
        try:
            now = datetime.datetime.now(tz=utc)
            if chunk.available_at > now:
                time_to_wait = (chunk.available_at - now).total_seconds()
                log.debug("Waiting for chunk: {fname} ({wait:.01f}s)".format(
                    fname=chunk.num, wait=time_to_wait))
                sleep_until(chunk.available_at)
            return self.session.http.get(chunk.url, timeout=self.timeout, exception=StreamError)
        except StreamError as err:
            # BUG FIX: the stdlib logger formats extra args with %-style, not
            # str.format; "{0}"/"{1}" placeholders were never interpolated.
            log.error("Failed to open chunk %s: %s", chunk.num, err)
            return self.fetch(chunk, retries - 1)

    def write(self, chunk, res, chunk_size=8192):
        """Feed the chunk's FLV tags through the concatenator into the buffer."""
        try:
            for data in self.concater.iter_chunks(buf=res.content, skip_header=False):
                self.reader.buffer.write(data)
                if self.closed:
                    return
            else:
                # BUG FIX: %-style lazy args instead of {}-style placeholders.
                log.debug("Download of chunk %s complete", chunk.num)
        except IOError as err:
            # BUG FIX: %-style lazy args instead of {}-style placeholders.
            log.error("Failed to read chunk %s: %s", chunk.num, err)
class UHSStreamWriter(SegmentedStreamWriter):
    """Downloads UHS chunks and concatenates them into one FLV stream."""

    def __init__(self, *args, **kwargs):
        SegmentedStreamWriter.__init__(self, *args, **kwargs)
        # Sync FLV headers across chunks so the output is one continuous stream.
        self.concater = FLVTagConcat(tags=[], flatten_timestamps=True, sync_headers=True)

    def fetch(self, chunk, retries=None):
        """Download a chunk, sleeping until it is scheduled to be available.

        Retries recursively on StreamError until ``retries`` is exhausted.
        Returns the HTTP response, or None when closed / out of retries.
        """
        if not retries or self.closed:
            return
        try:
            now = datetime.datetime.now(tz=utc)
            if chunk.available_at > now:
                time_to_wait = (chunk.available_at - now).total_seconds()
                log.debug("Waiting for chunk: {fname} ({wait:.01f}s)".format(
                    fname=chunk.num, wait=time_to_wait))
                sleep_until(chunk.available_at)
            return self.session.http.get(chunk.url, timeout=self.timeout, exception=StreamError)
        except StreamError as err:
            # BUG FIX: the stdlib logger formats extra args with %-style, not
            # str.format; "{0}"/"{1}" placeholders were never interpolated.
            log.error("Failed to open chunk %s: %s", chunk.num, err)
            return self.fetch(chunk, retries - 1)

    def write(self, chunk, res, chunk_size=8192):
        """Feed the chunk's FLV tags through the concatenator into the buffer."""
        try:
            for data in self.concater.iter_chunks(buf=res.content, skip_header=False):
                self.reader.buffer.write(data)
                if self.closed:
                    return
            else:
                # BUG FIX: %-style lazy args instead of {}-style placeholders.
                log.debug("Download of chunk %s complete", chunk.num)
        except IOError as err:
            # BUG FIX: %-style lazy args instead of {}-style placeholders.
            log.error("Failed to read chunk %s: %s", chunk.num, err)
class UHSStreamWriter(SegmentedStreamWriter):
    """Concatenates downloaded UHS chunks into a single FLV stream."""

    def __init__(self, *args, **kwargs):
        SegmentedStreamWriter.__init__(self, *args, **kwargs)
        self.concater = FLVTagConcat(sync_headers=True, flatten_timestamps=True)

    def fetch(self, chunk, retries=None):
        """Fetch one chunk over HTTP, retrying on StreamError.

        A non-zero chunk offset is passed as the ``start`` query parameter.
        """
        if self.closed or not retries:
            return
        query = {"start": chunk.offset} if chunk.offset else {}
        try:
            return http.get(chunk.url,
                            timeout=self.timeout,
                            params=query,
                            exception=StreamError)
        except StreamError as err:
            self.logger.error("Failed to open chunk {0}: {1}", chunk.num, err)
            return self.fetch(chunk, retries - 1)

    def write(self, chunk, res, chunk_size=8192):
        """Write the chunk's FLV tags to the buffer; skip the FLV header
        only for chunks fetched without an offset."""
        try:
            tag_iter = self.concater.iter_chunks(buf=res.content,
                                                 skip_header=not chunk.offset)
            for data in tag_iter:
                self.reader.buffer.write(data)
                if self.closed:
                    break
            else:
                self.logger.debug("Download of chunk {0} complete", chunk.num)
        except IOError as err:
            self.logger.error("Failed to read chunk {0}: {1}", chunk.num, err)
class UHSStreamWriter(SegmentedStreamWriter):
    """Concatenates downloaded UHS chunks into a single FLV stream."""

    def __init__(self, *args, **kwargs):
        SegmentedStreamWriter.__init__(self, *args, **kwargs)
        self.concater = FLVTagConcat(flatten_timestamps=True, sync_headers=True)

    def fetch(self, chunk, retries=None):
        """Fetch one chunk over HTTP, retrying on StreamError.

        A non-zero chunk offset is passed as the ``start`` query parameter.
        """
        if not retries or self.closed:
            return
        try:
            params = {}
            if chunk.offset:
                params["start"] = chunk.offset
            return http.get(chunk.url,
                            timeout=self.timeout,
                            params=params,
                            exception=StreamError)
        except StreamError as err:
            self.logger.error("Failed to open chunk {0}: {1}", chunk.num, err)
            return self.fetch(chunk, retries - 1)

    def write(self, chunk, res, chunk_size=8192):
        """Write the chunk's FLV tags to the buffer.

        BUG FIX: ``skip_header`` was ``chunk.offset`` (a raw offset value),
        inverting the intent relative to the sibling implementation which
        passes ``not chunk.offset`` — i.e. skip the header only for chunks
        fetched without an offset.
        """
        try:
            for data in self.concater.iter_chunks(buf=res.content,
                                                  skip_header=not chunk.offset):
                self.reader.buffer.write(data)
                if self.closed:
                    break
            else:
                self.logger.debug("Download of chunk {0} complete", chunk.num)
        except IOError as err:
            self.logger.error("Failed to read chunk {0}: {1}", chunk.num, err)
def __init__(self, *args, **kwargs):
    """Initialize the segmented writer and create an FLV concatenator
    that syncs headers and flattens timestamps across chunks."""
    SegmentedStreamWriter.__init__(self, *args, **kwargs)
    concater = FLVTagConcat(sync_headers=True, flatten_timestamps=True, tags=[])
    self.concater = concater
def __init__(self, *args, **kwargs):
    """Initialize the segmented writer and create an FLV concatenator
    that syncs headers and flattens timestamps across chunks."""
    SegmentedStreamWriter.__init__(self, *args, **kwargs)
    concater = FLVTagConcat(sync_headers=True, flatten_timestamps=True)
    self.concater = concater
def __init__(self, *args, **kwargs):
    # Delegate straight to FLVTagConcat's initializer; this class adds no
    # extra state of its own. The explicit base-class call (rather than
    # super()) pins the target initializer regardless of MRO.
    FLVTagConcat.__init__(self, *args, **kwargs)
class HDSStreamWriter(SegmentedStreamWriter):
    """Downloads HDS (F4V) fragments and concatenates their FLV payloads
    into a single continuous FLV stream written to the reader's buffer."""

    def __init__(self, reader, *args, **kwargs):
        # Segment download behaviour (retries / threads / timeout) comes
        # from session options.
        options = reader.stream.session.options
        kwargs["retries"] = options.get("hds-segment-attempts")
        kwargs["threads"] = options.get("hds-segment-threads")
        kwargs["timeout"] = options.get("hds-segment-timeout")
        SegmentedStreamWriter.__init__(self, reader, *args, **kwargs)

        duration, tags = None, []
        if self.stream.metadata:
            duration = self.stream.metadata.value.get("duration")
            # Re-emit the stream metadata as a leading FLV script tag.
            tags = [Tag(TAG_TYPE_SCRIPT, timestamp=0, data=self.stream.metadata)]
        self.concater = FLVTagConcat(tags=tags, duration=duration, flatten_timestamps=True)

    def fetch(self, fragment, retries=None):
        """Request one fragment; retries recursively on StreamError.

        Returns a streaming HTTP response, or None when closed / retries
        are exhausted.
        """
        if self.closed or not retries:
            return
        try:
            request_params = self.stream.request_params.copy()
            params = request_params.pop("params", {})
            # NOTE(review): the "g" query parameter is dropped before the
            # fragment request — presumably it only applies to manifest
            # requests; confirm against the stream's request construction.
            params.pop("g", None)
            return self.session.http.get(fragment.url,
                                         stream=True,
                                         timeout=self.timeout,
                                         exception=StreamError,
                                         params=params,
                                         **request_params)
        except StreamError as err:
            log.error(f"Failed to open fragment {fragment.segment}-{fragment.fragment}: {err}")
            return self.fetch(fragment, retries - 1)

    def write(self, fragment, res, chunk_size=8192):
        # Wrap the response's chunk iterator in a file-like object for the
        # F4V parser, then extract and write the fragment's payload.
        fd = StreamIOIterWrapper(res.iter_content(chunk_size))
        self.convert_fragment(fragment, fd)

    def convert_fragment(self, fragment, fd):
        """Parse the fragment as F4V, pull the mdat box payload and feed it
        through the FLV concatenator into the output buffer."""
        mdat = None
        try:
            f4v = F4V(fd, raw_payload=True)
            # Fast forward to mdat box
            for box in f4v:
                if box.type == "mdat":
                    mdat = box.payload.data
                    break
        except F4VError as err:
            log.error(f"Failed to parse fragment {fragment.segment}-{fragment.fragment}: {err}")
            return

        if not mdat:
            log.error(f"No MDAT box found in fragment {fragment.segment}-{fragment.fragment}")
            return

        try:
            # skip_header=True: the concatenator emits a single FLV header
            # for the whole stream, so per-fragment headers are dropped.
            for chunk in self.concater.iter_chunks(buf=mdat, skip_header=True):
                self.reader.buffer.write(chunk)
                if self.closed:
                    break
            else:
                log.debug(f"Download of fragment {fragment.segment}-{fragment.fragment} complete")
        except IOError as err:
            if "Unknown tag type" in str(err):
                # Unparseable tags usually mean DRM; stop the stream entirely.
                log.error("Unknown tag type found, this stream is probably encrypted")
                self.close()
                return
            log.error(f"Error reading fragment {fragment.segment}-{fragment.fragment}: {err}")