def notify_playable(self):
    """Mark the stream playable and fire a single VODEVENT_START callback.

    Builds the event parameters from storage state: a completed download
    yields a filename and no stream; an in-progress one yields a
    MovieTransportStreamWrapper and no filename.  Callback errors are
    logged and swallowed.
    """
    self.prebufprogress = 1.0
    self.playable = True
    # Only notify the user once.
    # NOTE(review): usernotified is only read here, never set in this
    # block -- presumably set by a caller or sibling method; verify.
    if self.usernotified:
        return
    mimetype = self.get_mimetype()
    complete = self.storage.is_finished()
    if complete:
        # Fully downloaded: hand over the on-disk file directly.
        stream = None
        filename = self.storage.get_dest_path()
    else:
        # Still downloading: expose a wrapper streaming from this transport.
        stream = MovieTransportStreamWrapper(self)
        filename = None
    try:
        self.vodeventfunc(self.fileinfo, VODEVENT_START, {'complete': complete,
         'filename': filename,
         'mimetype': mimetype,
         'stream': stream,
         'length': self.storage.get_content_length(),
         'bitrate': self.bitrate})
    except:
        log_exc()
def refill_buffer(self):
    """Top up the output buffer from storage under the data_ready condition.

    Reads sequentially from self.outbufpos until the buffer holds
    max_buffer_size() bytes or the content end is reached, notifying
    data_ready waiters after each chunk.  No-op while prebuffering, when
    complete/stopped, or when done().  Errors are logged; the condition
    lock is always released.
    """
    self.data_ready.acquire()
    try:
        # Only refill while actively playing an unfinished stream.
        if self.prebuffering or self._complete or not self.playing or self.done():
            return
        mx = self.max_buffer_size()
        length = self.storage.get_content_length()
        while self.outbuflen < mx and self.outbufpos < length:
            # Ask storage for at most the remaining buffer capacity.
            numbytes = mx - self.outbuflen
            if DEBUG:
                log(self.log_prefix + 'refill_buffer: read from storage: pos', self.outbufpos, 'numbytes', numbytes, 'outbuflen', self.outbuflen, 'mx', mx)
            data = self.storage.read(self.outbufpos, numbytes)
            if not data:
                # Nothing available at this position yet; retry later.
                if DEBUG:
                    log(self.log_prefix + 'refill_buffer: no data available: pos', self.outbufpos)
                break
            datalen = len(data)
            self.outbuf.append((self.outbufpos, data))
            self.stat_outbuf.append((self.outbufpos, datalen))
            self.outbuflen += datalen
            self.outbufpos += datalen
            # Wake any reader blocked on data_ready.
            self.data_ready.notify()
            if DEBUG:
                log(self.log_prefix + 'refill_buffer: got data from storage: datalen', datalen, 'outbufpos', self.outbufpos, 'outbuflen', self.outbuflen)
    except:
        log_exc()
    finally:
        self.data_ready.release()
def _request(self):
    """Perform one blocking HTTP GET against the seed and record the result.

    Stores the body in self.received_data and the HTTP status in
    self.connection_status.  On failure the error text goes to self.error
    and the connection is closed and re-created (or set to None if that
    fails too).  Always schedules request_finished on the raw server.
    """
    # Pre-load codec modules -- presumably so IDNA/punycode URL handling
    # works even if the import machinery is unavailable later; confirm.
    import encodings.ascii
    import encodings.punycode
    import encodings.idna
    self.error = None
    self.received_data = None
    try:
        self.connection.request('GET', self.url, None, {'User-Agent': VERSION})
        r = self.connection.getresponse()
        self.connection_status = r.status
        self.received_data = r.read()
    except Exception as e:
        log_exc()
        self.error = 'error accessing http seed: ' + str(e)
        try:
            self.connection.close()
        except:
            pass
        # Recreate the connection for the next attempt; give up quietly
        # if even that fails.
        try:
            self.connection = HTTPConnection(self.netloc)
        except:
            self.connection = None
    self.downloader.rawserver.add_task(self.request_finished)
def _request(self):
    """Perform one blocking HTTP GET against the seed and record the result.

    Stores the body in self.received_data and the HTTP status in
    self.connection_status.  On failure the error text goes to self.error
    and the connection is closed and re-created (or set to None if that
    fails too).  Always schedules request_finished on the raw server.
    """
    # Pre-load codec modules -- presumably so IDNA/punycode URL handling
    # works even if the import machinery is unavailable later; confirm.
    import encodings.ascii
    import encodings.punycode
    import encodings.idna
    self.error = None
    self.received_data = None
    try:
        self.connection.request('GET', self.url, None, {'User-Agent': VERSION})
        r = self.connection.getresponse()
        self.connection_status = r.status
        self.received_data = r.read()
    except Exception as e:
        log_exc()
        self.error = 'error accessing http seed: ' + str(e)
        try:
            self.connection.close()
        except:
            pass
        # Recreate the connection for the next attempt; give up quietly
        # if even that fails.
        try:
            self.connection = HTTPConnection(self.netloc)
        except:
            self.connection = None
    self.downloader.rawserver.add_task(self.request_finished)
def error(self, e, url):
    """Report a playback error through the server's configured callbacks.

    Forwards (e, url) to the error callback when one is set, otherwise
    logs the exception; then pushes a human-readable message to the
    status callback if one is set.
    """
    callback = self.server.errorcallback
    if callback is None:
        log_exc()
    else:
        callback(e, url)
    status = self.server.statuscallback
    if status is not None:
        status('Error playing video:' + str(e))
def network_vod_event_callback(self, videoinfo, event, params):
    """Dispatch a VOD event to the user callback stored in *videoinfo*.

    Exceptions raised by the callback are logged and swallowed so the
    network thread keeps running.
    """
    if DEBUG:
        log('lm::network_vod_event_callback: event %s, params %s' % (event, params))
    callback = videoinfo['usercallback']
    try:
        callback(event, params)
    except:
        log_exc()
def refill_buffer(self):
    """Top up the output buffer from storage under the data_ready condition.

    Reads sequentially from self.outbufpos until the buffer holds
    max_buffer_size() bytes or the content end is reached, notifying
    data_ready waiters after each chunk.  No-op while prebuffering, when
    complete/stopped, or when done().  Errors are logged; the condition
    lock is always released.
    """
    self.data_ready.acquire()
    try:
        # Only refill while actively playing an unfinished stream.
        if self.prebuffering or self._complete or not self.playing or self.done():
            return
        mx = self.max_buffer_size()
        length = self.storage.get_content_length()
        while self.outbuflen < mx and self.outbufpos < length:
            # Ask storage for at most the remaining buffer capacity.
            numbytes = mx - self.outbuflen
            if DEBUG:
                log(self.log_prefix + 'refill_buffer: read from storage: pos', self.outbufpos, 'numbytes', numbytes, 'outbuflen', self.outbuflen, 'mx', mx)
            data = self.storage.read(self.outbufpos, numbytes)
            if not data:
                # Nothing available at this position yet; retry later.
                if DEBUG:
                    log(self.log_prefix + 'refill_buffer: no data available: pos', self.outbufpos)
                break
            datalen = len(data)
            self.outbuf.append((self.outbufpos, data))
            self.stat_outbuf.append((self.outbufpos, datalen))
            self.outbuflen += datalen
            self.outbufpos += datalen
            # Wake any reader blocked on data_ready.
            self.data_ready.notify()
            if DEBUG:
                log(self.log_prefix + 'refill_buffer: got data from storage: datalen', datalen, 'outbufpos', self.outbufpos, 'outbuflen', self.outbuflen)
    except:
        log_exc()
    finally:
        self.data_ready.release()
def data_came_in(self, packets):
    """Hand received *packets* to dispersy.

    Empty input is ignored.  Failures are logged and re-raised to the
    caller.
    """
    if not packets:
        return
    try:
        self.dispersy.data_came_in(packets)
    except:
        log_exc()
        raise
def save_torrent_local(self, tdef, checksum):
    """Save *tdef* into the torrent-collecting dir and register it in the DB.

    The file is named <hex infohash>.torrent.  The torrent is then added
    to the NTFY_TORRENTS database with status/filename/checksum extra
    info; DB errors are logged (when DEBUG) and swallowed, and the
    handler is always closed.
    """
    save_name = binascii.hexlify(tdef.get_infohash()) + ".torrent"
    torrent_dir = self.get_torrent_collecting_dir()
    save_path = os.path.join(torrent_dir, save_name)
    if DEBUG:
        log("session::save_torrent_local: save torrent: save_path", save_path, "checksum", binascii.hexlify(checksum))
    # NOTE(review): the return value of tdef.save() is never used.
    torrent_data = tdef.save(save_path)
    extra_info = {"status": "good"}
    extra_info["filename"] = save_name
    extra_info["checksum"] = checksum
    db = self.open_dbhandler(NTFY_TORRENTS)
    if db is None:
        return
    try:
        db.addExternalTorrent(tdef, source="", extra_info=extra_info)
    except:
        if DEBUG:
            log_exc()
    finally:
        self.close_dbhandler(db)
def get_file_handle(self, file, for_write):
    """Return a cached open handle for *file*, (re)opening as needed.

    A cached read handle is upgraded to a write handle ('rb+') by closing
    and reopening; write handles are locked via lock_file and tracked in
    self.whandles.  Raises IOError when the file cannot be (re)opened.
    """
    if self.handles.has_key(file):
        # Handle already cached; upgrade if write access is now required.
        if for_write and not self.whandles.has_key(file):
            self._close(file)
            try:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            except (IOError, OSError) as e:
                if DEBUG:
                    log_exc()
                raise IOError('unable to reopen ' + file + ': ' + str(e))
    else:
        try:
            if for_write:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            else:
                f = self._open(file, 'rb')
                self.handles[file] = f
        except (IOError, OSError) as e:
            if DEBUG:
                log_exc()
            raise IOError('unable to open ' + file + ': ' + str(e))
    return self.handles[file]
def data_came_in(self, connection, s):
    """Feed raw bytes *s* into the message-framing state machine.

    Accumulates bytes until self.next_len is reached, then passes the
    complete chunk to self.next_func, which returns the (length, handler)
    pair for the next message -- or None to drop the connection.
    Handler exceptions are logged, the parser is moved to the read_dead
    state, and the exception is re-raised.
    """
    self.Encoder.measurefunc(len(s))
    while 1:
        if self.closed:
            return
        # Bytes still missing for the current message.
        i = self.next_len - self.buffer.tell()
        if i > len(s):
            # Not enough yet; stash what we have and wait for more.
            self.buffer.write(s)
            return
        self.buffer.write(s[:i])
        s = s[i:]
        m = self.buffer.getvalue()
        self.buffer.reset()
        self.buffer.truncate()
        try:
            x = self.next_func(m)
        except:
            log_exc()
            self.next_len, self.next_func = 1, self.read_dead
            raise
        if x is None:
            if DEBUG:
                print >> sys.stderr, 'encoder: function failed', self.next_func
            self.close()
            return
        self.next_len, self.next_func = x
def data_came_in(self, connection, s):
    """Feed raw bytes *s* into the message-framing state machine.

    Accumulates bytes until self.next_len is reached, then passes the
    complete chunk to self.next_func, which returns the (length, handler)
    pair for the next message -- or None to drop the connection.
    Handler exceptions are logged, the parser is moved to the read_dead
    state, and the exception is re-raised.
    """
    self.Encoder.measurefunc(len(s))
    while 1:
        if self.closed:
            return
        # Bytes still missing for the current message.
        i = self.next_len - self.buffer.tell()
        if i > len(s):
            # Not enough yet; stash what we have and wait for more.
            self.buffer.write(s)
            return
        self.buffer.write(s[:i])
        s = s[i:]
        m = self.buffer.getvalue()
        self.buffer.reset()
        self.buffer.truncate()
        try:
            x = self.next_func(m)
        except:
            log_exc()
            self.next_len, self.next_func = 1, self.read_dead
            raise
        if x is None:
            if DEBUG:
                print >> sys.stderr, 'encoder: function failed', self.next_func
            self.close()
            return
        self.next_len, self.next_func = x
def error(self, e, url):
    """Report a playback error through the server's configured callbacks.

    Forwards (e, url) to the error callback when one is set, otherwise
    logs the exception; then pushes a human-readable message to the
    status callback if one is set.
    """
    callback = self.server.errorcallback
    if callback is None:
        log_exc()
    else:
        callback(e, url)
    status = self.server.statuscallback
    if status is not None:
        status('Error playing video:' + str(e))
def get_file_handle(self, file, for_write):
    """Return a cached open handle for *file*, (re)opening as needed.

    A cached read handle is upgraded to a write handle ('rb+') by closing
    and reopening; write handles are locked via lock_file and tracked in
    self.whandles.  Raises IOError when the file cannot be (re)opened.
    """
    if self.handles.has_key(file):
        # Handle already cached; upgrade if write access is now required.
        if for_write and not self.whandles.has_key(file):
            self._close(file)
            try:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            except (IOError, OSError) as e:
                if DEBUG:
                    log_exc()
                raise IOError('unable to reopen ' + file + ': ' + str(e))
    else:
        try:
            if for_write:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            else:
                f = self._open(file, 'rb')
                self.handles[file] = f
        except (IOError, OSError) as e:
            if DEBUG:
                log_exc()
            raise IOError('unable to open ' + file + ': ' + str(e))
    return self.handles[file]
def load_from_url(url, use_cache = True):
    """Create a TorrentDef from *url* (p2p URL, file URL, or http URL).

    P2P-scheme URLs carry the metainfo in the URL itself.  Other URLs go
    through an in-memory cache keyed by the base64 of the URL; entries
    older than TORRENT_CACHE_EXPIRE are evicted.  file:/// URLs are
    unquoted before fetching.
    """
    if url.startswith(P2PURL_SCHEME):
        # Compact p2p URL: decode metainfo directly, no network fetch.
        metainfo, swarmid = makeurl.p2purl2metainfo(url)
        metainfo['info']['url-compat'] = 1
        t = TorrentDef._create(metainfo)
        return t
    else:
        b64_url = b64encode(url)
        if use_cache:
            if b64_url in TorrentDef.torrent_cache:
                tdef_from_cache = TorrentDef.torrent_cache[b64_url]
                if DEBUG:
                    log('TorrentDef::load_from_url: found in cache: url', url, 'timestamp', tdef_from_cache['timestamp'])
                if tdef_from_cache['timestamp'] < time.time() - TORRENT_CACHE_EXPIRE:
                    if DEBUG:
                        log('TorrentDef::load_from_url: expired, delete from cache')
                    del TorrentDef.torrent_cache[b64_url]
                else:
                    return tdef_from_cache['tdef']
        if url.startswith('file:///'):
            # Local path: undo URL quoting; on failure fall back to the raw URL.
            try:
                url = dunno2unicode(urllib2.unquote(url))
            except:
                log_exc()
        f = urlOpenTimeout(url)
        tdef = TorrentDef._read(f)
        if DEBUG:
            log('TorrentDef::load_from_url: add to cache, url', url)
        # Cache a copy so later mutations of the returned tdef do not
        # poison the cache.
        TorrentDef.torrent_cache[b64_url] = {'tdef': tdef.copy(), 'timestamp': time.time()}
        return tdef
def run(self):
    """Timed-task queue main loop: wait for due tasks and execute them.

    Waits on self.cond until the earliest queued task is due, then runs
    it outside the lock.  The sentinel task 'stop' exits immediately;
    'quit' exits once the queue drains (rescheduling itself after the
    last queued task).  Task exceptions are logged (with the queuing
    stack when DEBUG_STACK) and the loop continues.
    """
    while True:
        task = None
        timeout = None
        flag = False
        self.cond.acquire()
        while True:
            # Sleep until something is queued, or until the current
            # timeout elapses (flag forces one wait after a not-yet-due
            # head task).
            while len(self.queue) == 0 or flag:
                flag = False
                if timeout is None:
                    self.cond.wait()
                else:
                    self.cond.wait(timeout)
            self.queue.sort()
            when, count, task, id = self.queue[0]
            now = time()
            if now < when:
                # Head task not due yet; wait the remaining time.
                timeout = when - now
                if DEBUG or self.debug:
                    log('ttqueue::run: event not due: timeout', timeout, 'task', task)
                flag = True
            else:
                self.queue.pop(0)
                if DEBUG or self.debug:
                    log('ttqueue::run: event due: task', task, 'len(queue)', len(self.queue))
                if DEBUG_STACK:
                    stack = self.callstack.pop(count)
                break
        self.cond.release()
        try:
            if task == 'stop':
                break
            elif task == 'quit':
                if len(self.queue) == 0:
                    break
                else:
                    # Re-queue 'quit' just after the last pending task.
                    when, count, task, id = self.queue[-1]
                    t = when - time() + 0.001
                    self.add_task('quit', t)
            else:
                t = time()
                task()
                if DEBUG or self.debug:
                    log('ttqueue::run: task finished: time', time() - t, 'task', task)
        except:
            log_exc()
            if DEBUG_STACK:
                print >> sys.stderr, '<<<<<<<<<<<<<<<<'
                print >> sys.stderr, 'TASK QUEUED FROM'
                print >> sys.stderr, ''.join(stack)
                print >> sys.stderr, '>>>>>>>>>>>>>>>>'
    if DEBUG:
        log('ttqueue::run: exit loop')
def check_outstanding_requests(self, downloads):
    """Cancel piece requests that can no longer meet their deadline.

    For each active request of each download, skips requests younger
    than min_delay, then -- for pieces in the high-priority range or
    needed for prebuffering -- cancels requests whose estimated download
    time (queued bytes / short-term rate) exceeds the remaining deadline
    (piece_due and/or max_delay, minus offset_delay slack).  Cancellation
    failures are logged.
    """
    if not self.transporter:
        return
    now = time.time()
    cancel_requests = []
    in_high_range = self.videostatus.in_high_range
    playing_mode = self.videostatus.playing and not self.videostatus.paused
    piece_due = self.transporter.piece_due
    # Delay policy depends on whether we are actively playing.
    if playing_mode:
        min_delay, max_delay, offset_delay = self.playing_delay
    else:
        min_delay, max_delay, offset_delay = self.buffering_delay
    if DEBUG:
        log('pps::check_outstanding_requests: num_downloads', len(downloads), 'num_outstanding_requests', len(self.outstanding_requests))
    for download in downloads:
        total_length = 0
        download_rate = download.get_short_term_rate()
        for piece_id, begin, length in download.active_requests:
            try:
                time_request = self.outstanding_requests[piece_id, begin, length]
            except KeyError:
                if DEBUG:
                    log('pps::check_outstanding_requests: not outstanding request: piece', piece_id, 'begin', begin, 'length', length)
                continue
            total_length += length
            if now < time_request + min_delay:
                # Too early to judge this request; give it time to complete.
                if DEBUG:
                    log('pps::check_outstanding_requests: have time to complete: piece', piece_id, 'begin', begin, 'length', length, 'delay', now - time_request, 'min_delay', min_delay, 'now', now, 'time_request', time_request)
                continue
            if in_high_range(piece_id) or self.videostatus.prebuffering and piece_id in self.videostatus.prebuf_needed_pieces:
                if download_rate == 0:
                    # Peer has not delivered anything yet: cancel outright.
                    if DEBUG:
                        log('pps::check_outstanding_requests:cancel: download not started yet for piece', piece_id, 'chunk', begin, 'on', download.ip)
                    cancel_requests.append((piece_id, begin, length))
                else:
                    if playing_mode:
                        time_until_deadline = min(piece_due(piece_id), time_request + max_delay - now)
                    else:
                        time_until_deadline = time_request + max_delay - now
                    time_until_download = total_length / download_rate
                    if time_until_deadline < time_until_download - offset_delay:
                        if DEBUG:
                            # NOTE: the log string below was split across two
                            # physical lines in the mangled source; rejoined.
                            log('pps::check_outstanding_requests:cancel: download speed too slow for piece', piece_id, 'chunk', begin, 'on', download.ip, 'Deadline in', time_until_deadline, 'while estimated download in', time_until_download)
                        cancel_requests.append((piece_id, begin, length))
                    elif DEBUG:
                        log('pps::check_outstanding_requests: no deadline: piece', piece_id, 'begin', begin, 'length', length, 'time_until_deadline', time_until_deadline, 'time_until_download', time_until_download, 'offset_delay', offset_delay)
            elif DEBUG:
                log('pps::check_outstanding_requests: not in high range: piece', piece_id, 'begin', begin, 'length', length)
    if cancel_requests:
        if DEBUG:
            log('pps::check_outstanding_requests: cancel_requests', cancel_requests)
        try:
            self.downloader.cancel_requests(cancel_requests, allowrerequest=False)
        except:
            log_exc()
def run(self):
    """Timed-task queue main loop: wait for due tasks and execute them.

    Waits on self.cond until the earliest queued task is due, then runs
    it outside the lock.  The sentinel task 'stop' exits immediately;
    'quit' exits once the queue drains (rescheduling itself after the
    last queued task).  Task exceptions are logged (with the queuing
    stack when DEBUG_STACK) and the loop continues.
    """
    while True:
        task = None
        timeout = None
        flag = False
        self.cond.acquire()
        while True:
            # Sleep until something is queued, or until the current
            # timeout elapses (flag forces one wait after a not-yet-due
            # head task).
            while len(self.queue) == 0 or flag:
                flag = False
                if timeout is None:
                    self.cond.wait()
                else:
                    self.cond.wait(timeout)
            self.queue.sort()
            when, count, task, id = self.queue[0]
            now = time()
            if now < when:
                # Head task not due yet; wait the remaining time.
                timeout = when - now
                if DEBUG or self.debug:
                    log('ttqueue::run: event not due: timeout', timeout, 'task', task)
                flag = True
            else:
                self.queue.pop(0)
                if DEBUG or self.debug:
                    log('ttqueue::run: event due: task', task, 'len(queue)', len(self.queue))
                if DEBUG_STACK:
                    stack = self.callstack.pop(count)
                break
        self.cond.release()
        try:
            if task == 'stop':
                break
            elif task == 'quit':
                if len(self.queue) == 0:
                    break
                else:
                    # Re-queue 'quit' just after the last pending task.
                    when, count, task, id = self.queue[-1]
                    t = when - time() + 0.001
                    self.add_task('quit', t)
            else:
                t = time()
                task()
                if DEBUG or self.debug:
                    log('ttqueue::run: task finished: time', time() - t, 'task', task)
        except:
            log_exc()
            if DEBUG_STACK:
                print >> sys.stderr, '<<<<<<<<<<<<<<<<'
                print >> sys.stderr, 'TASK QUEUED FROM'
                print >> sys.stderr, ''.join(stack)
                print >> sys.stderr, '>>>>>>>>>>>>>>>>'
    if DEBUG:
        log('ttqueue::run: exit loop')
def fatalerrorfunc(self, data):
    """Handle a fatal error from the legacy core and shut the session down.

    *data* may be a plain (py2) string from the legacy core -- wrapped
    in an ACEStreamLegacyException before being passed to
    set_error_func -- or an exception object, passed through as-is
    (with the current traceback logged).
    """
    log(self.log_prefix + ':fatalerrorfunc called', data)
    if type(data) == StringType:
        log(self.log_prefix + 'LEGACY CORE FATAL ERROR', data)
        print_stack()
        self.set_error_func(ACEStreamLegacyException(data))
    else:
        log_exc()
        self.set_error_func(data)
    self.shutdown()
def SeekDataCallback(self, pos, sid):
    """Seek the input stream registered for *sid* to absolute offset *pos*.

    Returns 0 on success, -1 on any error (the error is logged).
    """
    # The original wrapped the body in a dead 'if True:' block followed
    # by an unreachable 'return -1'; both removed -- behavior unchanged.
    try:
        streaminfo = self.get_inputstream(sid)
        streaminfo['stream'].seek(pos, os.SEEK_SET)
        return 0
    except:
        log_exc()
        return -1
def SeekDataCallback(self, pos, sid):
    """Seek the input stream registered for *sid* to absolute offset *pos*.

    Returns 0 on success, -1 on any error (the error is logged).
    """
    # The original wrapped the body in a dead 'if True:' block followed
    # by an unreachable 'return -1'; both removed -- behavior unchanged.
    try:
        streaminfo = self.get_inputstream(sid)
        streaminfo['stream'].seek(pos, os.SEEK_SET)
        return 0
    except:
        log_exc()
        return -1
def run_torrent_check(self):
    """Periodic task: refresh the check period, reschedule, run one check.

    Reschedules itself on the raw server using the (possibly updated)
    torrent_checking_period before starting a TorrentChecking thread, so
    a failure in the check never breaks the periodic cycle.  Failures
    are logged and forwarded to the non-fatal error handler.
    """
    self.update_torrent_checking_period()
    self.rawserver.add_task(self.run_torrent_check, self.torrent_checking_period)
    try:
        from ACEStream.TrackerChecking.TorrentChecking import TorrentChecking
        t = TorrentChecking()
        t.start()
    except Exception as e:
        log_exc()
        self.rawserver_nonfatalerrorfunc(e)
def get_bitrate_from_metainfo(file, metainfo):
    """Derive a bitrate for *file* (or the whole torrent when None).

    Prefers length/playtime where a playtime is present in the file
    entry, the info dict, or the top-level metainfo; otherwise falls
    back to the Azureus 'Speed Bps' property.  Raises ValueError when
    *file* is not found.  NOTE(review): the playtime/Speed-Bps lookup is
    duplicated between the single-file and multi-file branches.
    """
    info = metainfo['info']
    if file is None:
        bitrate = None
        try:
            playtime = None
            if info.has_key('playtime'):
                playtime = parse_playtime_to_secs(info['playtime'])
            elif 'playtime' in metainfo:
                playtime = parse_playtime_to_secs(metainfo['playtime'])
            elif 'azureus_properties' in metainfo:
                azprop = metainfo['azureus_properties']
                if 'Content' in azprop:
                    content = metainfo['azureus_properties']['Content']
                    if 'Speed Bps' in content:
                        bitrate = float(content['Speed Bps'])
            if playtime is not None:
                # A known playtime overrides any Speed Bps value.
                bitrate = info['length'] / playtime
                if DEBUG:
                    print >> sys.stderr, 'TorrentDef: get_bitrate: Found bitrate', bitrate
        except:
            log_exc()
        return bitrate
    if file is not None and 'files' in info:
        for i in range(len(info['files'])):
            x = info['files'][i]
            # Rebuild the in-torrent path from its components.
            intorrentpath = ''
            for elem in x['path']:
                intorrentpath = os.path.join(intorrentpath, elem)
            bitrate = None
            try:
                playtime = None
                if x.has_key('playtime'):
                    playtime = parse_playtime_to_secs(x['playtime'])
                elif 'playtime' in metainfo:
                    playtime = parse_playtime_to_secs(metainfo['playtime'])
                elif 'azureus_properties' in metainfo:
                    azprop = metainfo['azureus_properties']
                    if 'Content' in azprop:
                        content = metainfo['azureus_properties']['Content']
                        if 'Speed Bps' in content:
                            bitrate = float(content['Speed Bps'])
                if playtime is not None:
                    bitrate = x['length'] / playtime
            except:
                log_exc()
            if intorrentpath == file:
                return bitrate
        raise ValueError('File not found in torrent')
    else:
        raise ValueError('File not found in single-file torrent: ' + file)
def get_bitrate_from_metainfo(file, metainfo):
    """Derive a bitrate for *file* (or the whole torrent when None).

    Prefers length/playtime where a playtime is present in the file
    entry, the info dict, or the top-level metainfo; otherwise falls
    back to the Azureus 'Speed Bps' property.  Raises ValueError when
    *file* is not found.  NOTE(review): the playtime/Speed-Bps lookup is
    duplicated between the single-file and multi-file branches.
    """
    info = metainfo['info']
    if file is None:
        bitrate = None
        try:
            playtime = None
            if info.has_key('playtime'):
                playtime = parse_playtime_to_secs(info['playtime'])
            elif 'playtime' in metainfo:
                playtime = parse_playtime_to_secs(metainfo['playtime'])
            elif 'azureus_properties' in metainfo:
                azprop = metainfo['azureus_properties']
                if 'Content' in azprop:
                    content = metainfo['azureus_properties']['Content']
                    if 'Speed Bps' in content:
                        bitrate = float(content['Speed Bps'])
            if playtime is not None:
                # A known playtime overrides any Speed Bps value.
                bitrate = info['length'] / playtime
                if DEBUG:
                    print >> sys.stderr, 'TorrentDef: get_bitrate: Found bitrate', bitrate
        except:
            log_exc()
        return bitrate
    if file is not None and 'files' in info:
        for i in range(len(info['files'])):
            x = info['files'][i]
            # Rebuild the in-torrent path from its components.
            intorrentpath = ''
            for elem in x['path']:
                intorrentpath = os.path.join(intorrentpath, elem)
            bitrate = None
            try:
                playtime = None
                if x.has_key('playtime'):
                    playtime = parse_playtime_to_secs(x['playtime'])
                elif 'playtime' in metainfo:
                    playtime = parse_playtime_to_secs(metainfo['playtime'])
                elif 'azureus_properties' in metainfo:
                    azprop = metainfo['azureus_properties']
                    if 'Content' in azprop:
                        content = metainfo['azureus_properties']['Content']
                        if 'Speed Bps' in content:
                            bitrate = float(content['Speed Bps'])
                if playtime is not None:
                    bitrate = x['length'] / playtime
            except:
                log_exc()
            if intorrentpath == file:
                return bitrate
        raise ValueError('File not found in torrent')
    else:
        raise ValueError('File not found in single-file torrent: ' + file)
def clear_mp4_metadata_tag(tag, data):
    """Zero out the value of an iTunes-style MP4 metadata item atom.

    Locates *tag* in *data*, validates the enclosing item atom (32-bit
    big-endian size field 4 bytes before the tag) and its 'data' sub-atom
    (size, 'data' fourcc, 8 flag bytes), then overwrites the value bytes
    with NULs.  Returns the modified string, or None when the tag is
    absent or the structure is malformed/truncated.
    """
    try:
        pos = data.find(tag)
        if pos == -1:
            return None
        if DEBUG:
            log('clear_mp4_metadata_tag: tag found: tag', tag, 'pos', pos)
        if pos < 4:
            if DEBUG:
                log('clear_mp4_metadata_tag: truncated data start: tag', tag, 'pos', pos)
            return None
        item_atom_size = data[pos - 4:pos]
        item_atom_size = int(binascii.hexlify(item_atom_size), 16)
        datalen = len(data)
        # BUGFIX: the item atom spans [pos - 4, pos - 4 + item_atom_size);
        # the old check used 'pos - 1' and was off by 3, falsely reporting
        # truncation for atoms ending within 3 bytes of the buffer end.
        # (Consistent with value_end = pos + 4 + data_size below, which
        # equals pos - 4 + item_atom_size when item_atom_size - data_size == 8.)
        if pos - 4 + item_atom_size > datalen:
            if DEBUG:
                log('clear_mp4_metadata_tag: truncated data end: tag', tag, 'pos', pos, 'item_atom_size', item_atom_size, 'datalen', datalen)
            return None
        data_size = data[pos + 4:pos + 8]
        data_size = int(binascii.hexlify(data_size), 16)
        # The item atom must be exactly an 8-byte header plus the 'data' sub-atom.
        if item_atom_size - data_size != 8:
            if DEBUG:
                # BUGFIX: corrected 'sizse' typo in the debug message.
                log('clear_mp4_metadata_tag: size does not match: item_atom_size', item_atom_size, 'data_size', data_size)
            return None
        data_elem = data[pos + 8:pos + 12]
        data_flags = data[pos + 12:pos + 20]
        data_flags = binascii.hexlify(data_flags)
        value_start = pos + 20
        value_end = pos + 4 + data_size
        value = data[value_start:value_end]
        if DEBUG:
            log('clear_mp4_metadata_tag: item_atom_size', item_atom_size, 'data_size', data_size, 'data_elem', data_elem, 'data_flags', data_flags, 'value_start', value_start, 'value_end', value_end)
        if data_elem != 'data' or data_flags != '0000000100000000':
            if DEBUG:
                log('clear_mp4_metadata_tag: malformed data')
            return None
        # Overwrite the value with NUL bytes, preserving total length.
        new_data = data[:value_start] + chr(0) * len(value) + data[value_end:]
        if len(new_data) != datalen:
            if DEBUG:
                log('clear_mp4_metadata_tag: modified data size mismatch: datalen', datalen, 'newdatalen', len(new_data))
            return None
        return new_data
    except:
        log_exc()
        return None
def preallocate_file(self):
    """Preallocate the file on disk by writing 0xFF blocks up to self.size.

    Starts from the already-written top offset for the path, writes 1 MiB
    chunks with a short sleep between them to avoid starving other
    threads, then (under self.lock) moves temp files into place and, if
    the download is already complete, fires notify_finished.  Errors are
    logged and swallowed.
    """
    try:
        i = 0
        t = time.time()
        path = self.path
        pos = self.tops[path]
        total_write = pos
        size = self.size
        allocsize = 1048576  # 1 MiB per write
        allocbuf = chr(255) * allocsize
        h = self.get_file_handle(path, True)
        h.seek(pos)
        while pos < size:
            if self.closed:
                if DEBUG:
                    log(self.log_prefix + 'preallocate_file: storage is closed')
                return
            e = min(size - pos, allocsize)
            total_write += e
            h.write(allocbuf[:e])
            # NOTE(review): pos advances by the full allocsize even for a
            # final partial chunk; harmless since the loop then exits,
            # but pos may overshoot size.
            pos += allocsize
            if DEBUG:
                if i % 100 == 0:
                    log(self.log_prefix + 'preallocate_file: progress: path', self.path, 'progress', int(total_write / float(size) * 100), 'size', size, 'done', total_write)
            i += 1
            # Yield briefly so other threads are not starved by disk I/O.
            time.sleep(0.01)
        if DEBUG:
            log(self.log_prefix + 'preallocate_file: path', self.path, 'size', self.size, 'written', total_write, 'time', time.time() - t)
        h.flush()
        self.lock.acquire()
        try:
            self.move_temp_files()
            self.file_allocated = True
            if self.amount_left == 0:
                if DEBUG:
                    log(self.log_prefix + 'preallocate_file: download completed: ranges', self.ranges, 'size', self.size)
                self.notify_finished()
        finally:
            self.lock.release()
    except:
        log_exc()
def _auto_close(self):
    """Close this connection if the handshake never completed.

    Completed connections are left alone.  The repexer (if configured
    and the connection is still open) is told about the timeout first;
    its errors are logged and ignored.
    """
    if self.complete:
        return
    if DEBUG:
        log('Encoder.Connection:_auto_close: ', self.get_myip(), self.get_myport(), 'to', self.get_ip(), self.get_port())
    repexer = self.Encoder.repexer
    if repexer and not self.closed:
        try:
            repexer.connection_timeout(self)
        except:
            log_exc()
    self.close()
def _auto_close(self):
    """Close this connection if the handshake never completed.

    Completed connections are left alone.  The repexer (if configured
    and the connection is still open) is told about the timeout first;
    its errors are logged and ignored.
    """
    if self.complete:
        return
    if DEBUG:
        log('Encoder.Connection:_auto_close: ', self.get_myip(), self.get_myport(), 'to', self.get_ip(), self.get_port())
    repexer = self.Encoder.repexer
    if repexer and not self.closed:
        try:
            repexer.connection_timeout(self)
        except:
            log_exc()
    self.close()
def _run(self):
    """Network thread body: start UPnP/multicast and serve until shutdown.

    Exceptions are logged and reported to the session via on_error.  On
    exit (normal or not) the internal tracker state is saved when
    present, UPnP is stopped, the raw server is shut down, and the
    session is notified via on_stop.
    """
    try:
        self.start_upnp()
        self.start_multicast()
        self.multihandler.listen_forever()
    except Exception as e:
        log_exc()
        self.session.on_error(e)
    finally:
        if self.internaltracker is not None:
            self.internaltracker.save_state()
        self.stop_upnp()
        self.rawserver.shutdown()
        self.session.on_stop()
def get_ts_metadata_from_db(self, infohash):
    """Look up transport-stream metadata for *infohash* in the TS-metadata DB.

    Returns the stored record, or None when the DB handler is
    unavailable or the lookup raises (the error is logged).  The handler
    is always closed.
    """
    if DEBUG:
        log("session::get_ts_metadata_from_db: infohash", binascii.hexlify(infohash))
    handler = self.open_dbhandler(NTFY_TS_METADATA)
    if handler is None:
        return None
    try:
        return handler.get(infohash)
    except:
        log_exc()
        return None
    finally:
        if handler is not None:
            self.close_dbhandler(handler)
def enough_buffer(self):
    """Return True when buffered playback should not be blocked on speed.

    Short-circuits to True when no bitrate is known or the
    wait-for-sufficient-speed policy is disabled; otherwise compares the
    expected download time against the expected playback time.  Any
    internal error is logged and treated as "enough".
    """
    try:
        if not self.bitrate_set or not self.wait_sufficient_speed:
            return True
        dl_time = self.expected_download_time()
        pb_time = self.expected_playback_time()
        if DEBUG:
            log(self.log_prefix + 'enough_buffer: expected_download_time', dl_time, 'expected_playback_time', pb_time)
        return max(0.0, dl_time - pb_time) == 0.0
    except:
        log_exc()
        return True
def enough_buffer(self):
    """Return True when buffered playback should not be blocked on speed.

    Short-circuits to True when no bitrate is known or the
    wait-for-sufficient-speed policy is disabled; otherwise compares the
    expected download time against the expected playback time.  Any
    internal error is logged and treated as "enough".
    """
    try:
        if not self.bitrate_set or not self.wait_sufficient_speed:
            return True
        dl_time = self.expected_download_time()
        pb_time = self.expected_playback_time()
        if DEBUG:
            log(self.log_prefix + 'enough_buffer: expected_download_time', dl_time, 'expected_playback_time', pb_time)
        return max(0.0, dl_time - pb_time) == 0.0
    except:
        log_exc()
        return True
def get_ts_bitrate_from_duration(self, idx = 0):
    """Estimate the bitrate of stream *idx* as length / duration.

    Length comes from the torrent metainfo (per-file entry when the
    torrent is multi-file); duration from get_ts_duration(idx).  Returns
    None when the duration is unknown or on any error (logged).
    """
    try:
        duration = self.get_ts_duration(idx)
        if duration is None:
            return None
        info = self.metainfo['info']
        if 'files' in info:
            length = info['files'][idx]['length']
        else:
            length = info['length']
        return int(length) / int(duration)
    except:
        log_exc()
        return None
def get_ts_metadata_from_db(self, infohash):
    """Look up transport-stream metadata for *infohash* in the TS-metadata DB.

    Returns the stored record, or None when the DB handler is
    unavailable or the lookup raises (the error is logged).  The handler
    is always closed.
    """
    if DEBUG:
        log('session::get_ts_metadata_from_db: infohash', binascii.hexlify(infohash))
    handler = self.open_dbhandler(NTFY_TS_METADATA)
    if handler is None:
        return None
    try:
        return handler.get(infohash)
    except:
        log_exc()
        return None
    finally:
        if handler is not None:
            self.close_dbhandler(handler)
def remove(self, d, removecontent = False):
    """Stop download *d*, remove its state (and optionally its content),
    and deregister it from self.downloads.

    Holds the session lock for the whole operation.  Errors are logged
    and swallowed; the lock is always released.
    """
    self.sesslock.acquire()
    try:
        dltype = d.get_type()
        if DEBUG:
            log('lm::remove: d', d, 'type', dltype, 'removecontent', removecontent)
        d.stop_remove(removestate=True, removecontent=removecontent)
        dlhash = d.get_hash()
        del self.downloads[dltype][dlhash]
        if DEBUG:
            log('lm::remove: done: len(self.downloads)', len(self.downloads[dltype]))
    except:
        log_exc()
    finally:
        self.sesslock.release()
def get_ts_bitrate_from_duration(self, idx=0):
    """Estimate the bitrate of stream *idx* as length / duration.

    Length comes from the torrent metainfo (per-file entry when the
    torrent is multi-file); duration from get_ts_duration(idx).  Returns
    None when the duration is unknown or on any error (logged).
    """
    try:
        duration = self.get_ts_duration(idx)
        if duration is None:
            return None
        info = self.metainfo["info"]
        if "files" in info:
            length = info["files"][idx]["length"]
        else:
            length = info["length"]
        return int(length) / int(duration)
    except:
        log_exc()
        return None
def save_ts_metadata_db(self, infohash, metadata):
    """Persist transport-stream *metadata* for *infohash* in the TS DB.

    None metadata is ignored.  Write failures are logged and swallowed;
    the DB handler is always closed.
    """
    if metadata is None:
        return
    if DEBUG:
        log("session::save_ts_metadata_db: infohash", binascii.hexlify(infohash), "metadata", metadata)
    handler = self.open_dbhandler(NTFY_TS_METADATA)
    if handler is None:
        return
    try:
        handler.put(infohash, metadata)
    except:
        log_exc()
    finally:
        self.close_dbhandler(handler)
def get_ts_bitrate_from_duration(self, idx=0):
    """Estimate the bitrate of stream *idx* as length / duration.

    Length comes from the torrent metainfo (per-file entry when the
    torrent is multi-file); duration from get_ts_duration(idx).  Returns
    None when the duration is unknown or on any error (logged).
    """
    try:
        duration = self.get_ts_duration(idx)
        if duration is None:
            return None
        info = self.metainfo['info']
        if 'files' in info:
            length = info['files'][idx]['length']
        else:
            length = info['length']
        return int(length) / int(duration)
    except:
        log_exc()
        return None
def _open(self, file, mode):
    """Open *file* in *mode*, guarding against external modification.

    NOTE(review): as reconstructed from the mangled source, the mtime
    values are fetched but never compared -- the 'modified during
    download' IOError is only raised when fetching the mtime itself
    fails.  The original code presumably compared newmtime/oldmtime;
    verify against upstream before changing behavior.
    """
    if self.mtimes.has_key(file):
        try:
            newmtime = os.path.getmtime(file)
            oldmtime = self.mtimes[file]
        except:
            if DEBUG:
                log(self.log_prefix + '_open:' + file + ' modified: ' + strftime('(%x %X)', time.localtime(self.mtimes[file])) + strftime(' != (%x %X) ?', time.localtime(os.path.getmtime(file))))
            raise IOError('modified during download')
    try:
        return open(file, mode)
    except:
        if DEBUG:
            log_exc()
        raise
def save_ts_metadata_db(self, infohash, metadata):
    """Persist transport-stream *metadata* for *infohash* in the TS DB.

    None metadata is ignored.  Write failures are logged and swallowed;
    the DB handler is always closed.
    """
    if metadata is None:
        return
    if DEBUG:
        log('session::save_ts_metadata_db: infohash', binascii.hexlify(infohash), 'metadata', metadata)
    handler = self.open_dbhandler(NTFY_TS_METADATA)
    if handler is None:
        return
    try:
        handler.put(infohash, metadata)
    except:
        log_exc()
    finally:
        self.close_dbhandler(handler)
def sever(self, closeall=False):
    """Tear down this connection and update encoder/connecter bookkeeping.

    Notifies the repexer for incomplete connections, tells the connecter
    about lost complete connections, and for locally initiated
    incomplete ones decrements the incomplete counter and (unless
    *closeall*) lets the encoder start a queued replacement connection.
    """
    self.closed = True
    if self.Encoder.connections.has_key(self.connection):
        self.Encoder.admin_close(self.connection)
    repexer = self.Encoder.repexer
    if repexer and not self.complete:
        try:
            repexer.connection_closed(self)
        except:
            log_exc()
    if self.complete:
        self.connecter.connection_lost(self)
    elif self.locally_initiated:
        incompletecounter.decrement()
        # NOTE(review): nesting reconstructed from mangled source -- the
        # queue restart is taken to apply only to locally initiated,
        # incomplete connections; verify against upstream.
        if not closeall:
            self.Encoder._start_connection_from_queue(sched=False)
def network_shutdown(self):
    """Flush persistent state and signal session shutdown.

    Commits the SQLite cache DB (when a peer DB exists), deinitializes
    the mainline DHT, and optionally logs threads still running.  Errors
    are logged and swallowed; the done flag is always set and the
    session's callback handler shut down.
    """
    try:
        if self.peer_db is not None:
            db = SQLiteCacheDB.getInstance()
            db.commit()
        mainlineDHT.deinit()
        if DEBUG:
            # NOTE(review): enumerate() here is presumably
            # threading.enumerate imported at module level; confirm.
            ts = enumerate()
            log('LM::network_shutdown: number of threads still running', len(ts))
            for t in ts:
                log('LM::network_shutdown: thread still running', t.name, 'daemon', t.daemon, 'instance', t)
    except:
        log_exc()
    self.sessdoneflag.set()
    self.session.uch.shutdown()
def sever(self, closeall = False):
    """Tear down this connection and update encoder/connecter bookkeeping.

    Notifies the repexer for incomplete connections, tells the connecter
    about lost complete connections, and for locally initiated
    incomplete ones decrements the incomplete counter and (unless
    *closeall*) lets the encoder start a queued replacement connection.
    """
    self.closed = True
    if self.Encoder.connections.has_key(self.connection):
        self.Encoder.admin_close(self.connection)
    repexer = self.Encoder.repexer
    if repexer and not self.complete:
        try:
            repexer.connection_closed(self)
        except:
            log_exc()
    if self.complete:
        self.connecter.connection_lost(self)
    elif self.locally_initiated:
        incompletecounter.decrement()
        # NOTE(review): nesting reconstructed from mangled source -- the
        # queue restart is taken to apply only to locally initiated,
        # incomplete connections; verify against upstream.
        if not closeall:
            self.Encoder._start_connection_from_queue(sched=False)
def clear_mp4_metadata_tag(tag, data):
    """Zero out the value of an iTunes-style MP4 metadata item atom.

    Locates *tag* in *data*, validates the enclosing item atom (32-bit
    big-endian size field 4 bytes before the tag) and its 'data' sub-atom
    (size, 'data' fourcc, 8 flag bytes), then overwrites the value bytes
    with NULs.  Returns the modified string, or None when the tag is
    absent or the structure is malformed/truncated.
    """
    try:
        pos = data.find(tag)
        if pos == -1:
            return None
        if DEBUG:
            log('clear_mp4_metadata_tag: tag found: tag', tag, 'pos', pos)
        if pos < 4:
            if DEBUG:
                log('clear_mp4_metadata_tag: truncated data start: tag', tag, 'pos', pos)
            return None
        item_atom_size = data[pos - 4:pos]
        item_atom_size = int(binascii.hexlify(item_atom_size), 16)
        datalen = len(data)
        # BUGFIX: the item atom spans [pos - 4, pos - 4 + item_atom_size);
        # the old check used 'pos - 1' and was off by 3, falsely reporting
        # truncation for atoms ending within 3 bytes of the buffer end.
        # (Consistent with value_end = pos + 4 + data_size below, which
        # equals pos - 4 + item_atom_size when item_atom_size - data_size == 8.)
        if pos - 4 + item_atom_size > datalen:
            if DEBUG:
                log('clear_mp4_metadata_tag: truncated data end: tag', tag, 'pos', pos, 'item_atom_size', item_atom_size, 'datalen', datalen)
            return None
        data_size = data[pos + 4:pos + 8]
        data_size = int(binascii.hexlify(data_size), 16)
        # The item atom must be exactly an 8-byte header plus the 'data' sub-atom.
        if item_atom_size - data_size != 8:
            if DEBUG:
                # BUGFIX: corrected 'sizse' typo in the debug message.
                log('clear_mp4_metadata_tag: size does not match: item_atom_size', item_atom_size, 'data_size', data_size)
            return None
        data_elem = data[pos + 8:pos + 12]
        data_flags = data[pos + 12:pos + 20]
        data_flags = binascii.hexlify(data_flags)
        value_start = pos + 20
        value_end = pos + 4 + data_size
        value = data[value_start:value_end]
        if DEBUG:
            log('clear_mp4_metadata_tag: item_atom_size', item_atom_size, 'data_size', data_size, 'data_elem', data_elem, 'data_flags', data_flags, 'value_start', value_start, 'value_end', value_end)
        if data_elem != 'data' or data_flags != '0000000100000000':
            if DEBUG:
                log('clear_mp4_metadata_tag: malformed data')
            return None
        # Overwrite the value with NUL bytes, preserving total length.
        new_data = data[:value_start] + chr(0) * len(value) + data[value_end:]
        if len(new_data) != datalen:
            if DEBUG:
                log('clear_mp4_metadata_tag: modified data size mismatch: datalen', datalen, 'newdatalen', len(new_data))
            return None
        return new_data
    except:
        log_exc()
        return None
def network_engine_wrapper_created_callback(self, download, download_engine, exc, pstate):
    """Post-creation hook for a download engine wrapper.

    When creation succeeded: torrent downloads are queued for hash
    checking, and for non-live downloads without a supplied persistent
    state a fresh checkpoint is taken and saved.  A None engine is
    treated as an error.  All failures are logged and stored on the
    download via set_error.
    """
    if exc is None:
        try:
            if download_engine is not None:
                dltype = download.get_type()
                if dltype == DLTYPE_TORRENT:
                    self.queue_for_hashcheck(download_engine)
                    live = download.get_def().get_live()
                elif dltype == DLTYPE_DIRECT:
                    live = False
                # NOTE(review): if dltype is neither DLTYPE_TORRENT nor
                # DLTYPE_DIRECT, 'live' is unbound here and the resulting
                # UnboundLocalError is caught below and stored as the
                # download error.
                if pstate is None and not live:
                    dlhash, pstate = download.network_checkpoint()
                    self.save_download_pstate(dltype, dlhash, pstate)
            else:
                raise ACEStreamException('lm: network_engine_wrapper_created_callback: download_engine is None!')
        except Exception as e:
            log_exc()
            download.set_error(e)
def _open(self, file, mode):
    """Open *file* in *mode*, guarding against external modification.

    NOTE(review): as reconstructed from the mangled source, the mtime
    values are fetched but never compared -- the 'modified during
    download' IOError is only raised when fetching the mtime itself
    fails.  The original code presumably compared newmtime/oldmtime;
    verify against upstream before changing behavior.
    """
    if self.mtimes.has_key(file):
        try:
            newmtime = os.path.getmtime(file)
            oldmtime = self.mtimes[file]
        except:
            if DEBUG:
                log(self.log_prefix + '_open:' + file + ' modified: ' + strftime('(%x %X)', time.localtime(self.mtimes[file])) + strftime(' != (%x %X) ?', time.localtime(os.path.getmtime(file))))
            raise IOError('modified during download')
    try:
        return open(file, mode)
    except:
        if DEBUG:
            log_exc()
        raise
def preallocate_file(self):
    """Preallocate the file on disk by writing 0xFF blocks up to self.size.

    Starts from the already-written top offset for the path, writes 1 MiB
    chunks with a short sleep between them to avoid starving other
    threads, then (under self.lock) moves temp files into place and, if
    the download is already complete, fires notify_finished.  Errors are
    logged and swallowed.
    """
    try:
        i = 0
        t = time.time()
        path = self.path
        pos = self.tops[path]
        total_write = pos
        size = self.size
        allocsize = 1048576  # 1 MiB per write
        allocbuf = chr(255) * allocsize
        h = self.get_file_handle(path, True)
        h.seek(pos)
        while pos < size:
            if self.closed:
                if DEBUG:
                    log(self.log_prefix + 'preallocate_file: storage is closed')
                return
            e = min(size - pos, allocsize)
            total_write += e
            h.write(allocbuf[:e])
            # NOTE(review): pos advances by the full allocsize even for a
            # final partial chunk; harmless since the loop then exits,
            # but pos may overshoot size.
            pos += allocsize
            if DEBUG:
                if i % 100 == 0:
                    log(self.log_prefix + 'preallocate_file: progress: path', self.path, 'progress', int(total_write / float(size) * 100), 'size', size, 'done', total_write)
            i += 1
            # Yield briefly so other threads are not starved by disk I/O.
            time.sleep(0.01)
        if DEBUG:
            log(self.log_prefix + 'preallocate_file: path', self.path, 'size', self.size, 'written', total_write, 'time', time.time() - t)
        h.flush()
        self.lock.acquire()
        try:
            self.move_temp_files()
            self.file_allocated = True
            if self.amount_left == 0:
                if DEBUG:
                    log(self.log_prefix + 'preallocate_file: download completed: ranges', self.ranges, 'size', self.size)
                self.notify_finished()
        finally:
            self.lock.release()
    except:
        log_exc()
def get_mimetype(self, file):
    """Guess the MIME type for *file* from its extension.

    On Windows the registry mapping is consulted first; elsewhere the
    mimetypes database (including ~/.mimetypes) is used.  When no
    mapping is found, a built-in table of video extensions applies, with
    'video/mpeg' as the final fallback.
    """
    _, ext = os.path.splitext(file)
    ext = ext.lower()
    mimetype = None
    if sys.platform == 'win32':
        try:
            from ACEStream.Video.utils import win32_retrieve_video_play_command
            mimetype, playcmd = win32_retrieve_video_play_command(ext, file)
            if DEBUG:
                log(self.log_prefix + 'get_mimetype: Win32 reg said MIME type is', mimetype)
        except:
            if DEBUG:
                log_exc()
    else:
        try:
            import mimetypes
            homedir = get_home_dir()
            homemapfile = os.path.join(homedir, '.mimetypes')
            mapfiles = [homemapfile] + mimetypes.knownfiles
            mimetypes.init(mapfiles)
            mimetype, encoding = mimetypes.guess_type(file)
            if DEBUG:
                log(self.log_prefix + 'get_mimetype: /etc/mimetypes+ said MIME type is', mimetype, file)
        except:
            log_exc()
    if mimetype is None:
        # Built-in fallback table (replaces the original if/elif chain).
        known = {'.avi': 'video/avi',
         '.mpegts': 'video/mp2t',
         '.ts': 'video/mp2t',
         '.mkv': 'video/x-matroska',
         '.ogg': 'video/ogg',
         '.ogv': 'video/ogg',
         '.oga': 'audio/ogg',
         '.webm': 'video/webm'}
        mimetype = known.get(ext, 'video/mpeg')
    return mimetype
def _get_file_handle(self, file, for_write):
    """Return a cached open handle for *file*, maintaining an LRU buffer.

    Like get_file_handle, but additionally tracks recently used files in
    self.handlebuffer (most recent last) and closes the least recently
    used handle once max_files_open is exceeded.  Raises IOError when
    the file cannot be (re)opened.
    """
    if self.handles.has_key(file):
        # Cached handle; upgrade to write access if now required.
        if for_write and not self.whandles.has_key(file):
            self._close(file)
            try:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            except (IOError, OSError) as e:
                if DEBUG:
                    log_exc()
                raise IOError('unable to reopen ' + file + ': ' + str(e))
        # Move this file to the most-recently-used end of the buffer.
        if self.handlebuffer:
            if self.handlebuffer[-1] != file:
                self.handlebuffer.remove(file)
                self.handlebuffer.append(file)
        elif self.handlebuffer is not None:
            self.handlebuffer.append(file)
    else:
        try:
            if for_write:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            else:
                f = self._open(file, 'rb')
                self.handles[file] = f
                if self.lock_while_reading:
                    self.lock_file(file, f)
        except (IOError, OSError) as e:
            if DEBUG:
                log_exc()
            raise IOError('unable to open ' + file + ': ' + str(e))
        if self.handlebuffer is not None:
            self.handlebuffer.append(file)
            # Evict the least recently used handle when over the limit.
            if len(self.handlebuffer) > self.max_files_open:
                self._close(self.handlebuffer.pop(0))
    return self.handles[file]
def _open(self, file, mode):
    """Open *file* in *mode*, guarding against external modification.

    NOTE(review): as reconstructed from the mangled source, the mtime
    values are fetched (only when a handlebuffer exists) but never
    compared -- the 'modified during download' IOError is only raised
    when fetching the mtime itself fails.  The original presumably
    compared newmtime/oldmtime; verify against upstream.
    """
    if DEBUG:
        log(self.log_prefix + '_open: file', file, 'mode', mode)
    if self.mtimes.has_key(file):
        try:
            if self.handlebuffer is not None:
                newmtime = getmtime(file)
                oldmtime = self.mtimes[file]
        except:
            if DEBUG:
                print file + ' modified: ' + strftime('(%x %X)', localtime(self.mtimes[file])) + strftime(' != (%x %X) ?', localtime(getmtime(file)))
            raise IOError('modified during download')
    try:
        return open(file, mode)
    except:
        if DEBUG:
            log_exc()
        raise
def ReadDataCallback(self, bufc, buflen, sid):
    """Fill *bufc* with up to *buflen* bytes from the input stream for *sid*.

    When the stream id changes, the previously active stream is removed
    from the registry and closed (close errors are logged).  Returns the
    number of bytes copied, 0 at end of stream, or -1 on error (logged).
    """
    try:
        previous = self.oldsid
        if previous is not None and previous != sid:
            oldstream = self.sid2streaminfo[previous]['stream']
            del self.sid2streaminfo[previous]
            try:
                oldstream.close()
            except:
                log_exc()
        self.oldsid = sid
        streaminfo = self.get_inputstream(sid)
        data = streaminfo['stream'].read(buflen)
        size = len(data)
        if size == 0:
            return 0
        bufc[0:size] = data
        return size
    except:
        log_exc()
        return -1
def save_torrent_local(self, tdef, checksum):
    """Save *tdef* into the torrent-collecting dir and register it in the DB.

    The file is named <hex infohash>.torrent.  The torrent is then added
    to the NTFY_TORRENTS database with status/filename/checksum extra
    info; DB errors are logged (when DEBUG) and swallowed, and the
    handler is always closed.
    """
    save_name = binascii.hexlify(tdef.get_infohash()) + '.torrent'
    torrent_dir = self.get_torrent_collecting_dir()
    save_path = os.path.join(torrent_dir, save_name)
    if DEBUG:
        log('session::save_torrent_local: save torrent: save_path', save_path, 'checksum', binascii.hexlify(checksum))
    # NOTE(review): the return value of tdef.save() is never used.
    torrent_data = tdef.save(save_path)
    extra_info = {'status': 'good'}
    extra_info['filename'] = save_name
    extra_info['checksum'] = checksum
    db = self.open_dbhandler(NTFY_TORRENTS)
    if db is None:
        return
    try:
        db.addExternalTorrent(tdef, source='', extra_info=extra_info)
    except:
        if DEBUG:
            log_exc()
    finally:
        self.close_dbhandler(db)