def error(self, e, url):
    """Report a playback error for `url`.

    Routes the exception to the server's errorcallback when one is
    registered, otherwise just logs it; independently notifies the
    statuscallback (if any) with a human-readable message.
    """
    handler = self.server.errorcallback
    if handler is not None:
        handler(e, url)
    else:
        log_exc()
    status = self.server.statuscallback
    if status is not None:
        status('Error playing video:' + str(e))
def data_came_in(self, connection, s):
    """Feed raw bytes from the network into the message-parsing state machine.

    The parser is driven by (self.next_len, self.next_func): bytes are
    accumulated in self.buffer until next_len bytes are available, then
    next_func is called with them and must return the next (len, func)
    pair, or None to abort the connection.
    """
    self.Encoder.measurefunc(len(s))
    while 1:
        if self.closed:
            return
        # Bytes still needed to complete the current message.
        i = self.next_len - self.buffer.tell()
        if i > len(s):
            # Not enough data yet; stash it and wait for more.
            self.buffer.write(s)
            return
        self.buffer.write(s[:i])
        s = s[i:]
        m = self.buffer.getvalue()
        # Reset buffer for the next message.
        self.buffer.reset()
        self.buffer.truncate()
        try:
            x = self.next_func(m)
        except:
            log_exc()
            # Switch parser to a sink state before re-raising.
            self.next_len, self.next_func = 1, self.read_dead
            raise
        if x is None:
            # Parser rejected the data: drop the connection.
            if DEBUG:
                print >> sys.stderr, 'encoder: function failed', self.next_func
            self.close()
            return
        self.next_len, self.next_func = x
def network_vod_event_callback(self, videoinfo, event, params):
    """Relay a VOD event to the user-supplied callback, swallowing its errors."""
    if DEBUG:
        log('lm::network_vod_event_callback: event %s, params %s' % (event, params))
    try:
        handler = videoinfo['usercallback']
        handler(event, params)
    except:
        log_exc()
def _request(self):
    """Perform one blocking HTTP GET against the HTTP seed.

    Stores the response body in self.received_data (or an error string
    in self.error), then schedules request_finished on the rawserver
    thread. On failure the connection is torn down and recreated so the
    next attempt starts clean.
    """
    # Force-load codecs up front so lazy imports cannot fail later
    # (e.g. inside a frozen binary or after sys.path changes).
    import encodings.ascii
    import encodings.punycode
    import encodings.idna
    self.error = None
    self.received_data = None
    try:
        self.connection.request('GET', self.url, None, {'User-Agent': VERSION})
        r = self.connection.getresponse()
        self.connection_status = r.status
        self.received_data = r.read()
    except Exception as e:
        log_exc()
        self.error = 'error accessing http seed: ' + str(e)
        try:
            self.connection.close()
        except:
            pass
        # Recreate the connection for the next retry; best-effort.
        try:
            self.connection = HTTPConnection(self.netloc)
        except:
            self.connection = None
    # Hand the result back to the network thread regardless of outcome.
    self.downloader.rawserver.add_task(self.request_finished)
def notify_playable(self):
    """Signal the user that the stream is ready to play.

    Fires the VODEVENT_START user callback exactly once: with a plain
    filename when the download is already complete, or with a wrapped
    stream object when playback must read from the live transport.
    """
    self.prebufprogress = 1.0
    self.playable = True
    # NOTE(review): usernotified appears to be set elsewhere; this guard
    # prevents a second START event — confirm against callers.
    if self.usernotified:
        return
    mimetype = self.get_mimetype()
    complete = self.storage.is_finished()
    if complete:
        # Fully downloaded: player can open the file directly.
        stream = None
        filename = self.storage.get_dest_path()
    else:
        # Still downloading: hand the player a streaming wrapper.
        stream = MovieTransportStreamWrapper(self)
        filename = None
    try:
        self.vodeventfunc(self.fileinfo, VODEVENT_START, {'complete': complete,
         'filename': filename,
         'mimetype': mimetype,
         'stream': stream,
         'length': self.storage.get_content_length(),
         'bitrate': self.bitrate})
    except:
        log_exc()
def data_came_in(self, packets):
    """Forward incoming raw packets to dispersy; log and re-raise on failure."""
    if not packets:
        return
    try:
        self.dispersy.data_came_in(packets)
    except:
        log_exc()
        raise
def get_file_handle(self, file, for_write):
    """Return a cached file handle for `file`, (re)opening as needed.

    Handles are cached in self.handles; self.whandles marks which of
    them are writable. A read-only cached handle is reopened 'rb+' when
    write access is requested. Raises IOError on open failure.
    """
    if self.handles.has_key(file):
        if for_write and not self.whandles.has_key(file):
            # Cached handle is read-only: reopen for writing.
            self._close(file)
            try:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            except (IOError, OSError) as e:
                if DEBUG:
                    log_exc()
                raise IOError('unable to reopen ' + file + ': ' + str(e))
    else:
        try:
            if for_write:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            else:
                f = self._open(file, 'rb')
                self.handles[file] = f
        except (IOError, OSError) as e:
            if DEBUG:
                log_exc()
            raise IOError('unable to open ' + file + ': ' + str(e))
    return self.handles[file]
def load_from_url(url, use_cache = True):
    """Build a TorrentDef from a URL.

    P2P-scheme URLs are decoded directly into metainfo; HTTP/file URLs
    are fetched and parsed, with results memoized in
    TorrentDef.torrent_cache (keyed by base64 of the URL) for
    TORRENT_CACHE_EXPIRE seconds.
    """
    if url.startswith(P2PURL_SCHEME):
        # Self-describing P2P URL: no network fetch needed.
        metainfo, swarmid = makeurl.p2purl2metainfo(url)
        metainfo['info']['url-compat'] = 1
        t = TorrentDef._create(metainfo)
        return t
    else:
        b64_url = b64encode(url)
        if use_cache:
            if b64_url in TorrentDef.torrent_cache:
                tdef_from_cache = TorrentDef.torrent_cache[b64_url]
                if DEBUG:
                    log('TorrentDef::load_from_url: found in cache: url', url, 'timestamp', tdef_from_cache['timestamp'])
                if tdef_from_cache['timestamp'] < time.time() - TORRENT_CACHE_EXPIRE:
                    # Stale entry: drop it and re-fetch below.
                    if DEBUG:
                        log('TorrentDef::load_from_url: expired, delete from cache')
                    del TorrentDef.torrent_cache[b64_url]
                else:
                    return tdef_from_cache['tdef']
        if url.startswith('file:///'):
            # Local path: unquote percent-escapes before opening.
            try:
                url = dunno2unicode(urllib2.unquote(url))
            except:
                log_exc()
        f = urlOpenTimeout(url)
        tdef = TorrentDef._read(f)
        if DEBUG:
            log('TorrentDef::load_from_url: add to cache, url', url)
        # Cache a copy so later mutations of the returned tdef do not
        # poison the cache.
        TorrentDef.torrent_cache[b64_url] = {'tdef': tdef.copy(), 'timestamp': time.time()}
        return tdef
def run(self):
    """Worker loop of the timed task queue.

    Waits on self.cond until the earliest queued task is due, pops and
    runs it outside the lock. Special task values: 'stop' exits
    immediately; 'quit' exits once the queue drains (re-queueing itself
    after the last pending task otherwise).
    """
    while True:
        task = None
        timeout = None
        flag = False
        self.cond.acquire()
        while True:
            # Sleep until something is queued, or until the earliest
            # task's due time (timeout) elapses.
            while len(self.queue) == 0 or flag:
                flag = False
                if timeout is None:
                    self.cond.wait()
                else:
                    self.cond.wait(timeout)
            self.queue.sort()
            when, count, task, id = self.queue[0]
            now = time()
            if now < when:
                # Earliest task not due yet: go back to waiting.
                timeout = when - now
                if DEBUG or self.debug:
                    log('ttqueue::run: event not due: timeout', timeout, 'task', task)
                flag = True
            else:
                self.queue.pop(0)
                if DEBUG or self.debug:
                    log('ttqueue::run: event due: task', task, 'len(queue)', len(self.queue))
                if DEBUG_STACK:
                    stack = self.callstack.pop(count)
                break
        # Run the task with the lock released so callbacks may re-enter
        # add_task without deadlocking.
        self.cond.release()
        try:
            if task == 'stop':
                break
            elif task == 'quit':
                if len(self.queue) == 0:
                    break
                else:
                    # Queue not empty: re-arm 'quit' just after the last
                    # currently-scheduled task.
                    when, count, task, id = self.queue[-1]
                    t = when - time() + 0.001
                    self.add_task('quit', t)
            else:
                t = time()
                task()
                if DEBUG or self.debug:
                    log('ttqueue::run: task finished: time', time() - t, 'task', task)
        except:
            log_exc()
            if DEBUG_STACK:
                print >> sys.stderr, '<<<<<<<<<<<<<<<<'
                print >> sys.stderr, 'TASK QUEUED FROM'
                print >> sys.stderr, ''.join(stack)
                print >> sys.stderr, '>>>>>>>>>>>>>>>>'
    if DEBUG:
        log('ttqueue::run: exit loop')
def check_outstanding_requests(self, downloads):
    """Cancel piece requests that cannot finish before their deadline.

    For every outstanding chunk request on every peer, estimates the
    remaining download time from the peer's short-term rate and cancels
    requests (in the high-priority / prebuffer range) that would miss
    their deadline, so they can be re-requested elsewhere.
    """
    if not self.transporter:
        return
    now = time.time()
    cancel_requests = []
    in_high_range = self.videostatus.in_high_range
    playing_mode = self.videostatus.playing and not self.videostatus.paused
    piece_due = self.transporter.piece_due
    # Stricter delays while actually playing; laxer while buffering.
    if playing_mode:
        min_delay, max_delay, offset_delay = self.playing_delay
    else:
        min_delay, max_delay, offset_delay = self.buffering_delay
    if DEBUG:
        log('pps::check_outstanding_requests: num_downloads', len(downloads), 'num_outstanding_requests', len(self.outstanding_requests))
    for download in downloads:
        total_length = 0
        download_rate = download.get_short_term_rate()
        for piece_id, begin, length in download.active_requests:
            try:
                time_request = self.outstanding_requests[piece_id, begin, length]
            except KeyError:
                if DEBUG:
                    log('pps::check_outstanding_requests: not outstanding request: piece', piece_id, 'begin', begin, 'length', length)
                continue
            # total_length accumulates the queue ahead of this chunk on
            # this peer, so later chunks get a larger ETA.
            total_length += length
            if now < time_request + min_delay:
                # Request is young: give the peer at least min_delay.
                if DEBUG:
                    log('pps::check_outstanding_requests: have time to complete: piece', piece_id, 'begin', begin, 'length', length, 'delay', now - time_request, 'min_delay', min_delay, 'now', now, 'time_request', time_request)
                continue
            # NOTE(review): `or` binds looser than `and`, so this reads as
            # high-range OR (prebuffering AND needed) — confirm intended.
            if in_high_range(piece_id) or self.videostatus.prebuffering and piece_id in self.videostatus.prebuf_needed_pieces:
                if download_rate == 0:
                    if DEBUG:
                        log('pps::check_outstanding_requests:cancel: download not started yet for piece', piece_id, 'chunk', begin, 'on', download.ip)
                    cancel_requests.append((piece_id, begin, length))
                else:
                    if playing_mode:
                        time_until_deadline = min(piece_due(piece_id), time_request + max_delay - now)
                    else:
                        time_until_deadline = time_request + max_delay - now
                    time_until_download = total_length / download_rate
                    if time_until_deadline < time_until_download - offset_delay:
                        if DEBUG:
                            log('pps::check_outstanding_requests:cancel: download speed too slow for piece', piece_id, 'chunk', begin, 'on', download.ip, 'Deadline in', time_until_deadline, 'while estimated download in', time_until_download)
                        cancel_requests.append((piece_id, begin, length))
                    elif DEBUG:
                        log('pps::check_outstanding_requests: no deadline: piece', piece_id, 'begin', begin, 'length', length, 'time_until_deadline', time_until_deadline, 'time_until_download', time_until_download, 'offset_delay', offset_delay)
            elif DEBUG:
                log('pps::check_outstanding_requests: not in high range: piece', piece_id, 'begin', begin, 'length', length)
    if cancel_requests:
        if DEBUG:
            log('pps::check_outstanding_requests: cancel_requests', cancel_requests)
        try:
            self.downloader.cancel_requests(cancel_requests, allowrerequest=False)
        except:
            log_exc()
def run_torrent_check(self):
    """Periodic tracker-checking tick: reschedule itself, then run one check pass."""
    self.update_torrent_checking_period()
    # Re-arm first so a failure below cannot break the periodic schedule.
    self.rawserver.add_task(self.run_torrent_check, self.torrent_checking_period)
    try:
        from freestream.TrackerChecking.TorrentChecking import TorrentChecking
        checker = TorrentChecking()
        checker.start()
    except Exception as e:
        log_exc()
        self.rawserver_nonfatalerrorfunc(e)
def fatalerrorfunc(self, data):
    """Handle a fatal error reported by the legacy core.

    `data` is either a plain string (legacy error message, wrapped in a
    FreeStreamLegacyException) or an exception object, and is passed to
    the registered error callback before shutting down.
    """
    log(self.log_prefix + ':fatalerrorfunc called', data)
    if type(data) == StringType:
        log(self.log_prefix + 'LEGACY CORE FATAL ERROR', data)
        print_stack()
        self.set_error_func(FreeStreamLegacyException(data))
    else:
        log_exc()
        self.set_error_func(data)
    # NOTE(review): shutdown placed after both branches (source was
    # collapsed); confirm it is not meant to run only in the else arm.
    self.shutdown()
def SeekDataCallback(self, pos, sid):
    """Seek the input stream for session `sid` to absolute offset `pos`.

    Returns 0 on success, -1 on any failure (missing stream, seek
    error); failures are logged rather than propagated since this is a
    C-callback boundary.
    """
    # Fix: removed the vacuous `if True:` wrapper and the unreachable
    # `return -1` that followed the `return 0` inside it.
    try:
        streaminfo = self.get_inputstream(sid)
        streaminfo['stream'].seek(pos, os.SEEK_SET)
        return 0
    except:
        log_exc()
        return -1
def get_bitrate_from_metainfo(file, metainfo):
    """Derive a bitrate (bytes/sec) for `file` from torrent metainfo.

    Sources, in order: a 'playtime' field (bitrate = length/playtime),
    the top-level metainfo 'playtime', or Azureus 'Speed Bps' content
    properties. `file` is None for single-file torrents; for multi-file
    torrents it is the in-torrent path. Raises ValueError if the file is
    not found.
    """
    info = metainfo['info']
    if file is None:
        # Single-file torrent: use the top-level 'length'.
        bitrate = None
        try:
            playtime = None
            if info.has_key('playtime'):
                playtime = parse_playtime_to_secs(info['playtime'])
            elif 'playtime' in metainfo:
                playtime = parse_playtime_to_secs(metainfo['playtime'])
            elif 'azureus_properties' in metainfo:
                azprop = metainfo['azureus_properties']
                if 'Content' in azprop:
                    content = metainfo['azureus_properties']['Content']
                    if 'Speed Bps' in content:
                        bitrate = float(content['Speed Bps'])
            if playtime is not None:
                # playtime wins over any Azureus speed hint.
                bitrate = info['length'] / playtime
            if DEBUG:
                print >> sys.stderr, 'TorrentDef: get_bitrate: Found bitrate', bitrate
        except:
            log_exc()
        return bitrate
    if file is not None and 'files' in info:
        # Multi-file torrent: locate the entry whose joined path matches.
        for i in range(len(info['files'])):
            x = info['files'][i]
            intorrentpath = ''
            for elem in x['path']:
                intorrentpath = os.path.join(intorrentpath, elem)
            bitrate = None
            try:
                playtime = None
                if x.has_key('playtime'):
                    playtime = parse_playtime_to_secs(x['playtime'])
                elif 'playtime' in metainfo:
                    playtime = parse_playtime_to_secs(metainfo['playtime'])
                elif 'azureus_properties' in metainfo:
                    azprop = metainfo['azureus_properties']
                    if 'Content' in azprop:
                        content = metainfo['azureus_properties']['Content']
                        if 'Speed Bps' in content:
                            bitrate = float(content['Speed Bps'])
                if playtime is not None:
                    bitrate = x['length'] / playtime
            except:
                log_exc()
            if intorrentpath == file:
                return bitrate
        raise ValueError('File not found in torrent')
    else:
        # file given but torrent has no 'files' list.
        raise ValueError('File not found in single-file torrent: ' + file)
def preallocate_file(self):
    """Fill self.path with 0xFF bytes up to self.size (background task).

    Writes in 1 MiB chunks starting at the already-allocated top
    (self.tops[path]), sleeping briefly between chunks to stay off the
    disk's critical path. On completion, moves temp files into place
    under self.lock and fires notify_finished if the download is done.
    """
    try:
        i = 0
        t = time.time()
        path = self.path
        pos = self.tops[path]
        total_write = pos
        size = self.size
        allocsize = 1048576
        # 0xFF filler distinguishes preallocated space from real data.
        allocbuf = chr(255) * allocsize
        h = self.get_file_handle(path, True)
        h.seek(pos)
        while pos < size:
            if self.closed:
                if DEBUG:
                    log(self.log_prefix + 'preallocate_file: storage is closed')
                return
            e = min(size - pos, allocsize)
            total_write += e
            h.write(allocbuf[:e])
            pos += allocsize
            if DEBUG:
                if i % 100 == 0:
                    log(self.log_prefix + 'preallocate_file: progress: path', self.path, 'progress', int(total_write / float(size) * 100), 'size', size, 'done', total_write)
            i += 1
            # Throttle to avoid starving concurrent piece writes.
            time.sleep(0.01)
        if DEBUG:
            log(self.log_prefix + 'preallocate_file: path', self.path, 'size', self.size, 'written', total_write, 'time', time.time() - t)
        h.flush()
        self.lock.acquire()
        try:
            self.move_temp_files()
            self.file_allocated = True
            if self.amount_left == 0:
                if DEBUG:
                    log(self.log_prefix + 'preallocate_file: download completed: ranges', self.ranges, 'size', self.size)
                self.notify_finished()
        finally:
            self.lock.release()
    except:
        log_exc()
def clear_mp4_metadata_tag(tag, data):
    """Zero out the value of an MP4 metadata atom named `tag` in `data`.

    Locates `tag`, validates the surrounding atom structure (big-endian
    32-bit sizes, a 'data' sub-atom with flags 0000000100000000), and
    returns a copy of `data` with the tag's value bytes replaced by NULs.
    Returns None if the tag is absent or the structure looks malformed.
    """
    try:
        pos = data.find(tag)
        if pos == -1:
            return None
        if DEBUG:
            log('clear_mp4_metadata_tag: tag found: tag', tag, 'pos', pos)
        if pos < 4:
            # Atom size field would precede the buffer start.
            if DEBUG:
                log('clear_mp4_metadata_tag: truncated data start: tag', tag, 'pos', pos)
            return None
        # 4 bytes before the tag: size of the enclosing item atom.
        item_atom_size = data[pos - 4:pos]
        item_atom_size = int(binascii.hexlify(item_atom_size), 16)
        datalen = len(data)
        # NOTE(review): `pos - 1` looks inconsistent with the `pos - 4`
        # atom start above — possibly an off-by-three; confirm intent.
        if pos - 1 + item_atom_size > datalen:
            if DEBUG:
                log('clear_mp4_metadata_tag: truncated data end: tag', tag, 'pos', pos, 'item_atom_size', item_atom_size, 'datalen', datalen)
            return None
        # 4 bytes after the tag: size of the nested 'data' atom.
        data_size = data[pos + 4:pos + 8]
        data_size = int(binascii.hexlify(data_size), 16)
        if item_atom_size - data_size != 8:
            if DEBUG:
                log('clear_mp4_metadata_tag: sizse does not match: item_atom_size', item_atom_size, 'data_size', data_size)
            return None
        data_elem = data[pos + 8:pos + 12]
        data_flags = data[pos + 12:pos + 20]
        data_flags = binascii.hexlify(data_flags)
        value_start = pos + 20
        value_end = pos + 4 + data_size
        value = data[value_start:value_end]
        if DEBUG:
            log('clear_mp4_metadata_tag: item_atom_size', item_atom_size, 'data_size', data_size, 'data_elem', data_elem, 'data_flags', data_flags, 'value_start', value_start, 'value_end', value_end)
        if data_elem != 'data' or data_flags != '0000000100000000':
            if DEBUG:
                log('clear_mp4_metadata_tag: malformed data')
            return None
        # Replace the value with NUL bytes of the same length.
        new_data = data[:value_start] + chr(0) * len(value) + data[value_end:]
        if len(new_data) != datalen:
            if DEBUG:
                log('clear_mp4_metadata_tag: modified data size mismatch: datalen', datalen, 'newdatalen', len(new_data))
            return None
        return new_data
    except:
        log_exc()
        return None
def _auto_close(self):
    """Timeout hook: close this connection if the handshake never completed.

    Notifies the RePEX observer (if any) of the timeout before closing.
    Does nothing when the connection already completed its handshake.
    """
    if not self.complete:
        if DEBUG:
            log('Encoder.Connection:_auto_close: ', self.get_myip(), self.get_myport(), 'to', self.get_ip(), self.get_port())
        repexer = self.Encoder.repexer
        if repexer and not self.closed:
            try:
                repexer.connection_timeout(self)
            except:
                log_exc()
        self.close()
def _run(self):
    """Network thread main: start UPnP/multicast, then serve until shutdown.

    listen_forever blocks for the session lifetime; whatever happens,
    the finally block persists tracker state, tears down UPnP and the
    rawserver, and fires the session's on_stop callback.
    """
    try:
        self.start_upnp()
        self.start_multicast()
        self.multihandler.listen_forever()
    except Exception as e:
        log_exc()
        self.session.on_error(e)
    finally:
        if self.internaltracker is not None:
            self.internaltracker.save_state()
        self.stop_upnp()
        self.rawserver.shutdown()
        self.session.on_stop()
def get_ts_metadata_from_db(self, infohash):
    """Look up TS metadata for `infohash`; returns None when unavailable.

    Always releases the DB handler, even when the lookup fails.
    """
    if DEBUG:
        log('session::get_ts_metadata_from_db: infohash', binascii.hexlify(infohash))
    db = self.open_dbhandler(NTFY_TS_METADATA)
    if db is None:
        return None
    try:
        return db.get(infohash)
    except:
        log_exc()
        return None
    finally:
        self.close_dbhandler(db)
def save_ts_metadata_db(self, infohash, metadata):
    """Persist TS metadata for `infohash`; no-op when metadata is None.

    Always releases the DB handler, even when the write fails.
    """
    if metadata is None:
        return
    if DEBUG:
        log('session::save_ts_metadata_db: infohash', binascii.hexlify(infohash), 'metadata', metadata)
    db = self.open_dbhandler(NTFY_TS_METADATA)
    if db is None:
        return
    try:
        db.put(infohash, metadata)
    except:
        log_exc()
    finally:
        self.close_dbhandler(db)
def remove(self, d, removecontent = False):
    """Stop download `d`, delete its persistent state, and deregister it.

    `removecontent` additionally deletes the downloaded data on disk.
    Serialized under the session lock; failures are logged, not raised.
    """
    self.sesslock.acquire()
    try:
        dltype = d.get_type()
        if DEBUG:
            log('lm::remove: d', d, 'type', dltype, 'removecontent', removecontent)
        d.stop_remove(removestate=True, removecontent=removecontent)
        dlhash = d.get_hash()
        del self.downloads[dltype][dlhash]
        if DEBUG:
            log('lm::remove: done: len(self.downloads)', len(self.downloads[dltype]))
    except:
        log_exc()
    finally:
        self.sesslock.release()
def get_ts_bitrate_from_duration(self, idx = 0):
    """Average bitrate for file `idx` as length / duration.

    Returns None when the duration is unknown or on any lookup error.
    """
    try:
        duration = self.get_ts_duration(idx)
        if duration is None:
            return None
        info = self.metainfo['info']
        length = info['files'][idx]['length'] if 'files' in info else info['length']
        return int(length) / int(duration)
    except:
        log_exc()
        return None
def _open(self, file, mode):
    """Open `file`, first sanity-checking its mtime against our record.

    Raises IOError('modified during download') when the mtime check
    fails, and re-raises (with optional debug logging) open() errors.
    """
    if self.mtimes.has_key(file):
        # NOTE(review): the try body only reads the mtimes — the actual
        # newmtime/oldmtime comparison appears to have been compiled out
        # (likely asserts stripped under -O in the original). As written,
        # the except only fires when getmtime/lookup itself fails.
        try:
            newmtime = os.path.getmtime(file)
            oldmtime = self.mtimes[file]
        except:
            if DEBUG:
                log(self.log_prefix + '_open:' + file + ' modified: ' + strftime('(%x %X)', time.localtime(self.mtimes[file])) + strftime(' != (%x %X) ?', time.localtime(os.path.getmtime(file))))
            raise IOError('modified during download')
    try:
        return open(file, mode)
    except:
        if DEBUG:
            log_exc()
        raise
def sever(self, closeall = False):
    """Tear down this peer connection and update connection bookkeeping.

    Notifies the RePEX observer for incomplete connections, tells the
    connecter about lost complete connections, and — for locally
    initiated incomplete ones — frees a slot and (unless `closeall`)
    starts the next queued outgoing connection.
    """
    self.closed = True
    if self.Encoder.connections.has_key(self.connection):
        self.Encoder.admin_close(self.connection)
    repexer = self.Encoder.repexer
    if repexer and not self.complete:
        try:
            repexer.connection_closed(self)
        except:
            log_exc()
    if self.complete:
        self.connecter.connection_lost(self)
    elif self.locally_initiated:
        # NOTE(review): nesting reconstructed from collapsed source —
        # confirm the queue restart belongs under this elif.
        incompletecounter.decrement()
        if not closeall:
            self.Encoder._start_connection_from_queue(sched=False)
def sever(self, closeall=False):
    """Tear down this peer connection (duplicate of the variant above).

    Marks the connection closed, deregisters it from the Encoder,
    notifies RePEX for incomplete connections, informs the connecter of
    lost complete ones, and may start the next queued connection.
    """
    self.closed = True
    if self.Encoder.connections.has_key(self.connection):
        self.Encoder.admin_close(self.connection)
    repexer = self.Encoder.repexer
    if repexer and not self.complete:
        try:
            repexer.connection_closed(self)
        except:
            log_exc()
    if self.complete:
        self.connecter.connection_lost(self)
    elif self.locally_initiated:
        # NOTE(review): nesting reconstructed from collapsed source —
        # confirm the queue restart belongs under this elif.
        incompletecounter.decrement()
        if not closeall:
            self.Encoder._start_connection_from_queue(sched=False)
def enough_buffer(self):
    """True when buffered data suffices to keep playing without stalling.

    Trivially True when no bitrate is known or speed-waiting is
    disabled; otherwise compares estimated remaining download time with
    remaining playback time. Errs on the side of True on any failure.
    """
    try:
        if self.bitrate_set and self.wait_sufficient_speed:
            dl_time = self.expected_download_time()
            play_time = self.expected_playback_time()
            if DEBUG:
                log(self.log_prefix + 'enough_buffer: expected_download_time', dl_time, 'expected_playback_time', play_time)
            return max(0.0, dl_time - play_time) == 0.0
    except:
        log_exc()
    return True
def network_shutdown(self):
    """Final network-thread shutdown: flush DB, stop DHT, release waiters.

    Best-effort: all errors are logged so the done-flag is always set
    and the user callback handler is always shut down.
    """
    try:
        if self.peer_db is not None:
            # Commit any pending megacache writes before exit.
            db = SQLiteCacheDB.getInstance()
            db.commit()
        mainlineDHT.deinit()
        if DEBUG:
            # enumerate() here is threading.enumerate (module-level import).
            ts = enumerate()
            log('LM::network_shutdown: number of threads still running', len(ts))
            for t in ts:
                log('LM::network_shutdown: thread still running', t.name, 'daemon', t.daemon, 'instance', t)
    except:
        log_exc()
    # Unblock anyone waiting for session teardown.
    self.sessdoneflag.set()
    self.session.uch.shutdown()
def clear_mp4_metadata_tag(tag, data):
    """Zero the value of MP4 metadata atom `tag` (duplicate of the variant above).

    Validates atom sizes and the nested 'data' sub-atom, then returns a
    copy of `data` with the tag value overwritten by NUL bytes; None on
    any structural mismatch or error.
    """
    try:
        pos = data.find(tag)
        if pos == -1:
            return None
        if DEBUG:
            log('clear_mp4_metadata_tag: tag found: tag', tag, 'pos', pos)
        if pos < 4:
            if DEBUG:
                log('clear_mp4_metadata_tag: truncated data start: tag', tag, 'pos', pos)
            return None
        # Big-endian 32-bit size of the enclosing item atom.
        item_atom_size = data[pos - 4:pos]
        item_atom_size = int(binascii.hexlify(item_atom_size), 16)
        datalen = len(data)
        # NOTE(review): `pos - 1` vs the `pos - 4` atom start above looks
        # like an off-by-three; confirm intent.
        if pos - 1 + item_atom_size > datalen:
            if DEBUG:
                log('clear_mp4_metadata_tag: truncated data end: tag', tag, 'pos', pos, 'item_atom_size', item_atom_size, 'datalen', datalen)
            return None
        data_size = data[pos + 4:pos + 8]
        data_size = int(binascii.hexlify(data_size), 16)
        if item_atom_size - data_size != 8:
            if DEBUG:
                log('clear_mp4_metadata_tag: sizse does not match: item_atom_size', item_atom_size, 'data_size', data_size)
            return None
        data_elem = data[pos + 8:pos + 12]
        data_flags = data[pos + 12:pos + 20]
        data_flags = binascii.hexlify(data_flags)
        value_start = pos + 20
        value_end = pos + 4 + data_size
        value = data[value_start:value_end]
        if DEBUG:
            log('clear_mp4_metadata_tag: item_atom_size', item_atom_size, 'data_size', data_size, 'data_elem', data_elem, 'data_flags', data_flags, 'value_start', value_start, 'value_end', value_end)
        if data_elem != 'data' or data_flags != '0000000100000000':
            if DEBUG:
                log('clear_mp4_metadata_tag: malformed data')
            return None
        new_data = data[:value_start] + chr(0) * len(value) + data[value_end:]
        if len(new_data) != datalen:
            if DEBUG:
                log('clear_mp4_metadata_tag: modified data size mismatch: datalen', datalen, 'newdatalen', len(new_data))
            return None
        return new_data
    except:
        log_exc()
        return None
def network_engine_wrapper_created_callback(self, download, download_engine, exc, pstate):
    """Post-creation hook for a download engine wrapper.

    Queues torrent engines for hash checking and, for non-live
    downloads without restored state, takes and saves an initial
    checkpoint. Any failure is recorded on the download via set_error.
    """
    if exc is None:
        try:
            if download_engine is not None:
                dltype = download.get_type()
                if dltype == DLTYPE_TORRENT:
                    self.queue_for_hashcheck(download_engine)
                    live = download.get_def().get_live()
                elif dltype == DLTYPE_DIRECT:
                    live = False
                # NOTE(review): `live` is unbound for any other dltype —
                # the resulting NameError would be caught below and
                # surface via set_error; confirm only these two types occur.
                if pstate is None and not live:
                    # No restored state: checkpoint now so a crash can resume.
                    dlhash, pstate = download.network_checkpoint()
                    self.save_download_pstate(dltype, dlhash, pstate)
            else:
                raise FreeStreamException('lm: network_engine_wrapper_created_callback: download_engine is None!')
        except Exception as e:
            log_exc()
            download.set_error(e)
def get_mimetype(self, file):
    """Guess the MIME type for `file`.

    On Windows, consults the registry via the project helper; elsewhere,
    uses the stdlib mimetypes database (augmented with ~/.mimetypes).
    Falls back to a hard-coded extension table, defaulting to
    'video/mpeg' for anything unrecognized.
    """
    prefix, ext = os.path.splitext(file)
    ext = ext.lower()
    mimetype = None
    if sys.platform == 'win32':
        try:
            from freestream.Video.utils import win32_retrieve_video_play_command
            mimetype, playcmd = win32_retrieve_video_play_command(ext, file)
            if DEBUG:
                log(self.log_prefix + 'get_mimetype: Win32 reg said MIME type is', mimetype)
        except:
            if DEBUG:
                log_exc()
    else:
        try:
            import mimetypes
            homedir = get_home_dir()
            homemapfile = os.path.join(homedir, '.mimetypes')
            mapfiles = [homemapfile] + mimetypes.knownfiles
            mimetypes.init(mapfiles)
            mimetype, encoding = mimetypes.guess_type(file)
            if DEBUG:
                log(self.log_prefix + 'get_mimetype: /etc/mimetypes+ said MIME type is', mimetype, file)
        except:
            log_exc()
    if mimetype is None:
        # Platform lookup failed: fall back to a fixed extension table.
        if ext == '.avi':
            mimetype = 'video/avi'
        elif ext == '.mpegts' or ext == '.ts':
            mimetype = 'video/mp2t'
        elif ext == '.mkv':
            mimetype = 'video/x-matroska'
        elif ext == '.ogg' or ext == '.ogv':
            mimetype = 'video/ogg'
        elif ext == '.oga':
            mimetype = 'audio/ogg'
        elif ext == '.webm':
            mimetype = 'video/webm'
        else:
            mimetype = 'video/mpeg'
    return mimetype
def preallocate_file(self):
    """Fill self.path with 0xFF bytes up to self.size (duplicate variant).

    Writes 1 MiB chunks from the current allocation top, throttled with
    short sleeps; on completion moves temp files into place under
    self.lock and fires notify_finished if nothing is left to download.
    """
    try:
        i = 0
        t = time.time()
        path = self.path
        pos = self.tops[path]
        total_write = pos
        size = self.size
        allocsize = 1048576
        # 0xFF filler distinguishes preallocated space from real data.
        allocbuf = chr(255) * allocsize
        h = self.get_file_handle(path, True)
        h.seek(pos)
        while pos < size:
            if self.closed:
                if DEBUG:
                    log(self.log_prefix + 'preallocate_file: storage is closed')
                return
            e = min(size - pos, allocsize)
            total_write += e
            h.write(allocbuf[:e])
            pos += allocsize
            if DEBUG:
                if i % 100 == 0:
                    log(self.log_prefix + 'preallocate_file: progress: path', self.path, 'progress', int(total_write / float(size) * 100), 'size', size, 'done', total_write)
            i += 1
            # Throttle to avoid starving concurrent piece writes.
            time.sleep(0.01)
        if DEBUG:
            log(self.log_prefix + 'preallocate_file: path', self.path, 'size', self.size, 'written', total_write, 'time', time.time() - t)
        h.flush()
        self.lock.acquire()
        try:
            self.move_temp_files()
            self.file_allocated = True
            if self.amount_left == 0:
                if DEBUG:
                    log(self.log_prefix + 'preallocate_file: download completed: ranges', self.ranges, 'size', self.size)
                self.notify_finished()
        finally:
            self.lock.release()
    except:
        log_exc()
def _open(self, file, mode):
    """Open `file`, first sanity-checking its mtime against our record.

    Raises IOError('modified during download') when the mtime check
    fails; re-raises open() errors after optional debug logging.
    """
    if DEBUG:
        log(self.log_prefix + '_open: file', file, 'mode', mode)
    if self.mtimes.has_key(file):
        # NOTE(review): the try body only reads the mtimes — the actual
        # comparison appears compiled out (asserts stripped under -O in
        # the original BitTornado code); the except only fires when
        # getmtime/lookup itself raises.
        try:
            if self.handlebuffer is not None:
                newmtime = getmtime(file)
                oldmtime = self.mtimes[file]
        except:
            if DEBUG:
                print file + ' modified: ' + strftime('(%x %X)', localtime(self.mtimes[file])) + strftime(' != (%x %X) ?', localtime(getmtime(file)))
            raise IOError('modified during download')
    try:
        return open(file, mode)
    except:
        if DEBUG:
            log_exc()
        raise
def _get_file_handle(self, file, for_write):
    """Return a cached handle for `file`, maintaining an LRU of open files.

    self.handlebuffer (when not None) is an LRU list of open files,
    capped at self.max_files_open; the least recently used handle is
    closed on overflow. Read-only handles are reopened 'rb+' when write
    access is requested. Raises IOError on open failure.
    """
    if self.handles.has_key(file):
        if for_write and not self.whandles.has_key(file):
            # Cached handle is read-only: reopen for writing.
            self._close(file)
            try:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            except (IOError, OSError) as e:
                if DEBUG:
                    log_exc()
                raise IOError('unable to reopen ' + file + ': ' + str(e))
        # Move the file to the most-recently-used end of the LRU.
        if self.handlebuffer:
            if self.handlebuffer[-1] != file:
                self.handlebuffer.remove(file)
                self.handlebuffer.append(file)
        elif self.handlebuffer is not None:
            self.handlebuffer.append(file)
    else:
        try:
            if for_write:
                f = self._open(file, 'rb+')
                self.handles[file] = f
                self.whandles[file] = 1
                self.lock_file(file, f)
            else:
                f = self._open(file, 'rb')
                self.handles[file] = f
                if self.lock_while_reading:
                    self.lock_file(file, f)
        except (IOError, OSError) as e:
            if DEBUG:
                log_exc()
            raise IOError('unable to open ' + file + ': ' + str(e))
        if self.handlebuffer is not None:
            self.handlebuffer.append(file)
            if len(self.handlebuffer) > self.max_files_open:
                # Evict the least recently used handle.
                self._close(self.handlebuffer.pop(0))
    return self.handles[file]
def save_torrent_local(self, tdef, checksum):
    """Save `tdef` into the torrent-collecting dir and register it in the DB.

    The file is named <hex infohash>.torrent; the DB entry records the
    filename and the caller-supplied checksum. DB failures are logged
    (in DEBUG) but not raised.
    """
    save_name = binascii.hexlify(tdef.get_infohash()) + '.torrent'
    torrent_dir = self.get_torrent_collecting_dir()
    save_path = os.path.join(torrent_dir, save_name)
    if DEBUG:
        log('session::save_torrent_local: save torrent: save_path', save_path, 'checksum', binascii.hexlify(checksum))
    torrent_data = tdef.save(save_path)
    extra_info = {'status': 'good'}
    extra_info['filename'] = save_name
    extra_info['checksum'] = checksum
    db = self.open_dbhandler(NTFY_TORRENTS)
    if db is None:
        return
    try:
        db.addExternalTorrent(tdef, source='', extra_info=extra_info)
    except:
        if DEBUG:
            log_exc()
    finally:
        self.close_dbhandler(db)
def refill_buffer(self): self.data_ready.acquire() try: if self.prebuffering or self._complete or not self.playing or self.done( ): return mx = self.max_buffer_size() length = self.storage.get_content_length() while self.outbuflen < mx and self.outbufpos < length: numbytes = mx - self.outbuflen if DEBUG: log( self.log_prefix + 'refill_buffer: read from storage: pos', self.outbufpos, 'numbytes', numbytes, 'outbuflen', self.outbuflen, 'mx', mx) data = self.storage.read(self.outbufpos, numbytes) if not data: if DEBUG: log( self.log_prefix + 'refill_buffer: no data available: pos', self.outbufpos) break datalen = len(data) self.outbuf.append((self.outbufpos, data)) self.stat_outbuf.append((self.outbufpos, datalen)) self.outbuflen += datalen self.outbufpos += datalen self.data_ready.notify() if DEBUG: log( self.log_prefix + 'refill_buffer: got data from storage: datalen', datalen, 'outbufpos', self.outbufpos, 'outbuflen', self.outbuflen) except: log_exc() finally: self.data_ready.release()
def _open(self, file, mode):
    """Open `file` with an mtime sanity check (duplicate of the variant above).

    Raises IOError('modified during download') when the mtime check
    fails; re-raises open() errors after optional debug logging.
    """
    if DEBUG:
        log(self.log_prefix + '_open: file', file, 'mode', mode)
    if self.mtimes.has_key(file):
        # NOTE(review): comparison of newmtime/oldmtime appears compiled
        # out (asserts stripped under -O); the except only fires when
        # getmtime/lookup itself raises.
        try:
            if self.handlebuffer is not None:
                newmtime = getmtime(file)
                oldmtime = self.mtimes[file]
        except:
            if DEBUG:
                print file + ' modified: ' + strftime('(%x %X)', localtime(self.mtimes[file])) + strftime(' != (%x %X) ?', localtime(getmtime(file)))
            raise IOError('modified during download')
    try:
        return open(file, mode)
    except:
        if DEBUG:
            log_exc()
        raise
def ReadDataCallback(self, bufc, buflen, sid):
    """Read up to `buflen` bytes of stream `sid` into buffer `bufc`.

    Closes and forgets the previous session's stream when the sid
    changes. Returns the number of bytes copied, 0 at end of stream, or
    -1 on error (this is a C-callback boundary, so errors are logged
    rather than raised).
    """
    try:
        if self.oldsid is not None and self.oldsid != sid:
            # Session switched: release the previous input stream.
            oldstream = self.sid2streaminfo[self.oldsid]['stream']
            del self.sid2streaminfo[self.oldsid]
            try:
                oldstream.close()
            except:
                log_exc()
        self.oldsid = sid
        streaminfo = self.get_inputstream(sid)
        data = streaminfo['stream'].read(buflen)
        size = len(data)
        if size == 0:
            # End of stream.
            return 0
        # Copy into the caller-provided buffer in place.
        bufc[0:size] = data
        return size
    except:
        log_exc()
        return -1
def _setup_direct_download(self, dcfg, pstate, initialdlstatus, lmcreatedcallback, lmvodeventcallback):
    """Initialize a direct (HTTP) download, optionally restoring saved state.

    Merges the download config with session defaults, restores progress
    and resume data from `pstate` when present, and creates the engine
    unless the download is (or was persisted as) STOPPED. Errors are
    recorded via set_error rather than raised.
    """
    self.dllock.acquire()
    try:
        if DEBUG:
            if pstate is None:
                resumedata = None
            else:
                resumedata = pstate['engineresumedata']
            log(self.log_prefix + '_setup_direct_download: resumedata', resumedata)
        if dcfg is None:
            cdcfg = DownloadStartupConfig()
        else:
            cdcfg = dcfg
        self.dlconfig = copy.copy(cdcfg.dlconfig)
        # Session-level settings fill any gaps in the download config.
        for k, v in self.session.get_current_startup_config_copy().sessconfig.iteritems():
            self.dlconfig.setdefault(k, v)
        if pstate is not None:
            if pstate.has_key('dlstate'):
                self.progressbeforestop = pstate['dlstate'].get('progress', 0.0)
            path = None
            resumedata = pstate.get('engineresumedata', None)
            if resumedata is not None:
                self.pstate_content_length = resumedata.get('size', None)
                filename = resumedata.get('filename', None)
                if filename is not None:
                    self.pstate_filename = os.path.join(self.dlconfig['saveas'], filename)
                if DEBUG:
                    log(self.log_prefix + '_setup_direct_download: pstate_filename', self.pstate_filename)
        if initialdlstatus != DLSTATUS_STOPPED:
            # Do not auto-start a download persisted as stopped.
            if pstate is None or pstate['dlstate']['status'] != DLSTATUS_STOPPED:
                self.starting = True
                self.create_direct_download_engine(pstate, lmcreatedcallback, lmvodeventcallback)
        self.pstate_for_restart = pstate
    except Exception as e:
        log_exc()
        self.set_error(e)
    finally:
        self.dllock.release()
def set_duration(self, duration):
    """Derive and set the stream bitrate from movie size and `duration` (seconds)."""
    try:
        movie_bytes = self.selected_movie['size']
        self.set_bitrate(movie_bytes / duration)
    except:
        log_exc()
def __init__(self, piecelen, fileinfo, videoinfo, authparams, is_extra = False):
    """Video status tracker for one file in a (possibly multi-file) torrent.

    Computes the piece range covering the selected file, establishes a
    bitrate (given, derived, or faked from file size), and builds the
    set of pieces that must be prebuffered before playback can start.
    Live streams get hook-in/wraparound bookkeeping instead.
    """
    self.piecelen = piecelen
    self.sigsize = 0
    self.fileinfo = fileinfo
    self.videoinfo = videoinfo
    self.authparams = authparams
    self.piecelock = Lock()
    # Adaptive high-priority window: current value and (min, max, step).
    self.high_prob_curr_time = 20
    self.high_prob_curr_time_limit = (10, 180, 10)
    self.high_prob_curr_pieces = 6
    self.high_prob_curr_pieces_limit = (4, 50, 4)
    index = self.videoinfo['index']
    if index == -1:
        index = 0
    self.fileindex = index
    # Byte offset of the selected file within the torrent's data.
    movie_offset = sum((filesize for _, filesize in fileinfo[:index] if filesize))
    movie_name = fileinfo[index][0]
    movie_size = fileinfo[index][1]
    self.selected_movie = {'offset': movie_offset,
     'name': movie_name,
     'size': movie_size}
    movie_begin = movie_offset
    movie_end = movie_offset + movie_size - 1
    # ((first piece, offset within it), (last piece, offset within it)).
    self.movie_range = ((movie_begin / piecelen, movie_begin % piecelen), (movie_end / piecelen, movie_end % piecelen))
    self.first_piecelen = piecelen - self.movie_range[0][1]
    self.last_piecelen = self.movie_range[1][1] + 1
    self.first_piece = self.movie_range[0][0]
    self.last_piece = self.movie_range[1][0]
    self.movie_numpieces = self.last_piece - self.first_piece + 1
    self.completed = 0.0
    self.can_be_downloaded = not is_extra
    self.min_download_percent = 0.0
    self.is_extra = is_extra
    self.numhave = 0
    self.have = []
    if DEBUG:
        log('VideoStatus:__init__: index', index, 'movie_offset', movie_offset, 'movie_size', movie_size, 'self.first_piece', self.first_piece, 'self.last_piece', self.last_piece, 'self.movie_numpieces', self.movie_numpieces)
    # Live-streaming state (populated on hook-in).
    self.live_streaming = videoinfo['live']
    self.live_startpos = None
    self.live_first_piece = None
    self.live_first_piece_with_offset = None
    self.live_last_piece = None
    self.live_first_ts = None
    self.live_last_ts = None
    self.live_buffer_pieces = 0
    self.playback_pos_is_live = True
    self.playback_pos_observers = []
    self.wraparound = self.live_streaming and LIVE_WRAPAROUND
    self.wraparound_delta = max(4, self.movie_numpieces / 8)
    self.playback_pos = self.first_piece
    self.playback_pos_real = self.playback_pos
    self.last_read_pos = None
    if self.live_streaming:
        self.set_bitrate(videoinfo['bitrate'])
        # Hook-in offset measured in pieces (~10s behind live edge).
        self.live_hook_left_offset_min = self.time_to_pieces(10)
        self.live_hook_left_offset = self.live_hook_left_offset_min
        self.live_hook_left_offset_step = self.live_hook_left_offset
        self.live_hook_left_offset_max = self.wraparound_delta
    elif not DEBUG_SKIP_METADATA and videoinfo['bitrate']:
        if DEBUG:
            log('vs::__init__: got bitrate', videoinfo['bitrate'])
        self.set_bitrate(videoinfo['bitrate'])
    else:
        # No bitrate known: fake one from file size (KB/s tiers at
        # 50MB / 100MB / 1GB boundaries).
        if movie_size < 52428800:
            fake_bitrate = 64
        elif movie_size < 104857600:
            fake_bitrate = 128
        elif movie_size < 1073741824:
            fake_bitrate = 256
        else:
            fake_bitrate = 512
        self.set_bitrate(fake_bitrate * 1024, True)
    mimetype = None
    if 'mimetype' in self.videoinfo:
        mimetype = self.videoinfo['mimetype']
    # Prebuffer bookkeeping: pieces needed before playback may start.
    self.prebuf_extra_pieces = None
    self.got_prebuf_pieces = False
    self.prebuf_high_priority_pieces = []
    self.prebuf_high_priority_length = 0
    self.prebuf_needed_pieces = []
    if self.live_streaming:
        self.prebuf_missing_pieces = []
    else:
        high_range_len = self.get_high_range_length()
        self.prebuf_pieces = min(self.movie_numpieces, 2 * high_range_len)
        self.prebuf_needed_pieces.extend(self.generate_range((self.first_piece, self.first_piece + self.prebuf_pieces)))
        if DEBUG:
            log('vs::__init__: set needed pieces: total_pieces', self.movie_numpieces, 'high_range_len', high_range_len, 'prebuf_pieces', self.prebuf_pieces, 'prebuf_needed_pieces', self.prebuf_needed_pieces)
        # Explicit prebuf piece list may come from the torrent metadata
        # as a comma-separated string; '0' alone means "none".
        if not DEBUG_SKIP_METADATA and videoinfo.has_key('prebuf_pieces') and videoinfo['prebuf_pieces']:
            try:
                self.prebuf_extra_pieces = [ int(x) for x in videoinfo['prebuf_pieces'].split(',') ]
                if len(self.prebuf_extra_pieces) == 1 and self.prebuf_extra_pieces[0] == 0:
                    self.prebuf_extra_pieces = []
                self.got_prebuf_pieces = True
                if DEBUG:
                    log('vs::__init__: got prebuf pieces', videoinfo['prebuf_pieces'], 'extra', self.prebuf_extra_pieces)
            except:
                log_exc()
        if not self.got_prebuf_pieces:
            # Heuristic tail prebuffering: MP4/MPEG may keep the moov
            # atom near the end; other video formats get a size-scaled
            # tail so seeking to the end works.
            self.prebuf_extra_pieces = []
            if mimetype == 'video/mpeg' or mimetype == 'video/mp4':
                p = int(floor(self.last_piece * 0.997))
                self.prebuf_extra_pieces.extend(self.generate_range((p, self.last_piece + 1)))
            elif not mimetype.startswith('audio'):
                # NOTE(review): mimetype may be None here, which would
                # raise AttributeError — confirm callers always set it.
                tail = 0
                if movie_size > 1073741824:
                    tail = int(ceil(8388608 / self.piecelen))
                elif movie_size > 524288000:
                    tail = int(ceil(7340032 / self.piecelen))
                elif movie_size > 157286400:
                    tail = int(ceil(4194304 / self.piecelen))
                else:
                    tail = int(ceil(2097152 / self.piecelen))
                if tail > 0:
                    self.prebuf_extra_pieces.extend(self.generate_range((self.last_piece - tail + 1, self.last_piece + 1)))
                if DEBUG:
                    log('vs::__init__: set extra pieces: movie_size', movie_size, 'mimetype', mimetype, 'tail', tail, 'prebuf_extra_pieces', self.prebuf_extra_pieces)
        self.prebuf_needed_pieces.extend(self.prebuf_extra_pieces)
        self.prebuf_needed_pieces = list(set(self.prebuf_needed_pieces))
        self.prebuf_needed_pieces.sort()
        self.prebuf_missing_pieces = self.prebuf_needed_pieces[:]
        if DEBUG:
            log('vs::__init__: prebuf configuration: mimetype', mimetype, 'size', movie_size, 'piecelen', self.piecelen, 'first', self.first_piece, 'last', self.last_piece, 'needed', self.prebuf_needed_pieces)
    if self.live_streaming:
        self.dropping = True
    else:
        self.dropping = False
    self.playing = False
    self.paused = False
    self.autoresume = False
    self.prebuffering = True
    # Pause/resume only offered when the user registered both events.
    self.pausable = VODEVENT_PAUSE in videoinfo['userevents'] and VODEVENT_RESUME in videoinfo['userevents']
def unpickle(self, data):
    """Validate a pickled fast-resume snapshot against the files on disk.

    data is a dict with flat triple lists under 'files' and
    'partial files' (name-or-key, size, mtime repeated), as shown by the
    3-step slicing below.  Returns the list of piece indices that can be
    trusted as already downloaded; returns [] on any inconsistency or
    error (the broad except below is the deliberate "discard resume
    data and start over" path).
    """
    try:
        files = {}
        pfiles = {}
        # Unflatten [k, size, mtime, k, size, mtime, ...] into triples.
        l = data['files']
        l = [ l[x:x + 3] for x in xrange(0, len(l), 3) ]
        for f, size, mtime in l:
            files[f] = (size, mtime)
        l = data.get('partial files', [])
        l = [ l[x:x + 3] for x in xrange(0, len(l), 3) ]
        for file, size, mtime in l:
            pfiles[file] = (size, mtime)
        # Optimistically mark every piece overlapping an enabled file range
        # as valid; invalid ones are deleted again below.
        valid_pieces = {}
        for i in xrange(len(self.files)):
            if self.disabled[i]:
                continue
            r = self.file_ranges[i]
            if not r:
                continue
            start, end, offset, file = r
            if DEBUG_RESTORE:
                log(self.log_prefix + 'unpickle: adding', file)
            for p in xrange(int(start / self.piece_length), int((end - 1) / self.piece_length) + 1):
                valid_pieces[p] = 1
        if DEBUG:
            print valid_pieces.keys()

        def test(old, size, mtime):
            # A file matches its snapshot if the size is identical and the
            # mtime agrees within +/-1 second (filesystem timestamp slack).
            oldsize, oldmtime = old
            if size != oldsize:
                return False
            if mtime > oldmtime + 1:
                return False
            if mtime < oldmtime - 1:
                return False
            return True

        for i in xrange(len(self.files)):
            if self.disabled[i]:
                # Disabled files are checked via their partial-file ranges;
                # any piece backed by a stale partial file is invalidated.
                for file, start, end in self._get_disabled_ranges(i)[2]:
                    f1 = basename(file)
                    if not pfiles.has_key(f1) or not test(pfiles[f1], getsize(file), getmtime(file)):
                        if DEBUG_RESTORE:
                            log(self.log_prefix + 'unpickle: removing_1', file)
                        for p in xrange(int(start / self.piece_length), int((end - 1) / self.piece_length) + 1):
                            if valid_pieces.has_key(p):
                                del valid_pieces[p]
                continue
            file, size = self.files[i]
            if not size:
                continue
            # NOTE(review): 'files' is looked up by the file INDEX i here but
            # was populated from the pickled triples above — presumably the
            # snapshot stores indices as the first element; confirm against
            # the matching pickle() implementation.
            if not files.has_key(i) or not test(files[i], getsize(file), getmtime(file)):
                start, end, offset, file = self.file_ranges[i]
                if DEBUG_RESTORE:
                    log(self.log_prefix + 'unpickle: removing_2', file)
                for p in xrange(int(start / self.piece_length), int((end - 1) / self.piece_length) + 1):
                    if valid_pieces.has_key(p):
                        del valid_pieces[p]
    except:
        # Malformed/stale snapshot: log (when debugging) and restart with
        # no resume data rather than propagate.
        if DEBUG or DEBUG_RESTORE:
            log_exc()
        return []
    if DEBUG_RESTORE:
        log(self.log_prefix + 'unpickle: valid_pieces', valid_pieces.keys())
    return valid_pieces.keys()
def __init__(self, scfg = None, ignore_singleton = False, on_error = lambda e: None, on_stop = lambda : None, app_http_handler = None, network_thread_daemon = True):
    """Create the (singleton) Session.

    scfg: optional SessionStartupConfig; when None the default config file
        is loaded (falling back to built-in defaults on failure).
    ignore_singleton: bypass the one-Session-per-process check.
    on_error / on_stop: callbacks stored for later use by the session.
    network_thread_daemon: passed through to the network launcher.

    Fills in every missing sessconfig entry, creates the state/working
    directories (setting first_run when any had to be created), then
    starts the network machinery.
    """
    if not ignore_singleton:
        if Session.__single:
            raise RuntimeError, 'Session is singleton'
        Session.__single = self
    self.sesslock = RLock()
    self.on_error = on_error
    self.on_stop = on_stop
    self.app_http_handler = app_http_handler
    first_run = False
    if scfg is None:
        # No config supplied: load the persisted one, or fall back to
        # built-in defaults if loading fails.
        try:
            state_dir = Session.get_default_state_dir()
            cfgfilename = Session.get_default_config_filename(state_dir)
            scfg = SessionStartupConfig.load(cfgfilename)
        except:
            log_exc()
            scfg = SessionStartupConfig()
        self.sessconfig = scfg.sessconfig
    else:
        # Shallow-copy a caller-supplied config so our normalization below
        # does not mutate the caller's object.
        self.sessconfig = copy.copy(scfg.sessconfig)
    state_dir = self.sessconfig['state_dir']
    if state_dir is None:
        state_dir = Session.get_default_state_dir()
        self.sessconfig['state_dir'] = state_dir
    if not os.path.isdir(state_dir):
        first_run = True
        os.makedirs(state_dir)
    # Default each working directory under state_dir when unset; creating
    # a missing directory also marks this as a first run.
    collected_torrent_dir = self.sessconfig['torrent_collecting_dir']
    if not collected_torrent_dir:
        collected_torrent_dir = os.path.join(self.sessconfig['state_dir'], STATEDIR_TORRENTCOLL_DIR)
        self.sessconfig['torrent_collecting_dir'] = collected_torrent_dir
    collected_subtitles_dir = self.sessconfig.get('subtitles_collecting_dir', None)
    if not collected_subtitles_dir:
        collected_subtitles_dir = os.path.join(self.sessconfig['state_dir'], STATEDIR_SUBSCOLL_DIR)
        self.sessconfig['subtitles_collecting_dir'] = collected_subtitles_dir
    if not os.path.exists(collected_torrent_dir):
        first_run = True
        os.makedirs(collected_torrent_dir)
    buffer_dir = self.sessconfig.get('buffer_dir', None)
    if not buffer_dir:
        buffer_dir = os.path.join(self.sessconfig['state_dir'], STATEDIR_BUFFER_DIR)
        self.sessconfig['buffer_dir'] = buffer_dir
    if not os.path.exists(buffer_dir):
        first_run = True
        os.makedirs(buffer_dir)
    ads_dir = self.sessconfig.get('ads_dir', None)
    if not ads_dir:
        ads_dir = os.path.join(self.sessconfig['state_dir'], STATEDIR_ADS_DIR)
        self.sessconfig['ads_dir'] = ads_dir
    if not os.path.exists(ads_dir):
        first_run = True
        os.makedirs(ads_dir)
    # Seed credentials on first run; otherwise pull missing keys from
    # sessdefaults.  (The literal placeholder values come from the
    # shipped defaults.)
    if 'ts_login' in self.sessconfig:
        if first_run and len(self.sessconfig['ts_login']) == 0:
            self.sessconfig['ts_login'] = '******'
    else:
        self.sessconfig['ts_login'] = sessdefaults['ts_login']
    if 'ts_password' in self.sessconfig:
        if first_run and len(self.sessconfig['ts_password']) == 0:
            self.sessconfig['ts_password'] = '******'
    else:
        self.sessconfig['ts_password'] = sessdefaults['ts_password']
    if 'ts_user_key' not in self.sessconfig:
        self.sessconfig['ts_user_key'] = sessdefaults['ts_user_key']
    if 'max_socket_connects' not in self.sessconfig:
        self.sessconfig['max_socket_connects'] = sessdefaults['max_socket_connects']
    if not self.sessconfig['peer_icon_path']:
        self.sessconfig['peer_icon_path'] = os.path.join(self.sessconfig['state_dir'], STATEDIR_PEERICON_DIR)
    self.keypair = None
    # Per-download persistent-state directories.
    dlpstatedir = os.path.join(self.sessconfig['state_dir'], STATEDIR_DLPSTATE_DIR)
    if not os.path.isdir(dlpstatedir):
        os.mkdir(dlpstatedir)
    dl_direct_pstatedir = os.path.join(self.sessconfig['state_dir'], STATEDIR_DLDIRECT_PSTATE_DIR)
    if not os.path.isdir(dl_direct_pstatedir):
        os.mkdir(dl_direct_pstatedir)
    # Internal tracker defaults (db file, allowed dir, log sink).
    trackerdir = self.get_internal_tracker_dir()
    if not os.path.isdir(trackerdir):
        os.mkdir(trackerdir)
    if self.sessconfig['tracker_dfile'] is None:
        self.sessconfig['tracker_dfile'] = os.path.join(trackerdir, 'tracker.db')
    if self.sessconfig['tracker_allowed_dir'] is None:
        self.sessconfig['tracker_allowed_dir'] = trackerdir
    if self.sessconfig['tracker_logfile'] is None:
        # Discard tracker log output by default (platform null device).
        if sys.platform == 'win32':
            sink = 'nul'
        else:
            sink = '/dev/null'
        self.sessconfig['tracker_logfile'] = sink
    if self.sessconfig['superpeer_file'] is None:
        self.sessconfig['superpeer_file'] = os.path.join(self.sessconfig['install_dir'], LIBRARYNAME, 'Core', 'superpeer.txt')
    if 'crawler_file' not in self.sessconfig or self.sessconfig['crawler_file'] is None:
        self.sessconfig['crawler_file'] = os.path.join(self.sessconfig['install_dir'], LIBRARYNAME, 'Core', 'Statistics', 'crawler.txt')
    if self.sessconfig['overlay'] and self.sessconfig['download_help']:
        if self.sessconfig['download_help_dir'] is None:
            self.sessconfig['download_help_dir'] = os.path.join(get_default_dest_dir(), DESTDIR_COOPDOWNLOAD)
        if not os.path.isdir(self.sessconfig['download_help_dir']):
            os.makedirs(self.sessconfig['download_help_dir'])
    if self.sessconfig['peer_icon_path'] is None:
        self.sessconfig['peer_icon_path'] = os.path.join(self.sessconfig['state_dir'], STATEDIR_PEERICON_DIR)
        if not os.path.isdir(self.sessconfig['peer_icon_path']):
            os.mkdir(self.sessconfig['peer_icon_path'])
    # Backfill anything still missing from the shipped defaults; the
    # explicit checks after the loop are redundant with it but kept as-is.
    for key, defvalue in sessdefaults.iteritems():
        if key not in self.sessconfig:
            self.sessconfig[key] = defvalue
    if 'live_aux_seeders' not in self.sessconfig:
        self.sessconfig['live_aux_seeders'] = sessdefaults['live_aux_seeders']
    if 'nat_detect' not in self.sessconfig:
        self.sessconfig['nat_detect'] = sessdefaults['nat_detect']
    if 'puncturing_internal_port' not in self.sessconfig:
        self.sessconfig['puncturing_internal_port'] = sessdefaults['puncturing_internal_port']
    if 'stun_servers' not in self.sessconfig:
        self.sessconfig['stun_servers'] = sessdefaults['stun_servers']
    if 'pingback_servers' not in self.sessconfig:
        self.sessconfig['pingback_servers'] = sessdefaults['pingback_servers']
    if 'mainline_dht' not in self.sessconfig:
        self.sessconfig['mainline_dht'] = sessdefaults['mainline_dht']
    self.http_seeds = {}
    # Persist the normalized config, then bring up callbacks and the
    # network thread.
    self.save_pstate_sessconfig()
    self.uch = UserCallbackHandler(self)
    self.lm = FreeStreamLaunchMany(network_thread_daemon)
    self.lm.register(self, self.sesslock)
    self.lm.start()
def _rerequest_single(self, t, s, l):
    """Announce to one tracker URL and hand the decoded response off.

    t: tracker announce URL; s: query string merged into it; l: token for
    self.lock — every exit path must trip() it exactly once (and unwait()
    on success of the trip) so the caller waiting on the lock wakes up.

    Error paths record a message in self.errorcodes before unwaiting.
    """
    try:
        # closer[0] lets the timeout callback close the in-flight
        # connection; a one-element list so the closure can see the
        # handle assigned after urlopen() succeeds.
        closer = [None]

        def timedout(self = self, l = l, closer = closer):
            # Scheduled watchdog: if we are first to trip the lock the
            # request is considered timed out; either way, force-close
            # any open connection (best effort).
            if self.lock.trip(l):
                if DEBUG_LOCK:
                    log(self.log_prefix + '_rerequest_single:timedout: unwait: l', l, 't', t, 'thread', currentThread().name)
                self.errorcodes['troublecode'] = 'Problem connecting to tracker - timeout exceeded'
                self.lock.unwait(l)
            elif DEBUG_LOCK:
                log(self.log_prefix + '_rerequest_single:timedout: no trip: l', l, 't', t, 'thread', currentThread().name)
            try:
                closer[0]()
            except:
                pass

        self.externalsched(timedout, self.timeout)
        err = None
        try:
            if DEBUG or DEBUG_ANNOUNCE:
                log(self.log_prefix + '_rerequest_single: request tracker', merge_announce(t, s), 'thread', currentThread().name)
            h = urlopen(merge_announce(t, s), silent=True)
            closer[0] = h.close
            data = h.read()
        except (IOError, socket.error) as e:
            err = 'Problem connecting to tracker - ' + str(e)
            if DEBUG:
                log(self.log_prefix + '_rerequest_single: failed to connect to tracker')
        except:
            err = 'Problem connecting to tracker'
            if DEBUG:
                log(self.log_prefix + '_rerequest_single: failed to connect to tracker')
        # Always try to close the handle; h may be unbound if urlopen
        # itself failed, which the bare except absorbs.
        try:
            h.close()
        except:
            pass
        if err:
            if self.lock.trip(l):
                if DEBUG_LOCK:
                    log(self.log_prefix + '_rerequest_single: got error, unwait: l', l, 't', t, 'thread', currentThread().name, 'err', err)
                self.errorcodes['troublecode'] = err
                self.lock.unwait(l)
            elif DEBUG_LOCK:
                log(self.log_prefix + '_rerequest_single: got error, no trip: l', l, 't', t, 'thread', currentThread().name, 'err', err)
            return
        if not data:
            if self.lock.trip(l):
                if DEBUG_LOCK:
                    log(self.log_prefix + '_rerequest_single: no date, unwait: l', l, 't', t, 'thread', currentThread().name)
                self.errorcodes['troublecode'] = 'no data from tracker'
                self.lock.unwait(l)
            elif DEBUG_LOCK:
                log(self.log_prefix + '_rerequest_single: no data, no trip: l', l, 't', t, 'thread', currentThread().name)
            return
        # Decode the bencoded tracker response and sanity-check the peers.
        try:
            r = bdecode(data, sloppy=1)
            if DEBUG or DEBUG_ANNOUNCE:
                log(self.log_prefix + '_rerequest_single: respose from tracker: t', t, 'r', r, 'thread', currentThread().name)
            check_peers(r)
        except ValueError as e:
            if DEBUG:
                log_exc()
            if self.lock.trip(l):
                if DEBUG_LOCK:
                    log(self.log_prefix + '_rerequest_single: exception while decoding data, unwait: l', l, 't', t, 'thread', currentThread().name)
                self.errorcodes['bad_data'] = 'bad data from tracker - ' + str(e)
                self.lock.unwait(l)
            elif DEBUG_LOCK:
                log(self.log_prefix + '_rerequest_single: exception while decoding data, no trip: l', l, 't', t, 'thread', currentThread().name)
            return
        if r.has_key('failure reason'):
            if self.lock.trip(l):
                if DEBUG_LOCK:
                    log(self.log_prefix + '_rerequest_single: got failure reason, unwait: l', l, 't', t, 'thread', currentThread().name)
                self.errorcodes['rejected'] = self.rejectedmessage + r['failure reason']
                self.lock.unwait(l)
            elif DEBUG_LOCK:
                log(self.log_prefix + '_rerequest_single: got failure reason, no trip: l', l, 't', t, 'thread', currentThread().name)
            return
        # Success: trip with the success flag, then schedule postrequest
        # on the external scheduler thread.
        if self.lock.trip(l, True):
            if DEBUG_LOCK:
                log(self.log_prefix + '_rerequest_single: trip success, unwait: l', l, 't', t, 'thread', currentThread().name)
            self.lock.unwait(l)
        elif DEBUG_LOCK:
            log(self.log_prefix + '_rerequest_single: trip success, no trip: l', l, 't', t, 'thread', currentThread().name)

        def add(self = self, r = r):
            self.postrequest(r, 'tracker=' + t, self.notifiers)

        self.externalsched(add)
    except:
        # Unexpected failure anywhere above: make sure the waiter is
        # released so announcing does not wedge.
        print_exc()
        if self.lock.trip(l):
            if DEBUG_LOCK:
                log(self.log_prefix + '_rerequest_single: got exception, unwait: l', l, 't', t, 'thread', currentThread().name)
            self.lock.unwait(l)
def postrequest(self, r, source, notifiers = []):
    """Process a decoded tracker response dict.

    r: bdecoded tracker response; source: label used in log lines (may be
    None); notifiers: callables invoked with the parsed peer list.
    (The mutable default is safe here: notifiers is only iterated,
    never mutated.)

    Updates announce intervals / tracker id, parses IPv4 peers from the
    compact-string or dict-list form, logs-and-ignores IPv6 dict peers,
    optionally clears self.last to force a re-announce when more peers
    are available, shuffles and connects, then calls the notifiers.
    Any exception is logged and swallowed (tracker data is untrusted).
    """
    try:
        if source is None:
            source = ''
        if r.has_key('warning message'):
            if DEBUG:
                log(self.log_prefix + 'postrequest: tracker warning:', r['warning message'])
            self.errorfunc('warning from tracker - ' + r['warning message'])
        self.announce_interval = r.get('interval', self.announce_interval)
        self.interval = r.get('min interval', self.interval)
        if DEBUG:
            log(self.log_prefix + 'postrequest: %s: announce min is' % source, self.announce_interval, self.interval)
        self.trackerid = r.get('tracker id', self.trackerid)
        self.last = r.get('last', self.last)
        peers = []
        p = r.get('peers')
        if p is not None:
            if type(p) == type(''):
                # BEP 23 compact form: 6 bytes per peer (4 IP + 2 port,
                # big-endian).
                for x in xrange(0, len(p), 6):
                    ip = '.'.join([ str(ord(i)) for i in p[x:x + 4] ])
                    port = ord(p[x + 4]) << 8 | ord(p[x + 5])
                    peers.append(((ip, port), 0))
            else:
                # Original dict-list form.
                for x in p:
                    peers.append(((x['ip'].strip(), x['port']), x.get('peer id', 0)))
        else:
            p = r.get('peers6')
            # FIX: previously a response with neither 'peers' nor 'peers6'
            # fell into 'for x in p' with p=None, raising TypeError that the
            # outer except swallowed — skipping connect() and the notifiers.
            # Guard so an empty response simply yields no peers.
            if p is not None:
                if type(p) == type(''):
                    # BEP 7 compact form: 18 bytes per peer (16 IP + 2 port);
                    # render the address as colon-separated hex groups.
                    for x in xrange(0, len(p), 18):
                        hexip = binascii.b2a_hex(p[x:x + 16])
                        ip = ''
                        for i in xrange(0, len(hexip), 4):
                            ip += hexip[i:i + 4]
                            if i + 4 != len(hexip):
                                ip += ':'
                        port = ord(p[x + 16]) << 8 | ord(p[x + 17])
                        peers.append(((ip, port), 0))
                else:
                    for x in p:
                        peers.append(((x['ip'].strip(), x['port']), x.get('peer id', 0)))
                    # Dict-form IPv6 peers are parsed for logging only and
                    # then deliberately discarded (not yet supported).
                    log(self.log_prefix + 'Got IPv6 peer addresses, not yet supported, ignoring.')
                    peers = []
        if DEBUG:
            log(self.log_prefix + 'postrequest: %s: Got peers' % source, peers)
        ps = len(peers) + self.howmany()
        if ps < self.maxpeers:
            # Still short of peers: clear self.last so the next announce is
            # a fresh one when the tracker reports substantially more peers
            # than we would have.
            if self.doneflag.isSet():
                if r.get('num peers', 1000) - r.get('done peers', 0) > ps * 1.2:
                    self.last = None
            elif r.get('num peers', 1000) > ps * 1.2:
                self.last = None
        if peers:
            random.shuffle(peers)
            if self.am_video_source:
                # A live video source only seeds; it never dials out.
                if DEBUG:
                    log(self.log_prefix + 'postrequest: do not start connections for live source')
            else:
                self.connect(peers)
        for notifier in notifiers:
            notifier(peers)
    except:
        log(self.log_prefix + 'postrequest: error in postrequest')
        log_exc()