def send_request(self, params, timeout=5):
    """Fire a best-effort GET to every 'default' URL with the given params.

    Responses are read and discarded; failures are logged (when DEBUG) and
    otherwise ignored, since this is a fire-and-forget notification call.

    @param params: dict of query parameters; values are URL-quoted.
    @param timeout: per-request timeout in seconds.
    """
    try:
        # Build '?k=v&k2=v2'; empty string when there are no params.
        pairs = [k + '=' + urllib.quote_plus(v) for k, v in params.items()]
        query_string = '?' + '&'.join(pairs) if pairs else ''
        if DEBUG:
            log('tns::send_request: query_string', query_string)
        for url in self.url_list['default']:
            try:
                url += query_string
                if DEBUG:
                    log('tns::send_request: url', url)
                stream = urlOpenTimeout(url, timeout=timeout, cookiejar=self.cookie_jar)
                stream.read()
                stream.close()
            except Exception:
                # Best-effort: one failing URL must not abort the rest.
                if DEBUG:
                    log('tns::send_request: failed: url', url)
    except Exception:
        if DEBUG:
            print_exc()
def send_request(self, params, timeout=5):
    """Send a best-effort notification GET to each 'default' URL.

    The params dict is serialized into a URL-quoted query string and
    appended to every URL in self.url_list['default'].  Each response is
    read and discarded; errors never propagate (logged when DEBUG only).

    @param params: dict of query parameters.
    @param timeout: per-request timeout in seconds.
    """
    try:
        get_params = [k + '=' + urllib.quote_plus(v) for k, v in params.items()]
        if get_params:
            query_string = '?' + '&'.join(get_params)
        else:
            query_string = ''
        if DEBUG:
            log('tns::send_request: query_string', query_string)
        for url in self.url_list['default']:
            try:
                url += query_string
                if DEBUG:
                    log('tns::send_request: url', url)
                stream = urlOpenTimeout(url, timeout=timeout, cookiejar=self.cookie_jar)
                stream.read()
                stream.close()
            except Exception:
                # Keep trying the remaining URLs.
                if DEBUG:
                    log('tns::send_request: failed: url', url)
    except Exception:
        if DEBUG:
            print_exc()
def getStatus(url, info_hash):
    """Query a tracker scrape URL for the swarm status of info_hash.

    @param url: tracker scrape URL.
    @param info_hash: torrent infohash to look up in the 'files' dict.
    @return: (seeders, leechers), both clamped to >= 0.  Sentinels:
             (-1, -1) network error, (-2, -2) malformed/unscrapable
             response, (-3, -3) tracker asked us to back off
             (min_request_interval flag present).
    """
    try:
        resp = timeouturlopen.urlOpenTimeout(url, timeout=HTTP_TIMEOUT)
        response = resp.read()
    except IOError:
        return (-1, -1)
    except AttributeError:
        return (-2, -2)
    try:
        response_dict = bdecode(response)
    except Exception:
        # Response was not valid bencoding.
        return (-2, -2)
    try:
        status = response_dict['files'][info_hash]
        # Trackers occasionally report negative counts; clamp to zero.
        seeder = max(status['complete'], 0)
        leecher = max(status['incomplete'], 0)
    except KeyError:
        # Some trackers answer with a flags dict instead of file stats.
        try:
            if 'min_request_interval' in response_dict.get('flags', {}):
                return (-3, -3)
        except Exception:
            pass
        return (-2, -2)
    return (seeder, leecher)
def load_from_url(url, use_cache=True):
    """Create a TorrentDef from a URL (P2P URL, file:/// or remote).

    Fetched definitions are memoised in TorrentDef.torrent_cache, keyed
    by the base64 of the URL, for TORRENT_CACHE_EXPIRE seconds.
    NOTE(review): the cache is written even when use_cache is False —
    the flag only controls cache *reads*; kept as-is for compatibility.

    @param url: torrent location; P2PURL_SCHEME URLs are decoded locally.
    @param use_cache: serve a cached TorrentDef when a fresh one exists.
    @return: a TorrentDef instance.
    """
    if url.startswith(P2PURL_SCHEME):
        # P2P URLs embed the metainfo; no network fetch needed.
        metainfo, swarmid = makeurl.p2purl2metainfo(url)
        metainfo['info']['url-compat'] = 1
        return TorrentDef._create(metainfo)
    b64_url = b64encode(url)
    if use_cache and b64_url in TorrentDef.torrent_cache:
        cached = TorrentDef.torrent_cache[b64_url]
        if DEBUG:
            log('TorrentDef::load_from_url: found in cache: url', url, 'timestamp', cached['timestamp'])
        if cached['timestamp'] < time.time() - TORRENT_CACHE_EXPIRE:
            if DEBUG:
                log('TorrentDef::load_from_url: expired, delete from cache')
            del TorrentDef.torrent_cache[b64_url]
        else:
            return cached['tdef']
    if url.startswith('file:///'):
        try:
            url = dunno2unicode(urllib2.unquote(url))
        except Exception:
            log_exc()
    f = urlOpenTimeout(url)
    tdef = TorrentDef._read(f)
    if DEBUG:
        log('TorrentDef::load_from_url: add to cache, url', url)
    TorrentDef.torrent_cache[b64_url] = {'tdef': tdef.copy(), 'timestamp': time.time()}
    return tdef
def parse(self):
    """Download self.feedurl (an Atom feed) and index entries by title.

    Fills self.title2entrymap mapping entry title text -> entry element;
    the parsed tree is kept in self.tree.
    """
    self.title2entrymap = {}
    sys.stderr.write('feedp: Parsing %s\n' % self.feedurl)
    stream = urlOpenTimeout(self.feedurl, 10)
    try:
        self.tree = etree.parse(stream)
    finally:
        # The original leaked the HTTP stream; always close it.
        stream.close()
    ns = '{http://www.w3.org/2005/Atom}'
    for entry in self.tree.findall(ns + 'entry'):
        titleelement = entry.find(ns + 'title')
        self.title2entrymap[titleelement.text] = entry
def send_pixel(self, timeout=5):
    """Best-effort GET of every tracking-pixel URL; errors are ignored.

    @param timeout: per-request timeout in seconds.
    """
    for url in self.url_list['pixel']:
        try:
            if DEBUG:
                log('tns::send_pixel: url', url)
            stream = urlOpenTimeout(url, timeout=timeout, cookiejar=self.cookie_jar)
            stream.read()
            stream.close()
        except Exception:
            # Fire-and-forget: one failed pixel must not stop the rest.
            if DEBUG:
                log('tns::send_pixel: failed')
def check_settings(self, timeout=120):
    """Download remote statistics settings and install them.

    Fetches SETTINGS_URL, parses the JSON body and fills self.settings
    with one parsed options dict per statistics type.

    @param timeout: HTTP timeout in seconds.
    @return: the response's '_expires' value (seconds until next check),
             or RETRY_ON_ERROR when the fetch/parse fails.
    """
    try:
        if DEBUG:
            t = time.time()
        stream = urlOpenTimeout(RemoteStatisticsSettings.SETTINGS_URL, timeout=timeout)
        response = stream.read()
        stream.close()
        if DEBUG:
            log('RemoteStatisticsSettings::check_settings: got response: time', time.time() - t, 'response', response)
        self.reset_settings()
        response = json.loads(response)
        for stat_type, options in response.items():
            if stat_type == '_expires':
                expires = long(options)
                continue
            mode = options['mode']
            parsed_options = {'mode': None,
                              'url-list': options['url-list'],
                              'whitelist_infohash': None,
                              'whitelist_content': None,
                              'blacklist_infohash': None,
                              'blacklist_content': None,
                              'options': options.get('options', None)}
            if mode == 'all':
                parsed_options['mode'] = RemoteStatisticsSettings.MODE_ALLOW_ALL
            elif mode == 'none':
                parsed_options['mode'] = RemoteStatisticsSettings.MODE_DENY_ALL
            elif mode == 'whitelist':
                parsed_options['mode'] = RemoteStatisticsSettings.MODE_WHITELIST
                if 'infohashes' in options:
                    parsed_options['whitelist_infohash'] = set(options['infohashes'])
                elif 'content' in options:
                    parsed_options['whitelist_content'] = set(options['content'])
                else:
                    raise Exception('missing content identifiers')
            elif mode == 'blacklist':
                parsed_options['mode'] = RemoteStatisticsSettings.MODE_BLACKLIST
                if 'infohashes' in options:
                    parsed_options['blacklist_infohash'] = set(options['infohashes'])
                elif 'content' in options:
                    parsed_options['blacklist_content'] = set(options['content'])
                else:
                    raise Exception('missing content identifiers')
            else:
                raise Exception('unknown mode')
            self.settings[stat_type] = parsed_options
        # NOTE(review): if the response lacks '_expires', the NameError
        # below is caught and we fall through to the retry path.
        return expires
    except Exception:
        self.reset_settings()
        if DEBUG:
            print_exc()
        return RemoteStatisticsSettings.RETRY_ON_ERROR
def parse(self):
    """Download the Atom meta-feed and collect the sub-feed URLs.

    Fills self.feedurls with the href of every entry link whose type is
    'application/atom+xml'; the parsed tree is kept in self.tree.
    """
    self.feedurls = []
    stream = urlOpenTimeout(self.metafeedurl, 10)
    try:
        self.tree = etree.parse(stream)
    finally:
        # The original leaked the HTTP stream; always close it.
        stream.close()
    ns = '{http://www.w3.org/2005/Atom}'
    for entry in self.tree.findall(ns + 'entry'):
        linkelement = entry.find(ns + 'link')
        if linkelement is not None:
            if linkelement.attrib['type'] == 'application/atom+xml':
                self.feedurls.append(linkelement.attrib['href'])
def send_pixel(self, timeout=5):
    """Request every tracking-pixel URL, ignoring all failures.

    @param timeout: per-request timeout in seconds.
    """
    for url in self.url_list['pixel']:
        try:
            if DEBUG:
                log('tns::send_pixel: url', url)
            stream = urlOpenTimeout(url, timeout=timeout, cookiejar=self.cookie_jar)
            stream.read()
            stream.close()
        except Exception:
            # Best-effort delivery; continue with the next pixel URL.
            if DEBUG:
                log('tns::send_pixel: failed')
def _predownload(self, callback, timeout):
    """Download self.url fully into memory and hand the body to callback.

    Reads in 512 KiB chunks so self.shutdown_flag can abort between
    chunks.  On success calls callback(content_type, data); on any
    exception calls self.failed_func(e) instead.  Holds self.lock and
    keeps self.running True for the duration.

    @param callback: callable(content_type, data) invoked on completion.
    @param timeout: timeout in seconds for urlOpenTimeout.
    """
    self.lock.acquire()
    self.running = True
    try:
        if DEBUG:
            log(self.log_prefix + '_predownload: url', self.url, 'timeout', timeout)
        stream = urlOpenTimeout(self.url, timeout=timeout)
        content_type = stream.info().getheader('Content-Type')
        content_length = stream.info().getheader('Content-Length')
        if DEBUG:
            log(self.log_prefix + '_predownload: request finished: content_type', content_type, 'content_length', content_length)
        # Accumulate chunks and join once: repeated str += is quadratic.
        chunks = []
        total = 0
        try:
            while True:
                if self.shutdown_flag:
                    if DEBUG:
                        log(self.log_prefix + '_predownload: got shutdown flag while reading: url', self.url)
                    break
                buf = stream.read(524288)
                if not buf:
                    if DEBUG:
                        log(self.log_prefix + '_predownload: eof: url', self.url)
                    break
                chunks.append(buf)
                total += len(buf)
                if DEBUG:
                    log(self.log_prefix + '_predownload: read chunk: url', self.url, 'read_len', total)
        finally:
            # Close even when an exception interrupts the read loop.
            stream.close()
        data = ''.join(chunks)
        if not self.shutdown_flag:
            if DEBUG:
                log(self.log_prefix + '_predownload: finished, run callback: url', self.url, 'content_type', content_type, 'content_length', content_length, 'data_len', len(data))
            callback(content_type, data)
    except Exception as e:
        if DEBUG:
            print_exc()
        self.failed_func(e)
    finally:
        self.running = False
        self.lock.release()
def reopen(self):
    """(Re)open self.url as self.stream, retrying until it succeeds.

    Uses urllib2 or urlOpenTimeout depending on self.reader and retries
    forever with a fixed delay.  KeyboardInterrupt is re-raised so the
    operator can abort.
    """
    retry_in = 5  # seconds between attempts
    while True:
        try:
            sys.stderr.write('createlivestream: open stream: url %s reader %s\n' % (self.url, self.reader))
            if self.reader == 'urllib2':
                self.stream = urllib2.urlopen(self.url)
            else:
                self.stream = urlOpenTimeout(self.url)
            return
        except KeyboardInterrupt:
            raise
        except Exception:
            if DEBUG:
                print_exc()
            sys.stderr.write('createlivestream: failed to open url %s retrying in %s seconds\n' % (self.url, retry_in))
            time.sleep(retry_in)
def _predownload(self, callback, timeout):
    """Read self.url completely into memory, then invoke callback.

    The body is read in 512 KiB chunks, checking self.shutdown_flag
    between chunks so a shutdown aborts the download.  Success path:
    callback(content_type, data).  Failure path: self.failed_func(e).
    Runs with self.lock held and self.running set.

    @param callback: callable(content_type, data).
    @param timeout: timeout in seconds for urlOpenTimeout.
    """
    self.lock.acquire()
    self.running = True
    try:
        if DEBUG:
            log(self.log_prefix + '_predownload: url', self.url, 'timeout', timeout)
        stream = urlOpenTimeout(self.url, timeout=timeout)
        content_type = stream.info().getheader('Content-Type')
        content_length = stream.info().getheader('Content-Length')
        if DEBUG:
            log(self.log_prefix + '_predownload: request finished: content_type', content_type, 'content_length', content_length)
        # Join chunks at the end instead of quadratic str concatenation.
        parts = []
        received = 0
        try:
            while True:
                if self.shutdown_flag:
                    if DEBUG:
                        log(self.log_prefix + '_predownload: got shutdown flag while reading: url', self.url)
                    break
                buf = stream.read(524288)
                if not buf:
                    if DEBUG:
                        log(self.log_prefix + '_predownload: eof: url', self.url)
                    break
                parts.append(buf)
                received += len(buf)
                if DEBUG:
                    log(self.log_prefix + '_predownload: read chunk: url', self.url, 'read_len', received)
        finally:
            # Ensure the socket is released on every exit path.
            stream.close()
        data = ''.join(parts)
        if not self.shutdown_flag:
            if DEBUG:
                log(self.log_prefix + '_predownload: finished, run callback: url', self.url, 'content_type', content_type, 'content_length', content_length, 'data_len', len(data))
            callback(content_type, data)
    except Exception as e:
        if DEBUG:
            print_exc()
        self.failed_func(e)
    finally:
        self.running = False
        self.lock.release()
def load_from_url(url, use_cache=True):
    """Build a TorrentDef from a URL, with an in-memory result cache.

    P2PURL_SCHEME URLs are decoded locally; anything else is fetched and
    parsed, and the result cached under b64encode(url) for
    TORRENT_CACHE_EXPIRE seconds.  NOTE(review): use_cache only gates
    cache *reads*; the result is always written back (kept as-is).

    @param url: torrent location.
    @param use_cache: serve a cached TorrentDef when a fresh one exists.
    @return: a TorrentDef instance.
    """
    if url.startswith(P2PURL_SCHEME):
        metainfo, swarmid = makeurl.p2purl2metainfo(url)
        metainfo['info']['url-compat'] = 1
        return TorrentDef._create(metainfo)
    b64_url = b64encode(url)
    if use_cache and b64_url in TorrentDef.torrent_cache:
        entry = TorrentDef.torrent_cache[b64_url]
        if DEBUG:
            log('TorrentDef::load_from_url: found in cache: url', url, 'timestamp', entry['timestamp'])
        if entry['timestamp'] < time.time() - TORRENT_CACHE_EXPIRE:
            if DEBUG:
                log('TorrentDef::load_from_url: expired, delete from cache')
            del TorrentDef.torrent_cache[b64_url]
        else:
            return entry['tdef']
    if url.startswith('file:///'):
        try:
            url = dunno2unicode(urllib2.unquote(url))
        except Exception:
            log_exc()
    f = urlOpenTimeout(url)
    tdef = TorrentDef._read(f)
    if DEBUG:
        log('TorrentDef::load_from_url: add to cache, url', url)
    TorrentDef.torrent_cache[b64_url] = {'tdef': tdef.copy(), 'timestamp': time.time()}
    return tdef
def load_from_url(url, use_cache=True):
    """Load a TorrentDef from a URL, memoising network fetches.

    A P2PURL_SCHEME URL carries its own metainfo and is decoded without
    network access.  Other URLs are downloaded, parsed, and stored in
    TorrentDef.torrent_cache (key: base64 of the URL) for
    TORRENT_CACHE_EXPIRE seconds.  NOTE(review): the cache is populated
    even when use_cache is False; the flag only controls lookups.

    @param url: torrent location.
    @param use_cache: allow serving a non-expired cached TorrentDef.
    @return: a TorrentDef instance.
    """
    if url.startswith(P2PURL_SCHEME):
        metainfo, swarmid = makeurl.p2purl2metainfo(url)
        metainfo['info']['url-compat'] = 1
        return TorrentDef._create(metainfo)
    b64_url = b64encode(url)
    if use_cache and b64_url in TorrentDef.torrent_cache:
        cached = TorrentDef.torrent_cache[b64_url]
        if DEBUG:
            log('TorrentDef::load_from_url: found in cache: url', url, 'timestamp', cached['timestamp'])
        if cached['timestamp'] < time.time() - TORRENT_CACHE_EXPIRE:
            if DEBUG:
                log('TorrentDef::load_from_url: expired, delete from cache')
            del TorrentDef.torrent_cache[b64_url]
        else:
            return cached['tdef']
    if url.startswith('file:///'):
        try:
            url = dunno2unicode(urllib2.unquote(url))
        except Exception:
            log_exc()
    f = urlOpenTimeout(url)
    tdef = TorrentDef._read(f)
    if DEBUG:
        log('TorrentDef::load_from_url: add to cache, url', url)
    TorrentDef.torrent_cache[b64_url] = {'tdef': tdef.copy(), 'timestamp': time.time()}
    return tdef
def send_request(self, method, params={}, data=None, content_type=None,
                 use_random=False, use_timestamp=False, timeout=5,
                 server_type=SERVER_TYPE_SERVICE, parse_response=True):
    """Send a signed request for `method` to one of the service servers.

    Builds a sorted, SHA1-signed query string from params (plus the
    version 'v', and optional random 'r' / timestamp 't' fields), then
    tries the servers of server_type in random order until one answers.

    @param method: service method name appended to the server URL.
    @param params: dict of string parameters; 'r', 't', 'v' are reserved.
    @param data: optional request body; requires content_type.
    @param content_type: MIME type of data.
    @param use_random: add a random 'r' and verify it in the response.
    @param use_timestamp: add the current unix time as 't'.
    @param timeout: per-server timeout in seconds.
    @param server_type: which server pool to query.
    @param parse_response: run check_response() on the raw reply.
    @return: the (possibly parsed) response, or None if all servers fail.
    @raise ValueError: on missing content type or reserved parameter use.
    """
    if data is not None and content_type is None:
        raise ValueError('Data passed without content type')
    if 'r' in params:
        raise ValueError("Cannot use reserved parameter 'r'")
    if 't' in params:
        raise ValueError("Cannot use reserved parameter 't'")
    if 'v' in params:
        raise ValueError("Cannot use reserved parameter 'v'")
    # BUG FIX: work on a copy.  The original mutated the caller's dict —
    # and the shared {} default argument — so 'r'/'t'/'v' leaked between
    # calls and a second call with use_random raised ValueError.
    params = dict(params)
    params['v'] = VERSION
    if use_random:
        request_random = random.randint(1, sys.maxint)
        params['r'] = str(request_random)
    else:
        request_random = None
    if use_timestamp:
        params['t'] = str(long(time.time()))
    get_params = []
    payload = []
    # Sort keys so the signature is deterministic for a given param set.
    for k in sorted(params.keys()):
        v = params[k]
        get_params.append(k + '=' + urllib.quote_plus(v))
        payload.append(k + '=' + v)
    if DEBUG:
        log('tsservice::send_request: got params: get_params', get_params, 'payload', payload)
    if data is not None:
        payload.append(data)
        if DEBUG:
            log('tsservice::send_request: got data')
    if payload:
        # Sign '#'-joined payload plus the shared secret with SHA1.
        signature = hashlib.sha1('#'.join(payload) + self.REQUEST_SECRET).hexdigest()
        get_params.append('s=' + signature)
        if DEBUG:
            log('tsservice::send_request: sign data: signature', signature)
    query = '/' + method
    if get_params:
        query += '?' + '&'.join(get_params)
    if DEBUG:
        log('tsservice::send_request: query', query)
    servers = self.get_servers(server_type)
    random.shuffle(servers)
    response = None
    for serv in servers:
        try:
            url = serv + query
            if DEBUG:
                log('tsservice::send_request: url', url)
            stream = urlOpenTimeout(url, timeout, content_type, None, data)
            response = stream.read()
            stream.close()
            if DEBUG:
                log('tsservice::send_request: got response: url', url, 'response', response)
            if parse_response:
                response = self.check_response(response, request_random)
            break
        except BadResponseException as e:
            response = None
            if DEBUG:
                log('tsservice::send_request: bad response: ' + str(e))
        except (URLError, HTTPError) as e:
            response = None
            if DEBUG:
                log('tsservice::send_request: http error: ' + str(e))
        except Exception:
            response = None
            if DEBUG:
                print_exc()
    return response
def send_request(self, method, params={}, data=None, content_type=None,
                 use_random=False, use_timestamp=False, timeout=5,
                 server_type=SERVER_TYPE_SERVICE, parse_response=True):
    """Issue a SHA1-signed service request, failing over between servers.

    The query string is built from the sorted params (plus 'v'=VERSION
    and optional 'r'/'t'), signed with REQUEST_SECRET, and sent to each
    server of server_type in random order until one responds.

    @param method: service method name appended to the server URL.
    @param params: dict of string parameters; 'r', 't', 'v' are reserved.
    @param data: optional request body; requires content_type.
    @param content_type: MIME type of data.
    @param use_random: add a random 'r' and verify it in the response.
    @param use_timestamp: add the current unix time as 't'.
    @param timeout: per-server timeout in seconds.
    @param server_type: which server pool to query.
    @param parse_response: run check_response() on the raw reply.
    @return: the (possibly parsed) response, or None if all servers fail.
    @raise ValueError: on missing content type or reserved parameter use.
    """
    if data is not None and content_type is None:
        raise ValueError("Data passed without content type")
    if "r" in params:
        raise ValueError("Cannot use reserved parameter 'r'")
    if "t" in params:
        raise ValueError("Cannot use reserved parameter 't'")
    if "v" in params:
        raise ValueError("Cannot use reserved parameter 'v'")
    # BUG FIX: copy before mutating.  The original wrote 'r'/'t'/'v' into
    # the caller's dict (and the shared {} default), so state leaked
    # across calls and repeat calls with use_random raised ValueError.
    params = dict(params)
    params["v"] = VERSION
    if use_random:
        request_random = random.randint(1, sys.maxint)
        params["r"] = str(request_random)
    else:
        request_random = None
    if use_timestamp:
        params["t"] = str(long(time.time()))
    get_params = []
    payload = []
    # Deterministic key order keeps the signature stable.
    for k in sorted(params.keys()):
        v = params[k]
        get_params.append(k + "=" + urllib.quote_plus(v))
        payload.append(k + "=" + v)
    if DEBUG:
        log("tsservice::send_request: got params: get_params", get_params, "payload", payload)
    if data is not None:
        payload.append(data)
        if DEBUG:
            log("tsservice::send_request: got data")
    if payload:
        signature = hashlib.sha1("#".join(payload) + self.REQUEST_SECRET).hexdigest()
        get_params.append("s=" + signature)
        if DEBUG:
            log("tsservice::send_request: sign data: signature", signature)
    query = "/" + method
    if get_params:
        query += "?" + "&".join(get_params)
    if DEBUG:
        log("tsservice::send_request: query", query)
    servers = self.get_servers(server_type)
    random.shuffle(servers)
    response = None
    for serv in servers:
        try:
            url = serv + query
            if DEBUG:
                log("tsservice::send_request: url", url)
            stream = urlOpenTimeout(url, timeout, content_type, None, data)
            response = stream.read()
            stream.close()
            if DEBUG:
                log("tsservice::send_request: got response: url", url, "response", response)
            if parse_response:
                response = self.check_response(response, request_random)
            break
        except BadResponseException as e:
            response = None
            if DEBUG:
                log("tsservice::send_request: bad response: " + str(e))
        except (URLError, HTTPError) as e:
            response = None
            if DEBUG:
                log("tsservice::send_request: http error: " + str(e))
        except Exception:
            response = None
            if DEBUG:
                print_exc()
    return response
def check_settings(self, timeout=120):
    """Fetch and apply the remote statistics configuration.

    Downloads SETTINGS_URL, decodes the JSON body, and stores a parsed
    options dict in self.settings for every statistics type it lists.

    @param timeout: HTTP timeout in seconds.
    @return: the '_expires' value from the response (seconds until the
             next refresh), or RETRY_ON_ERROR on any failure.
    """
    try:
        if DEBUG:
            t = time.time()
        stream = urlOpenTimeout(RemoteStatisticsSettings.SETTINGS_URL, timeout=timeout)
        response = stream.read()
        stream.close()
        if DEBUG:
            log('RemoteStatisticsSettings::check_settings: got response: time', time.time() - t, 'response', response)
        self.reset_settings()
        response = json.loads(response)
        for stat_type, options in response.items():
            if stat_type == '_expires':
                expires = long(options)
                continue
            mode = options['mode']
            parsed_options = {'mode': None,
                              'url-list': options['url-list'],
                              'whitelist_infohash': None,
                              'whitelist_content': None,
                              'blacklist_infohash': None,
                              'blacklist_content': None,
                              'options': options.get('options', None)}
            if mode == 'all':
                parsed_options['mode'] = RemoteStatisticsSettings.MODE_ALLOW_ALL
            elif mode == 'none':
                parsed_options['mode'] = RemoteStatisticsSettings.MODE_DENY_ALL
            elif mode == 'whitelist':
                parsed_options['mode'] = RemoteStatisticsSettings.MODE_WHITELIST
                if 'infohashes' in options:
                    parsed_options['whitelist_infohash'] = set(options['infohashes'])
                elif 'content' in options:
                    parsed_options['whitelist_content'] = set(options['content'])
                else:
                    raise Exception('missing content identifiers')
            elif mode == 'blacklist':
                parsed_options['mode'] = RemoteStatisticsSettings.MODE_BLACKLIST
                if 'infohashes' in options:
                    parsed_options['blacklist_infohash'] = set(options['infohashes'])
                elif 'content' in options:
                    parsed_options['blacklist_content'] = set(options['content'])
                else:
                    raise Exception('missing content identifiers')
            else:
                raise Exception('unknown mode')
            self.settings[stat_type] = parsed_options
        # NOTE(review): a response without '_expires' leaves `expires`
        # unbound; the resulting NameError is caught below and treated
        # as an error (settings reset, RETRY_ON_ERROR returned).
        return expires
    except Exception:
        self.reset_settings()
        if DEBUG:
            print_exc()
        return RemoteStatisticsSettings.RETRY_ON_ERROR