def GetTheData(self):
    """Fetch self.url and hand the response to the torrent link parser.

    Outcomes are reported through self.success()/self.failed(); the
    self.stopall flag lets the user abort before any network I/O.
    """
    # Removed unused local `add = None` from the original.
    # If the user already asked to stop, abort early.
    if self.stopall:
        self.failed(_("User stopped HTML scan"))
        return

    # Fetch the original URL (custom urlopen: supports cookies).
    try:
        answer = urlopen(self.url, encoding=None, cookies=self.cookies)
    except IOError:
        answer = None
    if not answer:
        self.failed(_("Could not open URL"))
        return

    # The URL may point directly at a torrent file.
    if "application/x-bittorrent" in answer.getHeader("Content-Type"):
        self.success(answer)
        return

    # Anything that is neither a torrent nor HTML goes to the browser
    # (when allowed) or is reported as a failure.
    if "text/html" not in answer.getHeader("Content-Type"):
        if self.allowBrowser:
            wx.LaunchDefaultBrowser(self.url)
        else:
            self.failed(_("Invalid HTML page"))
        return

    # Scan the HTML body for links; parser errors are reported, not raised.
    try:
        self.parser.feed(answer.read())
    except Exception as e:   # modernized from `except Exception, e` (cf. sibling code)
        self.failed(str(e))
        return
def _aggregate_senddata(self, url):
    """Fire-and-forget GET of *url*; response data and all errors are ignored."""
    # just send, don't attempt to error check,
    try:
        # discard any returned data
        h = urlopen(url)
        try:
            h.read()
        finally:
            # close even if read() raises (the original leaked the handle)
            h.close()
    except:
        # deliberate best-effort: swallow everything
        return
def getMetainfo(self, src, openoptions = 'rb', style = "file"):
    """Return the bdecoded metainfo held by *src*.

    *style* selects the source kind: "rawdata" (bencoded bytes),
    "url" (fetch over the network) or "file" (local path).
    Returns None when src is None or anything fails while reading
    or decoding.
    """
    if src is None:
        return None

    source = None
    decoded = None
    try:
        if style == "rawdata":
            # src already holds the bencoded payload
            return bdecode(src)
        if style == "url":
            # We're getting a url
            source = urlopen(src)
        elif os.access(src, os.R_OK):
            # We're getting a file that exists
            source = open(src, openoptions)
        if source is not None:
            decoded = bdecode(source.read())
            source.close()
    except:
        # any failure yields None; make sure the handle is released
        if source is not None:
            try:
                source.close()
            except:
                pass
        decoded = None
    return decoded
def _rerequest_single(self, t, s, l, callback):
    """Perform one announce to tracker *t* with query parameters *s*.

    *l* is the generation token for self.lock: trip()/unwait() decide
    whether this attempt is still current; *callback* runs after the
    response is processed.  Errors land in self.errorcodes.

    NOTE(review): the source of this function was truncated after the
    bad-data branch, leaving the outer `try:` without a handler; the
    tail (failure-reason check, success path, outer except) has been
    restored to match the complete variant of this function elsewhere
    in this codebase -- confirm against upstream.  The Python-2-only
    `except X, e` spelling was also changed to `except X as e`.
    """
    try:
        closer = [None]

        def timedout(self=self, l=l, closer=closer):
            # Scheduled below; fires after self.timeout seconds and
            # aborts the in-flight HTTP request if still current.
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'Problem connecting to tracker - timeout exceeded'
                self.lock.unwait(l)
            try:
                closer[0]()
            except:
                pass
        self.externalsched(timedout, self.timeout)

        err = None
        # Splice s into t's existing query string, or start a new one.
        try:
            url, q = t.split('?', 1)
            q += '&' + s
        except:
            url = t
            q = s
        try:
            h = urlopen(url + '?' + q)
            closer[0] = h.close
            data = h.read()
        except (IOError, error) as e:
            err = 'Problem connecting to tracker - ' + str(e)
        except:
            err = 'Problem connecting to tracker'
        try:
            h.close()
        except:
            pass
        if err:
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = err
                self.lock.unwait(l)
            return
        if data == '':
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'no data from tracker'
                self.lock.unwait(l)
            return
        try:
            r = bdecode(data, sloppy=1)
            check_peers(r)
        except ValueError as e:
            if self.lock.trip(l):
                self.errorcodes['bad_data'] = 'bad data from tracker - ' + str(e)
                self.lock.unwait(l)
            return
        if 'failure reason' in r:
            if self.lock.trip(l):
                self.errorcodes['rejected'] = self.rejectedmessage + r['failure reason']
                self.lock.unwait(l)
            return
        if self.lock.trip(l, True):
            # success!
            self.lock.unwait(l)
        else:
            # attempt timed out, don't do a callback
            callback = lambda: None

        # even if the attempt timed out, go ahead and process data
        def add(self=self, r=r, callback=callback):
            self.postrequest(r, callback)
        self.externalsched(add)
    except:
        self.exception(callback)
def __perform_search(self, url, engine, callback, keyword): try: answer = urlopen(url, encoding = None) if answer: xmlHandler = XMLHandler(engine) parseString(answer.read(), xmlHandler) callback(xmlHandler.data, keyword, engine) except (IOError, ValueError, SAXParseException), e: print url, e wx.LogDebug(str(e))
def _rerequest_single(self, t, s, l, callback):
    """One announce attempt against tracker *t* with query params *s*.

    NOTE(review): the source was cut off after the bad-data branch,
    leaving the outer `try:` unterminated; the missing tail
    (failure-reason check, success handling, outer exception handler)
    has been restored from the complete variant of this function in this
    codebase -- verify against upstream.  `except X, e` was also updated
    to `except X as e`.
    """
    try:
        closer = [None]

        def timedout(self=self, l=l, closer=closer):
            # Fires after self.timeout; aborts the request if still current.
            if self.lock.trip(l):
                self.errorcodes["troublecode"] = "Problem connecting to tracker - timeout exceeded"
                self.lock.unwait(l)
            try:
                closer[0]()
            except:
                pass
        self.externalsched(timedout, self.timeout)

        err = None
        # Merge s into any query string already present in t.
        try:
            url, q = t.split("?", 1)
            q += "&" + s
        except:
            url = t
            q = s
        try:
            h = urlopen(url + "?" + q)
            closer[0] = h.close
            data = h.read()
        except (IOError, error) as e:
            err = "Problem connecting to tracker - " + str(e)
        except:
            err = "Problem connecting to tracker"
        try:
            h.close()
        except:
            pass
        if err:
            if self.lock.trip(l):
                self.errorcodes["troublecode"] = err
                self.lock.unwait(l)
            return
        if data == "":
            if self.lock.trip(l):
                self.errorcodes["troublecode"] = "no data from tracker"
                self.lock.unwait(l)
            return
        try:
            r = bdecode(data, sloppy=1)
            check_peers(r)
        except ValueError as e:
            if self.lock.trip(l):
                self.errorcodes["bad_data"] = "bad data from tracker - " + str(e)
                self.lock.unwait(l)
            return
        if "failure reason" in r:
            if self.lock.trip(l):
                self.errorcodes["rejected"] = self.rejectedmessage + r["failure reason"]
                self.lock.unwait(l)
            return
        if self.lock.trip(l, True):
            # success!
            self.lock.unwait(l)
        else:
            # attempt timed out; suppress the caller's callback
            callback = lambda: None

        # process the data even if the attempt timed out
        def add(self=self, r=r, callback=callback):
            self.postrequest(r, callback)
        self.externalsched(add)
    except:
        self.exception(callback)
def _rerequest_single(self, t, s, l, callback):
    """One announce attempt; this variant concatenates t+s directly.

    NOTE(review): the source was truncated after the bad-data branch
    (dangling outer `try:`); the tail has been restored from the
    complete variant of this function in this codebase -- confirm
    against upstream.  `except X, e` was also updated to `except X as e`.
    """
    try:
        closer = [None]

        def timedout(self = self, l = l, closer = closer):
            # Fires after self.timeout; aborts the request if still current.
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'Problem connecting to tracker - timeout exceeded'
                self.lock.unwait(l)
            try:
                closer[0]()
            except:
                pass
        self.externalsched(timedout, self.timeout)

        err = None
        try:
            # this variant assumes s already starts with the separator
            h = urlopen(t+s)
            closer[0] = h.close
            data = h.read()
        except (IOError, error) as e:
            err = 'Problem connecting to tracker - ' + str(e)
        except:
            err = 'Problem connecting to tracker'
        try:
            h.close()
        except:
            pass
        if err:
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = err
                self.lock.unwait(l)
            return
        if data == '':
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'no data from tracker'
                self.lock.unwait(l)
            return
        try:
            r = bdecode(data, sloppy=1)
            check_peers(r)
        except ValueError as e:
            if self.lock.trip(l):
                self.errorcodes['bad_data'] = 'bad data from tracker - ' + str(e)
                self.lock.unwait(l)
            return
        if 'failure reason' in r:
            if self.lock.trip(l):
                self.errorcodes['rejected'] = self.rejectedmessage + r['failure reason']
                self.lock.unwait(l)
            return
        if self.lock.trip(l, True):
            # success!
            self.lock.unwait(l)
        else:
            # attempt timed out, don't do a callback
            callback = lambda: None

        # even if the attempt timed out, go ahead and process data
        def add(self=self, r=r, callback=callback):
            self.postrequest(r, callback)
        self.externalsched(add)
    except:
        self.exception(callback)
def getTorrentFromURL(url):
    """Download the torrent file behind *url* and return its raw bytes.

    The URL's path component is re-quoted before the request; any
    failure yields None.
    """
    raw = None
    handle = None
    try:
        scheme, netloc, path, query, fragment = urlsplit(url)
        fetch_url = urlunsplit([scheme, netloc, quote(unquote(path)), query, fragment])
        handle = urlopen(fetch_url)
        raw = handle.read()
        handle.close()
    except:
        # best effort: make sure the connection is released
        if handle is not None:
            try:
                handle.close()
            except:
                pass
    return raw
def get_metainfo(src, openoptions = 'rb', style = "file", cookies = None):
    """Return the bdecoded (sloppy) metainfo for a torrent.

    *style* selects the source: "rawdata" (bencoded bytes), "url"
    (fetched with the cookie-aware urlopen) or "file" (local path).
    Returns None when src is None or on any read/decode failure.
    """
    if src is None:
        return None

    result = None
    stream = None
    try:
        if style == "rawdata":
            # src is already the bencoded payload
            return bdecode(src)
        if style == "url":
            # Re-quote the path component before opening the URL;
            # fall back to the raw src if that fails.
            parts = urlsplit(src)
            try:
                target = urlunsplit([parts[0], parts[1], quote(unquote(parts[2])), parts[3], parts[4]])
            except:
                target = src
            stream = urlopen(target, encoding = None, cookies = cookies)
        elif os.access(src, os.R_OK):
            # Local file that exists and is readable.
            stream = open(src, openoptions)
        if stream is not None:
            result = bdecode(stream.read(), sloppy = 1)
            stream.close()
    except:
        # any failure yields None; release the handle if we hold one
        if stream is not None:
            try:
                stream.close()
            except:
                pass
        result = None
    return result
def __init__(self, parent):
    """Build the latest-version dialog from the project's version file.

    Fetches lastest_version.txt and renders it as simple HTML; on any
    failure the localized 'cantconnectwebserver' message is shown instead.
    """
    self.parent = parent
    self.utility = parent.utility
    content = ""
    try:
        h = urlopen("http://pingpong-abc.sourceforge.net/lastest_version.txt")
        lines = h.read()
        h.close()
        content += "<FONT SIZE=-1>"
        # join() instead of repeated += in a loop (idiomatic, same output)
        content += "".join("<BR>" + line + "\n" for line in lines.split("\n"))
        content += "</FONT>"
    except:
        content = self.utility.lang.get("cantconnectwebserver")
    title = self.utility.lang.get("abclatestversion")
    MyHtmlDialog.__init__(self, parent, title, content)
def __init__(self, parent):
    """Show the latest-version dialog, filled from the project web site."""
    self.parent = parent
    self.utility = parent.utility
    content = ""
    try:
        handle = urlopen(
            'http://pingpong-abc.sourceforge.net/lastest_version.txt')
        body = handle.read()
        handle.close()
        # Render each line of the file as an HTML <BR> row.
        content += "<FONT SIZE=-1>"
        for row in body.split('\n'):
            content += "<BR>" + row + "\n"
        content += "</FONT>"
    except:
        # fall back to the localized "can't connect" message
        content = self.utility.lang.get('cantconnectwebserver')
    title = self.utility.lang.get('abclatestversion')
    MyHtmlDialog.__init__(self, parent, title, content)
def getTorrentFromURL(url):
    # Copy file from web and call addnewproc
    #########################################
    """Fetch the .torrent data behind *url*; returns None on any failure."""
    contents = None
    connection = None
    try:
        pieces = urlsplit(url)
        safe_path = quote(unquote(pieces[2]))
        connection = urlopen(
            urlunsplit([pieces[0], pieces[1], safe_path, pieces[3], pieces[4]]))
        contents = connection.read()
        connection.close()
    except:
        try:
            connection.close()
        except:
            # connection may be None / already closed -- ignore
            pass
    return contents
# NOTE(review): this chunk begins mid-method -- the leading `return` /
# `except` below belong to a try block whose opening is not visible
# here; presumably this is the link-scanning tail of the HTML-scan
# method (cf. GetTheData).  Code tokens left untouched.
            return
        except Exception, e:
            self.failed(str(e))
            return
        # if user called stop: Stop!
        if self.stopall:
            self.failed(_("User stopped HTML scan"))
            return
        #if there aren't any- check the Content-Type
        for link in self.parser.allLinks:
            if self.stopall:
                break
            try:
                answer = urlopen(link, encoding = None, cookies = self.cookies)
                if answer:
                    contenttype = answer.getHeader("Content-Type")
                    if "application/x-bittorrent" in contenttype:
                        # Found a torrent link; report it, and stop here
                        # when only a single result was requested.
                        self.success(answer)
                        if self.single:
                            return
            except Exception, e:
                self.failed(str(e))
                return
        self.failed(_("Couldn't find .torrent links"))

    def failed(self, message):
        # Report a scan failure back to the owning parent object.
        self.parent.HtmlTorrentScannerFailed(self.article, message)
def _rerequest_single(self, t, s, l, callback):
    """Perform a single announce attempt against tracker *t*.

    t        -- tracker announce URL (may already contain a query string)
    s        -- announce parameters spliced into the query string
    l        -- generation token for self.lock; trip()/unwait() decide
                whether this attempt is still the current one
    callback -- invoked after the response has been processed

    Errors are recorded in self.errorcodes rather than raised.
    """
    try:
        closer = [None]
        def timedout(self=self, l=l, closer=closer):
            # Scheduled below; fires after self.timeout seconds and
            # aborts the in-flight HTTP request if still current.
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'Problem connecting to ' \
                    'tracker - timeout exceeded'
                self.lock.unwait(l)
            try:
                closer[0]()   # close the in-flight connection, if any
            except:
                pass
        self.externalsched(timedout, self.timeout)
        err = None
        # Splice s into t's existing query string, or start one.
        try:
            url, q = t.split('?', 1)
            q += '&' + s
        except:
            url = t
            q = s
        try:
            h = urlopen(url + '?' + q)
            closer[0] = h.close
            data = h.read()
        except (IOError, error) as e:
            err = 'Problem connecting to tracker - ' + str(e)
        except:
            err = 'Problem connecting to tracker'
        try:
            h.close()
        except:
            pass
        if err:
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = err
                self.lock.unwait(l)
            return
        if data == '':
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'no data from tracker'
                self.lock.unwait(l)
            return
        try:
            r = bdecode(data, sloppy=1)
            check_peers(r)
        except ValueError as e:
            if self.lock.trip(l):
                self.errorcodes['bad_data'] = 'bad data from tracker - ' \
                    + str(e)
                self.lock.unwait(l)
            return
        if 'failure reason' in r:
            if self.lock.trip(l):
                self.errorcodes['rejected'] = self.rejectedmessage + \
                    r['failure reason']
                self.lock.unwait(l)
            return
        if self.lock.trip(l, True):
            # success!
            self.lock.unwait(l)
        else:
            # attempt timed out, don't do a callback
            callback = lambda: None
        # even if the attempt timed out, go ahead and process data
        def add(self=self, r=r, callback=callback):
            self.postrequest(r, callback)
        self.externalsched(add)
    except:
        self.exception(callback)
def _rerequest_single(self, t, s, l, callback):
    """One announce attempt: fetch t?s, bdecode the reply, record errors.

    The lock token *l* guards against a timed-out attempt racing a late
    response: every outcome first checks self.lock.trip(l) before
    touching self.errorcodes.  *callback* runs after postrequest unless
    the attempt timed out, in which case it is replaced with a no-op.
    """
    try:
        closer = [None]
        def timedout(self=self, l=l, closer=closer):
            # Timeout watchdog, scheduled via externalsched below.
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'Problem connecting to ' \
                    'tracker - timeout exceeded'
                self.lock.unwait(l)
            try:
                closer[0]()   # abort any open connection
            except:
                pass
        self.externalsched(timedout, self.timeout)
        err = None
        # Append s to an existing query string, or start a fresh one.
        try:
            url, q = t.split('?', 1)
            q += '&' + s
        except:
            url = t
            q = s
        try:
            h = urlopen(url + '?' + q)
            closer[0] = h.close
            data = h.read()
        except (IOError, error) as e:
            err = 'Problem connecting to tracker - ' + str(e)
        except:
            err = 'Problem connecting to tracker'
        try:
            h.close()
        except:
            pass
        if err:
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = err
                self.lock.unwait(l)
            return
        if data == '':
            if self.lock.trip(l):
                self.errorcodes['troublecode'] = 'no data from tracker'
                self.lock.unwait(l)
            return
        try:
            r = bdecode(data, sloppy=1)
            check_peers(r)
        except ValueError as e:
            if self.lock.trip(l):
                self.errorcodes['bad_data'] = 'bad data from tracker - ' \
                    + str(e)
                self.lock.unwait(l)
            return
        if 'failure reason' in r:
            if self.lock.trip(l):
                self.errorcodes['rejected'] = self.rejectedmessage + \
                    r['failure reason']
                self.lock.unwait(l)
            return
        if self.lock.trip(l, True):
            # success!
            self.lock.unwait(l)
        else:
            # attempt timed out, don't do a callback
            callback = lambda: None
        # even if the attempt timed out, go ahead and process data
        def add(self=self, r=r, callback=callback):
            self.postrequest(r, callback)
        self.externalsched(add)
    except:
        self.exception(callback)