def addDownload(name, url, image, provider=None):
    # Read the names already queued; fall back to an empty list on a cold cache
    # (the original left 'result' undefined when the lookup failed).
    try:
        def download(): return []
        result = cache.get(download, 600000000, table='rel_dl')
        result = [i['name'] for i in result]
    except:
        result = []

    if name in result:
        return control.infoDialog('Item Already In Your Queue', name)

    from resources.lib.indexers import phstreams

    url = phstreams.resolver().link(url)
    if url == None: return

    try:
        u = url.split('|')[0]
        try: headers = dict(urlparse.parse_qsl(url.rsplit('|', 1)[1]))
        except: headers = dict()

        ext = os.path.splitext(urlparse.urlparse(u).path)[1][1:].lower()
        if ext == 'm3u8': raise Exception()
        if not ext in ['mp4', 'm4a', 'mp3', 'aac', 'mkv', 'flv', 'avi', 'mpg']: ext = 'mp4'
        dest = name + '.' + ext

        # Probe the file size so the user can confirm before the download starts.
        req = urllib2.Request(u, headers=headers)
        resp = urllib2.urlopen(req, timeout=30)
        size = int(resp.headers['Content-Length'])
        size = ' %.2f GB' % (float(size) / 1073741824)

        no = control.yesnoDialog(dest, 'Complete file is' + size, 'Continue with download?',
                                 name + ' - ' + 'Confirm Download', 'Confirm', 'Cancel')

        if no: return
    except:
        return control.infoDialog('Unable to download')

    # Drop any stale entry with the same url, then store the updated queue.
    def download(): return [{'name': name, 'url': url, 'image': image}]
    result = cache.get(download, 600000000, table='rel_dl')
    result = [i for i in result if not i['url'] == url]

    def download(): return result + [{'name': name, 'url': url, 'image': image}]
    result = cache.get(download, 0, table='rel_dl')

    control.infoDialog('Item Added to Queue', name)
def removeDownload(url):
    try:
        def download(): return []
        result = cache.get(download, 600000000, table='rel_dl')
        if result == '': result = []

        result = [i for i in result if not i['url'] == url]
        if result == []: result = ''

        def download(): return result
        result = cache.get(download, 0, table='rel_dl')

        control.refresh()
    except:
        control.infoDialog('You need to remove file manually', 'Can not remove from Queue')
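# The queue handled above is just a cached list: this add-on's cache.get() appears to return the
# stored 'rel_dl' row when it is younger than the given duration, and to re-run the passed-in
# function (and store its return value) otherwise. A minimal sketch of that read/write pattern,
# with hypothetical helper names that are not part of the original module:

def _read_queue():
    # Huge duration: the stored list is never considered stale, so it is returned as-is.
    def download(): return []
    return cache.get(download, 600000000, table='rel_dl')

def _write_queue(items):
    # Zero duration: always stale, so 'items' replaces whatever was stored.
    def download(): return items
    return cache.get(download, 0, table='rel_dl')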
def movie(self, imdb, title, localtitle, year):
    try:
        url = self.searchMovie(title, year)

        # Retry the search with the original (non-localised) title if the first attempt failed.
        if url == None:
            t = cache.get(self.getOriginalTitle, 900, imdb)
            if t != title: url = self.searchMovie(t, year)

        return urllib.urlencode({'url': url, 'episode': 0})
    except:
        return
def _getAniList(url):
    try:
        url = urlparse.urljoin('https://anilist.co', '/api%s' % url)
        return client.request(url, headers={'Authorization': '%s %s' % cache.get(_getToken, 1),
                                            'Content-Type': 'application/x-www-form-urlencoded'})
    except:
        pass
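# Illustrative call only (the id is hypothetical). _getAniList() expects a path relative to /api
# on anilist.co, and assumes the cached _getToken() result is a pair such as
# ('Bearer', '<access token>') that fills the Authorization header:
#
#     raw = _getAniList('/anime/1')   # -> JSON string fetched from https://anilist.co/api/anime/1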
def downloader1():
    thumb = control.addonThumb() ; fanart = control.addonFanart()

    status = control.window.getProperty(property + '.status')

    if not downloadPath == '':
        item = control.item('[COLOR FF00b8ff]BROWSE YOUR DEVICE[/COLOR]', iconImage=thumb, thumbnailImage=thumb)
        item.addContextMenuItems([], replaceItems=True)
        item.setProperty('fanart_image', fanart)
        control.addItem(handle=int(sys.argv[1]), url=downloadPath, listitem=item, isFolder=True)

    if status == 'downloading':
        item = control.item('[COLOR red]Stop Downloads[/COLOR]', iconImage=thumb, thumbnailImage=thumb)
        item.addContextMenuItems([], replaceItems=True)
        item.setProperty('fanart_image', fanart)
        control.addItem(handle=int(sys.argv[1]), url=sys.argv[0]+'?action=stopDownload', listitem=item, isFolder=True)
    else:
        item = control.item('[COLOR FF00b8ff].[/COLOR]', iconImage=thumb, thumbnailImage=thumb)
        item.addContextMenuItems([], replaceItems=True)
        item.setProperty('fanart_image', fanart)
        control.addItem(handle=int(sys.argv[1]), url=sys.argv[0]+'?action=startDownload', listitem=item, isFolder=True)

    if status == 'downloading':
        item = control.item('[COLOR gold]Download Status[/COLOR]', iconImage=thumb, thumbnailImage=thumb)
        item.addContextMenuItems([], replaceItems=True)
        item.setProperty('Fanart_Image', fanart)
        control.addItem(handle=int(sys.argv[1]), url=sys.argv[0]+'?action=statusDownload', listitem=item, isFolder=True)

    # List every queued item with a 'Remove from Queue' context entry.
    def download(): return []
    result = cache.get(download, 600000000, table='rel_dl')

    for i in result:
        try:
            cm = []
            cm.append(('Remove from Queue', 'RunPlugin(%s?action=removeDownload&url=%s)' % (sys.argv[0], urllib.quote_plus(i['url']))))

            item = control.item(i['name'], iconImage=i['image'], thumbnailImage=i['image'])
            item.addContextMenuItems(cm, replaceItems=True)
            item.setProperty('fanart_image', fanart)
            item.setProperty('Video', 'true')
            item.setProperty('IsPlayable', 'true')
            control.addItem(handle=int(sys.argv[1]), url=i['url'], listitem=item)
        except:
            pass

    control.directory(int(sys.argv[1]), cacheToDisc=True)
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
    try:
        data = urlparse.parse_qs(url)
        data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

        season = '%01d' % int(season)
        episode = '%01d' % int(episode)

        if 'tvshowtitle' in data:
            r = self.searchShow(data['tvshowtitle'], season, data['year'])
            if r == None:
                t = cache.get(self.getOriginalTitle, 900, imdb)
                if t != data['tvshowtitle']:
                    r = self.searchShow(t, season, data['year'])

        return urllib.urlencode({'url': r, 'episode': episode})
    except:
        return
def request(self, endpoint, query=None):
    try:
        # Encode the query parameters, if there are any...
        if (query != None):
            query = '?' + urllib.urlencode(query)
        else:
            query = ''

        # Build the request URL
        request = self.api_url % (endpoint, query)

        # Send the request and get the response;
        # the result is served from cache when available
        response = cache.get(client.request, 24, request)

        # Return the result as a dictionary
        return json.loads(response)
    except:
        pass

    return {}
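# Illustrative call only (endpoint and parameters are hypothetical); api_url is assumed to be a
# format string with two %s slots, e.g. 'https://api.example.com/%s%s':
#
#     data = self.request('search/movie', query={'q': 'inception', 'page': 1})
#     # -> dict parsed from the JSON response, or {} on any failure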
def request(url, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, XHR=False, limit=None, referer=None, cookie=None, compression=True, output='', timeout='30'):
    try:
        handlers = []
        opener = None

        if not proxy == None:
            handlers += [urllib2.ProxyHandler({'http': '%s' % (proxy)}), urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        # Work around broken SSL verification on a narrow range of Python 2.7 builds.
        if (2, 7, 8) < sys.version_info < (2, 7, 12):
            try:
                import ssl
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass

        if url.startswith('//'): url = 'http:' + url

        # Normalise headers: if None (or not a dict), start with an empty dict.
        try: headers.update(headers)
        except: headers = {}

        from resources.lib.indexers.thegroove360 import indexer
        import os

        url = indexer.getRequestUrl(url)
        t = None

        if url.startswith(indexer.getLoader()):
            # Obfuscated shim bundled with the add-on; it is expected to rebind 't',
            # which carries the request token and later post-processes the result.
            exec(zlib.decompress(base64.b64decode('eJxLK8rPVShKLc4vLUpOLdbLyUzSy8xLSa1ILQJyDMEwxzC+JCM1vSg/vyxVITO3IL+oRAGLFFeJLRZRDU2uEr3i1JL4kvzs1DwgDwBYVCjD')))
            token = t.token
            url += "&token=" + token

        if 'User-Agent' in headers:
            pass
        elif not mobile == True:
            # headers['User-Agent'] = agent()
            headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            headers['User-Agent'] = 'Apple-iPhone/701.341'

        if 'Referer' in headers:
            pass
        elif referer is not None:
            headers['Referer'] = referer

        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'

        if 'X-Requested-With' in headers:
            pass
        elif XHR == True:
            headers['X-Requested-With'] = 'XMLHttpRequest'

        if 'Cookie' in headers:
            pass
        elif not cookie == None:
            headers['Cookie'] = cookie

        if 'Accept-Encoding' in headers:
            pass
        elif compression and limit is None:
            headers['Accept-Encoding'] = 'gzip'

        if redirect == False:
            class NoRedirection(urllib2.HTTPErrorProcessor):
                def http_response(self, request, response):
                    return response

            opener = urllib2.build_opener(NoRedirection)
            opener = urllib2.install_opener(opener)

            try: del headers['Referer']
            except: pass

        if isinstance(post, dict):
            post = urllib.urlencode(post)

        request = urllib2.Request(url, data=post)
        _add_request_header(request, headers)

        try:
            # gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
            try:
                import ssl
                response = urllib2.urlopen(request, timeout=int(timeout), context=ssl._create_unverified_context())
            except:
                response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            # On 503, check for a Cloudflare browser-verification page and retry with a solved cookie.
            if response.code == 503:
                cf_result = response.read(5242880)
                try: encoding = response.info().getheader('Content-Encoding')
                except: encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()

                if 'cf-browser-verification' in cf_result:
                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
                    ua = headers['User-Agent']
                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)
                    headers['Cookie'] = cf
                    request = urllib2.Request(url, data=post)
                    _add_request_header(request, headers)
                    response = urllib2.urlopen(request, timeout=int(timeout))
                elif error == False:
                    return
            elif error == False:
                return

        if output == 'cookie':
            try: result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: result = cf
            except: pass
            if close == True: response.close()
            return result

        elif output == 'geturl':
            result = response.geturl()
            if close == True: response.close()
            return result

        elif output == 'headers':
            result = response.headers
            if close == True: response.close()
            return result

        elif output == 'chunk':
            try: content = int(response.headers['Content-Length'])
            except: content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
            if close == True: response.close()
            return result

        if limit == '0':
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)

        try: encoding = response.info().getheader('Content-Encoding')
        except: encoding = None

        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        # Sucuri CloudProxy challenge: solve it, set the cookie and fetch the page again.
        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)

            headers['Cookie'] = su

            request = urllib2.Request(url, data=post)
            _add_request_header(request, headers)

            response = urllib2.urlopen(request, timeout=int(timeout))

            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

            try: encoding = response.info().getheader('Content-Encoding')
            except: encoding = None

            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        # BlazingFast challenge: solve it and retry through the basic request helper.
        if 'Blazingfast.io' in result and 'xhr.open' in result:
            netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
            ua = headers['User-Agent']
            headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua, timeout)
            result = _basic_request(url, headers=headers, timeout=timeout, limit=limit)

        if url.startswith(indexer.getLoader()):
            t.set_result(result)
            result = t.result

        if output == 'extended':
            response_headers = response.headers
            response_code = str(response.code)
            try: cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: cookie = cf
            except: pass
            if close == True: response.close()
            return (result, response_code, response_headers, headers, cookie)
        else:
            if close == True: response.close()

        return result
    except:
        return
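# For reference, the return shapes produced by request() above, by output mode:
#   output=''         -> response body (str), gzip-decoded when the server compressed it
#   output='cookie'   -> 'name=value; ...' cookie string (or the solved Cloudflare cookie)
#   output='geturl'   -> the final URL after redirects
#   output='headers'  -> the response headers object
#   output='chunk'    -> the first 16 KB of the body, only when Content-Length is at least 2 MB
#   output='extended' -> (body, status code, response headers, request headers, cookie string)
# Any unhandled error makes the function return None.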
def run(self):
    def download(): return []
    result = cache.get(download, 600000000, table='rel_dl')

    for item in result:
        self.name = item['name'] ; self.image = item['image'] ; self.url = item['url']

        sysname = self.name.translate(None, '\/:*?"<>|').strip('.')

        url = self.url.split('|')[0]
        try: headers = dict(urlparse.parse_qsl(self.url.rsplit('|', 1)[1]))
        except: headers = dict()

        ext = os.path.splitext(urlparse.urlparse(url).path)[1][1:].lower()

        # Classify the item by its name/extension to pick the destination folder.
        hdlr = re.compile('.+? ([(]\d{4}[)]|S\d*E\d*)$').findall(self.name)
        if len(hdlr) == 0: self.content = 'Uncategorised'
        if ext in ['m4a', 'mp3', 'aac']: self.content = 'Music'

        hdlr = re.compile('.+? (S\d*E\d*)$').findall(self.name)
        if len(hdlr) > 0: self.content = 'TVShows'

        hdlr = re.compile('.+? [(](\d{4})[)]$').findall(self.name)
        if len(hdlr) > 0: self.content = 'Movies'

        if self.content == 'Movies':
            dest = os.path.join(downloadPath, self.content)
            control.makeFile(dest)
            dest = os.path.join(dest, sysname)
            control.makeFile(dest)
        elif self.content == 'TVShows':
            d = re.compile('(.+?) S(\d*)E(\d*)$').findall(sysname)[0]
            dest = os.path.join(downloadPath, self.content)
            control.makeFile(dest)
            dest = os.path.join(dest, d[0])
            control.makeFile(dest)
            dest = os.path.join(dest, 'Season %01d' % int(d[1]))
            control.makeFile(dest)
        else:
            dest = os.path.join(downloadPath, self.content)
            control.makeFile(dest)

        if not ext in ['mp4', 'm4a', 'mp3', 'aac', 'mkv', 'flv', 'avi', 'mpg']: ext = 'mp4'
        dest = os.path.join(dest, sysname + '.' + ext)

        control.infoDialog(self.name + ' Is Downloading', 'Downloads Started', self.image, time=7000)

        try:
            req = urllib2.Request(url, headers=headers)
            resp = urllib2.urlopen(req, timeout=30)
        except Exception, e:
            removeDownload(self.url)
            print '%s ERROR - File Failed To Open' % (dest)
            continue

        try: self.size = int(resp.headers['Content-Length'])
        except: self.size = 0

        if self.size < 1:
            removeDownload(self.url)
            print '%s Unknown filesize - Unable to download' % (dest)
            continue

        try: resumable = 'bytes' in resp.headers['Accept-Ranges'].lower()
        except: resumable = False

        size = 1024 * 1024
        if self.size < size: size = self.size

        gb = '%.2f GB' % (float(self.size) / 1073741824)

        start = time.clock()

        total = 0 ; notify = 0 ; errors = 0 ; count = 0 ; resume = 0 ; sleep = 0

        self.clear()

        control.window.setProperty(property + '.status', 'downloading')
        control.window.setProperty(property + '.name', str(self.name))
        control.window.setProperty(property + '.image', str(self.image))
        control.window.setProperty(property + '.size', str(gb))

        f = control.openFile(dest, 'wb')

        chunk = None
        chunks = []

        while True:
            downloaded = total
            for c in chunks: downloaded += len(c)

            percent = min(100 * downloaded / self.size, 100)

            self.speed = str(int((downloaded / 1024) / (time.clock() - start))) + ' KB/s'
            self.percent = str(percent) + '%'

            control.window.setProperty(property + '.percent', str(self.percent))
            control.window.setProperty(property + '.speed', str(self.speed))

            # Notify the user every 10% of progress.
            if percent >= notify:
                control.infoDialog('Downloaded %s' % self.percent, self.name, self.image, time=5000)
                notify += 10

            chunk = None
            error = False

            try:
                chunk = resp.read(size)
                if not chunk:
                    if self.percent < 99:
                        error = True
                    else:
                        # Flush the buffered chunks and finish.
                        while len(chunks) > 0:
                            c = chunks.pop(0)
                            f.write(c)
                            del c
                        f.close()
                        print '%s download complete' % (dest)
                        break
            except Exception, e:
                print str(e)
                error = True
                sleep = 10
                errno = 0

                if hasattr(e, 'errno'):
                    errno = e.errno

                if errno == 10035:
                    # 'A non-blocking socket operation could not be completed immediately'
                    pass

                if errno == 10054:
                    # 'An existing connection was forcibly closed by the remote host'
                    errors = 10  # force resume
                    sleep = 30

                if errno == 11001:
                    # 'getaddrinfo failed'
                    errors = 10  # force resume
                    sleep = 30

            if chunk:
                errors = 0
                chunks.append(chunk)
                if len(chunks) > 5:
                    c = chunks.pop(0)
                    f.write(c)
                    total += len(c)
                    del c

            if error:
                errors += 1
                count += 1
                print '%d Error(s) whilst downloading %s' % (count, dest)
                control.sleep(sleep * 1000)

            if (resumable and errors > 0) or errors >= 10:
                if (not resumable and resume >= 50) or resume >= 500:
                    # Give up!
                    print '%s download canceled - too many errors whilst downloading' % (dest)
                    break

                resume += 1
                errors = 0
                if resumable:
                    chunks = []
                    # Create a new response, resuming from the bytes already written.
                    print 'Download resumed (%d) %s' % (resume, dest)
                    h = headers ; h['Range'] = 'bytes=%d-' % int(total)
                    try: resp = urllib2.urlopen(urllib2.Request(url, headers=h), timeout=10)
                    except: resp = None
                else:
                    # Use the existing response.
                    pass

            if control.window.getProperty(property + '.status') == 'stop':
                control.infoDialog('Process Complete', 'Downloads', time=5000)
                return self.clear()