Code example #1
0
 def sky_list(self, group=None):
     """
     Populate self.items with (name, title, year) tuples for programmes
     currently listed in the Sky TV guide, optionally restricted to a
     single channel group. Returns True when at least one item was
     collected, False on any failure (logged).
     """
     try:
         channels = []
         if group:
             channels = self.channels[group]
         else:
             # No group given: flatten every group's channel-id list.
             # (Python 2 itervalues.)
             for i in self.channels.itervalues():
                 channels.extend(i)
         # Guide URL takes a comma-joined channel-id list and the current
         # UK local time formatted as YYYYMMDDHHMM.
         url = self.sky_programme_link % (','.join(channels), (
             self.uk_datetime).strftime('%Y%m%d%H%M'))
         result = cache.Cache().cacheMini(client.request, url)
         result = json.loads(result)['channels']
         for i in result:
             try:
                 # Skip listings whose description looks like a TV episode
                 # ("season", "episode", or SxxEyy patterns).
                 if bool(
                         re.search('(season|episode|s\s*\d+.*(e|ep)\s*\d+)',
                                   i['program']['shortDesc'],
                                   re.IGNORECASE)):
                     continue
             except:
                 # Missing/odd shortDesc: keep the entry rather than drop it.
                 pass
             name = self.nameClean(name=i['title'], group=group).upper()
             title = client.replaceHTMLCodes(
                 i['program']['title'].strip()).encode('utf-8')
             try:
                 # The release year only appears inside the description,
                 # e.g. "(2017)". None when absent.
                 year = int(
                     re.findall('[(](\d{4})[)]', i['program']['shortDesc'])
                     [0].strip().encode('utf-8'))
             except:
                 year = None
             self.items.append((name, title, year))
         return len(self.items) > 0
     except:
         tools.Logger.error()
         return False
Code example #2
0
 def servicesList(self, onlyEnabled=False):
     """
     Return the lowercased list of hoster domains supported by RPNet,
     or an empty list when the account is invalid, hosters are disabled
     (with onlyEnabled), or the request fails (logged).
     """
     supported = []
     try:
         wanted = (not onlyEnabled) or self.streamingHoster()
         if wanted and self.accountValid():
             url = 'http://premium.rpnet.biz/hoster2.json'
             data = cache.Cache().cacheMedium(client.request, url)
             data = tools.Converter.jsonFrom(data)
             supported = [host.lower() for host in data['supported']]
     except:
         tools.Logger.error()
     return supported
Code example #3
0
 def servicesList(self, onlyEnabled=False):
     """
     Return AllDebrid's supported hoster domains (including alternative
     domains), lowercased and deduplicated. Returns an empty list when
     the account is invalid or the request fails (logged).
     """
     hosts = []
     try:
         wanted = (not onlyEnabled) or self.streamingHoster()
         if wanted and self.accountValid():
             url = 'https://api.alldebrid.com/hosts'
             data = cache.Cache().cacheMedium(client.request, url)
             data = tools.Converter.jsonFrom(data)
             hosts = []
             for entry in data['hosts']:
                 if entry['status']:
                     hosts.append(entry['domain'])
                     try:
                         hosts.extend(entry['altDomains'])
                     except:
                         pass  # Not every host advertises altDomains.
             return list(set([domain.lower() for domain in hosts]))
     except:
         tools.Logger.error()
     return hosts
Code example #4
0
 def sky_channels(self):
     """
     Populate self.channels with, per group, the Sky channel ids whose
     genre matches and whose title starts with the group name. Returns
     True when any group was populated, False on failure (logged).
     """
     try:
         result = cache.Cache().cacheLong(client.request,
                                          self.sky_channels_link)
         result = json.loads(result)['channels']
         for i in self.groups:
             self.channels[i] = []
             names = []
             for j in result:
                 # NOTE: 'generes' is the attribute's spelling as declared
                 # elsewhere in this class; kept as-is.
                 if j['genre'] in self.generes:
                     name = j['title'].lower()
                     if name.startswith(i):
                         name = self.nameClean(name=name,
                                               group=i.replace(' ', ''))
                         if name not in names:
                             # Record both spaced and unspaced variants so
                             # "sky one" and "skyone" are not duplicated.
                             names.append(name)
                             names.append(name.replace(' ', ''))
                             self.channels[i].append(j['channelid'])
         return len(self.channels) > 0
     except:
         tools.Logger.error()
         return False
Code example #5
0
	def _accountUsageBytes(self, text):
		# Extract the raw byte count from a usage cell formatted like
		# "123.4 GB (132,456,789 bytes)". Returns 0 when no parenthesized
		# byte count is present.
		index = text.find('(')
		if index < 0: return 0
		return int(text[index + 1 : text.find(' ', index)].replace(',', '').strip())

	def account(self, cached = True, minimal = False):
		"""
		Scrape the provider's account page (and, unless minimal, the usage
		page) and return a dictionary with user/type/status/expiration and
		optional loyalty/usage statistics. Returns None on any failure.
			cached: serve the scraped HTML from the short-term cache.
			minimal: skip the usage/loyalty section for a faster result.
		"""
		account = None
		try:
			if self.accountValid():
				import datetime
				from resources.lib.externals.beautifulsoup import BeautifulSoup

				if cached: accountHtml = cache.Cache().cacheShort(self._request, Core.LinkAccount)
				else: accountHtml = cache.Cache().cacheClear(self._request, Core.LinkAccount)

				if accountHtml == None or accountHtml == '': raise Exception()

				# The account details live in fixed rows of the #accountForm table.
				accountHtml = BeautifulSoup(accountHtml)
				accountHtml = accountHtml.find_all('form', id = 'accountForm')[0]
				accountHtml = accountHtml.find_all('table', recursive = False)[0]
				accountHtml = accountHtml.find_all('tr', recursive = False)

				accountUsername = accountHtml[0].find_all('td', recursive = False)[1].getText()
				accountType = accountHtml[1].find_all('td', recursive = False)[2].getText()
				accountStatus = accountHtml[3].find_all('td', recursive = False)[2].getText()

				accountExpiration = accountHtml[2].find_all('td', recursive = False)[2].getText()
				accountTimestamp = convert.ConverterTime(accountExpiration, format = convert.ConverterTime.FormatDate).timestamp()
				accountExpiration = datetime.datetime.fromtimestamp(accountTimestamp)

				account = {
					'user' : accountUsername,
					'type' : accountType,
					'status' : accountStatus,
					'expiration' : {
						'timestamp' : accountTimestamp,
						'date' : accountExpiration.strftime('%Y-%m-%d'),
						'remaining' : (accountExpiration - datetime.datetime.today()).days,
					}
				}

				if not minimal:
					if cached: usageHtml = cache.Cache().cacheShort(self._request, Core.LinkUsage)
					else: usageHtml = cache.Cache().cacheClear(self._request, Core.LinkUsage)

					if usageHtml == None or usageHtml == '': raise Exception()

					usageHtml = BeautifulSoup(usageHtml)
					usageHtml = usageHtml.find_all('div', class_ = 'table-responsive')[0]
					usageHtml = usageHtml.find_all('table', recursive = False)[0]
					usageHtml = usageHtml.find_all('tr', recursive = False)

					# Each numeric row carries its exact byte count in parentheses.
					usageTotal = self._accountUsageBytes(usageHtml[0].find_all('td', recursive = False)[1].getText())
					usageConsumed = self._accountUsageBytes(usageHtml[1].find_all('td', recursive = False)[2].getText())
					usageWeb = self._accountUsageBytes(usageHtml[2].find_all('td', recursive = False)[2].getText())
					usageNntp = self._accountUsageBytes(usageHtml[3].find_all('td', recursive = False)[2].getText())
					usageNntpUnlimited = self._accountUsageBytes(usageHtml[4].find_all('td', recursive = False)[2].getText())
					usageRemaining = self._accountUsageBytes(usageHtml[5].find_all('td', recursive = False)[2].getText())

					# The loyalty row is formatted as "<date> (<points>)".
					usageLoyalty = usageHtml[6].find_all('td', recursive = False)[2].getText()
					index = usageLoyalty.find('(')
					if index >= 0:
						usageLoyaltyTime = usageLoyalty[:index].strip()
						usageLoyaltyTimestamp = convert.ConverterTime(usageLoyaltyTime, format = convert.ConverterTime.FormatDate).timestamp()
						usageLoyaltyTime = datetime.datetime.fromtimestamp(usageLoyaltyTimestamp)
						usageLoyaltyPoints = float(usageLoyalty[index + 1 : usageLoyalty.find(')', index)].strip())
					else:
						usageLoyaltyTimestamp = 0
						usageLoyaltyTime = None
						# Previously left unassigned here, raising a NameError
						# below and silently dropping the entire usage section.
						usageLoyaltyPoints = 0.0

					# Guard against a ZeroDivisionError when the total is 0/missing.
					usageDivisor = float(usageTotal) if usageTotal else 1.0
					usagePrecentageRemaining = usageRemaining / usageDivisor
					usagePrecentageConsumed = usageConsumed / usageDivisor
					usagePrecentageWeb = usageWeb / usageDivisor
					usagePrecentageNntp = usageNntp / usageDivisor
					usagePrecentageNntpUnlimited = usageNntpUnlimited / usageDivisor

					account.update({
						'loyalty' : {
							'time' : {
								'timestamp' : usageLoyaltyTimestamp,
								'date' : usageLoyaltyTime.strftime('%Y-%m-%d') if usageLoyaltyTime else None
							},
							'points' : usageLoyaltyPoints,
						},
						'usage' : {
							'total' : {
								'size' : {
									'bytes' : usageTotal,
									'description' : convert.ConverterSize(float(usageTotal)).stringOptimal(),
								},
							},
							'remaining' : {
								'value' : usagePrecentageRemaining,
								'percentage' : round(usagePrecentageRemaining * 100.0, 1),
								'size' : {
									'bytes' : usageRemaining,
									'description' : convert.ConverterSize(float(usageRemaining)).stringOptimal(),
								},
								'description' : '%.0f%%' % round(usagePrecentageRemaining * 100.0, 0), # Must round, otherwise 2.5% changes to 2% instead of 3%.
							},
							'consumed' : {
								'value' : usagePrecentageConsumed,
								'percentage' : round(usagePrecentageConsumed * 100.0, 1),
								'size' : {
									'bytes' : usageConsumed,
									'description' : convert.ConverterSize(usageConsumed).stringOptimal(),
								},
								'description' : '%.0f%%' % round(usagePrecentageConsumed * 100.0, 0), # Must round, otherwise 2.5% changes to 2% instead of 3%.
								'web' : {
									'value' : usagePrecentageWeb,
									'percentage' : round(usagePrecentageWeb * 100.0, 1),
									'size' : {
										'bytes' : usageWeb,
										'description' : convert.ConverterSize(usageWeb).stringOptimal(),
									},
									'description' : '%.0f%%' % round(usagePrecentageWeb * 100.0, 0), # Must round, otherwise 2.5% changes to 2% instead of 3%.
								},
								'nntp' : {
									'value' : usagePrecentageNntp,
									'percentage' : round(usagePrecentageNntp * 100.0, 1),
									'size' : {
										'bytes' : usageNntp,
										'description' : convert.ConverterSize(usageNntp).stringOptimal(),
									},
									'description' : '%.0f%%' % round(usagePrecentageNntp * 100.0, 0), # Must round, otherwise 2.5% changes to 2% instead of 3%.
								},
								'nntpunlimited' : {
									'value' : usagePrecentageNntpUnlimited,
									'percentage' : round(usagePrecentageNntpUnlimited * 100.0, 1),
									'size' : {
										'bytes' : usageNntpUnlimited,
										'description' : convert.ConverterSize(usageNntpUnlimited).stringOptimal(),
									},
									'description' : '%.0f%%' % round(usagePrecentageNntpUnlimited * 100.0, 0), # Must round, otherwise 2.5% changes to 2% instead of 3%.
								},
							}
						}
					})
		except:
			pass
		return account
Code example #6
0
def _getAniList(url):
	# Perform an authenticated GET against the AniList API for the given
	# path. The cached token is a (type, token) pair that is formatted
	# into the Authorization header. Returns None on failure.
	try:
		endpoint = urlparse.urljoin('https://anilist.co', '/api%s' % url)
		token = cache.Cache().cacheShort(_getAniToken)
		headers = {
			'Authorization': '%s %s' % token,
			'Content-Type': 'application/x-www-form-urlencoded',
		}
		return client.request(endpoint, headers=headers)
	except:
		pass
Code example #7
0
File: client.py  Project: gaiaorigin/gaiasource
def request(url, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, XHR=False, limit=None, referer=None, cookie=None, compression=True, output='', timeout='30', ignoreSsl = False, flare = True, ignoreErrors = None):
	"""
	Perform an HTTP(S) request via urllib2 with many convenience behaviors:
	default headers (random desktop or fixed mobile User-Agent), optional
	proxy, cookie capture, gzip decompression, redirect suppression,
	Cloudflare (cfscrape / cf-browser-verification), Sucuri and
	Blazingfast challenge handling.

	output selects the return value: '' -> response body (default),
	'cookie' -> cookie string, 'geturl' -> final URL, 'headers' ->
	response headers, 'chunk' -> first 16KB if Content-Length > 2MB,
	'extended' -> (body, status code, response headers, request headers,
	cookie). Returns None on error unless error=True. limit caps the
	body read in KB ('0' -> 224KB); timeout is in seconds.
	"""
	try:
		timeout = str(timeout)

		# Gaia
		if url == None: return None
		handlers = []

		# Optional HTTP proxy; installed globally on urllib2's opener.
		if not proxy == None:
			handlers += [urllib2.ProxyHandler({'http':'%s' % (proxy)}), urllib2.HTTPHandler]
			opener = urllib2.build_opener(*handlers)
			opener = urllib2.install_opener(opener)

		# A cookie jar is only attached when cookies must be returned or
		# the connection is kept open.
		if output == 'cookie' or output == 'extended' or not close == True:
			cookies = cookielib.LWPCookieJar()
			handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
			opener = urllib2.build_opener(*handlers)
			opener = urllib2.install_opener(opener)

		# Disable certificate verification, either on request or on the
		# Python 2.7.9-2.7.11 versions that enforce it by default.
		if ignoreSsl or ((2, 7, 8) < sys.version_info < (2, 7, 12)):
			try:
				import ssl; ssl_context = ssl.create_default_context()
				ssl_context.check_hostname = False
				ssl_context.verify_mode = ssl.CERT_NONE
				handlers += [urllib2.HTTPSHandler(context=ssl_context)]
				opener = urllib2.build_opener(*handlers)
				opener = urllib2.install_opener(opener)
			except:
				pass

		# Protocol-relative URLs default to plain HTTP.
		if url.startswith('//'): url = 'http:' + url

		# NOTE(review): headers.update(headers) is a no-op on a dict; this
		# try/except only distinguishes "headers is a dict" from "headers
		# is None/invalid" — presumably headers = dict(headers) was meant.
		try: headers.update(headers)
		except: headers = {}
		# Fill in default headers without overriding caller-supplied ones.
		if 'User-Agent' in headers:
			pass
		elif not mobile == True:
			#headers['User-Agent'] = agent()
			headers['User-Agent'] = cache.Cache().cacheShort(randomagent)
		else:
			headers['User-Agent'] = 'Apple-iPhone/701.341'
		if 'Referer' in headers:
			pass
		elif referer is not None:
			headers['Referer'] = referer
		if not 'Accept-Language' in headers:
			headers['Accept-Language'] = 'en-US'
		if 'X-Requested-With' in headers:
			pass
		elif XHR == True:
			headers['X-Requested-With'] = 'XMLHttpRequest'
		if 'Cookie' in headers:
			pass
		elif not cookie == None:
			headers['Cookie'] = cookie
		if 'Accept-Encoding' in headers:
			pass
		elif compression and limit is None:
			headers['Accept-Encoding'] = 'gzip'

		if redirect == False:

			# Pass 3xx responses through untouched instead of following them.
			class NoRedirection(urllib2.HTTPErrorProcessor):
				def http_response(self, request, response): return response

			opener = urllib2.build_opener(NoRedirection)
			opener = urllib2.install_opener(opener)

			try: del headers['Referer']
			except: pass

		if isinstance(post, dict):
			# [GAIA]
			# Gets rid of the error: 'ascii' codec can't decode byte 0xd0 in position 0: ordinal not in range(128)
			for key, value in post.iteritems():
				try: post[key] = value.encode('utf-8')
				except: pass
			# [/GAIA]

			post = urllib.urlencode(post)

		request = urllib2.Request(url, data=post)
		_add_request_header(request, headers)

		try:
			response = urllib2.urlopen(request, timeout=int(timeout))
		except urllib2.HTTPError as response:
			# ignoreErrors may be a single status code or a collection of them.
			try: ignore = ignoreErrors and (int(response.code) == ignoreErrors or int(response.code) in ignoreErrors)
			except: ignore = False
			if not ignore:
				# These statuses are typical of Cloudflare/anti-bot challenges.
				if response.code in [301, 307, 308, 503]:
					cf_result = response.read(5242880)
					try: encoding = response.info().getheader('Content-Encoding')
					except: encoding = None
					if encoding == 'gzip':
						cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()
					if flare and 'cloudflare' in str(response.info()).lower():
						# Retry the request through cfscrape, which solves the
						# Cloudflare JS challenge.
						try:
							from resources.lib.externals.cfscrape import cfscrape
							if isinstance(post, dict):
								data = post
							else:
								try: data = urlparse.parse_qs(post)
								except: data = None
							scraper = cfscrape.CloudflareScraper()
							response = scraper.request(method = 'GET' if post == None else 'POST', url = url, headers = headers, data = data, timeout = int(timeout))
							result = response.content
							flare = 'cloudflare' # Used below
							try:
								cookies = response.request._cookies
							except:
								from resources.lib.extensions import tools
								tools.Logger.error()
						except:
							from resources.lib.extensions import tools
							tools.Logger.error()

					elif 'cf-browser-verification' in cf_result:
						# Older-style Cloudflare browser check: solve it once
						# per host and cache the resulting cookie.
						netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
						ua = headers['User-Agent']
						cf = cache.Cache().cacheLong(cfcookie().get, netloc, ua, timeout)
						headers['Cookie'] = cf
						request = urllib2.Request(url, data=post)
						_add_request_header(request, headers)
						response = urllib2.urlopen(request, timeout=int(timeout))
					else:
						log_utils.log('Request-Error (%s): %s' % (str(response.code), url), log_utils.LOGDEBUG)
						if error == False: return
				else:
					log_utils.log('Request-Error (%s): %s' % (str(response.code), url), log_utils.LOGDEBUG)
					if error == False: return

		# Early-return output modes that do not need the response body.
		if output == 'cookie':
			try: result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
			except: pass
			try: result = cf
			except: pass
			if close == True: response.close()
			return result

		elif output == 'geturl':
			result = response.geturl()
			if close == True: response.close()
			return result

		elif output == 'headers':
			result = response.headers
			if close == True: response.close()
			return result

		elif output == 'chunk':
			# Only sample large responses (> 2MB); otherwise return None.
			try: content = int(response.headers['Content-Length'])
			except: content = (2049 * 1024)
			if content < (2048 * 1024): return
			result = response.read(16 * 1024)
			if close == True: response.close()
			return result

		# When cfscrape handled the request above, result already holds the
		# body; otherwise read it here (limit is in KB, default cap 5MB).
		if not flare == 'cloudflare':
			if limit == '0':
				result = response.read(224 * 1024)
			elif not limit == None:
				result = response.read(int(limit) * 1024)
			else:
				result = response.read(5242880)

		try: encoding = response.info().getheader('Content-Encoding')
		except: encoding = None
		if encoding == 'gzip':
			result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()


		# Sucuri cloudproxy challenge: compute the cookie and retry once.
		if 'sucuri_cloudproxy_js' in result:
			su = sucuri().get(result)

			headers['Cookie'] = su

			request = urllib2.Request(url, data=post)
			_add_request_header(request, headers)

			response = urllib2.urlopen(request, timeout=int(timeout))

			if limit == '0':
				result = response.read(224 * 1024)
			elif not limit == None:
				result = response.read(int(limit) * 1024)
			else:
				result = response.read(5242880)

			try: encoding = response.info().getheader('Content-Encoding')
			except: encoding = None
			if encoding == 'gzip':
				result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

		# Blazingfast anti-bot page: compute the cookie and refetch.
		if 'Blazingfast.io' in result and 'xhr.open' in result:
			netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
			ua = headers['User-Agent']
			headers['Cookie'] = cache.Cache().cacheLong(bfcookie().get, netloc, ua, timeout)
			result = _basic_request(url, headers=headers, post=post, timeout=timeout, limit=limit)

		if output == 'extended':
			try: response_headers = dict([(item[0].title(), item[1]) for item in response.info().items()])
			except: response_headers = response.headers
			try: response_code = str(response.code)
			except: response_code = str(response.status_code) # object from CFScrape Requests object.
			try: cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
			except: pass
			try: cookie = cf
			except: pass
			if close == True: response.close()
			return (result, response_code, response_headers, headers, cookie)
		else:
			if close == True: response.close()
			return result
	except Exception as e:
		from resources.lib.extensions import tools
		tools.Logger.error()
		log_utils.log('Request-Error: (%s) => %s' % (str(e), url), log_utils.LOGDEBUG)
		return
Code example #8
0
    def items_list(self, i):
        """
        Resolve one (channel, title, year) tuple i via Trakt search, build
        a full metadata dictionary (title, ids, people, translation, etc.)
        and append it to self.list. Failures are silently ignored so a bad
        item never breaks the listing.
        """
        try:
            # Search Trakt for the title/year; take the best (first) match.
            item = cache.Cache().cacheMedium(trakt.SearchAll,
                                             urllib.quote_plus(i[1]), i[2],
                                             True)[0]

            # The search result wraps the payload under 'movie' or 'show'.
            content = item.get('movie')
            if not content: content = item.get('show')
            item = content

            title = item.get('title')
            title = client.replaceHTMLCodes(title)

            originaltitle = title

            year = item.get('year', 0)
            year = re.sub('[^0-9]', '', str(year))

            imdb = item.get('ids', {}).get('imdb', '0')
            imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))

            tmdb = str(item.get('ids', {}).get('tmdb', 0))

            premiered = item.get('released', '0')
            try:
                # Keep only the YYYY-MM-DD part of the release date.
                premiered = re.compile('(\d{4}-\d{2}-\d{2})').findall(
                    premiered)[0]
            except:
                premiered = '0'

            genre = item.get('genres', [])
            genre = [x.title() for x in genre]
            genre = ' / '.join(genre).strip()
            if not genre: genre = '0'

            # NOTE(review): Trakt's extended movie object exposes the field
            # as lowercase 'runtime' (minutes); 'Runtime' likely always
            # misses and leaves duration '0' — confirm against SearchAll.
            duration = str(item.get('Runtime', 0))
            try:
                # Convert minutes to seconds.
                duration = str(int(duration) * 60)
            except:
                pass

            rating = item.get('rating', '0')
            if not rating or rating == '0.0': rating = '0'

            votes = item.get('votes', '0')

            mpaa = item.get('certification', '0')

            tagline = item.get('tagline', '0')

            plot = item.get('overview', '0')

            # Cast and crew come from a separate Trakt people lookup.
            people = trakt.getPeople(imdb, 'movies')
            director = writer = ''
            cast = []

            if people:
                if 'crew' in people and 'directing' in people['crew']:
                    director = ', '.join([
                        director['person']['name']
                        for director in people['crew']['directing']
                        if director['job'].lower() == 'director'
                    ])
                if 'crew' in people and 'writing' in people['crew']:
                    writer = ', '.join([
                        writer['person']['name']
                        for writer in people['crew']['writing']
                        if writer['job'].lower() in
                        ['writer', 'screenplay', 'author']
                    ])
                for person in people.get('cast', []):
                    cast.append({
                        'name': person['person']['name'],
                        'role': person['character']
                    })
                cast = [(person['name'], person['role']) for person in cast]

            try:
                # Fetch a localized title/tagline/plot when the user's
                # language differs from English and a translation exists.
                if self.lang == 'en' or self.lang not in item.get(
                        'available_translations', [self.lang]):
                    raise Exception()

                trans_item = trakt.getMovieTranslation(imdb,
                                                       self.lang,
                                                       full=True)

                title = trans_item.get('title') or title
                tagline = trans_item.get('tagline') or tagline
                plot = trans_item.get('overview') or plot
            except:
                pass

            item = {
                'title': title,
                'originaltitle': originaltitle,
                'year': year,
                'premiered': premiered,
                'genre': genre,
                'duration': duration,
                'rating': rating,
                'votes': votes,
                'ratingtrakt': rating,
                'votestrakt': votes,
                'mpaa': mpaa,
                'director': director,
                'writer': writer,
                'cast': cast,
                'plot': plot,
                'tagline': tagline,
                'imdb': imdb,
                'tmdb': tmdb,
                'poster': '0',
                'channel': i[0]
            }
            item.update(tools.Rater.extract(item))
            self.list.append(item)
        except:
            pass