コード例 #1
0
    def _http_get(self,
                  url,
                  data=None,
                  headers=None,
                  XHR=False,
                  method=None,
                  cache_limit=8):
        """Fetch *url*, re-authenticating once if the response indicates we
        are not logged in, then refresh the CSRF token from the final HTML.

        Returns the HTML string, or None when the first request fails.
        """
        payload = client.encodePostData(data) if data is not None else None

        html = client.request(url, post=payload, headers=headers, XHR=XHR)
        if html is None:
            return

        # NOTE(review): a login is triggered when the 'Log In' marker is
        # ABSENT from the page — confirm that is the intended check.
        if '<span>Log In</span>' not in html:
            log(type='INFO',
                method='Login',
                err='Logging in for url (%s)' % (url))
            self.__login()
            html = client.request(url, post=payload, headers=headers, XHR=XHR)

        self.__get_token(html)
        return html
コード例 #2
0
def T3DonlineFilms(url):
    """Resolve a 3donlinefilms.com link bundle into a playable URL.

    *url* is a query string holding 'page', 'file' and 'src_file' entries.
    Returns (direct_url, b64-encoded request params, error string).
    """
    error = ''
    # fix: u/params were unbound when an early statement raised, causing a
    # NameError at the return below instead of a clean error result.
    u = None
    params = None
    try:
        data = urlparse.parse_qs(url)
        headers = {}
        headers['Referer'] = 'http://3donlinefilms.com'
        b = data['page'][0]
        # prime a session cookie from the page before hitting update.php
        cook = client.request(b, output='cookie')

        l0 = 'http://3donlinefilms.com/update.php'
        post_data = {'file': data['src_file'][0]}

        cookie = '%s; zeroday=; visit=yes; jwplayer.qualityLabel=HD' % cook
        headers['Referer'] = data['page'][0]
        headers['User-Agent'] = client.agent()
        headers['Cookie'] = cookie

        # best-effort server-side registration of the file; failures ignored
        try:
            ret = client.request(l0,
                                 post=client.encodePostData(post_data),
                                 output='extended',
                                 XHR=True,
                                 cookie=cookie)
        except:
            pass

        u = '%s?file=%s' % (data['file'][0], data['src_file'][0].replace(
            ' ', ''))

        paramsx = {'headers': headers}
        params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
    except Exception as e:
        error = '%s' % e
    return u, params, error
コード例 #3
0
	def __login(self):
		"""Authenticate against FliXanity using the 'user:pass' addon setting.

		Side effects: sets self.username/self.password/self.mode/self.cookie.
		Raises Exception when the token cannot be obtained or login fails.
		"""
		flix_up = control.setting('control_flixanity_user_pass')
		if flix_up != None:
			try:
				# setting is stored as a single 'user:pass' string
				self.username = flix_up.split(':')[0]
				self.password = flix_up.split(':')[1]
			except:
				log(type='ERROR', method='Login', err='FliXanity User:Pass not set or not in correct format of User:Pass')
		
		# return all uncached blank pages if no user or pass
		if not self.username or not self.password:
			log(type='FAIL', method='Login', err='FliXanity Login Failed - No User:Pass set')
			return ''

		# compare against a doubly-base64-encoded reference credential to
		# pick between the two operating modes
		if (self.username+':'+self.password) == base64.b64decode(base64.b64decode(control.flix_up)):
			self.mode = self.modes[0]
		else:
			self.mode = self.modes[1]
		
		url = urlparse.urljoin(self.base_url, '/ajax/login.php')
		self.__get_token()
		if self.__token == None:
			log(type='FAIL', method='Login', err='FliXanity Login Failed - Could not get token')
			raise Exception('FliXanity Login Failed')
			
		data = {'username': self.username, 'password': self.password, 'action': 'login', 'token': self.__token, 't': ''}
		data = client.encodePostData(data)
		html = client.request(url, post=data, XHR=True)
		# NOTE(review): the endpoint appears to answer '0' on success — any
		# other body is treated as a failed login; confirm against the site.
		if html != '0':
			log(type='FAIL', method='Login', err='FliXanity Login Failed')
			raise Exception('FliXanity Login Failed')
		log(type='INFO', method='Login', err='FliXanity Login Done for User: %s' % self.username)
		# re-fetch the base page to capture the authenticated session cookie
		r = client.request(self.base_url, post=data, XHR=True, output='extended')
		self.cookie = r[3] ; headers = r[1] ; html = r[0]
コード例 #4
0
def resolve(url, online=None, USE_POST=False, page_url=None, **kwargs):
	"""Resolve a RapidVideo page into a direct mp4 URL.

	online: tri-state availability hint (None = check now, False = fail).
	USE_POST: whether the page expects the confirmation form POSTed; the
	function flips it once if the response looks like a 'pad.php' redirect.
	Returns (video_url, err, None); video_url is None on failure.
	"""
	try:
		if online == None:
			if check(url) == False: 
				raise Exception('Video not available')
		elif online == False: 
			raise Exception('Video not available')

		video_url = None
		err = ''
		data = {'confirm.x':44, 'confirm.y':55, 'block':1}
		edata = client.encodePostData(data)
		headers = {u'Referer': url, u'User-Agent': client.USER_AGENT}
		# reuse the last session cookie, if any, to skip re-verification
		if len(RV_COOKIES) > 0:
			headers['Cookie'] = RV_COOKIES[0]
				
		try:
			cookies = None
			page_link = url
			# fix: removed a redundant unconditional request here whose
			# result was immediately overwritten by the branch below.
			if USE_POST == True:
				page_data_string, r2, r3, cookies = client.request(page_link, post=edata, headers=headers, httpsskip=True, output='extended')
			else:
				page_data_string, r2, r3, cookies = client.request(page_link, headers=headers, httpsskip=True, output='extended')
				
			# 'pad.php' means the other request style is required — retry once
			if 'pad.php' in page_data_string:
				USE_POST = not USE_POST
				
				if USE_POST == True:
					page_data_string, r2, r3, cookies = client.request(page_link, post=edata, headers=headers, httpsskip=True, output='extended')
				else:
					page_data_string, r2, r3, cookies = client.request(page_link, headers=headers, httpsskip=True, output='extended')
			
			if 'captcha.php' in page_data_string:
				raise Exception('RapidVideo %s requires captcha verification' % url)
				
			# remember the fresh session cookie for subsequent calls
			if cookies != None and len(cookies) > 0:
				del RV_COOKIES[:]
				RV_COOKIES.append(cookies)
				
			video_url = client.parseDOM(page_data_string, 'div', attrs = {'id': 'home_video'})[0]
			try:	
				video_url = client.parseDOM(video_url, 'source', ret='src')[0]
			except:
				raise Exception('No mp4 video found')
		except Exception as e:
			# fix: stringify for consistency with the outer handler, which
			# also returns err as a string
			err = '%s' % e
			log('FAIL', 'resolve', 'link > %s : %s' % (url, e), dolog=True)

		return (video_url, err, None)
		
	except Exception as e:
		e = '{}'.format(e)
		return (None, e, None)
コード例 #5
0
def decode(url, page_url):
    """Resolve an xstreamcdn embed URL into playable source items.

    Returns (items, err): items is a list of dicts (quality/src/fs/online/
    params/urldata), err is '' on success or an error description.
    """
    items = []
    err = ''
    try:
        # renamed from 'id' to avoid shadowing the builtin
        vid = re.compile('//.+?/(?:embed|v)/([0-9a-zA-Z-_]+)').findall(url)[0]
        headersx = {
            'Referer': 'https://www.xstreamcdn.com/v/%s' % vid,
            'User-Agent': client.agent()
        }
        post_data = {'r': page_url, 'd': 'www.xstreamcdn.com'}
        api_url = 'https://www.xstreamcdn.com/api/source/%s' % vid
        page_data = client.request(api_url,
                                   post=client.encodePostData(post_data),
                                   headers=headersx)

        j_data = json.loads(page_data)
        success = j_data['success']
        if success == False:
            try:
                msd = j_data['data']
            except:
                msd = ""
            raise Exception(
                'API returned error: %s | Data: %s | Return msg: %s' %
                (api_url, post_data, msd))
        else:
            # fix: urldata/params are loop-invariant — build them once
            # (previously rebuilt per source, with one dead assignment)
            urldata = client.b64encode(json.dumps('', encoding='utf-8'))
            paramsx = {'headers': headersx}
            params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
            srcs = j_data['data']
            for src in srcs:
                q = src['label']
                u = src['file']
                fs = client.getFileSize(u, retry429=True, headers=headersx)
                online = check(u)
                # follow redirects to the final media URL when possible
                u1 = client.request(u, output='geturl')
                if u1 != None:
                    u = u1

                items.append({
                    'quality': q,
                    'src': u,
                    'fs': fs,
                    'online': online,
                    'params': params,
                    'urldata': urldata
                })
        if len(items) == 0:
            raise Exception('No videos found !')
    except Exception as e:
        err = 'xtreamcdn Error: %s' % e

    return items, err
コード例 #6
0
def T3DonlineFilms(url):
	"""Resolve a 3donlinefilms/freedocufilms/3dmoviesfullhd link bundle.

	*url* is a query string holding 'page', 'file' and 'src_file' entries.
	Returns (direct_url, error string, b64-encoded request params).
	"""
	error = ''
	# fix: u/params were unbound when an early statement raised, causing a
	# NameError at the return below instead of a clean error result.
	u = None
	params = None
	try:
		data = urlparse.parse_qs(url)
		headers = {}
		
		# each mirror has its own referer and update endpoint
		if '3donlinefilms.com' in url:
			headers['Referer'] = 'https://3donlinefilms.com'
			l0 = 'https://3donlinefilms.com/update.php'
		elif 'freedocufilms.com' in url:
			headers['Referer'] = 'https://freedocufilms.com'
			l0 = 'https://freedocufilms.com/update.php'
		else:
			headers['Referer'] = 'https://3dmoviesfullhd.com'
			l0 = 'https://3dmoviesfullhd.com/update.php'
		
		u = data['file'][0]
		u = u.replace('//freedocufilms','//www.freedocufilms')
				
		page = data['page'][0]
		# prime a session cookie from the page before hitting update.php
		cook = client.request(page, output='cookie')
		
		post_data = {'file':data['src_file'][0]}
		
		cookie = '%s; zeroday=; visit=yes; jwplayer.qualityLabel=HD' % cook
		headers['Referer'] = data['page'][0]
		headers['User-Agent'] = client.agent()
		headers['Cookie'] = cookie
		
		# best-effort server-side registration of the file; failures ignored
		try:
			ret = client.request(l0, post=client.encodePostData(post_data), output='extended', XHR=True, cookie=cookie)
		except:
			pass
		
		paramsx = {'headers':headers}
		params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
		
	except Exception as e:
		error = '%s' % e
	return u, error, params
コード例 #7
0
	def process(self, url, q, r, headers, page_url):
		"""Probe a resolved media *url* and build a list of source items.

		q: quality label, r: rip-type label, headers: optional request
		headers to use when probing.  Each item dict carries quality,
		riptype, src, fs (file size), online flag, b64 params/urldata and
		streaming/download capability flags.  If no host-specific branch
		produced an item, a single generic probe of *url* is appended.
		"""
		items = []

		try:
			if 'vcstream.to' in url:
				# extract the file id and read the player's sources JSON
				id = re.compile('//.+?/(?:embed|f)/([0-9a-zA-Z-_]+)').findall(url)[0]
				headersx = {'Referer': url, 'User-Agent': client.agent()}
				page_data = client.request('https://vcstream.to/player?fid=%s&page=embed' % id, headers=headersx)
				srcs = re.findall(r'sources:.\[(.*?)\]', page_data)[0]
				srcs = srcs.replace('\\n','').replace('\\','')
				srcs = '''[%s]''' % srcs
				j_data = json.loads(srcs)
				for j in j_data:
					t = j['name']
					label = j['label']
					u = j['src']
					if label.lower() == 'raw':
						q = source_utils.check_sd_url(t)
					else:
						q = label
					r = source_utils.check_sd_url_rip(t)
					
					# retry the size probe without headers if it failed
					fs = client.getFileSize(u, retry429=True, headers=headers)
					if fs == None or int(fs) == 0:
						fs = client.getFileSize(u, retry429=True)
					q = qual_based_on_fs(q,fs)
					online = check(u)
					urldata = client.b64encode(json.dumps('', encoding='utf-8'))
					params = client.b64encode(json.dumps('', encoding='utf-8'))
					if headers != None:
						paramsx = {'headers':headers}
						params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
					
					items.append({'quality':q, 'riptype':r, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':True})
					
			elif '3donlinefilms.com' in url or '3dmoviesfullhd.com' in url or 'freedocufilms.com' in url:
				data = urlparse.parse_qs(url)
				headers = {}
				
				# each mirror has its own referer and update endpoint
				if '3donlinefilms.com' in url:
					headers['Referer'] = 'http://3donlinefilms.com'
					l0 = 'https://3donlinefilms.com/update.php'
				elif 'freedocufilms.com' in url:
					headers['Referer'] = 'http://freedocufilms.com'
					l0 = 'https://freedocufilms.com/update.php'
				else:
					headers['Referer'] = 'http://3dmoviesfullhd.com'
					l0 = 'https://3dmoviesfullhd.com/update.php'
					
				# prime a session cookie from the page before update.php
				page = data['page'][0]
				cook = client.request(page, output='cookie')
				post_data = {'file':data['src_file'][0]}
				
				cookie = '%s; zeroday=; visit=yes; jwplayer.qualityLabel=HD' % cook
				headers['Referer'] = page
				headers['User-Agent'] = client.agent()
				headers['Cookie'] = cookie
				
				u = data['file'][0]
				u = u.replace('//freedocufilms','//www.freedocufilms')
	
				# best-effort registration of the file; failures only logged
				try:
					ret = client.request(l0, post=client.encodePostData(post_data),headers=headers, output='extended', XHR=True, cookie=cookie)
				except Exception as e:
					log(type='FAIL', method='process', err='%s' % e, dolog=False, logToControl=False, doPrint=True)
				
				# fetch only headers to read Content-Length for the size
				ret = client.request(u, output='headers', headers=headers, XHR=True)
				
				try:
					fs = int(re.findall(r'Content-Length:(.*)', str(ret), re.MULTILINE)[0].strip())
				except:
					fs = 0

				q = qual_based_on_fs(q,fs)
				online = False
				
				# a non-zero size is treated as proof the file is reachable
				if int(fs) > 0:
					online = True
					
				urldata = client.b64encode(json.dumps('', encoding='utf-8'))
				paramsx = {'headers':headers}
				params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
				
				items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':False, 'allowsDownload':True})
			elif 'cooltvseries.com' in url:
				# resolve redirects, then request an explicit mp4 filename
				urlx = client.request(url, output='geturl', headers=headers)
				urlx = '%s?e=file.mp4' % urlx
				fs = client.getFileSize(url, retry429=True, headers=headers)
				if fs == None or int(fs) == 0:
					fs = client.getFileSize(url, retry429=True)
				q = qual_based_on_fs(q,fs)
				online = check(url)
				urldata = client.b64encode(json.dumps('', encoding='utf-8'))
				params = client.b64encode(json.dumps('', encoding='utf-8'))
				if headers != None:
					paramsx = {'headers':headers}
					params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
				allowsDownload = True
				items.append({'quality':q, 'riptype':r, 'src':urlx, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':allowsDownload})
			else:
				# generic host: probe size and availability directly
				fs = client.getFileSize(url, retry429=True, headers=headers)
				if fs == None or int(fs) == 0:
					fs = client.getFileSize(url, retry429=True)
				q = qual_based_on_fs(q,fs)
				online = check(url)
				urldata = client.b64encode(json.dumps('', encoding='utf-8'))
				params = client.b64encode(json.dumps('', encoding='utf-8'))
				if headers != None:
					paramsx = {'headers':headers}
					params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
				allowsDownload = True
				# HLS playlists stream only; they are not direct downloads
				if '.m3u8' in url:
					allowsDownload = False
				items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':allowsDownload})
					
		except Exception as e:
			log(type='ERROR',method='process', err=u'%s' % e)

		# fallback: if every branch failed, emit one generic probe of url
		if len(items) == 0:
			fs = client.getFileSize(url, retry429=True, headers=headers)
			if fs == None or int(fs) == 0:
				fs = client.getFileSize(url, retry429=True)
			q = qual_based_on_fs(q,fs)
			online = check(url)
			urldata = client.b64encode(json.dumps('', encoding='utf-8'))
			params = client.b64encode(json.dumps('', encoding='utf-8'))
			if headers != None:
				paramsx = {'headers':headers}
				params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
			items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':True})
			
		return items
コード例 #8
0
    def get_sources(self, page_url):
        """Scrape embed sources for a movie/episode page.

        POSTs the site's AJAX endpoint with a token/elid pair, decodes the
        JSON of embed iframes and returns a list of source dicts (url,
        host, quality, direct flag, poster).  Returns [] on any failure.
        """

        try:
            sources = []

            html = client.request(page_url)

            # the AJAX action differs for episodes vs movies
            action = 'getEpisodeEmb' if '/episode/' in page_url else 'getMovieEmb'

            match = re.search('elid\s*=\s*"([^"]+)', html)
            if self.__token is None:
                self.__get_token()

            if match and self.__token is not None:
                # elid is the base64 of the current unix timestamp
                elid = urllib.quote(
                    base64.encodestring(str(int(time.time()))).strip())
                data = {
                    'action': action,
                    'idEl': match.group(1),
                    'token': self.__token,
                    'elid': elid,
                    'nopop': ''
                }
                ajax_url = urlparse.urljoin(self.base_url, SOURCES_URL)
                headers = {
                    'authorization':
                    'Bearer %s' % (self.__get_bearer()),
                    'Referer':
                    page_url,
                    'User-Agent':
                    'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
                }
                #headers.update(XHR)

                # poster image is optional — missing markup is tolerated
                try:
                    poster = client.parseDOM(html,
                                             'div',
                                             attrs={'class': 'poster'})[0]
                    poster = client.parseDOM(poster, 'img', ret='data-src')[0]
                except:
                    poster = None

                data = client.encodePostData(data)
                html = client.request(ajax_url,
                                      post=data,
                                      cookie=self.cookie,
                                      headers=headers)
                # unescape the JSON payload, then re-quote iframe attributes
                # with single quotes so the body stays valid JSON
                html = html.replace('\\"', '"').replace('\\/', '/')
                rep_txt = re.findall(r'<iframe(.*?)</iframe>', html,
                                     re.IGNORECASE)
                for rep in rep_txt:
                    html = html.replace(rep, rep.replace('"', '\''))

                if html == None or len(html) == 0:
                    raise Exception('HTML data not found on %s' % ajax_url)

                json_html = json.loads(html)

                for k in json_html.keys():
                    html = json_html[k]['embed']
                    quality, t = cleantitle.getQuality2(
                        json_html[k]['type'].replace('fbcdn',
                                                     '').replace('-',
                                                                 '').strip())
                    pattern = '<iframe\s+src=\'([^\']+)'
                    for match in re.finditer(pattern, html, re.DOTALL | re.I):
                        url = match.group(1)
                        host = client.geturlhost(url)

                        direct = True

                        # gvideo links are direct; everything else is hosted
                        if host == 'gvideo':
                            direct = True
                            quality = client.googletag(url)
                        else:
                            if 'vk.com' in url and url.endswith('oid='):
                                continue  # skip bad vk.com links
                            direct = False
                            host = urlparse.urlparse(url).hostname

                        source = {
                            'multi-part': False,
                            'url': url,
                            'host': host,
                            'quality': quality,
                            'views': None,
                            'rating': None,
                            'direct': direct,
                            'poster': poster
                        }
                        sources.append(source)
        except:
            # best-effort scraper: any failure yields an empty list
            pass

        return sources
コード例 #9
0
	def get_sources(self, url, hosthdDict=None, hostDict=None, locDict=None, proxy_options=None, key=None, testing=False):
		"""Search the site for the title encoded in *url* and build sources.

		url: query string with title/year/aliases (and season/episode for
		TV).  Returns a list of source dicts produced via
		resolvers.createMeta; [] when disabled, unmatched, or on error.
		"""
		try:
			sources = []
			if control.setting('Provider-%s' % name) == False:
				log('INFO','get_sources','Provider Disabled by User')
				log('INFO', 'get_sources', 'Completed')
				return sources
			if url == None: 
				log('FAIL','get_sources','url == None. Could not find a matching title: %s' % cleantitle.title_from_key(key), dolog=not testing)
				log('INFO', 'get_sources', 'Completed')
				return sources
			
			# flatten the parsed query string to simple key -> value
			data = urlparse.parse_qs(url)
			data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
			title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
			year = data['year']
			aliases = eval(data['aliases'])
			#cookie = '; approve_search=yes'
			query = self.search_link % (urllib.quote_plus(title))
			query = urlparse.urljoin(self.base_link, query)
			
			log(type='INFO', method='get_sources', err='Searching - %s' % query, dolog=False, logToControl=False, doPrint=True)
			result = client.request(query) #, cookie=cookie)
			
			links_m = []
			
			# Step 1: locate the watch page for the exact title/season/year
			try:
				if 'episode' in data:
					r = client.parseDOM(result, 'div', attrs={'class': 'ml-item'})
					r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title'))
					r = [(i[0], i[1], re.findall('(.*?)\s+-\s+Season\s+(\d+)', i[1])) for i in r]
					r = [(i[0], i[1], i[2][0]) for i in r if len(i[2]) > 0]
					url = [i[0] for i in r if self.matchAlias(i[2][0], aliases) and i[2][1] == data['season']][0]

					url = '%swatch' % url
					result = client.request(url)

					# pick the link for the requested episode number
					url = re.findall('a href=\"(.+?)\" class=\"btn-eps first-ep \">Episode %02d' % int(data['episode']), result)[0]

				else:
					r = client.parseDOM(result, 'div', attrs={'class': 'ml-item'})
					r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title'), client.parseDOM(r, 'img', ret='data-original'))
					
					results = [(i[0], i[1], re.findall(r'images/(.*?)-', i[2])) for i in r]
					
					# first pass matches alias AND year from the image name
					try:
						r = [(i[0], i[1], i[2][0]) for i in results if len(i[2]) > 0]
						url = [i[0] for i in r if self.matchAlias(i[1], aliases) and (year == i[2])][0]
					except Exception as e:
						print e
						url = None
						pass
						
					# second pass: alias match alone
					if (url == None):
						url = [i[0] for i in results if self.matchAlias(i[1], aliases)][0]
					url = urlparse.urljoin(url, 'watch')

				#url = client.request(url, output='geturl')
				if url == None: raise Exception()
			except Exception as e:
			  # NOTE(review): this line is indented with spaces after tabs —
			  # accepted by Py2 here but fragile; keep indentation consistent
			  raise Exception('Step 1 Failed: %s > %s' % (url,e))

			url = url if 'http' in url else urlparse.urljoin(self.base_link, url)
			log(type='INFO', method='get_sources', err='Match found - %s' % url, dolog=False, logToControl=False, doPrint=True)
			
			result = client.request(url)
			try:
				poster = client.parseDOM(result, 'img', attrs={'itemprop':'image'}, ret='src')[0]
			except:
				poster = None
				
			Juicy = False
			ss = []
			riptype = 'BRRIP'
			
			# collect youtube trailers (skipped while testing)
			if testing == False:
				trailer_res = client.parseDOM(result, 'div', attrs={'class':'block-trailer'})[0]
				trailer_res = client.parseDOM(trailer_res, 'a', ret='href')[0]
				trailer_res = client.request(trailer_res)
				
				trailers = []
				try:
					matches = re.compile('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+').findall(trailer_res)
					for match in matches:
						try:
							if 'youtube.com' in match:
								match = match.replace('embed/','watch?v=')
								trailers.append(match)
						except:
							pass
				except Exception as e:
					pass
					
				for trailer in trailers:
					try:
						l = resolvers.createMeta(trailer, self.name, self.logo, '720p', [], key, poster=poster, vidtype='Trailer', testing=testing)
						for ll in l:
							if ll != None and 'key' in ll.keys():
								links_m.append(ll)
					except:
						pass
			
			# Step 2: extract the actual stream list, via the JuicyCodes
			# blob (disabled: Juicy is always False) or the episode pages
			if 'streamdor' in result and Juicy == True:
				src = re.findall('src\s*=\s*"(.*streamdor.co\/video\/\d+)"', result)[0]
				if src.startswith('//'):
					src = 'http:'+src
				episodeId = re.findall('.*streamdor.co/video/(\d+)', src)[0]
				p = client.request(src, referer=url)
				
				try:
					#log(type='INFO', method='get_sources', err='Juicy Code', dolog=False, logToControl=False, doPrint=True)
					# de-obfuscate: strip concatenation, base64-decode, unpack
					p = re.findall(r'JuicyCodes.Run\(([^\)]+)', p, re.IGNORECASE)[0]
					p = re.sub(r'\"\s*\+\s*\"','', p)
					p = re.sub(r'[^A-Za-z0-9+\\/=]','', p)
					p = base64.b64decode(p)
					p = jsunpack.unpack(p)
					p = unicode(p, 'utf-8')

					post = client.encodePostData({'id': episodeId})
					
					p2 = client.request('https://embed.streamdor.co/token.php?v=5', post=post, referer=src, XHR=True, timeout=60)
					
					js = json.loads(p2)
					tok = js['token']
					quali = 'SD'
					try:
						quali = re.findall(r'label:"(.*?)"',p)[0]
					except:
						pass
					
					p = re.findall(r'var\s+episode=({[^}]+});',p)[0]
					js = json.loads(p)
					
					# eName carries the rip type unless it is a quality label
					try:
						rtype = js['eName']
						if '0p' in rtype.lower() or 'sd' in rtype.lower() or 'hd' in rtype.lower():
							raise
						riptype = rtype
					except:
						pass

					if 'fileEmbed' in js and js['fileEmbed'] != '':
						ss.append([js['fileEmbed'], quali, riptype])
					if 'filePlaylist' in js and js['filePlaylist'] != '':
						js_data = client.request('https://embed.streamdor.co/play/sources?hash=%s&token=%s'%(js['filePlaylist'],tok), referer=src, XHR=True)
						
						js = json.loads(js_data)
						m_srcs = js['playlist'][0]['sources']
						if 'error' not in m_srcs:
							for m_src in m_srcs:
								ss.append([m_src['file'], m_src['label'], riptype])
					if 'fileHLS' in js and js['fileHLS'] != '':
						ss.append(['https://hls.streamdor.co/%s%s'%(tok, js['fileHLS']), quali, riptype])
						
				except Exception as e:
					raise Exception('Step 2 Failed: %s > %s' % (url,e))
			else:
				#log(type='INFO', method='get_sources', err='Embed Code', dolog=False, logToControl=False, doPrint=True)
				# walk each quality page and pull its streamdor embed id
				div_s = client.parseDOM(result, 'div', attrs={'id': 'list-eps'})[0]
				pages = client.parseDOM(div_s, 'a', ret='href')
				#print pages
				quals = re.findall(r'>(.*?)</a>',div_s)
				#print quals
				c=0
				for p in pages:
					try:
						p1 = client.request(p, referer=url)
						file_id = re.findall(r'load_player\.html\?e=(.*?)\"',p1)[0]
						file_loc = 'https://api.streamdor.co/episode/embed/%s' % file_id
						js_data = client.request(file_loc, referer=p)
						js = json.loads(js_data)
						m_srcs = js['embed']
						try:
							rtype = quals[c]
							if '0p' in rtype.lower() or 'sd' in rtype.lower() or 'hd' in rtype.lower():
								raise
							riptype = 'CAM'
						except:
							pass
						ss.append([m_srcs, file_quality(quals[c]), riptype])
						c=c+1
					except:
						pass

			# turn every [link, quality, riptype] into resolver metadata
			for link in ss:
				#print link
				try:
					if 'google' in url:
						xs = client.googletag(url)
						for x in xs:
							try:
								l = resolvers.createMeta(x['url'], self.name, self.logo, x['quality'], [], key, riptype, poster=poster, testing=testing)
								for ll in l:
									if ll != None and 'key' in ll.keys():
										links_m.append(ll)
								if testing == True and len(links_m) > 0:
									break
							except:
								pass
					else:
						try:
							l = resolvers.createMeta(link[0], self.name, self.logo, link[1], [], key, link[2], poster=poster, testing=testing)
							for ll in l:
								if ll != None and 'key' in ll.keys():
									links_m.append(ll)
							if testing == True and len(links_m) > 0:
								break
						except:
							pass
				except:
					pass
			
			for l in links_m:
				if l != None and 'key' in l.keys():
					sources.append(l)

			if len(sources) == 0:
				log('FAIL','get_sources','Could not find a matching title: %s' % cleantitle.title_from_key(key))
			else:
				log('SUCCESS', 'get_sources','%s sources : %s' % (cleantitle.title_from_key(key), len(sources)))
				
			log('INFO', 'get_sources', 'Completed')
			
			return sources
		except Exception as e:
			log('ERROR', 'get_sources', '%s' % e)
			log('INFO', 'get_sources', 'Completed')
			return sources
コード例 #10
0
    def get_sources(self,
                    url,
                    hosthdDict=None,
                    hostDict=None,
                    locDict=None,
                    proxy_options=None,
                    key=None,
                    testing=False):
        try:
            sources = []
            if control.setting('Provider-%s' % name) == False:
                log('INFO', 'get_sources', 'Provider Disabled by User')
                return sources
            if url == None:
                log('FAIL',
                    'get_sources',
                    'url == None. Could not find a matching title: %s' %
                    cleantitle.title_from_key(key),
                    dolog=not testing)
                return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])
            title = data['tvshowtitle'] if 'tvshowtitle' in data else data[
                'title']
            year = data['year']
            aliases = eval(data['aliases'])
            #cookie = '; approve_search=yes'
            query = self.search_link % (urllib.quote_plus(title))
            query = urlparse.urljoin(self.base_link, query)

            log(type='INFO',
                method='get_sources',
                err='Searching - %s' % query,
                dolog=False,
                logToControl=False,
                doPrint=True)
            result = client.request(query)  #, cookie=cookie)

            links_m = []

            try:
                if 'episode' in data:
                    r = client.parseDOM(result,
                                        'div',
                                        attrs={'class': 'ml-item'})
                    r = zip(client.parseDOM(r, 'a', ret='href'),
                            client.parseDOM(r, 'a', ret='title'))
                    r = [(i[0], i[1],
                          re.findall('(.*?)\s+-\s+Season\s+(\d+)', i[1]))
                         for i in r]
                    r = [(i[0], i[1], i[2][0]) for i in r if len(i[2]) > 0]
                    url = [
                        i[0] for i in r if self.matchAlias(i[2][0], aliases)
                        and i[2][1] == data['season']
                    ][0]

                    url = '%swatch' % url
                    result = client.request(url)

                    url = re.findall(
                        'a href=\"(.+?)\" class=\"btn-eps first-ep \">Episode %02d'
                        % int(data['episode']), result)[0]

                else:
                    r = client.parseDOM(result,
                                        'div',
                                        attrs={'class': 'ml-item'})
                    r = zip(client.parseDOM(r, 'a', ret='href'),
                            client.parseDOM(r, 'a', ret='title'),
                            client.parseDOM(r, 'img', ret='data-original'))

                    results = [(i[0], i[1], re.findall(r'images/(.*?)-', i[2]))
                               for i in r]

                    try:
                        r = [(i[0], i[1], i[2][0]) for i in results
                             if len(i[2]) > 0]
                        url = [
                            i[0] for i in r
                            if self.matchAlias(i[1], aliases) and (
                                year == i[2])
                        ][0]
                    except Exception as e:
                        print e
                        url = None
                        pass

                    if (url == None):
                        url = [
                            i[0] for i in results
                            if self.matchAlias(i[1], aliases)
                        ][0]
                    url = urlparse.urljoin(url, 'watch')

                #url = client.request(url, output='geturl')
                # Step 1 (resolved above, not visible here): a page url must
                # exist by now; raise so the handler below tags the stage.
                if url == None: raise Exception()
            except Exception as e:
                raise Exception('Step 1 Failed: %s > %s' % (url, e))

            # Make the title url absolute against the site root when the
            # scrape returned a relative path.
            url = url if 'http' in url else urlparse.urljoin(
                self.base_link, url)
            result = client.request(url)
            # Poster artwork is optional metadata; swallow scrape failures.
            try:
                poster = client.parseDOM(result,
                                         'img',
                                         attrs={'itemprop': 'image'},
                                         ret='src')[0]
            except:
                poster = None
            # Locate the embedded streamdor.co player and its numeric id;
            # an IndexError here falls through to the Step 2 handler's caller.
            src = re.findall('src\s*=\s*"(.*streamdor.co\/video\/\d+)"',
                             result)[0]
            if src.startswith('//'):
                src = 'http:' + src
            episodeId = re.findall('.*streamdor.co/video/(\d+)', src)[0]
            p = client.request(src, referer=url)

            # Default rip-type label; may be overridden by embed metadata below.
            riptype = 'BRRIP'

            try:
                # Step 2: the embed page hides its config in a
                # JuicyCodes.Run("..."+"..."+...) call — concatenated, quoted
                # base64 wrapping p.a.c.k.e.d JavaScript.
                p = re.findall(r'JuicyCodes.Run\(([^\)]+)', p,
                               re.IGNORECASE)[0]
                # Join the "abc"+"def" string pieces, then strip everything
                # outside the base64 alphabet before decoding.
                p = re.sub(r'\"\s*\+\s*\"', '', p)
                p = re.sub(r'[^A-Za-z0-9+\\/=]', '', p)
                p = base64.b64decode(p)
                # Undo the JS packer; result is the player setup script
                # (Python 2: decode bytes to unicode text).
                p = jsunpack.unpack(p)
                p = unicode(p, 'utf-8')

                # Exchange the episode id for a playback token.
                post = client.encodePostData({'id': episodeId})

                p2 = client.request('https://embed.streamdor.co/token.php?v=5',
                                    post=post,
                                    referer=src,
                                    XHR=True,
                                    timeout=60)

                js = json.loads(p2)
                tok = js['token']
                # Quality label from the player config; default to SD.
                quali = 'SD'
                try:
                    quali = re.findall(r'label:"(.*?)"', p)[0]
                except:
                    pass

                # The unpacked script defines `var episode={...}` holding the
                # file pointers (fileEmbed / filePlaylist / fileHLS).
                p = re.findall(r'var\s+episode=({[^}]+});', p)[0]
                js = json.loads(p)
                # Accumulates [stream_url, quality_label] pairs.
                ss = []

                # eName sometimes carries a rip type (e.g. CAM/TS); ignore it
                # when it is really a resolution label (720p/SD/HD).
                try:
                    rtype = js['eName']
                    if '0p' in rtype.lower() or 'sd' in rtype.lower(
                    ) or 'hd' in rtype.lower():
                        raise
                    riptype = rtype
                except:
                    pass

                #print js

                #if 'eName' in js and js['eName'] != '':
                #	quali = source_utils.label_to_quality(js['eName'])
                # Three possible source shapes: a direct embed url, a playlist
                # hash expanded via the sources endpoint, and an HLS path.
                if 'fileEmbed' in js and js['fileEmbed'] != '':
                    ss.append([js['fileEmbed'], quali])
                if 'filePlaylist' in js and js['filePlaylist'] != '':
                    js_data = client.request(
                        'https://embed.streamdor.co/play/sources?hash=%s&token=%s'
                        % (js['filePlaylist'], tok),
                        referer=src,
                        XHR=True)

                    # NOTE: `js` is rebound here, so the fileHLS check below
                    # reads the playlist response, not the episode object.
                    js = json.loads(js_data)
                    m_srcs = js['playlist'][0]['sources']
                    if 'error' not in m_srcs:
                        for m_src in m_srcs:
                            ss.append([m_src['file'], m_src['label']])
                if 'fileHLS' in js and js['fileHLS'] != '':
                    ss.append([
                        'https://hls.streamdor.co/%s%s' % (tok, js['fileHLS']),
                        quali
                    ])
            except Exception as e:
                raise Exception('Step 2 Failed: %s > %s' % (url, e))

            # Wrap each harvested stream in resolver metadata; one bad link
            # must not abort the rest, hence the blanket excepts.
            for link in ss:
                try:
                    # NOTE(review): this tests the PAGE url, not link[0] —
                    # presumably intentional (Google-hosted pages expose
                    # multiple qualities via googletag), but confirm.
                    if 'google' in url:
                        xs = client.googletag(url)
                        for x in xs:
                            try:
                                links_m = resolvers.createMeta(x['url'],
                                                               self.name,
                                                               self.logo,
                                                               x['quality'],
                                                               links_m,
                                                               key,
                                                               riptype,
                                                               poster=poster,
                                                               testing=testing)
                                # In test mode, one working link is enough.
                                if testing == True and len(links_m) > 0:
                                    break
                            except:
                                pass
                    else:
                        try:
                            links_m = resolvers.createMeta(link[0],
                                                           self.name,
                                                           self.logo,
                                                           link[1],
                                                           links_m,
                                                           key,
                                                           riptype,
                                                           poster=poster,
                                                           testing=testing)
                            if testing == True and len(links_m) > 0:
                                break
                        except:
                            pass
                except:
                    pass

            # Flatten the resolver metadata into the return list.
            for l in links_m:
                sources.append(l)

            if len(sources) == 0:
                log(
                    'FAIL', 'get_sources',
                    'Could not find a matching title: %s' %
                    cleantitle.title_from_key(key))
                return sources

            log('SUCCESS',
                'get_sources',
                '%s sources : %s' %
                (cleantitle.title_from_key(key), len(sources)),
                dolog=not testing)
            return sources
        except Exception as e:
            # Catch-all boundary: log the failure and return whatever was
            # collected (possibly empty) rather than propagate.
            log('ERROR', 'get_sources', '%s' % e, dolog=not testing)
            return sources