Example #1
    def movie(self, imdb, title, localtitle, aliases, year):
        try:
            session = self._createSession(randomagent())

            lowerTitle = title.lower()
            stringConstant, searchHTML = self._getSearch(lowerTitle, session)

            possibleTitles = set((lowerTitle, ) + tuple(
                (alias['title'].lower()
                 for alias in aliases) if aliases else ()))
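            # Note: SoupStrainer has no 'recursive' option; the keyword is treated as an
            # attribute filter here, so the strainer effectively keeps every plain <div>.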
            soup = BeautifulSoup(searchHTML,
                                 'html.parser',
                                 parse_only=SoupStrainer('div',
                                                         recursive=False))
            for div in soup:
                if div.span and (year
                                 in div.span.text) and (div.a.text.lower()
                                                        in possibleTitles):
                    return {
                        'type': 'movie',
                        'pageURL': self.BASE_URL + div.a['href'],
                        'sConstant': stringConstant,
                        'UA': session.headers['User-Agent'],
                        'cookies': session.cookies.get_dict()
                    }
            return None  # No results found.
        except:
            self._logException()
            return None
Example #2
    def episode(self, data, imdb, tvdb, title, premiered, season, episode):
        try:
            session = self._createSession(randomagent())

            # Search with the TV show name and season number string.
            lowerTitle = data
            stringConstant, searchHTML = self._getSearch(
                lowerTitle + ' ' + season, session)

            soup = BeautifulSoup(searchHTML, 'html.parser')
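            # Accept the first top-level result whose link text contains both
            # the show title and the season string.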
            for div in soup.findAll('div', recursive=False):
                resultName = div.a.text.lower()
                if lowerTitle in resultName and season in resultName:
                    return {
                        'type': 'episode',
                        'episode': episode,
                        'pageURL': self.BASE_URL + div.a['href'],
                        'sConstant': stringConstant,
                        'UA': session.headers['User-Agent'],
                        'cookies': session.cookies.get_dict()
                    }
            return None  # No results found.
        except:
            self._logException()
            return None
Example #3
    def sources(self, url, hostDict, hostprDict):
        try:
            sources = []

            if url is None: return sources

            year = url['year']
            h = {'User-Agent': client.randomagent()}
            title = cleantitle.geturl(url['title']).replace('-', '+')
            url = urlparse.urljoin(self.base_link, self.search_link % title)
            r = client.request(url, headers=h)
            r = BeautifulSoup(r, 'html.parser').find('div', {'class': 'item'})
            r = r.find('a')['href']
            r = client.request(r, headers=h)
            r = BeautifulSoup(r, 'html.parser')
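            # First search hit leads to the movie page; read its quality badge
            # and the embedded player iframe.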
            quality = r.find('span', {'class': 'calidad2'}).text
            url = r.find('div', {'class': 'movieplay'}).find('iframe')['src']
            if quality not in ['1080p', '720p']:
                quality = 'SD'

            valid, host = source_utils.is_host_valid(url, hostDict)
            sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False,
                            'debridonly': False})
            return sources
        except:
            return sources
Example #4
 def setNewCookies(self):
     try:
         ua = client.randomagent()
         self.headers['User-Agent'] = ua
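         # Fetch a fresh clearance cookie (e.g. Cloudflare) through the proxies
         # helper and pin it on all future requests.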
         self.cookie = proxies.request(url=self.base_link,
                                       headers=self.headers,
                                       output='cookie',
                                       use_web_proxy=self.proxyrequired)
         self.headers['Cookie'] = self.cookie
     except Exception as e:
         log('ERROR', 'setNewCookies', '%s' % e)
Example #5
	def setNewCookies(self, site):
		try:
			ua = client.randomagent()
			self.headers['User-Agent'] = ua
			self.cookie = proxies.request(url=site, headers=self.headers, output='cookie', use_web_proxy=self.proxyrequired)
			if self.cookie is None:
				raise Exception('Retrieved cookie None')
			self.headers['Cookie'] = self.cookie
			log('SUCCESS', 'setNewCookies', 'CF Cookie : %s for %s' % (self.cookie, site))
		except Exception as e:
			log('ERROR', 'setNewCookies', '%s' % e)
Example #6
 def __init__(self):
     self.base_link = 'https://freeproxy.io'
     self.name = name
     self.captcha = False
     self.ssl = False
     self.speedtest = 0
     self.headers = {
         'Connection': 'keep-alive',
         'User-Agent': client.randomagent()
     }
     self.working = self.testSite(disabled=True)
Example #7
 def __init__(self):
     self.base_link = 'https://ssl-proxy.my-addr.org'
     self.name = name
     self.base_link_usage = '/'
     self.captcha = False
     self.ssl = True
     self.speedtest = 0
     self.headers = {
         'Connection': 'keep-alive',
         'User-Agent': client.randomagent()
     }
     self.working = self.testSite()
Example #8
	def __init__(self):
		del loggertxt[:]
		self.ver = '0.0.1'
		self.update_date = 'Dec. 19, 2017'
		log(type='INFO', method='init', err=' -- Initializing %s %s %s Start --' % (name, self.ver, self.update_date))
		self.base_link = 'https://unblockweb.co'
		self.name = name
		self.loggertxt = []
		self.disabled = False
		self.captcha = False
		self.ssl = True
		self.speedtest = 0
		self.headers = {'Connection': 'keep-alive', 'User-Agent': client.randomagent()}
		self.working = self.testSite()
		log(type='INFO', method='init', err=' -- Initializing %s %s %s End --' % (name, self.ver, self.update_date))
Example #9
	def initAndSleep(self):
		try:
			self.TOKEN_KEY = []
			self.getVidToken()
			if len(self.TOKEN_KEY) > 0:
				log('SUCCESS', 'initAndSleep', 'Vid Token: %s' % client.b64encode(self.TOKEN_KEY[0]))
			else:
				log('FAIL', 'initAndSleep', 'Vid Token not retrieved!')
			
			t_base_link = self.base_link
			self.headers = {'X-Requested-With': 'XMLHttpRequest'}
			self.headers['Referer'] = t_base_link
			ua = client.randomagent()
			self.headers['User-Agent'] = ua
			
			#get cf cookie
			cookie1 = proxies.request(url=t_base_link, headers=self.headers, output='cookie', use_web_proxy=self.proxyrequired, httpsskip=True)
			self.headers['Cookie'] = cookie1
			
			# get reqkey cookie
			try:
				token_url = urlparse.urljoin(t_base_link, self.token_link)
				r1 = proxies.request(token_url, headers=self.headers, httpsskip=True)
				reqkey = self.decodeJSFCookie(r1)
			except:
				reqkey = ''
			
			# get session cookie
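			# 'ts' is the current epoch time rounded down to the hour (Python 2 integer division).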
			serverts = str(((int(time.time())/3600)*3600))
			query = {'ts': serverts}
			try:
				tk = self.__get_token(query)
			except:
				tk = self.__get_token(query, True)

			query.update(tk)
			hash_url = urlparse.urljoin(t_base_link, self.hash_menu_link)
			hash_url = hash_url + '?' + urllib.urlencode(query)

			r1, headers, content, cookie2 = proxies.request(hash_url, headers=self.headers, limit='0', output='extended', httpsskip=True)

			cookie = '%s; %s; user-info=null; reqkey=%s' % (cookie1, cookie2, reqkey)
			
			self.headers['Cookie'] = cookie
			log('SUCCESS', 'initAndSleep', 'Cookies : %s for %s' % (cookie, self.base_link))
		except Exception as e:
			log('ERROR', 'initAndSleep', '%s' % e)
Example #10
 def _createSession(self, customHeaders=None):
     # Create a 'requests.Session' and try to spoof a header from a web browser.
     customHeaders = customHeaders or {}  # avoid a shared mutable default argument
     session = requests.Session()
     session.headers.update({
         'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
         'User-Agent': customHeaders.get('UA', randomagent()),
         'Accept-Language': 'en-US,en;q=0.5',
         'Referer': customHeaders.get('referer', self.BASE_URL + '/'),
         'DNT': '1'
     })
     return session
Example #11
    def sources(self, url, hostDict, hostprDict):
        try:
            sources = []

            if url is None: return sources

            headers = {'User-Agent': client.randomagent()}
            html = client.request(url, headers=headers)

            Links = re.compile('id="link_.+?target="_blank" id="(.+?)"', re.DOTALL).findall(html)
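            # Each hidden "link_*" anchor carries the outbound hoster URL in a second id attribute.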
            for vid_url in Links:
                if 'openload' in vid_url or 'streamango' in vid_url:
                    # Both hosters are scraped identically; only the label differs.
                    try:
                        source_html = client.request(vid_url, headers=headers)
                        source_string = re.compile('description" content="(.+?)"', re.DOTALL).findall(source_html)[0]
                        quality, info = source_utils.get_release_quality(source_string, vid_url)
                    except:
                        quality = 'DVD'
                        info = []
                    host = 'Openload' if 'openload' in vid_url else 'Streamango'
                    sources.append(
                        {'source': host, 'quality': quality, 'language': 'en', 'url': vid_url, 'info': info,
                         'direct': False, 'debridonly': False})
                elif resolveurl.HostedMediaFile(vid_url):
                    quality, info = source_utils.get_release_quality(vid_url, vid_url)
                    host = vid_url.split('//')[1].replace('www.', '')
                    host = host.split('/')[0].split('.')[0].title()
                    sources.append(
                        {'source': host, 'quality': quality, 'language': 'en', 'url': vid_url, 'info': info,
                         'direct': False, 'debridonly': False})
            return sources
        except:
            failure = traceback.format_exc()
            log_utils.log('SolarMovie - Exception: \n' + str(failure))
            return sources
Example #12
    def sources(self, url, hostDict, hostprDict):
        try:
            sources = []

            if url is None: return sources

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])

            title = data['title']
            year = data['year']

            h = {'User-Agent': client.randomagent()}

            v = '%s_%s' % (cleantitle.geturl(title).replace('-', '_'), year)

            url = '/watch_%s.html' % v
            url = urlparse.urljoin(self.base_link, url)

            c = client.request(url, headers=h, output='cookie')
            c = client.request(urlparse.urljoin(self.base_link, '/av'), cookie=c, output='cookie', headers=h,
                               referer=url)
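            # The two cookie requests above warm up the session before the video-info XHR below.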
            # c = client.request(url, cookie=c, headers=h, referer=url, output='cookie')

            post = urllib.urlencode({'v': v})
            u = urlparse.urljoin(self.base_link, '/video_info/frame')

            # r = client.request(u, post=post, cookie=c, headers=h, XHR=True, referer=url)
            r = client.request(u, post=post, headers=h, XHR=True, referer=url)
            r = json.loads(r).values()
            r = [urllib.unquote(i.split('url=')[-1]) for i in r]

            for i in r:
                try:
                    sources.append(
                        {'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'en',
                         'url': i, 'direct': True, 'debridonly': False})
                except:
                    pass

            return sources
        except:
            return sources
Example #13
    def initAndSleep(self):
        try:
            t_base_link = self.base_link
            self.headers = {'X-Requested-With': 'XMLHttpRequest'}
            self.headers['Referer'] = t_base_link
            ua = client.randomagent()
            self.headers['User-Agent'] = ua

            #get cf cookie
            cookie = proxies.request(url=t_base_link,
                                     headers=self.headers,
                                     output='cookie',
                                     use_web_proxy=self.proxyrequired,
                                     httpsskip=True)
            self.headers['Cookie'] = cookie
            log('SUCCESS', 'initAndSleep',
                'Cookies : %s for %s' % (cookie, self.base_link))
        except Exception as e:
            log('ERROR', 'initAndSleep', '%s' % e)
Example #14
    def __init__(self):
        self.priority = 1
        self.language = ['en']
        self.domains = ['ondarewatch.com', 'dailytvfix.com']
        self.base_link = 'http://www.dailytvfix.com'
        self.search_link = self.base_link + '/ajax/search.php'
        self.ua = client.randomagent()

        self.search_headers = {
            'Host': self.base_link.replace('http://', '', 1),
            'User-Agent': self.ua,
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': self.base_link + '/',
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'X-Requested-With': 'XMLHttpRequest',
            'DNT': '1'
        }
Example #15
    def movie(self, imdb, title, localtitle, aliases, year):
        try:
            search_id = title.lower().replace(':', ' ').replace('-', ' ')

            start_url = urlparse.urljoin(self.base_link, (self.search_link % (search_id.replace(' ', '%20'))))

            headers = {'User-Agent': client.randomagent()}
            html = client.request(start_url, headers=headers)

            match = re.compile('<span class="name"><a title="(.+?)" href="(.+?)".+?title="(.+?)"', re.DOTALL).findall(
                html)
            for name, item_url, link_year in match:
                if year in link_year:
                    if cleantitle.get(title) in cleantitle.get(name):
                        return item_url
            return
        except:
            failure = traceback.format_exc()
            log_utils.log('SolarMovie - Exception: \n' + str(failure))
            return
Example #16
 def _createSession(self, userAgent=None, cookies=None, referer=None):
     # Try to spoof a header from a web browser.
     session = requests.Session()
     session.headers.update({
         'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
         'User-Agent': userAgent or randomagent(),
         'Accept-Language': 'en-US,en;q=0.5',
         'Referer': referer or self.BASE_URL + '/',
         'Upgrade-Insecure-Requests': '1',
         'DNT': '1'
     })
     if cookies:
         session.cookies.update(cookies)
     return session
Example #17
 def _createSession(self, userAgent=None, cookies=None, referer=None):
     # Try to spoof a header from a web browser.
     session = requests.Session()
     session.headers.update({
         'Accept': self.DEFAULT_ACCEPT,
         'User-Agent': userAgent or randomagent(),
         'Accept-Language': 'en-US,en;q=0.5',
         'Referer': referer or self.BASE_URL + '/',
         'DNT': '1'
     })
     if cookies:
         session.cookies.update(cookies)
         session.cookies[''] = '__test'  # See _getSearch() for more info on this.
     return session
Example #18
 def __init__(self):
     del loggertxt[:]
     self.ver = '0.1.3'
     self.update_date = 'Aug. 09, 2018'
     log(type='INFO',
         method='init',
         err=' -- Initializing %s %s %s Start --' %
         (name, self.ver, self.update_date))
     self.init = False
     self.base_link_alts = ['http://www.primewire.gr']
     self.base_link = self.base_link_alts[0]
     self.MainPageValidatingContent = [
         'PrimeWire | LetMeWatchThis | 1Channel'
     ]
     self.type_filter = ['movie', 'show', 'anime']
     self.name = name
     self.disabled = False
     self.loggertxt = []
     self.ssl = False
     self.headers = {}
     ua = client.randomagent()
     self.headers['User-Agent'] = ua
     self.logo = 'http://i.imgur.com/6zeDNpu.png'
     self.key_link = '/index.php?search'
     self.moviesearch_link = '/index.php?search_keywords=%s&key=%s&search_section=1'
     self.tvsearch_link = '/index.php?tv=&search_keywords=%s&key=%s&search_section=1'
     self.headers['Connection'] = 'keep-alive'  # keep the User-Agent set above
     self.speedtest = 0
     if len(proxies.sourceProxies) == 0:
         proxies.init()
     self.proxyrequired = False
     self.msg = ''
     self.siteonline = self.testSite()
     self.testparser = 'Unknown'
     self.testparser = self.testParser()
     self.firstRunDisabled = False
     self.init = True
     log(type='INFO',
         method='init',
         err=' -- Initializing %s %s %s End --' %
         (name, self.ver, self.update_date))
Example #19
def requestdirect(url, close=True, redirect=True, followredirect=False, error=False, proxy=None, post=None, headers=None, mobile=False, limit=None, referer=None, cookie=None, output='', timeout='30', httpsskip=False, use_web_proxy=False, XHR=False, IPv4=False):

	try:
		urlhost = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
		
		if headers is None:
			headers = {'Connection': 'keep-alive'}
			headers['User-Agent'] = client.randomagent()
		
		res = client.request(url = PROXY_URL + url, close=close, redirect=redirect, followredirect=followredirect, error=error, proxy=proxy, post=post, headers=headers, mobile=mobile, limit=limit, referer=referer, cookie=cookie, output=output, timeout=timeout, httpsskip=httpsskip, use_web_proxy=use_web_proxy, XHR=XHR, IPv4=IPv4)
		
		page_data_string = client.getPageDataBasedOnOutput(res, output)
		
		#print page_data_string
		
		pattern = re.compile('<script[\s\S]+?/script>')
		page_data_string = re.sub(pattern, '', page_data_string)
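		# <script> blocks (likely injected by the web proxy) are stripped before unwrapping the URLs.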
			
		try:
			page_data_string = page_data_string.replace('\n', '')
			#page_data_string = page_data_string.replace('\r','r').replace('\n','<br/>').replace('\w','').replace('\.','').replace('\t','').replace('\ ','')
		except Exception as e:
			log('FAIL','requestdirect-1', '%s' % e, dolog=False)
			
		#print page_data_string

		try:
			page_data_stringx = json.dumps(page_data_string)
			page_data_stringx = page_data_stringx.replace('\\','')
			page_data_stringx = page_data_stringx[1:-1]
			page_data_string = page_data_stringx
		except Exception as e:
			log('FAIL','requestdirect-2', '%s' % e, dolog=False)
		
		#print page_data_string
		#page_data_string = str(page_data_string)
		
		try:
			r = unicode(page_data_string, "utf-8")
			page_data_string = r
		except Exception as e:
			log('FAIL','requestdirect-3', '%s' % e, dolog=False)
			try:
				r = str(page_data_string)
				page_data_string = r
			except Exception as e:
				log('FAIL','requestdirect-4', '%s' % e, dolog=False)
		
		page_data_string = page_data_string.replace('https://unblock.co/browse.php?', '')
		page_data_string = page_data_string.replace('/browse.php?u=', '')
		page_data_string = page_data_string.replace('b=4', '')
		page_data_string = page_data_string.replace('u=', '')
		page_data_string = page_data_string.replace('&http', 'http')
		page_data_string = page_data_string.replace('/http', 'http')
		
		try:
			page_data_string = page_data_string.decode('utf-8')
		except:
			pass
		try:
			page_data_string = urllib.unquote_plus(page_data_string)
		except:
			pass
		try:
			page_data_string = page_data_string.encode('utf-8')
		except:
			pass
		
		return client.getResponseDataBasedOnOutput(page_data_string, res, output)
		
	except Exception as e:
		log('ERROR','requestdirect', '%s' % e)
		return None
Example #20
    def sources(self, url, hostDict, hostprDict):
        try:
            sources = []

            if url is None: return sources

            if (self.user == '' or self.password == ''): raise Exception()
            login = urlparse.urljoin(self.base_link, '/login')
            post = {
                'username': self.user,
                'password': self.password,
                'returnpath': '/'
            }
            post = urllib.urlencode(post)

            headers = {'User-Agent': client.randomagent()}
            rlogin = client.request(login,
                                    headers=headers,
                                    post=post,
                                    output='extended')
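            # The session GUID comes back in the login response's Set-Cookie header.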
            guid = re.findall('(.*?);\s', rlogin[2]['Set-Cookie'])[0]
            headers['Cookie'] = guid  # assign, not append: no Cookie header exists on this dict yet
            url = urlparse.urljoin(self.base_link, url)

            result = client.request(url, headers=headers)

            url = re.findall("embeds\[\d+\]\s*=\s*'([^']+)", result)[0]
            url = client.parseDOM(url, 'iframe', ret='src')[0]
            url = url.replace('https://', 'http://')

            links = []

            try:
                dec = re.findall('mplanet\*(.+)', url)[0]
                dec = dec.rsplit('&')[0]
                dec = self._gkdecrypt(
                    base64.b64decode('MllVcmlZQmhTM2swYU9BY0lmTzQ='), dec)
                dec = directstream.google(dec)

                links += [(i['url'], i['quality'], 'gvideo') for i in dec]
            except:
                pass

            result = client.request(url, headers=headers)

            try:
                url = re.findall('src\s*=\s*(?:\'|\")(http.+?)(?:\'|\")',
                                 result)
                for i in url:
                    try:
                        links.append({
                            'source': 'gvideo',
                            'quality': directstream.googletag(i)[0]['quality'],
                            'url': i
                        })
                    except:
                        pass
            except:
                pass

            try:
                url = client.parseDOM(result, 'source', ret='src')
                url += re.findall('src\s*:\s*\'(.*?)\'', result)
                url = [i for i in url if '://' in i]
                links.append({'source': 'cdn', 'quality': 'HD', 'url': url[0]})
            except:
                pass

            for i in links:
                sources.append({
                    'source': i['source'],
                    'quality': i['quality'],
                    'language': 'en',
                    'url': i['url'],
                    'direct': True,
                    'debridonly': False
                })

            return sources
        except:
            return sources
Example #21
def cloudflareAgent():
    return client.randomagent()
Example #22
def requestdirect(url,
                  close=True,
                  redirect=True,
                  followredirect=False,
                  error=False,
                  proxy=None,
                  post=None,
                  headers=None,
                  mobile=False,
                  limit=None,
                  referer=None,
                  cookie=None,
                  output='',
                  timeout='30',
                  httpsskip=False,
                  use_web_proxy=False,
                  XHR=False,
                  IPv4=False):
    #try:

    print "Requesting: %s Using via: %s" % (url, PROXY_URL)

    urlhost = re.findall('([\w]+[.][\w]+)$',
                         urlparse.urlparse(url.strip().lower()).netloc)[0]

    if headers is None:
        headers = {'Connection': 'keep-alive'}
    headers['User-Agent'] = client.randomagent()

    res = client.request(url=PROXY_URL + url,
                         close=close,
                         redirect=redirect,
                         followredirect=followredirect,
                         error=error,
                         proxy=proxy,
                         post=post,
                         headers=headers,
                         mobile=mobile,
                         limit=limit,
                         referer=referer,
                         cookie=cookie,
                         output=output,
                         timeout=timeout,
                         httpsskip=httpsskip,
                         use_web_proxy=use_web_proxy,
                         XHR=XHR,
                         IPv4=IPv4)

    page_data_string = client.getPageDataBasedOnOutput(res, output)

    page_data_string = page_data_string.decode('utf-8')
    page_data_string = urllib.unquote_plus(page_data_string)
    page_data_string = page_data_string.encode('utf-8')

    page_data_string = page_data_string.replace(
        '/o.php?b=4&amp;f=frame&amp;mobile=&amp;u=', '')
    page_data_string = page_data_string.replace(
        '/o.php?b=4&amp;mobile=&amp;u=', '')
    page_data_string = page_data_string.replace('/o.php?b=4&mobile=&u=', '')

    # page_data_string_t = None
    # regex = r'{.*[token:]}]}'
    # matches = re.finditer(regex, page_data_string)
    # for matchNum, match in enumerate(matches):
    # page_data_string_t = match.group()
    # break
    # if page_data_string_t != None and 'token' in page_data_string_t:
    # page_data_string = page_data_string_t

    return client.getResponseDataBasedOnOutput(page_data_string, res, output)
    #except Exception as e:
    #	print "Error: %s - %s" % (name, e)
    #	return None
Example #23
def requestdirect(url,
                  close=True,
                  redirect=True,
                  followredirect=False,
                  error=False,
                  proxy=None,
                  post=None,
                  headers=None,
                  mobile=False,
                  limit=None,
                  referer=None,
                  cookie=None,
                  output='',
                  timeout='30',
                  httpsskip=False,
                  use_web_proxy=False,
                  XHR=False,
                  IPv4=False):

    print "Requesting: %s Using via: %s" % (url, PROXY_URL)

    urlhost = re.findall('([\w]+[.][\w]+)$',
                         urlparse.urlparse(url.strip().lower()).netloc)[0]

    if headers is None:
        headers = {'Connection': 'keep-alive'}
        headers['User-Agent'] = client.randomagent()

    res = client.request(url=PROXY_URL + url,
                         close=close,
                         redirect=redirect,
                         followredirect=followredirect,
                         error=error,
                         proxy=proxy,
                         post=post,
                         headers=headers,
                         mobile=mobile,
                         limit=limit,
                         referer=referer,
                         cookie=cookie,
                         output=output,
                         timeout=timeout,
                         httpsskip=httpsskip,
                         use_web_proxy=use_web_proxy,
                         XHR=XHR,
                         IPv4=IPv4)

    page_data_string = client.getPageDataBasedOnOutput(res, output)

    pattern = re.compile('<script[\s\S]+?/script>')
    page_data_string = re.sub(pattern, '', page_data_string)

    try:
        page_data_string = page_data_string.replace('\n', '')
        #page_data_string = page_data_string.replace('\r','r').replace('\n','<br/>').replace('\w','').replace('\.','').replace('\t','').replace('\ ','')
    except Exception as e:
        control.log("Error1: %s - %s" % (name, e))

    #print page_data_string

    page_data_string = json.dumps(page_data_string)
    page_data_string = page_data_string.replace('\\', '')
    page_data_string = page_data_string[1:-1]

    #print page_data_string
    #page_data_string = str(page_data_string)

    try:
        r = unicode(page_data_string, "utf-8")
        page_data_string = r
    except Exception as e:
        control.log("Error2: %s - %s" % (name, e))
        try:
            r = str(page_data_string)
            page_data_string = r
        except Exception as e:
            control.log("Error3: %s - %s" % (name, e))

    page_data_string = page_data_string.replace(
        'https://www.xperienc.com/browsexp.php?', '')
    page_data_string = page_data_string.replace('b=40', '')
    page_data_string = page_data_string.replace('u=', '')
    page_data_string = page_data_string.replace('browsexp.php?', '')
    page_data_string = page_data_string.replace('&http', 'http')
    page_data_string = page_data_string.replace('/http', 'http')

    page_data_string = page_data_string.decode('utf-8')
    page_data_string = urllib.unquote_plus(page_data_string)
    page_data_string = page_data_string.encode('utf-8')

    return client.getResponseDataBasedOnOutput(page_data_string, res, output)
Example #24
    def sources(self, url, hostDict, hostprDict):
        try:
            sources = []

            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])
            aliases = eval(data['aliases'])
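            # NOTE: eval() on data unpacked from the url string is risky; ast.literal_eval would be a safer parse.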
            headers = {}

            if 'tvshowtitle' in data:
                episode = int(data['episode'])
                url = self.searchShow(data['tvshowtitle'], data['season'],
                                      data['year'], aliases, headers)
            else:
                episode = 0
                url = self.searchMovie(data['title'], data['year'], aliases,
                                       headers)

            if url is None: return sources

            # url = urlparse.urljoin(self.base_link, url)
            url = re.sub('/watching.html$', '', url.strip('/'))
            url = url + '/watching.html'

            p = client.request(url)

            if episode > 0:
                r = client.parseDOM(p, 'div', attrs={'class': 'ep_link.+?'})[0]
                r = zip(client.parseDOM(r, 'a', ret='href'),
                        client.parseDOM(r, 'a'))
                r = [(i[0], re.findall('Episode\s+(\d+)', i[1])) for i in r]
                r = [(i[0], i[1][0]) for i in r]
                url = [i[0] for i in r if int(i[1]) == episode][0]
                p = client.request(url, headers=headers)

            referer = url
            headers = {'User-Agent': client.randomagent(), 'Referer': url}

            id = re.findall('load_player\(.+?(\d+)', p)[0]
            r = urlparse.urljoin(self.base_link,
                                 '/ajax/movie/load_player_v3?id=%s' % id)
            r = client.request(r, headers=headers)

            url = json.loads(r)['value']

            if url.startswith('//'):
                url = 'https:' + url

            r = client.request(url, headers=headers)

            headers = '|' + urllib.urlencode(headers)

            source = str(json.loads(r)['playlist'][0]['file']) + headers

            sources.append({
                'source': 'CDN',
                'quality': 'HD',
                'language': 'en',
                'url': source,
                'direct': True,
                'debridonly': False
            })

            return sources
        except:
            return sources
Example #25
 def testSite(self):
     try:
         ua = client.randomagent()
         self.headers['User-Agent'] = ua
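         # Time a direct request first; if the validating content is missing,
         # retry through a web proxy (twice) before giving up.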
         x1 = time.time()
         http_res, content = proxies.request(url=self.base_link,
                                             headers=self.headers,
                                             output='response',
                                             use_web_proxy=False,
                                             httpsskip=True)
         self.speedtest = time.time() - x1
         if content is not None and content.find(
                 self.MainPageValidatingContent) > -1:
             x1 = time.time()
             self.cookie = proxies.request(url=self.base_link,
                                           headers=self.headers,
                                           output='cookie',
                                           use_web_proxy=False,
                                           httpsskip=True)
             self.speedtest = time.time() - x1
             self.headers['Cookie'] = self.cookie
             self.log('SUCCESS',
                      'testSite',
                      'HTTP Resp : %s for %s' % (http_res, self.base_link),
                      dolog=True)
             self.log('SUCCESS',
                      'testSite',
                      'Cookie Resp : %s for %s' %
                      (self.cookie, self.base_link),
                      dolog=True)
             return True
         else:
             self.log('ERROR',
                      'testSite',
                      'HTTP Resp : %s for %s' % (http_res, self.base_link),
                      dolog=True)
             x1 = time.time()
             http_res, content = proxies.request(url=self.base_link,
                                                 headers=self.headers,
                                                 output='response',
                                                 use_web_proxy=True,
                                                 httpsskip=True)
             self.speedtest = time.time() - x1
             if content is not None and content.find(
                     self.MainPageValidatingContent) > -1:
                 self.proxyrequired = True
                 x1 = time.time()
                 self.cookie = proxies.request(url=self.base_link,
                                               headers=self.headers,
                                               output='cookie',
                                               use_web_proxy=True,
                                               httpsskip=True)
                 self.speedtest = time.time() - x1
                 self.headers['Cookie'] = self.cookie
                 self.log('SUCCESS',
                          'testSite',
                          'HTTP Resp : %s via proxy for %s' %
                          (http_res, self.base_link),
                          dolog=True)
                 self.log('SUCCESS',
                          'testSite',
                          'Cookie Resp : %s for %s' %
                          (self.cookie, self.base_link),
                          dolog=True)
                 return True
             else:
                 time.sleep(2.0)
                 x1 = time.time()
                 http_res, content = proxies.request(url=self.base_link,
                                                     headers=self.headers,
                                                     output='response',
                                                     use_web_proxy=True,
                                                     httpsskip=True)
                 self.speedtest = time.time() - x1
                 if content is not None and content.find(
                         self.MainPageValidatingContent) > -1:
                     self.proxyrequired = True
                     self.log('SUCCESS',
                              'testSite',
                              'HTTP Resp : %s via proxy for %s' %
                              (http_res, self.base_link),
                              dolog=True)
                     return True
                 else:
                     self.log('ERROR',
                              'testSite',
                              'HTTP Resp : %s via proxy for %s' %
                              (http_res, self.base_link),
                              dolog=True)
                     self.log('ERROR', 'testSite', content, dolog=True)
         return False
     except Exception as e:
         self.log('ERROR', 'testSite', '%s' % e, dolog=True)
         return False
Example #26
    def get_sources(self, url, hosthdDict, hostDict, locDict):
        try:
            sources = []

            if url is None: return sources

            url = urlparse.urljoin(self.base_link, url)

            try: url, episode = re.findall('(.+?)\?episode=(\d*)$', url)[0]
            except: episode = None

            headers = {'X-Requested-With': 'XMLHttpRequest', 'Referer': url}

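            # client.request can return None on failure, so retry up to three times.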
            for i in range(3):
                result = client.request(url)
                if result is not None: break

            if episode is not None:
                mid = client.parseDOM(result, 'input', ret='value', attrs = {'name': 'phimid'})[0]
                url = urlparse.urljoin(self.base_link, '/ajax.php')
                post = {'ipos_server': 1, 'phimid': mid, 'keyurl': episode}
                post = urllib.urlencode(post)

                for i in range(3):
                    result = client.request(url, post=post, headers=headers, timeout='10')
                    if result is not None: break

            r = client.parseDOM(result, 'div', attrs = {'class': '[^"]*server_line[^"]*'})

            links = []

            for u in r:
                try:
                    host = client.parseDOM(u, 'p', attrs = {'class': 'server_servername'})[0]
                    host = host.strip().lower().split(' ')[-1]

                    url = urlparse.urljoin(self.base_link, '/ip.temp/swf/plugins/ipplugins.php')

                    p1 = client.parseDOM(u, 'a', ret='data-film')[0]
                    p2 = client.parseDOM(u, 'a', ret='data-server')[0]
                    p3 = client.parseDOM(u, 'a', ret='data-name')[0]
                    post = {'ipplugins': 1, 'ip_film': p1, 'ip_server': p2, 'ip_name': p3}
                    post = urllib.urlencode(post)

                    if host not in ['google', 'putlocker', 'megashare']: raise Exception()

                    for i in range(3):
                        result = client.request(url, post=post, headers=headers, timeout='10')
                        if result is not None: break

                    result = json.loads(result)['s']

                    url = urlparse.urljoin(self.base_link, '/ip.temp/swf/ipplayer/ipplayer.php')

                    post = {'u': result, 'w': '100%', 'h': '420'}
                    post = urllib.urlencode(post)

                    for i in range(3):
                        result = client.request(url, post=post, headers=headers)
                        if result is not None: break

                    url = json.loads(result)['data']

                    if isinstance(url, list):
                        url = [i['files'] for i in url]
                        for i in url:
                            try: sources.append({'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'url': i})
                            except: pass

                    else:
                        url = client.request(url)
                        url = client.parseDOM(url, 'source', ret='src', attrs = {'type': 'video.+?'})[0]
                        url += '|%s' % urllib.urlencode({'User-agent': client.randomagent()})
                        sources.append({'source': 'cdn', 'quality': 'HD', 'provider': 'Tunemovie', 'url': url})

                except:
                    pass

            return sources
        except Exception as e:
            control.log('ERROR tunemovie %s' % e)
            return sources
Example #27
    def createMeta(self,
                   url,
                   provider,
                   logo,
                   quality,
                   links,
                   key,
                   riptype,
                   vidtype='Movie',
                   lang='en',
                   sub_url=None,
                   txt='',
                   file_ext='.mp4',
                   testing=False,
                   poster=None,
                   headers=None):

        if testing:
            links.append(url)
            return links

        if control.setting('Host-%s' % name) == False:
            log('INFO', 'createMeta', 'Host Disabled by User')
            return links

        orig_url = url
        ua = client.randomagent()
        headers = {'Referer': 'https://vidnode.net/', 'User-Agent': ua}

        urldata = client.b64encode(json.dumps('', encoding='utf-8'))
        params = {'headers': headers, 'cookie': None}
        params = json.dumps(params, encoding='utf-8')
        params = client.b64encode(params)
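        # Header/cookie context is serialized to base64 JSON, presumably so a
        # downstream player or downloader can replay it later.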

        online = check(url, headers=headers)
        vidurls, err, sub_url_t = getAllQuals(url, online)

        if vidurls is None:
            log(type='ERROR', method='createMeta-1', err=u'%s' % err)
            return links

        if sub_url_t is not None:
            sub_url = sub_url_t

        files_ret = []

        #print vidurls

        for vv in vidurls:
            durl = vv['page']
            vidurl, r1, r2 = resolve(durl, online)

            print vidurl

            if vidurl is None:
                log(type='ERROR', method='createMeta', err=u'%s' % r1)
            else:
                quality = vv['label']
                try:
                    online = check(vidurl, headers=headers)
                    fs = client.getFileSize(vidurl)
                    fs = int(fs)
                except:
                    fs = 0

                try:
                    log(type='INFO',
                        method='createMeta',
                        err=u'durl:%s ; res:%s; fs:%s' % (vidurl, quality, fs))
                    files_ret.append({
                        'source': self.name,
                        'maininfo': '',
                        'titleinfo': txt,
                        'quality': quality,
                        'vidtype': vidtype,
                        'rip': riptype,
                        'provider': provider,
                        'url': durl,
                        'durl': durl,
                        'urldata': urldata,
                        'params': params,
                        'logo': logo,
                        'online': online,
                        'allowsDownload': self.allowsDownload,
                        'resumeDownload': self.resumeDownload,
                        'allowsStreaming': self.allowsStreaming,
                        'key': key,
                        'enabled': True,
                        'fs': fs,
                        'file_ext': file_ext,
                        'ts': time.time(),
                        'lang': lang,
                        'poster': poster,
                        'sub_url': sub_url,
                        'subdomain': client.geturlhost(url),
                        'misc': {
                            'player': 'iplayer',
                            'gp': False
                        }
                    })
                except Exception as e:
                    log(type='ERROR', method='createMeta', err=u'%s' % e)
                    files_ret.append({
                        'source': self.name,
                        'maininfo': '',
                        'titleinfo': txt,
                        'quality': quality,
                        'vidtype': vidtype,
                        'rip': 'Unknown',
                        'provider': provider,
                        'url': durl,
                        'durl': durl,
                        'urldata': urldata,
                        'params': params,
                        'logo': logo,
                        'online': online,
                        'allowsDownload': self.allowsDownload,
                        'resumeDownload': self.resumeDownload,
                        'allowsStreaming': self.allowsStreaming,
                        'key': key,
                        'enabled': True,
                        'fs': fs,
                        'file_ext': file_ext,
                        'ts': time.time(),
                        'lang': lang,
                        'sub_url': sub_url,
                        'poster': poster,
                        'subdomain': client.geturlhost(url),
                        'misc': {
                            'player': 'iplayer',
                            'gp': False
                        }
                    })

        for fr in files_ret:
            links.append(fr)

        log('INFO',
            'createMeta',
            'Successfully processed %s link >>> %s' % (provider, orig_url),
            dolog=self.init)
        return links
Example #28
def cloudflareAgent():
    return client.randomagent()
Example #29
    def get_sources(self, url, hosthdDict, hostDict, locDict):
        #try: sources.append({'source': 'gvideo', 'quality': client.googletag(i)[0]['quality'], 'url': i})
        try:
            sources = []

            if url is None: return sources

            url = urlparse.urljoin(self.base_link, url)

            try:
                url, episode = re.findall('(.+?)\?episode=(\d*)$', url)[0]
            except:
                episode = None

            ref = url

            for i in range(3):
                result = client.request(url)
                if result is not None: break

            if episode is not None:
                result = client.parseDOM(result,
                                         'div',
                                         attrs={'id': 'ip_episode'})[0]
                ep_url = client.parseDOM(result,
                                         'a',
                                         attrs={'data-name': str(episode)},
                                         ret='href')[0]
                for i in range(3):
                    result = client.request(ep_url)
                    if result is not None: break

            r = client.parseDOM(result,
                                'div',
                                attrs={'class': '[^"]*server_line[^"]*'})

            for u in r:
                try:
                    url = urlparse.urljoin(
                        self.base_link, '/ip.file/swf/plugins/ipplugins.php')
                    p1 = client.parseDOM(u, 'a', ret='data-film')[0]
                    p2 = client.parseDOM(u, 'a', ret='data-server')[0]
                    p3 = client.parseDOM(u, 'a', ret='data-name')[0]
                    post = {
                        'ipplugins': 1,
                        'ip_film': p1,
                        'ip_server': p2,
                        'ip_name': p3
                    }
                    post = urllib.urlencode(post)
                    for i in range(3):
                        result = client.request(url,
                                                post=post,
                                                XHR=True,
                                                referer=ref,
                                                timeout='10')
                        if result is not None: break

                    result = json.loads(result)
                    u = result['s']
                    s = result['v']

                    url = urlparse.urljoin(
                        self.base_link, '/ip.file/swf/ipplayer/ipplayer.php')

                    post = {'u': u, 'w': '100%', 'h': '420', 's': s, 'n': 0}
                    post = urllib.urlencode(post)

                    for i in range(3):
                        result = client.request(url,
                                                post=post,
                                                XHR=True,
                                                referer=ref)
                        if result is not None: break

                    url = json.loads(result)['data']

                    if isinstance(url, list):
                        url = [i['files'] for i in url]
                        for i in url:
                            try:
                                sources.append({
                                    'source': 'gvideo',
                                    'provider': 'Tunemovie',
                                    'quality': client.googletag(i)[0]['quality'],
                                    'url': i
                                })
                            except:
                                pass

                    else:
                        url = client.request(url)
                        url = client.parseDOM(url,
                                              'source',
                                              ret='src',
                                              attrs={'type': 'video.+?'})[0]
                        url += '|%s' % urllib.urlencode(
                            {'User-agent': client.randomagent()})
                        sources.append({
                            'source': 'cdn',
                            'quality': 'HD',
                            'provider': 'Tunemovie',
                            'url': url
                        })

                except:
                    pass

            return sources
        except:
            return sources
Example #30
 def testSiteAlts(self, site):
     try:
         ua = client.randomagent()
         self.headers['User-Agent'] = ua
         self.base_link = proxies.request(url=site,
                                          headers=self.headers,
                                          output='geturl',
                                          use_web_proxy=False,
                                          httpsskip=True).strip("/")
         x1 = time.time()
         http_res, content = proxies.request(url=self.base_link,
                                             headers=self.headers,
                                             output='response',
                                             use_web_proxy=False,
                                             httpsskip=True)
         self.speedtest = time.time() - x1
         if content is not None and content.find(
                 self.MainPageValidatingContent) > -1:
             x1 = time.time()
             self.cookie = proxies.request(url=self.base_link,
                                           headers=self.headers,
                                           output='cookie',
                                           use_web_proxy=False,
                                           httpsskip=True)
             self.speedtest = time.time() - x1
             self.headers['Cookie'] = self.cookie
             log('SUCCESS', 'testSite',
                 'HTTP Resp : %s for %s' % (http_res, self.base_link))
             log('SUCCESS', 'testSite',
                 'Cookie Resp : %s for %s' % (self.cookie, self.base_link))
             return True
         else:
             log(
                 'FAIL', 'testSite',
                 'Validation content Not Found. HTTP Resp : %s for %s' %
                 (http_res, self.base_link))
             x1 = time.time()
             http_res, content = proxies.request(url=self.base_link.replace(
                 'https:', 'http:'),
                                                 headers=self.headers,
                                                 output='response',
                                                 use_web_proxy=True,
                                                 httpsskip=True)
             self.speedtest = time.time() - x1
             if content is not None and content.find(
                     self.MainPageValidatingContent) > -1:
                 self.proxyrequired = True
                 x1 = time.time()
                 self.cookie = proxies.request(url=self.base_link,
                                               headers=self.headers,
                                               output='cookie',
                                               use_web_proxy=True,
                                               httpsskip=True)
                 self.speedtest = time.time() - x1
                 self.headers['Cookie'] = self.cookie
                 log(
                     'SUCCESS', 'testSite',
                     'HTTP Resp : %s via proxy for %s' %
                     (http_res, self.base_link))
                 log(
                     'SUCCESS', 'testSite', 'Cookie Resp : %s for %s' %
                     (self.cookie, self.base_link))
                 return True
             else:
                 time.sleep(2.0)
                 x1 = time.time()
                 http_res, content = proxies.request(url=self.base_link,
                                                     headers=self.headers,
                                                     output='response',
                                                     use_web_proxy=True,
                                                     httpsskip=True)
                 self.speedtest = time.time() - x1
                 if content is not None and content.find(
                         self.MainPageValidatingContent) > -1:
                     self.proxyrequired = True
                     log(
                         'SUCCESS', 'testSite',
                         'HTTP Resp : %s via proxy for %s' %
                         (http_res, self.base_link))
                     return True
                 else:
                     log(
                         'FAIL', 'testSite',
                         'Validation content Not Found. HTTP Resp : %s via proxy for %s'
                         % (http_res, self.base_link))
         return False
     except Exception as e:
         log('ERROR', 'testSite', '%s' % e)
         return False
Example #31
import re, urllib, urlparse, json, random, time, base64, cookielib, urllib2, sys
import HTMLParser

try:
    from resources.lib.libraries import control
    from resources.lib.libraries import cleantitle
    from resources.lib.libraries import client
    from resources.lib.libraries import testparams
    from resources.lib.libraries import workers
    from resources.lib import resolvers
    from resources.lib import proxies
except:
    pass

try:
    USER_AGENT = client.randomagent()
except:
    USER_AGENT = "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0"

name = 'Einthusan'
loggertxt = []


class source:
    def __init__(self):
        del loggertxt[:]
        self.ver = '0.0.1'
        self.update_date = 'Mar. 28, 2019'
        log(type='INFO',
            method='init',
            err=' -- Initializing %s %s %s Start --' %
            (name, self.ver, self.update_date))