Example #1
0
	def process(self, url, q, r, headers, page_url):
		"""Probe a resolved media URL and build a list of source-item dicts.

		Parameters:
			url      -- direct or embed URL of the media to probe
			q        -- caller's current quality guess (may be overridden)
			r        -- caller's current riptype guess (may be overridden)
			headers  -- optional HTTP headers forwarded to size/availability probes
			page_url -- page the URL was scraped from (not used here)

		Returns a list of dicts, each carrying: quality, riptype, src, fs
		(file size in bytes), online flag, base64-encoded params/urldata and
		allowsStreaming/allowsDownload capability flags.  Host-specific
		handling exists for vcstream.to, the 3donlinefilms family and
		cooltvseries.com; anything else gets a generic probe.  If every
		branch fails, a single fallback item for the raw URL is appended.
		"""
		items = []

		try:
			if 'vcstream.to' in url:
				# Pull the file id out of an //host/embed/<id> or //host/f/<id> URL.
				id = re.compile('//.+?/(?:embed|f)/([0-9a-zA-Z-_]+)').findall(url)[0]
				headersx = {'Referer': url, 'User-Agent': client.agent()}
				page_data = client.request('https://vcstream.to/player?fid=%s&page=embed' % id, headers=headersx)
				# The player page embeds a JSON-ish "sources: [...]" array;
				# strip escapes so it parses as real JSON.
				srcs = re.findall(r'sources:.\[(.*?)\]', page_data)[0]
				srcs = srcs.replace('\\n','').replace('\\','')
				srcs = '''[%s]''' % srcs
				j_data = json.loads(srcs)
				for j in j_data:
					t = j['name']
					label = j['label']
					u = j['src']
					# 'raw' entries encode the quality in the name; otherwise
					# the label itself is the quality string.
					if label.lower() == 'raw':
						q = source_utils.check_sd_url(t)
					else:
						q = label
					r = source_utils.check_sd_url_rip(t)

					# Retry the size probe without caller headers if the first
					# attempt returned nothing.
					fs = client.getFileSize(u, retry429=True, headers=headers)
					if fs == None or int(fs) == 0:
						fs = client.getFileSize(u, retry429=True)
					q = qual_based_on_fs(q,fs)
					online = check(u)
					urldata = client.b64encode(json.dumps('', encoding='utf-8'))
					params = client.b64encode(json.dumps('', encoding='utf-8'))
					if headers != None:
						paramsx = {'headers':headers}
						params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))

					items.append({'quality':q, 'riptype':r, 'src':u, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':True})

			elif '3donlinefilms.com' in url or '3dmoviesfullhd.com' in url or 'freedocufilms.com' in url:
				# Here `url` is an urlencoded query string (page, src_file, file),
				# not a plain URL.
				data = urlparse.parse_qs(url)
				headers = {}

				if '3donlinefilms.com' in url:
					headers['Referer'] = 'http://3donlinefilms.com'
					l0 = 'https://3donlinefilms.com/update.php'
				elif 'freedocufilms.com' in url:
					headers['Referer'] = 'http://freedocufilms.com'
					l0 = 'https://freedocufilms.com/update.php'
				else:
					headers['Referer'] = 'http://3dmoviesfullhd.com'
					l0 = 'https://3dmoviesfullhd.com/update.php'

				page = data['page'][0]
				cook = client.request(page, output='cookie')
				post_data = {'file':data['src_file'][0]}

				# The site expects its own session cookie plus these marker
				# cookies before it will serve the file headers.
				cookie = '%s; zeroday=; visit=yes; jwplayer.qualityLabel=HD' % cook
				headers['Referer'] = page
				headers['User-Agent'] = client.agent()
				headers['Cookie'] = cookie

				u = data['file'][0]
				u = u.replace('//freedocufilms','//www.freedocufilms')

				# POST to update.php primes the session; a failure here is
				# logged but not fatal since the HEAD probe below may still work.
				try:
					ret = client.request(l0, post=client.encodePostData(post_data),headers=headers, output='extended', XHR=True, cookie=cookie)
				except Exception as e:
					log(type='FAIL', method='process', err='%s' % e, dolog=False, logToControl=False, doPrint=True)

				ret = client.request(u, output='headers', headers=headers, XHR=True)

				try:
					fs = int(re.findall(r'Content-Length:(.*)', str(ret), re.MULTILINE)[0].strip())
				except:
					fs = 0

				q = qual_based_on_fs(q,fs)
				online = False

				# A non-zero Content-Length is taken as proof the file exists.
				if int(fs) > 0:
					online = True

				urldata = client.b64encode(json.dumps('', encoding='utf-8'))
				paramsx = {'headers':headers}
				params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))

				items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':False, 'allowsDownload':True})
			elif 'cooltvseries.com' in url:
				# Resolve redirects to the real file URL, then force an .mp4 hint.
				urlx = client.request(url, output='geturl', headers=headers)
				urlx = '%s?e=file.mp4' % urlx
				# NOTE(review): the size/online probes below use the original
				# `url`, not the resolved `urlx` that is returned as 'src' —
				# looks intentional only if both answer identically; verify.
				fs = client.getFileSize(url, retry429=True, headers=headers)
				if fs == None or int(fs) == 0:
					fs = client.getFileSize(url, retry429=True)
				q = qual_based_on_fs(q,fs)
				online = check(url)
				urldata = client.b64encode(json.dumps('', encoding='utf-8'))
				params = client.b64encode(json.dumps('', encoding='utf-8'))
				if headers != None:
					paramsx = {'headers':headers}
					params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
				allowsDownload = True
				items.append({'quality':q, 'riptype':r, 'src':urlx, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':allowsDownload})
			else:
				# Generic probe for any other host.
				fs = client.getFileSize(url, retry429=True, headers=headers)
				if fs == None or int(fs) == 0:
					fs = client.getFileSize(url, retry429=True)
				q = qual_based_on_fs(q,fs)
				online = check(url)
				urldata = client.b64encode(json.dumps('', encoding='utf-8'))
				params = client.b64encode(json.dumps('', encoding='utf-8'))
				if headers != None:
					paramsx = {'headers':headers}
					params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
				allowsDownload = True
				# HLS playlists can be streamed but not downloaded as a file.
				if '.m3u8' in url:
					allowsDownload = False
				items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':allowsDownload})

		except Exception as e:
			log(type='ERROR',method='process', err=u'%s' % e)

		# Last-resort fallback: emit one generic item for the raw URL so the
		# caller always gets something back.
		if len(items) == 0:
			fs = client.getFileSize(url, retry429=True, headers=headers)
			if fs == None or int(fs) == 0:
				fs = client.getFileSize(url, retry429=True)
			q = qual_based_on_fs(q,fs)
			online = check(url)
			urldata = client.b64encode(json.dumps('', encoding='utf-8'))
			params = client.b64encode(json.dumps('', encoding='utf-8'))
			if headers != None:
				paramsx = {'headers':headers}
				params = client.b64encode(json.dumps(paramsx, encoding='utf-8'))
			items.append({'quality':q, 'riptype':r, 'src':url, 'fs':fs, 'online':online, 'params':params, 'urldata':urldata, 'allowsStreaming':True, 'allowsDownload':True})

		return items
    def get_sources(self,
                    url,
                    hosthdDict=None,
                    hostDict=None,
                    locDict=None,
                    proxy_options=None,
                    key=None,
                    testing=False):
        """Scrape playable source links for a title from this provider.

        ``url`` is a (path, episode) pair previously produced by the
        provider's search step; ``key`` identifies the title for logging.
        Flow: fetch the title page, read quality/poster, optionally collect
        YouTube trailers, then walk the server list (token -> source URL,
        with an embed-link fallback) and hand every stream to
        ``resolvers.createMeta``.  Returns a list of link metadata dicts;
        empty on any failure (errors are logged, never raised).
        """
        #try:
        try:
            sources = []
            if control.setting('Provider-%s' % name) == False:
                log('INFO', 'get_sources', 'Provider Disabled by User')
                return sources
            if url == None:
                log('FAIL',
                    'get_sources',
                    'url == None. Could not find a matching title: %s' %
                    cleantitle.title_from_key(key),
                    dolog=not testing)
                return sources

            base_link = self.base_link

            # `url` may be a (path, episode) tuple or a plain string; the
            # numeric media id is the last "-<digits>" group in the path.
            try:
                if url[0].startswith('http'):
                    base_link = url[0]
                mid = re.findall('-(\d+)', url[0])[-1]
            except:
                if url.startswith('http'):
                    base_link = url
                mid = re.findall('-(\d+)', url)[-1]

            try:
                if len(url[1]) > 0:
                    episode = url[1]
                else:
                    episode = None
            except:
                episode = None

            #print mid

            links_m = []
            trailers = []
            headers = {'Referer': self.base_link}

            u = urlparse.urljoin(self.base_link, url[0])
            #print u
            #r = client.request(u, headers=headers, IPv4=True)
            r = proxies.request(u,
                                headers=headers,
                                IPv4=True,
                                proxy_options=proxy_options,
                                use_web_proxy=self.proxyrequired)

            # Quality/riptype from the page badge; default to 480p BRRIP.
            try:
                elem = client.parseDOM(r, 'span', attrs={'class':
                                                         'quality'})[0]
                qual = source_utils.check_sd_url(elem)
                riptype = source_utils.check_sd_url_rip(elem)
            except Exception as e:
                qual = '480p'
                riptype = 'BRRIP'

            try:
                poster = client.parseDOM(r, 'div', attrs={'class':
                                                          'dm-thumb'})[0]
                poster = client.parseDOM(poster, 'img', ret='src')[0]
            except:
                poster = None

            # Trailers are skipped in testing mode to keep runs fast.
            if testing == False:
                try:

                    #regex = r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"
                    #matches = re.finditer(regex, r, re.MULTILINE)
                    matches = re.compile(
                        'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
                    ).findall(r)
                    for match in matches:
                        try:
                            #print match
                            if 'youtube.com' in match:
                                match = match.replace('embed/', 'watch?v=')
                                trailers.append(match)
                        except:
                            pass
                except Exception as e:
                    pass

                for trailer in trailers:
                    links_m = resolvers.createMeta(trailer,
                                                   self.name,
                                                   self.logo,
                                                   '720p',
                                                   links_m,
                                                   key,
                                                   vidtype='Trailer',
                                                   testing=testing)

            try:
                # Server list comes back as JSON-wrapped HTML.
                u = urlparse.urljoin(self.base_link, self.server_link % mid)
                #print u
                #r = client.request(u, headers=headers, XHR=True, IPv4=True)
                r = proxies.request(u,
                                    headers=headers,
                                    XHR=True,
                                    IPv4=True,
                                    proxy_options=proxy_options,
                                    use_web_proxy=self.proxyrequired)
                r = json.loads(r)['html']
                r = client.parseDOM(r, 'div', attrs={'class': 'pas-list'})
                ids = client.parseDOM(r, 'li', ret='data-id')
                servers = client.parseDOM(r, 'li', ret='data-server')
                labels = client.parseDOM(r, 'a', ret='title')
                r = zip(ids, servers, labels)

                for eid in r:
                    #print r
                    try:
                        sub_url = None
                        # Episode number embedded in the link title; 0 = movie.
                        try:
                            ep = re.findall('episode.*?(\d+):.*?',
                                            eid[2].lower())[0]
                        except:
                            ep = 0

                        if (episode is None) or (int(ep) == int(episode)):

                            # Obfuscated token script -> (x, y) params needed
                            # by the source endpoint; three known obfuscation
                            # variants are handled by uncensored1/2/3.
                            url = urlparse.urljoin(
                                self.base_link,
                                self.token_link % (eid[0], mid))
                            #script = client.request(url, IPv4=True)
                            script = proxies.request(
                                url,
                                IPv4=True,
                                proxy_options=proxy_options,
                                use_web_proxy=self.proxyrequired)
                            #print script

                            if '$_$' in script:
                                params = self.uncensored1(script)
                            elif script.startswith('[]') and script.endswith(
                                    '()'):
                                params = self.uncensored2(script)
                            elif '_x=' in script and '_y=' in script:
                                params = self.uncensored3(script)
                            else:
                                raise Exception()

                            u = urlparse.urljoin(
                                self.base_link, self.sourcelink %
                                (eid[0], params['x'], params['y']))
                            #print u
                            #r = client.request(u, IPv4=True)
                            r = proxies.request(
                                u,
                                IPv4=True,
                                proxy_options=proxy_options,
                                use_web_proxy=self.proxyrequired)

                            # Empty response -> fall back to the embed link.
                            if r == None or len(r) == 0:
                                u = urlparse.urljoin(
                                    self.base_link, self.embed_link % (eid[0]))
                                #print u
                                #r = client.request(u, IPv4=True)
                                r = proxies.request(
                                    u,
                                    IPv4=True,
                                    proxy_options=proxy_options,
                                    use_web_proxy=self.proxyrequired)

                            # Two known payload shapes: playlist-style and
                            # a bare {'src': ...} object.
                            try:
                                url = json.loads(r)['playlist'][0]['sources']
                            except:
                                url = [{'file': json.loads(r)['src']}]

                            try:
                                url = [i['file'] for i in url]
                            except:
                                url = [url['file']]

                            # Optional subtitle track.
                            try:
                                sub_url = json.loads(
                                    r)['playlist'][0]['tracks'][0]['file']
                            except:
                                pass

                            vidtype = 'Movie'
                            if int(ep) > 0:
                                vidtype = 'Show'

                            for s in url:
                                links_m = resolvers.createMeta(s,
                                                               self.name,
                                                               self.logo,
                                                               qual,
                                                               links_m,
                                                               key,
                                                               poster=poster,
                                                               riptype=riptype,
                                                               vidtype=vidtype,
                                                               sub_url=sub_url,
                                                               testing=testing)
                    except:
                        pass
            except:
                pass

            sources += [l for l in links_m]

            if len(sources) == 0:
                log(
                    'FAIL', 'get_sources',
                    'Could not find a matching title: %s' %
                    cleantitle.title_from_key(key))
                return sources

            log('SUCCESS',
                'get_sources',
                '%s sources : %s' %
                (cleantitle.title_from_key(key), len(sources)),
                dolog=not testing)
            return sources
        except Exception as e:
            log('ERROR', 'get_sources', '%s' % e, dolog=not testing)
            return sources
Example #3
0
    def get_sources(self,
                    url,
                    hosthdDict=None,
                    hostDict=None,
                    locDict=None,
                    proxy_options=None,
                    key=None,
                    testing=False):
        """Scrape playable source links for a movie or episode.

        ``url`` is either a direct http(s) page URL or an urlencoded query
        string (title/tvshowtitle, year, season, episode) that is first
        resolved through the site's /search.html page.  Candidate pages are
        fetched one by one; for the first page that matches, every
        ``link_server_*`` entry is resolved through ``resolvers.createMeta``.
        Returns a list of link metadata dicts (empty on failure; errors are
        logged, never raised).

        Fixes:
        * the search-failure fallback used ``urls == [self.base_link]`` — a
          no-op comparison — instead of an assignment, so the fallback never
          took effect;
        * ``data`` and ``year`` were unbound when ``url`` was already a full
          URL, so every candidate page died with a NameError inside the
          silent per-URL except; both are now pre-initialised and the year
          check is only enforced when a year is actually known;
        * local ``type`` renamed to ``rip_type`` to stop shadowing the
          builtin.
        """
        try:
            sources = []
            if control.setting('Provider-%s' % name) == False:
                log('INFO', 'get_sources', 'Provider Disabled by User')
                log('INFO', 'get_sources', 'Completed')
                return sources
            if url == None:
                log('FAIL',
                    'get_sources',
                    'url == None. Could not find a matching title: %s' %
                    cleantitle.title_from_key(key),
                    dolog=not testing)
                log('INFO', 'get_sources', 'Completed')
                return sources

            urls = []
            vidtype = 'Movie'
            data = {}    # parsed query params; stays empty for direct URLs
            year = None  # release year, when supplied by the caller

            if not str(url).startswith('http'):
                try:
                    data = urlparse.parse_qs(url)
                    data = dict([(i, data[i][0]) if data[i] else (i, '')
                                 for i in data])

                    title = data[
                        'tvshowtitle'] if 'tvshowtitle' in data else data[
                            'title']

                    if 'year' in data:
                        year = data['year']

                    # For shows, search "<title> season <n>" to narrow hits.
                    if 'season' in data:
                        query = {
                            'keyword':
                            '%s %s %s' % (title, 'season', data['season'])
                        }
                    else:
                        query = {'keyword': title}

                    search_url = urlparse.urljoin(self.base_link,
                                                  '/search.html')
                    search_url = search_url + '?' + urllib.urlencode(query)

                    result = proxies.request(search_url,
                                             headers=self.headers,
                                             proxy_options=proxy_options,
                                             use_web_proxy=self.proxyrequired,
                                             httpsskip=True)
                    r = client.parseDOM(result,
                                        'div',
                                        attrs={'class': 'wrapper'})

                    # The second wrapper div holds the result grid.
                    try:
                        r = r[1]
                    except:
                        raise Exception()

                    # Collect (href, title) pairs from the result figures.
                    r1 = client.parseDOM(r, 'figure')
                    r2 = []
                    for res in r1:
                        l = client.parseDOM(res, 'a', ret='href')[0]
                        t = client.parseDOM(res,
                                            'div',
                                            attrs={'class': 'title'})[0]
                        r2.append((l, t))

                    r = r2

                    if 'season' in data:
                        vidtype = 'Show'
                        episode = int(data['episode'])

                        # Strip "(...)" annotations, split "<title> <season>",
                        # keep entries matching both title and season.
                        r = [(i[0], re.sub(' \(\w*\)', '', i[1])) for i in r]
                        url = [(i[0], re.findall('(.+?) (\d+)$', i[1]))
                               for i in r]
                        url = [(i[0], i[1][0][0], i[1][0][1]) for i in url
                               if len(i[1]) > 0]
                        url = [
                            i for i in url
                            if cleantitle.get(title) in cleantitle.get(i[1])
                        ]
                        url = [
                            i for i in url if '%01d' %
                            int(data['season']) == '%01d' % int(i[2])
                        ]

                        # Find the per-episode watch link on each season page.
                        ep_url = []
                        for i in url:
                            result = proxies.request(
                                urlparse.urljoin(self.base_link, i[0]),
                                headers=self.headers,
                                proxy_options=proxy_options,
                                use_web_proxy=self.proxyrequired,
                                httpsskip=True)
                            t = client.parseDOM(result,
                                                'div',
                                                attrs={'class': 'eps'})
                            for tt in t:
                                if 'watch' in tt:
                                    tt = client.parseDOM(
                                        tt, 'div', attrs={'class':
                                                          'server'})[0]
                                    section_links = client.parseDOM(tt,
                                                                    'a',
                                                                    ret='href')
                                    for a_link in section_links:
                                        # The site zero-pads episodes to 2
                                        # digits (3 from episode 100 on).
                                        if episode < 100:
                                            f_key = '-episode-%02d-' % episode
                                        else:
                                            f_key = '-episode-%03d-' % episode
                                        if f_key in a_link:
                                            log('INFO', 'get_sources',
                                                'episode url = %s' % a_link)
                                            ep_url.append(a_link)
                                            break
                        for i in ep_url:
                            urls.append(urlparse.urljoin(self.base_link, i))
                    else:
                        for i in r:
                            if cleantitle.get(title) in cleantitle.get(i[1]):
                                urls.append(
                                    urlparse.urljoin(self.base_link, i[0]))

                except:
                    # Search scraping failed: fall back to the site root.
                    # (Was a no-op `urls == [...]` comparison before.)
                    urls = [self.base_link]

            links_m = []

            page = None
            for url in urls:
                try:
                    log('INFO',
                        'get_sources',
                        'url == %s' % url,
                        dolog=False,
                        doPrint=True)
                    page_url = url
                    page = result = proxies.request(
                        url,
                        headers=self.headers,
                        proxy_options=proxy_options,
                        use_web_proxy=self.proxyrequired,
                        httpsskip=True)

                    quality = '480p'
                    rip_type = 'BRRIP'

                    atr = ''
                    qtr = ''

                    # The quality badge class is misspelled 'quanlity' on
                    # some pages; try both spellings.
                    try:
                        qtr = client.parseDOM(result,
                                              'span',
                                              attrs={'class': 'quanlity'})[0]
                    except:
                        try:
                            qtr = client.parseDOM(result,
                                                  'span',
                                                  attrs={'class':
                                                         'quality'})[0]
                        except:
                            pass

                    try:
                        quality = source_utils.check_sd_url(qtr)
                        rip_type = source_utils.check_sd_url_rip(qtr)
                    except Exception as e:
                        quality = '480p'
                        rip_type = 'BRRIP'

                    try:
                        atr = client.parseDOM(result,
                                              'span',
                                              attrs={'class': 'year'})[0]
                    except:
                        atr = ''

                    try:
                        atr_release = client.parseDOM(result,
                                                      'div',
                                                      attrs={'class':
                                                             'meta'})[1]
                    except:
                        atr_release = ''

                    if 'season' in data:
                        vidtype = 'Show'
                    else:
                        vidtype = 'Movie'
                        # Reject pages whose year doesn't match — but only
                        # when the caller actually supplied a year.
                        if year is not None:
                            resultx = result if str(int(year)) in atr else None
                            if resultx == None:
                                resultx = result if str(
                                    int(year)) in atr_release else None
                            if resultx == None:
                                raise Exception()

                    try:
                        poster = client.parseDOM(page,
                                                 'div',
                                                 attrs={'class':
                                                        'detail-l'})[0]
                        poster = client.parseDOM(poster, 'img', ret='src')[0]
                        if 'http' not in poster:
                            poster = 'http:' + poster
                    except:
                        poster = None

                    # Pass 1: follow each link_server_* URL, pull the iframe
                    # src out of the response and resolve it.
                    try:
                        servers = re.findall(r'link_server_.*\"(.*)\";', page)
                        servers = list(set(servers))
                        for server in servers:
                            try:
                                if 'http' not in server:
                                    server = 'http:' + server

                                result = proxies.request(
                                    server,
                                    headers=self.headers,
                                    proxy_options=proxy_options,
                                    use_web_proxy=self.proxyrequired,
                                    httpsskip=True)

                                server = client.parseDOM(result,
                                                         'iframe',
                                                         ret='src')[0]
                                if len(server) > 0:
                                    if 'http' not in server:
                                        server = 'http:' + server

                                    l = resolvers.createMeta(server,
                                                             self.name,
                                                             self.logo,
                                                             quality, [],
                                                             key,
                                                             poster=poster,
                                                             riptype=rip_type,
                                                             vidtype=vidtype,
                                                             testing=testing,
                                                             page_url=page_url)
                                    for ll in l:
                                        if ll != None and 'key' in ll.keys():
                                            links_m.append(ll)
                            except Exception as e:
                                pass
                            if testing and len(links_m) > 0:
                                break
                    except Exception as e:
                        pass

                    # Pass 2: also try resolving the link_server_* URLs
                    # directly (some are playable without the iframe hop).
                    try:
                        servers = re.findall(r'link_server_.*\"(.*)\";', page)
                        servers = list(set(servers))
                        for server in servers:
                            if server != None:
                                if 'http' not in server:
                                    server = 'http:' + server

                                try:
                                    l = resolvers.createMeta(server,
                                                             self.name,
                                                             self.logo,
                                                             quality, [],
                                                             key,
                                                             poster=poster,
                                                             riptype=rip_type,
                                                             vidtype=vidtype,
                                                             testing=testing,
                                                             page_url=page_url)
                                    for ll in l:
                                        if ll != None and 'key' in ll.keys():
                                            links_m.append(ll)
                                except:
                                    pass
                    except:
                        pass

                    # First successfully-scraped page wins.
                    break
                except:
                    pass

            for link in links_m:
                if link != None and 'key' in link.keys():
                    sources.append(link)

            if len(sources) == 0:
                log(
                    'FAIL', 'get_sources',
                    'Could not find a matching title: %s' %
                    cleantitle.title_from_key(key))
            else:
                log(
                    'SUCCESS', 'get_sources', '%s sources : %s' %
                    (cleantitle.title_from_key(key), len(sources)))

            log('INFO', 'get_sources', 'Completed')

            return sources
        except Exception as e:
            log('ERROR', 'get_sources', '%s' % e)
            log('INFO', 'get_sources', 'Completed')
            return sources
Example #4
0
    def get_sources(self,
                    url,
                    hosthdDict=None,
                    hostDict=None,
                    locDict=None,
                    proxy_options=None,
                    key=None,
                    testing=False):
        """Scrape playable source links from a pre-resolved title page.

        ``url`` is a ``(page_url, episode)`` pair from the provider's search
        step; ``episode`` of ``None`` means a movie page.  The page is
        fetched once, trailer links are (outside testing mode) collected
        from raw YouTube URLs, then every ``player-data`` anchor — filtered
        by ``episode-data`` for shows — is handed to
        ``resolvers.createMeta``.  Returns a list of link metadata dicts;
        empty on failure (errors are logged, never raised).

        Fixes: removed two dead ``headers`` assignments (the request always
        uses ``self.headers``) and replaced ``== None`` / ``== True``
        comparisons with the idiomatic forms.
        """
        try:
            sources = []
            if control.setting('Provider-%s' % name) == False:
                log('INFO', 'get_sources', 'Provider Disabled by User')
                return sources
            if url is None:
                log('FAIL',
                    'get_sources',
                    'url == None. Could not find a matching title: %s' %
                    cleantitle.title_from_key(key),
                    dolog=not testing)
                return sources

            links_m = []
            trailers = []
            sub_url = None

            u = url[0]
            ep = url[1]
            r = proxies.request(u,
                                headers=self.headers,
                                IPv4=True,
                                proxy_options=proxy_options,
                                use_web_proxy=self.proxyrequired)

            # Trailers are skipped in testing mode to keep runs fast.
            if testing == False:
                try:
                    matches = re.compile(
                        'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
                    ).findall(r)
                    for match in matches:
                        try:
                            if 'youtube.com' in match:
                                # Embed URLs must be rewritten to watch URLs
                                # before the trailer resolver accepts them.
                                match = match.replace('embed/', 'watch?v=')
                                trailers.append(match)
                        except:
                            pass
                except Exception as e:
                    pass

                for trailer in trailers:
                    links_m = resolvers.createMeta(trailer,
                                                   self.name,
                                                   self.logo,
                                                   '720p',
                                                   links_m,
                                                   key,
                                                   vidtype='Trailer',
                                                   testing=testing)

            try:
                # Movie pages expose all players; show pages are filtered
                # down to the anchors tagged with the requested episode.
                if ep is None:
                    srcs = client.parseDOM(r, 'a', ret='player-data')
                else:
                    srcs = client.parseDOM(r,
                                           'a',
                                           ret='player-data',
                                           attrs={'episode-data': str(ep)})

                # Quality/riptype from the page badge; default 480p BRRIP.
                try:
                    elem = client.parseDOM(r,
                                           'span',
                                           attrs={'class': 'quality'})[0]
                    qual = source_utils.check_sd_url(elem)
                    riptype = source_utils.check_sd_url_rip(elem)
                except Exception as e:
                    qual = '480p'
                    riptype = 'BRRIP'

                try:
                    poster = client.parseDOM(r,
                                             'div',
                                             attrs={'class': 'dm-thumb'})[0]
                    poster = client.parseDOM(poster, 'img', ret='src')[0]
                except:
                    poster = None

                for s in srcs:
                    try:
                        # Protocol-relative player URLs need a scheme.
                        if s.startswith('//'):
                            s = 'https:%s' % s
                        links_m = resolvers.createMeta(s,
                                                       self.name,
                                                       self.logo,
                                                       qual,
                                                       links_m,
                                                       key,
                                                       poster=poster,
                                                       riptype=riptype,
                                                       vidtype='Movie',
                                                       sub_url=sub_url,
                                                       testing=testing)
                        # One working link is enough in testing mode.
                        if testing and len(links_m) > 0:
                            break
                    except:
                        pass
            except:
                pass

            sources += [l for l in links_m]

            if len(sources) == 0:
                log(
                    'FAIL', 'get_sources',
                    'Could not find a matching title: %s' %
                    cleantitle.title_from_key(key))
                return sources

            log('SUCCESS',
                'get_sources',
                '%s sources : %s' %
                (cleantitle.title_from_key(key), len(sources)),
                dolog=not testing)
            return sources
        except Exception as e:
            log('ERROR', 'get_sources', '%s' % e, dolog=not testing)
            return sources