def get_source(self, item):
    quality = ''
    try:
        # second <td> of the row holds the hoster links
        urls = client.parseDOM(item, "td")[1]
        urls = client.parseDOM(urls, "a", ret="href")

        for i in range(0, len(urls)):
            uResult = client.source(urls[i], mobile=False)
            uResult = uResult.replace('\n', '').replace('\t', '')
            # fall back to the mobile page when the desktop page errors out
            if 'Could not connect to mysql! Please check your database' in uResult:
                uResult = client.source(urls[i], mobile=True)

            item = client.parseDOM(uResult, "div", attrs={"class": "videoplayer"})[0]
            item = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(item)[0][1]
            urls[i] = item

        host = client.host(urls[0])

        if len(urls) > 1:
            url = "##".join(urls)
        else:
            url = urls[0]

        self.sources.append({
            'source': host,
            'parts': str(len(urls)),
            'quality': quality,
            'provider': 'ApnaView',
            'url': url,
            'direct': False
        })
    except:
        pass
def get_source(self, item):
    quality = ''
    try:
        urls = client.parseDOM(item, "td", attrs={"class": "col-md-7"})[0]
        urls = client.parseDOM(urls, "a", ret="href")

        for i in range(0, len(urls)):
            item = client.source(urls[i], mobile=False)
            item = item.replace('\n', '').replace('\t', '')
            item = client.parseDOM(item, "div", attrs={"class": "embed-responsive embed-responsive-16by9"})[0]
            item = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(item)[0][1]
            urls[i] = item

        host = client.host(urls[0])

        if len(urls) > 1:
            url = "##".join(urls)
        else:
            url = urls[0]

        self.sources.append({
            'source': host,
            'parts': str(len(urls)),
            'quality': quality,
            'provider': 'iBollyTV',
            'url': url,
            'direct': False
        })
    except:
        pass
def get_sources(self, url):
    logger.debug('%s SOURCES URL %s' % (self.__class__, url))
    try:
        quality = ''
        sources = []
        if url is None:
            return sources

        url = self.movie_link % url
        url = urlparse.urljoin(self.base_link, url)
        rUrl = url

        try:
            result = client.source(url, referer=rUrl)
        except:
            result = ''

        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')
        result = client.parseDOM(result, "div", attrs={"class": "movierip"})

        for item in result:
            try:
                urls = client.parseDOM(item, "a", ret="href")

                # the link label carries the rip type
                quality = client.parseDOM(item, "a")[0]
                quality = quality.lower()
                if "scr rip" in quality:
                    quality = "SCR"
                elif "dvd" in quality:
                    quality = "HD"
                else:
                    quality = "CAM"

                for i in range(0, len(urls)):
                    urls[i] = client.urlRewrite(urls[i])

                host = client.host(urls[0])

                if len(urls) > 1:
                    url = "##".join(urls)
                else:
                    url = urls[0]

                sources.append({
                    'source': host,
                    'parts': str(len(urls)),
                    'quality': quality,
                    'provider': 'DesiHit',
                    'url': url,
                    'direct': False
                })
            except:
                pass

        logger.debug('%s SOURCES [%s]' % (__name__, sources))
        return sources
    except:
        return sources
def get_sources(self, url):
    logger.debug('%s SOURCES URL %s' % (self.__class__, url))
    try:
        quality = ''
        sources = []
        if url is None:
            return sources

        try:
            result = client.source(self.movie_link % (self.base_link_1, url))
        except:
            result = ''

        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')

        # derive quality from the post's category tags
        categories = client.parseDOM(result, "div", attrs={"id": "extras"})
        categories = client.parseDOM(categories, "a", attrs={"rel": "category tag"})
        for category in categories:
            category = category.lower()
            if "scr" in category:
                quality = "SCR"
                break
            elif "bluray" in category:
                quality = "HD"
                break

        links = client.parseDOM(result, "div", attrs={"class": "GTTabs_divs GTTabs_curr_div"})
        links += client.parseDOM(result, "div", attrs={"class": "GTTabs_divs"})

        for link in links:
            try:
                url = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(link)[0][1]
                host = client.host(url)
                sources.append({
                    'source': host,
                    'parts': '1',
                    'quality': quality,
                    'provider': 'HDBuffer',
                    'url': url,
                    'direct': False
                })
            except:
                pass

        logger.debug('%s SOURCES [%s]' % (__name__, sources))
        return sources
    except:
        return sources
def get_sources(self, url):
    logger.debug('%s SOURCES URL %s' % (self.__class__, url))
    try:
        sources = []
        if url is None:
            return sources

        url = urlparse.urljoin(self.base_link, url)

        try:
            result = client.source(url, referer=self.base_link)
        except:
            result = ''

        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')

        quality = client.parseDOM(result, "span", attrs={"class": "calidad2"})[0]

        parts = client.parseDOM(result, "div", attrs={"class": "player_nav"})[0]
        parts = client.parseDOM(parts, "a")

        items = client.parseDOM(result, "div", attrs={"id": "player2"})[0]
        items = client.parseDOM(items, "div", attrs={"class": "movieplay"})

        for i in range(0, len(items)):
            try:
                part = parts[i]
                part = cleantitle.movie(part)
                # only keep "full movie" players, skip the alternate "option" tabs
                if "full" not in part or "option" in part:
                    continue
                url = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(items[i])[0][1]
                host = client.host(url)
                sources.append({
                    'source': host,
                    'parts': '1',
                    'quality': quality,
                    'provider': 'DesiHDMovies',
                    'url': url,
                    'direct': False
                })
            except:
                pass

        logger.debug('%s SOURCES [%s]' % (__name__, sources))
        return sources
    except:
        return sources
def get_sources(self, url):
    logger.debug('%s SOURCES URL %s' % (self.__class__, url))
    try:
        quality = ''
        sources = []
        if url is None:
            return sources

        try:
            result = client.source(url)
        except:
            result = ''

        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')

        result = client.parseDOM(result, name="div", attrs={"class": "entry-content rich-content"})[0]
        result = client.parseDOM(result, name="p")

        try:
            host = ''
            urls = []
            # skip the first <p>; the rest alternate between a server heading and its links
            result = result[1::]
            serversList = result[::2]
            linksList = result[1::2]

            for i in range(0, len(serversList)):
                try:
                    links = linksList[i]
                    urls = client.parseDOM(links, name="a", ret="href")
                    for j in range(0, len(urls)):
                        try:
                            item = client.source(urls[j], mobile=True)
                            item = client.parseDOM(item, "td")[0]
                            item = re.compile('(SRC|src|data-config)=\"(.+?)\"').findall(item)[0][1]
                            urls[j] = item
                        except:
                            pass

                    if len(urls) > 1:
                        url = "##".join(urls)
                    else:
                        url = urls[0]

                    host = client.host(urls[0])
                    sources.append({
                        'source': host,
                        'parts': str(len(urls)),
                        'quality': quality,
                        'provider': 'HindiLinks4U',
                        'url': url,
                        'direct': False
                    })
                except:
                    pass
        except:
            pass

        logger.debug('%s SOURCES [%s]' % (__name__, sources))
        return sources
    except:
        return sources
def get_sources(self, url):
    logger.debug('%s SOURCES URL %s' % (self.__class__, url))
    try:
        quality = ''
        self.sources = []
        if url is None:
            return self.sources

        try:
            result = client.source(url)
        except:
            result = ''

        result = result.decode('iso-8859-1').encode('utf-8')
        result = client.parseDOM(result, "div", attrs={"class": "col-md-12 mt20"})[0]

        # the featured player embedded in the <center> block
        try:
            item = client.parseDOM(result, "center")[0]
            url = re.compile('(SRC|src|data-config)=\"(.+?)\"').findall(item)[0][1]
            host = client.host(url)
            self.sources.append({
                'source': host,
                'parts': '1',
                'quality': quality,
                'provider': 'iBollyTV',
                'url': url,
                'direct': False
            })
        except:
            pass

        hypermode = False if control.setting('hypermode') == 'false' else True
        threads = []

        try:
            result = client.parseDOM(result, "div", attrs={"class": "table-responsive"})[0]
            result = client.parseDOM(result, "tbody")[0]
            result = client.parseDOM(result, "tr")

            # scrape each table row, in worker threads when hypermode is enabled
            for item in result:
                if hypermode:
                    threads.append(workers.Thread(self.get_source, item))
                else:
                    self.get_source(item)

            if hypermode:
                [i.start() for i in threads]

                # wait until every worker thread has finished
                stillWorking = True
                while stillWorking:
                    stillWorking = [True for x in threads if x.is_alive() == True]
        except:
            pass

        logger.debug('%s SOURCES [%s]' % (__name__, self.sources))
        return self.sources
    except:
        return self.sources
def get_sources(self, url):
    try:
        logger.debug('%s SOURCES URL %s' % (self.__class__, url))
        quality = ''
        sources = []
        result = ''

        # try each mirror until one returns a usable page
        links = [self.base_link_1, self.base_link_2, self.base_link_3]
        for base_link in links:
            try:
                result = client.source(base_link + '/' + url)
            except:
                result = ''
            if 'item' in result:
                break

        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')

        items = client.parseDOM(result, 'content:encoded')[0]
        items = re.compile('class=\"single-heading\">(.+?)<span').findall(items)

        for i in range(0, len(items)):
            try:
                if '720p' in items[i]:
                    quality = 'HD'
                else:
                    quality = 'SD'

                urls = client.parseDOM(items[i], "a", ret="href")
                for j in range(0, len(urls)):
                    result = client.source(urls[j])
                    item = client.parseDOM(result, name="div", attrs={"style": "float:none;height:700px;margin-left:200px"})[0]
                    rUrl = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(item)[0][1]
                    urls[j] = rUrl

                host = client.host(urls[0])
                url = "##".join(urls)
                sources.append({
                    'source': host,
                    'parts': str(len(urls)),
                    'quality': quality,
                    'provider': 'YoDesi',
                    'url': url,
                    'direct': False
                })
                urls = []
            except:
                pass

        logger.debug('%s SOURCES [%s]' % (__name__, sources))
        return sources
    except:
        return sources
def get_sources(self, url):
    logger.debug('%s SOURCES URL %s' % (self.__class__, url))
    try:
        sources = []
        if url is None:
            return sources

        result = cloudflare.source(url)
        result = client.parseDOM(result, 'table', attrs={'class': 'rows differ_download'})[0]
        links = client.parseDOM(result, 'tr')

        for link in links:
            try:
                quality = client.parseDOM(link, 'span', attrs={'class': 'quality_1'})[0].lower()
            except:
                quality = 'hd'

            if quality == 'ts':
                quality = 'CAM'
            elif '360p' in quality:
                quality = 'SD'
            elif '720p' in quality:
                quality = 'HD'
            else:
                quality = 'SD'

            url = client.parseDOM(link, 'a', ret="href")[0]
            host = client.host(url)
            sources.append({
                'source': host,
                'parts': '1',
                'quality': quality,
                'provider': 'filmywap',
                'url': url,
                'direct': False,
                'debridonly': False
            })

        logger.debug('%s SOURCES [%s]' % (__name__, sources))
        return sources
    except:
        return sources
def request(url, resolverList=None):
    # Custom Resolvers
    try:
        u = client.host(url)
        r = [i['class'] for i in info() if u in i['host']][0]
        r = __import__(r, globals(), locals(), [], -1)
        r = r.resolve(url)
        if not r:
            raise Exception()
        # append the request headers the player needs, urlencoded after a '|'
        h = {}
        h['User-Agent'] = client.agent()
        h['Referer'] = url
        r = '%s|%s' % (r.split('|')[0], urllib.urlencode(h))
        return r
    except:
        pass

    # URLResolvers 3.0.0
    u = url
    try:
        url = None
        hmf = urlresolver.HostedMediaFile(url=u, include_disabled=True, include_universal=False)
        if hmf.valid_url() == True:
            url = hmf.resolve()
        else:
            url = False
    except:
        pass

    try:
        if not url == None:
            raise Exception()
        hmf = urlresolver.HostedMediaFile(url=u, include_disabled=True)
        hmf = hmf.get_resolvers(validated=True)
        hmf = [i for i in hmf if not i.isUniversal()][0]
        host, media_id = hmf.get_host_and_id(u)
        url = hmf.get_media_url(host, media_id)
    except:
        pass

    # URL Resolver 2.10.12
    try:
        if not url == None:
            raise Exception()
        hmf = urlresolver.plugnplay.man.implementors(urlresolver.UrlResolver)
        hmf = [i for i in hmf if not '*' in i.domains]
        hmf = [(i, i.get_host_and_id(u)) for i in hmf]
        hmf = [i for i in hmf if not i[1] == False]
        hmf = [(i[0], i[0].valid_url(u, i[1][0]), i[1][0], i[1][1]) for i in hmf]
        hmf = [i for i in hmf if not i[1] == False][0]
        url = hmf[0].get_media_url(hmf[2], hmf[3])
    except:
        pass

    # verify the resolved URL is reachable before returning it
    try:
        headers = dict(urlparse.parse_qsl(url.rsplit('|', 1)[1]))
    except:
        headers = {}

    if url.startswith('http') and '.m3u8' in url:
        result = client.request(url.split('|')[0], headers=headers, output='geturl', timeout='20')
        if result == None:
            raise Exception()
    elif url.startswith('http'):
        result = client.request(url.split('|')[0], headers=headers, output='chunk', timeout='20')
        if result == None:
            raise Exception()

    return url
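# --- illustrative only, not part of the resolver module above ---
# A minimal sketch of how a consumer might use request() for the multi-part
# sources produced by the providers in this add-on: they join part links with
# '##' and report the count in 'parts'. 'source_item' is a hypothetical dict
# shaped like the entries appended to sources[] above.
def resolve_parts(source_item):
    resolved = []
    for part in source_item['url'].split('##'):
        try:
            # request() returns either a plain URL or 'url|<urlencoded headers>'
            resolved.append(request(part))
        except:
            # skip parts that fail to resolve, mirroring the providers' style
            pass
    return resolved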
def get_sources(self, url):
    logger.debug('%s SOURCES URL %s' % (self.__class__, url))
    try:
        quality = ''
        sources = []
        result = ''

        # try each mirror until one returns a usable page
        links = [self.base_link_1, self.base_link_2, self.base_link_3]
        for base_link in links:
            try:
                result = client.source(base_link + '/' + url)
            except:
                result = ''
            if 'blockquote' in result:
                break

        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')

        ### DIRTY Implementation
        import BeautifulSoup
        soup = BeautifulSoup.BeautifulSoup(result).findAll('blockquote', {'class': re.compile(r'\bpostcontent\b')})[0]
        for e in soup.findAll('br'):
            e.extract()
        if soup.has_key('div'):
            soup = soup.findChild('div', recursive=False)

        urls = []
        quality = 'SD'
        for child in soup.findChildren():
            if (child.getText() == '') or ((child.name == 'font' or child.name == 'a') and re.search('DesiRulez', str(child.getText()), re.IGNORECASE)):
                continue
            elif (child.name == 'font') and re.search('Links|Online|Link', str(child.getText()), re.IGNORECASE):
                # a new heading starts; flush the links collected for the previous heading
                if len(urls) > 0:
                    for i in range(0, len(urls)):
                        try:
                            result = client.source(urls[i])
                            item = client.parseDOM(result, name="div", attrs={"style": "float:right;margin-bottom:10px"})[0]
                            rUrl = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(item)[0][1]
                            rUrl = client.urlRewrite(rUrl)
                            urls[i] = rUrl
                        except:
                            urls[i] = client.urlRewrite(urls[i])

                    host = client.host(urls[0])
                    url = "##".join(urls)
                    sources.append({'source': host, 'parts': str(len(urls)), 'quality': quality, 'provider': 'DesiRulez', 'url': url, 'direct': False})
                    urls = []

                quality = child.getText()
                if '720p HD' in quality:
                    quality = 'HD'
                else:
                    quality = 'SD'
            elif (child.name == 'a') and not child.getText() == 'registration':
                urls.append(str(child['href']))

        # flush any links remaining after the last heading
        if len(urls) > 0:
            for i in range(0, len(urls)):
                try:
                    result = client.source(urls[i])
                    item = client.parseDOM(result, name="div", attrs={"style": "float:right;margin-bottom:10px"})[0]
                    rUrl = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(item)[0][1]
                    rUrl = client.urlRewrite(rUrl)
                    urls[i] = rUrl
                except:
                    urls[i] = client.urlRewrite(urls[i])

            host = client.host(urls[0])
            url = "##".join(urls)
            sources.append({'source': host, 'parts': str(len(urls)), 'quality': quality, 'provider': 'DesiRulez', 'url': url, 'direct': False})

        logger.debug('%s SOURCES [%s]' % (__name__, sources))
        return sources
    except:
        return sources
def get_sources(self, url):
    logger.debug('%s SOURCES URL %s' % (self.__class__, url))
    try:
        quality = ''
        sources = []
        if url is None:
            return sources

        try:
            result = client.source(self.movie_link % (self.base_link_1, url))
        except:
            result = ''

        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')

        try:
            # get bluray source
            url = client.parseDOM(result, "a", attrs={"class": "btn btn-custom btn-custom-large btn-blue btn-icon "}, ret="href")[0]
            if 'playsominal' in url:
                raise Exception()
            quality = "HD"
            sources.append({'source': "playsominal", 'quality': quality, 'provider': 'PlayIndiaFilms', 'url': url})
        except:
            pass

        try:
            # get DVD source
            quality = client.parseDOM(result, "span", attrs={"class": "btn btn-custom btn-custom-large btn-yellow "})[0]
            quality = client.parseDOM(quality, "strong")[0]
            quality = "HD"
        except:
            quality = 'CAM'

        result = client.parseDOM(result, "p", attrs={"style": "text-align: center;"})

        try:
            host = ''
            urls = []
            for tag in result:
                if len(client.parseDOM(tag, "span", attrs={"class": "btn btn-custom btn-custom-large btn-black "})) > 0:
                    # a black button marks a new part group; flush the links collected so far
                    link = client.parseDOM(tag, "strong")
                    if len(urls) > 0:
                        url = "##".join(urls)
                        host = client.host(urls[0])
                        sources.append({'source': host, 'parts': str(len(urls)), 'quality': quality, 'provider': 'PlayIndiaFilms', 'url': url, 'direct': True})
                        urls = []
                else:
                    link = client.parseDOM(tag, "a", attrs={"class": "btn btn-custom btn-medium btn-red btn-red "}, ret="href")
                    if len(link) > 0:
                        #urls.append(resolvers.request(link[0]))
                        urls.append(link[0])

            if len(urls) > 0:
                url = "##".join(urls)
                host = client.host(urls[0])
                sources.append({'source': host, 'parts': str(len(urls)), 'quality': quality, 'provider': 'PlayIndiaFilms', 'url': url, 'direct': True})
        except:
            pass

        logger.debug('%s SOURCES [%s]' % (__name__, sources))
        return sources
    except:
        return sources