def sources(self, url, hostDict, hostprDict): hostDict = hostDict + hostprDict sources = [] if url is None: return sources try: with requests.Session() as s: p = s.get(self.base_link + self.episode_link + url, headers=self.headers) soup = BeautifulSoup(p.text, 'html.parser') src = soup.find('iframe') url = src['src'] if '//apu,litaurl.com/' in url: p = s.headers(url) url = p.url valid, host = source_utils.checkHost(url, hostDict) quality = source_utils.get_quality_simple(url) if valid == True: sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': '', 'direct': False, 'debridonly': True}) except: failure = traceback.format_exc() log_utils.log('Vmovee - Exception: \n' + str(failure)) return sources
def sources(self, url, hostDict, hostprDict): hostDict = hostDict + hostprDict sources = [] if url is None: return sources try: with requests.Session() as s: p = s.get(self.base_link + self.episode_link + url, headers=self.headers) soup = BeautifulSoup(p.text, 'html.parser') src = soup.find('iframe') url = src['src'] if '//apu,litaurl.com/' in url: p = s.headers(url) url = p.url valid, host = source_utils.checkHost(url, hostDict) quality = source_utils.get_quality_simple(url) if valid == True: sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': '', 'direct': False, 'debridonly': True}) except: traceback.print_exc()() return sources
def getPost(self, url):
    """Scrape one release page and append valid hoster links to self.sourceList.

    Args:
        url: absolute URL of the release/post page to scrape.

    Side effects:
        Appends one source dict per link whose host passes
        source_utils.checkHost(..., self.validHosts). Quality/info are
        derived from the page title in the 'notifierbar' div.

    NOTE(review): raises AttributeError if the notifierbar div or the
    download_table is missing from the page -- matches original behavior.
    """
    soup = BeautifulSoup(self.scraper.get(url).text, 'html.parser')
    title = soup.find('div', {'class': 'notifierbar'}).text
    # find_all is the modern BeautifulSoup 4 name; findAll is a deprecated alias.
    links = soup.find('table', {'id': 'download_table'}).find_all('a')
    quality = source_utils.get_quality_simple(title)
    info = source_utils.get_info_simple(title)
    for link in links:
        valid, host = source_utils.checkHost(link['href'], self.validHosts)
        if valid:
            self.sourceList.append(
                {'source': host, 'quality': quality, 'language': 'en',
                 'url': link['href'], 'info': info, 'direct': False,
                 'debridonly': True})
def getPost(self, url):
    """Parse a download page and collect matching hoster entries.

    Fetches *url* via self.scraper, reads the release title from the
    'notifierbar' div, then appends a source dict to self.sourceList for
    every anchor in the download table whose host is in self.validHosts.
    """
    page = BeautifulSoup(self.scraper.get(url).text, 'html.parser')
    release_title = page.find('div', {'class': 'notifierbar'}).text
    anchors = page.find('table', {'id': 'download_table'}).findAll('a')
    release_quality = source_utils.get_quality_simple(release_title)
    release_info = source_utils.get_info_simple(release_title)
    for anchor in anchors:
        href = anchor['href']
        ok, hoster = source_utils.checkHost(href, self.validHosts)
        if not ok:
            continue
        self.sourceList.append({
            'source': hoster,
            'quality': release_quality,
            'language': 'en',
            'url': href,
            'info': release_info,
            'direct': False,
            'debridonly': True,
        })