Example #1
    def sources(self, url, hostDict, hostprDict):
        links = []
        sources = []

        # Fetch the search page and capture everything between the
        # <singlelink> and <download> markers, then collect every href inside.
        response = requests.get(self.base_link + self.search_link % url)
        capture = re.findall(r'(?s)<singlelink></singlelink>(.*)<download></download>', response.text)

        for i in capture:
            links.append(re.findall(r'href="(.*?)"', i))

        # Flatten the per-block link lists into a single list of URLs.
        links = [item for sublist in links for item in sublist]

        for i in links:
            for h in hostprDict:
                if h in i and '.rar' not in i:
                    quality = get_quality_simple(i)
                    sources.append({
                        'url': i,
                        'quality': quality,
                        'source': h,
                        'debridonly': True,
                        'language': 'en',
                        'info': '',
                        'direct': False
                    })

        return sources
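Examples #1 and #2 call a get_quality_simple helper that is not shown in this listing. The sketch below is only an assumption of what such a helper might do; the tag names and matching rules are illustrative, not the scraper module's actual implementation.

import re

def get_quality_simple(url):
    # Hypothetical sketch: map common release markers in the URL to a quality tag.
    # The real helper may use different tags or rules.
    url = url.lower()
    if re.search(r'2160p|4k|uhd', url):
        return '4K'
    if '1080p' in url:
        return '1080p'
    if '720p' in url:
        return '720p'
    return 'SD'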
Example #2
    def sources(self, url, hostDict, hostprDict):
        links = []
        sources = []

        # Fetch the search page and capture everything between the
        # <singlelink> and <download> markers, then collect every href inside.
        response = requests.get(self.base_link + self.search_link % url)
        capture = re.findall(
            r'(?s)<singlelink></singlelink>(.*)<download></download>',
            response.text)

        for i in capture:
            links.append(re.findall(r'href="(.*?)"', i))

        # Flatten the per-block link lists into a single list of URLs.
        links = [item for sublist in links for item in sublist]

        for i in links:
            for h in hostprDict:
                if h in i and '.rar' not in i:
                    quality = get_quality_simple(i)
                    sources.append({
                        'url': i,
                        'quality': quality,
                        'source': h,
                        'debridonly': True,
                        'language': 'en',
                        'info': '',
                        'direct': False
                    })

        return sources
Example #3
    def sources(self, url, hostDict, hostprDict):
        hostDict = hostDict + hostprDict
        sources = []
        if url is None:
            return sources

        try:
            with requests.Session() as s:
                p = s.get(self.base_link + self.episode_link + url, headers=self.headers)
                soup = BeautifulSoup(p.text, 'html.parser')
                # The embed URL lives in the page's first iframe.
                src = soup.find('iframe')
                url = src['src']

                if '//apu,litaurl.com/' in url:
                    # Follow the shortener redirect to resolve the real host URL.
                    p = s.get(url)
                    url = p.url

                valid, host = source_utils.checkHost(url, hostDict)
                quality = source_utils.get_quality_simple(url)

                if valid:
                    sources.append({'source': host, 'quality': quality, 'language': 'en',
                                    'url': url, 'info': '', 'direct': False,
                                    'debridonly': True})
        except:
            traceback.print_exc()

        return sources
Example #4
    def sources(self, url, hostDict, hostprDict):
        hostDict = hostDict + hostprDict
        sources = []
        if url is None:
            return sources

        try:
            with requests.Session() as s:
                p = s.get(self.base_link + self.episode_link + url, headers=self.headers)
                soup = BeautifulSoup(p.text, 'html.parser')
                # The embed URL lives in the page's first iframe.
                src = soup.find('iframe')
                url = src['src']

                if '//apu,litaurl.com/' in url:
                    # Follow the shortener redirect to resolve the real host URL.
                    p = s.get(url)
                    url = p.url

                valid, host = source_utils.checkHost(url, hostDict)
                quality = source_utils.get_quality_simple(url)

                if valid:
                    sources.append({'source': host, 'quality': quality, 'language': 'en',
                                    'url': url, 'info': '', 'direct': False,
                                    'debridonly': True})
        except:
            failure = traceback.format_exc()
            log_utils.log('Vmovee - Exception: \n' + str(failure))

        return sources
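Examples #3 and #4 delegate host validation to source_utils.checkHost, whose implementation is not part of this listing. As an assumption, a minimal version could compare the link's domain against the combined host list and return the same (valid, host) pair the calls above expect; the real helper may normalise domains differently.

from urllib.parse import urlparse

def checkHost(url, hostList):
    # Hypothetical sketch: report whether the URL's domain matches a known host.
    domain = urlparse(url).netloc.lower()
    for host in hostList:
        if host.lower() in domain:
            return True, host
    return False, ''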
Example #5
    def getPost(self, url):
        soup = BeautifulSoup(self.scraper.get(url).text, 'html.parser')
        # The release title sits in the notifier bar; quality and extra info are
        # parsed from it, while the links come from the download table.
        title = soup.find('div', {'class': 'notifierbar'}).text
        links = soup.find('table', {'id': 'download_table'}).find_all('a')
        quality = source_utils.get_quality_simple(title)
        info = source_utils.get_info_simple(title)

        for link in links:
            valid, host = source_utils.checkHost(link['href'], self.validHosts)
            if valid:
                self.sourceList.append({
                    'source': host, 'quality': quality, 'language': 'en',
                    'url': link['href'], 'info': info, 'direct': False,
                    'debridonly': True})
Example #6
    def getPost(self, url):
        soup = BeautifulSoup(self.scraper.get(url).text, 'html.parser')
        # The release title sits in the notifier bar; quality and extra info are
        # parsed from it, while the links come from the download table.
        title = soup.find('div', {'class': 'notifierbar'}).text
        links = soup.find('table', {'id': 'download_table'}).find_all('a')
        quality = source_utils.get_quality_simple(title)
        info = source_utils.get_info_simple(title)

        for link in links:
            valid, host = source_utils.checkHost(link['href'], self.validHosts)
            if valid:
                self.sourceList.append({
                    'source': host, 'quality': quality, 'language': 'en',
                    'url': link['href'], 'info': info, 'direct': False,
                    'debridonly': True})
Example #7
    def sources(self, url, hostDict, hostprDict):
        sources = []

        try:

            if url is None:
                return sources

            if not debrid.status():
                raise Exception()

            data = url

            title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']

            hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']

            # Build the search query ("Title S01E02" for episodes, "Title Year" for
            # movies) and strip characters the site does not accept.
            query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
            query = re.sub(r'(\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

            url = self.search_link % urllib.parse.quote_plus(query)
            url = urllib.parse.urljoin(self.base_link, url)

            r = requests.get(url).text
            posts = re.findall(r'(?s)<item>(.*?)</item>', r)

            hostDict = hostprDict + hostDict

            items = []
            for post in posts:
                try:
                    title = re.findall(r'<title>(.*?)</title>', post)[0]
                    if query.lower() in title.lower():
                        linksDivs = re.findall(r'(?s)<singlelink></singlelink><br />(.*?)<br />.<strong>', post)
                        for div in linksDivs:
                            links = re.findall(r'<a href="(.*?)"', div)
                            for link in links:
                                quality = source_utils.get_quality_simple(link)
                                valid, host = source_utils.is_host_valid(link, hostDict)
                                if valid:
                                    sources.append({'source': host, 'quality': quality, 'language': 'en',
                                                    'url': link, 'info': '', 'direct': False,
                                                    'debridonly': True})

                except:
                    traceback.print_exc()
            return sources
        except:
            traceback.print_exc()
            return sources
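For reference, the search query the last example sends can be reproduced standalone. The metadata dict below is a made-up illustration of the keys the method reads ('tvshowtitle'/'title', 'season', 'episode', 'year'); the actual values come from the caller.

import re
from urllib.parse import quote_plus

# Hypothetical metadata, mirroring what the sources() call above expects in `url`.
data = {'tvshowtitle': 'Some Show', 'season': '1', 'episode': '2', 'year': '2020'}

query = ('%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode']))
         if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year']))
query = re.sub(r'(\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)

print(quote_plus(query))  # -> Some+Show+S01E02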