Example #1
    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        try:
            self.zen_url = []
            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '')
                         for i in data])
            title = cleantitle.getsearch(data['tvshowtitle'])
            print("SERIESONLINE EPISODES STARTED")
            season = '%01d' % int(season)
            episode = '%01d' % int(episode)
            query = cleantitle_geturl(title) + "-season-" + season
            q = self.search_link % (query)
            r = urlparse.urljoin(self.base_link, q)
            cleaned_title = cleantitle_get(title) + "season" + season
            print("SERIESONLINE EPISODES", q)
            html = BeautifulSoup(client.request(r))
            containers = html.findAll('div', attrs={'class': 'ml-item'})
            for result in containers:
                links = result.findAll('a')
                for link in links:
                    link_title = link['title'].encode('utf-8')
                    href = link['href'].encode('utf-8')
                    href = urlparse.urljoin(self.base_link, href)
                    href = re.sub('/watching.html', '', href)
                    href = href + '/watching.html'

                    # print("SERIESONLINE", link_title, href)
                    if title_match(cleantitle_get(link_title),
                                   cleaned_title) == True:
                        print("SERIESONLINE FOUND MATCH", link_title, href)
                        referer = href
                        html = client.request(href)
                        s = BeautifulSoup(html)

                        s = s.findAll('div', attrs={'class': 'les-content'})
                        for x in s:
                            try:
                                items = x.findAll('a')
                                for u in items:

                                    player = u['player-data'].encode('utf-8')
                                    ep_id = u['episode-data'].encode('utf-8')
                                    if ep_id == episode:

                                        if not player in self.zen_url:
                                            self.zen_url.append(
                                                [player, referer])
                            except:
                                pass

            print("SERIESONLINE PASSED", self.zen_url)
            return self.zen_url

        except:
            return
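A minimal sketch of the query-string round trip this episode() method relies on: the url argument is assumed to be an urlencoded string carrying at least tvshowtitle, since the method immediately feeds it to urlparse.parse_qs. The title and year values below are hypothetical.

import urllib
import urlparse

# Hypothetical payload; only 'tvshowtitle' is actually read by the parsing above.
url = urllib.urlencode({'tvshowtitle': 'Example Show', 'year': '2016'})

data = urlparse.parse_qs(url)
# Flatten {'key': ['value']} into {'key': 'value'}, exactly as episode() does.
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
print(data['tvshowtitle'])  # -> Example Show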
Example #2
	def movie(self, imdb, title, year):
		self.zen_url = []
		try:
			headers = {'User-Agent': random_agent()}
			
			title = cleantitle_geturl(title)
			title = title + "-" + year
			query = self.movie_link % title
			u = urlparse.urljoin(self.base_link, query)
			self.zen_url.append(u)
			return self.zen_url
		except:
			return
Example #3
    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        try:
            self.elysium_url = []
            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
            title = cleantitle.getsearch(data['tvshowtitle'])
            print ("SERIESONLINE EPISODES STARTED")
            season = '%01d' % int(season)
            episode = '%01d' % int(episode)
            query = cleantitle_geturl(title) + "-season-" + season
            q = self.search_link % (query)
            r = urlparse.urljoin(self.base_link, q)
            cleaned_title = cleantitle_get(title) + "season" + season
            print ("SERIESONLINE EPISODES", q)
            html = BeautifulSoup(client.request(r))
            containers = html.findAll('div', attrs={'class': 'ml-item'})
            for result in containers:
                links = result.findAll('a')
                for link in links:
                    link_title = link['title'].encode('utf-8')
                    href = link['href'].encode('utf-8')
                    href = urlparse.urljoin(self.base_link, href)
                    href = re.sub('/watching.html','', href)
                    href = href + '/watching.html'

                    # print("SERIESONLINE", link_title, href)
                    if title_match(cleantitle_get(link_title), cleaned_title) == True:
						print("SERIESONLINE FOUND MATCH", link_title, href)
						referer = href
						html = client.request(href)
   						s = BeautifulSoup(html)
							
						s = s.findAll('div', attrs={'class': 'les-content'})
						for x in s:
							try:
								items = x.findAll('a')
								for u in items:
									
									player = u['player-data'].encode('utf-8')
									ep_id = u['episode-data'].encode('utf-8')
									if ep_id == episode: 
									
										if not player in self.elysium_url:	self.elysium_url.append([player, referer])
							except:
								pass
							
            print("SERIESONLINE PASSED", self.elysium_url)
            return self.elysium_url

        except:
            return
Example #4
    def movie(self, imdb, title, year):
        self.zen_url = []
        try:
            headers = {'User-Agent': random_agent()}

            title = cleantitle_geturl(title)

            query = self.movie_link % title
            u = urlparse.urljoin(self.base_link, query)
            url = {'url': u, 'year': year, 'type': 'movie'}
            url = urllib.urlencode(url)

            return url
        except:
            return
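A hedged usage sketch: the urlencoded string returned by this movie() variant would normally be unpacked again with urlparse.parse_qs by whatever consumes it. The consuming code is not shown here, so the decoder below is an assumption for illustration, not part of the original module.

import urlparse

def decode_movie_url(url):
    # 'url' is the string produced by movie() above,
    # e.g. 'url=http%3A%2F%2F...&year=2016&type=movie'
    data = urlparse.parse_qs(url)
    data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
    return data['url'], data['year'], data['type']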
Example #5
	def movie(self, imdb, title, year):
		self.elysium_url = []
		try:
			headers = {'User-Agent': random_agent()}
			
			title = cleantitle_geturl(title)
			
			query = self.movie_link % title
			u = urlparse.urljoin(self.base_link, query)
			url = {'url': u, 'year': year, 'type': 'movie'}
			url = urllib.urlencode(url)

			return url
		except:
			return
Example #6
    def movie(self, imdb, title, year):
        try:
            self.zen_url = []

            cleaned_title = cleantitle_get(title)
            title = cleantitle_query(title)

            q = self.search_link % (cleantitle_geturl(title))
            r = urlparse.urljoin(self.base_link, q)
            print("SERIESONLINE EPISODES", r)
            html = BeautifulSoup(client.request(r))
            containers = html.findAll('div', attrs={'class': 'ml-item'})
            for result in containers:
                links = result.findAll('a')
                for link in links:
                    link_title = link['title'].encode('utf-8')
                    href = link['href'].encode('utf-8')
                    href = urlparse.urljoin(self.base_link, href)
                    href = re.sub('/watching.html', '', href)
                    href = href + '/watching.html'

                    print("SERIESONLINE PASSED", link_title, href)
                    if title_match(cleantitle_get(link_title),
                                   cleaned_title) == True:
                        referer = href
                        html = client.request(href)

                        match = re.findall(
                            '<strong>Release:</strong>(.+?)</p>', html)[0]
                        if year in match:

                            s = BeautifulSoup(html)

                            s = s.findAll('div',
                                          attrs={'class': 'les-content'})
                            for u in s:
                                print("SERIESONLINE PASSED u", u)
                                player = u.findAll(
                                    'a')[0]['player-data'].encode('utf-8')

                                if not player in self.zen_url:
                                    self.zen_url.append([player, referer])

                            return self.zen_url
        except:
            return
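The scraping pattern above (iterate the div.ml-item containers, then read each anchor's title and href) can be exercised in isolation against a small HTML fragment. The fragment and the BeautifulSoup 3 import are assumptions for illustration; the real markup comes from client.request(r), and bs4's findAll would behave the same way here.

from BeautifulSoup import BeautifulSoup  # assumption: BeautifulSoup 3; bs4 also exposes findAll

sample_html = '''
<div class="ml-item">
  <a title="Example Movie 2016" href="/film/example-movie-2016/watching.html">Example Movie 2016</a>
</div>
'''

soup = BeautifulSoup(sample_html)
for container in soup.findAll('div', attrs={'class': 'ml-item'}):
    for link in container.findAll('a'):
        print(link['title'], link['href'])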
Example #7
    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        try:
            self.zen_url = []
            data = urlparse.parse_qs(url)
            data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
            title = cleantitle.getsearch(data['tvshowtitle'])
            print ("ONEMOVIES EPISODES STARTED")
            season = '%01d' % int(season)
            episode = '%01d' % int(episode)
            query = cleantitle_geturl(title) + "-season-" + season
            q = self.search_link % (query)
            r = urlparse.urljoin(self.base_link, q)
            cleaned_title = cleantitle_get(title) + "season" + season
            print ("ONEMOVIES EPISODES", q)
            html = BeautifulSoup(OPEN_URL(r).content)
            containers = html.findAll('div', attrs={'class': 'ml-item'})
            for result in containers:
                links = result.findAll('a')
                for link in links:
                    link_title = link['title'].encode('utf-8')
                    href = link['href'].encode('utf-8')
                    href = urlparse.urljoin(self.base_link, href)
                    href = re.sub('/watching.html','', href)
                    href = href + '/watching.html'

                    # print("ONEMOVIES", link_title, href)
                    if title_match(cleantitle_get(link_title), cleaned_title) == True:
						print("ONEMOVIES FOUND MATCH", link_title, href)
						html = OPEN_URL(href).content
   						s = BeautifulSoup(html)
							
						s = s.findAll('div', attrs={'class': 'les-content'})
						for u in s:
							print("ONEMOVIES PASSED u", u)
							player = u.findAll('a')[0]['player-data'].encode('utf-8')
							ep_id = u.findAll('a')[0]['episode-data'].encode('utf-8')
							if not ep_id == episode: raise Exception()
								
							if not player in self.zen_url:	self.zen_url.append(player)
							

						return self.zen_url

        except:
            return
Example #8
	def episode(self, url, imdb, tvdb, title, premiered, season, episode):
		self.zen_url = []
		try:
			headers = {'User-Agent': random_agent()}
			data = urlparse.parse_qs(url)
			data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
			title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
			data['season'], data['episode'] = season, episode
			title = cleantitle_geturl(title)
			query = title + "-season-" + season + "-episode-" + episode
			query = self.ep_link % query
			# print("SOLAR query", query)
			u = urlparse.urljoin(self.base_link, query)
			self.zen_url.append(u)
			return self.zen_url
		except:
			return
Example #9
    def movie(self, imdb, title, year):
        try:
            self.elysium_url = []


            cleaned_title = cleantitle_get(title)
            title = cleantitle_query(title)
                    
            q = self.search_link % (cleantitle_geturl(title))
            r = urlparse.urljoin(self.base_link, q)
            print ("SERIESONLINE EPISODES", r)
            html = BeautifulSoup(client.request(r))
            containers = html.findAll('div', attrs={'class': 'ml-item'})
            for result in containers:
                links = result.findAll('a')
                for link in links:
                    link_title = link['title'].encode('utf-8')
                    href = link['href'].encode('utf-8')
                    href = urlparse.urljoin(self.base_link, href)
                    href = re.sub('/watching.html','', href)
                    href = href + '/watching.html'

                    print("SERIESONLINE PASSED", link_title, href)
                    if title_match(cleantitle_get(link_title), cleaned_title) == True:
                        referer = href
                        html = client.request(href)
                        
                        match = re.findall('<strong>Release:</strong>(.+?)</p>', html)[0]
                        if year in match:

                            s = BeautifulSoup(html)

                            s = s.findAll('div', attrs={'class': 'les-content'})
                            for u in s:
                                print("SERIESONLINE PASSED u", u)
                                player = u.findAll('a')[0]['player-data'].encode('utf-8')

                                if not player in self.elysium_url:
                                    self.elysium_url.append([player, referer])

                            return self.elysium_url
        except:
            return