def recently_viewed(delete=None):
    ExternalDatabase.connect()
    InternalDatabase.connect()
    if delete is not None:
        ExternalDatabase.remove(delete)
        xbmc.executebuiltin('Container.Refresh')
    else:
        items = []
        for path in ExternalDatabase.fetchall():
            drama = drama_detail(path)
            item = ListItem(drama['title'])
            item.addContextMenuItems([(
                'Remove',
                'RunPlugin(plugin://plugin.video.dramacool/recently-viewed?delete=' + path + ')')])
            item.setArt({'poster': drama.pop('poster')})
            item.setInfo('video', drama)
            items.append((plugin.url_for(path), item, True))
        xbmcplugin.setContent(plugin.handle, 'videos')
        xbmcplugin.addDirectoryItems(plugin.handle, items, len(items))
        xbmcplugin.endOfDirectory(plugin.handle)
    ExternalDatabase.close()
    InternalDatabase.close()
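# NOTE: ExternalDatabase and InternalDatabase are defined elsewhere in the add-on and are not
# shown in this section. The class below is only a sketch of one possible shape for the
# "recently viewed" store used above (a sqlite3 table of visited paths); the file name, table
# layout and class name are assumptions, not the add-on's actual implementation.
import os
import sqlite3
import xbmc


class RecentlyViewedStoreSketch(object):
    connection = None

    @classmethod
    def connect(cls):
        profile = xbmc.translatePath('special://profile/addon_data/plugin.video.dramacool/')
        if not os.path.exists(profile):
            os.makedirs(profile)
        cls.connection = sqlite3.connect(os.path.join(profile, 'recently-viewed.db'))
        cls.connection.execute('CREATE TABLE IF NOT EXISTS recently_viewed (path TEXT PRIMARY KEY)')

    @classmethod
    def fetchall(cls):
        # paths are replayed through drama_detail() to rebuild the directory items
        return [row[0] for row in cls.connection.execute('SELECT path FROM recently_viewed')]

    @classmethod
    def remove(cls, path):
        cls.connection.execute('DELETE FROM recently_viewed WHERE path = ?', (path,))
        cls.connection.commit()

    @classmethod
    def close(cls):
        cls.connection.close()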
def get_anime_detail(path):
    anime = InternalDatabase.fetchone(path)
    if anime is None:
        response = request(path)
        document = BeautifulSoup(response.text, 'html.parser').find('div', class_="anime_info_body_bg")
        img = document.find('img')['src'].encode('utf-8').strip()
        title = document.find('h1').string.encode('utf-8').strip()
        pList = document.find_all('p', class_="type")
        plot = pList[1].contents[1].encode('utf-8').strip() if len(pList[1].contents) >= 2 else ''
        genre = ""
        for a in pList[2].find_all('a'):
            genre += a.string.encode('utf-8')
        try:
            year = pList[3].contents[1].encode('utf-8').strip()
            year = int(year) if year.isdigit() else None
        except IndexError:
            year = None
        status = pList[4].contents[1].encode('utf-8').strip()
        InternalDatabase.add((path, img, title, plot, genre, status, year))
        anime = InternalDatabase.fetchone(path)
    return anime
def genList(url):
    # download pages
    InternalDatabase.connect()
    response = Get(url)
    page = response.text
    soup = BeautifulSoup(page, 'html.parser')
    result = soup.find('ul', class_='drama_rich clearfix')
    liList = result.find_all('li', class_='sizing')
    recentUpdatedDiv = result.find_all('div', class_='txttitle')
    for item in liList:
        aTag = item.find('a')
        divTag = item.find('div', class_='title sizing')
        if chinaqUrl in aTag['href']:
            location = urlparse.urlparse(aTag['href'])
            path = location.path
        else:
            path = aTag['href']
        drama = get_drama_detail(path)
        li = xbmcgui.ListItem(drama['title'] + "(" +
                              item.find('div', class_="episode").find('a').string.encode('utf-8') + ")")
        li.setArt({'poster': drama.pop('poster')})
        li.setInfo("video", drama)
        newUrl = build_url({
            'mode': 'genEps',
            'path': path,
            'domain': urlparse.urlparse(response.url).hostname
        })
        if newUrl is not None:
            xbmcplugin.addDirectoryItem(handle=addon_handle, url=newUrl, listitem=li, isFolder=True)
    for div in recentUpdatedDiv:
        aTag = div.find('a')
        if chinaqUrl in aTag['href']:
            location = urlparse.urlparse(aTag['href'])
            path = location.path
        else:
            path = aTag['href']
        drama = get_drama_detail(path)
        li = xbmcgui.ListItem(aTag.string)
        li.setArt({'poster': drama.pop('poster')})
        li.setInfo("video", drama)
        newUrl = build_url({
            'mode': 'genEps',
            'path': path,
            'domain': urlparse.urlparse(response.url).hostname
        })
        if newUrl is not None:
            xbmcplugin.addDirectoryItem(handle=addon_handle, url=newUrl, listitem=li, isFolder=True)
    InternalDatabase.close()
    xbmcplugin.endOfDirectory(addon_handle)
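# NOTE: build_url() is a helper defined elsewhere in the add-on. The sketch below shows the
# usual shape of such a helper in Kodi plugins (route parameters urlencoded onto the plugin://
# base URL); the function name with the "_sketch" suffix and the use of sys.argv[0] as the base
# are illustrative assumptions, not the add-on's confirmed implementation.
import sys
import urllib


def build_url_sketch(query):
    """Encode a dict of route parameters (mode, path, domain, ...) into a plugin:// URL."""
    return sys.argv[0] + '?' + urllib.urlencode(query)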
def on_going():
    InternalDatabase.connect()
    pageNum = int(plugin.query['page'][0])
    response = request2(plugin.pathqs)
    document = BeautifulSoup(response.text, 'html.parser').find('div', class_="added_series_body popular")
    items = []
    for li in document.find_all('li'):
        a = li.find_all('a')
        path = a[0]['href'].encode('utf-8')
        anime = get_anime_detail(path)
        item = ListItem(anime['title'] + " " + a[-1].string.encode('utf-8'))
        item.setArt({'poster': anime.pop('poster')})
        item.setInfo("video", anime)
        items.append((plugin.url_for(path), item, True))
    item = ListItem("Next >>")
    items.append((plugin.url_for("/ajax/page-recent-release-ongoing.html?page=" + str(pageNum + 1)), item, True))
    if pageNum != 1:
        item = ListItem("Back to main page")
        items.append((plugin.url_for("/"), item, True))
    InternalDatabase.close()
    xbmcplugin.setContent(plugin.handle, 'videos')
    xbmcplugin.addDirectoryItems(plugin.handle, items, len(items))
    xbmcplugin.endOfDirectory(plugin.handle)
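# NOTE: `plugin` above is the add-on's small routing/request-context object, defined elsewhere.
# The sketch below only illustrates one plausible shape for the attributes the listing functions
# rely on (handle, path, pathqs, query, url_for); the class name and the exact parsing of Kodi's
# sys.argv are assumptions.
import urlparse


class PluginContextSketch(object):
    def __init__(self, argv):
        parsed = urlparse.urlparse(argv[0])              # e.g. plugin://plugin.video.xxx/recently-viewed
        self.base = 'plugin://' + parsed.netloc
        self.handle = int(argv[1])                       # directory handle passed by Kodi
        self.path = parsed.path or '/'                   # route path, e.g. /recently-viewed
        self.pathqs = self.path + argv[2]                # path plus query string ('?page=1' or '')
        self.query = urlparse.parse_qs(argv[2].lstrip('?'))  # dict of lists, hence query['page'][0]

    def url_for(self, path):
        """Build a plugin:// URL for a site path so Kodi routes back into this add-on."""
        return self.base + path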
def recent_release():
    InternalDatabase.connect()
    pageNum = int(plugin.query['page'][0])
    response = request(plugin.pathqs)
    document = BeautifulSoup(response.text, 'html.parser').find('div', class_="last_episodes loaddub")
    items = []
    for li in document.find_all('li'):
        a = li.find('a')
        p = li.find('p', class_="episode")
        # it = re.search("^(/.+)-episode-([0-9/-]+)$", a['href'].encode('utf-8'), flags=0)
        response = request(a['href'])
        path = BeautifulSoup(response.text, 'html.parser').find('div', class_="anime-info").find('a')['href']
        # path = "/category"+it.group(1).encode('utf-8')
        anime = get_anime_detail(path)
        item = ListItem(anime['title'] + " " + p.string.encode('utf-8'))
        item.setArt({'poster': anime.pop('poster')})
        item.setInfo("video", anime)
        items.append((plugin.url_for(path), item, True))
    item = ListItem("Next >>")
    items.append((plugin.url_for("/?page=" + str(pageNum + 1)), item, True))
    if pageNum != 1:
        item = ListItem("Back to main page")
        items.append((plugin.url_for("/"), item, True))
    InternalDatabase.close()
    xbmcplugin.setContent(plugin.handle, 'videos')
    xbmcplugin.addDirectoryItems(plugin.handle, items, len(items))
    xbmcplugin.endOfDirectory(plugin.handle)
def create_database():
    InternalDatabase.connect()
    for path in ['/drama-list', '/kshow']:
        response = request(path)
        document = BeautifulSoup(response.content, 'html.parser')
        for li in document.find_all('li', {'class': 'filter-item'}):
            drama_detail(li.find('a').attrs['href'])
    InternalDatabase.close()
def genericList():
    InternalDatabase.connect()
    if "/search.html" == plugin.path and "keyword" not in plugin.query:
        keyboard = Keyboard()
        keyboard.doModal()
        if keyboard.isConfirmed():
            keyword = keyboard.getText()
            response = request(plugin.pathqs + '&keyword=' + keyword)
        else:
            # close the cache connection before bailing out on a cancelled search
            InternalDatabase.close()
            return
    else:
        response = request(plugin.pathqs)
    pageNum = int(plugin.query['page'][0])
    document = BeautifulSoup(response.text, 'html.parser').find('div', class_="last_episodes")
    items = []
    for li in document.find_all('li'):
        a = li.find('a')
        path = a['href'].encode('utf-8')
        anime = get_anime_detail(path)
        # item = ListItem(a['title'].encode('utf-8').strip())
        # item.setArt({'poster': a.find('img')['src']})
        # item.setInfo("video", {'year': int(li.find('p', class_="released").string.strip()[-4:])})
        item = ListItem(anime['title'])
        item.setArt({'poster': anime.pop('poster')})
        item.setInfo("video", anime)
        items.append((plugin.url_for(path), item, True))
    item = ListItem("Next >>")
    if "/search.html" == plugin.path:
        if "keyword" in plugin.query:
            items.append((plugin.url_for(plugin.path + "?page=" + str(pageNum + 1) +
                                         '&keyword=' + plugin.query['keyword'][0]), item, True))
        else:
            items.append((plugin.url_for(plugin.path + "?page=" + str(pageNum + 1) +
                                         '&keyword=' + keyword), item, True))
    else:
        items.append((plugin.url_for(plugin.path + "?page=" + str(pageNum + 1)), item, True))
    if pageNum != 1:
        item = ListItem("Back to main page")
        items.append((plugin.url_for("/"), item, True))
    InternalDatabase.close()
    xbmcplugin.setContent(plugin.handle, 'videos')
    xbmcplugin.addDirectoryItems(plugin.handle, items, len(items))
    xbmcplugin.endOfDirectory(plugin.handle)
def get_drama_detail(path):
    drama = InternalDatabase.fetchone(path)
    if drama is None:
        response = Get(chinaqUrl + path)
        response.encoding = 'utf-8'
        document = BeautifulSoup(response.text, 'html.parser')
        h1 = document.find('div', id='contain').find('h1')
        title = h1.contents[0][:-3].strip()
        img = imageUrl + path[:-1] + ".jpg"
        plot = getDescription(document)
        InternalDatabase.add((path, img, title, plot))
        drama = InternalDatabase.fetchone(path)
    return drama
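# NOTE: Get(), request() and request2() are thin HTTP helpers defined elsewhere in the add-on.
# A minimal sketch of what Get() could look like is given below, assuming the `requests`
# library with a desktop User-Agent; the header value and timeout are illustrative only.
import requests


def get_sketch(url):
    """Fetch a page and return the requests.Response object the scrapers above consume."""
    return requests.get(url, headers={'User-Agent': 'Mozilla/5.0'}, timeout=30)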
def drama_detail(path):
    drama = InternalDatabase.fetchone(path)
    if drama is None:
        response = request(path)
        document = BeautifulSoup(response.content, 'html.parser')
        element = document.find('div', {'class': 'details'})
        year = document.find('span', text='Released:').find_next_sibling('a').text
        InternalDatabase.add((
            path,
            element.find('img').attrs['src'],
            element.find('h1').text,
            element.find('span', text=re.compile('Description:?')).parent.find_next_sibling().text,
            document.find('span', text=re.compile('Country: ?')).next_sibling.strip(),
            document.find('span', text='Status:').find_next_sibling('a').text,
            int(year) if year.isdigit() else None))
        drama = InternalDatabase.fetchone(path)
    return drama
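# NOTE: InternalDatabase is the add-on's metadata cache and is defined elsewhere. The sketch
# below only illustrates how a cached sqlite3 row could be turned into the dict that
# drama_detail() returns (including the 'poster' key that the listing code pops before calling
# setInfo); the table layout and column names are assumptions based on the tuple added above.
import sqlite3


def fetchone_sketch(connection, path):
    """Return a drama row as a dict of Kodi info-labels, or None when it is not cached yet."""
    row = connection.execute(
        'SELECT path, poster, title, plot, country, status, year FROM drama WHERE path = ?',
        (path,)).fetchone()
    if row is None:
        return None
    return {'poster': row[1], 'title': row[2], 'plot': row[3],
            'country': row[4], 'status': row[5], 'year': row[6]}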
def star():
    response = request(plugin.path)
    document = BeautifulSoup(response.text, 'html.parser')
    InternalDatabase.connect()
    items = []
    for a in document.find('ul', {'class': 'list-episode-item'}).find_all('a'):
        path = a.attrs['href']
        drama = drama_detail(path)
        item = ListItem(drama['title'])
        item.setArt({'poster': drama.pop('poster')})
        item.setInfo('video', drama)
        items.append((plugin.url_for(path), item, True))
    InternalDatabase.close()
    xbmcplugin.setContent(plugin.handle, 'videos')
    xbmcplugin.addDirectoryItems(plugin.handle, items, len(items))
    xbmcplugin.addSortMethod(plugin.handle, xbmcplugin.SORT_METHOD_TITLE)
    xbmcplugin.addSortMethod(plugin.handle, xbmcplugin.SORT_METHOD_VIDEO_YEAR)
    xbmcplugin.endOfDirectory(plugin.handle)
def list_select_id(select_id, select_value, path):
    InternalDatabase.connect()
    response = request(path)
    document = BeautifulSoup(response.text, 'html.parser')
    items = []
    if select_id == 'char':
        select_value = chr(int(select_value))
        for div in document.find_all('div', {'class': 'list-content'}):
            if div.find('h4').text == select_value:
                for a in div.find('ul', {'class': 'filter-char'}).find_all('a'):
                    path = a.attrs['href']
                    drama = drama_detail(path)
                    item = ListItem(drama['title'])
                    item.setArt({'poster': drama.pop('poster')})
                    item.setInfo('video', drama)
                    items.append((plugin.url_for(path), item, True))
                break
    else:
        for li in document.find_all('li', {'class': '{}_{}'.format(select_id, select_value)}):
            path = li.find('a').attrs['href']
            drama = drama_detail(path)
            item = ListItem(drama['title'])
            item.setArt({'poster': drama.pop('poster')})
            item.setInfo('video', drama)
            items.append((plugin.url_for(path), item, True))
    InternalDatabase.close()
    xbmcplugin.setContent(plugin.handle, 'videos')
    xbmcplugin.addDirectoryItems(plugin.handle, items, len(items))
    xbmcplugin.addSortMethod(plugin.handle, xbmcplugin.SORT_METHOD_TITLE)
    xbmcplugin.addSortMethod(plugin.handle, xbmcplugin.SORT_METHOD_VIDEO_YEAR)
    xbmcplugin.endOfDirectory(plugin.handle)
def pagination():
    if plugin.path == '/search' and 'keyword' not in plugin.query:
        keyboard = Keyboard()
        keyboard.doModal()
        if keyboard.isConfirmed():
            response = request(plugin.pathqs + '&keyword=' + keyboard.getText())
        else:
            return
    else:
        response = request(plugin.pathqs)
    document = BeautifulSoup(response.text, 'html.parser').find('ul', {'class': ['list-episode-item', 'list-star']})
    items = []
    if document is not None:
        if plugin.path in ('/list-star.html', '/most-popular-drama', '/search'):
            if plugin.path == '/list-star.html' or ('type' in plugin.query and 'stars' in plugin.query['type']):
                # star listings have no cached detail record, so build items straight from the markup
                for li in document.find_all('li', recursive=False):
                    plot = li.find('ul')
                    item = ListItem(li.find('img').attrs['alt'])
                    item.setArt({'poster': li.find('img').attrs['data-original']})
                    item.setInfo('video', {'plot': '' if plot is None else plot.text})
                    items.append((plugin.url_for(li.find('a').attrs['href']), item, True))
            else:
                InternalDatabase.connect()
                for a in document.find_all('a'):
                    path = a.attrs['href']
                    drama = drama_detail(path)
                    item = ListItem(drama['title'])
                    item.setArt({'poster': drama.pop('poster')})
                    item.setInfo('video', drama)
                    items.append((plugin.url_for(path), item, True))
                InternalDatabase.close()
        else:
            for a in document.find_all('a'):
                item = ListItem(u'[{}] {} {}'.format(
                    a.find('span', {'class': 'type'}).text,
                    a.find('h3').text,
                    a.find('span', {'class': 'ep'}).text))
                item.setArt({'poster': a.find('img').attrs['data-original']})
                item.setInfo('video', {})
                item.setProperty('IsPlayable', 'true')
                items.append((plugin.url_for(a.attrs['href']), item, False))
        # pagination links (next/previous), if any, live in the element following the listing
        document = document.find_next_sibling()
        if document is not None:
            for li in document.find_all('li', {'class': ['next', 'previous']}):
                item = ListItem(li.text)
                items.append((plugin.url_for(plugin.path + li.find('a').attrs['href']), item, True))
    xbmcplugin.setContent(plugin.handle, 'videos')
    xbmcplugin.addDirectoryItems(plugin.handle, items, len(items))
    xbmcplugin.endOfDirectory(plugin.handle)
def genListForCountry(country=None):
    # download pages
    InternalDatabase.connect()
    if country is None:
        response = Get(langlangbayUrl + "/new.html")
    else:
        response = Get(langlangbayUrl + "/all.html")
    page = response.text
    soup = BeautifulSoup(page, 'html.parser')
    result = soup.find('ul', class_='drama_list').find_all('li')
    for item in result:
        if country is not None:
            if country in item['name']:
                aTag = item.find('a')
                if chinaqUrl in aTag['href']:
                    location = urlparse.urlparse(aTag['href'])
                    path = location.path
                    drama = get_drama_detail(path)
                    li = xbmcgui.ListItem(drama['title'])
                    li.setArt({'poster': drama.pop('poster')})
                    li.setInfo("video", drama)
                    newUrl = build_url({
                        'mode': 'genEps',
                        'path': path,
                        'domain': location.hostname
                    })
                else:
                    path = aTag['href']
                    drama = get_drama_detail(path)
                    li = xbmcgui.ListItem(drama['title'])
                    li.setArt({'poster': drama.pop('poster')})
                    li.setInfo("video", drama)
                    newUrl = build_url({
                        'mode': 'genEps',
                        'path': path,
                        'domain': urlparse.urlparse(response.url).hostname
                    })
                if newUrl is not None:
                    xbmcplugin.addDirectoryItem(handle=addon_handle, url=newUrl, listitem=li, isFolder=True)
        else:
            aTag = item.find('a')
            if chinaqUrl in aTag['href']:
                location = urlparse.urlparse(aTag['href'])
                path = location.path
                drama = get_drama_detail(path)
                li = xbmcgui.ListItem(drama['title'])
                li.setArt({'poster': drama.pop('poster')})
                li.setInfo("video", drama)
                newUrl = build_url({
                    'mode': 'genEps',
                    'path': path,
                    'domain': location.hostname
                })
            else:
                path = aTag['href']
                drama = get_drama_detail(path)
                li = xbmcgui.ListItem(drama['title'])
                li.setArt({'poster': drama.pop('poster')})
                li.setInfo("video", drama)
                newUrl = build_url({
                    'mode': 'genEps',
                    'path': path,
                    'domain': urlparse.urlparse(response.url).hostname
                })
            if newUrl is not None:
                xbmcplugin.addDirectoryItem(handle=addon_handle, url=newUrl, listitem=li, isFolder=True)
    InternalDatabase.close()
    xbmcplugin.endOfDirectory(addon_handle)