def build_search_directory(url):
    """Build the Kodi directory listing for a Revision3 video search.

    url -- either the literal string 'search' (prompt the user for a term)
           or a ready-made Revision3 search-page URL (used for paging).

    Side effects: adds one list item per result plus an optional
    "next page" item via addListItem(), then closes the directory.
    Relies on module-level helpers/globals: common, getUrl, getParameters,
    clean, addListItem, settings, plugin, fanart_bg, next_thumb, setViewMode.
    """
    if url == 'search':
        # Fresh search: ask the user for a term and build page 1 of the
        # search URL. A cancelled/failed input dialog aborts silently.
        try:
            search = common.getUserInput("Enter search term", "").replace(' ','+')
            url = 'http://revision3.com/search/page?type=video&q=' + search + '&limit=10&page=1'
        # NOTE(review): bare except silently swallows all errors, including
        # typos in the code above — narrow this if the file is ever modernized.
        except:
            return
    html = getUrl(url).encode('ascii', 'ignore')
    # 'active' span holds the current page number; pager links carry the
    # next-page URL inside an onclick attribute.
    current = common.parseDOM(html, "span", attrs = { "class": "active" })
    pageLoad = common.parseDOM(html, "a", ret = "onclick")
    try:
        # Last onclick handler: extract the quoted URL fragment after the
        # first single quote and re-root it on revision3.com.
        strs = 'http://revision3.com' + pageLoad[-1:][0].rsplit('\'')[1]
        params = getParameters(strs)
        # NOTE(review): rstrip() strips a *character set*, not a suffix —
        # this only works while the URL happens not to end in any of the
        # characters of '&page=<n>'. Presumably intended as suffix removal;
        # confirm before relying on saveurl for arbitrary URLs.
        saveurl = strs.rstrip('&page=' + params['page']) + '&page=' + str( int(current[0]) + 1 )
        # More pages exist when the pager's last link points past the
        # currently active page.
        if int(params['page']) > int(current[0]):
            next = True
        else:
            next = False
    except:
        # No pager found (single page of results, or parse failure).
        next = False
    episodes = common.parseDOM(html, "li", attrs = { "class": "video" })
    if len(episodes) == 0:
        # No results: show a localized "nothing found" dialog and bail out
        # without closing the directory.
        dialog = xbmcgui.Dialog()
        ok = dialog.ok( plugin , settings.getLocalizedString( 30009 ) + '\n' + settings.getLocalizedString( 30010 ) )
        return
    for data in episodes:
        thumb = common.parseDOM(data, "img", ret = "src")[0]
        url = common.parseDOM(data, "a", attrs = { "class": "thumbnail" }, ret = "href" )[0]
        # Strip the videosurf redirect wrapper and tracking client id so the
        # stored URL points directly at the episode page.
        url = clean(url.replace('http://www.videosurf.com/webui/inc/go.php?redirect=','')).replace('&client_id=revision3','')
        title = clean(common.parseDOM(data, "a", attrs = { "class": "title" })[0])
        plot = clean(common.stripTags(common.parseDOM(data, "div", attrs = { "class": "description" })[0]))
        # Titles look like "<show> - <episode>"; the show name doubles as the
        # studio. Fall back to the generic 'Search' label when absent.
        try:
            studio = title.rsplit(' - ')[1]
        except:
            studio = 'Search'
        infoLabels = { "Title": title, "Studio": studio, "Plot": plot, "Episode": 0, "Aired": "0000-00-00" }
        # mode '2' = play/show episode; all values are URL-quoted because the
        # dict becomes this plugin's callback query string.
        u = { 'mode': '2', 'name': urllib.quote_plus(title), 'url': urllib.quote_plus(url), 'plot': urllib.quote_plus(infoLabels['Plot']), 'studio': urllib.quote_plus(infoLabels['Studio']), 'episode': urllib.quote_plus(str(infoLabels['Episode'])), 'thumb': urllib.quote_plus(thumb), 'date': urllib.quote_plus(infoLabels['Aired']) }
        addListItem(title, thumb, u, False, infoLabels, fanart_bg)
    if next == True:
        # "Next page" folder item (mode '4' = this search listing again).
        # NOTE(review): uses `studio` left over from the last loop iteration —
        # works only because the episode loop always runs before this point.
        u = { 'mode': '4', 'url': urllib.quote_plus(saveurl), 'name': urllib.quote_plus(studio) }
        infoLabels = { "Title": settings.getLocalizedString( 30016 ), "Plot": settings.getLocalizedString( 30016 ) }
        addListItem(settings.getLocalizedString( 30016 ) + ' (' + str( int(current[0]) + 1 ) + ')', next_thumb, u, True, infoLabels, fanart_bg)
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_UNSORTED )
    setViewMode("503")
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
# NOTE(review): fragment — everything down to the play_video() call reads as
# the tail of a TrailerAddict lookup/play function whose `def` header lies
# outside this chunk (a bare `return` appears below). Indentation is
# reconstructed from the statement flow; confirm against the full file.
data = getUrl( url )
# Pull the show/movie slug out of the RSS <link> element.
slug = re.compile( '<link>http:\/\/www\.traileraddict\.com\/trailer\/(.+?)\/' ).findall( data )
if len(slug):
    slug = slug[0]
else:
    # No direct slug: fall back to a library search by name.
    # (Placement of this None-check inside the else branch is inferred —
    # TODO confirm.)
    slug = build_search_directory(name, 'library')
    if slug == None:
        # Still nothing — tell the user the title could not be found.
        dialog = xbmcgui.Dialog()
        ok = dialog.ok(plugin, settings.getLocalizedString( 30019) + name + '.')
        return
# Resolve the tag page -> concrete trailer path -> absolute URL, then play.
url = 'http://www.traileraddict.com/tags/' + slug
url = find_trailers( url, name, page, True )
url = 'http://www.traileraddict.com' + url
play_video( url, name + ' (' + settings.getLocalizedString(30017) + ')', download )

# --- top-level plugin entry: decode the callback query string (sys.argv[2])
# into the dispatch variables. Each parameter is optional, hence the
# swallow-on-missing try/excepts (bare excepts kept as in the original).
params = getParameters(sys.argv[2])
url = None
name = None
mode = None
download = None
library = False
page = 0
try:
    url = urllib.unquote_plus( params['url'] )
except:
    pass
try:
    name = urllib.unquote_plus( params['name'] )
except:
    pass
# NOTE(review): fragment — this try/except and the playListItem() call are
# the tail of a TMZ playback function whose `def` header is outside this
# chunk; `url`, `thumb`, `name`, `studio` are its locals. Indentation is
# reconstructed; confirm against the full file.
try:
    # Preferred path: the page embeds a <media url="..."> element.
    data = getUrl( url, True )
    url = re.compile('<media url=\"(.+?)\"').findall(data)[0]
except:
    # Fallback: rebuild the video page URL from the thumbnail path and read
    # the VideoURL meta tag instead.
    url = 'http://www.tmz.com/videos/' + thumb.split('/')[9]
    data = getUrl( url, True )
    url = common.parseDOM(data, "meta", attrs = { "name": "VideoURL" }, ret = "content")[0]
infoLabels = { "Title": name , "Studio": "TMZ: " + studio, "Plot": name }
playListItem(label = name, image = thumb, path = url, infoLabels = infoLabels)

def playall():
    """Start playback of the current Kodi video playlist."""
    playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
    xbmc.Player().play(playlist)
    return

# --- top-level plugin entry: decode the callback query string (sys.argv[2]).
# Each parameter is optional, hence the swallow-on-missing try/excepts.
params = getParameters(sys.argv[2])
mode = None
name = None
url = None
studio = None
thumb = None
try:
    url = urllib.unquote_plus(params["url"])
except:
    pass
try:
    name = urllib.unquote_plus(params["name"])
except:
    pass
# NOTE(review): the chunk is truncated mid-statement here — this dangling
# `try:` continues beyond the visible source.
try:
def build_video_directory(url, name, type):
    """Build the Kodi directory for an ESPN video listing or search.

    url  -- listing URL, or (for repeat searches) a URL whose query string
            carries the previous searchString.
    name -- menu label; when it equals localized string 30005 ("Search",
            presumably — TODO confirm) the search branch is taken.
    type -- listing type; 'history' suppresses the fresh-search prompt.

    Reads the module-level global `page` for pagination. Side effects:
    addListItem() per video, optional "next page" item, directory close.
    """
    nextname = name
    if name == settings.getLocalizedString( 30005 ):
        # --- search branch ---
        if page == 0 and type != 'history':
            # First page of a brand-new search: prompt the user.
            try:
                newStr = common.getUserInput(settings.getLocalizedString( 30005 ), '').replace(' ','%20')
            except:
                return
            # Persist the term into the ' | '-separated search-presets
            # setting, avoiding duplicates.
            presets = settings.getSetting( "presets_search" )
            if presets == '':
                save_str = newStr
            else:
                if presets.find(newStr + ' |') == -1:
                    save_str = presets + ' | ' + newStr
                else:
                    save_str = presets
            settings.setSetting("presets_search", save_str)
        else:
            # Paging / history replay: reuse the term embedded in the URL.
            newStr = getParameters(url)["searchString"]
        # 16 results per page; `page` is the 0-based page index.
        url = 'http://search.espn.go.com/results?searchString=' + newStr + '&start=' + str(int(page) * 16) + '&dims=6'
        nexturl = url
        html = getUrl(url).decode('ascii', 'ignore')
        html = html.decode('UTF-8','ignore') #Swedemon/Mrdally204 Change 2015-08-31
        # Scrape search-result markup: title links, thumbs, descriptions.
        results = common.parseDOM(html, "li", attrs = { "class": "result video-result" })
        titledata = common.parseDOM(results, "h3")
        title = common.parseDOM(titledata, "a", attrs = { "rel": "nofollow" })
        if len(title) == 0:
            # Nothing found: notify, drop the term from the presets menu.
            dialog = xbmcgui.Dialog()
            ok = dialog.ok( plugin , settings.getLocalizedString( 30013 ) + '\n' + settings.getLocalizedString( 30014 ) )
            remove_menu(newStr,'search')
            return
        img = common.parseDOM(results, "a", attrs = { "class": "list-thumb" })
        desc = common.parseDOM(results, "p")
        thumb = common.parseDOM(img, "img", ret = "src" )
        # pagecount = [current page number, total pages] from the pager widget.
        pagenum = common.parseDOM(html, "div", attrs = { "class": "page-numbers" })[0]
        maxlength = common.parseDOM(pagenum, "span")[1].replace('of ','')
        value = common.parseDOM(pagenum, "input", attrs = { "id": "page-number" }, ret = "value" )[0]
        pagecount = [ value, maxlength ]
    else:
        # --- plain listing branch (library playlist module) ---
        nexturl = url
        html = getUrl(url + "&pageNum=" + str(int(page)) + "&sortBy=&assetURL=http://assets.espn.go.com&module=LibraryPlaylist&pagename=vhub_index")
        html = html.decode('UTF-8','ignore') #Swedemon/Mrdally204 Change 2015-08-31
        videocell = common.parseDOM(html, "div", attrs = { "class": "video-cell" })
        title = common.parseDOM(videocell, "h5")
        thumb = common.parseDOM(videocell, "img", ret = "src")
        desc = common.parseDOM(common.parseDOM(videocell, "p", attrs = { "class": "watch-now" }), "a", ret = "href")
        # "x of y" pager text -> [current, total]; empty string disables the
        # "next page" item below when no pager exists.
        try:
            pagecount = common.parseDOM(html, "div", attrs = { "class": "page-numbers" })[0].rsplit(' of ')
        except:
            pagecount = ''
    item_count = 0
    #print 'videocell='+str(videocell)+'title='+str(title)+'thumb='+str(thumb)+'desc='+str(desc)+'pagecount='+str(pagecount)
    # NOTE(review): the loop variable shadows the `name` parameter — the
    # original label is preserved in nextname above for the pager item.
    for name in title:
        # Skip espn360-only entries (identified via their thumbnail path).
        if '/espn360/' not in thumb[item_count]:
            # A raw http:// description is a link, not prose — fall back to
            # the title as the plot in that case.
            if 'http://' in desc[item_count]:
                plot = name
            else:
                plot = desc[item_count]
            # Derive the playable asset id from the thumbnail URL; prefer the
            # text after 'motion/', else the last four path components.
            # (NB: '_thumdnail_wbig' is the site's own misspelling — keep it.)
            try:
                data = thumb[item_count].replace('_thumdnail_wbig.jpg','').replace('.jpg','').rsplit('motion/')
                url = data[1]
            except:
                data = thumb[item_count].replace('_thumdnail_wbig.jpg','').replace('.jpg','').rsplit('/')[-4:]
                if len(data) >= 4:
                    url = data[0] + '/' + data[1] + '/' + data[2] + '/' + data[3]
                else:
                    url = 'null'
            thumbnailImage = thumb[item_count].replace('_thumdnail_wbig','')
            # mode '3' = play this video.
            u = { 'mode': '3', 'name': name, 'url': url.replace('motion/',''), 'thumb': thumbnailImage, 'plot': plot }
            infoLabels = { "Title": name, "Plot": plot }
            addListItem(label = name, image = thumbnailImage, url = u, isFolder = False, infoLabels = infoLabels)
        item_count += 1
    # "next page" folder while current != total pages.
    if pagecount and pagecount[0] != pagecount[1]:
        u = { 'mode': '2', 'name': nextname, 'url': nexturl, 'page': str(int(page) + 1), 'type': 'null' }
        infoLabels = { "Title": settings.getLocalizedString( 30003 ), "Plot": settings.getLocalizedString( 30003 ) }
        addListItem(label = settings.getLocalizedString( 30003 ), image = next_thumb, url = u, isFolder = True, infoLabels = infoLabels)
    xbmcplugin.addSortMethod( handle = int(sys.argv[1]), sortMethod = xbmcplugin.SORT_METHOD_NONE )
    setViewMode("503")
    xbmcplugin.endOfDirectory( int( sys.argv[1] ) )
def build_sub_directory(url, name):
    """Build the Kodi episode directory for one Revision3 show page.

    url  -- show/episode-page URL (also reused as the paging base).
    name -- show label; doubles as the default studio for episode items.

    Side effects: caches per-show fanart URLs in the add-on settings,
    adds one list item per episode plus optional child-show and
    "next page" folders, then closes the directory.
    """
    saveurl = url
    studio = name
    savestudio = name
    html = getUrl(url)
    # The episode list lives in one of three containers depending on page
    # layout — try each in turn.
    ret = common.parseDOM(html, "div", attrs = { "id": "main-episodes" })
    pageLoad = common.parseDOM(ret, "a", ret = "onclick")
    if len(ret) == 0:
        ret = common.parseDOM(html, "div", attrs = { "id": "all-episodes" })
        pageLoad = common.parseDOM(ret, "a", ret = "onclick")
    if len(ret) == 0:
        ret = common.parseDOM(html, "ul", attrs = { "class": "episode-grid" })
        pageLoad = common.parseDOM(html, "a", ret = "onclick")
    current = common.parseDOM(html, "span", attrs = { "class": "active" })
    episodes = common.parseDOM(ret, "li", attrs = { "class": "episode item" })
    # First thumbnail's 7th path component is the show slug, used as the
    # settings key for cached fanart.
    img = common.parseDOM(episodes[0], "img", ret = "src")[0]
    if settings.getLocalizedString( 30013 ) != name:
        try:
            # Preferred fanart: the 1920x1200 wallpaper from the show's
            # downloads page (second match when more than one exists).
            downloads = 'http://revision3.com/' + img.rsplit('/')[6] + '/' + img.rsplit('/')[6] + '_downloads'
            fresult = getPage(downloads)['content']
            data = re.compile( '<a href="(.+?)" target="_blank">1920x1200</a>' ).findall(fresult)
            if len(data) > 1:
                fanart = data[1]
            else:
                fanart = data[0]
            settings.setSetting(img.rsplit('/')[6], fanart)
        except:
            # Fallback fanart: the static show background; if that 404s,
            # cache the plugin-wide default instead.
            fanart = 'http://statics.revision3.com/_/images/shows/' + img.rsplit('/')[6] + '/show_background.jpg'
            if getPage(fanart)['error'] == 'HTTP Error 404: Not Found':
                settings.setSetting(img.rsplit('/')[6], fanart_bg)
            else:
                settings.setSetting(img.rsplit('/')[6], fanart)
        try:
            # Optional child show (e.g. a spin-off feed) — add it as a
            # bracketed folder entry. Best-effort: absent markup just skips.
            child = common.parseDOM(html, "div", attrs = { "id": "child-episodes" })
            label = common.parseDOM(html, "a", attrs = { "href": "#child-episodes" })[0]
            childshow = common.parseDOM(child, "a", attrs = { "class": "thumbnail" }, ret = "href" )[0].rsplit('/')[1]
            csaveurl = 'http://revision3.com/' + childshow + '/episodePage?type=recent&limit=15&hideShow=1&hideArrows=1&page=1'
            u = { 'mode': '1', 'name': urllib.quote_plus(studio), 'url': urllib.quote_plus(csaveurl) }
            infoLabels = { "Title": label, "Plot": label }
            addListItem('[ ' + label + ' ]', more_thumb, u, True, infoLabels, fanart)
        except:
            pass
    try:
        # Pagination: last onclick handler carries the next-page URL inside
        # single quotes.
        strs = 'http://revision3.com' + pageLoad[-1:][0].rsplit('\'')[1]
        params = getParameters(strs)
        # NOTE(review): rstrip() strips a *character set*, not a suffix —
        # presumably intended as suffix removal (same pattern as in
        # build_search_directory); confirm it holds for these URLs.
        saveurl = strs.rstrip('&page=' + params['page']) + '&page=' + str( int(current[0]) + 1 )
        if int(params['page']) > int(current[0]):
            next = True
        else:
            next = False
    except:
        next = False
    for data in episodes:
        # Upgrade thumbnail resolution and look up cached fanart by show id.
        thumb = common.parseDOM(data, "img", ret = "src")[0].replace('small.thumb','medium.thumb')
        show_id = thumb.split('/')[6]
        if not settings.getSetting(show_id):
            fanart = fanart_bg
        else:
            fanart = settings.getSetting(show_id)
        plot = clean(common.parseDOM(data, "img", ret = "alt")[0])
        name = clean(common.stripTags(common.parseDOM(data, "a")[1]))
        cut = common.parseDOM(data, "a")[1]
        # The <strong> inside the second anchor, when present, is the show
        # name; keep the previous studio on parse failure.
        try:
            studio = clean(common.parseDOM(cut, "strong")[0])
        except:
            pass
        name = name.replace(studio + ' ','')
        url = 'http://revision3.com' + common.parseDOM(data, "a", attrs = { "class": "thumbnail" }, ret = "href")[0]
        # Episode titles look like "Episode <n> ... <mm/dd/yy> ... (mm:ss)";
        # the date is rearranged to ISO-ish yy-mm-dd order.
        try:
            episode = name.rsplit(' ')[1]
            date = name.rsplit(' ')[3].rsplit('/')[2] + '-' + name.rsplit(' ')[3].rsplit('/')[0] + '-' + name.rsplit(' ')[3].rsplit('/')[1]
        except:
            episode = '0'
            date = '0000-00-00'
        # Trailing "(mm:ss)" -> duration in seconds.
        length = name[-6:].rstrip(')').replace('(','').split(':')
        duration = int(length[0]) * 60 + int(length[1])
        infoLabels = { "Title": plot, "Studio": studio, "Plot": plot, "Episode": int(episode), "Aired": date }
        # mode '2' = play episode; values are URL-quoted into the plugin
        # callback query string.
        u = { 'mode': '2', 'name': urllib.quote_plus(plot), 'url': urllib.quote_plus(url), 'plot': urllib.quote_plus(infoLabels['Plot'].encode('ascii', 'ignore')), 'studio': urllib.quote_plus(infoLabels['Studio']), 'episode': urllib.quote_plus(str(infoLabels['Episode'])), 'thumb': urllib.quote_plus(thumb), 'date': urllib.quote_plus(infoLabels['Aired']) }
        addListItem(plot, thumb, u, False, infoLabels, fanart, duration)
    if next == True:
        # "Next page" folder item (mode '1' = this sub-directory again).
        u = { 'mode': '1', 'name': urllib.quote_plus(savestudio), 'url': urllib.quote_plus(saveurl) }
        infoLabels = { "Title": settings.getLocalizedString( 30016 ), "Plot": settings.getLocalizedString( 30016 ) }
        addListItem(settings.getLocalizedString( 30016 ) + ' (' + str( int(current[0]) + 1 ) + ')', next_thumb, u, True, infoLabels, fanart_bg)
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_UNSORTED )
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_EPISODE )
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_STUDIO )
    xbmcplugin.addSortMethod( handle=int( sys.argv[ 1 ] ), sortMethod=xbmcplugin.SORT_METHOD_VIDEO_RUNTIME )
    setViewMode("503")
    xbmcplugin.endOfDirectory(int(sys.argv[1]))