def KategorieLista():
    """List DramaQueen slider entries, appending a coloured Drama/Film tag to the label.

    Reads the target page URL from the global ``params`` dict and emits one
    directory item per drama and one playable link per film.
    """
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    url = params['url']
    # Use .text (decoded str) for consistency with the other KategorieLista
    # variant; CleanHTML/parseDOM operate on str, not bytes.
    rG = requests.get(url, headers=headersget, timeout=15).text
    rG = CleanHTML(rG)
    # LoginCheck(url=rG)
    result = parseDOM(rG, 'div', attrs={'class': 'avia-content-slider-inner'})[0]
    label = [parseDOM(i, 'a', ret='title')[0] for i in parseDOM(result, 'h3')]
    obraz = parseDOM(result, 'img', ret='src')
    links = [parseDOM(i, 'a', ret='href')[0] for i in parseDOM(result, 'h3')]
    for item in zip(label, links, obraz):
        # URL path distinguishes multi-episode dramas from single films.
        if '/drama/' in str(item[1]):
            addon.addDir(str(item[0]) + ' ' + '[COLOR %s]Drama[/COLOR]' % 'green',
                         str(item[1]), mode=4,
                         fanart=str(item[2]), thumb=str(item[2]))
        elif '/film/' in str(item[1]):
            addon.addLink(str(item[0]) + ' ' + '[COLOR %s]Film[/COLOR]' % 'green',
                          str(item[1]), mode=5,
                          fanart=str(item[2]), thumb=str(item[2]))
def ListMovies():
    """List the movies found in the page's ``av_section_1`` grid as playable links."""
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    url = params['url']
    # BUG FIX: the original did str(resp.content); on Python 3 str() of bytes
    # yields a "b'...'" repr with literal backslash escapes, which silently
    # corrupts every regex match below. .text returns the decoded body.
    rM = requests.get(url, headers=headersget, timeout=15).text
    rM = CleanHTML(rM)
    result = parseDOM(rM, 'div', attrs={'id': 'av_section_1'})[0]
    results = re.findall('flex_column av_one_fourth(.+?)</div></div></div>', result)
    Titles = re.findall('><p>(.+?)</p>', result)
    # NOTE: [\s,\S,.] matches any single character (kept as-is for behavior).
    Plot = re.findall(r'/p>[\s,\S,.]<p>(.+?)</p>', result)
    obrazy = parseDOM(results, 'img', ret='src')
    linki = parseDOM(results, 'a', ret='href')
    for item in zip(linki, Titles, obrazy, Plot):
        addon.addLink(str(item[1]), str(item[0]), mode=5,
                      thumb=str(item[2]), fanart=str(item[2]),
                      plot=str(item[3]))
def Kategorie():
    """List the tag-cloud categories, storing the item count in ``code`` for label2."""
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    url = params['url']
    rG = requests.get(url, headers=headersget, timeout=15).text
    # LoginCheck(url=rG)
    result = parseDOM(rG, 'div', attrs={'class': 'tagcloud'})[0]
    links = parseDOM(result, 'a', ret='href')
    label = parseDOM(result, 'a')
    # aria-label carries the per-tag item count; raw string avoids the
    # invalid-escape DeprecationWarning of '\d+'.
    count = [re.findall(r'\d+', i)[0]
             for i in parseDOM(result, 'a', ret='aria-label')]
    for item in zip(label, links, count):
        addon.addDir(str(item[0]), str(item[1]), mode=5,
                     fanart='', plot='', thumb='',
                     code='[B][COLOR %s]%s[/COLOR][/B]' % ('green', str(item[2]) + ' pozycji'))
    # Sort alphabetically; %P renders the code field as the second label.
    xbmcplugin.addSortMethod(int(sys.argv[1]),
                             sortMethod=xbmcplugin.SORT_METHOD_TITLE,
                             label2Mask='%P')
def Kategorie():
    """List the tag-cloud categories with the item count appended to the label."""
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    url = params['url']
    # .text (decoded str) for consistency with the sibling Kategorie variant;
    # parseDOM below works on str.
    rG = requests.get(url, headers=headersget, timeout=15).text
    # LoginCheck(url=rG)
    result = parseDOM(rG, 'div', attrs={'class': 'tagcloud'})[0]
    links = parseDOM(result, 'a', ret='href')
    label = parseDOM(result, 'a')
    # Raw string avoids the invalid-escape DeprecationWarning of '\d+'.
    count = [re.findall(r'\d+', i)[0]
             for i in parseDOM(result, 'a', ret='aria-label')]
    for item in zip(label, links, count):
        addon.addDir(str(item[0]) + ' ' + '[COLOR %s]%s[/COLOR]' % ('green', str(item[2]) + ' pozycji'),
                     str(item[1]), mode=7,
                     fanart='', plot='', thumb='')
def WyswietlanieLinkow():
    """Present the player/link selection for a DramaQueen episode or movie page.

    Episode items ("Odcinek N") select the N-th toggle section; items still in
    translation/correction have nothing to play and return early.
    """
    Logowanie(False)
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    url = params['url']
    name = params['name']
    html = requests.get(url, headers=headersget, timeout=15).text
    LoginCheck(html)
    results = parseDOM(html, 'section',
                       attrs={'class': 'av_toggle_section ' + r'.+?'})
    if name.startswith('Odcinek '):
        # Episode number selects its toggle section (1-based).
        index = int(re.findall(r'\d+', name)[0])
        avlinks = [parseDOM(item, 'a', ret='href') for item in results][index - 1]
        avplayers = [parseDOM(item, 'button') for item in results][index - 1]
    elif 'tłumaczeni' in name or 'korekta' in name:
        # BUG FIX: the original only did `pass` here and then fell through to
        # SourceSelect with avlinks/avplayers undefined, raising NameError.
        return
    else:
        # Movies: a single toggle section holds all sources.
        avlinks = [parseDOM(item, 'a', ret='href') for item in results][0]
        avplayers = [parseDOM(item, 'button') for item in results][0]
    addon.SourceSelect(players=avplayers, links=avlinks, title=name)
def GetDragonVideolink(url):
    """Resolve *url* to its final video URL by following redirects with the db cookie.

    :param url: player page URL to resolve.
    :returns: the URL the request ended at after redirects.
    """
    cookie = cache.cache_get('db_cookie')['value']
    headersget.update({'Cookie': cookie})
    headers = headersget
    # timeout added: every other request in this file bounds its wait; without
    # it a stalled server would hang the addon indefinitely.
    videolink = requests.get(url, headers=headers, timeout=15).url
    return videolink
def ListEpisodes():
    """List Shinden episodes, marking each as online/offline from its status icon."""
    section = params['section']
    name = params['name']
    url = params['url'] + '/episodes'
    thumb = params['img']
    Logowanie()
    cookie = cache.cache_get('shinden_cookie')['value']
    headersget.update({'Cookie': cookie})
    headers = headersget
    html = requests.get(url, headers=headers, timeout=15).content
    result = parseDOM(html, 'tbody', attrs={'class': 'list-episode-checkboxes'})[0]
    results = parseDOM(result, 'tr')
    epNo = [parseDOM(item, 'td')[0] for item in results]
    epTitle = [parseDOM(item, 'td', attrs={'class': 'ep-title'})[0]
               for item in results]
    # The FontAwesome icon name encodes availability: 'check' means online.
    epstatus = [re.findall('<i class="fa fa-fw fa-(.+?)"></i>', item)[0]
                for item in results]
    epDate = [parseDOM(item, 'td', attrs={'class': 'ep-date'})[0]
              for item in results]
    link = [mainLink + re.sub('^/', '', parseDOM(item, 'a', ret='href')[0])
            for item in results]
    for ep in zip(epNo, epTitle, epDate, link, epstatus):
        if str(ep[4]) == 'check':
            title = str(ep[0]) + ' : ' + str(ep[1]) \
                + '[COLOR=blue]%s[/COLOR]' % (' - ' + str(ep[2]))
            section = 'online'
        else:
            # The original's 'times' branch and its fallback were identical;
            # merged into one offline branch.
            title = str(ep[0]) + ' ' + '[COLOR=red] offline [/COLOR]'
            section = 'offline'
        addon.addLink(title, str(ep[3]), mode='SHListLinks',
                      fanart=str(thumb), thumb=str(thumb), section=section)
def ListEpisodes():
    """List DramaQueen episodes for one drama, tagging untranslated ones in red."""
    Logowanie(False)
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    name = params['name']
    thumb = params['img']
    url = params['url']
    rE = str(requests.get(url, headers=headersget, timeout=15).text)
    LoginCheck(rE)
    # Normalize typographic dash and non-breaking spaces before parsing.
    rE = rE.replace('–', '-')
    rE = rE.replace('&nbsp;', ' ')
    result = parseDOM(rE, 'div', attrs={'class': 'togglecontainer ' + r'.+?'})[0]
    results = re.findall('av_toggle_section(.+?)<span', result)
    episodes = parseDOM(results, 'p')
    plot = parseDOM(rE, 'section', attrs={'class': 'av_textblock_section '})[1]
    # Some pages wrap the synopsis in <em>, others in a plain <p>.
    if '<em>' in plot:
        plot = CleanHTML(parseDOM(plot, 'em')[0])
    else:
        plot = CleanHTML(parseDOM(plot, 'p')[0])
    fanart = ''  # re.findall('background-image: url\((.+?)\);', rE)[1]
    inprogress = '[COLOR=red][I] w tłumaczeniu[/COLOR][/I]'
    incorrection = '[COLOR=red][I] korekta[/COLOR][/I]'
    for item in episodes:
        # Episodes still in translation/correction get a red placeholder label
        # and a different mode so they are not treated as playable.
        if 'tłumaczenie' in item:
            addon.addLink(str(inprogress), url, mode=8,
                          fanart=str(fanart), plot=str(plot), thumb=str(thumb))
        elif 'korekta' in item:
            addon.addLink(str(incorrection), url, mode=8,
                          fanart=str(fanart), plot=str(plot), thumb=str(thumb))
        else:
            addon.addLink(str(item), url, mode=3,
                          fanart=str(fanart), plot=str(plot), thumb=str(thumb))
def ListLinks():
    """Build the player-selection list for a db episode page.

    VIP-only players are flagged as unsupported; others get a green label
    with subtitle language, audio language and quality.
    """
    Logowanie()
    url = params['url']
    name = params['name']
    subdir = params['subdir']
    cookie = cache.cache_get('db_cookie')['value']
    headersget.update({'Cookie': cookie})
    headers = headersget
    html = requests.get(url, headers=headers, timeout=10).text
    result = parseDOM(html, 'table', attrs={'id': 'video-table'})[0]
    results = parseDOM(result, 'tr', attrs={'title': 'Kliknij' + r'.+?'})
    playerlink = [mainLink + re.sub('^/', '', parseDOM(item, 'a', ret='href')[0])
                  for item in results]
    playername = [parseDOM(item, 'a')[0] for item in results]
    player = []
    # td cells per row: [_, audio, subs, _, quality]; the subs cell carries an
    # extra <span> that must be stripped.
    playersubs = [re.sub('<span(.+?)span>', '', parseDOM(item, 'td')[2])
                  for item in results]
    playeraudio = [parseDOM(item, 'td')[1] for item in results]
    playerquality = [parseDOM(item, 'td')[4] for item in results]
    for item in zip(playername, playersubs, playeraudio, playerquality):
        if 'VIP' in item[0]:
            playertitle = item[0] + ' ' + '[COLOR=red] brak obsługi [/COLOR]'
        else:
            playertitle = item[0] + ' ' \
                + '[COLOR=green] napisy %s - audio %s - %s [/COLOR]' % (item[1], item[2], item[3])
        player.append(playertitle)
    addon.SourceSelect(player, playerlink, name, subdir)
def ListLinks():
    """Build the Shinden player list from each row's embedded JSON metadata.

    Online sections only; offline items return without listing anything.
    """
    name = params['name']
    url = params['url']
    section = params['section']
    subdir = params['subdir']
    if section != 'online':
        return
    Logowanie()
    cookie = cache.cache_get('shinden_cookie')['value']
    headersget.update({'Cookie': cookie})
    headers = headersget
    html = requests.get(url, headers=headers, timeout=15).text
    result = [item for item in parseDOM(html, 'tbody') if 'player' in item]
    results = parseDOM(result, 'tr')
    # Each row carries its player metadata as JSON in data-episode='...'.
    playerinfo = [re.findall(r"data-episode='(.+?)' ", item) for item in results]
    # The auth token for the player API is exposed in _Storage.basic.
    code = re.findall(r"""_Storage\.basic.*=.*'(.*?)'""", html)[0]
    playerdata = [json.loads(item[0]) for item in playerinfo]
    playerlink = []
    player = []
    for i in playerdata:
        # `is None` instead of the original `== None` (identity check for None).
        title = i['player'] + '[COLOR=green]%s[/COLOR]' % (
            ' ' + 'Audio' + ' ' + i['lang_audio']
            + ('' if (i['lang_subs'] == '') or (i['lang_subs'] is None)
               else ' SUB ' + i['lang_subs']))
        player.append(title)
        ID = i['online_id']
        link = "https://api4.shinden.pl/xhr/%s/player_load?auth=%s" % (ID, code)
        playerlink.append(link)
    addon.SourceSelect(player, playerlink, name, subdir)
def WyswietlanieLinkow():
    """Show player/link choices for a DramaQueen item (episode or movie variant)."""
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    url = params['url']
    name = params['name']
    if name.startswith('Odcinek '):
        # Raw string avoids the invalid-escape DeprecationWarning of '\d+'.
        index = int(re.findall(r'\d+', name)[0])
        # .text for consistency with the sibling variant; parseDOM works on str.
        rEL = requests.get(url, headers=headersget, timeout=15).text
        LoginCheck(rEL)
        results = [item for item in parseDOM(rEL, 'section')
                   if 'https://www.dramaqueen.pl/player.html' in item]
        avDlinks = [parseDOM(item, 'a', ret='href') for item in results][index - 1]
        avDplayers = [parseDOM(item, 'button') for item in results][index - 1]
        addon.SourceSelect(players=avDplayers, links=avDlinks, title=name)
    elif 'tłumaczeni' in name:
        # Still being translated - nothing playable yet.
        pass
    elif 'korekta' in name:
        # Still in correction - nothing playable yet.
        pass
    else:
        rML = requests.get(url, headers=headersget, timeout=15).text
        LoginCheck(rML)
        results2 = parseDOM(rML, 'section', attrs={'class': 'av_toggle_section'})
        avMlinks = [parseDOM(item, 'a', ret='href') for item in results2][0]
        avMplayers = [parseDOM(item, 'button') for item in results2][0]
        addon.SourceSelect(players=avMplayers, links=avMlinks, title=name)
def KategorieLista():
    """List DramaQueen slider entries with the Drama/Film tag in ``code`` (label2)."""
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    url = params['url']
    rG = requests.get(url, headers=headersget, timeout=15).text
    rG = CleanHTML(rG)
    # LoginCheck(url=rG)
    result = parseDOM(rG, 'div', attrs={'class': 'avia-content-slider-inner'})[0]
    label = [parseDOM(i, 'a', ret='title')[0] for i in parseDOM(result, 'h3')]
    obraz = parseDOM(result, 'img', ret='src')
    links = [parseDOM(i, 'a', ret='href')[0] for i in parseDOM(result, 'h3')]
    for item in zip(label, links, obraz):
        # URL path distinguishes multi-episode dramas from single films.
        if '/drama/' in str(item[1]):
            addon.addDir(str(item[0]), str(item[1]), mode=2,
                         fanart=str(item[2]), thumb=str(item[2]),
                         code='[B][COLOR %s]Drama[/COLOR][/B]' % 'green')
        elif '/film/' in str(item[1]):
            addon.addLink(str(item[0]), str(item[1]), mode=3,
                          fanart=str(item[2]), thumb=str(item[2]),
                          code='[B][COLOR %s]Film[/COLOR][/B]' % 'green')
    # Sort alphabetically; %P renders the code field as the second label.
    xbmcplugin.addSortMethod(int(sys.argv[1]),
                             sortMethod=xbmcplugin.SORT_METHOD_TITLE,
                             label2Mask='%P')
def ListTitles():
    """List titles from the 'av_section_1' grid, enriching metadata via dqscraper.

    Scraped values that come back empty fall back to the page's own
    title/plot/image; any scraper threads returned are started in a
    background worker at the end.
    """
    cookie = cache.cache_get('dramaqueen_cookie')['value']
    headersget.update({'Cookie': cookie})
    url = params['url']
    html = requests.get(url, headers=headersget, timeout=15).text
    html = CleanHTML(html)
    result = parseDOM(html, 'div', attrs={'id': 'av_section_1'})[0]
    results = re.findall(
        'flex_column ' + r'.+?' + 'av_one_fourth(.+?)</div></div></div>',
        result)
    titles = re.findall('><p>(.+?)</p>', result)
    linki = [item for item in parseDOM(results, 'a', ret='href')]
    # NOTE(review): [\s,\S,.] matches any character - presumably intended as
    # "any separator between the two <p> blocks"; confirm before tightening.
    Plot = re.findall('/p>[\s,\S,.]<p>(.+?)</p>', result)
    obrazy = parseDOM(results, 'img', ret='src')
    threads = []
    for item in zip(linki, titles, obrazy, Plot):
        # scraper_check returns cached/scraped metadata plus an optional
        # thread to fetch missing data - TODO confirm against dqscraper.
        title, poster, plot, banner, fanart, genre, year, thread = dqscraper.scraper_check(
            item[1], item[0], item[2])
        # Empty scraper fields fall back to values parsed from this page.
        if title == '':
            title = item[1]
        if plot == '':
            plot = item[3]
        if poster == '':
            poster = item[2]
        if banner == '':
            banner = item[2]
        if fanart == '':
            fanart = item[2]
        if genre == '':
            genre = ''
        if year == '':
            year = ''
        if thread:
            threads.append(thread)
        # URL path distinguishes multi-episode dramas from single films.
        if str(item[0]).__contains__('/drama/'):
            addon.addDir(str(title), str(item[0]), mode=2,
                         plot=(str(plot)), fanart=(str(fanart)),
                         isFolder=True, thumb=(str(poster)),
                         banner=(str(banner)), genre=str(genre),
                         year=str(year), section='')
        elif str(item[0]).__contains__('/film/'):
            addon.addLink(str(title), str(item[0]), mode=3,
                         plot=str(plot), fanart=str(fanart),
                         thumb=str(poster), banner=str(banner))
    # NOTE(review): `thread` here is whatever the LAST loop iteration left
    # behind, and a freshly created Thread is never alive, so the join branch
    # looks unreachable - the worker is simply started. Confirm intent.
    if thread:
        t = threading.Thread(target=ScrapInfo, args=(threads, ))
        if t.is_alive():
            t.join()
        else:
            t.start()
def ListEpisodes():
    """List Shinden episodes (all-episodes view) with the air date in ``code``.

    Online/offline status is derived from each row's FontAwesome icon name.
    """
    section = params['section']
    url = params['url'] + '/all-episodes'
    thumb = params['img']
    subdir = params['subdir']
    Logowanie()
    cookie = cache.cache_get('shinden_cookie')['value']
    headersget.update({'Cookie': cookie})
    headers = headersget
    html = requests.get(url, headers=headers, timeout=15).text
    result = parseDOM(html, 'tbody', attrs={'class': 'list-episode-checkboxes'})[0]
    results = parseDOM(result, 'tr')
    epNo = [parseDOM(item, 'td')[0] for item in results]
    epTitle = [parseDOM(item, 'td', attrs={'class': 'ep-title'})[0]
               for item in results]
    # The FontAwesome icon name encodes availability: 'check' means online.
    epstatus = [re.findall('<i class="fa fa-fw fa-(.+?)"></i>', item)[0]
                for item in results]
    epDate = [parseDOM(item, 'td', attrs={'class': 'ep-date'})[0]
              for item in results]
    link = [mainLink + re.sub('^/', '', parseDOM(item, 'a', ret='href')[0])
            for item in results]
    for ep in zip(epNo, epTitle, epDate, link, epstatus):
        # The date always goes into code (rendered via label2Mask below).
        code = '[B][COLOR=blue]%s[/COLOR][/B]' % (str(ep[2]))
        if str(ep[4]) == 'check':
            title = str(ep[0]) + ' : ' + str(ep[1])
            section = 'online'
        else:
            # The original's 'times' branch and its fallback were identical;
            # merged into one offline branch.
            title = str(ep[0]) + ' ' + '[COLOR=red] offline [/COLOR]'
            section = 'offline'
        addon.addLink(title, str(ep[3]), mode='SHListLinks',
                      fanart=str(thumb), thumb=str(thumb),
                      section=section, subdir=subdir, code=code)
    # Sort alphabetically; %P renders the code field as the second label.
    xbmcplugin.addSortMethod(int(sys.argv[1]),
                             sortMethod=xbmcplugin.SORT_METHOD_TITLE,
                             label2Mask='%P')