def list_all(item):
    """Scrape the listing page at item.url and return a list of Items.

    Each matched article yields either a tvshow Item (when the scraped
    type is 'anime') or a movie Item, routed to 'episodesxseason'.
    Appends a 'Siguiente página >>' pagination Item when a rel="next"
    link is found in the page.
    """
    logger.info()
    itemlist = []
    data = get_source(item.url)
    patron = "<article class='Anime alt B'><a href='([^']+)'>.*?class=.*?<img src='([^']+)' alt='([^']+)'>"
    patron += "</figure><span class='Type' .*?>([^']+)</span>.*?star.*?<p>([^<]+)</p>"
    matches = scrapertools.find_multiple_matches(data, patron)
    for scrapedurl, scrapedthumbnail, scrapedtitle, _type, plot in matches:
        url = scrapedurl
        thumbnail = host + scrapedthumbnail
        title = scrapedtitle
        season = ''
        if 'season' in scrapedtitle.lower():
            # Raw strings: '\d' in a non-raw literal is a SyntaxWarning
            # on Python >= 3.12 and will become an error.
            season = scrapertools.find_single_match(scrapedtitle, r'season (\d+)')
            # Strip the trailing "season N" so the bare show name is
            # used for contentSerieName.
            scrapedtitle = scrapertools.find_single_match(scrapedtitle, r'(.*?) season')
        new_item = Item(action='episodesxseason', channel=item.channel, plot=plot,
                        thumbnail=thumbnail, title=title, type=_type, url=url)
        if _type.lower() == 'anime':
            new_item.contentType = "tvshow"
            new_item.contentSerieName = scrapedtitle
            new_item.contentSeasonNumber = season
        else:
            new_item.contentType = "movie"
            new_item.contentTitle = scrapedtitle
        itemlist.append(new_item)

    # Paginacion
    next_patron = r'<a class="page-link" href="([^"]+)" rel="next">'
    next_page = scrapertools.find_single_match(data, next_patron)
    if next_page != '':
        # Relative pagination links need the host prefixed; absolute
        # ones are used as-is (idiomatic 'in' test instead of 'not host in').
        full_url = next_page if host in next_page else "%s%s" % (host, next_page)
        itemlist.append(
            Item(action="list_all", channel=item.channel,
                 thumbnail='https://s16.postimg.cc/9okdu7hhx/siguiente.png',
                 title="Siguiente página >>",
                 url=full_url,
                 viewType=item.viewType))
    return itemlist
def list_all(item):
    """Scrape series (or latest-episode) entries from item.url.

    When item.url contains 'episode', each entry is parsed as
    "<title> <season>x<episode>" and routed straight to 'findvideos';
    otherwise entries are series routed to 'seasons'. TMDB info is
    merged in, and a pagination Item is appended when a rel=next link
    is present.
    """
    logger.info()
    itemlist = []
    data = get_source(item.url)
    patron = '39;src=.*?(http.*?)style=display:.*?one-line href=(.*?) title=.*?>(.*?)<'
    matches = re.compile(patron, re.DOTALL).findall(data)
    for scrapedthumbnail, scrapedurl, scrapedtitle in matches:
        url = scrapedurl
        # Normalize the multiplication sign used in "SxE" markers.
        scrapedtitle = scrapedtitle.replace('×', 'x')
        contentSerieName = scrapedtitle
        action = 'seasons'
        if 'episode' in item.url:
            # Raw string: '\d' in a non-raw literal is a SyntaxWarning
            # on Python >= 3.12.
            scrapedtitle, season, episode = scrapertools.find_single_match(
                scrapedtitle, r'(.*?) (\d+).*?(?:x|X).*?(\d+)')
            contentSerieName = scrapedtitle
            scrapedtitle = '%sx%s - %s' % (season, episode, scrapedtitle)
            action = 'findvideos'
        thumbnail = scrapedthumbnail
        new_item = Item(channel=item.channel, title=scrapedtitle, url=url,
                        thumbnail=thumbnail, contentSerieName=contentSerieName,
                        action=action,
                        context=filtertools.context(item, list_language, list_quality))
        if 'episode' in item.url:
            new_item.contentSeasonNumber = season
            # Fixed casing: sibling code uses 'contentEpisodeNumber';
            # the lowercase 'contentepisodeNumber' attribute was never
            # read under that name.
            new_item.contentEpisodeNumber = episode
            new_item.context = []
        itemlist.append(new_item)
    tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True)

    # Paginacion
    next_page = scrapertools.find_single_match(data, 'rel=next href=(.*?)>»</a>')
    if next_page != '':
        itemlist.append(
            Item(channel=item.channel, action="list_all", title='Siguiente >>>',
                 url=next_page,
                 thumbnail='https://s16.postimg.cc/9okdu7hhx/siguiente.png',
                 type=item.type))
    return itemlist
def list_all(item):
    """Scrape anime/movie articles from item.url into a list of Items.

    Articles typed 'anime' get series/season metadata; anything else is
    treated as a movie title. Appends a '>> Página siguiente' Item when
    the pagination link is found.
    """
    logger.info()
    itemlist = []
    data = get_source(item.url)
    patron = "<article class='Anime alt B'><a href='(.*?)'>.*?class=.*?<img src='(.*?)' alt='(.*?)'>"
    patron += "</figure><span class='Type .*?'>(.*?)</span>.*?star.*?<p>(.*?)</p>"
    matches = re.compile(patron, re.DOTALL).findall(data)
    # '_type' instead of 'type': don't shadow the builtin (also drops
    # the redundant 'type = type' self-assignment).
    for scrapedurl, scrapedthumbnail, scrapedtitle, _type, plot in matches:
        url = host + scrapedurl
        thumbnail = host + scrapedthumbnail
        title = scrapedtitle
        season = ''
        if 'season' in scrapedtitle.lower():
            # Raw strings: '\d' in a non-raw literal is a SyntaxWarning
            # on Python >= 3.12.
            season = scrapertools.find_single_match(scrapedtitle, r'season (\d+)')
            scrapedtitle = scrapertools.find_single_match(scrapedtitle, r'(.*?) season')
        new_item = Item(channel=item.channel, action='episodios', title=title, url=url,
                        thumbnail=thumbnail, contentSerieName=scrapedtitle, plot=plot,
                        type=item.type, infoLabels={})
        if _type.lower() == 'anime':
            new_item.contentSerieName = scrapedtitle
            new_item.contentSeasonNumber = season
        else:
            new_item.contentTitle = scrapedtitle
        itemlist.append(new_item)

    # Paginacion
    next_page = scrapertools.find_single_match(
        data, "<li><a href='([^']+)'><span>»</span></a></li></ul>")
    if next_page != '':
        itemlist.append(
            Item(channel=item.channel, action="list_all", title=">> Página siguiente",
                 url=host + next_page,
                 thumbnail='https://s16.postimg.cc/9okdu7hhx/siguiente.png'))
    return itemlist
def list_all(item):
    """Scrape spotlight entries from item.url, paged 19 at a time.

    Uses item.first as an offset into the full match list; emits tvshow
    Items (action 'episodios') when a season/episode marker is present,
    movie Items (action 'findvideos') otherwise, then appends a
    'Siguiente >>' Item pointing either back at the same URL with an
    advanced offset or at the site's next page.
    """
    logger.info()
    itemlist = []
    # Renamed from 'next' so the builtin is not shadowed. True once the
    # local match list is exhausted and the site's next page is needed.
    reached_end = False
    data = get_source(item.url)
    patron = 'spotlight_container>.*?image lazy data-original=(.*?)>.*?<div class=spotlight_title>(.*?)<'
    patron += r'(.*?) sres>(\d{4})<.*?playLink href=(.*?)>'
    matches = re.compile(patron, re.DOTALL).findall(data)
    first = item.first
    last = first + 19
    if last > len(matches):
        last = len(matches)
        reached_end = True
    for scrapedthumbnail, scrapedtitle, type_data, year, scrapedurl in matches[first:last]:
        url = scrapedurl
        title = scrapedtitle
        # Raw strings: '\d' in a non-raw literal is a SyntaxWarning on
        # Python >= 3.12.
        season = scrapertools.find_single_match(
            type_data, r'class=title-season>Temporada<.*?> (\d+) <')
        episode = scrapertools.find_single_match(
            type_data, r'class=title-season>Episodio<.*?> (\d+) <')
        # NOTE(review): item.type is mutated on purpose — the cloned
        # pagination Item below inherits the last computed type.
        if season != '' or episode != '':
            item.type = 'tvshow'
        else:
            item.type = 'movie'
        new_item = Item(channel=item.channel, title=title, url=url,
                        thumbnail=scrapedthumbnail, type=item.type,
                        infoLabels={'year': year})
        if item.type == 'tvshow':
            new_item.action = 'episodios'
            new_item.contentSerieName = scrapedtitle
            season = season.strip()
            episode = episode.strip()
            if season == '':
                if 'Anime' in item.title:
                    season = 1
                else:
                    # Fall back to the 'temp-N' fragment in the URL.
                    season = scrapertools.find_single_match(url, r'.*?temp-(\d+)')
            new_item.contentSeasonNumber = season
            if episode != '':
                new_item.contentEpisodeNumber = episode
            if season != '' and episode != '':
                new_item.title = '%s %sx%s' % (new_item.title, season, episode)
            elif episode == '':
                new_item.title = '%s Temporada %s' % (new_item.title, season)
        else:
            new_item.action = 'findvideos'
            new_item.contentTitle = scrapedtitle
        itemlist.append(new_item)
    tmdb.set_infoLabels_itemlist(itemlist, seekTmdb=True)

    # Paginación
    if not reached_end:
        # Still more local matches: revisit the same URL with the
        # offset advanced past this page.
        url_next_page = item.url
        first = last
    else:
        url_next_page = scrapertools.find_single_match(
            data, "<a href=([^ ]+) class=page-link aria-label=Next>")
        first = 0
    if url_next_page:
        itemlist.append(item.clone(title="Siguiente >>", url=url_next_page,
                                   action='list_all', first=first))
    return itemlist