def sources(url):
    """Build directory items for the playable sources found at *url*.

    Each scraped (name, source_url) pair becomes an 'episodes' action
    entry.  If the scraper yields nothing, show the "no results" popup
    (message 33305) and return None so the caller can abort listing.

    :param url: show page URL handed to the scraper.
    :returns: list of diritems, or None when no sources were found.
    """
    di_list = []
    for name, source_url in scrapers.sources(url):
        action_url = common.action_url('episodes', url=source_url)
        di_list.append(common.diritem(name, action_url))
    # Idiomatic emptiness test (was: len(di_list) <= 0).
    if not di_list:
        common.popup(common.getMessage(33305))
        return None
    return di_list
def filters(url):
    """Build filter-category directory items for *url* (c1/c2/c3 variant).

    Picks the scraper column index ('c1', 'c2' or 'c3') and the follow-up
    action from the URL's dash-filter pattern, prepends an "all" entry
    (message 33007) that lists shows for the current url unfiltered, then
    adds one entry per filter value returned by ``scrapers.types``.

    :param url: category/filter page URL.
    :returns: list of diritems (never empty: the "all" entry is
        unconditional), or None on the defensive no-results path.
    """
    di_list = []
    index = 'c1'
    next_action = 'filters'
    if re.match(r'(.*)/------1(.*)', url):
        index = 'c3'
    elif re.match(r'(.*)/--(.*)----1(.*)', url):
        # Deepest filter level reached: next click lists shows directly.
        index = 'c2'
        next_action = 'shows'
    # Unconditional "all" entry -> di_list can never be empty below.
    all_url = common.action_url('shows', url=url)
    di_list.append(common.diritem(common.getMessage(33007), all_url, ''))
    for all_title, show_url in scrapers.types(url, index):
        action_url = common.action_url(next_action, url=show_url)
        di_list.append(common.diritem(all_title, action_url, ''))
    # Defensive only — unreachable given the "all" entry; kept for
    # consistency with the sibling listing functions.
    if not di_list:
        common.popup(common.getMessage(33305))
        return None
    return di_list
def filters(url):
    """Build filter directory items for *url* (numeric-index variant).

    Maps ``id/N.html``-style URL patterns to a ``scrapers.types`` column
    ``index`` and decides whether the next action keeps filtering or
    lists shows.  NOTE(review): what each index value (0-4) selects is
    defined by the scraper module and not visible here — confirm there.
    """
    di_list = []
    index = 0
    nextAction = 'filters'
    # id/2.html and id/13..16.html category pages.
    # NOTE(review): dots in these patterns are unescaped, so they match
    # any character — presumably intentional-enough for these URLs.
    if re.match(r'(.)*id/(2|1[3-6])(.)*.html', url):
        if re.match(r'(.)*id/2.html', url):
            index = 1
        else:
            index = 4
    elif re.match(r'(.)*id/1.html', url):
        index = 2
        if re.match(r'(.)*area/(.)*', url):
            # An area filter is already applied; switch column.
            index = 4
        elif re.match(r'(.)*id/3.html', url):
            # NOTE(review): reconstructed nesting — this inner id/3
            # branch can only fire when the url ALSO matched id/1.html
            # above; verify against the original (pre-minified) file.
            index = 2
            nextAction = 'shows'
    elif re.match(r'(.)*id/3.html', url):
        index = 2
        nextAction = 'shows'
    # A language filter is the last refinement: next click lists shows.
    if re.match(r'(.)*lang/(.)*', url):
        index = 3
        nextAction = 'shows'
    # Unconditional "all shows" entry (message 33007).
    action_url = common.action_url('shows', url=url)
    di_list.append(common.diritem(common.getMessage(33007), action_url, ''))
    for all_title, show_url, image in scrapers.types(url, index):
        action_url = common.action_url(nextAction, url=show_url)
        name = all_title
        di_list.append(common.diritem(name, action_url, image))
    # Unreachable in practice: the "all" entry above is unconditional.
    if len(di_list) <= 0:
        common.popup(common.getMessage(33305))
        return None
    return di_list
def shows(url):
    """List the shows at *url* plus pagination entries.

    Each scraped (title, url, image) triple becomes a 'sources' action
    entry carrying a save-to-list context menu; each pagination link from
    ``scrapers.pages`` becomes a 'shows' action entry.  Shows the
    "no results" popup (message 33305) and returns None when both
    scrapers come back empty.

    :param url: listing page URL.
    :returns: list of diritems, or None when nothing was found.
    """
    di_list = []
    for all_title, show_url, image in scrapers.shows(url):
        action_url = common.action_url('sources', url=show_url)
        cm = _saved_to_list_context_menu(all_title, show_url, image)
        di_list.append(common.diritem(all_title, action_url, image, context_menu=cm))
    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('shows', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))
    # Idiomatic emptiness test (was: len(di_list) <= 0).
    if not di_list:
        common.popup(common.getMessage(33305))
        return None
    return di_list
def filters(url):
    """Build filter directory items for *url* (dash-separated variant).

    Decides the ``scrapers.types`` column ``index`` and next action from
    the ``N-...----------.html`` URL pattern; URLs matching none of the
    known filter patterns are delegated straight to ``shows``.
    NOTE(review): what each index value (0-2) selects is defined by the
    scraper module and not visible here — confirm there.
    """
    di_list = []
    index = 0
    nextAction = 'filters'
    if re.match(r'(.)*2-----------\.html', url):
        # Category 2 has no sub-filters: next click lists shows.
        nextAction = 'shows'
    elif re.match(r'(.)*1-(.)*----------\.html', url):
        index = 1
        # A non-empty value in the first filter slot means the last
        # refinement was chosen ((.)+ vs (.)* above).
        if re.match(r'(.)*1-(.)+----------\.html', url):
            index = 2
            nextAction = 'shows'
    elif re.match(r'(.)*3-----------\.html', url):
        index = 1
        nextAction = 'shows'
    else:
        # Not a recognised filter page — treat it as a show listing.
        return shows(url)
    # Unconditional "all shows" entry (message 33007), so the result is
    # never empty.
    action_url = common.action_url('shows', url=url)
    di_list.append(common.diritem(common.getMessage(33007), action_url, ''))
    for all_title, show_url, image in scrapers.types(url, index):
        action_url = common.action_url(nextAction, url=show_url)
        name = all_title
        di_list.append(common.diritem(name, action_url, image))
    return di_list