def shows(url):
    """List the shows found at *url*, plus pagination entries.

    Returns a list of directory items: one per scraped show (labelled
    with its cleaned English/original name and carrying a save-to-list
    context menu), followed by one item per result page.
    """
    entries = []
    for eng_name, ori_name, show_url, image in scrapers.shows(url):
        label = cleanstring.show(eng_name, ori_name)
        menu = _saved_to_list_context_menu(eng_name, ori_name, show_url, image)
        entries.append(common.diritem(
            label,
            common.action_url('versions', url=show_url),
            image,
            context_menu=menu,
        ))
    # Pagination: each page link re-enters this 'shows' action.
    for page, page_url in scrapers.pages(url):
        entries.append(common.diritem(
            cleanstring.page(page),
            common.action_url('shows', url=page_url),
        ))
    return entries
def saved_list():
    """Build directory items for every show in the user's saved list.

    Each entry opens the show's versions; its context menu offers a
    localized "remove from saved list" action (string 33109) that runs
    the plugin's 'remove_saved' action for this show.
    """
    entries = []
    for eng_name, ori_name, show_url, image in _get_saved_list():
        remove_url = common.action_url(
            'remove_saved',
            eng_name=eng_name,
            ori_name=ori_name,
            show_url=show_url,
            image=image,
        )
        menu = [(
            xbmcaddon.Addon().getLocalizedString(33109),
            common.run_plugin_builtin_url(remove_url),
        )]
        entries.append(common.diritem(
            cleanstring.show(eng_name, ori_name),
            common.action_url('versions', url=show_url),
            image,
            context_menu=menu,
        ))
    return entries
def search(url=None):
    """Search for shows, prompting the user for a query when *url* is None.

    Cancelling the prompt (empty input) returns an empty list.  Otherwise
    returns directory items for every match (with a save-to-list context
    menu) plus pagination entries; pops up a localized "no results"
    message (string 33304) when nothing is found.
    """
    if not url:
        query = common.input(xbmcaddon.Addon().getLocalizedString(33301))
        if not query:
            return []
        url = config.search_url % quote(query)
    entries = []
    for eng_name, ori_name, show_url, image in scrapers.search(url):
        label = cleanstring.show(eng_name, ori_name)
        menu = _saved_to_list_context_menu(eng_name, ori_name, show_url, image)
        entries.append(common.diritem(
            label,
            common.action_url('versions', url=show_url),
            image,
            context_menu=menu,
        ))
    # Pagination: each page link re-enters this 'search' action.
    for page, page_url in scrapers.pages(url):
        entries.append(common.diritem(
            cleanstring.page(page),
            common.action_url('search', url=page_url),
        ))
    if not entries:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return entries
# NOTE(review): this is a duplicate definition of search() and shadows the
# one above at import time — one of the two should be deleted.
def search(url=None):
    """Search for shows, prompting the user for a query when *url* is None.

    Cancelling the prompt (empty input) returns an empty list.  Otherwise
    returns directory items for every match (with a save-to-list context
    menu) plus pagination entries; pops up a localized "no results"
    message (string 33304) when nothing is found.
    """
    if not url:
        heading = xbmcaddon.Addon().getLocalizedString(33301)
        s = common.input(heading)
        if s:
            # Bug fix: `urllib.quote` is Python 2 API and raises
            # AttributeError on Python 3; use urllib.parse.quote (already
            # used by the sibling search() above).  quote() accepts str
            # directly, so the manual .encode('utf8') is unnecessary.
            url = config.search_url % quote(s)
        else:
            return []
    di_list = []
    for eng_name, ori_name, show_url, image in scrapers.search(url):
        action_url = common.action_url('versions', url=show_url)
        name = cleanstring.show(eng_name, ori_name)
        cm = _saved_to_list_context_menu(eng_name, ori_name, show_url, image)
        di_list.append(common.diritem(name, action_url, image, context_menu=cm))
    # Pagination: each page link re-enters this 'search' action.
    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('search', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))
    if not di_list:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return di_list