Example #1
def search(url=None):
    """Prompt for a search term when no url is given, then list the matching shows."""
    if not url:
        heading = xbmcaddon.Addon().getLocalizedString(33301)
        s = common.input(heading)
        if s:
            url = config.search_url % s
        else:
            return []

    di_list = []
    for ori_name, show_url, image, info in scrapers.search(url):
        action_url = common.action_url('sources', url=show_url)
        cm = _saved_to_list_context_menu(ori_name, show_url, image)
        di_list.append(
            common.diritem(ori_name,
                           action_url,
                           image,
                           context_menu=cm,
                           info=info))

    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('search', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))

    if not di_list:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return di_list
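
The helpers used above (common, scrapers, cleanstring, config, and the xbmcaddon string IDs) come from the surrounding Kodi add-on and are not part of this listing. As a minimal sketch, assuming each common.diritem(...) call returns a (url, xbmcgui.ListItem, isFolder) tuple, the resulting di_list could be handed to Kodi like this:

import sys
import xbmcplugin

def render(di_list):
    # The plugin handle is passed by Kodi as the second process argument.
    handle = int(sys.argv[1])
    # addDirectoryItems expects a list of (url, listitem, isFolder) tuples.
    xbmcplugin.addDirectoryItems(handle, di_list, len(di_list))
    xbmcplugin.endOfDirectory(handle)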
Example #2
def shows(url):
    """List the shows scraped from url, followed by pagination entries."""
    di_list = []
    for eng_name, ori_name, show_url, image in scrapers.shows(url):
        action_url = common.action_url('versions', url=show_url)
        name = cleanstring.show(eng_name, ori_name)
        cm = _saved_to_list_context_menu(eng_name, ori_name, show_url, image)
        di_list.append(common.diritem(name, action_url, image, context_menu=cm))
    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('shows', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))
    return di_list
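
cleanstring.show and cleanstring.page are also defined elsewhere in the add-on; judging by how they are called, they only build display labels. A hypothetical, purely illustrative version of cleanstring.show that merges the English and original-language titles might look like:

def show(eng_name, ori_name):
    # Illustrative only; the real cleanstring.show lives in the add-on's own module.
    if eng_name and ori_name and eng_name != ori_name:
        return '%s (%s)' % (eng_name, ori_name)
    return eng_name or ori_name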
Example #4
def shows(url):
    """List the shows scraped from url; pop a message and return None if nothing is found."""
    di_list = []
    for all_title, show_url, image in scrapers.shows(url):
        action_url = common.action_url('sources', url=show_url)
        name = all_title
        cm = _saved_to_list_context_menu(all_title, show_url, image)
        di_list.append(common.diritem(name, action_url, image, context_menu=cm))

    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('shows', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))

    if not di_list:
        common.popup(common.getMessage(33305))
        return None

    return di_list
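
Unlike Examples 1 and 2, this variant pops a "no results" message (string 33305 via common.getMessage) and returns None instead of an empty list, so callers have to handle both conventions. A hypothetical caller that tolerates either return value:

def route_shows(url):
    # Illustrative only: normalize the None-vs-empty-list return conventions.
    items = shows(url)
    return items if items else []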
Example #6
def search(url=None):
    """Prompt for a search term when no url is given, then list the matching shows."""
    if not url:
        heading = xbmcaddon.Addon().getLocalizedString(33301)
        s = common.input(heading)
        if s:
            url = config.search_url % urllib.quote(s.encode('utf8'))
        else:
            return []
    di_list = []
    for eng_name, ori_name, show_url, image in scrapers.search(url):
        action_url = common.action_url('versions', url=show_url)
        name = cleanstring.show(eng_name, ori_name)
        cm = _saved_to_list_context_menu(eng_name, ori_name, show_url, image)
        di_list.append(common.diritem(name, action_url, image, context_menu=cm))
    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('search', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))
    if not di_list:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return di_list
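
The urllib.quote(s.encode('utf8')) call marks this as Python 2 code (Kodi add-ons before Kodi 19 "Matrix" ran on Python 2). On Python 3 the equivalent escaping of the search term would use urllib.parse.quote; config.search_url is the add-on's own format string, shown here with a placeholder URL:

from urllib.parse import quote

search_url = 'https://example.org/search?q=%s'  # placeholder standing in for config.search_url
term = 'fullmetal alchemist'
url = search_url % quote(term)  # quote() accepts str directly on Python 3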