Exemplo n.º 1
0
def search(url=None):
    """List search results; prompt the user for a term when no url is given.

    Returns a list of directory items, or [] when the user cancels the
    prompt. Pops up a "no results" message when nothing was found.
    """
    if not url:
        heading = xbmcaddon.Addon().getLocalizedString(33301)
        s = common.input(heading)
        if not s:
            return []
        # BUGFIX: URL-encode the search term — spaces/unicode would
        # otherwise produce a malformed search URL (the other search()
        # examples in this file already quote the term).
        try:
            from urllib.parse import quote  # Python 3
        except ImportError:
            from urllib import quote  # Python 2
        url = config.search_url % quote(s.encode('utf8'))

    di_list = []
    for ori_name, show_url, image, info in scrapers.search(url):
        action_url = common.action_url('sources', url=show_url)
        cm = _saved_to_list_context_menu(ori_name, show_url, image)
        di_list.append(
            common.diritem(ori_name,
                           action_url,
                           image,
                           context_menu=cm,
                           info=info))

    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('search', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))

    if not di_list:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return di_list
Exemplo n.º 2
0
def shows(url):
    """Build directory items for each show on the page, plus paging links."""
    items = []
    for eng_name, ori_name, show_url, image in scrapers.shows(url):
        label = cleanstring.show(eng_name, ori_name)
        menu = _saved_to_list_context_menu(eng_name, ori_name, show_url, image)
        target = common.action_url('versions', url=show_url)
        items.append(common.diritem(label, target, image, context_menu=menu))
    for page, page_url in scrapers.pages(url):
        items.append(common.diritem(cleanstring.page(page),
                                    common.action_url('shows', url=page_url)))
    return items
def shows(url):
    """Directory listing for a shows page: one item per show + page links."""
    show_items = [
        common.diritem(
            cleanstring.show(eng, ori),
            common.action_url('versions', url=href),
            img,
            context_menu=_saved_to_list_context_menu(eng, ori, href, img))
        for eng, ori, href, img in scrapers.shows(url)
    ]
    page_items = [
        common.diritem(cleanstring.page(page),
                       common.action_url('shows', url=page_url))
        for page, page_url in scrapers.pages(url)
    ]
    return show_items + page_items
Exemplo n.º 4
0
def recent_updates(url):
    """List recently updated entries; each item opens its mirror listing."""
    return [
        common.diritem(name, common.action_url('mirrors', url=update_url))
        for name, update_url in scrapers.recent_updates(url)
    ]
Exemplo n.º 5
0
def shows(url):
    """List shows plus paging links; popup + None when the page is empty.

    Returns None (not []) on an empty listing so the caller can tell
    "nothing found" apart from a normal empty directory.
    """
    di_list = []
    for all_title, show_url, image in scrapers.shows(url):
        action_url = common.action_url('sources', url=show_url)
        cm = _saved_to_list_context_menu(all_title, show_url, image)
        # all_title is used directly as the label (no cleanup needed here).
        di_list.append(common.diritem(all_title, action_url, image,
                                      context_menu=cm))

    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('shows', url=page_url)
        di_list.append(common.diritem(cleanstring.page(page), action_url))

    if not di_list:  # idiomatic emptiness check (was len(di_list) <= 0)
        common.popup(common.getMessage(33305))
        return None

    return di_list
Exemplo n.º 6
0
def _mirrors(url, title):
    """Playable (non-folder) items for every mirror under a category page."""
    items = []
    for label, href, image in scrapers.categories(url):
        play_url = common.action_url('play_mirror',
                                     url=href,
                                     title=title + '-' + label)
        items.append(common.diritem(label, play_url, image, isfolder=False))
    return items
Exemplo n.º 7
0
def index():
    """Top-level directory: one item per TV source on the index page."""
    listing_url = urljoin(config.base_url, '/iptva.php?app=126')
    return [
        common.diritem(tv_title,
                       common.action_url('tv',
                                         url=urljoin(config.base_url, tv_url)),
                       image)
        for tv_title, tv_url, image in scrapers.getTVSources(listing_url)
    ]
Exemplo n.º 8
0
def tv(url):
    """List the mirrors entry for each TV source found at *url*."""
    items = []
    for title, href, image in scrapers.getTVSources(url):
        target = common.action_url('mirrors',
                                   url=urljoin(config.base_url, href),
                                   title=title)
        items.append(common.diritem(title, target, image))
    return items
def _episodes(url):
    """Episode listing for a show; falls back to mirrors when none exist."""
    episodes = scrapers.episodes(url)
    if not episodes:
        return _mirrors(url)
    return [
        common.diritem(cleanstring.episode(name),
                       common.action_url('mirrors', url=episode_url))
        for name, episode_url in episodes
    ]
Exemplo n.º 10
0
def sources(url):
    """List episode sources; popup + None when the page has none.

    Returns None (not []) on empty so the caller can distinguish
    "nothing found" from an ordinary empty directory.
    """
    di_list = [
        common.diritem(name, common.action_url('episodes', url=source_url))
        for name, source_url in scrapers.sources(url)
    ]

    if not di_list:  # idiomatic emptiness check (was len(di_list) <= 0)
        common.popup(common.getMessage(33305))
        return None

    return di_list
Exemplo n.º 11
0
def _episodes(url):
    """Playable-episode items for *url*; [] when the page lists none."""
    items = []
    # Iterating an empty listing naturally yields [], matching the
    # original explicit length check.
    for name, episode_url in scrapers.episodes(url):
        target = common.action_url('play_mirror', url=episode_url)
        items.append(common.diritem(cleanstring.episode(name), target))
    return items
def versions(url):
    """Version picker; skips straight to episodes when only one exists."""
    available = scrapers.versions(url)
    if len(available) == 1:
        _, href = available[0]
        return _episodes(href)
    return [
        common.diritem(cleanstring.version(label),
                       common.action_url('episodes', url=version_url))
        for label, version_url in available
    ]
Exemplo n.º 13
0
def saved_list():
    """Directory of saved shows, each with a 'remove' context-menu entry."""
    items = []
    for eng_name, ori_name, show_url, image in _get_saved_list():
        label = cleanstring.show(eng_name, ori_name)
        target = common.action_url('versions', url=show_url)
        remove_url = common.action_url('remove_saved', eng_name=eng_name,
                                       ori_name=ori_name, show_url=show_url,
                                       image=image)
        menu = [(xbmcaddon.Addon().getLocalizedString(33109),
                 common.run_plugin_builtin_url(remove_url))]
        items.append(common.diritem(label, target, image, context_menu=menu))
    return items
Exemplo n.º 14
0
def search(url=None):
    """Search results listing; prompts the user for a term when url is absent."""
    if not url:
        term = common.input(xbmcaddon.Addon().getLocalizedString(33301))
        if not term:
            return []
        url = config.search_url % urllib.quote(term.encode('utf8'))

    items = []
    for eng_name, ori_name, show_url, image in scrapers.search(url):
        menu = _saved_to_list_context_menu(eng_name, ori_name, show_url, image)
        items.append(common.diritem(cleanstring.show(eng_name, ori_name),
                                    common.action_url('versions', url=show_url),
                                    image,
                                    context_menu=menu))
    for page, page_url in scrapers.pages(url):
        items.append(common.diritem(cleanstring.page(page),
                                    common.action_url('search', url=page_url)))

    if not items:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return items
def search(url=None):
    """Show search results for *url*, prompting for a query when none given."""
    if not url:
        heading = xbmcaddon.Addon().getLocalizedString(33301)
        query = common.input(heading)
        if not query:
            return []
        url = config.search_url % urllib.quote(query.encode('utf8'))
    di_list = []
    for eng_name, ori_name, show_url, image in scrapers.search(url):
        name = cleanstring.show(eng_name, ori_name)
        di_list.append(common.diritem(
            name,
            common.action_url('versions', url=show_url),
            image,
            context_menu=_saved_to_list_context_menu(eng_name, ori_name,
                                                     show_url, image)))
    for page, page_url in scrapers.pages(url):
        di_list.append(common.diritem(cleanstring.page(page),
                                      common.action_url('search', url=page_url)))
    if not di_list:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return di_list
Exemplo n.º 16
0
def saved_list():
    """Saved-shows directory; each entry carries a remove-from-list menu."""
    items = []
    for all_title, show_url, image in _get_saved_list():
        remove_url = common.action_url('remove_saved', all_title=all_title,
                                       show_url=show_url, image=image)
        cm = [(xbmcaddon.Addon().getLocalizedString(33109),
               common.run_plugin_builtin_url(remove_url))]
        items.append(common.diritem(all_title,
                                    common.action_url('episodes', url=show_url),
                                    image,
                                    context_menu=cm))
    return items
Exemplo n.º 17
0
def versions(url):
    """List available versions, or jump directly into a lone version."""
    found = scrapers.versions(url)
    if len(found) == 1:
        return _episodes(found[0][1])
    di_list = []
    for label, version_url in found:
        entry = common.diritem(cleanstring.version(label),
                               common.action_url('episodes', url=version_url))
        di_list.append(entry)
    return di_list
def saved_list():
    """Build the saved-shows directory with per-item remove context menus."""
    di_list = []
    for eng_name, ori_name, show_url, image in _get_saved_list():
        remove_action = common.action_url('remove_saved', eng_name=eng_name,
                                          ori_name=ori_name, show_url=show_url,
                                          image=image)
        context = [(xbmcaddon.Addon().getLocalizedString(33109),
                    common.run_plugin_builtin_url(remove_action))]
        di_list.append(
            common.diritem(cleanstring.show(eng_name, ori_name),
                           common.action_url('versions', url=show_url),
                           image,
                           context_menu=context))
    return di_list
Exemplo n.º 19
0
def filters(url):
    """Filter-category directory for *url*.

    The scraper column index and follow-up action depend on how deep
    into the filter hierarchy the url already is.
    """
    index = 'c1'
    nextAction = 'filters'
    if re.match(r'(.*)/------1(.*)', url):
        index = 'c3'
    elif re.match(r'(.*)/--(.*)----1(.*)', url):
        index = 'c2'
        nextAction = 'shows'

    # The "all shows" entry always comes first.
    di_list = [common.diritem(common.getMessage(33007),
                              common.action_url('shows', url=url), '')]
    for all_title, show_url in scrapers.types(url, index):
        di_list.append(common.diritem(all_title,
                                      common.action_url(nextAction,
                                                        url=show_url),
                                      ''))

    # Defensive only: di_list always holds at least the entry appended
    # above, so this branch is effectively unreachable; kept for parity
    # with callers that treat None as "nothing found".
    if not di_list:  # idiomatic (was len(di_list) <= 0)
        common.popup(common.getMessage(33305))
        return None

    return di_list
Exemplo n.º 20
0
def filters(url):
    """Filter-category directory; picks scraper column and next action
    from patterns in *url*.

    NOTE(review): the dots in patterns like 'id/2.html' are unescaped
    regex dots (match any char) — kept as-is to preserve matching.
    """
    di_list = []
    index = 0
    nextAction = 'filters'
    if re.match(r'(.)*id/(2|1[3-6])(.)*.html', url):
        if re.match(r'(.)*id/2.html', url):
            index = 1
        else:
            index = 4
    elif re.match(r'(.)*id/1.html', url):
        index = 2
        if re.match(r'(.)*area/(.)*', url):
            index = 4
    elif re.match(r'(.)*id/3.html', url):
        index = 2
        nextAction = 'shows'
    # BUGFIX: a second elif with the identical 'id/3.html' condition and
    # body followed here; it was unreachable dead code and was removed.

    if re.match(r'(.)*lang/(.)*', url):
        index = 3
        nextAction = 'shows'

    # The "all shows" entry always comes first.
    action_url = common.action_url('shows', url=url)
    di_list.append(common.diritem(common.getMessage(33007), action_url, ''))
    for all_title, show_url, image in scrapers.types(url, index):
        di_list.append(common.diritem(all_title,
                                      common.action_url(nextAction,
                                                        url=show_url),
                                      image))

    # Defensive; the entry appended above makes this unreachable.
    if not di_list:  # idiomatic (was len(di_list) <= 0)
        common.popup(common.getMessage(33305))
        return None

    return di_list
def _mirrors(url):
    """Flat list of playable mirror parts; resolves the page itself if none."""
    mirrors = scrapers.mirrors(url)
    if not mirrors:
        # No mirror listing on the page: try resolving the url directly.
        play_mirror(url)
        return []
    return [
        common.diritem(cleanstring.mirror(mirr_label, part_label),
                       common.action_url('play_mirror', url=part_url),
                       isfolder=False)
        for mirr_label, parts in mirrors
        for part_label, part_url in parts
    ]
Exemplo n.º 22
0
def filters(url):
    """Category directory for the filter pages; defers to shows() otherwise."""
    index = 0
    next_action = 'filters'
    if re.match(r'(.)*2-----------\.html', url):
        next_action = 'shows'
    elif re.match(r'(.)*1-(.)*----------\.html', url):
        index = 1
        if re.match(r'(.)*1-(.)+----------\.html', url):
            index = 2
            next_action = 'shows'
    elif re.match(r'(.)*3-----------\.html', url):
        index = 1
        next_action = 'shows'
    else:
        # Not a recognized filter url: show it as a plain listing.
        return shows(url)

    # "All shows" entry first, then one item per filter category.
    di_list = [common.diritem(common.getMessage(33007),
                              common.action_url('shows', url=url), '')]
    for title, show_url, image in scrapers.types(url, index):
        di_list.append(common.diritem(title,
                                      common.action_url(next_action,
                                                        url=show_url),
                                      image))
    return di_list
Exemplo n.º 23
0
def _mirrors(url):
    """Build playable items for every mirror part of *url*."""
    listing = scrapers.mirrors(url)
    if not listing:
        # No mirror listing found; attempt to resolve this page directly.
        play_mirror(url)
        return []
    di_list = []
    for mirror_label, parts in listing:
        for part_label, part_url in parts:
            di_list.append(common.diritem(
                cleanstring.mirror(mirror_label, part_label),
                common.action_url('play_mirror', url=part_url),
                isfolder=False))
    return di_list
Exemplo n.º 24
0
def versions(url):
    """Version directory; optionally auto-selects the preferred version.

    With the 'auto_select_version' setting on, the first listed version
    whose cleaned label contains the configured version string is opened
    directly instead of being listed.
    """
    versions = scrapers.versions(url)
    if len(versions) == 1:
        ver, href = versions[0]
        return _episodes(href)

    # Hoisted out of the loop: the setting and desired string do not
    # change per iteration.
    auto = auto_select.settings_is_set('auto_select_version')
    desire_version = auto_select.get_version_string() if auto else ''

    di_list = []
    for label, version_url in versions:
        ver = cleanstring.version(label)
        di_list.append(common.diritem(ver,
                                      common.action_url('episodes',
                                                        url=version_url)))

        if auto and desire_version != '' and desire_version in ver.lower():
            common.notify(heading="Auto picked version",
                          message="Picked {}".format(ver))
            # BUGFIX: open the version that actually matched; previously
            # this returned versions[0][1], ignoring the match entirely.
            return _episodes(version_url)

    return di_list
def _mirrors(url):
    """Mirror listing; with auto_select_mirror on, plays the first 'Full' part.

    Always returns a list: [] after auto-playing or resolving directly,
    otherwise directory items for every mirror part.
    """
    mirrors = scrapers.mirrors(url)
    if not mirrors:
        # No mirror listing on the page: try to resolve it directly.
        play_mirror(url)
        return []

    if xbmcaddon.Addon().getSetting('auto_select_mirror') == 'true':
        for mirr_label, parts in mirrors:
            if "Full" in parts[0][0]:
                if play_mirror(parts[0][1]):
                    return []
        # BUGFIX: previously fell through and implicitly returned None
        # when no 'Full' part could be played; fall back to the normal
        # mirror listing instead.

    di_list = []
    for mirr_label, parts in mirrors:
        for part_label, part_url in parts:
            label = cleanstring.mirror(mirr_label, part_label)
            action_url = common.action_url('play_mirror', url=part_url)
            di_list.append(common.diritem(label, action_url, isfolder=False))
    return di_list
Exemplo n.º 26
0
def _mirrors(url):
    """Mirror-part directory, with optional auto-play of the first mirror."""
    mirrors = scrapers.mirrors(url)
    if not mirrors:
        # Nothing listed: try to resolve the page itself.
        play_mirror(url)
        return []

    di_list = [
        common.diritem(cleanstring.mirror(mirr_label, part_label),
                       common.action_url('play_mirror', url=part_url),
                       isfolder=False)
        for mirr_label, parts in mirrors
        for part_label, part_url in parts
    ]

    if auto_select.settings_is_set('auto_select_first_mirror'):
        summary = ", ".join("{} (count: {})".format(mirr_label, len(parts))
                            for mirr_label, parts in mirrors)
        common.notify(heading="Other mirrors exists",
                      message=summary,
                      time=6000)
        play_mirror(url)
    return di_list
Exemplo n.º 27
0
# Add-on configuration for the ggiptv source (Python 3: urllib.parse).
import os.path
from urllib.parse import urljoin
from lib.common import diritem, action_url, profile_dir

# Site root; all listing/search urls are joined onto this.
base_url = 'https://player.ggiptv.com'
# Pickle files kept in the add-on's profile directory.
cache_file = os.path.join(profile_dir, 'cache.pickle')
store_file = os.path.join(profile_dir, 'store.pickle')

# the trailing forward slashes are necessary
# without it, page urls will be wrong (icdrama bug)
# %s is filled in with the (url-quoted) search term.
search_url = urljoin(base_url, '/vodsearch/%s----------1---.html')
# Root-menu entries; the numeric ids are localized-string ids.
index_items = [
    # diritem(33000, action_url('saved_list')),
    diritem(33005,
            action_url('shows', url=urljoin(base_url, '/iptv.php?tid=gt'))),
    # diritem(33001, action_url('filters', url=urljoin(base_url, '/vodshow/2-----------.html'))),
    # diritem(33002, action_url('filters', url=urljoin(base_url, '/vodshow/1-----------.html'))),
    # diritem(33003, action_url('filters', url=urljoin(base_url, '/vodshow/3-----------.html'))),
    # diritem(33004, action_url('shows', url=urljoin(base_url, '/vodshow/4-----------.html'))),
]
Exemplo n.º 28
0
# Add-on configuration for the pianku source (Python 2: urlparse module).
import os.path
from urlparse import urljoin
from lib.common import diritem, action_url, profile_dir

# Site root; all listing/search urls are joined onto this.
base_url = 'https://www.pianku.me'
# Pickle files kept in the add-on's profile directory.
cache_file = os.path.join(profile_dir, 'cache.pickle')
store_file = os.path.join(profile_dir, 'store.pickle')

# the trailing forward slashes are necessary
# without it, page urls will be wrong (icdrama bug)
# %s is filled in with the (url-quoted) search term.
search_url = urljoin(base_url, '/s/%s.html')
# Root-menu entries; the numeric ids are localized-string ids.
index_items = [
    diritem(33000, action_url('saved_list')),
    diritem(
        33005,
        action_url('shows',
                   url=urljoin(base_url,
                               '/tv/--_E9_A6_99_E6_B8_AF----1.html'))),
    diritem(33001,
            action_url('filters', url=urljoin(base_url, '/tv/------1.html'))),
    diritem(33002,
            action_url('filters', url=urljoin(base_url, '/mv/------1.html'))),
    diritem(33004,
            action_url('filters', url=urljoin(base_url, '/ac/------1.html'))),
    diritem(33006, action_url('search'))
]
Exemplo n.º 29
0
import os.path
from urllib.parse import urljoin
from lib.common import diritem, action_url, profile_dir

base_url = 'https://www.iyueyuz.com'
video_url = 'https://www.iyueyuz.com'
cache_file = os.path.join(profile_dir, 'cache.pickle')
store_file = os.path.join(profile_dir, 'store.pickle')

# the trailing forward slashes are necessary
# without it, page urls will be wrong (icdrama bug)
search_url = urljoin(base_url, '/index.php/vod/search/page/1/wd/%s.html')
index_items = [
    diritem(33000, action_url('saved_list')),
    diritem(
        33005,
        action_url(
            'shows',
            url=urljoin(
                base_url,
                '/index.php%2Fvod%2Fshow%2Fid%2F14%2Flang%2F%E7%B2%A4%E8%AF%AD.html'
            ))),
    diritem(
        33001,
        action_url('filters',
                   url=urljoin(base_url, '/index.php/vod/show/id/2.html'))),
    diritem(
        33002,
        action_url('filters',
                   url=urljoin(base_url, '/index.php/vod/show/id/1.html'))),
    diritem(
def versions(url):
    """Version directory; with auto-select on, ranks versions by the
    user's configured language priorities and jumps straight into the
    top-priority version when it is unambiguous.
    """
    versions = scrapers.versions(url)
    if len(versions) == 1:
        # Only one version: skip the menu entirely.
        ver, href = versions[0]
        return _episodes(href)
    else:
        auto_select = xbmcaddon.Addon().getSetting('auto_select_version')
        if auto_select == 'true':
            # NOTE(review): the two triple-quoted blocks below are dead
            # commented-out code kept as string literals (no effect at
            # runtime beyond building the strings) — candidates for removal.
            '''priorities = [('Cantonese', int(xbmcaddon.Addon().getSetting('cantonese_priority'))),
                          ('Mandarin', int(xbmcaddon.Addon().getSetting('mandarin_priority'))),
                          ('Chinese Subtitles', int(xbmcaddon.Addon().getSetting('ch_sub_priority'))),
                          ('English Subtitles', int(xbmcaddon.Addon().getSetting('eng_sub_priority')))]'''
            '''def sort_key(a):
                for priority in priorities:
                    if priority[0] in a[0]:
                        return priority[1]
                return ""
                for priority in priorities:
                    if priority[0] in versions[0][0]:# Fits the priority list above
                        version, href = versions[0]
                        return _episodes(href)'''
            # Extract the language name from a raw "Watch online (<lang>)"
            # label; "" when the label does not match that shape.
            def match_str(string):
                    match = re.match(r'Watch online \(([^\)]+)\)$', string)
                    if match:
                        return match.group(1)
                    else:
                        return ""
            
            # Lower number = higher priority, read from add-on settings.
            priorities = {'Cantonese': int(xbmcaddon.Addon().getSetting('cantonese_priority')),
                          'Mandarin': int(xbmcaddon.Addon().getSetting('mandarin_priority')),
                          'Chinese Subtitles': int(xbmcaddon.Addon().getSetting('ch_sub_priority')),
                          'English Subtitles': int(xbmcaddon.Addon().getSetting('eng_sub_priority'))}
            
            # Sort versions by priority of their cleaned label.
            # NOTE(review): returns "" for unknown labels — mixing str with
            # the int priorities would raise under Python 3 sorting; confirm
            # intended runtime.
            def sort_key(a):
                try:
                    return priorities[cleanstring.version(a[0])]
                except Exception as e:
                    print("Icdrama: " + str(e))
                    return ""
            versions.sort(key = sort_key)
            
            # Invert priorities: priority value -> list of language names.
            priority_indices = {}
            for k, v in priorities.items():
                L = priority_indices.get(v, [])
                L.append(k)
                priority_indices[v] = L
            
            # How many listed versions share the best (lowest) priority?
            min_priority = min(priority_indices.keys())
            count = len([label for label, _ in versions if match_str(label) in priority_indices[min_priority]])

            if count == 1:
                # Exactly one top-priority version (first after the sort):
                # open it directly.
                version, href = versions[0]
                return _episodes(href)
            elif count > 1:
                # Several share top priority: list only those.
                di_list = []
                for i in range(count):
                    label, version_url = versions[i]
                    action_url = common.action_url('episodes', url=version_url)
                    ver = cleanstring.version(label)
                    di_list.append(common.diritem(ver, action_url))
                return di_list
            
            # Else create full directory
        
        di_list = []
        for label, version_url in versions:
            action_url = common.action_url('episodes', url=version_url)
            ver = cleanstring.version(label)
            di_list.append(common.diritem(ver, action_url))
        return di_list
def recent_updates(url):
    """One 'mirrors' directory item per recently updated entry."""
    di_list = []
    for title, update_url in scrapers.recent_updates(url):
        target = common.action_url('mirrors', url=update_url)
        di_list.append(common.diritem(title, target))
    return di_list
Exemplo n.º 32
0
import os.path
from urllib.parse import urljoin
from lib.common import diritem, action_url, profile_dir

base_url = 'https://www.duboku.tv'
video_url = 'https://tv.gboku.com'
cache_file = os.path.join(profile_dir, 'cache.pickle')
store_file = os.path.join(profile_dir, 'store.pickle')

# the trailing forward slashes are necessary
# without it, page urls will be wrong (icdrama bug)
search_url = urljoin(base_url, '/vodsearch/%s----------1---.html')
index_items = [
    diritem(33000, action_url('saved_list')),
    diritem(
        33005,
        action_url('shows',
                   url=urljoin(base_url, '/vodshow/20-----------.html'))),
    diritem(
        33001,
        action_url('filters',
                   url=urljoin(base_url, '/vodshow/2-----------.html'))),
    diritem(
        33002,
        action_url('filters',
                   url=urljoin(base_url, '/vodshow/1-----------.html'))),
    diritem(
        33003,
        action_url('filters',
                   url=urljoin(base_url, '/vodshow/3-----------.html'))),
    diritem(
Exemplo n.º 33
0
import os.path
from urlparse import urljoin
from lib.common import diritem, action_url, profile_dir

base_url = 'http://icdrama.se'
domains = ['icdrama.se', 'icdrama.to']
cache_file = os.path.join(profile_dir, 'cache.pickle')
store_file = os.path.join(profile_dir, 'store.pickle')

# the trailing forward slashes are necessary
# without it, page urls will be wrong (icdrama bug)
search_url = urljoin(base_url, '/search/%s/')
index_items = [
    diritem(33011, action_url('saved_list')),
    diritem(33000, action_url('recent_updates', url=urljoin(base_url, '/recently-updated/'))),
    diritem(33001, action_url('shows', url=urljoin(base_url, '/hk-drama/'))),
    diritem(33002, action_url('shows', url=urljoin(base_url, '/hk-movie/'))),
    diritem(33003, action_url('shows', url=urljoin(base_url, '/hk-show/'))),
    diritem(33004, action_url('shows', url=urljoin(base_url, '/chinese-drama/'))),
    diritem(33012, action_url('shows', url=urljoin(base_url, '/chinese-drama-cantonesedub/'))),
    diritem(33005, action_url('shows', url=urljoin(base_url, '/taiwanese-drama/'))),
    diritem(33013, action_url('shows', url=urljoin(base_url, '/taiwanese-drama-cantonesedub/'))),
    diritem(33006, action_url('shows', url=urljoin(base_url, '/korean-drama/'))),
    diritem(33014, action_url('shows', url=urljoin(base_url, '/korean-drama-cantonesedub/'))),
    diritem(33015, action_url('shows', url=urljoin(base_url, '/korean-drama-chinesesubtitles/'))),
    diritem(33007, action_url('shows', url=urljoin(base_url, '/korean-show/'))),
    diritem(33008, action_url('shows', url=urljoin(base_url, '/japanese-drama/'))),
    diritem(33016, action_url('shows', url=urljoin(base_url, '/japanese-drama-cantonesedub/'))),
    diritem(33017, action_url('shows', url=urljoin(base_url, '/japanese-drama-chinesesubtitles/'))),
    diritem(33009, action_url('shows', url=urljoin(base_url, '/movies/'))),
    diritem(33018, action_url('shows', url=urljoin(base_url, '/genre/25-animation.html'))),
Exemplo n.º 34
0
# NOTE(review): fragment — base_url, os.path, urljoin, diritem, action_url
# and profile_dir must be defined/imported earlier (not visible here).
domains = ['icdrama.to', 'icdrama.to']  # NOTE(review): duplicate entry; likely meant two distinct domains — confirm
# Pickle files kept in the add-on's profile directory.
cache_file = os.path.join(profile_dir, 'cache.pickle')
store_file = os.path.join(profile_dir, 'store.pickle')

# the trailing forward slashes are necessary
# without it, page urls will be wrong (icdrama bug)
# %s is filled in with the (url-quoted) search term.
search_url = urljoin(base_url, '/search/%s/')
# Root-menu entries; most categories are disabled (commented out),
# leaving only the Korean drama/show listings active.
index_items = [
    #diritem(33011, action_url('saved_list')),
    #diritem(33000, action_url('recent_updates', url=urljoin(base_url, '/recently-updated/'))),
    #diritem(33001, action_url('shows', url=urljoin(base_url, '/hk-drama/'))),
    #diritem(33002, action_url('shows', url=urljoin(base_url, '/hk-movie/'))),
    #diritem(33003, action_url('shows', url=urljoin(base_url, '/hk-show/'))),
    #diritem(33004, action_url('shows', url=urljoin(base_url, '/chinese-drama/'))),
    #diritem(33012, action_url('shows', url=urljoin(base_url, '/chinese-drama-cantonesedub/'))),
    #diritem(33005, action_url('shows', url=urljoin(base_url, '/taiwanese-drama/'))),
    #diritem(33013, action_url('shows', url=urljoin(base_url, '/taiwanese-drama-cantonesedub/'))),
    diritem(33006, action_url('shows', url=urljoin(base_url,
                                                   '/korean-drama/'))),
    #diritem(33014, action_url('shows', url=urljoin(base_url, '/korean-drama-cantonesedub/'))),
    #diritem(33015, action_url('shows', url=urljoin(base_url, '/korean-drama-chinesesubtitles/'))),
    diritem(33007, action_url('shows', url=urljoin(base_url,
                                                   '/korean-show/'))),
    #diritem(33008, action_url('shows', url=urljoin(base_url, '/japanese-drama/'))),
    #diritem(33016, action_url('shows', url=urljoin(base_url, '/japanese-drama-cantonesedub/'))),
    #diritem(33017, action_url('shows', url=urljoin(base_url, '/japanese-drama-chinesesubtitles/'))),
    #diritem(33009, action_url('shows', url=urljoin(base_url, '/movies/'))),
    #diritem(33018, action_url('shows', url=urljoin(base_url, '/genre/25-animation.html'))),
    #diritem(33010, action_url('search'))
]
Exemplo n.º 35
0
import os.path
from urlparse import urljoin
from lib.common import diritem, action_url, profile_dir

base_url = 'http://icdrama.se'
domains = ['icdrama.se', 'icdrama.to']
cache_file = os.path.join(profile_dir, 'cache.pickle')
store_file = os.path.join(profile_dir, 'store.pickle')

# the trailing forward slashes are necessary
# without it, page urls will be wrong (icdrama bug)
search_url = urljoin(base_url, '/search/%s/')
index_items = [
    diritem(33011, action_url('saved_list')),
    diritem(
        33000,
        action_url('recent_updates',
                   url=urljoin(base_url, '/recently-updated/'))),
    diritem(33001, action_url('shows', url=urljoin(base_url, '/hk-drama/'))),
    diritem(33002, action_url('shows', url=urljoin(base_url, '/hk-movie/'))),
    diritem(33003, action_url('shows', url=urljoin(base_url, '/hk-show/'))),
    diritem(33004, action_url('shows',
                              url=urljoin(base_url, '/chinese-drama/'))),
    diritem(
        33012,
        action_url('shows',
                   url=urljoin(base_url, '/chinese-drama-cantonesedub/'))),
    diritem(33005,
            action_url('shows', url=urljoin(base_url, '/taiwanese-drama/'))),
    diritem(
        33013,