示例#1
0
def search(url=None):
    """Build directory items for a search results page.

    If no url is given, prompt the user for a search term and derive the
    search URL from it; a cancelled or empty prompt returns [].
    Pops up a "no results" message (33304) when nothing was found.
    """
    if not url:
        heading = xbmcaddon.Addon().getLocalizedString(33301)
        s = common.input(heading)
        if s:
            # URL-encode the user's term: raw input may contain spaces or
            # non-ASCII characters that would corrupt the query URL.
            # (Matches the other search() implementations in this project.)
            import urllib
            url = config.search_url % urllib.quote(s.encode('utf8'))
        else:
            return []

    di_list = []
    for ori_name, show_url, image, info in scrapers.search(url):
        action_url = common.action_url('sources', url=show_url)
        cm = _saved_to_list_context_menu(ori_name, show_url, image)
        di_list.append(
            common.diritem(ori_name,
                           action_url,
                           image,
                           context_menu=cm,
                           info=info))

    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('search', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))

    if not di_list:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return di_list
示例#2
0
def add_to_saved(all_title, show_url, image):
    """Prepend this show to the saved list, drop duplicate entries
    (keeping the first occurrence), persist, and notify the user."""
    with common.busy_indicator():
        saved = _get_saved_list()
        saved.insert(0, (all_title, show_url, image))
        # Deduplicate while preserving order; the new entry at index 0 wins.
        seen = set()
        deduped = []
        for entry in saved:
            if entry not in seen:
                seen.add(entry)
                deduped.append(entry)
        store.put(_saved_list_key, deduped)
    common.popup(xbmcaddon.Addon().getLocalizedString(33302))
def add_to_saved(eng_name, ori_name, show_url, image):
    """Add a show to the front of the saved list, remove duplicates
    (first occurrence kept), persist the list, and pop up a confirmation."""
    with common.busy_indicator():
        combined = [(eng_name, ori_name, show_url, image)] + _get_saved_list()
        # Order-preserving dedup: the freshly-added head entry survives.
        seen = set()
        unique = []
        for item in combined:
            if item in seen:
                continue
            seen.add(item)
            unique.append(item)
        store.put(_saved_list_key, unique)
    common.popup(xbmcaddon.Addon().getLocalizedString(33302))
def add_to_saved(eng_name, ori_name, show_url, image):
    """Insert the show at the head of the saved list, dedupe it, store it,
    then show the 'saved' confirmation popup."""
    with common.busy_indicator():
        saved = _get_saved_list()
        saved.insert(0, (eng_name, ori_name, show_url, image))
        # Keep only the first occurrence of each entry; the list is small,
        # so a membership scan per item is fine.
        deduped = []
        for entry in saved:
            if entry not in deduped:
                deduped.append(entry)
        store.put(_saved_list_key, deduped)
    common.popup(loc.getLocalizedString(33302))
示例#5
0
def sources(url):
    """Build a directory item per source of the show at *url*.

    Returns the item list, or None (after an error popup, 33305) when the
    scraper yielded no sources — callers rely on the None sentinel.
    """
    di_list = []
    for name, source_url in scrapers.sources(url):
        action_url = common.action_url('episodes', url=source_url)
        di_list.append(common.diritem(name, action_url))

    # Pythonic emptiness test instead of len(...) <= 0; the None return is
    # kept for backward compatibility with existing callers.
    if not di_list:
        common.popup(common.getMessage(33305))
        return None

    return di_list
示例#6
0
    def get_media_url(self, host, media_id):
        """Resolve the playable media URL for *host* / *media_id*.

        Vidembed-hosted pages are fetched directly and a stream is picked
        from the extracted list (automatically or via a user dialog);
        other pages are parsed for an embedded iframe player. Returns ''
        when the user cancels selection or no player iframe is found.
        Raises ResolverError for any other failure in the iframe path.
        """
        url = self.get_url(host, media_id)
        cmn.debug('Icdrama: ' + url)

        if 'vidembed' in url or 'vb.icdrama' in url:
            # Copy the headers: the original aliased self.headers, so the
            # Referer override leaked into every later request. A shallow
            # copy keeps the instance's header dict pristine.
            headers = dict(self.headers)
            headers['Referer'] = 'http://adrama.to'

            response = requests.get(url, headers=headers)

            streams = self._extract_streams(response)
            cmn.debug("Icdrama: Extracted links... " + str(streams))

            # (Removed two redundant `unwrapped_url = ''` pre-assignments:
            # both branches below always bind unwrapped_url.)
            if xbmcaddon.Addon().getSetting('auto_select_source') == 'true':
                unwrapped_url = sourceutil.pick_source(streams)
            else:
                unwrapped_url = helpers.pick_source(streams, False)

            if ('redirector.googlevideo.com' in unwrapped_url
                    or 'blogspot.com' in unwrapped_url
                    or 'googleusercontent.com' in unwrapped_url or 'fbcdn.net'
                    in unwrapped_url):  # for current Videobug source
                # Kodi can play directly, skip further resolve
                return unwrapped_url

            return resolveurl.resolve(unwrapped_url)
        else:
            try:
                html = self.net.http_GET(url, headers=self.headers).content

                iframe = BeautifulSoup(html,
                                       'html5lib').find(id='iframeplayer')
                if iframe:
                    iframe_url = urljoin(self.get_url(host, ''), iframe['src'])
                    return resolveurl.resolve(iframe_url)
                else:
                    cmn.popup(loc.getLocalizedString(33305))
                    return ''

            except Exception as e:
                # A cancelled source-selection dialog is benign: return ''.
                # Everything else is surfaced as a ResolverError with context.
                if 'No link selected' in str(e):
                    return ''
                raise ResolverError('Icdrama resolver: ' + str(e) + ' : ' +
                                    self.get_url(host, media_id))
示例#7
0
def shows(url):
    """Build directory items for a shows listing page plus pagination links.

    Returns None (after an error popup, 33305) when the page yielded
    nothing — callers rely on the None sentinel — otherwise the item list.
    """
    di_list = []
    for all_title, show_url, image in scrapers.shows(url):
        action_url = common.action_url('sources', url=show_url)
        cm = _saved_to_list_context_menu(all_title, show_url, image)
        # all_title is used directly; the old `name = all_title` alias
        # added nothing.
        di_list.append(common.diritem(all_title, action_url, image,
                                      context_menu=cm))

    for page, page_url in scrapers.pages(url):
        action_url = common.action_url('shows', url=page_url)
        page_label = cleanstring.page(page)
        di_list.append(common.diritem(page_label, action_url))

    # Pythonic emptiness test instead of len(...) <= 0.
    if not di_list:
        common.popup(common.getMessage(33305))
        return None

    return di_list
def search(url=None):
    """Build directory items for a search results page.

    Without a url, prompt the user for a search term; a cancelled or
    empty prompt yields []. Pops up message 33304 when nothing matched.
    """
    if not url:
        term = common.input(xbmcaddon.Addon().getLocalizedString(33301))
        if not term:
            return []
        url = config.search_url % urllib.quote(term.encode('utf8'))

    di_list = []
    for eng_name, ori_name, show_url, image in scrapers.search(url):
        label = cleanstring.show(eng_name, ori_name)
        menu = _saved_to_list_context_menu(eng_name, ori_name, show_url, image)
        di_list.append(common.diritem(label,
                                      common.action_url('versions',
                                                        url=show_url),
                                      image,
                                      context_menu=menu))
    for page, page_url in scrapers.pages(url):
        di_list.append(common.diritem(cleanstring.page(page),
                                      common.action_url('search',
                                                        url=page_url)))
    if not di_list:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return di_list
示例#9
0
def search(url=None):
    """Return directory items for search results, prompting for a term
    when no url was supplied (cancelled input returns [])."""
    if not url:
        heading = xbmcaddon.Addon().getLocalizedString(33301)
        query = common.input(heading)
        if not query:
            return []
        url = config.search_url % urllib.quote(query.encode('utf8'))

    items = []
    for eng_name, ori_name, show_url, image in scrapers.search(url):
        items.append(common.diritem(
            cleanstring.show(eng_name, ori_name),
            common.action_url('versions', url=show_url),
            image,
            context_menu=_saved_to_list_context_menu(
                eng_name, ori_name, show_url, image)))

    # Pagination entries follow the result entries.
    items.extend(
        common.diritem(cleanstring.page(page),
                       common.action_url('search', url=page_url))
        for page, page_url in scrapers.pages(url))

    if not items:
        common.popup(xbmcaddon.Addon().getLocalizedString(33304))
    return items
示例#10
0
def filters(url):
    """Build filter-navigation directory items for a listing url.

    The scraper column index and the follow-up action depend on how far
    down the filter hierarchy this url already is.
    """
    index = 'c1'
    nextAction = 'filters'
    if re.match(r'(.*)/------1(.*)', url):
        index = 'c3'
    elif re.match(r'(.*)/--(.*)----1(.*)', url):
        index = 'c2'
        nextAction = 'shows'

    # The "all shows" entry is appended unconditionally, so di_list can
    # never be empty — the old `len(di_list) <= 0` popup branch was
    # unreachable and has been removed.
    action_url = common.action_url('shows', url=url)
    di_list = [common.diritem(common.getMessage(33007), action_url, '')]
    for all_title, show_url in scrapers.types(url, index):
        action_url = common.action_url(nextAction, url=show_url)
        di_list.append(common.diritem(all_title, action_url, ''))

    return di_list
示例#11
0
def filters(url):
    """Build filter-navigation directory items for a listing url.

    The scraper type index and the next action are chosen from the shape
    of the url (category id, area, language markers).
    """
    index = 0
    nextAction = 'filters'
    if re.match(r'(.)*id/(2|1[3-6])(.)*.html', url):
        if re.match(r'(.)*id/2.html', url):
            index = 1
        else:
            index = 4
    elif re.match(r'(.)*id/1.html', url):
        index = 2
        if re.match(r'(.)*area/(.)*', url):
            index = 4
    elif re.match(r'(.)*id/3.html', url):
        # NOTE: the original repeated this exact elif branch twice; the
        # duplicate could never match and has been removed.
        index = 2
        nextAction = 'shows'

    if re.match(r'(.)*lang/(.)*', url):
        index = 3
        nextAction = 'shows'

    # The "all shows" entry is appended unconditionally, so the list can
    # never be empty — the old `len(di_list) <= 0` popup branch was
    # unreachable and has been removed.
    action_url = common.action_url('shows', url=url)
    di_list = [common.diritem(common.getMessage(33007), action_url, '')]
    for all_title, show_url, image in scrapers.types(url, index):
        di_list.append(common.diritem(all_title,
                                      common.action_url(nextAction,
                                                        url=show_url),
                                      image))

    return di_list
示例#12
0
def remove_saved(all_title, show_url, image):
    """Delete the given entry from the saved list, persist the change,
    refresh the view, and confirm with a popup."""
    entry = (all_title, show_url, image)
    saved = _get_saved_list()
    saved.remove(entry)  # raises ValueError if the entry is absent
    store.put(_saved_list_key, saved)
    common.refresh()
    common.popup(xbmcaddon.Addon().getLocalizedString(33303))
def remove_saved(eng_name, ori_name, show_url, image):
    """Remove one saved show, write the list back, refresh the directory
    listing, and show the 'removed' confirmation popup."""
    saved = _get_saved_list()
    # Removes the first matching tuple; a missing entry raises ValueError,
    # matching the original behavior.
    saved.remove((eng_name, ori_name, show_url, image))
    store.put(_saved_list_key, saved)
    common.refresh()
    common.popup(xbmcaddon.Addon().getLocalizedString(33303))
def remove_saved(eng_name, ori_name, show_url, image):
    """Drop the matching entry from the saved list, persist it, refresh
    the UI, and notify the user."""
    target = (eng_name, ori_name, show_url, image)
    current = _get_saved_list()
    current.remove(target)  # ValueError if absent, same as before
    store.put(_saved_list_key, current)
    common.refresh()
    common.popup(loc.getLocalizedString(33303))