def _get_episode_url(self, show_url, video):
        url = scraper_utils.urljoin(self.base_url, show_url)
        html = self._http_get(url, cache_limit=2)
        if html:
            force_title = scraper_utils.force_title(video)
            episodes = dom_parser2.parse_dom(html, 'div', {'class': 'el-item'})
            if not force_title:
                episode_pattern = 'href="([^"]*-[sS]%02d[eE]%02d(?!\d)[^"]*)' % (int(video.season), int(video.episode))
                match = re.search(episode_pattern, html)
                if match:
                    return scraper_utils.pathify_url(match.group(1))
                
                if kodi.get_setting('airdate-fallback') == 'true' and video.ep_airdate:
                    airdate_pattern = '%02d-%02d-%d' % (video.ep_airdate.day, video.ep_airdate.month, video.ep_airdate.year)
                    for episode in episodes:
                        episode = episode.content
                        ep_url = dom_parser2.parse_dom(episode, 'a', req='href')
                        ep_airdate = dom_parser2.parse_dom(episode, 'div', {'class': 'date'})
                        if ep_url and ep_airdate:
                            ep_airdate = ep_airdate[0].content.strip()
                            if airdate_pattern == ep_airdate:
                                return scraper_utils.pathify_url(ep_url[0].attrs['href'])

            if (force_title or kodi.get_setting('title-fallback') == 'true') and video.ep_title:
                norm_title = scraper_utils.normalize_title(video.ep_title)
                for episode in episodes:
                    episode = episode.content
                    ep_url = dom_parser2.parse_dom(episode, 'a', req='href')
                    ep_title = dom_parser2.parse_dom(episode, 'div', {'class': 'e-name'})
                    if ep_url and ep_title and norm_title == scraper_utils.normalize_title(ep_title[0].content):
                        return scraper_utils.pathify_url(ep_url[0].attrs['href'])
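
# A self-contained sketch of how the season/episode href pattern above behaves;
# the sample href is hypothetical, not taken from any real site.
import re

pattern = r'href="([^"]*-[sS]%02d[eE]%02d(?!\d)[^"]*)' % (2, 3)
sample = '<a href="/tv/some-show-S02E03-watch">Episode 3</a>'
match = re.search(pattern, sample)
if match:
    print(match.group(1))  # -> /tv/some-show-S02E03-watch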
Example #2
def do_scheduled_task(task, isPlaying):
    global last_check
    now = datetime.datetime.now()
    if kodi.get_setting("auto-%s" % task) == "true":
        if last_check < now - datetime.timedelta(minutes=1):
            # log_utils.log('Check Triggered: Last: %s Now: %s' % (last_check, now), log_utils.LOGDEBUG)
            next_run = get_next_run(task)
            last_check = now
        else:
            # hack next_run to be in the future
            next_run = now + datetime.timedelta(seconds=1)

        # log_utils.log("Update Status on [%s]: Currently: %s Will Run: %s Last Check: %s" % (task, now, next_run, last_check), xbmc.LOGDEBUG)
        if now >= next_run:
            is_scanning = xbmc.getCondVisibility("Library.IsScanningVideo")
            if not is_scanning:
                during_playback = kodi.get_setting("%s-during-playback" % (task)) == "true"
                if during_playback or not isPlaying:
                    log_utils.log("Service: Running Scheduled Task: [%s]" % (task))
                    builtin = "RunPlugin(plugin://%s/?mode=%s)" % (kodi.get_id(), task)
                    xbmc.executebuiltin(builtin)
                    db_connection.set_setting("%s-last_run" % task, now.strftime("%Y-%m-%d %H:%M:%S.%f"))
                else:
                    log_utils.log("Service: Playing... Busy... Postponing [%s]" % (task), log_utils.LOGDEBUG)
            else:
                log_utils.log("Service: Scanning... Busy... Postponing [%s]" % (task), log_utils.LOGDEBUG)
def update_all_scrapers():
        try: last_check = int(kodi.get_setting('last_list_check'))
        except: last_check = 0
        now = int(time.time())
        list_url = kodi.get_setting('scraper_url')
        scraper_password = kodi.get_setting('scraper_password')
        list_path = os.path.join(kodi.translate_path(kodi.get_profile()), 'scraper_list.txt')
        exists = os.path.exists(list_path)
        if list_url and scraper_password and (not exists or last_check < (now - (24 * 60 * 60))):
            scraper_list = utils2.get_and_decrypt(list_url, scraper_password)
            if scraper_list:
                try:
                    with open(list_path, 'w') as f:
                        f.write(scraper_list)
    
                    kodi.set_setting('last_list_check', str(now))
                    kodi.set_setting('scraper_last_update', time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)))
                    for line in scraper_list.split('\n'):
                        line = line.replace(' ', '')
                        if line:
                            scraper_url, filename = line.split(',')
                            if scraper_url.startswith('http'):
                                update_scraper(filename, scraper_url)
                except Exception as e:
                    log_utils.log('Exception during scraper update: %s' % (e), log_utils.LOGWARNING)
Example #4
    def __init__(self):
        global OperationalError
        global DatabaseError
        self.dbname = kodi.get_setting('db_name')
        self.username = kodi.get_setting('db_user')
        self.password = kodi.get_setting('db_pass')
        self.address = kodi.get_setting('db_address')
        self.db = None
        self.progress = None

        if kodi.get_setting('use_remote_db') == 'true':
            if self.address is not None and self.username is not None and self.password is not None and self.dbname is not None:
                import mysql.connector as db_lib
                from mysql.connector import OperationalError as OperationalError
                from mysql.connector import DatabaseError as DatabaseError
                log_utils.log('Loading MySQL as DB engine', log_utils.LOGDEBUG)
                self.db_type = DB_TYPES.MYSQL
            else:
                log_utils.log('MySQL is enabled but not setup correctly', log_utils.LOGERROR)
                raise ValueError('MySQL enabled but not setup correctly')
        else:
            from sqlite3 import dbapi2 as db_lib
            from sqlite3 import OperationalError as OperationalError
            from sqlite3 import DatabaseError as DatabaseError
            log_utils.log('Loading sqlite3 as DB engine', log_utils.LOGDEBUG)
            self.db_type = DB_TYPES.SQLITE
            db_dir = kodi.translate_path("special://database")
            self.db_path = os.path.join(db_dir, 'saltscache.db')
        self.db_lib = db_lib
        self.__connect_to_db()
 def _get_episode_url(self, show_url, video):
     query = scraper_utils.parse_query(show_url)
     if 'id' in query:
         url = scraper_utils.urljoin(self.base_url, '/api/v2/shows/%s' % (query['id']))
         js_data = self._http_get(url, cache_limit=.5)
         if 'episodes' in js_data:
             force_title = scraper_utils.force_title(video)
             if not force_title:
                 for episode in js_data['episodes']:
                     if int(video.season) == int(episode['season']) and int(video.episode) == int(episode['number']):
                         return scraper_utils.pathify_url('?id=%s' % (episode['id']))
                 
                 if kodi.get_setting('airdate-fallback') == 'true' and video.ep_airdate:
                     for episode in js_data['episodes']:
                         if 'airdate' in episode:
                             ep_airdate = scraper_utils.to_datetime(episode['airdate'], "%Y-%m-%d").date()
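                             # the site's airdates apparently run one day ahead of trakt's, hence the offset below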
                             if video.ep_airdate == (ep_airdate - datetime.timedelta(days=1)):
                                 return scraper_utils.pathify_url('?id=%s' % (episode['id']))
             else:
                 logger.log('Skipping S&E matching as title search is forced on: %s' % (video.trakt_id), log_utils.LOGDEBUG)
             
             if (force_title or kodi.get_setting('title-fallback') == 'true') and video.ep_title:
                 norm_title = scraper_utils.normalize_title(video.ep_title)
                 for episode in js_data['episodes']:
                     if 'name' in episode and norm_title in scraper_utils.normalize_title(episode['name']):
                         return scraper_utils.pathify_url('?id=%s' % (episode['id']))
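
# Shape of the JSON payload this scraper expects (keys taken from the code
# above; values are illustrative only):
# {'episodes': [{'id': 4242, 'season': 1, 'number': 2,
#                'airdate': '2016-03-01', 'name': 'Pilot'}]}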
 def _get_episode_url(self, show_url, video):
     url = scraper_utils.urljoin(self.base_url, show_url)
     html = self._http_get(url, cache_limit=2)
     episode_pattern = 'href="([^"]+-s0*%se0*%s(?!\d)[^"]*)' % (video.season, video.episode)
     parts = dom_parser2.parse_dom(html, 'ul', {'class': 'episode_list'})
     fragment = '\n'.join(part.content for part in parts)
     result = self._default_get_episode_url(fragment, video, episode_pattern)
     if result: return result
     
     ep_urls = [r.attrs['href'] for r in dom_parser2.parse_dom(fragment, 'a', req='href')]
     ep_dates = [r.content for r in dom_parser2.parse_dom(fragment, 'span', {'class': 'episode_air_d'})]
     ep_titles = [r.content for r in dom_parser2.parse_dom(fragment, 'span', {'class': 'episode_name'})]
     force_title = scraper_utils.force_title(video)
     if not force_title and kodi.get_setting('airdate-fallback') == 'true' and video.ep_airdate:
         for ep_url, ep_date in zip(ep_urls, ep_dates):
             logger.log('Quikr Ep Airdate Matching: %s - %s - %s' % (ep_url, ep_date, video.ep_airdate), log_utils.LOGDEBUG)
             if video.ep_airdate == scraper_utils.to_datetime(ep_date, '%Y-%m-%d').date():
                 return scraper_utils.pathify_url(ep_url)
 
     if force_title or kodi.get_setting('title-fallback') == 'true':
         norm_title = scraper_utils.normalize_title(video.ep_title)
         for ep_url, ep_title in zip(ep_urls, ep_titles):
             ep_title = re.sub('<span>.*?</span>\s*', '', ep_title)
             logger.log('Quikr Ep Title Matching: %s - %s - %s' % (ep_url.encode('utf-8'), ep_title.encode('utf-8'), video.ep_title), log_utils.LOGDEBUG)
             if norm_title == scraper_utils.normalize_title(ep_title):
                 return scraper_utils.pathify_url(ep_url)
Example #7
def do_startup_task(task):
    run_on_startup = kodi.get_setting('auto-%s' % task) == 'true' and kodi.get_setting('%s-during-startup' % task) == 'true'
    if run_on_startup and not xbmc.abortRequested:
        log_utils.log('Service: Running startup task [%s]' % (task))
        now = datetime.datetime.now()
        xbmc.executebuiltin('RunPlugin(plugin://%s/?mode=%s)' % (kodi.get_id(), task))
        db_connection.set_setting('%s-last_run' % (task), now.strftime("%Y-%m-%d %H:%M:%S.%f"))
Example #8
    def _default_get_episode_url(self, html, video, episode_pattern, title_pattern='', airdate_pattern=''):
        logger.log('Default Episode Url: |%s|%s|' % (self.get_name(), video), log_utils.LOGDEBUG)
        if not html: return
        
        try: html = html[0].content
        except AttributeError: pass
        force_title = scraper_utils.force_title(video)
        if not force_title:
            if episode_pattern:
                match = re.search(episode_pattern, html, re.DOTALL | re.I)
                if match:
                    return scraper_utils.pathify_url(match.group(1))

            if kodi.get_setting('airdate-fallback') == 'true' and airdate_pattern and video.ep_airdate:
                airdate_pattern = airdate_pattern.replace('{year}', str(video.ep_airdate.year))
                airdate_pattern = airdate_pattern.replace('{month}', str(video.ep_airdate.month))
                airdate_pattern = airdate_pattern.replace('{p_month}', '%02d' % (video.ep_airdate.month))
                airdate_pattern = airdate_pattern.replace('{month_name}', MONTHS[video.ep_airdate.month - 1])
                airdate_pattern = airdate_pattern.replace('{short_month}', SHORT_MONS[video.ep_airdate.month - 1])
                airdate_pattern = airdate_pattern.replace('{day}', str(video.ep_airdate.day))
                airdate_pattern = airdate_pattern.replace('{p_day}', '%02d' % (video.ep_airdate.day))
                logger.log('Air Date Pattern: %s' % (airdate_pattern), log_utils.LOGDEBUG)

                match = re.search(airdate_pattern, html, re.DOTALL | re.I)
                if match:
                    return scraper_utils.pathify_url(match.group(1))
        else:
            logger.log('Skipping S&E matching as title search is forced on: %s' % (video.trakt_id), log_utils.LOGDEBUG)

        if (force_title or kodi.get_setting('title-fallback') == 'true') and video.ep_title and title_pattern:
            norm_title = scraper_utils.normalize_title(video.ep_title)
            for match in re.finditer(title_pattern, html, re.DOTALL | re.I):
                episode = match.groupdict()
                if norm_title == scraper_utils.normalize_title(episode['title']):
                    return scraper_utils.pathify_url(episode['url'])
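
# An illustrative call from a scraper subclass; all three patterns are hypothetical.
# The title pattern must expose named groups 'url' and 'title' (the groupdict()
# lookup above depends on them), and the airdate tokens are substituted from
# video.ep_airdate as shown:
#   episode_pattern = r'href="([^"]+/season-0*%s/episode-0*%s/)"' % (video.season, video.episode)
#   title_pattern = r'href="(?P<url>[^"]+)"[^>]*>(?P<title>[^<]+)</a>'
#   airdate_pattern = r'href="([^"]+)"[^>]*>\s*{month_name}\s+{p_day},\s+{year}'
#   return self._default_get_episode_url(html, video, episode_pattern, title_pattern, airdate_pattern)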
Example #9
def do_disable_check():
    scrapers = relevant_scrapers()
    auto_disable = kodi.get_setting('auto-disable')
    check_freq = int(kodi.get_setting('disable-freq'))
    disable_thresh = int(kodi.get_setting('disable-thresh'))
    for cls in scrapers:
        last_check = db_connection.get_setting('%s_check' % (cls.get_name()))
        last_check = int(last_check) if last_check else 0
        tries = kodi.get_setting('%s_try' % (cls.get_name()))
        tries = int(tries) if tries else 0
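        # integer division buckets: only re-check once 'tries' crosses a new multiple of check_freq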
        if tries > 0 and tries / check_freq > last_check / check_freq:
            kodi.set_setting('%s_check' % (cls.get_name()), str(tries))
            success_rate = calculate_success(cls.get_name())
            if success_rate < disable_thresh:
                if auto_disable == DISABLE_SETTINGS.ON:
                    kodi.set_setting('%s-enable' % (cls.get_name()), 'false')
                    kodi.notify(msg='[COLOR blue]%s[/COLOR] %s' % (cls.get_name(), i18n('scraper_disabled')), duration=5000)
                elif auto_disable == DISABLE_SETTINGS.PROMPT:
                    dialog = xbmcgui.Dialog()
                    line1 = i18n('disable_line1') % (cls.get_name(), 100 - success_rate, tries)
                    line2 = i18n('disable_line2')
                    line3 = i18n('disable_line3')
                    ret = dialog.yesno('SALTS', line1, line2, line3, i18n('keep_enabled'), i18n('disable_it'))
                    if ret:
                        kodi.set_setting('%s-enable' % (cls.get_name()), 'false')
    def get_season_subtitles(self, language, tvshow_id, season):
        url = BASE_URL + '/ajax_loadShow.php?show=%s&season=%s&langs=&hd=%s&hi=%s' % (tvshow_id, season, 0, 0)
        html = self.__get_cached_url(url, .25)
        # print html.decode('ascii', 'ignore')
        req_hi = kodi.get_setting('subtitle-hi') == 'true'
        req_hd = kodi.get_setting('subtitle-hd') == 'true'
        items = []
        regex = re.compile('<td>(\d+)</td><td>(\d+)</td><td>.*?</td><td>(.*?)</td><td.*?>(.*?)</td>.*?<td.*?>(.+?)</td><td.*?>(.*?)</td><td.*?>(.*?)</td><td.*?>(.*?)</td><td.*?><a\s+href="(.*?)">.+?</td>',
                           re.DOTALL)
        for match in regex.finditer(html):
            season, episode, srt_lang, version, completed, hi, corrected, hd, srt_url = match.groups()
            if not language or (language == srt_lang and (not req_hi or hi) and (not req_hd or hd)):
                item = {}
                item['season'] = season
                item['episode'] = episode
                item['language'] = srt_lang
                item['version'] = version

                if completed.lower() == 'completed':
                    item['completed'] = True
                    item['percent'] = '100'
                else:
                    item['completed'] = False
                    r = re.search('([\d.]+)%', completed)
                    if r:
                        item['percent'] = r.group(1)
                    else:
                        item['percent'] = '0'

                item['hi'] = True if hi else False
                item['corrected'] = True if corrected else False
                item['hd'] = True if hd else False
                item['url'] = srt_url
                items.append(item)
        return items
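
# Shape of each returned item (keys from the code above; values illustrative):
# {'season': '1', 'episode': '2', 'language': 'English', 'version': 'DIMENSION',
#  'completed': True, 'percent': '100', 'hi': False, 'corrected': False,
#  'hd': True, 'url': '/some/srt/path'}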
def show_movies():
    try: limit = int(kodi.get_setting('limit'))
    except: limit = 0
    try: source = int(kodi.get_setting('source'))
    except: source = 0
    list_data = local_utils.make_list_dict()
    for movie in get_movies(source, limit):
        label = movie['title']
        key = movie['title'].upper()
        if key in list_data:
            if 'year' not in movie or not movie['year'] or not list_data[key] or int(movie['year']) in list_data[key]:
                label = '[COLOR green]%s[/COLOR]' % (label)
        
        liz = utils.make_list_item(label, movie, local_utils.make_art(movie))
        liz.setInfo('video', movie)
        
        menu_items = []
        queries = {'mode': MODES.PLAY_RECENT, 'movie_id': movie['movie_id'], 'location': movie['location'], 'thumb': movie.get('poster', '')}
        runstring = 'RunPlugin(%s)' % (kodi.get_plugin_url(queries))
        menu_items.append((i18n('play_most_recent'), runstring),)
        queries = {'mode': MODES.ADD_TRAKT, 'title': movie['title'], 'year': movie.get('year', '')}
        runstring = 'RunPlugin(%s)' % (kodi.get_plugin_url(queries))
        menu_items.append((i18n('add_to_trakt'), runstring),)
        runstring = 'RunPlugin(%s)' % (CP_ADD_URL % (movie['title']))
        menu_items.append((i18n('add_to_cp'), runstring),)
        liz.addContextMenuItems(menu_items, replaceItems=False)
        
        queries = {'mode': MODES.TRAILERS, 'movie_id': movie['movie_id'], 'location': movie['location'], 'poster': movie.get('poster', ''), 'fanart': movie.get('fanart', '')}
        liz_url = kodi.get_plugin_url(queries)
        xbmcplugin.addDirectoryItem(int(sys.argv[1]), liz_url, liz, isFolder=True)
    kodi.set_view('movies', set_sort=True)
    kodi.end_of_directory(cache_to_disc=False)
Example #12
	def _authorize(self, pin=None):
		if kodi.get_setting('debug') == "true":
			print "Attempting to login/refresh Trakt Account"
		uri = '/oauth/token'
		data = {'client_id': CLIENT_ID, 'client_secret': SECRET_ID, 'redirect_uri': REDIRECT_URI}
		if pin:
			data['code'] = pin
			data['grant_type'] = 'authorization_code'
		else:
			refresh_token = kodi.get_setting('trakt_refresh_token')
			if refresh_token:
				data['refresh_token'] = refresh_token
				data['grant_type'] = 'refresh_token'
			else:
				kodi.set_setting('trakt_oauth_token', '')
				kodi.set_setting('trakt_refresh_token', '')
				kodi.set_setting('trakt_authorized', 'false')
				return False
		if self.token is None: self.token = False
		response = self._call(uri, data, auth=False)
		if response is False or response is None:
			return False
		if 'access_token' in response.keys() and 'refresh_token' in response.keys():
			kodi.set_setting('trakt_oauth_token', response['access_token'])
			kodi.set_setting('trakt_refresh_token', response['refresh_token'])
			kodi.set_setting('trakt_authorized', "true")
			self.token = response['access_token']
			if kodi.get_setting('debug') == "true":
				print "YOU JUST AUTHORIZED TRAKT"
				#kodi.notify('TRAKT ','Account Authorized You may continue','5000','')
			return True
    def _get_episode_url(self, show_url, video):
        url = urlparse.urljoin(self.base_url, show_url)
        html = self._http_get(url, cache_limit=8)
        pattern = "<a[^>]*class='dropdown-toggle'[^>]*>Season\s+%s<(.*?)<li\s+class='divider'>" % (video.season)
        match = re.search(pattern, html, re.DOTALL)
        if match:
            fragment = match.group(1)
            ep_ids = dom_parser.parse_dom(fragment, 'a', {'id': 'epiloader'}, ret='class')
            episodes = dom_parser.parse_dom(fragment, 'a', {'id': 'epiloader'})
            airdates = dom_parser.parse_dom(fragment, 'span', {'class': 'airdate'})
            ep_airdate = video.ep_airdate.strftime('%Y-%m-%d') if isinstance(video.ep_airdate, datetime.date) else ''
            norm_title = scraper_utils.normalize_title(video.ep_title)
            num_id, airdate_id, title_id = '', '', ''
            for episode, airdate, ep_id in zip(episodes, airdates, ep_ids):
                if ep_airdate and ep_airdate == airdate: airdate_id = ep_id
                match = re.search('(?:<span[^>]*>)?(\d+)\.\s*([^<]+)', episode)
                if match:
                    ep_num, ep_title = match.groups()
                    if int(ep_num) == int(video.episode): num_id = ep_id
                    if norm_title and norm_title in scraper_utils.normalize_title(ep_title): title_id = ep_id

            best_id = ''
            if not scraper_utils.force_title(video):
                if num_id: best_id = num_id
                if kodi.get_setting('airdate-fallback') == 'true' and airdate_id: best_id = airdate_id
                if kodi.get_setting('title-fallback') == 'true' and title_id: best_id = title_id
            else:
                if title_id: best_id = title_id
            
            if best_id:
                return EP_URL % (best_id)
def add_trakt(title, year=''):
    trakt_api = Trakt_API(kodi.get_setting('trakt_oauth_token'), kodi.get_setting('use_https') == 'true', timeout=int(kodi.get_setting('trakt_timeout')))
    results = trakt_api.search(SECTIONS.MOVIES, title)
    try: results = [result for result in results if result['year'] is not None and int(result['year']) - 1 <= int(year) <= int(result['year']) + 1]
    except: pass
    if not results:
        kodi.notify(msg=i18n('no_movie_found'))
        return
    
    if len(results) == 1:
        index = 0
    else:
        pick_list = [movie['title'] if movie['year'] is None else '%s (%s)' % (movie['title'], movie['year']) for movie in results]
        index = xbmcgui.Dialog().select(i18n('pick_a_movie'), pick_list)
        
    if index > -1:
        slug = kodi.get_setting('default_slug')
        name = kodi.get_setting('default_list')
        if not slug:
            result = utils.choose_list(Trakt_API, translations)
            if result is None:
                return
            else:
                slug, name = result
        
        item = {'trakt': results[index]['ids']['trakt']}
        if slug == WATCHLIST_SLUG:
            trakt_api.add_to_watchlist(SECTIONS.MOVIES, item)
        elif slug:
            trakt_api.add_to_list(SECTIONS.MOVIES, slug, item)
            
        movie = results[index]
        label = movie['title'] if movie['year'] is None else '%s (%s)' % (movie['title'], movie['year'])
        kodi.notify(msg=i18n('added_to_list') % (label, name))
        kodi.refresh_container()
    def _get_episode_url(self, show_url, video):
        episode_pattern = 'href="([^"]+-s0*%se0*%s(?!\d)[^"]*)' % (video.season, video.episode)
        result = self._default_get_episode_url(show_url, video, episode_pattern)
        if result:
            return result

        url = urlparse.urljoin(self.base_url, show_url)
        html = self._http_get(url, cache_limit=2)
        fragment = dom_parser.parse_dom(html, "ul", {"class": "episode_list"})
        if fragment:
            ep_urls = dom_parser.parse_dom(fragment[0], "a", ret="href")
            ep_dates = dom_parser.parse_dom(fragment[0], "span", {"class": "episode_air_d"})
            ep_titles = dom_parser.parse_dom(fragment[0], "span", {"class": "episode_name"})
            force_title = scraper_utils.force_title(video)
            if not force_title and kodi.get_setting("airdate-fallback") == "true" and video.ep_airdate:
                for ep_url, ep_date in zip(ep_urls, ep_dates):
                    log_utils.log(
                        "Quikr Ep Airdate Matching: %s - %s - %s" % (ep_url, ep_date, video.ep_airdate),
                        log_utils.LOGDEBUG,
                    )
                    if video.ep_airdate == scraper_utils.to_datetime(ep_date, "%Y-%m-%d").date():
                        return scraper_utils.pathify_url(ep_url)

            if force_title or kodi.get_setting("title-fallback") == "true":
                norm_title = scraper_utils.normalize_title(video.ep_title)
                for ep_url, ep_title in zip(ep_urls, ep_titles):
                    ep_title = re.sub("<span>.*?</span>\s*", "", ep_title)
                    log_utils.log(
                        "Quikr Ep Title Matching: %s - %s - %s" % (ep_url, norm_title, video.ep_title),
                        log_utils.LOGDEBUG,
                    )
                    if norm_title == scraper_utils.normalize_title(ep_title):
                        return scraper_utils.pathify_url(ep_url)
Example #16
def calculate_success(name):
    tries = kodi.get_setting('%s_try' % (name))
    fail = kodi.get_setting('%s_fail' % (name))
    tries = int(tries) if tries else 0
    fail = int(fail) if fail else 0
    rate = int(round((fail * 100.0) / tries)) if tries > 0 else 0
    rate = 100 - rate
    return rate
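
# Worked example: with 20 tries and 5 failures the failure rate rounds to 25,
# so calculate_success() returns 100 - 25 = 75; with zero tries it returns 100.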
Example #17
 def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
     self.timeout = timeout
     self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
     self.username = kodi.get_setting('%s-username' % (self.get_name()))
     self.password = kodi.get_setting('%s-password' % (self.get_name()))
     self.max_results = int(kodi.get_setting('%s-result_limit' % (self.get_name())))
     self.max_gb = kodi.get_setting('%s-size_limit' % (self.get_name()))
     self.max_bytes = int(self.max_gb) * 1024 * 1024 * 1024
Example #19
def format_source_label(item):
    color = kodi.get_setting('debrid_color') or 'green'
    label = item['class'].format_source_label(item)
    label = '[%s] %s' % (item['class'].get_name(), label)
    if kodi.get_setting('show_debrid') == 'true' and 'debrid' in item and item['debrid']:
        label = '[COLOR %s]%s[/COLOR]' % (color, label)
        
    if 'debrid' in item and item['debrid']:
        label += ' (%s)' % (', '.join(item['debrid']))
    item['label'] = label
    return label
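
# Illustrative result for a debrid-hosted source with show_debrid enabled:
# '[COLOR green][SomeScraper] 720p HDRip[/COLOR] (Real-Debrid)'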
 def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
     self.timeout = timeout
     if kodi.get_setting('%s-use_https' % (self.__class__.base_name)) == 'true':
         scheme = 'https'
         prefix = 'www'
     else:
         scheme = 'http'
         prefix = 'http'
     base_url = kodi.get_setting('%s-base_url' % (self.__class__.base_name))
     self.base_url = scheme + '://' + prefix + '.' + base_url
     self.username = kodi.get_setting('%s-username' % (self.__class__.base_name))
     self.password = kodi.get_setting('%s-password' % (self.__class__.base_name))
Example #21
def choose_list(Trakt_API, translations, username=None):
    i18n = translations.i18n
    trakt_api = Trakt_API(kodi.get_setting('trakt_oauth_token'), kodi.get_setting('use_https') == 'true', timeout=int(kodi.get_setting('trakt_timeout')))
    lists = trakt_api.get_lists(username)
    if username is None: lists.insert(0, {'name': 'watchlist', 'ids': {'slug': WATCHLIST_SLUG}})
    if lists:
        dialog = xbmcgui.Dialog()
        index = dialog.select(i18n('pick_a_list'), [list_data['name'] for list_data in lists])
        if index > -1:
            return (lists[index]['ids']['slug'], lists[index]['name'])
    else:
        kodi.notify(msg=i18n('no_lists_for_user') % (username), duration=5000)
def download_trailer(trailer_url, title, year=''):
    path = kodi.get_setting('download_path')
    while not path:
        ret = xbmcgui.Dialog().yesno(kodi.get_name(), i18n('no_download_path'), nolabel=i18n('cancel'), yeslabel=i18n('set_it_now'))
        if not ret:
            return

        kodi.show_settings()
        path = kodi.get_setting('download_path')
        
    trailer_url = local_utils.resolve_trailer(trailer_url)
    file_name = utils.create_legal_filename(title, year)
    utils.download_media(trailer_url, path, file_name, translations)
Example #23
def get_ua():
    try: last_gen = int(kodi.get_setting('last_ua_create'))
    except: last_gen = 0
    if not kodi.get_setting('current_ua') or last_gen < (time.time() - (7 * 24 * 60 * 60)):
        index = random.randrange(len(RAND_UAS))
        versions = {'win_ver': random.choice(WIN_VERS), 'feature': random.choice(FEATURES), 'br_ver': random.choice(BR_VERS[index])}
        user_agent = RAND_UAS[index].format(**versions)
        # logger.log('Creating New User Agent: %s' % (user_agent), log_utils.LOGDEBUG)
        kodi.set_setting('current_ua', user_agent)
        kodi.set_setting('last_ua_create', str(int(time.time())))
    else:
        user_agent = kodi.get_setting('current_ua')
    return user_agent
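
# The constants get_ua() draws from are not shown on this page; hypothetical,
# abbreviated examples of the shape the code above assumes:
WIN_VERS = ['Windows NT 10.0', 'Windows NT 6.1']
FEATURES = ['; WOW64', '; Win64; x64']
BR_VERS = [['60.0.3112.90', '61.0.3163.79']]
RAND_UAS = ['Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) '
            'Chrome/{br_ver} Safari/537.36']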
Example #24
def show_next_up(last_label, sf_begin):
    token = kodi.get_setting('trakt_oauth_token')
    if token and xbmc.getInfoLabel('Container.PluginName') == kodi.get_id() and xbmc.getInfoLabel('Container.Content') == 'tvshows':
        if xbmc.getInfoLabel('ListItem.label') != last_label:
            sf_begin = time.time()

        last_label = xbmc.getInfoLabel('ListItem.label')
        if sf_begin and (time.time() - sf_begin) >= int(kodi.get_setting('next_up_delay')):
            liz_url = xbmc.getInfoLabel('ListItem.FileNameAndPath')
            queries = kodi.parse_query(liz_url[liz_url.find('?'):])
            if 'trakt_id' in queries:
                try: list_size = int(kodi.get_setting('list_size'))
                except: list_size = 30
                try: trakt_timeout = int(kodi.get_setting('trakt_timeout'))
                except: trakt_timeout = 20
                trakt_api = Trakt_API(token, kodi.get_setting('use_https') == 'true', list_size, trakt_timeout, kodi.get_setting('trakt_offline') == 'true')
                progress = trakt_api.get_show_progress(queries['trakt_id'], full=True)
                if 'next_episode' in progress and progress['next_episode']:
                    if progress['completed'] or kodi.get_setting('next_unwatched') == 'true':
                        next_episode = progress['next_episode']
                        date = utils2.make_day(utils2.make_air_date(next_episode['first_aired']))
                        if kodi.get_setting('next_time') != '0':
                            date_time = '%s@%s' % (date, utils2.make_time(utils.iso_2_utc(next_episode['first_aired']), 'next_time'))
                        else:
                            date_time = date
                        msg = '[[COLOR deeppink]%s[/COLOR]] - %sx%s' % (date_time, next_episode['season'], next_episode['number'])
                        if next_episode['title']: msg += ' - %s' % (next_episode['title'])
                        duration = int(kodi.get_setting('next_up_duration')) * 1000
                        kodi.notify(header=i18n('next_episode'), msg=msg, duration=duration)
            sf_begin = 0
    else:
        last_label = ''
    
    return last_label, sf_begin
    def __init__(self, videocache):
        """
        Initialize the DB, either MySQL or SQLite
        """   

        '''
        Use sqlite3 wherever possible, needed for newer versions of XBMC/Kodi
        Keep pysqlite2 for legacy support
        '''
        try:
            if kodi.get_setting('use_remote_db') == 'true' and \
               kodi.get_setting('db_address') and \
               kodi.get_setting('db_user') and \
               kodi.get_setting('db_pass') and \
               kodi.get_setting('db_name'):
                import mysql.connector as new_database
                logger.log_notice('Loading MySQLdb as DB engine version: %s' % new_database.version.VERSION_TEXT)
                self.DB_Type = 'mysql'
            else:
                raise ValueError('MySQL not enabled or not setup correctly')
        except:
            from sqlite3 import dbapi2 as new_database
            logger.log_notice('Loading sqlite3 as DB engine version: %s' % new_database.sqlite_version)
            self.DB_Type = 'sqlite'

        self.videocache = videocache
        self.database = new_database

        if self.DB_Type == 'mysql':
            class MySQLCursorDict(self.database.cursor.MySQLCursor):
                def _row_to_python(self, rowdata, desc=None):
                    row = super(MySQLCursorDict, self)._row_to_python(rowdata, desc)
                    if row:
                        return dict(zip(self.column_names, row))
                    return None
            db_address = kodi.get_setting('db_address')
            db_port = kodi.get_setting('db_port')
            if db_port: db_address = '%s:%s' % (db_address, db_port)
            db_user = kodi.get_setting('db_user')
            db_pass = kodi.get_setting('db_pass')
            db_name = kodi.get_setting('db_name')
            self.dbcon = self.database.connect(database=db_name, user=db_user, password=db_pass, host=db_address, buffered=True)
            self.dbcur = self.dbcon.cursor(cursor_class=MySQLCursorDict, buffered=True)
        else:
            self.dbcon = self.database.connect(videocache)
            self.dbcon.row_factory = self.database.Row # return results indexed by field names and not numbers so we can convert to dict
            self.dbcur = self.dbcon.cursor()

        # initialize cache db
        self.__create_cache_db()
Example #26
def url_exists(video):
    """
    check each source for a url for this video; return True as soon as one is found. If none are found, return False
    """
    max_timeout = int(kodi.get_setting("source_timeout"))
    log_utils.log("Checking for Url Existence: |%s|" % (video), log_utils.LOGDEBUG)
    for cls in relevant_scrapers(video.video_type):
        if kodi.get_setting("%s-sub_check" % (cls.get_name())) == "true":
            scraper_instance = cls(max_timeout)
            url = scraper_instance.get_url(video)
            if url:
                log_utils.log("Found url for |%s| @ %s: %s" % (video, cls.get_name(), url), log_utils.LOGDEBUG)
                return True

    log_utils.log("No url found for: |%s|" % (video))
    return False
Example #27
	def _callnetworks(self, uri, data=None, params=None, auth=False, cache=False, timeout=None):
		url = '%s%s' % (BASE_URL, uri)
		if timeout is not None: self.timeout = timeout
		json_data = json.dumps(data) if data else None
		headers = {'Content-Type': 'application/json', 'trakt-api-key': CLIENT_ID, 'trakt-api-version': 2}
		if auth:
			self._authorize()
			if self.token is None:
				raise TraktError('Trakt Authorization Required: 400')
			headers.update({'Authorization': 'Bearer %s' % (self.token)})
		#url = '%s%s' % (BASE_URL, uri)
		#print "URL IS = "+url
		if params and not uri.endswith('/token'):
			params['limit'] = 200
		else:
			params = {'limit': 200}
		url = url + '?' + urllib.urlencode(params)
		#START CACHE STUFF
		created, cached_result = cache_stat.get_cached_url(url)
		now = time.time()
		#print "API NOW TIME IS :"+str(now)
		limit = 60 * 60 * int(kodi.get_setting('cache_limit'))
		#print "API LIMIT IS : "+str(limit)
		age = now - created
		#print "API AGE IS :"+str(age)
		if cached_result and age < limit:
			result = cached_result
			#print 'Using cached result for: %s' % (url)
			response = json.loads(result)
			return response
		#END CACHE STUFF
		else:
			try:
				request = urllib2.Request(url, data=json_data, headers=headers)
				f = urllib2.urlopen(request, timeout=self.timeout)
				result = f.read()
				response = json.loads(result)
			except HTTPError as e:
				print "ERROR IS  = "+str(e)
				kodi.notify(header='Trakt Error',msg='(error) %s  %s' % (str(e), ''),duration=5000,sound=None)
				if not uri.endswith('/token'):
					print "ERROR IS  = "+str(e)
					kodi.notify(header='Trakt Error',msg='(error) %s  %s' % (str(e), ''),duration=5000,sound=None)
					#ADDON.show_error_dialog(['Trakt Error', 'HTTP ERROR', str(e)])
					#raise TraktError('Trakt-HTTP-Error: %s' % e)
				return False
			except URLError as e:
				print "URLERROR IS  = "+str(e)
				#ADDON.log(url, LOG_LEVEL.VERBOSE)
				if not uri.endswith('/token'):
					#ADDON.show_error_dialog(['Trakt Error', 'URLLib ERROR', str(e)])
					kodi.notify(header='Trakt Error',msg='(error) %s  %s' % (str(e), ''),duration=5000,sound=None)
					raise TraktError('Trakt-URL-Error: %s' % e)
				return False
			else:
				if cache == 'true':
					cache_stat.set_cache_url(url, result)
				return response
 def _get_episode_url(self, show_url, video):
     force_title = scraper_utils.force_title(video)
     title_fallback = kodi.get_setting('title-fallback') == 'true'
     norm_title = scraper_utils.normalize_title(video.ep_title)
     page_url = [show_url]
     too_old = False
     while page_url and not too_old:
         url = scraper_utils.urljoin(self.base_url, page_url[0])
         html = self._http_get(url, require_debrid=True, cache_limit=1)
         headings = re.findall('<h2>\s*<a\s+href="([^"]+)[^>]+>(.*?)</a>', html)
         posts = [r.content for r in dom_parser2.parse_dom(html, 'div', {'id': re.compile('post-\d+')})]
         for heading, post in zip(headings, posts):
             if self.__too_old(post):
                 too_old = True
                 break
             if CATEGORIES[VIDEO_TYPES.TVSHOW] in post and show_url in post:
                 url, title = heading
                 if not force_title:
                     if scraper_utils.release_check(video, title, require_title=False):
                         return scraper_utils.pathify_url(url)
                 else:
                     if title_fallback and norm_title:
                         match = re.search('<strong>(.*?)</strong>', post)
                         if match and norm_title == scraper_utils.normalize_title(match.group(1)):
                             return scraper_utils.pathify_url(url)
             
         page_url = dom_parser2.parse_dom(html, 'a', {'class': 'nextpostslink'}, req='href')
         if page_url: page_url = [page_url[0].attrs['href']]
Example #29
def record_failures(fails, counts=None):
    if counts is None: counts = {}

    for name in fails:
        setting = '%s_last_results' % (name)
        # remove timeouts from counts so they aren't double counted
        if name in counts: del counts[name]
        if int(kodi.get_setting(setting)) > -1:
            accumulate_setting(setting, 5)
    
    for name in counts:
        setting = '%s_last_results' % (name)
        if counts[name]:
            kodi.set_setting(setting, '0')
        elif int(kodi.get_setting(setting)) > -1:
            accumulate_setting(setting)
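
# accumulate_setting() is referenced above but not shown; a minimal sketch of
# what the calls imply: add 'addend' to an integer setting, treating a missing
# value as 0.
def accumulate_setting(setting, addend=1):
    cur_value = kodi.get_setting(setting)
    cur_value = int(cur_value) if cur_value else 0
    kodi.set_setting(setting, str(cur_value + addend))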
Example #30
        def onInit(self):
            log_utils.log('onInit:', log_utils.LOGDEBUG)
            self.OK = False
            self.radio_buttons = []
            posy = starty
            for label in RADIO_BUTTONS:
                self.radio_buttons.append(self.__get_radio_button(posx, posy, label))
                posy += gap
            
            try: responses = json.loads(kodi.get_setting('prev_responses'))
            except: responses = [True] * len(self.radio_buttons)
            if len(responses) < len(self.radio_buttons):
                responses += [True] * (len(self.radio_buttons) - len(responses))
            
            self.addControls(self.radio_buttons)
            last_button = None
            for response, radio_button in zip(responses, self.radio_buttons):
                radio_button.setSelected(response)
                if last_button is not None:
                    radio_button.controlUp(last_button)
                    radio_button.controlLeft(last_button)
                    last_button.controlDown(radio_button)
                    last_button.controlRight(radio_button)
                last_button = radio_button

            continue_ctrl = self.getControl(CONTINUE_BUTTON)
            cancel_ctrl = self.getControl(CANCEL_BUTTON)
            self.radio_buttons[0].controlUp(cancel_ctrl)
            self.radio_buttons[0].controlLeft(cancel_ctrl)
            self.radio_buttons[-1].controlDown(continue_ctrl)
            self.radio_buttons[-1].controlRight(continue_ctrl)
            continue_ctrl.controlUp(self.radio_buttons[-1])
            continue_ctrl.controlLeft(self.radio_buttons[-1])
            cancel_ctrl.controlDown(self.radio_buttons[0])
            cancel_ctrl.controlRight(self.radio_buttons[0])
Example #31
def get_force_title_list():
    filter_str = kodi.get_setting('force_title_match')
    filter_list = filter_str.split('|') if filter_str else []
    return filter_list
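
# For context: judging by its callers in these examples, scraper_utils.force_title()
# is plausibly just a membership test against this list; a hedged sketch:
def force_title(video):
    return str(video.trakt_id) in get_force_title_list()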
Example #32
def mainSearch(url):

    if '|SPLIT|' in url: url, site = url.split('|SPLIT|')
    else: site = 'all'  # assumption: search all sources when no site marker is present
    term = url
    if term == "null": term = kodi.get_keyboard('Search %s' % kodi.get_name())

    if term:
        search_on_off = kodi.get_setting("search_setting")
        if search_on_off == "true":
            delTerm(term)
            addTerm(term)

        display_term = term
        term = urllib.quote_plus(term)
        term = term.lower()

        if site == 'all':
            sources = __all__
            search_sources = []
            for i in sources:
                try:
                    if eval(i + ".search_tag") == 1: search_sources.append(i)
                except:
                    pass

            if search_sources:
                i = 0
                source_num = 0
                failed_list = ''
                line1 = kodi.giveColor('Searching: ',
                                       'white') + kodi.giveColor(
                                           '%s', 'orangered')
                line2 = kodi.giveColor('Found: %s videos', 'white')
                line3 = kodi.giveColor(
                    'Source: %s of ' + str(len(search_sources)), 'white')

                kodi.dp.create(kodi.get_name(), '', line2, '')
                xbmc.executebuiltin('Dialog.Close(busydialog)')
                for u in sorted(search_sources):
                    if kodi.dp.iscanceled(): break
                    try:
                        i += 1
                        progress = 100 * int(i) / len(search_sources)
                        kodi.dp.update(progress, line1 % u.title(),
                                       line2 % str(source_num), line3 % str(i))
                        search_url = eval(u + ".search_base") % term
                        source_n = eval(u + ".content('%s',True)" % search_url)
                        try:
                            source_n = int(source_n)
                        except:
                            source_n = 0
                        if (not source_n):
                            if failed_list == '': failed_list += str(u).title()
                            else: failed_list += ', %s' % str(u).title()
                        else: source_num += int(source_n)
                    except Exception as e:
                        log_utils.log(
                            'Error searching %s :: Error: %s' %
                            (u.title(), str(e)), log_utils.LOGERROR)
                        pass
                kodi.dp.close()
                if failed_list != '':
                    kodi.notify(msg='%s failed to return results.' %
                                failed_list,
                                duration=4000,
                                sound=True)
                    log_utils.log(
                        'Scrapers failing to return search results are :: : %s'
                        % failed_list, log_utils.LOGERROR)
                else:
                    kodi.notify(msg='%s results found.' % str(source_num),
                                duration=4000,
                                sound=True)
                xbmcplugin.setContent(kodi.syshandle, 'movies')
                xbmcplugin.endOfDirectory(kodi.syshandle, cacheToDisc=True)
                utils.setView('search')
        else:
            search_url = eval(site + ".search_base") % term
            eval(site + ".content('%s')" % search_url)
    else:
        kodi.notify(msg='Blank searches are not allowed.')
        quit()
Example #33
def get_progress_skip_list():
    filter_str = kodi.get_setting('progress_skip_cache')
    filter_list = filter_str.split('|') if filter_str else []
    return filter_list
Example #34
except ImportError:
    from pysqlite2 import dbapi2 as db, OperationalError

logger = log_utils.Logger.get_logger(__name__)
logger.disable()
addonInfo = xbmcaddon.Addon().getAddonInfo

try:
    cache_path = kodi.translate_path(os.path.join(kodi.get_profile(), 'cache'))
    if not os.path.exists(cache_path):
        os.makedirs(cache_path)
except Exception as e:
    logger.log('Failed to create cache: %s: %s' % (cache_path, e),
               log_utils.LOGWARNING)

cache_enabled = kodi.get_setting('use_cache') == 'true'


def reset_cache():
    try:
        shutil.rmtree(cache_path)
        return True
    except Exception as e:
        logger.log('Failed to Reset Cache: %s' % (e), log_utils.LOGWARNING)
        return False


def _get_func(name, args=None, kwargs=None, cache_limit=1):
    if not cache_enabled: return False, None
    now = time.time()
    max_age = now - (cache_limit * 60 * 60)
Example #35
    def _cached_http_get(self,
                         url,
                         base_url,
                         timeout,
                         params=None,
                         data=None,
                         multipart_data=None,
                         headers=None,
                         cookies=None,
                         allow_redirect=True,
                         method=None,
                         require_debrid=False,
                         read_error=False,
                         cache_limit=8):
        if require_debrid:
            if Scraper.debrid_resolvers is None:
                Scraper.debrid_resolvers = [
                    resolver for resolver in urlresolver.relevant_resolvers()
                    if resolver.isUniversal()
                ]
            if not Scraper.debrid_resolvers:
                logger.log(
                    '%s requires debrid: %s' %
                    (self.__module__, Scraper.debrid_resolvers),
                    log_utils.LOGDEBUG)
                return ''

        if cookies is None: cookies = {}
        if timeout == 0: timeout = None
        if headers is None: headers = {}
        if url.startswith('//'): url = 'http:' + url
        referer = headers['Referer'] if 'Referer' in headers else base_url
        if params:
            if url == base_url and not url.endswith('/'):
                url += '/'

            parts = urlparse.urlparse(url)
            if parts.query:
                params.update(scraper_utils.parse_query(url))
                url = urlparse.urlunparse(
                    (parts.scheme, parts.netloc, parts.path, parts.params, '',
                     parts.fragment))

            url += '?' + urllib.urlencode(params)
        logger.log(
            'Getting Url: %s cookie=|%s| data=|%s| extra headers=|%s|' %
            (url, cookies, data, headers), log_utils.LOGDEBUG)
        if data is not None:
            if not isinstance(data, basestring):
                data = urllib.urlencode(data, True)

        if multipart_data is not None:
            headers['Content-Type'] = 'multipart/form-data; boundary=X-X-X'
            data = multipart_data

        _created, _res_header, html = self.db_connection().get_cached_url(
            url, data, cache_limit)
        if html:
            logger.log('Returning cached result for: %s' % (url),
                       log_utils.LOGDEBUG)
            return html

        try:
            self.cj = self._set_cookies(base_url, cookies)
            if isinstance(url, unicode): url = url.encode('utf-8')
            request = urllib2.Request(url, data=data)
            headers = headers.copy()
            request.add_header('User-Agent', scraper_utils.get_ua())
            request.add_header('Accept', '*/*')
            request.add_header('Accept-Encoding', 'gzip')
            request.add_unredirected_header('Host', request.get_host())
            if referer: request.add_unredirected_header('Referer', referer)
            if 'Referer' in headers: del headers['Referer']
            if 'Host' in headers: del headers['Host']
            for key, value in headers.iteritems():
                request.add_header(key, value)
            self.cj.add_cookie_header(request)
            if not allow_redirect:
                opener = urllib2.build_opener(NoRedirection)
                urllib2.install_opener(opener)
            else:
                opener = urllib2.build_opener(urllib2.HTTPRedirectHandler)
                urllib2.install_opener(opener)
                opener2 = urllib2.build_opener(
                    urllib2.HTTPCookieProcessor(self.cj))
                urllib2.install_opener(opener2)

            if method is not None: request.get_method = lambda: method.upper()
            response = urllib2.urlopen(request, timeout=timeout)
            self.cj.extract_cookies(response, request)
            if kodi.get_setting('cookie_debug') == 'true':
                logger.log(
                    'Response Cookies: %s - %s' %
                    (url, scraper_utils.cookies_as_str(self.cj)),
                    log_utils.LOGDEBUG)
            self.cj._cookies = scraper_utils.fix_bad_cookies(self.cj._cookies)
            self.cj.save(ignore_discard=True)
            if not allow_redirect and (
                    response.getcode() in [301, 302, 303, 307]
                    or response.info().getheader('Refresh')):
                if response.info().getheader('Refresh') is not None:
                    refresh = response.info().getheader('Refresh')
                    return refresh.split(';')[-1].split('url=')[-1]
                else:
                    redir_url = response.info().getheader('Location')
                    if redir_url.startswith('='):
                        redir_url = redir_url[1:]
                    return redir_url

            content_length = response.info().getheader('Content-Length', 0)
            if int(content_length) > MAX_RESPONSE:
                logger.log(
                    'Response exceeded allowed size. %s => %s / %s' %
                    (url, content_length, MAX_RESPONSE), log_utils.LOGWARNING)

            if method == 'HEAD':
                return ''
            else:
                if response.info().get('Content-Encoding') == 'gzip':
                    html = ungz(response.read(MAX_RESPONSE))
                else:
                    html = response.read(MAX_RESPONSE)
        except urllib2.HTTPError as e:
            if e.info().get('Content-Encoding') == 'gzip':
                html = ungz(e.read(MAX_RESPONSE))
            else:
                html = e.read(MAX_RESPONSE)

            if CF_CAPCHA_ENABLED and e.code == 403 and 'cf-captcha-bookmark' in html:
                html = cf_captcha.solve(url, self.cj, scraper_utils.get_ua(),
                                        self.get_name())
                if not html:
                    return ''
            elif e.code == 503 and 'cf-browser-verification' in html:
                html = cloudflare.solve(url,
                                        self.cj,
                                        scraper_utils.get_ua(),
                                        extra_headers=headers)
                if not html:
                    return ''
            else:
                logger.log(
                    'Error (%s) during scraper http get: %s' % (str(e), url),
                    log_utils.LOGWARNING)
                if not read_error:
                    return ''
        except Exception as e:
            logger.log(
                'Error (%s) during scraper http get: %s' % (str(e), url),
                log_utils.LOGWARNING)
            return ''

        self.db_connection().cache_url(url, html, data)
        return html
Example #36
def srt_indicators_enabled():
    return (kodi.get_setting('enable-subtitles') == 'true'
            and (kodi.get_setting('subtitle-indicator') == 'true'))
Example #37
import xbmcaddon
import xbmcvfs
import xbmcgui
import xbmcplugin
import kodi
import pyaes
from constants import *
from kodi import i18n

THEME_LIST = [
    'Shine', 'Luna_Blue', 'Iconic', 'Simple', 'SALTy', 'SALTy (Blended)',
    'SALTy (Blue)', 'SALTy (Frog)', 'SALTy (Green)', 'SALTy (Macaw)',
    'SALTier (Green)', 'SALTier (Orange)', 'SALTier (Red)', 'IGDB',
    'Simply Elegant', 'IGDB Redux'
]
THEME = THEME_LIST[int(kodi.get_setting('theme'))]
if xbmc.getCondVisibility('System.HasAddon(script.salts.themepak)'):
    themepak_path = xbmcaddon.Addon('script.salts.themepak').getAddonInfo(
        'path')
else:
    themepak_path = kodi.get_path()
THEME_PATH = os.path.join(themepak_path, 'art', 'themes', THEME)
PLACE_POSTER = os.path.join(kodi.get_path(), 'resources', 'place_poster.png')

SORT_FIELDS = [(SORT_LIST[int(kodi.get_setting('sort1_field'))],
                SORT_SIGNS[kodi.get_setting('sort1_order')]),
               (SORT_LIST[int(kodi.get_setting('sort2_field'))],
                SORT_SIGNS[kodi.get_setting('sort2_order')]),
               (SORT_LIST[int(kodi.get_setting('sort3_field'))],
                SORT_SIGNS[kodi.get_setting('sort3_order')]),
               (SORT_LIST[int(kodi.get_setting('sort4_field'))],
Example #38
 def remove_from_collection(self, media, id, id_type='trakt'):
     if kodi.get_setting('debug') == "true":
         print "Deleting Trakt Id: " + id + " from Collection"
     uri = '/sync/collection/remove'
     data = {media: [{'ids': {id_type: id}}]}
     return self._call(uri, data, auth=True)
Example #39
 def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
     self.timeout = timeout
     self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
Example #40
 def delete_from_watchlist(self, media, id, id_type='trakt'):
     if kodi.get_setting('debug') == "true":
         print "Deleting Trakt Id: " + id + " from Watchlist"
     uri = '/sync/watchlist/remove'
     data = {media: [{'ids': {id_type: id}}]}
     return self._call(uri, data, auth=True)
Exemple #41
0
    def _callnetworks(self,
                      uri,
                      data=None,
                      params=None,
                      auth=False,
                      cache=False,
                      timeout=None):
        url = '%s%s' % (BASE_URL, uri)
        if timeout is not None: self.timeout = timeout
        json_data = json.dumps(data) if data else None
        headers = {
            'Content-Type': 'application/json',
            'trakt-api-key': CLIENT_ID,
            'trakt-api-version': '2'
        }
        if auth:
            self._authorize()
            if self.token is None:
                raise TraktError('Trakt Authorization Required: 400')
            headers.update({'Authorization': 'Bearer %s' % (self.token)})
        if params and not uri.endswith('/token'):
            params['limit'] = 200
        else:
            params = {'limit': 200}
        url = url + '?' + urllib.urlencode(params)

        # Serve from the URL cache while the entry is younger than cache_limit
        created, cached_result = cache_stat.get_cached_url(url)
        now = time.time()
        limit = 60 * 60 * int(kodi.get_setting('cache_limit'))
        age = now - created
        if cached_result and age < limit:
            return json.loads(cached_result)

        try:
            request = urllib2.Request(url, data=json_data, headers=headers)
            f = urllib2.urlopen(request, timeout=self.timeout)
            result = f.read()
            response = json.loads(result)
        except HTTPError as e:
            kodi.notify(header='Trakt Error',
                        msg='(error) %s' % (str(e)),
                        duration=5000,
                        sound=None)
            return False
        except URLError as e:
            if not uri.endswith('/token'):
                kodi.notify(header='Trakt Error',
                            msg='(error) %s' % (str(e)),
                            duration=5000,
                            sound=None)
                raise TraktError('Trakt-URL-Error: %s' % e)
            return False
        else:
            if cache == 'true':
                cache_stat.set_cache_url(url, result)
            return response
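For orientation, a minimal call through this wrapper might look like the sketch below; the client class name is an assumption, though '/shows/trending' is a real Trakt v2 endpoint.

# Hypothetical usage; assumes a class exposing _callnetworks and _authorize
api = Trakt()
response = api._callnetworks('/shows/trending', params={'page': 1}, cache='true')
if response is not False:
    for entry in response:
        print entry['show']['title']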
Example #42
    def add_to_watchlist(self, media, id, id_type='trakt'):
        if kodi.get_setting('debug') == "true":
            print "Adding Trakt Id: " + id + " to Watchlist"
        uri = '/sync/watchlist'
        data = {media: [{'ids': {id_type: id}}]}
        return self._call(uri, data, auth=True)
Example #43
    def __init__(self):
        self.token = None
        self.timeout = int(kodi.get_setting('timeout'))
        self.limit = int(kodi.get_setting('list_size'))
        self.cache_limit = int(kodi.get_setting('cache_limit'))
        self.cacheset = kodi.get_setting('cache_set')
Example #44
# -*- coding: utf-8 -*-
#

import urllib2, urllib
from urllib2 import URLError, HTTPError
from datetime import datetime
import re, time
import json
import traceback
import kodi
from tm_libs import cache_stat

use_https = kodi.get_setting('use_https')
list_size = int(kodi.get_setting('list_size'))
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
if use_https == 'true':
    BASE_URL = "https://api-v2launch.trakt.tv"
else:
    BASE_URL = "http://api-v2launch.trakt.tv"

CLIENT_ID = 'a19aa7f7cf7fa27437254cc27fcba454664360086949e80029f83874fa455e8f'
SECRET_ID = '5872236e7c198363867d89014ee334281648a7f433f9e4c362e5519e334693d1'

PIN_URL = 'http://trakt.tv/pin/7558'
DAYS_TO_GET = 21
DECAY = 2
ADDON_NAME = 'Velocity'


class TraktError(Exception):
    def __init__(self, value):
        self.value = value
Example #45
def srt_show_enabled():
    return (kodi.get_setting('enable-subtitles') == 'true'
            and (kodi.get_setting('subtitle-show') == 'true'))
Example #46
def download_media(url, path, file_name):
    try:
        progress = int(kodi.get_setting('down_progress'))
        request = urllib2.Request(url)
        request.add_header('User-Agent', USER_AGENT)
        request.add_unredirected_header('Host', request.get_host())
        response = urllib2.urlopen(request)

        content_length = 0
        if 'Content-Length' in response.info():
            content_length = int(response.info()['Content-Length'])

        file_name = file_name.replace('.strm', get_extension(url, response))
        full_path = os.path.join(path, file_name)
        log_utils.log('Downloading: %s -> %s' % (url, full_path),
                      log_utils.LOGDEBUG)

        path = xbmc.makeLegalFilename(path)
        if not xbmcvfs.exists(path):
            try:
                try:
                    xbmcvfs.mkdirs(path)
                except:
                    os.mkdir(path)
            except Exception as e:
                raise Exception(i18n('failed_create_dir'))

        file_desc = xbmcvfs.File(full_path, 'w')
        total_len = 0
        if progress:
            if progress == PROGRESS.WINDOW:
                dialog = xbmcgui.DialogProgress()
            else:
                dialog = xbmcgui.DialogProgressBG()

            dialog.create('Stream All The Sources',
                          i18n('downloading') % (file_name))
            dialog.update(0)
        cancel = False
        while True:
            data = response.read(CHUNK_SIZE)
            if not data:
                break

            if progress == PROGRESS.WINDOW and dialog.iscanceled():
                cancel = True
                break

            total_len += len(data)
            if not file_desc.write(data):
                raise Exception(i18n('failed_write_file'))

            percent_progress = (
                total_len) * 100 / content_length if content_length > 0 else 0
            log_utils.log(
                'Position : %s / %s = %s%%' %
                (total_len, content_length, percent_progress),
                log_utils.LOGDEBUG)
            if progress == PROGRESS.WINDOW:
                dialog.update(percent_progress)
            elif progress == PROGRESS.BACKGROUND:
                dialog.update(percent_progress, 'Stream All The Sources')

        file_desc.close()
        if progress:
            dialog.close()

        if not cancel:
            kodi.notify(msg=i18n('download_complete') % (file_name),
                        duration=5000)
            log_utils.log('Download Complete: %s -> %s' % (url, full_path),
                          log_utils.LOGDEBUG)

    except Exception as e:
        log_utils.log(
            'Error (%s) during download: %s -> %s' % (str(e), url, file_name),
            log_utils.LOGERROR)
        kodi.notify(msg=i18n('download_error') % (str(e), file_name),
                    duration=5000)
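A minimal invocation sketch, reusing the download_location setting that appears later in this collection; the URL and file name are made up.

# Hypothetical call; the .strm extension is swapped for the real one above
download_media('http://example.com/episode.mp4',
               kodi.get_setting('download_location'),
               'episode.strm')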
Example #47
def srt_download_enabled():
    return (kodi.get_setting('enable-subtitles') == 'true'
            and (kodi.get_setting('subtitle-download') == 'true'))
Example #48
    def _blog_get_url(self, video, delim='.'):
        url = None
        result = self.db_connection().get_related_url(video.video_type,
                                                      video.title, video.year,
                                                      self.get_name(),
                                                      video.season,
                                                      video.episode)
        if result:
            url = result[0][0]
            logger.log(
                'Got local related url: |%s|%s|%s|%s|%s|' %
                (video.video_type, video.title, video.year, self.get_name(),
                 url), log_utils.LOGDEBUG)
        else:
            try:
                select = int(kodi.get_setting('%s-select' % (self.get_name())))
            except:
                select = 0
            if video.video_type == VIDEO_TYPES.EPISODE:
                temp_title = re.sub('[^A-Za-z0-9 ]', '', video.title)
                if not scraper_utils.force_title(video):
                    search_title = '%s S%02dE%02d' % (
                        temp_title, int(video.season), int(video.episode))
                    if isinstance(video.ep_airdate, datetime.date):
                        fallback_search = '%s %s' % (
                            temp_title,
                            video.ep_airdate.strftime(
                                '%Y{0}%m{0}%d'.format(delim)))
                    else:
                        fallback_search = ''
                else:
                    if not video.ep_title: return None
                    search_title = '%s %s' % (temp_title, video.ep_title)
                    fallback_search = ''
            else:
                search_title = video.title
                fallback_search = ''

            results = self.search(video.video_type, search_title, video.year)
            if not results and fallback_search:
                results = self.search(video.video_type, fallback_search,
                                      video.year)

            if results:
                # TODO: First result isn't always the most recent...
                best_result = results[0]
                if select != 0:
                    best_qorder = 0
                    for result in results:
                        if 'quality' in result:
                            quality = result['quality']
                        else:
                            match = re.search('\((\d+p)\)', result['title'])
                            if match:
                                quality = scraper_utils.height_get_quality(
                                    match.group(1))
                            else:
                                match = re.search('\[(.*)\]$', result['title'])
                                q_str = match.group(1) if match else ''
                                quality = scraper_utils.blog_get_quality(
                                    video, q_str, '')

                        logger.log(
                            'result: |%s|%s|%s|' %
                            (result, quality, Q_ORDER[quality]),
                            log_utils.LOGDEBUG)
                        if Q_ORDER[quality] > best_qorder:
                            logger.log(
                                'Setting best as: |%s|%s|%s|' %
                                (result, quality, Q_ORDER[quality]),
                                log_utils.LOGDEBUG)
                            best_result = result
                            best_qorder = Q_ORDER[quality]

                url = best_result['url']
                self.db_connection().set_related_url(video.video_type,
                                                     video.title, video.year,
                                                     self.get_name(), url,
                                                     video.season,
                                                     video.episode)
        return url
Example #49
def scraper_enabled(name):
    # return true if setting exists and set to true, or setting doesn't exist (i.e. '')
    return kodi.get_setting('%s-enable' % (name)) in ('true', '')
Example #50
    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
        if 'www' in self.base_url:
            self.base_url = BASE_URL  # hack base url to work
Example #51
    def get_sources(self, video):
        hosters = []
        sources = {}
        source_url = self.get_url(video)
        if not source_url or source_url == FORCE_NO_MATCH: return hosters
        page_url = scraper_utils.urljoin(self.base_url, source_url)
        movie_id, watching_url, html = self.__get_source_page(
            video.video_type, page_url)

        links = []
        for match in dom_parser2.parse_dom(html,
                                           'li', {'class': 'ep-item'},
                                           req=['data-id', 'data-server']):
            label = dom_parser2.parse_dom(match.content, 'a', req='title')
            if not label: continue
            if video.video_type == VIDEO_TYPES.EPISODE and not self.__episode_match(
                    video, label[0].attrs['title']):
                continue
            links.append((match.attrs['data-server'], match.attrs['data-id']))

        for link_type, link_id in links:
            if link_type in ['12', '13', '14', '15']:
                url = scraper_utils.urljoin(
                    self.base_url, PLAYLIST_URL1.format(ep_id=link_id))
                sources.update(self.__get_link_from_json(url))
            elif kodi.get_setting('scraper_url'):
                url = scraper_utils.urljoin(
                    self.base_url, PLAYLIST_URL2.format(ep_id=link_id))
                params = self.__get_params(movie_id, link_id, watching_url)
                if params is not None:
                    url += '?' + urllib.urlencode(params)
                sources.update(
                    self.__get_links_from_json2(url, page_url,
                                                video.video_type))

        for source in sources:
            if not source.lower().startswith('http'): continue
            if sources[source]['direct']:
                host = scraper_utils.get_direct_hostname(self, source)
                if host != 'gvideo':
                    stream_url = source + scraper_utils.append_headers(
                        {
                            'User-Agent': scraper_utils.get_ua(),
                            'Referer': page_url
                        })
                else:
                    stream_url = source
            else:
                host = urlparse.urlparse(source).hostname
                stream_url = source
            hoster = {
                'multi-part': False,
                'host': host,
                'class': self,
                'quality': sources[source]['quality'],
                'views': None,
                'rating': None,
                'url': stream_url,
                'direct': sources[source]['direct']
            }
            hosters.append(hoster)

        return hosters
Example #52
def download_media(url, path, file_name, translations, progress=None):
    try:
        if progress is None:
            progress = int(kodi.get_setting('down_progress'))

        i18n = translations.i18n
        active = progress != PROGRESS.OFF
        background = progress == PROGRESS.BACKGROUND

        with kodi.ProgressDialog(kodi.get_name(),
                                 i18n('downloading') % (file_name),
                                 background=background,
                                 active=active) as pd:
            try:
                headers = dict([
                    item.split('=') for item in (url.split('|')[1]).split('&')
                ])
                for key in headers:
                    headers[key] = urllib.unquote(headers[key])
            except:
                headers = {}
            if 'User-Agent' not in headers: headers['User-Agent'] = BROWSER_UA
            request = urllib2.Request(url.split('|')[0], headers=headers)
            response = urllib2.urlopen(request)
            if 'Content-Length' in response.info():
                content_length = int(response.info()['Content-Length'])
            else:
                content_length = 0

            file_name += '.' + get_extension(url, response)
            full_path = os.path.join(path, file_name)
            logger.log('Downloading: %s -> %s' % (url, full_path),
                       log_utils.LOGDEBUG)

            path = kodi.translate_path(xbmc.makeLegalFilename(path))
            try:
                try:
                    xbmcvfs.mkdirs(path)
                except:
                    os.makedirs(path)
            except Exception as e:
                logger.log('Path Create Failed: %s (%s)' % (e, path),
                           log_utils.LOGDEBUG)

            if not path.endswith(os.sep): path += os.sep
            if not xbmcvfs.exists(path):
                raise Exception(i18n('failed_create_dir'))

            file_desc = xbmcvfs.File(full_path, 'w')
            total_len = 0
            cancel = False
            while True:
                data = response.read(CHUNK_SIZE)
                if not data:
                    break

                if pd.is_canceled():
                    cancel = True
                    break

                total_len += len(data)
                if not file_desc.write(data):
                    raise Exception(i18n('failed_write_file'))

                percent_progress = (
                    total_len
                ) * 100 / content_length if content_length > 0 else 0
                logger.log(
                    'Position : %s / %s = %s%%' %
                    (total_len, content_length, percent_progress),
                    log_utils.LOGDEBUG)
                pd.update(percent_progress)

            file_desc.close()

        if not cancel:
            kodi.notify(msg=i18n('download_complete') % (file_name),
                        duration=5000)
            logger.log('Download Complete: %s -> %s' % (url, full_path),
                       log_utils.LOGDEBUG)

    except Exception as e:
        logger.log(
            'Error (%s) during download: %s -> %s' % (str(e), url, file_name),
            log_utils.LOGERROR)
        kodi.notify(msg=i18n('download_error') % (str(e), file_name),
                    duration=5000)
Example #53
def show_requires_source(trakt_id):
    show_str = kodi.get_setting('exists_list')
    show_list = show_str.split('|')
    return str(trakt_id) in show_list
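The exists_list setting is a pipe-delimited string of trakt IDs, so membership is checked on the string form of the id. A quick illustration; the id values are made up.

# Hypothetical contents of the 'exists_list' setting: '1390|60300'
kodi.set_setting('exists_list', '1390|60300')
assert show_requires_source(1390)      # str(1390) is in the list
assert not show_requires_source(999)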
Example #54
buildDirectory = utils.buildDir

download_icon = xbmc.translatePath(
    os.path.join(
        'special://home/addons/script.xxxodus.artwork/resources/art/main',
        'downloads.png'))


class MyOpener(FancyURLopener):
    version = 'python-requests/2.9.1'


myopener = MyOpener()
urlretrieve = myopener.retrieve
urlopen = myopener.open
download_location = kodi.get_setting("download_location")
download_folder = xbmc.translatePath(download_location)

databases = xbmc.translatePath(os.path.join(kodi.datafolder, 'databases'))
downloaddb = xbmc.translatePath(os.path.join(databases, 'downloads.db'))

if (not os.path.exists(databases)): os.makedirs(databases)
conn = sqlite3.connect(downloaddb)
c = conn.cursor()
try:
    c.executescript("CREATE TABLE IF NOT EXISTS downloads (name, url, image);")
except:
    pass
conn.close()
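A hedged sketch of recording a download in the table created above; the row values are invented.

# Hypothetical insert into the downloads(name, url, image) table
conn = sqlite3.connect(downloaddb)
c = conn.cursor()
c.execute("INSERT INTO downloads VALUES (?, ?, ?)",
          ('Some Clip', 'http://example.com/clip.mp4', 'thumb.png'))
conn.commit()
conn.close()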

Example #55
def accumulate_setting(setting, addend=1):
    cur_value = kodi.get_setting(setting)
    cur_value = int(cur_value) if cur_value else 0
    kodi.set_setting(setting, cur_value + addend)
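Since get_setting returns strings, the helper treats an empty value as zero before incrementing. For instance (the setting name is illustrative):

# Hypothetical counter setting; '' or '4' becomes 1 or 5 respectively
accumulate_setting('search_count')
accumulate_setting('search_count', -1)  # negative addends decrement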
Example #56
def menu_on(menu):
    return kodi.get_setting('show_%s' % (menu)) == 'true'
Example #57
    def _blog_proc_results(self, html, post_pattern, date_format, video_type,
                           title, year):
        results = []
        search_date = ''
        search_sxe = ''
        if video_type == VIDEO_TYPES.EPISODE:
            match = re.search('(.*?)\s*(S\d+E\d+)\s*', title)
            if match:
                show_title, search_sxe = match.groups()
            else:
                match = re.search(
                    '(.*?)\s*(\d{4})[._ -]?(\d{2})[._ -]?(\d{2})\s*', title)
                if match:
                    show_title, search_year, search_month, search_day = match.groups(
                    )
                    search_date = '%s-%s-%s' % (search_year, search_month,
                                                search_day)
                    search_date = scraper_utils.to_datetime(
                        search_date, "%Y-%m-%d").date()
                else:
                    show_title = title
        else:
            show_title = title

        today = datetime.date.today()
        for match in re.finditer(post_pattern, html, re.DOTALL):
            post_data = match.groupdict()
            post_title = post_data['post_title']
            post_title = re.sub('<[^>]*>', '', post_title)
            if 'quality' in post_data:
                post_title += '- [%s]' % (post_data['quality'])

            try:
                filter_days = int(
                    kodi.get_setting('%s-filter' % (self.get_name())))
            except ValueError:
                filter_days = 0
            if filter_days and date_format and 'date' in post_data:
                post_data['date'] = post_data['date'].strip()
                filter_days = datetime.timedelta(days=filter_days)
                post_date = scraper_utils.to_datetime(post_data['date'],
                                                      date_format)
                if post_date:
                    post_date = post_date.date()
                else:
                    logger.log(
                        'Failed date check in %s: |%s|%s|' %
                        (self.get_name(), post_data['date'], date_format),
                        log_utils.LOGWARNING)
                    post_date = today

                if today - post_date > filter_days:
                    continue

            match_year = ''
            match_date = ''
            match_sxe = ''
            match_title = full_title = post_title
            if video_type == VIDEO_TYPES.MOVIE:
                meta = scraper_utils.parse_movie_link(post_title)
                match_year = meta['year']
            else:
                meta = scraper_utils.parse_episode_link(post_title)
                match_sxe = 'S%02dE%02d' % (int(
                    meta['season']), int(meta['episode']))
                match_date = meta['airdate']

            match_title = meta['title']
            full_title = '%s (%sp) [%s]' % (meta['title'], meta['height'],
                                            meta['extra'])
            norm_title = scraper_utils.normalize_title(show_title)
            match_norm_title = scraper_utils.normalize_title(match_title)
            title_match = norm_title and (match_norm_title in norm_title
                                          or norm_title in match_norm_title)
            year_match = not year or not match_year or year == match_year
            sxe_match = not search_sxe or (search_sxe == match_sxe)
            date_match = not search_date or (search_date == match_date)
            logger.log(
                'Blog Results: |%s|%s|%s| - |%s|%s|%s| - |%s|%s|%s| - |%s|%s|%s| (%s)'
                % (match_norm_title, norm_title, title_match, year, match_year,
                   year_match, search_date, match_date, date_match, search_sxe,
                   match_sxe, sxe_match, self.get_name()), log_utils.LOGDEBUG)
            if title_match and year_match and date_match and sxe_match:
                quality = scraper_utils.height_get_quality(meta['height'])
                result = {
                    'url': scraper_utils.pathify_url(post_data['url']),
                    'title': scraper_utils.cleanse_title(full_title),
                    'year': match_year,
                    'quality': quality
                }
                results.append(result)
        return results
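The post_pattern argument must expose the named groups read above: post_title and url, plus the optional quality and date groups. A toy pattern under that assumption (the HTML shape is invented):

# Hypothetical pattern for posts shaped like:
#   <a href="/some-show-s01e02">Some Show S01E02</a> <span class="date">2016-01-02</span>
post_pattern = (r'<a\s+href="(?P<url>[^"]+)"[^>]*>(?P<post_title>[^<]+)</a>'
                r'\s*<span class="date">(?P<date>[^<]+)</span>')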
Example #58
def get_force_progress_list():
    filter_str = kodi.get_setting('force_include_progress')
    filter_list = filter_str.split('|') if filter_str else []
    return filter_list
Example #59
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
import time
import kodi
import cProfile
import StringIO
import pstats
from xbmc import LOGDEBUG, LOGERROR, LOGFATAL, LOGINFO, LOGNONE, LOGNOTICE, LOGSEVERE, LOGWARNING  # @UnusedImport

# TODO: Remove after next Death Streams release
name = kodi.get_name()
enabled_comp = kodi.get_setting('enabled_comp')
if enabled_comp:
    enabled_comp = enabled_comp.split(',')
else:
    enabled_comp = None


def log(msg, level=LOGDEBUG, component=None):
    req_level = level
    # override message level to force logging when addon logging turned on
    debug_enabled = control.setting('addon_debug')
    debug_log = control.setting('debug.location')

    print DEBUGPREFIX + ' Debug Enabled?: ' + str(debug_enabled)
    print DEBUGPREFIX + ' Debug Log?: ' + str(debug_log)
Example #60
    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
        self.username = kodi.get_setting('%s-username' % (self.get_name()))
        self.password = kodi.get_setting('%s-password' % (self.get_name()))