Example #1
 def get_settings(cls):
     name = cls.get_name()
     settings = [
         '         <setting id="%s-enable" type="bool" label="%s %s" default="true" visible="true"/>'
         % (name, name, i18n('enabled')),
         '         <setting id="%s-sub_check" type="bool" label="    %s" default="false" visible="eq(-1,true)"/>'
         % (name, i18n('page_existence')),
     ]
     return settings
Example #2
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-3,true)"/>'
         % (name, i18n('username')))
     settings.append(
         '         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-4,true)"/>'
         % (name, i18n('password')))
     return settings
Example #3
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-filter" type="slider" range="0,180" option="int" label="     %s" default="30" visible="eq(-4,true)"/>'
         % (name, i18n('filter_results_days')))
     settings.append(
         '         <setting id="%s-select" type="enum" label="     %s" lvalues="30636|30637" default="0" visible="eq(-5,true)"/>'
         % (name, i18n('auto_select')))
     return settings
Example #4
 def get_settings(cls):
     """
     Returns a list of settings to be used for this scraper. Settings are automatically checked for updates every time scrapers are imported
     The list returned by each scraper is aggregated into a big settings.xml string, and then if it differs from the current settings xml in the Scrapers category
     the existing settings.xml fragment is removed and replaced by the new string
     """
     name = cls.get_name()
     return [
         '         <setting id="%s-enable" type="bool" label="%s %s" default="true" visible="true"/>'
         % (name, name, i18n('enabled')),
         '         <setting id="%s-base_url" type="text" label="    %s" default="%s" visible="eq(-1,true)"/>'
         % (name, i18n('base_url'), cls.base_url),
         '         <setting id="%s-sub_check" type="bool" label="    %s" default="true" visible="eq(-2,true)"/>'
         % (name, i18n('page_existence')),
     ]
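
The docstring above describes the aggregation step: every scraper's get_settings() fragment is joined into one block and written back into settings.xml only when it differs from the block already there. Below is a minimal sketch of that step under stated assumptions, not the add-on's actual implementation; the marker comments and the update_settings() helper are hypothetical names introduced for illustration.

# Hypothetical markers delimiting the scraper fragment inside settings.xml.
SETTINGS_START = '    <!-- BEGIN SCRAPER SETTINGS -->'
SETTINGS_END = '    <!-- END SCRAPER SETTINGS -->'


def update_settings(scraper_classes, settings_xml):
    # Aggregate every scraper's setting lines into one newline-joined block.
    new_block = '\n'.join(
        line for cls in scraper_classes for line in cls.get_settings())

    # Locate the block currently embedded between the markers.
    start = settings_xml.find(SETTINGS_START)
    end = settings_xml.find(SETTINGS_END)
    if start == -1 or end == -1:
        return settings_xml  # markers missing: leave the file untouched

    old_block = settings_xml[start + len(SETTINGS_START):end].strip('\n')
    if old_block == new_block:
        return settings_xml  # fragments match: no rewrite needed

    # Replace the stale fragment with the freshly aggregated one.
    return (settings_xml[:start + len(SETTINGS_START)] + '\n' +
            new_block + '\n' + settings_xml[end:])

Any class exposing a get_settings() classmethod, as in the examples here, can be passed in directly; comparing the old and new blocks first avoids rewriting settings.xml on every import.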
Example #5
def update_scraper(filename, scraper_url):
    try:
        if not filename: return
        py_path = os.path.join(kodi.get_path(), 'scrapers', filename)
        exists = os.path.exists(py_path)
        scraper_password = kodi.get_setting('scraper_password')
        if scraper_url and scraper_password:
            old_lm = None
            old_py = ''
            if exists:
                with open(py_path, 'r') as f:
                    old_py = f.read()
                    match = re.search(r'^#\s+Last-Modified:\s*(.*)', old_py)
                    if match:
                        old_lm = match.group(1).strip()

            new_lm, new_py = utils2.get_and_decrypt(scraper_url, scraper_password, old_lm)
            if new_py:
                logger.log('%s path: %s, new_py: %s, match: %s' % (filename, py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
                if old_py != new_py:
                    with open(py_path, 'w') as f:
                        f.write('# Last-Modified: %s\n' % (new_lm))
                        f.write(new_py)
                    kodi.notify(msg=utils2.i18n('scraper_updated') + filename)
                        
    except Exception as e:
        logger.log('Failure during %s scraper update: %s' % (filename, e), log_utils.LOGWARNING)
Example #6
    def resolve_link(self, link):
        playlist = super(self.__class__, self)._http_get(link, cache_limit=.5)
        try:
            ns = '{http://xspf.org/ns/0/}'
            root = ET.fromstring(playlist)
            tracks = root.findall('.//%strack' % (ns))
            locations = []
            for track in tracks:
                duration = track.find('%sduration' % (ns)).text
                try:
                    duration = int(duration)
                except (TypeError, ValueError):  # duration may be None or non-numeric
                    duration = 0
                if duration >= MIN_DURATION:
                    location = track.find('%slocation' % (ns)).text
                    locations.append({
                        'duration': duration / 1000,
                        'url': location
                    })

            if len(locations) > 1:
                result = xbmcgui.Dialog().select(i18n('choose_stream'), [
                    utils.format_time(location['duration'])
                    for location in locations
                ])
                if result > -1:
                    return locations[result]['url']
            elif locations:
                return locations[0]['url']
        except Exception as e:
            logger.log('Failure during furk playlist parse: %s' % (e),
                       log_utils.LOGWARNING)
Example #7
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-filter" type="slider" range="0,180" option="int" label="     %s" default="60" visible="eq(-3,true)"/>'
         % (name, i18n('filter_results_days')))
     return settings
Example #8
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-select" type="enum" label="     %s" lvalues="30636|30637" default="0" visible="eq(-4,true)"/>'
         % (name, i18n('auto_select')))
     return settings
Example #9
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-3,true)"/>'
         % (name, i18n('username')))
     settings.append(
         '         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-4,true)"/>'
         % (name, i18n('password')))
     settings.append(
         '         <setting id="%s-result_limit" label="     %s" type="slider" default="10" range="10,100" option="int" visible="eq(-5,true)"/>'
         % (name, i18n('result_limit')))
     settings.append(
         '         <setting id="%s-size_limit" label="     %s" type="slider" default="0" range="0,50" option="int" visible="eq(-6,true)"/>'
         % (name, i18n('size_limit')))
     return settings
Example #10
def show_next_up(last_label, sf_begin):
    token = kodi.get_setting('trakt_oauth_token')
    if (token and xbmc.getInfoLabel('Container.PluginName') == kodi.get_id()
            and xbmc.getInfoLabel('Container.Content') == 'tvshows'):
        if xbmc.getInfoLabel('ListItem.label') != last_label:
            sf_begin = time.time()

        last_label = xbmc.getInfoLabel('ListItem.label')
        if sf_begin and (time.time() - sf_begin) >= int(
                kodi.get_setting('next_up_delay')):
            liz_url = xbmc.getInfoLabel('ListItem.FileNameAndPath')
            queries = kodi.parse_query(liz_url[liz_url.find('?'):])
            if 'trakt_id' in queries:
                try:
                    list_size = int(kodi.get_setting('list_size'))
                except ValueError:
                    list_size = 30
                try:
                    trakt_timeout = int(kodi.get_setting('trakt_timeout'))
                except ValueError:
                    trakt_timeout = 20
                trakt_api = Trakt_API(
                    token,
                    kodi.get_setting('use_https') == 'true', list_size,
                    trakt_timeout,
                    kodi.get_setting('trakt_offline') == 'true')
                progress = trakt_api.get_show_progress(queries['trakt_id'],
                                                       full=True)
                if 'next_episode' in progress and progress['next_episode']:
                    if progress['completed'] or kodi.get_setting(
                            'next_unwatched') == 'true':
                        next_episode = progress['next_episode']
                        date = utils2.make_day(
                            utils2.make_air_date(next_episode['first_aired']))
                        if kodi.get_setting('next_time') != '0':
                            date_time = '%s@%s' % (
                                date,
                                utils2.make_time(
                                    utils.iso_2_utc(
                                        next_episode['first_aired']),
                                    'next_time'))
                        else:
                            date_time = date
                        msg = '[[COLOR deeppink]%s[/COLOR]] - %sx%s' % (
                            date_time, next_episode['season'],
                            next_episode['number'])
                        if next_episode['title']:
                            msg += ' - %s' % (next_episode['title'])
                        duration = int(
                            kodi.get_setting('next_up_duration')) * 1000
                        kodi.notify(header=i18n('next_episode'),
                                    msg=msg,
                                    duration=duration)
            sf_begin = 0
    else:
        last_label = ''

    return last_label, sf_begin
Example #11
 def resolve_link(self, link):
     query = scraper_utils.parse_query(link)
     if 'hash_id' in query:
         hash_id = query['hash_id'].lower()
         if self.__add_torrent(hash_id):
             browse_url = BROWSE_URL % (hash_id)
             browse_url = scraper_utils.urljoin(self.base_url, browse_url)
             js_data = self._json_get(browse_url, cache_limit=0)
             if 'content' in js_data:
                 videos = self.__get_videos(js_data['content'])
                 
                 if len(videos) > 1:
                     result = xbmcgui.Dialog().select(i18n('choose_stream'), [video['label'] for video in videos])
                     if result > -1:
                         return videos[result]['url']
                 elif videos:
                     return videos[0]['url']
Example #12
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append('         <setting id="%s-use_https" type="bool" label="     %s" default="false" visible="eq(-3,true)"/>' % (name, i18n('use_https')))
     settings.append('         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-4,true)"/>' % (name, i18n('username')))
     settings.append('         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-5,true)"/>' % (name, i18n('password')))
     settings.append('         <setting id="%s-base_url2" type="text" label="     %s %s" default="%s" visible="eq(-6,true)"/>' % (name, i18n('movies'), i18n('base_url'), cls.movie_base_url))
     settings.append('         <setting id="%s-base_url3" type="text" label="     %s %s" default="%s" visible="eq(-7,true)"/>' % (name, i18n('tv_shows'), i18n('base_url'), cls.tv_base_url))
     settings.append('         <setting id="%s-include_trans" type="bool" label="     %s" default="true" visible="eq(-8,true)"/>' % (name, i18n('include_transcodes')))
     return settings
Example #13
from deaths_lib import cf_captcha
import kodi
import log_utils  # @UnusedImport
from deaths_lib import scraper_utils
from deaths_lib.constants import FORCE_NO_MATCH
from deaths_lib.constants import Q_ORDER
from deaths_lib.constants import SHORT_MONS
from deaths_lib.constants import VIDEO_TYPES
from deaths_lib.constants import DEFAULT_TIMEOUT
from deaths_lib.db_utils import DB_Connection
from deaths_lib.utils2 import i18n, ungz

try:
    import resolveurl
except ImportError:
    kodi.notify(msg=i18n('smu_failed'), duration=5000)

logger = log_utils.Logger.get_logger()

BASE_URL = ''
COOKIEPATH = kodi.translate_path(kodi.get_profile())
MONTHS = [
    'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August',
    'September', 'October', 'November', 'December'
]
MAX_RESPONSE = 1024 * 1024 * 5
CF_CAPCHA_ENABLED = kodi.get_setting('cf_captcha') == 'true'


class ScrapeError(Exception):
    pass