Example #1
 def get_settings(cls):
     name = cls.get_name()
     settings = [
         '         <setting id="%s-enable" type="bool" label="%s %s" default="true" visible="true"/>'
         % (name, name, i18n('enabled')),
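         # visible="eq(-1,true)" is a Kodi relative condition: this row only shows when the setting one position above (the enable toggle) is true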
         '         <setting id="%s-sub_check" type="bool" label="    %s" default="false" visible="eq(-1,true)"/>'
         % (name, i18n('page_existence')),
     ]
     return settings
Example #2
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-4,true)"/>'
         % (name, i18n('username')))
     settings.append(
         '         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-5,true)"/>'
         % (name, i18n('password')))
     return settings
Example #3
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-filter" type="slider" range="0,180" option="int" label="     %s" default="30" visible="eq(-3,true)"/>'
         % (name, i18n('filter_results_days')))
     settings.append(
         '         <setting id="%s-select" type="enum" label="     %s" lvalues="30636|30637" default="0" visible="eq(-4,true)"/>'
         % (name, i18n('auto_select')))
     return settings
Example #4
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     name = cls.get_name()
     settings.append('         <setting id="%s-get_token" label="    %s" type="action" action="RunPlugin(plugin://plugin.video.salts/?mode=auth_torba)" visible="eq(-4,true)"/>'
                     % (name, i18n('torba_auth')))
     settings.append('         <setting id="%s-reset_token" label="    %s" type="action" action="RunPlugin(plugin://plugin.video.salts/?mode=reset_torba)" visible="eq(-5,true)"/>'
                     % (name, i18n('reset_torba')))
     settings.append('         <setting id="%s-token" type="text" default="" visible="false"/>' % (name))
     settings.append('         <setting id="%s-refresh" type="text" default="" visible="false"/>' % (name))
     settings.append('         <setting id="%s-client_id" type="text" default="" visible="false"/>' % (name))
     settings.append('         <setting id="%s-client_secret" type="text" default="" visible="false"/>' % (name))
     return settings
Example #5
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-4,true)"/>'
         % (name, i18n("username"))
     )
     settings.append(
         '         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-5,true)"/>'
         % (name, i18n("password"))
     )
     return settings
Example #6
 def auth_torba(self):
     html = self._http_get(OAUTH_GET_URL, cache_limit=0)
     js_data = scraper_utils.parse_json(html, OAUTH_GET_URL)
     line1 = i18n('verification_url') % (js_data['verification_short_url'])
     line2 = i18n('login_prompt')
     countdown = int(utils.iso_2_utc(js_data['expires_in']) - time.time())
     interval = js_data['interval'] / 1000
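     # Poll check_oauth with the device code on each dialog tick until the user authorizes or the countdown expires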
     with kodi.CountdownDialog(i18n('torba_acct_auth'), line1=line1, line2=line2, countdown=countdown, interval=interval) as cd:
         result = cd.start(self.check_oauth, [js_data['device_code']])
     
     # cancelled
     if result is None: return
     return self.__get_token(result['client_id'], result['client_secret'], js_data['device_code'])
Example #7
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-4,true)"/>'
         % (name, i18n('username')))
     settings.append(
         '         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-5,true)"/>'
         % (name, i18n('password')))
     settings.append(
         '         <setting id="%s-result_limit" label="     %s" type="slider" default="10" range="10,100" option="int" visible="eq(-6,true)"/>'
         % (name, i18n('result_limit')))
     return settings
Example #8
 def get_settings(cls):
     """
     Returns a list of settings to be used for this scraper. Settings are automatically checked for updates every time scrapers are imported.
     The list returned by each scraper is aggregated into one big settings.xml string; if that string differs from the current settings XML in the Scrapers category,
     the existing settings.xml fragment is removed and replaced by the new one.
     """
     name = cls.get_name()
     return [
         '         <setting id="%s-enable" type="bool" label="%s %s" default="true" visible="true"/>'
         % (name, name, i18n('enabled')),
         '         <setting id="%s-base_url" type="text" label="    %s" default="%s" visible="eq(-1,true)"/>'
         % (name, i18n('base_url'), cls.base_url),
         '         <setting id="%s-sub_check" type="bool" label="    %s" default="true" visible="eq(-2,true)"/>'
         % (name, i18n('page_existence')),
     ]
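The docstring above describes the aggregation step: every scraper's settings list is joined into one settings.xml fragment, and the Scrapers category is only rewritten when that fragment changes. A minimal sketch of that step, assuming hypothetical helpers get_scraper_classes(), read_scrapers_fragment(), and write_scrapers_fragment() (none of these names appear in the original code):

def update_scrapers_fragment():
    # Collect every scraper's <setting .../> rows into one block.
    rows = []
    for cls in get_scraper_classes():          # hypothetical: yields all scraper classes
        rows.extend(cls.get_settings())
    new_fragment = '\n'.join(rows)

    # Rewrite the Scrapers category only when the aggregate actually differs,
    # so settings.xml is not touched on every import.
    old_fragment = read_scrapers_fragment()    # hypothetical: current settings.xml slice
    if new_fragment != old_fragment:
        write_scrapers_fragment(new_fragment)  # hypothetical: swap in the new fragment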
Example #9
def show_next_up(last_label, sf_begin):
    token = kodi.get_setting('trakt_oauth_token')
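    # Only act while this add-on's TV show listing is on screen and a Trakt token is available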
    if token and xbmc.getInfoLabel('Container.PluginName') == kodi.get_id() and xbmc.getInfoLabel('Container.Content') == 'tvshows':
        if xbmc.getInfoLabel('ListItem.label') != last_label:
            sf_begin = time.time()

        last_label = xbmc.getInfoLabel('ListItem.label')
        if sf_begin and (time.time() - sf_begin) >= int(kodi.get_setting('next_up_delay')):
            liz_url = xbmc.getInfoLabel('ListItem.FileNameAndPath')
            queries = kodi.parse_query(liz_url[liz_url.find('?'):])
            if 'trakt_id' in queries:
                try: list_size = int(kodi.get_setting('list_size'))
                except: list_size = 30
                try: trakt_timeout = int(kodi.get_setting('trakt_timeout'))
                except: trakt_timeout = 20
                trakt_api = Trakt_API(token, kodi.get_setting('use_https') == 'true', list_size, trakt_timeout, kodi.get_setting('trakt_offline') == 'true')
                progress = trakt_api.get_show_progress(queries['trakt_id'], full=True)
                if 'next_episode' in progress and progress['next_episode']:
                    if progress['completed'] or kodi.get_setting('next_unwatched') == 'true':
                        next_episode = progress['next_episode']
                        date = utils2.make_day(utils2.make_air_date(next_episode['first_aired']))
                        if kodi.get_setting('next_time') != '0':
                            date_time = '%s@%s' % (date, utils2.make_time(utils.iso_2_utc(next_episode['first_aired']), 'next_time'))
                        else:
                            date_time = date
                        msg = '[[COLOR deeppink]%s[/COLOR]] - %sx%s' % (date_time, next_episode['season'], next_episode['number'])
                        if next_episode['title']: msg += ' - %s' % (next_episode['title'])
                        duration = int(kodi.get_setting('next_up_duration')) * 1000
                        kodi.notify(header=i18n('next_episode'), msg=msg, duration=duration)
            sf_begin = 0
    else:
        last_label = ''
    
    return last_label, sf_begin
Example #10
 def get_settings(cls):
     """
     Returns a list of settings to be used for this scraper. Settings are automatically checked for updates every time scrapers are imported.
     The list returned by each scraper is aggregated into one big settings.xml string; if that string differs from the current settings XML in the Scrapers category,
     the existing settings.xml fragment is removed and replaced by the new one.
     """
     name = cls.get_name()
     return [
         '         <setting id="%s-enable" type="bool" label="%s %s" default="true" visible="true"/>'
         % (name, name, i18n("enabled")),
         '         <setting id="%s-base_url" type="text" label="    %s" default="%s" visible="eq(-1,true)"/>'
         % (name, i18n("base_url"), cls.base_url),
         '         <setting id="%s-sub_check" type="bool" label="    %s" default="true" visible="eq(-2,true)"/>'
         % (name, i18n("page_existence")),
         '         <setting id="%s_last_results" type="number" default="0" visible="false"/>' % (name),
     ]
Example #11
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-auto_pick" type="bool" label="    %s" default="false" visible="eq(-4,true)"/>'
         % (name, i18n('auto_pick')))
     return settings
Example #12
def update_scraper(filename, scraper_url):
    try:
        if not filename: return
        py_path = os.path.join(kodi.get_path(), 'scrapers', filename)
        exists = os.path.exists(py_path)
        scraper_password = kodi.get_setting('scraper_password')
        if scraper_url and scraper_password:
            old_lm = None
            old_py = ''
            if exists:
                with open(py_path, 'r') as f:
                    old_py = f.read()
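                    # A previously fetched scraper starts with a "# Last-Modified:" comment; recover that stamp and pass it along to the update fetch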
                    match = re.search('^#\s+Last-Modified:\s*(.*)', old_py)
                    if match:
                        old_lm = match.group(1).strip()

            new_lm, new_py = utils2.get_and_decrypt(scraper_url, scraper_password, old_lm)
            if new_py:
                logger.log('%s path: %s, new_py: %s, match: %s' % (filename, py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
                if old_py != new_py:
                    with open(py_path, 'w') as f:
                        f.write('# Last-Modified: %s\n' % (new_lm))
                        f.write(new_py)
                    kodi.notify(msg=utils2.i18n('scraper_updated') + filename)
                        
    except Exception as e:
        logger.log('Failure during %s scraper update: %s' % (filename, e), log_utils.LOGWARNING)
Example #14
def show_next_up(last_label, sf_begin):
    token = kodi.get_setting('trakt_oauth_token')
    if token and xbmc.getInfoLabel('Container.PluginName') == kodi.get_id(
    ) and xbmc.getInfoLabel('Container.Content') == 'tvshows':
        if xbmc.getInfoLabel('ListItem.label') != last_label:
            sf_begin = time.time()

        last_label = xbmc.getInfoLabel('ListItem.label')
        if sf_begin and (time.time() - sf_begin) >= int(
                kodi.get_setting('next_up_delay')):
            liz_url = xbmc.getInfoLabel('ListItem.FileNameAndPath')
            queries = kodi.parse_query(liz_url[liz_url.find('?'):])
            if 'trakt_id' in queries:
                try:
                    list_size = int(kodi.get_setting('list_size'))
                except:
                    list_size = 30
                try:
                    trakt_timeout = int(kodi.get_setting('trakt_timeout'))
                except:
                    trakt_timeout = 20
                trakt_api = Trakt_API(
                    token,
                    kodi.get_setting('use_https') == 'true', list_size,
                    trakt_timeout,
                    kodi.get_setting('trakt_offline') == 'true')
                progress = trakt_api.get_show_progress(queries['trakt_id'],
                                                       full=True)
                if 'next_episode' in progress and progress['next_episode']:
                    if progress['completed'] or kodi.get_setting(
                            'next_unwatched') == 'true':
                        next_episode = progress['next_episode']
                        date = utils2.make_day(
                            utils2.make_air_date(next_episode['first_aired']))
                        if kodi.get_setting('next_time') != '0':
                            date_time = '%s@%s' % (
                                date,
                                utils2.make_time(
                                    utils.iso_2_utc(
                                        next_episode['first_aired']),
                                    'next_time'))
                        else:
                            date_time = date
                        msg = '[[COLOR deeppink]%s[/COLOR]] - %sx%s' % (
                            date_time, next_episode['season'],
                            next_episode['number'])
                        if next_episode['title']:
                            msg += ' - %s' % (next_episode['title'])
                        duration = int(
                            kodi.get_setting('next_up_duration')) * 1000
                        kodi.notify(header=i18n('next_episode'),
                                    msg=msg,
                                    duration=duration)
            sf_begin = 0
    else:
        last_label = ''

    return last_label, sf_begin
Example #15
    def resolve_link(self, link):
        try:
            headers = dict(
                [item.split('=') for item in (link.split('|')[1]).split('&')])
            for key in headers:
                headers[key] = urllib.unquote(headers[key])
            link = link.split('|')[0]
        except:
            headers = {}

        html = self._http_get(link, headers=headers, cache_limit=.5)
        fragment = dom_parser.parse_dom(html, 'div', {'class': 'player'})
        if fragment:
            iframe_url = dom_parser.parse_dom(fragment[0], 'iframe', ret='src')
            if iframe_url:
                headers = {'Referer': link}
                html = self._http_get(iframe_url[0],
                                      headers=headers,
                                      cache_limit=.5)
                match = re.search("window\.atob\('([^']+)", html)
                if match:
                    func_count = len(re.findall('window\.atob', html))
                    html = match.group(1)
                    for _i in xrange(func_count):
                        html = base64.decodestring(html)

                streams = []
                for match in re.finditer(
                        '''<source[^>]+src=["']([^'"]+)[^>]+label=['"]([^'"]+)''',
                        html):
                    streams.append(match.groups())

                if len(streams) > 1:
                    if not self.auto_pick:
                        result = xbmcgui.Dialog().select(
                            i18n('choose_stream'), [e[1] for e in streams])
                        if result > -1:
                            return streams[result][0]
                    else:
                        best_stream = ''
                        best_q = 0
                        for stream in streams:
                            stream_url, label = stream
                            if Q_ORDER[scraper_utils.height_get_quality(
                                    label)] > best_q:
                                best_q = Q_ORDER[
                                    scraper_utils.height_get_quality(label)]
                                best_stream = stream_url

                        if best_stream:
                            return best_stream
                elif streams:
                    return streams[0][0]
Example #16
 def resolve_link(self, link):
     query = urlparse.parse_qs(link)
     if 'hash_id' in query:
         hash_id = query['hash_id'][0].lower()
         if self.__add_torrent(hash_id):
             browse_url = BROWSE_URL % (hash_id)
             browse_url = urlparse.urljoin(self.base_url, browse_url)
             js_data = self._json_get(browse_url, cache_limit=0)
             if 'content' in js_data:
                 videos = self.__get_videos(js_data['content'])
                 
                 if len(videos) > 1:
                     result = xbmcgui.Dialog().select(i18n('choose_stream'), [video['label'] for video in videos])
                     if result > -1:
                         return videos[result]['url']
                 elif videos:
                     return videos[0]['url']
Example #17
    def resolve_link(self, link):
        try:
            headers = dict([item.split('=') for item in (link.split('|')[1]).split('&')])
            for key in headers: headers[key] = urllib.unquote(headers[key])
            link = link.split('|')[0]
        except:
            headers = {}

        html = self._http_get(link, headers=headers, cache_limit=.5)
        fragment = dom_parser.parse_dom(html, 'div', {'class': 'player'})
        if fragment:
            iframe_url = dom_parser.parse_dom(fragment[0], 'iframe', ret='src')
            if iframe_url:
                headers = {'Referer': link}
                html = self._http_get(iframe_url[0], headers=headers, cache_limit=.5)
                match = re.search("window\.atob\('([^']+)", html)
                if match:
                    func_count = len(re.findall('window\.atob', html))
                    html = match.group(1)
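                    # The embedded payload may be wrapped in nested window.atob() calls; peel off one base64 layer per call found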
                    for _i in xrange(func_count):
                        html = base64.decodestring(html)
                
                streams = []
                for match in re.finditer('''<source[^>]+src=["']([^'"]+)[^>]+label=['"]([^'"]+)''', html):
                    streams.append(match.groups())
                
                if len(streams) > 1:
                    if not self.auto_pick:
                        result = xbmcgui.Dialog().select(i18n('choose_stream'), [e[1] for e in streams])
                        if result > -1:
                            return streams[result][0]
                    else:
                        best_stream = ''
                        best_q = 0
                        for stream in streams:
                            stream_url, label = stream
                            if Q_ORDER[scraper_utils.height_get_quality(label)] > best_q:
                                best_q = Q_ORDER[scraper_utils.height_get_quality(label)]
                                best_stream = stream_url
                        
                        if best_stream:
                            return best_stream
                elif streams:
                    return streams[0][0]
Example #18
    def resolve_link(self, link):
        query = scraper_utils.parse_query(link)
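        # The link is a query string carrying the torrent hash; add the torrent, then browse its contents for playable videos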
        if 'hash_id' in query:
            hash_id = query['hash_id'].lower()
            if self.__add_torrent(hash_id):
                browse_url = BROWSE_URL % (hash_id)
                browse_url = scraper_utils.urljoin(self.base_url, browse_url)
                js_data = self._json_get(browse_url, cache_limit=0)
                if 'content' in js_data:
                    videos = self.__get_videos(js_data['content'])

                    if len(videos) > 1:
                        result = xbmcgui.Dialog().select(
                            i18n('choose_stream'),
                            [video['label'] for video in videos])
                        if result > -1:
                            return videos[result]['url']
                    elif videos:
                        return videos[0]['url']
Example #19
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append(
         '         <setting id="%s-use_https" type="bool" label="     %s" default="false" visible="eq(-3,true)"/>'
         % (name, i18n('use_https')))
     settings.append(
         '         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-4,true)"/>'
         % (name, i18n('username')))
     settings.append(
         '         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-5,true)"/>'
         % (name, i18n('password')))
     settings.append(
         '         <setting id="%s-base_url2" type="text" label="     %s %s" default="%s" visible="eq(-6,true)"/>'
         % (name, i18n('movies'), i18n('base_url'), cls.movie_base_url))
     settings.append(
         '         <setting id="%s-base_url3" type="text" label="     %s %s" default="%s" visible="eq(-7,true)"/>'
         % (name, i18n('tv_shows'), i18n('base_url'), cls.tv_base_url))
     settings.append(
         '         <setting id="%s-include_trans" type="bool" label="     %s" default="true" visible="eq(-8,true)"/>'
         % (name, i18n('include_transcodes')))
     return settings
Example #20
    def resolve_link(self, link):
        playlist = super(self.__class__, self)._http_get(link, cache_limit=.5)
        try:
            ns = '{http://xspf.org/ns/0/}'
            root = ET.fromstring(playlist)
            tracks = root.findall('.//%strack' % (ns))
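            # XSPF durations are reported in milliseconds; keep only tracks at least MIN_DURATION long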
            locations = []
            for track in tracks:
                duration = track.find('%sduration' % (ns)).text
                try: duration = int(duration)
                except: duration = 0
                if duration >= MIN_DURATION:
                    location = track.find('%slocation' % (ns)).text
                    locations.append({'duration': duration / 1000, 'url': location})

            if len(locations) > 1:
                result = xbmcgui.Dialog().select(i18n('choose_stream'), [utils.format_time(location['duration']) for location in locations])
                if result > -1:
                    return locations[result]['url']
            elif locations:
                return locations[0]['url']
        except Exception as e:
            logger.log('Failure during furk playlist parse: %s' % (e), log_utils.LOGWARNING)
Example #21
import urllib2
from salts_lib import cf_captcha
import kodi
import log_utils  # @UnusedImport
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import Q_ORDER
from salts_lib.constants import SHORT_MONS
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import DEFAULT_TIMEOUT
from salts_lib.db_utils import DB_Connection
from salts_lib.utils2 import i18n, ungz

try:
    import urlresolver
except:
    kodi.notify(msg=i18n('smu_failed'), duration=5000)

logger = log_utils.Logger.get_logger()

BASE_URL = ''
COOKIEPATH = kodi.translate_path(kodi.get_profile())
MONTHS = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
MAX_RESPONSE = 1024 * 1024 * 5
CF_CAPCHA_ENABLED = kodi.get_setting('cf_captcha') == 'true'

class ScrapeError(Exception):
    pass

class NoRedirection(urllib2.HTTPErrorProcessor):
    def http_response(self, request, response):  # @UnusedVariable
        logger.log('Stopping Redirect', log_utils.LOGDEBUG)
        return response  # hand the response back unprocessed so 3xx redirects are not followed
Example #22
from salts_lib import cf_captcha
import kodi
import log_utils  # @UnusedImport
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import Q_ORDER
from salts_lib.constants import SHORT_MONS
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import DEFAULT_TIMEOUT
from salts_lib.db_utils import DB_Connection
from salts_lib.utils2 import i18n, ungz

try:
    import urlresolver
except:
    kodi.notify(msg=i18n('smu_failed'), duration=5000)

logger = log_utils.Logger.get_logger()

BASE_URL = ''
COOKIEPATH = kodi.translate_path(kodi.get_profile())
MONTHS = [
    'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August',
    'September', 'October', 'November', 'December'
]
MAX_RESPONSE = 1024 * 1024 * 5
CF_CAPCHA_ENABLED = kodi.get_setting('cf_captcha') == 'true'


class ScrapeError(Exception):
    pass
Example #23
 def get_settings(cls):
     name = cls.get_name()
     try:
         settings = real_scraper.get_settings()
         offset = 6
     except:
         settings = super(cls, cls).get_settings()
         offset = 4
     settings.append('         <setting id="%s-scraper_url" type="text" label="    %s" default="" visible="eq(-%d,true)"/>' % (name, i18n('scraper_location'), offset))
     settings.append('         <setting id="%s-scraper_password" type="text" label="    %s" option="hidden" default="" visible="eq(-%d,true)"/>' % (name, i18n('scraper_key'), offset + 1))
     return settings
Example #24
 def get_settings(cls):
     name = cls.get_name()
     settings = [
         '         <setting id="%s-enable" type="bool" label="%s %s" default="true" visible="true"/>' % (name, name, i18n('enabled')),
         '         <setting id="%s-sub_check" type="bool" label="    %s" default="false" visible="eq(-1,true)"/>' % (name, i18n('page_existence')),
         '         <setting id="%s_last_results" type="number" default="0" visible="false"/>' % (name)
     ]
     return settings
Example #25
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append('         <setting id="%s-use_https" type="bool" label="     %s" default="false" visible="eq(-4,true)"/>' % (name, i18n('use_https')))
     settings.append('         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-5,true)"/>' % (name, i18n('username')))
     settings.append('         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-6,true)"/>' % (name, i18n('password')))
     settings.append('         <setting id="%s-base_url2" type="text" label="     %s %s" default="%s" visible="eq(-7,true)"/>' % (name, i18n('movies'), i18n('base_url'), cls.movie_base_url))
     settings.append('         <setting id="%s-base_url3" type="text" label="     %s %s" default="%s" visible="eq(-8,true)"/>' % (name, i18n('tv_shows'), i18n('base_url'), cls.tv_base_url))
     settings.append('         <setting id="%s-include_trans" type="bool" label="     %s" default="true" visible="eq(-9,true)"/>' % (name, i18n('include_transcodes')))
     return settings
Example #26
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     name = cls.get_name()
     settings.append('         <setting id="%s-auto_pick" type="bool" label="    %s" default="false" visible="eq(-4,true)"/>' % (name, i18n('auto_pick')))
     return settings
Example #27
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append('         <setting id="%s-filter" type="slider" range="0,180" option="int" label="     %s" default="60" visible="eq(-3,true)"/>' % (name, i18n('filter_results_days')))
     return settings
Example #28
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append('         <setting id="%s-filter" type="slider" range="0,180" option="int" label="     %s" default="30" visible="eq(-4,true)"/>' % (name, i18n('filter_results_days')))
     settings.append('         <setting id="%s-select" type="enum" label="     %s" lvalues="30636|30637" default="0" visible="eq(-5,true)"/>' % (name, i18n('auto_select')))
     return settings
Example #29
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append('         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-4,true)"/>' % (name, i18n('username')))
     settings.append('         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-5,true)"/>' % (name, i18n('password')))
     settings.append('         <setting id="%s-result_limit" label="     %s" type="slider" default="10" range="10,100" option="int" visible="eq(-6,true)"/>' % (name, i18n('result_limit')))
     settings.append('         <setting id="%s-size_limit" label="     %s" type="slider" default="0" range="0,50" option="int" visible="eq(-7,true)"/>' % (name, i18n('size_limit')))
     return settings
Example #30
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append('         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-4,true)"/>' % (name, i18n('username')))
     settings.append('         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-5,true)"/>' % (name, i18n('password')))
     return settings
Example #31
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     name = cls.get_name()
     settings.append('         <setting id="%s-username" type="text" label="     %s" default="" visible="eq(-4,true)"/>' % (name, i18n('username')))
     settings.append('         <setting id="%s-password" type="text" label="     %s" option="hidden" default="" visible="eq(-5,true)"/>' % (name, i18n('password')))
     settings.append('         <setting id="%s-include_premium" type="bool" label="     %s" default="false" visible="eq(-6,true)"/>' % (name, i18n('include_premium')))
     return settings
Example #32
 def get_settings(cls):
     settings = super(cls, cls).get_settings()
     settings = scraper_utils.disable_sub_check(settings)
     name = cls.get_name()
     settings.append('         <setting id="%s-select" type="enum" label="     %s" lvalues="30636|30637" default="0" visible="eq(-4,true)"/>' % (name, i18n('auto_select')))
     return settings