def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
    """Set up the scraper: remember the timeout, open a DB handle,
    and read this scraper's add-on settings (base URL, page limit)."""
    self.timeout = timeout
    self.db_connection = DB_Connection()
    # Read both settings through a single Addon handle.
    addon = xbmcaddon.Addon()
    name = self.get_name()
    self.base_url = addon.getSetting('%s-base_url' % (name))
    self.max_pages = int(addon.getSetting('%s-max_pages' % (name)))
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
    """Set up the scraper: timeout, DB handle, and this scraper's
    add-on settings (base URL, credentials, premium-content flag)."""
    self.timeout = timeout
    self.db_connection = DB_Connection()
    # Read all settings through a single Addon handle.
    addon = xbmcaddon.Addon()
    name = self.get_name()
    self.base_url = addon.getSetting('%s-base_url' % (name))
    self.username = addon.getSetting('%s-username' % (name))
    self.password = addon.getSetting('%s-password' % (name))
    self.include_paid = addon.getSetting('%s-include_premium' % (name)) == 'true'
def create_db_connection(self):
    """(Re)create the database connection when running in a new worker.

    A DB connection created in one thread/process is not reused from
    another: whenever the current worker id differs from the one stored
    on the instance (or no connection exists yet), a fresh
    DB_Connection is created and the worker id is recorded.
    """
    if P_MODE == P_MODES.PROCESSES:
        worker_id = multiprocessing.current_process().pid
    else:
        # THREADS mode; this branch also acts as a safe fallback for any
        # other P_MODE value (the original left worker_id unbound in that
        # case, raising NameError at the comparison below).
        worker_id = threading.current_thread().ident
    # create a connection if we don't have one or it was created in a different worker
    if self.db_connection is None or self.worker_id != worker_id:
        self.db_connection = DB_Connection()
        self.worker_id = worker_id
def _cached_http_get(self, url, base_url, timeout, cookies=None, data=None, headers=None, cache_limit=8):
    """Fetch *url* via HTTP with caching.

    Returns the cached body when the DB cache holds an entry newer than
    *cache_limit*; otherwise performs the request (POST when *data* is
    given, GET otherwise), stores the body in the cache, and returns it.
    On any error the exception is logged and '' is returned.
    """
    # Normalize mutable/sentinel defaults.
    if cookies is None: cookies = {}
    if timeout == 0: timeout = None  # 0 means "no timeout" for urlopen
    if headers is None: headers = {}
    # Use an explicit Referer header if the caller supplied one; fall back to the URL itself.
    referer = headers['Referer'] if 'Referer' in headers else url
    log_utils.log(
        'Getting Url: %s cookie=|%s| data=|%s| extra headers=|%s|' % (url, cookies, data, headers))
    # Cache check first: skip the network entirely on a hit.
    db_connection = DB_Connection()
    _, html = db_connection.get_cached_url(url, cache_limit)
    if html:
        log_utils.log('Returning cached result for: %s' % (url), xbmc.LOGDEBUG)
        return html
    try:
        # Install the request cookies into a cookie jar bound to base_url.
        cj = self._set_cookies(base_url, cookies)
        # Presence of data switches urllib2 to a POST; encode the dict form.
        if data is not None: data = urllib.urlencode(data, True)
        request = urllib2.Request(url, data=data)
        request.add_header('User-Agent', USER_AGENT)
        # Unredirected: these headers are not re-sent if the server redirects.
        request.add_unredirected_header('Host', request.get_host())
        request.add_unredirected_header('Referer', referer)
        for key in headers: request.add_header(key, headers[key])
        response = urllib2.urlopen(request, timeout=timeout)
        # Persist cookies only after a successful fetch.
        cj.save(ignore_discard=True, ignore_expires=True)
        # Transparently decompress gzip-encoded responses.
        if response.info().get('Content-Encoding') == 'gzip':
            buf = StringIO(response.read())
            f = gzip.GzipFile(fileobj=buf)
            html = f.read()
        else:
            html = response.read()
    except Exception as e:
        # Best-effort fetch: log and return empty rather than propagate.
        log_utils.log(
            'Error (%s) during scraper http get: %s' % (str(e), url), xbmc.LOGWARNING)
        return ''
    # Store the fresh body for future cache hits.
    db_connection.cache_url(url, html)
    return html
def create_db_connection(self):
    """Ensure the instance holds a DB connection created by the current thread."""
    current_id = threading.current_thread().ident
    # A connection built in another thread is stale here; replace it.
    needs_new = self.db_connection is None or self.worker_id != current_id
    if needs_new:
        self.db_connection = DB_Connection()
        self.worker_id = current_id
def __init__(self, timeout=DEFAULT_TIMEOUT):
    """Initialize with a request timeout and a database connection.

    Fix: the *timeout* argument was accepted but silently discarded;
    it is now stored on the instance, matching the sibling scraper
    constructors that all set ``self.timeout``.
    """
    self.timeout = timeout
    self.db_connection = DB_Connection()
def db_connection(self):
    """Return the database connection, creating it lazily on first access."""
    connection = self.__db_connection
    if connection is None:
        connection = DB_Connection()
        self.__db_connection = connection
    return connection
# Kodi add-on service entry: Kodi API modules, then add-on libraries.
import xbmc
import xbmcgui
import xbmcaddon
from salts_lib import kodi
from salts_lib import log_utils
from salts_lib import utils
from salts_lib import utils2
from salts_lib.constants import MODES
from salts_lib.constants import TRIG_DB_UPG
from salts_lib.db_utils import DB_Connection

# Error tolerance threshold used by the service loop — TODO confirm exact use.
MAX_ERRORS = 10

log_utils.log('Service: Installed Version: %s' % (kodi.get_version()), log_utils.LOGNOTICE)

# Module-level DB setup: initialize/upgrade the schema unless a remote DB
# is configured and upgrades are disabled.
db_connection = DB_Connection()
if kodi.get_setting('use_remote_db') == 'false' or kodi.get_setting(
        'enable_upgrade') == 'true':
    if TRIG_DB_UPG:
        # Upgrade trigger set: use the schema version recorded in the DB.
        db_version = db_connection.get_db_version()
    else:
        db_version = kodi.get_version()
    db_connection.init_database(db_version)

class Service(xbmc.Player):
    # Background service hooking into Kodi's player events.
    def __init__(self, *args, **kwargs):
        log_utils.log('Service: starting...', log_utils.LOGNOTICE)
        xbmc.Player.__init__(self, *args, **kwargs)
        # Window 10000 is Kodi's home window, commonly used for global properties.
        self.win = xbmcgui.Window(10000)
        self.reset()
def __init__(self, *args, **kwargs):
    """Service startup: announce, hook into xbmc.Player, and prime state."""
    logger.log('Service: starting...', log_utils.LOGNOTICE)
    xbmc.Player.__init__(self, *args, **kwargs)
    self.db_connection = DB_Connection()
    self.win = xbmcgui.Window(10000)
    self.reset()