Example #1
0
 def __update_scraper_py(self):
     """Fetch and install an updated iflix_scraper.py from the configured URL.

     Runs only when both the per-scraper URL and password settings are set,
     and at most once per 24 hours (judged by the local file's mtime).  The
     downloaded payload is AES-CBC decrypted with a key derived from the
     password setting; the file is rewritten only when its content changed.
     ``self.exists`` is refreshed unconditionally on the way out.
     """
     try:
         py_path = os.path.join(kodi.get_path(), 'scrapers', 'iflix_scraper.py')
         self.exists = os.path.exists(py_path)
         scraper_url = kodi.get_setting('%s-scraper_url' % (self.get_name()))
         scraper_password = kodi.get_setting('%s-scraper_password' % (self.get_name()))
         # Only refresh when configured and the local copy is missing or
         # older than 24 hours.
         if scraper_url and scraper_password and (not self.exists or os.path.getmtime(py_path) < time.time() - (24 * 60 * 60)):
             try:
                 req = urllib2.urlopen(scraper_url)
                 cipher_text = req.read()
             except Exception as e:
                 # Download failure is non-fatal: keep using the current copy.
                 log_utils.log('Failure during %s scraper get: %s' % (self.get_name(), e), log_utils.LOGWARNING)
                 return
             
             if cipher_text:
                 # AES key is the SHA-256 of the password; IV is module-level.
                 scraper_key = hashlib.sha256(scraper_password).digest()
                 decrypter = pyaes.Decrypter(pyaes.AESModeOfOperationCBC(scraper_key, IV))
                 new_py = decrypter.feed(cipher_text)
                 new_py += decrypter.feed()  # flush the final (padded) block
                 
                 old_py = ''
                 if os.path.exists(py_path):
                     with open(py_path, 'r') as f:
                         old_py = f.read()
                 
                 log_utils.log('%s path: %s, new_py: %s, match: %s' % (self.get_name(), py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
                 # Avoid rewriting the file (and bumping its mtime) when unchanged.
                 if old_py != new_py:
                     with open(py_path, 'w') as f:
                         f.write(new_py)
     except Exception as e:
         log_utils.log('Failure during %s scraper update: %s' % (self.get_name(), e), log_utils.LOGWARNING)
     finally:
         # NOTE(review): if os.path.join itself ever raised, py_path would be
         # unbound here and this line would mask the original error —
         # presumably it never fails in practice, but worth confirming.
         self.exists = os.path.exists(py_path)
Example #2
0
def update_settings():
    """Regenerate the dynamically built scraper/proxy settings in settings.xml.

    Probes that the file is writable first; if not, the update is skipped
    with a warning.  Settings are flushed into a new category roughly every
    90 entries, and the file is rewritten only when the XML actually changed.
    """
    full_path = os.path.join(kodi.get_path(), 'resources', 'settings.xml')

    # Writability probe: open for append and immediately close.
    try:
        with open(full_path, 'a'):
            pass
    except Exception as e:
        log_utils.log('Dynamic settings update skipped: %s' % (e), log_utils.LOGWARNING)
        return

    with open(full_path, 'r') as f:
        xml = f.read()
    original_xml = xml

    subclasses = scraper.Scraper.__class__.__subclasses__(scraper.Scraper)  # @UndefinedVariable
    subclasses += proxy.Proxy.__class__.__subclasses__(proxy.Proxy)  # @UndefinedVariable

    pending = []
    category = 1
    for candidate in sorted(subclasses, key=lambda c: c.get_name().upper()):
        # Skip unnamed scrapers and any fronted by a proxy.
        if not candidate.get_name() or candidate.has_proxy():
            continue
        pending += candidate.get_settings()
        if len(pending) > 90:
            xml = update_xml(xml, pending, category)
            pending = []
            category += 1

    if pending:
        xml = update_xml(xml, pending, category)

    if xml == original_xml:
        log_utils.log('No Settings Update Needed', log_utils.LOGDEBUG)
    else:
        with open(full_path, 'w') as f:
            f.write(xml)
Example #3
0
def update_scraper(filename, scraper_url):
    """Download, decrypt and install an updated scraper module.

    Requires both ``scraper_url`` and the ``scraper_password`` setting; the
    file at ``scrapers/<filename>`` is rewritten only when the decrypted
    content differs from what is on disk.  All failures are logged and
    swallowed so the caller keeps running with the current module.
    """
    try:
        if not filename:
            return
        py_path = os.path.join(kodi.get_path(), 'scrapers', filename)
        scraper_password = kodi.get_setting('scraper_password')
        if not (scraper_url and scraper_password):
            return

        new_py = utils2.get_and_decrypt(scraper_url, scraper_password)
        if not new_py:
            return

        old_py = ''
        if os.path.exists(py_path):
            with open(py_path, 'r') as f:
                old_py = f.read()

        log_utils.log('%s path: %s, new_py: %s, match: %s' % (filename, py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
        # Skip the write (and the mtime bump) when nothing changed.
        if new_py != old_py:
            with open(py_path, 'w') as f:
                f.write(new_py)
    except Exception as e:
        log_utils.log('Failure during %s scraper update: %s' % (filename, e),
                      log_utils.LOGWARNING)
Example #4
0
def update_settings():
    """Regenerate the dynamic scraper sections of settings.xml.

    Reads the add-on's settings.xml, rebuilds the per-scraper settings in
    categories of at most ~90 entries each, and rewrites the file only when
    the generated XML differs from what is already on disk.  I/O errors
    propagate to the caller.
    """
    full_path = os.path.join(kodi.get_path(), 'resources', 'settings.xml')
    # The original wrapped this read in a bare ``except: raise``, which is a
    # no-op (it re-raised everything unchanged); read directly instead and
    # let any error propagate.
    with open(full_path, 'r') as f:
        xml = f.read()

    new_settings = []
    cat_count = 1
    old_xml = xml
    classes = scraper.Scraper.__class__.__subclasses__(scraper.Scraper)
    for cls in sorted(classes, key=lambda x: x.get_name().upper()):
        new_settings += cls.get_settings()
        # Kodi settings categories have a practical size limit; start a new
        # category once ~90 settings have accumulated.
        if len(new_settings) > 90:
            xml = update_xml(xml, new_settings, cat_count)
            new_settings = []
            cat_count += 1

    if new_settings:
        xml = update_xml(xml, new_settings, cat_count)

    if xml != old_xml:
        # Same here: the bare ``except: raise`` around the write was dead
        # code and has been removed.
        with open(full_path, 'w') as f:
            f.write(xml)
    else:
        log_utils.log('No Settings Update Needed', log_utils.LOGDEBUG)
Example #5
0
 def __update_scraper_py(self):
     """Fetch and install an updated shush_scraper.py from the configured URL.

     Runs only when the per-scraper URL and password settings are set and the
     local copy is missing or older than four hours.  The payload is AES-CBC
     decrypted with a key derived from the password; the file is rewritten
     only when the content changed.  All failures are logged and swallowed.
     """
     try:
         py_path = os.path.join(kodi.get_path(), 'scrapers', 'shush_scraper.py')
         exists = os.path.exists(py_path)
         scraper_url = kodi.get_setting('%s-scraper_url' % (self.get_name()))
         scraper_password = kodi.get_setting('%s-scraper_password' % (self.get_name()))
         if scraper_url and scraper_password and (not exists or os.path.getmtime(py_path) < time.time() - (4 * 60 * 60)):
             try:
                 cipher_text = urllib2.urlopen(scraper_url).read()
             except Exception as e:
                 # Download failure is non-fatal: keep the current copy.
                 log_utils.log('Failure during %s scraper get: %s' % (self.get_name(), e), log_utils.LOGWARNING)
                 return
             
             if not cipher_text:
                 return
             
             # AES key is the SHA-256 of the password; IV is module-level.
             scraper_key = hashlib.sha256(scraper_password).digest()
             decrypter = pyaes.Decrypter(pyaes.AESModeOfOperationCBC(scraper_key, IV))
             new_py = decrypter.feed(cipher_text) + decrypter.feed()
             
             old_py = ''
             if os.path.exists(py_path):
                 with open(py_path, 'r') as f:
                     old_py = f.read()
             
             log_utils.log('%s path: %s, new_py: %s, match: %s' % (self.get_name(), py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
             # Only rewrite when the content actually differs.
             if new_py != old_py:
                 with open(py_path, 'w') as f:
                     f.write(new_py)
     except Exception as e:
         log_utils.log('Failure during %s scraper update: %s' % (self.get_name(), e), log_utils.LOGWARNING)
Example #6
0
def update_settings():
    """Rebuild the dynamically generated scraper settings in settings.xml.

    Collects the settings of every Scraper subclass, packs them into
    categories of at most ~90 entries, and rewrites settings.xml only when
    the resulting XML differs from the on-disk content.  I/O errors
    propagate to the caller.
    """
    full_path = os.path.join(kodi.get_path(), 'resources', 'settings.xml')
    # Dropped the original bare ``except: raise`` wrapper — a bare except
    # that only re-raises is dead code and hides nothing.
    with open(full_path, 'r') as f:
        xml = f.read()

    new_settings = []
    cat_count = 1
    old_xml = xml
    classes = scraper.Scraper.__class__.__subclasses__(scraper.Scraper)
    for cls in sorted(classes, key=lambda x: x.get_name().upper()):
        new_settings += cls.get_settings()
        # Flush into a fresh category once ~90 settings have accumulated
        # (Kodi categories have a practical size limit).
        if len(new_settings) > 90:
            xml = update_xml(xml, new_settings, cat_count)
            new_settings = []
            cat_count += 1

    if new_settings:
        xml = update_xml(xml, new_settings, cat_count)

    if xml != old_xml:
        # The write's bare ``except: raise`` wrapper was likewise a no-op
        # and has been removed.
        with open(full_path, 'w') as f:
            f.write(xml)
    else:
        log_utils.log('No Settings Update Needed', log_utils.LOGDEBUG)
Example #7
0
def update_scraper(file_name, scraper_url, scraper_key):
    """Refresh a scraper module from ``scraper_url`` when the local copy is
    missing or more than eight hours old.

    The decrypted payload replaces ``scrapers/<file_name>`` only when its
    content differs from what is already on disk.
    """
    py_path = os.path.join(kodi.get_path(), 'scrapers', file_name)
    exists = os.path.exists(py_path)
    stale = True if not exists else (time.time() - os.path.getmtime(py_path)) > (8 * 60 * 60)
    if not stale:
        return

    new_py = utils2.get_and_decrypt(scraper_url, scraper_key)
    if not new_py:
        return

    old_py = ''
    if exists:
        with open(py_path, 'r') as f:
            old_py = f.read()

    log_utils.log('%s path: %s, new_py: %s, match: %s' % (__file__, py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
    # Writing only on change keeps the mtime meaningful as a staleness clock.
    if new_py != old_py:
        with open(py_path, 'w') as f:
            f.write(new_py)
Example #8
0
def update_scraper(file_name, scraper_url, scraper_key):
    """Update ``scrapers/<file_name>`` from an encrypted remote source.

    No-op when the local file exists and is at most eight hours old.  The
    file is rewritten only when the decrypted content differs from the
    current on-disk content.
    """
    py_path = os.path.join(kodi.get_path(), 'scrapers', file_name)
    exists = os.path.exists(py_path)
    # Fresh enough — nothing to do.
    if exists and (time.time() - os.path.getmtime(py_path)) <= (8 * 60 * 60):
        return

    new_py = utils2.get_and_decrypt(scraper_url, scraper_key)
    if not new_py:
        return

    if exists:
        with open(py_path, 'r') as source:
            old_py = source.read()
    else:
        old_py = ''

    log_utils.log(
        '%s path: %s, new_py: %s, match: %s' %
        (__file__, py_path, bool(new_py), new_py == old_py),
        log_utils.LOGDEBUG)
    if old_py != new_py:
        with open(py_path, 'w') as target:
            target.write(new_py)
Example #9
0
def update_scraper(filename, scraper_url):
    """Fetch, decrypt and install an updated scraper module.

    Needs ``scraper_url`` and the global ``scraper_password`` setting.  The
    target file is rewritten only on a content change; every failure is
    logged at warning level and otherwise ignored.
    """
    try:
        if not filename:
            return
        target = os.path.join(kodi.get_path(), 'scrapers', filename)
        password = kodi.get_setting('scraper_password')
        if scraper_url and password:
            fresh_src = utils2.get_and_decrypt(scraper_url, password)
            if fresh_src:
                current_src = ''
                if os.path.exists(target):
                    with open(target, 'r') as src_file:
                        current_src = src_file.read()

                log_utils.log('%s path: %s, new_py: %s, match: %s' % (filename, target, bool(fresh_src), fresh_src == current_src), log_utils.LOGDEBUG)
                # Only touch the file when the content actually changed.
                if fresh_src != current_src:
                    with open(target, 'w') as src_file:
                        src_file.write(fresh_src)

    except Exception as e:
        log_utils.log('Failure during %s scraper update: %s' % (filename, e), log_utils.LOGWARNING)
Example #10
0
 def __update_scraper_py(self):
     """Refresh shush_scraper.py from PY_URL when the local copy is missing
     or more than four hours old.

     The downloaded payload is AES-CBC encrypted and is decrypted with the
     module-level KEY/IV; the file is rewritten only when the content
     differs from what is on disk.  Any failure is logged and swallowed so
     scraping continues with the existing copy.
     """
     try:
         py_path = os.path.join(kodi.get_path(), 'scrapers', 'shush_scraper.py')
         exists = os.path.exists(py_path)
         # Simplified condition: the original's ``exists and`` in the second
         # clause was redundant — that clause is only evaluated when
         # ``not exists`` is already false.
         if not exists or os.path.getmtime(py_path) < time.time() - (4 * 60 * 60):
             cipher_text = self._http_get(PY_URL, cache_limit=4)
             if cipher_text:
                 decrypter = pyaes.Decrypter(pyaes.AESModeOfOperationCBC(KEY, IV))
                 new_py = decrypter.feed(cipher_text)
                 new_py += decrypter.feed()  # flush the final (padded) block
                 
                 old_py = ''
                 if os.path.exists(py_path):
                     with open(py_path, 'r') as f:
                         old_py = f.read()
                 
                 log_utils.log('shush path: %s, new_py: %s, match: %s' % (py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
                 # Skip the write (and the mtime bump) when nothing changed.
                 if old_py != new_py:
                     with open(py_path, 'w') as f:
                         f.write(new_py)
     except Exception as e:
         log_utils.log('Failure during shush scraper update: %s' % (e), log_utils.LOGWARNING)
Example #11
0
def update_settings():
    """Regenerate the dynamic scraper/proxy settings in settings.xml.

    First verifies the file is writable (skipping the update with a warning
    when it is not), then packs the settings of every named, non-proxied
    Scraper/Proxy subclass into categories of at most ~90 entries, and
    rewrites the file only when the generated XML differs.
    """
    full_path = os.path.join(kodi.get_path(), 'resources', 'settings.xml')

    # Writability probe: open for append and immediately close it.
    try:
        with open(full_path, 'a'):
            pass
    except Exception as e:
        log_utils.log('Dynamic settings update skipped: %s' % (e),
                      log_utils.LOGWARNING)
        return

    with open(full_path, 'r') as f:
        xml = f.read()
    previous = xml

    all_classes = scraper.Scraper.__class__.__subclasses__(scraper.Scraper)  # @UndefinedVariable
    all_classes += proxy.Proxy.__class__.__subclasses__(proxy.Proxy)  # @UndefinedVariable

    batch = []
    section = 1
    for klass in sorted(all_classes, key=lambda k: k.get_name().upper()):
        # Skip unnamed scrapers and those fronted by a proxy.
        if not klass.get_name() or klass.has_proxy():
            continue
        batch += klass.get_settings()
        if len(batch) > 90:
            # Flush a full category (~90 settings) into the XML.
            xml = update_xml(xml, batch, section)
            batch = []
            section += 1

    if batch:
        xml = update_xml(xml, batch, section)

    if xml != previous:
        with open(full_path, 'w') as f:
            f.write(xml)
    else:
        log_utils.log('No Settings Update Needed', log_utils.LOGDEBUG)