def __update_scraper_py(self):
    """Fetch, decrypt, and install the latest shush scraper module.

    Downloads AES-CBC-encrypted Python source from PY_URL at most once
    every four hours (based on the local file's mtime), decrypts it with
    the module-level KEY/IV, and rewrites the on-disk scraper only when
    the content actually changed. Any failure is logged and swallowed so
    a broken update can never crash the caller.
    """
    try:
        path = xbmcaddon.Addon().getAddonInfo('path')
        py_path = os.path.join(path, 'scrapers', 'shush_scraper.py')
        # Refresh when the file is missing or older than four hours.
        # (The original `not exists or (exists and ...)` was redundant:
        # in the second arm `exists` is always true.)
        four_hours = 4 * 60 * 60
        if (not os.path.exists(py_path)
                or os.path.getmtime(py_path) < time.time() - four_hours):
            cipher_text = self._http_get(PY_URL, cache_limit=4)
            if cipher_text:
                decrypter = pyaes.Decrypter(
                    pyaes.AESModeOfOperationCBC(KEY, IV))
                new_py = decrypter.feed(cipher_text)
                # Final feed() flushes the last block and strips padding.
                new_py += decrypter.feed()

                old_py = ''
                if os.path.exists(py_path):
                    with open(py_path, 'r') as f:
                        old_py = f.read()

                log_utils.log(
                    'shush path: %s, new_py: %s, match: %s' %
                    (py_path, bool(new_py), new_py == old_py),
                    xbmc.LOGDEBUG)
                # Only rewrite on change so the mtime throttle stays honest.
                if old_py != new_py:
                    with open(py_path, 'w') as f:
                        f.write(new_py)
    except Exception as e:
        # Best-effort update: never let a failed refresh break scraping.
        log_utils.log('Failure during shush scraper update: %s' % (e),
                      xbmc.LOGWARNING)
Example #2
0
 def __update_scraper_py(self):
     """Fetch, decrypt, and install the latest scraper module for this site.

     Reads the download URL and decryption password from the addon settings,
     throttles downloads to once per four hours via the local file's mtime,
     derives the AES key as SHA-256 of the password, and rewrites the
     on-disk scraper only when the decrypted content changed. All failures
     are logged and swallowed so the caller never crashes on a bad update.
     """
     try:
         py_path = os.path.join(kodi.get_path(), 'scrapers', 'shush_scraper.py')
         exists = os.path.exists(py_path)
         scraper_url = kodi.get_setting('%s-scraper_url' % (self.get_name()))
         scraper_password = kodi.get_setting('%s-scraper_password' % (self.get_name()))
         if scraper_url and scraper_password and (not exists or os.path.getmtime(py_path) < time.time() - (4 * 60 * 60)):
             try:
                 req = urllib2.urlopen(scraper_url)
                 try:
                     cipher_text = req.read()
                 finally:
                     # Fix: the response was never closed, leaking the connection.
                     req.close()
             except Exception as e:
                 log_utils.log('Failure during %s scraper get: %s' % (self.get_name(), e), log_utils.LOGWARNING)
                 return

             if cipher_text:
                 # Key is SHA-256 of the user-supplied password; IV is module-level.
                 scraper_key = hashlib.sha256(scraper_password).digest()
                 decrypter = pyaes.Decrypter(pyaes.AESModeOfOperationCBC(scraper_key, IV))
                 new_py = decrypter.feed(cipher_text)
                 # Final feed() flushes the last block and strips padding.
                 new_py += decrypter.feed()

                 old_py = ''
                 if os.path.exists(py_path):
                     with open(py_path, 'r') as f:
                         old_py = f.read()

                 log_utils.log('%s path: %s, new_py: %s, match: %s' % (self.get_name(), py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
                 # Only rewrite on change so the mtime throttle stays honest.
                 if old_py != new_py:
                     with open(py_path, 'w') as f:
                         f.write(new_py)
     except Exception as e:
         # Best-effort update: never let a failed refresh break scraping.
         log_utils.log('Failure during %s scraper update: %s' % (self.get_name(), e), log_utils.LOGWARNING)
def evp_decode(cipher_text, passphrase, salt=None):
    """Decrypt OpenSSL EVP-style base64 ciphertext with *passphrase*.

    When *salt* is not supplied, it is taken from bytes 8-16 of the decoded
    payload (the OpenSSL "Salted__" header layout) and the 16-byte header is
    stripped before decryption. Key and IV are derived via evpKDF.
    """
    raw = base64.b64decode(cipher_text)
    if salt:
        payload = raw
    else:
        salt, payload = raw[8:16], raw[16:]
    derived = evpKDF(passphrase, salt)
    mode = pyaes.AESModeOfOperationCBC(derived['key'], derived['iv'])
    decrypter = pyaes.Decrypter(mode)
    # Final feed() flushes the last block and strips padding.
    return decrypter.feed(payload) + decrypter.feed()
 def __decrypt(self, message, key, iv):
     """AES-CBC decrypt *message* with *key*/*iv*, dropping NUL padding."""
     mode = pyaes.AESModeOfOperationCBC(key, iv)
     decrypter = pyaes.Decrypter(mode)
     decoded = decrypter.feed(message) + decrypter.feed()
     # Keep everything up to the first NUL byte (zero-padded plaintext).
     return decoded.split('\0', 1)[0]
 def __decrypt(self, cipher_text):
     """AES-CBC decrypt *cipher_text* with the module-level PB_KEY and IV."""
     mode = pyaes.AESModeOfOperationCBC(PB_KEY, IV)
     decrypter = pyaes.Decrypter(mode)
     # Final feed() flushes the last block and strips padding.
     return decrypter.feed(cipher_text) + decrypter.feed()