Example #1
def update_all_scrapers():
    try: last_check = int(kodi.get_setting('last_list_check'))
    except: last_check = 0
    now = int(time.time())
    list_url = kodi.get_setting('scraper_url')
    scraper_password = kodi.get_setting('scraper_password')
    list_path = os.path.join(kodi.translate_path(kodi.get_profile()), 'scraper_list.txt')
    exists = os.path.exists(list_path)
    if list_url and scraper_password and (not exists or last_check < (now - (24 * 60 * 60))):
        scraper_list = utils2.get_and_decrypt(list_url, scraper_password)
        if scraper_list:
            try:
                with open(list_path, 'w') as f:
                    f.write(scraper_list)

                kodi.set_setting('last_list_check', str(now))
                kodi.set_setting('scraper_last_update', time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)))
                for line in scraper_list.split('\n'):
                    line = line.replace(' ', '')
                    if line:
                        scraper_url, filename = line.split(',')
                        if scraper_url.startswith('http'):
                            update_scraper(filename, scraper_url)
            except Exception as e:
                log_utils.log('Exception during scraper update: %s' % (e), log_utils.LOGWARNING)
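For reference, update_all_scrapers() above expects the decrypted scraper_list to hold one "<scraper_url>,<filename>" pair per line; spaces are stripped, blank lines are skipped, and only URLs starting with "http" are accepted. The parsing step in isolation might look like this (parse_scraper_list is an illustrative name, not a function from the add-on):

def parse_scraper_list(scraper_list):
    # Mirror the loop above: strip spaces, skip blank lines, keep only
    # entries whose URL starts with 'http'.
    entries = []
    for line in scraper_list.split('\n'):
        line = line.replace(' ', '')
        if not line:
            continue
        scraper_url, filename = line.split(',')
        if scraper_url.startswith('http'):
            entries.append((filename, scraper_url))
    return entries

# parse_scraper_list('http://host/a.py , a.py\nnotes,ignored')
# -> [('a.py', 'http://host/a.py')]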
Example #2
def update_scraper(filename, scraper_url):
    try:
        if not filename: return
        py_path = os.path.join(kodi.get_path(), 'scrapers', filename)
        exists = os.path.exists(py_path)
        scraper_password = kodi.get_setting('scraper_password')
        if scraper_url and scraper_password:
            new_py = utils2.get_and_decrypt(scraper_url, scraper_password)
            if new_py:
                if exists:
                    with open(py_path, 'r') as f:
                        old_py = f.read()
                else:
                    old_py = ''

                log_utils.log(
                    '%s path: %s, new_py: %s, match: %s' %
                    (filename, py_path, bool(new_py), new_py == old_py),
                    log_utils.LOGDEBUG)
                if old_py != new_py:
                    with open(py_path, 'w') as f:
                        f.write(new_py)

    except Exception as e:
        log_utils.log('Failure during %s scraper update: %s' % (filename, e),
                      log_utils.LOGWARNING)
Example #3
def update_all_scrapers():
    try:
        last_check = int(kodi.get_setting('last_list_check'))
    except:
        last_check = 0
    now = int(time.time())
    list_url = kodi.get_setting('scraper_url')
    scraper_password = kodi.get_setting('scraper_password')
    list_path = os.path.join(kodi.translate_path(kodi.get_profile()),
                             'scraper_list.txt')
    exists = os.path.exists(list_path)
    if list_url and scraper_password and (not exists or last_check <
                                          (now - (24 * 60 * 60))):
        scraper_list = utils2.get_and_decrypt(list_url, scraper_password)
        if scraper_list:
            try:
                with open(list_path, 'w') as f:
                    f.write(scraper_list)

                kodi.set_setting('last_list_check', str(now))
                kodi.set_setting(
                    'scraper_last_update',
                    time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)))
                for line in scraper_list.split('\n'):
                    line = line.replace(' ', '')
                    if line:
                        scraper_url, filename = line.split(',')
                        if scraper_url.startswith('http'):
                            update_scraper(filename, scraper_url)
            except Exception as e:
                log_utils.log('Exception during scraper update: %s' % (e),
                              log_utils.LOGWARNING)
Example #4
def update_scraper(filename, scraper_url):
    try:
        if not filename: return
        py_path = os.path.join(kodi.get_path(), 'scrapers', filename)
        exists = os.path.exists(py_path)
        scraper_password = kodi.get_setting('scraper_password')
        if scraper_url and scraper_password:
            old_lm = None
            old_py = ''
            if exists:
                with open(py_path, 'r') as f:
                    old_py = f.read()
                    match = re.search(r'^#\s+Last-Modified:\s*(.*)', old_py)
                    if match:
                        old_lm = match.group(1).strip()

            new_lm, new_py = utils2.get_and_decrypt(scraper_url, scraper_password, old_lm)
            if new_py:
                logger.log('%s path: %s, new_py: %s, match: %s' % (filename, py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
                if old_py != new_py:
                    with open(py_path, 'w') as f:
                        f.write('# Last-Modified: %s\n' % (new_lm))
                        f.write(new_py)
                    kodi.notify(msg=utils2.i18n('scraper_updated') + filename)
                        
    except Exception as e:
        logger.log('Failure during %s scraper update: %s' % (filename, e), log_utils.LOGWARNING)
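The variant above caches the server's Last-Modified value in a leading "# Last-Modified: ..." comment and hands it back to utils2.get_and_decrypt so an unchanged scraper is not re-downloaded. A minimal sketch of that round trip, using the same comment format and regex (write_with_lm and read_cached_lm are illustrative names, not add-on functions):

import re

def write_with_lm(py_path, new_lm, new_py):
    # Prepend the timestamp comment exactly as update_scraper() does.
    with open(py_path, 'w') as f:
        f.write('# Last-Modified: %s\n' % (new_lm))
        f.write(new_py)

def read_cached_lm(py_path):
    # Recover the cached timestamp with the same pattern update_scraper() uses.
    with open(py_path, 'r') as f:
        match = re.search(r'^#\s+Last-Modified:\s*(.*)', f.read())
    return match.group(1).strip() if match else None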
Example #5
def update_scraper(file_name, scraper_url, scraper_key):
    py_path = os.path.join(kodi.get_path(), 'scrapers', file_name)
    exists = os.path.exists(py_path)
    if not exists or (time.time() - os.path.getmtime(py_path)) > (8 * 60 * 60):
        new_py = utils2.get_and_decrypt(scraper_url, scraper_key)
        if new_py:
            if exists:
                with open(py_path, 'r') as f:
                    old_py = f.read()
            else:
                old_py = ''
            
            log_utils.log('%s path: %s, new_py: %s, match: %s' % (__file__, py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
            if old_py != new_py:
                with open(py_path, 'w') as f:
                    f.write(new_py)
Example #6
def update_scraper(file_name, scraper_url, scraper_key):
    py_path = os.path.join(kodi.get_path(), 'scrapers', file_name)
    exists = os.path.exists(py_path)
    if not exists or (time.time() - os.path.getmtime(py_path)) > (8 * 60 * 60):
        new_py = utils2.get_and_decrypt(scraper_url, scraper_key)
        if new_py:
            if exists:
                with open(py_path, 'r') as f:
                    old_py = f.read()
            else:
                old_py = ''

            log_utils.log(
                '%s path: %s, new_py: %s, match: %s' %
                (__file__, py_path, bool(new_py), new_py == old_py),
                log_utils.LOGDEBUG)
            if old_py != new_py:
                with open(py_path, 'w') as f:
                    f.write(new_py)
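The two examples above throttle by file age rather than a cached timestamp: the scraper is refetched only when the local .py file is missing or its mtime is more than 8 hours old. That check in isolation (needs_refresh is an illustrative name, not an add-on function):

import os
import time

def needs_refresh(py_path, max_age=8 * 60 * 60):
    # True when the cached file is absent or older than max_age seconds.
    return not os.path.exists(py_path) or (time.time() - os.path.getmtime(py_path)) > max_age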
Example #7
def update_scraper(filename, scraper_url):
    try:
        if not filename: return
        py_path = os.path.join(kodi.get_path(), 'scrapers', filename)
        exists = os.path.exists(py_path)
        scraper_password = kodi.get_setting('scraper_password')
        if scraper_url and scraper_password:
            new_py = utils2.get_and_decrypt(scraper_url, scraper_password)
            if new_py:
                if exists:
                    with open(py_path, 'r') as f:
                        old_py = f.read()
                else:
                    old_py = ''
                
                log_utils.log('%s path: %s, new_py: %s, match: %s' % (filename, py_path, bool(new_py), new_py == old_py), log_utils.LOGDEBUG)
                if old_py != new_py:
                    with open(py_path, 'w') as f:
                        f.write(new_py)
                        
    except Exception as e:
        log_utils.log('Failure during %s scraper update: %s' % (filename, e), log_utils.LOGWARNING)
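All of these examples assume the add-on's own helpers (kodi, utils2, log_utils/logger) and the standard os, time and re modules are imported at module level. As a usage sketch only, and not code from the add-on, a Kodi service could drive the periodic check with xbmc.Monitor and let update_all_scrapers() itself enforce the 24-hour throttle:

import xbmc

def run_update_service():
    monitor = xbmc.Monitor()
    while not monitor.abortRequested():
        update_all_scrapers()              # no-op unless the 24h window has passed
        if monitor.waitForAbort(30 * 60):  # wake every 30 minutes, exit early on abort
            break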