def update_addons():
    """Compare locally-installed addons against the install log and report.

    Reads the addon database directly and logs install-log entries for every
    enabled, locally-installed addon (origin = '').
    """
    # NOTE(review): this unconditional return disables the entire body below --
    # everything after it is unreachable. Presumably left in to turn the
    # updater off; confirm whether that is still intended.
    return
    save_file = kodi.vfs.join(kodi.get_profile(), "install.log")
    # Load the compressed-JSON install log if present, else start empty.
    if vfs.exists(save_file):
        temp = kodi.load_data(save_file, format='json', compress=True)
    else:
        temp = {}
    kodi.open_busy_dialog()
    v = kodi.get_kodi_version()
    from sqlite3 import dbapi2
    # Kodi 17 (Krypton) renamed the addon database from Addons20 to Addons27.
    dbf = kodi.vfs.join("special://profile/Database", "Addons20.db")
    if v >= 17:
        dbf = kodi.vfs.join("special://profile/Database", "Addons27.db")
    # Enabled addons with origin = '' (i.e. not installed from a repository).
    SQL = """SELECT installed.addonID, addons.version from installed JOIN addons on installed.addonID=addons.addonID WHERE origin = '' and enabled=1"""
    with dbapi2.connect(dbf) as dbh:
        dbc = dbh.cursor()
        dbc.execute(SQL)
        for a in dbc.fetchall():
            if a[0] in temp:
                kodi.log(temp[a[0]])
            else:
                # NOTE(review): reassigning dbf here has no effect on the open
                # connection; looks like leftover/dead code -- confirm.
                dbf = kodi.vfs.join("special://profile/Database", "Addons20.db")
    kodi.close_busy_dialog()
    kodi.notify("Update complete", 'Update complete')
class GIFScraper(Scraper):
    """Artwork scraper for animated GIF movie posters hosted on consiliumb.com."""
    BASE_URL = 'www.consiliumb.com'
    IMAGE_BASE = 'http://' + BASE_URL + '/animatedgifs/'
    LOCAL_BASE = os.path.join(kodi.get_profile(), 'gif_posters')

    def get_movie_images(self, ids):
        """Look up the movie by imdb/tmdb id and return a cleaned art dict
        containing the best poster URL (empty dict when no usable id given)."""
        art_dict = {}
        if 'imdb' in ids:
            lookup = ('imdbid', ids['imdb'])
        elif 'tmdb' in ids:
            lookup = ('tmdbid', ids['tmdb'])
        else:
            return art_dict
        key, value = lookup

        # Site-wide index of movies with animated art; cache for a week.
        meta = self._get_url('/animatedgifs/movies.json', cache_limit=24 * 7)
        for movie in meta.get('movies', []):
            if movie.get(key) != value:
                continue
            posters = [entry for entry in movie.get('entries', [])
                       if entry.get('type') == 'poster']
            art_dict['poster'] = self.__get_best_image(posters)
        return self._clean_art(art_dict)

    def __get_best_image(self, images):
        """Pick the largest image and return its full-resolution URL (or None)."""
        images.sort(key=lambda entry: entry['size'], reverse=True)
        if not images:
            return None
        name = images[0].get('image')
        if not name:
            return None
        # The index lists scaled previews; swap in the original-size gif.
        name = name.replace('.gif', '_original.gif')
        return self.IMAGE_BASE + name
def update_all_scrapers():
    """Fetch the encrypted scraper list (at most once per 24h) and update
    every scraper it names.

    Does nothing unless both the list URL and password settings are set.
    Failures during the update are logged, never raised to the caller.
    """
    try:
        last_check = int(kodi.get_setting('last_list_check'))
    except (TypeError, ValueError):
        # Setting missing or not numeric: treat as "never checked".
        # (Was a bare except:, which also swallowed SystemExit/KeyboardInterrupt.)
        last_check = 0
    now = int(time.time())
    list_url = kodi.get_setting('scraper_url')
    scraper_password = kodi.get_setting('scraper_password')
    list_path = os.path.join(kodi.translate_path(kodi.get_profile()), 'scraper_list.txt')
    exists = os.path.exists(list_path)
    # Refresh when we've never downloaded the list, or the last check is
    # older than 24 hours.
    if list_url and scraper_password and (not exists or last_check < (now - (24 * 60 * 60))):
        scraper_list = utils2.get_and_decrypt(list_url, scraper_password)
        if scraper_list:
            try:
                with open(list_path, 'w') as f:
                    f.write(scraper_list)

                kodi.set_setting('last_list_check', str(now))
                kodi.set_setting('scraper_last_update', time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)))
                # Each non-blank line is "<url>,<filename>"; spaces stripped.
                for line in scraper_list.split('\n'):
                    line = line.replace(' ', '')
                    if line:
                        scraper_url, filename = line.split(',')
                        if scraper_url.startswith('http'):
                            update_scraper(filename, scraper_url)
            except Exception as e:
                log_utils.log('Exception during scraper update: %s' % (e), log_utils.LOGWARNING)
def update_all_scrapers():
    """Fetch the encrypted scraper list (at most once per 15 min) and update
    every scraper it names.

    Does nothing unless both the list URL and password settings are set.
    Failures during the update are logged, never raised to the caller.
    """
    try:
        last_check = int(kodi.get_setting('last_list_check'))
    except (TypeError, ValueError):
        # Setting missing or not numeric: treat as "never checked".
        # (Was a bare except:, which also swallowed SystemExit/KeyboardInterrupt.)
        last_check = 0
    now = int(time.time())
    list_url = kodi.get_setting('scraper_url')
    scraper_password = kodi.get_setting('scraper_password')
    list_path = os.path.join(kodi.translate_path(kodi.get_profile()), 'scraper_list.txt')
    exists = os.path.exists(list_path)
    # Refresh when we've never downloaded the list, or the last check is
    # older than 15 minutes.
    if list_url and scraper_password and (not exists or (now - last_check) > 15 * 60):
        _etag, scraper_list = utils2.get_and_decrypt(list_url, scraper_password)
        if scraper_list:
            try:
                with open(list_path, 'w') as f:
                    f.write(scraper_list)

                kodi.set_setting('last_list_check', str(now))
                kodi.set_setting(
                    'scraper_last_update',
                    time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)))
                # Each non-blank line is "<url>,<filename>"; spaces stripped.
                for line in scraper_list.split('\n'):
                    line = line.replace(' ', '')
                    if line:
                        scraper_url, filename = line.split(',')
                        if scraper_url.startswith('http'):
                            update_scraper(filename, scraper_url)
            except Exception as e:
                logger.log('Exception during scraper update: %s' % (e), log_utils.LOGWARNING)
def save_sources(self, sources):
    """Merge *sources* into the persisted install log (compressed JSON)."""
    log_path = kodi.vfs.join(kodi.get_profile(), "install.log")
    # Start from the existing log when there is one, otherwise a fresh map.
    if vfs.exists(log_path):
        existing = kodi.load_data(log_path, format='json', compress=True)
    else:
        existing = {}
    existing.update(sources)
    kodi.save_data(log_path, existing, format='json', compress=True)
class GIFScraper(Scraper):
    """Animated GIF poster scraper that downloads and caches posters locally."""
    BASE_URL = 'www.consiliumb.com'
    IMAGE_BASE = 'http://' + BASE_URL + '/animatedgifs/'
    LOCAL_BASE = os.path.join(kodi.get_profile(), 'gif_posters')

    def get_movie_images(self, ids):
        """Look up the movie by imdb/tmdb id and return a cleaned art dict
        with a locally-cached poster path ('' when nothing usable found)."""
        art_dict = {}
        if 'imdb' in ids:
            key = 'imdbid'
            value = ids['imdb']
        elif 'tmdb' in ids:
            key = 'tmdbid'
            value = ids['tmdb']
        else:
            return art_dict

        # Site-wide index of movies with animated art; cache for a week.
        url = '/animatedgifs/movies.json'
        meta = self._get_url(url, cache_limit=24 * 7)
        for movie in meta.get('movies', []):
            if movie.get(key) == value:
                images = [
                    image for image in movie.get('entries', [])
                    if image.get('type') == 'poster'
                ]
                art_dict['poster'] = self.__get_best_image(images)
        return self._clean_art(art_dict)

    def __get_best_image(self, images):
        """Download the largest poster into LOCAL_BASE and return its local
        path; return '' on any failure (best-effort)."""
        local_best = ''
        images.sort(key=lambda x: x['size'], reverse=True)
        if images:
            best_image = images[0].get('image')
            if best_image:
                # The index lists scaled previews; fetch the original-size gif.
                best_image = best_image.replace('.gif', '_original.gif')
                best_url = self.IMAGE_BASE + best_image
                local_best = os.path.join(self.LOCAL_BASE, best_image)
                try:
                    self.__create_path(self.LOCAL_BASE)
                    tlb = kodi.translate_path(local_best)
                    if not os.path.exists(tlb):
                        with open(tlb, 'wb') as f:
                            f.write(self._get_url(best_url))
                except Exception as e:
                    # Caching is best-effort: log and fall back to "no art"
                    # rather than failing the whole scrape. (Was a bare
                    # `except:`, which silently swallowed everything,
                    # including SystemExit/KeyboardInterrupt.)
                    log_utils.log('Failed to cache image %s: %s' % (best_url, e), log_utils.LOGWARNING)
                    local_best = ''
        return local_best

    def __create_path(self, path):
        """Create *path* (translated) if missing; log and re-raise on failure."""
        try:
            path = kodi.translate_path(path)
            if not os.path.exists(path):
                os.makedirs(path)
        except Exception as e:
            log_utils.log('Failed to create path: %s: %s' % (path, e), log_utils.LOGWARNING)
            raise
def download(url, filename, destination, unzip=False):
    """Stream *url* into the profile downloads dir with a progress dialog,
    then move the file to *destination* (or extract it there when unzip=True).

    Returns True on success, False if the user cancels.
    Raises downloaderException(status_code) on a non-200 response.
    """
    r = requests.get(url, stream=True)
    kodi.log("Download: %s" % url)
    if r.status_code != requests.codes.ok:
        kodi.close_busy_dialog()
        raise downloaderException(r.status_code)

    temp_dir = kodi.vfs.join(kodi.get_profile(), "downloads")
    if not kodi.vfs.exists(temp_dir):
        kodi.vfs.mkdir(temp_dir, recursive=True)
    temp_file = kodi.vfs.join(temp_dir, filename)

    try:
        total_bytes = int(r.headers["Content-Length"])
    except (KeyError, ValueError, TypeError):
        # Header absent or malformed: progress percentage unavailable.
        total_bytes = 0

    block_size = 1000
    cached_bytes = 0
    pb = xbmcgui.DialogProgress()
    pb.create("Downloading", filename, ' ', ' ')
    kodi.sleep(150)
    start = time.time()
    try:
        with open(temp_file, 'wb') as f:
            for block in r.iter_content(chunk_size=block_size):
                if not block:
                    break
                if pb.iscanceled():
                    # Dialog is closed by the finally below (previously it
                    # was leaked open on cancel).
                    return False
                cached_bytes += len(block)
                f.write(block)
                if total_bytes > 0:
                    delta = int(time.time() - start)
                    if delta:
                        kbs = int(cached_bytes / (delta * 1000))
                    else:
                        kbs = 0
                    percent = int(cached_bytes * 100 / total_bytes)
                    pb.update(percent, "Downloading", filename, format_speed(kbs))
    finally:
        # Always dismiss the progress dialog, even on cancel or error.
        pb.close()

    if unzip:
        # Ensure the archive handle is released even if extraction fails.
        zip_ref = zipfile.ZipFile(temp_file, 'r')
        try:
            zip_ref.extractall(destination)
        finally:
            zip_ref.close()
        kodi.vfs.rm(temp_file, quiet=True)
    else:
        kodi.vfs.mv(temp_file, kodi.vfs.join(destination, filename))
    return True
from salts_lib.constants import Q_ORDER
from salts_lib.constants import SHORT_MONS
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import DEFAULT_TIMEOUT
from salts_lib.db_utils import DB_Connection
from salts_lib.utils2 import i18n, ungz

# urlresolver is an external addon; warn the user instead of crashing when it
# cannot be imported.
try:
    import urlresolver
except:
    kodi.notify(msg=i18n('smu_failed'), duration=5000)

logger = log_utils.Logger.get_logger()

BASE_URL = ''
# Cookies are persisted under the addon profile directory.
COOKIEPATH = kodi.translate_path(kodi.get_profile())
MONTHS = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
# Hard cap on HTTP response size (5 MiB).
MAX_RESPONSE = 1024 * 1024 * 5
CF_CAPCHA_ENABLED = kodi.get_setting('cf_captcha') == 'true'


class ScrapeError(Exception):
    """Scraper-specific error type."""
    pass


class NoRedirection(urllib2.HTTPErrorProcessor):
    """urllib2 handler that returns 3xx responses unchanged, so redirects are
    not followed automatically."""

    def http_response(self, request, response):  # @UnusedVariable
        logger.log('Stopping Redirect', log_utils.LOGDEBUG)
        return response

    https_response = http_response


# Alias: abstract static method decorator used by scraper base classes.
abstractstaticmethod = abc.abstractmethod
from salts_lib.constants import Q_ORDER
from salts_lib.constants import SHORT_MONS
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import DEFAULT_TIMEOUT
from salts_lib.db_utils import DB_Connection
from salts_lib.utils2 import i18n, ungz

# urlresolver is an external addon; warn the user instead of crashing when it
# cannot be imported.
try:
    import urlresolver
except:
    kodi.notify(msg=i18n('smu_failed'), duration=5000)

logger = log_utils.Logger.get_logger()

BASE_URL = ''
# Cookies are persisted under the addon profile directory.
COOKIEPATH = kodi.translate_path(kodi.get_profile())
MONTHS = [
    'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August',
    'September', 'October', 'November', 'December'
]
# Hard cap on HTTP response size (5 MiB).
MAX_RESPONSE = 1024 * 1024 * 5
CF_CAPCHA_ENABLED = kodi.get_setting('cf_captcha') == 'true'


class ScrapeError(Exception):
    """Scraper-specific error type."""
    pass


class NoRedirection(urllib2.HTTPErrorProcessor):
    """urllib2 handler intended to stop automatic redirect handling."""

    def http_response(self, request, response):  # @UnusedVariable
        logger.log('Stopping Redirect', log_utils.LOGDEBUG)
        # NOTE(review): unlike the sibling copy of this class, there is no
        # `return response` here. If that is not just chunk truncation, this
        # handler returns None -- confirm against the full file.
class MyRequestHandler(SimpleHTTPRequestHandler):
    """Local HTTP proxy that resolves and serves scraped artwork.

    Endpoints:
      /ping  -- liveness check, replies 'OK'.
      /      -- resolve artwork for a video/person and redirect to the image
                URL, or stream it from disk when the path is local.
      /clear -- drop the cached artwork for one video.
    """
    # Resolved artwork, keyed by (video_type, trakt_id, season, episode).
    proxy_cache = {}
    LOG_FILE = kodi.translate_path(
        os.path.join(kodi.get_profile(), 'proxy.log'))
    try:
        log_fd = open(LOG_FILE, 'w')
    except:
        # Logging is optional; run without a log file if it can't be opened.
        log_fd = None
    # Guards proxy_cache across handler threads.
    lock = threading.Lock()
    ping_required = {}
    base_req = ['video_type', 'trakt_id', 'video_ids']
    clear_required = {
        '': base_req,
        'Season': base_req + ['season'],
        'Episode': base_req + ['season', 'episode'],
    }
    base_req = base_req[:] + ['image_type']
    image_required = {
        '': base_req,
        'Season': base_req + ['season'],
        'Episode': base_req + ['season', 'episode'],
        'person': base_req + ['name', 'person_ids']
    }
    # Per-action required query parameters, validated in __validate.
    required = {
        '/ping': ping_required,
        '/': image_required,
        '/clear': clear_required
    }

    def _set_headers(self, code=200):
        self.send_response(code)
        self.end_headers()

    def __redirect(self, url):
        # Permanent redirect to the resolved remote image.
        self.send_response(301)
        self.send_header('Location', url)
        self.end_headers()

    def log_message(self, format, *args):
        # Route BaseHTTPServer logging into our own file (if open).
        if self.log_fd is not None:
            self.log_fd.write('[%s] (%s) %s\n' % (self.log_date_time_string(), threading.current_thread().getName(), format % (args)))

    def do_HEAD(self):
        # HEAD follows the GET path; the local-file branch skips the body.
        return self.do_GET()

    def do_POST(self):
        self._set_headers(400)

    def do_GET(self):
        try:
            action, fields = self.__validate(self.path)
            if action == '/ping':
                self._set_headers()
                self.wfile.write('OK')
                return
            else:
                key = (fields['video_type'], fields['trakt_id'], fields.get('season'), fields.get('episode'))
                if action == '/clear':
                    with self.lock:
                        if key in self.proxy_cache:
                            del self.proxy_cache[key]
                    self._set_headers()
                    self.wfile.write('OK')
                    return
                else:
                    with self.lock:
                        if key in self.proxy_cache:
                            images = self.proxy_cache[key]
                        else:
                            # Cache miss: scrape artwork now and remember it.
                            video_ids = json.loads(fields['video_ids'])
                            if fields['video_type'] == image_scraper.OBJ_PERSON:
                                person_ids = json.loads(fields['person_ids'])
                                person = {'person': {'name': fields['name'], 'ids': person_ids}}
                                images = image_scraper.scrape_person_images(video_ids, person)
                            else:
                                images = image_scraper.scrape_images(fields['video_type'], video_ids, fields.get('season', ''), fields.get('episode', ''))
                            self.proxy_cache[key] = images

                    image_url = images[fields['image_type']]
                    if image_url is None:
                        self._set_headers()
                    elif image_url.startswith('http'):
                        self.__redirect(image_url)
                    else:
                        self._set_headers()
                        if self.command == 'GET':
                            # 'rb' is required: images are binary and text
                            # mode corrupts them on Windows (was open(image_url)).
                            with open(image_url, 'rb') as f:
                                self.wfile.write(f.read())
        except ValidationError as e:
            self.__send_error(e)

    def __validate(self, path):
        """Split *path* into (action, params) and verify required parameters.

        Raises ValidationError for unknown actions or missing parameters.
        """
        action = path.split('?')[0]
        params = self.parse_query(path)
        if action not in self.required:
            raise ValidationError('Unrecognized Action: %s' % (action))

        # Parameters every request for this action must carry.
        if '' in self.required[action]:
            required = self.required[action][''][:]
            for key in self.required[action]['']:
                if key in params:
                    required.remove(key)
            if required:
                raise ValidationError('Missing Base Parameters: %s' % (', '.join(required)))

        # Extra parameters required for specific video types.
        if 'video_type' in params:
            video_type = params['video_type']
            if video_type in self.required[action]:
                required = self.required[action][video_type][:]
                for key in self.required[action][video_type]:
                    if key in params:
                        required.remove(key)
                if required:
                    raise ValidationError('Missing Sub Parameters: %s' % (', '.join(required)))

        return action, params

    def __send_error(self, msg):
        self.send_error(400, str(msg))

    @staticmethod
    def parse_query(path):
        """Parse the query string of *path* into a dict; single-valued keys
        are unquoted strings, multi-valued keys keep their list."""
        q = {}
        query = urlparse.urlparse(path).query
        if query.startswith('?'):
            query = query[1:]
        queries = urlparse.parse_qs(query)
        for key in queries:
            if len(queries[key]) == 1:
                q[key] = urllib.unquote(queries[key][0])
            else:
                q[key] = queries[key]
        return q
along with this program. If not, see <http://www.gnu.org/licenses/>. """ import functools import log_utils import time import cPickle as pickle import hashlib import os import shutil import kodi logger = log_utils.Logger.get_logger(__name__) logger.disable() try: cache_path = kodi.translate_path(os.path.join(kodi.get_profile(), 'cache')) if not os.path.exists(cache_path): os.makedirs(cache_path) except Exception as e: logger.log('Failed to create cache: %s: %s' % (cache_path, e), log_utils.LOGWARNING) cache_enabled = kodi.get_setting('use_cache') == 'true' def reset_cache(): try: shutil.rmtree(cache_path) return True except Exception as e: logger.log('Failed to Reset Cache: %s' % (e), log_utils.LOGWARNING) return False
import scraper

# Header marking requests as AJAX.
XHR = {'X-Requested-With': 'XMLHttpRequest'}
SEARCH_TYPES = {VIDEO_TYPES.MOVIE: 'movies', VIDEO_TYPES.TVSHOW: 'series'}
BASE_URL = 'http://torba.se'
SEARCH_URL = '/%s/autocomplete'
BASE_URL2 = 'https://streamtorrent.tv'
TOR_URL = BASE_URL2 + '/api/torrent/%s.json'
PL_URL = BASE_URL2 + '/api/torrent/%s/%s.m3u8?json=true'
OAUTH_GET_URL = BASE_URL2 + '/api/oauth/client'
OAUTH_CRED_URL = BASE_URL2 + '/api/oauth/credentials?device_code=%s'
OAUTH_TOKEN_URL = BASE_URL2 + '/api/oauth/token'
# Generated playlist is written into the addon profile directory.
M3U8_PATH = os.path.join(kodi.translate_path(kodi.get_profile()), 'torbase.m3u8')
# Line template for the generated m3u8; {placeholders} are filled in later.
M3U8_TEMPLATE = [
    '#EXTM3U',
    '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",DEFAULT=YES,AUTOSELECT=YES,NAME="Stream 1",URI="{audio_stream}"',
    '',
    '#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=0,NAME="{stream_name}",AUDIO="audio"',
    '{video_stream}']


class Scraper(scraper.Scraper):
    # NOTE(review): this class very likely continues past this chunk.
    base_url = BASE_URL
    # presumably flags whether OAuth credentials are needed -- confirm usage.
    auth_url = False

    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        # Base URL is overridable per-scraper through addon settings.
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
# NOTE(review): this chunk starts mid-method (it references self); the
# enclosing def is outside this view.
kodi.log("%s loading pysqlite2 as DB engine" % kodi.get_name())
if self.quiet is False:
    kodi.log("Connecting to SQLite on: " + self.db_file)
directory = os.path.dirname(self.db_file)
# Ensure the directory for the database file exists before connecting.
if not kodi.vfs.exists(directory):
    kodi.vfs.mkdir(directory)
# check_same_thread=False: the handle is shared across threads.
self.DBH = database.connect(self.db_file, check_same_thread=False)
try:
    self.DBC = self.DBH.cursor()
except Exception, e:
    # Fatal: surface the error to the user and stop the addon.
    kodi.raise_error("SqlLite Error", e)
    sys.exit()
self.__connected = True

DB_TYPE = 'sqlite'
# The cache database lives in the addon profile directory.
DB_FILE = kodi.vfs.join(kodi.get_profile(), 'cache.db')


class DBI(SQLiteDatabase):
    def _initialize(self):
        """Apply the bundled schema script and stamp the db version row."""
        self.connect()
        schema_file = kodi.vfs.join(
            kodi.get_path(),
            'resources/database/schema.%s.sql' % self.db_type)
        # Only re-stamp the version when the schema script actually ran.
        if self.run_script(schema_file, commit=False):
            self.execute('DELETE FROM version WHERE 1')
            self.execute('INSERT INTO version(db_version) VALUES(?)', [self.db_version])
            self.commit()
        self.disconnect()
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import urlresolver
import xbmcgui
import xbmcplugin
import sys
import os.path
from url_dispatcher import URL_Dispatcher
import log_utils
import kodi


def __enum(**enums):
    # Lightweight enum substitute: builds a class with the given attributes.
    return type('Enum', (), enums)


# Saved links are stored one per line in the addon profile.
LINK_PATH = os.path.join(kodi.translate_path(kodi.get_profile()), 'links.txt')
MODES = __enum(
    MAIN='main', ADD_LINK='add_link', PLAY_LINK='play_link',
    DELETE_LINK='delete_link', SETTINGS='settings', EDIT_LINK='edit_link'
)

url_dispatcher = URL_Dispatcher()


@url_dispatcher.register(MODES.MAIN)
def main_menu():
    """Top-level directory: static actions plus one entry per saved link.

    NOTE(review): this function continues past this chunk; only its opening
    statements are visible here.
    """
    kodi.create_item({'mode': MODES.ADD_LINK}, 'Add Link', is_folder=False, is_playable=False)
    kodi.create_item({'mode': MODES.SETTINGS}, 'URLResolver Settings', is_folder=False, is_playable=False)
    if os.path.exists(LINK_PATH):
        with open(LINK_PATH) as f:
            for i, line in enumerate(f):
                # Each line appears to be '|'-separated with the link first --
                # TODO confirm against the rest of the function.
                item = line.split('|')
                link = item[0].strip()
# NOTE(review): this chunk starts mid-method (it references self); the
# enclosing def is outside this view.
from pysqlite2 import dbapi2 as database
if self.quiet is False:
    kodi.log("%s loading pysqlite2 as DB engine" % kodi.get_name())
if self.quiet is False:
    kodi.log("Connecting to SQLite on: " + self.db_file)
directory = os.path.dirname(self.db_file)
# Ensure the directory for the database file exists before connecting.
if not kodi.vfs.exists(directory):
    kodi.vfs.mkdir(directory)
# check_same_thread=False: the handle is shared across threads.
self.DBH = database.connect(self.db_file, check_same_thread=False)
try:
    self.DBC = self.DBH.cursor()
except Exception, e:
    # Fatal: log, surface the error to the user, and stop the addon.
    kodi.log(e)
    kodi.raise_error("SqlLite Error", e)
    sys.exit()
self.__connected = True

DB_TYPE = 'sqlite'
# The cache database lives in the addon profile directory.
DB_FILE = kodi.vfs.join(kodi.get_profile(), 'cache.db')


class DBI(SQLiteDatabase):
    def _initialize(self):
        """Apply the bundled schema script and stamp the db version row."""
        self.connect()
        schema_file = kodi.vfs.join(kodi.get_path(), 'resources/database/schema.%s.sql' % self.db_type)
        # Only re-stamp the version when the schema script actually ran.
        if self.run_script(schema_file, commit=False):
            self.execute('DELETE FROM version WHERE 1', silent=True)
            self.execute('INSERT INTO version(db_version) VALUES(?)', [self.db_version], silent=True)
            self.commit()
        self.disconnect()


# Shared, module-level database handle used by the rest of the addon.
DB = DBI(DB_FILE, quiet=True, connect=True)
import xbmcplugin
import sys
import os
import shutil
from url_dispatcher import URL_Dispatcher
import log_utils
import kodi

logger = log_utils.Logger.get_logger()


def __enum(**enums):
    # Lightweight enum substitute: builds a class with the given attributes.
    return type('Enum', (), enums)


DATA_PATH = kodi.translate_path(kodi.get_profile())
# Saved links live in a subdirectory so they can be organized into folders.
LINK_PATH = os.path.join(DATA_PATH, 'links')
LINK_FILE = 'links.txt'
# Create the profile and links directories on first run.
if not os.path.exists(DATA_PATH):
    os.mkdir(DATA_PATH)
if not os.path.exists(LINK_PATH):
    os.mkdir(LINK_PATH)

# NOTE(review): this call is cut off by the chunk boundary; the remaining
# keyword arguments and closing paren are past this view.
MODES = __enum(
    MAIN='main', ADD_LINK='add_link', PLAY_LINK='play_link',
    DELETE_LINK='delete_link', SETTINGS='settings', EDIT_LINK='edit_link',
    OPEN_DIR='open_dir', CREATE_DIR='create_dir',
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import functools
import log_utils
import time
import pickle
import hashlib
import os
import shutil
import kodi

logger = log_utils.Logger.get_logger(__name__)

try:
    # Function-result cache lives under the addon profile directory.
    cache_path = kodi.translate_path(os.path.join(kodi.get_profile(), 'cache'))
    if not os.path.exists(cache_path):
        os.makedirs(cache_path)
except Exception as e:
    # NOTE(review): if translate_path/get_profile raises, cache_path is
    # unbound here and this log line itself raises NameError -- confirm/fix.
    logger.log('Failed to create cache: %s: %s' % (cache_path, e), log_utils.LOGWARNING)

cache_enabled = kodi.get_setting('use_cache') == 'true'


def reset_cache():
    # Delete the whole on-disk cache directory tree; True on success.
    try:
        shutil.rmtree(cache_path)
        return True
    except Exception as e:
        logger.log('Failed to Reset Cache: %s' % (e), log_utils.LOGWARNING)
        # NOTE(review): chunk may be truncated here -- the sibling copy of
        # this function returns False on this path; confirm.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import zipfile
import xbmcvfs
import db_utils
import kodi

DB_NAME = 'tmdb_cache.db'
# The cache database lives in the addon profile directory.
DB_FOLDER = kodi.translate_path(kodi.get_profile())
DB_PATH = os.path.join(DB_FOLDER, DB_NAME)


def _update_db():
    """Refresh the bundled tmdb cache when its stored db_version does not
    match the current addon version.

    NOTE(review): this function continues past this chunk; only the version
    check and the opening of the bundled zip are visible here.
    """
    db_ver = None
    # Read the version stamp from the existing cache db, if any.
    if xbmcvfs.exists(DB_PATH):
        db_connection = db_utils.DBCache(DB_PATH)
        db_ver = db_connection.get_setting('db_version')
        db_connection.close()
    if db_ver != kodi.get_version():
        try:
            # The replacement db ships inside the addon as tmdb_cache.zip.
            zip_path = os.path.join(kodi.translate_path(kodi.get_path()), 'tmdb_cache.zip')
            zip_file = zipfile.ZipFile(zip_path, 'r')