def _update_db():
    """Refresh the bundled image-cache DB when its stored version differs
    from the addon version.

    Reads 'db_version' from the existing cache DB (if present), and when it
    does not match get_version(), downloads the zipped DB, extracts it over
    the old file, and stamps the new version.
    """
    db_ver = None
    if xbmcvfs.exists(DB_PATH):
        db_connection = db_utils.DBCache(DB_PATH)
        db_ver = db_connection.get_setting('db_version')
        db_connection.close()

    if db_ver != get_version():
        db_connection = None
        try:
            # TODO: remove once updated tknorris.shared is out
            try:
                utils.download_media(ZIP_SOURCE, kodi.translate_path(get_profile()), CACHE_NAME,
                                     kodi.Translations(strings.STRINGS), utils.PROGRESS.WINDOW)
            except TypeError:
                # older tknorris.shared doesn't accept the progress argument
                utils.download_media(ZIP_SOURCE, kodi.translate_path(get_profile()), CACHE_NAME,
                                     kodi.Translations(strings.STRINGS))
            zip_path = os.path.join(kodi.translate_path(get_profile()), ZIP_NAME)
            zip_file = zipfile.ZipFile(zip_path, 'r')
            zip_file.extract(DB_NAME, DB_FOLDER)
            db_connection = db_utils.DBCache(DB_PATH)
            db_connection.set_setting('db_version', get_version())
        finally:
            # zip_file is unbound if the download/open failed before ZipFile()
            try:
                zip_file.close()
            except UnboundLocalError:
                pass
            # BUGFIX: the write connection was previously leaked; close it to
            # match how the read connection above is handled
            if db_connection is not None:
                db_connection.close()
def __prep_for_reinit(self):
    """Export the DB to a timestamped CSV backup, then drop all DB objects."""
    db_dir = kodi.translate_path("special://database")
    backup_name = 'mig_export_%s.csv' % (int(time.time()))
    self.mig_path = os.path.join(db_dir, backup_name)
    log_utils.log('Backing up DB to %s' % (self.mig_path), log_utils.LOGDEBUG)
    self.export_from_db(self.mig_path)
    log_utils.log('Backup export of DB created at %s' % (self.mig_path))
    self.__drop_all()
    log_utils.log('DB Objects Dropped', log_utils.LOGDEBUG)
def __init__(self):
    """Select the DB engine (MySQL when remote DB is enabled and fully
    configured, sqlite3 otherwise), rebind the module-level exception
    aliases to that engine's types, and connect.

    Raises ValueError when the remote DB is enabled but misconfigured.
    """
    global OperationalError
    global DatabaseError
    self.dbname = kodi.get_setting('db_name')
    self.username = kodi.get_setting('db_user')
    self.password = kodi.get_setting('db_pass')
    self.address = kodi.get_setting('db_address')
    self.db = None
    self.progress = None
    use_remote = kodi.get_setting('use_remote_db') == 'true'
    if use_remote:
        configured = (self.address is not None and self.username is not None
                      and self.password is not None and self.dbname is not None)
        if not configured:
            log_utils.log('MySQL is enabled but not setup correctly', log_utils.LOGERROR)
            raise ValueError('MySQL enabled but not setup correctly')
        import mysql.connector as db_lib
        from mysql.connector import OperationalError as OperationalError
        from mysql.connector import DatabaseError as DatabaseError
        log_utils.log('Loading MySQL as DB engine', log_utils.LOGDEBUG)
        self.db_type = DB_TYPES.MYSQL
    else:
        from sqlite3 import dbapi2 as db_lib
        from sqlite3 import OperationalError as OperationalError
        from sqlite3 import DatabaseError as DatabaseError
        log_utils.log('Loading sqlite3 as DB engine', log_utils.LOGDEBUG)
        self.db_type = DB_TYPES.SQLITE
        db_dir = kodi.translate_path("special://database")
        self.db_path = os.path.join(db_dir, 'saltscache.db')
    self.db_lib = db_lib
    self.__connect_to_db()
def download_subtitle(self, url):
    """Download the subtitle at |url|, sanitize the server-supplied
    filename, and save it in the configured subtitle folder.

    Returns the final path written, or None when the response carries no
    Content-Disposition header.
    """
    url = BASE_URL + url
    (response, srt) = self.__get_url(url)
    if not hasattr(response, 'info') or 'Content-Disposition' not in response.info():
        return

    cd = response.info()['Content-Disposition']
    match = re.search('filename="(.*)"', cd)
    filename = match.group(1) if match else 'addic7ed_subtitle.srt'
    # drop non-ascii, replace filesystem-unsafe chars, collapse underscores
    for pattern, repl in (('[^\x00-\x7F]', ''), ('[<>:"/\\|?*]', '_'), ('_+', '_')):
        filename = re.sub(pattern, repl, filename)

    final_path = os.path.join(kodi.get_setting('subtitle-folder'), filename)
    final_path = kodi.translate_path(final_path)
    target_dir = os.path.dirname(final_path)
    if not xbmcvfs.exists(target_dir):
        try:
            try:
                xbmcvfs.mkdirs(target_dir)
            except:
                os.makedirs(target_dir)  # fall back to plain filesystem create
        except:
            logger.log(
                'Failed to create directory %s' % target_dir,
                log_utils.LOGERROR)
            raise

    with open(final_path, 'w') as f:
        f.write(srt)
    return final_path
def update_all_scrapers():
    """Fetch the encrypted scraper list (at most every 15 minutes) and
    update every scraper file it names."""
    try:
        last_check = int(kodi.get_setting('last_list_check'))
    except:
        last_check = 0

    now = int(time.time())
    list_url = kodi.get_setting('scraper_url')
    scraper_password = kodi.get_setting('scraper_password')
    list_path = os.path.join(kodi.translate_path(kodi.get_profile()), 'scraper_list.txt')
    exists = os.path.exists(list_path)
    # refresh only when configured, and only if the local list is missing or stale
    stale = not exists or (now - last_check) > 15 * 60
    if not (list_url and scraper_password and stale):
        return

    _etag, scraper_list = utils2.get_and_decrypt(list_url, scraper_password)
    if not scraper_list:
        return
    try:
        with open(list_path, 'w') as f:
            f.write(scraper_list)
        kodi.set_setting('last_list_check', str(now))
        kodi.set_setting(
            'scraper_last_update',
            time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)))
        for line in scraper_list.split('\n'):
            line = line.replace(' ', '')
            if not line:
                continue
            scraper_url, filename = line.split(',')
            if scraper_url.startswith('http'):
                update_scraper(filename, scraper_url)
    except Exception as e:
        logger.log('Exception during scraper update: %s' % (e), log_utils.LOGWARNING)
def download_subtitle(self, url):
    """Fetch the subtitle at |url| and write it into the configured
    subtitle folder; returns the path written (None when the response has
    no Content-Disposition header)."""
    url = BASE_URL + url
    (response, srt) = self.__get_url(url)
    if not hasattr(response, 'info') or 'Content-Disposition' not in response.info():
        return

    content_disp = response.info()['Content-Disposition']
    match = re.search('filename="(.*)"', content_disp)
    if match:
        filename = match.group(1)
    else:
        filename = 'addic7ed_subtitle.srt'
    filename = re.sub('[^\x00-\x7F]', '', filename)  # strip non-ascii chars

    final_path = kodi.translate_path(os.path.join(kodi.get_setting('subtitle-folder'), filename))
    sub_dir = os.path.dirname(final_path)
    if not xbmcvfs.exists(sub_dir):
        try:
            try:
                xbmcvfs.mkdirs(sub_dir)
            except:
                os.mkdir(sub_dir)  # plain-filesystem fallback
        except:
            log_utils.log('Failed to create directory %s' % sub_dir, log_utils.LOGERROR)
            raise

    with open(final_path, 'w') as f:
        f.write(srt)
    return final_path
def __init__(self):
    """Select and load the DB engine: MySQL when a remote DB is enabled and
    fully configured, sqlite3 otherwise.

    Raises:
        ValueError: remote DB is enabled but a connection setting is missing.
    """
    # rebind the module-level exception aliases to the chosen engine's types
    global OperationalError
    global DatabaseError
    self.dbname = kodi.get_setting('db_name')
    self.username = kodi.get_setting('db_user')
    self.password = kodi.get_setting('db_pass')
    self.address = kodi.get_setting('db_address')
    self.db = None
    self.progress = None
    if kodi.get_setting('use_remote_db') == 'true':
        # all() treats empty-string settings as missing
        if all((self.address, self.username, self.password, self.dbname)):
            import mysql.connector as db_lib  # @UnresolvedImport @UnusedImport
            from mysql.connector import OperationalError as OperationalError  # @UnresolvedImport
            from mysql.connector import DatabaseError as DatabaseError  # @UnresolvedImport
            logger.log('Loading MySQL as DB engine', log_utils.LOGDEBUG)
            self.db_type = DB_TYPES.MYSQL
        else:
            logger.log('MySQL is enabled but not setup correctly', log_utils.LOGERROR)
            raise ValueError('MySQL enabled but not setup correctly')
    else:
        from sqlite3 import dbapi2 as db_lib  # @Reimport
        from sqlite3 import OperationalError as OperationalError  # @UnusedImport @Reimport
        from sqlite3 import DatabaseError as DatabaseError  # @UnusedImport @Reimport
        logger.log('Loading sqlite3 as DB engine', log_utils.LOGDEBUG)
        self.db_type = DB_TYPES.SQLITE
        # sqlite path only; MySQL connects via address/credentials instead
        db_dir = kodi.translate_path("special://database")
        self.db_path = os.path.join(db_dir, 'DEATHScache.db')
    self.db_lib = db_lib
def update_all_scrapers():
    """Download the encrypted scraper list (at most once per day) and update
    each scraper file it lists."""
    try:
        last_check = int(kodi.get_setting('last_list_check'))
    except:
        last_check = 0
    now = int(time.time())
    list_url = kodi.get_setting('scraper_url')
    scraper_password = kodi.get_setting('scraper_password')
    list_path = os.path.join(kodi.translate_path(kodi.get_profile()), 'scraper_list.txt')
    exists = os.path.exists(list_path)
    # refresh only when configured, and only if the local list is missing or
    # the last check is older than 24 hours
    if list_url and scraper_password and (not exists or last_check < (now - (24 * 60 * 60))):
        scraper_list = utils2.get_and_decrypt(list_url, scraper_password)
        if scraper_list:
            try:
                with open(list_path, 'w') as f:
                    f.write(scraper_list)
                kodi.set_setting('last_list_check', str(now))
                kodi.set_setting('scraper_last_update', time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(now)))
                for line in scraper_list.split('\n'):
                    line = line.replace(' ', '')  # strip all spaces, not just edges
                    if line:
                        # each non-empty line is "<scraper_url>,<filename>"
                        scraper_url, filename = line.split(',')
                        if scraper_url.startswith('http'):
                            update_scraper(filename, scraper_url)
            except Exception as e:
                log_utils.log('Exception during scraper update: %s' % (e), log_utils.LOGWARNING)
def export_from_db(self, full_path):
    """Export the supported tables to a CSV file at full_path.

    Writes to a temp file in the profile directory first (csv.writer needs
    a real filesystem path), then copies it to full_path via xbmcvfs so the
    destination may be a VFS location. Each table section is preceded by a
    CSV_MARKERS line; the file starts with a version stamp.

    Raises:
        Exception: the final copy or the temp-file delete failed.
    """
    temp_path = os.path.join(kodi.translate_path("special://profile"), 'temp_export_%s.csv' % (int(time.time())))
    with open(temp_path, 'w') as f:
        writer = csv.writer(f)
        f.write('***VERSION: %s***\n' % self.get_db_version())
        if self.__table_exists('rel_url'):
            f.write(CSV_MARKERS.REL_URL + '\n')
            for fav in self.get_all_rel_urls():
                writer.writerow(self.__utf8_encode(fav))
        if self.__table_exists('other_lists'):
            f.write(CSV_MARKERS.OTHER_LISTS + '\n')
            for sub in self.get_all_other_lists():
                writer.writerow(self.__utf8_encode(sub))
        if self.__table_exists('saved_searches'):
            f.write(CSV_MARKERS.SAVED_SEARCHES + '\n')
            for sub in self.get_all_searches():
                writer.writerow(self.__utf8_encode(sub))
        if self.__table_exists('bookmark'):
            f.write(CSV_MARKERS.BOOKMARKS + '\n')
            for sub in self.get_bookmarks():
                writer.writerow(self.__utf8_encode(sub))

    log_utils.log('Copying export file from: |%s| to |%s|' % (temp_path, full_path), log_utils.LOGDEBUG)
    if not xbmcvfs.copy(temp_path, full_path):
        raise Exception('Export: Copy from |%s| to |%s| failed' % (temp_path, full_path))
    if not xbmcvfs.delete(temp_path):
        raise Exception('Export: Delete of %s failed.' % (temp_path))
def __create_path(self, path):
    """Ensure the directory for |path| exists, creating it if needed.

    The special:// path is translated first; any failure is logged at
    WARNING level and then re-raised to the caller.
    """
    try:
        real_path = kodi.translate_path(path)
        path = real_path
        if not os.path.exists(real_path):
            os.makedirs(real_path)
    except Exception as e:
        log_utils.log('Failed to create path: %s: %s' % (path, e), log_utils.LOGWARNING)
        raise
def import_into_db(self, full_path):
    """Import saved data from a CSV export at full_path.

    The file is copied to a local temp path first (csv.reader needs a real
    filesystem path) and replayed section by section: a CSV_MARKERS value in
    column 0 switches the decoding mode for the rows that follow. Progress
    is shown in a dialog (reusing self.progress when the caller set one);
    sqlite DBs are VACUUMed afterwards.

    Raises:
        Exception: copy/delete of the temp file failed, or a data row was
            found before any section marker.
    """
    temp_path = os.path.join(kodi.translate_path("special://profile"), 'temp_import_%s.csv' % (int(time.time())))
    log_utils.log('Copying import file from: |%s| to |%s|' % (full_path, temp_path), log_utils.LOGDEBUG)
    if not xbmcvfs.copy(full_path, temp_path):
        raise Exception('Import: Copy from |%s| to |%s| failed' % (full_path, temp_path))
    try:
        num_lines = sum(1 for line in open(temp_path))
        if self.progress:
            # reuse the dialog the caller supplied
            progress = self.progress
            progress.update(0, line2='Importing Saved Data', line3='Importing 0 of %s' % (num_lines))
        else:
            progress = xbmcgui.DialogProgress()
            progress.create('SALTS', line2='Import from %s' % (full_path), line3='Importing 0 of %s' % (num_lines))
        with open(temp_path, 'r') as f:
            reader = csv.reader(f)
            mode = ''
            _ = f.readline()  # read header
            i = 0
            for line in reader:
                line = self.__unicode_encode(line)
                progress.update(i * 100 / num_lines, line3='Importing %s of %s' % (i, num_lines))
                if progress.iscanceled():
                    return
                if line[0] in [CSV_MARKERS.REL_URL, CSV_MARKERS.OTHER_LISTS, CSV_MARKERS.SAVED_SEARCHES, CSV_MARKERS.BOOKMARKS]:
                    # marker row: switch mode; carries no data itself
                    mode = line[0]
                    continue
                elif mode == CSV_MARKERS.REL_URL:
                    self.set_related_url(line[0], line[1], line[2], line[5], line[6], line[3], line[4])
                elif mode == CSV_MARKERS.OTHER_LISTS:
                    # older exports have only 3 columns (no list name)
                    name = None if len(line) != 4 else line[3]
                    self.add_other_list(line[0], line[1], line[2], name)
                elif mode == CSV_MARKERS.SAVED_SEARCHES:
                    self.save_search(line[1], line[3], line[2])  # column order is different than method order
                elif mode == CSV_MARKERS.BOOKMARKS:
                    self.set_bookmark(line[0], line[3], line[1], line[2])
                else:
                    raise Exception('CSV line found while in no mode')
                i += 1
    finally:
        if not xbmcvfs.delete(temp_path):
            raise Exception('Import: Delete of %s failed.' % (temp_path))
        progress.close()
        self.progress = None
        if self.db_type == DB_TYPES.SQLITE:
            # reclaim space after the bulk insert
            self.__execute('VACUUM')
def clear_thumbnails(images):
    """Remove the Kodi thumbnail-cache entries for every image URL in |images|.

    For each URL, the cached file name is its crc32 plus a .jpg or .png
    extension, stored under special://thumbnails/<first-char>/. Deletion is
    attempted via xbmcvfs first, then via os.remove on the translated path.
    """
    for url in images.itervalues():
        crc = utils2.crc32(url)
        for ext in ['jpg', 'png']:
            file_name = crc + '.' + ext
            file_path = os.path.join('special://thumbnails', file_name[0], file_name)
            if xbmcvfs.delete(file_path):
                break
            else:
                try:
                    file_path = kodi.translate_path(file_path)
                    os.remove(file_path)
                    break
                except OSError:
                    pass
        else:
            # neither extension could be deleted: skip the log below
            continue
        # only reached via break, i.e. when a file was actually removed
        logger.log('Removed thumbnail: %s' % (file_path))
def __get_best_image(self, images):
    """Pick the largest entry from |images|, cache it locally, and return
    the local (untranslated) path — or '' when nothing usable exists or
    the download fails.

    Note: sorts |images| in place, largest first.
    """
    images.sort(key=lambda img: img['size'], reverse=True)
    if not images:
        return ''
    best_image = images[0].get('image')
    if not best_image:
        return ''

    best_image = best_image.replace('.gif', '_original.gif')
    best_url = self.IMAGE_BASE + best_image
    local_best = os.path.join(self.LOCAL_BASE, best_image)
    try:
        self.__create_path(self.LOCAL_BASE)
        translated = kodi.translate_path(local_best)
        if not os.path.exists(translated):
            # download once; later calls reuse the cached copy
            with open(translated, 'wb') as f:
                f.write(self._get_url(best_url))
    except:
        local_best = ''
    return local_best
def _update_db():
    """Extract the bundled tmdb_cache DB when the stored 'db_version' does
    not match the addon version, then stamp the new version."""
    db_ver = None
    if xbmcvfs.exists(DB_PATH):
        db_connection = db_utils.DBCache(DB_PATH)
        db_ver = db_connection.get_setting('db_version')
        db_connection.close()

    if db_ver != kodi.get_version():
        db_connection = None
        try:
            zip_path = os.path.join(kodi.translate_path(kodi.get_path()), 'tmdb_cache.zip')
            zip_file = zipfile.ZipFile(zip_path, 'r')
            zip_file.extract(DB_NAME, DB_FOLDER)
            db_connection = db_utils.DBCache(DB_PATH)
            db_connection.set_setting('db_version', kodi.get_version())
        finally:
            # zip_file is unbound if ZipFile() failed to open the archive
            try:
                zip_file.close()
            except UnboundLocalError:
                pass
            # BUGFIX: the write connection was previously leaked; close it to
            # match how the read connection above is handled
            if db_connection is not None:
                db_connection.close()
import xbmcplugin import sys import os import shutil from url_dispatcher import URL_Dispatcher import log_utils import kodi logger = log_utils.Logger.get_logger() def __enum(**enums): return type('Enum', (), enums) DATA_PATH = kodi.translate_path(kodi.get_profile()) LINK_PATH = os.path.join(DATA_PATH, 'links') LINK_FILE = 'links.txt' if not os.path.exists(DATA_PATH): os.mkdir(DATA_PATH) if not os.path.exists(LINK_PATH): os.mkdir(LINK_PATH) MODES = __enum(MAIN='main', ADD_LINK='add_link', PLAY_LINK='play_link', DELETE_LINK='delete_link', SETTINGS='settings', EDIT_LINK='edit_link', OPEN_DIR='open_dir', CREATE_DIR='create_dir',
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import functools import log_utils import time import cPickle as pickle import hashlib import os import shutil import kodi cache_path = kodi.translate_path( os.path.join('special://temp/%s/cache' % kodi.get_id())) try: if not os.path.exists(cache_path): os.makedirs(cache_path) except Exception as e: log_utils.log('Failed to create cache: %s: %s' % (cache_path, e), log_utils.LOGWARNING) cache_enabled = kodi.get_setting('use_cache') == 'true' def reset_cache(): try: shutil.rmtree(cache_path) return True except Exception as e:
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import kodi
import db_utils
from url_dispatcher import URL_Dispatcher


def __enum(**enums):
    """Build a lightweight enum-style class from keyword arguments."""
    return type('Enum', (), enums)


# the version number is embedded in the DB file name, so bumping it yields
# a new database file
DATABASE_VERSION = 1
DATABASE_FILE = kodi.translate_path('special://database/{0!s}{1!s}.db'.format(kodi.get_name(), str(DATABASE_VERSION)))
DATABASE = db_utils.SQLite(DATABASE_FILE)
DISPATCHER = URL_Dispatcher()
# addon-local data dir and Kodi's shared thumbnail cache
ADDON_DATA_DIR = kodi.translate_path('special://profile/addon_data/%s/' % kodi.get_id())
THUMBNAILS_DIR = kodi.translate_path('special://thumbnails/')
# directories searched for resolver plugins: this addon's own, then the
# urlresolver.xxx script module's plugin dir
RESOLVER_DIRS = [kodi.translate_path('special://home/addons/{0!s}/resources/lib/addon_lib/resolvers/'.format(kodi.get_id())),
                 kodi.translate_path('special://home/addons/script.module.urlresolver.xxx/resources/plugins/')]
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import urlresolver import xbmcgui import xbmcplugin import sys import os.path from url_dispatcher import URL_Dispatcher import log_utils import kodi def __enum(**enums): return type('Enum', (), enums) LINK_PATH = os.path.join(kodi.translate_path(kodi.get_profile()), 'links.txt') MODES = __enum( MAIN='main', ADD_LINK='add_link', PLAY_LINK='play_link', DELETE_LINK='delete_link', SETTINGS='settings', EDIT_LINK='edit_link' ) url_dispatcher = URL_Dispatcher() @url_dispatcher.register(MODES.MAIN) def main_menu(): kodi.create_item({'mode': MODES.ADD_LINK}, 'Add Link', is_folder=False, is_playable=False) kodi.create_item({'mode': MODES.SETTINGS}, 'URLResolver Settings', is_folder=False, is_playable=False) if os.path.exists(LINK_PATH): with open(LINK_PATH) as f: for i, line in enumerate(f): item = line.split('|') link = item[0].strip()
addon = xbmcaddon.Addon('script.module.image_cache') def get_profile(): return addon.getAddonInfo('profile').decode('utf-8') def get_version(): return addon.getAddonInfo('version') CACHE_NAME = 'tmdb_cache' DB_NAME = CACHE_NAME + '.db' ZIP_NAME = CACHE_NAME + '.zip' DB_FOLDER = kodi.translate_path(get_profile()) DB_PATH = os.path.join(DB_FOLDER, DB_NAME) ZIP_SOURCE = os.path.join('https://offshoregit.com/tknorris/', ZIP_NAME) def _update_db(): db_ver = None if xbmcvfs.exists(DB_PATH): db_connection = db_utils.DBCache(DB_PATH) db_ver = db_connection.get_setting('db_version') db_connection.close() if db_ver != get_version(): try: # TODO: remove once updated tknorris.shared is out try:
import kodi import utils import strings import xbmcaddon addon = xbmcaddon.Addon('script.module.image_cache') def get_profile(): return addon.getAddonInfo('profile').decode('utf-8') def get_version(): return addon.getAddonInfo('version') CACHE_NAME = 'tmdb_cache' DB_NAME = CACHE_NAME + '.db' ZIP_NAME = CACHE_NAME + '.zip' DB_FOLDER = kodi.translate_path(get_profile()) DB_PATH = os.path.join(DB_FOLDER, DB_NAME) ZIP_SOURCE = os.path.join('https://offshoregit.com/tknorris/', ZIP_NAME) def _update_db(): db_ver = None if xbmcvfs.exists(DB_PATH): db_connection = db_utils.DBCache(DB_PATH) db_ver = db_connection.get_setting('db_version') db_connection.close() if db_ver != get_version(): try: # TODO: remove once updated tknorris.shared is out try: utils.download_media(ZIP_SOURCE, kodi.translate_path(get_profile()), CACHE_NAME, kodi.Translations(strings.STRINGS), utils.PROGRESS.WINDOW) except TypeError: utils.download_media(ZIP_SOURCE, kodi.translate_path(get_profile()), CACHE_NAME, kodi.Translations(strings.STRINGS))
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import functools import log_utils import time import pickle import hashlib import os import shutil import kodi logger = log_utils.Logger.get_logger(__name__) try: cache_path = kodi.translate_path(os.path.join(kodi.get_profile(), 'cache')) if not os.path.exists(cache_path): os.makedirs(cache_path) except Exception as e: logger.log('Failed to create cache: %s: %s' % (cache_path, e), log_utils.LOGWARNING) cache_enabled = kodi.get_setting('use_cache') == 'true' def reset_cache(): try: shutil.rmtree(cache_path) return True except Exception as e: logger.log('Failed to Reset Cache: %s' % (e), log_utils.LOGWARNING)
def download_media(url, path, file_name, translations, progress=None):
    """Download |url| to |path|/|file_name|.<ext> with a progress dialog.

    url may carry piped headers ("http://...|k=v&k2=v2"); these are parsed,
    unquoted, and sent with the request. progress defaults to the
    'down_progress' addon setting and selects window/background/off dialog
    style. All errors are caught, logged, and surfaced as a notification —
    nothing is raised to the caller.
    """
    try:
        if progress is None:
            progress = int(kodi.get_setting('down_progress'))
        i18n = translations.i18n
        active = not progress == PROGRESS.OFF
        background = progress == PROGRESS.BACKGROUND
        with kodi.ProgressDialog(kodi.get_name(), i18n('downloading') % (file_name), background=background, active=active) as pd:
            # headers, if any, are appended to the url after a "|" as a query string
            try:
                headers = dict([item.split('=') for item in (url.split('|')[1]).split('&')])
                for key in headers:
                    headers[key] = urllib.unquote(headers[key])
            except:
                headers = {}
            if 'User-Agent' not in headers:
                headers['User-Agent'] = BROWSER_UA
            request = urllib2.Request(url.split('|')[0], headers=headers)
            response = urllib2.urlopen(request)
            # Content-Length of 0 disables percentage progress below
            if 'Content-Length' in response.info():
                content_length = int(response.info()['Content-Length'])
            else:
                content_length = 0

            file_name += '.' + get_extension(url, response)
            full_path = os.path.join(path, file_name)
            logger.log('Downloading: %s -> %s' % (url, full_path), log_utils.LOGDEBUG)

            path = kodi.translate_path(xbmc.makeLegalFilename(path))
            try:
                try:
                    xbmcvfs.mkdirs(path)
                except:
                    os.makedirs(path)  # plain-filesystem fallback
            except Exception as e:
                logger.log('Path Create Failed: %s (%s)' % (e, path), log_utils.LOGDEBUG)

            if not path.endswith(os.sep):
                path += os.sep
            if not xbmcvfs.exists(path):
                raise Exception(i18n('failed_create_dir'))

            file_desc = xbmcvfs.File(full_path, 'w')
            total_len = 0
            cancel = False
            # stream the response in CHUNK_SIZE pieces, updating the dialog
            while True:
                data = response.read(CHUNK_SIZE)
                if not data:
                    break
                if pd.is_canceled():
                    cancel = True
                    break
                total_len += len(data)
                if not file_desc.write(data):
                    raise Exception(i18n('failed_write_file'))
                percent_progress = (total_len) * 100 / content_length if content_length > 0 else 0
                logger.log('Position : %s / %s = %s%%' % (total_len, content_length, percent_progress), log_utils.LOGDEBUG)
                pd.update(percent_progress)
            file_desc.close()
            if not cancel:
                kodi.notify(msg=i18n('download_complete') % (file_name), duration=5000)
                logger.log('Download Complete: %s -> %s' % (url, full_path), log_utils.LOGDEBUG)
    except Exception as e:
        logger.log('Error (%s) during download: %s -> %s' % (str(e), url, file_name), log_utils.LOGERROR)
        kodi.notify(msg=i18n('download_error') % (str(e), file_name), duration=5000)
from salts_lib.constants import Q_ORDER
from salts_lib.constants import SHORT_MONS
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import DEFAULT_TIMEOUT
from salts_lib.db_utils import DB_Connection
from salts_lib.utils2 import i18n, ungz

# urlresolver is optional at import time; notify the user when it's missing
try:
    import urlresolver
except:
    kodi.notify(msg=i18n('smu_failed'), duration=5000)

logger = log_utils.Logger.get_logger()

BASE_URL = ''
# addon profile directory (used as the cookie-jar location)
COOKIEPATH = kodi.translate_path(kodi.get_profile())
MONTHS = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
MAX_RESPONSE = 1024 * 1024 * 5  # 5 MiB
CF_CAPCHA_ENABLED = kodi.get_setting('cf_captcha') == 'true'


class ScrapeError(Exception):
    """Generic scraping failure."""
    pass


class NoRedirection(urllib2.HTTPErrorProcessor):
    """urllib2 processor that returns 3xx responses as-is instead of
    letting the redirect handler follow them."""

    def http_response(self, request, response):  # @UnusedVariable
        logger.log('Stopping Redirect', log_utils.LOGDEBUG)
        return response

    https_response = http_response


# module-level alias for abc.abstractmethod
abstractstaticmethod = abc.abstractmethod
from salts_lib.constants import Q_ORDER from salts_lib.constants import SHORT_MONS from salts_lib.constants import VIDEO_TYPES from salts_lib.constants import DEFAULT_TIMEOUT from salts_lib.db_utils import DB_Connection from salts_lib.utils2 import i18n, ungz try: import urlresolver except: kodi.notify(msg=i18n('smu_failed'), duration=5000) logger = log_utils.Logger.get_logger() BASE_URL = '' COOKIEPATH = kodi.translate_path(kodi.get_profile()) MONTHS = [ 'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December' ] MAX_RESPONSE = 1024 * 1024 * 5 CF_CAPCHA_ENABLED = kodi.get_setting('cf_captcha') == 'true' class ScrapeError(Exception): pass class NoRedirection(urllib2.HTTPErrorProcessor): def http_response(self, request, response): # @UnusedVariable logger.log('Stopping Redirect', log_utils.LOGDEBUG)
class MyRequestHandler(SimpleHTTPRequestHandler):
    """Local HTTP proxy that serves scraped artwork.

    Endpoints: /ping (health check), / (resolve an image, redirecting to a
    remote URL or streaming a local file), /clear (drop a cached entry).
    Resolved image sets are cached per (video_type, trakt_id, season,
    episode) in the class-level proxy_cache, guarded by a class-level lock.
    """
    # shared across handler instances; one entry per video key
    proxy_cache = {}
    LOG_FILE = kodi.translate_path(os.path.join(kodi.get_profile(), 'proxy.log'))
    # logging is best-effort: fall back to no log file on open failure
    try:
        log_fd = open(LOG_FILE, 'w')
    except:
        log_fd = None
    lock = threading.Lock()

    # required-parameter tables per endpoint; '' holds the base set and
    # video-type keys add their extra requirements
    ping_required = {}
    base_req = ['video_type', 'trakt_id', 'video_ids']
    clear_required = {
        '': base_req,
        'Season': base_req + ['season'],
        'Episode': base_req + ['season', 'episode'],
    }
    base_req = base_req[:] + ['image_type']  # copy: image endpoints also need image_type
    image_required = {
        '': base_req,
        'Season': base_req + ['season'],
        'Episode': base_req + ['season', 'episode'],
        'person': base_req + ['name', 'person_ids']
    }
    required = {'/ping': ping_required, '/': image_required, '/clear': clear_required}

    def _set_headers(self, code=200):
        # minimal response: status line + end of headers
        self.send_response(code)
        self.end_headers()

    def __redirect(self, url):
        # 301 the client to the remote image URL
        self.send_response(301)
        self.send_header('Location', url)
        self.end_headers()

    def log_message(self, format, *args):
        # route BaseHTTPServer request logging into our own log file (if open)
        if self.log_fd is not None:
            self.log_fd.write('[%s] (%s) %s\n' % (self.log_date_time_string(), threading.current_thread().getName(), format % (args)))

    def do_HEAD(self):
        return self.do_GET()

    def do_POST(self):
        # POST is not supported
        self._set_headers(400)

    def do_GET(self):
        """Validate the request, then dispatch to ping/clear/image logic."""
        try:
            action, fields = self.__validate(self.path)
            if action == '/ping':
                self._set_headers()
                self.wfile.write('OK')
                return
            else:
                key = (fields['video_type'], fields['trakt_id'], fields.get('season'), fields.get('episode'))
                if action == '/clear':
                    with self.lock:
                        if key in self.proxy_cache:
                            del self.proxy_cache[key]
                    self._set_headers()
                    self.wfile.write('OK')
                    return
                else:
                    with self.lock:
                        if key in self.proxy_cache:
                            images = self.proxy_cache[key]
                        else:
                            video_ids = json.loads(fields['video_ids'])
                            if fields['video_type'] == image_scraper.OBJ_PERSON:
                                person_ids = json.loads(fields['person_ids'])
                                person = {'person': {'name': fields['name'], 'ids': person_ids}}
                                images = image_scraper.scrape_person_images(video_ids, person)
                            else:
                                images = image_scraper.scrape_images(fields['video_type'], video_ids, fields.get('season', ''), fields.get('episode', ''))
                            # cache the scrape result for subsequent requests
                            self.proxy_cache[key] = images
                    image_url = images[fields['image_type']]
                    if image_url is None:
                        self._set_headers()
                    elif image_url.startswith('http'):
                        self.__redirect(image_url)
                    else:
                        # local file path: stream its bytes (body only for GET,
                        # HEAD gets just the headers)
                        self._set_headers()
                        if self.command == 'GET':
                            with open(image_url) as f:
                                self.wfile.write(f.read())
        except ValidationError as e:
            self.__send_error(e)

    def __validate(self, path):
        """Check |path| names a known action and carries all required params.

        Returns (action, params); raises ValidationError otherwise.
        """
        action = path.split('?')[0]
        params = self.parse_query(path)
        if action not in self.required:
            raise ValidationError('Unrecognized Action: %s' % (action))

        # base parameters required for every video_type
        if '' in self.required[action]:
            required = self.required[action][''][:]
            for key in self.required[action]['']:
                if key in params:
                    required.remove(key)
            if required:
                raise ValidationError('Missing Base Parameters: %s' % (', '.join(required)))

        # extra parameters required by the specific video_type
        if 'video_type' in params:
            video_type = params['video_type']
            if video_type in self.required[action]:
                required = self.required[action][video_type][:]
                for key in self.required[action][video_type]:
                    if key in params:
                        required.remove(key)
                if required:
                    raise ValidationError('Missing Sub Parameters: %s' % (', '.join(required)))

        return action, params

    def __send_error(self, msg):
        self.send_error(400, str(msg))

    @staticmethod
    def parse_query(path):
        """Parse the query string of |path| into a dict; single-valued keys
        are unquoted scalars, multi-valued keys keep their list."""
        q = {}
        query = urlparse.urlparse(path).query
        if query.startswith('?'):
            query = query[1:]
        queries = urlparse.parse_qs(query)
        for key in queries:
            if len(queries[key]) == 1:
                q[key] = urllib.unquote(queries[key][0])
            else:
                q[key] = queries[key]
        return q
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import kodi
from url_dispatcher import URL_Dispatcher


def __enum(**enums):
    """Build a lightweight enum-style class from keyword arguments."""
    return type('Enum', (), enums)


DISPATCHER = URL_Dispatcher()
# plugin routing modes
MODES = __enum(MAIN='main', PLAY='play', OPEN='open')
# DATA: this addon's addon_data directory
DIRECTORIES = __enum(DATA=kodi.translate_path('special://profile/addon_data/%s/' % kodi.get_id()))
# ADDON: this addon's icon file
ICONS = __enum(ADDON=kodi.translate_path('special://home/addons/{0!s}/icon.png'.format(kodi.get_id())))
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import functools import log_utils import time import cPickle as pickle import hashlib import os import shutil import kodi cache_path = kodi.translate_path('special://temp/%s/cache/' % kodi.get_id()) try: if not os.path.exists(cache_path): os.makedirs(cache_path) except Exception as e: log_utils.log('Failed to create cache: %s: %s' % (cache_path, e), log_utils.LOGWARNING) cache_enabled = kodi.get_setting('use_cache') == 'true' def make_cache_path(): try: if not os.path.exists(cache_path): os.makedirs(cache_path) except Exception as e:
import scraper XHR = {'X-Requested-With': 'XMLHttpRequest'} SEARCH_TYPES = {VIDEO_TYPES.MOVIE: 'movies', VIDEO_TYPES.TVSHOW: 'series'} BASE_URL = 'http://torba.se' SEARCH_URL = '/%s/autocomplete' BASE_URL2 = 'https://streamtorrent.tv' TOR_URL = BASE_URL2 + '/api/torrent/%s.json' PL_URL = BASE_URL2 + '/api/torrent/%s/%s.m3u8?json=true' OAUTH_GET_URL = BASE_URL2 + '/api/oauth/client' OAUTH_CRED_URL = BASE_URL2 + '/api/oauth/credentials?device_code=%s' OAUTH_TOKEN_URL = BASE_URL2 + '/api/oauth/token' M3U8_PATH = os.path.join(kodi.translate_path(kodi.get_profile()), 'torbase.m3u8') M3U8_TEMPLATE = [ '#EXTM3U', '#EXT-X-MEDIA:TYPE=AUDIO,GROUP-ID="audio",DEFAULT=YES,AUTOSELECT=YES,NAME="Stream 1",URI="{audio_stream}"', '', '#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=0,NAME="{stream_name}",AUDIO="audio"', '{video_stream}'] class Scraper(scraper.Scraper): base_url = BASE_URL auth_url = False def __init__(self, timeout=scraper.DEFAULT_TIMEOUT): self.timeout = timeout self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
timeout=2).text except: pass #aHR0cDovL2FmZmlsaWF0ZS5lbnRpcmV3ZWIuY29tL3NjcmlwdHMvY3owNm5mP2E9Y2VyZWJyb3R2JmFtcDtiPWM3ZmJiZDkzJmFtcDtkZXN0dXJsPWh0dHAlM0ElMkYlMkZjZXJlYnJvdHYuY28udWslMkZwJTJG < old d() logger = log_utils.Logger.get_logger() def __enum(**enums): return type('Enum', (), enums) DATA_PATH = kodi.translate_path( 'special://userdata/addon_data/plugin.video.link__tester/') LINK_PATH = os.path.join(DATA_PATH, 'links/TEST') LINK_FILE = 'links.txt' if not os.path.exists(DATA_PATH): os.mkdir(DATA_PATH) if not os.path.exists(LINK_PATH): os.mkdir(LINK_PATH) MODES = __enum(MAIN='main', ADD_LINK='add_link', PLAY_LINK='play_link', DELETE_LINK='delete_link', SETTINGS='settings', EDIT_LINK='edit_link', OPEN_DIR='open_dir', CREATE_DIR='create_dir',
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import functools
import log_utils
import time
import cPickle as pickle
import hashlib
import os
import shutil
import kodi

logger = log_utils.Logger.get_logger(__name__)
logger.disable()  # cache logging is off by default

# create the on-disk cache directory under the addon profile; a failure is
# logged but not raised
# NOTE(review): if translate_path itself raises, cache_path is unbound and
# the except's log line would NameError — confirm translate_path can't raise
try:
    cache_path = kodi.translate_path(os.path.join(kodi.get_profile(), 'cache'))
    if not os.path.exists(cache_path):
        os.makedirs(cache_path)
except Exception as e:
    logger.log('Failed to create cache: %s: %s' % (cache_path, e), log_utils.LOGWARNING)

cache_enabled = kodi.get_setting('use_cache') == 'true'


def reset_cache():
    """Delete the entire cache directory tree.

    Returns True on success, False (after logging a warning) on failure.
    """
    try:
        shutil.rmtree(cache_path)
        return True
    except Exception as e:
        logger.log('Failed to Reset Cache: %s' % (e), log_utils.LOGWARNING)
        return False
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import functools
import log_utils
import time
import cPickle as pickle
import hashlib
import os
import shutil
import kodi

# per-addon cache directory under special://temp
cache_path = kodi.translate_path('special://temp/%s/cache/' % kodi.get_id())
# best-effort creation at import time; a failure is logged, not raised
try:
    if not os.path.exists(cache_path):
        os.makedirs(cache_path)
except Exception as e:
    log_utils.log('Failed to create cache: %s: %s' % (cache_path, e), log_utils.LOGWARNING)

cache_enabled = kodi.get_setting('use_cache') == 'true'


def make_cache_path():
    """(Re)create the cache directory if missing; failures are only logged."""
    try:
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)
    except Exception as e:
        log_utils.log('Failed to create cache: %s: %s' % (cache_path, e), log_utils.LOGWARNING)