def _manifest_middleware(self, data):
    url = self._session.get('manifest_middleware')
    if not url:
        return data

    # Hand the manifest to the plugin via a temp file
    data_path = xbmc.translatePath('special://temp/proxy.manifest')
    with open(data_path, 'wb') as f:
        f.write(data.encode('utf8'))

    url = add_url_args(url, _data_path=data_path, _headers=json.dumps(self._headers))

    log.debug('PLUGIN MANIFEST MIDDLEWARE REQUEST: {}'.format(url))
    dirs, files = run_plugin(url, wait=True)
    if not files:
        raise Exception('No data returned from plugin')

    # The plugin resolves "<path>" or "<path>|<urlencoded headers>"
    path = unquote_plus(files[0])
    split = path.split('|')
    data_path = split[0]

    if len(split) > 1:
        self._plugin_headers = dict(parse_qsl(u'{}'.format(split[1]), keep_blank_values=True))

    with open(data_path, 'rb') as f:
        data = f.read().decode('utf8')

    if not ADDON_DEV:
        remove_file(data_path)

    return data
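# For context, the plugin called above is expected to read the manifest from
# `_data_path`, write a modified copy somewhere, and resolve a single listing
# item of the form "<path>" or "<path>|<urlencoded headers>". A minimal,
# hypothetical handler sketching that contract (the function name, rewrite
# logic, and extra header are illustrative, not from this codebase):
import json
import sys

import xbmc
import xbmcgui
import xbmcplugin

try:
    from urllib.parse import quote_plus, urlencode  # Python 3
except ImportError:
    from urllib import quote_plus, urlencode  # Python 2

def manifest_middleware(params):
    data_path = params['_data_path']          # temp file written by the proxy
    headers = json.loads(params['_headers'])  # request headers, if the rewrite needs them

    with open(data_path, 'r') as f:
        manifest = f.read()

    manifest = manifest.replace('EXAMPLE', 'REWRITTEN')  # plugin-specific edits

    out_path = xbmc.translatePath('special://temp/plugin.manifest')
    with open(out_path, 'w') as f:
        f.write(manifest)

    # Anything after '|' is parsed back with parse_qsl() into _plugin_headers
    result = u'{}|{}'.format(out_path, urlencode({'x-extra': 'value'}))
    handle = int(sys.argv[1])
    xbmcplugin.addDirectoryItem(handle, quote_plus(result), xbmcgui.ListItem(), isFolder=False)
    xbmcplugin.endOfDirectory(handle)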
def _process_source(self, source, method_name, file_path):
    remove_file(file_path)

    path = source.path.strip()
    source_type = source.source_type
    archive_type = source.archive_type

    if source_type == Source.TYPE_ADDON:
        addon_id = path
        addon, data = merge_info(addon_id, self.integrations, merging=True)

        if method_name not in data:
            raise Error('{} could not be found for {}'.format(method_name, addon_id))

        template_tags = {
            '$ID': addon_id,
            '$FILE': file_path,
            '$IP': xbmc.getIPAddress(),
        }

        path = data[method_name]
        for tag in template_tags:
            path = path.replace(tag, template_tags[tag])

        path = path.strip()
        if path.lower().startswith('plugin'):
            self._call_addon_method(path)
            return

        if path.lower().startswith('http'):
            source_type = Source.TYPE_URL
        else:
            source_type = Source.TYPE_FILE

        archive_extensions = {
            '.gz': Source.ARCHIVE_GZIP,
            '.xz': Source.ARCHIVE_XZ,
        }

        name, ext = os.path.splitext(path.lower())
        archive_type = archive_extensions.get(ext, Source.ARCHIVE_NONE)

    if source_type == Source.TYPE_URL and path.lower().startswith('http'):
        log.debug('Downloading: {} > {}'.format(path, file_path))
        Session().chunked_dl(path, file_path)
    elif not xbmcvfs.exists(path):
        raise Error(_(_.LOCAL_PATH_MISSING, path=path))
    else:
        log.debug('Copying local file: {} > {}'.format(path, file_path))
        xbmcvfs.copy(path, file_path)

    if archive_type == Source.ARCHIVE_GZIP:
        gzip_extract(file_path)
    elif archive_type == Source.ARCHIVE_XZ:
        xz_extract(file_path)
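# For reference, merge_info() is assumed to return per-method paths that may
# contain the template tags expanded above. A hypothetical payload (keys,
# port, and URLs are illustrative only):
example_merge_data = {
    'playlist': 'http://$IP:52103/$ID/playlist.m3u8',  # expands to TYPE_URL and is downloaded
    'epg': 'plugin://$ID/?_=merge_epg&output=$FILE',   # expands to a plugin path, dispatched to _call_addon_method
}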
def install():
    remove_file(SO_DST)
    shutil.copy(SO_SRC, SO_DST)

    if SYSTEM == 'libreelec':
        install_libreelec()
    elif SYSTEM == 'raspbian':
        install_raspbian()
    elif SYSTEM == 'osmc':
        install_osmc()
        return True
    elif SYSTEM == 'xbian':
        install_xbian()
        return True
    elif SYSTEM == 'mock':
        gui.ok(_.SYSTEM_UNSUPPORTED)

    # Note: only the osmc and xbian installers return True; the other
    # branches fall through and return None.
def middleware_plugin(response, url, **kwargs):
    path = 'special://temp/proxy.middleware'
    real_path = xbmc.translatePath(path)

    with open(real_path, 'wb') as f:
        f.write(response.stream.content)

    if ADDON_DEV:
        shutil.copy(real_path, real_path + '.in')

    url = add_url_args(url, _path=path)
    dirs, files = run_plugin(url, wait=True)
    if not files:
        raise Exception('No data returned from plugin')

    data = json.loads(unquote_plus(files[0]))
    with open(real_path, 'rb') as f:
        response.stream.content = f.read()

    response.headers.update(data.get('headers', {}))

    if ADDON_DEV:
        shutil.copy(real_path, real_path + '.out')

    remove_file(real_path)
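# The plugin invoked here follows the inverse contract: rewrite the response
# body in place at `_path`, then resolve a url-quoted JSON payload whose
# optional 'headers' mapping is merged into the proxied response. A minimal,
# hypothetical handler (the rewrite and header values are illustrative):
import json
import sys

import xbmc
import xbmcgui
import xbmcplugin

try:
    from urllib.parse import quote_plus  # Python 3
except ImportError:
    from urllib import quote_plus  # Python 2

def middleware(params):
    real_path = xbmc.translatePath(params['_path'])

    with open(real_path, 'rb') as f:
        content = f.read()

    with open(real_path, 'wb') as f:
        f.write(content.replace(b'EXAMPLE', b'REWRITTEN'))  # plugin-specific edits

    payload = json.dumps({'headers': {'content-type': 'application/vnd.apple.mpegurl'}})
    handle = int(sys.argv[1])
    xbmcplugin.addDirectoryItem(handle, quote_plus(payload), xbmcgui.ListItem(), isFolder=False)
    xbmcplugin.endOfDirectory(handle)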
def _proxy_request(self, method, url):
    self._session['redirecting'] = False

    # Non-http(s) urls are treated as local temp files and served directly
    if not url.lower().startswith(('http://', 'https://')):
        response = Response()
        response.headers = {}
        response.stream = ResponseStream(response)

        if os.path.exists(url):
            response.ok = True
            response.status_code = 200
            with open(url, 'rb') as f:
                response.stream.content = f.read()
            if not ADDON_DEV:
                remove_file(url)
        else:
            response.ok = False
            response.status_code = 500
            response.stream.content = "File not found: {}".format(url).encode('utf-8')

        return response

    debug = self._session.get('debug_all') or self._session.get('debug_{}'.format(method.lower()))
    if self._post_data and debug:
        with open(xbmc.translatePath('special://temp/{}-request.txt'.format(method.lower())), 'wb') as f:
            f.write(self._post_data)

    if not self._session.get('session'):
        self._session['session'] = RawSession()
        self._session['session'].set_dns_rewrites(self._session.get('dns_rewrites', []))
    else:
        self._session['session'].headers.clear()
        #self._session['session'].cookies.clear()  # cookies are handled by the requests session

    ## Fix any double // in url
    url = fix_url(url)

    # We occasionally get connection errors when reusing a session (likely a
    # stale socket), so retry aborted connections a few times
    retries = 3
    for i in range(retries):
        try:
            response = self._session['session'].request(method=method, url=url, headers=self._headers,
                                                        data=self._post_data, allow_redirects=False, stream=True)
        except ConnectionError as e:
            if 'Connection aborted' not in str(e) or i == retries - 1:
                log.exception(e)
                raise
        except Exception as e:
            log.exception(e)
            raise
        else:
            break

    response.stream = ResponseStream(response)

    log.debug('{} OUT: {} ({})'.format(method.upper(), url, response.status_code))

    # Strip hop-by-hop / unwanted headers and lower-case the rest
    headers = {}
    for header in response.headers:
        if header.lower() not in REMOVE_OUT_HEADERS:
            headers[header.lower()] = response.headers[header]
    response.headers = headers

    if debug:
        with open(xbmc.translatePath('special://temp/{}-response.txt'.format(method.lower())), 'wb') as f:
            f.write(response.stream.content)

    if 'location' in response.headers:
        if '://' not in response.headers['location']:
            response.headers['location'] = urljoin(url, response.headers['location'])

        self._session['redirecting'] = True
        self._update_urls(url, response.headers['location'])
        # Route the redirect back through this proxy
        response.headers['location'] = PROXY_PATH + response.headers['location']
        response.stream.content = b''

    if 'set-cookie' in response.headers:
        log.debug('set-cookie: {}'.format(response.headers['set-cookie']))
        ## we handle cookies in the requests session
        response.headers.pop('set-cookie')

    self._middleware(url, response)

    return response
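# fix_url() is assumed to collapse duplicate slashes in the path while leaving
# the scheme separator and query string untouched, per the comment at its call
# site. A minimal sketch under that assumption (the real helper may differ):
import re

try:
    from urllib.parse import urlsplit, urlunsplit  # Python 3
except ImportError:
    from urlparse import urlsplit, urlunsplit  # Python 2

def fix_url(url):
    parts = urlsplit(url)
    path = re.sub(r'/{2,}', '/', parts.path)  # '//live//stream' -> '/live/stream'
    return urlunsplit((parts.scheme, parts.netloc, path, parts.query, parts.fragment))

# fix_url('http://host//a//b?x=1//2') -> 'http://host/a/b?x=1//2'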
def epgs(self, refresh=True):
    epg_path = os.path.join(self.output_path, EPG_FILE_NAME)
    working_path = os.path.join(self.working_path, EPG_FILE_NAME)
    epg_path_tmp = os.path.join(self.working_path, EPG_FILE_NAME + '_tmp')

    if not refresh and xbmcvfs.exists(epg_path) and xbmcvfs.exists(working_path):
        return working_path

    start_time = time.time()
    database.connect()

    try:
        progress = gui.progressbg() if self.forced else None

        epgs = list(EPG.select().where(EPG.enabled == True).order_by(EPG.id))
        EPG.update({EPG.start_index: 0, EPG.end_index: 0, EPG.results: []}).where(EPG.enabled == False).execute()

        if settings.getBool('remove_epg_orphans', True):
            epg_ids = Channel.epg_ids()
        else:
            epg_ids = None

        # Add any EPG urls referenced by playlists that aren't already configured
        if self._playlist_epgs:
            epg_urls = [x.path.lower() for x in epgs]
            for url in self._playlist_epgs:
                if url.lower() not in epg_urls:
                    epg = EPG(source_type=EPG.TYPE_URL, path=url, enabled=1)
                    epgs.append(epg)
                    epg_urls.append(url.lower())

        with FileIO(epg_path_tmp, 'wb') as _out:
            _out.write(b'<?xml version="1.0" encoding="UTF-8"?><tv>')

            for count, epg in enumerate(epgs):
                count += 1

                if progress:
                    progress.update(int(count * (100 / len(epgs))),
                                    'Merging EPG ({}/{})'.format(count, len(epgs)),
                                    _(epg.label, _bold=True))

                file_index = _out.tell()

                epg_start = time.time()
                try:
                    log.debug('Processing: {}'.format(epg.path))
                    self._process_source(epg, METHOD_EPG, self.tmp_file)
                    with FileIO(self.tmp_file, 'rb') as _in:
                        parser = XMLParser(_out, epg_ids)
                        parser.parse(_in, epg)
                except Exception as e:
                    log.exception(e)
                    result = [int(time.time()), EPG.ERROR, str(e)]
                else:
                    result = [int(time.time()), EPG.OK,
                              '{} ({:.2f}s)'.format(parser.epg_count(), time.time() - epg_start)]
                    epg.results.insert(0, result)

                if result[1] == EPG.ERROR:
                    # Discard whatever this source wrote
                    _seek_file(_out, file_index)

                    if epg.start_index > 0:
                        # Try to re-use this source's XML data from the last successful merge
                        if copy_partial_data(working_path, _out, epg.start_index, epg.end_index):
                            log.debug('Last used XML data loaded successfully')
                            epg.start_index = file_index
                            epg.end_index = _out.tell()
                        else:
                            log.debug('Failed to load last XML data')
                            epg.start_index = 0
                            epg.end_index = 0
                            _seek_file(_out, file_index)

                    # Collapse consecutive errors into a single stored result
                    if epg.results and epg.results[0][1] == EPG.ERROR:
                        epg.results[0] = result
                    else:
                        epg.results.insert(0, result)

                epg.results = epg.results[:3]
                if epg.id:
                    epg.save()

                remove_file(self.tmp_file)

            _out.write(b'</tv>')

        remove_file(working_path)
        shutil.move(epg_path_tmp, working_path)
        _safe_copy(working_path, epg_path)
    finally:
        database.close()
        if progress:
            progress.close()
        remove_file(self.tmp_file)
        remove_file(epg_path_tmp)

    log.debug('EPG Merge Time: {0:.2f}'.format(time.time() - start_time))
    return working_path
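# The two file helpers above are assumed to have the following semantics,
# inferred from their call sites: _seek_file() discards everything a failed
# source wrote, and copy_partial_data() re-appends the byte range a source
# occupied in the last successful output. A minimal sketch (plain open()
# stands in for FileIO):
import os

def _seek_file(f, index):
    # Rewind and drop anything written past `index`
    f.seek(index, os.SEEK_SET)
    f.truncate()

def copy_partial_data(src_path, _out, start_index, end_index):
    # Append bytes [start_index, end_index) of the previous output file to
    # _out; returns False when the old file cannot supply the full range.
    if start_index < 1 or end_index <= start_index:
        return False

    try:
        with open(src_path, 'rb') as _in:
            _in.seek(start_index, os.SEEK_SET)
            remaining = end_index - start_index
            while remaining > 0:
                chunk = _in.read(min(65536, remaining))
                if not chunk:
                    return False
                _out.write(chunk)
                remaining -= len(chunk)
    except EnvironmentError:
        return False

    return True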
def playlists(self, refresh=True):
    playlist_path = os.path.join(self.output_path, PLAYLIST_FILE_NAME)
    working_path = os.path.join(self.working_path, PLAYLIST_FILE_NAME)

    if not refresh and xbmcvfs.exists(playlist_path) and xbmcvfs.exists(working_path):
        return working_path

    start_time = time.time()
    database.connect()

    try:
        progress = gui.progressbg() if self.forced else None

        playlists = list(Playlist.select().where(Playlist.enabled == True).order_by(Playlist.order))
        Playlist.update({Playlist.results: []}).where(Playlist.enabled == False).execute()
        Channel.delete().where(Channel.custom == False, Channel.playlist.not_in(playlists)).execute()

        for count, playlist in enumerate(playlists):
            count += 1

            if progress:
                progress.update(int(count * (100 / len(playlists))),
                                'Merging Playlist ({}/{})'.format(count, len(playlists)),
                                _(playlist.label, _bold=True))

            playlist_start = time.time()

            error = None
            try:
                log.debug('Processing: {}'.format(playlist.path))

                if playlist.source_type != Playlist.TYPE_CUSTOM:
                    self._process_source(playlist, METHOD_PLAYLIST, self.tmp_file)
                    with database.db.atomic() as transaction:
                        try:
                            added = self._process_playlist(playlist, self.tmp_file)
                        except:
                            transaction.rollback()
                            raise
                else:
                    added = len(playlist.channels)
            except AddonError as e:
                error = e
            except Error as e:
                error = e
                log.exception(e)
            except Exception as e:
                error = e
                log.exception(e)
            else:
                playlist.results.insert(0, [int(time.time()), Playlist.OK,
                                            '{} Channels ({:.2f}s)'.format(added, time.time() - playlist_start)])
                error = None

            if error:
                result = [int(time.time()), Playlist.ERROR, str(error)]
                # Collapse consecutive errors into a single stored result
                if playlist.results and playlist.results[0][1] == Playlist.ERROR:
                    playlist.results[0] = result
                else:
                    playlist.results.insert(0, result)

            remove_file(self.tmp_file)

            playlist.results = playlist.results[:3]
            playlist.save()

        count = 0
        starting_ch_no = settings.getInt('start_ch_no', 1)

        with codecs.open(working_path, 'w', encoding='utf8') as outfile:
            outfile.write(u'#EXTM3U')

            group_order = settings.get('group_order')
            if group_order:
                outfile.write(u'\n\n#EXTGRP:{}'.format(group_order))

            chno = starting_ch_no
            tv_groups = []
            for channel in Channel.playlist_list(radio=False):
                if channel.chno is None:
                    channel.chno = chno
                chno = channel.chno + 1

                tv_groups.extend(channel.groups)

                outfile.write(u'\n\n')
                outfile.write(channel.get_lines())
                count += 1

            chno = starting_ch_no
            for channel in Channel.playlist_list(radio=True):
                if channel.chno is None:
                    channel.chno = chno
                chno = channel.chno + 1

                # Rename radio groups that clash with TV groups.
                # (A separate counter here; the original re-used `count`, which
                # clobbered the channel total used below.)
                new_groups = []
                for group in channel.groups:
                    dupe_count = 1
                    while group in tv_groups:
                        group = _(_.RADIO_GROUP, group=group)
                        if dupe_count > 1:
                            group = u'{} #{}'.format(group, dupe_count)
                        dupe_count += 1
                    new_groups.append(group)
                channel.groups = new_groups

                outfile.write(u'\n\n')
                outfile.write(channel.get_lines())
                count += 1

            if count == 0:
                outfile.write(u'\n\n#EXTINF:-1,EMPTY PLAYLIST\nhttp')

        log.debug('Wrote {} Channels'.format(count))
        Playlist.after_merge()
        _safe_copy(working_path, playlist_path)
    finally:
        database.close()
        if progress:
            progress.close()
        remove_file(self.tmp_file)

    log.debug('Playlist Merge Time: {0:.2f}'.format(time.time() - start_time))
    return working_path
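# The merged file written above is plain extended M3U. Channel.get_lines() is
# assumed to emit entries in the usual IPTV shape; an illustrative result
# (attribute names follow common M3U convention, not confirmed from this file):
EXAMPLE_PLAYLIST = u'''#EXTM3U

#EXTGRP:News;Sports

#EXTINF:-1 tvg-id="abc.1" tvg-chno="1" group-title="News",ABC News
http://example.com/abc.m3u8

#EXTINF:-1 tvg-id="fm.1" tvg-chno="1" radio="true" group-title="Radio - FM One",FM One
http://example.com/fm1.aac
'''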
def _process_source(self, source, method_name, file_path):
    remove_file(file_path)

    path = source.path.strip()
    source_type = source.source_type
    archive_type = source.archive_type
    self._is_troll = False

    if source_type == Source.TYPE_ADDON:
        addon_id = path
        addon, data = merge_info(addon_id, merging=True)

        if method_name not in data:
            raise Error('{} could not be found for {}'.format(method_name, addon_id))

        path = data[method_name]

        if data['type'] == TYPE_IPTV_MANAGER:
            iptv_manager.process_path(path, file_path)
            return

        template_tags = {
            '$ID': addon_id,
            '$FILE': file_path,
            '$IP': xbmc.getIPAddress(),
        }

        for tag in template_tags:
            path = path.replace(tag, template_tags[tag])

        path = path.strip()
        if path.lower().startswith('plugin://'):
            self._call_addon_method(path)
            return

        if path.lower().startswith(('http://', 'https://')):
            source_type = Source.TYPE_URL
        else:
            source_type = Source.TYPE_FILE

    if source_type == Source.TYPE_URL and path.lower().startswith(('http://', 'https://')):
        if 'drive.google.com' in path.lower():
            log.debug('Gdrive Downloading: {} > {}'.format(path, file_path))
            path = gdrivedl(path, file_path)
        else:
            log.debug('Downloading: {} > {}'.format(path, file_path))
            resp = Session().chunked_dl(path, file_path)
            for troll in TROLLS:
                if troll.lower() in resp.url.lower():
                    self._is_troll = True
                    break
    elif not xbmcvfs.exists(path):
        raise Error(_(_.LOCAL_PATH_MISSING, path=path))
    else:
        _safe_copy(path, file_path)

    if archive_type == Source.ARCHIVE_AUTO:
        archive_type = Source.auto_archive_type(path)

    if archive_type == Source.ARCHIVE_GZIP:
        gzip_extract(file_path)
    elif archive_type == Source.ARCHIVE_XZ:
        xz_extract(file_path)
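# Source.auto_archive_type() is assumed to map file extensions to archive
# types, mirroring the explicit extension table in the other _process_source
# variant. A minimal sketch, relying on the Source constants from this module:
import os

def auto_archive_type(path):
    extensions = {
        '.gz': Source.ARCHIVE_GZIP,
        '.xz': Source.ARCHIVE_XZ,
    }
    ext = os.path.splitext(path.lower())[1]
    return extensions.get(ext, Source.ARCHIVE_NONE)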
from .models import Button

if PY3:
    # http://archive.raspberrypi.org/debian/pool/main/r/rpi.gpio/python3-rpi.gpio_0.7.0~buster-1_armhf.deb
    SO_SRC = os.path.join(ADDON_PATH, 'resources', 'files', '0.7.0_py3.so')
else:
    # http://archive.raspberrypi.org/debian/pool/main/r/rpi.gpio/python-rpi.gpio_0.7.0~buster-1_armhf.deb
    SO_SRC = os.path.join(ADDON_PATH, 'resources', 'files', '0.7.0_py2.so')

SO_DST = os.path.join(ADDON_PATH, 'resources', 'lib', 'RPi', '_GPIO.so')

if not os.path.exists(SO_SRC):
    raise Exception('Missing required {} file'.format(SO_SRC))

if md5sum(SO_SRC) != md5sum(SO_DST):
    remove_file(SO_DST)
    shutil.copy(SO_SRC, SO_DST)

if os.path.exists('/storage/.kodi'):
    SYSTEM = 'libreelec'
elif os.path.exists('/home/osmc'):
    SYSTEM = 'osmc'
elif os.path.exists('/home/pi'):
    SYSTEM = 'raspbian'
elif os.path.exists('/home/xbian'):
    SYSTEM = 'xbian'
else:
    SYSTEM = 'mock'

sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)))
import gpiozero
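# md5sum() must tolerate SO_DST not existing yet (the comparison above runs
# before any copy has been made). A minimal sketch under that assumption:
import hashlib
import os

def md5sum(path):
    if not os.path.exists(path):
        return None

    m = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            m.update(chunk)
    return m.hexdigest()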