def __init__(self, modref):
    '''Initialises the UI-handler plugin.

    Sets up the simulated player state, loads the persisted movie list,
    and finally registers the plugin with the message handler.
    '''
    self.modref = modref
    # do the plugin specific initialisation first
    self.play_time = 0
    self.play_total_secs = 90 * 60
    # initial (simulated) player state shown in the UI
    self.player_info = {
        'play': defaults.PLAYER_STATE_PLAY,
        'position': 0,
        'volume': 3,
        'playTime': '00:00',
        'remainingTime': '00:00',
    }
    self.movielist_storage = JsonStorage(self.plugin_id, 'runtime', "movielist.json", {'movielist': {}})
    self.movielist = self.movielist_storage.read('movielist', {})
    self.lock = threading.Lock()
    # announcing the plugin itself happens last
    super().__init__(modref.message_handler, self)
    handler = modref.message_handler
    handler.add_event_handler(self.plugin_id, 0, self.event_listener)
    handler.add_query_handler(self.plugin_id, 0, self.query_handler)
    self.runFlag = True
def __init__(self, modref):
    '''Initialises the channel-list plugin.

    Prepares the default channel-list configuration and then registers
    the plugin with the message handler.
    '''
    # do the plugin specific initialisation first
    self.origin_dir = os.path.dirname(__file__)
    # default channel-list sources used when no config file exists yet
    default_channel_lists = [
        {'url': 'file:///Astra_19.2.xspf', 'type': 'xspf', 'scheme': 'http', 'netloc': '192.168.1.131'},
        {'url': 'file:///ASTRA_19_2E.m3u', 'type': 'm3u', 'scheme': '', 'netloc': ''},
    ]
    # NOTE(review): bare `plugin_id` (not self.plugin_id) — presumably a
    # module-level constant in this file; confirm against the full module
    self.config = JsonStorage(plugin_id, 'backup', "config.json",
                              {'channel_list_urls': default_channel_lists})
    # announcing the plugin itself happens last
    super().__init__(modref)
    modref.message_handler.add_event_handler(plugin_id, 0, self.event_listener)
    modref.message_handler.add_query_handler(plugin_id, 0, self.query_handler)
def __init__(self, modref):
    '''Creates the object: registers the plugin handlers and prepares
    the EPG lookup state.'''
    self.modref = modref
    super().__init__(modref.message_handler, self)
    mh = modref.message_handler
    mh.add_event_handler(self.plugin_id, 0, self.event_listener)
    mh.add_query_handler(self.plugin_id, 0, self.query_handler)
    self.runFlag = True
    # plugin specific state
    self.origin_dir = os.path.dirname(__file__)
    self.config = JsonStorage(os.path.join(self.origin_dir, "data.json"), {})
    self.channels_info = JsonStorage(os.path.join(self.origin_dir, "channels_info.json"), {})
    # lookup collections, filled while EPG data is processed
    self.allChannels = set()
    self.providers = set()
    self.categories = set()
    self.movies = {}
    self.timeline = {}
    self.favorite_channels = [
        'daserste.de',
        'einsextra.daserste.de',
        'einsfestival.daserste.de',
        'ndrhd.daserste.de',
        'hd.zdf.de',
    ]
def __init__(self, modref):
    '''Initialises the plugin: storages, lookup collections and favourite
    channels first, then registers the plugin with the message handler.'''
    self.modref = modref
    # do the plugin specific initialisation first
    self.origin_dir = os.path.dirname(__file__)
    self.config = JsonStorage(self.plugin_id, 'runtime', "data.json", {})
    self.channels_info = JsonStorage(self.plugin_id, 'runtime', "channels_info.json", {})
    # lookup collections, filled while EPG data is processed
    self.allChannels = set()
    self.providers = set()
    self.categories = set()
    self.movies = {}
    self.timeline = {}
    self.favorite_channels = [
        'daserste.de',
        'einsextra.daserste.de',
        'einsfestival.daserste.de',
        'ndrhd.daserste.de',
        'hd.zdf.de',
    ]
    # announcing the plugin itself happens last
    super().__init__(modref.message_handler, self)
    mh = modref.message_handler
    mh.add_event_handler(self.plugin_id, 0, self.event_listener)
    mh.add_query_handler(self.plugin_id, 0, self.query_handler)
    self.runFlag = True
def __init__(self, modref):
    '''Initialises the EPG plugin.

    Reads the backup config, prepares the EPG buffer/storage, registers
    the plugin with the message handler and defines the hardwired
    category filters offered by this EPG.
    '''
    self.modref = modref
    self.logger = schnipsllogger.getLogger(__name__)
    # do the plugin specific initialisation first
    self.origin_dir = os.path.dirname(__file__)
    self.config = JsonStorage(self.plugin_id, 'backup', "config.json", {
        'epgloops': 1,
        'epgtimeout': 60,
        'stream_source': 'SatIP Live'
    })
    # this defines who is the real data provider for the entries found in the EPG data
    self.stream_source = self.config.read('stream_source')
    self.epgbuffer_file_name = DirectoryMapper.abspath(self.plugin_id, 'tmpfs', 'epgbuffer.ts', True)
    self.process = None
    self.epg_storage = JsonStorage(self.plugin_id, 'runtime', "epgdata.json", {'epgdata': {}})
    self.all_EPG_Data = self.epg_storage.read('epgdata')
    self.timeline = {}
    # at last announce the own plugin
    super().__init__(modref)
    modref.message_handler.add_event_handler(self.plugin_id, 0, self.event_listener)
    modref.message_handler.add_query_handler(self.plugin_id, 0, self.query_handler)
    # plugin specific stuff
    # each EPG has its own special hardwired categories
    self.categories = [
        {'text': 'category_today', 'value': '{"type": "day", "expression": "today"}'},
        {'text': 'category_tomorrow', 'value': '{"type": "day", "expression": "tomorrow"}'},
        {'text': 'category_now', 'value': '{"type": "time", "expression": "now"}'},
        # BUGFIX: the range expression was missing its closing bracket
        # ("['8 PM' to tomorrow" -> "['8 PM' to tomorrow]"), matching the
        # "['-1 week' to now]" range syntax used by the sibling EPG plugin
        {'text': 'category_evening', 'value': '{"type": "time", "expression": "[\'8 PM\' to tomorrow]"}'},
    ]
def __init__(self, storageType="memory"):
    '''Creates the facade and selects the concrete storage backend.

    Supported types: "memory", "file", "json".
    Raises TypeError for any other value.
    '''
    backends = {
        "memory": MemoryStorage,
        "file": FileStorage,
        "json": JsonStorage,
    }
    if storageType not in backends:
        raise TypeError('Wrong storage type!')
    self.realStorage = backends[storageType]()
def __init__(self, modref):
    '''Initialises the plugin: provider/movie state and backup config
    first, then registers the plugin with the message handler.'''
    # do the plugin specific initialisation first
    self.providers = set()
    self.movies = {}
    self.origin_dir = os.path.dirname(__file__)
    # default device list used when no config file exists yet
    default_devices = [{
        'url': 'http://192.168.1.7:3000/channels.html',
        'channels_per_device': 0,
    }]
    self.config = JsonStorage(self.plugin_id, 'backup', "config.json", default_devices)
    # announcing the plugin itself happens last
    super().__init__(modref)
    modref.message_handler.add_event_handler(self.plugin_id, 0, self.event_listener)
    modref.message_handler.add_query_handler(self.plugin_id, 0, self.query_handler)
def __init__(self, modref):
    '''Initialises the device plugin: device table, zeroconf discovery
    and config first, then registers the plugin with the message handler.'''
    self.modref = modref
    # do the plugin specific initialisation first
    self.devices = {}
    self.zeroconf = zeroconf.Zeroconf()
    self.lock = Lock()
    self.origin_dir = os.path.dirname(__file__)
    self.config = JsonStorage(self.plugin_id, 'backup', "config.json", {'stopdelay': 1.0})
    # announcing the plugin itself happens last
    super().__init__(modref.message_handler, self)
    mh = modref.message_handler
    mh.add_event_handler(self.plugin_id, 0, self.event_listener)
    mh.add_query_handler(self.plugin_id, 0, self.query_handler)
    self.runFlag = True
def __init__(self, act_modref):
    '''Creates the HTTP and websocket server.

    Reads server settings from the backup config, lets command-line
    arguments override them, creates the threaded HTTP server and
    optionally wraps its listening socket in TLS.
    '''
    global modref
    modref = act_modref
    super().__init__(modref.message_handler, self)
    # reads the config, if any
    self.config = JsonStorage('webserver', 'backup', "config.json", {
        'server_config': {
            "credentials": "",
            "host": "0.0.0.0",
            "port": 8000,
            "secure": False
        },
    })
    server_config = self.config.read("server_config", {})
    # set up the argument parser with values from the config
    parser = argparse.ArgumentParser()
    parser.add_argument("--host", default=server_config["host"],
                        help="the IP interface to bound the server to")
    # BUGFIX: without type=int a port given on the command line arrives as a
    # string (the config default is an int), which would break the
    # ThreadedHTTPServer address tuple and the %d format below
    parser.add_argument("-p", "--port", type=int, default=server_config["port"],
                        help="the server port")
    parser.add_argument("-s", "--secure", action="store_true",
                        default=server_config["secure"],
                        help="use secure https: and wss:")
    parser.add_argument("-c", "--credentials", default=server_config["credentials"],
                        help="user credentials")
    args = parser.parse_args()
    self.server = ThreadedHTTPServer((args.host, args.port), WSZuulHandler)
    modref.message_handler.add_event_handler('webserver', 0, self.server.event_listener)
    self.server.daemon_threads = True
    self.server.auth = b64encode(args.credentials.encode("ascii"))
    if args.secure:
        # ssl.wrap_socket() was deprecated in 3.7 and removed in 3.12;
        # use an explicit server-side SSLContext instead
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        context.load_cert_chain(certfile='./server.pem', keyfile='./key.pem')
        self.server.socket = context.wrap_socket(self.server.socket, server_side=True)
        print('initialized secure https server at port %d' % (args.port))
    else:
        print('initialized http server at port %d' % (args.port))
def __init__(self, modref):
    '''Creates the simulator: registers the plugin handlers, then loads
    its channel-file configuration.'''
    super().__init__(modref)
    mh = modref.message_handler
    mh.add_event_handler(self.plugin_id, 0, self.event_listener)
    mh.add_query_handler(self.plugin_id, 0, self.query_handler)
    # plugin specific state
    self.origin_dir = os.path.dirname(__file__)
    # defaults used when no config file exists yet
    simulator_defaults = {
        'channel_file': 'Astra_19.2.xspf',
        'scheme': 'http',
        'netloc': '192.168.1.99',
    }
    self.config = JsonStorage(os.path.join(self.origin_dir, "config.json"), simulator_defaults)
def __init__(self, modref):
    '''Creates the object: registers the plugin handlers and prepares
    the EPG buffer, storage and lookup state.'''
    self.modref = modref
    super().__init__(modref.message_handler, self)
    mh = modref.message_handler
    mh.add_event_handler(self.plugin_id, 0, self.event_listener)
    mh.add_query_handler(self.plugin_id, 0, self.query_handler)
    self.runFlag = True
    # plugin specific state
    self.origin_dir = os.path.dirname(__file__)
    self.config = JsonStorage(os.path.join(self.origin_dir, "config.json"), {
        'epgloops': 1,
        'epgtimeout': 60,
        'stream_source': 'SatIP Live',
    })
    # this defines who is the real data provider for the entries found in the EPG data
    self.stream_source = self.config.read('stream_source')
    self.epgbuffer_file_name = os.path.join(self.origin_dir, "epgbuffer.ts")
    self.process = None
    self.epg_storage = JsonStorage(os.path.join(self.origin_dir, "epgdata.json"), {'epgdata': {}})
    self.all_EPG_Data = self.epg_storage.read('epgdata')
    # lookup collections, filled while EPG data is processed
    self.providers = set()
    self.categories = set()
    self.movies = {}
    self.timeline = {}
    self.lock = Lock()
def __init__(self, modref):
    '''Initialises the plugin: logger and paths first, then handler
    registration, hardwired categories and the provider cache.'''
    self.modref = modref
    self.logger = schnipsllogger.getLogger(__name__)
    # do the plugin specific initialisation first
    self.origin_dir = os.path.dirname(__file__)
    # announcing the plugin itself happens last
    super().__init__(modref)
    mh = modref.message_handler
    mh.add_event_handler(self.plugin_id, 0, self.event_listener)
    mh.add_query_handler(self.plugin_id, 0, self.query_handler)
    # each EPG has its own special hardwired categories
    self.categories = [
        {'text': 'category_last_week', 'value': '{"type": "day", "expression": "[\'-1 week\' to now]"}'},
        {'text': 'category_last_month', 'value': '{"type": "day", "expression": "[\'-4 week\' to now]"}'},
    ]
    # additional to our whoosh db, we cache the providers so that we do not
    # have to scan the huge whoosh db at start to reconstruct the provider list
    self.provider_storage = JsonStorage(self.plugin_id, 'runtime', "provider_cache.json", {'provider_cache': []})
    self.providers = set(self.provider_storage.read('provider_cache'))
def __init__(self, modref):
    '''Creates the recorder plugin: registers its handlers, then loads
    the recording config and the persisted record bookkeeping.'''
    self.modref = modref
    super().__init__(modref.message_handler, self)
    mh = modref.message_handler
    mh.add_event_handler(self.plugin_id, 0, self.event_listener)
    mh.add_query_handler(self.plugin_id, 0, self.query_handler)
    self.runFlag = True
    # plugin specific state
    self.origin_dir = os.path.dirname(__file__)
    self.config = JsonStorage(os.path.join(self.origin_dir, "config.json"), {
        'path': '/var/schnipsl',
        'www-root': 'http://schnipsl:9092/',
    })
    self.records = JsonStorage(os.path.join(self.origin_dir, "records.json"), {})
    # we need to store the thread pointers seperate from self.records,
    # as we can't store them as json
    self.record_threats = {}
    # remembers how long the last recording action is away
    self.last_recorded_time = 0
class SplPlugin(SplThread):
    '''EPG plugin: downloads programme data for favourite channels from
    xmltv.se, caches it in JsonStorage and answers source/provider/
    category/movie queries over the message handler.'''

    plugin_id = 'xmltvepg'
    plugin_names = ['XMLTV EPG', 'SAT Channels']

    def __init__(self, modref):
        ''' creates the object '''
        self.modref = modref
        super().__init__(modref.message_handler, self)
        modref.message_handler.add_event_handler(self.plugin_id, 0, self.event_listener)
        modref.message_handler.add_query_handler(self.plugin_id, 0, self.query_handler)
        self.runFlag = True
        # plugin specific stuff
        self.origin_dir = os.path.dirname(__file__)
        self.config = JsonStorage(os.path.join(self.origin_dir, "data.json"), {})
        self.channels_info = JsonStorage(os.path.join(self.origin_dir, "channels_info.json"), {})
        # lookup collections, filled while EPG data is processed
        self.allChannels = set()
        self.providers = set()
        self.categories = set()
        self.movies = {}
        self.timeline = {}
        # only channels in this list get their EPG data collected/downloaded
        self.favorite_channels = [
            'daserste.de', 'einsextra.daserste.de', 'einsfestival.daserste.de',
            'ndrhd.daserste.de', 'hd.zdf.de'
        ]

    def event_listener(self, queue_event):
        ''' react on events '''
        #print("xmltvepg event handler", queue_event.type, queue_event.user)
        if queue_event.type == defaults.STREAM_REQUEST_PLAY_LIST:
            # returns None if no further query handling is needed
            self.stream_answer_play_list(queue_event)
        return queue_event  # dont forget the event for further pocessing...

    def query_handler(self, queue_event, max_result_count):
        ''' answers with list[] of results '''
        # print("xmltvepg query handler", queue_event.type, queue_event.user, max_result_count)
        if queue_event.type == defaults.QUERY_AVAILABLE_SOURCES:
            return self.plugin_names
        if queue_event.type == defaults.QUERY_AVAILABLE_PROVIDERS:
            res = []
            for plugin_name in self.plugin_names:
                # this plugin is one of the wanted
                if plugin_name in queue_event.params['select_source_values']:
                    if plugin_name == self.plugin_names[0]:
                        for provider in self.providers:
                            if max_result_count > 0:
                                res.append(provider)
                                max_result_count -= 1
                            else:
                                return res  # maximal number of results reached
                    if plugin_name == self.plugin_names[1]:
                        for channel in self.allChannels:
                            if max_result_count > 0:
                                res.append(channel)
                                max_result_count -= 1
                            else:
                                return res  # maximal number of results reached
            return res
        if queue_event.type == defaults.QUERY_AVAILABLE_CATEGORIES:
            res = []
            for plugin_name in self.plugin_names:
                # this plugin is one of the wanted
                if plugin_name in queue_event.params['select_source_values']:
                    for category in self.categories:
                        if max_result_count > 0:
                            res.append(category)
                            max_result_count -= 1
                        else:
                            return res  # maximal number of results reached
            return res
        if queue_event.type == defaults.QUERY_MOVIE_ID:
            # params is a movie uri; the part before ':' selects the plugin bucket
            elements = queue_event.params.split(':')
            try:
                return [self.movies[elements[0]][queue_event.params]]
            except:
                return []
        if queue_event.type == defaults.QUERY_AVAILABLE_MOVIES:
            res = []
            titles = queue_event.params['select_title'].split()
            # descriptions=queue_event.params['select_description'].split()
            # whole-word, case-insensitive matcher per search word
            description_regexs = [
                re.compile(r'\b{}\b'.format(description), re.IGNORECASE)
                for description in queue_event.params['select_description'].split()
            ]
            for plugin_name in self.plugin_names:
                # this plugin is one of the wanted
                if plugin_name in queue_event.params['select_source_values']:
                    if plugin_name in self.movies:
                        # are there any movies stored for this plugin?
                        for movie in self.movies[plugin_name].values():
                            if movie.provider in queue_event.params['select_provider_values']:
                                if titles:
                                    # any search word matching title or category counts
                                    found = False
                                    for title in titles:
                                        if title.lower() in movie.title.lower():
                                            found = True
                                        if title.lower() in movie.category.lower():
                                            found = True
                                    if not found:
                                        continue
                                if description_regexs:
                                    found = False
                                    for description_regex in description_regexs:
                                        if re.search(description_regex, movie.description):
                                            found = True
                                    if not found:
                                        continue
                                if max_result_count > 0:
                                    movie_info = MovieInfo.movie_to_movie_info(movie, '')
                                    movie_info['streamable'] = False
                                    movie_info['recordable'] = True
                                    res.append(movie_info)
                                    max_result_count -= 1
                                else:
                                    return res  # maximal number of results reached
            return res
        return []

    def _run(self):
        ''' starts the server '''
        self.load_filmlist('xmltv_datalist.xml.gz')
        tick = 0  # NOTE(review): tick is never used in this loop
        while self.runFlag:
            time.sleep(1)

    def _stop(self):
        '''Signals the _run loop to terminate.'''
        self.runFlag = False

    # ------ plugin specific routines

    def getAbsolutePath(self, file_name):
        '''Returns file_name resolved relative to this plugin's directory.'''
        return os.path.join(self.origin_dir, file_name)

    def load_filmlist(self, file_name):
        '''Refreshes the local xmltv update list (at most every 48h), syncs
        the cached EPG data against it and rebuilds the in-memory indexes.'''
        origin_dir = os.path.dirname(__file__)
        file_name = os.path.join(origin_dir, file_name)
        update_list = None
        print(os.path.abspath(file_name))
        try:
            # does the file exist at all already?
            xmltv_updates_time_stamp = os.path.getmtime(file_name)
        except:
            xmltv_updates_time_stamp = 0
        print("timestamp", xmltv_updates_time_stamp, time.time())
        if xmltv_updates_time_stamp < time.time() - 60 * 60 * 48:  # file is older as 48 hours
            print("Retrieve xmltv_updates list")
            try:
                urlretrieve('https://xmltv.xmltv.se/datalist.xml.gz', file_name)
            except Exception as e:
                print('failed xmltv_updates download', str(e))
        try:
            with open(file_name, 'rb') as xmltv_updates_file_handle:
                update_list = parse(xmltv_updates_file_handle)
        except Exception as e:
            print('failed xmltv_updates read', str(e))
        epg_data = self.config.read('epg', {})
        # channel_id -> {day_text -> lastmodified} for favourite channels only
        collect_lastmodified = {}
        if update_list:
            for channel in update_list.iterfind('channel'):
                channel_id = channel.attrib['id']
                self.allChannels.add(channel_id)
                if channel_id in self.favorite_channels:
                    if not channel_id in collect_lastmodified:
                        collect_lastmodified[channel_id] = {}
                    for datafor in channel.iterfind('datafor'):
                        day_text = datafor.text
                        last_modified = datafor.attrib['lastmodified']
                        collect_lastmodified[channel_id][day_text] = last_modified
        # first we delete old, outdated dates
        for channel_id in list(epg_data):
            if not channel_id in collect_lastmodified:
                del (epg_data[channel_id])  # delete the whole channel
            else:
                for day_text in list(epg_data[channel_id]):
                    if not day_text in collect_lastmodified[channel_id]:
                        del (epg_data[channel_id][day_text])
        # check for updates:
        for channel_id in collect_lastmodified:
            print(channel_id)
            if not channel_id in epg_data:
                epg_data[channel_id] = {}
            for day_text in collect_lastmodified[channel_id]:
                try:
                    if not day_text in epg_data[channel_id]:
                        # day not cached yet: fetch it
                        epg_details = self.load_from_xmltv(channel_id, day_text)
                        epg_data[channel_id][day_text] = {
                            'lastmodified': collect_lastmodified[channel_id][day_text],
                            'epg_data': epg_details
                        }
                        print('store', channel_id, day_text)
                    else:
                        # day cached, but remote copy is newer: re-fetch
                        if epg_data[channel_id][day_text]['lastmodified'] < collect_lastmodified[channel_id][day_text]:
                            epg_details = self.load_from_xmltv(channel_id, day_text)
                            epg_data[channel_id][day_text] = {
                                'lastmodified': collect_lastmodified[channel_id][day_text],
                                'epg_data': epg_details
                            }
                            print("update epg for ", channel_id, day_text)
                except Exception as e:
                    print('exception on load_from_xmltv', channel_id, day_text, str(e))
        self.config.write('epg', epg_data, False)
        # refill the internal lists
        self.providers = set()
        self.categories = set()
        plugin_name = self.plugin_names[0]
        if not plugin_name in self.movies:
            # this is an indicator that the epg was loaded from disk and not
            # updated from xmltv.se, so we need to fill a few structures
            # NOTE(review): indentation of the loop below reconstructed from a
            # collapsed source — assumed to run only when the cache was cold
            self.movies[plugin_name] = {}
            for provider, days in epg_data.items():
                self.providers.add(provider)
                self.timeline[provider] = []
                for movie_data in days.values():
                    for movie_info in movie_data['epg_data']:
                        # lightweight anonymous record: (timestamp, movie_info)
                        self.timeline[provider].append(
                            type('', (object,), {
                                'timestamp': movie_info['timestamp'],
                                'movie_info': movie_info
                            })())
                        self.movies[plugin_name][movie_info['uri']] = Movie(
                            source=plugin_name,
                            source_type=defaults.MOVIE_TYPE_STREAM,
                            provider=provider,
                            category=movie_info['category'],
                            title=movie_info['title'],
                            timestamp=movie_info['timestamp'],
                            duration=movie_info['duration'],
                            description=movie_info['description'],
                            url=None)
                        self.categories.add(movie_info['category'])
        # keep each provider timeline sorted by timestamp
        for epg_list in self.timeline.values():
            epg_list.sort(key=self.get_timestamp)

    def get_attrib(self, xmlelement, identifier, default=None):
        ''' reads a attribute fail-safe '''
        try:
            return xmlelement.attrib[identifier]
        except:
            return default

    def get_text(self, xmlelement, default=None):
        ''' reads a element text fail-safe '''
        try:
            return xmlelement.text
        except:
            return default

    def search_channel_info(self, channel_epg_name):
        '''Returns the stored channel info dict for an EPG channel name,
        or None when unknown.'''
        channels_info = self.channels_info.read('channels_info')
        if channels_info:
            for channel_info in channels_info:
                if channel_info['channel_epg_name'] == channel_epg_name:
                    return channel_info

    def load_from_xmltv(self, channel_id, day_text):
        '''Downloads one channel/day XML from xmltv.se and converts each
        programme into a Movie plus a MovieInfo result entry.'''
        var_url = urlopen('https://xmltv.xmltv.se/' + channel_id + '_' + day_text + '.xml')
        epg_xml = parse(var_url)
        result = []
        count = 0
        for programme in epg_xml.iterfind('programme'):
            provider = self.get_attrib(programme, 'channel')
            start = self.string_to_timestamp(self.get_attrib(programme, 'start'))
            stop = self.string_to_timestamp(self.get_attrib(programme, 'stop'))
            title = self.get_text(programme.find('title'), '')
            desc = self.get_text(programme.find('desc'), '')
            category = self.get_text(programme.find('category'), '')
            episode = programme.find('episode-num')
            episode_num = None
            channel_info = self.search_channel_info(provider)
            url = None
            media_type = None
            if channel_info:
                url = channel_info['url']
                media_type = channel_info['mediatype']
            if episode:
                num_system = self.get_attrib(episode, 'system')
                if num_system == 'xmltv_ns':
                    episode_num = self.get_text(episode)
            count += 1
            plugin_name = self.plugin_names[0]
            self.providers.add(provider)
            self.categories.add(category)
            new_movie = Movie(source=plugin_name,
                              source_type=defaults.MOVIE_TYPE_STREAM,
                              provider=provider,
                              category=category,
                              title=title,
                              timestamp=str(int(start)),
                              duration=stop - start,
                              description=desc,
                              url=url)
            new_movie.add_stream(media_type, '', url)
            if not plugin_name in self.movies:
                self.movies[plugin_name] = {}
            self.movies[plugin_name][new_movie.uri()] = new_movie
            movie_info = MovieInfo.movie_to_movie_info(new_movie, category)
            movie_info['recordable'] = True
            result.append(movie_info)
        print("epg loaded, {0} entries".format(count))
        return result

    def get_timestamp(self, elem):
        '''helper function for the array sort function '''
        return elem.timestamp

    def string_to_timestamp(self, timestring):
        '''Converts an xmltv "%Y%m%d%H%M%S %z" string into a UTC unix
        timestamp; returns '' for an empty input.'''
        if timestring:
            # read https://stackoverflow.com/a/2956997 to understand why timegm() is used insted of mktime()!
            return calendar.timegm(
                datetime.datetime.strptime(timestring, "%Y%m%d%H%M%S %z").timetuple())
        else:
            return ''

    def stream_answer_play_list(self, queue_event):
        '''Answers a play-list request for a SAT channel: finds the current
        and next programme on the provider timeline and emits a combined
        MovieInfo via STREAM_ANSWER_PLAY_LIST.'''
        uri_elements = queue_event.data['uri'].split(':')
        source = uri_elements[0]
        if source != self.plugin_names[1]:
            return queue_event
        provider = uri_elements[1]
        time_stamp = time.time()
        try:
            epg_list = self.timeline[provider]
            found = None  # NOTE(review): never used afterwards
            nr_of_entries = len(epg_list)
            i = 0
            # advance to the first entry starting after "now"
            while i < nr_of_entries and time_stamp > int(epg_list[i].timestamp):
                i += 1
            if i < nr_of_entries and i > 0:
                # we found an entry: i-1 is running now, i is the next programme
                first_movie_info = epg_list[i - 1].movie_info
                second_movie_info = epg_list[i].movie_info
                combined_movie_info = MovieInfo(
                    uri=first_movie_info['uri'],
                    title=first_movie_info['title'],
                    category=second_movie_info['title'],
                    provider=first_movie_info['provider'],
                    timestamp=second_movie_info['timestamp'],
                    duration=0,  #
                    description=first_movie_info['description'],
                    query=first_movie_info['query'])
                combined_movie_info['recordable'] = True
                self.modref.message_handler.queue_event(
                    None, defaults.STREAM_ANSWER_PLAY_LIST, {
                        'uri': queue_event.data['uri'],
                        'movie_info': combined_movie_info
                    })
        except:
            print('unknown provider', provider)
class SplPlugin(SplThread):
    '''HD recorder plugin: accepts timer record requests, schedules and
    runs recording threads and cleans up finished/obsolete records.'''

    plugin_id = 'record_hd'
    plugin_names = ['HD Recorder']

    def __init__(self, modref):
        ''' creates the plugin '''
        self.modref = modref
        super().__init__(modref.message_handler, self)
        modref.message_handler.add_event_handler(self.plugin_id, 0, self.event_listener)
        modref.message_handler.add_query_handler(self.plugin_id, 0, self.query_handler)
        self.runFlag = True
        # plugin specific stuff
        self.origin_dir = os.path.dirname(__file__)
        self.config = JsonStorage(os.path.join(self.origin_dir, "config.json"), {
            'path': '/var/schnipsl',
            'www-root': 'http://schnipsl:9092/'
        })
        self.records = JsonStorage(os.path.join(self.origin_dir, "records.json"), {})
        # we need to store the thread pointers seperate from self.records,
        # as we can't store them as json
        self.record_threats = {}
        # remembers how long the last recording action is away
        self.last_recorded_time = 0

    def event_listener(self, queue_event):
        '''Handles TIMER_RECORD_REQUEST events.'''
        if queue_event.type == defaults.TIMER_RECORD_REQUEST:
            self.timer_record_request(queue_event.data)
        # for further pocessing, do not forget to return the queue event
        return queue_event

    def query_handler(self, queue_event, max_result_count):
        ''' try to send simulated answers '''
        # print("hd_recorder query handler", queue_event.type, queue_event.user, max_result_count)
        if queue_event.type == defaults.QUERY_MOVIE_ID:
            new_uri = queue_event.params
            for record_movie in self.records.read('all', {}).values():  # 'all': read the whole config
                if record_movie['new_uri'] == new_uri:
                    return [
                        Movie(source=self.plugin_names[0],
                              source_type=defaults.MOVIE_TYPE_RECORD,
                              provider=self.plugin_names[0],
                              category=record_movie['category'],
                              title=record_movie['title'],
                              timestamp=record_movie['timestamp'],
                              duration=record_movie['duration'],
                              description=record_movie['description'],
                              url=record_movie['new_url'])
                    ]
        return []

    def _run(self):
        ''' starts the server '''
        # check_for_records every 10s, cleanup_records every 60s
        scheduler = Scheduler([(self.check_for_records, 10), (self.cleanup_records, 60)])
        while self.runFlag:
            scheduler.execute()
            time.sleep(2)

    def _stop(self):
        '''Signals the _run loop to terminate.'''
        self.runFlag = False

    def timer_record_request(self, data):
        '''Creates a new pending record entry for the requested movie uri,
        unless one already exists.'''
        uri = data['uri']
        uuid = data['uuid']
        movie_info_list = self.modref.message_handler.query(
            Query(None, defaults.QUERY_MOVIE_ID, uri))
        if movie_info_list:
            movie = movie_info_list[0]
            uri = movie.uri()
            # do we have that record request already
            existing_record = self.records.read(uri)
            if not existing_record:
                path = urlparse(movie.url).path
                ext = os.path.splitext(path)[1]
                # transport streams get remuxed/stored as mp4
                if ext.lower() == '.ts':
                    ext = '.mp4'
                uri_base64 = base64_encode(uri)
                file_path = os.path.join(self.config.read('path'), uri_base64 + ext)
                if movie.source_type == defaults.MOVIE_TYPE_RECORD:
                    self.records.write(
                        uri,
                        {
                            # in case of a record we set start and duration to 0 to indicate
                            # that the recording can start immediadly & has no duration
                            'record_starttime': 0,
                            'record_duration': 0,
                            'provider': movie.provider,
                            'category': movie.category,
                            'title': movie.title,
                            'timestamp': movie.timestamp,
                            'duration': movie.duration,
                            'description': movie.description,
                            'url': movie.url,
                            'uri': uri,
                            'new_uri': self.plugin_names[0] + ':' + uri_base64,
                            'new_url': self.config.read('www-root') + uri_base64 + ext,
                            'uuid': uuid,
                            'ext': ext,
                            'file_path': file_path,
                            'state': record_states.WAIT_FOR_RECORDING
                        })
                if movie.source_type == defaults.MOVIE_TYPE_STREAM:
                    # recording a stream with a duration of 0 is a very bad idea,
                    # because it would never stop..
                    if movie.duration:
                        self.records.write(
                            uri, {
                                'record_starttime': int(movie.timestamp),
                                'record_duration': movie.duration,
                                'category': movie.category,
                                'title': movie.title,
                                'timestamp': movie.timestamp,
                                'duration': movie.duration,
                                'description': movie.description,
                                'url': movie.url,
                                'uri': uri,
                                'new_uri': self.plugin_names[0] + ':' + uri_base64,
                                'new_url': self.config.read('www-root') + uri_base64 + ext,
                                'uuid': uuid,
                                'ext': ext,
                                'file_path': file_path,
                                'state': record_states.WAIT_FOR_RECORDING
                            })

    def check_for_records(self):
        '''Scans pending records and starts (or fails) them depending on
        their start time and duration.'''
        act_time = time.time()
        for uri, record in self.records.config.items():
            if record['state'] == record_states.WAIT_FOR_RECORDING:
                # this is a record, which can be recorded immediadly
                if record['record_duration'] == 0:
                    record['state'] = record_states.ACTUAL_RECORDING
                    self.records.write(uri, record)
                    self.recording(record)
                    continue
                # something went wrong, the record time was in the past
                if record['record_starttime'] + record['record_duration'] < act_time:
                    record['state'] = record_states.RECORDING_FAILED
                    self.records.write(uri, record)
                    self.deploy_record_result(record, False)
                    continue
                # it's time to start
                if record['record_starttime'] - self.config.read('padding_secs', 300) <= act_time \
                        and record['record_starttime'] + record['record_duration'] > act_time:
                    record['state'] = record_states.ACTUAL_RECORDING
                    self.records.write(uri, record)
                    self.recording(record)
                    continue

    def cleanup_records(self):
        '''Deploys results of finished recording threads and deletes records
        (and their files) that are no longer referenced by the UI.'''
        records_to_delete = {}
        act_time = time.time()
        # request which movies are still in the UI list
        valid_movieuri_list = self.modref.message_handler.query(
            Query(None, defaults.QUERY_VALID_MOVIE_RECORDS, {'source': self.plugin_names[0]}))
        for uri, record in self.records.config.items():
            if uri in self.record_threats:
                # recording is finished, so deploy the result
                if not self.record_threats[uri].is_alive():
                    del (self.record_threats[uri])  # we destroy the thread
                    self.deploy_record_result(
                        record, record['state'] == record_states.RECORDING_FINISHED)
                    self.last_recorded_time = act_time
            # NOTE(review): block structure below reconstructed from a collapsed
            # source — guard assumed to sit at loop level
            if self.last_recorded_time > act_time - 5 * 60:
                # don't do any delete action if the last record is just 5 mins ago
                # to give the UI some time to adapt the new movie
                return
            if record['state'] == record_states.RECORDING_FINISHED or record['state'] == record_states.RECORDING_FAILED:
                new_uri = record['new_uri']
                print('Record on disk:', new_uri)
                if not new_uri in valid_movieuri_list:
                    records_to_delete[uri] = record
        # some debug output
        for uri in valid_movieuri_list:
            print('recoder uri:', uri)
        if records_to_delete:
            # go through the list of records to be deleted
            for uri, record in records_to_delete.items():
                # delete the file
                file_path = record['file_path']
                print('try to delete file', file_path)
                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        del (self.records.config[uri])
                        self.records.save()
                        print('file deleted', file_path)
                    except Exception as ex:
                        print("Cant delete record file {0}. Error: {1}".format(file_path, str(ex)))
                else:
                    # remove the entry
                    print('remove the entry', uri)
                    del (self.records.config[uri])
                    self.records.save()

    def deploy_record_result(self, record, sucess):
        '''Publishes the outcome of a recording as a TIMER_RECORD_RESULT event.'''
        self.modref.message_handler.queue_event(
            None, defaults.TIMER_RECORD_RESULT, {
                'new_uri': record['new_uri'],
                'new_url': record['new_url'],
                'uuid': record['uuid'],
                'sucess': sucess
            })

    def recording(self, record):
        '''Starts a background thread that performs the actual recording.'''
        uri = record['uri']
        print('try to record ', uri)
        threat = threading.Thread(target=record_thread,
                                  args=(record, self.config.read('padding_secs', 300)))
        self.record_threats[uri] = threat
        threat.start()
class SplPlugin(SplThread):
    '''UI handler plugin.

    Keeps the per-user movie list (templates, streams, timers, records) in
    `self.movielist`, persists it via JsonStorage, and translates socket events
    from the web UI into player/recorder events and back.
    '''
    # identifier used when registering event/query handlers and for storage
    plugin_id = 'uihandler'
    # human-readable name(s) announced as source
    plugin_names = ['UI Handler']

    def __init__(self, modref):
        ''' inits the plugin '''
        self.modref = modref
        # do the plugin specific initialisation first
        self.play_time = 0
        self.play_total_secs = 90 * 60
        # last known player state shown in the UI
        self.player_info = {
            'play': defaults.PLAYER_STATE_PLAY,
            'position': 0,
            'volume': 3,
            'playTime': '00:00',
            'remainingTime': '00:00'
        }
        # persistent per-user movie list, keyed by uuid
        self.movielist_storage = JsonStorage(self.plugin_id, 'runtime',
                                             "movielist.json",
                                             {'movielist': {}})
        self.movielist = self.movielist_storage.read('movielist', {})
        # guards movielist mutation between the event thread and _run()
        self.lock = threading.Lock()
        # at last announce the own plugin
        super().__init__(modref.message_handler, self)
        modref.message_handler.add_event_handler(self.plugin_id, 0,
                                                 self.event_listener)
        modref.message_handler.add_query_handler(self.plugin_id, 0,
                                                 self.query_handler)
        self.runFlag = True

    def event_listener(self, queue_event):
        ''' try to send simulated answers

        Central dispatcher for all UI socket events; each branch below handles
        one event type.  Always returns the event so other plugins can process
        it too.
        '''
        #print("uihandler event handler", queue_event.type, queue_event.user)
        if queue_event.type == '_join':
            # a client connected: send it the movie list
            self.send_home_movie_list(queue_event)
        if queue_event.type == defaults.MSG_SOCKET_EDIT_DELETE_REQUEST:
            uuid = queue_event.data['uuid']
            if uuid in self.movielist:  # does the entry uuid exist
                movie_list_entry = self.movielist[uuid]
                # is the user client of this entry?
                if queue_event.user in movie_list_entry['clients']:
                    del (movie_list_entry['clients'][queue_event.user])
                    # are there no more clients left?
                    if not movie_list_entry['clients']:
                        # remove the whole entry
                        del (self.movielist[uuid])
                    self.send_home_movie_list(queue_event)
                    self.movielist_storage.write('movielist', self.movielist)
        if queue_event.type == defaults.MSG_SOCKET_SELECT_PLAYER_DEVICE:
            # starts to play movie on device
            print("plays schnipsl {0} on device ".format(
                queue_event.data['uri']))
            self.user_message(queue_event.user, 'play_request', 'close')
            movie_info_list = self.modref.message_handler.query(
                Query(queue_event.user, defaults.QUERY_MOVIE_ID,
                      queue_event.data['uri']))
            if movie_info_list:
                uuid = self.get_movielist_uuid_by_movie_uri(
                    queue_event.user, queue_event.data['uri'])
                if uuid:
                    # movie is in movie_list, so it has a current_time time
                    current_time = self.movielist[uuid]['clients'][
                        queue_event.user]['current_time']
                    self.modref.message_handler.queue_event(
                        queue_event.user, defaults.PLAYER_PLAY_REQUEST, {
                            'user': queue_event.user,
                            'current_time': current_time,
                            'movie_info': movie_info_list[0],
                            'device': queue_event.data['timer_dev'],
                        })
        if queue_event.type == defaults.MSG_SOCKET_PLAYER_TIME:
            #self.play_time = queue_event.data['timer_pos'] * self.play_total_secs//100
            self.play_time = queue_event.data['timer_pos']
        if queue_event.type == defaults.MSG_SOCKET_HOME_PLAY_REQUEST:
            movie_uri = queue_event.data['uri']
            movie_info_list = self.modref.message_handler.query(
                Query(queue_event.user, defaults.QUERY_MOVIE_ID, movie_uri))
            if movie_info_list:
                uuid = self.get_movielist_uuid_by_movie_uri(
                    queue_event.user, movie_uri)
                if uuid:
                    # movie is in movie_list, so it has a current_time time
                    # NOTE(review): movie_info is assigned but movie_info_list[0]
                    # is what is actually sent below — confirm which is intended.
                    movie_info = self.movielist[uuid]['movie_info']
                    current_time = self.movielist[uuid]['clients'][
                        queue_event.user]['current_time']
                    self.modref.message_handler.queue_event(
                        queue_event.user,
                        defaults.PLAYER_PLAY_REQUEST_WITHOUT_DEVICE, {
                            'user': queue_event.user,
                            'current_time': current_time,
                            'movie_info': movie_info_list[0],
                        })
        if queue_event.type == defaults.MSG_SOCKET_EDIT_PLAY_ADD_REQUEST:
            self.update_movie_list(queue_event)
            self.user_message(queue_event.user, 'play_added', 'close')
        if queue_event.type == defaults.MSG_SOCKET_EDIT_RECORD_ADD_REQUEST:
            self.update_movie_list(queue_event, True)
            self.user_message(queue_event.user, 'record_added', 'close')
        if queue_event.type == defaults.MSG_SOCKET_EDIT_PLAY_REQUEST:
            # NOTE(review): update_movie_list() can return None (see
            # create_new_movie_list_item), which would make this tuple unpack
            # raise TypeError — verify the None path cannot occur here.
            uuid, movie_uri = self.update_movie_list(queue_event)
            if uuid:
                movie_info_list = self.modref.message_handler.query(
                    Query(queue_event.user, defaults.QUERY_MOVIE_ID,
                          movie_uri))
                if movie_info_list:
                    current_time = self.movielist[uuid]['clients'][
                        queue_event.user]['current_time']
                    self.modref.message_handler.queue_event(
                        queue_event.user,
                        defaults.PLAYER_PLAY_REQUEST_WITHOUT_DEVICE, {
                            'user': queue_event.user,
                            'current_time': current_time,
                            'movie_info': movie_info_list[0],
                        })
        if queue_event.type == defaults.MSG_SOCKET_EDIT_QUERY_AVAILABLE_SOURCES:
            available_items = self.modref.message_handler.query(
                Query(queue_event.user, defaults.QUERY_AVAILABLE_SOURCES,
                      None, unlimed_nr_of_results=True))
            available_items.sort()
            data = {
                'select_items': available_items,
                # keep only previously selected values that still exist
                'select_values': self.filter_select_values(
                    available_items, queue_event.data['select_source_values'])
            }
            self.modref.message_handler.queue_event(
                queue_event.user, defaults.MSG_SOCKET_MSG, {
                    'type':
                    defaults.MSG_SOCKET_EDIT_QUERY_AVAILABLE_SOURCES_ANSWER,
                    'config': data
                })
        if queue_event.type == defaults.MSG_SOCKET_EDIT_QUERY_AVAILABLE_PROVIDERS:
            available_items = self.modref.message_handler.query(
                Query(queue_event.user, defaults.QUERY_AVAILABLE_PROVIDERS,
                      queue_event.data, unlimed_nr_of_results=True))
            available_items.sort()
            data = {
                'select_items': available_items,
                'select_values': self.filter_select_values(
                    available_items,
                    queue_event.data['select_provider_values'])
            }
            self.modref.message_handler.queue_event(
                queue_event.user, defaults.MSG_SOCKET_MSG, {
                    'type':
                    defaults.MSG_SOCKET_EDIT_QUERY_AVAILABLE_PROVIDERS_ANSWER,
                    'config': data
                })
        if queue_event.type == defaults.MSG_SOCKET_EDIT_QUERY_AVAILABLE_CATEGORIES:
            available_items = self.modref.message_handler.query(
                Query(queue_event.user, defaults.QUERY_AVAILABLE_CATEGORIES,
                      queue_event.data, unlimed_nr_of_results=True))
            try:
                # object categories like from EPG can't be sorted
                available_items.sort()
            except:
                pass
            data = {
                'select_items': available_items,
                'select_values': self.filter_select_values(
                    available_items,
                    queue_event.data['select_category_values'])
            }
            self.modref.message_handler.queue_event(
                queue_event.user, defaults.MSG_SOCKET_MSG, {
                    'type':
                    defaults.MSG_SOCKET_EDIT_QUERY_AVAILABLE_CATEGORIES_ANSWER,
                    'config': data
                })
        if queue_event.type == defaults.MSG_SOCKET_EDIT_QUERY_AVAILABLE_MOVIES:
            # paged movie search: page index comes from the client, results
            # are fetched via QUERY_AVAILABLE_MOVIES
            query_start_page = 0
            if 'query_start_page' in queue_event.data:
                query_start_page = queue_event.data['query_start_page']
            movie_info_list = self.modref.message_handler.query(
                Query(queue_event.user, defaults.QUERY_AVAILABLE_MOVIES,
                      queue_event.data))
            if query_start_page < 1:
                prev_page = -1
            else:
                prev_page = query_start_page - 1
            # indicates that there are some more entries
            if len(movie_info_list) > defaults.MAX_QUERY_SIZE:
                next_page = query_start_page + 1
            else:
                next_page = -1
            self.modref.message_handler.queue_event(
                queue_event.user, defaults.MSG_SOCKET_MSG, {
                    'type':
                    defaults.MSG_SOCKET_EDIT_QUERY_AVAILABLE_MOVIES_ANSWER,
                    'config': {
                        'movie_info_list': movie_info_list,
                        'prev_page': prev_page,
                        'query_start_page': query_start_page,
                        'next_page': next_page
                    }
                })
        if queue_event.type == defaults.PLAYER_SAVE_STATE_REQUEST:
            movie_info = queue_event.data['movie_info']
            user = queue_event.user
            player_info = queue_event.data['player_info']
            self.handle_player_save_state_request(user, movie_info,
                                                  player_info)
        if queue_event.type == defaults.STREAM_ANSWER_PLAY_LIST:
            movie_uri = queue_event.data['uri']
            movie_info = queue_event.data['movie_info']
            for uuid, search_movie in self.movielist.items():
                # we must only update live streams, but not e.g. a timer
                if not search_movie['type'] == defaults.MOVIE_TYPE_STREAM:
                    continue
                # compare only source:provider (first two uri elements)
                short_search_movie_uri = ':'.join(
                    search_movie['movie_info']['uri'].split(':')[:2])
                short_movie_uri = ':'.join(movie_uri.split(':')[:2])
                if not short_search_movie_uri == short_movie_uri:
                    continue
                self.movielist[uuid]['movie_info'] = movie_info
                for user_name in search_movie['clients']:
                    self.update_live_movie_clip(user_name, uuid, movie_info)
        if queue_event.type == defaults.TIMER_RECORD_RESULT:
            movie_new_uri = queue_event.data['new_uri']
            movie_new_url = queue_event.data['new_url']
            uuid = queue_event.data['uuid']
            # NOTE(review): the recorder's deploy_record_result() sends a
            # 'sucess' key, not 'record_state' — confirm who emits this event
            # with a 'record_state' field.
            record_state = queue_event.data['record_state']
            if uuid in self.movielist:
                if record_state == defaults.Record_States.RECORDING_FINISHED:
                    # turn the timer entry into a record entry pointing at the file
                    record_movie = self.movielist[uuid]
                    record_movie['type'] = defaults.MOVIE_TYPE_RECORD
                    record_movie['movie_info']['uri'] = movie_new_uri
                    record_movie['movie_info']['url'] = movie_new_url
                    for user_name in self.movielist[uuid]['clients']:
                        self.modref.message_handler.queue_event(
                            user_name, defaults.MSG_SOCKET_MSG, {
                                'type':
                                defaults.MSG_SOCKET_HOME_MOVIE_INFO_LIST,
                                'config': self.prepare_movie_list(user_name)
                            })
                    self.movielist_storage.write('movielist', self.movielist)
                else:
                    # recording failed: keep the entry, but mark it damaged
                    self.movielist[uuid]['damaged'] = True
        if queue_event.type == defaults.MSG_SOCKET_PLAYER_STOP_AND_RECORD or queue_event.type == defaults.MSG_SOCKET_HOME_RECORD_REQUEST:
            if queue_event.type == defaults.MSG_SOCKET_PLAYER_STOP_AND_RECORD:
                # stop playback first, then create the record request
                self.modref.message_handler.queue_event(
                    queue_event.user, defaults.MSG_SOCKET_PLAYER_KEY,
                    {'keyid': 'stop'})
                print('Stop play for user {0}'.format(queue_event.user))
            uuid = self.get_movielist_uuid_by_movie_uri(
                queue_event.user, queue_event.data['uri'])
            # empty query: this record entry is not based on a search
            query = {
                'category_items': [],
                'category_values': [],
                'description': '',
                'name': '',
                'provider_items': [],
                'provider_values': [],
                'source_items': [],
                'source_values': [],
                'title': ''
            }
            with self.lock:
                self.create_new_movie_list_item(queue_event.user, None,
                                                queue_event.data['uri'], uuid,
                                                query, True)
            self.modref.message_handler.queue_event(
                queue_event.user, defaults.MSG_SOCKET_MSG, {
                    'type': defaults.MSG_SOCKET_HOME_MOVIE_INFO_LIST,
                    'config': self.prepare_movie_list(queue_event.user)
                })
        # for further pocessing, do not forget to return the queue event
        return queue_event

    def query_handler(self, queue_event, max_result_count):
        '''Answers queries directed at this plugin; returns a list of results.'''
        # print("ui handler query handler", queue_event.type, queue_event.user, max_result_count)
        if queue_event.type == defaults.QUERY_VALID_MOVIE_RECORDS:
            return self.query_valid_movie_records(queue_event.params['source'])
        return []

    def _run(self):
        ''' starts the server

        Background loop: roughly once per minute refresh live-stream playlists
        and (re-)issue timer record requests.
        '''
        while self.runFlag:
            act_secs = int(time.time())
            # time until the next full minute
            # NOTE(review): act_secs % 60 is the seconds *since* the last full
            # minute, not until the next one — probably meant 60 - act_secs % 60.
            remaining_secs = act_secs % 60
            if remaining_secs:
                time.sleep(remaining_secs)
            with self.lock:
                self.request_stream_playlist()
                self.timer_record_request()

    def _stop(self):
        # signals _run() to leave its loop
        self.runFlag = False

    # ------ plugin specific routines
    def prepare_movie_list(self, user_name):
        ''' prepares the list of the user_name movies to display on the client in the main window '''
        res = {'templates': [], 'records': [], 'streams': [], 'timers': []}
        for uuid, movie_list_item in self.movielist.items():
            # only entries this user subscribed to
            if not user_name in movie_list_item['clients']:
                continue
            if movie_list_item['type'] == defaults.MOVIE_TYPE_TEMPLATE:
                res['templates'].append({
                    'uuid': uuid,
                    'icon': 'mdi-magnify',
                    'iconClass': 'red lighten-1 white--text',
                    'query': movie_list_item['query'],
                    'movie_info': movie_list_item['movie_info'],
                    'current_time': ''
                })
            if movie_list_item['type'] == defaults.MOVIE_TYPE_RECORD_TEMPLATE:
                res['templates'].append({
                    'uuid': uuid,
                    'icon': 'mdi-record-rec',
                    'iconClass': 'red lighten-1 white--text',
                    'query': movie_list_item['query'],
                    'movie_info': movie_list_item['movie_info'],
                    'current_time': ''
                })
            if movie_list_item['type'] == defaults.MOVIE_TYPE_RECORD:
                # records remember the per-user playback position
                user_current_time = str(
                    movie_list_item['clients'][user_name]['current_time'])
                res['records'].append({
                    'uuid': uuid,
                    'icon': 'mdi-play-pause',
                    'iconClass': 'blue white--text',
                    'query': movie_list_item['query'],
                    'movie_info': movie_list_item['movie_info'],
                    'current_time': user_current_time
                })
            if movie_list_item['type'] == defaults.MOVIE_TYPE_STREAM:
                res['streams'].append({
                    'uuid': uuid,
                    'icon': 'mdi-radio-tower',
                    'iconClass': 'green lighten-1 white--text',
                    'query': movie_list_item['query'],
                    'movie_info': movie_list_item['movie_info'],
                    'current_time': ''
                })
            if movie_list_item['type'] == defaults.MOVIE_TYPE_TIMER:
                res['timers'].append({
                    'uuid': uuid,
                    'icon': 'mdi-clock',
                    'iconClass': 'amber white--text',
                    'query': movie_list_item['query'],
                    'movie_info': movie_list_item['movie_info'],
                    'current_time': ''
                })
        return res

    def update_movie_list(self, queue_event, record_request=False):
        '''Creates/updates a movie-list entry from an edit request.

        If the request carries a quicksearch name, the matching quicksearch
        template entry is created or refreshed first; then the actual entry is
        created via create_new_movie_list_item().  Returns that method's
        result ((uuid, uri) tuple or None).
        '''
        with self.lock:
            # is it a quick search? Then update the quicksearch data first
            quick_search_name = queue_event.data['query']['name']
            if quick_search_name:
                quick_search_entry = None
                # case-insensitive match on name, restricted to this user
                for movie_list_entry in self.movielist.values():
                    if movie_list_entry['query'] and movie_list_entry['query'][
                            'name'].lower() == quick_search_name.lower(
                            ) and queue_event.user in movie_list_entry[
                                'clients']:
                        quick_search_entry = movie_list_entry
                        break
                if not quick_search_entry:
                    quick_search_entry = {
                        'clients': {},
                    }
                    quick_search_entry['clients'][queue_event.user] = {
                        'current_time': 0
                    }
                    # new entry, so it gets its own identifier
                    quick_search_entry_id = str(uuid.uuid4())
                    self.movielist[quick_search_entry_id] = quick_search_entry
                if record_request:
                    quick_search_entry[
                        'type'] = defaults.MOVIE_TYPE_RECORD_TEMPLATE
                else:
                    quick_search_entry['type'] = defaults.MOVIE_TYPE_TEMPLATE
                quick_search_entry['query'] = queue_event.data['query']
                quick_search_entry['movie_info'] = MovieInfo(
                    '0',  # url
                    '',  # mime
                    quick_search_name,  # title
                    '',  # category
                    '',  # source
                    '',  # source_type
                    '',  # provider
                    '',  # timestamp
                    '',  # duration
                    ''  # description
                )
            return self.create_new_movie_list_item(
                queue_event.user, quick_search_name,
                queue_event.data['movie_uri'], queue_event.data['uuid'],
                queue_event.data['query'], record_request)

    def create_new_movie_list_item(self, user, quick_search_name, uri,
                                   movie_list_uuid, query, record_request):
        '''Creates or updates one movie-list entry for `uri`.

        Returns (movie_list_uuid, movie_uri) on success, or None if the movie
        is unknown or an endless live stream was requested for recording.
        Caller must hold self.lock.
        '''
        movie_list = self.modref.message_handler.query(
            Query(user, defaults.QUERY_MOVIE_ID, uri))
        if movie_list:
            if not movie_list[0]['duration'] and record_request:
                # if the duration is 0, then we can't record it, as this indicates an endless life stream
                return
            # TODO
            # is it a live movie? Then it is attached as a live schnipsl
            # is it a named quick-search? Does it already exist or is it new?
            # is it a normal stream?
            # is it a record entry?
            # an existing entry was edited, and it was not a quicksearch
            if movie_list_uuid in self.movielist and not quick_search_name and not record_request:
                print("Movie list Eintrag existiert schon")
                movie_list_entry = self.movielist[movie_list_uuid]
            else:
                movie_list_entry = {'clients': {}, 'damaged': False}
                movie_list_entry['clients'][user] = {'current_time': 0}
                # new entry, so it gets its own identifier
                movie_list_uuid = str(uuid.uuid4())
                self.movielist[movie_list_uuid] = movie_list_entry
            if record_request:
                movie_list_entry['type'] = defaults.MOVIE_TYPE_TIMER
            else:
                movie_list_entry['type'] = movie_list[0]['source_type']
            # we need to make a copy here, because in case of a new created
            # quicksearch item the quicksearch query data and this normal item
            # both points to the same query object, which causes an error
            # (name="") in the quicksearch item when we remove the name here...
            movie_list_entry['query'] = copy.copy(query)
            # as this is not a quicksearch entry anymore, we must make sure
            # that it does not contain a quicksearch name anymore
            movie_list_entry['query']['name'] = ''
            movie_list_entry['movie_info'] = movie_list[0]
            self.movielist_storage.write('movielist', self.movielist)
            return movie_list_uuid, movie_list[0]['uri']
        else:
            self.movielist_storage.write('movielist', self.movielist)
            return None

    def filter_select_values(self, value_list, actual_values):
        '''returns list of the values of actual_values, which are included in value list '''
        res = []
        for value in actual_values:
            if value in value_list:
                res.append(value)
        return res

    def update_single_movie_clip(self, user_name, movie_list_uuid):
        '''Pushes one entry's current state (position + movie info) to one user.'''
        movie_list_item = self.movielist[movie_list_uuid]
        current_time = movie_list_item["clients"][user_name]['current_time']
        self.modref.message_handler.queue_event(
            user_name, defaults.MSG_SOCKET_MSG, {
                'type': defaults.MSG_SOCKET_HOME_MOVIE_INFO_UPDATE,
                'config': {
                    'uuid': movie_list_uuid,
                    'current_time': current_time,
                    'movie_info': movie_list_item['movie_info']
                }
            })

    def update_live_movie_clip(self, user_name, movie_list_uuid,
                               live_movie_info):
        '''Pushes a live-stream entry update; current_time is the remaining seconds.'''
        current_time = live_movie_info['duration'] - \
            int(time.time())+int(live_movie_info['timestamp'])
        self.modref.message_handler.queue_event(
            user_name, defaults.MSG_SOCKET_MSG, {
                'type': defaults.MSG_SOCKET_HOME_MOVIE_INFO_UPDATE,
                'config': {
                    'uuid': movie_list_uuid,
                    'current_time': current_time,
                    'movie_info': live_movie_info
                }
            })

    def send_home_movie_list(self, original_queue_event):
        '''Sends the complete prepared movie list to the event's user.'''
        #new_event = copy.copy(original_queue_event)
        #new_event.type = defaults.MSG_SOCKET_MSG
        # new_event.data = {
        # 'type': defaults.MSG_SOCKET_HOME_MOVIE_INFO_LIST, 'config': self.prepare_movie_list(original_queue_event.user)}
        #print("new_event", new_event.data['config'])
        # self.modref.message_handler.queue_event_obj(new_event)
        self.modref.message_handler.queue_event(
            original_queue_event.user, defaults.MSG_SOCKET_MSG, {
                'type': defaults.MSG_SOCKET_HOME_MOVIE_INFO_LIST,
                'config': self.prepare_movie_list(original_queue_event.user)
            })

    def get_movielist_uuid_by_movie_uri(self, user, movie_uri):
        '''Returns the movielist uuid for `movie_uri` if `user` subscribes to it, else None.'''
        for uuid, search_movie in self.movielist.items():
            if not search_movie['movie_info']['uri'] == movie_uri:
                continue
            if not user in search_movie['clients']:
                continue
            return uuid
        return None

    def handle_player_save_state_request(self, user, movie_info, player_info):
        '''Stores the player position for this user and broadcasts the update.

        NOTE(review): player_info is accessed via attribute (.current_time)
        here, unlike the dict-style access used elsewhere — confirm its type.
        '''
        uuid = self.get_movielist_uuid_by_movie_uri(user, movie_info['uri'])
        if uuid:
            search_movie = self.movielist[uuid]
            search_movie['clients'][user][
                'current_time'] = player_info.current_time
            self.update_single_movie_clip(user, uuid)
            self.movielist_storage.write('movielist', self.movielist)

    def request_stream_playlist(self):
        '''Asks stream providers for fresh playlist info of all stream entries.'''
        for movie_list_item in self.movielist.values():
            if movie_list_item['type'] == defaults.MOVIE_TYPE_STREAM:
                self.modref.message_handler.queue_event(
                    None, defaults.STREAM_REQUEST_PLAY_LIST,
                    {'uri': movie_list_item['movie_info']['uri']})

    def timer_record_request(self):
        '''Re-issues record requests for all undamaged timer entries.

        NOTE(review): entries loaded from older movielist.json files may lack
        the 'damaged' key, which would raise KeyError here — verify.
        '''
        for uuid, movie_list_item in self.movielist.items():
            if movie_list_item[
                    'type'] == defaults.MOVIE_TYPE_TIMER and not movie_list_item[
                        'damaged']:
                self.modref.message_handler.queue_event(
                    None, defaults.TIMER_RECORD_REQUEST, {
                        'uri': movie_list_item['movie_info']['uri'],
                        'uuid': uuid
                    })

    def query_valid_movie_records(self, recorder_source):
        ''' tells the recorder, which records are still in use

        to let the recorder delete the unused ones
        '''
        res = []
        for movie_list_item in self.movielist.values():
            uri = movie_list_item['movie_info']['uri']
            # the uri's first element names the providing plugin
            source = uri.split(':')[0]
            if source == recorder_source:
                res.append(uri)
        return res
class SplPlugin(SplThread):
    '''SAT EPG plugin.

    Periodically harvests DVB EPG data via the external `epg_grap.sh` script,
    caches it in epgdata.json, and answers provider/category/movie queries.
    The harvested entries are published under the name of the configured
    stream source plugin (default 'SatIP Live'), not under this plugin's own
    name, so the UI plays them through that stream plugin.
    '''
    # identifier used when registering event/query handlers
    plugin_id = 'satepg'
    # human-readable name(s) announced as source
    plugin_names = ['SAT EPG']

    def __init__(self, modref):
        ''' creates the object '''
        self.modref = modref
        super().__init__(modref.message_handler, self)
        modref.message_handler.add_event_handler(self.plugin_id, 0,
                                                 self.event_listener)
        modref.message_handler.add_query_handler(self.plugin_id, 0,
                                                 self.query_handler)
        self.runFlag = True
        # plugin specific stuff
        self.origin_dir = os.path.dirname(__file__)
        self.config = JsonStorage(
            os.path.join(self.origin_dir, "config.json"), {
                'epgloops': 1,
                'epgtimeout': 60,
                'stream_source': 'SatIP Live'
            })
        self.stream_source = self.config.read(
            'stream_source'
        )  # this defines who is the real data provider for the entries found in the EPG data
        self.epgbuffer_file_name = os.path.join(self.origin_dir,
                                                "epgbuffer.ts")
        # handle of the currently running epg_grap subprocess, if any
        self.process = None
        # persistent EPG cache: {provider: {'requested','url','epg_data','lastmodified'}}
        self.epg_storage = JsonStorage(
            os.path.join(self.origin_dir, "epgdata.json"), {'epgdata': {}})
        self.all_EPG_Data = self.epg_storage.read('epgdata')
        self.providers = set()
        self.categories = set()
        # {plugin_name: {uri: Movie}} rebuilt from the EPG cache
        self.movies = {}
        # {provider: [timestamp-sorted wrapper objects]} for playlist lookups
        self.timeline = {}
        # guards all_EPG_Data/movies/timeline between event and _run threads
        self.lock = Lock()

    def event_listener(self, queue_event):
        ''' react on events '''
        #print("event handler", self.plugin_id, queue_event.type, queue_event.user)
        if queue_event.type == defaults.STREAM_REQUEST_PLAY_LIST:
            self.stream_answer_play_list(queue_event)
        return queue_event  # dont forget the event for further pocessing...

    def query_handler(self, queue_event, max_result_count):
        ''' answers with list[] of results '''
        # print("query handler", self.plugin_id, queue_event.type, queue_event.user, max_result_count)
        if queue_event.type == defaults.QUERY_AVAILABLE_SOURCES:
            return self.plugin_names
        if queue_event.type == defaults.QUERY_AVAILABLE_PROVIDERS:
            res = []
            for plugin_name in self.plugin_names:
                # this plugin is one of the wanted
                if plugin_name in queue_event.params['select_source_values']:
                    if plugin_name == self.plugin_names[0]:
                        for provider in self.providers:
                            if max_result_count > 0:
                                res.append(provider)
                                max_result_count -= 1
                            else:
                                return res  # maximal number of results reached
            return res
        if queue_event.type == defaults.QUERY_AVAILABLE_CATEGORIES:
            res = []
            for plugin_name in self.plugin_names:
                # this plugin is one of the wanted
                if plugin_name in queue_event.params['select_source_values']:
                    for category in self.categories:
                        if max_result_count > 0:
                            res.append(category)
                            max_result_count -= 1
                        else:
                            return res  # maximal number of results reached
            return res
        if queue_event.type == defaults.QUERY_MOVIE_ID:
            # params is a full uri "source:provider:id"; movies is keyed by source
            elements = queue_event.params.split(':')
            try:
                return [self.movies[elements[0]][queue_event.params]]
            except:
                return []
        if queue_event.type == defaults.QUERY_AVAILABLE_MOVIES:
            res = []
            titles = queue_event.params['select_title'].split()
            # descriptions=queue_event.params['select_description'].split()
            # whole-word, case-insensitive match for each description word
            description_regexs = [
                re.compile(r'\b{}\b'.format(description), re.IGNORECASE)
                for description in
                queue_event.params['select_description'].split()
            ]
            for plugin_name in self.plugin_names:
                # this plugin is one of the wanted
                if plugin_name in queue_event.params['select_source_values']:
                    # now we need to do a dirty trick, because in our movies
                    # the entries are not store be the correct plugin name,
                    # but the real data source instead, which is slighty confusing,,
                    plugin_name = self.stream_source
                    # are there any movies stored for this plugin?
                    if plugin_name in self.movies:
                        with self.lock:
                            for movie in self.movies[plugin_name].values():
                                if movie.provider in queue_event.params[
                                        'select_provider_values']:
                                    # in case any search criteria is given
                                    if titles or description_regexs:
                                        if titles:
                                            found = False
                                            for title in titles:
                                                if title.lower(
                                                ) in movie.title.lower():
                                                    found = True
                                                if title.lower(
                                                ) in movie.category.lower():
                                                    found = True
                                            if not found:
                                                continue
                                        if description_regexs:
                                            found = False
                                            for description_regex in description_regexs:
                                                if re.search(
                                                        description_regex,
                                                        movie.description):
                                                    found = True
                                            if not found:
                                                continue
                                    if max_result_count > 0:
                                        movie_info = MovieInfo.movie_to_movie_info(
                                            movie, '')
                                        movie_info['recordable'] = True
                                        res.append(movie_info)
                                        max_result_count -= 1
                                    else:
                                        return res  # maximal number of results reached
            return res
        return []

    def _run(self):
        ''' starts the server

        Background loop: every 10 seconds check whether EPG data needs to be
        (re-)harvested or aged out.
        '''
        tick = 0
        while self.runFlag:
            with self.lock:
                self.check_for_updates()
            time.sleep(10)

    def _stop(self):
        # signals _run() to leave its loop
        self.runFlag = False

    # ------ plugin specific routines
    def getAbsolutePath(self, file_name):
        '''Returns file_name resolved relative to this plugin's directory.'''
        return os.path.join(self.origin_dir, file_name)

    def check_for_updates(self):
        '''Harvests requested/stale EPG data, ages out old entries, and rebuilds the indexes.

        Caller must hold self.lock.  A provider is refreshed when it was
        requested and its data is older than an hour (or empty); providers not
        updated for 24 h are dropped.  Afterwards providers/categories/movies/
        timeline are rebuilt from the cache, unless nothing changed.
        '''
        # check for updates:
        new_epg_loaded = False
        actual_time = time.time()
        for provider in self.all_EPG_Data:
            if self.all_EPG_Data[provider]['requested']:
                self.all_EPG_Data[provider]['requested'] = False
                if self.all_EPG_Data[provider][
                        'lastmodified'] < actual_time - 60 * 60 or not self.all_EPG_Data[
                            provider]['epg_data']:
                    epg_details = self.get_epg_from_linvdr(
                        provider, self.all_EPG_Data[provider]['url'])
                    if epg_details:
                        new_epg_loaded = True
                        self.all_EPG_Data[provider][
                            'lastmodified'] = time.time()
                        for start_time, movie_info in epg_details.items():
                            # refresh or add data
                            self.all_EPG_Data[provider]['epg_data'][
                                start_time] = movie_info
            # age out entries whose movie ended at least one hour ago
            movie_infos_to_delete = []
            for start_time, movie_info in self.all_EPG_Data[provider][
                    'epg_data'].items():
                if int(start_time) + movie_info[
                        'duration'] < actual_time - 60 * 60:
                    movie_infos_to_delete.append(start_time)
            for start_time in movie_infos_to_delete:
                del (self.all_EPG_Data[provider]['epg_data'][start_time])
                new_epg_loaded = True
        for provider_reference in list(self.all_EPG_Data.keys()):
            if self.all_EPG_Data[provider_reference][
                    'lastmodified'] < actual_time - 24 * 60 * 60:
                # no update the last 24 h? remove it..
                del (self.all_EPG_Data[provider_reference])
                new_epg_loaded = True
        if self.providers and not new_epg_loaded:
            # if this is not the first call (self.providers contains already
            # data), but no new epg data arrived, keep the existing indexes
            return
        self.epg_storage.write('epgdata', self.all_EPG_Data)
        # refill the internal lists
        self.providers = set()
        self.categories = set()
        # we'll use the name of the stream source plugin instead the name of the EPG plugin itself
        # plugin_name = self.plugin_names[0]
        plugin_name = self.stream_source
        if not plugin_name in self.movies:
            self.movies[plugin_name] = {}
        for provider, movie_data in self.all_EPG_Data.items():
            self.providers.add(provider)
            self.timeline[provider] = []
            for movie_info in movie_data['epg_data'].values():
                # small anonymous wrapper object carrying timestamp + movie_info
                self.timeline[provider].append(
                    type(
                        '', (object, ), {
                            'timestamp': movie_info['timestamp'],
                            'movie_info': movie_info
                        })())
                self.movies[plugin_name][movie_info['uri']] = Movie(
                    source=plugin_name,
                    source_type=defaults.MOVIE_TYPE_STREAM,
                    provider=provider,
                    category=movie_info['category'],
                    title=movie_info['title'],
                    timestamp=movie_info['timestamp'],
                    duration=movie_info['duration'],
                    description=movie_info['description'],
                    url=movie_data['url'])
                self.categories.add(movie_info['category'])
        for epg_list in self.timeline.values():
            epg_list.sort(key=self.get_timestamp)

    def search_channel_info(self, channel_epg_name):
        '''Returns the stored channel info dict matching channel_epg_name, if any.

        NOTE(review): self.channels_info is not initialized in this class's
        __init__ — confirm where it is set before this is called.
        '''
        channels_info = self.channels_info.read('channels_info')
        if channels_info:
            for channel_info in channels_info:
                if channel_info['channel_epg_name'] == channel_epg_name:
                    return channel_info

    def get_epg_from_linvdr(self, provider, url):
        '''Runs epg_grap.sh against `url` and returns {start_time: movie_info}.

        Rewrites the SAT>IP url's `pids` query parameter to the SDT/EIT pids,
        launches the grabber script, parses its JSON output and registers each
        entry as a Movie under the stream-source plugin name.  Returns None on
        failure.
        '''
        # reduce the pids to the ones containing SDT (0x11) and EIT (0x12)
        print('original URL:', url)
        url_st = urlparse(url)
        queries = url_st.query
        new_queries = ""
        if queries:
            for eq in queries.split("&"):
                key = eq.split("=")[0]
                value = eq.split("=")[1]
                if key == 'pids':
                    value = "0,17,18"
                new_queries += key + "=" + value + "&"
            new_queries = new_queries.strip("&")
        url = urlunparse((
            url_st.scheme,
            url_st.netloc,
            url_st.path,
            url_st.params,
            new_queries,
            url_st.fragment,
        ))
        # process arguments
        attr = [
            os.path.join(self.origin_dir, 'epg_grap.sh'), url, provider,
            str(self.config.read('epgloops')),
            str(self.config.read('epgtimeout'))
        ]
        print("epg_grap started", provider, url, repr(attr))
        try:
            self.process = subprocess.Popen(attr,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
            # if epg_grap won't exit, try to terminate its process after 600 s
            cleaner = Timer(600, self.cleanProcess)
            cleaner.start()
            epg_out, err = self.process.communicate()
            #self.process.wait() # oops... not needed? harmless!
            cleaner.cancel()
            if err:
                print("epg_grap ended with an error:\n%s" % (err))
            else:
                print("epg_grap' ended")
            epg_json_string = epg_out.decode()
            epg_json = json.loads(epg_json_string)
            result = {}
            count = 0
            for json_movie in epg_json['details'].values():
                start = json_movie['unixTimeBegin']
                stop = json_movie['unixTimeEnd']
                if json_movie['title']:
                    title = json_movie['title'] + ' - ' + json_movie['name']
                else:
                    title = json_movie['name']
                desc = json_movie['description']
                category = ''
                count += 1
                # we'll use the name of the stream source plugin instead the name of the EPG plugin itself
                # plugin_name = self.plugin_names[0]
                plugin_name = self.stream_source
                self.providers.add(provider)
                self.categories.add(category)
                new_movie = Movie(source=plugin_name,
                                  source_type=defaults.MOVIE_TYPE_STREAM,
                                  provider=provider,
                                  category=category,
                                  title=title,
                                  timestamp=str(int(start)),
                                  duration=stop - start,
                                  description=desc,
                                  url=url)
                new_movie.add_stream('ts', '', url)
                if not plugin_name in self.movies:
                    self.movies[plugin_name] = {}
                self.movies[plugin_name][new_movie.uri()] = new_movie
                movie_info = MovieInfo.movie_to_movie_info(
                    new_movie, category)
                movie_info['recordable'] = True
                result[start] = movie_info
            print("epg loaded, {0} entries".format(count))
            return result
        except Exception as ex:
            print("epg_grap could not be started. Error: %s" % (ex))
        return

    def get_timestamp(self, elem):
        '''helper function for the array sort function '''
        return elem.timestamp

    def string_to_timestamp(self, timestring):
        '''Converts a "%Y%m%d%H%M%S %z" string to a unix timestamp ('' for empty input).'''
        if timestring:
            # read https://stackoverflow.com/a/2956997 to understand why
            # timegm() is used insted of mktime()!
            return calendar.timegm(
                datetime.datetime.strptime(timestring,
                                           "%Y%m%d%H%M%S %z").timetuple())
        else:
            return ''

    def stream_answer_play_list(self, queue_event):
        '''Answers a STREAM_REQUEST_PLAY_LIST event with now/next EPG info.

        For an unknown provider the harvest is scheduled ('requested': True);
        the reply combines the currently running entry (title/uri) with the
        following one (shown as category/timestamp), or a '-' placeholder when
        no matching timeline entry exists.
        '''
        uri = queue_event.data['uri']
        uri_elements = uri.split(':')
        source = uri_elements[0]
        # only handle uris belonging to our stream source
        if source != self.stream_source:
            return queue_event
        provider = uri_elements[1]
        if not provider in self.all_EPG_Data:
            # first time we see this provider: discover its url and schedule a harvest
            movie_info_list = self.modref.message_handler.query(
                Query(None, defaults.QUERY_MOVIE_ID,
                      source + ':' + provider + ':0'))
            if movie_info_list:
                movie = movie_info_list[0]
                url = movie.url
                with self.lock:
                    self.all_EPG_Data[provider] = {
                        'requested': True,
                        'url': url,
                        'epg_data': {},
                        'lastmodified': 0
                    }
        else:
            self.all_EPG_Data[provider]['requested'] = True
        time_stamp = time.time()
        try:
            epg_list = []
            if provider in self.timeline:
                epg_list = self.timeline[provider]
            nr_of_entries = len(epg_list)
            # advance to the first entry starting after now
            i = 0
            while i < nr_of_entries and time_stamp > int(
                    epg_list[i].timestamp):
                i += 1
            if i < nr_of_entries and i > 0 and time_stamp < int(
                    epg_list[i - 1].timestamp) + int(
                        epg_list[i - 1].movie_info['duration']):
                # we found an entry: previous one is running now, entry i is next
                first_movie_info = epg_list[i - 1].movie_info
                second_movie_info = epg_list[i].movie_info
                combined_movie_info = MovieInfo(
                    uri=first_movie_info['uri'],
                    title=first_movie_info['title'],
                    category=second_movie_info['title'],
                    provider=first_movie_info['provider'],
                    timestamp=second_movie_info['timestamp'],
                    duration=0,
                    # description=first_movie_info['description'],
                    query=first_movie_info['query'])
                combined_movie_info['recordable'] = True
            else:
                # no EPG info available yet: placeholder entry
                combined_movie_info = MovieInfo(
                    uri=':'.join([self.stream_source, provider, '0']),
                    title='-',
                    category='',
                    provider=provider,
                    timestamp=time_stamp,
                    duration=0,
                    # description='',
                    query=None)
                combined_movie_info['recordable'] = False
            self.modref.message_handler.queue_event(
                None, defaults.STREAM_ANSWER_PLAY_LIST, {
                    'uri': queue_event.data['uri'],
                    'movie_info': combined_movie_info
                })
        except Exception as e:
            print('unknown provider', provider, str(e))

    def cleanProcess(self):
        '''Timer callback: terminates (and if needed kills) a hung epg_grap process.

        NOTE(review): self.running is not initialized anywhere in this class —
        confirm whether anything reads it.
        '''
        try:
            if not self.process == None:
                self.process.terminate()
                time.sleep(3)
                if not self.process == None:
                    self.process.kill()
                    print("Curl had to be killed. R.I.P.")
                else:
                    print("Curl had to be terminated.")
        except:
            print("Curl termination error, process might be running")
            if not self.process == None:
                print("Curl: termination may have failed")
        self.running = 0
class SplPlugin(StreamChannel):
    # FIX: this attribute was commented out while the code below still used the
    # bare name `plugin_id`, which is undefined at function scope and raised
    # NameError when the plugin was constructed. Restore the attribute and
    # access it via `self.plugin_id`, like the sibling plugins do.
    plugin_id = 'channels_satip'
    plugin_names = ['SatIP Live']

    def __init__(self, modref):
        ''' inits the plugin '''
        # do the plugin specific initialisation first
        self.origin_dir = os.path.dirname(__file__)
        self.config = JsonStorage(
            self.plugin_id, 'backup', "config.json", {
                'channel_list_urls': [
                    {
                        'url': 'file:///Astra_19.2.xspf',
                        'type': 'xspf',
                        'scheme': 'http',
                        'netloc': '192.168.1.131'
                    },
                    {
                        'url': 'file:///ASTRA_19_2E.m3u',
                        'type': 'm3u',
                        'scheme': '',
                        'netloc': ''
                    },
                ]
            })
        # at last announce the own plugin
        super().__init__(modref)
        modref.message_handler.add_event_handler(
            self.plugin_id, 0, self.event_listener)
        modref.message_handler.add_query_handler(
            self.plugin_id, 0, self.query_handler)

    def add_movie(self, provider, full_url):
        '''Registers one live-stream MovieInfo for `provider` under this
        plugin's display name.'''
        plugin_name = self.plugin_names[0]
        source_type = defaults.MOVIE_TYPE_STREAM
        self.providers.add(provider)
        new_movie = MovieInfo(
            url=full_url,
            mime='video/MP2T',
            title=provider + ' Live',
            category='live',
            source=plugin_name,
            source_type=source_type,
            provider=provider,
            timestamp=0,
            duration=0,
            description='',
            # we have to handmade the uri here to not have the title crc32 hash as part of it
            uri=':'.join([plugin_name, provider, '0']))
        if not plugin_name in self.movies:
            self.movies[plugin_name] = {}
        self.movies[plugin_name][new_movie['uri']] = new_movie

    def load_xspf(self, req, scheme, netloc):
        '''Parses an XSPF playlist HTTP response and adds one movie per track.

        `scheme`/`netloc`, when non-empty, override the corresponding parts of
        each track's own location URL (to redirect onto the SAT>IP receiver).
        '''
        try:
            root = xmltodict.parse(req.text)
            for track in root['playlist']['trackList']['track']:
                provider = track['title']
                #print (track['album'])
                location = track['location']
                url_st = urlparse(location)
                if scheme:
                    this_scheme = scheme
                else:
                    this_scheme = url_st.scheme
                if netloc:
                    this_netloc = netloc
                else:
                    this_netloc = url_st.netloc
                full_url = urlunparse((
                    this_scheme,
                    this_netloc,
                    url_st.path,
                    url_st.params,
                    url_st.query,
                    url_st.fragment,
                ))
                # print(full_url)
                self.add_movie(provider, full_url)
        except Exception as e:
            print(str(e))

    def load_m3u(self, req, scheme, netloc):
        '''Parses an M3U playlist HTTP response and adds one movie per entry.

        An #EXTINF: line supplies the provider name; the following non-empty
        line supplies the URL. scheme/netloc behave as in load_xspf.
        '''
        try:
            is_provider_line = False
            for line in req.iter_lines(decode_unicode=True):
                # NOTE(review): with decode_unicode=True requests may already
                # yield str; .decode() then raises and the element is skipped
                # by the except below — confirm against the file:// adapter.
                line = line.decode('utf-8').strip()
                try:
                    if line.upper().startswith('#EXTINF:'):
                        provider = line.split(',', maxsplit=1)[1]
                        is_provider_line = True
                    else:
                        if not is_provider_line:
                            continue
                        if line:  # if not eol
                            is_provider_line = False
                            url_st = urlparse(line)
                            if scheme:
                                this_scheme = scheme
                            else:
                                this_scheme = url_st.scheme
                            if netloc:
                                this_netloc = netloc
                            else:
                                this_netloc = url_st.netloc
                            full_url = urlunparse((
                                this_scheme,
                                this_netloc,
                                url_st.path,
                                url_st.params,
                                url_st.query,
                                url_st.fragment,
                            ))
                            # print(full_url)
                            self.add_movie(provider, full_url)
                except Exception as e:
                    print('mailformed m3u element {0}'.format(str(e)))
        except Exception as e:
            print(str(e))

    def loadChannels(self):
        '''Fetches every configured channel list URL and dispatches it to the
        matching parser (xspf or m3u).'''
        for channel_info in self.config.read('channel_list_urls'):
            requests_session = requests.session()
            # allow file:// URLs in addition to http(s)
            requests_session.mount('file://', LocalFileAdapter())
            #req = requests_session.get(full_url)
            req = requests_session.get(channel_info['url'])
            if channel_info['type'] == 'xspf':
                self.load_xspf(req, channel_info['scheme'],
                               channel_info['netloc'])
            if channel_info['type'] == 'm3u':
                self.load_m3u(req, channel_info['scheme'],
                              channel_info['netloc'])
class SplPlugin(StreamChannel):
    plugin_id = 'channels_satip'
    plugin_names = ['SatIP Live']

    def __init__(self, modref):
        ''' creates the simulator '''
        super().__init__(modref)
        modref.message_handler.add_event_handler(
            self.plugin_id, 0, self.event_listener)
        modref.message_handler.add_query_handler(
            self.plugin_id, 0, self.query_handler)
        # plugin specific stuff
        self.origin_dir = os.path.dirname(__file__)
        self.config = JsonStorage(
            os.path.join(self.origin_dir, "config.json"),
            {
                'channel_file': 'Astra_19.2.xspf',
                'scheme': 'http',
                'netloc': '192.168.1.99'
            }
        )

    #------ plugin specific routines
    def loadChannels(self):
        '''Reads the configured local XSPF channel file and registers one live
        Movie per track, rewriting each track URL onto the configured
        scheme/netloc of the SAT>IP receiver.'''
        source_name = self.plugin_names[0]
        stream_type = defaults.MOVIE_TYPE_STREAM
        try:
            channel_path = os.path.join(self.origin_dir,
                                        self.config.read('channel_file'))
            with open(channel_path) as fd:
                playlist = xmltodict.parse(fd.read())
                for entry in playlist['playlist']['trackList']['track']:
                    channel_name = entry['title']
                    #print (track['album'])
                    parsed = urlparse(entry['location'])
                    # keep path/params/query/fragment of the original location,
                    # but force the configured scheme and host
                    receiver_url = urlunparse((
                        self.config.read('scheme'),
                        self.config.read('netloc'),
                        parsed.path,
                        parsed.params,
                        parsed.query,
                        parsed.fragment,
                    ))
                    #print(full_url)
                    self.providers.add(channel_name)
                    movie = Movie(
                        source=source_name,
                        source_type=stream_type,
                        provider=channel_name,
                        category='live',
                        title=channel_name + " Live",
                        timestamp="0",
                        duration=0,
                        description='',
                        url=receiver_url
                    )
                    movie.add_stream('ts', '', receiver_url)
                    self.movies.setdefault(source_name, {})[movie.uri()] = movie
        except Exception as e:
            print(str(e))
class SplPlugin(EPGProvider):
    plugin_id = 'satepg'
    plugin_names = ['SAT EPG']

    def __init__(self, modref):
        ''' inits the plugin '''
        # local import: keeps the fix self-contained even if the module header
        # of this plugin file does not import threading
        import threading
        self.modref = modref
        self.logger = schnipsllogger.getLogger(__name__)
        # do the plugin specific initialisation first
        self.origin_dir = os.path.dirname(__file__)
        self.config = JsonStorage(self.plugin_id, 'backup', "config.json", {
            'epgloops': 1,
            'epgtimeout': 60,
            'stream_source': 'SatIP Live'
        })
        # this defines who is the real data provider for the entries found in the EPG data
        self.stream_source = self.config.read('stream_source')
        self.epgbuffer_file_name = DirectoryMapper.abspath(
            self.plugin_id, 'tmpfs', 'epgbuffer.ts', True)
        self.process = None
        self.epg_storage = JsonStorage(self.plugin_id, 'runtime',
                                       "epgdata.json", {'epgdata': {}})
        self.all_EPG_Data = self.epg_storage.read('epgdata')
        self.timeline = {}
        # FIX: stream_answer_play_list() guards all_EPG_Data mutations with
        # `with self.lock:` but the lock was never created, which raised
        # AttributeError on the first unknown provider.
        self.lock = threading.Lock()
        # at last announce the own plugin
        super().__init__(modref)
        modref.message_handler.add_event_handler(self.plugin_id, 0,
                                                 self.event_listener)
        modref.message_handler.add_query_handler(self.plugin_id, 0,
                                                 self.query_handler)
        # plugin specific stuff
        # each EPG has its own special hardwired categories
        self.categories = [
            {
                'text': 'category_today',
                'value': '{"type": "day", "expression": "today"}'
            },
            {
                'text': 'category_tomorrow',
                'value': '{"type": "day", "expression": "tomorrow"}'
            },
            {
                'text': 'category_now',
                'value': '{"type": "time", "expression": "now"}'
            },
            {
                'text': 'category_evening',
                'value': '{"type": "time", "expression": "[\'8 PM\' to tomorrow"}'
            },
        ]

    def event_listener(self, queue_event):
        ''' react on events '''
        #print("event handler", self.plugin_id, queue_event.type, queue_event.user)
        if queue_event.type == defaults.STREAM_REQUEST_PLAY_LIST:
            self.stream_answer_play_list(queue_event)
        return queue_event  # dont forget the event for further pocessing...

    def get_real_plugin_name(self, initial_plugin_name):
        ''' helper routine, as on some epg types we need to correct the plugin name

        if this is the case, this method need to return its corrected plugin name
        '''
        return self.stream_source

    def get_plugin_id(self):
        return self.plugin_id

    def get_plugin_names(self):
        return self.plugin_names

    def get_categories(self):
        return self.categories

    def get_instance(self):
        return self

    def is_streamable(self):
        ''' helper routine, as some EPGs are streamable (e.g. Youtube, mediathecs)
        but others are not, as there time is in the future
        '''
        return False

    # ------ plugin specific routines
    def check_for_updates(self):
        '''Refreshes at most one requested provider's EPG per call, prunes
        outdated entries, and rebuilds the provider/timeline/movie caches when
        anything changed.'''
        # we'll use the name of the stream source plugin instead the name of the EPG plugin itself
        plugin_name = self.stream_source
        new_epg_loaded = False
        actual_time = time.time()
        with self.whoosh_ix.writer() as whoosh_writer:
            # we need to make a local copy first of the providers to avoid a
            # "array changed size during iteration" error
            for provider in list(self.all_EPG_Data.keys()):
                if self.all_EPG_Data[provider]['requested']:
                    self.all_EPG_Data[provider]['requested'] = False
                    if self.all_EPG_Data[provider][
                            'lastmodified'] < actual_time - 60 * 60 or not self.all_EPG_Data[
                                provider]['epg_data']:
                        # give the sat receiver some time to recover?!?!?!
                        time.sleep(10)
                        epg_details = self.get_epg_from_receiver(
                            provider, self.all_EPG_Data[provider]['url'])
                        if epg_details:
                            new_epg_loaded = True
                            self.all_EPG_Data[provider][
                                'lastmodified'] = time.time()
                            for start_time, movie_info in epg_details.items():
                                # refresh or add data
                                # only if the movie starts within the next 24 h, store it in Memory
                                if int(start_time) < actual_time + 24 * 60 * 60:
                                    self.all_EPG_Data[provider]['epg_data'][
                                        start_time] = movie_info
                                    # fill the search engine
                                    whoosh_writer.update_document(
                                        source=plugin_name,
                                        source_type=defaults.MOVIE_TYPE_STREAM,
                                        provider=provider,
                                        title=movie_info['title'],
                                        category=movie_info['category'],
                                        uri=movie_info['uri'],
                                        url=movie_info['url'],
                                        mime=movie_info['mime'],
                                        duration=movie_info['duration'],
                                        description=movie_info['description'],
                                        timestamp=datetime.datetime.fromtimestamp(
                                            int(movie_info['timestamp'])))
                            # do to only one epg update at a time and give the other
                            # threads some recources, we'll stop the loop after each
                            # provider update and wait for the next one
                            break
                # prune entries whose movie started at least six hours ago
                movie_infos_to_delete = {}
                max_age_timestamp = actual_time - 6 * 60 * 60
                max_age_timestamp_datetime = datetime.datetime.fromtimestamp(
                    max_age_timestamp)
                for start_time, movie_info in self.all_EPG_Data[provider][
                        'epg_data'].items():
                    if int(start_time) < max_age_timestamp:
                        movie_infos_to_delete[start_time] = movie_info['uri']
                for start_time, uri in movie_infos_to_delete.items():
                    del (self.all_EPG_Data[provider]['epg_data'][start_time])
                    new_epg_loaded = True
                if movie_infos_to_delete:
                    qp = QueryParser('timestamp', schema=self.whoosh_ix.schema)
                    querystring = "timestamp:[19700101 to {0}]".format(
                        max_age_timestamp_datetime.strftime('%Y%m%d%H%M%S'))
                    q = qp.parse(querystring)
                    whoosh_writer.delete_by_query(q)
            #delete old provider
            for provider_reference in list(self.all_EPG_Data.keys()):
                # no update the last 24 h? remove it..
                if self.all_EPG_Data[provider_reference][
                        'lastmodified'] < actual_time - 24 * 60 * 60:
                    whoosh_writer.delete_by_term('provider',
                                                 provider_reference)
                    del (self.all_EPG_Data[provider_reference])
                    new_epg_loaded = True
        # if this is not the first call (self.providers contains already data),
        # but no new epg data arrived, there is nothing to rebuild
        if self.providers and not new_epg_loaded:
            return
        self.epg_storage.write('epgdata', self.all_EPG_Data)
        # refill the internal lists
        new_providers = set()
        new_timeline = {}
        # EPG has its own special hardwired categories
        #self.categories = set()
        if not plugin_name in self.movies:
            self.movies[plugin_name] = {}
        for provider, movie_data in self.all_EPG_Data.copy().items():
            new_providers.add(provider)
            new_timeline[provider] = []
            for movie_info in movie_data['epg_data'].values():
                # lightweight anonymous record with .timestamp / .movie_info
                new_timeline[provider].append(
                    type(
                        '', (object, ), {
                            'timestamp': movie_info['timestamp'],
                            'movie_info': movie_info
                        })())
                self.movies[plugin_name][movie_info['uri']] = movie_info
        #replace the old data with the new one
        self.providers = new_providers
        self.timeline = new_timeline
        # sort by datetime
        for epg_list in self.timeline.values():
            epg_list.sort(key=self.get_timestamp)

    def search_channel_info(self, channel_epg_name):
        '''Returns the stored channel_info dict matching the given EPG channel
        name, or None.'''
        # NOTE(review): self.channels_info is not initialised in this class's
        # __init__ — confirm the base class provides it before relying on this.
        channels_info = self.channels_info.read('channels_info')
        if channels_info:
            for channel_info in channels_info:
                if channel_info['channel_epg_name'] == channel_epg_name:
                    return channel_info

    def split_text_by_capital_chars(self, text):
        ''' Tricky: Somehow the text in EPG seems not to have a line seperator ?!?,
        but it helps to split the text wherever a capital letter follows a small
        letter or digit like in

        erster SatzZweiter Satz2009Dritter Satz

        which gives

        erster Satz
        Zweiter Satz2009
        Dritter Satz
        '''
        pattern = re.compile(r'([^\sA-Z])([A-Z])')
        # step 1: insert a seperator in between
        newstring = pattern.sub(r"\1\n\2", text)
        # step 2: split by that seperator
        return newstring.split('\n')

    def get_epg_from_receiver(self, provider, url):
        '''Runs epg_grap.sh against the receiver URL (restricted to the SDT/EIT
        pids) and returns a dict start_time -> MovieInfo, or None on failure.'''
        # reduce the pids to the ones containing SDT (0x11) and EIT (0x12)
        url_st = urlparse(url)
        queries = url_st.query
        new_queries = ""
        if queries:
            for eq in queries.split("&"):
                key = eq.split("=")[0]
                value = eq.split("=")[1]
                if key == 'pids':
                    value = "0,17,18"
                new_queries += key + "=" + value + "&"
            new_queries = new_queries.strip("&")
        url_epd_pids_only = urlunparse((
            url_st.scheme,
            url_st.netloc,
            url_st.path,
            url_st.params,
            new_queries,
            url_st.fragment,
        ))
        # process arguments
        attr = [
            os.path.join(self.origin_dir, 'epg_grap.sh'), url_epd_pids_only,
            provider,
            str(self.config.read('epgloops')),
            str(self.config.read('epgtimeout'))
        ]
        self.logger.info("epg_grap started {0} {1} {2}".format(
            provider, url_epd_pids_only, repr(attr)))
        try:
            self.process = subprocess.Popen(attr,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.PIPE)
            # if epg_grap won't exit, try to terminate its process after 600 s
            cleaner = Timer(600, self.cleanProcess)
            cleaner.start()
            epg_out, err = self.process.communicate()
            #self.process.wait() # oops... not needed? harmless!
            cleaner.cancel()
            if err:
                self.logger.warning("epg_grap ended with an error:\n%s" % (err))
            else:
                self.logger.debug("epg_grap' ended")
            epg_json_string = epg_out.decode()
            epg_json = json.loads(epg_json_string)
            result = {}
            count = 0
            for json_movie in epg_json['details'].values():
                start = json_movie['unixTimeBegin']
                stop = json_movie['unixTimeEnd']
                if json_movie['title']:
                    title = self.split_text_by_capital_chars(
                        json_movie['title'])[0]
                else:
                    title = json_movie['name']
                desc = '\n'.join(
                    self.split_text_by_capital_chars(
                        json_movie['description']))
                category = json_movie['name']
                count += 1
                # we'll use the name of the stream source plugin instead the name of the EPG plugin itself
                # plugin_name = self.plugin_names[0]
                plugin_name = self.stream_source
                self.providers.add(provider)
                # EPG has its own special hardwired categories
                #self.categories.add(category)
                new_movie = MovieInfo(
                    url=url,
                    mime='video/MP2T',
                    title=title,
                    category=category,
                    source=plugin_name,
                    source_type=defaults.MOVIE_TYPE_STREAM,
                    provider=provider,
                    timestamp=int(start),
                    duration=stop - start,
                    description=desc)
                if not plugin_name in self.movies:
                    self.movies[plugin_name] = {}
                self.movies[plugin_name][new_movie['uri']] = new_movie
                result[start] = new_movie
            for json_provider in epg_json['providers']:
                self.logger.debug(
                    "channel found in epg: {0}".format(json_provider))
            self.logger.info("{0} epg loaded, {1} entries".format(
                provider, count))
            return result
        except Exception as ex:
            self.logger.warning("epg_grap could not be started. Error: %s" %
                                (ex))
        return

    def get_timestamp(self, elem):
        '''helper function for the array sort function '''
        return elem.timestamp

    def string_to_timestamp(self, timestring):
        '''Converts "YYYYMMDDhhmmss +zzzz" to a unix timestamp; empty input
        yields "".'''
        if timestring:
            # read https://stackoverflow.com/a/2956997 to understand why
            # timegm() is used insted of mktime()!
            return calendar.timegm(
                datetime.datetime.strptime(timestring,
                                           "%Y%m%d%H%M%S %z").timetuple())
        else:
            return ''

    def stream_answer_play_list(self, queue_event):
        '''Answers a play-list request with the EPG entry running right now on
        the requested provider (plus the following title), or a placeholder if
        none is known yet.'''
        uri = queue_event.data['uri']
        uri_elements = uri.split(':')
        source = uri_elements[0]
        if source != self.stream_source:
            return queue_event
        provider = uri_elements[1]
        if not provider in self.all_EPG_Data:
            # first request for this provider: look its stream URL up and
            # schedule an EPG read
            movie_info_list = self.modref.message_handler.query(
                Query(None, defaults.QUERY_MOVIE_ID,
                      source + ':' + provider + ':0'))
            if movie_info_list:
                movie_info = movie_info_list[0]
                with self.lock:
                    self.all_EPG_Data[provider] = {
                        'requested': True,
                        'url': movie_info['url'],
                        'epg_data': {},
                        'lastmodified': 0
                    }
        else:
            self.all_EPG_Data[provider]['requested'] = True
        time_stamp = time.time()
        try:
            epg_list = []
            if provider in self.timeline:
                epg_list = self.timeline[provider]
            nr_of_entries = len(epg_list)
            # advance to the first entry starting in the future
            i = 0
            while i < nr_of_entries and time_stamp > int(
                    epg_list[i].timestamp):
                i += 1
            if i < nr_of_entries and i > 0 and time_stamp < int(
                    epg_list[i - 1].timestamp) + int(epg_list[
                        i - 1].movie_info['duration']):
                # we found an entry
                first_movie_info = epg_list[i - 1].movie_info
                second_movie_info = epg_list[i].movie_info
                processed_time_percentage = (
                    time_stamp - int(first_movie_info['timestamp'])
                ) * 100 / first_movie_info['duration']
                if processed_time_percentage < 0:
                    processed_time_percentage = 0
                if processed_time_percentage > 100:
                    processed_time_percentage = 100
                combined_movie_info = MovieInfo(
                    url=first_movie_info['url'],
                    mime=first_movie_info['mime'],
                    source=first_movie_info['source'],
                    source_type=first_movie_info['source_type'],
                    uri=first_movie_info['uri'],
                    title=first_movie_info['title'],
                    category=first_movie_info['category'],
                    next_title=second_movie_info['title'],
                    provider=first_movie_info['provider'],
                    timestamp=second_movie_info['timestamp'],
                    duration=processed_time_percentage,
                    # description=first_movie_info['description'],
                    query=first_movie_info['query'])
                combined_movie_info['recordable'] = True
            else:
                combined_movie_info = MovieInfo(
                    url='',
                    mime='',
                    source='',
                    source_type='',
                    uri=':'.join([self.stream_source, provider, '0']),
                    title='-',
                    category='',
                    provider=provider,
                    timestamp=time_stamp,
                    duration=0,
                    # description='',
                    query=None)
                combined_movie_info['recordable'] = False
                # as we didn't found a matching EPG record, we "rewind" the
                # provider update time by 2 hours to force another epg read
                self.all_EPG_Data[provider]['lastmodified'] = time.time(
                ) - 2 * 60 * 60
            self.modref.message_handler.queue_event(
                None, defaults.STREAM_ANSWER_PLAY_LIST, {
                    'uri': queue_event.data['uri'],
                    'movie_info': combined_movie_info
                })
        except Exception as e:
            print('unknown provider', provider, str(e))

    def cleanProcess(self):
        '''Watchdog callback: terminates (and if needed kills) a hanging
        epg_grap subprocess.'''
        try:
            if not self.process == None:
                self.process.terminate()
                time.sleep(3)
                if not self.process == None:
                    self.process.kill()
                    print("Curl had to be killed. R.I.P.")
                else:
                    print("Curl had to be terminated.")
        except:
            print("Curl termination error, process might be running")
        if not self.process == None:
            print("Curl: termination may have failed")
        self.running = 0
class SplPlugin(SplThread):
    plugin_id = 'record_hd'
    plugin_names = ['HD Recorder']

    def __init__(self, modref):
        ''' inits the plugin '''
        self.modref = modref
        # do the plugin specific initialisation first
        self.origin_dir = os.path.dirname(__file__)
        self.config = JsonStorage(self.plugin_id, 'backup', "config.json", {
            'path': '/var/schnipsl',
            'www-root': 'http://schnipsl:9092/'
        })
        self.records = JsonStorage(self.plugin_id, 'runtime', "records.json",
                                   {})
        # we need to store the thread pointers seperate from self.records,
        # as we can't store them as json
        self.record_threats = {}
        # remembers how long the last recording action is away
        self.last_recorded_time = 0
        # at last announce the own plugin
        super().__init__(modref.message_handler, self)
        modref.message_handler.add_event_handler(self.plugin_id, 0,
                                                 self.event_listener)
        modref.message_handler.add_query_handler(self.plugin_id, 0,
                                                 self.query_handler)
        self.runFlag = True

    def event_listener(self, queue_event):
        '''Handles record requests; passes every event on unchanged.'''
        if queue_event.type == defaults.TIMER_RECORD_REQUEST:
            self.timer_record_request(queue_event.data)
        # for further pocessing, do not forget to return the queue event
        return queue_event

    def query_handler(self, queue_event, max_result_count):
        ''' try to send simulated answers '''
        # logger.info(f"hd_recorder query handler" {queue_event.type} {queue_event.user} {max_result_count"})
        if queue_event.type == defaults.QUERY_MOVIE_ID:
            new_uri = queue_event.params
            for record_movie in self.records.read(
                    'all', {}).values():  # 'all': read the whole config
                if record_movie['new_uri'] == new_uri:
                    return [
                        MovieInfo(
                            source=self.plugin_names[0],
                            source_type=defaults.MOVIE_TYPE_RECORD,
                            # extracts the original provider back out of the uri
                            provider=record_movie['new_uri'].split(':')[1],
                            category=record_movie['category'],
                            title=record_movie['title'],
                            timestamp=record_movie['timestamp'],
                            duration=record_movie['duration'],
                            description=record_movie['description'],
                            url=record_movie['new_url'],
                            mime=record_movie['mime'])
                    ]
        return []

    def _run(self):
        ''' starts the server '''
        scheduler = Scheduler([(self.check_for_records, 10),
                               (self.cleanup_records, 60)])
        while self.runFlag:
            scheduler.execute()
            time.sleep(2)

    def _stop(self):
        self.runFlag = False

    def timer_record_request(self, data):
        '''Creates a record entry for the movie behind data['uri'], unless one
        already exists.'''
        uri = data['uri']
        uuid = data['uuid']
        movie_info_list = self.modref.message_handler.query(
            Query(None, defaults.QUERY_MOVIE_ID, uri))
        if movie_info_list:
            movie_info = movie_info_list[0]
            uri = movie_info['uri']
            # do we have that record request already
            existing_record = self.records.read(uri)
            if not existing_record:
                uri_base64 = base64_encode(uri)
                ext = '.mp4'
                if movie_info['mime'] == 'video/MP2T':
                    ext = '.mp4'
                file_path = DirectoryMapper.abspath(
                    '', 'videos',
                    self.config.read('path') + uri_base64 + ext)
                if movie_info['source_type'] == defaults.MOVIE_TYPE_RECORD:
                    self.records.write(
                        uri,
                        {
                            # in case of a record we set start and duration to 0 to indicate
                            # that the recording can start immediadly & has no duration
                            'record_starttime': 0,
                            'record_duration': 0,
                            'provider': movie_info['provider'],
                            'category': movie_info['category'],
                            'title': movie_info['title'],
                            'timestamp': movie_info['timestamp'],
                            'duration': movie_info['duration'],
                            'description': movie_info['description'],
                            'url': movie_info['url'],
                            # FIX: 'mime' was missing here although
                            # query_handler() reads record_movie['mime'],
                            # which raised KeyError for this record type
                            'mime': movie_info['mime'],
                            'uri': uri,
                            'new_uri':
                            self.plugin_names[0] + ':' +
                            ':'.join(movie_info['uri'].split(':')[1:]),
                            'new_url':
                            self.config.read('www-root') + uri_base64 + ext,
                            'uuid': uuid,
                            'file_path': file_path,
                            'state': Record_States.WAIT_FOR_RECORDING,
                            # try to start the record up to 4 times before it finally failes
                            'errorcount': 4
                        })
                if movie_info['source_type'] == defaults.MOVIE_TYPE_STREAM:
                    # recording a stream with a duration of 0 is a very bad
                    # idea, because it would never stop..
                    if movie_info['duration']:
                        self.records.write(
                            uri,
                            {
                                'record_starttime': movie_info['timestamp'],
                                'record_duration': movie_info['duration'],
                                # added for consistency with the record branch above
                                'provider': movie_info['provider'],
                                'category': movie_info['category'],
                                'title': movie_info['title'],
                                'timestamp': movie_info['timestamp'],
                                'duration': movie_info['duration'],
                                'description': movie_info['description'],
                                'url': movie_info['url'],
                                'mime': movie_info['mime'],
                                'uri': uri,
                                'new_uri':
                                self.plugin_names[0] + ':' +
                                ':'.join(movie_info['uri'].split(':')[1:]),
                                'new_url':
                                self.config.read('www-root') + uri_base64 +
                                ext,
                                'uuid': uuid,
                                'file_path': file_path,
                                'state': Record_States.WAIT_FOR_RECORDING,
                                # try to start the record up to 4 times before it finally failes
                                'errorcount': 4
                            })

    def check_for_records(self):
        '''Scheduler callback: starts recordings whose time has come and marks
        expired ones as failed.'''
        act_time = time.time()
        for uri, record in self.records.read('all', '').items():
            if record['state'] == Record_States.WAIT_FOR_RECORDING:
                # this is a record, which can be recorded immediadly
                if record['record_duration'] == 0:
                    record['state'] = Record_States.ACTUAL_RECORDING
                    self.records.write(uri, record)
                    self.recording(record)
                    continue
                # something went wrong, the record time was in the past.
                # Mark the entry as failed
                if record['record_starttime'] + record[
                        'record_duration'] < act_time:
                    record['state'] = Record_States.RECORDING_FAILED
                # something went wrong during recording
                if record['state'] == Record_States.RECORDING_FAILED:
                    self.records.write(uri, record)
                    self.deploy_record_result(record, record['state'])
                    continue
                # it's time to start
                if record['record_starttime'] - self.config.read(
                        'padding_secs', 300
                ) <= act_time and record['record_starttime'] + record[
                        'record_duration'] > act_time:
                    # in case the movie has already started, we correct
                    # starttime and duration to show the real values
                    if record['record_starttime'] < act_time:
                        # NOTE(review): writes 'starttime' (read nowhere else
                        # in this class) while the schedule uses
                        # 'record_starttime' — confirm the intended key
                        record['starttime'] = str(act_time)
                        record['duration'] = record['duration'] - (
                            act_time - record['record_starttime'])
                    record['state'] = Record_States.ACTUAL_RECORDING
                    self.records.write(uri, record)
                    self.recording(record)
                    continue

    def cleanup_records(self):
        '''Scheduler callback: deploys finished recordings, detects zombies and
        deletes records (and their files) no longer referenced by the UI.'''
        records_to_delete = {}
        act_time = time.time()
        # request which movies are still in the UI list
        valid_movieuri_list = self.modref.message_handler.query(
            Query(None, defaults.QUERY_VALID_MOVIE_RECORDS,
                  {'source': self.plugin_names[0]}))
        for uri, record in self.records.config.items():
            if uri in self.record_threats:
                # recording is finished, so deploy the result
                if not self.record_threats[uri].is_alive():
                    del (self.record_threats[uri])  # we destroy the thread
                    self.deploy_record_result(record, record['state'])
                    self.last_recorded_time = act_time
            # don't do any delete action if the last record is just 5 mins ago
            # to give the UI some time to adapt the new movie
            if self.last_recorded_time > act_time - 5 * 60:
                return
            if record[
                    'state'] == Record_States.ACTUAL_RECORDING and not uri in self.record_threats:
                # seems to be a zombie record
                records_to_delete[uri] = record
                self.deploy_record_result(record,
                                          Record_States.RECORDING_FAILED)
            if record['state'] == Record_States.RECORDING_FINISHED or record[
                    'state'] == Record_States.RECORDING_FAILED:
                new_uri = record['new_uri']
                #logger.info(f'Record on disk: {new_uri}')
                if not new_uri in valid_movieuri_list:
                    records_to_delete[uri] = record
        # some debug output
        #for uri in valid_movieuri_list:
        #	logger.info(f'recoder uri: {uri}')
        if records_to_delete:
            # go through the list of records to be deleted
            for uri, record in records_to_delete.items():
                # delete the file
                file_path = record['file_path']
                logger.info(f'try to delete file {file_path}')
                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        logger.info(f'file deleted {file_path}')
                    except Exception as ex:
                        logger.warning(
                            "Cant delete record file {0}. Error: {1}".format(
                                file_path, str(ex)))
                else:
                    # remove the entry
                    logger.info(f'file not found, just remove the entry {uri}')
                    del (self.records.config[uri])
            self.records.save()

    def deploy_record_result(self, record, record_state):
        '''Persists the record and broadcasts its (final) state to the rest of
        the system.'''
        # save changes
        self.records.write(record['uri'], record)
        self.modref.message_handler.queue_event(
            None, defaults.TIMER_RECORD_RESULT, {
                'new_uri': record['new_uri'],
                'new_url': record['new_url'],
                'uuid': record['uuid'],
                'record_state': record_state
            })

    def recording(self, record):
        '''Starts the actual recording in a background thread and remembers the
        thread handle for cleanup_records().'''
        uri = record['uri']
        logger.info(f'try to record {uri}')
        threat = threading.Thread(target=record_thread,
                                  args=(record,
                                        self.config.read('padding_secs', 300)))
        self.record_threats[uri] = threat
        threat.start()
class SplPlugin(EPGProvider):
    plugin_id = 'mediathek_ard'
    plugin_names = ['Öffi Mediathek', 'LiveTV']

    def __init__(self, modref):
        ''' inits the plugin '''
        self.modref = modref
        self.logger = schnipsllogger.getLogger(__name__)
        # do the plugin specific initialisation first
        self.origin_dir = os.path.dirname(__file__)
        # at last announce the own plugin
        super().__init__(modref)
        modref.message_handler.add_event_handler(self.plugin_id, 0,
                                                 self.event_listener)
        modref.message_handler.add_query_handler(self.plugin_id, 0,
                                                 self.query_handler)
        # plugin specific stuff
        # each EPG has its own special hardwired categories
        self.categories = [
            {
                'text': 'category_last_week',
                'value': '{"type": "day", "expression": "[\'-1 week\' to now]"}'
            },
            {
                'text': 'category_last_month',
                'value': '{"type": "day", "expression": "[\'-4 week\' to now]"}'
            },
        ]
        # additional to our whoosh db, we need to cache the providers to not have
        # to read through the huge whoosh db at start to reconstruct the provider list again
        self.provider_storage = JsonStorage(self.plugin_id, 'runtime',
                                            "provider_cache.json",
                                            {'provider_cache': []})
        self.providers = set(self.provider_storage.read('provider_cache'))

    def event_listener(self, queue_event):
        ''' react on events '''
        #print("event handler", self.plugin_id, queue_event.type, queue_event.user)
        # for further pocessing, do not forget to return the queue event
        return queue_event

    def get_real_plugin_name(self, initial_plugin_name):
        ''' helper routine, as on some epg types we need to correct the plugin name

        if this is the case, this method need to return its corrected plugin name
        '''
        return initial_plugin_name

    def get_plugin_id(self):
        return self.plugin_id

    def get_plugin_names(self):
        return self.plugin_names

    def get_categories(self):
        return self.categories

    def get_instance(self):
        return self

    def is_streamable(self):
        ''' helper routine, as some EPGs are streamable (e.g. Youtube, mediathecs)
        but others are not, as there time is in the future
        '''
        return True

    # ------ plugin specific routines
    def check_for_updates(self):
        '''Downloads (at most every 48 h), unpacks and indexes the
        MediathekView film list.'''
        file_name = 'online_filmlist'
        full_file_name = DirectoryMapper.abspath(self.plugin_id, 'tmpfs',
                                                 file_name, True)
        try:
            # does the file exist at all already?
            filmlist_time_stamp = DirectoryMapper.getmtime(
                self.plugin_id, 'tmpfs', file_name)
        except Exception:
            filmlist_time_stamp = 0
        if filmlist_time_stamp < time.time(
        ) - 60 * 60 * 48:  # file is older as 48 hours
            '''
            Bootstrap to read the filmlist:
            1. read the list of actual filmlist URLs from https://res.mediathekview.de/akt.xml
            '''
            self.logger.info("Retrieve film list")
            try:
                var_url = urlopen('https://res.mediathekview.de/akt.xml')
                server_list = parse(var_url)
                # (leftover debug print of the parsed server list removed)
                url = None
                prio = 999  # dummy start value
                for item in server_list.iterfind('Server'):
                    this_prio = int(item.findtext('Prio'))
                    # filter for the server with the lowest prio
                    if this_prio < prio:
                        prio = this_prio
                        url = item.findtext('URL')
                self.logger.info(f'Mediathek filmlist url {url}')
                if url:
                    try:
                        urlretrieve(url, full_file_name + '.pack')
                        self.logger.info("filmlist downloaded")
                    except Exception as e:
                        self.logger.warning(
                            f'failed filmlist download {str(e)}')
                    try:
                        with DirectoryMapper.open(self.plugin_id, 'tmpfs',
                                                  file_name,
                                                  'wb') as unpack_file_handle:
                            with lzma.open(
                                    DirectoryMapper.open(
                                        self.plugin_id, 'tmpfs',
                                        file_name + '.pack',
                                        'rb')) as archive_file_handle:
                                # stream-decompress in 4 KiB chunks
                                # ('chunk' instead of shadowing builtin 'bytes')
                                chunk = archive_file_handle.read(4096)
                                while chunk:
                                    unpack_file_handle.write(chunk)
                                    chunk = archive_file_handle.read(4096)
                        self.reset_index()  # destroy the existing index
                        self.logger.info('filmlist server list unpacked')
                    except Exception as e:
                        # consistency: use the plugin logger instead of print
                        self.logger.warning(
                            'failed filmlist unpack {0}'.format(str(e)))
            except Exception as e:
                self.logger.warning('failed filmlist server list download')
        else:
            if not self.is_empty() and self.providers:
                return  # no need to load, we have already movie data
        loader_remember_data = {'provider': '', 'category': ''}
        try:
            with DirectoryMapper.open(self.plugin_id, 'tmpfs',
                                      file_name) as data:
                self.reset_index()
                with self.whoosh_ix.writer() as whoosh_writer:
                    count = 0
                    self.logger.info(f"loading filmlist...")
                    for liste in JsonSlicer(data, ('X'),
                                            path_mode='map_keys'):
                        count += 1
                        data_array = liste[1]
                        # "Sender" 0, "Thema" 1, "Titel" 2, "Datum" 3,
                        # "Zeit" 4, "Dauer" 5, "Größe [MB]" 6,
                        # "Beschreibung" 7, "Url" 8, "Website" 9,
                        # "Url Untertitel" 10, "Url RTMP" 11,
                        # "Url Klein" 12, "Url RTMP Klein" 13, "Url HD" 14,
                        # "Url RTMP HD" 15, "DatumL" 16, "Url History" 17,
                        # "Geo" 18, "neu" 19
                        provider = data_array[0]
                        category = data_array[1]
                        # empty provider/category fields mean "same as the
                        # previous entry" in the film list format
                        if provider:
                            loader_remember_data['provider'] = provider
                        else:
                            provider = loader_remember_data['provider']
                        if category:
                            loader_remember_data['category'] = category
                        else:
                            category = loader_remember_data['category']
                        if category == 'Livestream':
                            source_type = defaults.MOVIE_TYPE_STREAM
                            plugin_name = self.plugin_names[1]
                            provider = provider.replace('Livestream',
                                                        '').strip()
                            #print("Livestream")
                        else:
                            plugin_name = self.plugin_names[0]
                            source_type = defaults.MOVIE_TYPE_RECORD
                        self.providers.add(provider)
                        try:
                            # livestream do not have a duration
                            timestamp = int(data_array[16])
                            timestamp_datetime = datetime.datetime.fromtimestamp(
                                timestamp)
                        except Exception:
                            timestamp = 1
                            timestamp_datetime = datetime.datetime.fromtimestamp(
                                timestamp)
                        movie_info = MovieInfo(
                            url=data_array[8],
                            mime='video/mp4',
                            title=data_array[2],
                            category=category,
                            source=plugin_name,
                            source_type=source_type,
                            provider=provider,
                            timestamp=timestamp,
                            duration=self.time_string_to_secs(data_array[5]),
                            description=data_array[7],
                        )
                        # fill the search engine
                        whoosh_writer.update_document(
                            source=plugin_name,
                            source_type=source_type,
                            provider=provider,
                            title=data_array[2],
                            category=category,
                            uri=movie_info['uri'],
                            description=data_array[7],
                            timestamp=timestamp_datetime,
                            url=movie_info['url'],
                            mime=movie_info['mime'],
                            duration=movie_info['duration'])
                        if not plugin_name in self.movies:
                            self.movies[plugin_name] = {}
                        # experimental: Do not save the movies in mem anymore, just in Whoosh
                        #self.movies[plugin_name][movie_info['uri']]=movie_info
            self.provider_storage.write('provider_cache',
                                        list(self.providers))
            self.logger.info(f"filmlist loaded, {count} entries")
        except Exception as err:
            self.logger.warning(f'failed to read filmlist:{err}')

    def time_string_to_secs(self, time_string):
        '''Converts "HH:MM:SS" (or any colon-separated time) to seconds;
        returns -1 on a malformed component.'''
        elements = time_string.split(':')
        seconds = 0
        for element in elements:
            try:
                seconds = seconds * 60 + int(element)
            except Exception:
                return -1
        return seconds
class SplPlugin(SplThread):
	''' Kodi player plugin: discovers Kodi instances via mDNS/zeroconf
	and translates schnipsl player events into Kodi remote calls '''
	plugin_id = 'kodi'
	plugin_names = ['Kodi']

	def __init__(self, modref):
		''' inits the plugin '''
		self.modref = modref
		# do the plugin specific initialisation first
		self.devices = {}  # device_friendly_name -> Kodi instance
		self.zeroconf = zeroconf.Zeroconf()
		# guards self.devices against concurrent zeroconf callbacks
		self.lock = Lock()
		self.origin_dir = os.path.dirname(__file__)
		self.config = JsonStorage(self.plugin_id, 'backup', "config.json",
			{'stopdelay': 1.0})
		# at last announce the own plugin
		super().__init__(modref.message_handler, self)
		modref.message_handler.add_event_handler(
			self.plugin_id, 0, self.event_listener)
		modref.message_handler.add_query_handler(
			self.plugin_id, 0, self.query_handler)
		self.runFlag = True

	def event_listener(self, queue_event):
		''' dispatches player control events to the addressed Kodi device '''
		if queue_event.type == defaults.DEVICE_PLAY_REQUEST:
			cast = self.get_cast(queue_event.data['device_friendly_name'])
			if cast and cast.online:
				cast.play_media(
					queue_event.data['movie_url'],
					queue_event.data['movie_mime_type'],
					current_time=queue_event.data['current_time'])
		if queue_event.type == defaults.DEVICE_PLAY_PAUSE:
			cast = self.get_cast(queue_event.data['device_friendly_name'])
			if cast and cast.supports_pause:
				cast.pause()
				self.send_device_play_status(
					queue_event.data['device_friendly_name'], True)
		if queue_event.type == defaults.DEVICE_PLAY_STOP:
			cast = self.get_cast(queue_event.data['device_friendly_name'])
			if cast and cast.online:
				cast.stop()
				self.send_device_play_status(
					queue_event.data['device_friendly_name'], True)
				# give the device some time to settle after a stop
				time.sleep(self.config.read('stopdelay', 0))
			else:
				if not cast:
					print("kodi player stop command: Device {0} not found".
						format(queue_event.data['device_friendly_name']))
				else:
					print("kodi player stop command: Device {0} not online".
						format(queue_event.data['device_friendly_name']))
		if queue_event.type == defaults.DEVICE_PLAY_RESUME:
			cast = self.get_cast(queue_event.data['device_friendly_name'])
			if cast and cast.supports_pause:
				cast.play()
		if queue_event.type == defaults.DEVICE_PLAY_SETPOS:
			cast = self.get_cast(queue_event.data['device_friendly_name'])
			pos = queue_event.data['pos']
			if cast and pos:
				self.set_seek(cast, pos)
		if queue_event.type == defaults.DEVICE_PLAY_SETVOLUME:
			cast = self.get_cast(queue_event.data['device_friendly_name'])
			# schnipsl handles the volume as percent value from 1 to 100, Kodi also from 0 .. 100
			volume = int(queue_event.data['volume'])
			if cast:
				self.set_volume(cast, volume)
		# for further processing, do not forget to return the queue event
		return queue_event

	def query_handler(self, queue_event, max_result_count):
		''' try to send simulated answers '''
		# print("kodi query handler", queue_event.type, queue_event.user, max_result_count)
		if queue_event.type == defaults.QUERY_FEASIBLE_DEVICES:
			res = []
			for device_friedly_name in self.devices:
				res.append(device_friedly_name)
			return res[:max_result_count]
		return []

	def get_cast(self, device_friendly_name):
		''' returns the cast object of a known device or None '''
		try:
			return self.devices[device_friendly_name].cast
		except (KeyError, AttributeError):
			# device unknown or has no cast attribute (was a bare except)
			return None

	def list_devices(self):
		''' prints the currently known devices to the console '''
		print("Currently known cast devices:")
		for device_friedly_name in self.devices:
			print("  {}".format(device_friedly_name))

	def get_device_friendly_name_of_uuid(self, uuid):
		# kodi specific: the friendly name is the first element of the mDNS uuid
		return uuid.split('.')[0]
		# alternative, if the name were not part of the uuid itself:
		# for device_friedly_name, cast in self.devices.items():
		#     if uuid == cast.uuid:
		#         return device_friedly_name
		# return None

	def add_service(self, zeroconf, type, uuid):
		''' zeroconf browser callback: a Kodi mDNS service appeared '''
		service_info = zeroconf.get_service_info(type, uuid)
		print("Service %s added, service info: %s" % (uuid, service_info))
		# print("Found mDNS service for cast name {}".format(name))
		device_friendly_name = self.get_device_friendly_name_of_uuid(uuid)
		if device_friendly_name and device_friendly_name in self.devices:
			cast = self.devices[device_friendly_name]
		else:
			cast = Kodi(service_info)
			with self.lock:
				self.devices[cast.device_friendly_name] = cast
		cast.online = True
		self.list_devices()

	def remove_service(self, zeroconf, type, uuid):
		''' zeroconf browser callback: a Kodi mDNS service disappeared '''
		print("Service %s removed" % (uuid, ))
		# print("Lost mDNS service for cast name {} {}".format(
		#     name, service))
		device_friendly_name = self.get_device_friendly_name_of_uuid(uuid)
		if device_friendly_name and device_friendly_name in self.devices:
			cast = self.devices[device_friendly_name]
			cast.online = False
			# sent last known position for later restart
			cast_status = cast.cast_info
			cast_status['state_change'] = True  # set the update marker
			self.modref.message_handler.queue_event(
				None, defaults.DEVICE_PLAY_STATUS, cast_status)
		self.list_devices()

	def update_service(self, zeroconf, type, uuid):
		# can be empty, but it's required by zeroconfig browser
		pass

	def set_seek(self, cast, position):
		''' seeks to an absolute position, clamped to [0, duration] '''
		if cast.supports_seek is False:
			return
		cast.update_status()
		try:
			if position > cast.duration:
				return
			if position < 0:
				position = 0
			cast.seek(position)
		except Exception:
			# best effort: ignore devices that reject the seek (was a bare except)
			return

	def set_volume(self, cast, volume):
		''' sets the volume, clamped to the 0..100 percent range '''
		if volume > 100:
			volume = 100
		if volume < 0:
			volume = 0
		cast.set_volume(int(volume))

	def send_device_play_status(self, device_friendly_name, state_change_flag):
		''' refreshes the device status and broadcasts it as a
		DEVICE_PLAY_STATUS event '''
		if device_friendly_name not in self.devices:
			return
		cast = self.devices[device_friendly_name]
		if not cast.online:
			return  # device is actual not acessable
		cast.update_status()
		cast_info = cast.cast_info
		# state change is set either on request or if kodi itself is asking for
		cast_info['state_change'] = cast_info[
			'state_change'] or state_change_flag
		if not cast_info['duration']:
			cast_info['duration'] = -1
		if cast.supports_seek:
			cast_info['current_time'] = cast.current_time
		else:
			cast_info['current_time'] = -1
		cast.cast_info = cast_info
		# BUGFIX: queue the status dict, as remove_service does, instead of
		# the Kodi object itself, so all DEVICE_PLAY_STATUS payloads have the
		# same shape for the event consumers
		self.modref.message_handler.queue_event(
			None, defaults.DEVICE_PLAY_STATUS, cast_info)

	def _run(self):
		''' starts the server: browses for Kodi services and periodically
		broadcasts the play status of all known devices '''
		self.browser = zeroconf.ServiceBrowser(
			self.zeroconf, "_xbmc-jsonrpc._tcp.local.", self)
		while self.runFlag:
			time.sleep(2)
			with self.lock:
				for device_friendly_name in self.devices:
					self.send_device_play_status(device_friendly_name, False)

	def _stop(self):
		''' stops the zeroconf browser and ends the run loop '''
		self.zeroconf.close()
		self.runFlag = False
class Webserver(SplThread):
	''' HTTP and websocket front end of the application '''

	def __init__(self, act_modref):
		''' creates the HTTP and websocket server '''
		global modref
		modref = act_modref
		super().__init__(modref.message_handler, self)
		# reads the config, if any
		self.config = JsonStorage('webserver', 'backup', "config.json", {
			'server_config': {
				"credentials": "",
				"host": "0.0.0.0",
				"port": 8000,
				"secure": False
			},
		})
		server_config = self.config.read("server_config", {})
		# set up the argument parser with values from the config
		parser = argparse.ArgumentParser()
		parser.add_argument("--host", default=server_config["host"],
			help="the IP interface to bound the server to")
		# BUGFIX: without type=int a port given on the command line arrives
		# as a string and breaks both the socket bind and the '%d' prints
		parser.add_argument("-p", "--port", type=int,
			default=server_config["port"],
			help="the server port")
		parser.add_argument("-s", "--secure", action="store_true",
			default=server_config["secure"],
			help="use secure https: and wss:")
		parser.add_argument("-c", "--credentials",
			default=server_config["credentials"],
			help="user credentials")
		args = parser.parse_args()
		self.server = ThreadedHTTPServer((args.host, args.port), WSZuulHandler)
		modref.message_handler.add_event_handler(
			'webserver', 0, self.server.event_listener)
		self.server.daemon_threads = True
		self.server.auth = b64encode(args.credentials.encode("ascii"))
		if args.secure:
			# NOTE(review): ssl.wrap_socket is deprecated and removed in
			# Python 3.12 - migrate to ssl.SSLContext.wrap_socket on upgrade
			self.server.socket = ssl.wrap_socket(
				self.server.socket,
				certfile='./server.pem',
				keyfile='./key.pem',
				server_side=True)
			print('initialized secure https server at port %d' % (args.port))
		else:
			print('initialized http server at port %d' % (args.port))

	def _run(self):
		''' starts the server '''
		try:
			origin_dir = os.path.dirname(__file__)
			web_dir = os.path.join(os.path.dirname(__file__),
				defaults.WEB_ROOT_DIR)
			# serve the static files relative to the web root dir
			os.chdir(web_dir)
			self.server.serve_forever()
			os.chdir(origin_dir)
		except KeyboardInterrupt:
			print('^C received, shutting down server')
			self.server.socket.close()

	def _stop(self):
		''' closes the server socket to end serve_forever '''
		self.server.socket.close()

	def event_listener(self, queue_event):
		''' handler for system events '''
		pass

	def query_handler(self, queue_event, max_result_count):
		''' handler for system queries '''
		pass
def __init__(self, modref, plugin_root_dir):
	''' discovers all spl_*.py plugin modules below plugin_root_dir, loads
	the ones flagged active in plugins.json and starts them; newly found
	plugins are registered as inactive for the next run '''
	self.origin_dir = os.path.dirname(__file__)
	self.config = JsonStorage('PluginManager', 'backup', "plugins.json", {
		'plugins': {
			"channels_satip": {
				"active": True
			},
			"chromecast": {
				"active": True
			},
			"kodi": {
				"active": True
			},
			"mediathek_ard": {
				"active": True
			},
			"playerhandler": {
				"active": True
			},
			"record_hd": {
				"active": True
			},
			"satepg": {
				"active": True
			},
			"uihandler": {
				"active": True
			}
		}
	})
	self.plugins = {}
	# BUGFIX: the dot before 'py' was unescaped (r'^spl_.+.py$') and so
	# matched any character instead of a literal '.'
	regex = re.compile(r'^spl_.+\.py$')
	try:
		plugin_path = os.path.realpath(os.path.join(
			os.path.dirname(__file__), plugin_root_dir))
		list_subfolders_with_paths = [
			f.path for f in os.scandir(plugin_path) if f.is_dir()]
		new_plugins_found = False
		config_plugins = self.config.read('plugins')
		for sub_folder in list_subfolders_with_paths:
			list_file_infos = [
				f for f in os.scandir(sub_folder) if f.is_file()]
			for file_info in list_file_infos:
				if regex.match(file_info.name):
					print(file_info.path)
					# import the plugin module from its file location
					module_spec = importlib.util.spec_from_file_location(
						file_info.name, file_info.path)
					my_module = importlib.util.module_from_spec(module_spec)
					module_spec.loader.exec_module(my_module)
					plugin_id = my_module.SplPlugin.plugin_id
					if plugin_id not in config_plugins:
						# new module: register it as inactive first
						config_plugins[plugin_id] = {'active': False}
						new_plugins_found = True
						continue
					if not config_plugins[plugin_id]['active']:
						continue
					instance = my_module.SplPlugin(modref)
					self.plugins[file_info.name] = instance
		if new_plugins_found:
			self.config.write('plugins', config_plugins)
		# finally run all active modules
		for instance in self.plugins.values():
			instance.run()
	except Exception as e:
		print("Can't load plugin " + str(e))
		traceback.print_exc(file=sys.stdout)
class SplPlugin(StreamChannel):
	''' live-TV channel plugin which scrapes the channel list page of a
	LinVDR server '''
	plugin_id = 'channels_linvdr'
	plugin_names = ['LinVDR Live']

	def __init__(self, modref):
		''' inits the plugin '''
		# do the plugin specific initialisation first
		self.providers = set()
		self.movies = {}
		self.origin_dir = os.path.dirname(__file__)
		self.config = JsonStorage(
			self.plugin_id, 'backup', "config.json", [{
				'url': 'http://192.168.1.7:3000/channels.html',
				'channels_per_device': 0
			}])
		# at last announce the own plugin
		super().__init__(modref)
		modref.message_handler.add_event_handler(
			self.plugin_id, 0, self.event_listener)
		modref.message_handler.add_query_handler(
			self.plugin_id, 0, self.query_handler)

	# ------ plugin specific routines

	def loadChannels(self):
		''' fetches the channel page of each configured LinVDR server and
		fills self.movies / self.providers with one live stream entry per
		channel found '''
		for server in self.config.read('all'):
			try:
				f = requests.get(server['url'])
				content = f.text
				match = re.search(r'<ol class="items">(.*)</ol>', content,
					re.DOTALL)
				if not match:
					continue
				lines = match.group(1).split('\n')
				item_regex = re.compile(
					r'<li value=".*"><a href="(.*)" vod tvid=".*">(.*)</a>')
				plugin_name = self.plugin_names[0]
				source_type = defaults.MOVIE_TYPE_STREAM
				# NOTE(review): self.lock is not created in this class -
				# presumably provided by StreamChannel; confirm
				with self.lock:
					for line in lines:
						# use the pre-compiled pattern directly instead of
						# re.search(item_regex, ...)
						item_match = item_regex.search(line)
						if not item_match:
							continue
						full_url = urljoin(server['url'],
							item_match.group(1))
						provider = item_match.group(2)
						self.providers.add(provider)
						new_movie = MovieInfo(
							url=full_url,
							mime='video/MP2T',
							title=provider + ' Live',
							category='live',
							source=plugin_name,
							source_type=source_type,
							provider=provider,
							timestamp=0,
							duration=0,
							description='')
						if plugin_name not in self.movies:
							self.movies[plugin_name] = {}
						self.movies[plugin_name][
							new_movie['uri']] = new_movie
			except Exception as e:
				print(str(e))