def _syncEpgIds( self ):
    """Synchronize the epg_ids table with the epgIds referenced by active channels."""
    conn = DBConnection()
    # By not including inactive channels, we automatically delete epgIds that
    # are currently not active
    activeEpgIds = Channel.getUniqueEpgIdsFromDb( conn, includeRadio=True )
    conn.delayCommit( True )
    # Get epgids from database (contains channels per epg_id and strategy)
    knownEntries = EpgId.getAllFromDb( conn, includeRadio=True )
    knownEpgIds  = [ entry.epgId for entry in knownEntries ]
    # Drop entries that no active channel references any more
    for entry in knownEntries:
        if entry.epgId not in activeEpgIds:
            entry.deleteFromDb( conn )
            self._logger.info( "_syncEpgIds: removing epgId=%s" % ( entry.epgId ) )
    # Create entries for epgIds that appeared since the last sync
    newEpgIds = set( activeEpgIds ) - set( knownEpgIds )
    self._logger.info( "_syncEpgIds: newEpgIds=%r" % ( newEpgIds ) )
    for addedEpgId in newEpgIds:
        entry = EpgId( addedEpgId, "none" )
        entry.addToDb( conn )
        self._logger.info( "_syncEpgIds: adding epgId=%s" % ( entry.epgId ) )
    conn.delayCommit( False )
def _serveDBContent( self, filename, contentType = None ):
    """Serve the page *filename* from the glashart_pages table.

    Raises cherrypy.HTTPError( 404 ) when the page is unknown and
    cherrypy.HTTPError( 500 ) on a database failure.
    """
    conn = DBConnection()
    try:
        result = conn.execute( "SELECT content FROM glashart_pages WHERE page=?", [filename] )
    except:
        # Database error; report an internal server error.
        raise cherrypy.HTTPError( 500 )
    if result:
        row = result[0]
        if contentType:
            cherrypy.response.headers["Content-Type"] = contentType
        return row["content"]
    # Bug fix: the 404 used to be raised *inside* the try block, where the
    # bare except caught it and re-raised it as a 500. Raise it after the
    # handler so a missing page really yields 404.
    raise cherrypy.HTTPError( 404 )
def _serveDBContent(self, filename, contentType=None):
    """Serve the page *filename* from the glashart_pages table.

    Raises cherrypy.HTTPError(404) when the page is unknown and
    cherrypy.HTTPError(500) on a database failure.
    """
    conn = DBConnection()
    try:
        result = conn.execute(
            "SELECT content FROM glashart_pages WHERE page=?", [filename])
    except:
        # Database error; report an internal server error.
        raise cherrypy.HTTPError(500)
    if result:
        row = result[0]
        if contentType:
            cherrypy.response.headers["Content-Type"] = contentType
        return row["content"]
    # Bug fix: raising 404 inside the try block was swallowed by the bare
    # except and turned into a 500; raise it outside the handler instead.
    raise cherrypy.HTTPError(404)
def addSchedule(self, schedule):
    """Create a new schedule from the JSON payload *schedule* and reschedule."""
    self._logger.debug("addSchedule( schedule=%s )" % (schedule))
    conn = DBConnection()
    scheduleDict = json.loads(schedule)
    if not scheduleDict:
        self._logger.error("addSchedule: Unable to parse json=%s" % (schedule))
        return self._createResponse(API.STATUS_FAIL, "Error parsing json")
    newSchedule = Schedule.fromDict(scheduleDict)
    if not newSchedule:
        self._logger.error(
            "addSchedule: Unable to create new schedule from scheduleDict=%r"
            % (scheduleDict))
        return self._createResponse(
            API.STATUS_FAIL, "Unable to create Schedule object")
    # Warn (but still store the schedule) when it refers to a channel that
    # is not in the database; -1 is accepted without a lookup.
    if newSchedule.channelId != -1:
        if not Channel.getFromDb(conn, newSchedule.channelId):
            self._logger.warning(
                "addSchedule: Schedule refers to non-existing channelId=%d"
                % (newSchedule.channelId))
    newSchedule.addToDb(conn)
    Scheduler().requestReschedule()
    return self._createResponse(API.STATUS_SUCCESS, newSchedule.id)
def changeSchedule(self, id, schedule):  # @ReservedAssignment
    """Replace the existing schedule *id* with the JSON payload *schedule*."""
    self._logger.debug("changeSchedule( id=%s, schedule=%s )" % (id, schedule))
    scheduleDict = json.loads(schedule)
    if not scheduleDict:
        self._logger.error(
            "changeSchedule: Unable to create dictionary from schedule=%s"
            % (schedule))
        return self._createResponse(API.STATUS_FAIL)
    conn = DBConnection()
    currSchedule = Schedule.getFromDb(conn, int(id))
    if not currSchedule:
        self._logger.error(
            "changeSchedule: Unable to find schedule with id=%d" % (int(id)))
        return self._createResponse(API.STATUS_FAIL)
    newSchedule = Schedule.fromDict(scheduleDict, int(id))
    # Warn (but continue) when the new schedule points at an unknown channel.
    if newSchedule.channelId != -1:
        if not Channel.getFromDb(conn, newSchedule.channelId):
            self._logger.warning(
                "changeSchedule: Schedule refers to non-existing channelId=%d"
                % (newSchedule.channelId))
    if currSchedule != newSchedule:
        newSchedule.addToDb(conn)
        Scheduler().requestReschedule()
    else:
        self._logger.warning("changeSchedule: no changes in Schedule")
    return self._createResponse(API.STATUS_SUCCESS)
def getRecordingMarker( self, id ): # @ReservedAssignment
    """Return the playback marker of recording *id*."""
    self._logger.debug( "getRecordingMarker( id=%s )" % ( id ) )
    recording = Recording.getFromDb( DBConnection(), id )
    if not recording:
        return self._createResponse( API.STATUS_FAIL )
    return self._createResponse( API.STATUS_SUCCESS, { "marker": recording.marker } )
def _grabAll(self):
    """Grab EPG data for every known epgId, pruning programs older than today."""
    self._logger.debug("EpgProvider._grabAll")
    self._logger.warning("Grabbing EPG for all channels.")
    self._syncEpgIds()
    db = DBConnection()
    if not Channel.getAllFromDb(db):
        self._logger.critical("No channels in the database. Script error?")
        return
    # Get epgids from database (contains channels per epg_id and strategy)
    epgIds = EpgId.getAllFromDb(db)
    today = time.localtime()
    startOfToday = datetime.datetime(today.tm_year, today.tm_mon, today.tm_mday)
    # Remove program older than this day
    self._logger.warning("Removing EPG from before %s" % (getTimestamp(startOfToday)))
    EpgProgram.deleteByTimeFromDB(db, getTimestamp(startOfToday))
    for epgId in epgIds:
        # Stop early when the provider is being shut down.
        if not self._running:
            break
        self._grabEpgForChannel(epgId=epgId)
    if self._running:
        self._logger.warning("Grabbing EPG data complete.")
    else:
        self._logger.warning("Grabbing EPG interrupted.")
def default(self, *args, **kwargs):
    """Serve the recording file addressed by the first URL path segment."""
    self._logger.debug("default( %s, %s )" % (str(args), str(kwargs)))
    conn = DBConnection()
    recordingId = args[0]
    recording = Recording.getFromDb(conn, recordingId)
    if not recording:
        return self._createResponse(API.STATUS_FAIL)
    generalConfig = GeneralConfig(Config())
    filename = os.path.join(generalConfig.recordingsPath, recording.filename)
    if not os.path.exists(filename):
        return self._createResponse(API.STATUS_FAIL)
    # Let CherryPy stream the file (handles Content-Type/Length and ranges).
    return serve_file(os.path.abspath(filename),
                      content_type=mimetypes.guess_type(filename)[0])
def getChannelList(self, tv=True, radio=False, unicast=True,
                   includeScrambled=True, includeHd=True):
    """Return all matching channels as dicts, picking the best stream protocol."""
    self._logger.debug(
        "getChannelList( tv=%s, radio=%s, unicast=%s, includeScrambled=%s, includeHd=%s )"
        % (tv, radio, unicast, includeScrambled, includeHd))
    conn = DBConnection()
    # Prefer tsdecrypt when the platform supports it (it also makes
    # scrambled channels usable); multicast only for non-unicast clients.
    if IsTsDecryptSupported():
        protocol = InputStreamProtocol.TSDECRYPT
        includeScrambled = True
    elif not unicast:
        protocol = InputStreamProtocol.MULTICAST
    else:
        protocol = InputStreamProtocol.HTTP
    channelsArray = []
    for channel in Channel.getAllFromDb(conn, includeRadio=radio, tv=tv):
        channelJson = channel.toDict(protocol, includeScrambled, includeHd)
        if channelJson:
            channelsArray.append(channelJson)
    return self._createResponse(API.STATUS_SUCCESS, channelsArray)
def setChannelList( self, channelList ):
    """Replace the pending channel list with the channels in *channelList* (JSON)."""
    self._logger.debug( "setChannelList( %s )" % ( channelList ) )
    try:
        channels = json.loads( channelList )
        conn = DBConnection()
        conn.delayCommit( True )
        newChannelNumbers = []
        for channel in channels:
            channelId  = -1
            channelOld = PendingChannel.getByNumberFromDb( conn, channel["id"] )
            if channelOld:
                channelId = channelOld.id
            channelNew = self._getChannelFromJson( channel, channelId )
            # Bug fix: check for a failed conversion *before* touching
            # channelNew.number / channelNew.dump(), which raised
            # AttributeError on None in the original ordering.
            if not channelNew:
                self._logger.error( "setChannelList: unable to create channel for channel=%s", ( channel ) )
                continue
            newChannelNumbers.append( channelNew.number )
            self._logger.debug( "setChannelList: processing channel: %s" % ( channelNew.dump() ) )
            if not channelOld:
                self._logger.info( "setChannelList: adding channel: %i - %s" % ( channelNew.number, channelNew.name ) )
                channelNew.addToDb( conn )
            elif channelOld != channelNew:
                self._logger.info( "setChannelList: updating channel: %i - %s" % ( channelNew.number, channelNew.name ) )
                channelNew.addToDb( conn )
            else:
                self._logger.debug( "setChannelList: same channel: %i - %s" % ( channelNew.number, channelNew.name ) )
        # Remove pending channels that are no longer part of the new list.
        currentChannels       = PendingChannel.getAllFromDb( conn, includeRadio=True, tv=True )
        currentChannelNumbers = [ channel.number for channel in currentChannels ]
        removedChannelNumbers = set( currentChannelNumbers ).difference( set( newChannelNumbers ) )
        for number in removedChannelNumbers:
            channel = PendingChannel.getByNumberFromDb( conn, number )
            if channel:
                self._logger.info( "setChannelList: remove channel: %i - %s" % ( channel.number, channel.name ) )
                channel.deleteFromDb( conn )
        # Consistency fix: every sibling that calls delayCommit( True )
        # closes the batch with delayCommit( False ); this one did not.
        conn.delayCommit( False )
        return self._createResponse( API.STATUS_SUCCESS, { "numChannels": len( channels ) } )
    except:
        self._logger.exception( "setChannelList: exception: channelList=%s" % ( channelList ) )
        return self._createResponse( API.STATUS_FAIL, { "numChannels": 0 } )
def search(self, query, where, shortForm=True):
    """Search programs/channels/persons/recordings/schedules for *query*.

    *where* is a comma-separated list of categories to search in; with
    *shortForm* only titles/short entries are returned, de-duplicated.
    """
    self._logger.debug("search( query=%s, where=%s, shortForm=%d" %
                       (query, where, shortForm))
    conn = DBConnection()
    results = {}
    if conn:
        categories = where.split(',')
        if "programs" in categories:
            matches = []
            programs = EpgProgram.getByTitleFromDb(
                conn, query, searchWhere=ProgramAbstract.SEARCH_TITLE)
            for program in programs:
                if shortForm:
                    # Short form: de-duplicated list of titles only.
                    if program.title not in matches:
                        matches.append(program.title)
                else:
                    matches.append(program.toDict())
            results["programs"] = matches
        if "channels" in categories:
            channels = Channel.search(conn, query, shortForm)
            if shortForm:
                results["channels"] = channels
            else:
                results["channels"] = [channel.toDict() for channel in channels]
        if "persons" in categories:
            persons = Person.search(conn, query, shortForm)
            if shortForm:
                results["persons"] = persons
            else:
                results["persons"] = [person.toDict() for person in persons]
        if "recordings" in categories:
            matches = []
            for recording in Recording.getByTitleFromDb(conn, query):
                if shortForm:
                    if recording.title not in matches:
                        matches.append(recording.title)
                else:
                    matches.append(recording.toDict())
            results["recordings"] = matches
        if "schedules" in categories:
            schedules = Schedule.search(conn, query, shortForm)
            if shortForm:
                results["schedules"] = schedules
            else:
                results["schedules"] = [schedule.toDict() for schedule in schedules]
    return self._createResponse(API.STATUS_SUCCESS, results)
def getEpgProgramById(self, id):
    """Return the EPG program with database id *id* as a dict."""
    self._logger.debug("getEpgProgramById( id=%d )" % (id))
    program = EpgProgram.getFromDb(DBConnection(), id)
    if not program:
        return self._createResponse(API.STATUS_FAIL)
    return self._createResponse(API.STATUS_SUCCESS, program.toDict())
def getRecordingById( self, id ): # @ReservedAssignment
    """Return recording *id* as a dict."""
    self._logger.debug( "getRecordingById( id=%s )" % ( id ) )
    recording = Recording.getFromDb( DBConnection(), id )
    if not recording:
        return self._createResponse( API.STATUS_FAIL )
    return self._createResponse( API.STATUS_SUCCESS, recording.toDict() )
def setRecordingMarker( self, id, marker ): # @ReservedAssignment
    """Store playback marker *marker* on recording *id*."""
    self._logger.debug( "setRecordingMarker( id=%s, marker=%s )" % ( id, marker ) )
    conn = DBConnection()
    recording = Recording.getFromDb( conn, id )
    if not recording:
        return self._createResponse( API.STATUS_FAIL )
    recording.marker = marker
    # NOTE(review): %d assumes recording.marker is numeric after assignment;
    # confirm the Recording.marker property converts string input.
    self._logger.warn( "setRecordingMarker: marker=%d" % ( recording.marker ) )
    recording.addToDb( conn )
    return self._createResponse( API.STATUS_SUCCESS )
def getScheduleList(self):
    """Return all schedules as dicts (empty serializations are dropped)."""
    self._logger.debug("getScheduleList()")
    schedules = Schedule.getAllFromDb(DBConnection())
    schedulesArray = [entry for entry in
                      (schedule.toDict() for schedule in schedules) if entry]
    return self._createResponse(API.STATUS_SUCCESS, schedulesArray)
def getNowNextProgramList(self):
    """Return the now/next programs grouped per epgId."""
    self._logger.debug("getNowNextProgramList()")
    epgDict = {}
    for epg in EpgProgram.getNowNextFromDb(DBConnection()):
        epgDict.setdefault(epg.epgId, []).append(epg.toDict())
    return self._createResponse(API.STATUS_SUCCESS, epgDict)
def getChannelByIpPort(self, ip, port):
    """Look up the channel that streams from *ip*:*port*."""
    self._logger.debug("getChannelByIpPort( ip=%s, port=%d )" % (ip, port))
    conn = DBConnection()
    channelId = ChannelUrl.getChannelByIpPortFromDb(conn, ip, int(port))
    if not channelId:
        return self._createResponse(API.STATUS_FAIL)
    channel = Channel.getFromDb(conn, channelId)
    if not channel:
        return self._createResponse(API.STATUS_FAIL)
    return self._createResponse(API.STATUS_SUCCESS, channel.toDict())
def default( self, *args, **kwargs ): self._logger.debug( "default( %s, %s )" % ( str( args ), str( kwargs ) ) ) #for header in cherrypy.request.headers: # self._logger.debug( "default: header: %s: %s" % ( header, cherrypy.request.headers[header] ) ) API._parseArguments( [("includeScrambled", types.BooleanType), ("includeHd", types.BooleanType)] ) includeScrambled = False includeHd = True if "includeScrambled" in kwargs: includeScrambled = kwargs["includeScrambled"] if "includeHd" in kwargs: includeHd = kwargs["includeHd"] conn = DBConnection() channelId = list( args )[0] channel = Channel.getFromDb( conn, channelId ) if channel: url = None protocol = InputStreamProtocol.HTTP if IsTsDecryptSupported(): protocol = InputStreamProtocol.TSDECRYPT includeScrambled = True if includeHd and "hd" in channel.urls.keys() and ( includeScrambled or not channel.urls["hd"].scrambled ): url = channel.urls["hd"] elif includeHd and "hd+" in channel.urls.keys() and ( includeScrambled or not channel.urls["hd+"].scrambled ): url = channel.urls["hd+"] elif "sd" in channel.urls.keys() and ( includeScrambled or not channel.urls["sd"].scrambled ): url = channel.urls["sd"] if url: tuner = VirtualTuner.getTuner( url, protocol ) if tuner: listenerId = uuid.uuid1() tuner.addListener( listenerId ) cherrypy.response.headers["Content-Type"] = "video/mp2t" def content(): self._logger.info( "default: opened tuner" ) data = tuner.read( listenerId ) while data and len( data ) > 0: yield data data = tuner.read( listenerId ) self._logger.info( "default: EOS" ) tuner.removeListener( listenerId ) return content() else: return self._createResponse( API.STATUS_FAIL ) else: return self._createResponse( API.STATUS_FAIL ) else: return self._createResponse( API.STATUS_FAIL )
def getScheduleByTitleAndChannelId(self, title, channelId):
    """Find the schedule matching *title* on channel *channelId*."""
    self._logger.debug(
        "getScheduleByTitleAndChannelId( title=%s, channelId=%d )"
        % (title, channelId))
    schedule = Schedule.getByTitleAndChannelIdFromDb(
        DBConnection(), title, channelId)
    if not schedule:
        return self._createResponse(API.STATUS_FAIL)
    return self._createResponse(API.STATUS_SUCCESS, schedule.toDict())
def getRecordingList( self, offset=None, count=None, sort=None ):
    """Return all recordings as dicts, honouring paging and sort options."""
    self._logger.debug( "getRecordingList( offset=%s, count=%s, sort=%s )" % ( offset, count, sort ) )
    conn = DBConnection()
    recordings = Recording.getAllFromDb( conn, offset=offset, count=count, sort=sort )
    # Keep only recordings that serialize to a non-empty dict.
    recordingsArray = [ entry for entry in ( recording.toDict() for recording in recordings ) if entry ]
    return self._createResponse( API.STATUS_SUCCESS, recordingsArray )
def _haveEnoughEpgData(self):
    """Return True when more than one day of EPG data lies ahead of now."""
    lastProgram = EpgProgram.getTimestampLastProgram(DBConnection())
    now = getTimestamp()
    secondsPerDay = 24 * 60 * 60
    if now < lastProgram:
        daysLeft = float(lastProgram - now) / secondsPerDay
        self._logger.warning(
            "Currently %.1f days of Epg data in database." % (daysLeft))
    # Not enough data when less than a full day of programs remains.
    return now + secondsPerDay <= lastProgram
def api_js(self):
    """Serve api.js, rendered from the template with the current page symbols.

    Falls back to the translated copy stored in the database when no
    database connection is available.
    """
    conn = DBConnection()
    if not conn:
        return self._serveDBContent("api.js", "application/javascript")
    symbols = PageSymbol.getAllFromDb(conn)
    template = Template(
        file=os.path.join(DATA_ROOT, "assets/js/api.js.tmpl"),
        searchList=[symbols])
    cherrypy.response.headers["Content-Type"] = "application/javascript"
    return template.respond()
def deleteRecording( self, id, rerecord=False ): # @ReservedAssignment
    """Delete recording *id*; with *rerecord* the program is scheduled again."""
    self._logger.debug( "deleteRecording( id=%s, rerecord=%s )" % ( id, rerecord ) )
    conn = DBConnection()
    recording = Recording.getFromDb( conn, id )
    if not recording:
        self._logger.warning( "deleteRecording: recording with id=%d does not exist" % ( id ) )
        return self._createResponse( API.STATUS_FAIL )
    recording.deleteFromDb( conn, rerecord )
    # A re-record request means the scheduler must pick the program up again.
    if rerecord:
        Scheduler().requestReschedule()
    return self._createResponse( API.STATUS_SUCCESS )
def _syncEpgIds(self):
    """Bring the epg_ids table in line with the epgIds used by active channels."""
    conn = DBConnection()
    # By not including inactive channels, we automatically delete epgIds that
    # are currently not active
    referencedIds = Channel.getUniqueEpgIdsFromDb(conn, includeRadio=True)
    conn.delayCommit(True)
    # Get epgids from database (contains channels per epg_id and strategy)
    storedEntries = EpgId.getAllFromDb(conn, includeRadio=True)
    storedIds = [entry.epgId for entry in storedEntries]
    # Remove stored entries no longer referenced by an active channel.
    for entry in storedEntries:
        if entry.epgId not in referencedIds:
            entry.deleteFromDb(conn)
            self._logger.info("_syncEpgIds: removing epgId=%s" % (entry.epgId))
    # Add referenced epgIds that are not stored yet, with strategy "none".
    newEpgIds = set(referencedIds).difference(set(storedIds))
    self._logger.info("_syncEpgIds: newEpgIds=%r" % (newEpgIds))
    for epgIdValue in newEpgIds:
        entry = EpgId(epgIdValue, "none")
        entry.addToDb(conn)
        self._logger.info("_syncEpgIds: adding epgId=%s" % (entry.epgId))
    conn.delayCommit(False)
def getEpgForChannel(self, channelId, startTime=None, endTime=None):
    """Return EPG programs for one channel, optionally limited to a time window."""
    # Bug fix: "%d" raised TypeError whenever startTime/endTime were left
    # at their None defaults; "%s" renders both ints and None safely.
    self._logger.debug(
        "getEpgForChannel( channelId=%s, startTime=%s, endTime=%s )"
        % (channelId, startTime, endTime))
    conn = DBConnection()
    channel = Channel.getFromDb(conn, channelId)
    epgData = EpgProgram.getAllByEpgIdFromDb(conn, channel.epgId,
                                             startTime, endTime)
    return self._createResponse(API.STATUS_SUCCESS,
                                [epg.toDict() for epg in epgData])
def getEpg(self, startTime=None, endTime=None):
    """Return all EPG programs grouped per epgId, optionally time-limited."""
    # Bug fix: "%d" raised TypeError whenever startTime/endTime were left
    # at their None defaults; "%s" renders both ints and None safely.
    self._logger.debug("getEpg( startTime=%s, endTime=%s )"
                       % (startTime, endTime))
    conn = DBConnection()
    epgData = EpgProgram.getAllByEpgIdFromDb(conn, None, startTime, endTime)
    epgDict = {}
    for epg in epgData:
        if epg.epgId not in epgDict:
            epgDict[epg.epgId] = []
        epgDict[epg.epgId].append(epg.toDict())
    return self._createResponse(API.STATUS_SUCCESS, epgDict)
def getEpgProgramsByTitleAndEpgId(self, title, epgId=""):
    """Return EPG programs matching *title*, optionally limited to one epgId,
    grouped per epgId."""
    # Bug fix: the debug message named the wrong method
    # (getEpgProgramsByTitleAndChannelId), which made log traces misleading.
    self._logger.debug(
        "getEpgProgramsByTitleAndEpgId( title=%s, epgId=%s )"
        % (title, epgId))
    conn = DBConnection()
    # An empty epgId means "search across all channels".
    if epgId == "":
        epgId = None
    epgDict = {}
    for epg in EpgProgram.getByTitleFromDb(conn, title, epgId):
        epgDict.setdefault(epg.epgId, []).append(epg.toDict())
    return self._createResponse(API.STATUS_SUCCESS, epgDict)
def deleteSchedule(self, id):  # @ReservedAssignment
    """Delete schedule *id* and trigger a reschedule."""
    self._logger.debug("deleteSchedule( id=%s )" % (id))
    conn = DBConnection()
    schedule = Schedule.getFromDb(conn, int(id))
    if not schedule:
        self._logger.error(
            "deleteSchedule: Unable to find schedule with id=%d" % (int(id)))
        return self._createResponse(API.STATUS_FAIL)
    schedule.deleteFromDb(conn)
    Scheduler().requestReschedule()
    return self._createResponse(API.STATUS_SUCCESS)
def aminoPVRProcess():
    """Main process entry point: prepare the database, construct the global
    services, start the web/RTSP servers and kick off the background workers.

    Exits the process when the database cannot be initialised or the web
    server port is already in use.
    """
    logger.debug('aminoPVRProcess')
    # All long-lived services are kept in module-level globals.
    global epgGrabber, contentProvider, vcasProvider, recorder, scheduler, resourceMonitor, watchdog
    conn = DBConnection()
    if conn:
        # upgrading the db
        upgradeDatabase(conn, InitialSchema)
        # fix up any db problems
        sanityCheckDatabase(conn, MainSanityCheck)
    else:
        logger.error("Unable to initialise database!")
        sys.exit()
    generalConfig = GeneralConfig(Config())
    resourceMonitor = ResourceMonitor()
    watchdog = Watchdog()
    recorder = Recorder()
    scheduler = Scheduler()
    startRtspServer()
    if generalConfig.provider == "glashart":
        import providers.glashart as provider
        provider.RegisterProvider()
    # NOTE(review): `provider` is only bound when the configured provider is
    # "glashart"; any other value makes the provider.* calls below raise
    # NameError — confirm whether other providers were ever intended.
    try:
        initWebserver(generalConfig.serverPort)
    except IOError:
        logger.error(
            u"Unable to start web server, is something else running on port %d?"
            % (generalConfig.serverPort))
        sys.exit()
    scheduler.start()
    scheduler.requestReschedule()
    recorder.start()
    epgGrabber = provider.EpgProvider()
    epgGrabber.start()
    # epgGrabber.requestEpgUpdate()
    contentProvider = provider.ContentProvider()
    contentProvider.start()
    # contentProvider.requestContentUpdate()
    vcasProvider = provider.VcasProvider()
def createM3U(self, protocol=InputStreamProtocol.TSDECRYPT,
              includeScrambled=False, includeHd=True):
    """Build an M3U playlist covering all channels and serve it as text."""
    self._logger.info(
        "createM3U( protocol=%d, includeScrambled=%s, includeHd=%s )"
        % (protocol, includeScrambled, includeHd))
    conn = DBConnection()
    channels = Channel.getAllFromDb(conn)
    # tsdecrypt streams expose a single url per channel.
    oneUrlPerChannel = protocol == InputStreamProtocol.TSDECRYPT
    # Collect entries and join once: repeated "m3u +=" concatenation is
    # quadratic in the number of channels.
    parts = ["#EXTM3U\n"]
    for channel in channels:
        parts.append(channel.toM3UEntry(protocol, includeScrambled,
                                        includeHd, oneUrlPerChannel))
    cherrypy.response.headers["Content-Type"] = "text/plain"
    return "".join(parts)
def getEpgInfo(self):
    """Return summary statistics about the EPG database."""
    self._logger.debug("getEpgInfo()")
    lastProgram = getTimestamp()
    lastUpdate = 0
    numPrograms = 0
    conn = DBConnection()
    if conn:
        lastProgram = EpgProgram.getTimestampLastProgram(conn)
        numPrograms = EpgProgram.getNumberOfPrograms(conn)
    # Ask the registered EPG provider (if any) when it last ran.
    if aminopvr.providers.epgProvider:
        epgProvider = aminopvr.providers.epgProvider()
        lastUpdate = epgProvider.getLastUpdate()
    return self._createResponse(API.STATUS_SUCCESS, {
        "provider": "",
        "num_programs": numPrograms,
        "last_update": lastUpdate,
        "last_program": lastProgram
    })
def _translateContent(self):
    """Parse and translate the provider's web UI pages (index.xhtml, code.js,
    style.css, api.js) and store the results in the glashart_pages table."""
    indexContent, title, codeJsPath, styleCssPath = self._parseIndexPage()
    if title and indexContent:
        codeJsContent, symbolNames = self._parseCodeJs(codeJsPath)
        if codeJsContent:
            indexContent = self._modifyIndexPage(indexContent)
            codeJsContent = self._modifyCodeJs(codeJsContent, symbolNames)
            styleCssContent = self._getStyleCss(styleCssPath)
            apiJsContent = self._modifyApiJs(symbolNames)
            if indexContent and codeJsContent and styleCssContent and apiJsContent:
                self._logger.warning(
                    "_translateContent: content translated: title=%s" % (title))
                conn = DBConnection()
                if conn:
                    # The select/update/insert upsert was copy-pasted four
                    # times; it is factored out into _storePage below.
                    self._storePage(conn, "index.xhtml", indexContent)
                    self._storePage(conn, "code.js", codeJsContent)
                    self._storePage(conn, "style.css", styleCssContent)
                    self._storePage(conn, "api.js", apiJsContent)
            if symbolNames:
                conn = DBConnection()
                if conn:
                    PageSymbol.addAllDictToDb(conn, symbolNames)

def _storePage(self, conn, page, content):
    """Insert or update one row of glashart_pages (upsert helper)."""
    row = conn.execute("SELECT * FROM glashart_pages WHERE page=?", (page, ))
    if row:
        conn.execute("UPDATE glashart_pages SET content=? WHERE page=?",
                     (content, page))
    else:
        conn.insert("INSERT INTO glashart_pages (page, content) VALUES (?, ?)",
                    (page, content))
def _grabEpgForChannel( self, channel=None, epgId=None ):
    """Download and store EPG data for one channel/epgId.

    Either *channel* or *epgId* must be given; with *channel* the EpgId
    record is looked up from its channel.epgId. Programs are fetched as a
    gzipped JSON file, merged with what is already in the database (detailed
    fields are copied forward when unchanged), and stored in one delayed
    commit. Download failures are tracked by appending "_fail_<n>" to the
    epgId strategy; after 5 failures the epgId is skipped.
    """
    conn = DBConnection()
    if channel:
        epgId = EpgId.getFromDb( conn, channel.epgId )
        self._logger.info( "Grabbing EPG for channel: %s (%s; method: %s)" % ( channel.name, channel.epgId, epgId.strategy ) )
        if not epgId:
            return
    else:
        self._logger.info( "Grabbing EPG for epgId: %s (method: %s)" % ( epgId.epgId, epgId.strategy ) )
    # Check if _fail_# is behind strategy
    # This is to indicate epg grabbing for this epgId failed previously
    strategy   = epgId.strategy
    strategyRe = re.compile( r'_fail_(?P<fail>\d+)' )
    failMatch  = strategyRe.search( strategy )
    failCount  = 0
    if failMatch:
        failCount = int( failMatch.group( "fail" ) )
        strategy  = epgId.strategy.split( '_' )[0]
    # We're going to attempt to grab EPG information for this channel 5 times
    # before we stop grabbing this epgId in the future.
    if failCount < 5:
        now             = time.localtime()
        nowDay          = datetime.datetime( now[0], now[1], now[2] )
        daysDetailDelta = datetime.timedelta( days = 3 )
        epgFilename     = "/%s.json.gz" % ( epgId.epgId )
        epgUrl          = self._glashartConfig.epgChannelsPath + epgFilename
        # Existing programs keyed by their provider-side originalId, so new
        # downloads can be matched against what is already stored.
        currentPrograms     = EpgProgram.getAllByEpgIdFromDb( conn, epgId.epgId )
        currentProgramsDict = { currProgram.originalId: currProgram for currProgram in currentPrograms }
        newProgramsDict     = {}
        content, _, _ = getPage( epgUrl )
        if content:
            fileHandle = gzip.GzipFile( fileobj=StringIO( content ) )
            epgData    = json.loads( fileHandle.read() )
            # If strategy has changed (working after (a few) failed attempts)
            if epgId.strategy != strategy:
                epgId.strategy = strategy
                epgId.addToDb( conn )
            numPrograms             = 0
            numProgramsDetail       = 0
            numProgramsDetailFailed = 0
            numProgramsNew          = 0
            numProgramsUpdated      = 0
            for program in epgData:
                # Abort mid-grab when the provider is shutting down.
                if not self._running:
                    break
                numPrograms += 1
                programNew = self._getProgramFromJson( epgId.epgId, program )
                if programNew.originalId in newProgramsDict:
                    self._logger.warning( "Program with originalId %d already in newProgramsDict" % ( programNew.originalId ) )
                newProgramsDict[programNew.originalId] = programNew
                updateDetailedData = True
                programOld         = None
                if programNew.originalId in currentProgramsDict:
                    programOld = currentProgramsDict[programNew.originalId]
                # If the old program has detailed info, copy those fields
                # TODO: do this somewhere else
                if programOld and programOld.detailed:
                    programNew.subtitle       = programOld.subtitle
                    programNew.description    = programOld.description
                    programNew.aspectRatio    = programOld.aspectRatio
                    programNew.parentalRating = programOld.parentalRating
                    programNew.genres         = programOld.genres
                    programNew.actors         = programOld.actors
                    programNew.directors      = programOld.directors
                    programNew.presenters     = programOld.presenters
                    programNew.ratings        = programOld.ratings
                    programNew.detailed       = programOld.detailed
                # Now, compare the old program and the new program
                # Are they the same, then we don't need to download detailed information
                if programOld and programOld.detailed and programNew == programOld:
                    programNew         = programOld
                    updateDetailedData = False
                if updateDetailedData:
                    # "default" strategy grabs details only for the next 3
                    # days; "full" grabs details for everything.
                    if ( ( epgId.strategy == "default" and (nowDay + daysDetailDelta) > datetime.datetime.fromtimestamp( programNew.startTime ) ) or ( epgId.strategy == "full" ) ):
                        # Small random delay to avoid hammering the server.
                        time.sleep( random.uniform( 0.5, 1.0 ) )
                        programNew, grabbed = self._grabDetailedEpgForProgram( programNew )
                        if grabbed:
                            numProgramsDetail += 1
                        else:
                            # if more than 10 detailed program information grabs failed, set strategy to none.
                            numProgramsDetailFailed += 1
                            if numProgramsDetailFailed == 10:
                                self._logger.error( "Couldn't download at least 10 detailed program information files, so setting strategy to 'none', but do not store" )
                                epgId.strategy = "none"
            # Store all new/changed programs in a single delayed commit.
            conn.delayCommit( True )
            for programId in newProgramsDict:
                programNew = newProgramsDict[programId]
                programOld = None
                if programNew.originalId in currentProgramsDict:
                    programOld = currentProgramsDict[programNew.originalId]
                if not programOld or programNew != programOld:
                    if programOld:
                        self._logger.debug( "Updated program: id = %s" % ( programNew.originalId ) )
                        self._logger.debug( "Start time: %s > %s" % ( str( programOld.startTime ), str( programNew.startTime ) ) )
                        self._logger.debug( "End time:   %s > %s" % ( str( programOld.endTime ), str( programNew.endTime ) ) )
                        self._logger.debug( "Name:       %s > %s" % ( repr( programOld.title ), repr( programNew.title ) ) )
                        programNew.id       = programOld.id
                        numProgramsUpdated += 1
                    else:
                        numProgramsNew += 1
                    try:
                        programNew.addToDb( conn )
                    except:
                        self._logger.exception( programNew.dump() )
            conn.delayCommit( False )
            if self._running:
                self._logger.debug( "Num programs:        %i" % ( numPrograms ) )
                self._logger.debug( "Num program details: %i" % ( numProgramsDetail ) )
                self._logger.info( "Num new programs:     %i" % ( numProgramsNew ) )
                self._logger.info( "Num updated programs: %i" % ( numProgramsUpdated ) )
                if numProgramsNew == 0:
                    self._logger.warning( "No new programs were added for epgId: %s" % ( epgId.epgId ) )
        else:
            # Download failed: bump the failure counter in the strategy.
            self._logger.warning( "Unable to download EPG information for epgId: %s" % ( epgId.epgId ) )
            failCount += 1
            epgId.strategy = "%s_fail_%d" % ( strategy, failCount )
            epgId.addToDb( conn )
    else:
        self._logger.info( "Downloading of EPG information for epgId: %s skipped becaused it failed too many times" % ( epgId.epgId ) )
for recordedProgram in mythTvRecordedPrograms: logger.debug( "%s" % recordedProgram.dump() ) mythTvChannelsDict = { x.chanId: x for x in mythTvChannels } mythTvIptvChannelsDict = { x.chanId: x for x in mythTvIptvChannels } mythTvRecordedDict = { "%i_%i" % ( x.chanId, x.programStart ): x for x in mythTvRecorded } mythTvOldRecordedDict = { "%i_%i" % ( x.chanId, x.startTime ): x for x in mythTvOldRecorded } mythTvRecordedProgramsDict = { "%i_%i" % ( x.chanId, x.startTime ): x for x in mythTvRecordedPrograms } mythTvRecordMap = {} mythTvChannelMap = {} mythTvChannelUrlTypeMap = {} # TODO: storage path: storagegroup.groupname == 'Default' conn = DBConnection() if conn: try: # Create a channel map to map channels between MythTV and AminoPVR for mythTvChannel in mythTvChannels: if mythTvChannel.chanId in mythTvIptvChannelsDict: mythTvIptvChannel = mythTvIptvChannelsDict[mythTvChannel.chanId] else: logger.warning( "channel with chanId=%i not found in mythTvIptvChannelsDict" % ( mythTvChannel.chanId ) ) # The xmlTvId in MythTV is known as epgId in AminoPVR # Search for AminoPVR channels with the same epgId # Then match the Iptv Channel Url against AminoPVR ChannelUrl's epgId = mythTvChannel.xmlTvId currChannels = Channel.getAllByEpgIdFromDb( conn, epgId, includeInactive=True )
mythTvOldRecordedDict = { "%i_%i" % (x.chanId, x.startTime): x for x in mythTvOldRecorded } mythTvRecordedProgramsDict = { "%i_%i" % (x.chanId, x.startTime): x for x in mythTvRecordedPrograms } mythTvRecordMap = {} mythTvChannelMap = {} mythTvChannelUrlTypeMap = {} # TODO: storage path: storagegroup.groupname == 'Default' conn = DBConnection() if conn: try: # Create a channel map to map channels between MythTV and AminoPVR for mythTvChannel in mythTvChannels: if mythTvChannel.chanId in mythTvIptvChannelsDict: mythTvIptvChannel = mythTvIptvChannelsDict[ mythTvChannel.chanId] else: logger.warning( "channel with chanId=%i not found in mythTvIptvChannelsDict" % (mythTvChannel.chanId)) # The xmlTvId in MythTV is known as epgId in AminoPVR # Search for AminoPVR channels with the same epgId
def activatePendingChannels( self ):
    """Promote the pending channel list to the active channel list.

    Each pending channel is matched against the existing channels (first by
    number, then — on an epgId mismatch — by name against channels sharing
    the same epgId). Matched channels are updated in place (keeping their
    id and the stored scrambled flags), unmatched ones are added, and
    active channels that are no longer pending are marked inactive.
    Returns the resulting active channel list as an API response.
    """
    self._logger.debug( "activatePendingChannels()" )
    conn = DBConnection()
    pendingChannels       = PendingChannel.getAllFromDb( conn, includeInactive=True, includeRadio=True, tv=True )
    pendingChannelNumbers = [ channel.number for channel in pendingChannels ]
    conn.delayCommit( True )
    for channel in pendingChannels:
        currChannel = Channel.getByNumberFromDb( conn, channel.number )
        if currChannel and currChannel.epgId != channel.epgId:
            # Found a channel on the same channel number, but epgId is different
            # Find another match.
            self._logger.info( "activatePendingChannels: epgId mismatch for channel %i - %s: %s != %s" % ( channel.number, channel.name, channel.epgId, currChannel.epgId ) )
            # If channel name is the same (or partially the same), then they must have changed epgId
            # Else, try to find a channel that would match
            if (channel.name != currChannel.name) and \
               (not channel.name in currChannel.name) and \
               (not currChannel.name in channel.name):
                currChannel   = None
                epgIdChannels = Channel.getAllByEpgIdFromDb( conn, channel.epgId, includeInactive=True, includeRadio=True )
                for epgIdChannel in epgIdChannels:
                    if (epgIdChannel.name == channel.name) or \
                       (channel.name in epgIdChannel.name) or \
                       (epgIdChannel.name in channel.name):
                        currChannel = epgIdChannel
                        break
        # TODO: if still no match is found (based on epgId), then look for similar names
        # and maybe equal urls
        if currChannel:
            # Convert PendingChannel to Channel but keep channel id
            newCurrChannel = Channel.copy( channel, currChannel.id )
            # Keep the scrambled setting from ChannelUrl's currently in the Db.
            # This setting cannot be retrieved from the source
            for key in currChannel.urls.keys():
                if newCurrChannel.urls.has_key( key ):
                    newCurrChannel.urls[key].scrambled = currChannel.urls[key].scrambled
            # Has the channel really changed?
            if newCurrChannel != currChannel:
                self._logger.info( "activatePendingChannels: existing channel: %i - %s" % ( channel.number, channel.name ) )
                # Hmm, channel number and name are the same, but epgId is different
                if newCurrChannel.epgId != currChannel.epgId:
                    self._logger.info( "activatePendingChannels: epgId has changed: %s > %s" % ( currChannel.epgId, newCurrChannel.epgId ) )
                # Make sure the changed channel is activated (again)
                newCurrChannel.inactive = False
                # Refresh logo/thumbnail when their filenames changed.
                if currChannel.logo != "" and os.path.basename( newCurrChannel.logo ) != os.path.basename( currChannel.logo ):
                    currChannel.removeLogo( conn )
                if currChannel.thumbnail != "" and os.path.basename( newCurrChannel.thumbnail ) != os.path.basename( currChannel.thumbnail ):
                    currChannel.removeThumbnail( conn )
                # Download the logo and thumbnail for this channel
                newCurrChannel.downloadLogoAndThumbnail()
                newCurrChannel.addToDb( conn )
        else:
            self._logger.info( "activatePendingChannels: new channel: %i - %s" % ( channel.number, channel.name ) )
            newChannel = Channel.copy( channel )
            newChannel.downloadLogoAndThumbnail()
            newChannel.addToDb( conn )
    # Deactivate (do not delete) channels that are no longer pending.
    currentChannels       = Channel.getAllFromDb( conn, includeInactive=True, includeRadio=True, tv=True )
    currentChannelNumbers = [ channel.number for channel in currentChannels ]
    removedChannelNumbers = set( currentChannelNumbers ).difference( set( pendingChannelNumbers ) )
    self._logger.info( "activatePendingChannels: %i, %i, %i" % ( len( set( currentChannelNumbers ) ), len( set( pendingChannelNumbers ) ), len( removedChannelNumbers ) ) )
    for number in removedChannelNumbers:
        currChannel = Channel.getByNumberFromDb( conn, number )
        if not currChannel.inactive:
            self._logger.info( "activatePendingChannels: inactive channel: %i - %s" % ( currChannel.number, currChannel.name ) )
            currChannel.inactive = True
            currChannel.addToDb( conn )
    conn.delayCommit( False )
    # Return the now-active channel list.
    channels      = Channel.getAllFromDb( conn, includeRadio=True, tv=True )
    channelsArray = []
    for channel in channels:
        channelJson = channel.toDict( InputStreamProtocol.HTTP, includeScrambled=False, includeHd=True )
        if channelJson:
            channelsArray.append( channelJson )
    return self._createResponse( API.STATUS_SUCCESS, channelsArray )
def _translateContent( self ):
    """Parse and translate the provider web UI pages and persist them in the
    glashart_pages table."""
    indexContent, title, codeJsPath, styleCssPath = self._parseIndexPage()
    if not ( title and indexContent ):
        return
    codeJsContent, symbolNames = self._parseCodeJs( codeJsPath )
    if codeJsContent:
        indexContent    = self._modifyIndexPage( indexContent )
        codeJsContent   = self._modifyCodeJs( codeJsContent, symbolNames )
        styleCssContent = self._getStyleCss( styleCssPath )
        apiJsContent    = self._modifyApiJs( symbolNames )
        if indexContent and codeJsContent and styleCssContent and apiJsContent:
            self._logger.warning( "_translateContent: content translated: title=%s" % ( title ) )
            conn = DBConnection()
            if conn:
                # Upsert every translated page, in a fixed order.
                pages = [ ( "index.xhtml", indexContent ),
                          ( "code.js",     codeJsContent ),
                          ( "style.css",   styleCssContent ),
                          ( "api.js",      apiJsContent ) ]
                for page, content in pages:
                    row = conn.execute( "SELECT * FROM glashart_pages WHERE page=?", ( page, ) )
                    if row:
                        conn.execute( "UPDATE glashart_pages SET content=? WHERE page=?", ( content, page ) )
                    else:
                        conn.insert( "INSERT INTO glashart_pages (page, content) VALUES (?, ?)", ( page, content ) )
        if symbolNames:
            conn = DBConnection()
            if conn:
                PageSymbol.addAllDictToDb( conn, symbolNames )