def saveConfig():
    """Persist every known config variable (module globals) to the config table."""
    from sql import sqlRun
    # Build one multi-statement script and run it in script mode (-1, 1).
    statements = [
        "UPDATE config SET value='%s' WHERE param='%s';" % (globals()[var], var)
        for var in getDict()
    ]
    sqlRun("".join(statements), -1, 1)
    return
def create_p():
    """Create a new record, or update an existing one, from the submitted form.

    An empty 'prev' field means create; otherwise 'prev' is the rowid to update.
    Returns the literal string "null" for the AJAX caller.
    """
    prev = request.forms.prev
    recname = request.forms.recname
    sender = request.forms.Sender  # capital 'S' matches the HTML form field name
    von = request.forms.von
    bis = request.forms.bis
    am = request.forms.am
    aktiv = getBool(request.forms.aktiv)
    recurr = request.forms.recurr
    d_von = datetime.strptime(am + " " + von, "%Y-%m-%d %H:%M")
    d_bis = datetime.strptime(am + " " + bis, "%Y-%m-%d %H:%M")
    # d_von = datetime.strptime(am + " " + von, "%d.%m.%Y %H:%M")
    # d_bis = datetime.strptime(am + " " + bis, "%d.%m.%Y %H:%M")
    delta = timedelta(days=1)
    # End time before start time means the record crosses midnight.
    if d_bis < d_von:
        d_bis = d_bis + delta
    if prev=="":
        sqlRun("INSERT INTO records VALUES (?, ?, ?, ?, ?, ?)", (recname, sender, d_von, d_bis, aktiv, recurr))
    else:
        sqlRun("UPDATE records SET recname=?, cid=?, rvon=?, rbis=?, renabled=?, rmask=? WHERE rowid=?", (recname, sender, d_von, d_bis, aktiv, recurr, prev))
    setRecords()
    return "null"
def checkIP(ip):
    """Return False when *ip* has 3+ failed logins on record, True otherwise."""
    # First purge entries older than two days so old bans expire.
    sqlRun("DELETE FROM blacklist WHERE julianday('now', 'localtime')-julianday(lasttry)>=2;")
    hits = sqlRun("SELECT trycount FROM blacklist WHERE ip=?", (ip, ))
    if hits and hits[0][0] >= 3:
        return False
    return True
def __init__(self, row):
    """Record thread for one 'records' row (Python 2 variant).

    row layout used here: [0]=rowid, [1]=url, [2]=rvon, [3]=rbis,
    [5]=recname, [6]=rmask (weekday bitmask), [7]=extension override.
    """
    threading.Thread.__init__(self)
    self.id = row[0]
    self.von = datetime.strptime(row[2], "%Y-%m-%d %H:%M:%S")
    self.bis = datetime.strptime(row[3], "%Y-%m-%d %H:%M:%S")
    self.name = row[5]
    self.url = row[1].strip()
    self.mask = row[6]
    self.myrow = row
    if row[7] == "":
        self.ext = config.cfg_file_extension
    else:
        self.ext = row[7]
    if self.mask > 0:
        # Recurrent record: shift start/stop forward a day at a time until
        # the end time is in the future on a weekday enabled in the mask.
        # NOTE(review): this version indexes getWeekdays() with
        # datetime.weekday() (Mon=0); the sibling version uses an
        # isoweekday-based mapping -- confirm which matches getWeekdays().
        w = self.bis.weekday()
        if not (self.bis >= datetime.now() and getWeekdays(self.mask)[w]):
            delta = timedelta(days=1)
            while not (self.bis >= datetime.now() and getWeekdays(self.mask)[w]):
                self.von = self.von + delta
                self.bis = self.bis + delta
                w = self.bis.weekday()
            print "Recurrent record '%s' moved to %s" % (self.name, self.von)
            # Persist the shifted times back to the row.
            sqlRun(
                "UPDATE records SET rvon='%s', rbis='%s' WHERE rowid=%d" % (
                    datetime.strftime(self.von, "%Y-%m-%d %H:%M:%S"),
                    datetime.strftime(self.bis, "%Y-%m-%d %H:%M:%S"),
                    self.id,
                )
            )
def __init__(self, row):
    """Record thread for one 'records' row (Python 3 variant).

    row layout used here: [0]=rowid, [1]=url, [2]=rvon, [3]=rbis,
    [5]=recname, [6]=rmask (weekday bitmask), [7]=extension override.
    """
    Thread.__init__(self)
    self.id = row[0]
    self.von = datetime.strptime(row[2],"%Y-%m-%d %H:%M:%S")
    self.bis = datetime.strptime(row[3],"%Y-%m-%d %H:%M:%S")
    self.name = row[5]
    self.url = row[1].strip()
    self.mask = row[6]
    self.myrow = row
    # NOTE(review): self.retry_count is only assigned when the config value
    # is numeric -- confirm a class-level default exists for the other case.
    if config.cfg_retry_count.isdigit():
        self.retry_count = int(config.cfg_retry_count)
    if row[7]=='':
        self.ext = config.cfg_file_extension
    else:
        self.ext = row[7]
    if self.mask > 0:
        # Map Mon..Sat -> 1..6 and Sunday (isoweekday 7) -> 0 so the index
        # lines up with getWeekdays()' bit order.
        w = self.von.isoweekday() if self.von.isoweekday()<7 else 0
        if not (self.von>=datetime.now() and getWeekdays(self.mask)[w]):
            # Recurrent record: roll forward one day at a time until the
            # start lies in the future on an enabled weekday.
            delta = timedelta(days=1)
            while not (self.von>=datetime.now() and getWeekdays(self.mask)[w]):
                self.von = self.von + delta
                self.bis = self.bis + delta
                w = self.von.isoweekday() if self.von.isoweekday()<7 else 0
            print ("Recurrent record '%s' moved to %s" % (self.name, self.von))
            # Persist the shifted times back to the row.
            sqlRun("UPDATE records SET rvon='%s', rbis='%s' WHERE rowid=%d" % (datetime.strftime(self.von,"%Y-%m-%d %H:%M:%S"), datetime.strftime(self.bis,"%Y-%m-%d %H:%M:%S"), self.id ) )
def create_tvb():
    """Create a record from a TV-Browser plugin POST (channel matched by name)."""
    recname = request.forms.recname
    sender = request.forms.sender
    von = request.forms.von
    bis = request.forms.bis
    am = request.forms.am
    d_von = datetime.strptime(am + " " + von, "%Y-%m-%d %H:%M")
    d_bis = datetime.strptime(am + " " + bis, "%Y-%m-%d %H:%M")
    delta = timedelta(days=1)
    # End before start means the show crosses midnight.
    if d_bis < d_von:
        d_bis = d_bis + delta
    print ("POST request received from TV-Browser plugin")
    print ("Name: %s, channel: %s, start: %s, stop: %s" % (recname, sender, d_von, d_bis))
    rows=sqlRun("SELECT cid FROM channels WHERE cname=? AND cenabled=1", (sender, ))
    if rows:
        cid = rows[0][0]
        print ("Channel %s was found with CID %s, creating record" % (sender, cid))
        # Pad the slot on both sides by the configured EPG delta.
        deltaepg = timedelta(minutes=int(config.cfg_delta_for_epg))
        d_von = d_von - deltaepg
        d_bis = d_bis + deltaepg
        sqlRun("INSERT INTO records VALUES (?, ?, ?, ?, 1, 0)", (recname, cid, d_von, d_bis))
    else:
        print ("Channel %s could not be found, please check your channel names" % (sender))
def getProg(strp, channellist=[]):
    """Import XMLTV <programme> entries from *strp* into the 'guide' table.

    channellist: when non-empty, only programmes whose channel id is listed
    are imported. Returns the number of imported entries.
    (The mutable default [] is never mutated here, so it is safe.)
    """
    deltaxmltv_txt = config.cfg_xmltvtimeshift
    # Optional global timeshift for skewed XMLTV sources; fall back to 0h.
    try:
        deltaxmltv = timedelta(hours=float(config.cfg_xmltvtimeshift))
    except:
        deltaxmltv = timedelta(hours=0)
    sqllist = []
    for attr, innertxt in getList(strp, 'programme'):
        dt1 = datetime.strptime(getAttr(attr, "start")[0:14], "%Y%m%d%H%M%S") + deltaxmltv
        # Some generators write 'end' instead of 'stop'.
        try:
            dt2 = datetime.strptime(getAttr(attr, "stop")[0:14], "%Y%m%d%H%M%S") + deltaxmltv
        except:
            dt2 = datetime.strptime(getAttr(attr, "end")[0:14], "%Y%m%d%H%M%S") + deltaxmltv
        p_id = getAttr(attr, "channel")
        if len(channellist) == 0 or p_id in channellist:
            desc = ""
            title = getFirst(innertxt, 'title')
            sub_title = getFirst(innertxt, 'sub-title')
            if not "http://" in sub_title and len(sub_title) > 0:  # fix for corrupted XML data
                if title != "":
                    title = title + " - "
                title = title + sub_title
            # BUGFIX: iterate the <episode-num> elements themselves and test
            # their 'system' attribute. The previous code searched for
            # <system> tags inside the first episode-num's text
            # (getList(getFirst(...), 'system')), which never matches the
            # XMLTV structure used by the sibling parser in this module.
            for epatt, epin in getList(innertxt, 'episode-num'):
                if getAttr(epatt, 'system') == 'onscreen':
                    desc = epin + ". "
                    break
            tmpdesc = getFirst(innertxt, 'desc')
            desc = desc + tmpdesc
            sqllist.append([p_id, title, datetime.strftime(dt1, "%Y-%m-%d %H:%M:%S"), datetime.strftime(dt2, "%Y-%m-%d %H:%M:%S"), desc])
    sqlRun("INSERT OR IGNORE INTO guide VALUES (?, ?, ?, ?, ?)", sqllist, 1)
    return len(sqllist)
def getFile(file_in, override=0): rows=sqlRun("SELECT * FROM caching WHERE url='%s'" % file_in) lastmod = "" etag = "" out = "" if rows: lastmod = rows[0][2] etag = rows[0][3] try: #print lastmod, etag httplib.HTTPConnection.debuglevel = 1 request = urllib2.Request(file_in) request.add_header('User-Agent', 'tvstreamrecord/' + version) if override==0: request.add_header('If-Modified-Since', lastmod) request.add_header('If-None-Match', etag) opener = urllib2.build_opener() response = opener.open(request) feeddata = response.read() if rows: sqlRun("UPDATE caching SET crTime=datetime('now', 'localtime'), Last_Modified=?, ETag=? WHERE url='%s'" % file_in, (response.info().getheader('Last-Modified'), response.info().getheader('ETag'))) else: sqlRun("INSERT INTO caching VALUES (datetime('now', 'localtime'), ?, ?, ?)", (file_in, response.info().getheader('Last-Modified'), response.info().getheader('ETag'))) d = zlib.decompressobj(16+zlib.MAX_WBITS) out = d.decompress(feeddata) print "XMLTV: reading URL %s" % file_in except: print "XMLTV: no new data, try again later" pass return out
def getProg(p_id):
    """Download one XMLTV document and import its programmes into 'guide'."""
    stri = getFile(p_id)
    sqllist = []
    if stri:
        #tree = et.parse("hd.zdf.de_2013-02-14.xml")
        tree = et.fromstring(stri)
        for prog in tree.iterfind('programme'):
            start = datetime.strptime(prog.attrib.get("start")[0:14], "%Y%m%d%H%M%S")
            stop = datetime.strptime(prog.attrib.get("stop")[0:14], "%Y%m%d%H%M%S")
            p_id = prog.attrib.get("channel")
            title_el = prog.find('title')
            desc_el = prog.find('desc')
            title = title_el.text if title_el is not None else ""
            desc = desc_el.text if desc_el is not None else ""
            #print dt1, dt2, p_id, title
            sqllist.append([p_id, title,
                            datetime.strftime(start, "%Y-%m-%d %H:%M:%S"),
                            datetime.strftime(stop, "%Y-%m-%d %H:%M:%S"),
                            desc])
    sqlRun("INSERT OR IGNORE INTO guide VALUES (?, ?, ?, ?, ?)", sqllist, 1)
def createepg():
    """Create a record from an EPG entry (the guide rowid is POSTed as 'ret').

    The record window is the guide slot widened by cfg_delta_for_epg minutes.
    """
    # BUGFIX: bind the user-supplied rowid as a SQL parameter instead of
    # %-formatting it into the statement (SQL injection / malformed query).
    # The minute deltas stay interpolated: they come from config and live
    # inside datetime() modifier string literals.
    sqlRun(
        "INSERT INTO records SELECT guide.g_title, channels.cid, datetime(guide.g_start, '-%s minutes'), datetime(guide.g_stop, '+%s minutes'), 1, 0 FROM guide, guide_chan, channels WHERE guide.g_id = guide_chan.g_id AND channels.cname = guide_chan.g_name AND guide.rowid=? GROUP BY datetime(guide.g_start, '-3 minutes')"
        % (config.cfg_delta_for_epg, config.cfg_delta_for_epg),
        (request.forms.ret, )
    )
    setRecords()
    redirect("/records")
    return
def saveConfig():
    """Write every config variable (module globals) back to the config table."""
    from sql import sqlRun
    sql = ''
    # One UPDATE per known config parameter, executed as a single script.
    # NOTE(review): values are %-formatted into the SQL text; a value
    # containing a quote would break the script -- confirm values are trusted.
    for var in getDict():
        sql += "UPDATE config SET value='%s' WHERE param='%s';" % (
            globals()[var], var)
    sqlRun(sql, -1, 1)
    return
def createepg():
    """Create a record from an EPG entry (guide rowid POSTed as 'ret'),
    widened by cfg_delta_for_epg minutes on both sides."""
    # NOTE(review): request.forms.ret is %-formatted into the SQL text --
    # SQL injection risk; should be bound as a parameter.
    sqlRun(
        "INSERT INTO records SELECT guide.g_title, channels.cid, datetime(guide.g_start, '-%s minutes'), datetime(guide.g_stop, '+%s minutes'), 1, 0 FROM guide, guide_chan, channels WHERE guide.g_id = guide_chan.g_id AND channels.cname = guide_chan.g_name AND guide.rowid=%s GROUP BY datetime(guide.g_start, '-3 minutes')"
        % (config.cfg_delta_for_epg, config.cfg_delta_for_epg, request.forms.ret))
    setRecords()
    redirect("/records")
    return
def epg_s():
    """Render the EPG chart page.

    Builds one row of hour labels followed by one row of programme boxes per
    enabled channel; x and width are percentages of the visible day.
    """
    grabthread.setChannelCount()
    global dayshown
    # Never display a day in the past.
    if dayshown < datetime.combine(date.today(), time.min):
        dayshown = datetime.combine(date.today(), time.min)
    todaysql = datetime.strftime(dayshown, "%Y-%m-%d %H:%M:%S")
    if dayshown == datetime.combine(date.today(), time.min): # really today
        # Start the chart at the current hour and shrink the timeline to the
        # remaining seconds of the day.
        sthour = datetime.now().time().hour
        daystart = datetime.combine(date.today(), time(sthour,0,0))
        totalwidth = 86400 - total(daystart - dayshown)
    else:
        sthour = 0
        daystart = dayshown
        totalwidth = 86400
    hours = int(totalwidth / 3600)
    d_von = daystart
    widthq = 1
    ret = list()
    rtemp = list()
    w = 0.0
    # First row: the hour scale.
    for i in range(0, hours):
        t = time(i+sthour)
        x = i * 100.0 / hours * widthq
        w = 1.0 / hours * widthq * 100.0
        rtemp.append([-1, x, w, t.strftime("%H:%M"), "", "", "", -1, "", 0])
    ret.append(rtemp)
    rows=sqlRun("SELECT guide.g_id, channels.cid, channels.cname FROM guide, guide_chan, channels WHERE channels.cenabled=1 AND channels.cname=guide_chan.g_name AND guide.g_id=guide_chan.g_id AND (date(g_start)=date(?) OR date(g_stop)=date(?)) GROUP BY channels.cid ORDER BY channels.cid", (todaysql, todaysql))
    for row in rows:
        cid=row[1]
        rtemp = list()
        # Last SELECT column flags whether a matching (EPG-padded) record
        # already exists for the slot.
        c_rows=sqlRun("SELECT g_title, g_start, g_stop, g_desc, guide.rowid, (records.renabled is not null and records.renabled = 1) FROM guide LEFT JOIN records ON records.cid=? AND datetime(guide.g_start, '-%s minutes')=records.rvon and datetime(guide.g_stop, '+%s minutes')=records.rbis WHERE (date(g_start)=date(?) OR date(g_stop)=date(?)) AND datetime(g_stop, '+60 minutes')>datetime('now', 'localtime') AND g_id=? ORDER BY g_start" % (config.cfg_delta_for_epg, config.cfg_delta_for_epg), (cid, todaysql, todaysql, row[0]))
        for event in c_rows:
            d_von = datetime.strptime(event[1],"%Y-%m-%d %H:%M:%S")
            d_bis = datetime.strptime(event[2],"%Y-%m-%d %H:%M:%S")
            # Clip the box to the visible day for the geometry computation.
            if d_von < daystart:
                d_von = daystart
            if d_bis.date() > daystart.date():
                d_bis=datetime.combine(d_bis.date(),time.min)
            x = total(d_von - daystart)
            w = total(d_bis - d_von)
            # restoring dates for correct record times
            d_von = datetime.strptime(event[1],"%Y-%m-%d %H:%M:%S")
            d_bis = datetime.strptime(event[2],"%Y-%m-%d %H:%M:%S")
            if x >= 0 and w > 0:
                rtemp.append ([cid, x/totalwidth*100.0*widthq, w/totalwidth*100.0*widthq, event[0], d_von, d_bis, event[3], event[4], row[2], event[5]])
        ret.append(rtemp)
    # 'curr' uses the last processed d_von (daystart when no events at all).
    return internationalize(template('epgchart', curr=datetime.strftime(d_von, "%Y-%m-%d"), rowss=ret, zoom=config.cfg_grab_zoom, rows2=sqlRun('SELECT cid, cname FROM channels where cenabled=1 ORDER BY cid'), delta=config.cfg_delta_for_epg))
def banIP(ip):
    """Count a failed login attempt for *ip*; announce the ban at 3 strikes."""
    hits = sqlRun("SELECT trycount FROM blacklist WHERE ip=?", (ip, ))
    now = datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M:%S")
    if not hits:
        # First failure for this address.
        sqlRun("INSERT INTO blacklist VALUES (?, ?, ?)", (ip, 1, now) )
        return
    attempts = hits[0][0] + 1
    sqlRun("UPDATE blacklist SET trycount=?, lasttry=? WHERE ip=?", (attempts, now, ip))
    if attempts == 3:
        print ("IP %s has been blacklisted for for three unsuccessful login attempts" % ip)
def getFile(file_in, override=0, ver=""): rows=sqlRun("SELECT * FROM caching WHERE url=?", (file_in, )) lastmod = "" etag = "" out = "" if rows: lastmod = rows[0][2] etag = rows[0][3] try: httplib.HTTPConnection.debuglevel = 0 request = urllib32.Request(file_in) request.add_header('User-Agent', 'tvstreamrecord/' + ver) if override==0: request.add_header('If-Modified-Since', lastmod) request.add_header('If-None-Match', etag) opener = urllib32.build_opener() try: hresponse = opener.open(request, timeout=10) except Exception as ex: print ("XMLTV Warning: connection timeout detected, retry in 5 seconds") sleep (5) hresponse = opener.open(request, timeout=20) feeddata = hresponse.read() hr = hresponse.info() lastmod = hr.get('Last-Modified') etag = hr.get('ETag') if rows and lastmod and etag: sqlRun("UPDATE caching SET crTime=datetime('now', 'localtime'), Last_Modified=?, ETag=? WHERE url=?", (lastmod, etag, file_in)) elif lastmod and etag: sqlRun("INSERT INTO caching VALUES (datetime('now', 'localtime'), ?, ?, ?)", (file_in, lastmod, etag)) try: d = zlib.decompressobj(16+zlib.MAX_WBITS) out = d.decompress(feeddata) except: out = feeddata print ("XMLTV: reading URL %s with %s bytes" % (file_in, len(out))) if not b"</tv>" in out[-1000:]: print ("Possibly corrupted XML file, attempting to repair...") pos = out.rfind(b"</programme>") if pos != -1: out = out[:pos+12] + b"</tv>" else: pos = out.rfind(b"</channel>") if pos != -1: out = out[:pos+10] + b"</tv>" except Exception as ex: print ("XMLTV: no new data / unknown error, try again later (%s)" % file_in) pass try: out = out.decode("UTF-8") except: pass return out
def getProg(strp, channellist=[], keylist=[]):
    """Import XMLTV <programme> entries from *strp* into 'guide'.

    channellist: restrict the import to these channel ids (empty = all).
    keylist: lowercase keywords; matching titles are collected for automatic
    record creation.
    Returns (number of imported entries, list of [title, start, stop, cid]).
    (The mutable defaults are never mutated here, so they are safe.)
    """
    deltaxmltv_txt = config.cfg_xmltvtimeshift
    # Optional global timeshift for skewed XMLTV sources; fall back to 0h.
    try:
        deltaxmltv = timedelta(hours=float(config.cfg_xmltvtimeshift))
    except:
        deltaxmltv = timedelta(hours=0)
    #2018-12-31 automatic recording
    delta_b = timedelta(minutes=float(config.cfg_delta_after_epg))
    delta_a = timedelta(minutes=float(config.cfg_delta_before_epg))
    reclist = []
    sqllist = []
    for attr,innertxt in getList(strp, 'programme'):
        dt1 = datetime.strptime(getAttr(attr, "start")[0:14],"%Y%m%d%H%M%S") + deltaxmltv
        # Some generators write 'end' instead of 'stop'.
        try:
            dt2 = datetime.strptime(getAttr(attr, "stop")[0:14],"%Y%m%d%H%M%S") + deltaxmltv
        except:
            dt2 = datetime.strptime(getAttr(attr, "end")[0:14],"%Y%m%d%H%M%S") + deltaxmltv
        p_id = getAttr(attr, "channel")
        if len(channellist)==0 or p_id in channellist:
            desc = ""
            title = getFirst(innertxt, 'title')
            sub_title = getFirst(innertxt, 'sub-title')
            if not "http://" in sub_title and len(sub_title)>0: # fix for corrupted XML data
                if title != "":
                    title = title + " - "
                title = title + sub_title
            # Build a SxxEyy tag: prefer xmltv_ns numbering (0-based
            # season.episode), fall back to the onscreen text.
            for epatt, epin in getList(innertxt, 'episode-num'):
                if getAttr(epatt, 'system') == 'xmltv_ns':
                    e = epin.split(".")
                    if len(e)>1:
                        try:
                            episode = "E" + format(int(e[1].strip()) + 1, '02d')
                            if e[0].strip() != "":
                                episode = "S" + format(int(e[0].strip()) + 1, '02d') + episode
                            desc += episode + ". "
                            title += " (" + episode + ")"
                            break
                        except:
                            pass
                elif getAttr(epatt, 'system') == 'onscreen':
                    desc = epin + ". "
                    break
            tmpdesc = getFirst(innertxt, 'desc')
            desc = desc + tmpdesc
            sqllist.append([p_id, title, datetime.strftime(dt1, "%Y-%m-%d %H:%M:%S"), datetime.strftime(dt2, "%Y-%m-%d %H:%M:%S"), desc])
            # Queue keyword matches for autocreation, padded by the
            # configured before/after deltas.
            for key in keylist:
                if key in title.lower():
                    print("XMLTV: Record '%s' is queued for autocreation" % (title, ))
                    reclist.append([title, datetime.strftime(dt1-delta_b, "%Y-%m-%d %H:%M:%S"), datetime.strftime(dt2+delta_a, "%Y-%m-%d %H:%M:%S"), p_id])
                    break
    sqlRun("INSERT OR IGNORE INTO guide VALUES (?, ?, ?, ?, ?)", sqllist, 1)
    return len(sqllist), reclist
def epglist_getter():
    """DataTables backend for the EPG list.

    With sEcho set it performs server-side paging/sorting/filtering;
    otherwise it returns up to cfg_epg_max_events upcoming entries at once.
    """
    sEcho = request.query.sEcho
    retlist = []
    totalrows = 0
    if sEcho: # Server-side processing
        columns = [ 'guide_chan.g_name', 'guide.g_title', 'guide.g_desc', 'guide.g_start', 'guide.g_stop' ]
        sLimit = "LIMIT %s OFFSET %s" % (request.query.iDisplayLength, request.query.iDisplayStart)
        iSortingCols = int(request.query.iSortingCols)
        sOrder = ""
        if iSortingCols:
            # Map the DataTables sort column index onto a real column name.
            sOrder = "ORDER BY"
            col = int(request.query['iSortCol_0'])
            sOrder += " %s " % columns[col]
            sOrder += "ASC" if request.query['sSortDir_0'] == "asc" else "DESC"
            if sOrder == "ORDER BY":
                sOrder = ""
        iSearch = request.query.sSearch
        sWhere = ""
        if iSearch and iSearch != "":
            # NOTE(review): the search text is concatenated into the LIKE
            # clauses -- SQL injection risk; should be parameterized.
            sWhere = "AND (guide_chan.g_name LIKE '%" + iSearch + "%' OR guide.g_title LIKE '%" + iSearch + "%' OR guide.g_desc LIKE '%" + iSearch + "%')"
        query = "SELECT guide_chan.g_name, guide.g_title, guide.g_desc, guide.g_start, guide.g_stop, (records.renabled is not null and records.renabled = 1), guide.rowid FROM guide INNER JOIN guide_chan ON guide.g_id = guide_chan.g_id INNER JOIN channels ON channels.cname=guide_chan.g_name LEFT JOIN records ON records.cid=channels.cid AND datetime(guide.g_start, '-%s minutes')=records.rvon and datetime(guide.g_stop, '+%s minutes')=records.rbis WHERE datetime(guide.g_stop)>datetime('now', 'localtime') AND channels.cenabled<>0 %s %s %s" % (
            config.cfg_delta_for_epg, config.cfg_delta_for_epg, sWhere, sOrder, sLimit)
        countquery = "SELECT COUNT(guide.g_start) FROM guide INNER JOIN guide_chan ON guide.g_id = guide_chan.g_id INNER JOIN channels ON channels.cname=guide_chan.g_name LEFT JOIN records ON records.cid=channels.cid AND datetime(guide.g_start, '-%s minutes')=records.rvon and datetime(guide.g_stop, '+%s minutes')=records.rbis WHERE datetime(guide.g_stop)>datetime('now', 'localtime') AND channels.cenabled<>0 %s" % (
            config.cfg_delta_for_epg, config.cfg_delta_for_epg, sWhere)
        count = sqlRun(countquery)
        if count:
            totalrows = count[0][0]
        rows = sqlRun(query)
    else: # Client-side processing
        rows = sqlRun( "SELECT guide_chan.g_name, guide.g_title, guide.g_desc, guide.g_start, guide.g_stop, (records.renabled is not null and records.renabled = 1), guide.rowid FROM guide INNER JOIN guide_chan ON guide.g_id = guide_chan.g_id INNER JOIN channels ON channels.cname=guide_chan.g_name LEFT JOIN records ON records.cid=channels.cid AND datetime(guide.g_start, '-%s minutes')=records.rvon and datetime(guide.g_stop, '+%s minutes')=records.rbis WHERE datetime(guide.g_stop)>datetime('now', 'localtime') AND channels.cenabled<>0 ORDER BY g_start LIMIT %s;" % (config.cfg_delta_for_epg, config.cfg_delta_for_epg, config.cfg_epg_max_events))
    for row in rows:
        retlist.append(
            [row[0], row[1], row[2], row[3], row[4], row[5], row[6]])
    return json.dumps({
        "aaData": retlist,
        "sEcho": sEcho,
        "iTotalRecords": totalrows,
        "iTotalDisplayRecords": totalrows
    })
def setChannelCount(self):
    """Publish how many enabled channels are flagged for EPG scanning."""
    self.epggrabberstate[1] = 0  # default when the query returns nothing
    result = sqlRun(
        "SELECT count(cname) FROM channels WHERE epgscan = 1 AND cenabled = 1;"
    )
    if result:
        self.epggrabberstate[1] = result[0][0]
def getrecordlist():
    """Return all records joined with their channels as DataTables JSON."""
    rows = sqlRun("SELECT recname, cname, rvon, rbis, rmask, renabled, 100*(strftime('%s','now', 'localtime')-strftime('%s',rvon)) / (strftime('%s',rbis)-strftime('%s',rvon)), records.rowid, rvon, rbis, channels.cid FROM channels, records where channels.cid=records.cid ORDER BY rvon")
    result = []
    for r in rows:
        # Column 1 is a clickable live-stream link for the channel.
        link = "<a href=\"live/" + str(r[10]) + ".m3u\">" + r[1] + "</a>"
        result.append([r[0], link, r[2], r[3], r[4], r[5], r[6], r[7], r[8], r[9]])
    return json.dumps({"aaData": result} )
def server_static9(filename):
    """Serve a one-channel live.m3u playlist for /live/<cid>.m3u requests."""
    cid = filename.split(".")[0]
    rows = sqlRun("SELECT * FROM channels WHERE cid=?", (cid, ))
    if not rows:
        return
    write_m3u(rows[0][0], rows[0][1])
    return static_file("/live.m3u", root='', mimetype='video')
def setRecords():
    """Synchronize running record threads with the 'records' table.

    Starts a thread for every enabled upcoming (or recurrent) record not yet
    running, restarts threads whose row data changed, and stops threads whose
    row is no longer in the result set.
    """
    if shutdown:
        return
    rows=sqlRun("SELECT records.rowid, cpath, rvon, rbis, cname, records.recname, records.rmask, channels.cext, channels.cid, channels.cname FROM channels, records where channels.cid=records.cid AND (datetime(rbis)>=datetime('now', 'localtime') OR rmask>0) AND renabled = 1 ORDER BY datetime(rvon)")
    for row in rows:
        chk = False
        for t in records:
            if t.id == row[0]:
                # Same rowid but changed data: stop the old thread so a
                # fresh one is started below.
                if t.myrow[1]!=row[1] or t.myrow[2]!=row[2] or t.myrow[3]!=row[3] or t.myrow[4]!=row[4] or t.myrow[5]!=row[5] or t.myrow[6]!=row[6] or t.myrow[7]!=row[7]:
                    t.stop()
                    chk = False
                else:
                    chk = True
                break
        if chk == False:
            thread = record(row)
            thread.start()
            records.append(thread)
    # Stop threads whose record no longer appears in the current rows.
    for i in range(len(records)-1,-1,-1):
        t = records[i]
        chk = False
        for row in rows:
            if t.id == row[0]:
                chk = True
                break
        if chk == False:
            t.stop()
def chanlist():
    """Return every channel as DataTables JSON; column 1 is an m3u link."""
    entries = []
    for cid, cname, cpath, cext, epgscan, cenabled in sqlRun("SELECT channels.cid, cname, cpath, cext, epgscan, cenabled FROM channels"):
        link = '<a href="live/' + str(cid) + '.m3u">' + cname + "</a>"
        entries.append([cid, link, cpath, cext, epgscan, cenabled])
    return json.dumps({"aaData": entries})
def list_s():
    """Render the channel list page (enabled channels only)."""
    enabled = sqlRun('SELECT cid, cname FROM channels where cenabled=1 ORDER BY cid')
    return internationalize(template('list', rows2=enabled))
def getchannelgroups():
    """Group enabled channels by first letter; non-A..Z fall into group '0'."""
    bucket = "case WHEN substr(upper(cname), 1, 1) >= 'A' AND substr(upper(cname), 1, 1) <= 'Z' THEN substr(upper(cname), 1, 1) ELSE '0' END"
    query = "select " + bucket + ", count(cname) from channels where cenabled=1 group by " + bucket
    groups = [[letter, count] for letter, count in sqlRun(query)]
    return json.dumps({"aaData": groups} )
def setChannelCount(self):
    """Compute the number of pending grab jobs: one per scannable channel
    when the internal grabber is enabled, plus one job for XMLTV import."""
    self.epggrabberstate[1] = 0
    if config.cfg_switch_grab_auto == "1":
        rows = sqlRun("SELECT count(cname) FROM channels WHERE epgscan = 1 AND cenabled = 1;")
        if rows:
            self.epggrabberstate[1] += rows[0][0]
    if config.cfg_switch_xmltv_auto=="1":
        # The XMLTV import counts as one additional job.
        self.epggrabberstate[1] += 1
def upload_p(): print "M3U upload parsing started" retl = [] upfile = request.files.upfile header = upfile.file.read(7) if header.startswith("#EXTM3U"): how = getBool(request.forms.get("switch00")) upfilecontent = upfile.file.read() rowid = 1 if how == 0: sqlRun("DELETE FROM channels") sqlRun("DELETE FROM records") setRecords() else: rows2 = sqlRun("select max(cid) from channels") if rows2 and not rows2[0][0] is None: rowid = rows2[0][0] + 1 lines = upfilecontent.splitlines() i = 0 name = "" for line in lines: i = i + 1 if i > 1: if i % 2 == 0: name = line.split(",", 1)[1] if i % 2 == 1: retl.append([name, line, rowid]) rowid = rowid + 1 name = "" sqlRun("INSERT OR IGNORE INTO channels VALUES (?, ?, '1', '', ?, 0)", retl, 1) redirect("/list")
def upload_p():
    """Import channels from an uploaded M3U playlist.

    switch00 == 0 wipes channels and records first (replace mode); otherwise
    new channels are appended after the current highest cid.
    """
    print "M3U upload parsing started"
    retl = []
    upfile = request.files.upfile
    header = upfile.file.read(7)
    if header.startswith("#EXTM3U"):
        how = getBool(request.forms.get("switch00"))
        upfilecontent = upfile.file.read()
        rowid = 1
        if how == 0:
            # Replace mode: wipe channels and records first.
            sqlRun('DELETE FROM channels')
            sqlRun('DELETE FROM records')
            setRecords()
        else:
            # Append mode: continue after the highest existing cid.
            rows2 = sqlRun("select max(cid) from channels")
            if rows2 and not rows2[0][0] is None:
                rowid = rows2[0][0] + 1
        lines = upfilecontent.splitlines()
        i = 0
        name = ""
        # Alternating lines after the header: even index = #EXTINF name
        # line, odd index = stream URL.
        # NOTE(review): relies on strict line alternation; a blank line or
        # an #EXTINF without a comma would shift pairs / raise IndexError --
        # confirm inputs are always well-formed.
        for line in lines:
            i = i + 1
            if i > 1:
                if i % 2 == 0:
                    name = line.split(",", 1)[1]
                if i % 2 == 1:
                    retl.append([name, line, rowid])
                    rowid = rowid + 1
                    name = ""
        sqlRun("INSERT OR IGNORE INTO channels VALUES (?, ?, '1', '', ?, 0)", retl, 1)
    redirect("/list")
def getProg(p_id):
    """Download one XMLTV document and import its programmes into 'guide'."""
    stri = getFile(p_id)
    sqllist = []
    if stri:
        #tree = et.parse("hd.zdf.de_2013-02-14.xml")
        tree = et.fromstring(stri)
        for dict_el in tree.iterfind('programme'):
            dt1 = datetime.strptime(dict_el.attrib.get("start")[0:14],"%Y%m%d%H%M%S")
            dt2 = datetime.strptime(dict_el.attrib.get("stop")[0:14],"%Y%m%d%H%M%S")
            p_id = dict_el.attrib.get("channel")
            title = ""
            desc = ""
            # title/desc are optional elements; default to empty strings.
            if dict_el.find('title') is not None:
                title = dict_el.find('title').text
            if dict_el.find('desc') is not None:
                desc = dict_el.find('desc').text
            #print dt1, dt2, p_id, title
            sqllist.append([p_id, title, datetime.strftime(dt1, "%Y-%m-%d %H:%M:%S"), datetime.strftime(dt2, "%Y-%m-%d %H:%M:%S"), desc])
    sqlRun("INSERT OR IGNORE INTO guide VALUES (?, ?, ?, ?, ?)", sqllist, 1)
def chanlist():
    """Return every channel as DataTables JSON; column 1 is an m3u link."""
    l = []
    rows = sqlRun(
        'SELECT channels.cid, cname, cpath, cext, epgscan, cenabled FROM channels'
    )
    for row in rows:
        m3u = "<a href=\"live/" + str(row[0]) + ".m3u\">" + row[1] + "</a>"
        l.append([row[0], m3u, row[2], row[3], row[4], row[5]])
    return json.dumps({"aaData": l})
def getFullList(f):
    """Join grabbed guide entries with a channel list into sorted, deduplicated
    [channel name, start, title, desc] rows.

    When the stream carries no channel table, channel service ids (SIDs) are
    reverse-engineered from the enabled channels' stream URLs instead.
    """
    fulllist = list()
    lists = read_stream(f)
    if not lists:
        print ("No EPG information found")
        return fulllist
    guides = lists[0]
    channellist = lists[1]
    # print ("guides %s" % (len(guides)))
    # for g in guides:
    #     print (g[0], g[1], g[2], g[3].encode("UTF-8"))
    # print ("channellist %s" % (len(channellist)))
    # for g in channellist:
    #     print (g[0], g[1], g[2])
    # If there is no channel list contained within the stream, try to use URLs instead
    if len(channellist)==0:
        rows=sqlRun('SELECT cname, cpath FROM channels WHERE cenabled=1')
        if rows:
            for row in rows:
                lastpart = row[1].split("/")[-1]
                if lastpart.endswith("FF"):
                    # SID encoded as hex digits near the end of the path.
                    sid = int(row[1][-12:-8], 16)
                    channellist.append([sid, "SQL", row[0]])
                else:
                    # Colon-separated tuning parameters; field 3 holds the
                    # SID as 4 hex digits.
                    spl = lastpart.split(":",7)
                    if len(spl) == 8:
                        if len(spl[3])==4:
                            sid = int(spl[3], 16)
                            channellist.append([sid, "SQL", row[0]])
            if len(channellist) > 0:
                print ("Could not extract a channel list from provided stream, tried to use URLs instead")
            else:
                print ("Could not also extract a channel list from your URLs. Please check the About page for more details")
    # Keep entries newer than 8 hours ago that belong to a known channel.
    for l in guides:
        for c in channellist:
            if l[0] == c[0] and l[1] > datetime.now() - timedelta(hours=8):
                fulllist.append([c[2], l[1], l[2], l[3]])
                break
    fulllist = sorted(fulllist, key=itemgetter(0,1,2))
    # remove duplicates
    for i in range(len(fulllist)-1,0,-1):
        if fulllist[i][0] == fulllist[i-1][0] and fulllist[i][1] == fulllist[i-1][1] and fulllist[i][2] == fulllist[i-1][2]:
            # Keep the longer of the two duplicate descriptions.
            if len(fulllist[i][3]) > len(fulllist[i-1][3]):
                fulllist[i-1][3] = fulllist[i][3]
            fulllist.pop(i)
    print ("EPG grab finished with %s channels, %s guide infos, joined amount: %s" % (len(channellist), len(guides), len(fulllist)))
    return fulllist
def list_p():
    """Enable/disable a channel (what in {0,1}) or delete it (what == "-1"),
    mirroring the change onto its records, then resync record threads.
    Returns the literal string "null" for the AJAX caller.
    """
    what = request.forms.get("what")
    myid = request.forms.get("myid")
    # BUGFIX: bind user-supplied form values as SQL parameters instead of
    # %-formatting them into the statement (SQL injection). This matches the
    # parameterized style used by records_p() in this module.
    if what == "-1":
        sqlRun("DELETE FROM channels WHERE cid=?", (myid, ))
        sqlRun("DELETE FROM records WHERE cid=?", (myid, ))
    else:
        sqlRun("UPDATE channels SET cenabled=? WHERE cid=?", (what, myid))
        sqlRun("UPDATE records SET renabled=? WHERE cid=?", (what, myid))
    setRecords()
    return "null"
def root_s():
    """Redirect mobile browsers to /mobile (when channels exist), else /records."""
    agent = request.headers.get('User-Agent')
    markers = ("berry", "Symbian", "Nokia", "iPhone")
    is_mobile = ("Android" in agent and "Mobile" in agent) or any(m in agent for m in markers)
    if is_mobile:
        count = sqlRun("select count(cid) from channels where cenabled=1")[0][0]
        if count > 0:
            redirect("/mobile")
        else:
            redirect("/records")
    else:
        redirect("/records")
def clgen_p():
    """Write channels.m3u containing all enabled channels; returns "null"."""
    rows = sqlRun("select cid, cname, cpath from channels where cenabled=1 ORDER BY cid")
    if rows:
        # BUGFIX: use a context manager so the file is closed even when a
        # write fails (the previous explicit close() leaked on exceptions).
        with codecs.open("channels.m3u", "w", "utf-8") as f:
            f.write("#EXTM3U\n")
            for row in rows:
                f.write("#EXTINF:0,"+row[1]+"\n")
                f.write(row[2]+"\n")
    return "null"
def server_static9(filename):
    """Serve live.m3u for /live/<cid>.m3u; unknown cids redirect to /epg."""
    # BUGFIX: bind the cid extracted from the URL as a SQL parameter instead
    # of %-formatting it into the statement (SQL injection). Matches the
    # parameterized sibling implementation in this module.
    rows = sqlRun("SELECT * FROM channels WHERE cid=?", (filename.split(".")[0], ))
    if rows:
        # 'with' closes the file even if a write fails.
        with open("live.m3u", "w") as f:
            f.write("#EXTM3U\n")
            f.write("#EXTINF:0," + rows[0][0] + "\n")
            f.write(rows[0][1] + "\n")
        return static_file("/live.m3u", root="", mimetype="video")
    else:
        redirect("/epg")
def getProg(strp, channellist=[]):
    """Import XMLTV <programme> entries from *strp* into 'guide'.

    channellist: when non-empty, restricts the import to those channel ids.
    Returns the number of imported entries.
    (The mutable default [] is never mutated here, so it is safe.)
    """
    deltaxmltv_txt = config.cfg_xmltvtimeshift
    # Optional global timeshift for skewed XMLTV sources; fall back to 0h.
    try:
        deltaxmltv = timedelta(hours=float(config.cfg_xmltvtimeshift))
    except:
        deltaxmltv = timedelta(hours=0)
    sqllist = []
    for attr, innertxt in getList(strp, 'programme'):
        dt1 = datetime.strptime(getAttr(attr, "start")[0:14], "%Y%m%d%H%M%S") + deltaxmltv
        # Some generators write 'end' instead of 'stop'.
        try:
            dt2 = datetime.strptime(
                getAttr(attr, "stop")[0:14], "%Y%m%d%H%M%S") + deltaxmltv
        except:
            dt2 = datetime.strptime(
                getAttr(attr, "end")[0:14], "%Y%m%d%H%M%S") + deltaxmltv
        p_id = getAttr(attr, "channel")
        if len(channellist) == 0 or p_id in channellist:
            desc = ""
            title = getFirst(innertxt, 'title')
            sub_title = getFirst(innertxt, 'sub-title')
            if not "http://" in sub_title and len(
                    sub_title) > 0:  # fix for corrupted XML data
                if title != "":
                    title = title + " - "
                title = title + sub_title
            # NOTE(review): this searches for 'system' items inside the
            # first episode-num's text; the sibling parser iterates
            # getList(innertxt, 'episode-num') and checks the 'system'
            # attribute instead -- confirm which matches getList()'s
            # contract.
            eplist = getFirst(innertxt, 'episode-num')
            for epatt, epin in getList(eplist, 'system'):
                if getAttr(epatt, 'system') == 'onscreen':
                    desc = epin + ". "
                    break
            tmpdesc = getFirst(innertxt, 'desc')
            desc = desc + tmpdesc
            sqllist.append([
                p_id, title,
                datetime.strftime(dt1, "%Y-%m-%d %H:%M:%S"),
                datetime.strftime(dt2, "%Y-%m-%d %H:%M:%S"), desc
            ])
    sqlRun("INSERT OR IGNORE INTO guide VALUES (?, ?, ?, ?, ?)", sqllist, 1)
    return len(sqllist)
def server_static9(filename):
    """Serve live.m3u for /live/<cid>.m3u; unknown cids redirect to /epg."""
    # NOTE(review): the cid from the URL is %-formatted into the SQL text --
    # SQL injection risk; should be bound as a parameter.
    rows = sqlRun("SELECT * FROM channels WHERE cid=%s" % filename.split(".")[0])
    if rows:
        f = open("live.m3u", "w")
        f.write("#EXTM3U\n")
        f.write("#EXTINF:0," + rows[0][0] + "\n")
        f.write(rows[0][1] + "\n")
        f.close()
        return static_file("/live.m3u", root='', mimetype='video')
    else:
        redirect("/epg")
def getepgday():
    """Return one channel's guide entries for a date as JSON ('null' if none)."""
    cname = request.forms.get("cname")
    # On py2 the form value may be a byte string; best-effort decode.
    try:
        cname = cname.decode("utf-8")
    except:
        pass
    rdate = request.forms.get("rdate")
    events = sqlRun("SELECT substr(g_title,1,50), g_start, substr(g_desc, 1, 100), g_stop FROM guide, guide_chan WHERE guide.g_id = guide_chan.g_id AND guide_chan.g_name=? AND (date(g_start)=date(?) OR date(g_stop)=date(?)) AND datetime(guide.g_stop)>datetime('now', 'localtime') ORDER BY g_start", (cname, rdate, rdate))
    if not events:
        return "null"
    return json.dumps({"aaData": events} )
def getProgList(ver=''):
    """Top-level XMLTV import: fetch the init document, then channel data.

    Supports two generator types: type 1 publishes gzipped per-day files per
    channel (announced via <datafor> elements); type 2 inlines all
    programmes in the init document itself.
    """
    print ("tvstreamrecord v.%s / XMLTV import started" % ver)
    totalentries = 0
    initpath = config.cfg_xmltvinitpath
    # Distinguish a local path (incl. file://) from a remote URL.
    if not ("http:" in initpath or "https:" in initpath or "www." in initpath or "ftp:" in initpath) or initpath[0]=="/" or initpath[1]==":" or "file://" in initpath:
        stri = getLocalFile(initpath.replace("file://",""))
    else:
        stri = getFile(initpath, 1, ver)
    #stri = getTestFile()
    channellist = []
    typ = getAttr(stri[:200], "generator-info-name")
    for attr,innertxt in getList(stri, "channel"):
        g_id = getAttr(attr, "id")
        names = getAll(innertxt, 'display-name')
        if checkType(typ) == 1:
            url = getFirst(innertxt, 'base-url')
        elif checkType(typ) == 2:
            pass
        else:
            print ("Unknown XMLTV generator '%s', please contact me if it fails" % typ)
            typ = "dummy" # override for unknown types
            #url = getFirst(innertxt, 'base-url')
        for name in names:
            # Only import channels that exist and are enabled locally.
            rows=sqlRun("SELECT cname from channels WHERE cname = ? and cenabled=1 GROUP BY cname", (name, ))
            if rows:
                timerows=sqlRun("SELECT g_lasttime FROM guide_chan WHERE g_id=?", (g_id, ))
                dtmax = datetime.now()
                if checkType(typ) == 2:
                    channellist.append(g_id)
                else:
                    # Only fetch day files modified since the last run and
                    # no older than yesterday.
                    lastdate = datetime.now()-timedelta(days=30)
                    if timerows:
                        lastdate = datetime.strptime(timerows[0][0], "%Y-%m-%d %H:%M:%S")
                    dtmax = datetime.min
                    for t_attr, dttext in getList(innertxt, "datafor"):
                        dtepg = datetime.strptime(dttext, "%Y-%m-%d")
                        dt = datetime.strptime(getAttr(t_attr, "lastmodified")[0:14],"%Y%m%d%H%M%S")
                        if dt>lastdate and dtepg>=datetime.now()-timedelta(days=1):
                            source = url+g_id+"_"+dttext+".xml.gz"
                            # NOTE(review): totalentries is overwritten, not
                            # accumulated, per file -- confirm intent.
                            totalentries = getProg(getFile(source,0,ver))
                        if dt>dtmax:
                            dtmax = dt
                if not timerows:
                    sqlRun("INSERT OR IGNORE INTO guide_chan VALUES (?, ?, ?)", (g_id, name, datetime.strftime(dtmax, "%Y-%m-%d %H:%M:%S") ))
                else:
                    sqlRun("UPDATE guide_chan SET g_lasttime=? WHERE g_id=?", (datetime.strftime(dtmax, "%Y-%m-%d %H:%M:%S"), g_id))
                break
    # Type 2: the init document itself holds all programmes.
    if (checkType(typ)==2) and len(channellist)>0:
        totalentries = getProg(stri, channellist)
    del (stri)
    print ("XMLTV import completed with %s entries" % totalentries)
    return
def epglist_getter():
    """DataTables backend for the EPG list.

    With sEcho set it performs server-side paging/sorting/filtering;
    otherwise it returns up to cfg_epg_max_events upcoming entries at once.
    """
    sEcho = request.query.sEcho
    retlist = []
    totalrows = 0
    if sEcho: # Server-side processing
        columns = ['guide_chan.g_name', 'guide.g_title', 'guide.g_desc', 'guide.g_start', 'guide.g_stop']
        sLimit = "LIMIT %s OFFSET %s" % (request.query.iDisplayLength, request.query.iDisplayStart)
        iSortingCols = int(request.query.iSortingCols)
        sOrder = ""
        if iSortingCols:
            # Map the DataTables sort column index onto a real column name.
            sOrder = "ORDER BY"
            col = int(request.query['iSortCol_0'])
            sOrder += " %s " % columns[col]
            sOrder += "ASC" if request.query['sSortDir_0']=="asc" else "DESC"
            if sOrder == "ORDER BY":
                sOrder = ""
        iSearch = request.query.sSearch
        sWhere = ""
        if iSearch and iSearch!="":
            # NOTE(review): the search text is concatenated into the LIKE
            # clauses -- SQL injection risk; should be parameterized.
            sWhere = "AND (guide_chan.g_name LIKE '%" + iSearch + "%' OR guide.g_title LIKE '%" + iSearch + "%' OR guide.g_desc LIKE '%" + iSearch + "%')"
        query = "SELECT guide_chan.g_name, guide.g_title, guide.g_desc, guide.g_start, guide.g_stop, (records.renabled is not null and records.renabled = 1), guide.rowid FROM guide INNER JOIN guide_chan ON guide.g_id = guide_chan.g_id INNER JOIN channels ON channels.cname=guide_chan.g_name LEFT JOIN records ON records.cid=channels.cid AND datetime(guide.g_start, '-%s minutes')=records.rvon and datetime(guide.g_stop, '+%s minutes')=records.rbis WHERE datetime(guide.g_stop)>datetime('now', 'localtime') AND channels.cenabled<>0 %s %s %s" % (config.cfg_delta_for_epg, config.cfg_delta_for_epg, sWhere, sOrder, sLimit)
        countquery = "SELECT COUNT(guide.g_start) FROM guide INNER JOIN guide_chan ON guide.g_id = guide_chan.g_id INNER JOIN channels ON channels.cname=guide_chan.g_name LEFT JOIN records ON records.cid=channels.cid AND datetime(guide.g_start, '-%s minutes')=records.rvon and datetime(guide.g_stop, '+%s minutes')=records.rbis WHERE datetime(guide.g_stop)>datetime('now', 'localtime') AND channels.cenabled<>0 %s" % (config.cfg_delta_for_epg, config.cfg_delta_for_epg, sWhere)
        count = sqlRun(countquery)
        if count:
            totalrows = count[0][0]
        rows=sqlRun(query)
    else: # Client-side processing
        rows=sqlRun("SELECT guide_chan.g_name, guide.g_title, guide.g_desc, guide.g_start, guide.g_stop, (records.renabled is not null and records.renabled = 1), guide.rowid FROM guide INNER JOIN guide_chan ON guide.g_id = guide_chan.g_id INNER JOIN channels ON channels.cname=guide_chan.g_name LEFT JOIN records ON records.cid=channels.cid AND datetime(guide.g_start, '-%s minutes')=records.rvon and datetime(guide.g_stop, '+%s minutes')=records.rbis WHERE datetime(guide.g_stop)>datetime('now', 'localtime') AND channels.cenabled<>0 ORDER BY g_start LIMIT %s;" % (config.cfg_delta_for_epg, config.cfg_delta_for_epg, config.cfg_epg_max_events))
    for row in rows:
        retlist.append([row[0], row[1], row[2], row[3], row[4], row[5], row[6]])
    return json.dumps( {"aaData": retlist, "sEcho": sEcho, "iTotalRecords": totalrows, "iTotalDisplayRecords": totalrows } )
def getchannelgroup():
    """Return enabled channels for one letter group as DataTables JSON.

    id == '-' returns the first 10 channels, id == '0' the non-alphabetic
    group, anything else the channels starting with that letter.
    """
    l = []
    id = request.forms.get("id")
    sql = "select cid, cname from channels where cenabled=1 "
    params = ()
    if id=='-':
        sql = sql + "LIMIT 10"
    elif id=='0':
        sql = sql + "AND NOT (substr(upper(cname), 1, 1) >= 'A' AND substr(upper(cname), 1, 1) <= 'Z')"
    else:
        # BUGFIX: bind the user-supplied letter as a SQL parameter instead
        # of concatenating it into the statement text (SQL injection).
        sql = sql + "AND substr(upper(cname), 1, 1) = ?"
        params = (id, )
    rows = sqlRun(sql, params) if params else sqlRun(sql)
    for row in rows:
        l.append([row[0], row[1]])
    return json.dumps({"aaData": l} )
def getFile(file_in, override=0):
    """Fetch *file_in* via HTTP with If-Modified-Since/ETag caching.

    Returns the gunzipped body, or '' on any failure (including 304).
    """
    # NOTE(review): the URL is %-formatted into the SQL text here and in the
    # UPDATE below -- a quote in the URL breaks the statement; should be
    # bound as a parameter like the other values.
    rows = sqlRun("SELECT * FROM caching WHERE url='%s'" % file_in)
    lastmod = ""
    etag = ""
    out = ""
    if rows:
        lastmod = rows[0][2]
        etag = rows[0][3]
    try:
        #print lastmod, etag
        httplib.HTTPConnection.debuglevel = 1
        request = urllib2.Request(file_in)
        request.add_header('User-Agent', 'tvstreamrecord/' + version)
        if override == 0:
            # Conditional GET: the server answers 304 when nothing changed.
            request.add_header('If-Modified-Since', lastmod)
            request.add_header('If-None-Match', etag)
        opener = urllib2.build_opener()
        response = opener.open(request)
        feeddata = response.read()
        if rows:
            sqlRun(
                "UPDATE caching SET crTime=datetime('now', 'localtime'), Last_Modified=?, ETag=? WHERE url='%s'" % file_in,
                (response.info().getheader('Last-Modified'),
                 response.info().getheader('ETag')))
        else:
            sqlRun(
                "INSERT INTO caching VALUES (datetime('now', 'localtime'), ?, ?, ?)",
                (file_in, response.info().getheader('Last-Modified'),
                 response.info().getheader('ETag')))
        # 16+MAX_WBITS tells zlib to expect a gzip header.
        d = zlib.decompressobj(16 + zlib.MAX_WBITS)
        out = d.decompress(feeddata)
        print "XMLTV: reading URL %s" % file_in
    except:
        print "XMLTV: no new data, try again later"
        pass
    return out
def records_p():
    """Purge expired records (what == -2), delete one record (what == -1),
    or toggle a record's enabled flag; then resync the record threads."""
    what = request.forms.get("what")
    myid = request.forms.get("myid")
    if what == "-2":
        # Remove everything already finished, keeping recurrent records.
        sqlRun("DELETE FROM records WHERE datetime(rbis)<datetime('now', 'localtime') AND NOT rmask>0")
    # NOTE(review): when what == "-2" the branch below still runs the UPDATE
    # with renabled=-2 -- behavior preserved here, but confirm it is intended.
    if what == "-1":
        sqlRun("DELETE FROM records WHERE records.rowid=?", (myid, ))
    else:
        sqlRun("UPDATE records SET renabled=? WHERE rowid=?", (what, myid))
    setRecords()
    return "null"
def getrecordlist():
    """Return all records as DataTables JSON with DD.MM.YYYY HH:MM times and
    a weekday-mask column rendered as text."""
    # Adjacent string literals concatenate to the exact same SQL the
    # original built with "%"-piece concatenation (the %d/%m/... escapes are
    # strftime patterns for SQLite, not Python format codes).
    rows = sqlRun(
        "SELECT recname, cname, strftime('"
        "%" "d." "%" "m." "%" "Y " "%" "H:" "%" "M', rvon), strftime('"
        "%" "d." "%" "m." "%" "Y " "%" "H:" "%" "M', rbis), rmask, renabled, "
        "100*(strftime('%s','now', 'localtime')-strftime('%s',rvon)) / (strftime('%s',rbis)-strftime('%s',rvon)), "
        "records.rowid, rvon, rbis, channels.cid "
        "FROM channels, records where channels.cid=records.cid ORDER BY rvon"
    )
    result = []
    for row in rows:
        if row[4] == 0:
            rec = "no"
        else:
            # Concatenate the short names of every weekday enabled in the mask.
            rec = "".join(weekdays[i] for i, on in enumerate(getWeekdays(row[4])) if on)
        link = '<a href="live/' + str(row[10]) + '.m3u">' + row[1] + "</a>"
        result.append([row[0], link, row[2], row[3], rec, row[5], row[6], row[7], row[8], row[9]])
    return json.dumps({"aaData": result})