def AddHistory(user: User, taskid, tasktype, epgid, beginn, sender, titel,
               genre, previewimagelink, resolution, sourcefile, ip, platform,
               browser, version, language):
    """Create a new history entry for *user* and persist it.

    The entry is keyed by the user's RowKey (partition) and the task id
    (row), starts in status 'new', and gets identical created/updated
    timestamps.
    """
    entry = History(PartitionKey=user.RowKey, RowKey=str(taskid))
    # copy all task/request metadata onto the entity in one sweep
    values = {
        'taskid': taskid,
        'tasktype': tasktype,
        'epgid': int(epgid),
        'sourcefile': sourcefile,
        'beginn': beginn,
        'sender': sender,
        'titel': titel,
        'genre': genre,
        'previewimagelink': previewimagelink,
        'resolution': resolution,
        'ip': ip,
        'platform': platform,
        'browser': browser,
        'version': version,
        'language': language,
    }
    for name, value in values.items():
        setattr(entry, name, value)
    entry.status = 'new'
    now = datetime.now()
    entry.created = now
    entry.updated = now
    db.insert(entry)
def post(self):
    """Handle a login POST request.

    Maps incoming payload keys to model attribute names, ensures the
    partition key is the application client id, creates the user record
    on first login, stores an auth token in the session and returns the
    augmented payload with HTTP status 200.
    """
    data = request.json
    # translate request field names to the model attribute names
    for key, value in login.items():
        if key in data:
            data[value.attribute] = data.pop(key)
    # The original nested conditional assigned this same constant on
    # every branch; the partition key is server-controlled, so set it
    # unconditionally (also guarantees User(**data) gets the key).
    data['PartitionKey'] = config['APPLICATION_CLIENT_ID']
    # retrieve user info
    loginuser = db.get(User(**data))
    # first login? create and persist a fresh user record
    if not db.exists(loginuser):
        loginuser.created = datetime.now()
        db.insert(loginuser)
    # login user and issue session token
    g.user = loginuser
    token = generate_auth_token(loginuser)
    session['authtoken'] = token
    # prepare return dict
    data['loggedin'] = True
    data['timeout'] = 600
    log.debug(session)
    return data, 200
def wrapper(*args, **kwargs):
    """Resolve the requesting device's user (auto-registering unknown
    devices), then invoke the wrapped view — or redirect anonymous
    requests to the index page when `backtoindex` is set."""
    has_identity = g.deviceuuid != '' and g.clientid != ''
    if not has_identity:
        g.user = None
    else:
        candidate = db.get(
            User(PartitionKey=g.clientid, RowKey=g.deviceuuid))
        # unknown device -> create the user record on the fly
        if not db.exists(candidate):
            candidate.created = datetime.now()
            db.insert(candidate)
        g.user = candidate
    if backtoindex and not g.user:
        # anonymous access not allowed for this view
        return redirect(url_for('ui.index', messageid=4))
    return func(*args, **kwargs)
def import_otrgenres(config, log) -> StorageTableCollection:
    """Import the OTR genre list into the 'genres' storage table.

    Downloads genres.csv from onlinetvrecorder.com only when the table
    is still empty; otherwise the existing collection is returned.

    Returns:
        StorageTableCollection: the genres collection (re-queried after
        a fresh import).
    """
    log.debug('try to import genres...')
    db.register_model(Genre())
    genres = StorageTableCollection('genres', "PartitionKey eq 'all'")
    genres = db.query(genres)
    if len(genres) == 0:
        # table empty: fetch the csv once and cache it on disk
        if not os.path.exists('genre.csv'):
            with urllib.request.urlopen(
                    'https://www.onlinetvrecorder.com/epg/genres.csv'
            ) as response:
                genrecsv = response.read()
            with open('genre.csv', 'wb') as ofile:
                ofile.write(genrecsv)
        with open('genre.csv', 'r') as csvfile:
            reader = csv.DictReader(csvfile, dialect='excel', delimiter=';')
            # iterate the reader directly — no need to materialize rows
            for row in reader:
                db.insert(Genre(Genre_Id=row['Nummer'],
                                Genre=row['Kategorie']))
        os.remove('genre.csv')
        log.info('genres successfully imported')
        genres = db.query(genres)
    else:
        log.info('genres already imported')
    return genres
def import_otrepg(date, genres: StorageTableCollection, config, log):
    """Import the OTR EPG for one day into the recordings table.

    date:   the day to import; its '%Y_%m_%d' form is the partition key.
    genres: genres collection (kept for interface compatibility).

    1) skip the import when the partition already holds entries
    2) otherwise download the day's epg csv from OTR (if not cached)
       and insert every row as a Recording entity.
    """
    PartitionKey = date.strftime('%Y_%m_%d')
    csvfile = 'epg_' + PartitionKey + '.csv'
    log.debug('try to import epg csvfile: {!s} ...'.format(csvfile))
    db.register_model(Recording())
    if db.table_isempty(Recording._tablename, PartitionKey):
        try:
            if not os.path.exists(csvfile):
                with urllib.request.urlopen(
                        'https://www.onlinetvrecorder.com/epg/csv/' +
                        csvfile) as response:
                    epgcsv = response.read()
                with open(csvfile, 'wb') as ofile:
                    ofile.write(epgcsv)
                log.info(
                    'download epg file successfull: {}'.format(csvfile))
            else:
                log.info('epg already downloaded: {}'.format(csvfile))
        except Exception:
            log.exception('Download egp csv:')
            return
        try:
            with open(csvfile, 'r', encoding='utf8',
                      errors='ignore') as epgcsv:
                reader = csv.DictReader(epgcsv, dialect='excel',
                                        delimiter=';')
                for row in reader:
                    row['PartitionKey'] = PartitionKey
                    row['RowKey'] = row['Id']
                    # Bugfix: the genre must be fetched from the table —
                    # the original constructed a Genre() and tested its
                    # truthiness, which never queried the database.
                    genre = db.get(
                        Genre(PartitionKey='all',
                              RowKey=str(row['genre_id'])))
                    if not genre:
                        row['genre'] = 'Sonstiges'
                    else:
                        row['genre'] = genre.Genre
                    db.insert(Recording(**row))
            os.remove(csvfile)
        except Exception:
            log.exception('Import epg csv data:')
            return
        log.info('import epg file successfull: {}'.format(csvfile))
    else:
        log.info('epg csv file {} already imported.'.format(csvfile))
def update_torrents(startdate: date, config, log):
    """Fetch all torrents recorded since *startdate* from the OTR tracker
    page (https://www.onlinetvrecorder.com/v2/?go=tracker&search=&order=ctime%20DESC&start=0,
    paged in steps of 50) and attach them to the matching 'top' recordings.
    """
    log.debug('try to update torrents webcontent...')
    stopflag = False
    start = 0
    torrentlist = []
    db.register_model(Torrent())
    while not stopflag:
        """ download webcontent into content """
        with urllib.request.urlopen(
                'https://www.onlinetvrecorder.com/v2/?go=tracker&search=&order=ctime%20DESC&start='
                + str(start)) as response:
            content = response.read()
        # split the tracker HTML table into one chunk per <tr> row;
        # first and last chunks are table scaffolding, hence range(1, n-1)
        content = str(content.decode('utf-8', 'ignore')).split(
            ' class="bordertable">')[1].split('</table>')[0].split('</tr>')
        for index in range(1, len(content) - 1):
            lines = content[index].split('</td>')
            """ parse data from entry """
            torrentlink = lines[1].split("href='")[1].split("'")[0]
            torrentfile = lines[1].split(torrentlink +
                                         "'>")[1].split('</a>')[0]
            finished = safe_cast(lines[2].split('>')[1].split('</td>')[0],
                                 int, 0)
            loading = safe_cast(lines[3].split('>')[1].split('</td>')[0],
                                int, 0)
            loaded = safe_cast(lines[4].split('>')[1].split('</td>')[0],
                               int, 0)
            # recording start time and station are encoded in the filename
            fileparts = torrentfile.split(' ')
            beginn = safe_cast(
                fileparts[len(fileparts) - 4] + ' ' +
                fileparts[len(fileparts) - 3] + '-00', datetime, None,
                '%y.%m.%d %H-%M-%S')
            sender = fileparts[len(fileparts) - 2]
            # NOTE(review): if safe_cast fails, beginn is None and
            # beginn.date() raises AttributeError — TODO confirm filenames
            # always parse.
            if beginn.date() >= startdate:
                """ update list """
                torrent = {}
                torrent['TorrentLink'] = torrentlink
                torrent['TorrentFile'] = torrentfile
                torrent['finished'] = finished
                torrent['loading'] = loading
                torrent['loaded'] = loaded
                torrent['beginn'] = beginn
                torrent['sender'] = sender.replace(' ', '').lower()
                # derive the resolution label from the link's suffix
                resolution = ''
                resolution = torrentlink.split('TVOON_DE')[1].split(
                    'otrkey.torrent')[0]
                if resolution == ('.mpg.HD.avi.'):
                    """ TVOON_DE.mpg.HD.avi.otrkey.torrent"""
                    resolution = 'HD'
                elif resolution == ('.mpg.HQ.avi.'):
                    """ _TVOON_DE.mpg.HQ.avi.otrkey.torrent"""
                    resolution = 'HQ'
                elif resolution == ('.mpg.avi.'):
                    """ DIVX _TVOON_DE.mpg.avi.otrkey.torrent """
                    resolution = 'DIVX'
                elif resolution == ('.mpg.mp4.'):
                    """ MP4 0_TVOON_DE.mpg.mp4.otrkey.torrent """
                    resolution = 'MP4'
                elif resolution == ('.mpg.HD.ac3.'):
                    """ f1_130_TVOON_DE.mpg.HD.ac3.otrkey.torrent """
                    resolution = 'HD.AC3'
                else:
                    resolution = 'AVI'
                torrent['Resolution'] = resolution
                torrentlist.append(torrent)
            else:
                # page is ordered by ctime DESC, so the first entry older
                # than startdate ends the whole scan
                stopflag = True
                break
        start = start + 50
    log.info('{!s} torrents successfully retrieved...'.format(
        len(torrentlist)))
    """ retrieve epg id from top recordings """
    tops = StorageTableCollection('recordings', "PartitionKey eq 'top'")
    tops = db.query(tops)
    for top in tops:
        # match torrents to this top recording by start time and station
        torrents = [
            item for item in torrentlist
            if item['beginn'].strftime('%y.%m.%d %H-%M-%S') ==
            top.beginn.strftime('%y.%m.%d %H-%M-%S')
            and item['sender'] == top.sender.replace(' ', '').lower()
        ]
        log.debug('filterded {!s} torrents for top recording {}'.format(
            len(torrents), top.titel))
        if len(torrents) >= 1:
            """ Torrent Count """
            topItem = Recording(**top)
            topItem.torrentCount = len(torrents)
            db.insert(topItem)
            """ Insert Torrent """
            for torrent in torrents:
                db.insert(Torrent(Id=top.Id, **torrent))
        else:
            # NOTE(review): 'torrent' is not bound on this branch — it
            # holds the stale value from a previous loop iteration (or is
            # undefined on the first pass, raising NameError). The intended
            # entity to delete needs to be confirmed and fixed.
            db.delete(Torrent(Id=top.Id, **torrent))
            db.delete(Recording(**top))
def _parse_toplist_entry(lines, img_idx, date_idx, rating_idx, option, log):
    """Parse one toplist table row into its recording attributes.

    The OTR page ships two slightly different row layouts; the column
    offsets for preview image, date and rating differ between them.

    Returns (epg_id, previewimagelink, primarykey, rating) on success,
    or None when *lines* does not match this layout (the mismatch is
    logged with the attempted option number).
    """
    try:
        epg_id = lines[1].split(',')[0]
        previewimagelink = lines[img_idx].split('<img src=')[1].split(
            ' width=')[0]
        # recording date (dd.mm.yy) becomes the partition key (%Y_%m_%d)
        primarykey = datetime.strptime(
            lines[date_idx].split('>')[1].split('<')[0],
            '%d.%m.%y').date().strftime('%Y_%m_%d')
        rating = lines[rating_idx].split('Beliebtheit: ')[1].split("'")[0]
        log.debug('parsed recording: {} with rating: {} and preview = {}'.
                  format(epg_id, rating, previewimagelink))
        return epg_id, previewimagelink, primarykey, rating
    except Exception as e:
        log.error('parsing option #{} not possible {!s} due to {!s}'.format(
            option, lines, e))
        return None


def update_toprecordings(config, log):
    """Fetch the top rated OTR recordings
    (https://www.onlinetvrecorder.com/v2/?go=list&tab=toplist&tlview=all&listid=104&start=0,
    paged in steps of 20) and move highly rated recordings into the
    'top' partition. Scanning stops at the first recording whose rating
    drops below 'hoch'.
    """
    log.debug('try to update toprecordings webcontent...')
    stopflag = False
    start = 0
    db.register_model(Recording())
    while not stopflag:
        # download webcontent into content
        with urllib.request.urlopen(
                'https://www.onlinetvrecorder.com/v2/?go=list&tab=toplist&tlview=all&listid=104&start='
                + str(start)) as response:
            content = response.read()
        # one chunk per search-result row
        entries = str(content.decode('utf-8',
                                     'ignore')).split("<tr id='serchrow")
        for index in range(1, len(entries)):
            lines = entries[index].split(
                '<td oncontextmenu="showNewTabMenu(')
            # try layout #1 first, fall back to layout #2
            parsed = _parse_toplist_entry(lines, 10, 4, 8, 1, log)
            if parsed is None:
                parsed = _parse_toplist_entry(lines, 8, 2, 6, 2, log)
            # save top records to storage
            if parsed:
                epg_id, previewimagelink, primarykey, rating = parsed
                top = db.get(
                    Recording(PartitionKey=primarykey, RowKey=epg_id))
                if top is not None:
                    top.rating = rating
                    top.previewimagelink = previewimagelink.replace(
                        'http://', 'https://')
                    top.PartitionKey = 'top'
                    db.insert(top)
                    # below 'hoch' the (sorted) list holds nothing better
                    if top.rating not in ['sehr hoch', 'hoch']:
                        stopflag = True
                    log.info(
                        'recording {} moved or is already moved successfully ({}, {!s}, at {})'
                        .format(epg_id, top.titel, top.beginn, top.sender))
                else:
                    log.info(
                        'epg not found: {} with rating: {} and preview = {}'.
                        format(epg_id, rating, previewimagelink))
        start = start + 20
    log.info('toprecordings successfully retrieved!')
def settings():
    """Render and process the user settings form.

    On submit, changed form values are synchronized into the user
    profile; any change to an FTP-related field invalidates the verified
    connection flag and triggers a fresh connection test. On plain GET
    the form is prefilled from the profile, with stored passwords masked
    as '*****'.

    Returns the rendered platform-specific settings template.
    """
    form = Settings()
    # fields copied 1:1; the second group additionally invalidates the
    # verified ftp connection when changed
    plain_fields = ('PushVideo', 'OtrUser', 'UseCutlist', 'EMailEndpoint')
    ftp_fields = ('Protocol', 'Server', 'Port', 'FtpUser', 'ServerPath')
    if form.validate_on_submit() or form.is_submitted():
        """ save form data to userprofile """
        for name in plain_fields:
            value = getattr(form, name).data
            if getattr(g.user, name) != value:
                setattr(g.user, name, value)
        for name in ftp_fields:
            value = getattr(form, name).data
            if getattr(g.user, name) != value:
                setattr(g.user, name, value)
                g.user.FtpConnectionChecked = False
        # passwords: '*****' is the mask meaning 'unchanged'
        if (form.OtrPassword.data not in [
                '*****'
        ]) and (g.user.OtrPassword != form.OtrPassword.data):
            g.user.OtrPassword = form.OtrPassword.data
        if (form.FtpPassword.data not in [
                '*****'
        ]) and (g.user.FtpPassword != form.FtpPassword.data):
            g.user.FtpPassword = form.FtpPassword.data
            g.user.FtpConnectionChecked = False
        """ check ftp Connection """
        if not g.user.FtpConnectionChecked:
            g.user.FtpConnectionChecked, validftpmessage = test_ftpconnection(
                g.user.Server, g.user.Port, g.user.FtpUser,
                g.user.FtpPassword, g.user.ServerPath)
            if not g.user.FtpConnectionChecked:
                log.error(validftpmessage)
        """ check otr credentials """
        if not g.user.OtrCredentialsChecked:
            # not implemented yet
            pass
        """ update user """
        g.user.updated = datetime.now()
        db.insert(g.user)
    else:
        # prefill the form from the stored profile
        for name in plain_fields + ftp_fields:
            getattr(form, name).data = getattr(g.user, name)
        form.OtrPassword.data = '*****' if g.user.OtrPassword != '' else ''
        form.FtpPassword.data = '*****' if g.user.FtpPassword != '' else ''
    """ return """
    pathtemplate = g.platform + '/' + 'settings.html'
    return render_template(pathtemplate,
                           title='Einstellungen',
                           pagetitle='settings',
                           form=form,
                           User=g.user,
                           message=g.message)
def do_pushtorrent_queue_message(config, log):
    """ retrieve and process all visible push queue messages
    - if link is local check if file exist then push to ftp endpoint
    - if link is url then download torrentfile and push to endpoint
    """
    queue.register_model(PushMessage())
    db.register_model(History())
    # messages reappear after 1s in dev/test, after 5 minutes in production
    if config['APPLICATION_ENVIRONMENT'] in ['Development', 'Test']:
        queuehide = 1
    else:
        queuehide = 5 * 60
    """ loop all push queue messages """
    message = queue.get(PushMessage(), queuehide)
    while not message is None:
        """ get history entry for message for an status update """
        historylist = StorageTableCollection(
            'history', "RowKey eq '" + message.id + "'")
        historylist = db.query(historylist)
        # the filter matches at most one history row per message id
        for item in historylist:
            history = db.get(
                History(PartitionKey=item.PartitionKey, RowKey=message.id))
        # NOTE(review): if historylist is empty, 'history' is unbound here
        # and this raises NameError — TODO confirm a history row always
        # exists or pre-initialize history = None.
        if not history:
            # no stored entry yet: start a fresh one under 'torrent'
            history = History(PartitionKey='torrent', RowKey=message.id)
            history.created = datetime.now()
            history.epgid = message.epgid
            history.sourcefile = message.sourcefile
        if message.sourcelink in ['', 'string']:
            """ no sourcelink ?
            """
            """ delete queue message and tmp file """
            queue.delete(message)
        else:
            """ push torrentfile
            ---------------------------------------------------------------------
            1) download torrent to local tmp folder
            2) pushfile to ftp
            3) delete torrent from local tmp folder
            4) delete queue message
            """
            """ 1) download torrent to local tmp folder """
            filename, localfile = get_torrentfile(
                message.sourcelink, config['APPLICATION_PATH_TMP'])
            if (not filename is None) and (not localfile is None):
                downloaded, errormessage = download_fromurl(
                    message.sourcelink, localfile)
                if downloaded:
                    """ 2) pushfile to ftp """
                    uploaded, errormessage = ftp_upload_file2(
                        log, message.server, message.port, message.user,
                        message.password, message.destpath, filename,
                        localfile)
                    if uploaded:
                        """ 3) delete torrent from local tmp folder,
                            4) delete queue message """
                        queue.delete(message)
                        if os.path.exists(localfile):
                            os.remove(localfile)
                        history.status = 'finished'
            # errormessage is None on full success; otherwise it carries
            # the download or upload failure reason.
            # NOTE(review): when get_torrentfile returned (None, None),
            # 'errormessage' and 'localfile' are unbound on this path —
            # TODO confirm get_torrentfile never returns None here.
            if not errormessage is None:
                """ delete message after 3 tries """
                log.error('push failed because {}'.format(errormessage))
                history.status = 'error'
                if (config['APPLICATION_ENVIRONMENT'] ==
                        'Production') and (message.dequeue_count >= 3):
                    queue.delete(message)
                    history.status = 'deleted'
                    if os.path.exists(localfile):
                        os.remove(localfile)
        """ update history entry """
        history.updated = datetime.now()
        db.insert(history)
        """ next message """
        message = queue.get(PushMessage(), queuehide)
    pass
def do_pushvideo_queue_message(config, log):
    """ retrieve and process all visible download queue messages
    - if link is url then download torrentfile
    ---------------------------------------------------------------------
    1) if videofile is in place:
        1a) push video to ftp
        1b) delete videofile, otrkeyfile, torrentfile
        1c) delete queue message
    2) OR if otrkeyfile is in place
        2a) init decodingprocess to videofile
    3) ELSE if transmission job is not running
        3a) add transmission torrent
    """
    queue.register_model(PushVideoMessage())
    db.register_model(History())
    # messages reappear after 60s in dev/test, after 5 minutes in production
    if config['APPLICATION_ENVIRONMENT'] in ['Development', 'Test']:
        queuehide = 60
    else:
        queuehide = 5 * 60
    """ housekeeping array of files and transmission-queues to be deleted """
    houskeeping = []
    housekeepingTransmission = []
    """ get transmission status """
    transmissionstatus = get_transmissionstatus(log)
    """ loop all push queue messages """
    message = queue.get(PushVideoMessage(), queuehide)
    while not message is None:
        """ get history entry for message for an status update """
        historylist = StorageTableCollection(
            'history', "RowKey eq '" + message.id + "'")
        historylist = db.query(historylist)
        # the filter matches at most one history row per message id
        for item in historylist:
            history = db.get(
                History(PartitionKey=item.PartitionKey, RowKey=message.id))
        # NOTE(review): if historylist is empty, 'history' is unbound here
        # (NameError) — TODO confirm a history row always exists.
        if not history:
            # no stored entry: start fresh under 'video'; no user known yet
            history = History(PartitionKey='video', RowKey=message.id)
            history.created = datetime.now()
            history.epgid = message.epgid
            history.sourcefile = message.videofile
            user = None
        else:
            """ get user """
            # the history PartitionKey is presumably the user's RowKey —
            # verify against AddHistory, which keys history by user.RowKey
            user = None
            userlist = db.query(
                StorageTableCollection(
                    'userprofile',
                    "RowKey eq '" + history.PartitionKey + "'"))
            for item in userlist:
                user = db.get(
                    User(PartitionKey=item.PartitionKey,
                         RowKey=history.PartitionKey))
        """ get single transmission download status """
        downloadstatus = [
            element for element in transmissionstatus
            if element['Name'] == message.otrkeyfile
        ]
        if downloadstatus != []:
            downloadstatus = downloadstatus[0]
        else:
            downloadstatus = None
        if message.sourcelink in ['', 'string']:
            """ no sourcelink ? """
            """ delete queue message and tmp file """
            queue.delete(message)
            history.status = 'deleted'
        else:
            """ process push video """
            try:
                localvideofile = os.path.join(
                    config['APPLICATION_PATH_VIDEOS'], message.videofile)
                localotrkeyfile = os.path.join(
                    config['APPLICATION_PATH_OTRKEYS'], message.otrkeyfile)
                message.sourcefile, localtorrentfile = get_torrentfile(
                    message.sourcelink, config['APPLICATION_PATH_TORRENTS'])
                if os.path.exists(localvideofile):
                    """ 1) videofile is in place:
                        1a) push video to ftp
                        1b) delete videofile, otrkeyfile, torrentfile
                        1c) delete queue message
                    """
                    """ 1a) push video to ftp """
                    uploaded, errormessage = ftp_upload_file2(
                        log, message.server, message.port, message.user,
                        message.password, message.destpath,
                        message.videofile, localvideofile)
                    if uploaded:
                        """ 1b) delete videofile, otrkeyfile, torrentfile """
                        # deletion is deferred to the housekeeping pass below
                        houskeeping.append(localvideofile)
                        houskeeping.append(localotrkeyfile)
                        houskeeping.append(localtorrentfile)
                        """ 1c) delete queue message """
                        queue.delete(message)
                        log.info(
                            'push video queue message {!s} for {!s} successfully processed!'
                            .format(message.id, message.videofile))
                        history.status = 'finished'
                    else:
                        raise Exception(
                            'push failed because {}'.format(errormessage))
                elif os.path.exists(localotrkeyfile):
                    """ 2) OR if otrkeyfile is in place
                        2a) init decodingprocess to videofile
                        2b) delete transmission queue
                    """
                    """ 2a) init decodingprocess to videofile """
                    if message.usecutlist:
                        localcutlistfile = get_cutlist(
                            message.otrkeyfile, message.videofile,
                            config['APPLICATION_PATH_TMP'], log)
                    else:
                        localcutlistfile = None
                    decoded, errormessage = decode(
                        log, message.otruser, message.otrpassword,
                        message.usecutlist, localotrkeyfile,
                        config['APPLICATION_PATH_VIDEOS'], localcutlistfile)
                    if decoded == 0:
                        """ successfully decoded """
                        houskeeping.append(localcutlistfile)
                        if not downloadstatus is None:
                            housekeepingTransmission.append(downloadstatus)
                        log.info(
                            'decoding otrkeyfile {!s} successfully processed!'
                            .format(message.otrkeyfile))
                        history.status = 'decoded'
                        # force a fresh ftp check before the next push
                        if not user is None:
                            user.FtpConnectionChecked = False
                            db.insert(user)
                    elif decoded == 255:
                        """ otr credentials not worked """
                        if not user is None:
                            user.OtrCredentialsChecked = False
                            db.insert(user)
                        raise Exception(errormessage)
                    else:
                        """ other error """
                        raise Exception(errormessage)
                else:
                    """ 3) ELSE if transmission job is not running
                        3a) add transmission torrent
                    """
                    if not downloadstatus is None:
                        # torrent is already downloading: record progress
                        history.status = downloadstatus[
                            'Status'] + ' ' + downloadstatus[
                                'Done'] + ' (ETA ' + downloadstatus[
                                    'ETA'] + ')'
                        log.info('otrkeyfile {!s} {}'.format(
                            message.otrkeyfile, history.status))
                    else:
                        """ 3a) add transmission torrent """
                        if os.path.exists(localtorrentfile):
                            downloaded = True
                        else:
                            downloaded, errormessage = download_fromurl(
                                message.sourcelink, localtorrentfile)
                        if downloaded:
                            log.info(
                                'downloading torrentfile {!s} successfully initiated!'
                                .format(message.sourcefile))
                            history.status = 'download started'
                        else:
                            raise Exception(errormessage)
            except Exception as e:
                # decode() failures surface as CalledProcessError with
                # captured stdout/stderr
                if isinstance(e, subprocess.CalledProcessError):
                    errormessage = 'cmd {!s} failed because {!s}, {!s}'.format(
                        e.cmd, e.stderr, e.stdout)
                else:
                    errormessage = e
                log.exception(
                    'push video failed because {!s}'.format(errormessage))
                history.status = 'error'
                """ delete message after 3 tries """
                if (config['APPLICATION_ENVIRONMENT'] ==
                        'Production') and (message.dequeue_count >= 3):
                    queue.delete(message)
                    history.status = 'deleted'
        """ update history entry """
        history.updated = datetime.now()
        db.insert(history)
        """ next message """
        message = queue.get(PushVideoMessage(), queuehide)
    """ housekeeping temporary files """
    for file in houskeeping:
        if not file is None:
            if os.path.exists(file):
                os.remove(file)
    """ houskeeping torrent queue """
    # NOTE(review): shell=True with a string built from transmission output;
    # IDs are presumably numeric — confirm get_transmissionstatus sanitizes.
    for torrentsinglestate in housekeepingTransmission:
        call = 'transmission-remote -t ' + torrentsinglestate['ID'] + ' -r'
        process = subprocess.run(call,
                                 shell=True,
                                 check=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        log.debug('{} finished with {}'.format(
            call, process.stdout.decode(encoding='utf-8')))
    for torrentsinglestate in transmissionstatus:
        """ restart queue entries """
        if torrentsinglestate['Status'] == 'Stopped':
            call = 'transmission-remote -t ' + torrentsinglestate[
                'ID'] + ' -s'
            process = subprocess.run(call,
                                     shell=True,
                                     check=True,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
            log.debug('{} finished with {}'.format(
                call, process.stdout.decode(encoding='utf-8')))
    pass